hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3301f1363e782e5509c28cf2cb73fd5c67b65ec9 | 550 | py | Python | src/second/B.py | DPS0340/kpu-oj-2020 | 2f4cd86402688e46a41124614534181f966e8f66 | [
"MIT"
] | null | null | null | src/second/B.py | DPS0340/kpu-oj-2020 | 2f4cd86402688e46a41124614534181f966e8f66 | [
"MIT"
] | null | null | null | src/second/B.py | DPS0340/kpu-oj-2020 | 2f4cd86402688e46a41124614534181f966e8f66 | [
"MIT"
] | null | null | null | def is_palin(w):
mid = len(w)//2
if len(w) % 2 == 1:
return w[:mid] == w[mid+1:][::-1]
else:
        return w[:mid] == w[mid:][::-1]
def add(w1, w2):
res = []
x = 0
cnt = 0
    for i in reversed(range(len(w1))):
x += (ord(w1[i]) + ord(w2[i]) - ord('a') * 2) * (26 ** cnt)
cnt += 1
while x:
res.append(chr(x % 26 + ord('a')))
x //= 26
return ''.join(res[::-1])
word = input()
rev = word[::-1]
while not is_palin(word):
word = add(word, rev)
rev = word[::-1]
print(word)
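# --- Notes (added): a short trace of the approach above, assuming the usual
# problem statement in which letters act as base-26 digits with 'a' = 0.
# is_palin compares the first half of the word against the reversed second
# half; add() interprets both words as base-26 numbers, sums them, and
# converts the total back to letters. For example:
#   add("ab", "ba") -> values 1 + 26 = 27 -> "bb", which is a palindrome.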
| 19.642857 | 67 | 0.429091 | 91 | 550 | 2.571429 | 0.373626 | 0.08547 | 0.042735 | 0.094017 | 0.128205 | 0.128205 | 0 | 0 | 0 | 0 | 0 | 0.067024 | 0.321818 | 550 | 27 | 68 | 20.37037 | 0.560322 | 0 | 0 | 0.086957 | 0 | 0 | 0.003636 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086957 | false | 0 | 0 | 0 | 0.217391 | 0.043478 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33041e93f47ec14a94d4ce42881d008c428227ca | 8,087 | py | Python | vvlab/agents/DDPG_base.py | yuhuihan/Reinforcement-Learning | 02bba66885bcd9cd9e13881c573ba5778cc3b93d | [
"MIT"
] | null | null | null | vvlab/agents/DDPG_base.py | yuhuihan/Reinforcement-Learning | 02bba66885bcd9cd9e13881c573ba5778cc3b93d | [
"MIT"
] | null | null | null | vvlab/agents/DDPG_base.py | yuhuihan/Reinforcement-Learning | 02bba66885bcd9cd9e13881c573ba5778cc3b93d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
"""
@author: Jiawei Wu
@create time: 2019-12-04 10:36
@edit time: 2020-04-07 19:55
@FilePath: /vvlab/agents/DDPG_base.py
"""
import numpy as np
import os
import logging
import torch.nn as nn
import torch
from torch.utils.tensorboard import SummaryWriter
from ..utils import CUDA, OUProcess, ReplayBuffer
from ..utils.update import soft_update
import warnings
class DDPGBase(object):
def __init__(self, n_states, n_actions, action_bound=1, buff_size=1000, buff_thres=0, batch_size=32,
lr_a=0.001, lr_c=0.002, tau=0.01, gamma=0.9,
summary=False, *args, **kwargs):
        # Backwards-compatible parameters
        # TODO: drop these deprecated aliases in 0.3.0
if 'bound' in kwargs:
warnings.warn("'bound' is deprecated and will remove after 0.3.0. "
"Use 'action_bound' instead.",
DeprecationWarning, stacklevel=2)
action_bound = kwargs['bound']
self.bound = action_bound
if 'exp_size' in kwargs:
warnings.warn("'exp_size' is deprecated and will remove after 0.3.0. "
"Use 'buff_size' instead.",
DeprecationWarning, stacklevel=2)
buff_size = kwargs['exp_size']
self.exp_size = buff_size
if 'exp_thres' in kwargs:
warnings.warn("'exp_thres' is deprecated and will remove after 0.3.0. "
"Use 'buff_thres' instead.",
DeprecationWarning, stacklevel=2)
buff_thres = kwargs['exp_thres']
self.exp_thres = buff_thres
        # Copy parameters onto the instance
self.n_states, self.n_actions, self.action_bound = n_states, n_actions, action_bound
self.buff_size, self.buff_thres, self.batch_size = buff_size, buff_thres, batch_size
self.lr_a, self.lr_c, self.tau, self.gamma = lr_a, lr_c, tau, gamma
self.summary = summary
self.kwargs = kwargs
        # Initialize episode and step counters
self.episode, self.step = 0, 0
        # Let subclasses override parameters
self._param_override()
        # Create the experience replay buffer
self.buff = ReplayBuffer(self.n_states, self.n_actions, buff_size=self.buff_size, buff_thres=self.buff_thres)
        # Build the neural networks
self._build_net()
        # Set up the optimizers
self.actor_optim = torch.optim.Adam(self.actor_eval.parameters(), lr=self.lr_a)
self.critic_optim = torch.optim.Adam(self.critic_eval.parameters(), lr=self.lr_c)
        # Loss function
self.mse_loss = nn.MSELoss()
        # Build the exploration-noise generator
self._build_noise()
        # Build the summary writer
self._build_summary_writer()
        # Move networks to CUDA if available
if CUDA:
self.cuda()
def _param_override(self):
"""覆盖参数
提供子类简单覆写基类参数的方法
例如:修改summary是否开启
应当谨慎使用这个方法
"""
pass
def _build_net(self):
raise TypeError("网络构建函数未被实现")
def _build_noise(self, *args):
raise TypeError("噪声发生器构建函数未被实现")
def _build_summary_writer(self):
"""构建summary_writer
如果指定了不需要summary_writer,会将其置为None
如果指定了保存路径就使用保存路径,否则使用默认路径
"""
if self.summary:
if 'summary_path' in self.kwargs:
                self.summary_writer = SummaryWriter(log_dir=self.kwargs['summary_path'])
else:
self.summary_writer = SummaryWriter()
else:
self.summary_writer = None
def get_summary_writer(self):
return self.summary_writer
def _get_action(self, s):
"""给定当前状态,获取选择的动作"""
s = torch.unsqueeze(torch.FloatTensor(s), 0)
action = self.actor_eval.forward(s).detach().cpu().numpy()
return action
def get_action(self, s):
return self._get_action(s)
def _save(self, save_path, append_dict={}):
"""保存当前模型的网络参数
@param save_path: 模型的保存位置
@param append_dict: 除了网络模型之外需要保存的内容
"""
states = {
'actor_eval_net': self.actor_eval.state_dict(),
'actor_target_net': self.actor_target.state_dict(),
'critic_eval_net': self.critic_eval.state_dict(),
'critic_target_net': self.critic_target.state_dict(),
}
states.update(append_dict)
torch.save(states, save_path)
def save(self, episode=None, save_path='./cur_model.pth'):
"""保存的默认实现
@param episode: 当前的episode
@param save_path: 模型的保存位置,默认是'./cur_model.pth'
"""
append_dict = {
'episode': self.episode if episode is None else episode,
'step': self.step
}
self._save(save_path, append_dict)
def _load(self, save_path):
"""加载模型参数
@param save_path: 模型的保存位置
@return: 加载得到的模型字典
"""
if CUDA:
states = torch.load(save_path, map_location=torch.device('cuda'))
else:
states = torch.load(save_path, map_location=torch.device('cpu'))
        # Load network parameters from the checkpoint
self.actor_eval.load_state_dict(states['actor_eval_net'])
self.actor_target.load_state_dict(states['actor_target_net'])
self.critic_eval.load_state_dict(states['critic_eval_net'])
self.critic_target.load_state_dict(states['critic_target_net'])
        # Load episode and step info from the checkpoint
self.episode, self.step = states['episode'], states['step']
        # Return the full state dict
return states
def load(self, save_path='./cur_model.pth'):
"""加载模型的默认实现
@param save_path: 模型的保存位置, 默认是 './cur_model.pth'
@return: 被记录的episode值
"""
        print('\033[1;31;40m{}\033[0m'.format('Loading model parameters...'))
if not os.path.exists(save_path):
            print('\033[1;31;40m{}\033[0m'.format('Saved model file not found'))
return -1
else:
states = self._load(save_path)
return states['episode']
def _learn(self):
"""训练网络"""
# 将eval网络参数赋给target网络
soft_update(self.actor_target, self.actor_eval, self.tau)
soft_update(self.critic_target, self.critic_eval, self.tau)
        # Fetch a batch and unpack it
batch = self.buff.get_batch_splited_tensor(CUDA, self.batch_size)
if batch is None:
return None, None
else:
self.start_train = True
batch_cur_states, batch_actions, batch_rewards, batch_dones, batch_next_states = batch
            # Compute target_q to guide the critic update:
            # use actor_target on the next states to get the next actions, then
            # feed next_states and those next_actions into critic_target to get
            # the q-values of the next states
target_q_next = self.critic_target(batch_next_states, self.actor_target(batch_next_states))
            target_q = batch_rewards + self.gamma * (1 - batch_dones) * target_q_next  # if done, ignore future rewards
            # Critic update
q_value = self.critic_eval(batch_cur_states, batch_actions)
td_error = self.mse_loss(target_q, q_value)
self.critic_optim.zero_grad()
td_error.backward()
self.critic_optim.step()
            # Actor update
            policy_loss = self.critic_eval(batch_cur_states, self.actor_eval(batch_cur_states))  # evaluate the action with the updated eval critic
            # a good action should receive a high Q-value, so we minimize the negative mean
loss_a = -torch.mean(policy_loss)
self.actor_optim.zero_grad()
loss_a.backward()
self.actor_optim.step()
return td_error.detach().cpu().numpy(), loss_a.detach().cpu().numpy()
def learn(self):
c_loss, a_loss = self._learn()
if all((c_loss is not None, a_loss is not None)):
self.step += 1
if self.summary_writer:
self.summary_writer.add_scalar('c_loss', c_loss, self.step)
self.summary_writer.add_scalar('a_loss', a_loss, self.step)
def _add_step(self, s, a, r, d, s_):
"""向经验回放池添加一条记录"""
self.buff.add_step(s, a, r, d, s_)
def add_step(self, s, a, r, d, s_):
"""添加记录的默认实现
除了添加记录之外不做任何操作
"""
self._add_step(s, a, r, d, s_)
def cuda(self):
self.actor_eval.cuda()
self.actor_target.cuda()
self.critic_eval.cuda()
self.critic_target.cuda()
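# --- Example (added): a minimal sketch of a concrete subclass, assuming small
# MLP actor/critic networks. The names below (SimpleDDPG, the hidden size of
# 64) are illustrative, and the exact OUProcess signature is an assumption.
class SimpleDDPG(DDPGBase):
    def _build_net(self):
        def make_actor():
            # Maps a state to a bounded action in (-1, 1)
            return nn.Sequential(
                nn.Linear(self.n_states, 64), nn.ReLU(),
                nn.Linear(64, self.n_actions), nn.Tanh())

        class _Critic(nn.Module):
            # Maps a (state, action) pair to a scalar Q-value
            def __init__(self, n_states, n_actions):
                super().__init__()
                self.net = nn.Sequential(
                    nn.Linear(n_states + n_actions, 64), nn.ReLU(),
                    nn.Linear(64, 1))

            def forward(self, state, action):
                return self.net(torch.cat([state, action], dim=1))

        self.actor_eval, self.actor_target = make_actor(), make_actor()
        self.critic_eval = _Critic(self.n_states, self.n_actions)
        self.critic_target = _Critic(self.n_states, self.n_actions)

    def _build_noise(self):
        # Assumed constructor: an Ornstein-Uhlenbeck process per action dim
        self.noise = OUProcess(self.n_actions)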
| 34.559829 | 117 | 0.608878 | 1,001 | 8,087 | 4.658342 | 0.236763 | 0.028951 | 0.019515 | 0.017156 | 0.247051 | 0.128029 | 0.083423 | 0.083423 | 0.052327 | 0.024877 | 0 | 0.015843 | 0.281934 | 8,087 | 233 | 118 | 34.708155 | 0.787153 | 0.126376 | 0 | 0.070423 | 0 | 0 | 0.088291 | 0.006464 | 0 | 0 | 0 | 0.004292 | 0 | 1 | 0.119718 | false | 0.007042 | 0.06338 | 0.014085 | 0.246479 | 0.014085 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3306bf86dd077d475c70918d613e4c401998d15b | 1,577 | py | Python | tests/test_ansi.py | getcuia/stransi | 6997722fb946aa8ac732b54e3fd623f87706013a | [
"MIT"
] | 10 | 2021-11-21T20:31:35.000Z | 2022-02-15T02:02:05.000Z | tests/test_ansi.py | getcuia/stransi | 6997722fb946aa8ac732b54e3fd623f87706013a | [
"MIT"
] | 12 | 2021-11-21T20:27:00.000Z | 2022-03-25T12:01:28.000Z | tests/test_ansi.py | getcuia/stransi | 6997722fb946aa8ac732b54e3fd623f87706013a | [
"MIT"
] | null | null | null | """Tests for the Ansi class."""
from typing import Text
import ochre
import pytest
from stransi import Ansi, SetAttribute, SetColor
from stransi.attribute import Attribute
from stransi.color import ColorRole
@pytest.fixture
def raw_example() -> Text:
"""Return a raw example string."""
return "\x1b[0;31;1mHello\033[m, \x1B[32mWorld!\N{ESC}[0m"
@pytest.fixture
def example(raw_example: Text) -> Ansi:
"""Return an example Ansi string."""
return Ansi(raw_example)
def test_ansi_has_pattern():
"""Ensure the class has a (constant) pattern property."""
assert hasattr(Ansi, "PATTERN")
def test_ansi_is_a_string(example: Ansi, raw_example: Text):
"""Ansi is a string."""
assert isinstance(example, Ansi)
assert isinstance(example, Text)
assert example == raw_example
def test_ansi_can_be_concatenated(example: Ansi, raw_example: Text):
"""Ansi can be concatenated."""
double_example = example + example
assert double_example == raw_example * 2
# assert not isinstance(double_example, Ansi)
assert isinstance(double_example, Text)
def test_ansi_can_be_iterated(example: Ansi):
"""Ansi can be iterated."""
assert list(example.instructions()) == [
SetAttribute(Attribute.NORMAL),
SetColor(role=ColorRole.FOREGROUND, color=ochre.Ansi256(1)),
SetAttribute(Attribute.BOLD),
"Hello",
SetAttribute(Attribute.NORMAL),
", ",
SetColor(role=ColorRole.FOREGROUND, color=ochre.Ansi256(2)),
"World!",
SetAttribute(Attribute.NORMAL),
]
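# --- Notes (added): how the raw example decodes, matching the assertions above.
# "\x1b[0;31;1m" carries three SGR parameters: 0 (reset -> SetAttribute(NORMAL)),
# 31 (red foreground -> SetColor(FOREGROUND, Ansi256(1))), and 1 (bold ->
# SetAttribute(BOLD)). "\033[m" with no parameter defaults to a reset, and
# "\x1B[32m" sets a green foreground (Ansi256(2)). "\N{ESC}" is simply another
# way to spell the escape character.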
| 26.283333 | 68 | 0.69182 | 195 | 1,577 | 5.461538 | 0.317949 | 0.075117 | 0.052582 | 0.050704 | 0.25446 | 0.195305 | 0.140845 | 0.140845 | 0.140845 | 0.140845 | 0 | 0.016419 | 0.188966 | 1,577 | 59 | 69 | 26.728814 | 0.816263 | 0.157261 | 0 | 0.147059 | 0 | 0.029412 | 0.053364 | 0.037123 | 0 | 0 | 0 | 0 | 0.205882 | 1 | 0.176471 | false | 0 | 0.176471 | 0 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
330c5c53e4aef134f51a364ad30994a1a548b27b | 1,791 | py | Python | api/metrics/weekly.py | scarletstudio/transithealth | 408e6c1a063e46edb95040c26db93c2ff93c6d33 | [
"MIT"
] | 2 | 2021-05-18T15:34:19.000Z | 2021-10-07T01:29:31.000Z | api/metrics/weekly.py | scarletstudio/transithealth | 408e6c1a063e46edb95040c26db93c2ff93c6d33 | [
"MIT"
] | 89 | 2021-05-21T18:31:04.000Z | 2021-08-16T01:13:02.000Z | api/metrics/weekly.py | scarletstudio/transithealth | 408e6c1a063e46edb95040c26db93c2ff93c6d33 | [
"MIT"
] | 1 | 2021-06-15T10:28:26.000Z | 2021-06-15T10:28:26.000Z | from api.utils.database import rows_to_dicts
class WeeklyMetrics:
"""
Metrics for weekly timeline view.
"""
def __init__(self, con):
self.con = con
def rideshare_pickups(self, since):
"""
        Returns the number of rideshare pickups per week, since the given date.
Args:
since (str): date string to use as the start of the time period
"""
query = """
SELECT
week as date,
SUM(n_trips) as value
FROM rideshare
WHERE week >= ?
GROUP BY date
"""
cur = self.con.cursor()
cur.execute(query, (since,))
rows = rows_to_dicts(cur, cur.fetchall())
return rows
def rideshare_avg_cost_cents(self, since):
"""
Returns the average cost in cents of rideshare trips per week, since the given date.
Args:
since (str): date string to use as the start of the time period
"""
query = """
SELECT
week as date,
CAST(
SUM(n_trips * avg_cost_no_tip_cents)
/ SUM(n_trips)
as INTEGER) as value
FROM rideshare
WHERE week >= ?
GROUP BY date
"""
cur = self.con.cursor()
cur.execute(query, (since,))
rows = rows_to_dicts(cur, cur.fetchall())
return rows
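    # --- Worked example (added): the query above computes a trip-weighted mean.
    # If one week has two rows, (n_trips=10, avg_cost=1200 cents) and
    # (n_trips=30, avg_cost=800 cents), the weekly value is
    # (10*1200 + 30*800) / (10 + 30) = 36000 / 40 = 900 cents.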
def covid_cases(self):
"""
Returns the number of COVID cases per week.
"""
query = """
SELECT
week as date,
SUM(cases_weekly) as value
FROM covid_spread
GROUP BY date
"""
cur = self.con.cursor()
cur.execute(query)
rows = rows_to_dicts(cur, cur.fetchall())
return rows | 26.731343 | 92 | 0.526521 | 212 | 1,791 | 4.325472 | 0.301887 | 0.038168 | 0.047983 | 0.055616 | 0.586696 | 0.586696 | 0.557252 | 0.557252 | 0.557252 | 0.514722 | 0 | 0 | 0.38861 | 1,791 | 67 | 93 | 26.731343 | 0.837443 | 0.21273 | 0 | 0.666667 | 0 | 0 | 0.423493 | 0.017002 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088889 | false | 0 | 0.022222 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3310753a5d684a9e7baec3df8c64391562d32fcb | 934 | py | Python | fastapi_discord/models/user.py | abhishek0220/fastapi-discord | f06cc61a4e800eae5c09fd55329a74fbfc6e270e | [
"MIT"
] | 2 | 2022-02-03T18:03:33.000Z | 2022-03-21T10:54:41.000Z | fastapi_discord/models/user.py | abhishek0220/fastapi-discord | f06cc61a4e800eae5c09fd55329a74fbfc6e270e | [
"MIT"
] | null | null | null | fastapi_discord/models/user.py | abhishek0220/fastapi-discord | f06cc61a4e800eae5c09fd55329a74fbfc6e270e | [
"MIT"
] | null | null | null | class User(object):
id: str = None
username: str
discriminator: str
avatar: str
avatar_url: str
locale: str
email: str
def __init__(self, payload: dict):
self.id = payload['id']
self.username = payload['username']
self.discriminator = payload['discriminator']
self.avatar = payload['avatar']
self.avatar_url = f'https://cdn.discordapp.com/avatars/{self.id}/{self.avatar}.png'
self.locale = payload['locale']
if 'email' in payload:
self.email = payload['email']
else:
self.email = ''
@property
def dict(self) -> dict:
return {
"id": self.id,
"username": self.username,
"discriminator": self.discriminator,
"avatar": self.avatar,
"avatar_url": self.avatar_url,
"locale": self.locale,
"email": self.email
}
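# --- Usage sketch (added): constructing a User from a Discord-style payload.
# The field values below are illustrative.
#   payload = {"id": "80351110224678912", "username": "Nelly",
#              "discriminator": "1337", "avatar": "8342729096ea3675442027381ff50dfe",
#              "locale": "en-US", "email": "nelly@example.com"}
#   user = User(payload)
#   user.avatar_url  # "https://cdn.discordapp.com/avatars/<id>/<avatar>.png"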
| 28.30303 | 91 | 0.546039 | 98 | 934 | 5.122449 | 0.295918 | 0.099602 | 0.063745 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.324411 | 934 | 32 | 92 | 29.1875 | 0.795563 | 0 | 0 | 0 | 0 | 0 | 0.168094 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0 | 0.033333 | 0.366667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33120d95556ce5f4dee1fe65ce51a43ea042a7b1 | 6,547 | py | Python | python_toolbox/interpolate_log_file.py | raphaelsulzer/TanksAndTemples | 01c04fd2d48b17da8d0ecef0d5b466319d0c3007 | [
"MIT"
] | 22 | 2019-04-17T11:21:35.000Z | 2021-06-06T16:56:36.000Z | python_toolbox/interpolate_log_file.py | raphaelsulzer/TanksAndTemples | 01c04fd2d48b17da8d0ecef0d5b466319d0c3007 | [
"MIT"
] | 15 | 2019-05-22T09:19:50.000Z | 2021-06-24T15:52:24.000Z | python_toolbox/interpolate_log_file.py | raphaelsulzer/TanksAndTemples | 01c04fd2d48b17da8d0ecef0d5b466319d0c3007 | [
"MIT"
] | 13 | 2019-04-26T16:50:13.000Z | 2021-05-27T11:21:49.000Z | # ----------------------------------------------------------------------------
# - TanksAndTemples Website Toolbox -
# - http://www.tanksandtemples.org -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2017
# Arno Knapitsch <arno.knapitsch@gmail.com >
# Jaesik Park <syncle@gmail.com>
# Qian-Yi Zhou <Qianyi.Zhou@gmail.com>
# Vladlen Koltun <vkoltun@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ----------------------------------------------------------------------------
#
# This python script is for interpolating camera poses
import sys
import math
import numpy as np
class CameraPose:
def __init__(self, meta, mat):
self.metadata = meta
self.pose = mat
def __str__(self):
return 'Metadata : ' + ' '.join(map(str, self.metadata)) + '\n' + \
"Pose : " + "\n" + np.array_str(self.pose)
def read_trajectory(filename):
traj = []
with open(filename, 'r') as f:
metastr = f.readline()
while metastr:
metadata = list(map(int, metastr.split()))
mat = np.zeros(shape = (4, 4))
for i in range(4):
                matstr = f.readline()
mat[i, :] = np.fromstring(matstr, dtype = float, sep=' \t')
traj.append(CameraPose(metadata, mat))
metastr = f.readline()
return traj
def write_trajectory(traj, filename):
with open(filename, 'w') as f:
for x in traj:
p = x.pose.tolist()
f.write(' '.join(map(str, x.metadata)) + '\n')
f.write('\n'.join(' '.join(map('{0:.12f}'.format, p[i])) for i in range(4)))
f.write('\n')
def read_mapping(filename):
mapping = []
with open(filename, 'r') as f:
n_sampled_frames = int(f.readline())
n_total_frames = int(f.readline())
mapping = np.zeros(shape = (n_sampled_frames, 2))
metastr = f.readline()
for iter in range(n_sampled_frames):
metadata = list(map(int, metastr.split()))
mapping[iter, :] = metadata
metastr = f.readline()
return [n_sampled_frames, n_total_frames, mapping]
def transform_matrix_4d_to_vector_6d(pose):
pose_vec = list(range(6))
R = pose[0:3, 0:3]
r00, r01, r02, r10, r11, r12, r20, r21, r22 = R.flat
sy = math.sqrt(r00 * r00 + r10 * r10)
if ~(sy < 1e-6):
pose_vec[0] = math.atan2(r21, r22)
pose_vec[1] = math.atan2(-r20, sy)
pose_vec[2] = math.atan2(r10, r00)
else:
pose_vec[0] = math.atan2(-r12, r11)
pose_vec[1] = math.atan2(-r20, sy)
pose_vec[2] = 0
pose_vec[3:] = pose[0:3, 3]
    return pose_vec
def euler_2_rotation_matrix(x, y, z):
cosx = math.cos(x)
sinx = math.sin(x)
Rx = np.array([[1, 0, 0], [0, cosx, -sinx], [0, sinx, cosx]])
cosy = math.cos(y)
siny = math.sin(y)
Ry = np.array([[cosy, 0, siny], [0, 1, 0], [-siny, 0, cosy]])
cosz = math.cos(z)
sinz = math.sin(z)
Rz = np.array([[cosz, -sinz, 0], [sinz, cosz, 0], [0, 0, 1]])
return Rz.dot(Ry.dot(Rx))
def transform_vector_6d_to_matrix_4d(pose_vec):
pose = np.identity(4)
pose[0:3, 3] = pose_vec[3:]
pose[0:3, 0:3] = euler_2_rotation_matrix(
pose_vec[0], pose_vec[1], pose_vec[2])
return pose
if __name__ == "__main__":
print('')
print('==================================================================')
print('Python script for interpolating camera poses')
print('==================================================================')
print('Algorithm : ')
print(' 1) Transform n-SE(3) camera pose matrices to nx6 matrix,')
print(' where each row contains euler angles and translation')
print(' 2) Independently interpolate each column of nx6 matrix')
print(' using 1D cubic interpolation')
print('==================================================================')
if len(sys.argv) != 4:
print('Usage : python %s [input_log] [mapping_txt_file] [output_log]'
% sys.argv[0])
print('Example : python %s ../test_data/test.log ../test_data/mapping.txt ../test_data/test_interpolated.log'
% sys.argv[0])
print('')
print('Convention of [input_log]')
print('[frame ID] [frame ID] 0')
print('[R t]')
print('[0 1]')
print(': (repeats)')
print('')
print('Convention of [mapping_txt_file]')
print('[number of input camera poses]')
print('[number of desired number of interpolated poses]')
print('[Image ID] [video frame ID]')
print(': (repeats)')
sys.exit()
# read files
trajectory = read_trajectory(sys.argv[1])
n_sampled_frames, n_total_frames, mapping = read_mapping(sys.argv[2])
print('%d camera poses are loaded' % n_sampled_frames)
print('Input poses are interpolated to %d poses' % n_total_frames)
# make nx6 matrix
n_trajectory = len(trajectory)
pose_matrix = np.zeros(shape = (n_trajectory, 6))
for iter in range(n_trajectory):
pose_vector = transform_matrix_4d_to_vector_6d(trajectory[iter].pose)
pose_matrix[iter,:] = pose_vector
# interpolation
pose_frame_desired = np.linspace(1, n_total_frames, n_total_frames)
pose_matrix_interpolation = np.zeros(shape = (n_total_frames, 6))
for iter in range(6):
pose_element_slice = pose_matrix[:, iter]
pose_frame_id = mapping[:, 1]
pose_matrix_interpolation[:, iter] = np.interp(
pose_frame_desired, pose_frame_id, pose_element_slice)
# transform interpolated vector to SE(3) and output result
traj_interpolated = []
for iter in range(n_total_frames):
pose_vector = pose_matrix_interpolation[iter, :]
pose = transform_vector_6d_to_matrix_4d(pose_vector)
metadata = [iter, iter, n_total_frames]
traj_interpolated.append(CameraPose(metadata, pose))
write_trajectory(traj_interpolated, sys.argv[3])
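    # --- Sanity check (added): the 6D <-> 4x4 conversions should round-trip
    # for rotations away from gimbal lock; the vector below is illustrative.
    #   vec = [0.1, 0.2, 0.3, 1.0, 2.0, 3.0]
    #   mat = transform_vector_6d_to_matrix_4d(vec)
    #   transform_matrix_4d_to_vector_6d(mat)  # ~= vec, up to float error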
| 36.780899 | 111 | 0.640293 | 938 | 6,547 | 4.32516 | 0.289979 | 0.024156 | 0.026621 | 0.013803 | 0.130885 | 0.0912 | 0.04634 | 0.014789 | 0.014789 | 0.014789 | 0 | 0.024858 | 0.16435 | 6,547 | 177 | 112 | 36.988701 | 0.716688 | 0.266229 | 0 | 0.15873 | 0 | 0.007937 | 0.201385 | 0.058108 | 0 | 0 | 0 | 0 | 0 | 1 | 0.063492 | false | 0 | 0.02381 | 0.007937 | 0.142857 | 0.206349 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3313adefb1a30da0df4e1f16c9e43856f2a91e08 | 8,010 | py | Python | src/fidalgo/azext_fidalgo/vendored_sdks/fidalgo/_fidalgo.py | tbyfield/azure-cli-extensions | e7e5f37fdcea3afb5c4aecb61fa72eac72c2128e | [
"MIT"
] | null | null | null | src/fidalgo/azext_fidalgo/vendored_sdks/fidalgo/_fidalgo.py | tbyfield/azure-cli-extensions | e7e5f37fdcea3afb5c4aecb61fa72eac72c2128e | [
"MIT"
] | null | null | null | src/fidalgo/azext_fidalgo/vendored_sdks/fidalgo/_fidalgo.py | tbyfield/azure-cli-extensions | e7e5f37fdcea3afb5c4aecb61fa72eac72c2128e | [
"MIT"
] | 1 | 2022-02-14T21:43:29.000Z | 2022-02-14T21:43:29.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Optional
from azure.core.credentials import TokenCredential
from ._configuration import FidalgoConfiguration
from .operations import DevCentersOperations
from .operations import ProjectsOperations
from .operations import AttachedNetworksOperations
from .operations import EnvironmentsOperations
from .operations import DeploymentsOperations
from .operations import EnvironmentTypesOperations
from .operations import CatalogItemsOperations
from .operations import GalleriesOperations
from .operations import ImagesOperations
from .operations import ImageVersionsOperations
from .operations import CatalogsOperations
from .operations import MappingsOperations
from .operations import DevBoxDefinitionsOperations
from .operations import Operations
from .operations import OperationStatusesOperations
from .operations import SkusOperations
from .operations import PoolsOperations
from .operations import MachineDefinitionsOperations
from .operations import NetworkSettingsOperations
from . import models
class Fidalgo(object):
"""Project Fidalgo Management API.
:ivar dev_centers: DevCentersOperations operations
:vartype dev_centers: fidalgo.operations.DevCentersOperations
:ivar projects: ProjectsOperations operations
:vartype projects: fidalgo.operations.ProjectsOperations
:ivar attached_networks: AttachedNetworksOperations operations
:vartype attached_networks: fidalgo.operations.AttachedNetworksOperations
:ivar environments: EnvironmentsOperations operations
:vartype environments: fidalgo.operations.EnvironmentsOperations
:ivar deployments: DeploymentsOperations operations
:vartype deployments: fidalgo.operations.DeploymentsOperations
:ivar environment_types: EnvironmentTypesOperations operations
:vartype environment_types: fidalgo.operations.EnvironmentTypesOperations
:ivar catalog_items: CatalogItemsOperations operations
:vartype catalog_items: fidalgo.operations.CatalogItemsOperations
:ivar galleries: GalleriesOperations operations
:vartype galleries: fidalgo.operations.GalleriesOperations
:ivar images: ImagesOperations operations
:vartype images: fidalgo.operations.ImagesOperations
:ivar image_versions: ImageVersionsOperations operations
:vartype image_versions: fidalgo.operations.ImageVersionsOperations
:ivar catalogs: CatalogsOperations operations
:vartype catalogs: fidalgo.operations.CatalogsOperations
:ivar mappings: MappingsOperations operations
:vartype mappings: fidalgo.operations.MappingsOperations
:ivar dev_box_definitions: DevBoxDefinitionsOperations operations
:vartype dev_box_definitions: fidalgo.operations.DevBoxDefinitionsOperations
:ivar operations: Operations operations
:vartype operations: fidalgo.operations.Operations
:ivar operation_statuses: OperationStatusesOperations operations
:vartype operation_statuses: fidalgo.operations.OperationStatusesOperations
:ivar skus: SkusOperations operations
:vartype skus: fidalgo.operations.SkusOperations
:ivar pools: PoolsOperations operations
:vartype pools: fidalgo.operations.PoolsOperations
:ivar machine_definitions: MachineDefinitionsOperations operations
:vartype machine_definitions: fidalgo.operations.MachineDefinitionsOperations
:ivar network_settings: NetworkSettingsOperations operations
:vartype network_settings: fidalgo.operations.NetworkSettingsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Unique identifier of the Azure subscription. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000).
:type subscription_id: str
:param str base_url: Service URL
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
base_url=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> None
if not base_url:
base_url = 'https://management.azure.com'
self._config = FidalgoConfiguration(credential, subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.dev_centers = DevCentersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.projects = ProjectsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.attached_networks = AttachedNetworksOperations(
self._client, self._config, self._serialize, self._deserialize)
self.environments = EnvironmentsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.deployments = DeploymentsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.environment_types = EnvironmentTypesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.catalog_items = CatalogItemsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.galleries = GalleriesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.images = ImagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.image_versions = ImageVersionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.catalogs = CatalogsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.mappings = MappingsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.dev_box_definitions = DevBoxDefinitionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.operation_statuses = OperationStatusesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.skus = SkusOperations(
self._client, self._config, self._serialize, self._deserialize)
self.pools = PoolsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.machine_definitions = MachineDefinitionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.network_settings = NetworkSettingsOperations(
self._client, self._config, self._serialize, self._deserialize)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> Fidalgo
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
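# --- Usage sketch (added): DefaultAzureCredential comes from the separate
# azure-identity package, and the subscription id below is a placeholder.
#   from azure.identity import DefaultAzureCredential
#   client = Fidalgo(DefaultAzureCredential(),
#                    "00000000-0000-0000-0000-000000000000")
#   client.dev_centers  # one of the operation groups documented above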
| 50.0625 | 149 | 0.745069 | 757 | 8,010 | 7.676354 | 0.235139 | 0.03958 | 0.065393 | 0.065393 | 0.169334 | 0.169334 | 0.169334 | 0.169334 | 0.161074 | 0 | 0 | 0.004959 | 0.169288 | 8,010 | 159 | 150 | 50.377358 | 0.86835 | 0.417853 | 0 | 0.218391 | 0 | 0 | 0.006277 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045977 | false | 0 | 0.298851 | 0 | 0.367816 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
331ba1284c22345fddae01ed951947b89e18b30e | 1,904 | py | Python | visual/listDensityPlot (copy).py | dparnold/data-analysis | 898893398d2fd40efcd538239b3e22625107bf37 | [
"MIT"
] | null | null | null | visual/listDensityPlot (copy).py | dparnold/data-analysis | 898893398d2fd40efcd538239b3e22625107bf37 | [
"MIT"
] | null | null | null | visual/listDensityPlot (copy).py | dparnold/data-analysis | 898893398d2fd40efcd538239b3e22625107bf37 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
# Create some example data to see whether we are doing the job right
xExampleData = [1, 1, 1, 2, 2, 2, 3, 3, 3]
yExampleData = [10, 20, 30, 10, 20, 30, 10, 20, 30]
zExampleData = [0, 3, 0, 3, 4, 5, 1, 1, 6]
def listDensityPlot(x,y,z):
# Convert to numpy arrays
x = np.array(x)
y = np.array(y)
z = np.array(z)
xUnique = np.unique(x) # Returns the sorted unique elements of an array
yUnique = np.unique(y)
imageZ = np.zeros((len(xUnique),len(yUnique))) # Create 2D numpy array for plotting
for i,xitem in enumerate(xUnique):
        xMask = x==xitem  # Boolean array marking where xitem occurs in x
        for j,yitem in enumerate(yUnique):
            yMask = y==yitem  # Boolean array marking where yitem occurs in y
if np.sum(xMask*yMask)!=0: # Ignore missing values
imageZ[i,j] = z[xMask * yMask] # Set the value of imageZ at i,j to the appropriate value of z for the combination of xitem and yitem
# xMask * yMask gives a Bool array where only one value is true
# Example: exampleMask = np.array([False,False,True,False,False,False], dtype=bool)
# exampleArray = np.array([1, 2, 3, 4, 5, 6])
# exampleArray[exampleMask] gives the value of 3 because there is the only True value
# Do the plotting
plt.imshow(np.rot90(imageZ), aspect='auto', extent=(np.min(xUnique),np.max(xUnique),np.min(yUnique),np.max(yUnique)))
# Add norm=LogNorm(vmin=np.min(z), vmax= np.max(z)) for logarithmic axis
plt.colorbar(label='zLabel [zUnit]')
plt.title('plotTitle')
plt.xlabel('xLabel [xUnit]')
plt.ylabel('yLabel [yUnit]')
plt.grid()
if __name__ == '__main__':
listDensityPlot(xExampleData,yExampleData,zExampleData)
# I took the plt.show() out of the function in order to be able to change parts of the plot
plt.xlabel('This is a new xlabel')
plt.show() | 40.510638 | 137 | 0.702206 | 327 | 1,904 | 4.06422 | 0.400612 | 0.026336 | 0.013544 | 0.012039 | 0.069225 | 0.069225 | 0.055681 | 0.055681 | 0.055681 | 0 | 0 | 0.029917 | 0.174895 | 1,904 | 47 | 138 | 40.510638 | 0.816041 | 0.464811 | 0 | 0 | 0 | 0 | 0.082669 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034483 | false | 0 | 0.103448 | 0 | 0.137931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
331d8e3fb4a50327277b4a5968988cd16d8d2ba6 | 3,334 | py | Python | we_are_venom/utils/accumulation.py | best-doctor/we_are_venom | 93983351807c3dec163ec71e65f7086b5f59dc5c | [
"MIT"
] | null | null | null | we_are_venom/utils/accumulation.py | best-doctor/we_are_venom | 93983351807c3dec163ec71e65f7086b5f59dc5c | [
"MIT"
] | 4 | 2020-06-02T12:00:00.000Z | 2020-06-04T12:36:55.000Z | we_are_venom/utils/accumulation.py | best-doctor/we_are_venom | 93983351807c3dec163ec71e65f7086b5f59dc5c | [
"MIT"
] | 1 | 2021-01-13T09:00:40.000Z | 2021-01-13T09:00:40.000Z | import collections
from typing import List, Mapping, Any, Optional, Iterable, Tuple
from git import Commit
from unidiff import PatchSet
from we_are_venom.common_types import ModuleAccumulation
from we_are_venom.utils.files import fetch_modules_total_lines_map, should_be_skipped
if False: # TYPE_CHECKING
from typing import DefaultDict
def _get_file_module(filename: str, modules: Iterable[str]) -> Optional[str]:
for module in modules:
if filename.startswith(module):
return module
return None
def is_module_accumulated(
touched_lines: Optional[int],
total_lines: Optional[int],
config: Mapping[str, Any],
) -> Optional[bool]:
if not total_lines or total_lines < config['min_lines_in_module']:
return None
return bool(touched_lines and touched_lines >= config['min_touched_lines_for_accumulated_module'])
def get_touched_files_in_commit(
commit: Commit,
config: Mapping[str, Any],
) -> Iterable[Tuple[str, int]]:
touched_lines_per_module: DefaultDict[str, int] = collections.defaultdict(int)
raw_diff = commit.repo.git.diff(commit.tree, commit.parents[0] if commit.parents else None)
for changed_file in PatchSet(raw_diff):
filename = changed_file.path
if should_be_skipped(filename, config['skip_dirs']):
continue
module = _get_file_module(filename, config['modules'])
if not module:
continue
touched_lines_per_module[module] += changed_file.added + changed_file.removed
return touched_lines_per_module.items()
def calclulate_module_accumulation_info(
raw_git_history: List[Commit],
email: str,
config: Mapping[str, Any],
) -> List[ModuleAccumulation]:
touched_lines_per_module: DefaultDict[str, int] = collections.defaultdict(int)
for commit in raw_git_history:
for module, new_touched_lines in get_touched_files_in_commit(commit, config):
touched_lines_per_module[module] += new_touched_lines
modules_total_lines_map = fetch_modules_total_lines_map(
raw_git_history[0].repo.working_dir,
config,
)
accumulated_modules_info = [
ModuleAccumulation(
module_name=m,
touched_lines=l,
total_lines=modules_total_lines_map.get(m),
is_accumulated=is_module_accumulated(
l,
modules_total_lines_map.get(m),
config,
),
)
for (m, l) in touched_lines_per_module.items()
]
empty_modules_info = [
ModuleAccumulation(
module_name=m,
touched_lines=0,
total_lines=modules_total_lines_map.get(m),
is_accumulated=is_module_accumulated(
None,
modules_total_lines_map.get(m),
config,
),
)
for m in config['modules']
]
return accumulated_modules_info + empty_modules_info
def calculate_total_accumulation_percent(module_accumulation_info: List[ModuleAccumulation]) -> int:
accumulated_modules_number = len([m for m in module_accumulation_info if m.is_accumulated])
scored_modules_number = len([m for m in module_accumulation_info if m.is_accumulated is not None])
return int(accumulated_modules_number / scored_modules_number * 100)
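# --- Worked example (added): with three modules whose is_accumulated values
# are True, False and None (None meaning the module was too small to score),
# accumulated_modules_number == 1 and scored_modules_number == 2, so the
# function returns int(1 / 2 * 100) == 50.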
| 35.468085 | 102 | 0.694061 | 417 | 3,334 | 5.211031 | 0.215827 | 0.077312 | 0.054763 | 0.064427 | 0.364013 | 0.280718 | 0.254027 | 0.254027 | 0.206167 | 0.174873 | 0 | 0.002332 | 0.228254 | 3,334 | 93 | 103 | 35.849462 | 0.842208 | 0.003899 | 0 | 0.3 | 0 | 0 | 0.024706 | 0.012052 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.0875 | 0 | 0.2375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33207a258d942ba814485373c54b717a46b8215a | 3,670 | py | Python | src/unittest/python/plugins/python/sonarqube_plugin_tests.py | igordertigor/pybuilder | 772cf66a6fea86c59bd76f22388b0ce964b2fc1a | [
"Apache-2.0"
] | null | null | null | src/unittest/python/plugins/python/sonarqube_plugin_tests.py | igordertigor/pybuilder | 772cf66a6fea86c59bd76f22388b0ce964b2fc1a | [
"Apache-2.0"
] | null | null | null | src/unittest/python/plugins/python/sonarqube_plugin_tests.py | igordertigor/pybuilder | 772cf66a6fea86c59bd76f22388b0ce964b2fc1a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from test_utils import Mock, patch
from os.path import normcase as nc
from pybuilder.core import Project
from pybuilder.errors import BuildFailedException
from pybuilder.plugins.python.sonarqube_plugin import (SonarCommandBuilder,
build_sonar_runner,
run_sonar_analysis)
class RunSonarAnalysisTest(TestCase):
def setUp(self):
self.project = Project("any-project")
self.project.version = "0.0.1"
self.project.set_property("sonarqube_project_key", "project_key")
self.project.set_property("sonarqube_project_name", "project_name")
self.project.set_property("dir_source_main_python", "src/main/python")
self.project.set_property("dir_target", "target")
self.project.set_property("dir_reports", "target/reports")
def test_should_build_sonar_runner_for_project(self):
self.assertEqual(
build_sonar_runner(self.project).as_string,
"sonar-runner -Dsonar.projectKey=project_key "
"-Dsonar.projectName=project_name "
"-Dsonar.projectVersion=0.0.1 "
"-Dsonar.sources=src/main/python "
"-Dsonar.python.coverage.reportPath=%s" % nc("target/reports/coverage*.xml"))
@patch("pybuilder.plugins.python.sonarqube_plugin.SonarCommandBuilder.run")
def test_should_break_build_when_sonar_runner_fails(self, run_sonar_command):
run_sonar_command.return_value = Mock(exit_code=1)
self.assertRaises(BuildFailedException, run_sonar_analysis, self.project, Mock())
@patch("pybuilder.plugins.python.sonarqube_plugin.SonarCommandBuilder.run")
def test_should_not_break_build_when_sonar_runner_succeeds(self, run_sonar_command):
run_sonar_command.return_value = Mock(exit_code=0)
run_sonar_analysis(self.project, Mock())
class SonarCommandBuilderTests(TestCase):
def setUp(self):
self.project = Project("any-project")
self.project.set_property("any-property-name", "any-property-value")
self.sonar_builder = SonarCommandBuilder("sonar", self.project)
def test_should_set_sonar_key_to_specific_value(self):
self.sonar_builder.set_sonar_key("anySonarKey").to("anyValue")
self.assertEqual(
self.sonar_builder.as_string,
"sonar -DanySonarKey=anyValue")
def test_should_set_sonar_key_to_two_specific_values(self):
self.sonar_builder.set_sonar_key("anySonarKey").to("anyValue").set_sonar_key("other").to("otherValue")
self.assertEqual(
self.sonar_builder.as_string,
"sonar -DanySonarKey=anyValue -Dother=otherValue")
def test_should_set_sonar_key_to_property_value(self):
self.sonar_builder.set_sonar_key("anySonarKey").to_property_value("any-property-name")
self.assertEqual(self.sonar_builder.as_string, "sonar -DanySonarKey=any-property-value")
| 41.235955 | 110 | 0.708174 | 456 | 3,670 | 5.460526 | 0.311404 | 0.05743 | 0.04498 | 0.053012 | 0.444177 | 0.379116 | 0.323695 | 0.292369 | 0.292369 | 0.26988 | 0 | 0.007104 | 0.19455 | 3,670 | 88 | 111 | 41.704545 | 0.83525 | 0.171935 | 0 | 0.215686 | 0 | 0 | 0.244452 | 0.151375 | 0 | 0 | 0 | 0 | 0.098039 | 1 | 0.156863 | false | 0 | 0.117647 | 0 | 0.313725 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3320fbe982e32947e19be96e10d1fd1e9f4c38bb | 934 | py | Python | ClosedWayGP/skoods/utils.py | skoods-org/getting-started | 50035f5a9163b34fee815fa021135e3588ca0df0 | [
"MIT"
] | 12 | 2018-12-04T12:01:40.000Z | 2020-10-30T20:22:20.000Z | skoods/utils.py | allan-cedric/deep_learning_skoods | aa146322d6aa093e434d7f7546280aec69c30120 | [
"MIT"
] | 2 | 2018-10-25T18:37:55.000Z | 2018-10-25T18:52:28.000Z | skoods/utils.py | allan-cedric/deep_learning_skoods | aa146322d6aa093e434d7f7546280aec69c30120 | [
"MIT"
] | 3 | 2019-01-07T16:23:20.000Z | 2019-04-23T14:01:26.000Z | ##########################################
### Skoods.org -> Self-Racing Car Team ###
##########################################
import math
def distance_of_two_points(x0, y0, x1, y1):
return math.sqrt((x0-x1)**2 + (y0-y1)**2)
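# --- Notes (added): quick sanity check and formula background.
#   distance_of_two_points(0, 0, 3, 4) == 5.0  (a 3-4-5 right triangle)
# The function below uses the standard 2D point-to-line formula for the line
# through the two waypoints: |(y2-y1)*x0 - (x2-x1)*y0 + x2*y1 - y2*x1|
# divided by the segment length, i.e. the area of the parallelogram spanned
# by the segment and the point, normalized by the segment length.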
def get_distance_of_point_to_line(point, line_point_0, line_point_1):
    # Get the distance of the car position to the trajectory, to compute the steering error
distance = 0.0
# Point
x0 = point[0]
y0 = point[1]
# Line Segment
x1 = line_point_0[0]
y1 = line_point_0[1]
x2 = line_point_1[0]
y2 = line_point_1[1]
# equations and checks
denominator = math.sqrt((y2-y1)**2 + (x2-x1)**2)
if denominator<0.0000001: # Avoid equal waypoints
denominator = 0.001
distance = abs( ((y2-y1)*x0 - (x2-x1)*y0 + x2*y1 - y2*x1) / denominator )
else:
distance = abs( ((y2-y1)*x0 - (x2-x1)*y0 + x2*y1 - y2*x1) / denominator )
return distance | 33.357143 | 88 | 0.566381 | 136 | 934 | 3.742647 | 0.352941 | 0.123772 | 0.058939 | 0.058939 | 0.165029 | 0.165029 | 0.165029 | 0.165029 | 0.165029 | 0.165029 | 0 | 0.092769 | 0.215203 | 934 | 28 | 89 | 33.357143 | 0.601637 | 0.192719 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.055556 | 0.055556 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33259cac1b8a3e058b4c89078c66ff54cbef745c | 2,822 | py | Python | ffcv/reader.py | neuroailab/ffcv | 43d249762d32ce873678a9b8d7983ae653ad6f84 | [
"Apache-2.0"
] | 1,969 | 2022-01-18T18:59:01.000Z | 2022-03-30T12:53:55.000Z | ffcv/reader.py | neuroailab/ffcv | 43d249762d32ce873678a9b8d7983ae653ad6f84 | [
"Apache-2.0"
] | 138 | 2022-01-19T01:42:47.000Z | 2022-03-31T23:00:21.000Z | ffcv/reader.py | neuroailab/ffcv | 43d249762d32ce873678a9b8d7983ae653ad6f84 | [
"Apache-2.0"
] | 107 | 2022-01-18T20:54:38.000Z | 2022-03-31T16:26:51.000Z | import numpy as np
from .utils import decode_null_terminated_string
from .types import (ALLOC_TABLE_TYPE, HeaderType, CURRENT_VERSION,
FieldDescType, get_handlers, get_metadata_type)
class Reader:
def __init__(self, fname, custom_handlers={}):
self._fname = fname
self._custom_handlers = custom_handlers
self.read_header()
self.read_field_descriptors()
self.read_metadata()
self.read_allocation_table()
@property
def file_name(self):
return self._fname
def read_header(self):
header = np.fromfile(self._fname, dtype=HeaderType, count=1)[0]
header.setflags(write=False)
version = header['version']
if version != CURRENT_VERSION:
msg = f"file format mismatch: code={CURRENT_VERSION},file={version}"
raise AssertionError(msg)
self.num_samples = header['num_samples']
self.page_size = header['page_size']
self.num_fields = header['num_fields']
self.header = header
def read_field_descriptors(self):
offset = HeaderType.itemsize
field_descriptors = np.fromfile(self._fname, dtype=FieldDescType,
count=self.num_fields, offset=offset)
field_descriptors.setflags(write=False)
handlers = get_handlers(field_descriptors)
self.field_descriptors = field_descriptors
self.field_names = list(map(decode_null_terminated_string,
self.field_descriptors['name']))
self.handlers = dict(zip(self.field_names, handlers))
for field_name, field_desc in zip(self.field_names, self.field_descriptors):
if field_name in self._custom_handlers:
CustomHandler = self._custom_handlers[field_name]
self.handlers[field_name] = CustomHandler.from_binary(field_desc['arguments'])
for field_name, handler in self.handlers.items():
if handler is None:
raise ValueError(f"Must specify a custom_field entry " \
f"for custom field {field_name}")
self.metadata_type = get_metadata_type(list(self.handlers.values()))
def read_metadata(self):
offset = HeaderType.itemsize + self.field_descriptors.nbytes
self.metadata = np.fromfile(self._fname, dtype=self.metadata_type,
count=self.num_samples, offset=offset)
self.metadata.setflags(write=False)
def read_allocation_table(self):
offset = self.header['alloc_table_ptr']
alloc_table = np.fromfile(self._fname, dtype=ALLOC_TABLE_TYPE,
offset=offset)
alloc_table.setflags(write=False)
self.alloc_table = alloc_table
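    # --- Layout note (added): the file is read as four consecutive regions:
    # one HeaderType record, then num_fields FieldDescType records, then
    # num_samples metadata records (dtype derived from the field handlers),
    # and finally the allocation table at header['alloc_table_ptr'].

# --- Usage sketch (added): 'dataset.beton' is an illustrative file name.
#   reader = Reader('dataset.beton')
#   reader.num_samples, reader.field_names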
| 38.657534 | 94 | 0.64068 | 319 | 2,822 | 5.394984 | 0.260188 | 0.092969 | 0.046485 | 0.04416 | 0.055782 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000974 | 0.272502 | 2,822 | 72 | 95 | 39.194444 | 0.837311 | 0 | 0 | 0 | 0 | 0 | 0.066312 | 0.013121 | 0 | 0 | 0 | 0 | 0.017857 | 1 | 0.107143 | false | 0 | 0.053571 | 0.017857 | 0.196429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3329a32504011e53aff2aac9c4df2f006807a5a9 | 7,909 | py | Python | Lib/site-packages/fidget/widgets/confirmer.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | 1 | 2021-03-31T07:33:38.000Z | 2021-03-31T07:33:38.000Z | src/fidget/widgets/confirmer.py | talos-gis/qTalos | 276b5581b6b20e8a0c4f7c85998a13030b2160ba | [
"MIT"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | Lib/site-packages/fidget/widgets/confirmer.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | from __future__ import annotations
from typing import TypeVar, Generic, Union, NoReturn, Callable
from fidget.backend.QtWidgets import QHBoxLayout, QApplication, QPushButton, QVBoxLayout, QBoxLayout, QMessageBox, \
QWidget
from fidget.backend.QtCore import Qt, QEventLoop
from fidget.core import Fidget, FidgetTemplate, ParseError, TemplateLike, inner_plaintext_printer, PlaintextPrintError
from fidget.core.__util__ import first_valid
from fidget.widgets.idiomatic_inner import SingleFidgetWrapper
from fidget.widgets.__util__ import only_valid
T = TypeVar('T')
C = TypeVar('C')
class FidgetConfirmer(Generic[T, C], SingleFidgetWrapper[T, Union[T, C]]):
"""
A Fidget that wraps another Fidget. Adding an Ok and (potentially) Cancel buttons, that trigger this
Fidget's validation. Useful for dialogs or for slow validations
"""
NO_CANCEL: NoReturn = object()
def __init__(self, inner_template: TemplateLike[T] = None, layout_cls=None,
cancel_value: C = NO_CANCEL, close_on_confirm=None, ok_text=None, cancel_text=None,
window_modality=None, **kwargs):
"""
:param inner_template: an inner template to wrap
:param layout_cls: the class of the layout
:param cancel_value: the value to parse upon the cancel button being clicked. If this argument is provided,
a cancel button is created.
:param close_on_confirm: whether to close this widget if Ok or Cancel is clicked and the value is valid.
:param ok_text: text for the ok button
:param cancel_text: text for the cancel button
:param window_modality: the modality of the widget, convenience parameter
:param kwargs: forwarded to Fidget
"""
inner_template = only_valid(inner_template=inner_template, INNER_TEMPLATE=self.INNER_TEMPLATE, _self=self).template_of()
super().__init__(inner_template.title, **kwargs)
self.inner_template = inner_template
self.inner: Fidget[T] = None
self.ok_button: QPushButton = None
self.cancel_button: QPushButton = None
self.cancel_value = cancel_value
self.make_cancel = cancel_value is not self.NO_CANCEL
self.cancel_flag = False
self.close_on_confirm = first_valid(close_on_confirm=close_on_confirm, CLOSE_ON_CONFIRM=self.CLOSE_ON_CONFIRM, _self=self)
self.init_ui(layout_cls=layout_cls, ok_text=ok_text, cancel_text=cancel_text, modality=window_modality)
self._inner_changed()
INNER_TEMPLATE: FidgetTemplate[T] = None
LAYOUT_CLS = QVBoxLayout
MAKE_TITLE = MAKE_PLAINTEXT = MAKE_INDICATOR = False
CLOSE_ON_CONFIRM = False
WINDOW_MODALITY = None
OK_TEXT = 'OK'
CANCEL_TEXT = 'Cancel'
def init_ui(self, layout_cls=None, ok_text=None, cancel_text=None, modality=None):
super().init_ui()
layout_cls = first_valid(layout_cls=layout_cls, LAYOUT_CLS=self.LAYOUT_CLS, _self=self)
modality = modality or self.WINDOW_MODALITY
layout: QBoxLayout = layout_cls(self)
self.inner = self.inner_template()
with self.setup_provided(layout):
self.inner.on_change.connect(self._inner_changed)
layout.addWidget(self.inner)
btn_layout = QHBoxLayout()
if self.make_cancel:
self.cancel_button = QPushButton(first_valid(cancel_text=cancel_text, CANCEL_TEXT=self.CANCEL_TEXT, _self=self))
self.cancel_button.clicked.connect(self._cancel_btn_clicked)
btn_layout.addWidget(self.cancel_button)
self.ok_button = QPushButton(first_valid(ok_text=ok_text, CANCEL_TEXT=self.OK_TEXT, _self=self))
self.ok_button.clicked.connect(self._ok_btn_clicked)
btn_layout.addWidget(self.ok_button)
layout.addLayout(btn_layout)
self.setFocusProxy(self.inner)
if modality:
self.setWindowModality(modality)
#if not self.make_cancel:
#self.setWindowFlags(Qt.WindowMinimizeButtonHint)
self.add_plaintext_delegates(self.inner)
return layout
def parse(self):
if self.cancel_flag:
if self.make_cancel:
return self.cancel_value
raise ParseError('invalid cancel value')
inner_value = self.inner.value()
if not inner_value.is_ok():
raise ParseError(offender=self.inner) from inner_value.exception
return inner_value.value
@staticmethod
def to_widget(w: Union[QWidget, Callable[[], QWidget], None]):
if not w or isinstance(w, QWidget):
return w
return w()
def _inner_changed(self):
value = self.inner.value()
self.ok_button.setEnabled(value.is_ok())
def _ok_btn_clicked(self, *a):
self.cancel_flag = False
self.change_value()
if self.close_on_confirm:
value = self.value()
if value.is_ok():
self.close()
else:
QMessageBox.critical(self, 'error parsing value', value.details)
def _cancel_btn_clicked(self, *a):
self.cancel_flag = True
self.change_value()
if self.close_on_confirm:
value = self.value()
if value.is_ok():
self.close()
else:
QMessageBox.critical(self, 'error parsing value', value.details)
def keyPressEvent(self, event):
if event.key() == Qt.Key_Return \
or (event.modifiers() == Qt.KeypadModifier and event.key() == Qt.Key_Enter):
self.ok_button.click()
elif self.cancel_button and (not event.modifiers() and event.key() == Qt.Key_Escape):
self.cancel_button.click()
else:
super().keyPressEvent(event)
def closeEvent(self, event):
if event.spontaneous():
self.cancel_flag = True
self.change_value()
super().closeEvent(event)
@inner_plaintext_printer
def cancel_printer(self, v):
if v is self.cancel_value:
return str(v)
raise PlaintextPrintError('cannot print non-cancel value')
def fill(self, v):
self.inner.fill(v)
class FidgetQuestion(Generic[T, C], FidgetConfirmer[T, C]):
"""
A specialization of FidgetConfirmer designed for dialogs
"""
CLOSE_ON_CONFIRM = True
FLAGS = Qt.Dialog
WINDOW_MODALITY = Qt.WindowModal
def exec(self):
"""
show the widget and block until its value is set
"""
self.show()
event_loop = QEventLoop()
self.on_change.connect(event_loop.quit)
event_loop.exec_()
return self.value()
exec_ = exec
def question(*args, **kwargs) -> \
Callable[[TemplateLike[T]], FidgetTemplate[T]]:
"""
decorator to wrap a Fidget template in a FidgetQuestion template
:param args: forwarded to FidgetConfirmer
:param kwargs: forwarded to FidgetConfirmer
:return: a FidgetConfirmer template
"""
def ret(c: TemplateLike[T]) -> FidgetTemplate[T]:
if isinstance(c, type) and issubclass(c, TemplateLike):
template_of = c.template()
elif isinstance(c, TemplateLike):
template_of = c.template_of()
else:
raise TypeError(f'cannot wrap {c} in question')
return FidgetQuestion.template(template_of, *args, **kwargs)
return ret
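# Hedged usage sketch (added for illustration, not part of the original
# module): asking the user for an int with a blocking FidgetQuestion dialog.
# FidgetInt is the same widget used in the demo below.
def _example_question():
    from fidget.widgets import FidgetInt
    q = FidgetQuestion(
        FidgetInt.template('how many?', make_title=True),
        cancel_value=None
    )
    answer = q.exec()  # blocks until the question is answered or closed
    return answer.value if answer.is_ok() else None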
if __name__ == '__main__':
from fidget.widgets import *
app = QApplication([])
w = FidgetConverter(
FidgetConfirmer(
FidgetInt.template('source ovr', make_title=True, make_indicator=True),
cancel_value=None
),
converter_func=lambda x: (x * x if isinstance(x, int) else None)
)
w.on_change.connect(lambda: print(w.value()))
w.show()
res = app.exec_()
print(w.value())
exit(res)
| 34.537118 | 130 | 0.655456 | 970 | 7,909 | 5.124742 | 0.198969 | 0.028968 | 0.03098 | 0.014484 | 0.170388 | 0.139006 | 0.081875 | 0.051499 | 0.051499 | 0.051499 | 0 | 0 | 0.253129 | 7,909 | 228 | 131 | 34.688596 | 0.841544 | 0.133645 | 0 | 0.153333 | 0 | 0 | 0.020512 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.093333 | false | 0 | 0.06 | 0 | 0.32 | 0.04 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33326cedde58b83256a76215fd97c6fcf9e1c6e9 | 1,360 | py | Python | .devablog/lib/python3.5/site-packages/guess_language/__main__.py | jhenriquetdg/devablog | 5a0e8246c0be01117aee0448bf900e5b01e1fb5d | [
"MIT"
] | 3 | 2017-11-11T23:51:24.000Z | 2020-10-23T11:27:42.000Z | .devablog/lib/python3.5/site-packages/guess_language/__main__.py | jhenriquetdg/devablog | 5a0e8246c0be01117aee0448bf900e5b01e1fb5d | [
"MIT"
] | 6 | 2020-06-05T18:39:11.000Z | 2022-01-13T00:49:22.000Z | venv/Lib/site-packages/guess_language/__main__.py | Sudhish2101/Payroll-System | 30d5113748b4917c8172ed46737daa5dc3594d2a | [
"MIT"
] | 2 | 2018-02-15T10:10:12.000Z | 2018-06-22T19:14:06.000Z | """Guess the natural language of a text
"""
import argparse
import locale
import os
import sys
import guess_language.console_mode #@UnusedImport
def parse_args():
parser = argparse.ArgumentParser(
description=__doc__.strip(),
prog="{} -m {}".format(os.path.basename(sys.executable),
"guess_language")
)
parser.add_argument("file",
help="plain text file or “-” for stdin")
parser.add_argument("-c", "--encoding",
help="input encoding")
parser.add_argument("--disable-enchant", dest="use_enchant",
action="store_false",
help="disable enchant")
return parser.parse_args()
def main():
args = parse_args()
if args.file == "-":
file = sys.stdin.fileno()
encoding = args.encoding or (
sys.stdin.encoding if sys.stdin.isatty()
else locale.getpreferredencoding()
)
else:
file = args.file
encoding = args.encoding or "utf-8"
with open(file, encoding=encoding) as f:
text = "".join(f.readlines())
if not args.use_enchant:
guess_language.use_enchant(False)
tag = guess_language.guess_language(text)
print(tag)
return 0 if tag else 1
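# Hedged usage sketch (added for illustration): the guesser can also be called
# directly on a string, bypassing the CLI wrapper above.
def _example_guess():
    return guess_language.guess_language(
        "Le renard brun saute par-dessus le chien paresseux.")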
if __name__ == "__main__":
sys.exit(main())
| 25.185185 | 64 | 0.583088 | 154 | 1,360 | 4.967532 | 0.448052 | 0.084967 | 0.066667 | 0.057516 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003135 | 0.296324 | 1,360 | 53 | 65 | 25.660377 | 0.796238 | 0.036765 | 0 | 0 | 0 | 0 | 0.116654 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051282 | false | 0 | 0.128205 | 0 | 0.230769 | 0.025641 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3336a1aeca7c88b6ebc94ed5e338926b73f09a00 | 13,446 | py | Python | xiao_asgi/routing.py | jonathanstaniforth/xiao-asgi | 9a1c2b59d034a242574736a2a8130804bb799049 | [
"MIT"
] | null | null | null | xiao_asgi/routing.py | jonathanstaniforth/xiao-asgi | 9a1c2b59d034a242574736a2a8130804bb799049 | [
"MIT"
] | 17 | 2021-11-10T12:08:29.000Z | 2021-12-04T18:34:04.000Z | xiao_asgi/routing.py | jonathanstaniforth/xiao-asgi | 9a1c2b59d034a242574736a2a8130804bb799049 | [
"MIT"
] | null | null | null | """Routes and endpoints.
The available classes in this module can be used to construct routes for a
particular protocol and their endpoints.
Classes:
Route: abstract base class for building route classes for a protocol.
HttpRoute: a HTTP route and endpoints.
WebSocketRoute: a WebSocket route.
"""
import re
from abc import ABC
from collections.abc import Callable, Coroutine
from xiao_asgi.connections import (
Connection,
HttpConnection,
ProtocolMismatch,
WebSocketConnection,
)
from xiao_asgi.requests import Request
from xiao_asgi.responses import PlainTextResponse
route_regex = re.compile("{([a-zA-Z_][a-zA-Z0-9_]*)?}")
class Route(ABC):
"""A base class for routes.
Can be extended to create routes that involve a particular protocol.
Attributes:
path (str): the path for this route.
path_regex (re.Pattern): the regex object version of path.
protocol (str): the protocol for this route.
"""
protocol: str
def __init__(self, path: str) -> None:
"""Establish the path for this route.
Args:
path (str): the path for this route.
Example:
Creating a route::
>>> route = Route("/about")
"""
self.path = path
self.path_regex: re.Pattern = self.compile_path(path)
@staticmethod
def compile_path(path: str) -> re.Pattern:
"""Create a regex object for a path.
Args:
path (str): the path to create a regex object from.
Returns:
re.Pattern: the created regex object.
Example:
Creating a regex object::
>>> compiled_path = Route.compile_path("/post/{id}")
"""
index = 0
path_regex = "^"
for match in route_regex.finditer(path):
param_name = match.groups()[0]
path_regex += re.escape(path[index : match.start()])
path_regex += f"(?P<{param_name}>[^/]+)"
index = match.end()
path_regex += re.escape(path[index:].split(":")[0]) + "$"
return re.compile(path_regex)
async def get_endpoint(
self, endpoint: str
) -> Callable[[type[Connection], Request], Coroutine]:
"""Return the coroutine function for an endpoint.
The coroutine function must exist on this instance and its name must
match ``endpoint``.
Args:
endpoint (str): the required endpoint.
Returns:
Callable[[type[Connection], Request], Coroutine]: the coroutine
function associated with ``endpoint``.
Example:
Retrieving the coroutine function for an endpoint::
>>> route = Route("/")
>>> endpoint = route.get_endpoint("get")
"""
return getattr(self, endpoint)
async def __call__(self, connection: type[Connection]) -> None:
"""Pass the connection to the appropriate endpoint.
This method should be extended to implement the appropriate approach
to finding and calling the endpoint. When extended, the parent method
should be called first. See ``HttpRoute`` and ``WebSocketRoute`` for
examples on how to extend this method.
Args:
connection (type[Connection]): a ``Connection`` instance with
the connection information.
Raises:
ProtocolMismatch: if the connection's protocol does not match this
route's protocol.
"""
if connection.protocol != self.protocol:
raise ProtocolMismatch()
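# Illustrative sketch (added, not in the original module): compile_path()
# turns "{param}" placeholders into named regex groups, so concrete path
# parameters can be recovered with match.groupdict().
def _example_compile_path():
    pattern = Route.compile_path("/post/{id}")
    match = pattern.match("/post/42")
    return match.groupdict() if match else None  # -> {'id': '42'}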
class HttpRoute(Route):
"""A HTTP route.
Attributes:
protocol (str, optional): the protocol for this route. Defaults to
http.
Example:
Creating an HTTP route::
>>> http_route = HttpRoute("/about")
"""
protocol: str = "http"
async def get(self, connection: HttpConnection, request: Request) -> None:
"""Endpoint for a GET request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def head(self, connection: HttpConnection, request: Request) -> None:
"""Endpoint for a HEAD request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def post(self, connection: HttpConnection, request: Request) -> None:
"""Endpoint for a POST request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def put(self, connection: HttpConnection, request: Request) -> None:
"""Endpoint for a PUT request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def delete(
self, connection: HttpConnection, request: Request
) -> None:
"""Endpoint for a DELETE request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def connect(
self, connection: HttpConnection, request: Request
) -> None:
"""Endpoint for a CONNECT request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def options(
self, connection: HttpConnection, request: Request
) -> None:
"""Endpoint for a OPTIONS request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def trace(
self, connection: HttpConnection, request: Request
) -> None:
"""Endpoint for a TRACE request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def patch(
self, connection: HttpConnection, request: Request
) -> None:
"""Endpoint for a PATCH request method.
Override to implement this endpoint.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
request (Request): the received request.
"""
await self.send_method_not_allowed(connection)
async def send_internal_server_error(
self, connection: HttpConnection
) -> None:
"""Send a 500 HTTP response.
Override to change the response that is sent.
Args:
connection (HttpConnection): the connection to send the response
to.
"""
await connection.send_response(
PlainTextResponse(status=500, body=b"Internal Server Error")
)
async def send_not_implemented(self, connection: HttpConnection) -> None:
"""Send a 501 HTTP response.
Override to change the response that is sent.
Args:
connection (HttpConnection): the connection to send the response
to.
"""
await connection.send_response(
PlainTextResponse(status=501, body=b"Not Implemented")
)
async def send_method_not_allowed(
self, connection: HttpConnection
) -> None:
"""Send a 405 HTTP response.
Override to change the response that is sent.
Args:
connection (HttpConnection): the connection to send the response
to.
"""
await connection.send_response(
PlainTextResponse(status=405, body=b"Method Not Allowed")
)
async def __call__(self, connection: HttpConnection) -> None:
"""Pass the connection to the appropriate endpoint.
Sends a 500 HTTP response if an exception is raised when receiving or
processing the request. Sends a 501 HTTP response if the endpoint is
not found.
Args:
connection (HttpConnection): a ``Connection`` instance with
the connection information.
Raises:
Exception: re-raises any exception that is raised when receiving or
processing the request.
Example:
Routing a HTTP connection::
>>> connection = HttpConnection(scope, receive, send)
>>> route = HttpRoute("/")
>>> route(connection)
"""
await super().__call__(connection)
try:
endpoint = await self.get_endpoint(connection.method.lower())
except AttributeError:
await self.send_not_implemented(connection)
raise
try:
request = await connection.receive_request()
await endpoint(connection, request)
except Exception:
await self.send_internal_server_error(connection)
raise
class WebSocketRoute(Route):
"""A WebSocket route.
Attributes:
protocol (str, optional): the protocol for this route. Defaults to
websocket.
Example:
Creating a WebSocket route::
>>> websocket_route = WebSocketRoute("/chat")
"""
protocol: str = "websocket"
async def connect(
self, connection: WebSocketConnection, request: Request
) -> None:
"""Endpoint for a connect request type.
Override to implement this endpoint. Sends a WebSocket accept response.
Args:
connection (WebSocketConnection): a ``Connection`` instance
with the connection information.
request (Request): the received request.
"""
await connection.accept_connection()
async def receive(
self, connection: WebSocketConnection, request: Request
) -> None:
"""Endpoint for a receive request type.
Override to implement this endpoint.
Args:
connection (WebSocketConnection): a ``Connection`` instance
with the connection information.
request (Request): the received request.
"""
async def disconnect(
self, connection: WebSocketConnection, request: Request
) -> None:
"""Endpoint for a disconnect request type.
Override to implement this endpoint.
Args:
connection (WebSocketConnection): a ``Connection`` instance
with the connection information.
request (Request): the received request.
"""
async def send_internal_error(
self, connection: WebSocketConnection
) -> None:
"""Send a close response with a code of 1011 (Internal Error).
Override to change how internal errors are handled.
Args:
connection (WebSocketConnection): the connection to send the
response.
"""
await connection.close_connection(code=1011)
async def __call__(self, connection: WebSocketConnection) -> None:
"""Pass the connection to the appropriate endpoint.
Sends a 1011 close response if an exception is raised when receiving or
processing the request.
Args:
connection (WebSocketConnection): a ``Connection`` instance
with the connection information.
Raises:
Exception: re-raises any exception that is raised when receiving or
processing the request.
Example:
Routing a WebSocket connection::
>>> connection = WebSocketConnection(scope, receive, send)
>>> route = WebSocketRoute("/")
>>> route(connection)
"""
await super().__call__(connection)
try:
request = await connection.receive_request()
endpoint = await self.get_endpoint(request.type)
await endpoint(connection, request)
except Exception:
await self.send_internal_error(connection)
raise
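# Hedged example (added for illustration): a minimal HttpRoute subclass that
# serves GET requests; PlainTextResponse is used exactly as elsewhere in this
# module. The greeting body is hypothetical.
class _ExampleHelloRoute(HttpRoute):
    """Respond to GET requests with a plain-text greeting."""

    async def get(self, connection: HttpConnection, request: Request) -> None:
        await connection.send_response(
            PlainTextResponse(status=200, body=b"Hello, world!")
        )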
| 30.628702 | 79 | 0.608731 | 1,367 | 13,446 | 5.91368 | 0.136064 | 0.083127 | 0.035255 | 0.042677 | 0.640277 | 0.60094 | 0.563335 | 0.542182 | 0.527462 | 0.43951 | 0 | 0.004428 | 0.311319 | 13,446 | 438 | 80 | 30.69863 | 0.868575 | 0.10293 | 0 | 0.411765 | 0 | 0 | 0.024082 | 0.010034 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016807 | false | 0 | 0.05042 | 0 | 0.134454 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3337208933a4ad28110c7909d86f80e4880b9b57 | 5,757 | py | Python | oommfpy/tools/plot_omf.py | davidcortesortuno/oommfpy | daa56f96fd0d301d42a4bc260f7142f0a8e62f8d | [
"BSD-2-Clause"
] | 9 | 2019-05-25T07:42:14.000Z | 2022-02-22T21:08:47.000Z | oommfpy/tools/plot_omf.py | davidcortesortuno/oommfpy | daa56f96fd0d301d42a4bc260f7142f0a8e62f8d | [
"BSD-2-Clause"
] | 9 | 2019-05-25T07:41:57.000Z | 2021-11-27T14:12:28.000Z | oommfpy/tools/plot_omf.py | davidcortesortuno/oommfpy | daa56f96fd0d301d42a4bc260f7142f0a8e62f8d | [
"BSD-2-Clause"
] | 7 | 2019-07-21T05:42:39.000Z | 2022-03-28T13:57:03.000Z | from .. import MagnetisationData
from . import plot_tools
import numpy as np
import click
import scipy.spatial as ss
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider
# import matplotlib
# from matplotlib import rcParams
from mpl_toolkits.axes_grid1 import make_axes_locatable
# -----------------------------------------------------------------------------
def plot_omf(omf_file, ax=None,
savefig=None, cmap='hls', cbar=True, dpi=150,
cbar_offsets=[-0.15, -0.2], cbar_size=0.2):
"""
A simple function to plot a slice of the system in the xy-plane with a
specific colormap (HLS by default)
omf_file :: Path to omf file
ax :: Matplotlib axes object to plot into, if specified
savefig :: Filename of a figure to be saved with a format allowed
by matplotlib
cmap :: hls or any colormap accepted by matplotlib
cbar :: show a wheel colorbar for the hls cmap or a linear
colorbar if another cmap is used
dpi :: figure dpi
cbar_offsets :: offset x-y positions for the colorbar (hls only)
cbar_size :: size relative to plot axes fraction (only for hls color)
"""
data = MagnetisationData(omf_file)
data.generate_field()
data.generate_coordinates()
if not ax:
f = plt.figure()
ax = f.add_subplot(111)
if cmap == 'hls':
spin_data = plot_tools.generate_colours(data.field[:, :])
spin_data[data.field_norm < 1e-10] = [1., 1., 1.]
spin_data = spin_data.reshape(-1, data.nx, 3)
p = ax.imshow(spin_data, origin='lower', interpolation='None',
vmin=0, vmax=2 * np.pi,
extent=[data.xmin * 1e9, data.xmax * 1e9,
data.ymin * 1e9, data.ymax * 1e9]
)
if cbar:
box = ax.get_position()
axColor = plt.axes([box.x1 + cbar_offsets[0],
box.y1 + cbar_offsets[1],
cbar_size, cbar_size], projection='polar')
daz = 361 # Number of discretised values i.e. N of colours
azimuths = np.linspace(0, 360, daz)
dzn = 30  # Zeniths or "rings" from the centre towards the boundary
zeniths = np.arange(0, dzn, 1)
rgb = np.ones((dzn * daz, 3))
# Set the HLS hue value from 0 to 2 PI from the azimuth values
# We tile the circle "dz" times:
# [0 ... 2PI] -> [0...2PI 0 .. 2PI ...]
rgb[:, 0] = np.tile(np.radians(azimuths), dzn)
# For every circle (daz values) we increase the Light value
# from 1 to 0, i.e. from white to black, dz times:
# |--dzn--|
# [1 .. 0] -> [1 1 ... 1 0.8 0.8 ... 0.8 0.6 ... 0 0 ... 0]
greys = np.zeros(dzn)
# Last 2 rings are left completely black:
greys[:-1] = np.linspace(1, 0, dzn - 1)
rgb[:, 1] = np.repeat(greys, daz)
# Now we convert every row in HLS to RGB values
rgb = np.apply_along_axis(plot_tools.convert_to_RGB, 1, rgb)
# And plot in the polar axes:
axColor.pcolormesh(np.radians(azimuths), zeniths,
# only necessary as required n of args:
np.zeros((dzn, daz)),
# cmap=plt.cm.hsv
color=rgb,
shading='auto'
)
axColor.set_yticks([])
# axColor.set_xticks([0, np.pi * 0.5, np.pi, 1.5 * np.pi])
axColor.set_thetagrids([0, 90, 180, 270])
axColor.tick_params(axis='x', pad=0)
axColor.set_xticklabels([r'$0$', r'$\pi/2$',
r'$\pi$', r'$3\pi/2$'],
# fontsize=18
)
axColor.text(0.5, 0.5, r'$\vec{m}$',
horizontalalignment='center',
verticalalignment='center',
transform=axColor.transAxes,
# fontsize=20
)
else:
# Spin data in a grid
spin_z = data.field[:, 2].reshape(-1, data.nx)
ax.imshow(spin_z, origin='lower', cmap=cmap, interpolation='None',
vmin=-1, vmax=1,
extent=[data.xmin * 1e9, data.xmax * 1e9,
data.ymin * 1e9, data.ymax * 1e9]
)
if cbar:
plt.colorbar()
ax.set_ylabel(r'y (nm)')
ax.set_xlabel(r'x (nm)')
if savefig:
plt.savefig(savefig, bbox_inches='tight', dpi=dpi)
plt.show()
def plot_charge_density(omf_file, ax=None, savefig=None, dpi=150,
plane='xy', index=0):
"""
Plot the skyrmion number (sk number) density of a slice of the system
"""
data = MagnetisationData(omf_file)
data.generate_field()
data.generate_coordinates()
data.compute_sk_number(plane=plane, index=index)
if not ax:
f, ax = plt.subplots()
charge = data.sk_number
vmax = np.max(np.abs(charge))
charge.reshape(-1,)[data.field_norm < 1e-10] = np.nan
ax.imshow(charge, origin='lower', cmap='RdYlBu', interpolation='None',
vmin=-vmax, vmax=vmax,
extent=[data.xmin * 1e9, data.xmax * 1e9,
data.ymin * 1e9, data.ymax * 1e9]
)
ax.set_ylabel(r'y (nm)')
ax.set_xlabel(r'x (nm)')
plt.colorbar()
if savefig:
plt.savefig(savefig, bbox_inches='tight', dpi=dpi)
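# Hedged usage sketch (added for illustration; the file names are
# hypothetical): render a slice of an OMF file with the HLS colour wheel.
def _example_plot():
    plot_omf('m_000000.omf', cmap='hls', cbar=True, savefig='m_000000.png')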
| 37.141935 | 79 | 0.505645 | 717 | 5,757 | 3.980474 | 0.322176 | 0.022074 | 0.00946 | 0.01787 | 0.189559 | 0.175193 | 0.158374 | 0.158374 | 0.158374 | 0.158374 | 0 | 0.0377 | 0.368768 | 5,757 | 154 | 80 | 37.383117 | 0.747661 | 0.26559 | 0 | 0.282609 | 0 | 0 | 0.031242 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021739 | false | 0 | 0.086957 | 0 | 0.108696 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
333900ab48c1eeb5f993fcde786861b8ac255a00 | 38,673 | py | Python | EtherCAT/motion_master/python/lib/somanet_cia402_state_control.py | synapticon/application_examples | 3fdda85918f4a276cac107d2a4928025c3973be0 | [
"MIT"
] | null | null | null | EtherCAT/motion_master/python/lib/somanet_cia402_state_control.py | synapticon/application_examples | 3fdda85918f4a276cac107d2a4928025c3973be0 | [
"MIT"
] | null | null | null | EtherCAT/motion_master/python/lib/somanet_cia402_state_control.py | synapticon/application_examples | 3fdda85918f4a276cac107d2a4928025c3973be0 | [
"MIT"
] | null | null | null | from motion_master_wrapper import MotionMasterWrapper
from typing import Any, List, Tuple, Dict
from enum import Enum, unique
import time
import logging
logger = logging.getLogger(__name__)
class ExceptionStateControl(Exception):
pass
class ExceptionTimeout(ExceptionStateControl):
pass
class ExceptionOpMode(ExceptionStateControl):
pass
class Controlword:
"""
Helps manage the bits in the CiA402 Control word.
Bit table
---------
Bit Description
=== ===========
0 Switch on
1 Enable voltage
2 Quick stop (INVERTED LOGIC)
3 Enable operation
4 -opmode specific-
5 -opmode specific-
6 -opmode specific-
7 Fault reset
8 Halt
9 -reserved-
10 -reserved-
11 -manufacture-unused-
12 -manufacture-unused-
13 -manufacture-unused-
14 -manufacture-unused-
15 -manufacture-unused-
Bit Profile position Homing
=== ================ ======
4 New set point Homing operation start
5 Change set now -
6 Relative mode -
See Also
--------
IEC 61800-7-201 Generic interface and use of profiles for power drive systems
"""
#
# Define the bits in the Controlword.
#
SWITCH_ON = 0x0001
ENABLE_VOLTAGE = 0x0002
QUICK_STOP = 0x0004
ENABLE_OPERATION = 0x0008
FAULT_RESET = 0x0080
HALT = 0x0100
# Operation mode specific
PP_NEW_SETPOINT = 0x0010
PP_CHANGE_SET_POINT_NOW = 0x0020
PP_RELATIVE_MODE = 0x0040
PP_CHANGE_ON_SET_POINT = 0x0200
HOMING_OPERATION_START = 0x0010
def __init__(self, value: int = 0x00):
self.value = int(value)
def __int__(self):
return self.value
def update(self, cw: int):
"""
Update internal controlword value
Parameters
----------
cw : int
New controlword
"""
self.value = cw
def set(self, bits_in_word: int):
"""Set the bits in the argument in the Controlword."""
self.value |= bits_in_word
return self.value
def clear(self, bits_in_word: int):
"""Clear the bits in the argument from the Controlword."""
self.value &= ~bits_in_word
return self.value
def _bit_manipulator(self, bit: int, set_bit: bool = True):
"""
Changes bit depending on set_bit.
Parameters
----------
bit : int
Bit position
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
Return new Controlword
"""
return self.set(bit) if set_bit else self.clear(bit)
def shutdown(self):
"""Set the command to shutdown. (Enter Switch on disabled)
Transitions: 2, 6, 8
Returns
-------
value : int
The value of the control word
"""
# Clear Fault reset (7), Switch on (0)
self.value &= (~self.FAULT_RESET & ~self.SWITCH_ON) & 0xffff
# Set Quick-stop (2), Enable voltage (1)
self.value |= self.QUICK_STOP | self.ENABLE_VOLTAGE
return self.value
def switch_on(self):
"""Set the command to switch the drive on. (Enter Switched on)
Transitions: 3
Returns
-------
value : int
The value of the control word
"""
# Clear Fault reset (7), Enable operation (3)
self.value &= (~self.FAULT_RESET & ~self.ENABLE_OPERATION) & 0xffff
# Set Quick-stop (2), Enable voltage (1), Switch on (0)
self.value |= self.QUICK_STOP | self.ENABLE_VOLTAGE | self.SWITCH_ON
return self.value
def disable_voltage(self):
"""Set the command to disable voltage. (Enter Switch on disabled)
Transitions: 7, 9, 10, 12
Returns
-------
value : int
The value of the control word
"""
# Clear Fault reset (7), Enable voltage (1)
self.value &= (~self.FAULT_RESET & ~self.ENABLE_VOLTAGE) & 0xffff
return self.value
def quick_stop(self):
"""Set the command to issue a quick-stop. (Enter Quick stop)
Transition: 7, 10, 11
Returns
-------
value : int
The value of the control word
"""
# Clear Fault reset (7), Quick stop (2)
self.value &= (~self.FAULT_RESET & ~self.QUICK_STOP) & 0xffff
# Set Enable voltage (1)
self.value |= self.ENABLE_VOLTAGE
return self.value
def disable_operation(self):
"""Set the command to disable operation. (Enter Ready to switch on)
Transitions: 5
Returns
-------
value : int
The value of the control word
"""
# Clear Fault reset (7), Enable operation (3)
self.value &= (~self.FAULT_RESET & ~self.ENABLE_OPERATION) & 0xffff
# Set Quick-stop (2), Enable voltage (1), Switch on (0)
self.value |= self.QUICK_STOP | self.ENABLE_VOLTAGE | self.SWITCH_ON
return self.value
def enable_operation(self):
"""Set the command to enable operation (Enter Operation enabled)
This can be used to automatically transition through Switched on.
Transition: 4, 16
Returns
-------
value : int
The value of the control word
"""
# Clear Fault reset (7)
self.value &= ~self.FAULT_RESET & 0xffff
# Set Enable operation (3), Quick-stop (2), Enable voltage (1), Switch on (0)
self.value |= self.QUICK_STOP | self.ENABLE_OPERATION | self.SWITCH_ON | self.ENABLE_VOLTAGE
return self.value
def fault_reset(self):
"""Set the command to reset the fault. (Enter Switch on disabled)
This will reset the fault only after the bit transitions from 0 to 1. For this to
work properly when a fault is triggered right after another fault was cleared, you can
send the `shutdown()` command and _then_ send this `fault_reset()`. Therefore, unless
speed is essential, clearing a fault should be always preceded by a `shutdown()`.
Transition: 15
Returns
-------
value : int
The value of the control word
"""
# Set Fault reset (7)
self.value = self.FAULT_RESET
return self.value
def halt(self, set_bit: bool = True) -> int:
"""
Set the Halt bit (bit 8)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
The new controlword
"""
return self._bit_manipulator(self.HALT, set_bit)
def relative_position_mode(self, set_bit: bool = True) -> int:
"""
Changes bit 6 in controlword (Profile Position Mode).
If bit is 1, target position shall be a relative value.
If bit is 0, target position shall be an absolute value.
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
The new controlword
"""
return self._bit_manipulator(self.PP_RELATIVE_MODE, set_bit)
def new_setpoint(self, set_bit: bool = True) -> int:
"""
Changes bit 4 in controlword. (Profile Position Mode)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
The new controlword
"""
return self._bit_manipulator(self.PP_NEW_SETPOINT, set_bit)
def change_set_point_now(self, set_bit: bool = True) -> int:
"""
Changes bit 5 in controlword. (Profile Position Mode)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
The new controlword
"""
return self._bit_manipulator(self.PP_CHANGE_SET_POINT_NOW, set_bit)
def change_on_set_point(self, set_bit: bool = True) -> int:
"""
Changes bit 9 in controlword (Profile Position Mode)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
The new controlword
"""
return self._bit_manipulator(self.PP_CHANGE_ON_SET_POINT, set_bit)
def start_homing(self, set_bit: bool = True) -> int:
"""
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
Returns
-------
int
The new controlword
"""
return self._bit_manipulator(self.HOMING_OPERATION_START, set_bit)
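# Illustrative sketch (added, not in the original module): composing the
# CiA402 "Enable operation" command bit by bit. Starting from an empty word,
# enable_operation() sets Switch on (0), Enable voltage (1), Quick stop (2)
# and Enable operation (3), i.e. 0x000F, with Fault reset (7) cleared.
def _example_controlword():
    cw = Controlword()
    word = cw.enable_operation()
    assert word == (Controlword.SWITCH_ON | Controlword.ENABLE_VOLTAGE
                    | Controlword.QUICK_STOP | Controlword.ENABLE_OPERATION)
    return word  # 0x000F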
class Statusword:
"""
Helps manage the bits in the CiA402 Statusword.
Bit table
---------
Bit Description
=== ===========
0 Ready to switch on
1 Switched on
2 Operation enabled
3 Fault
4 Voltage enabled
5 Quick stop
6 Switch on disabled
7 Warning
8 -manufacturer-specific-
9 Remote
10 Target reached
11 Internal limit active
12 -operation mode specific-
13 -operation mode specific-
14 -manufacturer-specific-
15 -manufacturer-specific-
Bit Homing
=== ===============
12 Homing attained
13 Homing error
See Also
--------
IEC 61800-7-201 Generic interface and use of profiles for power drive systems
"""
def __init__(self, sw: int = 0x00):
self.value = sw
def __int__(self):
return self.value
def update(self, value):
"""Update the value of the Statusword."""
self.value = value
def _compare(self, sw: int, mask: int, bitpattern: int) -> bool:
"""
Checks if the bit pattern is set in the statusword.
Internal helper function.
Parameters
----------
sw : int
Statusword. Can be None, in which case the internal value is not updated.
mask : int
Bit mask for statusword
bitpattern : int
Bit pattern against which the status word is compared
Returns
-------
bool
"""
if sw:
self.update(sw)
return self.value & mask == bitpattern
def is_state_not_ready_to_switch_on(self, sw=None) -> bool:
"""Is the state 'Not ready to switch on'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Not ready to switch on'
"""
mask = 0b0000000001001111 # the bits we care about
value = 0b0000000000000000 # the value of those bits
return self._compare(sw, mask, value)
def is_state_switch_on_disabled(self, sw=None) -> bool:
"""Is the state 'Switch on disabled'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Switch on disabled'
"""
mask = 0b0000000001001111 # the bits we care about
value = 0b0000000001000000 # the value of those bits
return self._compare(sw, mask, value)
def is_state_ready_to_switch_on(self, sw=None) -> bool:
"""Is the state 'Ready to switch on'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Ready to switch on'
"""
mask = 0b0000000001101111 # the bits we care about
value = 0b0000000000100001 # the value of those bits
return self._compare(sw, mask, value)
def is_state_switched_on(self, sw=None) -> bool:
"""Is the state 'Switched on'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Switched on'
"""
mask = 0b0000000001101111 # the bits we care about
value = 0b0000000000100011 # the value of those bits
return self._compare(sw, mask, value)
def is_state_operation_enabled(self, sw=None) -> bool:
"""Is the state 'Operation enabled'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Operation enabled'
"""
mask = 0b0000000001101111 # the bits we care about
value = 0b0000000000100111 # the value of those bits
return self._compare(sw, mask, value)
def is_state_quick_stop_active(self, sw=None) -> bool:
"""Is the state 'Quick stop active'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Quick stop active'
"""
mask = 0b0000000001101111 # the bits we care about
value = 0b0000000000000111 # the value of those bits
return self._compare(sw, mask, value)
def is_state_fault_reaction_active(self, sw=None) -> bool:
"""Is the state 'Fault reaction active'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Fault reaction active'
"""
mask = 0b0000000001001111 # the bits we care about
value = 0b0000000000001111 # the value of those bits
return self._compare(sw, mask, value)
def is_state_fault(self, sw=None) -> bool:
"""Is the state 'Fault'?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if in state 'Fault'
"""
mask = 0b0000000001001111 # the bits we care about
value = 0b0000000000001000 # the value of those bits
return self._compare(sw, mask, value)
def has_fault(self, sw=None) -> bool:
"""Is the fault bit set?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if 'Fault' bit is set
"""
fault_bit = 0b0000000000001000
return self._compare(sw, fault_bit, fault_bit)
def has_warning(self, sw=None) -> bool:
"""Is the warning bit set?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if 'Warning' bit is set
"""
warning_bit = 0b0000000010000000
return self._compare(sw, warning_bit, warning_bit)
def is_target_reached(self, sw=None) -> bool:
"""Is the target reached bit set?
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if target is reached
"""
target_reached_bit = 0b0000010000000000
return self._compare(sw, target_reached_bit, target_reached_bit)
def is_homing_attained(self, sw=None) -> bool:
"""Is the homing attained bit set? (Bit 12, Operation Mode Specific)
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if homing is attained
"""
homing_attained_bit = 0b0001000000000000
return self._compare(sw, homing_attained_bit, homing_attained_bit)
def has_homing_error(self, sw=None) -> bool:
"""Is the homing error bit set? (Bit 13, Operation Mode Specific)
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
bool
True if homing has an error
"""
homing_error_bit = 0b0010000000000000
return self._compare(sw, homing_error_bit, homing_error_bit)
def is_speed_zero(self, sw=None) -> bool:
"""
Check if bit 12 is set. (Operation mode specific)
Parameters
----------
sw : int
Statusword. Will update the internal value.
Returns
-------
"""
speed_zero_bit = (1 << 12)
return self._compare(sw, speed_zero_bit, speed_zero_bit)
def get_human_readable_state(self, sw=None) -> str:
"""
Return method name of the matching state.
Parameters
----------
sw : int
Statusword. If None, method will use internal value
Returns
-------
"""
if not sw:
sw = self.value
method_names = [a for a in dir(self) if a.startswith('is_state')]
for m in method_names:
if m in ['get_human_readable_state', 'update', 'value']:
continue
attr = getattr(self, m)
if attr(sw & 0b1111111): # Just use the state-relevant bits
self.update(sw)
return m.replace('is_', '')
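# Illustrative sketch (added, not in the original module): decoding a raw
# statusword. 0b0000000000100111 has bits 0-2 and 5 set, which matches the
# "Operation enabled" pattern (mask 0b1101111, value 0b0100111).
def _example_statusword():
    sw = Statusword(0b0000000000100111)
    assert sw.is_state_operation_enabled()
    assert not sw.has_fault()
    return sw.get_human_readable_state()  # 'state_operation_enabled'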
@unique
class StateCommands(Enum):
SHUTDOWN = 0x1
SWITCH_ON = 0x2
ENABLE_OPERATION = 0x3
DISABLE_OPERATION = 0x4
DISABLE_VOLTAGE = 0x5
QUICK_STOP = 0x6
FAULT_RESET = 0x7
@unique
class OpModes(Enum):
NOT_SET = 1000
COMMUTATION_OFFSET = -2
COGGING_COMP_RECORD = -1
PROFILE_POSITION = 1
PROFILE_VELOCITY = 3
PROFILE_TORQUE = 4
HOMING_MODE = 6
CSP = 8
CSV = 9
CST = 10
@unique
class HomingState(Enum):
HOMING_IN_PROGRESS = 0
HOMING_IS_INTERRUPTED = 1
HOMING_IS_ATTAINED = 2
HOMING_COMPLETED = 3
HOMING_ERROR_SPEED_NOT_ZERO = 4
HOMING_ERROR_SPEED_ZERO = 5
class StateControl:
OD_CONTROLWORD = (0x6040, 0)
OD_STATUSWORD = (0x6041, 0)
OD_OPERATIONMODE = (0x6060, 0)
OD_OPERATIONMODEDISPLAY = (0x6061, 0)
TIMEOUT = 4 # s
TIMEOUT_FIND_INDEX_PULL_BRAKE = 8 # s
FAULT_RESET_TRY = 5
STATE_COMMANDS = StateCommands
OP_MODES = OpModes
HOMING_STATES = HomingState
def __init__(self, mmw: MotionMasterWrapper, dev_address: int, log: bool = False):
"""
StateControl class. Takes care of CiA402 states on our nodes.
Parameters
----------
mmw : MotionMasterWrapper
Motion master wrapper that provides communication with the nodes.
dev_address : int
Device address in the EtherCAT chain.
"""
self.dev_address = dev_address
self.mmw = mmw
self.op_mode = self.OP_MODES.NOT_SET
self.cw = Controlword(self._get_controlword())
self.sw = Statusword(self._get_statusword())
self._current_state = self.sw.get_human_readable_state()
self.current_timeout = 0
self._fault_reset_counter = self.FAULT_RESET_TRY
logger.disabled = not log
def _mmw_get_param(self, index: int, subindex: int) -> Any:
"""
Get parameter from device by motion master wrapper.
Parameters
----------
index : int
Object dictionary index
subindex : int
Object dictionary sub index
Returns
-------
Any
Value inside object dictionary
"""
return self.mmw.get_device_parameter_value(self.dev_address, index, subindex)
def _mmw_set_param(self, index: int, subindex: int, value: Any):
"""
Set parameter on device by motion master wrapper.
Parameters
----------
index : int
Object dictionary index
subindex : int
Object dictionary sub index
value : Any
The new value for the OD entry
"""
self.mmw.set_device_parameter_value(self.dev_address, index, subindex, value)
def _set_timeout(self, timeout: int):
"""
(Re-)Set timeout.
Reset the timeout after every successful state transition.
Parameters
----------
timeout : int
New timeout in seconds
"""
self.current_timeout = time.time() + timeout
def _get_statusword(self) -> int:
"""
Get statusword from device.
Returns
-------
int
Statusword from object dictionary
"""
return self._mmw_get_param(*self.OD_STATUSWORD)
def _get_controlword(self) -> int:
"""
Get controlword from device.
Returns
-------
int
Controlword from object dictionary
"""
return self._mmw_get_param(*self.OD_CONTROLWORD)
def _update_statusword(self):
"""
Update statusword inside statusword manager.
"""
self.sw.update(self._get_statusword())
def _send_controlword(self, cw: int):
"""
Send controlword to device.
Parameters
----------
cw : int
Controlword value.
"""
logger.debug("Controlword: {w:016b}, 0x{w:x}, {w}".format(w=int(self.cw)))
self._mmw_set_param(*self.OD_CONTROLWORD, cw)
def set_op_mode(self, mode: OpModes):
"""
Set operation mode on device.
Parameters
----------
mode : OpModes
Operation mode value from Enum OpModes.
"""
self.op_mode = mode
logger.debug("Change Op Mode to {}".format(self.op_mode.name))
self._mmw_set_param(*self.OD_OPERATIONMODE, self.op_mode.value)
self._set_timeout(self.TIMEOUT)
# Raise exception if Op mode display is not updated.
while True:
op_mode_display = self._mmw_get_param(*self.OD_OPERATIONMODEDISPLAY)
if op_mode_display == self.op_mode.value:
break
elif time.time() > self.current_timeout:
raise ExceptionTimeout(
"Op mode is {} and didn't change to {}.".format(op_mode_display, self.op_mode.value))
@property
def device_address(self):
return self.dev_address
@property
def current_state(self) -> str:
"""
Returns the current device state as a human-readable string.
Returns
-------
str
Human readable state string (e.g. "state_switched_on")
"""
return self._current_state
def _state_machine(self, command: StateCommands) -> bool:
"""
Heart of this class: The CiA402 state machine.
Parameters
----------
command : StateCommands
The command for the state machine (e.g. Fault Reset, Enable Operation), given as a value of the StateCommands enum.
Returns
-------
bool
True if requested state was reached, otherwise false.
"""
self._current_state = self.sw.get_human_readable_state()
if command == self.STATE_COMMANDS.FAULT_RESET:
if not self.sw.has_fault():
self._fault_reset_counter = self.FAULT_RESET_TRY
return True
if self._fault_reset_counter > 0:
self._fault_reset_counter -= 1
self._send_controlword(self.cw.shutdown())
time.sleep(0.3)
self._set_timeout(self.TIMEOUT)
self._send_controlword(self.cw.fault_reset())
time.sleep(0.3)
return False
else:
error_description = self._mmw_get_param(0x203f, 1)
raise ExceptionStateControl("Could not reset fault '{}'".format(error_description) )
if self.sw.is_state_not_ready_to_switch_on():
# Transition 1
# Automatic state change to switched on disabled. Nothing to do
return False
elif self.sw.is_state_switch_on_disabled():
logger.debug("state_switch_on_disabled")
# 1, 7, 9, 15, 10, 12
if command in [self.STATE_COMMANDS.DISABLE_VOLTAGE, self.STATE_COMMANDS.QUICK_STOP]:
return True
else:
# Transition 2
logger.debug("Do Shutdown")
self._send_controlword(self.cw.shutdown())
return False
elif self.sw.is_state_ready_to_switch_on():
logger.debug("state_ready_to_switch_on")
# 2, 6, 8
if command in [self.STATE_COMMANDS.SHUTDOWN, self.STATE_COMMANDS.FAULT_RESET]:
return True
elif command == self.STATE_COMMANDS.QUICK_STOP:
# Transition 7
logger.debug("Do Quick stop")
self._send_controlword(self.cw.quick_stop())
elif command == self.STATE_COMMANDS.DISABLE_VOLTAGE:
# Transition 7
logger.debug("Do Disable Voltage")
self._send_controlword(self.cw.disable_voltage())
else:
# Transition 3
logger.debug("Do Switch On")
self._send_controlword(self.cw.switch_on())
return False
elif self.sw.is_state_switched_on():
logger.debug("state_switched_on")
# 3, 5
if command in [self.STATE_COMMANDS.SWITCH_ON, self.STATE_COMMANDS.DISABLE_OPERATION]:
return True
elif command == self.STATE_COMMANDS.DISABLE_VOLTAGE:
# Transition 10
logger.debug("Do Disable Voltage")
self._send_controlword(self.cw.disable_voltage())
elif command == self.STATE_COMMANDS.QUICK_STOP:
# Transition 10
logger.debug("Do Quick Stop")
self._send_controlword(self.cw.quick_stop())
else:
# Transition 4
logger.debug("Do Enable Operation")
self._send_controlword(self.cw.enable_operation())
return False
elif self.sw.is_state_operation_enabled():
logger.debug("state_operation_enabled")
# 4, 16
if command == self.STATE_COMMANDS.ENABLE_OPERATION:
return True
elif command == self.STATE_COMMANDS.DISABLE_VOLTAGE:
# Transition 9
logger.debug("Do disable voltage")
self._send_controlword(self.cw.disable_voltage())
elif command == self.STATE_COMMANDS.SHUTDOWN:
# Transition 8
logger.debug("Do shutdown")
self._send_controlword(self.cw.shutdown())
else:
# Transition 11
logger.debug("Do quick stop")
self._send_controlword(self.cw.quick_stop())
return False
elif self.sw.is_state_quick_stop_active():
logger.debug("state_quick_stop_active")
# 11
if command == self.STATE_COMMANDS.QUICK_STOP:
return True
elif command == self.STATE_COMMANDS.DISABLE_VOLTAGE:
# Transition 12
logger.debug("Do disable voltage")
self._send_controlword(self.cw.disable_voltage())
else:
# Transition 16
logger.debug("Do enable operation")
self._send_controlword(self.cw.enable_operation())
elif self.sw.is_state_fault_reaction_active():
return False
def do_transition(self, command: StateCommands):
"""
Transition manager. Checks if the requested state was reached. Throws a timeout exception if the transition takes too long.
Parameters
----------
command : StateCommands
The command for the state machine (e.g. Fault Reset, Enable Operation), given as a value of the StateCommands enum.
Raises
------
ExceptionOpMode
If op mode is not set by user.
ExceptionTimeout
If transition takes too long.
"""
old_state = self.current_state
# Check if the op mode was set by the user.
if self.op_mode == self.OP_MODES.NOT_SET and command == self.STATE_COMMANDS.ENABLE_OPERATION:
raise ExceptionStateControl("No operation mode set")
# Set timeout
self._set_timeout(self.TIMEOUT)
while True:
logger.debug("Statusword: {sw:016b}, 0x{sw:x}, {st}".format(sw=int(self.sw), st=self.sw.get_human_readable_state()))
# Get current statusword from device
self._update_statusword()
# Do transition.
if self._state_machine(command):
return
# Check if state was changed and reset timeout
if self.current_state != old_state:
if self.current_state == 'state_switched_on' and command == self.STATE_COMMANDS.ENABLE_OPERATION:
# Set timeout for transition 4 to a higher value, because index detection and pulling brake,
# which both happen in this state, might take longer than the standard timeout.
self._set_timeout(self.TIMEOUT_FIND_INDEX_PULL_BRAKE)
else:
self._set_timeout(self.TIMEOUT)
old_state = self.current_state
# Raise exception, if transition takes too long.
if time.time() > self.current_timeout:
error_description = self._mmw_get_param(0x203f, 1)
raise ExceptionTimeout("Timeout during state '{}'. Fault: {}".format(self.current_state, error_description))
def shutdown(self):
"""
Send command "Shutdown"
"""
self.do_transition(self.STATE_COMMANDS.SHUTDOWN)
def switch_on(self):
"""
Send command "Switch On"
"""
self.do_transition(self.STATE_COMMANDS.SWITCH_ON)
def disable_voltage(self):
"""
Send command "Disable Voltage"
"""
self.do_transition(self.STATE_COMMANDS.DISABLE_VOLTAGE)
def quick_stop(self):
"""
Send command "Quick Stop"
"""
self.do_transition(self.STATE_COMMANDS.QUICK_STOP)
def disable_operation(self):
"""
Send command "Disable Operation"
"""
self.do_transition(self.STATE_COMMANDS.DISABLE_OPERATION)
def enable_operation(self, reset_fault: bool = True):
"""
Send command "Enable Operation". Performs a fault reset first if reset_fault is True.
"""
if reset_fault:
self.fault_reset()
self.do_transition(self.STATE_COMMANDS.ENABLE_OPERATION)
def fault_reset(self):
"""
Send command "Fault Reset"
"""
self.do_transition(self.STATE_COMMANDS.FAULT_RESET)
def set_state(self, command: StateCommands):
"""
Send your own state (for automation purposes)
Parameters
----------
command : StateCommands
The command for the state machine (e.g. Fault Reset, Enable Operation), given as a value of the StateCommands enum.
"""
self.do_transition(command)
def has_fault(self) -> bool:
"""
Checks if bit 3 is set.
Returns
-------
bool
True, if bit is 1
"""
sw = self._get_statusword()
return self.sw.has_fault(sw)
def has_warning(self) -> bool:
"""
Checks if bit 7 is set.
Returns
-------
bool
True, if bit is 1
"""
sw = self._get_statusword()
return self.sw.has_warning(sw)
def is_target_reached(self) -> bool:
"""
Checks if bit 10 is set.
Returns
-------
bool
True, if bit is 1
"""
sw = self._get_statusword()
return self.sw.is_target_reached(sw)
def is_homing_attained(self) -> bool:
"""
Checks if bit 12 is set. (Homing Mode)
Returns
-------
bool
True, if bit is 1
"""
sw = self._get_statusword()
return self.sw.is_homing_attained(sw)
def has_homing_error(self) -> bool:
"""
Checks if bit 13 is set. (Homing Mode)
Returns
-------
bool
True, if bit is 1
"""
sw = self._get_statusword()
return self.sw.has_homing_error(sw)
def halt(self, set_bit: bool = True):
"""
Sets bit 8 in controlword. (Operation mode specific)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
"""
self._send_controlword(self.cw.halt(set_bit))
def is_speed_zero(self) -> bool:
"""
Checks if bit 12 is set. (Profile Velocity Mode)
Returns
-------
bool
True, if bit is 1
"""
sw = self._get_statusword()
return self.sw.is_speed_zero(sw)
####################################################
# Homing Mode
####################################################
def start_homing(self, set_bit: bool = True):
"""
Set bit 4 to start homing procedure.
True starts homing.
False stops homing.
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
"""
self._send_controlword(self.cw.start_homing(set_bit))
def get_homing_state(self) -> HomingState:
"""
Return the current state of the homing procedure.
Returns
-------
HomingState
"""
if not (self.has_homing_error() or self.is_homing_attained() or self.is_target_reached()):
return self.HOMING_STATES.HOMING_IN_PROGRESS
elif not self.has_homing_error() and not self.is_homing_attained() and self.is_target_reached():
return self.HOMING_STATES.HOMING_IS_INTERRUPTED
elif not self.has_homing_error() and self.is_homing_attained() and not self.is_target_reached():
return self.HOMING_STATES.HOMING_IS_ATTAINED
elif not self.has_homing_error() and self.is_homing_attained() and self.is_target_reached():
return self.HOMING_STATES.HOMING_COMPLETED
elif self.has_homing_error() and not self.is_homing_attained() and not self.is_target_reached():
return self.HOMING_STATES.HOMING_ERROR_SPEED_NOT_ZERO
elif self.has_homing_error() and not self.is_homing_attained() and self.is_target_reached():
return self.HOMING_STATES.HOMING_ERROR_SPEED_ZERO
####################################################
# Profile Position Mode
####################################################
def relative_position_mode(self, set_bit: bool = True):
"""
Changes bit 6 in controlword (Profile Position Mode).
If set_bit is True, target position shall be a relative value.
If set_bit is False, target position shall be an absolute value.
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
"""
self._send_controlword(self.cw.relative_position_mode(set_bit))
# Bit definitions (relates to next six methods)
# +=======+=======+========+===================================================================================+
# | Bit 9 | Bit 5 | Bit 4 | Definition |
# +=======+=======+========+===================================================================================+
# | 0 | 0 | 0 -> 1 | Positioning shall be completed (target reached) before the next one gets started. |
# +-------+-------+--------+-----------------------------------------------------------------------------------+
# | x | 1 | 0 -> 1 | Next positioning shall be started immediately. |
# +-------+-------+--------+-----------------------------------------------------------------------------------+
# | 1 | 0 | 0 -> 1 | Positioning with the current profile velocity up to the current set-point shall |
# | | | | be proceeded and then next positioning shall be applied. |
# +-------+-------+--------+-----------------------------------------------------------------------------------+
def new_setpoint(self, set_bit: bool = True):
"""
Changes bit 4 in controlword. (Profile Position Mode)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
"""
self._send_controlword(self.cw.new_setpoint(set_bit))
def change_set_point_now(self, set_bit: bool = True):
"""
Changes bit 5 in controlword. (Profile Position Mode)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
"""
self._send_controlword(self.cw.change_set_point_now(set_bit))
def change_on_set_point(self, set_bit: bool = True):
"""
Changes bit 9 in controlword (Profile Position Mode)
Parameters
----------
set_bit : bool
If true, set bit, otherwise clear it
"""
self._send_controlword(self.cw.change_on_set_point(set_bit))
def pp_complete_next_position(self):
"""
See first table row
"""
self.change_set_point_now(False)
self.change_on_set_point(False)
self.new_setpoint()
def pp_start_next_position_now(self):
"""
See second table row
"""
self.change_set_point_now()
self.new_setpoint()
def pp_finish_positioning_then_next(self):
"""
See third table row
"""
self.change_on_set_point()
self.change_set_point_now(False)
self.new_setpoint()
def pp_reset_bits(self):
"""
Reset all position profile related bits
"""
self.change_on_set_point(False)
self.change_set_point_now(False)
self.new_setpoint(False)
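# Hedged usage sketch (added for illustration): driving a node into
# "Operation enabled" in Profile Position mode. ``mmw`` must be a connected
# MotionMasterWrapper and the device address (1) is hypothetical.
def _example_state_control(mmw: MotionMasterWrapper):
    sc = StateControl(mmw, dev_address=1)
    sc.set_op_mode(StateControl.OP_MODES.PROFILE_POSITION)
    sc.enable_operation()            # fault reset + CiA402 state transitions
    sc.pp_start_next_position_now()  # latch a new set-point immediately
    return sc.current_state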
| 29.209215 | 128 | 0.552375 | 4,302 | 38,673 | 4.787076 | 0.092748 | 0.017189 | 0.012625 | 0.021414 | 0.578032 | 0.513159 | 0.472322 | 0.426581 | 0.367923 | 0.334806 | 0 | 0.028716 | 0.336359 | 38,673 | 1,323 | 129 | 29.231293 | 0.7737 | 0.364001 | 0 | 0.316953 | 0 | 0 | 0.028804 | 0.005963 | 0 | 0 | 0.008439 | 0 | 0 | 1 | 0.186732 | false | 0.007371 | 0.012285 | 0.007371 | 0.488943 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3339b9c44565e5317411d959eeba2f844a3a798a | 3,319 | py | Python | tests/migration_test.py | jpmn/sqlalchemy-imageattach | 4d965bba348e62026df442099cebcf2b856f769e | [
"MIT"
] | null | null | null | tests/migration_test.py | jpmn/sqlalchemy-imageattach | 4d965bba348e62026df442099cebcf2b856f769e | [
"MIT"
] | 1 | 2020-08-07T14:40:27.000Z | 2020-08-07T14:40:27.000Z | tests/migration_test.py | jpmn/sqlalchemy-imageattach | 4d965bba348e62026df442099cebcf2b856f769e | [
"MIT"
] | null | null | null | import io
import os.path
from pytest import fixture
from sqlalchemy_imageattach.context import store_context
from sqlalchemy_imageattach.migration import migrate, migrate_class
from sqlalchemy_imageattach.store import Store
from .conftest import Base, sample_images_dir
from .entity_test import Samething, Something, SomethingCover
class SourceStore(Store):
def __init__(self):
self.files = {}
def put_file(self, file, object_type, object_id, width, height, mimetype,
reproducible):
key = object_type, object_id, width, height, mimetype
self.files[key] = file.read(), reproducible
def get_file(self, object_type, object_id, width, height, mimetype):
key = object_type, object_id, width, height, mimetype
return io.BytesIO(self.files[key][0])
@fixture
def fx_source_store():
return SourceStore()
@fixture
def fx_migration(fx_session, fx_source_store):
with store_context(fx_source_store):
with fx_session.begin():
a1 = Something(name='a1')
fx_session.add(a1)
with open(os.path.join(sample_images_dir, 'iu.jpg'), 'rb') as f:
a1.cover.from_file(f)
a1.cover.generate_thumbnail(height=480)
a1.cover.generate_thumbnail(height=320)
a1.cover.generate_thumbnail(height=160)
a2 = Something(name='a2')
fx_session.add(a2)
with open(os.path.join(sample_images_dir, 'iu2.jpg'), 'rb') as f:
a2.cover.from_file(f)
b1 = Samething(name='b1')
fx_session.add(b1)
with open(os.path.join(sample_images_dir, 'asuka.jpg'), 'rb') as f:
b1.cover.from_file(f)
b1.cover.generate_thumbnail(height=375)
b1.cover.generate_thumbnail(height=250)
b1.cover.generate_thumbnail(height=125)
b2 = Samething(name='b2')
fx_session.add(b2)
with open(os.path.join(sample_images_dir, 'shinji.jpg'),
'rb') as f:
b2.cover.from_file(f)
def test_migrate_class_execute(fx_session, fx_source_store, fx_migration):
dst = SourceStore()
plan = migrate_class(fx_session, SomethingCover, fx_source_store, dst)
assert dst.files == {}
plan.execute()
assert dst.files == dict(
(k, v)
for k, v in fx_source_store.files.items()
if k[0] == 'something-cover'
)
def test_migrate_class_iter(fx_session, fx_source_store, fx_migration):
dst = SourceStore()
plan = migrate_class(fx_session, SomethingCover, fx_source_store, dst)
assert dst.files == {}
for _ in plan:
pass
assert dst.files == dict(
(k, v)
for k, v in fx_source_store.files.items()
if k[0] == 'something-cover'
)
def test_migrate_execute(fx_session, fx_source_store, fx_migration):
dst = SourceStore()
plan = migrate(fx_session, Base, fx_source_store, dst)
assert dst.files == {}
plan.execute()
assert fx_source_store.files == dst.files
def test_migrate_iter(fx_session, fx_source_store, fx_migration):
dst = SourceStore()
plan = migrate(fx_session, Base, fx_source_store, dst)
assert dst.files == {}
for _ in plan:
pass
assert fx_source_store.files == dst.files
| 32.539216 | 79 | 0.648087 | 441 | 3,319 | 4.641723 | 0.197279 | 0.058622 | 0.095261 | 0.082071 | 0.61065 | 0.497313 | 0.497313 | 0.43576 | 0.332193 | 0.332193 | 0 | 0.017579 | 0.245857 | 3,319 | 101 | 80 | 32.861386 | 0.80024 | 0 | 0 | 0.390244 | 0 | 0 | 0.023501 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 1 | 0.109756 | false | 0.02439 | 0.097561 | 0.012195 | 0.243902 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
333af0ad0fa9c494dd0bf7a963c9754911443e0e | 2,852 | py | Python | unittest/base/JavaConfigTest.py | hamatoma/snakeboxx | de4609e0d980c7ce775060e3813e71752e8670aa | [
"CC0-1.0"
] | null | null | null | unittest/base/JavaConfigTest.py | hamatoma/snakeboxx | de4609e0d980c7ce775060e3813e71752e8670aa | [
"CC0-1.0"
] | null | null | null | unittest/base/JavaConfigTest.py | hamatoma/snakeboxx | de4609e0d980c7ce775060e3813e71752e8670aa | [
"CC0-1.0"
] | null | null | null |
'''
Created on 12.04.2018

@author: hm
'''
import os

from unittest.UnitTestCase import UnitTestCase

import base.JavaConfig
import base.StringUtils
import base.MemoryLogger

DEBUG = False


class JavaConfigTest(UnitTestCase):

    def debugFlag(self):
        base.StringUtils.avoidWarning(self)
        return DEBUG

    def testBasic(self):
        logger = base.MemoryLogger.MemoryLogger()
        fn = self.tempFile('javaconf.conf')
        base.StringUtils.toFile(fn, '# comment\nabc.def=/dev\n\t\n\tFile = /tmp/x')
        config = base.JavaConfig.JavaConfig(fn, logger)
        self.assertIsEqual('/dev', config.getString('abc.def'))
        self.assertIsEqual('/tmp/x', config.getString('File'))
        self.assertNone(config.getString('file'))
        self.assertNone(config.getString('unknown'))
        os.unlink(fn)

    def testSyntaxError(self):
        fn = self.tempFile('error.conf')
        base.StringUtils.toFile(fn, '# comment\nabc.def:=/dev\n\t\n\tFile')
        logger = base.MemoryLogger.MemoryLogger()
        base.JavaConfig.JavaConfig(fn, logger)
        self.assertTrue(logger.contains('error.conf line 2: unexpected syntax [expected: <var>=<value>]: abc.def:=/dev', True))
        self.assertTrue(logger.contains('error.conf line 4: unexpected syntax [expected: <var>=<value>]: File', True))

    def testIntVar(self):
        fn = self.tempFile('javaconf.conf')
        base.StringUtils.toFile(fn, '# comment\nnumber=123\nWrong = zwo')
        logger = base.MemoryLogger.MemoryLogger()
        config = base.JavaConfig.JavaConfig(fn, logger)
        self.assertIsEqual(123, config.getInt('number'))
        self.assertIsEqual(456, config.getInt('unknown', 456))
        self.assertIsEqual(111, config.getInt('Wrong', 111))
        self.assertTrue(logger.contains('javaconf.conf: variable Wrong is not an integer: zwo', True))
        os.unlink(fn)

    def testGetKeys(self):
        fn = self.tempFile('javaconf.conf')
        base.StringUtils.toFile(fn, '# comment\nnumber=123\nWrong = zwo')
        logger = base.MemoryLogger.MemoryLogger()
        config = base.JavaConfig.JavaConfig(fn, logger)
        keys = config.getKeys()
        self.assertIsEqual(2, len(keys))
        self.assertIsEqual('Wrong', keys[0])
        self.assertIsEqual('number', keys[1])
        os.unlink(fn)

    def testGetKeysRegExpr(self):
        fn = self.tempFile('javaconf.conf')
        base.StringUtils.toFile(fn, '# comment\nnumber=123\nWrong = zwo')
        logger = base.MemoryLogger.MemoryLogger()
        config = base.JavaConfig.JavaConfig(fn, logger)
        keys = config.getKeys(r'number|int')
        self.assertIsEqual(1, len(keys))
        self.assertIsEqual('number', keys[0])
        os.unlink(fn)


if __name__ == '__main__':
    #import sys;sys.argv = ['', 'Test.testName']
    tester = JavaConfigTest()
    tester.run()
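# Illustrative sketch (not part of the test suite) of the JavaConfig API these
# tests document; the config path below is hypothetical:
#
#     logger = base.MemoryLogger.MemoryLogger()
#     config = base.JavaConfig.JavaConfig('/etc/myapp/app.conf', logger)
#     datadir = config.getString('data.dir')     # None if the key is missing
#     port = config.getInt('server.port', 8080)  # falls back to the default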
| 38.026667 | 127 | 0.654979 | 328 | 2,852 | 5.670732 | 0.286585 | 0.091398 | 0.05914 | 0.091398 | 0.507527 | 0.473118 | 0.455914 | 0.36828 | 0.32957 | 0.32957 | 0 | 0.01712 | 0.201262 | 2,852 | 74 | 128 | 38.540541 | 0.799385 | 0.027349 | 0 | 0.338983 | 0 | 0.033898 | 0.190166 | 0.052422 | 0 | 0 | 0 | 0 | 0.254237 | 1 | 0.101695 | false | 0 | 0.084746 | 0 | 0.220339 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
333cef037136b7281bd9ce4d9d7c5d23e7fb2fef | 9,719 | py | Python | marble_match/utils/account.py | ConstObject/marble-match-manager | abb7354656685482cc7289e8f6c68483af615e33 | [
"MIT"
] | 1 | 2021-04-02T21:09:46.000Z | 2021-04-02T21:09:46.000Z | marble_match/utils/account.py | ConstObject/marble-match-manager | abb7354656685482cc7289e8f6c68483af615e33 | [
"MIT"
] | 7 | 2021-04-02T07:34:03.000Z | 2021-06-11T07:11:55.000Z | marble_match/utils/account.py | ConstObject/marble-match-manager | abb7354656685482cc7289e8f6c68483af615e33 | [
"MIT"
] | null | null | null |
import sqlite3
import logging
from typing import Union
from dataclasses import dataclass
from datetime import datetime

import discord
from discord.ext import commands

import database.database_operation as database_operation
import database.database_setup as database_setup
import utils.discord_utils as du
import utils.exception as exception

logger = logging.getLogger('marble_match.acc')


@dataclass(order=True)
class Account:
    id: int
    member: discord.Member
    _nickname: str
    _marbles: int
    server_id: int
    _wins: int
    _loses: int

    @property
    def winrate(self) -> float:
        if self._wins:
            return 100 * (self._wins / (self._wins + self._loses))
        else:
            return 0

    @property
    def friendly_last_used(self) -> Union[datetime, int]:
        logger.debug(f'friendly_last_used_getter')
        # Try and get value from database
        try:
            time = database_operation.get_friendly_last_used(database_setup.DbHandler.db_cnc, self.id)
            if not time:
                logger.debug(f'No last_used value')
                database_operation.create_friendly(database_setup.DbHandler.db_cnc, self.id)
                return 0
            else:
                return time
        except Exception as e:
            logger.error(f'Unable to read friendly_last_used: {e}')
            raise exception.UnableToRead(class_='Account', attribute='friendly_last_used')

    @friendly_last_used.setter
    def friendly_last_used(self, time: datetime):
        logger.debug(f'friendly_last_used_setter: {time}')
        # Try and write to database
        try:
            database_operation.update_friendly(database_setup.DbHandler.db_cnc, self.id, time)
            logger.debug(f'Wrote friendly_last_used: {time}')
        except Exception as e:
            logger.error(f'Unable to write friendly_last_used: {e}')
            raise exception.UnableToWrite(class_='Account', attribute='friendly_last_used')

    @property
    def nickname(self) -> str:
        # Check if nickname equals member, if it does return member.display_name
        if self._nickname == str(self.member):
            logger.debug(f'nickname is same as member')
            return self.member.display_name
        else:
            return self._nickname

    @nickname.setter
    def nickname(self, nickname: str):
        logger.debug(f'nickname_setter: {nickname}')
        # Update nickname in database, check if write was successful then update Account info
        if database_operation.update_player_nickname(database_setup.DbHandler.db_cnc, self.id, nickname):
            self._nickname = nickname
            logger.debug('Updated nickname')
        else:
            logger.error('Unable to update nickname')
            raise exception.UnableToWrite(class_='Account', attribute='nickname', value=nickname)

    @property
    def marbles(self) -> int:
        return self._marbles

    @marbles.setter
    def marbles(self, amount: int):
        logger.debug(f'marbles_setter: {amount}')
        # Check if amount is negative, set to zero if it's negative
        if amount < 0:
            logger.debug('amount was less than zero')
            amount = 0
        # Update marble count in database, check if write was successful then update Account info
        if database_operation.update_marble_count(database_setup.DbHandler.db_cnc, self.id, amount):
            self._marbles = amount
            logger.debug('Updated marbles')
        else:
            logger.error('Unable to update marbles')
            raise exception.UnableToWrite(class_='Account', attribute='marbles', value=amount)

    @property
    def wins(self) -> int:
        return self._wins

    @wins.setter
    def wins(self, amount: int):
        logger.debug(f'wins_setter: {amount}')
        # Check if amount is negative, set to zero if it's negative
        if amount < 0:
            logger.debug('amount was less than zero')
            amount = 0
        # Update wins in database, check if write was successful then update Account info
        if database_operation.update_player_wins(database_setup.DbHandler.db_cnc, self.id, amount):
            self._wins = amount
            logger.debug('Updated wins')
        else:
            logger.error('Unable to update wins')
            raise exception.UnableToWrite(class_='Account', attribute='wins', value=amount)

    @property
    def loses(self) -> int:
        return self._loses

    @loses.setter
    def loses(self, amount: int):
        logger.debug(f'loses_setter: {amount}')
        # Check if amount is negative, set to zero if it's negative
        if amount < 0:
            logger.debug('amount was less than zero')
            amount = 0
        # Update loses in database, check if write was successful then update Account info
        if database_operation.update_player_loses(database_setup.DbHandler.db_cnc, self.id, amount):
            self._loses = amount
            logger.debug('Updated loses')
        else:
            logger.error('Unable to update loses')
            raise exception.UnableToWrite(class_='Account', attribute='loses', value=amount)


def get_account_from_db(ctx: commands.Context, connection: sqlite3.Connection, player_id: int):
    """Returns Account of player_id

    **Arguments**

    - `<ctx>` Context used to get information.
    - `<connection>` sqlite3 connection to read from database.
    - `<player_id>` id of players acc in database
    """
    logger.debug(f'get_account_from_db: {player_id}')
    # get player_info from database to use to create an Account
    player_info = database_operation.get_player_info(connection, player_id)
    logger.debug(f'player_info: {player_info}')
    # check that player_info has player information, raise if it doesn't
    if not player_info:
        logger.error('player_info is empty')
        raise exception.UnexpectedEmpty(attribute='users')
    # create and place new Account into account to return
    account = Account(player_info[0], du.get_member_by_uuid(ctx, player_info[1]), player_info[2], player_info[3],
                      player_info[4], player_info[5], player_info[6])
    logger.debug(f'acc: {account}')
    return account


def get_account(ctx: commands.Context, connection: sqlite3.Connection, member: Union[discord.Member, str]):
    """Returns Account of member

    **Arguments**

    - `<ctx>` Context used to get server information
    - `<connection>` sqlite3 connection to read from database
    - `<member>` member whose acc we wish to get
    """
    logger.debug(f'get_account: {member}')
    # Check if ctx.channel is dm, raise if it is
    if isinstance(ctx.channel, discord.DMChannel):
        logger.error('ctx channel is dm, get_account not allowed in dms')
        raise exception.DiscordDM
    # Get id from database and put into player_id
    if isinstance(member, discord.Member):
        player_id = database_operation.get_player_id(connection, member.id, ctx.guild.id)
    else:
        player_id = database_operation.get_player_id_by_username(connection, member)
    # If player_id is 0, there is no index in the database
    if not player_id:
        logger.error('player_id was not found')
        if isinstance(member, str):
            raise exception.InvalidNickname
        raise exception.UnexpectedEmpty(attribute='user')
    # Get Account from database
    account = get_account_from_db(ctx, connection, player_id)
    # Check if account is falsy, meaning Account creation failed
    if not account:
        logger.error('Unable to create acc')
        raise exception.UnexpectedEmpty(class_='Account', attribute='account')
    logger.debug(f'acc: {account}')
    return account


def get_account_server_all(ctx: commands.Context, connection: sqlite3.Connection, server_id: int) -> Union[list, int]:
    """Returns list of all Accounts on a server

    **Arguments**

    - `<ctx>` Context used to get information
    - `<connection>` Connection for database
    - `<server_id>` Server_id to get all accounts for
    """
    logger.debug(f'get_account_server_all: {server_id}')
    # Get player_list from database and validate
    player_list = database_operation.get_player_info_all_by_server(connection, server_id)
    logger.debug(f'player_list: {player_list}')
    if not player_list:
        logger.error('Unable to get player_list')
        raise exception.UnableToRead(attribute='user')
    # Create list to return, and populate it with accounts from player_list
    account_list = []
    for player in player_list:
        logger.debug(f'player: {player}')
        account_list.append(Account(player[0], du.get_member_by_uuid(ctx, player[1]), player[2],
                                    player[3], player[4], player[5], player[6]))
    # Check if list has been populated
    if not len(account_list):
        logger.error('account_list is empty')
        raise exception.UnexpectedEmpty(class_='Account', attribute='account')
    return account_list


def get_account_by_nick(ctx: commands.Context, nickname: str):
    """Returns an account from a nickname

    **Arguments**

    - `<ctx>` Context used to get information
    - `<nickname>` Nickname of users account to get
    """
    logger.debug(f'get_account_by_nick: {nickname}')
    # Get player_id from nickname, and validate
    player_id = database_operation.get_player_id_by_username(database_setup.DbHandler.db_cnc, nickname)
    logger.debug(f'player_id: {player_id}')
    if not player_id:
        logger.debug(f'Unable to get player_id for nickname')
        raise exception.InvalidNickname
    return get_account_from_db(ctx, database_setup.DbHandler.db_cnc, player_id)
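# Illustrative only: how these helpers are typically reached from a
# discord.ext command.  The cog method below is hypothetical and is not part
# of this module:
#
#     @commands.command()
#     async def balance(self, ctx, member: discord.Member):
#         acc = get_account(ctx, database_setup.DbHandler.db_cnc, member)
#         await ctx.send(f'{acc.nickname} has {acc.marbles} marbles '
#                        f'({acc.winrate:.1f}% win rate)')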
| 36.675472 | 118 | 0.672806 | 1,258 | 9,719 | 5.034976 | 0.128776 | 0.04689 | 0.037891 | 0.034102 | 0.452479 | 0.390748 | 0.296495 | 0.247711 | 0.1964 | 0.161509 | 0 | 0.00485 | 0.236341 | 9,719 | 264 | 119 | 36.814394 | 0.848558 | 0.200329 | 0 | 0.246988 | 0 | 0 | 0.146613 | 0.009678 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090361 | false | 0 | 0.066265 | 0.018072 | 0.283133 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
334004553b3344745ba7f09dcfd1122dd1189b4c | 2,416 | py | Python | chiamon/src/interfaces/discordbot.py | danielringch/chiatools | 2825f71acc68d613de3c8b3b2f784ccd75610b71 | [
"MIT"
] | null | null | null | chiamon/src/interfaces/discordbot.py | danielringch/chiatools | 2825f71acc68d613de3c8b3b2f784ccd75610b71 | [
"MIT"
] | null | null | null | chiamon/src/interfaces/discordbot.py | danielringch/chiatools | 2825f71acc68d613de3c8b3b2f784ccd75610b71 | [
"MIT"
] | null | null | null |
import os, discord, asyncio

from ..core.interface import Interface
from ..core import Config


class Discordbot(Interface):
    def __init__(self, config, _):
        super(Discordbot, self).__init__()
        config_data = Config(config)
        self.__client = discord.Client()
        self.__channels = {}
        for channel, name in self.channel_names.items():
            if name in config_data.data:
                id, id_given = config_data.get_value_or_default(None, name, 'id')
                if not id_given:
                    print(f'[discordbot] WARNING: Channel {name} ignored, since no id is given.')
                    continue
                self.__channels[channel] = Discordbot.Channel(
                    self.__client,
                    id,
                    config_data.get_value_or_default(None, name, 'whitelist')[0],
                    config_data.get_value_or_default(None, name, 'blacklist')[0])
        with open(os.path.join(os.path.dirname(config), config_data.data['token']), "r") as stream:
            self.__token = stream.readline()

    async def start(self):
        asyncio.ensure_future(self.__client.start(self.__token))
        await self.__client.wait_for('ready')
        for channel in self.__channels.values():
            channel.activate()
        channels = ','.join(self.channel_names[x] for x in self.__channels.keys())
        print(f'[discordbot] Discord bot {self.__client.user} ready, available channels: {channels}')

    async def send_message(self, channel, prefix, message):
        if channel not in self.__channels:
            return
        await self.__channels[channel].send(prefix, message)

    class Channel:
        def __init__(self, client, id, whitelist, blacklist):
            self.__client = client
            self.__id = id
            self.__channel = None
            self.__whitelist = set(whitelist) if whitelist is not None else None
            self.__blacklist = set(blacklist) if blacklist is not None else None

        def activate(self):
            self.__channel = self.__client.get_channel(self.__id)

        async def send(self, prefix, message):
            if self.__whitelist is not None and prefix not in self.__whitelist:
                return
            if self.__blacklist is not None and prefix in self.__blacklist:
                return
            await self.__channel.send(f'{prefix} {message}')
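# Hypothetical wiring sketch (not part of this module): how the interface
# above might be driven.  The config path and channel key are illustrative,
# and `channel_names` is assumed to be supplied by the Interface base class:
#
#     bot = Discordbot('config.yaml', None)
#     await bot.start()
#     await bot.send_message(some_channel_key, '[harvester]', 'plot finished')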
| 39.606557 | 101 | 0.611341 | 286 | 2,416 | 4.856643 | 0.262238 | 0.057595 | 0.025918 | 0.038877 | 0.12599 | 0.075594 | 0.075594 | 0.075594 | 0 | 0 | 0 | 0.001167 | 0.290563 | 2,416 | 60 | 102 | 40.266667 | 0.809218 | 0 | 0 | 0.0625 | 0 | 0 | 0.081641 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.0625 | 0 | 0.229167 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
334044fc475b0ead4025b7744a488cbebbb81672 | 4,940 | py | Python | tests/unit/streamalert/rule_promotion/test_statistic.py | cninja1/streamalert | bfde778bc216bff1dfd7372164fd20cb78012dee | [
"Apache-2.0"
] | 2,770 | 2017-01-31T06:13:08.000Z | 2022-03-30T14:40:09.000Z | tests/unit/streamalert/rule_promotion/test_statistic.py | cninja1/streamalert | bfde778bc216bff1dfd7372164fd20cb78012dee | [
"Apache-2.0"
] | 1,184 | 2017-02-01T04:31:00.000Z | 2022-03-21T17:36:38.000Z | tests/unit/streamalert/rule_promotion/test_statistic.py | cninja1/streamalert | bfde778bc216bff1dfd7372164fd20cb78012dee | [
"Apache-2.0"
] | 401 | 2017-01-31T17:37:35.000Z | 2022-03-22T06:11:40.000Z |
"""
Copyright 2017-present Airbnb, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from datetime import datetime, timedelta
import os

from mock import patch
from nose.tools import assert_equal

from streamalert.rule_promotion.statistic import StagingStatistic


class TestStagingStatistic:
    """Tests for rule_promotion/statistic.py:StagingStatistic"""
    # pylint: disable=protected-access

    @patch.dict(os.environ, {'AWS_DEFAULT_REGION': 'us-east-1'})
    def setup(self):
        """StagingStatistic - Setup"""
        # pylint: disable=attribute-defined-outside-init
        stage_time = datetime(year=2000, month=1, day=1, hour=1, minute=1, second=1)
        self.statistic = StagingStatistic(
            staged_at=stage_time,
            staged_until=stage_time + timedelta(days=2),
            current_time=stage_time + timedelta(days=1),
            rule='test_rule'
        )

    def test_construct_compound_count_query(self):
        """StagingStatistic - Construct Compound Count Query"""
        query = StagingStatistic.construct_compound_count_query([self.statistic, self.statistic])
        expected_query = ("SELECT rule_name, count(*) AS count "
                          "FROM alerts WHERE "
                          "(dt >= '2000-01-01-01' AND rule_name = 'test_rule') OR "
                          "(dt >= '2000-01-01-01' AND rule_name = 'test_rule') "
                          "GROUP BY rule_name")
        assert_equal(query, expected_query)

    def test_sql_where_fragment(self):
        """StagingStatistic - SQL Count Where Fragment"""
        expected_sql = ("(dt >= '2000-01-01-01' AND rule_name = 'test_rule')")
        assert_equal(self.statistic.sql_where_fragment, expected_sql)

    def test_sql_info_statement(self):
        """StagingStatistic - SQL Info Statement"""
        expected_sql = ("SELECT id, rule_name, created, cluster, log_source, source_entity, "
                        "record FROM alerts WHERE dt >= '2000-01-01-01' AND "
                        "rule_name = 'test_rule' ORDER BY created DESC")
        assert_equal(self.statistic.sql_info_statement, expected_sql)

    def test_stringer_past(self):
        """StagingStatistic - Stringer, Past Staging"""
        self.statistic.alert_count = 200
        self.statistic._current_time += timedelta(days=2, hours=10)
        expected_string = '''\u25E6 test_rule
- Staged At: 2000-01-01 01:01:01 UTC
- Staged Until: 2000-01-03 01:01:01 UTC
- Time Past Staging: 1d 10h 0m
- Alert Count: 200
- Alert Info: n/a'''
        assert_equal(str(self.statistic), expected_string)

    def test_stringer_remaining(self):
        """StagingStatistic - Stringer, Staging Remaining"""
        self.statistic.alert_count = 100
        self.statistic.execution_id = '678cc350-d4e1-4296-86d5-9351b7f92ed4'
        expected_string = '''\u25E6 test_rule
- Staged At: 2000-01-01 01:01:01 UTC
- Staged Until: 2000-01-03 01:01:01 UTC
- Remaining Stage Time: 1d 0h 0m
- Alert Count: 100
- Alert Info: https://console.aws.amazon.com/athena/\
home#query/history/678cc350-d4e1-4296-86d5-9351b7f92ed4'''
        assert_equal(str(self.statistic), expected_string)

    def test_comp(self):
        """StagingStatistic - Comparison"""
        self.statistic.alert_count = 200
        second_stat = StagingStatistic(
            staged_at='fake_staged_at_time',
            staged_until='fake_staged_until_time',
            current_time='fake_current_time',
            rule='test_rule'
        )
        second_stat.alert_count = 100
        assert_equal(self.statistic > second_stat, True)

    def test_comp_no_alert_count(self):
        """StagingStatistic - Comparison when alert_count is default value"""
        # self.statistic.alert_count = 200
        second_stat = StagingStatistic(
            staged_at='fake_staged_at_time',
            staged_until='fake_staged_until_time',
            current_time='fake_current_time',
            rule='test_rule'
        )
        second_stat.alert_count = 100
        assert_equal(self.statistic > second_stat, False)
        self.statistic._current_time += timedelta(days=2, hours=10)
        expected_string = '''\u25E6 test_rule
- Staged At: 2000-01-01 01:01:01 UTC
- Staged Until: 2000-01-03 01:01:01 UTC
- Time Past Staging: 1d 10h 0m
- Alert Count: unknown
- Alert Info: n/a'''
        assert_equal(str(self.statistic), expected_string)
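# Worked example (not part of the suite): with two *different* staged rules,
# the compound count query above ORs one dt/rule_name predicate per rule, so
# a single Athena scan can count alerts for every staged rule at once:
#
#     SELECT rule_name, count(*) AS count FROM alerts WHERE
#     (dt >= '2000-01-01-01' AND rule_name = 'rule_a') OR
#     (dt >= '2000-01-02-01' AND rule_name = 'rule_b')
#     GROUP BY rule_name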
| 39.206349 | 97 | 0.660729 | 629 | 4,940 | 5.00318 | 0.292528 | 0.033047 | 0.030505 | 0.022243 | 0.452812 | 0.352399 | 0.352399 | 0.352399 | 0.352399 | 0.334922 | 0 | 0.063796 | 0.238462 | 4,940 | 125 | 98 | 39.52 | 0.772727 | 0.216194 | 0 | 0.417722 | 0 | 0 | 0.326958 | 0.035368 | 0 | 0 | 0 | 0 | 0.113924 | 1 | 0.101266 | false | 0 | 0.063291 | 0 | 0.177215 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3341348bd425676c57efeb58262b70c1ac026d14 | 1,519 | py | Python | src/accounts/api.py | officechristo/question-answer | 26dd610524477828a083d97a4034dc674b022680 | [
"MIT"
] | null | null | null | src/accounts/api.py | officechristo/question-answer | 26dd610524477828a083d97a4034dc674b022680 | [
"MIT"
] | null | null | null | src/accounts/api.py | officechristo/question-answer | 26dd610524477828a083d97a4034dc674b022680 | [
"MIT"
] | null | null | null |
from .serializers import LoginSerializer, RegisterSerializer, UserSerializer
from rest_framework import generics, permissions
from rest_framework.response import Response
from rest_framework_simplejwt.tokens import RefreshToken


class LoginAPI(generics.GenericAPIView):
    serializer_class = LoginSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data
        refresh = RefreshToken.for_user(user)
        res = {
            "refresh": str(refresh),
            "access": str(refresh.access_token),
        }
        return Response({
            'user': UserSerializer(user).data,
            'res': res
        })


class RegisterAPI(generics.GenericAPIView):
    serializer_class = RegisterSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.save()
        refresh = RefreshToken.for_user(user)
        res = {
            "refresh": str(refresh),
            "access": str(refresh.access_token),
        }
        return Response({
            'user': UserSerializer(user).data,
            'res': res
        })


class UserAPI(generics.RetrieveAPIView):
    permission_classes = [
        permissions.IsAuthenticated
    ]
    serializer_class = UserSerializer

    def get_object(self):
        return self.request.user
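# Hypothetical URL wiring for the three views above (module layout and paths
# are illustrative, not taken from this project):
#
#     from django.urls import path
#     from .api import LoginAPI, RegisterAPI, UserAPI
#
#     urlpatterns = [
#         path('api/auth/register', RegisterAPI.as_view()),
#         path('api/auth/login', LoginAPI.as_view()),
#         path('api/auth/user', UserAPI.as_view()),
#     ]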
| 29.784314 | 76 | 0.654378 | 146 | 1,519 | 6.671233 | 0.328767 | 0.041068 | 0.065708 | 0.075975 | 0.507187 | 0.507187 | 0.507187 | 0.507187 | 0.507187 | 0.507187 | 0 | 0 | 0.25214 | 1,519 | 50 | 77 | 30.38 | 0.857394 | 0 | 0 | 0.536585 | 0 | 0 | 0.026333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.073171 | false | 0 | 0.097561 | 0 | 0.390244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3341652ef7661b655ebff72f3a99fdacef54deb7 | 8,900 | py | Python | movie_rec.py | jcg1183/movie_recommender | 4d3df68979ce5ab5f62888ffa2b972bdbba54437 | [
"MIT"
] | null | null | null | movie_rec.py | jcg1183/movie_recommender | 4d3df68979ce5ab5f62888ffa2b972bdbba54437 | [
"MIT"
] | null | null | null | movie_rec.py | jcg1183/movie_recommender | 4d3df68979ce5ab5f62888ffa2b972bdbba54437 | [
"MIT"
] | null | null | null |
# Movie Recommender Semester Project for
# Machine Learning at UTSA Computer Science
# Josh Greene
# add your names here

# The blog post below was used as a jumping off point for the project
# https://towardsdatascience.com/the-4-recommendation-engines-that-can-predict-your-movie-tastes-109dc4e10c52
# Code to slice the predictions generated by model.fit was taken from the blog.
# Formatted Movie Lens data was taken from the blog.

import math

import numpy as np
import pandas as pd
import tensorflow as tf
from keras.callbacks import Callback, EarlyStopping, ModelCheckpoint
from keras.layers import (
    Embedding,
    Reshape,
    multiply,
    average,
    dot,
    concatenate,
    Input,
    Dense,
)
from keras.models import Model, Sequential

# display all columns of a dataframe
pd.set_option('display.max_columns', None)
pd.set_option('expand_frame_repr', False)

# Set up global variables
data_path = "./data/"
# dataset = "movielens_100k"
dataset = "movielens_1m"
full_dataset_path = data_path + dataset + "/"

# place holder variables for Movie Lens data
ratings = 0
movies = 0
users = 0

TEST_USER = 2000  # A random test user (user_id = 2000)
max_userid = 0
max_movieid = 0

# hyperparameters to test for the project
embedding_sizes = [50, 100]  # The number of dimensional embeddings for movies and users
merge_types = [
    "concatenate",
    "multiply",
    "average",
    "dot",
]
layer_sizes = [[64, 32, 1], [32, 16, 1]]
epochs = 30


# load and prepare the dataset files
def load_data(dataset_path):
    global max_userid
    global max_movieid
    global ratings
    global users
    global movies

    # load the raw ratings data
    ratings = pd.read_csv(
        dataset_path + "ratings.csv",
        sep="\t",
        encoding="latin-1",
        usecols=["user_id", "movie_id", "user_emb_id", "movie_emb_id", "rating"],
    )

    # get the number of users
    max_userid = ratings["user_id"].drop_duplicates().max()
    # get the number of movies
    max_movieid = ratings["movie_id"].drop_duplicates().max()

    # create training set
    shuffled_ratings = ratings.sample(frac=1.0, random_state=1)

    # load the user data
    users = pd.read_csv(
        dataset_path + "users.csv",
        sep="\t",
        encoding="latin-1",
        usecols=["user_id", "gender", "zipcode", "age_desc", "occ_desc"],
    )

    # load the movie data
    movies = pd.read_csv(
        dataset_path + "movies.csv",
        sep="\t",
        encoding="latin-1",
        usecols=["movie_id", "title", "genres"],
    )

    print("\nLoading data for Movie Lens: Users, Movies, Ratings\n")

    # Shuffling users
    Users = shuffled_ratings["user_emb_id"].values
    print("Users:", Users, ", shape =", Users.shape)

    # Shuffling movies
    Movies = shuffled_ratings["movie_emb_id"].values
    print("Movies:", Movies, ", shape =", Movies.shape)

    # Shuffling ratings
    Ratings = shuffled_ratings["rating"].values
    print("Ratings:", Ratings, ", shape =", Ratings.shape)
    print()

    return [Users, Movies], Ratings


# The experiment loops through each of the hyperparameter settings
# and trains on each configuration. The same output is generated
# for each set of hyperparameters, allowing us to compare results.
def run_experiment(X, y):
    global merge_types
    global embedding_sizes
    global layer_sizes
    global epochs
    global callbacks

    # test out all the merge layer types
    for merge_type in merge_types:
        # test out all the embedding sizes
        for embedding_size in embedding_sizes:
            # test out all the dense layer sizes
            for layers in layer_sizes:
                # build model name for saved file
                model_name = merge_type + "_" + str(embedding_size)
                for num in layers:
                    model_name = model_name + "_" + str(num)
                model_name = model_name + ".h5"

                # early stopping allows the model to quit if progress is not being made
                # in training.
                # model checkpoint saves the weights of the model for evaluation later.
                callbacks = [
                    EarlyStopping("val_loss", patience=2),
                    ModelCheckpoint(model_name, save_best_only=True),
                ]

                model = build_model(merge_type, layers, embedding_size)

                print("\nModel Summary\n")
                print("Model Parameters:\n\tmerge layer: {0}\n\tembedding size: {1}\n\tdense layers: {2}".format(merge_type, embedding_size, layers))
                print("\nSave name: {0}".format(model_name))
                print(model.summary())

                model.compile(loss="mse", optimizer="adamax")

                # compile the model and train on it
                history = model.fit(
                    X,
                    y,
                    epochs=epochs,
                    validation_split=0.1,
                    verbose=2,
                    callbacks=callbacks,
                )

                # Show the best validation RMSE
                min_val_loss, idx = min(
                    (val, idx) for (idx, val) in enumerate(history.history["val_loss"])
                )
                print(
                    "Minimum RMSE at epoch",
                    "{:d}".format(idx + 1),
                    "=",
                    "{:.4f}".format(math.sqrt(min_val_loss)),
                )

                print("\nModel Evaluation\n")
                trained_model = build_model(merge_type, layers, embedding_size)
                trained_model.load_weights(model_name)
                get_user_top20(trained_model)
                get_user_prediction(trained_model)


# build model takes the merge type (concat, dot, etc), the number
# and size of each dense layer, and the embedding size. It builds
# and returns a model according to these specifications.
def build_model(merge_type, dense_layers, embedding_size):
    global max_userid
    global max_movieid

    print("build_model: {0}, dense layers: {1}".format(merge_type, dense_layers))

    # build embedding layer for users
    first_input = Input(shape=(1,))
    first_embed = Embedding(max_userid, embedding_size, input_length=1)(first_input)

    # build embedding layer for movies
    second_input = Input(shape=(1,))
    second_embed = Embedding(max_movieid, embedding_size, input_length=1)(second_input)

    # dynamic selection of merge layer type
    if merge_type == "concatenate":
        x = concatenate([first_embed, second_embed])
    elif merge_type == "multiply":
        x = multiply([first_embed, second_embed])
    elif merge_type == "average":
        x = average([first_embed, second_embed])
    elif merge_type == "dot":
        x = dot([first_embed, second_embed], axes=1)

    # add a dense layer of specified size
    for layer_size in dense_layers:
        x = Dense(layer_size)(x)

    # build the model
    model = Model(inputs=[first_input, second_input], outputs=x)

    return model


# predict_rating uses a trained model, movie, and user to make a prediction
def predict_rating(trained_model, user_id, movie_id):
    return trained_model.predict([np.array([user_id - 1]), np.array([movie_id - 1])])[
        0
    ][0]


# prints the user rating and our prediction for top 20 movies
def get_user_top20(trained_model):
    global ratings

    user_ratings = ratings[ratings["user_id"] == TEST_USER][
        ["user_id", "movie_id", "rating"]
    ]
    user_ratings["prediction"] = user_ratings.apply(
        lambda x: predict_rating(trained_model, TEST_USER, x["movie_id"]), axis=1
    )

    print("\nDisplay Predicted User Ratings and Actual Ratings\n")
    print(
        user_ratings.sort_values(by="rating", ascending=False)
        .merge(movies, on="movie_id", how="inner", suffixes=["_u", "_m"])
        .head(20)
    )


# Prints the predicted user ratings for top 20 movies
def get_user_prediction(trained_model):
    user_ratings = ratings[ratings["user_id"] == TEST_USER][
        ["user_id", "movie_id", "rating"]
    ]
    user_ratings["prediction"] = user_ratings.apply(
        lambda x: predict_rating(trained_model, TEST_USER, x["movie_id"]), axis=1
    )

    recommendations = ratings[
        ratings["movie_id"].isin(user_ratings["movie_id"]) == False
    ][["movie_id"]].drop_duplicates()
    recommendations["prediction"] = recommendations.apply(
        lambda x: predict_rating(trained_model, TEST_USER, x["movie_id"]), axis=1
    )

    print("\nDisplay Predicted User Ratings for Recommendation\n")
    print(
        recommendations.sort_values(by="prediction", ascending=False)
        .merge(movies, on="movie_id", how="inner", suffixes=["_u", "_m"])
        .head(20)
    )


# load the dataset
X, y = load_data(full_dataset_path)

# run the experiment
run_experiment(X, y)
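# Sketch (not in the original script): scoring a single user/movie pair with
# one of the checkpoints the experiment saves.  The weight-file name follows
# the model_name pattern built in run_experiment and is illustrative:
#
#     model = build_model("concatenate", [64, 32, 1], 50)
#     model.load_weights("concatenate_50_64_32_1.h5")
#     print(predict_rating(model, TEST_USER, 1))  # predicted rating for movie_id 1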
| 30.795848 | 149 | 0.633258 | 1,125 | 8,900 | 4.833778 | 0.258667 | 0.019309 | 0.013792 | 0.009562 | 0.217359 | 0.184259 | 0.159618 | 0.126885 | 0.11107 | 0.098566 | 0 | 0.013116 | 0.263258 | 8,900 | 288 | 150 | 30.902778 | 0.816227 | 0.225506 | 0 | 0.158192 | 0 | 0.00565 | 0.137094 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033898 | false | 0 | 0.039548 | 0.00565 | 0.090395 | 0.090395 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33465ce6d11daa3721aaaca774204377dae1f9da | 5,718 | py | Python | tests/crypt/test_es256.py | kpister/google-auth-library-python | e79b8310bfd961557bf2496e15c9148dac79b213 | [
"Apache-2.0"
] | null | null | null | tests/crypt/test_es256.py | kpister/google-auth-library-python | e79b8310bfd961557bf2496e15c9148dac79b213 | [
"Apache-2.0"
] | null | null | null | tests/crypt/test_es256.py | kpister/google-auth-library-python | e79b8310bfd961557bf2496e15c9148dac79b213 | [
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import base64
import json
import os

from cryptography.hazmat.primitives.asymmetric import ec
import pytest  # type: ignore

from google.auth import _helpers
from google.auth.crypt import base
from google.auth.crypt import es256


DATA_DIR = os.path.join(os.path.dirname(__file__), "..", "data")

# To generate es256_privatekey.pem, es256_privatekey.pub, and
# es256_public_cert.pem:
#   $ openssl ecparam -genkey -name prime256v1 -noout -out es256_privatekey.pem
#   $ openssl ec -in es256-private-key.pem -pubout -out es256-publickey.pem
#   $ openssl req -new -x509 -key es256_privatekey.pem -out \
#   >     es256_public_cert.pem

with open(os.path.join(DATA_DIR, "es256_privatekey.pem"), "rb") as fh:
    PRIVATE_KEY_BYTES = fh.read()
    PKCS1_KEY_BYTES = PRIVATE_KEY_BYTES

with open(os.path.join(DATA_DIR, "es256_publickey.pem"), "rb") as fh:
    PUBLIC_KEY_BYTES = fh.read()

with open(os.path.join(DATA_DIR, "es256_public_cert.pem"), "rb") as fh:
    PUBLIC_CERT_BYTES = fh.read()

SERVICE_ACCOUNT_JSON_FILE = os.path.join(DATA_DIR, "es256_service_account.json")

with open(SERVICE_ACCOUNT_JSON_FILE, "rb") as fh:
    SERVICE_ACCOUNT_INFO = json.load(fh)


class TestES256Verifier(object):
    def test_verify_success(self):
        to_sign = b"foo"
        signer = es256.ES256Signer.from_string(PRIVATE_KEY_BYTES)
        actual_signature = signer.sign(to_sign)

        verifier = es256.ES256Verifier.from_string(PUBLIC_KEY_BYTES)
        assert verifier.verify(to_sign, actual_signature)

    def test_verify_unicode_success(self):
        to_sign = u"foo"
        signer = es256.ES256Signer.from_string(PRIVATE_KEY_BYTES)
        actual_signature = signer.sign(to_sign)

        verifier = es256.ES256Verifier.from_string(PUBLIC_KEY_BYTES)
        assert verifier.verify(to_sign, actual_signature)

    def test_verify_failure(self):
        verifier = es256.ES256Verifier.from_string(PUBLIC_KEY_BYTES)
        bad_signature1 = b""
        assert not verifier.verify(b"foo", bad_signature1)

        bad_signature2 = b"a"
        assert not verifier.verify(b"foo", bad_signature2)

    def test_verify_failure_with_wrong_raw_signature(self):
        to_sign = b"foo"

        # This signature has a wrong "r" value in the "(r,s)" raw signature.
        wrong_signature = base64.urlsafe_b64decode(
            b"m7oaRxUDeYqjZ8qiMwo0PZLTMZWKJLFQREpqce1StMIa_yXQQ-C5WgeIRHW7OqlYSDL0XbUrj_uAw9i-QhfOJQ=="
        )

        verifier = es256.ES256Verifier.from_string(PUBLIC_KEY_BYTES)
        assert not verifier.verify(to_sign, wrong_signature)

    def test_from_string_pub_key(self):
        verifier = es256.ES256Verifier.from_string(PUBLIC_KEY_BYTES)
        assert isinstance(verifier, es256.ES256Verifier)
        assert isinstance(verifier._pubkey, ec.EllipticCurvePublicKey)

    def test_from_string_pub_key_unicode(self):
        public_key = _helpers.from_bytes(PUBLIC_KEY_BYTES)
        verifier = es256.ES256Verifier.from_string(public_key)
        assert isinstance(verifier, es256.ES256Verifier)
        assert isinstance(verifier._pubkey, ec.EllipticCurvePublicKey)

    def test_from_string_pub_cert(self):
        verifier = es256.ES256Verifier.from_string(PUBLIC_CERT_BYTES)
        assert isinstance(verifier, es256.ES256Verifier)
        assert isinstance(verifier._pubkey, ec.EllipticCurvePublicKey)

    def test_from_string_pub_cert_unicode(self):
        public_cert = _helpers.from_bytes(PUBLIC_CERT_BYTES)
        verifier = es256.ES256Verifier.from_string(public_cert)
        assert isinstance(verifier, es256.ES256Verifier)
        assert isinstance(verifier._pubkey, ec.EllipticCurvePublicKey)


class TestES256Signer(object):
    def test_from_string_pkcs1(self):
        signer = es256.ES256Signer.from_string(PKCS1_KEY_BYTES)
        assert isinstance(signer, es256.ES256Signer)
        assert isinstance(signer._key, ec.EllipticCurvePrivateKey)

    def test_from_string_pkcs1_unicode(self):
        key_bytes = _helpers.from_bytes(PKCS1_KEY_BYTES)
        signer = es256.ES256Signer.from_string(key_bytes)
        assert isinstance(signer, es256.ES256Signer)
        assert isinstance(signer._key, ec.EllipticCurvePrivateKey)

    def test_from_string_bogus_key(self):
        key_bytes = "bogus-key"
        with pytest.raises(ValueError):
            es256.ES256Signer.from_string(key_bytes)

    def test_from_service_account_info(self):
        signer = es256.ES256Signer.from_service_account_info(SERVICE_ACCOUNT_INFO)

        assert signer.key_id == SERVICE_ACCOUNT_INFO[base._JSON_FILE_PRIVATE_KEY_ID]
        assert isinstance(signer._key, ec.EllipticCurvePrivateKey)

    def test_from_service_account_info_missing_key(self):
        with pytest.raises(ValueError) as excinfo:
            es256.ES256Signer.from_service_account_info({})

        assert excinfo.match(base._JSON_FILE_PRIVATE_KEY)

    def test_from_service_account_file(self):
        signer = es256.ES256Signer.from_service_account_file(SERVICE_ACCOUNT_JSON_FILE)

        assert signer.key_id == SERVICE_ACCOUNT_INFO[base._JSON_FILE_PRIVATE_KEY_ID]
        assert isinstance(signer._key, ec.EllipticCurvePrivateKey)
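# Minimal sign/verify round trip using the same public API exercised above
# (the key file paths are placeholders):
#
#     with open("es256_privatekey.pem", "rb") as fh:
#         signer = es256.ES256Signer.from_string(fh.read())
#     signature = signer.sign(b"payload")
#
#     with open("es256_publickey.pem", "rb") as fh:
#         verifier = es256.ES256Verifier.from_string(fh.read())
#     assert verifier.verify(b"payload", signature)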
| 39.708333 | 103 | 0.743792 | 744 | 5,718 | 5.431452 | 0.223118 | 0.049493 | 0.077209 | 0.059391 | 0.557535 | 0.502846 | 0.454096 | 0.380104 | 0.357832 | 0.315763 | 0 | 0.045493 | 0.173487 | 5,718 | 143 | 104 | 39.986014 | 0.809564 | 0.166842 | 0 | 0.325843 | 0 | 0 | 0.044918 | 0.028469 | 0 | 0 | 0 | 0 | 0.247191 | 1 | 0.157303 | false | 0 | 0.089888 | 0 | 0.269663 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
334af2ec5993eb3fae226746c0274e5a97593b1d | 5,010 | py | Python | measuerments.py | b4ldr/atlas-kibana | d736119a9b75d0582901c2c06c8f9bb78464f9cf | [
"Apache-2.0"
] | 1 | 2020-11-03T18:30:30.000Z | 2020-11-03T18:30:30.000Z | measuerments.py | b4ldr/atlas-kibana | d736119a9b75d0582901c2c06c8f9bb78464f9cf | [
"Apache-2.0"
] | 1 | 2015-05-12T14:16:41.000Z | 2015-05-12T14:16:41.000Z | measuerments.py | b4ldr/atlas-kibana | d736119a9b75d0582901c2c06c8f9bb78464f9cf | [
"Apache-2.0"
] | null | null | null |
import logging
import datetime

import libwhois
from ripe.atlas.sagan import Result, ResultParseError


class Measurment(object):
    '''Parent object for an Atlas measurement'''

    parsed_error = None
    parsed = None

    def __init__(self, payload, probe):
        '''Initiate generic measurement data'''
        self.logger = logging.getLogger('atlas-kibana.Measurment')
        self.probe = probe
        self.payload = payload
        try:
            self.parsed = Result(payload).get(payload, on_error=Result.ACTION_IGNORE)
        except ResultParseError as e:
            self.parsed_error = e
            self.logger.debug('Error parsing msm:\n{}'.format(e))

    @staticmethod
    def _clean_dict(dict_in):
        '''Clean useless entries out of a dict'''
        remove = ['fw', 'lts', 'msm_name', 'is_error', 'error_message', 'is_malformed', 'icmp_header',
                  '_on_error', '_on_malformation', 'klass', 'raw_data', 'stat_api', 'logger']
        for word in remove:
            try:
                del dict_in[word]
            except KeyError:
                pass
        return dict_in

    def _clean_array(self, list_in):
        '''Try to force a dict from a list of objects'''
        return [self._clean_dict(value.__dict__) for value in list_in]

    def _get_source(self):
        source = self._clean_dict(self.payload)
        source['_index'] = 'atlas-{}'.format(self.payload['type'])
        source['_type'] = 'atlas-document'
        source['probe'] = self._clean_dict(self.probe.__dict__)
        # remove the result; we will replace this with something nicer
        if 'result' in source:
            del source['result']
        source['timestamp'] = datetime.datetime.utcfromtimestamp(source['timestamp']).isoformat()
        return source

    def get_actions(self):
        self.logger.warning('no defined parser for {} so just throwing what we get from sagan'.format(self.payload['type']))
        return [self._get_source()]


class MeasurmentDNS(Measurment):

    def __init__(self, payload, probe):
        super(MeasurmentDNS, self).__init__(payload, probe)
        self.logger = logging.getLogger('atlas-kibana.MeasurmentDNS')

    def get_actions(self):
        source = self._get_source()
        actions = []
        for response in self.parsed.responses:
            if response.abuf.header:
                source['header'] = self._clean_dict(response.abuf.header.__dict__)
            if response.abuf.edns0:
                source['edns0'] = self._clean_dict(response.abuf.edns0.__dict__)
                source['edns0']['options'] = self._clean_array(source['edns0']['options'])
            if response.abuf.questions:
                source['questions'] = self._clean_array(response.abuf.questions)
            if response.abuf.answers:
                source['answers'] = self._clean_array(response.abuf.answers)
            if response.abuf.authorities:
                source['authorities'] = self._clean_array(response.abuf.authorities)
            if response.abuf.additionals:
                source['additionals'] = self._clean_array(response.abuf.additionals)
            self.logger.debug('Yield measurement {}'.format(source))
            actions.append(source)
        return actions


class MeasurmentTraceroute(Measurment):

    def __init__(self, payload, probe):
        super(MeasurmentTraceroute, self).__init__(payload, probe)
        self.logger = logging.getLogger('atlas-kibana.MeasurmentTraceroute')
        self.asn_whois = libwhois.ASNWhois()

    def get_actions(self):
        source = self._get_source()
        source['hops'] = self._clean_array(self.parsed.hops)
        source['destination_ip_responded'] = self.parsed.destination_ip_responded
        source['last_hop_responded'] = self.parsed.last_hop_responded
        source['last_rtt'] = self.parsed.last_rtt
        source['total_hops'] = self.parsed.total_hops
        seen_as = set()
        self.asn_whois.query = []
        for hop in source['hops']:
            hop['packets'] = self._clean_array(hop['packets'])
            hop['first_origin'] = hop['packets'][0].get('origin', None)
            if hop['first_origin']:
                self.asn_whois.query.append(hop['first_origin'])
        # loop twice so we only make one call to shadow servers
        for hop in source['hops']:
            try:
                hop['asn'] = self.asn_whois.result[hop['first_origin']].asn
                seen_as.add(hop['asn'])
            except KeyError:
                self.logger.debug('unable to get first_origin for {} {}'.format(self.parsed, hop['first_origin']))
            except libwhois.QueryError:
                self.logger.warning('unable to get ASN for {} {}'.format(self.parsed, hop['first_origin']))
        source['total_as_hops'] = len(seen_as)
        return [source]
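# Illustrative driver (not part of this module): `payload` is a raw RIPE
# Atlas result dict and `probe` any object whose __dict__ describes the
# probe; both names, and the indexing sink, are placeholders:
#
#     msm = MeasurmentDNS(payload, probe)
#     if msm.parsed_error is None:
#         for doc in msm.get_actions():
#             index_into_elasticsearch(doc)  # hypothetical sink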
| 42.820513 | 124 | 0.600399 | 556 | 5,010 | 5.185252 | 0.260791 | 0.037461 | 0.033992 | 0.030524 | 0.200486 | 0.126604 | 0.11377 | 0.064516 | 0.039542 | 0.039542 | 0 | 0.001669 | 0.282236 | 5,010 | 116 | 125 | 43.189655 | 0.800056 | 0.0499 | 0 | 0.16129 | 0 | 0 | 0.147593 | 0.022382 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096774 | false | 0.010753 | 0.043011 | 0 | 0.258065 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
334bdf8ecb9cdc2f9f5b2afdf945a754213b2091 | 2,555 | py | Python | sdk/cognitiveservices/azure-cognitiveservices-knowledge-qnamaker/azure/cognitiveservices/knowledge/qnamaker/authoring/models/operation.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/cognitiveservices/azure-cognitiveservices-knowledge-qnamaker/azure/cognitiveservices/knowledge/qnamaker/authoring/models/operation.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 226 | 2019-07-24T07:57:21.000Z | 2019-10-15T01:07:24.000Z | sdk/cognitiveservices/azure-cognitiveservices-knowledge-qnamaker/azure/cognitiveservices/knowledge/qnamaker/authoring/models/operation.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class Operation(Model):
    """Record to track long running operation.

    :param operation_state: Operation state. Possible values include:
     'Failed', 'NotStarted', 'Running', 'Succeeded'
    :type operation_state: str or
     ~azure.cognitiveservices.knowledge.qnamaker.authoring.models.OperationStateType
    :param created_timestamp: Timestamp when the operation was created.
    :type created_timestamp: str
    :param last_action_timestamp: Timestamp when the current state was
     entered.
    :type last_action_timestamp: str
    :param resource_location: Relative URI to the target resource location
     for completed resources.
    :type resource_location: str
    :param user_id: User Id
    :type user_id: str
    :param operation_id: Operation Id.
    :type operation_id: str
    :param error_response: Error details in case of failures.
    :type error_response:
     ~azure.cognitiveservices.knowledge.qnamaker.authoring.models.ErrorResponse
    """

    _attribute_map = {
        'operation_state': {'key': 'operationState', 'type': 'str'},
        'created_timestamp': {'key': 'createdTimestamp', 'type': 'str'},
        'last_action_timestamp': {'key': 'lastActionTimestamp', 'type': 'str'},
        'resource_location': {'key': 'resourceLocation', 'type': 'str'},
        'user_id': {'key': 'userId', 'type': 'str'},
        'operation_id': {'key': 'operationId', 'type': 'str'},
        'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
    }

    def __init__(self, **kwargs):
        super(Operation, self).__init__(**kwargs)
        self.operation_state = kwargs.get('operation_state', None)
        self.created_timestamp = kwargs.get('created_timestamp', None)
        self.last_action_timestamp = kwargs.get('last_action_timestamp', None)
        self.resource_location = kwargs.get('resource_location', None)
        self.user_id = kwargs.get('user_id', None)
        self.operation_id = kwargs.get('operation_id', None)
        self.error_response = kwargs.get('error_response', None)
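# Usage sketch (not part of the generated file): msrest models take their
# parameters as keyword arguments, and _attribute_map drives (de)serialization
# between the snake_case attributes and the wire's camelCase keys:
#
#     op = Operation(operation_state='Running',
#                    operation_id='1234',
#                    resource_location='/knowledgebases/5678')
#     wire = op.serialize()          # {'operationState': 'Running', ...}
#     same = Operation.deserialize(wire)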
| 44.051724 | 84 | 0.655186 | 280 | 2,555 | 5.8 | 0.392857 | 0.038793 | 0.058498 | 0.04803 | 0.066502 | 0.066502 | 0 | 0 | 0 | 0 | 0 | 0.000474 | 0.174951 | 2,555 | 57 | 85 | 44.824561 | 0.769924 | 0.51272 | 0 | 0 | 0 | 0 | 0.331593 | 0.036554 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.05 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
334c2572e74f7773bb5f78485020339e7d798d30 | 8,635 | py | Python | homeassistant/components/heos/__init__.py | jasl8r/home-assistant | 7331eb1f7177e86cad2bbcd146fa6cc4b987494c | [
"Apache-2.0"
] | 1 | 2019-05-11T09:52:57.000Z | 2019-05-11T09:52:57.000Z | homeassistant/components/heos/__init__.py | jasl8r/home-assistant | 7331eb1f7177e86cad2bbcd146fa6cc4b987494c | [
"Apache-2.0"
] | 2 | 2022-01-13T04:00:03.000Z | 2022-03-12T01:02:40.000Z | homeassistant/components/heos/__init__.py | jasl8r/home-assistant | 7331eb1f7177e86cad2bbcd146fa6cc4b987494c | [
"Apache-2.0"
] | 1 | 2021-06-16T09:57:31.000Z | 2021-06-16T09:57:31.000Z |
"""Denon HEOS Media Player."""
import asyncio
from datetime import timedelta
import logging

import voluptuous as vol

from homeassistant.components.media_player.const import (
    DOMAIN as MEDIA_PLAYER_DOMAIN)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util import Throttle

from .config_flow import format_title
from .const import (
    COMMAND_RETRY_ATTEMPTS, COMMAND_RETRY_DELAY, DATA_CONTROLLER,
    DATA_SOURCE_MANAGER, DOMAIN, SIGNAL_HEOS_SOURCES_UPDATED)

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_HOST): cv.string
    })
}, extra=vol.ALLOW_EXTRA)

MIN_UPDATE_SOURCES = timedelta(seconds=1)

_LOGGER = logging.getLogger(__name__)


async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the HEOS component."""
    if DOMAIN not in config:
        return True
    host = config[DOMAIN][CONF_HOST]
    entries = hass.config_entries.async_entries(DOMAIN)
    if not entries:
        # Create new entry based on config
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={'source': 'import'},
                data={CONF_HOST: host}))
    else:
        # Check if host needs to be updated
        entry = entries[0]
        if entry.data[CONF_HOST] != host:
            entry.data[CONF_HOST] = host
            entry.title = format_title(host)
            hass.config_entries.async_update_entry(entry)
    return True


async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Initialize config entry which represents the HEOS controller."""
    from pyheos import Heos, CommandError

    host = entry.data[CONF_HOST]
    # Setting all_progress_events=False ensures that we only receive a
    # media position update upon start of playback or when media changes
    controller = Heos(host, all_progress_events=False)
    try:
        await controller.connect(auto_reconnect=True)
    # Auto reconnect only operates if initial connection was successful.
    except (asyncio.TimeoutError, ConnectionError, CommandError) as error:
        await controller.disconnect()
        _LOGGER.debug("Unable to connect to controller %s: %s", host, error)
        raise ConfigEntryNotReady

    # Disconnect when shutting down
    async def disconnect_controller(event):
        await controller.disconnect()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, disconnect_controller)

    # Get players and sources
    try:
        players = await controller.get_players()
        favorites = {}
        if controller.is_signed_in:
            favorites = await controller.get_favorites()
        else:
            _LOGGER.warning("%s is not logged in to your HEOS account and will"
                            " be unable to retrieve your favorites", host)
        inputs = await controller.get_input_sources()
    except (asyncio.TimeoutError, ConnectionError, CommandError) as error:
        await controller.disconnect()
        _LOGGER.debug("Unable to retrieve players and sources: %s", error,
                      exc_info=isinstance(error, CommandError))
        raise ConfigEntryNotReady

    source_manager = SourceManager(favorites, inputs)
    source_manager.connect_update(hass, controller)

    hass.data[DOMAIN] = {
        DATA_CONTROLLER: controller,
        DATA_SOURCE_MANAGER: source_manager,
        MEDIA_PLAYER_DOMAIN: players
    }
    hass.async_create_task(hass.config_entries.async_forward_entry_setup(
        entry, MEDIA_PLAYER_DOMAIN))
    return True


async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Unload a config entry."""
    controller = hass.data[DOMAIN][DATA_CONTROLLER]
    controller.dispatcher.disconnect_all()
    await controller.disconnect()
    hass.data.pop(DOMAIN)
    return await hass.config_entries.async_forward_entry_unload(
        entry, MEDIA_PLAYER_DOMAIN)


class SourceManager:
    """Class that manages sources for players."""

    def __init__(self, favorites, inputs, *,
                 retry_delay: int = COMMAND_RETRY_DELAY,
                 max_retry_attempts: int = COMMAND_RETRY_ATTEMPTS):
        """Init input manager."""
        self.retry_delay = retry_delay
        self.max_retry_attempts = max_retry_attempts
        self.favorites = favorites
        self.inputs = inputs
        self.source_list = self._build_source_list()

    def _build_source_list(self):
        """Build a single list of inputs from various types."""
        source_list = []
        source_list.extend([favorite.name for favorite
                            in self.favorites.values()])
        source_list.extend([source.name for source in self.inputs])
        return source_list

    async def play_source(self, source: str, player):
        """Determine type of source and play it."""
        index = next((index for index, favorite in self.favorites.items()
                      if favorite.name == source), None)
        if index is not None:
            await player.play_favorite(index)
            return

        input_source = next((input_source for input_source in self.inputs
                             if input_source.name == source), None)
        if input_source is not None:
            await player.play_input_source(input_source)
            return

        _LOGGER.error("Unknown source: %s", source)

    def get_current_source(self, now_playing_media):
        """Determine current source from now playing media."""
        from pyheos import const

        # Match input by input_name:media_id
        if now_playing_media.source_id == const.MUSIC_SOURCE_AUX_INPUT:
            return next((input_source.name for input_source in self.inputs
                         if input_source.input_name ==
                         now_playing_media.media_id), None)
        # Try matching favorite by name:station or media_id:album_id
        return next((source.name for source in self.favorites.values()
                     if source.name == now_playing_media.station
                     or source.media_id == now_playing_media.album_id), None)

    def connect_update(self, hass, controller):
        """
        Connect listener for when sources change and signal player update.

        EVENT_SOURCES_CHANGED is often raised multiple times in response to a
        physical event therefore throttle it. Retrieving sources immediately
        after the event may fail so retry.
        """
        from pyheos import CommandError, const

        @Throttle(MIN_UPDATE_SOURCES)
        async def get_sources():
            retry_attempts = 0
            while True:
                try:
                    favorites = {}
                    if controller.is_signed_in:
                        favorites = await controller.get_favorites()
                    inputs = await controller.get_input_sources()
                    return favorites, inputs
                except (asyncio.TimeoutError, ConnectionError, CommandError) \
                        as error:
                    if retry_attempts < self.max_retry_attempts:
                        retry_attempts += 1
                        _LOGGER.debug("Error retrieving sources and will "
                                      "retry: %s", error,
                                      exc_info=isinstance(error, CommandError))
                        await asyncio.sleep(self.retry_delay)
                    else:
                        _LOGGER.error("Unable to update sources: %s", error,
                                      exc_info=isinstance(error, CommandError))
                        return

        async def update_sources(event, data):
            if event in (const.EVENT_SOURCES_CHANGED,
                         const.EVENT_USER_CHANGED):
                sources = await get_sources()
                # If throttled, it will return None
                if sources:
                    self.favorites, self.inputs = sources
                    self.source_list = self._build_source_list()
                    _LOGGER.debug("Sources updated due to changed event")
                    # Let players know to update
                    hass.helpers.dispatcher.async_dispatcher_send(
                        SIGNAL_HEOS_SOURCES_UPDATED)

        controller.dispatcher.connect(
            const.SIGNAL_CONTROLLER_EVENT, update_sources)
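# Usage sketch (not part of this file): media_player entities resolve a
# user-selected source through the shared SourceManager.  The `heos_player`
# object below is a placeholder for a pyheos player:
#
#     source_manager = hass.data[DOMAIN][DATA_SOURCE_MANAGER]
#     await source_manager.play_source("Aux In 1", heos_player)
#     name = source_manager.get_current_source(heos_player.now_playing_media)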
| 40.539906 | 79 | 0.645281 | 969 | 8,635 | 5.541796 | 0.22291 | 0.027933 | 0.01676 | 0.016387 | 0.235382 | 0.207263 | 0.157356 | 0.095345 | 0.07784 | 0.063315 | 0 | 0.000647 | 0.28454 | 8,635 | 212 | 80 | 40.731132 | 0.868566 | 0.104343 | 0 | 0.211538 | 0 | 0 | 0.040503 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025641 | false | 0 | 0.108974 | 0 | 0.211538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
334fa1a043ff14e1c3781fd4df3a7da2dbe35a74 | 16,626 | py | Python | losses.py | NoamRosenberg/anchorF | daf093f9851050dc46fd1c01721c91153cc4ff62 | [
"Apache-2.0"
] | 1 | 2021-03-05T18:44:36.000Z | 2021-03-05T18:44:36.000Z | losses.py | NoamRosenberg/anchorF | daf093f9851050dc46fd1c01721c91153cc4ff62 | [
"Apache-2.0"
] | null | null | null | losses.py | NoamRosenberg/anchorF | daf093f9851050dc46fd1c01721c91153cc4ff62 | [
"Apache-2.0"
] | 1 | 2021-06-18T18:19:13.000Z | 2021-06-18T18:19:13.000Z | import numpy as np
import torch
import torch.nn as nn
def calc_iou(a, b):
area = (b[:, 2] - b[:, 0]) * (b[:, 3] - b[:, 1])
iw = torch.min(torch.unsqueeze(a[:, 2], dim=1), b[:, 2]) - torch.max(torch.unsqueeze(a[:, 0], 1), b[:, 0])
ih = torch.min(torch.unsqueeze(a[:, 3], dim=1), b[:, 3]) - torch.max(torch.unsqueeze(a[:, 1], 1), b[:, 1])
iw = torch.clamp(iw, min=0)
ih = torch.clamp(ih, min=0)
ua = torch.unsqueeze((a[:, 2] - a[:, 0]) * (a[:, 3] - a[:, 1]), dim=1) + area - iw * ih
ua = torch.clamp(ua, min=1e-8)
intersection = iw * ih
IoU = intersection / ua
return IoU
class FocalLoss(nn.Module):
#def __init__(self):
def forward(self, classifications, regressions, annotations, image, x_grid_order, y_grid_order, pyramid_reset, args):
alpha = 0.25
gamma = 2.0
rest_norm = args.rest_norm
s_norm = args.s_norm
t_val = args.t_val
IOULoss = bool(args.IOU)
batch_size = classifications.shape[0]
classification_losses = []
regression_losses = []
losses = []
for j in range(batch_size):
pyramid = pyramid_reset
classification = classifications[j, :, :]
regression = regressions[j, :, :]
bbox_annotation = annotations[j, :, :]
bbox_annotation = bbox_annotation[bbox_annotation[:, 4] != -1]
if bbox_annotation.shape[0] == 0:
regression_losses.append(torch.tensor(0).float().cuda())
classification_losses.append(torch.tensor(0).float().cuda())
continue
classification = torch.clamp(classification, 1e-4, 1.0 - 1e-4)
##build projections
pyramid_levels = [3, 4, 5, 6, 7]
strides = [2 ** x for x in pyramid_levels]
image_shape = image.shape[2:]
image_shape = np.array(image_shape)
feature_shapes = [(image_shape + 2 ** x - 1) // (2 ** x) for x in pyramid_levels]
#compute projection boxes
projection_boxes_ls = []
effective_boxes_ls = []
ignoring_boxes_ls = []
single_box_projections = torch.ones(len(pyramid_levels),5)*-1
effective_box = torch.ones(len(pyramid_levels),5)*-1
ignoring_box = torch.ones(len(pyramid_levels),5)*-1
for single_annotation_box in bbox_annotation:
#TODO: make sure this equation is correct
single_box_projections[:,:4] = torch.stack([(single_annotation_box[:4] + 2 ** x - 1) // (2 ** x) for x in pyramid_levels])### NOT SURE IF THIS IS ACCURATE
#set classes for boxes
single_box_projections[:,4] = single_annotation_box[4]
effective_box[:,4] = single_annotation_box[4]
ignoring_box[:,4] = single_annotation_box[4]
# assert (single_box_projections == -1).sum() == 0, "single box projections haven't been filled with values"
# compute coordinates of effective and ignoring and the rest, regions
e_ef = 0.2
e_ig = 0.5
projections_height = single_box_projections[:,3] - single_box_projections[:,1]
projections_width = single_box_projections[:,2] - single_box_projections[:,0]
effective_box[:,3] = single_box_projections[:,3] - ((1.0 - e_ef)/2) * projections_height
effective_box[:,1] = single_box_projections[:,1] + ((1.0 - e_ef)/2) * projections_height
effective_box[:,2] = single_box_projections[:,2] - ((1.0 - e_ef)/2) * projections_width
effective_box[:,0] = single_box_projections[:,0] + ((1.0 - e_ef)/2) * projections_width
ignoring_box[:,3] = single_box_projections[:,3] - ((1.0 - e_ig)/2) * projections_height
ignoring_box[:,1] = single_box_projections[:,1] + ((1.0 - e_ig)/2) * projections_height
ignoring_box[:,2] = single_box_projections[:,2] - ((1.0 - e_ig)/2) * projections_width
ignoring_box[:,0] = single_box_projections[:,0] + ((1.0 - e_ig)/2) * projections_width
# assert (effective_box[:,3] < effective_box[:,1]).sum() == 0, "effective box not computed correctly y2 is smaller than y1"
# assert (effective_box[:,2] < effective_box[:,0]).sum() == 0, "effective box not computed correctly x2 is smaller than x1"
# assert (ignoring_box[:,3] < ignoring_box[:,1]).sum() == 0, "effective box not computed correctly y2 is smaller than y1"
# assert (ignoring_box[:,2] < ignoring_box[:,0]).sum() == 0, "effective box not computed correctly x2 is smaller than x1"
projection_boxes_ls.append(single_box_projections.tolist())
effective_boxes_ls.append(effective_box.tolist())
ignoring_boxes_ls.append(ignoring_box.tolist())
            #Dimensions: number_of_annotations_in_image X 5_pyramid_levels X (4_coordinates + 1_class)
projection_boxes = torch.Tensor(projection_boxes_ls)
effective_boxes = torch.Tensor(effective_boxes_ls)
ignoring_boxes = torch.Tensor(ignoring_boxes_ls)
#Fill target maps with zeros
#assert classification.shape == regression.shape, 'should be same shape'
targets = torch.zeros(classification.shape)
targets_d_left = torch.ones(regression.shape[0]) * -1
targets_d_right = torch.ones(regression.shape[0]) * -1
targets_d_down = torch.ones(regression.shape[0]) * -1
targets_d_up = torch.ones(regression.shape[0]) * -1
#map instances
instance = torch.ones(regression.shape[0]) * -1
targets = targets.cuda()
targets_d_left = targets_d_left.cuda()
targets_d_right = targets_d_right.cuda()
targets_d_down = targets_d_down.cuda()
targets_d_up = targets_d_up.cuda()
instance = instance.cuda()
new_feature_idx = np.cumsum([0] + [feature_shapes[pyramid_idx][0] * feature_shapes[pyramid_idx][1] for pyramid_idx in range(len(pyramid_levels))])
for pyramid_idx in range(len(pyramid_levels)):
#create indices for looping over features
next_feature_index = int(new_feature_idx[pyramid_idx + 1])
last_feature_idx = int(new_feature_idx[pyramid_idx])
# TODO: There is a redundancy in this loop take care when have time
feature_indices = torch.arange(last_feature_idx, next_feature_index).cuda().long()
#Fill up ignoring boxes with -1
for box in ignoring_boxes:
x_indices_inside_igbox = (x_grid_order[feature_indices] >= (box[pyramid_idx][0] + 1).cuda().long()) * (
x_grid_order[feature_indices] <= (box[pyramid_idx][2]).cuda().long())
y_indices_inside_igbox = (y_grid_order[feature_indices] >= (box[pyramid_idx][1] + 1).cuda().long()) * (
y_grid_order[feature_indices] <= (box[pyramid_idx][3]).cuda().long())
#only return indices where both x and y coordinates are inside ignoring box
                    pixel_indices_inside_igbox = x_indices_inside_igbox * y_indices_inside_igbox  # boolean mask, not an index tensor
#compute class
box_class = box[0, 4].long()
#from bool indices to regular indices
regular_pixel_indices_inside_igbox = pixel_indices_inside_igbox.nonzero().view(-1)
                    #Fill targets inside the ignoring box with -1. (for the right class)
                    #Still have to give preference to smaller objects as in paper
#combine indices
combined_indices = feature_indices[regular_pixel_indices_inside_igbox]
targets[combined_indices, box_class] = -1.
#Fill up effective boxes with 1
for i, box in enumerate(effective_boxes):
x_indices_inside_effbox = (x_grid_order[feature_indices] >= (box[pyramid_idx][0]).cuda().long()) * (
x_grid_order[feature_indices] <= (box[pyramid_idx][2] + 1.).cuda().long())
y_indices_inside_effbox = (y_grid_order[feature_indices] >= (box[pyramid_idx][1]).cuda().long()) * (
y_grid_order[feature_indices] <= (box[pyramid_idx][3] + 1.).cuda().long())
#only return indices where both x and y coordinates are inside effective box
bool_pixel_indices_inside_effbox = x_indices_inside_effbox * y_indices_inside_effbox
#compute class
box_class = box[0, 4].long()
# from bool indices to regular indices
pixel_indices_inside_effbox = bool_pixel_indices_inside_effbox.nonzero().view(-1)
#combine indices
combined_indices = feature_indices[pixel_indices_inside_effbox]
#Fill targets inside effective box with 1. (for the right class)
#TODO: Still have to give preference to smaller objects in classification as in paper
#TODO: But maybe regression best not be trained at all?
targets[combined_indices, box_class] = 1.
projections_for_single_box = projection_boxes[i]
#fill regression targets inside effective box indices
targets_d_left[combined_indices] = x_grid_order[combined_indices].float() - projections_for_single_box[pyramid_idx][0].cuda()
targets_d_right[combined_indices] = projections_for_single_box[pyramid_idx][2].cuda() - x_grid_order[combined_indices].float()
targets_d_down[combined_indices] = y_grid_order[combined_indices].float() - projections_for_single_box[pyramid_idx][1].cuda()
targets_d_up[combined_indices] = projections_for_single_box[pyramid_idx][3].cuda() - y_grid_order[combined_indices].float()
instance[combined_indices] = i
            # TODO: consider making an exception for smaller objects when they overlap with larger ones
#compute classification loss
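            # Focal loss (Lin et al., 2017): FL(p_t) = -alpha_t * (1 - p_t)^gamma * log(p_t),
            # which down-weights easy, well-classified pixels so training focuses on hard ones.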
alpha_factor = torch.ones(targets.shape).cuda() * alpha
alpha_factor = torch.where(torch.eq(targets, 1.), alpha_factor, 1. - alpha_factor)
focal_weight = torch.where(torch.eq(targets, 1.), 1. - classification, classification)
focal_weight = alpha_factor * torch.pow(focal_weight, gamma)
bce = -(targets * torch.log(classification) + (1.0 - targets) * torch.log(1.0 - classification))
# cls_loss = focal_weight * torch.pow(bce, gamma)
cls_loss = focal_weight * bce
            #only back-propagate gradients where the target is not -1, i.e. set the loss to zero inside ignoring regions
cls_loss = torch.where(torch.ne(targets, -1.0), cls_loss, torch.zeros(cls_loss.shape).cuda())
#Normalize with number of pixels in effective region
num_in_effective_region = (targets == 1.).sum()
norm_sum_cls_loss = cls_loss.sum()/torch.clamp(num_in_effective_region.float(), min=1.0)
classification_losses.append(norm_sum_cls_loss)
# compute the loss for regression
if num_in_effective_region == 0:
regression_losses.append(torch.tensor(0).float().cuda())
continue
targets_reg = torch.stack((targets_d_left, targets_d_right, targets_d_down, targets_d_up))
targets_reg = targets_reg.t()
            #assert ((targets_reg < -1.).sum() > 0), "something isn't right about the computation of the target regression values"
#compute indices for effective region
indices_for_eff_region = (targets == 1.).nonzero()[:,0]
#only compute regression for effective region
targets_reg = targets_reg[indices_for_eff_region]
regression = regression[indices_for_eff_region]
pyramid = pyramid[indices_for_eff_region]
instance = instance[indices_for_eff_region]
            #retrieve pixels from the classification branch that are in the effective region and then in the rest region
class_indices_for_eff_region = (targets == 1.).nonzero()
class_indices_for_rest_region = (targets == 0.).nonzero()
eff_cls_loss = cls_loss[class_indices_for_eff_region[:,0],class_indices_for_eff_region[:,1]]
rest_cls_loss = cls_loss[class_indices_for_rest_region[:,0], class_indices_for_rest_region[:,1]]
#compute loss for each pixel
if IOULoss:
# normalization constant
targets_reg = targets_reg / s_norm
x_gt = (targets_reg[:, 2] + targets_reg[:, 3] + 1.) * (targets_reg[:, 0] + targets_reg[:, 1] + 1.)
x_pred = (regression[:, 2] + regression[:, 3] + 1.) * (regression[:, 0] + regression[:, 1] + 1.)
i_h = torch.where(targets_reg[:, 2] < regression[:, 2], targets_reg[:, 2], regression[:, 2]) + \
torch.where(targets_reg[:, 3] < regression[:, 3], targets_reg[:, 3], regression[:, 3]) + 1.
i_w = torch.where(targets_reg[:, 0] < regression[:, 0], targets_reg[:, 0], regression[:, 0]) + \
torch.where(targets_reg[:, 1] < regression[:, 1], targets_reg[:, 1], regression[:, 1]) + 1.
i = i_h * i_w
u = x_pred + x_gt - i
IOU = i / u
IOU = torch.clamp(IOU,1e-4, 1.0 - 1e-4)
regression_loss = -torch.log(IOU)
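                # -log(IoU) approaches 0 as the predicted and target boxes overlap perfectly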
else:
#targets_reg = targets_reg / s_norm #in anchors its 0.1
regression_diff = torch.abs(targets_reg - regression)
regression_loss = torch.where(torch.le(regression_diff, 1.0 / 9.0), 0.5 * 9.0 * torch.pow(regression_diff, 2), regression_diff - 0.5 / 9.0)
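                # this is the smooth L1 (Huber) loss with transition point 1/9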
            # combine the classification and regression losses per instance and per pyramid level
losses_for_these_instances_ls = []
follow_pyramid_losses = []
for unique_instance in torch.unique(instance):
losses_for_this_instance_ls = []
for level in pyramid_levels:
bool_indices_for_pyramid_level = (pyramid == level)
bool_indices_for_instance = (instance == unique_instance)
combined_bool_indices = bool_indices_for_pyramid_level * bool_indices_for_instance
if combined_bool_indices.sum() == 0.:
continue
comb_indices = combined_bool_indices.nonzero().view(-1)
reg_loss_per_instance_per_pyramid_level = regression_loss[comb_indices]
cls_loss_per_instance_per_pyramid_level = eff_cls_loss[comb_indices]
                    assert(reg_loss_per_instance_per_pyramid_level.mean() > 0), "weird, instance: " + str(unique_instance.item()) + " and pyramid: " + str(level) + " have regression mean zero"
loss_per_instance_per_pyramid_level = reg_loss_per_instance_per_pyramid_level.mean() + cls_loss_per_instance_per_pyramid_level.mean()
#TODO: THIS LOSS
losses_for_this_instance_ls.append(loss_per_instance_per_pyramid_level)
if args.perc:
sorted_losses_for_this_instance_ls = sorted(losses_for_this_instance_ls)
normalized_ls = [sorted_losses_for_this_instance_ls[i] / (i + 1) ** t_val for i in
range(len(sorted_losses_for_this_instance_ls))]
loss_for_this_instance = sum(normalized_ls)
else:
loss_for_this_instance = torch.prod(torch.stack(losses_for_this_instance_ls))
follow_pyramid_losses.append([round(loss.item(), 2) for loss in losses_for_this_instance_ls])
losses_for_these_instances_ls.append(loss_for_this_instance)
losses_for_these_instances = torch.stack(losses_for_these_instances_ls)
total_loss = losses_for_these_instances.mean() + rest_cls_loss.mean() * rest_norm
losses.append(total_loss)
# return torch.stack(losses), follow_pyramid_losses
return torch.stack(losses)
| 56.55102 | 190 | 0.599483 | 2,052 | 16,626 | 4.557018 | 0.132066 | 0.022137 | 0.038499 | 0.019677 | 0.452572 | 0.341354 | 0.266923 | 0.219014 | 0.161801 | 0.100096 | 0 | 0.022749 | 0.294058 | 16,626 | 293 | 191 | 56.744027 | 0.773963 | 0.164562 | 0 | 0.04918 | 0 | 0 | 0.003905 | 0 | 0 | 0 | 0 | 0.003413 | 0.005464 | 1 | 0.010929 | false | 0 | 0.016393 | 0 | 0.043716 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33503966c1b82f623fdd0f3ad54a0c01ea1f3301 | 538 | py | Python | ca_candidates/people.py | dogooderapp/scrapers-ca | 5e852eea93f05f3f397eec318d3094a3b1b0b458 | [
"MIT"
] | 19 | 2015-05-26T03:18:50.000Z | 2022-01-31T03:27:41.000Z | ca_candidates/people.py | dogooderapp/scrapers-ca | 5e852eea93f05f3f397eec318d3094a3b1b0b458 | [
"MIT"
] | 119 | 2015-01-09T06:09:35.000Z | 2022-01-20T23:05:05.000Z | ca_candidates/people.py | dogooderapp/scrapers-ca | 5e852eea93f05f3f397eec318d3094a3b1b0b458 | [
"MIT"
] | 17 | 2015-11-23T05:00:10.000Z | 2021-09-15T16:03:33.000Z | from utils import CSVScraper
from datetime import date
class CanadaCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQg-GxpZeCdOMumAu5AbmDC6Ff2fXpcnSkIGaKTbow_PPwtznC4riqKxBuJZlX4c7XB4n7opnPzFdGI/pub?output=csv'
updated_at = date(2019, 4, 17)
contact_person = 'andrew@newmode.net'
encoding = 'utf-8'
corrections = {
'district name': {
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
| 31.647059 | 158 | 0.708178 | 62 | 538 | 6.048387 | 0.806452 | 0.032 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.040909 | 0.182156 | 538 | 16 | 159 | 33.625 | 0.811364 | 0 | 0 | 0 | 0 | 0.076923 | 0.366171 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.153846 | 0.076923 | 0.769231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
33564820dffbc61c9061537e2209b4a4732c0f0c | 2,730 | py | Python | pepperboard/dashboards/__init__.py | webedia-dev/pepperboard | 904020f95feb8cda3a821af20bd2689564a99a5d | [
"Apache-2.0"
] | 29 | 2016-11-04T10:48:12.000Z | 2019-07-25T00:43:35.000Z | pepperboard/dashboards/__init__.py | webedia-dev/pepperboard | 904020f95feb8cda3a821af20bd2689564a99a5d | [
"Apache-2.0"
] | 7 | 2016-11-30T16:53:39.000Z | 2019-05-08T07:31:22.000Z | pepperboard/dashboards/__init__.py | webedia-dev/pepperboard | 904020f95feb8cda3a821af20bd2689564a99a5d | [
"Apache-2.0"
] | 6 | 2017-02-28T02:17:38.000Z | 2021-03-02T06:38:43.000Z | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
import importlib
import sys
from datetime import datetime
def gendashboard(ident, output, nthreads=None, input=None):
modulename = "pepperboard.dashboards."+ident
mod = importlib.import_module(modulename, package=None)
if "gendata" in dir(mod):
gentable(mod.gendata(input, nthreads), output)
elif "gendash" in dir(mod):
mod.gendash(output, nthreads)
else:
print("Error : gendata() or gendash() methods not found in "+mod.__name__)
sys.exit(2)
def gentable(input, output):
foutput = open(output, 'w')
foutput.write('<html><head>')
foutput.write(
'<script type=\"text/javascript\">\nfunction toggle_hstlist() {\nvar e = document.querySelectorAll (\".hstlist\")\nfor (var i = 0; i < e.length; i++) {\nvar el = e[i];\nif(el.style.display == \'block\')\nel.style.display = \'none\';\nelse\nel.style.display = \'block\';\n}\n}\n</script>')
foutput.write(
'<script type=\"text/javascript\">\nfunction toggle_visibility(id) {\nvar e = document.getElementById(id);\nif(e.style.display == \'block\')\ne.style.display = \'none\';\nelse\ne.style.display = \'block\';\n}\n</script>')
foutput.write('<script src=\"//www.kryogenix.org/code/browser/sorttable/sorttable.js\"></script>')
foutput.write('<style>table.sortable th:not(.sorttable_sorted):not(.sorttable_sorted_reverse):not(.sorttable_nosort):after {content: \" \\25B4\\25BE\"}</style>')
foutput.write('<link rel=\"stylesheet\" href=\"//yui-s.yahooapis.com/pure/0.6.0/pure-min.css\">')
foutput.write('<title>Pepperboard</title>')
foutput.write('</head>')
foutput.write('<body><div>')
foutput.write('<table class=\"pure-table pure-table-bordered sortable\"><thead><tr>')
for header in input['headers']:
foutput.write('<th>'+header+'</th>')
foutput.write('</tr></thead><tbody>\n')
for k, v in input['data'].items():
if input['ncol'] == 2:
foutput.write('<tr><td valign=\"top\">'+str(k)+'</td><td>')
if isinstance(v,dict):
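                # 'header' here is the last column header left over from the loop above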
foutput.write(str(v[header]))
else:
foutput.write(str(v))
foutput.write('</td></tr>\n')
else:
foutput.write('<tr><td valign=\"top\">'+str(k)+'</td>')
for header in input['headers'][1:]:
if header in v:
foutput.write('<td>' + str(v[header]) + '</td>')
else:
foutput.write('<td>grain '+header+' not available</td>')
foutput.write('</tr>\n')
foutput.write('</tbody></table>Last updated on ' + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + '</body></html>')
foutput.close()
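# A minimal sketch of the input gentable() expects; the keys are the ones read
# above, but the example values are hypothetical:
#
#   data = {'headers': ['host', 'status'],
#           'ncol': 2,
#           'data': {'web01': 'up', 'web02': 'down'}}
#   gentable(data, '/tmp/dashboard.html')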
| 48.75 | 296 | 0.595604 | 348 | 2,730 | 4.640805 | 0.405172 | 0.156037 | 0.042105 | 0.027245 | 0.172755 | 0.122601 | 0.101548 | 0.101548 | 0.03839 | 0 | 0 | 0.006338 | 0.190842 | 2,730 | 55 | 297 | 49.636364 | 0.724762 | 0.015385 | 0 | 0.125 | 0 | 0.104167 | 0.450112 | 0.219285 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0 | 0.083333 | 0 | 0.125 | 0.020833 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3356679e111d9cdea788645c03c6225de7d20600 | 1,656 | py | Python | hcloud/load_balancer_types/domain.py | DoctorJohn/hcloud-python | fad870790a19621fd130fd28d564a8d6ba7a566c | [
"MIT"
] | 156 | 2019-02-21T09:19:07.000Z | 2022-03-14T12:19:23.000Z | hcloud/load_balancer_types/domain.py | DoctorJohn/hcloud-python | fad870790a19621fd130fd28d564a8d6ba7a566c | [
"MIT"
] | 69 | 2018-12-20T16:05:30.000Z | 2022-03-28T21:06:31.000Z | hcloud/load_balancer_types/domain.py | DoctorJohn/hcloud-python | fad870790a19621fd130fd28d564a8d6ba7a566c | [
"MIT"
] | 37 | 2019-02-22T08:32:07.000Z | 2022-02-02T12:42:45.000Z | # -*- coding: utf-8 -*-
from hcloud.core.domain import BaseDomain, DomainIdentityMixin
class LoadBalancerType(BaseDomain, DomainIdentityMixin):
"""LoadBalancerType Domain
:param id: int
ID of the Load Balancer type
:param name: str
Name of the Load Balancer type
:param description: str
Description of the Load Balancer type
:param max_connections: int
Max amount of connections the Load Balancer can handle
:param max_services: int
Max amount of services the Load Balancer can handle
:param max_targets: int
Max amount of targets the Load Balancer can handle
:param max_assigned_certificates: int
Max amount of certificates the Load Balancer can serve
:param prices: Dict
Prices in different locations
"""
__slots__ = (
"id",
"name",
"description",
"max_connections",
"max_services",
"max_targets",
"max_assigned_certificates",
"prices",
)
def __init__(
self,
id=None,
name=None,
description=None,
max_connections=None,
max_services=None,
max_targets=None,
max_assigned_certificates=None,
prices=None,
):
self.id = id
self.name = name
self.description = description
self.max_connections = max_connections
self.max_services = max_services
self.max_targets = max_targets
self.max_assigned_certificates = max_assigned_certificates
self.prices = prices
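# A minimal usage sketch (the field values below are hypothetical, not real
# Hetzner Cloud data):
#
#   lb_type = LoadBalancerType(id=1, name="lb11", max_connections=10000)
#   print(lb_type.id_or_name)  # provided by DomainIdentityMixin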
| 29.052632 | 67 | 0.605676 | 175 | 1,656 | 5.542857 | 0.245714 | 0.050515 | 0.108247 | 0.057732 | 0.179381 | 0.179381 | 0.098969 | 0 | 0 | 0 | 0 | 0.000906 | 0.333333 | 1,656 | 56 | 68 | 29.571429 | 0.877717 | 0.383454 | 0 | 0 | 0 | 0 | 0.096738 | 0.028121 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032258 | false | 0 | 0.032258 | 0 | 0.129032 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
335cdb5e1fe73a737e52b94f9778d0f6a44f3421 | 10,712 | py | Python | main_scripts/LSTM_sea_level_prediction_w_tuner.py | LOCO-EX/LSTM_tidal_prediction | 76f0d700746d1a54d0b9ae48d6563ac4cc56b94c | [
"Apache-2.0"
] | null | null | null | main_scripts/LSTM_sea_level_prediction_w_tuner.py | LOCO-EX/LSTM_tidal_prediction | 76f0d700746d1a54d0b9ae48d6563ac4cc56b94c | [
"Apache-2.0"
] | null | null | null | main_scripts/LSTM_sea_level_prediction_w_tuner.py | LOCO-EX/LSTM_tidal_prediction | 76f0d700746d1a54d0b9ae48d6563ac4cc56b94c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This script uses an LSTM network in keras to predict the tides (sea level) as a function of astronomical motions
It is based on an example given in
https://machinelearningmastery.com/multivariate-time-series-forecasting-lstms-keras/
For this script to work it is necessary to already have the time series of the relative postion of the Moon and the Sun.
This time series can be obtained by running the script
@author: Matias Duran-Matute (m.duran.matute@tue.nl)
"""
#%%
from math import sqrt
from numpy import concatenate
from matplotlib import pyplot
from pandas import read_csv
from pandas import DataFrame
from pandas import concat
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
from tensorflow.keras.optimizers import Adam
import keras_tuner as kt
import pandas as pd
import numpy as np
import datetime
# %% convert series to supervised learning
def series_to_supervised(data, n_in=1, n_out=1, n_f=1, dropnan=True):
n_vars = 1 if type(data) is list else data.shape[1]
df = DataFrame(data)
cols, names = list(), list()
# input sequence (t-n, ... t-1)
for i in range(n_in-1, 0, -1):
cols.append(df.loc[:,0:(n_f-1)].shift(i))
#
for i in range(n_in, 0, -1):
names += [('var%d(t-%d)' % (j+1, i-1)) for j in range(n_f)]
# forecast sequence (t, t+1, ... t+n)
for i in range(0, n_out):
cols.append(df.shift(-i))
if i == 0:
names += [('var%d(t)' % data.shape[1])]
else:
names += [('var%d(t+%d)' % (j+1, i)) for j in range(n_out)]
# put it all together
agg = concat(cols, axis=1)
agg.columns = names
# drop rows with NaN values
if dropnan:
agg.dropna(inplace=True)
return agg
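# Illustrative framing (shapes hypothetical): for a (T, 13) scaled array of 12
# predictors plus sea level, series_to_supervised(scaled, 144, 1, 12) returns
# one row per usable time step t, holding the 12 predictors at each of the 144
# steps up to and including t (144 * 12 = 1728 columns) with the sea level at t
# as the final column, i.e. the training target.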
#%% Define model with tuner
class MyHyperModel(kt.HyperModel):
def build(self, hp):
#nl = 2
n_layers = 2 #hp.Int('n_layers', 1, nl)
model = Sequential()
if n_layers == 1:
model.add(LSTM(hp.Int('lstm_1_units',min_value=24,max_value=72,step=12),input_shape=(train_X.shape[1], train_X.shape[2])))
else:
model.add(LSTM(hp.Int('lstm_1_units',min_value=24,max_value=72,step=12),return_sequences=True,input_shape=(train_X.shape[1], train_X.shape[2])))
        for i in range(2, n_layers + 1):
            # only intermediate layers return sequences; the final LSTM outputs a single vector for the Dense head
            model.add(LSTM(hp.Int(f'lstm_{i}_units', min_value=4, max_value=28, step=8),
                           return_sequences=(i < n_layers)))
#for j in range(n_layers+1, nl+1):
# void = hp.Int(f'lstm_{j}_units', min_value = 0, max_value = 0)
model.add(Dense(1))
#learning_rate = hp.Float("lr", min_value=1e-4, max_value=1e-3, sampling="log")
learning_rate = hp.Float("lr", min_value=5e-4, max_value=1e-3, sampling="log")
        #model.compile(hp.Choice("loss", ["mse", "mae"]), optimizer="adam", metrics=["mse"]) #alternative: tune the loss itself ("mse" or "mae")
        model.compile(loss="mse", optimizer=Adam(learning_rate=learning_rate), metrics=["mse"])  # pass the tuned learning rate to the optimizer
return model
def fit(self, hp, model, *args, **kwargs):
return model.fit(
*args,
batch_size=hp.Choice("batch_size", [48, 72, 96]),
#batch_size = 96
**kwargs,
)
# %% Load data
# Load sea level data
p_name = "test_SL_0"
# Load atmospheric data
W = pd.read_csv('data/Wind_data_10min.csv')
#L = pd.read_csv('data/level_DH_10min.csv')
L = pd.read_csv('data/level_DH_10min.csv')
L["level"] = L["level"][:]
# Load astronomic data
A = pd.read_csv('data/astronomic_10min.csv')
A = A[:-1]
# %%
ti = datetime.datetime(1996,1,1,0,0) #Starting date
tf = datetime.datetime(1998,7,1,1,0) #End date
ti_d = ( ti - datetime.datetime(1970,1,1)).total_seconds()/86400.
tf_d = ( tf - datetime.datetime(1970,1,1)).total_seconds()/86400.
idi = (np.abs(L['time']-ti_d)).argmin()
idf = (np.abs(L['time']-tf_d)).argmin()
nt = 2 # This can be used to reduce temporal resolution (see following lines)
L = L[idi:idf:nt]
A = A[idi:idf:nt]
W = W[idi:idf:nt]
#%% Moon and sun azimuth into sine and cosine
gdr = np.pi/180 # useful to transform from degrees to radians
ma_cos = np.cos(A['azimuth_moon_deg']*gdr)
ma_sin = np.sin(A['azimuth_moon_deg']*gdr)
sa_cos = np.cos(A['azimuth_sun_deg']*gdr)
sa_sin = np.sin(A['azimuth_sun_deg']*gdr)
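# Encoding the azimuths as sine/cosine pairs keeps the angle continuous across
# the 0/360-degree wrap-around, which raw degrees would not be.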
# %% Arrange data
# FULL INPUT (three variables for each Moon and Sun position)
tmp = np.stack((A['altitude_moon_deg'], A['distance_moon_au'], ma_cos, ma_sin,
A['altitude_sun_deg'], A['distance_sun_au'], sa_cos, sa_sin,
W['wind_speed'],W['sine_wind_angle'],W['cosine_wind_angle'],W['pressure'],
L['level'][0:ma_cos.shape[0]]))
d = {'altitude_moon_deg': tmp[0,:], 'distance_moon_au': tmp[1,:]**(-3), 'azimuth_moon_cos': tmp[2,:], 'azimuth_moon_sin': tmp[3,:],
'altitude_sun_deg': tmp[4,:], 'distance_sun_au': tmp[5,:]**(-3), 'azimuth_sun_cos': tmp[6,:], 'azimuth_sun_sin': tmp[7,:],
'wind_speed': tmp[8,:], 'sin_wind_dir': tmp[9,:], 'cos_wind_dir': tmp[10,:], 'pressure': tmp[11,:],
'level': tmp[12,:]}
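# The distances enter as inverse cubes because tidal forcing scales as 1/r^3.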
# ONLY MOON
#tmp = np.stack((A['altitude_moon_deg'], A['distance_moon_au'], ma_cos, ma_sin,tide['h']))
#d = {'altitude_moon_deg': tmp[0,:], 'distance_moon_au': tmp[1,:], 'azimuth_moon_cos': tmp[2,:], 'azimuth_moon_sin': tmp[3,:], 'level': tmp[4,:]}
dataset = pd.DataFrame(data=d)
values = dataset.values
nsamples = values.shape[0]  # total number of time samples
n_train_periods = int(nsamples*0.7) #percentage for training
n_test_periods = int(nsamples*0.3) #percentage for testing
# %%
# ensure all data is float
values = values.astype('float32')
# normalize features
#scaler = MinMaxScaler(feature_range=(0, 1))
scaler = StandardScaler()
sc_fit = scaler.fit(values[:n_train_periods,:])
scaled = scaler.transform(values)
# parameters for framing the series as supervised learning
n_steps_in = 144 # number of previous time steps used as input for each prediction
n_steps_out = 1 # number of time steps to predict (a single step ahead here)
n_features = 12 # number of features (variables) used to predict
# frame as supervised learning
reframed = series_to_supervised(scaled, n_steps_in, n_steps_out, n_features)
reframed.shape
# %%
# split into train and test sets
values = reframed.values
train = values[:n_train_periods, :]
test = values[n_train_periods:(n_test_periods+n_train_periods), :]
# split into input and outputs (works only with n_steps_in=n_steps_out=1)
n_obs = n_steps_in * n_features  # number of predictor columns per sample (144 * 12)
#for predicting sea level at time t using predictors at time <=t
train_X, train_y = train[:, :n_obs], train[:, -1]
test_X, test_y = test[:, :n_obs], test[:, -1]
#
#%%
print(train_X.shape, train_y.shape)
# reshape input to be 3D [samples, timesteps, features]
train_X = train_X.reshape((train_X.shape[0], n_steps_in, n_features))
test_X = test_X.reshape((test_X.shape[0], n_steps_in, n_features))
print(train_X.shape, train_y.shape, test_X.shape, test_y.shape)
#%%
tuner = kt.BayesianOptimization(
MyHyperModel(),
objective="val_mse",
max_trials=10,
executions_per_trial=2,
overwrite=True,
directory="./tuner",
project_name=p_name,
)
tuner.search(train_X, train_y, epochs=2, validation_data=(test_X, test_y))
#tuner.search(train_X, train_y, epochs=2, validation_data=(test_X, test_y), callbacks=[tf.keras.callbacks.EarlyStopping('val_loss', patience=10)])
tuner.search_space_summary()
tuner.results_summary()
#%% Determine optimal hyperparameters
# Get the optimal hyperparameters
best_hps=tuner.get_best_hyperparameters()[0]
#%% Find optimal epoch
model = tuner.hypermodel.build(best_hps)
history = model.fit(train_X, train_y, epochs=120, validation_data=(test_X, test_y))
val_mse_per_epoch = history.history['val_mse']
best_epoch = val_mse_per_epoch.index(min(val_mse_per_epoch)) + 1
print('Best epoch: %d' % (best_epoch,))
# Retrain the model
model2 = tuner.hypermodel.build(best_hps)
history_b = model2.fit(train_X, train_y, epochs=best_epoch, validation_data=(test_X, test_y))
# plot history
pyplot.plot(history_b.history['loss'], label='train')
pyplot.plot(history_b.history['val_loss'], label='test')
pyplot.legend()
pyplot.savefig("./models/loss.png", dpi=150)
pyplot.close()
# %% Save model
#best_model = tuner.get_best_models(num_models=1)[0]
model2.save('./models/')
# %% Make a prediction
yhat = model2.predict(test_X)
test_X0 = test_X.reshape((test_X.shape[0], n_steps_in*n_features))
# invert scaling for forecast
#inv_yhat = concatenate((yhat, test_X[:, -7:]), axis=1)
inv_yhat = concatenate((test_X0,yhat), axis=1)
inv_yhat = scaler.inverse_transform(inv_yhat[:,-(n_features+1):])
inv_yhat = inv_yhat[:,-1]
# invert scaling for actual
test_y0 = test_y.reshape((len(test_y), 1))
#inv_y = concatenate((test_y, test_X[:, -4:]), axis=1)
inv_y = concatenate((test_X0,test_y0), axis=1)
inv_y = scaler.inverse_transform(inv_y[:,-(n_features+1):])
inv_y = inv_y[:,-1]
# calculate RMSE
rmse = sqrt(mean_squared_error(inv_y, inv_yhat))
print('Test RMSE: %.3f' % rmse)
print('Test std: %.3f' % inv_y.std())
tuner.search_space_summary()
print(f"""
The hyperparameter search is complete. The optimal number of units in the first densely-connected
layer is {best_hps.get('units')} and the optimal learning rate for the optimizer
is {best_hps.get('lr')}.
""")
# %% Comparison plots
t = L['time']-L['time'][idi]
pyplot.plot(inv_y, inv_yhat,'o')
pyplot.xlabel("data")
pyplot.ylabel("prediction")
pyplot.grid()
pyplot.axis([500,900,500,900])
pyplot.axis("equal")
pyplot.savefig('./models/comp1.png', dpi=150)
pyplot.close()
pyplot.plot(t[0:inv_y.size],inv_y,'r',label="data")
pyplot.plot(t[0:inv_y.size],inv_yhat,'b:',label="prediction")
pyplot.legend()
pyplot.savefig('./models/comp2.png', dpi=150)
pyplot.close()
pyplot.plot(t[0:600],inv_y[0:600],'r',label="data")
pyplot.plot(t[0:600],inv_yhat[0:600],'b:',label="prediction")
pyplot.legend()
pyplot.savefig('./models/comp3.png', dpi=150)
pyplot.close()
# %% Comparison ffts
freq = np.fft.fftfreq(inv_y.size, d=t[idi+nt])[0:int(inv_y.size/4)]
fft_y = np.abs(np.fft.fft(inv_y))[0:int(inv_y.size/4)]
#fft_y = fft_y[0:int(inv_y.size/2)]
fft_yhat = np.abs(np.fft.fft(inv_yhat))[0:int(inv_yhat.size/4)]
pyplot.plot(freq,fft_yhat)
pyplot.plot(freq,fft_y)
pyplot.yscale('log')
pyplot.savefig('./models/spectrum.png', dpi=150)
pyplot.close()
| 34.333333 | 232 | 0.693615 | 1,785 | 10,712 | 3.977031 | 0.220728 | 0.010142 | 0.009015 | 0.008874 | 0.29807 | 0.237217 | 0.194534 | 0.158755 | 0.117622 | 0.095647 | 0 | 0.029844 | 0.142924 | 10,712 | 311 | 233 | 34.44373 | 0.743383 | 0.290235 | 0 | 0.066298 | 0 | 0 | 0.134211 | 0.018223 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016575 | false | 0 | 0.093923 | 0.005525 | 0.132597 | 0.033149 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
335d723c7aae47c37accfa08219fe3758423d305 | 483 | py | Python | leetcode/405.py | windniw/just-for-fun | 54e5c2be145f3848811bfd127f6a89545e921570 | [
"Apache-2.0"
] | 1 | 2019-08-28T23:15:25.000Z | 2019-08-28T23:15:25.000Z | leetcode/405.py | windniw/just-for-fun | 54e5c2be145f3848811bfd127f6a89545e921570 | [
"Apache-2.0"
] | null | null | null | leetcode/405.py | windniw/just-for-fun | 54e5c2be145f3848811bfd127f6a89545e921570 | [
"Apache-2.0"
] | null | null | null | """
link: https://leetcode.com/problems/convert-a-number-to-hexadecimal
problem: convert a decimal integer in [-2^31, 2^31-1] to hexadecimal, without using library functions
solution: map a negative number to its equivalent unsigned int, then repeatedly take the modulus
"""
class Solution:
def toHex(self, num: int) -> str:
def f(x: int) -> str:
return str(x) if x < 10 else chr(x - 10 + 97)
if num < 0:
num = 0x100000000 + num
res = ""
while num:
res += f(num % 16)
num //= 16
return res[::-1] if res else '0' | 23 | 67 | 0.519669 | 68 | 483 | 3.691176 | 0.588235 | 0.023904 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098765 | 0.329193 | 483 | 21 | 68 | 23 | 0.675926 | 0.296066 | 0 | 0 | 0 | 0 | 0.003021 | 0 | 0 | 0 | 0.033233 | 0 | 0 | 1 | 0.181818 | false | 0 | 0 | 0.090909 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
335dc75e023cb25b3f777fc37538aa31a81563a9 | 2,108 | py | Python | delete_snapshot.py | XAjzh/automate-management-of-vultr | e271231aad1b29f8a74d4279aff5df9ce230b6a1 | [
"MIT"
] | 1 | 2022-03-03T13:14:59.000Z | 2022-03-03T13:14:59.000Z | delete_snapshot.py | XAjzh/automate-management-of-vultr | e271231aad1b29f8a74d4279aff5df9ce230b6a1 | [
"MIT"
] | null | null | null | delete_snapshot.py | XAjzh/automate-management-of-vultr | e271231aad1b29f8a74d4279aff5df9ce230b6a1 | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2020 xaj
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import requests, os
auth_head = {'API-Key': os.getenv("VULTR_KEY")}
api_url = "https://api.vultr.com/v1"
snapshot_list_url = api_url + '/snapshot/list'
server_list_url = api_url + '/server/list'
snapshot_delete_url = api_url + '/snapshot/destroy'
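# Only delete the auto-created snapshot if the auto-deployed server still exists.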
server_list = requests.get(server_list_url, headers=auth_head)
have_vm = False
if server_list.json():
for k, v in server_list.json().items():
if v['label'] == 'auto_deployed_server':
have_vm = True
break
if not have_vm:
exit(-1)
snapshot_list = requests.get(snapshot_list_url, headers=auth_head)
snapshot_delete_data = {}
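# Locate the snapshot created by the automation via its description label.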
if snapshot_list.json():
for k,v in snapshot_list.json().items():
if v['description'] == 'auto_created_snapshot':
snapshot_delete_data['SNAPSHOTID']=k
if 'SNAPSHOTID' not in snapshot_delete_data:
exit(-1)
res = requests.post(snapshot_delete_url, headers=auth_head, data=snapshot_delete_data)
if res.status_code != 200:
exit(-1)
| 35.133333 | 86 | 0.736243 | 319 | 2,108 | 4.730408 | 0.454545 | 0.058317 | 0.047714 | 0.035785 | 0.070245 | 0.019881 | 0 | 0 | 0 | 0 | 0 | 0.007446 | 0.171727 | 2,108 | 59 | 87 | 35.728814 | 0.856816 | 0.516129 | 0 | 0.115385 | 0 | 0 | 0.15984 | 0.020979 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.038462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
335f16a8fd3a5b4d6a14e3eb60bcfe6db63769b6 | 12,392 | py | Python | course/admin.py | hsingh23/courseflow | ba52e4fa964cdeb1042486fb0821809386faa9e9 | [
"Unlicense"
] | null | null | null | course/admin.py | hsingh23/courseflow | ba52e4fa964cdeb1042486fb0821809386faa9e9 | [
"Unlicense"
] | null | null | null | course/admin.py | hsingh23/courseflow | ba52e4fa964cdeb1042486fb0821809386faa9e9 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
__copyright__ = "Copyright (C) 2014 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from django.contrib import admin
from course.models import (
UserStatus,
Course, Event,
Participation, ParticipationPreapproval,
InstantFlowRequest,
FlowSession, FlowPageData,
FlowPageVisit, FlowPageVisitGrade,
FlowAccessException, FlowAccessExceptionEntry,
GradingOpportunity, GradeChange, InstantMessage)
from django import forms
from course.enrollment import (approve_enrollment, deny_enrollment)
# {{{ user status
class UserStatusAdmin(admin.ModelAdmin):
def get_user_first_name(self, obj):
return obj.user.first_name
get_user_first_name.short_description = "First name"
get_user_first_name.admin_order_field = "user__first_name"
def get_user_last_name(self, obj):
return obj.user.last_name
get_user_last_name.short_description = "Last name"
get_user_last_name.admin_order_field = "user__last_name"
list_display = (
"user",
"get_user_first_name",
"get_user_last_name",
"status",
"key_time")
list_filter = ("status",)
date_hierarchy = "key_time"
search_fields = (
"user__username",
"user__first_name",
"user__last_name",
)
def __unicode__(self):
return u"%s in status %s" % (self.user, self.status)
admin.site.register(UserStatus, UserStatusAdmin)
# }}}
class UnsafePasswordInput(forms.TextInput):
# This sends passwords back to the user--not ideal, but OK for the XMPP
# password.
input_type = 'password'
class CourseAdminForm(forms.ModelForm):
class Meta:
model = Course
widgets = {
"course_xmpp_password": UnsafePasswordInput
}
exclude = ()
class CourseAdmin(admin.ModelAdmin):
list_display = ("identifier", "hidden", "valid")
list_filter = ("hidden", "valid",)
form = CourseAdminForm
save_on_top = True
admin.site.register(Course, CourseAdmin)
# {{{ events
class EventAdmin(admin.ModelAdmin):
list_display = ("course", "kind", "ordinal", "time", "end_time")
list_filter = ("course", "kind")
date_hierarchy = "time"
def __unicode__(self):
return u"%s %d in %s" % (self.kind, self.ordinal, self.course)
list_editable = ("ordinal", "time", "end_time")
admin.site.register(Event, EventAdmin)
# }}}
# {{{ participation
class ParticipationAdmin(admin.ModelAdmin):
def get_user_first_name(self, obj):
return obj.user.first_name
get_user_first_name.short_description = "First name"
get_user_first_name.admin_order_field = "participation__user__first_name"
def get_user_last_name(self, obj):
return obj.user.last_name
get_user_last_name.short_description = "Last name"
get_user_last_name.admin_order_field = "participation__user__last_name"
list_display = (
"user",
"get_user_first_name",
"get_user_last_name",
"course",
"role",
"status",
"enroll_time")
list_filter = ("course", "role", "status")
search_fields = (
"course__identifier",
"user__username",
"user__first_name",
"user__last_name",
)
actions = [approve_enrollment, deny_enrollment]
admin.site.register(Participation, ParticipationAdmin)
class ParticipationPreapprovalAdmin(admin.ModelAdmin):
list_display = ["email", "course", "role"]
list_filter = ["course", "role"]
search_fields = (
"email",
)
admin.site.register(ParticipationPreapproval, ParticipationPreapprovalAdmin)
# }}}
class InstantFlowRequestAdmin(admin.ModelAdmin):
pass
admin.site.register(InstantFlowRequest, InstantFlowRequestAdmin)
# {{{ flow sessions
class FlowPageDataInline(admin.TabularInline):
model = FlowPageData
extra = 0
class FlowSessionAdmin(admin.ModelAdmin):
def get_participant(self, obj):
if obj.participation is None:
return None
return obj.participation.user
get_participant.short_description = "Participant"
get_participant.admin_order_field = "participation__user"
search_fields = (
"=id",
"flow_id",
"participation__user__username",
"participation__user__first_name",
"participation__user__last_name",
)
list_display = (
"id",
"flow_id",
"get_participant",
"course",
"start_time",
"in_progress",
"for_credit",
)
list_display_links = (
"flow_id",
"get_participant",
)
date_hierarchy = "start_time"
list_filter = (
"course",
"flow_id",
"in_progress",
"for_credit",
)
inlines = (FlowPageDataInline,)
raw_id_fields = ("participation",)
save_on_top = True
admin.site.register(FlowSession, FlowSessionAdmin)
# }}}
# {{{ flow page visit
class FlowPageVisitGradeInline(admin.TabularInline):
model = FlowPageVisitGrade
extra = 0
class FlowPageVisitAdmin(admin.ModelAdmin):
def get_course(self, obj):
return obj.flow_session.course
get_course.short_description = "Course"
get_course.admin_order_field = "flow_session__course"
def get_flow_id(self, obj):
return obj.flow_session.flow_id
get_flow_id.short_description = "Flow ID"
get_flow_id.admin_order_field = "flow_session__flow_id"
def get_page_id(self, obj):
return "%s/%s (%d)" % (
obj.page_data.group_id,
obj.page_data.page_id,
obj.page_data.ordinal)
get_page_id.short_description = "Page ID"
get_page_id.admin_order_field = "page_data__page_id"
def get_participant(self, obj):
if obj.flow_session.participation:
return obj.flow_session.participation.user
else:
return "(anonymous)"
get_participant.short_description = "Participant"
get_participant.admin_order_field = "flow_session__participation"
def get_flow_session_id(self, obj):
return obj.flow_session.id
get_flow_session_id.short_description = "Flow Session ID"
get_flow_session_id.admin_order_field = "flow_session__id"
list_filter = (
"flow_session__participation__course",
"flow_session__flow_id",
"is_graded_answer",
"is_synthetic",
)
date_hierarchy = "visit_time"
list_display = (
"id",
"get_course",
"get_flow_id",
"get_page_id",
"get_participant",
"get_flow_session_id",
"visit_time",
"is_graded_answer",
"is_synthetic",
)
list_display_links = (
"id",
)
search_fields = (
"=id",
"flow_session__flow_id",
"page_data__group_id",
"page_data__page_id",
"flow_session__participation__user__username",
"flow_session__participation__user__first_name",
"flow_session__participation__user__last_name",
)
raw_id_fields = ("flow_session", "page_data")
inlines = (FlowPageVisitGradeInline,)
save_on_top = True
admin.site.register(FlowPageVisit, FlowPageVisitAdmin)
# }}}
# {{{ flow access
class FlowAccessExceptionEntryInline(admin.StackedInline):
model = FlowAccessExceptionEntry
extra = 5
class FlowAccessExceptionAdmin(admin.ModelAdmin):
inlines = (FlowAccessExceptionEntryInline,)
def get_course(self, obj):
return obj.participation.course
get_course.short_description = "Course"
get_course.admin_order_field = "participation__course"
def get_participant(self, obj):
return obj.participation.user
get_participant.short_description = "Participant"
get_participant.admin_order_field = "participation__user"
list_display = (
"get_participant",
"get_course",
"flow_id",
"expiration",
"creation_time",
)
list_display_links = (
"get_participant",
"flow_id",
)
list_filter = (
"participation__course",
"flow_id",
)
date_hierarchy = "creation_time"
raw_id_fields = ("participation",)
admin.site.register(FlowAccessException, FlowAccessExceptionAdmin)
# }}}
# {{{ grading
class GradingOpportunityAdmin(admin.ModelAdmin):
list_display = ("course", "name", "due_time", "identifier",)
list_filter = ("course",)
admin.site.register(GradingOpportunity, GradingOpportunityAdmin)
class GradeChangeAdmin(admin.ModelAdmin):
def get_course(self, obj):
return obj.participation.course
get_course.short_description = "Course"
get_course.admin_order_field = "participation__course"
def get_opportunity(self, obj):
return obj.opportunity.name
get_opportunity.short_description = "Opportunity"
get_opportunity.admin_order_field = "opportunity"
def get_participant(self, obj):
return obj.participation.user
get_participant.short_description = "Participant"
get_participant.admin_order_field = "participation__user"
def get_percentage(self, obj):
return round(100*obj.points/obj.max_points)
get_percentage.short_description = "%"
list_display = (
"get_opportunity",
"get_participant",
"get_course",
"state",
"points",
"get_percentage",
"grade_time",
)
list_display_links = (
"get_opportunity",
"get_participant",
)
date_hierarchy = "grade_time"
search_fields = (
"opportunity__name",
"opportunity__flow_id",
"participation__user__username",
"participation__user__first_name",
"participation__user__last_name",
)
list_filter = (
"opportunity__course",
"opportunity",
"state",
)
raw_id_fields = ("flow_session",)
admin.site.register(GradeChange, GradeChangeAdmin)
# }}}
# {{{ instant message
class InstantMessageAdmin(admin.ModelAdmin):
def get_course(self, obj):
return obj.participation.course
get_course.short_description = "Course"
get_course.admin_order_field = "participation__course"
def get_participant(self, obj):
return obj.participation.user
get_participant.short_description = "Participant"
get_participant.admin_order_field = "participation__user"
list_filter = ("participation__course",)
list_display = (
"get_course",
"get_participant",
"time",
"text",
)
date_hierarchy = "time"
search_fields = (
"text",
"participation__user__username",
"participation__user__first_name",
"participation__user__last_name",
)
admin.site.register(InstantMessage, InstantMessageAdmin)
# }}}
# vim: foldmethod=marker
| 26.535332 | 77 | 0.646788 | 1,288 | 12,392 | 5.858696 | 0.193323 | 0.051816 | 0.03101 | 0.029685 | 0.360191 | 0.326531 | 0.307713 | 0.274185 | 0.264378 | 0.25868 | 0 | 0.001202 | 0.261378 | 12,392 | 466 | 78 | 26.592275 | 0.823227 | 0.02316 | 0 | 0.444099 | 0 | 0 | 0.271764 | 0.057349 | 0 | 0 | 0 | 0 | 0 | 1 | 0.062112 | false | 0.012422 | 0.012422 | 0.055901 | 0.385093 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
335f33cde18fa41e6c1c28005aa93836e370d11e | 2,749 | py | Python | sustainableCityManagement/main_project/Bike_API/views_bike_api/graph_bike_data.py | Josh-repository/Dashboard-CityManager- | 6287881be9fb2c6274a755ce5d75ad355346468a | [
"RSA-MD"
] | null | null | null | sustainableCityManagement/main_project/Bike_API/views_bike_api/graph_bike_data.py | Josh-repository/Dashboard-CityManager- | 6287881be9fb2c6274a755ce5d75ad355346468a | [
"RSA-MD"
] | null | null | null | sustainableCityManagement/main_project/Bike_API/views_bike_api/graph_bike_data.py | Josh-repository/Dashboard-CityManager- | 6287881be9fb2c6274a755ce5d75ad355346468a | [
"RSA-MD"
] | 1 | 2021-05-13T16:33:18.000Z | 2021-05-13T16:33:18.000Z | import os
import random
import tempfile
import uuid
import json
from django.http import JsonResponse
from django.http import HttpResponse
from rest_framework.views import APIView
from django.views.decorators.csrf import csrf_exempt
import time as processTiming
from datetime import timedelta, datetime, time, date
from rest_framework.decorators import api_view
from django.shortcuts import render
from ..graphvalues_bike import GraphValuesBike
# API to fetch bike data -> Historical, live and locations are fetched through this API.
class GraphBikeData(APIView):
# API to fetch bike graph data -> values for graph (location based or overall) are fetched through this API.
@classmethod
def get(self, request, graphvalues_bike = GraphValuesBike()):
startTime = processTiming.time()
call_uuid = uuid.uuid4()
ID = "BIKE_INFO_GRAPH"
result = {}
# try :
inputType = request.query_params.get("location_based", "")
days_historical = request.query_params.get("days_historic", "")
if len(days_historical) == 0 or not days_historical.isnumeric():
raise ValueError('days_historic must contain a number')
days_data = int(days_historical)
# If location_based is yes, then graph values for all the locations is delivered.
if inputType == "yes":
result = graphvalues_bike.graphvalue_call_locationbased(
days_historical=days_data)
# If location_based is no, then graph values are delivered in cumulative format from all the locations.
elif inputType == "no":
result = graphvalues_bike.graphvalue_call_overall(
days_historical=days_data)
else:
return JsonResponse({
"API_ID": ID,
"ERROR": "Give valid query parameters.",
"TIME_TO_RUN": "{} seconds".format(float(round(processTiming.time() - startTime, 2)))}
)
# If query param doesn't match any condition above.
return JsonResponse(
{
"API_ID": ID,
"CALL_UUID": call_uuid,
"DATA": {
"RESULT": result
},
"TIMESTAMP": "{} seconds".format(float(round(processTiming.time() - startTime, 2)))}
)
# except (KeyError, TypeError):
# return JsonResponse({
# "API_ID" : ID,
# "ERROR" : "BIKE_INFO_GRAPH API not working, check fetch_bikeAPI, and check the query parameters.",
# "TIME_TO_RUN" : "{} seconds".format(float(round(processTiming.time() - startTime,2)))}
# )
| 40.426471 | 132 | 0.61586 | 299 | 2,749 | 5.518395 | 0.394649 | 0.050909 | 0.038182 | 0.041818 | 0.24303 | 0.156364 | 0.12 | 0.12 | 0.089697 | 0.089697 | 0 | 0.002591 | 0.297927 | 2,749 | 67 | 133 | 41.029851 | 0.852332 | 0.285922 | 0 | 0.085106 | 0 | 0 | 0.095385 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021277 | false | 0 | 0.297872 | 0 | 0.382979 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
335fb5860b9cecbe5a265b52c492a160ab1d7a7a | 11,078 | py | Python | acme/wrappers/pysc2_wrapper_reaver.py | MEDCOMP/SC2_ACME | 511f5c4388ad4b8ef157e46678cc22bb0a199ad4 | [
"Apache-2.0"
] | null | null | null | acme/wrappers/pysc2_wrapper_reaver.py | MEDCOMP/SC2_ACME | 511f5c4388ad4b8ef157e46678cc22bb0a199ad4 | [
"Apache-2.0"
] | null | null | null | acme/wrappers/pysc2_wrapper_reaver.py | MEDCOMP/SC2_ACME | 511f5c4388ad4b8ef157e46678cc22bb0a199ad4 | [
"Apache-2.0"
] | null | null | null | import sys
import numpy as np
from absl import flags
from pysc2.lib import actions
from pysc2.lib import features
from pysc2.lib import protocol
from pysc2.lib import named_array
from pysc2.env.environment import StepType
from acme import specs
from acme.sc2_types import SC2Space, SC2FuncIdSpace, Space
from acme.sc2_spec import Spec
import dm_env
from pysc2.env import sc2_env
import copy
ACTIONS_MINIGAMES, ACTIONS_MINIGAMES_ALL, ACTIONS_ALL = [
'minigames', 'minigames_all', 'all']
class Pysc2Wrapper(dm_env.Environment):
"""
'minigames' action set is enough to solve all minigames listed in SC2LE
'minigames_all' expands that set with actions that may improve end result, but will drop performance
'all' is the full action set, only necessary for generic agent playing full game with all three races
You can also specify your own action set in the gin config file under SC2Env.action_ids
Full list of available actions https://github.com/deepmind/pysc2/blob/master/pysc2/lib/actions.py#L447-L1008
"""
def __init__(
self,
environment: sc2_env.SC2Env,
spatial_dim=16,
action_ids=ACTIONS_MINIGAMES,
obs_features=None
):
self._env = environment
self.spatial_dim = spatial_dim
# sensible action set for all minigames
        # 25 actions
if not action_ids or action_ids in [ACTIONS_MINIGAMES, ACTIONS_MINIGAMES_ALL]:
action_ids = [0, 1, 2, 3, 4, 6, 7, 12, 13, 42,
44, 50, 79, 91, 183, 234, 309, 331, 332, 333,
334, 451, 477, 452, 490]
# some additional actions for minigames (not necessary to solve)
        # 17 additional actions
if action_ids == ACTIONS_MINIGAMES_ALL:
action_ids += [11, 71, 72, 73, 74, 140, 168,
239, 261, 264, 269, 274, 318, 335, 336, 453, 477]
# full action space, including outdated / unusable to current race / usable only in certain cases
if action_ids == ACTIONS_ALL:
action_ids = [f.id for f in actions.FUNCTIONS]
# by default use majority of obs features, except for some that are unnecessary for minigames
# e.g. race-specific like creep and shields or redundant like player_id
if not obs_features:
obs_features = {
'screen': ['player_relative', 'selected', 'visibility_map', 'unit_hit_points_ratio', 'unit_density'],
'minimap': ['player_relative', 'selected', 'visibility_map', 'camera'],
# available actions should always be present and in first position
'non-spatial': ['available_actions', 'player']}
self.act_wrapper = ActionWrapper(spatial_dim, action_ids)
self.obs_wrapper = ObservationWrapper(obs_features, action_ids)
def step(self, action) -> dm_env.TimeStep:
try:
timestep = self.obs_wrapper(self._env.step(self.act_wrapper(action)))
return timestep
        # hacky fix for the websocket timeout issue...
except protocol.ConnectionError:
self.restart()
def reset(self) -> dm_env.TimeStep:
try:
timestep = self.obs_wrapper(self._env.reset())
return timestep
        # hacky fix for the websocket timeout issue...
except protocol.ConnectionError:
self.restart()
def stop(self):
self._env.close()
# Restart the environment and return a timestep
def restart(self) -> dm_env.TimeStep:
self.stop()
old_environment = self._env
new_environment = sc2_env.SC2Env(
map_name=old_environment._map_name,
players=old_environment._players,
agent_interface_format=old_environment._agent_interface_format,
step_mul=old_environment._step_mul,
save_replay_episodes=old_environment._save_replay_episodes,
replay_dir=old_environment._replay_dir)
self._env = new_environment
return self.reset()
def observation_spec(self) -> Spec:
if not self.obs_wrapper.spec:
self.make_specs()
return self.obs_wrapper.spec
def action_spec(self) -> Spec:
if not self.act_wrapper.spec:
self.make_specs()
return self.act_wrapper.spec
def reward_spec(self) -> Space:
return Space(shape=(), dtype=np.int64, domain=(0,float('inf')), name='reward')
def discount_spec(self) -> Space:
return Space(shape=(), dtype=np.float, domain=(0,1), name='discount')
def make_specs(self):
# importing here to lazy-load
from pysc2.env import mock_sc2_env
mock_env = mock_sc2_env.SC2TestEnv(agent_interface_format=[
features.parse_agent_interface_format(feature_screen=self.spatial_dim, feature_minimap=self.spatial_dim)])
self.act_wrapper.make_spec(mock_env.action_spec())
self.obs_wrapper.make_spec(mock_env.observation_spec())
mock_env.close()
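# A minimal usage sketch (the map name and sizes below are hypothetical; any
# pysc2 minigame map would do):
#
#   env = Pysc2Wrapper(sc2_env.SC2Env(
#       map_name="MoveToBeacon",
#       players=[sc2_env.Agent(sc2_env.Race.terran)],
#       agent_interface_format=features.parse_agent_interface_format(
#           feature_screen=16, feature_minimap=16)))
#   timestep = env.reset()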
class ObservationWrapper:
def __init__(self, _features=None, action_ids=None):
self.spec = None
self.features = _features
self.action_ids = action_ids
screen_feature_to_idx = {feat: idx for idx, feat in enumerate(
features.SCREEN_FEATURES._fields)}
minimap_feature_to_idx = {feat: idx for idx, feat in enumerate(
features.MINIMAP_FEATURES._fields)}
# use feature masks to select necessary features for minigames.
self.feature_masks = {
'screen': [screen_feature_to_idx[f] for f in _features['screen']],
'minimap': [minimap_feature_to_idx[f] for f in _features['minimap']]
}
def __call__(self, timestep) -> dm_env.TimeStep:
timestep = timestep[0]
obs = timestep.observation
obs_wrapped = [
obs['feature_screen'][self.feature_masks['screen']],
obs['feature_minimap'][self.feature_masks['minimap']]
]
for feat_name in self.features['non-spatial']:
if feat_name == 'available_actions':
fn_ids_idxs = [i for i, fn_id in enumerate(
self.action_ids) if fn_id in obs[feat_name]]
mask = np.zeros((len(self.action_ids),), dtype=np.int32)
mask[fn_ids_idxs] = 1
obs[feat_name] = mask
obs_wrapped.append(obs[feat_name])
wrapped_timestep = dm_env.TimeStep(observation=obs_wrapped,
reward=timestep.reward,
step_type=timestep.step_type,
discount=timestep.discount)
return wrapped_timestep
def make_spec(self, spec):
spec = spec[0]
default_dims = {
'available_actions': (len(self.action_ids), ),
}
screen_shape = (
len(self.features['screen']), *spec['feature_screen'][1:])
minimap_shape = (
len(self.features['minimap']), *spec['feature_minimap'][1:])
screen_dims = get_spatial_dims(
self.features['screen'], features.SCREEN_FEATURES)
minimap_dims = get_spatial_dims(
self.features['minimap'], features.MINIMAP_FEATURES)
spaces = [
SC2Space(screen_shape, 'screen',
self.features['screen'], screen_dims),
SC2Space(minimap_shape, 'minimap',
self.features['minimap'], minimap_dims),
]
for feat in self.features['non-spatial']:
if 0 in spec[feat]:
spec[feat] = default_dims[feat]
spaces.append(Space(spec[feat], name=feat))
self.spec = Spec(spaces, 'Observation')
class ActionWrapper:
def __init__(self, spatial_dim, action_ids, args=None):
self.spec = None
if not args:
args = [
'screen',
'minimap',
'screen2',
'queued',
'control_group_act',
'control_group_id',
'select_add',
'select_point_act',
'select_unit_act',
# 'select_unit_id'
'select_worker',
'build_queue_id',
# 'unload_id'
]
self.func_ids = action_ids
self.args, self.spatial_dim = args, spatial_dim
def __call__(self, action):
        # create a deep copy of the action list so that it won't mutate the original
action_to_func_call = copy.deepcopy(action)
default = {
'control_group_act': 0,
'control_group_id': 0,
'select_point_act': 0,
'select_unit_act': 0,
'select_unit_id': 0,
'build_queue_id': 0,
'unload_id': 0,
}
# fn_id_idx is the index position of the chosen function_id in the list "func_ids"
fn_id_idx, args = action_to_func_call.pop(0), []
# get the real function id defined in pysc2 library
fn_id = self.func_ids[fn_id_idx]
# create the argument list for the function id chosen
for arg_type in actions.FUNCTIONS[fn_id].args:
arg_name = arg_type.name
if arg_name in self.args:
arg = action_to_func_call[self.args.index(arg_name)]
                # pysc2 expects each argument wrapped in its own list
if type(arg) not in [list, tuple]:
arg = [arg]
                # pysc2 expects spatial coords, but ours are flattened => attempt to fix.
                # Under the random agent the spatial coordinates are not flattened,
                # so the conversion below is left commented out.
# if len(arg_type.sizes) > 1 and len(arg) == 1:
# arg = [arg[0] % self.spatial_dim,
# arg[0] // self.spatial_dim]
args.append(arg)
else:
arg = [default[arg_name]]
args.append(arg)
return [actions.FunctionCall(fn_id, args)]
def make_spec(self, spec):
spec = spec[0]
spaces = [SC2FuncIdSpace(self.func_ids, self.args)]
for arg_name in self.args:
arg = getattr(spec.types, arg_name)
if len(arg.sizes) > 1:
                # a two-element array describes the spatial (x, y) information
                # example:
                # Space "minimap", value: (2, 3)
spaces.append(
Space(shape=(2,), domain=(0, self.spatial_dim), categorical=True, name=arg_name))
else:
spaces.append(
Space(shape=(), domain=(0, arg.sizes[0]), categorical=True, name=arg_name))
self.spec = Spec(spaces, "Action")
def get_spatial_dims(feat_names, feats):
feats_dims = []
for feat_name in feat_names:
feat = getattr(feats, feat_name)
feats_dims.append(1)
if feat.type == features.FeatureType.CATEGORICAL:
feats_dims[-1] = feat.scale
return feats_dims
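

# Usage sketch (not part of the original module): shows how ActionWrapper
# turns a flat agent action into a pysc2 FunctionCall. The action_ids below
# are illustrative; function id 0 is pysc2's no_op, which takes no arguments,
# so only the function index is consumed from the flat action list.
if __name__ == '__main__':
    wrapper = ActionWrapper(spatial_dim=16, action_ids=[0, 1, 2])
    print(wrapper([0]))  # one-element list containing the no_op FunctionCall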
| 37.680272 | 118 | 0.599115 | 1,345 | 11,078 | 4.711524 | 0.234944 | 0.026984 | 0.017674 | 0.011362 | 0.181 | 0.127821 | 0.097207 | 0.086476 | 0.057756 | 0.057756 | 0 | 0.023368 | 0.308539 | 11,078 | 293 | 119 | 37.808874 | 0.803916 | 0.166817 | 0 | 0.10628 | 0 | 0 | 0.071109 | 0.00229 | 0 | 0 | 0 | 0 | 0 | 1 | 0.082126 | false | 0 | 0.072464 | 0.009662 | 0.217391 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
336163156d46f4ac30959298aface46183260e72 | 5,508 | py | Python | build/bdist.win-amd64/wheel/tests/test_monitor_changes.py | GouravRDutta/IemLabsAV | 8d397a3d59e067176269c5e84d73bf53951b7b3f | [
"MIT"
] | null | null | null | build/bdist.win-amd64/wheel/tests/test_monitor_changes.py | GouravRDutta/IemLabsAV | 8d397a3d59e067176269c5e84d73bf53951b7b3f | [
"MIT"
] | null | null | null | build/bdist.win-amd64/wheel/tests/test_monitor_changes.py | GouravRDutta/IemLabsAV | 8d397a3d59e067176269c5e84d73bf53951b7b3f | [
"MIT"
] | 1 | 2021-07-02T12:29:10.000Z | 2021-07-02T12:29:10.000Z | # -*- coding: utf-8 -*-
import unittest
from iemlav.lib.antivirus.monitor.monitor_changes import MonitorChanges
from iemlav.lib.antivirus.antivirus_logger import AntiVirusLogger
try:
# if python 3.x.x
from unittest.mock import patch
except ImportError: # python 2.x.x
from mock import patch
class TestMonitorChanges(unittest.TestCase):
"""
Test class for SecureTea AntiVirus MonitorChanges.
"""
@patch.object(MonitorChanges, "check_uid")
@patch.object(MonitorChanges, "get_initial_uid")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.utils")
@patch.object(AntiVirusLogger, "log")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.file_gather")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.os")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.time")
def test_check_file(self, mck_time, mck_os, mck_fg, mck_log, mck_utils, mck_giu, mck_cu):
"""
Test check_file.
"""
# Mock class creation
mck_utils.json_to_dict.return_value = {
"debian": {
"update": {
"hash": {
"storage": "/etc/iemlav/antivirus/md5_hash/"
},
"yara": {
"storage": "/etc/iemlav/antivirus/yara/"
}
},
"scanner": {
"malicious_file_log_path": "/etc/iemlav/antivirus/malicious_files.log",
"hash": {
"threads": 2
},
"yara": {
"threads": 2
},
"clamav": {
"threads": 2
}
},
"monitor": {
"threshold_min": 20,
"password_log_file": "/etc/passwd"
}
}
}
mck_utils.categorize_os.return_value = "debian"
mck_fg.GatherFile.return_value = True
mck_giu.return_value = []
        # Mock necessary return values
mck_time.time.return_value = 1
mck_os.path.getmtime.return_value = 1
mck_cu.return_value = False
# Create MonitorChanges object
monitor_changes_obj = MonitorChanges(config_path="random_path")
# Case 1: When time difference is greater than threshold
monitor_changes_obj._THRESHOLD = -1
monitor_changes_obj.check_file("random_path")
self.assertFalse(mck_log.called)
# Case 2: When time difference is less than threshold
monitor_changes_obj._THRESHOLD = 10
monitor_changes_obj.check_file("random_path")
mck_log.assert_called_with('File: random_path recently modified or created',
logtype='warning')
@patch.object(MonitorChanges, "get_initial_uid")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.file_gather")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.utils")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.os")
def test_check_uid(self, mck_os, mck_utils, mck_fg, mck_giu):
"""
Test check_uid.
"""
# Mock class creation
mck_utils.json_to_dict.return_value = {
"debian": {
"update": {
"hash": {
"storage": "/etc/iemlav/antivirus/md5_hash/"
},
"yara": {
"storage": "/etc/iemlav/antivirus/yara/"
}
},
"scanner": {
"malicious_file_log_path": "/etc/iemlav/antivirus/malicious_files.log",
"hash": {
"threads": 2
},
"yara": {
"threads": 2
},
"clamav": {
"threads": 2
}
},
"monitor": {
"threshold_min": 20,
"password_log_file": "/etc/passwd"
}
}
}
mck_utils.categorize_os.return_value = "debian"
mck_fg.GatherFile.return_value = True
mck_giu.return_value = []
        # Mock necessary return values
mck_os.stat.st_uid.return_value = 1
# Create MonitorChanges object
monitor_changes_obj = MonitorChanges(config_path="random_path")
bool_data = monitor_changes_obj.check_uid("random_file")
self.assertTrue(bool_data)
@patch("iemlav.lib.antivirus.monitor.monitor_changes.file_gather")
@patch("iemlav.lib.antivirus.monitor.monitor_changes.utils")
def test_get_initial_uid(self, mck_utils, mck_fg):
"""
Test get_initial_uid.
"""
# Mock class creation
mck_utils.json_to_dict.return_value = {
"debian": {
"update": {
"hash": {
"storage": "/etc/iemlav/antivirus/md5_hash/"
},
"yara": {
"storage": "/etc/iemlav/antivirus/yara/"
}
},
"scanner": {
"malicious_file_log_path": "/etc/iemlav/antivirus/malicious_files.log",
"hash": {
"threads": 2
},
"yara": {
"threads": 2
},
"clamav": {
"threads": 2
}
},
"monitor": {
"threshold_min": 20,
"password_log_file": "/etc/passwd"
}
}
}
mck_utils.categorize_os.return_value = "debian"
mck_fg.GatherFile.return_value = True
mck_utils.open_file.return_value = ["root:x:0:0:root:/root:/bin/bash"]
# Create MonitorChanges object
monitor_changes_obj = MonitorChanges(config_path="random_path")
self.assertEqual(monitor_changes_obj.verified_uid_list, [0])
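

# Refactoring sketch (not part of the original tests): the same mock
# configuration dictionary is built verbatim in all three tests above.
# Hoisting it into a module-level constant keeps the fixtures in sync;
# MOCK_CONFIG is a hypothetical name introduced here for illustration.
MOCK_CONFIG = {
    "debian": {
        "update": {
            "hash": {"storage": "/etc/iemlav/antivirus/md5_hash/"},
            "yara": {"storage": "/etc/iemlav/antivirus/yara/"}
        },
        "scanner": {
            "malicious_file_log_path": "/etc/iemlav/antivirus/malicious_files.log",
            "hash": {"threads": 2},
            "yara": {"threads": 2},
            "clamav": {"threads": 2}
        },
        "monitor": {
            "threshold_min": 20,
            "password_log_file": "/etc/passwd"
        }
    }
}
# Each test could then use: mck_utils.json_to_dict.return_value = MOCK_CONFIG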
| 31.655172 | 93 | 0.566993 | 570 | 5,508 | 5.214035 | 0.198246 | 0.089502 | 0.066622 | 0.084118 | 0.686406 | 0.686406 | 0.647039 | 0.608008 | 0.59354 | 0.588493 | 0 | 0.008479 | 0.314815 | 5,508 | 173 | 94 | 31.83815 | 0.779014 | 0.080428 | 0 | 0.574627 | 0 | 0 | 0.283615 | 0.172705 | 0 | 0 | 0 | 0 | 0.029851 | 1 | 0.022388 | false | 0.022388 | 0.044776 | 0 | 0.074627 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3362f951d66a5b5929d2799a04e88f1404c05905 | 462 | py | Python | campam.py | avikram553/opencv | f10706ffa9eb1688550953102112298ac8d13454 | [
"MIT"
] | null | null | null | campam.py | avikram553/opencv | f10706ffa9eb1688550953102112298ac8d13454 | [
"MIT"
] | null | null | null | campam.py | avikram553/opencv | f10706ffa9eb1688550953102112298ac8d13454 | [
"MIT"
] | null | null | null | import cv2
cap = cv2.VideoCapture(0)
print(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
print(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# Property ids 3 and 4 are CAP_PROP_FRAME_WIDTH and CAP_PROP_FRAME_HEIGHT.
cap.set(3, 300)
cap.set(4, 160)
print(cap.get(3))
print(cap.get(4))
while cap.isOpened():
    ret, frame = cap.read()
    if ret:
        # Webcam frames are 3-channel BGR, so convert with COLOR_BGR2GRAY
        # (COLOR_BGRA2GRAY expects a 4-channel BGRA image).
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        cv2.imshow('frame', gray)
        # Mask waitKey to its low byte before comparing against the key code.
        if cv2.waitKey(1) & 0xFF == ord('b'):
            break
    else:
        break
cap.release()
cv2.destroyAllWindows()
33650b9e5951913219846b9786e8e37b04532f01 | 3,512 | py | Python | tests/run_send_recv.py | justengel/ipc_mngr | 3883857e701600105cba93473dd7731ff55d1bad | [
"MIT"
] | null | null | null | tests/run_send_recv.py | justengel/ipc_mngr | 3883857e701600105cba93473dd7731ff55d1bad | [
"MIT"
] | null | null | null | tests/run_send_recv.py | justengel/ipc_mngr | 3883857e701600105cba93473dd7731ff55d1bad | [
"MIT"
] | null | null | null | """
Simple example of a service that saves items which can be added or requested from a separate process.
Example:
..code-block:: python
$ python tests/run_send_recv.py --listen
# New terminal
$ python tests/run_send_recv.py --send --name abc --value 1
$ python tests/run_send_recv.py --send --name fun --value 2
$ python tests/run_send_recv.py --list
$ List Items:
$ Item: abc = 1
$ Item: fun = 2
"""
import ipc_mngr
import argparse
class Item(object):
def __init__(self, name='', value=0):
self.name = name
self.value = value
class ListItems(object):
def __init__(self, items=None):
self.items = items or []
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(description='Listen for commands from a separate process or send commands')
PARSER.add_argument('--address', default=ipc_mngr.MY_IP, type=str, help='IP Address to connect to')
PARSER.add_argument('--port', default=54212, type=int, help='Port to connect with')
PARSER.add_argument('--authkey', default=None, type=bytes, help='Password to protect the connection')
PARSER.add_argument('--list', action='store_true', help='Send the list items command.')
PARSER.add_argument('--send', action='store_true', help='If this is a command to send an item.')
PARSER.add_argument('--name', default=None, type=str, help='Item name to send to the listener')
PARSER.add_argument('--value', default=0, type=int, help='Item value to send to the listener')
args = PARSER.parse_args()
if args.list:
# ===== Send the ListItems Command and print the list of items when received =====
with ipc_mngr.Client((args.address, args.port), authkey=args.authkey) as client:
# Send the command
client.send(ListItems())
# Receive the ListItems filled with items to print
msg = client.recv()
if isinstance(msg, ListItems):
print('List Items:')
for item in msg.items:
print('\tItem: {} = {}'.format(item.name, item.value))
else:
raise ipc_mngr.IPCError('Invalid response message. The response message should have been ListItems')
elif args.send:
# ===== Send an item =====
name, value = args.name, args.value
if name is None:
name, value = input('Enter Name=value: ').split('=')
item = Item(name.strip(), int(value))
with ipc_mngr.Client((args.address, args.port), authkey=args.authkey) as client:
# Send the command
client.send(item)
else:
# ===== Listen for commands =====
ITEMS = {}
def msg_handler(sock, cmd):
"""Handle received commands.
Args:
sock (multiprocessing.connection.Client): Client socket that received the command.
cmd (object): Command object that was received.
"""
if isinstance(cmd, Item):
# Store the sent item
ITEMS[cmd.name] = cmd.value
elif isinstance(cmd, ListItems):
# Return a list of all items.
li = ListItems([Item(k, v) for k, v in ITEMS.items()])
sock.send(li)
listener = ipc_mngr.Listener((args.address, args.port), authkey=args.authkey)
listener.msg_handler = msg_handler
print("listening ...")
listener.listen() # Listen forever
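
# Note (inferred from the msg_handler docstring above): ipc_mngr sockets wrap
# multiprocessing.connection objects, so any command object passed to
# client.send() must be picklable on both ends of the connection.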
| 36.206186 | 116 | 0.604499 | 444 | 3,512 | 4.682432 | 0.288288 | 0.030303 | 0.057239 | 0.034632 | 0.172679 | 0.154401 | 0.131313 | 0.113516 | 0.082732 | 0.082732 | 0 | 0.004319 | 0.274772 | 3,512 | 96 | 117 | 36.583333 | 0.811936 | 0.257688 | 0 | 0.083333 | 0 | 0 | 0.188412 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0.020833 | 0.041667 | 0 | 0.145833 | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
336789f10e8a30a71bee60a5b77f211cd6f19508 | 2,545 | py | Python | text/en/sentiments.py | LisaDawn/TextBlob | 6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9 | [
"MIT"
] | 2 | 2015-03-10T14:33:16.000Z | 2015-03-10T14:33:18.000Z | text/en/sentiments.py | LisaDawn/TextBlob | 6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9 | [
"MIT"
] | null | null | null | text/en/sentiments.py | LisaDawn/TextBlob | 6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Sentiment analysis implementations.
.. versionadded:: 0.5.0
"""
from __future__ import absolute_import
from text.packages import nltk
from text.en import sentiment as pattern_sentiment
from text.tokenizers import WordTokenizer
from text.decorators import requires_nltk_corpus
from text.base import BaseSentimentAnalyzer, DISCRETE, CONTINUOUS
class PatternAnalyzer(BaseSentimentAnalyzer):
'''Sentiment analyzer that uses the same implementation as the
pattern library. Returns results as a tuple of the form:
``(polarity, subjectivity)``
'''
kind = CONTINUOUS
def analyze(self, text):
"""Return the sentiment as a tuple of the form:
``(polarity, subjectivity)``
"""
return pattern_sentiment(text)
class NaiveBayesAnalyzer(BaseSentimentAnalyzer):
'''Naive Bayes analyzer that is trained on a dataset of movie reviews.
Returns results as a tuple of the form:
``(classification, pos_probability, neg_probability)``
'''
kind = DISCRETE
def __init__(self):
super(NaiveBayesAnalyzer, self).__init__()
self._classifier = None
@requires_nltk_corpus
def train(self):
'''Train the Naive Bayes classifier on the movie review corpus.'''
super(NaiveBayesAnalyzer, self).train()
neg_ids = nltk.corpus.movie_reviews.fileids('neg')
pos_ids = nltk.corpus.movie_reviews.fileids('pos')
neg_feats = [(self._extract_feats(
nltk.corpus.movie_reviews.words(fileids=[f])), 'neg') for f in neg_ids]
pos_feats = [(self._extract_feats(
nltk.corpus.movie_reviews.words(fileids=[f])), 'pos') for f in pos_ids]
train_data = neg_feats + pos_feats
self._classifier = nltk.classify.NaiveBayesClassifier.train(train_data)
def _extract_feats(self, words):
return dict([(word, True) for word in words])
def analyze(self, text):
"""Return the sentiment as a tuple of the form:
``(classification, pos_probability, neg_probability)``
"""
# Lazily train the classifier
super(NaiveBayesAnalyzer, self).analyze(text)
tokenizer = WordTokenizer()
tokens = tokenizer.tokenize(text, include_punc=False)
filtered = [t.lower() for t in tokens if len(t) >= 3]
feats = self._extract_feats(filtered)
prob_dist = self._classifier.prob_classify(feats)
# classification, p_pos, p_neg
return prob_dist.max(), prob_dist.prob('pos'), prob_dist.prob("neg")
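

if __name__ == "__main__":
    # Usage sketch (assumes NLTK's movie_reviews corpus is available; the
    # Naive Bayes analyzer trains lazily on its first analyze() call).
    print(PatternAnalyzer().analyze("Simple is better than complex."))
    print(NaiveBayesAnalyzer().analyze("Simple is better than complex."))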
| 34.391892 | 83 | 0.680157 | 307 | 2,545 | 5.456026 | 0.325733 | 0.035821 | 0.019104 | 0.023881 | 0.279403 | 0.279403 | 0.241194 | 0.241194 | 0.190448 | 0.190448 | 0 | 0.002509 | 0.216896 | 2,545 | 73 | 84 | 34.863014 | 0.837933 | 0.269548 | 0 | 0.055556 | 0 | 0 | 0.010198 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.138889 | false | 0 | 0.166667 | 0.027778 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3368ca2d4f36f2a642bcfa09b98a2e35513fac7a | 1,938 | py | Python | meta_data/hadoop_wordcount.py | hilsdsg3/Google_Cloud_Platform_Demos | 891314269d078c133fa82f69d23f051c88879ad5 | [
"MIT"
] | null | null | null | meta_data/hadoop_wordcount.py | hilsdsg3/Google_Cloud_Platform_Demos | 891314269d078c133fa82f69d23f051c88879ad5 | [
"MIT"
] | null | null | null | meta_data/hadoop_wordcount.py | hilsdsg3/Google_Cloud_Platform_Demos | 891314269d078c133fa82f69d23f051c88879ad5 | [
"MIT"
] | null | null | null | import datetime
import os
from airflow import models
from airflow.contrib.operators import dataproc_operator
from airflow.utils import trigger_rule
output_file = os.path.join(
models.Variable.get('gcs_bucket'), 'wordcount',
datetime.datetime.now().strftime('%Y%m%d-%H%M%S')) + os.sep
WORDCOUNT_JAR = (
'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
)
wordcount_args = ['wordcount', 'gs://us-central1-spikey-composer-486ba46f-bucket/dags/doc.txt', output_file]
yesterday = datetime.datetime.combine(
datetime.datetime.today() - datetime.timedelta(1),
datetime.datetime.min.time())
default_dag_args = {
'start_date': yesterday,
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': datetime.timedelta(minutes=5),
'project_id': models.Variable.get('gcp_project')
}
with models.DAG(
'composer_hadoop_wordcount',
schedule_interval=datetime.timedelta(days=1),
default_args=default_dag_args) as dag:
create_dataproc_cluster = dataproc_operator.DataprocClusterCreateOperator(
task_id='create_dataproc_cluster',
cluster_name='spikey-wordcount-cluster-{{ ds_nodash }}',
num_workers=2,
zone=models.Variable.get('gce_zone'),
master_machine_type='n1-standard-1',
worker_machine_type='n1-standard-1')
run_dataproc_hadoop = dataproc_operator.DataProcHadoopOperator(
task_id='run_dataproc_hadoop',
main_jar=WORDCOUNT_JAR,
cluster_name='spikey-wordcount-cluster-{{ ds_nodash }}',
arguments=wordcount_args)
delete_dataproc_cluster = dataproc_operator.DataprocClusterDeleteOperator(
task_id='delete_dataproc_cluster',
cluster_name='spikey-wordcount-cluster-{{ ds_nodash }}',
trigger_rule=trigger_rule.TriggerRule.ALL_DONE)
create_dataproc_cluster >> run_dataproc_hadoop >> delete_dataproc_cluster | 30.28125 | 108 | 0.720846 | 230 | 1,938 | 5.791304 | 0.43913 | 0.067568 | 0.038288 | 0.058559 | 0.147898 | 0.114865 | 0.114865 | 0.084084 | 0.084084 | 0 | 0 | 0.009225 | 0.160991 | 1,938 | 64 | 109 | 30.28125 | 0.809963 | 0 | 0 | 0.068182 | 0 | 0 | 0.25116 | 0.141826 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.113636 | 0 | 0.113636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
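    # The `>>` chaining above is Airflow's bit-shift dependency syntax; it is
    # equivalent to (same semantics, shown for clarity):
    #   create_dataproc_cluster.set_downstream(run_dataproc_hadoop)
    #   run_dataproc_hadoop.set_downstream(delete_dataproc_cluster)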
336becc87f518a09b3356159d36dc59b489cb583 | 5,555 | py | Python | gretel/auto_balance_dataset/bias_bp_graphs.py | reetam-ganguli/gretel-blueprints | 43191178afb43082b24f2ef846fda4256aafdb11 | [
"Apache-2.0"
] | 30 | 2020-10-27T20:00:24.000Z | 2022-02-07T13:21:25.000Z | gretel/auto_balance_dataset/bias_bp_graphs.py | reetam-ganguli/gretel-blueprints | 43191178afb43082b24f2ef846fda4256aafdb11 | [
"Apache-2.0"
] | 14 | 2021-04-29T20:49:34.000Z | 2022-01-24T19:06:48.000Z | gretel/auto_balance_dataset/bias_bp_graphs.py | reetam-ganguli/gretel-blueprints | 43191178afb43082b24f2ef846fda4256aafdb11 | [
"Apache-2.0"
] | 18 | 2021-01-05T09:25:39.000Z | 2022-03-19T14:58:34.000Z | import math
from typing import Tuple, Dict
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import pandas as pd
_GRETEL_PALETTE = ['#C18DFC', '#47E0B3']
_GRAPH_OPACITY = 0.75
_GRAPH_BARGAP = 0.2
_GRAPH_BARGROUPGAP = .1
_GRAPH_MAX_BARS = 1000
def get_graph_dimen(fields: dict, uniq_cnt_threshold: int) -> Tuple[int,int]:
"""
Helper function to first figure out how many graphs we'll be
displaying, and then based on that determine the appropriate
row and column count for display
"""
graph_cnt = 0
for field in fields:
if fields[field]["cardinality"] <= uniq_cnt_threshold:
graph_cnt += 1
col_cnt = 1
if uniq_cnt_threshold <= 50:
col_cnt = min(3, graph_cnt)
elif uniq_cnt_threshold <= 100:
col_cnt = min(2, graph_cnt)
else:
col_cnt = 1
row_cnt = math.ceil(graph_cnt/col_cnt)
return row_cnt, col_cnt
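
# Worked example (sketch): with 7 qualifying fields and
# uniq_cnt_threshold <= 50, col_cnt = min(3, 7) = 3 and
# row_cnt = ceil(7 / 3) = 3, i.e. a 3x3 grid with two empty cells.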
def get_distrib_show(distrib: Dict[str, float]) -> Dict[str, float]:
"""
    Plotly slightly freaks out with more than 1000 bars, so in the remote
    chance the user chooses to see graphs with more than 1000 unique values,
    limit the graph to the first _GRAPH_MAX_BARS entries of the distribution.
"""
if len(distrib) <= _GRAPH_MAX_BARS:
return distrib
cnt = 0
new_distrib = {}
for field in distrib:
new_distrib[field] = distrib[field]
cnt += 1
if cnt == _GRAPH_MAX_BARS:
return new_distrib
def show_field_graphs(fields: dict, uniq_cnt_threshold=10):
"""
This function takes the categorical fields in a project that have
a unique value count less than or equal to the parameter
"uniq_cnt_threshold" and displays their current distributions
using plotly bar charts. The number of columns used to display
the graphs will depend on this value as well.
"""
row_cnt, col_cnt = get_graph_dimen(fields, uniq_cnt_threshold)
titles = []
for field in fields:
if fields[field]["cardinality"] <= uniq_cnt_threshold:
titles.append(field)
shared_yaxes = True
if col_cnt == 1:
shared_yaxes = False
fig = make_subplots(rows=row_cnt, cols=col_cnt, shared_yaxes=shared_yaxes, subplot_titles=titles)
row = 1
col = 1
for field in fields:
if fields[field]["cardinality"] <= uniq_cnt_threshold:
distrib = get_distrib_show(fields[field]["distrib"])
fig.add_trace(
go.Bar(
x=list(distrib.keys()),
y=list(distrib.values()),
name=field
),
row,
col
)
if col == col_cnt:
col = 1
row += 1
else:
col += 1
height = (700 / col_cnt) * row_cnt
fig.update_layout(
height=height,
width=900,
showlegend=False,
title='<span style="font-size: 16px;">Existing Categorical Field Distributions</span>',
font=dict(
size=8,
color="RebeccaPurple"
)
)
fig.show()
def get_new_distrib(field: pd.Series) -> Dict[str, float]:
"""
Even though we know what the new distribution will be, here
we compute it fresh from the new data as a sanity check
"""
distribution = {}
for v in field:
distribution[str(v)] = distribution.get(str(v), 0) + 1
series_len = float(len(field))
for k in distribution.keys():
distribution[k] = distribution[k] / series_len
return distribution
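
# Worked example (sketch): get_new_distrib(pd.Series(["a", "a", "b"]))
# returns {"a": 2/3, "b": 1/3}.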
def show_bar_chart(orig: Dict[str, float], new: Dict[str, float], field: str, mode: str):
"""
This function takes two distributions (orig and new), along
with the name of the field and mode and plots the
distributions on the same graph
"""
fig = go.Figure()
fig.add_trace(
go.Bar(
x=list(orig.keys()),
y=list(orig.values()),
name='Training',
marker_color=_GRETEL_PALETTE[0],
opacity=_GRAPH_OPACITY
)
)
name = "Synthetic"
if mode == "additive":
name = "Training + Synthetic"
fig.add_trace(
go.Bar(
x=list(new.keys()),
y=list(new.values()),
name=name,
marker_color=_GRETEL_PALETTE[1],
opacity=_GRAPH_OPACITY
)
)
fig.update_layout(
title='<span style="font-size: 16px;">Field: ' + field + '</span>',
yaxis_title_text='Percentage',
bargap=_GRAPH_BARGAP,
bargroupgap=_GRAPH_BARGROUPGAP,
barmode='group'
)
fig.show()
def show_new_graphs(project_info: dict, synth_df: pd.DataFrame):
"""
This function is called at the conclusion of the synth auto-balance notebook to take
a look at how the new distributions compare to the original
"""
new_df = pd.DataFrame()
if project_info["mode"] == "additive":
new_df = pd.concat([project_info["records"], synth_df], ignore_index=True)
else:
new_df = synth_df
for field in project_info["field_stats"]:
if project_info["field_stats"][field]["use"]:
new = pd.Series(new_df[field]).dropna()
new_distrib = get_new_distrib(new)
show_bar_chart(project_info["field_stats"][field]["distrib"], new_distrib, field, project_info["mode"])
| 29.547872 | 115 | 0.590999 | 706 | 5,555 | 4.466006 | 0.29745 | 0.020932 | 0.045671 | 0.015224 | 0.12274 | 0.089756 | 0.073264 | 0.053283 | 0.053283 | 0.053283 | 0 | 0.016785 | 0.313591 | 5,555 | 188 | 116 | 29.547872 | 0.810123 | 0.185779 | 0 | 0.2 | 0 | 0 | 0.072064 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.048 | false | 0 | 0.04 | 0 | 0.12 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
336c5d5b7a849ace0a1f80484cda8a4bdfd1096e | 2,586 | py | Python | ircrtgate/hello.py | EliotNapa/slack_irc_rt_gate | 84fac9f78bfa7ea9157031a17515000a78af3e86 | [
"MIT"
] | null | null | null | ircrtgate/hello.py | EliotNapa/slack_irc_rt_gate | 84fac9f78bfa7ea9157031a17515000a78af3e86 | [
"MIT"
] | null | null | null | ircrtgate/hello.py | EliotNapa/slack_irc_rt_gate | 84fac9f78bfa7ea9157031a17515000a78af3e86 | [
"MIT"
] | null | null | null | #coding: UTF-8
import re
import urllib.parse
from xml.sax.saxutils import unescape
from slackbot.bot import listen_to
from ircbot.iso2022encx import Iso2022jpEncX
@listen_to('.+$')
def hello_send(message):
"""
    Send a Slack message to IRC.
"""
msg_str = Iso2022jpEncX.regularize('{0}'.format(message.body['text']))
msg_str = url_convert(msg_str)
user_str = Iso2022jpEncX.regularize('{0}'.format(message.body['username']))
#send_str = unescape(user_str), unescape(msg_str)
rest_str = unescape(msg_str)
send_usr_str = unescape(user_str)
send_usr_len = len(send_usr_str)
    rest_str_len = len(rest_str)
    index = 0
    # IRC messages are capped at 512 bytes; trim the text, leaving headroom
    # for the user name and protocol overhead.
    while len(rest_str.encode('utf-8')) > 512 - (send_usr_len + 50):
        rest_str = rest_str[0:rest_str_len - index]
        index += 1
#send_str = send_usr_str, rest_str
message._client.irc_bot.send_to_irc(send_usr_str, rest_str)
#message._client.irc_bot.send_to_irc('({1}) {0}'.format(message.body['text'], message.body['username']))
#message.send('{0} {1}!'.format(message.body['text'], message.body['username']))
def url_convert(src_string):
"""
    Percent-encode the last path segment of a URL.
"""
result = src_string
match = re.search(r'<ht.+\|ht.+>',src_string)
if match:
result = re.findall(r'<ht.+\|(ht.+)>',src_string)
result = '<' + result[0] + '>'
match_enc = re.search(r'<http.+\/\/.+>',result)
Encode_Path_only = True
if match_enc:
if Encode_Path_only:
fragment_pos = result.rfind('#')
work_src = result[:-1]
fragment_part = ''
if 0 < fragment_pos:
fragment_part = work_src[fragment_pos:]
work_src = work_src[:fragment_pos]
param_part = ''
param_pos = work_src.rfind('?')
if 0 < param_pos:
param_part = work_src[param_pos:]
work_src = work_src[:param_pos]
path_part = ''
path_pos = work_src.rfind('/')
if 0 < path_pos:
path_part = urllib.parse.unquote(work_src[path_pos:])
path_part = urllib.parse.quote(path_part)
work_src = work_src[:path_pos]
result = work_src + path_part + param_part + fragment_part + '>'
else:
find_pos = result.find('://')
if 0 < find_pos:
result = '<{0}://{1}>'.format(
result[1:find_pos],
urllib.parse.quote(result[find_pos+3:-1])
)
return result
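

# Worked example (sketch, illustrative URL): url_convert('<http://example.com/a path>')
# returns '<http://example.com/a%20path>' -- only the last path segment is
# percent-encoded; scheme, host, query string, and fragment are preserved.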
| 30.069767 | 108 | 0.575793 | 333 | 2,586 | 4.183183 | 0.228228 | 0.065327 | 0.048816 | 0.038765 | 0.296482 | 0.248385 | 0.185212 | 0.064609 | 0.064609 | 0.064609 | 0 | 0.023268 | 0.285383 | 2,586 | 85 | 109 | 30.423529 | 0.730519 | 0.12645 | 0 | 0 | 0 | 0 | 0.038669 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036364 | false | 0 | 0.090909 | 0 | 0.145455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3370e9f49de198f970a6658f78efe118682630d5 | 704 | py | Python | mpf/devices/kickback.py | Wolfmarsh/mpf | ad71f381ce8a0e65f28958e51cf8a8b38a6154fb | [
"MIT"
] | null | null | null | mpf/devices/kickback.py | Wolfmarsh/mpf | ad71f381ce8a0e65f28958e51cf8a8b38a6154fb | [
"MIT"
] | null | null | null | mpf/devices/kickback.py | Wolfmarsh/mpf | ad71f381ce8a0e65f28958e51cf8a8b38a6154fb | [
"MIT"
] | null | null | null | """A kickback device which will fire a ball back into the playfield."""
from mpf.core.device_monitor import DeviceMonitor
from mpf.devices.autofire import AutofireCoil
@DeviceMonitor(_enabled="enabled")
class Kickback(AutofireCoil):
"""A kickback device which will fire a ball back into the playfield."""
config_section = 'kickbacks'
collection = 'kickbacks'
class_label = 'kickback'
__slots__ = []
def _hit(self):
"""Post fired event."""
super()._hit()
if self._enabled:
self.machine.events.post("kickback_{}_fired".format(self.name))
'''event: kickback_(name)_fired
desc: Kickback fired a ball.
'''
| 27.076923 | 75 | 0.649148 | 81 | 704 | 5.45679 | 0.506173 | 0.033937 | 0.067873 | 0.090498 | 0.239819 | 0.239819 | 0.239819 | 0.239819 | 0.239819 | 0.239819 | 0 | 0 | 0.237216 | 704 | 25 | 76 | 28.16 | 0.823091 | 0.211648 | 0 | 0 | 0 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.166667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
337191f9082feadb6485ba0e3bf696b86cabe146 | 10,822 | py | Python | main_eval.py | SDNAFIO/LearnTrajDep | 8d55963a52a50117a40e87521038f2c7710cd679 | [
"MIT"
] | null | null | null | main_eval.py | SDNAFIO/LearnTrajDep | 8d55963a52a50117a40e87521038f2c7710cd679 | [
"MIT"
] | null | null | null | main_eval.py | SDNAFIO/LearnTrajDep | 8d55963a52a50117a40e87521038f2c7710cd679 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""overall code framework is adapped from https://github.com/weigq/3d_pose_baseline_pytorch"""
from __future__ import print_function, absolute_import, division
import os
import time
import torch
import torch.nn as nn
import torch.optim
from torch.utils.data import DataLoader
import numpy as np
from progress.bar import Bar
import pandas as pd
from utils import loss_funcs, utils as utils
from utils.opt import Options
from utils.h36motion import H36motion
import utils.model as nnmodel
import utils.data_utils as data_utils
def main(opt):
start_epoch = 0
err_best = 10000
lr_now = opt.lr
is_cuda = torch.cuda.is_available()
opt.is_load = True
# define log csv file
script_name = os.path.basename(__file__).split('.')[0]
script_name = script_name + "_in{:d}_out{:d}_dctn{:d}".format(opt.input_n, opt.output_n, opt.dct_n)
# create model
print(">>> creating model")
input_n = opt.input_n
output_n = opt.output_n
dct_n = opt.dct_n
sample_rate = opt.sample_rate
# 48 nodes for angle prediction
model = nnmodel.GCN(input_feature=dct_n, hidden_feature=opt.linear_size, p_dropout=opt.dropout,
num_stage=opt.num_stage, node_n=48)
if is_cuda:
model.cuda()
print(">>> total params: {:.2f}M".format(sum(p.numel() for p in model.parameters()) / 1000000.0))
optimizer = torch.optim.Adam(model.parameters(), lr=opt.lr)
# continue from checkpoint
if opt.is_load:
model_path_len = 'checkpoint/pretrained/h36m_in10_out25_dctn35.pth.tar'
print(">>> loading ckpt len from '{}'".format(model_path_len))
if is_cuda:
ckpt = torch.load(model_path_len)
else:
ckpt = torch.load(model_path_len, map_location='cpu')
start_epoch = ckpt['epoch']
err_best = ckpt['err']
lr_now = ckpt['lr']
model.load_state_dict(ckpt['state_dict'])
optimizer.load_state_dict(ckpt['optimizer'])
print(">>> ckpt len loaded (epoch: {} | err: {})".format(start_epoch, err_best))
# data loading
print(">>> loading data")
dim_used = [6, 7, 8, 9, 12, 13, 14, 15, 21, 22, 23, 24, 27, 28, 29, 30, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
47, 51, 52, 53, 54, 55, 56, 57, 60, 61, 62, 75, 76, 77, 78, 79, 80, 81, 84, 85, 86]
acts = data_utils.define_actions('walking')
test_data = dict()
for act in acts:
test_dataset = H36motion(path_to_data=opt.data_dir, actions=act, input_n=input_n, output_n=output_n, split=1,
sample_rate=sample_rate, dct_n=dct_n)
test_data[act] = DataLoader(
dataset=test_dataset,
batch_size=opt.test_batch,
shuffle=False,
num_workers=opt.job,
pin_memory=True)
print(">>> data loaded !")
ret_log = np.array([start_epoch])
head = np.array(['epoch'])
test_3d_temp = np.array([])
test_3d_head = np.array([])
for act in acts:
test_e, test_3d = test(test_data[act], model, input_n=input_n, output_n=output_n, is_cuda=is_cuda,
dim_used=dim_used, dct_n=dct_n)
ret_log = np.append(ret_log, test_e)
test_3d_temp = np.append(test_3d_temp, test_3d)
test_3d_head = np.append(test_3d_head,
[act + '3d80', act + '3d160', act + '3d320', act + '3d400'])
head = np.append(head, [act + '80', act + '160', act + '320', act + '400'])
if output_n > 10:
head = np.append(head, [act + '560', act + '1000'])
test_3d_head = np.append(test_3d_head,
[act + '3d560', act + '3d1000'])
ret_log = np.append(ret_log, test_3d_temp)
head = np.append(head, test_3d_head)
# update log file and save checkpoint
df = pd.DataFrame(np.expand_dims(ret_log, axis=0))
df.columns = head
print(df)
df.to_csv(opt.ckpt + '/' + script_name + '.csv', header=head, index=False)
def train(train_loader, model, optimizer, input_n=20, dct_n=20, lr_now=None, max_norm=True, is_cuda=False, dim_used=[]):
t_l = utils.AccumLoss()
t_e = utils.AccumLoss()
t_3d = utils.AccumLoss()
model.train()
st = time.time()
bar = Bar('>>>', fill='>', max=len(train_loader))
for i, (inputs, targets, all_seq) in enumerate(train_loader):
        # skip the last batch if it only has one sample (for the batch_norm layers)
batch_size = inputs.shape[0]
if batch_size == 1:
continue
bt = time.time()
if is_cuda:
inputs = inputs.cuda().float()
# targets = Variable(targets.cuda(async=True)).float()
all_seq = all_seq.cuda().float()
outputs = model(inputs)
n = outputs.shape[0]
outputs = outputs.view(n, -1)
# targets = targets.view(n, -1)
loss = loss_funcs.sen_loss(outputs, all_seq, dim_used, dct_n)
# calculate loss and backward
optimizer.zero_grad()
loss.backward()
if max_norm:
nn.utils.clip_grad_norm(model.parameters(), max_norm=1)
optimizer.step()
n, _, _ = all_seq.data.shape
# 3d error
m_err = loss_funcs.mpjpe_error(outputs, all_seq, input_n, dim_used, dct_n)
# angle space error
e_err = loss_funcs.euler_error(outputs, all_seq, input_n, dim_used, dct_n)
# update the training loss
t_l.update(loss.cpu().data.numpy()[0] * n, n)
t_e.update(e_err.cpu().data.numpy()[0] * n, n)
t_3d.update(m_err.cpu().data.numpy()[0] * n, n)
bar.suffix = '{}/{}|batch time {:.4f}s|total time{:.2f}s'.format(i + 1, len(train_loader), time.time() - bt,
time.time() - st)
bar.next()
bar.finish()
return lr_now, t_l.avg, t_e.avg, t_3d.avg
def test(train_loader, model, input_n=20, output_n=50, dct_n=20, is_cuda=False, dim_used=[]):
N = 0
# t_l = 0
if output_n >= 25:
eval_frame = [1, 3, 7, 9, 13, 24]
elif output_n == 10:
eval_frame = [1, 3, 7, 9]
t_e = np.zeros(len(eval_frame))
t_3d = np.zeros(len(eval_frame))
model.eval()
st = time.time()
bar = Bar('>>>', fill='>', max=len(train_loader))
for i, (inputs, targets, all_seq) in enumerate(train_loader):
bt = time.time()
if is_cuda:
inputs = inputs.cuda().float()
# targets = Variable(targets.cuda(async=True)).float()
all_seq = all_seq.cuda().float()
outputs = model(inputs)
n = outputs.shape[0]
# outputs = outputs.view(n, -1)
# targets = targets.view(n, -1)
# loss = loss_funcs.sen_loss(outputs, all_seq, dim_used)
n, seq_len, dim_full_len = all_seq.data.shape
dim_used_len = len(dim_used)
# inverse dct transformation
_, idct_m = data_utils.get_dct_matrix(seq_len)
idct_m = torch.from_numpy(idct_m).float().cuda()
outputs_t = outputs.view(-1, dct_n).transpose(0, 1)
outputs_exp = torch.matmul(idct_m[:, :dct_n], outputs_t).transpose(0, 1).contiguous().view(-1, dim_used_len,
seq_len).transpose(1,
2)
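        # Shape sketch: outputs holds dct_n DCT coefficients per joint
        # dimension; idct_m[:, :dct_n] maps each coefficient vector back to a
        # length-seq_len trajectory, and after the reshapes outputs_exp has
        # shape (n, seq_len, dim_used_len).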
pred_expmap = all_seq.clone()
dim_used = np.array(dim_used)
pred_expmap[:, :, dim_used] = outputs_exp
pred_expmap = pred_expmap[:, input_n:, :].contiguous().view(-1, dim_full_len)
targ_expmap = all_seq[:, input_n:, :].clone().contiguous().view(-1, dim_full_len)
pred_expmap[:, 0:6] = 0
targ_expmap[:, 0:6] = 0
pred_expmap = pred_expmap.view(-1, 3)
targ_expmap = targ_expmap.view(-1, 3)
# get euler angles from expmap
pred_eul = data_utils.rotmat2euler_torch(data_utils.expmap2rotmat_torch(pred_expmap))
pred_eul = pred_eul.view(-1, dim_full_len).view(-1, output_n, dim_full_len)
targ_eul = data_utils.rotmat2euler_torch(data_utils.expmap2rotmat_torch(targ_expmap))
targ_eul = targ_eul.view(-1, dim_full_len).view(-1, output_n, dim_full_len)
# get 3d coordinates
targ_p3d = data_utils.expmap2xyz_torch(targ_expmap.view(-1, dim_full_len)).view(n, output_n, -1, 3)
pred_p3d = data_utils.expmap2xyz_torch(pred_expmap.view(-1, dim_full_len)).view(n, output_n, -1, 3)
# update loss and testing errors
for k in np.arange(0, len(eval_frame)):
j = eval_frame[k]
t_e[k] += torch.mean(torch.norm(pred_eul[:, j, :] - targ_eul[:, j, :], 2, 1)).cpu().data.numpy() * n
t_3d[k] += torch.mean(torch.norm(
targ_p3d[:, j, :, :].contiguous().view(-1, 3) - pred_p3d[:, j, :, :].contiguous().view(-1, 3), 2,
1)).cpu().data.numpy() * n
# t_l += loss.cpu().data.numpy()[0] * n
N += n
bar.suffix = '{}/{}|batch time {:.4f}s|total time{:.2f}s'.format(i + 1, len(train_loader), time.time() - bt,
time.time() - st)
bar.next()
bar.finish()
return t_e / N, t_3d / N
def val(train_loader, model, input_n=20, dct_n=20, is_cuda=False, dim_used=[]):
# t_l = utils.AccumLoss()
t_e = utils.AccumLoss()
t_3d = utils.AccumLoss()
model.eval()
st = time.time()
bar = Bar('>>>', fill='>', max=len(train_loader))
for i, (inputs, targets, all_seq) in enumerate(train_loader):
bt = time.time()
if is_cuda:
inputs = inputs.cuda().float()
# targets = Variable(targets.cuda(async=True)).float()
all_seq = all_seq.cuda().float()
outputs = model(inputs)
n = outputs.shape[0]
outputs = outputs.view(n, -1)
# targets = targets.view(n, -1)
# loss = loss_funcs.sen_loss(outputs, all_seq, dim_used)
n, _, _ = all_seq.data.shape
m_err = loss_funcs.mpjpe_error(outputs, all_seq, input_n, dim_used, dct_n)
e_err = loss_funcs.euler_error(outputs, all_seq, input_n, dim_used, dct_n)
# t_l.update(loss.cpu().data.numpy()[0] * n, n)
t_e.update(e_err.cpu().data.numpy()[0] * n, n)
t_3d.update(m_err.cpu().data.numpy()[0] * n, n)
bar.suffix = '{}/{}|batch time {:.4f}s|total time{:.2f}s'.format(i + 1, len(train_loader), time.time() - bt,
time.time() - st)
bar.next()
bar.finish()
return t_e.avg, t_3d.avg
if __name__ == "__main__":
option = Options().parse()
main(option)
| 38.375887 | 120 | 0.577989 | 1,562 | 10,822 | 3.766325 | 0.199744 | 0.021418 | 0.018358 | 0.015468 | 0.45844 | 0.419174 | 0.377019 | 0.36036 | 0.346422 | 0.316845 | 0 | 0.039306 | 0.280632 | 10,822 | 281 | 121 | 38.512456 | 0.716378 | 0.094714 | 0 | 0.316583 | 0 | 0 | 0.047839 | 0.007785 | 0 | 0 | 0 | 0 | 0 | 1 | 0.020101 | false | 0 | 0.075377 | 0 | 0.110553 | 0.040201 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
3372888726713532a0071aaa02912ff971e2efa0 | 216,322 | py | Python | sql.py | TylerEli617/sqlpydb | 84756087b308414e7f9f7a83fd711d515f4743f8 | [
"Apache-2.0"
] | null | null | null | sql.py | TylerEli617/sqlpydb | 84756087b308414e7f9f7a83fd711d515f4743f8 | [
"Apache-2.0"
] | null | null | null | sql.py | TylerEli617/sqlpydb | 84756087b308414e7f9f7a83fd711d515f4743f8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import ctypes
import enum
import os
if os.name == "nt":
DM_ODBC_NAME = "odbc32.dll"
else:
DM_ODBC_NAME = "libodbc.so"
def UnimplementedSQLFunction(*args):
raise NotImplementedError("This SQL function is not implemented")
class Driver:
def __init__(self, odbc_driver_name = DM_ODBC_NAME, size_of_long = 8, unicode = True, legacy = True):
self.ODBC_DRIVER_NAME = odbc_driver_name
self.SIZE_OF_LONG = size_of_long
self.UNICODE = unicode
self.LEGACY = legacy
self.ODBC_DRIVER = ctypes.CDLL(self.ODBC_DRIVER_NAME)
self.UnimplementedSQLFunction = UnimplementedSQLFunction
####----------------------------------------------------------------------------
####This section mimics iodbcunix.h---------------------------------------------
####----------------------------------------------------------------------------
self.TRUE = 1
self.FALSE = 0
################################################################################
####Windows style typedefs######################################################
################################################################################
self.BYTE = ctypes.c_ubyte
self.WORD = ctypes.c_ushort
self.DWORD = ctypes.c_uint
self.LPSTR = ctypes.c_char_p
self.LPCSTR = ctypes.c_char_p
self.LPWSTR = ctypes.c_wchar_p
self.LPCWSTR = ctypes.c_wchar_p
self.LPDWORD = ctypes.POINTER(self.DWORD,)
self.BOOL = ctypes.c_int
####----------------------------------------------------------------------------
####This section mimics sqltypes.h----------------------------------------------
####----------------------------------------------------------------------------
################################################################################
####ODBC Specification##########################################################
################################################################################
self.ODBCVER = 0x0351
################################################################################
####ODBC Types##################################################################
################################################################################
self.SQLCHAR = ctypes.c_ubyte
self.SQLSMALLINT = ctypes.c_short
self.SQLUSMALLINT = ctypes.c_ushort
if self.SIZE_OF_LONG == 8:
self.SQLINTEGER = ctypes.c_int
self.SQLUINTEGER = ctypes.c_uint
else:
self.SQLINTEGER = ctypes.c_long
self.SQLUINTEGER = ctypes.c_ulong
self.SQLPOINTER = ctypes.c_void_p
self.SQLSCHAR = ctypes.c_char
self.SQLDATE = ctypes.c_ubyte
self.SQLDECIMAL = ctypes.c_ubyte
self.SQLNUMERIC = ctypes.c_ubyte
self.SQLDOUBLE = ctypes.c_double
self.SQLFLOAT = ctypes.c_double
self.SQLREAL = ctypes.c_float
self.SQLTIME = ctypes.c_ubyte
self.SQLTIMESTAMP = ctypes.c_ubyte
self.SQLVARCHAR = ctypes.c_ubyte
self.SQLBIGINT = ctypes.c_longlong
self.SQLUBIGINT = ctypes.c_ulonglong
self.SQLWCHAR = ctypes.c_ushort
if self.UNICODE:
self.SQLTCHAR = self.SQLWCHAR
else:
self.SQLTCHAR = self.SQLCHAR
if self.LEGACY:
self.SQLLEN = ctypes.c_int
self.SQLULEN = ctypes.c_uint
self.SQLSETPOSIROW = ctypes.c_ushort
else:
self.SQLLEN = ctypes.c_long
self.SQLULEN = ctypes.c_ulong
self.SQLSETPOSIROW = ctypes.c_ushort
################################################################################
####Backward Compatibility with older platform SDKs#############################
################################################################################
self.SQLROWCOUNT = self.SQLULEN
self.SQLROWSETSIZE = self.SQLULEN
self.SQLTRANSID = self.SQLULEN
self.SQLROWOFFSET = self.SQLLEN
################################################################################
####Generic Pointer Types#######################################################
################################################################################
self.PTR = ctypes.c_void_p
self.SQLHANDLE = ctypes.c_void_p
################################################################################
####Handles#####################################################################
################################################################################
self.HENV = ctypes.c_void_p
self.HDBC = ctypes.c_void_p
self.HSTMT = ctypes.c_void_p
self.SQLHENV = self.SQLHANDLE
self.SQLHDBC = self.SQLHANDLE
self.SQLHSTMT = self.SQLHANDLE
self.SQLHDESC = self.SQLHANDLE
self.HWND = self.SQLPOINTER
self.SQLHWND = self.SQLPOINTER
################################################################################
####Portable Types##############################################################
################################################################################
self.UCHAR = ctypes.c_ubyte
self.SCHAR = ctypes.c_char
self.SWORD = ctypes.c_short
self.UWORD = ctypes.c_ushort
self.SDWORD = ctypes.c_long
self.UDWORD = ctypes.c_ulong
self.SSHORT = ctypes.c_short
self.USHORT = ctypes.c_ushort
self.SLONG = ctypes.c_long
self.ULONG = ctypes.c_ulong
self.SFLOAT = ctypes.c_float
self.SDOUBLE = ctypes.c_double
self.LDOUBLE = ctypes.c_double
self.ODBCINT64 = ctypes.c_longlong
self.ODBCUINT64 = ctypes.c_ulonglong
################################################################################
####Return Types################################################################
################################################################################
self.RETCODE = ctypes.c_short
self.SQLRETURN = self.RETCODE
################################################################################
        ####Portable Types: DATE, TIME, TIMESTAMP, BOOKMARK############################
################################################################################
self.BOOKMARK = self.SQLULEN
        class DATE_STRUCT_DEFINITION(ctypes.Structure):
            _fields_ = [("year", self.SQLSMALLINT),
                        ("month", self.SQLUSMALLINT),
                        ("day", self.SQLUSMALLINT)]
        self.DATE_STRUCT = DATE_STRUCT_DEFINITION
self.SQL_DATE_STRUCT = self.DATE_STRUCT
class TIME_STRUCT_DEFINITION(ctypes.Structure):
_fields_ = [("hour", self.SQLUSMALLINT),
("minute", self.SQLUSMALLINT),
("second", self.SQLUSMALLINT)]
self.TIME_STRUCT = TIME_STRUCT_DEFINITION
self.SQL_TIME_STRUCT = self.TIME_STRUCT
class TIMESTAMP_STRUCT_DEFINITION(ctypes.Structure):
_fields_ = [("year", self.SQLSMALLINT),
("month", self.SQLUSMALLINT),
("day", self.SQLUSMALLINT),
("hour", self.SQLUSMALLINT),
("minute", self.SQLUSMALLINT),
("second", self.SQLUSMALLINT),
("fraction", self.SQLUINTEGER)]
self.TIMESTAMP_STRUCT = TIMESTAMP_STRUCT_DEFINITION
self.SQL_TIMESTAMP_STRUCT = self.TIMESTAMP_STRUCT
################################################################################
####enumeration for DATETIME_INTERVAL_SUBCODE###################################
################################################################################
self.SQLINTERVAL = ctypes.c_int
self.SQL_IS_YEAR = 1
self.SQL_IS_MONTH = 2
self.SQL_IS_DAY = 3
self.SQL_IS_HOUR = 4
self.SQL_IS_MINUTE = 5
self.SQL_IS_SECOND = 6
self.SQL_IS_YEAR_TO_MONTH = 7
self.SQL_IS_DAY_TO_HOUR = 8
self.SQL_IS_DAY_TO_MINUTE = 9
self.SQL_IS_DAY_TO_SECOND = 10
self.SQL_IS_HOUR_TO_MINUTE = 11
self.SQL_IS_HOUR_TO_SECOND = 12
self.SQL_IS_MINUTE_TO_SECOND = 13
class SQL_YEAR_MONTH_STRUCT_DEFINITION(ctypes.Structure):
_fields_ = [("year", self.SQLUINTEGER),
("month", self.SQLUINTEGER)]
self.SQL_YEAR_MONTH_STRUCT = SQL_YEAR_MONTH_STRUCT_DEFINITION
class SQL_DAY_SECOND_STRUCT_DEFINITION(ctypes.Structure):
_fields_ = [("day", self.SQLUINTEGER),
("hour", self.SQLUINTEGER),
("minute", self.SQLUINTEGER),
("second", self.SQLUINTEGER),
("fraction", self.SQLUINTEGER)]
self.SQL_DAY_SECOND_STRUCT = SQL_DAY_SECOND_STRUCT_DEFINITION
class SQL_INTERVAL_UNION_DEFINITION(ctypes.Union):
_fields_ = [("year_month", self.SQL_YEAR_MONTH_STRUCT),
("day_second", self.SQL_DAY_SECOND_STRUCT)]
self.SQL_INTERVAL_UNION = SQL_INTERVAL_UNION_DEFINITION
class SQL_INTERVAL_STRUCT_DEFINITION(ctypes.Structure):
_fields_ = [("interval_type", self.SQLINTERVAL),
("interval_sign", self.SQLSMALLINT),
("intval", self.SQL_INTERVAL_UNION)]
self.SQL_INTERVAL_STRUCT = SQL_INTERVAL_STRUCT_DEFINITION
################################################################################
####Numeric Data Type###########################################################
################################################################################
self.SQL_MAX_NUMERIC_LEN = 16
class SQL_NUMERIC_STRUCT_DEFINITION(ctypes.Structure):
_fields_ = [("precision", self.SQLCHAR),
("scale", self.SQLSCHAR),
("sign", self.SQLCHAR),
("val", self.SQLCHAR * self.SQL_MAX_NUMERIC_LEN)]
self.SQL_NUMERIC_STRUCT = SQL_NUMERIC_STRUCT_DEFINITION
################################################################################
################################################################################
################################################################################
class SQLGUID_DEFINITION(ctypes.Structure):
_fields_ = [("Data1", ctypes.c_uint),
("Data2", ctypes.c_ushort),
("Data3", ctypes.c_ushort),
("Data4", ctypes.c_char * 8)]
self.SQLGUID = SQLGUID_DEFINITION
####----------------------------------------------------------------------------
####This section mimics sql.h---------------------------------------------------
####----------------------------------------------------------------------------
################################################################################
        ####Useful Constants###########################################################
################################################################################
self.SQL_MAX_MESSAGE_LENGTH = 512
################################################################################
####Handle Types################################################################
################################################################################
self.SQL_HANDLE_ENV = 1
self.SQL_HANDLE_DBC = 2
self.SQL_HANDLE_STMT = 3
self.SQL_HANDLE_DESC = 4
################################################################################
####Function return codes#######################################################
################################################################################
self.SQL_SUCCESS = 0
self.SQL_SUCCESS_WITH_INFO = 1
self.SQL_STILL_EXECUTING = 2
self.SQL_ERROR = -1
self.SQL_INVALID_HANDLE = -2
self.SQL_NEED_DATA = 99
self.SQL_NO_DATA = 100
################################################################################
####Test for success############################################################
################################################################################
def SQL_SUCCEEDED_DEFINITION(return_code):
return (return_code & ~1) == 0
self.SQL_SUCCEEDED = SQL_SUCCEEDED_DEFINITION
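        # Worked example: (0 & ~1) == 0 and (1 & ~1) == 0, so SQL_SUCCESS and
        # SQL_SUCCESS_WITH_INFO count as success, while SQL_ERROR (-1) and
        # SQL_INVALID_HANDLE (-2) both evaluate to -2, i.e. nonzero -> False.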
################################################################################
        ####Special length values######################################################
################################################################################
self.SQL_NULL_DATA = -1
self.SQL_DATA_AT_EXEC = -2
################################################################################
####Flags for null-terminated strings###########################################
################################################################################
self.SQL_NTS = -3
self.SQL_NTSL = -3
################################################################################
####Standard SQL datatypes######################################################
################################################################################
self.SQL_UNKNOWN_TYPE = 0
self.SQL_CHAR = 1
self.SQL_NUMERIC = 2
self.SQL_DECIMAL = 3
self.SQL_INTEGER = 4
self.SQL_SMALLINT = 5
self.SQL_FLOAT = 6
self.SQL_REAL = 7
self.SQL_DOUBLE = 8
self.SQL_DATETIME = 9
self.SQL_VARCHAR = 12
################################################################################
####SQLGetTypeInfo request for all data types###################################
################################################################################
self.SQL_ALL_TYPES = 0
################################################################################
####Statement attribute values for date/time data types#########################
################################################################################
self.SQL_TYPE_DATE = 91
self.SQL_TYPE_TIME = 92
self.SQL_TYPE_TIMESTAMP = 93
################################################################################
####Date/Time constants#########################################################
################################################################################
self.SQL_DATE_LEN = 10
self.SQL_TIME_LEN = 8
self.SQL_TIMESTAMP_LEN = 19
################################################################################
####Null status constants#######################################################
################################################################################
self.SQL_NO_NULLS = 0
self.SQL_NULLABLE = 1
self.SQL_NULLABLE_UNKNOWN = 2
################################################################################
####NULL Handles################################################################
################################################################################
self.SQL_NULL_HENV = self.SQLHANDLE()
self.SQL_NULL_HDBC = self.SQLHANDLE()
self.SQL_NULL_HSTMT = self.SQLHANDLE()
self.SQL_NULL_HDESC = self.SQLHANDLE()
self.SQL_NULL_SQLLEN = ctypes.POINTER(self.SQLLEN)()
self.SQL_NULL_SQLULEN = ctypes.POINTER(self.SQLULEN)()
self.SQL_NULL_SQLSMALLINT = ctypes.POINTER(self.SQLSMALLINT)()
self.SQL_NULL_SQLUSMALLINT = ctypes.POINTER(self.SQLUSMALLINT)()
self.SQL_NULL_SQLINTEGER = ctypes.POINTER(self.SQLINTEGER)()
self.SQL_NULL_SQLUINTEGER = ctypes.POINTER(self.SQLUINTEGER)()
self.SQL_NULL_SQLCHAR = ctypes.POINTER(self.SQLCHAR)()
self.SQL_NULL_SQLWCHAR = ctypes.POINTER(self.SQLWCHAR)()
self.SQL_NULL_SQLTCHAR = ctypes.POINTER(self.SQLTCHAR)()
################################################################################
        ####Null parent for SQLHENV####################################################
################################################################################
self.SQL_NULL_HANDLE = self.SQLHANDLE(0)
################################################################################
####CLI option values###########################################################
################################################################################
self.SQL_FALSE = 0
self.SQL_TRUE = 1
################################################################################
####Default conversion code#####################################################
################################################################################
self.SQL_DEFAULT = 99
################################################################################
####SQLDataSources/SQLFetchScroll - FetchOrientation############################
################################################################################
self.SQL_FETCH_NEXT = 1
self.SQL_FETCH_FIRST = 2
################################################################################
####SQLFetchScroll - FetchOrientation###########################################
################################################################################
self.SQL_FETCH_LAST = 3
self.SQL_FETCH_PRIOR = 4
self.SQL_FETCH_ABSOLUTE = 5
self.SQL_FETCH_RELATIVE = 6
################################################################################
####SQLFreeStmt#################################################################
################################################################################
self.SQL_CLOSE = 0
self.SQL_DROP = 1
self.SQL_UNBIND = 2
self.SQL_RESET_PARAMS = 3
################################################################################
####SQLGetConnectAttr - connection attributes###################################
################################################################################
self.SQL_ATTR_AUTO_IPD = 10001
self.SQL_ATTR_METADATA_ID = 10014
################################################################################
####SQLGetData code indicating that the application row descriptor##############
####specifies the data type#####################################################
################################################################################
self.SQL_ARD_TYPE = -99
################################################################################
####SQLGetDescField - identifiers of fields in the SQL descriptor###############
################################################################################
self.SQL_DESC_COUNT = 1001
self.SQL_DESC_TYPE = 1002
self.SQL_DESC_LENGTH = 1003
self.SQL_DESC_OCTET_LENGTH_PTR = 1004
self.SQL_DESC_PRECISION = 1005
self.SQL_DESC_SCALE = 1006
self.SQL_DESC_DATETIME_INTERVAL_CODE = 1007
self.SQL_DESC_NULLABLE = 1008
self.SQL_DESC_INDICATOR_PTR = 1009
self.SQL_DESC_DATA_PTR = 1010
self.SQL_DESC_NAME = 1011
self.SQL_DESC_UNNAMED = 1012
self.SQL_DESC_OCTET_LENGTH = 1013
self.SQL_DESC_ALLOC_TYPE = 1099
################################################################################
####SQLGetDescField - SQL_DESC_ALLOC_TYPE#######################################
################################################################################
self.SQL_DESC_ALLOC_AUTO = 1
self.SQL_DESC_ALLOC_USER = 2
################################################################################
####SQLGetDescField - SQL_DESC_DATETIME_INTERVAL_CODE###########################
################################################################################
self.SQL_CODE_DATE = 1
self.SQL_CODE_TIME = 2
self.SQL_CODE_TIMESTAMP = 3
################################################################################
####SQLGetDescField - SQL_DESC_UNNAMED##########################################
################################################################################
self.SQL_NAMED = 0
self.SQL_UNNAMED = 1
################################################################################
####SQLGetDiagField - identifiers of fields in the diagnostics area#############
################################################################################
self.SQL_DIAG_RETURNCODE = 1
self.SQL_DIAG_NUMBER = 2
self.SQL_DIAG_ROW_COUNT = 3
self.SQL_DIAG_SQLSTATE = 4
self.SQL_DIAG_NATIVE = 5
self.SQL_DIAG_MESSAGE_TEXT = 6
self.SQL_DIAG_DYNAMIC_FUNCTION = 7
self.SQL_DIAG_CLASS_ORIGIN = 8
self.SQL_DIAG_SUBCLASS_ORIGIN = 9
self.SQL_DIAG_CONNECTION_NAME = 10
self.SQL_DIAG_SERVER_NAME = 11
self.SQL_DIAG_DYNAMIC_FUNCTION_CODE = 12
################################################################################
####SQLGetDiagField - SQL_DIAG_DYNAMIC_FUNCTION_CODE############################
################################################################################
self.SQL_DIAG_ALTER_DOMAIN = 3
self.SQL_DIAG_ALTER_TABLE = 4
self.SQL_DIAG_CALL = 7
self.SQL_DIAG_CREATE_ASSERTION = 6
self.SQL_DIAG_CREATE_CHARACTER_SET = 8
self.SQL_DIAG_CREATE_COLLATION = 10
self.SQL_DIAG_CREATE_DOMAIN = 23
self.SQL_DIAG_CREATE_INDEX = -1
self.SQL_DIAG_CREATE_SCHEMA = 64
self.SQL_DIAG_CREATE_TABLE = 77
self.SQL_DIAG_CREATE_TRANSLATION = 79
self.SQL_DIAG_CREATE_VIEW = 84
self.SQL_DIAG_DELETE_WHERE = 19
self.SQL_DIAG_DROP_ASSERTION = 24
self.SQL_DIAG_DROP_CHARACTER_SET = 25
self.SQL_DIAG_DROP_COLLATION = 26
self.SQL_DIAG_DROP_DOMAIN = 27
self.SQL_DIAG_DROP_INDEX = -2
self.SQL_DIAG_DROP_SCHEMA = 31
self.SQL_DIAG_DROP_TABLE = 32
self.SQL_DIAG_DROP_TRANSLATION = 33
self.SQL_DIAG_DROP_VIEW = 36
self.SQL_DIAG_DYNAMIC_DELETE_CURSOR = 38
self.SQL_DIAG_DYNAMIC_UPDATE_CURSOR = 81
self.SQL_DIAG_GRANT = 48
self.SQL_DIAG_INSERT = 50
self.SQL_DIAG_REVOKE = 59
self.SQL_DIAG_SELECT_CURSOR = 85
self.SQL_DIAG_UNKNOWN_STATEMENT = 0
self.SQL_DIAG_UPDATE_WHERE = 82
################################################################################
####SQLGetEnvAttr - environment attribute#######################################
################################################################################
self.SQL_ATTR_OUTPUT_NTS = 10001
################################################################################
####SQLGetFunctions#############################################################
################################################################################
self.SQL_API_SQLALLOCCONNECT = 1
self.SQL_API_SQLALLOCENV = 2
self.SQL_API_SQLALLOCHANDLE = 1001
self.SQL_API_SQLALLOCSTMT = 3
self.SQL_API_SQLBINDCOL = 4
self.SQL_API_SQLBINDPARAM = 1002
self.SQL_API_SQLCANCEL = 5
self.SQL_API_SQLCLOSECURSOR = 1003
self.SQL_API_SQLCOLATTRIBUTE = 6
self.SQL_API_SQLCOLUMNS = 40
self.SQL_API_SQLCONNECT = 7
self.SQL_API_SQLCOPYDESC = 1004
self.SQL_API_SQLDATASOURCES = 57
self.SQL_API_SQLDESCRIBECOL = 8
self.SQL_API_SQLDISCONNECT = 9
self.SQL_API_SQLENDTRAN = 1005
self.SQL_API_SQLERROR = 10
self.SQL_API_SQLEXECDIRECT = 11
self.SQL_API_SQLEXECUTE = 12
self.SQL_API_SQLFETCH = 13
self.SQL_API_SQLFETCHSCROLL = 1021
self.SQL_API_SQLFREECONNECT = 14
self.SQL_API_SQLFREEENV = 15
self.SQL_API_SQLFREEHANDLE = 1006
self.SQL_API_SQLFREESTMT = 16
self.SQL_API_SQLGETCONNECTATTR = 1007
self.SQL_API_SQLGETCONNECTOPTION = 42
self.SQL_API_SQLGETCURSORNAME = 17
self.SQL_API_SQLGETDATA = 43
self.SQL_API_SQLGETDESCFIELD = 1008
self.SQL_API_SQLGETDESCREC = 1009
self.SQL_API_SQLGETDIAGFIELD = 1010
self.SQL_API_SQLGETDIAGREC = 1011
self.SQL_API_SQLGETENVATTR = 1012
self.SQL_API_SQLGETFUNCTIONS = 44
self.SQL_API_SQLGETINFO = 45
self.SQL_API_SQLGETSTMTATTR = 1014
self.SQL_API_SQLGETSTMTOPTION = 46
self.SQL_API_SQLGETTYPEINFO = 47
self.SQL_API_SQLNUMRESULTCOLS = 18
self.SQL_API_SQLPARAMDATA = 48
self.SQL_API_SQLPREPARE = 19
self.SQL_API_SQLPUTDATA = 49
self.SQL_API_SQLROWCOUNT = 20
self.SQL_API_SQLSETCONNECTATTR = 1016
self.SQL_API_SQLSETCONNECTOPTION = 50
self.SQL_API_SQLSETCURSORNAME = 21
self.SQL_API_SQLSETDESCFIELD = 1017
self.SQL_API_SQLSETDESCREC = 1018
self.SQL_API_SQLSETENVATTR = 1019
self.SQL_API_SQLSETPARAM = 22
self.SQL_API_SQLSETSTMTATTR = 1020
self.SQL_API_SQLSETSTMTOPTION = 51
self.SQL_API_SQLSPECIALCOLUMNS = 52
self.SQL_API_SQLSTATISTICS = 53
self.SQL_API_SQLTABLES = 54
self.SQL_API_SQLTRANSACT = 23
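# Illustrative sketch: the SQL_API_* codes above are the identifiers that
# SQLGetFunctions expects; probing for SQLFetchScroll support on a connected
# handle `hdbc` (assumed, as is `odbc`):
#     supported = odbc.SQLUSMALLINT()
#     rc = odbc.SQLGetFunctions(hdbc, odbc.SQL_API_SQLFETCHSCROLL,
#                               ctypes.byref(supported))
#     if supported.value:
#         pass  # safe to call odbc.SQLFetchScroll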
################################################################################
####SQLGetInfo##################################################################
################################################################################
self.SQL_MAX_DRIVER_CONNECTIONS = 0
self.SQL_MAXIMUM_DRIVER_CONNECTIONS = self.SQL_MAX_DRIVER_CONNECTIONS
self.SQL_MAX_CONCURRENT_ACTIVITIES = 1
self.SQL_MAXIMUM_CONCURRENT_ACTIVITIES = self.SQL_MAX_CONCURRENT_ACTIVITIES
self.SQL_DATA_SOURCE_NAME = 2
self.SQL_FETCH_DIRECTION = 8
self.SQL_SERVER_NAME = 13
self.SQL_SEARCH_PATTERN_ESCAPE = 14
self.SQL_DBMS_NAME = 17
self.SQL_DBMS_VER = 18
self.SQL_ACCESSIBLE_TABLES = 19
self.SQL_ACCESSIBLE_PROCEDURES = 20
self.SQL_CURSOR_COMMIT_BEHAVIOR = 23
self.SQL_DATA_SOURCE_READ_ONLY = 25
self.SQL_DEFAULT_TXN_ISOLATION = 26
self.SQL_IDENTIFIER_CASE = 28
self.SQL_IDENTIFIER_QUOTE_CHAR = 29
self.SQL_MAX_COLUMN_NAME_LEN = 30
self.SQL_MAXIMUM_COLUMN_NAME_LENGTH = self.SQL_MAX_COLUMN_NAME_LEN
self.SQL_MAX_CURSOR_NAME_LEN = 31
self.SQL_MAXIMUM_CURSOR_NAME_LENGTH = self.SQL_MAX_CURSOR_NAME_LEN
self.SQL_MAX_SCHEMA_NAME_LEN = 32
self.SQL_MAXIMUM_SCHEMA_NAME_LENGTH = self.SQL_MAX_SCHEMA_NAME_LEN
self.SQL_MAX_CATALOG_NAME_LEN = 34
self.SQL_MAXIMUM_CATALOG_NAME_LENGTH = self.SQL_MAX_CATALOG_NAME_LEN
self.SQL_MAX_TABLE_NAME_LEN = 35
self.SQL_SCROLL_CONCURRENCY = 43
self.SQL_TXN_CAPABLE = 46
self.SQL_TRANSACTION_CAPABLE = self.SQL_TXN_CAPABLE
self.SQL_USER_NAME = 47
self.SQL_TXN_ISOLATION_OPTION = 72
self.SQL_TRANSACTION_ISOLATION_OPTION = self.SQL_TXN_ISOLATION_OPTION
self.SQL_INTEGRITY = 73
self.SQL_GETDATA_EXTENSIONS = 81
self.SQL_NULL_COLLATION = 85
self.SQL_ALTER_TABLE = 86
self.SQL_ORDER_BY_COLUMNS_IN_SELECT = 90
self.SQL_SPECIAL_CHARACTERS = 94
self.SQL_MAX_COLUMNS_IN_GROUP_BY = 97
self.SQL_MAXIMUM_COLUMNS_IN_GROUP_BY = self.SQL_MAX_COLUMNS_IN_GROUP_BY
self.SQL_MAX_COLUMNS_IN_INDEX = 98
self.SQL_MAXIMUM_COLUMNS_IN_INDEX = self.SQL_MAX_COLUMNS_IN_INDEX
self.SQL_MAX_COLUMNS_IN_ORDER_BY = 99
self.SQL_MAXIMUM_COLUMNS_IN_ORDER_BY = self.SQL_MAX_COLUMNS_IN_ORDER_BY
self.SQL_MAX_COLUMNS_IN_SELECT = 100
self.SQL_MAXIMUM_COLUMNS_IN_SELECT = self.SQL_MAX_COLUMNS_IN_SELECT
self.SQL_MAX_COLUMNS_IN_TABLE = 101
self.SQL_MAX_INDEX_SIZE = 102
self.SQL_MAXIMUM_INDEX_SIZE = self.SQL_MAX_INDEX_SIZE
self.SQL_MAX_ROW_SIZE = 104
self.SQL_MAXIMUM_ROW_SIZE = self.SQL_MAX_ROW_SIZE
self.SQL_MAX_STATEMENT_LEN = 105
self.SQL_MAXIMUM_STATEMENT_LENGTH = self.SQL_MAX_STATEMENT_LEN
self.SQL_MAX_TABLES_IN_SELECT = 106
self.SQL_MAXIMUM_TABLES_IN_SELECT = self.SQL_MAX_TABLES_IN_SELECT
self.SQL_MAX_USER_NAME_LEN = 107
self.SQL_MAXIMUM_USER_NAME_LENGTH = self.SQL_MAX_USER_NAME_LEN
self.SQL_OJ_CAPABILITIES = 115
self.SQL_OUTER_JOIN_CAPABILITIES = self.SQL_OJ_CAPABILITIES
self.SQL_XOPEN_CLI_YEAR = 10000
self.SQL_CURSOR_SENSITIVITY = 10001
self.SQL_DESCRIBE_PARAMETER = 10002
self.SQL_CATALOG_NAME = 10003
self.SQL_COLLATION_SEQ = 10004
self.SQL_MAX_IDENTIFIER_LEN = 10005
self.SQL_MAXIMUM_IDENTIFIER_LENGTH = self.SQL_MAX_IDENTIFIER_LEN
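# Illustrative sketch: fetching a string-valued info type with SQLGetInfo,
# assuming `odbc` and a connected `hdbc` as in the other sketches:
#     buf = ctypes.create_string_buffer(256)
#     out_len = odbc.SQLSMALLINT()
#     rc = odbc.SQLGetInfo(hdbc, odbc.SQL_DBMS_NAME, buf,
#                          ctypes.sizeof(buf), ctypes.byref(out_len))
#     dbms_name = buf.value.decode()  # driver-reported DBMS name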
################################################################################
####SQLGetInfo - SQL_ALTER_TABLE################################################
################################################################################
self.SQL_AT_ADD_COLUMN = 0x00000001
self.SQL_AT_DROP_COLUMN = 0x00000002
self.SQL_AT_ADD_CONSTRAINT = 0x00000008
################################################################################
####SQLGetInfo - SQL_ASYNC_MODE#################################################
################################################################################
self.SQL_AM_NONE = 0
self.SQL_AM_CONNECTION = 1
self.SQL_AM_STATEMENT = 2
################################################################################
####SQLGetInfo - SQL_CURSOR_COMMIT_BEHAVIOR#####################################
################################################################################
self.SQL_CB_DELETE = 0
self.SQL_CB_CLOSE = 1
self.SQL_CB_PRESERVE = 2
################################################################################
####SQLGetInfo - SQL_FETCH_DIRECTION############################################
################################################################################
self.SQL_FD_FETCH_NEXT = 0x00000001
self.SQL_FD_FETCH_FIRST = 0x00000002
self.SQL_FD_FETCH_LAST = 0x00000004
self.SQL_FD_FETCH_PRIOR = 0x00000008
self.SQL_FD_FETCH_ABSOLUTE = 0x00000010
self.SQL_FD_FETCH_RELATIVE = 0x00000020
################################################################################
####SQLGetInfo - SQL_GETDATA_EXTENSIONS#########################################
################################################################################
self.SQL_GD_ANY_COLUMN = 0x00000001
self.SQL_GD_ANY_ORDER = 0x00000002
################################################################################
####SQLGetInfo - SQL_IDENTIFIER_CASE############################################
################################################################################
self.SQL_IC_UPPER = 1
self.SQL_IC_LOWER = 2
self.SQL_IC_SENSITIVE = 3
self.SQL_IC_MIXED = 4
################################################################################
####SQLGetInfo - SQL_NULL_COLLATION#############################################
################################################################################
self.SQL_NC_HIGH = 0
self.SQL_NC_LOW = 1
################################################################################
####SQLGetInfo - SQL_OJ_CAPABILITIES############################################
################################################################################
self.SQL_OJ_LEFT = 0x00000001
self.SQL_OJ_RIGHT = 0x00000002
self.SQL_OJ_FULL = 0x00000004
self.SQL_OJ_NESTED = 0x00000008
self.SQL_OJ_NOT_ORDERED = 0x00000010
self.SQL_OJ_INNER = 0x00000020
self.SQL_OJ_ALL_COMPARISON_OPS = 0x00000040
################################################################################
####SQLGetInfo - SQL_SCROLL_CONCURRENCY#########################################
################################################################################
self.SQL_SCCO_READ_ONLY = 0x00000001
self.SQL_SCCO_LOCK = 0x00000002
self.SQL_SCCO_OPT_ROWVER = 0x00000004
self.SQL_SCCO_OPT_VALUES = 0x00000008
################################################################################
####SQLGetInfo - SQL_TXN_CAPABLE################################################
################################################################################
self.SQL_TC_NONE = 0
self.SQL_TC_DML = 1
self.SQL_TC_ALL = 2
self.SQL_TC_DDL_COMMIT = 3
self.SQL_TC_DDL_IGNORE = 4
################################################################################
####SQLGetInfo - SQL_TXN_ISOLATION_OPTION#######################################
################################################################################
self.SQL_TXN_READ_UNCOMMITTED = 0x00000001
self.SQL_TRANSACTION_READ_UNCOMMITTED = self.SQL_TXN_READ_UNCOMMITTED
self.SQL_TXN_READ_COMMITTED = 0x00000002
self.SQL_TRANSACTION_READ_COMMITTED = self.SQL_TXN_READ_COMMITTED
self.SQL_TXN_REPEATABLE_READ = 0x00000004
self.SQL_TRANSACTION_REPEATABLE_READ = self.SQL_TXN_REPEATABLE_READ
self.SQL_TXN_SERIALIZABLE = 0x00000008
self.SQL_TRANSACTION_SERIALIZABLE = self.SQL_TXN_SERIALIZABLE
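# Illustrative sketch: SQL_TXN_ISOLATION_OPTION reports a bitmask built from
# the flags above; decoding a value `mask` read back via SQLGetInfo (assumed):
#     if mask & odbc.SQL_TXN_SERIALIZABLE:
#         pass  # driver supports SERIALIZABLE
#     if mask & odbc.SQL_TXN_READ_COMMITTED:
#         pass  # driver supports READ COMMITTED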
################################################################################
####SQLGetStmtAttr - statement attributes#######################################
################################################################################
self.SQL_ATTR_APP_ROW_DESC = 10010
self.SQL_ATTR_APP_PARAM_DESC = 10011
self.SQL_ATTR_IMP_ROW_DESC = 10012
self.SQL_ATTR_IMP_PARAM_DESC = 10013
self.SQL_ATTR_CURSOR_SCROLLABLE = -1
self.SQL_ATTR_CURSOR_SENSITIVITY = -2
################################################################################
####SQLGetStmtAttr - SQL_ATTR_CURSOR_SCROLLABLE#################################
################################################################################
self.SQL_NONSCROLLABLE = 0
self.SQL_SCROLLABLE = 1
################################################################################
####SQLGetStmtAttr - SQL_ATTR_CURSOR_SENSITIVITY################################
################################################################################
self.SQL_UNSPECIFIED = 0
self.SQL_INSENSITIVE = 1
self.SQL_SENSITIVE = 2
################################################################################
####SQLGetTypeInfo - SEARCHABLE#################################################
################################################################################
self.SQL_PRED_NONE = 0
self.SQL_PRED_CHAR = 1
self.SQL_PRED_BASIC = 2
################################################################################
####SQLSpecialColumns - Column scopes###########################################
################################################################################
self.SQL_SCOPE_CURROW = 0
self.SQL_SCOPE_TRANSACTION = 1
self.SQL_SCOPE_SESSION = 2
################################################################################
####SQLSpecialColumns - PSEUDO_COLUMN###########################################
################################################################################
self.SQL_PC_UNKNOWN = 0
self.SQL_PC_NON_PSEUDO = 1
self.SQL_PC_PSEUDO = 2
################################################################################
####SQLSpecialColumns - IdentifierType##########################################
################################################################################
self.SQL_ROW_IDENTIFIER = 1
################################################################################
####SQLStatistics - fUnique#####################################################
################################################################################
self.SQL_INDEX_UNIQUE = 0
self.SQL_INDEX_ALL = 1
################################################################################
####SQLStatistics - TYPE########################################################
################################################################################
self.SQL_INDEX_CLUSTERED = 1
self.SQL_INDEX_HASHED = 2
self.SQL_INDEX_OTHER = 3
################################################################################
####SQLTransact/SQLEndTran######################################################
################################################################################
self.SQL_COMMIT = 0
self.SQL_ROLLBACK = 1
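# Illustrative sketch: ending the current transaction on a connection,
# assuming `odbc` and `hdbc` as in the other sketches and SQL_HANDLE_DBC from
# the handle-type codes defined earlier in this module:
#     rc = odbc.SQLEndTran(odbc.SQL_HANDLE_DBC, hdbc, odbc.SQL_COMMIT)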
################################################################################
####Function Prototypes#########################################################
################################################################################
if hasattr(self.ODBC_DRIVER, "SQLAllocConnect"):
self.ODBC_DRIVER.SQLAllocConnect.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLAllocConnect.argtypes = (self.SQLHENV, ctypes.POINTER(self.SQLHDBC),)
self.SQLAllocConnect = self.ODBC_DRIVER.SQLAllocConnect
else:
self.SQLAllocConnect = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLAllocEnv"):
self.ODBC_DRIVER.SQLAllocEnv.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLAllocEnv.argtypes = (ctypes.POINTER(self.SQLHENV),)
self.SQLAllocEnv = self.ODBC_DRIVER.SQLAllocEnv
else:
self.SQLAllocEnv = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLAllocHandle"):
self.ODBC_DRIVER.SQLAllocHandle.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLAllocHandle.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, ctypes.POINTER(self.SQLHANDLE),)
self.SQLAllocHandle = self.ODBC_DRIVER.SQLAllocHandle
else:
self.SQLAllocHandle = self.UnimplementedSQLFunction
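# Illustrative sketch: the usual ODBC 3.x allocation chain through the binding
# above, assuming `odbc` is an instance of this wrapper and the SQL_HANDLE_*
# codes are defined earlier in this module (a real application would also set
# SQL_ATTR_ODBC_VERSION on the environment before allocating a connection):
#     henv = odbc.SQLHANDLE()
#     odbc.SQLAllocHandle(odbc.SQL_HANDLE_ENV, None, ctypes.byref(henv))
#     hdbc = odbc.SQLHANDLE()
#     odbc.SQLAllocHandle(odbc.SQL_HANDLE_DBC, henv, ctypes.byref(hdbc))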
if hasattr(self.ODBC_DRIVER, "SQLAllocStmt"):
self.ODBC_DRIVER.SQLAllocStmt.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLAllocStmt.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLHSTMT),)
self.SQLAllocStmt = self.ODBC_DRIVER.SQLAllocStmt
else:
self.SQLAllocStmt = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLBindCol"):
self.ODBC_DRIVER.SQLBindCol.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLBindCol.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLLEN, ctypes.POINTER(self.SQLLEN),)
self.SQLBindCol = self.ODBC_DRIVER.SQLBindCol
else:
self.SQLBindCol = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLBindParam"):
self.ODBC_DRIVER.SQLBindParam.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLBindParam.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLULEN, self.SQLSMALLINT, self.SQLPOINTER, ctypes.POINTER(self.SQLLEN),)
self.SQLBindParam = self.ODBC_DRIVER.SQLBindParam
else:
self.SQLBindParam = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLCancel"):
self.ODBC_DRIVER.SQLCancel.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLCancel.argtypes = (self.SQLHSTMT,)
self.SQLCancel = self.ODBC_DRIVER.SQLCancel
else:
self.SQLCancel = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLCloseCursor"):
self.ODBC_DRIVER.SQLCloseCursor.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLCloseCursor.argtypes = (self.SQLHSTMT,)
self.SQLCloseCursor = self.ODBC_DRIVER.SQLCloseCursor
else:
self.SQLCloseCursor = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColAttribute"):
self.ODBC_DRIVER.SQLColAttribute.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColAttribute.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN),)
self.SQLColAttribute = self.ODBC_DRIVER.SQLColAttribute
else:
self.SQLColAttribute = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColumns"):
self.ODBC_DRIVER.SQLColumns.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColumns.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLColumns = self.ODBC_DRIVER.SQLColumns
else:
self.SQLColumns = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLConnect"):
self.ODBC_DRIVER.SQLConnect.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLConnect.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLConnect = self.ODBC_DRIVER.SQLConnect
else:
self.SQLConnect = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLCopyDesc"):
self.ODBC_DRIVER.SQLCopyDesc.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLCopyDesc.argtypes = (self.SQLHDESC, self.SQLHDESC,)
self.SQLCopyDesc = self.ODBC_DRIVER.SQLCopyDesc
else:
self.SQLCopyDesc = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDataSources"):
self.ODBC_DRIVER.SQLDataSources.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDataSources.argtypes = (self.SQLHENV, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDataSources = self.ODBC_DRIVER.SQLDataSources
else:
self.SQLDataSources = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDescribeCol"):
self.ODBC_DRIVER.SQLDescribeCol.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDescribeCol.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLULEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDescribeCol = self.ODBC_DRIVER.SQLDescribeCol
else:
self.SQLDescribeCol = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDisconnect"):
self.ODBC_DRIVER.SQLDisconnect.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDisconnect.argtypes = (self.SQLHDBC,)
self.SQLDisconnect = self.ODBC_DRIVER.SQLDisconnect
else:
self.SQLDisconnect = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLEndTran"):
self.ODBC_DRIVER.SQLEndTran.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLEndTran.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT,)
self.SQLEndTran = self.ODBC_DRIVER.SQLEndTran
else:
self.SQLEndTran = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLError"):
self.ODBC_DRIVER.SQLError.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLError.argtypes = (self.SQLHENV, self.SQLHDBC, self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), ctypes.POINTER(self.SQLINTEGER), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLError = self.ODBC_DRIVER.SQLError
else:
self.SQLError = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLExecDirect"):
self.ODBC_DRIVER.SQLExecDirect.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLExecDirect.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER,)
self.SQLExecDirect = self.ODBC_DRIVER.SQLExecDirect
else:
self.SQLExecDirect = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLExecute"):
self.ODBC_DRIVER.SQLExecute.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLExecute.argtypes = (self.SQLHSTMT,)
self.SQLExecute = self.ODBC_DRIVER.SQLExecute
else:
self.SQLExecute = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLFetch"):
self.ODBC_DRIVER.SQLFetch.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLFetch.argtypes = (self.SQLHSTMT,)
self.SQLFetch = self.ODBC_DRIVER.SQLFetch
else:
self.SQLFetch = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLFetchScroll"):
self.ODBC_DRIVER.SQLFetchScroll.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLFetchScroll.argtypes = (self.SQLHSTMT, self.SQLSMALLINT, self.SQLLEN,)
self.SQLFetchScroll = self.ODBC_DRIVER.SQLFetchScroll
else:
self.SQLFetchScroll = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLFreeConnect"):
self.ODBC_DRIVER.SQLFreeConnect.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLFreeConnect.argtypes = (self.SQLHDBC,)
self.SQLFreeConnect = self.ODBC_DRIVER.SQLFreeConnect
else:
self.SQLFreeConnect = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLFreeEnv"):
self.ODBC_DRIVER.SQLFreeEnv.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLFreeEnv.argtypes = (self.SQLHENV,)
self.SQLFreeEnv = self.ODBC_DRIVER.SQLFreeEnv
else:
self.SQLFreeEnv = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLFreeHandle"):
self.ODBC_DRIVER.SQLFreeHandle.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLFreeHandle.argtypes = (self.SQLSMALLINT, self.SQLHANDLE,)
self.SQLFreeHandle = self.ODBC_DRIVER.SQLFreeHandle
else:
self.SQLFreeHandle = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLFreeStmt"):
self.ODBC_DRIVER.SQLFreeStmt.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLFreeStmt.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT,)
self.SQLFreeStmt = self.ODBC_DRIVER.SQLFreeStmt
else:
self.SQLFreeStmt = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetConnectAttr"):
self.ODBC_DRIVER.SQLGetConnectAttr.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetConnectAttr.argtypes = (self.SQLHDBC, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetConnectAttr = self.ODBC_DRIVER.SQLGetConnectAttr
else:
self.SQLGetConnectAttr = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetConnectOption"):
self.ODBC_DRIVER.SQLGetConnectOption.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetConnectOption.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLPOINTER,)
self.SQLGetConnectOption = self.ODBC_DRIVER.SQLGetConnectOption
else:
self.SQLGetConnectOption = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetCursorName"):
self.ODBC_DRIVER.SQLGetCursorName.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetCursorName.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetCursorName = self.ODBC_DRIVER.SQLGetCursorName
else:
self.SQLGetCursorName = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetData"):
self.ODBC_DRIVER.SQLGetData.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetData.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLLEN, ctypes.POINTER(self.SQLLEN),)
self.SQLGetData = self.ODBC_DRIVER.SQLGetData
else:
self.SQLGetData = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDescField"):
self.ODBC_DRIVER.SQLGetDescField.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDescField.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetDescField = self.ODBC_DRIVER.SQLGetDescField
else:
self.SQLGetDescField = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDescRec"):
self.ODBC_DRIVER.SQLGetDescRec.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDescRec.argtypes = (self.SQLHDESC, self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDescRec = self.ODBC_DRIVER.SQLGetDescRec
else:
self.SQLGetDescRec = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDiagField"):
self.ODBC_DRIVER.SQLGetDiagField.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDiagField.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDiagField = self.ODBC_DRIVER.SQLGetDiagField
else:
self.SQLGetDiagField = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDiagRec"):
self.ODBC_DRIVER.SQLGetDiagRec.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDiagRec.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), ctypes.POINTER(self.SQLINTEGER), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDiagRec = self.ODBC_DRIVER.SQLGetDiagRec
else:
self.SQLGetDiagRec = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetEnvAttr"):
self.ODBC_DRIVER.SQLGetEnvAttr.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetEnvAttr.argtypes = (self.SQLHENV, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetEnvAttr = self.ODBC_DRIVER.SQLGetEnvAttr
else:
self.SQLGetEnvAttr = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetFunctions"):
self.ODBC_DRIVER.SQLGetFunctions.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetFunctions.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, ctypes.POINTER(self.SQLUSMALLINT),)
self.SQLGetFunctions = self.ODBC_DRIVER.SQLGetFunctions
else:
self.SQLGetFunctions = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetInfo"):
self.ODBC_DRIVER.SQLGetInfo.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetInfo.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetInfo = self.ODBC_DRIVER.SQLGetInfo
else:
self.SQLGetInfo = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetStmtAttr"):
self.ODBC_DRIVER.SQLGetStmtAttr.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetStmtAttr.argtypes = (self.SQLHSTMT, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetStmtAttr = self.ODBC_DRIVER.SQLGetStmtAttr
else:
self.SQLGetStmtAttr = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetStmtOption"):
self.ODBC_DRIVER.SQLGetStmtOption.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetStmtOption.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLPOINTER,)
self.SQLGetStmtOption = self.ODBC_DRIVER.SQLGetStmtOption
else:
self.SQLGetStmtOption = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetTypeInfo"):
self.ODBC_DRIVER.SQLGetTypeInfo.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetTypeInfo.argtypes = (self.SQLHSTMT, self.SQLSMALLINT,)
self.SQLGetTypeInfo = self.ODBC_DRIVER.SQLGetTypeInfo
else:
self.SQLGetTypeInfo = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLNumResultCols"):
self.ODBC_DRIVER.SQLNumResultCols.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLNumResultCols.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLNumResultCols = self.ODBC_DRIVER.SQLNumResultCols
else:
self.SQLNumResultCols = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLParamData"):
self.ODBC_DRIVER.SQLParamData.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLParamData.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLPOINTER),)
self.SQLParamData = self.ODBC_DRIVER.SQLParamData
else:
self.SQLParamData = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPrepare"):
self.ODBC_DRIVER.SQLPrepare.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLPrepare.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER,)
self.SQLPrepare = self.ODBC_DRIVER.SQLPrepare
else:
self.SQLPrepare = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPutData"):
self.ODBC_DRIVER.SQLPutData.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLPutData.argtypes = (self.SQLHSTMT, self.SQLPOINTER, self.SQLLEN,)
self.SQLPutData = self.ODBC_DRIVER.SQLPutData
else:
self.SQLPutData = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLRowCount"):
self.ODBC_DRIVER.SQLRowCount.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLRowCount.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLLEN),)
self.SQLRowCount = self.ODBC_DRIVER.SQLRowCount
else:
self.SQLRowCount = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetConnectAttr"):
self.ODBC_DRIVER.SQLSetConnectAttr.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetConnectAttr.argtypes = (self.SQLHDBC, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetConnectAttr = self.ODBC_DRIVER.SQLSetConnectAttr
else:
self.SQLSetConnectAttr = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetConnectOption"):
self.ODBC_DRIVER.SQLSetConnectOption.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetConnectOption.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLULEN,)
self.SQLSetConnectOption = self.ODBC_DRIVER.SQLSetConnectOption
else:
self.SQLSetConnectOption = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetCursorName"):
self.ODBC_DRIVER.SQLSetCursorName.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetCursorName.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLSetCursorName = self.ODBC_DRIVER.SQLSetCursorName
else:
self.SQLSetCursorName = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetDescField"):
self.ODBC_DRIVER.SQLSetDescField.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetDescField.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetDescField = self.ODBC_DRIVER.SQLSetDescField
else:
self.SQLSetDescField = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetDescRec"):
self.ODBC_DRIVER.SQLSetDescRec.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetDescRec.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLLEN, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, ctypes.POINTER(self.SQLLEN), ctypes.POINTER(self.SQLLEN),)
self.SQLSetDescRec = self.ODBC_DRIVER.SQLSetDescRec
else:
self.SQLSetDescRec = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetEnvAttr"):
self.ODBC_DRIVER.SQLSetEnvAttr.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetEnvAttr.argtypes = (self.SQLHENV, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetEnvAttr = self.ODBC_DRIVER.SQLSetEnvAttr
else:
self.SQLSetEnvAttr = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetStmtAttr"):
self.ODBC_DRIVER.SQLSetStmtAttr.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetStmtAttr.argtypes = (self.SQLHSTMT, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetStmtAttr = self.ODBC_DRIVER.SQLSetStmtAttr
else:
self.SQLSetStmtAttr = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetStmtOption"):
self.ODBC_DRIVER.SQLSetStmtOption.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetStmtOption.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLULEN,)
self.SQLSetStmtOption = self.ODBC_DRIVER.SQLSetStmtOption
else:
self.SQLSetStmtOption = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSpecialColumns"):
self.ODBC_DRIVER.SQLSpecialColumns.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSpecialColumns.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, self.SQLUSMALLINT, self.SQLUSMALLINT,)
self.SQLSpecialColumns = self.ODBC_DRIVER.SQLSpecialColumns
else:
self.SQLSpecialColumns = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLStatistics"):
self.ODBC_DRIVER.SQLStatistics.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLStatistics.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, self.SQLUSMALLINT, self.SQLUSMALLINT,)
self.SQLStatistics = self.ODBC_DRIVER.SQLStatistics
else:
self.SQLStatistics = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTables"):
self.ODBC_DRIVER.SQLTables.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLTables.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLTables = self.ODBC_DRIVER.SQLTables
else:
self.SQLTables = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTransact"):
self.ODBC_DRIVER.SQLTransact.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLTransact.argtypes = (self.SQLHENV, self.SQLHDBC, self.SQLUSMALLINT,)
self.SQLTransact = self.ODBC_DRIVER.SQLTransact
else:
self.SQLTransact = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetParam"):
self.ODBC_DRIVER.SQLSetParam.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetParam.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLULEN, self.SQLSMALLINT, self.SQLPOINTER, ctypes.POINTER(self.SQLLEN),)
self.SQLSetParam = self.ODBC_DRIVER.SQLSetParam
else:
self.SQLSetParam = self.UnimplementedSQLFunction
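# Illustrative sketch: executing a statement through the narrow-character
# binding above, assuming `odbc` and `hstmt` as in the other sketches and
# SQL_NTS (the null-terminated-string length code) from earlier in this
# module:
#     sql = ctypes.create_string_buffer(b"SELECT 1")
#     rc = odbc.SQLExecDirect(hstmt,
#                             ctypes.cast(sql, ctypes.POINTER(odbc.SQLCHAR)),
#                             odbc.SQL_NTS)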
####----------------------------------------------------------------------------
####This section mimics sqlucode.h----------------------------------------------
####----------------------------------------------------------------------------
################################################################################
####SQL datatypes - Unicode#####################################################
################################################################################
self.SQL_WCHAR = -8
self.SQL_WVARCHAR = -9
self.SQL_WLONGVARCHAR = -10
self.SQL_C_WCHAR = self.SQL_WCHAR
if self.UNICODE:
self.SQL_C_TCHAR = self.SQL_C_WCHAR
else:
self.SQL_C_TCHAR = self.SQL_C_CHAR
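# As in sqlucode.h, SQL_C_TCHAR mirrors the Windows TCHAR convention: it
# resolves to the wide C type when this module was loaded in Unicode mode and
# to the narrow one otherwise.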
################################################################################
####SQLTablesW##################################################################
################################################################################
self.SQL_ALL_CATALOGSW = "%"
self.SQL_ALL_SCHEMASW = "%"
self.SQL_ALL_TABLE_TYPESW = "%"
################################################################################
####SQL_SQLSTATE_SIZEW##########################################################
################################################################################
self.SQL_SQLSTATE_SIZEW = 10
################################################################################
####Function Prototypes - Unicode###############################################
################################################################################
if hasattr(self.ODBC_DRIVER, "SQLColAttributeW"):
self.ODBC_DRIVER.SQLColAttributeW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColAttributeW.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN),)
self.SQLColAttributeW = self.ODBC_DRIVER.SQLColAttributeW
else:
self.SQLColAttributeW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColAttributesW"):
self.ODBC_DRIVER.SQLColAttributesW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColAttributesW.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN),)
self.SQLColAttributesW = self.ODBC_DRIVER.SQLColAttributesW
else:
self.SQLColAttributesW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLConnectW"):
self.ODBC_DRIVER.SQLConnectW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLConnectW.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLConnectW = self.ODBC_DRIVER.SQLConnectW
else:
self.SQLConnectW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDescribeColW"):
self.ODBC_DRIVER.SQLDescribeColW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDescribeColW.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLULEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDescribeColW = self.ODBC_DRIVER.SQLDescribeColW
else:
self.SQLDescribeColW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLErrorW"):
self.ODBC_DRIVER.SQLErrorW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLErrorW.argtypes = (self.SQLHENV, self.SQLHDBC, self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), ctypes.POINTER(self.SQLINTEGER), ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLErrorW = self.ODBC_DRIVER.SQLErrorW
else:
self.SQLErrorW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLExecDirectW"):
self.ODBC_DRIVER.SQLExecDirectW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLExecDirectW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLINTEGER,)
self.SQLExecDirectW = self.ODBC_DRIVER.SQLExecDirectW
else:
self.SQLExecDirectW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetConnectAttrW"):
self.ODBC_DRIVER.SQLGetConnectAttrW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetConnectAttrW.argtypes = (self.SQLHDBC, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetConnectAttrW = self.ODBC_DRIVER.SQLGetConnectAttrW
else:
self.SQLGetConnectAttrW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetCursorNameW"):
self.ODBC_DRIVER.SQLGetCursorNameW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetCursorNameW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetCursorNameW = self.ODBC_DRIVER.SQLGetCursorNameW
else:
self.SQLGetCursorNameW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetDescFieldW"):
self.ODBC_DRIVER.SQLSetDescFieldW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetDescFieldW.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetDescFieldW = self.ODBC_DRIVER.SQLSetDescFieldW
else:
self.SQLSetDescFieldW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDescFieldW"):
self.ODBC_DRIVER.SQLGetDescFieldW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDescFieldW.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetDescFieldW = self.ODBC_DRIVER.SQLGetDescFieldW
else:
self.SQLGetDescFieldW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDescRecW"):
self.ODBC_DRIVER.SQLGetDescRecW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDescRecW.argtypes = (self.SQLHDESC, self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDescRecW = self.ODBC_DRIVER.SQLGetDescRecW
else:
self.SQLGetDescRecW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDiagFieldW"):
self.ODBC_DRIVER.SQLGetDiagFieldW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDiagFieldW.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDiagFieldW = self.ODBC_DRIVER.SQLGetDiagFieldW
else:
self.SQLGetDiagFieldW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDiagRecW"):
self.ODBC_DRIVER.SQLGetDiagRecW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDiagRecW.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), ctypes.POINTER(self.SQLINTEGER), ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDiagRecW = self.ODBC_DRIVER.SQLGetDiagRecW
else:
self.SQLGetDiagRecW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPrepareW"):
self.ODBC_DRIVER.SQLPrepareW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLPrepareW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLINTEGER,)
self.SQLPrepareW = self.ODBC_DRIVER.SQLPrepareW
else:
self.SQLPrepareW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetConnectAttrW"):
self.ODBC_DRIVER.SQLSetConnectAttrW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetConnectAttrW.argtypes = (self.SQLHDBC, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetConnectAttrW = self.ODBC_DRIVER.SQLSetConnectAttrW
else:
self.SQLSetConnectAttrW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetCursorNameW"):
self.ODBC_DRIVER.SQLSetCursorNameW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetCursorNameW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLSetCursorNameW = self.ODBC_DRIVER.SQLSetCursorNameW
else:
self.SQLSetCursorNameW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColumnsW"):
self.ODBC_DRIVER.SQLColumnsW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColumnsW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLColumnsW = self.ODBC_DRIVER.SQLColumnsW
else:
self.SQLColumnsW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetConnectOptionW"):
self.ODBC_DRIVER.SQLGetConnectOptionW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetConnectOptionW.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLPOINTER,)
self.SQLGetConnectOptionW = self.ODBC_DRIVER.SQLGetConnectOptionW
else:
self.SQLGetConnectOptionW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetInfoW"):
self.ODBC_DRIVER.SQLGetInfoW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetInfoW.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetInfoW = self.ODBC_DRIVER.SQLGetInfoW
else:
self.SQLGetInfoW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetTypeInfoW"):
self.ODBC_DRIVER.SQLGetTypeInfoW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetTypeInfoW.argtypes = (self.SQLHSTMT, self.SQLSMALLINT,)
self.SQLGetTypeInfoW = self.ODBC_DRIVER.SQLGetTypeInfoW
else:
self.SQLGetTypeInfoW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetConnectOptionW"):
self.ODBC_DRIVER.SQLSetConnectOptionW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetConnectOptionW.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLULEN,)
self.SQLSetConnectOptionW = self.ODBC_DRIVER.SQLSetConnectOptionW
else:
self.SQLSetConnectOptionW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSpecialColumnsW"):
self.ODBC_DRIVER.SQLSpecialColumnsW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSpecialColumnsW.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, self.SQLUSMALLINT, self.SQLUSMALLINT,)
self.SQLSpecialColumnsW = self.ODBC_DRIVER.SQLSpecialColumnsW
else:
self.SQLSpecialColumnsW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLStatisticsW"):
self.ODBC_DRIVER.SQLStatisticsW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLStatisticsW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, self.SQLUSMALLINT, self.SQLUSMALLINT,)
self.SQLStatisticsW = self.ODBC_DRIVER.SQLStatisticsW
else:
self.SQLStatisticsW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTablesW"):
self.ODBC_DRIVER.SQLTablesW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLTablesW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLTablesW = self.ODBC_DRIVER.SQLTablesW
else:
self.SQLTablesW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDataSourcesW"):
self.ODBC_DRIVER.SQLDataSourcesW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDataSourcesW.argtypes = (self.SQLHENV, self.SQLUSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDataSourcesW = self.ODBC_DRIVER.SQLDataSourcesW
else:
self.SQLDataSourcesW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDriverConnectW"):
self.ODBC_DRIVER.SQLDriverConnectW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDriverConnectW.argtypes = (self.SQLHDBC, self.SQLHWND, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), self.SQLUSMALLINT,)
self.SQLDriverConnectW = self.ODBC_DRIVER.SQLDriverConnectW
else:
self.SQLDriverConnectW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLBrowseConnectW"):
self.ODBC_DRIVER.SQLBrowseConnectW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLBrowseConnectW.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLBrowseConnectW = self.ODBC_DRIVER.SQLBrowseConnectW
else:
self.SQLBrowseConnectW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColumnPrivilegesW"):
self.ODBC_DRIVER.SQLColumnPrivilegesW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColumnPrivilegesW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLColumnPrivilegesW = self.ODBC_DRIVER.SQLColumnPrivilegesW
else:
self.SQLColumnPrivilegesW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetStmtAttrW"):
self.ODBC_DRIVER.SQLGetStmtAttrW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetStmtAttrW.argtypes = (self.SQLHSTMT, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetStmtAttrW = self.ODBC_DRIVER.SQLGetStmtAttrW
else:
self.SQLGetStmtAttrW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetStmtAttrW"):
self.ODBC_DRIVER.SQLSetStmtAttrW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetStmtAttrW.argtypes = (self.SQLHSTMT, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetStmtAttrW = self.ODBC_DRIVER.SQLSetStmtAttrW
else:
self.SQLSetStmtAttrW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLForeignKeysW"):
self.ODBC_DRIVER.SQLForeignKeysW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLForeignKeysW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLForeignKeysW = self.ODBC_DRIVER.SQLForeignKeysW
else:
self.SQLForeignKeysW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLNativeSqlW"):
self.ODBC_DRIVER.SQLNativeSqlW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLNativeSqlW.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLWCHAR), self.SQLINTEGER, ctypes.POINTER(self.SQLWCHAR), self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLNativeSqlW = self.ODBC_DRIVER.SQLNativeSqlW
else:
self.SQLNativeSqlW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPrimaryKeysW"):
self.ODBC_DRIVER.SQLPrimaryKeysW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLPrimaryKeysW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLPrimaryKeysW = self.ODBC_DRIVER.SQLPrimaryKeysW
else:
self.SQLPrimaryKeysW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLProcedureColumnsW"):
self.ODBC_DRIVER.SQLProcedureColumnsW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLProcedureColumnsW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLProcedureColumnsW = self.ODBC_DRIVER.SQLProcedureColumnsW
else:
self.SQLProcedureColumnsW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLProceduresW"):
self.ODBC_DRIVER.SQLProceduresW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLProceduresW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLProceduresW = self.ODBC_DRIVER.SQLProceduresW
else:
self.SQLProceduresW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTablePrivilegesW"):
self.ODBC_DRIVER.SQLTablePrivilegesW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLTablePrivilegesW.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT,)
self.SQLTablePrivilegesW = self.ODBC_DRIVER.SQLTablePrivilegesW
else:
self.SQLTablePrivilegesW = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDriversW"):
self.ODBC_DRIVER.SQLDriversW.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDriversW.argtypes = (self.SQLHENV, self.SQLUSMALLINT, ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLWCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDriversW = self.ODBC_DRIVER.SQLDriversW
else:
self.SQLDriversW = self.UnimplementedSQLFunction
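# Illustrative sketch: a DSN-less connection through SQLDriverConnectW,
# assuming `odbc` and an allocated `hdbc` as in the other sketches, a
# hypothetical connection string, and that SQLWCHAR matches the wide type
# produced by ctypes.create_unicode_buffer on this platform:
#     conn_str = ctypes.create_unicode_buffer("DRIVER=...;DATABASE=...")
#     out_buf = ctypes.create_unicode_buffer(1024)
#     out_len = odbc.SQLSMALLINT()
#     rc = odbc.SQLDriverConnectW(hdbc, None,
#                                 ctypes.cast(conn_str, ctypes.POINTER(odbc.SQLWCHAR)),
#                                 odbc.SQL_NTS,
#                                 ctypes.cast(out_buf, ctypes.POINTER(odbc.SQLWCHAR)),
#                                 len(out_buf), ctypes.byref(out_len),
#                                 0)  # 0 == SQL_DRIVER_NOPROMPT in sqlext.h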
################################################################################
####Function Prototypes - ANSI##################################################
################################################################################
if hasattr(self.ODBC_DRIVER, "SQLColAttributeA"):
self.ODBC_DRIVER.SQLColAttributeA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColAttributeA.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN),)
self.SQLColAttributeA = self.ODBC_DRIVER.SQLColAttributeA
else:
self.SQLColAttributeA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColAttributesA"):
self.ODBC_DRIVER.SQLColAttributesA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColAttributesA.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN),)
self.SQLColAttributesA = self.ODBC_DRIVER.SQLColAttributesA
else:
self.SQLColAttributesA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLConnectA"):
self.ODBC_DRIVER.SQLConnectA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLConnectA.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLConnectA = self.ODBC_DRIVER.SQLConnectA
else:
self.SQLConnectA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDescribeColA"):
self.ODBC_DRIVER.SQLDescribeColA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDescribeColA.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLULEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDescribeColA = self.ODBC_DRIVER.SQLDescribeColA
else:
self.SQLDescribeColA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLErrorA"):
self.ODBC_DRIVER.SQLErrorA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLErrorA.argtypes = (self.SQLHENV, self.SQLHDBC, self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), ctypes.POINTER(self.SQLINTEGER), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLErrorA = self.ODBC_DRIVER.SQLErrorA
else:
self.SQLErrorA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLExecDirectA"):
self.ODBC_DRIVER.SQLExecDirectA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLExecDirectA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER,)
self.SQLExecDirectA = self.ODBC_DRIVER.SQLExecDirectA
else:
self.SQLExecDirectA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetConnectAttrA"):
self.ODBC_DRIVER.SQLGetConnectAttrA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetConnectAttrA.argtypes = (self.SQLHDBC, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetConnectAttrA = self.ODBC_DRIVER.SQLGetConnectAttrA
else:
self.SQLGetConnectAttrA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetCursorNameA"):
self.ODBC_DRIVER.SQLGetCursorNameA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetCursorNameA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetCursorNameA = self.ODBC_DRIVER.SQLGetCursorNameA
else:
self.SQLGetCursorNameA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetDescFieldA"):
self.ODBC_DRIVER.SQLSetDescFieldA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetDescFieldA.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetDescFieldA = self.ODBC_DRIVER.SQLSetDescFieldA
else:
self.SQLSetDescFieldA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDescFieldA"):
self.ODBC_DRIVER.SQLGetDescFieldA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDescFieldA.argtypes = (self.SQLHDESC, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetDescFieldA = self.ODBC_DRIVER.SQLGetDescFieldA
else:
self.SQLGetDescFieldA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDescRecA"):
self.ODBC_DRIVER.SQLGetDescRecA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDescRecA.argtypes = (self.SQLHDESC, self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDescRecA = self.ODBC_DRIVER.SQLGetDescRecA
else:
self.SQLGetDescRecA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDiagFieldA"):
self.ODBC_DRIVER.SQLGetDiagFieldA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDiagFieldA.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDiagFieldA = self.ODBC_DRIVER.SQLGetDiagFieldA
else:
self.SQLGetDiagFieldA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetDiagRecA"):
self.ODBC_DRIVER.SQLGetDiagRecA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetDiagRecA.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), ctypes.POINTER(self.SQLINTEGER), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetDiagRecA = self.ODBC_DRIVER.SQLGetDiagRecA
else:
self.SQLGetDiagRecA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPrepareA"):
self.ODBC_DRIVER.SQLPrepareA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLPrepareA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER,)
self.SQLPrepareA = self.ODBC_DRIVER.SQLPrepareA
else:
self.SQLPrepareA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetConnectAttrA"):
self.ODBC_DRIVER.SQLSetConnectAttrA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetConnectAttrA.argtypes = (self.SQLHDBC, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetConnectAttrA = self.ODBC_DRIVER.SQLSetConnectAttrA
else:
self.SQLSetConnectAttrA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetCursorNameA"):
self.ODBC_DRIVER.SQLSetCursorNameA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetCursorNameA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLSetCursorNameA = self.ODBC_DRIVER.SQLSetCursorNameA
else:
self.SQLSetCursorNameA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColumnsA"):
self.ODBC_DRIVER.SQLColumnsA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColumnsA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLColumnsA = self.ODBC_DRIVER.SQLColumnsA
else:
self.SQLColumnsA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetConnectOptionA"):
self.ODBC_DRIVER.SQLGetConnectOptionA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetConnectOptionA.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLPOINTER,)
self.SQLGetConnectOptionA = self.ODBC_DRIVER.SQLGetConnectOptionA
else:
self.SQLGetConnectOptionA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetInfoA"):
self.ODBC_DRIVER.SQLGetInfoA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetInfoA.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLGetInfoA = self.ODBC_DRIVER.SQLGetInfoA
else:
self.SQLGetInfoA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetTypeInfoA"):
self.ODBC_DRIVER.SQLGetTypeInfoA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetTypeInfoA.argtypes = (self.SQLHSTMT, self.SQLSMALLINT,)
self.SQLGetTypeInfoA = self.ODBC_DRIVER.SQLGetTypeInfoA
else:
self.SQLGetTypeInfoA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetConnectOptionA"):
self.ODBC_DRIVER.SQLSetConnectOptionA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetConnectOptionA.argtypes = (self.SQLHDBC, self.SQLUSMALLINT, self.SQLULEN,)
self.SQLSetConnectOptionA = self.ODBC_DRIVER.SQLSetConnectOptionA
else:
self.SQLSetConnectOptionA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSpecialColumnsA"):
self.ODBC_DRIVER.SQLSpecialColumnsA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSpecialColumnsA.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, self.SQLUSMALLINT, self.SQLUSMALLINT,)
self.SQLSpecialColumnsA = self.ODBC_DRIVER.SQLSpecialColumnsA
else:
self.SQLSpecialColumnsA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLStatisticsA"):
self.ODBC_DRIVER.SQLStatisticsA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLStatisticsA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, self.SQLUSMALLINT, self.SQLUSMALLINT,)
self.SQLStatisticsA = self.ODBC_DRIVER.SQLStatisticsA
else:
self.SQLStatisticsA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTablesA"):
self.ODBC_DRIVER.SQLTablesA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLTablesA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLTablesA = self.ODBC_DRIVER.SQLTablesA
else:
self.SQLTablesA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDataSourcesA"):
self.ODBC_DRIVER.SQLDataSourcesA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDataSourcesA.argtypes = (self.SQLHENV, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDataSourcesA = self.ODBC_DRIVER.SQLDataSourcesA
else:
self.SQLDataSourcesA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDriverConnectA"):
self.ODBC_DRIVER.SQLDriverConnectA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDriverConnectA.argtypes = (self.SQLHDBC, self.SQLHWND, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), self.SQLUSMALLINT,)
self.SQLDriverConnectA = self.ODBC_DRIVER.SQLDriverConnectA
else:
self.SQLDriverConnectA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLBrowseConnectA"):
self.ODBC_DRIVER.SQLBrowseConnectA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLBrowseConnectA.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLBrowseConnectA = self.ODBC_DRIVER.SQLBrowseConnectA
else:
self.SQLBrowseConnectA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColumnPrivilegesA"):
self.ODBC_DRIVER.SQLColumnPrivilegesA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLColumnPrivilegesA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLColumnPrivilegesA = self.ODBC_DRIVER.SQLColumnPrivilegesA
else:
self.SQLColumnPrivilegesA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLGetStmtAttrA"):
self.ODBC_DRIVER.SQLGetStmtAttrA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLGetStmtAttrA.argtypes = (self.SQLHSTMT, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLGetStmtAttrA = self.ODBC_DRIVER.SQLGetStmtAttrA
else:
self.SQLGetStmtAttrA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetStmtAttrA"):
self.ODBC_DRIVER.SQLSetStmtAttrA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLSetStmtAttrA.argtypes = (self.SQLHSTMT, self.SQLINTEGER, self.SQLPOINTER, self.SQLINTEGER,)
self.SQLSetStmtAttrA = self.ODBC_DRIVER.SQLSetStmtAttrA
else:
self.SQLSetStmtAttrA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLForeignKeysA"):
self.ODBC_DRIVER.SQLForeignKeysA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLForeignKeysA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLForeignKeysA = self.ODBC_DRIVER.SQLForeignKeysA
else:
self.SQLForeignKeysA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLNativeSqlA"):
self.ODBC_DRIVER.SQLNativeSqlA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLNativeSqlA.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
self.SQLNativeSqlA = self.ODBC_DRIVER.SQLNativeSqlA
else:
self.SQLNativeSqlA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPrimaryKeysA"):
self.ODBC_DRIVER.SQLPrimaryKeysA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLPrimaryKeysA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLPrimaryKeysA = self.ODBC_DRIVER.SQLPrimaryKeysA
else:
self.SQLPrimaryKeysA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLProcedureColumnsA"):
self.ODBC_DRIVER.SQLProcedureColumnsA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLProcedureColumnsA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLProcedureColumnsA = self.ODBC_DRIVER.SQLProcedureColumnsA
else:
self.SQLProcedureColumnsA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLProceduresA"):
self.ODBC_DRIVER.SQLProceduresA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLProceduresA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLProceduresA = self.ODBC_DRIVER.SQLProceduresA
else:
self.SQLProceduresA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTablePrivilegesA"):
self.ODBC_DRIVER.SQLTablePrivilegesA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLTablePrivilegesA.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
self.SQLTablePrivilegesA = self.ODBC_DRIVER.SQLTablePrivilegesA
else:
self.SQLTablePrivilegesA = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDriversA"):
self.ODBC_DRIVER.SQLDriversA.restype = self.SQLRETURN
self.ODBC_DRIVER.SQLDriversA.argtypes = (self.SQLHENV, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
self.SQLDriversA = self.ODBC_DRIVER.SQLDriversA
else:
self.SQLDriversA = self.UnimplementedSQLFunction
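# Illustrative sketch (commented out, not part of the binding logic): once
# environment, connection, and statement handles have been allocated
# elsewhere, a bound ANSI entry point can be driven through ctypes. The
# `hstmt` handle below is hypothetical; SQL_NTS is the usual
# null-terminated-string length sentinel from sql.h:
#   sql = ctypes.create_string_buffer(b"SELECT 1")
#   rc = self.SQLExecDirectA(hstmt,
#                            ctypes.cast(sql, ctypes.POINTER(self.SQLCHAR)),
#                            self.SQL_NTS)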
################################################################################
####Mapping macros for Unicode##################################################
################################################################################
#if self.UNICODE:
# self.SQLColAttribute = self.SQLColAttributeW
# self.SQLColAttributes = self.SQLColAttributesW
# self.SQLConnect = self.SQLConnectW
# self.SQLDescribeCol = self.SQLDescribeColW
# self.SQLError = self.SQLErrorW
# self.SQLExecDirect = self.SQLExecDirectW
# self.SQLGetConnectAttr = self.SQLGetConnectAttrW
# self.SQLGetCursorName = self.SQLGetCursorNameW
# self.SQLGetDescField = self.SQLGetDescFieldW
# self.SQLGetDescRec = self.SQLGetDescRecW
# self.SQLGetDiagField = self.SQLGetDiagFieldW
# self.SQLGetDiagRec = self.SQLGetDiagRecW
# self.SQLPrepare = self.SQLPrepareW
# self.SQLSetConnectAttr = self.SQLSetConnectAttrW
# self.SQLSetCursorName = self.SQLSetCursorNameW
# self.SQLSetDescField = self.SQLSetDescFieldW
# self.SQLSetStmtAttr = self.SQLSetStmtAttrW
# self.SQLGetStmtAttr = self.SQLGetStmtAttrW
# self.SQLColumns = self.SQLColumnsW
# self.SQLGetConnectOption = self.SQLGetConnectOptionW
# self.SQLGetInfo = self.SQLGetInfoW
# self.SQLGetTypeInfo = self.SQLGetTypeInfoW
# self.SQLSetConnectOption = self.SQLSetConnectOptionW
# self.SQLSpecialColumns = self.SQLSpecialColumnsW
# self.SQLStatistics = self.SQLStatisticsW
# self.SQLTables = self.SQLTablesW
# self.SQLDataSources = self.SQLDataSourcesW
# self.SQLDriverConnect = self.SQLDriverConnectW
# self.SQLBrowseConnect = self.SQLBrowseConnectW
# self.SQLColumnPrivileges = self.SQLColumnPrivilegesW
# self.SQLForeignKeys = self.SQLForeignKeysW
# self.SQLNativeSql = self.SQLNativeSqlW
# self.SQLPrimaryKeys = self.SQLPrimaryKeysW
# self.SQLProcedureColumns = self.SQLProcedureColumnsW
# self.SQLProcedures = self.SQLProceduresW
# self.SQLTablePrivileges = self.SQLTablePrivilegesW
# self.SQLDrivers = self.SQLDriversW
#else:
# self.SQLColAttribute = self.SQLColAttributeA
# self.SQLColAttributes = self.SQLColAttributesA
# self.SQLConnect = self.SQLConnectA
# self.SQLDescribeCol = self.SQLDescribeColA
# self.SQLError = self.SQLErrorA
# self.SQLExecDirect = self.SQLExecDirectA
# self.SQLGetConnectAttr = self.SQLGetConnectAttrA
# self.SQLGetCursorName = self.SQLGetCursorNameA
# self.SQLGetDescField = self.SQLGetDescFieldA
# self.SQLGetDescRec = self.SQLGetDescRecA
# self.SQLGetDiagField = self.SQLGetDiagFieldA
# self.SQLGetDiagRec = self.SQLGetDiagRecA
# self.SQLPrepare = self.SQLPrepareA
# self.SQLSetConnectAttr = self.SQLSetConnectAttrA
# self.SQLSetCursorName = self.SQLSetCursorNameA
# self.SQLSetDescField = self.SQLSetDescFieldA
# self.SQLSetStmtAttr = self.SQLSetStmtAttrA
# self.SQLGetStmtAttr = self.SQLGetStmtAttrA
# self.SQLColumns = self.SQLColumnsA
# self.SQLGetConnectOption = self.SQLGetConnectOptionA
# self.SQLGetInfo = self.SQLGetInfoA
# self.SQLGetTypeInfo = self.SQLGetTypeInfoA
# self.SQLSetConnectOption = self.SQLSetConnectOptionA
# self.SQLSpecialColumns = self.SQLSpecialColumnsA
# self.SQLStatistics = self.SQLStatisticsA
# self.SQLTables = self.SQLTablesA
# self.SQLDataSources = self.SQLDataSourcesA
# self.SQLDriverConnect = self.SQLDriverConnectA
# self.SQLBrowseConnect = self.SQLBrowseConnectA
# self.SQLColumnPrivileges = self.SQLColumnPrivilegesA
# self.SQLForeignKeys = self.SQLForeignKeysA
# self.SQLNativeSql = self.SQLNativeSqlA
# self.SQLPrimaryKeys = self.SQLPrimaryKeysA
# self.SQLProcedureColumns = self.SQLProcedureColumnsA
# self.SQLProcedures = self.SQLProceduresA
# self.SQLTablePrivileges = self.SQLTablePrivilegesA
# self.SQLDrivers = self.SQLDriversA
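# A more compact, loop-based alternative to the two commented blocks above
# (illustrative only; it assumes every *A/*W pair has already been bound or
# stubbed with UnimplementedSQLFunction):
#   suffix = "W" if self.UNICODE else "A"
#   for name in ("SQLConnect", "SQLExecDirect", "SQLPrepare", "SQLTables"):
#       setattr(self, name, getattr(self, name + suffix))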
####----------------------------------------------------------------------------
####This section mimics sqlext.h------------------------------------------------
####----------------------------------------------------------------------------
################################################################################
################################################################################
####Useful Constants############################################################
################################################################################
self.SQL_SPEC_MAJOR = 3
self.SQL_SPEC_MINOR = 52
self.SQL_SPEC_STRING = "03.52"
self.SQL_SQLSTATE_SIZE = 5
self.SQL_MAX_DSN_LENGTH = 32
self.SQL_MAX_OPTION_STRING_LENGTH = 256
################################################################################
####Handle types################################################################
################################################################################
self.SQL_HANDLE_SENV = 5
################################################################################
####Function return codes#######################################################
################################################################################
self.SQL_NO_DATA_FOUND = self.SQL_NO_DATA
################################################################################
####Special length values for attributes########################################
################################################################################
self.SQL_IS_POINTER = -4
self.SQL_IS_UINTEGER = -5
self.SQL_IS_INTEGER = -6
self.SQL_IS_USMALLINT = -7
self.SQL_IS_SMALLINT = -8
################################################################################
####SQL extended datatypes######################################################
################################################################################
self.SQL_DATE = 9
self.SQL_INTERVAL = 10
self.SQL_TIME = 10
self.SQL_TIMESTAMP = 11
self.SQL_LONGVARCHAR = -1
self.SQL_BINARY = -2
self.SQL_VARBINARY = -3
self.SQL_LONGVARBINARY = -4
self.SQL_BIGINT = -5
self.SQL_TINYINT = -6
self.SQL_BIT = -7
self.SQL_GUID = -11
################################################################################
####SQL Interval datatypes######################################################
################################################################################
self.SQL_CODE_YEAR = 1
self.SQL_CODE_MONTH = 2
self.SQL_CODE_DAY = 3
self.SQL_CODE_HOUR = 4
self.SQL_CODE_MINUTE = 5
self.SQL_CODE_SECOND = 6
self.SQL_CODE_YEAR_TO_MONTH = 7
self.SQL_CODE_DAY_TO_HOUR = 8
self.SQL_CODE_DAY_TO_MINUTE = 9
self.SQL_CODE_DAY_TO_SECOND = 10
self.SQL_CODE_HOUR_TO_MINUTE = 11
self.SQL_CODE_HOUR_TO_SECOND = 12
self.SQL_CODE_MINUTE_TO_SECOND = 13
self.SQL_INTERVAL_YEAR = 100 + self.SQL_CODE_YEAR
self.SQL_INTERVAL_MONTH = 100 + self.SQL_CODE_MONTH
self.SQL_INTERVAL_DAY = 100 + self.SQL_CODE_DAY
self.SQL_INTERVAL_HOUR = 100 + self.SQL_CODE_HOUR
self.SQL_INTERVAL_MINUTE = 100 + self.SQL_CODE_MINUTE
self.SQL_INTERVAL_SECOND = 100 + self.SQL_CODE_SECOND
self.SQL_INTERVAL_YEAR_TO_MONTH = 100 + self.SQL_CODE_YEAR_TO_MONTH
self.SQL_INTERVAL_DAY_TO_HOUR = 100 + self.SQL_CODE_DAY_TO_HOUR
self.SQL_INTERVAL_DAY_TO_MINUTE = 100 + self.SQL_CODE_DAY_TO_MINUTE
self.SQL_INTERVAL_DAY_TO_SECOND = 100 + self.SQL_CODE_DAY_TO_SECOND
self.SQL_INTERVAL_HOUR_TO_MINUTE = 100 + self.SQL_CODE_HOUR_TO_MINUTE
self.SQL_INTERVAL_HOUR_TO_SECOND = 100 + self.SQL_CODE_HOUR_TO_SECOND
self.SQL_INTERVAL_MINUTE_TO_SECOND = 100 + self.SQL_CODE_MINUTE_TO_SECOND
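# Worked examples of the interval encoding above: SQL_INTERVAL_YEAR
# = 100 + 1 = 101 and SQL_INTERVAL_MINUTE_TO_SECOND = 100 + 13 = 113.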
################################################################################
####SQL Unicode datatypes#######################################################
################################################################################
self.SQL_UNICODE = self.SQL_WCHAR
self.SQL_UNICODE_VARCHAR = self.SQL_WVARCHAR
self.SQL_UNICODE_LONGVARCHAR = self.SQL_WLONGVARCHAR
self.SQL_UNICODE_CHAR = self.SQL_WCHAR
self.SQL_TYPE_DRIVER_START = self.SQL_INTERVAL_YEAR
self.SQL_TYPE_DRIVER_END = self.SQL_UNICODE_LONGVARCHAR
self.SQL_SIGNED_OFFSET = -20
self.SQL_UNSIGNED_OFFSET = -22
################################################################################
####C datatype to SQL datatype mapping##########################################
################################################################################
self.SQL_C_CHAR = self.SQL_CHAR
self.SQL_C_LONG = self.SQL_INTEGER
self.SQL_C_SHORT = self.SQL_SMALLINT
self.SQL_C_FLOAT = self.SQL_REAL
self.SQL_C_DOUBLE = self.SQL_DOUBLE
self.SQL_C_NUMERIC = self.SQL_NUMERIC
self.SQL_C_DEFAULT = 99
self.SQL_C_DATE = self.SQL_DATE
self.SQL_C_TIME = self.SQL_TIME
self.SQL_C_TIMESTAMP = self.SQL_TIMESTAMP
self.SQL_C_BINARY = self.SQL_BINARY
self.SQL_C_BIT = self.SQL_BIT
self.SQL_C_TINYINT = self.SQL_TINYINT
self.SQL_C_SLONG = self.SQL_C_LONG+self.SQL_SIGNED_OFFSET
self.SQL_C_SSHORT = self.SQL_C_SHORT+self.SQL_SIGNED_OFFSET
self.SQL_C_STINYINT = self.SQL_TINYINT+self.SQL_SIGNED_OFFSET
self.SQL_C_ULONG = self.SQL_C_LONG+self.SQL_UNSIGNED_OFFSET
self.SQL_C_USHORT = self.SQL_C_SHORT+self.SQL_UNSIGNED_OFFSET
self.SQL_C_UTINYINT = self.SQL_TINYINT+self.SQL_UNSIGNED_OFFSET
self.SQL_C_TYPE_DATE = self.SQL_TYPE_DATE
self.SQL_C_TYPE_TIME = self.SQL_TYPE_TIME
self.SQL_C_TYPE_TIMESTAMP = self.SQL_TYPE_TIMESTAMP
self.SQL_C_INTERVAL_YEAR = self.SQL_INTERVAL_YEAR
self.SQL_C_INTERVAL_MONTH = self.SQL_INTERVAL_MONTH
self.SQL_C_INTERVAL_DAY = self.SQL_INTERVAL_DAY
self.SQL_C_INTERVAL_HOUR = self.SQL_INTERVAL_HOUR
self.SQL_C_INTERVAL_MINUTE = self.SQL_INTERVAL_MINUTE
self.SQL_C_INTERVAL_SECOND = self.SQL_INTERVAL_SECOND
self.SQL_C_INTERVAL_YEAR_TO_MONTH = self.SQL_INTERVAL_YEAR_TO_MONTH
self.SQL_C_INTERVAL_DAY_TO_HOUR = self.SQL_INTERVAL_DAY_TO_HOUR
self.SQL_C_INTERVAL_DAY_TO_MINUTE = self.SQL_INTERVAL_DAY_TO_MINUTE
self.SQL_C_INTERVAL_DAY_TO_SECOND = self.SQL_INTERVAL_DAY_TO_SECOND
self.SQL_C_INTERVAL_HOUR_TO_MINUTE = self.SQL_INTERVAL_HOUR_TO_MINUTE
self.SQL_C_INTERVAL_HOUR_TO_SECOND = self.SQL_INTERVAL_HOUR_TO_SECOND
self.SQL_C_INTERVAL_MINUTE_TO_SECOND = self.SQL_INTERVAL_MINUTE_TO_SECOND
self.SQL_C_SBIGINT = self.SQL_BIGINT+self.SQL_SIGNED_OFFSET
self.SQL_C_UBIGINT = self.SQL_BIGINT+self.SQL_UNSIGNED_OFFSET
self.SQL_C_BOOKMARK = self.SQL_C_UBIGINT
self.SQL_C_VARBOOKMARK = self.SQL_C_BINARY
self.SQL_C_GUID = self.SQL_GUID
self.SQL_TYPE_NULL = 0
self.SQL_TYPE_MIN = self.SQL_BIT
self.SQL_TYPE_MAX = self.SQL_VARCHAR
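# Worked example of the offset arithmetic above: with SQL_INTEGER = 4,
# SQL_C_SLONG = 4 + (-20) = -16 and SQL_C_ULONG = 4 + (-22) = -18, matching
# the concrete values in the C headers.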
################################################################################
####Level 1 Functions###########################################################
################################################################################
################################################################################
####SQLBindParameter############################################################
################################################################################
self.SQL_DEFAULT_PARAM = -5
self.SQL_IGNORE = -6
self.SQL_COLUMN_IGNORE = self.SQL_IGNORE
self.SQL_LEN_DATA_AT_EXEC_OFFSET = -100
def SQL_LEN_DATA_AT_EXEC_DEFINITION(length):
return (-length) + self.SQL_LEN_DATA_AT_EXEC_OFFSET
self.SQL_LEN_DATA_AT_EXEC = SQL_LEN_DATA_AT_EXEC_DEFINITION
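# Usage sketch: self.SQL_LEN_DATA_AT_EXEC(10) evaluates to (-10) + (-100)
# = -110, mirroring the C macro SQL_LEN_DATA_AT_EXEC(length) used when
# binding data-at-execution parameters.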
################################################################################
####binary length for driver specific attributes################################
################################################################################
self.SQL_LEN_BINARY_ATTR_OFFSET = -100
def SQL_LEN_BINARY_ATTR_DEFINITION(length):
return (-length) + self.SQL_LEN_BINARY_ATTR_OFFSET
self.SQL_LEN_BINARY_ATTR = SQL_LEN_BINARY_ATTR_DEFINITION
################################################################################
####SQLColAttributes - ODBC 2.x defines#########################################
################################################################################
self.SQL_COLUMN_COUNT = 0
self.SQL_COLUMN_NAME = 1
self.SQL_COLUMN_TYPE = 2
self.SQL_COLUMN_LENGTH = 3
self.SQL_COLUMN_PRECISION = 4
self.SQL_COLUMN_SCALE = 5
self.SQL_COLUMN_DISPLAY_SIZE = 6
self.SQL_COLUMN_NULLABLE = 7
self.SQL_COLUMN_UNSIGNED = 8
self.SQL_COLUMN_MONEY = 9
self.SQL_COLUMN_UPDATABLE = 10
self.SQL_COLUMN_AUTO_INCREMENT = 11
self.SQL_COLUMN_CASE_SENSITIVE = 12
self.SQL_COLUMN_SEARCHABLE = 13
self.SQL_COLUMN_TYPE_NAME = 14
self.SQL_COLUMN_TABLE_NAME = 15
self.SQL_COLUMN_OWNER_NAME = 16
self.SQL_COLUMN_QUALIFIER_NAME = 17
self.SQL_COLUMN_LABEL = 18
self.SQL_COLATT_OPT_MAX = self.SQL_COLUMN_LABEL
self.SQL_COLUMN_DRIVER_START = 1000
self.SQL_COLATT_OPT_MIN = self.SQL_COLUMN_COUNT
################################################################################
####SQLColAttributes - SQL_COLUMN_UPDATABLE#####################################
################################################################################
self.SQL_ATTR_READONLY = 0
self.SQL_ATTR_WRITE = 1
self.SQL_ATTR_READWRITE_UNKNOWN = 2
################################################################################
####SQLColAttributes - SQL_COLUMN_SEARCHABLE####################################
################################################################################
self.SQL_UNSEARCHABLE = 0
self.SQL_LIKE_ONLY = 1
self.SQL_ALL_EXCEPT_LIKE = 2
self.SQL_SEARCHABLE = 3
self.SQL_PRED_SEARCHABLE = self.SQL_SEARCHABLE
################################################################################
####SQLDataSources - additional fetch directions################################
################################################################################
self.SQL_FETCH_FIRST_USER = 31
self.SQL_FETCH_FIRST_SYSTEM = 32
################################################################################
####SQLDriverConnect############################################################
################################################################################
self.SQL_DRIVER_NOPROMPT = 0
self.SQL_DRIVER_COMPLETE = 1
self.SQL_DRIVER_PROMPT = 2
self.SQL_DRIVER_COMPLETE_REQUIRED = 3
################################################################################
####SQLGetConnectAttr - ODBC 2.x attributes#####################################
################################################################################
self.SQL_ACCESS_MODE = 101
self.SQL_AUTOCOMMIT = 102
self.SQL_LOGIN_TIMEOUT = 103
self.SQL_OPT_TRACE = 104
self.SQL_OPT_TRACEFILE = 105
self.SQL_TRANSLATE_DLL = 106
self.SQL_TRANSLATE_OPTION = 107
self.SQL_TXN_ISOLATION = 108
self.SQL_CURRENT_QUALIFIER = 109
self.SQL_ODBC_CURSORS = 110
self.SQL_QUIET_MODE = 111
self.SQL_PACKET_SIZE = 112
################################################################################
####SQLGetConnectAttr - ODBC 3.0 attributes#####################################
################################################################################
self.SQL_ATTR_ACCESS_MODE = self.SQL_ACCESS_MODE
self.SQL_ATTR_AUTOCOMMIT = self.SQL_AUTOCOMMIT
self.SQL_ATTR_CONNECTION_TIMEOUT = 113
self.SQL_ATTR_CURRENT_CATALOG = self.SQL_CURRENT_QUALIFIER
self.SQL_ATTR_DISCONNECT_BEHAVIOR = 114
self.SQL_ATTR_ENLIST_IN_DTC = 1207
self.SQL_ATTR_ENLIST_IN_XA = 1208
self.SQL_ATTR_LOGIN_TIMEOUT = self.SQL_LOGIN_TIMEOUT
self.SQL_ATTR_ODBC_CURSORS = self.SQL_ODBC_CURSORS
self.SQL_ATTR_PACKET_SIZE = self.SQL_PACKET_SIZE
self.SQL_ATTR_QUIET_MODE = self.SQL_QUIET_MODE
self.SQL_ATTR_TRACE = self.SQL_OPT_TRACE
self.SQL_ATTR_TRACEFILE = self.SQL_OPT_TRACEFILE
self.SQL_ATTR_TRANSLATE_LIB = self.SQL_TRANSLATE_DLL
self.SQL_ATTR_TRANSLATE_OPTION = self.SQL_TRANSLATE_OPTION
self.SQL_ATTR_TXN_ISOLATION = self.SQL_TXN_ISOLATION
self.SQL_ATTR_CONNECTION_DEAD = 1209
################################################################################
####These options have no meaning for a 3.0 driver##############################
################################################################################
self.SQL_CONN_OPT_MIN = self.SQL_ACCESS_MODE
self.SQL_CONN_OPT_MAX = self.SQL_PACKET_SIZE
self.SQL_CONNECT_OPT_DRVR_START = 1000
################################################################################
####SQLGetConnectAttr - SQL_ACCESS_MODE#########################################
################################################################################
self.SQL_MODE_READ_WRITE = 0
self.SQL_MODE_READ_ONLY = 1
self.SQL_MODE_DEFAULT = self.SQL_MODE_READ_WRITE
################################################################################
####SQLGetConnectAttr - SQL_AUTOCOMMIT##########################################
################################################################################
self.SQL_AUTOCOMMIT_OFF = 0
self.SQL_AUTOCOMMIT_ON = 1
self.SQL_AUTOCOMMIT_DEFAULT = self.SQL_AUTOCOMMIT_ON
################################################################################
####SQLGetConnectAttr - SQL_LOGIN_TIMEOUT#######################################
################################################################################
self.SQL_LOGIN_TIMEOUT_DEFAULT = 15
################################################################################
####SQLGetConnectAttr - SQL_ODBC_CURSORS########################################
################################################################################
self.SQL_CUR_USE_IF_NEEDED = 0
self.SQL_CUR_USE_ODBC = 1
self.SQL_CUR_USE_DRIVER = 2
self.SQL_CUR_DEFAULT = self.SQL_CUR_USE_DRIVER
################################################################################
####SQLGetConnectAttr - SQL_OPT_TRACE###########################################
################################################################################
self.SQL_OPT_TRACE_OFF = 0
self.SQL_OPT_TRACE_ON = 1
self.SQL_OPT_TRACE_DEFAULT = self.SQL_OPT_TRACE_OFF
self.SQL_OPT_TRACE_FILE_DEFAULT = "odbc.log"
self.SQL_OPT_TRACE_FILE_DEFAULTW = "odbc.log"
################################################################################
####SQLGetConnectAttr - SQL_ATTR_ANSI_APP#######################################
################################################################################
self.SQL_AA_TRUE = 1
self.SQL_AA_FALSE = 0
################################################################################
####SQLGetConnectAttr - SQL_ATTR_CONNECTION_DEAD################################
################################################################################
self.SQL_CD_TRUE = 1
self.SQL_CD_FALSE = 0
################################################################################
####SQLGetConnectAttr - SQL_ATTR_DISCONNECT_BEHAVIOR############################
################################################################################
self.SQL_DB_RETURN_TO_POOL = 0
self.SQL_DB_DISCONNECT = 1
self.SQL_DB_DEFAULT = self.SQL_DB_RETURN_TO_POOL
################################################################################
####SQLGetConnectAttr - SQL_ATTR_ENLIST_IN_DTC##################################
################################################################################
self.SQL_DTC_DONE = 0
################################################################################
####SQLGetConnectAttr - Unicode drivers#########################################
################################################################################
self.SQL_ATTR_ANSI_APP = 115
################################################################################
####SQLGetData##################################################################
################################################################################
self.SQL_NO_TOTAL = -4
################################################################################
####SQLGetDescField - extended descriptor field#################################
################################################################################
self.SQL_DESC_ARRAY_SIZE = 20
self.SQL_DESC_ARRAY_STATUS_PTR = 21
self.SQL_DESC_AUTO_UNIQUE_VALUE = self.SQL_COLUMN_AUTO_INCREMENT
self.SQL_DESC_BASE_COLUMN_NAME = 22
self.SQL_DESC_BASE_TABLE_NAME = 23
self.SQL_DESC_BIND_OFFSET_PTR = 24
self.SQL_DESC_BIND_TYPE = 25
self.SQL_DESC_CASE_SENSITIVE = self.SQL_COLUMN_CASE_SENSITIVE
self.SQL_DESC_CATALOG_NAME = self.SQL_COLUMN_QUALIFIER_NAME
self.SQL_DESC_CONCISE_TYPE = self.SQL_COLUMN_TYPE
self.SQL_DESC_DATETIME_INTERVAL_PRECISION = 26
self.SQL_DESC_DISPLAY_SIZE = self.SQL_COLUMN_DISPLAY_SIZE
self.SQL_DESC_FIXED_PREC_SCALE = self.SQL_COLUMN_MONEY
self.SQL_DESC_LABEL = self.SQL_COLUMN_LABEL
self.SQL_DESC_LITERAL_PREFIX = 27
self.SQL_DESC_LITERAL_SUFFIX = 28
self.SQL_DESC_LOCAL_TYPE_NAME = 29
self.SQL_DESC_MAXIMUM_SCALE = 30
self.SQL_DESC_MINIMUM_SCALE = 31
self.SQL_DESC_NUM_PREC_RADIX = 32
self.SQL_DESC_PARAMETER_TYPE = 33
self.SQL_DESC_ROWS_PROCESSED_PTR = 34
self.SQL_DESC_ROWVER = 35
self.SQL_DESC_SCHEMA_NAME = self.SQL_COLUMN_OWNER_NAME
self.SQL_DESC_SEARCHABLE = self.SQL_COLUMN_SEARCHABLE
self.SQL_DESC_TYPE_NAME = self.SQL_COLUMN_TYPE_NAME
self.SQL_DESC_TABLE_NAME = self.SQL_COLUMN_TABLE_NAME
self.SQL_DESC_UNSIGNED = self.SQL_COLUMN_UNSIGNED
self.SQL_DESC_UPDATABLE = self.SQL_COLUMN_UPDATABLE
################################################################################
####SQLGetDiagField - defines for diagnostics fields############################
################################################################################
self.SQL_DIAG_CURSOR_ROW_COUNT = -1249
self.SQL_DIAG_ROW_NUMBER = -1248
self.SQL_DIAG_COLUMN_NUMBER = -1247
################################################################################
####SQLGetDiagField - SQL_DIAG_ROW_NUMBER and SQL_DIAG_COLUMN_NUMBER############
################################################################################
self.SQL_NO_ROW_NUMBER = -1
self.SQL_NO_COLUMN_NUMBER = -1
self.SQL_ROW_NUMBER_UNKNOWN = -2
self.SQL_COLUMN_NUMBER_UNKNOWN = -2
################################################################################
####SQLGetEnvAttr - Attributes##################################################
################################################################################
self.SQL_ATTR_ODBC_VERSION = 200
self.SQL_ATTR_CONNECTION_POOLING = 201
self.SQL_ATTR_CP_MATCH = 202
################################################################################
####SQLGetEnvAttr - SQL_ATTR_ODBC_VERSION#######################################
################################################################################
self.SQL_OV_ODBC2 = 2
self.SQL_OV_ODBC3 = 3
################################################################################
####SQLGetEnvAttr - SQL_ATTR_CONNECTION_POOLING#################################
################################################################################
self.SQL_CP_OFF = 0
self.SQL_CP_ONE_PER_DRIVER = 1
self.SQL_CP_ONE_PER_HENV = 2
self.SQL_CP_DEFAULT = self.SQL_CP_OFF
################################################################################
####SQLGetEnvAttr - SQL_ATTR_CP_MATCH###########################################
################################################################################
self.SQL_CP_STRICT_MATCH = 0
self.SQL_CP_RELAXED_MATCH = 1
self.SQL_CP_MATCH_DEFAULT = self.SQL_CP_STRICT_MATCH
################################################################################
####SQLGetFunctions - extensions to the X/Open specification####################
################################################################################
self.SQL_API_SQLALLOCHANDLESTD = 73
self.SQL_API_SQLBULKOPERATIONS = 24
self.SQL_API_SQLBINDPARAMETER = 72
self.SQL_API_SQLBROWSECONNECT = 55
self.SQL_API_SQLCOLATTRIBUTES = 6
self.SQL_API_SQLCOLUMNPRIVILEGES = 56
self.SQL_API_SQLDESCRIBEPARAM = 58
self.SQL_API_SQLDRIVERCONNECT = 41
self.SQL_API_SQLDRIVERS = 71
self.SQL_API_SQLEXTENDEDFETCH = 59
self.SQL_API_SQLFOREIGNKEYS = 60
self.SQL_API_SQLMORERESULTS = 61
self.SQL_API_SQLNATIVESQL = 62
self.SQL_API_SQLNUMPARAMS = 63
self.SQL_API_SQLPARAMOPTIONS = 64
self.SQL_API_SQLPRIMARYKEYS = 65
self.SQL_API_SQLPROCEDURECOLUMNS = 66
self.SQL_API_SQLPROCEDURES = 67
self.SQL_API_SQLSETPOS = 68
self.SQL_API_SQLSETSCROLLOPTIONS = 69
self.SQL_API_SQLTABLEPRIVILEGES = 70
################################################################################
####These are not useful anymore as the X/Open specification defines############
####functions in the 10000 range################################################
################################################################################
self.SQL_EXT_API_LAST = self.SQL_API_SQLBINDPARAMETER
self.SQL_NUM_FUNCTIONS = 23
self.SQL_EXT_API_START = 40
self.SQL_NUM_EXTENSIONS = (self.SQL_EXT_API_LAST-self.SQL_EXT_API_START+1)
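# With SQL_EXT_API_LAST = 72 and SQL_EXT_API_START = 40, SQL_NUM_EXTENSIONS
# evaluates to 72 - 40 + 1 = 33.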
################################################################################
####SQLGetFunctions - ODBC version 2.x and earlier##############################
################################################################################
self.SQL_API_ALL_FUNCTIONS = 0
################################################################################
####Loading by ordinal is not supported for 3.0 and above drivers###############
################################################################################
self.SQL_API_LOADBYORDINAL = 199
################################################################################
####SQLGetFunctions - SQL_API_ODBC3_ALL_FUNCTIONS###############################
################################################################################
self.SQL_API_ODBC3_ALL_FUNCTIONS = 999
self.SQL_API_ODBC3_ALL_FUNCTIONS_SIZE = 250
#TODO: map the C SQL_FUNC_EXISTS macro (see the commented sketch below)
self.SQL_FUNC_EXISTS = self.UnimplementedSQLFunction
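# Commented sketch of one possible mapping (an assumption, not active code):
# the C macro tests bit (uwAPI & 0xF) of the (uwAPI >> 4)-th UWORD in the
# bitmap that SQLGetFunctions fills for SQL_API_ODBC3_ALL_FUNCTIONS:
#   def SQL_FUNC_EXISTS_DEFINITION(pfExists, uwAPI):
#       # pfExists: indexable array of SQLUSMALLINT values
#       if pfExists[uwAPI >> 4] & (1 << (uwAPI & 0x000F)):
#           return 1  # SQL_TRUE
#       return 0      # SQL_FALSE
#   self.SQL_FUNC_EXISTS = SQL_FUNC_EXISTS_DEFINITION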
################################################################################
####SQLGetInfo - ODBC 2.x extensions to the X/Open standard#####################
################################################################################
self.SQL_INFO_FIRST = 0
self.SQL_ACTIVE_CONNECTIONS = 0
self.SQL_ACTIVE_STATEMENTS = 1
self.SQL_DRIVER_HDBC = 3
self.SQL_DRIVER_HENV = 4
self.SQL_DRIVER_HSTMT = 5
self.SQL_DRIVER_NAME = 6
self.SQL_DRIVER_VER = 7
self.SQL_ODBC_API_CONFORMANCE = 9
self.SQL_ODBC_VER = 10
self.SQL_ROW_UPDATES = 11
self.SQL_ODBC_SAG_CLI_CONFORMANCE = 12
self.SQL_ODBC_SQL_CONFORMANCE = 15
self.SQL_PROCEDURES = 21
self.SQL_CONCAT_NULL_BEHAVIOR = 22
self.SQL_CURSOR_ROLLBACK_BEHAVIOR = 24
self.SQL_EXPRESSIONS_IN_ORDERBY = 27
self.SQL_MAX_OWNER_NAME_LEN = 32
self.SQL_MAX_PROCEDURE_NAME_LEN = 33
self.SQL_MAX_QUALIFIER_NAME_LEN = 34
self.SQL_MULT_RESULT_SETS = 36
self.SQL_MULTIPLE_ACTIVE_TXN = 37
self.SQL_OUTER_JOINS = 38
self.SQL_OWNER_TERM = 39
self.SQL_PROCEDURE_TERM = 40
self.SQL_QUALIFIER_NAME_SEPARATOR = 41
self.SQL_QUALIFIER_TERM = 42
self.SQL_SCROLL_OPTIONS = 44
self.SQL_TABLE_TERM = 45
self.SQL_CONVERT_FUNCTIONS = 48
self.SQL_NUMERIC_FUNCTIONS = 49
self.SQL_STRING_FUNCTIONS = 50
self.SQL_SYSTEM_FUNCTIONS = 51
self.SQL_TIMEDATE_FUNCTIONS = 52
self.SQL_CONVERT_BIGINT = 53
self.SQL_CONVERT_BINARY = 54
self.SQL_CONVERT_BIT = 55
self.SQL_CONVERT_CHAR = 56
self.SQL_CONVERT_DATE = 57
self.SQL_CONVERT_DECIMAL = 58
self.SQL_CONVERT_DOUBLE = 59
self.SQL_CONVERT_FLOAT = 60
self.SQL_CONVERT_INTEGER = 61
self.SQL_CONVERT_LONGVARCHAR = 62
self.SQL_CONVERT_NUMERIC = 63
self.SQL_CONVERT_REAL = 64
self.SQL_CONVERT_SMALLINT = 65
self.SQL_CONVERT_TIME = 66
self.SQL_CONVERT_TIMESTAMP = 67
self.SQL_CONVERT_TINYINT = 68
self.SQL_CONVERT_VARBINARY = 69
self.SQL_CONVERT_VARCHAR = 70
self.SQL_CONVERT_LONGVARBINARY = 71
self.SQL_ODBC_SQL_OPT_IEF = 73
self.SQL_CORRELATION_NAME = 74
self.SQL_NON_NULLABLE_COLUMNS = 75
self.SQL_DRIVER_HLIB = 76
self.SQL_DRIVER_ODBC_VER = 77
self.SQL_LOCK_TYPES = 78
self.SQL_POS_OPERATIONS = 79
self.SQL_POSITIONED_STATEMENTS = 80
self.SQL_BOOKMARK_PERSISTENCE = 82
self.SQL_STATIC_SENSITIVITY = 83
self.SQL_FILE_USAGE = 84
self.SQL_COLUMN_ALIAS = 87
self.SQL_GROUP_BY = 88
self.SQL_KEYWORDS = 89
self.SQL_OWNER_USAGE = 91
self.SQL_QUALIFIER_USAGE = 92
self.SQL_QUOTED_IDENTIFIER_CASE = 93
self.SQL_SUBQUERIES = 95
self.SQL_UNION = 96
self.SQL_MAX_ROW_SIZE_INCLUDES_LONG = 103
self.SQL_MAX_CHAR_LITERAL_LEN = 108
self.SQL_TIMEDATE_ADD_INTERVALS = 109
self.SQL_TIMEDATE_DIFF_INTERVALS = 110
self.SQL_NEED_LONG_DATA_LEN = 111
self.SQL_MAX_BINARY_LITERAL_LEN = 112
self.SQL_LIKE_ESCAPE_CLAUSE = 113
self.SQL_QUALIFIER_LOCATION = 114
self.SQL_OJ_CAPABILITIES = 65003
################################################################################
####These values are not useful anymore as X/Open defines values in the#########
####10000 range#################################################################
################################################################################
self.SQL_INFO_LAST = self.SQL_QUALIFIER_LOCATION
self.SQL_INFO_DRIVER_START = 1000
################################################################################
####SQLGetInfo - ODBC 3.x extensions to the X/Open standard#####################
################################################################################
self.SQL_ACTIVE_ENVIRONMENTS = 116
self.SQL_ALTER_DOMAIN = 117
self.SQL_SQL_CONFORMANCE = 118
self.SQL_DATETIME_LITERALS = 119
self.SQL_ASYNC_MODE = 10021
self.SQL_BATCH_ROW_COUNT = 120
self.SQL_BATCH_SUPPORT = 121
self.SQL_CATALOG_LOCATION = self.SQL_QUALIFIER_LOCATION
self.SQL_CATALOG_NAME_SEPARATOR = self.SQL_QUALIFIER_NAME_SEPARATOR
self.SQL_CATALOG_TERM = self.SQL_QUALIFIER_TERM
self.SQL_CATALOG_USAGE = self.SQL_QUALIFIER_USAGE
self.SQL_CONVERT_WCHAR = 122
self.SQL_CONVERT_INTERVAL_DAY_TIME = 123
self.SQL_CONVERT_INTERVAL_YEAR_MONTH = 124
self.SQL_CONVERT_WLONGVARCHAR = 125
self.SQL_CONVERT_WVARCHAR = 126
self.SQL_CREATE_ASSERTION = 127
self.SQL_CREATE_CHARACTER_SET = 128
self.SQL_CREATE_COLLATION = 129
self.SQL_CREATE_DOMAIN = 130
self.SQL_CREATE_SCHEMA = 131
self.SQL_CREATE_TABLE = 132
self.SQL_CREATE_TRANSLATION = 133
self.SQL_CREATE_VIEW = 134
self.SQL_DRIVER_HDESC = 135
self.SQL_DROP_ASSERTION = 136
self.SQL_DROP_CHARACTER_SET = 137
self.SQL_DROP_COLLATION = 138
self.SQL_DROP_DOMAIN = 139
self.SQL_DROP_SCHEMA = 140
self.SQL_DROP_TABLE = 141
self.SQL_DROP_TRANSLATION = 142
self.SQL_DROP_VIEW = 143
self.SQL_DYNAMIC_CURSOR_ATTRIBUTES1 = 144
self.SQL_DYNAMIC_CURSOR_ATTRIBUTES2 = 145
self.SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES1 = 146
self.SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES2 = 147
self.SQL_INDEX_KEYWORDS = 148
self.SQL_INFO_SCHEMA_VIEWS = 149
self.SQL_KEYSET_CURSOR_ATTRIBUTES1 = 150
self.SQL_KEYSET_CURSOR_ATTRIBUTES2 = 151
self.SQL_MAX_ASYNC_CONCURRENT_STATEMENTS = 10022
self.SQL_ODBC_INTERFACE_CONFORMANCE = 152
self.SQL_PARAM_ARRAY_ROW_COUNTS = 153
self.SQL_PARAM_ARRAY_SELECTS = 154
self.SQL_SCHEMA_TERM = self.SQL_OWNER_TERM
self.SQL_SCHEMA_USAGE = self.SQL_OWNER_USAGE
self.SQL_SQL92_DATETIME_FUNCTIONS = 155
self.SQL_SQL92_FOREIGN_KEY_DELETE_RULE = 156
self.SQL_SQL92_FOREIGN_KEY_UPDATE_RULE = 157
self.SQL_SQL92_GRANT = 158
self.SQL_SQL92_NUMERIC_VALUE_FUNCTIONS = 159
self.SQL_SQL92_PREDICATES = 160
self.SQL_SQL92_RELATIONAL_JOIN_OPERATORS = 161
self.SQL_SQL92_REVOKE = 162
self.SQL_SQL92_ROW_VALUE_CONSTRUCTOR = 163
self.SQL_SQL92_STRING_FUNCTIONS = 164
self.SQL_SQL92_VALUE_EXPRESSIONS = 165
self.SQL_STANDARD_CLI_CONFORMANCE = 166
self.SQL_STATIC_CURSOR_ATTRIBUTES1 = 167
self.SQL_STATIC_CURSOR_ATTRIBUTES2 = 168
self.SQL_AGGREGATE_FUNCTIONS = 169
self.SQL_DDL_INDEX = 170
self.SQL_DM_VER = 171
self.SQL_INSERT_STATEMENT = 172
self.SQL_UNION_STATEMENT = self.SQL_UNION
self.SQL_DTC_TRANSITION_COST = 1750
################################################################################
####SQLGetInfo - SQL_AGGREGATE_FUNCTIONS########################################
################################################################################
self.SQL_AF_AVG = 0x00000001
self.SQL_AF_COUNT = 0x00000002
self.SQL_AF_MAX = 0x00000004
self.SQL_AF_MIN = 0x00000008
self.SQL_AF_SUM = 0x00000010
self.SQL_AF_DISTINCT = 0x00000020
self.SQL_AF_ALL = 0x00000040
################################################################################
####SQLGetInfo - SQL_ALTER_DOMAIN###############################################
################################################################################
self.SQL_AD_CONSTRAINT_NAME_DEFINITION = 0x00000001
self.SQL_AD_ADD_DOMAIN_CONSTRAINT = 0x00000002
self.SQL_AD_DROP_DOMAIN_CONSTRAINT = 0x00000004
self.SQL_AD_ADD_DOMAIN_DEFAULT = 0x00000008
self.SQL_AD_DROP_DOMAIN_DEFAULT = 0x00000010
self.SQL_AD_ADD_CONSTRAINT_INITIALLY_DEFERRED = 0x00000020
self.SQL_AD_ADD_CONSTRAINT_INITIALLY_IMMEDIATE = 0x00000040
self.SQL_AD_ADD_CONSTRAINT_DEFERRABLE = 0x00000080
self.SQL_AD_ADD_CONSTRAINT_NON_DEFERRABLE = 0x00000100
################################################################################
####SQLGetInfo - SQL_ALTER_TABLE################################################
################################################################################
################################################################################
####The following 3 bitmasks are defined in sql.h###############################
################################################################################
####SQL_AT_ADD_COLUMN = 0x00000001##############################################
####SQL_AT_DROP_COLUMN = 0x00000002#############################################
####SQL_AT_ADD_CONSTRAINT = 0x00000008##########################################
################################################################################
self.SQL_AT_ADD_COLUMN_SINGLE = 0x00000020
self.SQL_AT_ADD_COLUMN_DEFAULT = 0x00000040
self.SQL_AT_ADD_COLUMN_COLLATION = 0x00000080
self.SQL_AT_SET_COLUMN_DEFAULT = 0x00000100
self.SQL_AT_DROP_COLUMN_DEFAULT = 0x00000200
self.SQL_AT_DROP_COLUMN_CASCADE = 0x00000400
self.SQL_AT_DROP_COLUMN_RESTRICT = 0x00000800
self.SQL_AT_ADD_TABLE_CONSTRAINT = 0x00001000
self.SQL_AT_DROP_TABLE_CONSTRAINT_CASCADE = 0x00002000
self.SQL_AT_DROP_TABLE_CONSTRAINT_RESTRICT = 0x00004000
self.SQL_AT_CONSTRAINT_NAME_DEFINITION = 0x00008000
self.SQL_AT_CONSTRAINT_INITIALLY_DEFERRED = 0x00010000
self.SQL_AT_CONSTRAINT_INITIALLY_IMMEDIATE = 0x00020000
self.SQL_AT_CONSTRAINT_DEFERRABLE = 0x00040000
self.SQL_AT_CONSTRAINT_NON_DEFERRABLE = 0x00080000
################################################################################
####SQLGetInfo - SQL_ASYNC_MODE#################################################
################################################################################
self.SQL_AM_NONE = 0
self.SQL_AM_CONNECTION = 1
self.SQL_AM_STATEMENT = 2
################################################################################
####SQLGetInfo - SQL_BATCH_ROW_COUNT############################################
################################################################################
self.SQL_BRC_PROCEDURES = 0x0000001
self.SQL_BRC_EXPLICIT = 0x0000002
self.SQL_BRC_ROLLED_UP = 0x0000004
################################################################################
####SQLGetInfo - SQL_BATCH_SUPPORT##############################################
################################################################################
self.SQL_BS_SELECT_EXPLICIT = 0x00000001
self.SQL_BS_ROW_COUNT_EXPLICIT = 0x00000002
self.SQL_BS_SELECT_PROC = 0x00000004
self.SQL_BS_ROW_COUNT_PROC = 0x00000008
################################################################################
####SQLGetInfo - SQL_BOOKMARK_PERSISTENCE#######################################
################################################################################
self.SQL_BP_CLOSE = 0x00000001
self.SQL_BP_DELETE = 0x00000002
self.SQL_BP_DROP = 0x00000004
self.SQL_BP_TRANSACTION = 0x00000008
self.SQL_BP_UPDATE = 0x00000010
self.SQL_BP_OTHER_HSTMT = 0x00000020
self.SQL_BP_SCROLL = 0x00000040
################################################################################
####SQLGetInfo - SQL_CONCAT_NULL_BEHAVIOR#######################################
################################################################################
self.SQL_CB_NULL = 0x0000
self.SQL_CB_NON_NULL = 0x0001
################################################################################
####SQLGetInfo - SQL_CONVERT_* bitmask values###################################
################################################################################
self.SQL_CVT_CHAR = 0x00000001
self.SQL_CVT_NUMERIC = 0x00000002
self.SQL_CVT_DECIMAL = 0x00000004
self.SQL_CVT_INTEGER = 0x00000008
self.SQL_CVT_SMALLINT = 0x00000010
self.SQL_CVT_FLOAT = 0x00000020
self.SQL_CVT_REAL = 0x00000040
self.SQL_CVT_DOUBLE = 0x00000080
self.SQL_CVT_VARCHAR = 0x00000100
self.SQL_CVT_LONGVARCHAR = 0x00000200
self.SQL_CVT_BINARY = 0x00000400
self.SQL_CVT_VARBINARY = 0x00000800
self.SQL_CVT_BIT = 0x00001000
self.SQL_CVT_TINYINT = 0x00002000
self.SQL_CVT_BIGINT = 0x00004000
self.SQL_CVT_DATE = 0x00008000
self.SQL_CVT_TIME = 0x00010000
self.SQL_CVT_TIMESTAMP = 0x00020000
self.SQL_CVT_LONGVARBINARY = 0x00040000
self.SQL_CVT_INTERVAL_YEAR_MONTH = 0x00080000
self.SQL_CVT_INTERVAL_DAY_TIME = 0x00100000
self.SQL_CVT_WCHAR = 0x00200000
self.SQL_CVT_WLONGVARCHAR = 0x00400000
self.SQL_CVT_WVARCHAR = 0x00800000
################################################################################
####SQLGetInfo - SQL_CONVERT_FUNCTIONS##########################################
################################################################################
self.SQL_FN_CVT_CONVERT = 0x00000001
self.SQL_FN_CVT_CAST = 0x00000002
################################################################################
####SQLGetInfo - SQL_CORRELATION_NAME###########################################
################################################################################
self.SQL_CN_NONE = 0x0000
self.SQL_CN_DIFFERENT = 0x0001
self.SQL_CN_ANY = 0x0002
################################################################################
####SQLGetInfo - SQL_CREATE_ASSERTION###########################################
################################################################################
self.SQL_CA_CREATE_ASSERTION = 0x00000001
self.SQL_CA_CONSTRAINT_INITIALLY_DEFERRED = 0x00000010
self.SQL_CA_CONSTRAINT_INITIALLY_IMMEDIATE = 0x00000020
self.SQL_CA_CONSTRAINT_DEFERRABLE = 0x00000040
self.SQL_CA_CONSTRAINT_NON_DEFERRABLE = 0x00000080
################################################################################
####SQLGetInfo - SQL_CREATE_CHARACTER_SET#######################################
################################################################################
self.SQL_CCS_CREATE_CHARACTER_SET = 0x00000001
self.SQL_CCS_COLLATE_CLAUSE = 0x00000002
self.SQL_CCS_LIMITED_COLLATION = 0x00000004
################################################################################
####SQLGetInfo - SQL_CREATE_COLLATION###########################################
################################################################################
self.SQL_CCOL_CREATE_COLLATION = 0x00000001
################################################################################
####SQLGetInfo - SQL_CREATE_DOMAIN##############################################
################################################################################
self.SQL_CDO_CREATE_DOMAIN = 0x00000001
self.SQL_CDO_DEFAULT = 0x00000002
self.SQL_CDO_CONSTRAINT = 0x00000004
self.SQL_CDO_COLLATION = 0x00000008
self.SQL_CDO_CONSTRAINT_NAME_DEFINITION = 0x00000010
self.SQL_CDO_CONSTRAINT_INITIALLY_DEFERRED = 0x00000020
self.SQL_CDO_CONSTRAINT_INITIALLY_IMMEDIATE = 0x00000040
self.SQL_CDO_CONSTRAINT_DEFERRABLE = 0x00000080
self.SQL_CDO_CONSTRAINT_NON_DEFERRABLE = 0x00000100
################################################################################
####SQLGetInfo - SQL_CREATE_SCHEMA##############################################
################################################################################
self.SQL_CS_CREATE_SCHEMA = 0x00000001
self.SQL_CS_AUTHORIZATION = 0x00000002
self.SQL_CS_DEFAULT_CHARACTER_SET = 0x00000004
################################################################################
####SQLGetInfo - SQL_CREATE_TABLE###############################################
################################################################################
self.SQL_CT_CREATE_TABLE = 0x00000001
self.SQL_CT_COMMIT_PRESERVE = 0x00000002
self.SQL_CT_COMMIT_DELETE = 0x00000004
self.SQL_CT_GLOBAL_TEMPORARY = 0x00000008
self.SQL_CT_LOCAL_TEMPORARY = 0x00000010
self.SQL_CT_CONSTRAINT_INITIALLY_DEFERRED = 0x00000020
self.SQL_CT_CONSTRAINT_INITIALLY_IMMEDIATE = 0x00000040
self.SQL_CT_CONSTRAINT_DEFERRABLE = 0x00000080
self.SQL_CT_CONSTRAINT_NON_DEFERRABLE = 0x00000100
self.SQL_CT_COLUMN_CONSTRAINT = 0x00000200
self.SQL_CT_COLUMN_DEFAULT = 0x00000400
self.SQL_CT_COLUMN_COLLATION = 0x00000800
self.SQL_CT_TABLE_CONSTRAINT = 0x00001000
self.SQL_CT_CONSTRAINT_NAME_DEFINITION = 0x00002000
################################################################################
####SQLGetInfo - SQL_CREATE_TRANSLATION#########################################
################################################################################
self.SQL_CTR_CREATE_TRANSLATION = 0x00000001
################################################################################
####SQLGetInfo - SQL_CREATE_VIEW################################################
################################################################################
self.SQL_CV_CREATE_VIEW = 0x00000001
self.SQL_CV_CHECK_OPTION = 0x00000002
self.SQL_CV_CASCADED = 0x00000004
self.SQL_CV_LOCAL = 0x00000008
################################################################################
####SQLGetInfo - SQL_DATETIME_LITERALS##########################################
################################################################################
self.SQL_DL_SQL92_DATE = 0x00000001
self.SQL_DL_SQL92_TIME = 0x00000002
self.SQL_DL_SQL92_TIMESTAMP = 0x00000004
self.SQL_DL_SQL92_INTERVAL_YEAR = 0x00000008
self.SQL_DL_SQL92_INTERVAL_MONTH = 0x00000010
self.SQL_DL_SQL92_INTERVAL_DAY = 0x00000020
self.SQL_DL_SQL92_INTERVAL_HOUR = 0x00000040
self.SQL_DL_SQL92_INTERVAL_MINUTE = 0x00000080
self.SQL_DL_SQL92_INTERVAL_SECOND = 0x00000100
self.SQL_DL_SQL92_INTERVAL_YEAR_TO_MONTH = 0x00000200
self.SQL_DL_SQL92_INTERVAL_DAY_TO_HOUR = 0x00000400
self.SQL_DL_SQL92_INTERVAL_DAY_TO_MINUTE = 0x00000800
self.SQL_DL_SQL92_INTERVAL_DAY_TO_SECOND = 0x00001000
self.SQL_DL_SQL92_INTERVAL_HOUR_TO_MINUTE = 0x00002000
self.SQL_DL_SQL92_INTERVAL_HOUR_TO_SECOND = 0x00004000
self.SQL_DL_SQL92_INTERVAL_MINUTE_TO_SECOND = 0x00008000
################################################################################
####SQLGetInfo - SQL_DDL_INDEX##################################################
################################################################################
self.SQL_DI_CREATE_INDEX = 0x00000001
self.SQL_DI_DROP_INDEX = 0x00000002
################################################################################
####SQLGetInfo - SQL_DROP_ASSERTION#############################################
################################################################################
self.SQL_DA_DROP_ASSERTION = 0x00000001
################################################################################
####SQLGetInfo - SQL_DROP_CHARACTER_SET#########################################
################################################################################
self.SQL_DCS_DROP_CHARACTER_SET = 0x00000001
################################################################################
####SQLGetInfo - SQL_DROP_COLLATION#############################################
################################################################################
self.SQL_DC_DROP_COLLATION = 0x00000001
################################################################################
####SQLGetInfo - SQL_DROP_DOMAIN################################################
################################################################################
self.SQL_DD_DROP_DOMAIN = 0x00000001
self.SQL_DD_RESTRICT = 0x00000002
self.SQL_DD_CASCADE = 0x00000004
################################################################################
####SQLGetInfo - SQL_DROP_SCHEMA################################################
################################################################################
self.SQL_DS_DROP_SCHEMA = 0x00000001
self.SQL_DS_RESTRICT = 0x00000002
self.SQL_DS_CASCADE = 0x00000004
################################################################################
####SQLGetInfo - SQL_DROP_TABLE#################################################
################################################################################
self.SQL_DT_DROP_TABLE = 0x00000001
self.SQL_DT_RESTRICT = 0x00000002
self.SQL_DT_CASCADE = 0x00000004
################################################################################
####SQLGetInfo - SQL_DROP_TRANSLATION###########################################
################################################################################
self.SQL_DTR_DROP_TRANSLATION = 0x00000001
################################################################################
####SQLGetInfo - SQL_DROP_VIEW##################################################
################################################################################
self.SQL_DV_DROP_VIEW = 0x00000001
self.SQL_DV_RESTRICT = 0x00000002
self.SQL_DV_CASCADE = 0x00000004
################################################################################
####SQLGetInfo - SQL_DTC_TRANSITION_COST########################################
################################################################################
self.SQL_DTC_ENLIST_EXPENSIVE = 0x00000001
self.SQL_DTC_UNENLIST_EXPENSIVE = 0x00000002
################################################################################
####SQLGetInfo - SQL_DYNAMIC_CURSOR_ATTRIBUTES1#################################
####SQLGetInfo - SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES1############################
####SQLGetInfo - SQL_KEYSET_CURSOR_ATTRIBUTES1##################################
####SQLGetInfo - SQL_STATIC_CURSOR_ATTRIBUTES1##################################
################################################################################
################################################################################
####SQLFetchScroll - FetchOrientation###########################################
################################################################################
self.SQL_CA1_NEXT = 0x00000001
self.SQL_CA1_ABSOLUTE = 0x00000002
self.SQL_CA1_RELATIVE = 0x00000004
self.SQL_CA1_BOOKMARK = 0x00000008
################################################################################
####SQLSetPos - LockType########################################################
################################################################################
self.SQL_CA1_LOCK_NO_CHANGE = 0x00000040
self.SQL_CA1_LOCK_EXCLUSIVE = 0x00000080
self.SQL_CA1_LOCK_UNLOCK = 0x00000100
################################################################################
####SQLSetPos Operations########################################################
################################################################################
self.SQL_CA1_POS_POSITION = 0x00000200
self.SQL_CA1_POS_UPDATE = 0x00000400
self.SQL_CA1_POS_DELETE = 0x00000800
self.SQL_CA1_POS_REFRESH = 0x00001000
################################################################################
####positioned updates and deletes##############################################
################################################################################
self.SQL_CA1_POSITIONED_UPDATE = 0x00002000
self.SQL_CA1_POSITIONED_DELETE = 0x00004000
self.SQL_CA1_SELECT_FOR_UPDATE = 0x00008000
################################################################################
####SQLBulkOperations operations################################################
################################################################################
self.SQL_CA1_BULK_ADD = 0x00010000
self.SQL_CA1_BULK_UPDATE_BY_BOOKMARK = 0x00020000
self.SQL_CA1_BULK_DELETE_BY_BOOKMARK = 0x00040000
self.SQL_CA1_BULK_FETCH_BY_BOOKMARK = 0x00080000
################################################################################
####SQLGetInfo - SQL_DYNAMIC_CURSOR_ATTRIBUTES2#################################
####SQLGetInfo - SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES2############################
####SQLGetInfo - SQL_KEYSET_CURSOR_ATTRIBUTES2##################################
####SQLGetInfo - SQL_STATIC_CURSOR_ATTRIBUTES2##################################
################################################################################
################################################################################
####SQL_ATTR_SCROLL_CONCURRENCY#################################################
################################################################################
self.SQL_CA2_READ_ONLY_CONCURRENCY = 0x00000001
self.SQL_CA2_LOCK_CONCURRENCY = 0x00000002
self.SQL_CA2_OPT_ROWVER_CONCURRENCY = 0x00000004
self.SQL_CA2_OPT_VALUES_CONCURRENCY = 0x00000008
################################################################################
####sensitivity of the cursor to its own inserts, deletes, and updates##########
################################################################################
self.SQL_CA2_SENSITIVITY_ADDITIONS = 0x00000010
self.SQL_CA2_SENSITIVITY_DELETIONS = 0x00000020
self.SQL_CA2_SENSITIVITY_UPDATES = 0x00000040
################################################################################
####SQL_ATTR_MAX_ROWS###########################################################
################################################################################
self.SQL_CA2_MAX_ROWS_SELECT = 0x00000080
self.SQL_CA2_MAX_ROWS_INSERT = 0x00000100
self.SQL_CA2_MAX_ROWS_DELETE = 0x00000200
self.SQL_CA2_MAX_ROWS_UPDATE = 0x00000400
self.SQL_CA2_MAX_ROWS_CATALOG = 0x00000800
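# Convenience mask: the bitwise OR of every SQL_CA2_MAX_ROWS_* flag above.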
self.SQL_CA2_MAX_ROWS_AFFECTS_ALL = (self.SQL_CA2_MAX_ROWS_SELECT |
                                     self.SQL_CA2_MAX_ROWS_INSERT |
                                     self.SQL_CA2_MAX_ROWS_DELETE |
                                     self.SQL_CA2_MAX_ROWS_UPDATE |
                                     self.SQL_CA2_MAX_ROWS_CATALOG)
################################################################################
####SQL_DIAG_CURSOR_ROW_COUNT###################################################
################################################################################
self.SQL_CA2_CRC_EXACT = 0x00001000
self.SQL_CA2_CRC_APPROXIMATE = 0x00002000
################################################################################
####the kinds of positioned statements that can be simulated####################
################################################################################
self.SQL_CA2_SIMULATE_NON_UNIQUE = 0x00004000
self.SQL_CA2_SIMULATE_TRY_UNIQUE = 0x00008000
self.SQL_CA2_SIMULATE_UNIQUE = 0x00010000
################################################################################
####SQLGetInfo - SQL_FETCH_DIRECTION############################################
################################################################################
self.SQL_FD_FETCH_RESUME = 0x00000040
self.SQL_FD_FETCH_BOOKMARK = 0x00000080
################################################################################
####SQLGetInfo - SQL_FILE_USAGE#################################################
################################################################################
self.SQL_FILE_NOT_SUPPORTED = 0x0000
self.SQL_FILE_TABLE = 0x0001
self.SQL_FILE_QUALIFIER = 0x0002
self.SQL_FILE_CATALOG = self.SQL_FILE_QUALIFIER
################################################################################
####SQLGetInfo - SQL_GETDATA_EXTENSIONS#########################################
################################################################################
self.SQL_GD_BLOCK = 0x00000004
self.SQL_GD_BOUND = 0x00000008
################################################################################
####SQLGetInfo - SQL_GROUP_BY###################################################
################################################################################
self.SQL_GB_NOT_SUPPORTED = 0x0000
self.SQL_GB_GROUP_BY_EQUALS_SELECT = 0x0001
self.SQL_GB_GROUP_BY_CONTAINS_SELECT = 0x0002
self.SQL_GB_NO_RELATION = 0x0003
self.SQL_GB_COLLATE = 0x0004
################################################################################
####SQLGetInfo - SQL_INDEX_KEYWORDS#############################################
################################################################################
self.SQL_IK_NONE = 0x00000000
self.SQL_IK_ASC = 0x00000001
self.SQL_IK_DESC = 0x00000002
self.SQL_IK_ALL = self.SQL_IK_ASC | self.SQL_IK_DESC
################################################################################
####SQLGetInfo - SQL_INFO_SCHEMA_VIEWS##########################################
################################################################################
self.SQL_ISV_ASSERTIONS = 0x00000001
self.SQL_ISV_CHARACTER_SETS = 0x00000002
self.SQL_ISV_CHECK_CONSTRAINTS = 0x00000004
self.SQL_ISV_COLLATIONS = 0x00000008
self.SQL_ISV_COLUMN_DOMAIN_USAGE = 0x00000010
self.SQL_ISV_COLUMN_PRIVILEGES = 0x00000020
self.SQL_ISV_COLUMNS = 0x00000040
self.SQL_ISV_CONSTRAINT_COLUMN_USAGE = 0x00000080
self.SQL_ISV_CONSTRAINT_TABLE_USAGE = 0x00000100
self.SQL_ISV_DOMAIN_CONSTRAINTS = 0x00000200
self.SQL_ISV_DOMAINS = 0x00000400
self.SQL_ISV_KEY_COLUMN_USAGE = 0x00000800
self.SQL_ISV_REFERENTIAL_CONSTRAINTS = 0x00001000
self.SQL_ISV_SCHEMATA = 0x00002000
self.SQL_ISV_SQL_LANGUAGES = 0x00004000
self.SQL_ISV_TABLE_CONSTRAINTS = 0x00008000
self.SQL_ISV_TABLE_PRIVILEGES = 0x00010000
self.SQL_ISV_TABLES = 0x00020000
self.SQL_ISV_TRANSLATIONS = 0x00040000
self.SQL_ISV_USAGE_PRIVILEGES = 0x00080000
self.SQL_ISV_VIEW_COLUMN_USAGE = 0x00100000
self.SQL_ISV_VIEW_TABLE_USAGE = 0x00200000
self.SQL_ISV_VIEWS = 0x00400000
################################################################################
####SQLGetInfo - SQL_INSERT_STATEMENT###########################################
################################################################################
self.SQL_IS_INSERT_LITERALS = 0x00000001
self.SQL_IS_INSERT_SEARCHED = 0x00000002
self.SQL_IS_SELECT_INTO = 0x00000004
################################################################################
####SQLGetInfo - SQL_LOCK_TYPES#################################################
################################################################################
self.SQL_LCK_NO_CHANGE = 0x00000001
self.SQL_LCK_EXCLUSIVE = 0x00000002
self.SQL_LCK_UNLOCK = 0x00000004
################################################################################
####SQLGetInfo - SQL_POS_OPERATIONS#############################################
################################################################################
self.SQL_POS_POSITION = 0x00000001
self.SQL_POS_REFRESH = 0x00000002
self.SQL_POS_UPDATE = 0x00000004
self.SQL_POS_DELETE = 0x00000008
self.SQL_POS_ADD = 0x00000010
################################################################################
####SQLGetInfo - SQL_NON_NULLABLE_COLUMNS#######################################
################################################################################
self.SQL_NNC_NULL = 0x0000
self.SQL_NNC_NON_NULL = 0x0001
################################################################################
####SQLGetInfo - SQL_NULL_COLLATION#############################################
################################################################################
self.SQL_NC_START = 0x0002
self.SQL_NC_END = 0x0004
################################################################################
####SQLGetInfo - SQL_NUMERIC_FUNCTIONS##########################################
################################################################################
self.SQL_FN_NUM_ABS = 0x00000001
self.SQL_FN_NUM_ACOS = 0x00000002
self.SQL_FN_NUM_ASIN = 0x00000004
self.SQL_FN_NUM_ATAN = 0x00000008
self.SQL_FN_NUM_ATAN2 = 0x00000010
self.SQL_FN_NUM_CEILING = 0x00000020
self.SQL_FN_NUM_COS = 0x00000040
self.SQL_FN_NUM_COT = 0x00000080
self.SQL_FN_NUM_EXP = 0x00000100
self.SQL_FN_NUM_FLOOR = 0x00000200
self.SQL_FN_NUM_LOG = 0x00000400
self.SQL_FN_NUM_MOD = 0x00000800
self.SQL_FN_NUM_SIGN = 0x00001000
self.SQL_FN_NUM_SIN = 0x00002000
self.SQL_FN_NUM_SQRT = 0x00004000
self.SQL_FN_NUM_TAN = 0x00008000
self.SQL_FN_NUM_PI = 0x00010000
self.SQL_FN_NUM_RAND = 0x00020000
self.SQL_FN_NUM_DEGREES = 0x00040000
self.SQL_FN_NUM_LOG10 = 0x00080000
self.SQL_FN_NUM_POWER = 0x00100000
self.SQL_FN_NUM_RADIANS = 0x00200000
self.SQL_FN_NUM_ROUND = 0x00400000
self.SQL_FN_NUM_TRUNCATE = 0x00800000
################################################################################
####SQLGetInfo - SQL_ODBC_API_CONFORMANCE#######################################
################################################################################
self.SQL_OAC_NONE = 0x0000
self.SQL_OAC_LEVEL1 = 0x0001
self.SQL_OAC_LEVEL2 = 0x0002
################################################################################
####SQLGetInfo - SQL_ODBC_INTERFACE_CONFORMANCE#################################
################################################################################
self.SQL_OIC_CORE = 1
self.SQL_OIC_LEVEL1 = 2
self.SQL_OIC_LEVEL2 = 3
################################################################################
####SQLGetInfo - SQL_ODBC_SAG_CLI_CONFORMANCE###################################
################################################################################
self.SQL_OSCC_NOT_COMPLIANT = 0x0000
self.SQL_OSCC_COMPLIANT = 0x0001
################################################################################
####SQLGetInfo - SQL_ODBC_SQL_CONFORMANCE#######################################
################################################################################
self.SQL_OSC_MINIMUM = 0x0000
self.SQL_OSC_CORE = 0x0001
self.SQL_OSC_EXTENDED = 0x0002
################################################################################
####SQLGetInfo - SQL_OWNER_USAGE################################################
################################################################################
self.SQL_OU_DML_STATEMENTS = 0x00000001
self.SQL_OU_PROCEDURE_INVOCATION = 0x00000002
self.SQL_OU_TABLE_DEFINITION = 0x00000004
self.SQL_OU_INDEX_DEFINITION = 0x00000008
self.SQL_OU_PRIVILEGE_DEFINITION = 0x00000010
################################################################################
####SQLGetInfo - SQL_PARAM_ARRAY_ROW_COUNTS#####################################
################################################################################
self.SQL_PARC_BATCH = 1
self.SQL_PARC_NO_BATCH = 2
################################################################################
####SQLGetInfo - SQL_PARAM_ARRAY_SELECTS########################################
################################################################################
self.SQL_PAS_BATCH = 1
self.SQL_PAS_NO_BATCH = 2
self.SQL_PAS_NO_SELECT = 3
################################################################################
####SQLGetInfo - SQL_POSITIONED_STATEMENTS######################################
################################################################################
self.SQL_PS_POSITIONED_DELETE = 0x00000001
self.SQL_PS_POSITIONED_UPDATE = 0x00000002
self.SQL_PS_SELECT_FOR_UPDATE = 0x00000004
################################################################################
####SQLGetInfo - SQL_QUALIFIER_LOCATION#########################################
################################################################################
self.SQL_QL_START = 0x0001
self.SQL_QL_END = 0x0002
################################################################################
####SQLGetInfo - SQL_CATALOG_LOCATION###########################################
################################################################################
self.SQL_CL_START = self.SQL_QL_START
self.SQL_CL_END = self.SQL_QL_END
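# ODBC 3.x renamed "qualifier" to "catalog" (and "owner" to "schema"); the
# catalog/schema constants simply alias the older qualifier/owner values.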
################################################################################
####SQLGetInfo - SQL_QUALIFIER_USAGE############################################
################################################################################
self.SQL_QU_DML_STATEMENTS = 0x00000001
self.SQL_QU_PROCEDURE_INVOCATION = 0x00000002
self.SQL_QU_TABLE_DEFINITION = 0x00000004
self.SQL_QU_INDEX_DEFINITION = 0x00000008
self.SQL_QU_PRIVILEGE_DEFINITION = 0x00000010
################################################################################
####SQLGetInfo - SQL_CATALOG_USAGE##############################################
################################################################################
self.SQL_CU_DML_STATEMENTS = self.SQL_QU_DML_STATEMENTS
self.SQL_CU_PROCEDURE_INVOCATION = self.SQL_QU_PROCEDURE_INVOCATION
self.SQL_CU_TABLE_DEFINITION = self.SQL_QU_TABLE_DEFINITION
self.SQL_CU_INDEX_DEFINITION = self.SQL_QU_INDEX_DEFINITION
self.SQL_CU_PRIVILEGE_DEFINITION = self.SQL_QU_PRIVILEGE_DEFINITION
################################################################################
####SQLGetInfo - SQL_SCHEMA_USAGE###############################################
################################################################################
self.SQL_SU_DML_STATEMENTS = self.SQL_OU_DML_STATEMENTS
self.SQL_SU_PROCEDURE_INVOCATION = self.SQL_OU_PROCEDURE_INVOCATION
self.SQL_SU_TABLE_DEFINITION = self.SQL_OU_TABLE_DEFINITION
self.SQL_SU_INDEX_DEFINITION = self.SQL_OU_INDEX_DEFINITION
self.SQL_SU_PRIVILEGE_DEFINITION = self.SQL_OU_PRIVILEGE_DEFINITION
################################################################################
####SQLGetInfo - SQL_SCROLL_OPTIONS#############################################
################################################################################
self.SQL_SO_FORWARD_ONLY = 0x00000001
self.SQL_SO_KEYSET_DRIVEN = 0x00000002
self.SQL_SO_DYNAMIC = 0x00000004
self.SQL_SO_MIXED = 0x00000008
self.SQL_SO_STATIC = 0x00000010
################################################################################
####SQLGetInfo - SQL_SQL_CONFORMANCE############################################
################################################################################
self.SQL_SC_SQL92_ENTRY = 0x00000001
self.SQL_SC_FIPS127_2_TRANSITIONAL = 0x00000002
self.SQL_SC_SQL92_INTERMEDIATE = 0x00000004
self.SQL_SC_SQL92_FULL = 0x00000008
################################################################################
####SQLGetInfo - SQL_SQL92_DATETIME_FUNCTIONS###################################
################################################################################
self.SQL_SDF_CURRENT_DATE = 0x00000001
self.SQL_SDF_CURRENT_TIME = 0x00000002
self.SQL_SDF_CURRENT_TIMESTAMP = 0x00000004
################################################################################
####SQLGetInfo - SQL_SQL92_FOREIGN_KEY_DELETE_RULE##############################
################################################################################
self.SQL_SFKD_CASCADE = 0x00000001
self.SQL_SFKD_NO_ACTION = 0x00000002
self.SQL_SFKD_SET_DEFAULT = 0x00000004
self.SQL_SFKD_SET_NULL = 0x00000008
################################################################################
####SQLGetInfo - SQL_SQL92_FOREIGN_KEY_UPDATE_RULE##############################
################################################################################
self.SQL_SFKU_CASCADE = 0x00000001
self.SQL_SFKU_NO_ACTION = 0x00000002
self.SQL_SFKU_SET_DEFAULT = 0x00000004
self.SQL_SFKU_SET_NULL = 0x00000008
################################################################################
####SQLGetInfo - SQL_SQL92_GRANT################################################
################################################################################
self.SQL_SG_USAGE_ON_DOMAIN = 0x00000001
self.SQL_SG_USAGE_ON_CHARACTER_SET = 0x00000002
self.SQL_SG_USAGE_ON_COLLATION = 0x00000004
self.SQL_SG_USAGE_ON_TRANSLATION = 0x00000008
self.SQL_SG_WITH_GRANT_OPTION = 0x00000010
self.SQL_SG_DELETE_TABLE = 0x00000020
self.SQL_SG_INSERT_TABLE = 0x00000040
self.SQL_SG_INSERT_COLUMN = 0x00000080
self.SQL_SG_REFERENCES_TABLE = 0x00000100
self.SQL_SG_REFERENCES_COLUMN = 0x00000200
self.SQL_SG_SELECT_TABLE = 0x00000400
self.SQL_SG_UPDATE_TABLE = 0x00000800
self.SQL_SG_UPDATE_COLUMN = 0x00001000
################################################################################
####SQLGetInfo - SQL_SQL92_NUMERIC_VALUE_FUNCTIONS##############################
################################################################################
self.SQL_SNVF_BIT_LENGTH = 0x00000001
self.SQL_SNVF_CHAR_LENGTH = 0x00000002
self.SQL_SNVF_CHARACTER_LENGTH = 0x00000004
self.SQL_SNVF_EXTRACT = 0x00000008
self.SQL_SNVF_OCTET_LENGTH = 0x00000010
self.SQL_SNVF_POSITION = 0x00000020
################################################################################
####SQLGetInfo - SQL_SQL92_PREDICATES###########################################
################################################################################
self.SQL_SP_EXISTS = 0x00000001
self.SQL_SP_ISNOTNULL = 0x00000002
self.SQL_SP_ISNULL = 0x00000004
self.SQL_SP_MATCH_FULL = 0x00000008
self.SQL_SP_MATCH_PARTIAL = 0x00000010
self.SQL_SP_MATCH_UNIQUE_FULL = 0x00000020
self.SQL_SP_MATCH_UNIQUE_PARTIAL = 0x00000040
self.SQL_SP_OVERLAPS = 0x00000080
self.SQL_SP_UNIQUE = 0x00000100
self.SQL_SP_LIKE = 0x00000200
self.SQL_SP_IN = 0x00000400
self.SQL_SP_BETWEEN = 0x00000800
self.SQL_SP_COMPARISON = 0x00001000
self.SQL_SP_QUANTIFIED_COMPARISON = 0x00002000
################################################################################
####SQLGetInfo - SQL_SQL92_RELATIONAL_JOIN_OPERATORS############################
################################################################################
self.SQL_SRJO_CORRESPONDING_CLAUSE = 0x00000001
self.SQL_SRJO_CROSS_JOIN = 0x00000002
self.SQL_SRJO_EXCEPT_JOIN = 0x00000004
self.SQL_SRJO_FULL_OUTER_JOIN = 0x00000008
self.SQL_SRJO_INNER_JOIN = 0x00000010
self.SQL_SRJO_INTERSECT_JOIN = 0x00000020
self.SQL_SRJO_LEFT_OUTER_JOIN = 0x00000040
self.SQL_SRJO_NATURAL_JOIN = 0x00000080
self.SQL_SRJO_RIGHT_OUTER_JOIN = 0x00000100
self.SQL_SRJO_UNION_JOIN = 0x00000200
################################################################################
####SQLGetInfo - SQL_SQL92_REVOKE###############################################
################################################################################
self.SQL_SR_USAGE_ON_DOMAIN = 0x00000001
self.SQL_SR_USAGE_ON_CHARACTER_SET = 0x00000002
self.SQL_SR_USAGE_ON_COLLATION = 0x00000004
self.SQL_SR_USAGE_ON_TRANSLATION = 0x00000008
self.SQL_SR_GRANT_OPTION_FOR = 0x00000010
self.SQL_SR_CASCADE = 0x00000020
self.SQL_SR_RESTRICT = 0x00000040
self.SQL_SR_DELETE_TABLE = 0x00000080
self.SQL_SR_INSERT_TABLE = 0x00000100
self.SQL_SR_INSERT_COLUMN = 0x00000200
self.SQL_SR_REFERENCES_TABLE = 0x00000400
self.SQL_SR_REFERENCES_COLUMN = 0x00000800
self.SQL_SR_SELECT_TABLE = 0x00001000
self.SQL_SR_UPDATE_TABLE = 0x00002000
self.SQL_SR_UPDATE_COLUMN = 0x00004000
################################################################################
####SQLGetInfo - SQL_SQL92_ROW_VALUE_CONSTRUCTOR################################
################################################################################
self.SQL_SRVC_VALUE_EXPRESSION = 0x00000001
self.SQL_SRVC_NULL = 0x00000002
self.SQL_SRVC_DEFAULT = 0x00000004
self.SQL_SRVC_ROW_SUBQUERY = 0x00000008
################################################################################
####SQLGetInfo - SQL_SQL92_STRING_FUNCTIONS#####################################
################################################################################
self.SQL_SSF_CONVERT = 0x00000001
self.SQL_SSF_LOWER = 0x00000002
self.SQL_SSF_UPPER = 0x00000004
self.SQL_SSF_SUBSTRING = 0x00000008
self.SQL_SSF_TRANSLATE = 0x00000010
self.SQL_SSF_TRIM_BOTH = 0x00000020
self.SQL_SSF_TRIM_LEADING = 0x00000040
self.SQL_SSF_TRIM_TRAILING = 0x00000080
################################################################################
####SQLGetInfo - SQL_SQL92_VALUE_EXPRESSIONS####################################
################################################################################
self.SQL_SVE_CASE = 0x00000001
self.SQL_SVE_CAST = 0x00000002
self.SQL_SVE_COALESCE = 0x00000004
self.SQL_SVE_NULLIF = 0x00000008
################################################################################
####SQLGetInfo - SQL_STANDARD_CLI_CONFORMANCE###################################
################################################################################
self.SQL_SCC_XOPEN_CLI_VERSION1 = 0x00000001
self.SQL_SCC_ISO92_CLI = 0x00000002
################################################################################
####SQLGetInfo - SQL_STATIC_SENSITIVITY#########################################
################################################################################
self.SQL_SS_ADDITIONS = 0x00000001
self.SQL_SS_DELETIONS = 0x00000002
self.SQL_SS_UPDATES = 0x00000004
################################################################################
####SQLGetInfo - SQL_SUBQUERIES#################################################
################################################################################
self.SQL_SQ_COMPARISON = 0x00000001
self.SQL_SQ_EXISTS = 0x00000002
self.SQL_SQ_IN = 0x00000004
self.SQL_SQ_QUANTIFIED = 0x00000008
self.SQL_SQ_CORRELATED_SUBQUERIES = 0x00000010
################################################################################
####SQLGetInfo - SQL_SYSTEM_FUNCTIONS###########################################
################################################################################
self.SQL_FN_SYS_USERNAME = 0x00000001
self.SQL_FN_SYS_DBNAME = 0x00000002
self.SQL_FN_SYS_IFNULL = 0x00000004
################################################################################
####SQLGetInfo - SQL_STRING_FUNCTIONS###########################################
################################################################################
self.SQL_FN_STR_CONCAT = 0x00000001
self.SQL_FN_STR_INSERT = 0x00000002
self.SQL_FN_STR_LEFT = 0x00000004
self.SQL_FN_STR_LTRIM = 0x00000008
self.SQL_FN_STR_LENGTH = 0x00000010
self.SQL_FN_STR_LOCATE = 0x00000020
self.SQL_FN_STR_LCASE = 0x00000040
self.SQL_FN_STR_REPEAT = 0x00000080
self.SQL_FN_STR_REPLACE = 0x00000100
self.SQL_FN_STR_RIGHT = 0x00000200
self.SQL_FN_STR_RTRIM = 0x00000400
self.SQL_FN_STR_SUBSTRING = 0x00000800
self.SQL_FN_STR_UCASE = 0x00001000
self.SQL_FN_STR_ASCII = 0x00002000
self.SQL_FN_STR_CHAR = 0x00004000
self.SQL_FN_STR_DIFFERENCE = 0x00008000
self.SQL_FN_STR_LOCATE_2 = 0x00010000
self.SQL_FN_STR_SOUNDEX = 0x00020000
self.SQL_FN_STR_SPACE = 0x00040000
self.SQL_FN_STR_BIT_LENGTH = 0x00080000
self.SQL_FN_STR_CHAR_LENGTH = 0x00100000
self.SQL_FN_STR_CHARACTER_LENGTH = 0x00200000
self.SQL_FN_STR_OCTET_LENGTH = 0x00400000
self.SQL_FN_STR_POSITION = 0x00800000
################################################################################
####SQLGetInfo - SQL_TIMEDATE_ADD_INTERVALS#####################################
####SQLGetInfo - SQL_TIMEDATE_DIFF_INTERVALS####################################
################################################################################
self.SQL_FN_TSI_FRAC_SECOND = 0x00000001
self.SQL_FN_TSI_SECOND = 0x00000002
self.SQL_FN_TSI_MINUTE = 0x00000004
self.SQL_FN_TSI_HOUR = 0x00000008
self.SQL_FN_TSI_DAY = 0x00000010
self.SQL_FN_TSI_WEEK = 0x00000020
self.SQL_FN_TSI_MONTH = 0x00000040
self.SQL_FN_TSI_QUARTER = 0x00000080
self.SQL_FN_TSI_YEAR = 0x00000100
################################################################################
####SQLGetInfo - SQL_TIMEDATE_FUNCTIONS#########################################
################################################################################
self.SQL_FN_TD_NOW = 0x00000001
self.SQL_FN_TD_CURDATE = 0x00000002
self.SQL_FN_TD_DAYOFMONTH = 0x00000004
self.SQL_FN_TD_DAYOFWEEK = 0x00000008
self.SQL_FN_TD_DAYOFYEAR = 0x00000010
self.SQL_FN_TD_MONTH = 0x00000020
self.SQL_FN_TD_QUARTER = 0x00000040
self.SQL_FN_TD_WEEK = 0x00000080
self.SQL_FN_TD_YEAR = 0x00000100
self.SQL_FN_TD_CURTIME = 0x00000200
self.SQL_FN_TD_HOUR = 0x00000400
self.SQL_FN_TD_MINUTE = 0x00000800
self.SQL_FN_TD_SECOND = 0x00001000
self.SQL_FN_TD_TIMESTAMPADD = 0x00002000
self.SQL_FN_TD_TIMESTAMPDIFF = 0x00004000
self.SQL_FN_TD_DAYNAME = 0x00008000
self.SQL_FN_TD_MONTHNAME = 0x00010000
self.SQL_FN_TD_CURRENT_DATE = 0x00020000
self.SQL_FN_TD_CURRENT_TIME = 0x00040000
self.SQL_FN_TD_CURRENT_TIMESTAMP = 0x00080000
self.SQL_FN_TD_EXTRACT = 0x00100000
################################################################################
####SQLGetInfo - SQL_TXN_ISOLATION_OPTION#######################################
################################################################################
self.SQL_TXN_VERSIONING = 0x00000010
################################################################################
####SQLGetInfo - SQL_UNION######################################################
################################################################################
self.SQL_U_UNION = 0x00000001
self.SQL_U_UNION_ALL = 0x00000002
################################################################################
####SQLGetInfo - SQL_UNION_STATEMENT############################################
################################################################################
self.SQL_US_UNION = self.SQL_U_UNION
self.SQL_US_UNION_ALL = self.SQL_U_UNION_ALL
################################################################################
####SQLGetStmtAttr - ODBC 2.x attributes########################################
################################################################################
self.SQL_QUERY_TIMEOUT = 0
self.SQL_MAX_ROWS = 1
self.SQL_NOSCAN = 2
self.SQL_MAX_LENGTH = 3
self.SQL_ASYNC_ENABLE = 4
self.SQL_BIND_TYPE = 5
self.SQL_CURSOR_TYPE = 6
self.SQL_CONCURRENCY = 7
self.SQL_KEYSET_SIZE = 8
self.SQL_ROWSET_SIZE = 9
self.SQL_SIMULATE_CURSOR = 10
self.SQL_RETRIEVE_DATA = 11
self.SQL_USE_BOOKMARKS = 12
self.SQL_GET_BOOKMARK = 13
self.SQL_ROW_NUMBER = 14
################################################################################
####SQLGetStmtAttr - ODBC 3.x attributes########################################
################################################################################
self.SQL_ATTR_ASYNC_ENABLE = 4
self.SQL_ATTR_CONCURRENCY = self.SQL_CONCURRENCY
self.SQL_ATTR_CURSOR_TYPE = self.SQL_CURSOR_TYPE
self.SQL_ATTR_ENABLE_AUTO_IPD = 15
self.SQL_ATTR_FETCH_BOOKMARK_PTR = 16
self.SQL_ATTR_KEYSET_SIZE = self.SQL_KEYSET_SIZE
self.SQL_ATTR_MAX_LENGTH = self.SQL_MAX_LENGTH
self.SQL_ATTR_MAX_ROWS = self.SQL_MAX_ROWS
self.SQL_ATTR_NOSCAN = self.SQL_NOSCAN
self.SQL_ATTR_PARAM_BIND_OFFSET_PTR = 17
self.SQL_ATTR_PARAM_BIND_TYPE = 18
self.SQL_ATTR_PARAM_OPERATION_PTR = 19
self.SQL_ATTR_PARAM_STATUS_PTR = 20
self.SQL_ATTR_PARAMS_PROCESSED_PTR = 21
self.SQL_ATTR_PARAMSET_SIZE = 22
self.SQL_ATTR_QUERY_TIMEOUT = self.SQL_QUERY_TIMEOUT
self.SQL_ATTR_RETRIEVE_DATA = self.SQL_RETRIEVE_DATA
self.SQL_ATTR_ROW_BIND_OFFSET_PTR = 23
self.SQL_ATTR_ROW_BIND_TYPE = self.SQL_BIND_TYPE
self.SQL_ATTR_ROW_NUMBER = self.SQL_ROW_NUMBER
self.SQL_ATTR_ROW_OPERATION_PTR = 24
self.SQL_ATTR_ROW_STATUS_PTR = 25
self.SQL_ATTR_ROWS_FETCHED_PTR = 26
self.SQL_ATTR_ROW_ARRAY_SIZE = 27
self.SQL_ATTR_SIMULATE_CURSOR = self.SQL_SIMULATE_CURSOR
self.SQL_ATTR_USE_BOOKMARKS = self.SQL_USE_BOOKMARKS
self.SQL_STMT_OPT_MAX = self.SQL_ROW_NUMBER
self.SQL_STMT_OPT_MIN = self.SQL_QUERY_TIMEOUT
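# Bounds of the ODBC 2.x statement-option range (0..14), historically used
# to validate option codes passed to SQLSetStmtOption/SQLGetStmtOption.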
################################################################################
####SQLGetStmtAttr - SQL_ATTR_ASYNC_ENABLE######################################
################################################################################
self.SQL_ASYNC_ENABLE_OFF = 0
self.SQL_ASYNC_ENABLE_ON = 1
self.SQL_ASYNC_ENABLE_DEFAULT = self.SQL_ASYNC_ENABLE_OFF
################################################################################
####SQLGetStmtAttr - SQL_ATTR_PARAM_BIND_TYPE###################################
################################################################################
self.SQL_PARAM_BIND_BY_COLUMN = 0
self.SQL_PARAM_BIND_TYPE_DEFAULT = self.SQL_PARAM_BIND_BY_COLUMN
################################################################################
####SQLGetStmtAttr - SQL_BIND_TYPE##############################################
################################################################################
self.SQL_BIND_BY_COLUMN = 0
self.SQL_BIND_TYPE_DEFAULT = self.SQL_BIND_BY_COLUMN
################################################################################
####SQLGetStmtAttr - SQL_CONCURRENCY############################################
################################################################################
self.SQL_CONCUR_READ_ONLY = 1
self.SQL_CONCUR_LOCK = 2
self.SQL_CONCUR_ROWVER = 3
self.SQL_CONCUR_VALUES = 4
self.SQL_CONCUR_DEFAULT = self.SQL_CONCUR_READ_ONLY
################################################################################
####SQLGetStmtAttr - SQL_CURSOR_TYPE############################################
################################################################################
self.SQL_CURSOR_FORWARD_ONLY = 0
self.SQL_CURSOR_KEYSET_DRIVEN = 1
self.SQL_CURSOR_DYNAMIC = 2
self.SQL_CURSOR_STATIC = 3
self.SQL_CURSOR_TYPE_DEFAULT = self.SQL_CURSOR_FORWARD_ONLY
################################################################################
####SQLGetStmtAttr - SQL_KEYSET_SIZE############################################
################################################################################
self.SQL_KEYSET_SIZE_DEFAULT = 0
################################################################################
####SQLGetStmtAttr - SQL_MAX_LENGTH#############################################
################################################################################
self.SQL_MAX_LENGTH_DEFAULT = 0
################################################################################
####SQLGetStmtAttr - SQL_MAX_ROWS###############################################
################################################################################
self.SQL_MAX_ROWS_DEFAULT = 0
################################################################################
####SQLGetStmtAttr - SQL_NOSCAN#################################################
################################################################################
self.SQL_NOSCAN_OFF = 0
self.SQL_NOSCAN_ON = 1
self.SQL_NOSCAN_DEFAULT = self.SQL_NOSCAN_OFF
################################################################################
####SQLGetStmtAttr - SQL_QUERY_TIMEOUT##########################################
################################################################################
self.SQL_QUERY_TIMEOUT_DEFAULT = 0
################################################################################
####SQLGetStmtAttr - SQL_RETRIEVE_DATA##########################################
################################################################################
self.SQL_RD_OFF = 0
self.SQL_RD_ON = 1
self.SQL_RD_DEFAULT = self.SQL_RD_ON
################################################################################
####SQLGetStmtAttr - SQL_ROWSET_SIZE############################################
################################################################################
self.SQL_ROWSET_SIZE_DEFAULT = 1
################################################################################
####SQLGetStmtAttr - SQL_SIMULATE_CURSOR########################################
################################################################################
self.SQL_SC_NON_UNIQUE = 0
self.SQL_SC_TRY_UNIQUE = 1
self.SQL_SC_UNIQUE = 2
################################################################################
####SQLGetStmtAttr - SQL_USE_BOOKMARKS##########################################
################################################################################
self.SQL_UB_OFF = 0
self.SQL_UB_ON = 1
self.SQL_UB_DEFAULT = self.SQL_UB_OFF
self.SQL_UB_FIXED = self.SQL_UB_ON
self.SQL_UB_VARIABLE = 2
################################################################################
####SQLGetTypeInfo - SEARCHABLE#################################################
################################################################################
self.SQL_COL_PRED_CHAR = self.SQL_LIKE_ONLY
self.SQL_COL_PRED_BASIC = self.SQL_ALL_EXCEPT_LIKE
################################################################################
####SQLSetPos###################################################################
################################################################################
self.SQL_ENTIRE_ROWSET = 0
################################################################################
####SQLSetPos - Operation#######################################################
################################################################################
self.SQL_POSITION = 0
self.SQL_REFRESH = 1
self.SQL_UPDATE = 2
self.SQL_DELETE = 3
################################################################################
####SQLBulkOperations - Operation###############################################
################################################################################
self.SQL_ADD = 4
self.SQL_SETPOS_MAX_OPTION_VALUE = self.SQL_ADD
self.SQL_UPDATE_BY_BOOKMARK = 5
self.SQL_DELETE_BY_BOOKMARK = 6
self.SQL_FETCH_BY_BOOKMARK = 7
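# SQL_ADD is the highest operation code SQLSetPos accepts, hence
# SQL_SETPOS_MAX_OPTION_VALUE above; the *_BY_BOOKMARK codes are valid only
# for SQLBulkOperations.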
################################################################################
####SQLSetPos - LockType########################################################
################################################################################
self.SQL_LOCK_NO_CHANGE = 0
self.SQL_LOCK_EXCLUSIVE = 1
self.SQL_LOCK_UNLOCK = 2
self.SQL_SETPOS_MAX_LOCK_VALUE = self.SQL_LOCK_UNLOCK
################################################################################
####SQLSetPos macros############################################################
################################################################################
# TODO: map the C SQLSetPos convenience macros onto Python callables
self.SQL_POSITION_TO = self.UnimplementedSQLFunction
self.SQL_LOCK_RECORD = self.UnimplementedSQLFunction
self.SQL_REFRESH_RECORD = self.UnimplementedSQLFunction
self.SQL_UPDATE_RECORD = self.UnimplementedSQLFunction
self.SQL_DELETE_RECORD = self.UnimplementedSQLFunction
self.SQL_ADD_RECORD = self.UnimplementedSQLFunction
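# In the C headers these are thin macros over SQLSetPos, e.g.
# SQL_POSITION_TO(hstmt, irow) expands to
# SQLSetPos(hstmt, irow, SQL_POSITION, SQL_LOCK_NO_CHANGE).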
################################################################################
####SQLSpecialColumns - Column types and scopes#################################
################################################################################
self.SQL_BEST_ROWID = 1
self.SQL_ROWVER = 2
################################################################################
####All the ODBC keywords#######################################################
################################################################################
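# Adjacent string literals below concatenate into one comma-separated
# keyword string, so every fragment except the last must end with a comma.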
self.SQL_ODBC_KEYWORDS = ("ABSOLUTE,ACTION,ADA,ADD,ALL,ALLOCATE,ALTER,AND,ANY,ARE,AS,"
"ASC,ASSERTION,AT,AUTHORIZATION,AVG,"
"BEGIN,BETWEEN,BIT,BIT_LENGTH,BOTH,BY,CASCADE,CASCADED,CASE,CAST,CATALOG,"
"CHAR,CHAR_LENGTH,CHARACTER,CHARACTER_LENGTH,CHECK,CLOSE,COALESCE,"
"COLLATE,COLLATION,COLUMN,COMMIT,CONNECT,CONNECTION,CONSTRAINT,"
"CONSTRAINTS,CONTINUE,CONVERT,CORRESPONDING,COUNT,CREATE,CROSS,CURRENT,"
"CURRENT_DATE,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_USER,CURSOR,"
"DATE,DAY,DEALLOCATE,DEC,DECIMAL,DECLARE,DEFAULT,DEFERRABLE,"
"DEFERRED,DELETE,DESC,DESCRIBE,DESCRIPTOR,DIAGNOSTICS,DISCONNECT,"
"DISTINCT,DOMAIN,DOUBLE,DROP,"
"ELSE,END,END-EXEC,ESCAPE,EXCEPT,EXCEPTION,EXEC,EXECUTE,"
"EXISTS,EXTERNAL,EXTRACT,"
"FALSE,FETCH,FIRST,FLOAT,FOR,FOREIGN,FORTRAN,FOUND,FROM,FULL,"
"GET,GLOBAL,GO,GOTO,GRANT,GROUP,HAVING,HOUR,"
"IDENTITY,IMMEDIATE,IN,INCLUDE,INDEX,INDICATOR,INITIALLY,INNER,"
"INPUT,INSENSITIVE,INSERT,INT,INTEGER,INTERSECT,INTERVAL,INTO,IS,ISOLATION,"
"JOIN,KEY,LANGUAGE,LAST,LEADING,LEFT,LEVEL,LIKE,LOCAL,LOWER,"
"MATCH,MAX,MIN,MINUTE,MODULE,MONTH,"
"NAMES,NATIONAL,NATURAL,NCHAR,NEXT,NO,NONE,NOT,NULL,NULLIF,NUMERIC,"
"OCTET_LENGTH,OF,ON,ONLY,OPEN,OPTION,OR,ORDER,OUTER,OUTPUT,OVERLAPS,"
"PAD,PARTIAL,PASCAL,PLI,POSITION,PRECISION,PREPARE,PRESERVE,"
"PRIMARY,PRIOR,PRIVILEGES,PROCEDURE,PUBLIC,"
"READ,REAL,REFERENCES,RELATIVE,RESTRICT,REVOKE,RIGHT,ROLLBACK,ROWS"
"SCHEMA,SCROLL,SECOND,SECTION,SELECT,SESSION,SESSION_USER,SET,SIZE,"
"SMALLINT,SOME,SPACE,SQL,SQLCA,SQLCODE,SQLERROR,SQLSTATE,SQLWARNING,"
"SUBSTRING,SUM,SYSTEM_USER,"
"TABLE,TEMPORARY,THEN,TIME,TIMESTAMP,TIMEZONE_HOUR,TIMEZONE_MINUTE,"
"TO,TRAILING,TRANSACTION,TRANSLATE,TRANSLATION,TRIM,TRUE,"
"UNION,UNIQUE,UNKNOWN,UPDATE,UPPER,USAGE,USER,USING,"
"VALUE,VALUES,VARCHAR,VARYING,VIEW,WHEN,WHENEVER,WHERE,WITH,WORK,WRITE,"
"YEAR,ZONE")
################################################################################
####Level 2 Functions###########################################################
################################################################################
################################################################################
####SQLExtendedFetch - fFetchType###############################################
################################################################################
self.SQL_FETCH_BOOKMARK = 8
################################################################################
####SQLExtendedFetch - rgfRowStatus#############################################
################################################################################
self.SQL_ROW_SUCCESS = 0
self.SQL_ROW_DELETED = 1
self.SQL_ROW_UPDATED = 2
self.SQL_ROW_NOROW = 3
self.SQL_ROW_ADDED = 4
self.SQL_ROW_ERROR = 5
self.SQL_ROW_SUCCESS_WITH_INFO = 6
self.SQL_ROW_PROCEED = 0
self.SQL_ROW_IGNORE = 1
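# SQL_ROW_PROCEED/SQL_ROW_IGNORE share numeric values with the fetch
# statuses above; they are inputs in the row-operation array
# (SQL_ATTR_ROW_OPERATION_PTR) rather than fetch results.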
################################################################################
####SQL_DESC_ARRAY_STATUS_PTR###################################################
################################################################################
self.SQL_PARAM_SUCCESS = 0
self.SQL_PARAM_SUCCESS_WITH_INFO = 6
self.SQL_PARAM_ERROR = 5
self.SQL_PARAM_UNUSED = 7
self.SQL_PARAM_DIAG_UNAVAILABLE = 1
self.SQL_PARAM_PROCEED = 0
self.SQL_PARAM_IGNORE = 1
################################################################################
####SQLForeignKeys - UPDATE_RULE/DELETE_RULE####################################
################################################################################
self.SQL_CASCADE = 0
self.SQL_RESTRICT = 1
self.SQL_SET_NULL = 2
self.SQL_NO_ACTION = 3
self.SQL_SET_DEFAULT = 4
################################################################################
####SQLForeignKeys - DEFERRABILITY##############################################
################################################################################
self.SQL_INITIALLY_DEFERRED = 5
self.SQL_INITIALLY_IMMEDIATE = 6
self.SQL_NOT_DEFERRABLE = 7
################################################################################
####SQLBindParameter - fParamType###############################################
####SQLProcedureColumns - COLUMN_TYPE###########################################
################################################################################
self.SQL_PARAM_TYPE_UNKNOWN = 0
self.SQL_PARAM_INPUT = 1
self.SQL_PARAM_INPUT_OUTPUT = 2
self.SQL_RESULT_COL = 3
self.SQL_PARAM_OUTPUT = 4
self.SQL_RETURN_VALUE = 5
################################################################################
####SQLProcedures - PROCEDURE_TYPE##############################################
################################################################################
self.SQL_PT_UNKNOWN = 0
self.SQL_PT_PROCEDURE = 1
self.SQL_PT_FUNCTION = 2
################################################################################
####SQLSetParam to SQLBindParameter conversion##################################
################################################################################
self.SQL_PARAM_TYPE_DEFAULT = self.SQL_PARAM_INPUT_OUTPUT
self.SQL_SETPARAM_VALUE_MAX = -1
################################################################################
####SQLStatistics - fAccuracy###################################################
################################################################################
self.SQL_QUICK = 0
self.SQL_ENSURE = 1
################################################################################
####SQLStatistics - TYPE########################################################
################################################################################
self.SQL_TABLE_STAT = 0
################################################################################
####SQLTables###################################################################
################################################################################
self.SQL_ALL_CATALOGS = "%"
self.SQL_ALL_SCHEMAS = "%"
self.SQL_ALL_TABLE_TYPES = "%"
################################################################################
####SQLSpecialColumns - PSEUDO_COLUMN###########################################
################################################################################
self.SQL_PC_NOT_PSEUDO = 1
################################################################################
####Deprecated defines from prior versions of ODBC##############################
################################################################################
self.SQL_DATABASE_NAME = 16
self.SQL_FD_FETCH_PREV = self.SQL_FD_FETCH_PRIOR
self.SQL_FETCH_PREV = self.SQL_FETCH_PRIOR
self.SQL_CONCUR_TIMESTAMP = self.SQL_CONCUR_ROWVER
self.SQL_SCCO_OPT_TIMESTAMP = self.SQL_SCCO_OPT_ROWVER
self.SQL_CC_DELETE = self.SQL_CB_DELETE
self.SQL_CR_DELETE = self.SQL_CB_DELETE
self.SQL_CC_CLOSE = self.SQL_CB_CLOSE
self.SQL_CR_CLOSE = self.SQL_CB_CLOSE
self.SQL_CC_PRESERVE = self.SQL_CB_PRESERVE
self.SQL_CR_PRESERVE = self.SQL_CB_PRESERVE
self.SQL_FETCH_RESUME = 7
self.SQL_SCROLL_FORWARD_ONLY = 0
self.SQL_SCROLL_KEYSET_DRIVEN = -1
self.SQL_SCROLL_DYNAMIC = -2
self.SQL_SCROLL_STATIC = -3
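# Source-compatibility defines: code written against ODBC 1.x/2.x still
# resolves these names; most alias the current constants defined above.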
################################################################################
####Level 1 function prototypes#################################################
################################################################################
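# Binding pattern: each ODBC entry point is looked up on the loaded driver
# library. If the driver exports it, restype/argtypes are set and the bound
# ctypes function is exposed; otherwise the name falls back to
# self.UnimplementedSQLFunction so every call site has a uniform error path.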
if hasattr(self.ODBC_DRIVER, "SQLDriverConnect"):
    self.ODBC_DRIVER.SQLDriverConnect.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLDriverConnect.argtypes = (self.SQLHDBC, self.SQLHWND, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), self.SQLUSMALLINT,)
    self.SQLDriverConnect = self.ODBC_DRIVER.SQLDriverConnect
else:
    self.SQLDriverConnect = self.UnimplementedSQLFunction
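# Illustrative call sketch (hypothetical, not part of this module): assumes
# an allocated SQLHDBC handle `hdbc` and the SQL_DRIVER_NOPROMPT completion
# option defined elsewhere in this class.
#     conn = ctypes.create_string_buffer(b"DSN=sample;UID=user;PWD=secret")
#     out = ctypes.create_string_buffer(1024)
#     out_len = self.SQLSMALLINT()
#     rc = self.SQLDriverConnect(
#         hdbc, None,
#         ctypes.cast(conn, ctypes.POINTER(self.SQLCHAR)), len(conn.value),
#         ctypes.cast(out, ctypes.POINTER(self.SQLCHAR)), len(out),
#         ctypes.byref(out_len), self.SQL_DRIVER_NOPROMPT)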
################################################################################
####Level 2 function prototypes#################################################
################################################################################
if hasattr(self.ODBC_DRIVER, "SQLBrowseConnect"):
    self.ODBC_DRIVER.SQLBrowseConnect.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLBrowseConnect.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
    self.SQLBrowseConnect = self.ODBC_DRIVER.SQLBrowseConnect
else:
    self.SQLBrowseConnect = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLBulkOperations"):
    self.ODBC_DRIVER.SQLBulkOperations.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLBulkOperations.argtypes = (self.SQLHSTMT, self.SQLSMALLINT,)
    self.SQLBulkOperations = self.ODBC_DRIVER.SQLBulkOperations
else:
    self.SQLBulkOperations = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColAttributes"):
    self.ODBC_DRIVER.SQLColAttributes.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLColAttributes.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLUSMALLINT, self.SQLPOINTER, self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLLEN),)
    self.SQLColAttributes = self.ODBC_DRIVER.SQLColAttributes
else:
    self.SQLColAttributes = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLColumnPrivileges"):
    self.ODBC_DRIVER.SQLColumnPrivileges.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLColumnPrivileges.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
    self.SQLColumnPrivileges = self.ODBC_DRIVER.SQLColumnPrivileges
else:
    self.SQLColumnPrivileges = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDescribeParam"):
    self.ODBC_DRIVER.SQLDescribeParam.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLDescribeParam.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLULEN), ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLSMALLINT),)
    self.SQLDescribeParam = self.ODBC_DRIVER.SQLDescribeParam
else:
    self.SQLDescribeParam = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLExtendedFetch"):
    self.ODBC_DRIVER.SQLExtendedFetch.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLExtendedFetch.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLLEN, ctypes.POINTER(self.SQLULEN), ctypes.POINTER(self.SQLUSMALLINT),)
    self.SQLExtendedFetch = self.ODBC_DRIVER.SQLExtendedFetch
else:
    self.SQLExtendedFetch = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLForeignKeys"):
    self.ODBC_DRIVER.SQLForeignKeys.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLForeignKeys.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
    self.SQLForeignKeys = self.ODBC_DRIVER.SQLForeignKeys
else:
    self.SQLForeignKeys = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLMoreResults"):
    self.ODBC_DRIVER.SQLMoreResults.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLMoreResults.argtypes = (self.SQLHSTMT,)
    self.SQLMoreResults = self.ODBC_DRIVER.SQLMoreResults
else:
    self.SQLMoreResults = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLNativeSql"):
    self.ODBC_DRIVER.SQLNativeSql.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLNativeSql.argtypes = (self.SQLHDBC, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER, ctypes.POINTER(self.SQLCHAR), self.SQLINTEGER, ctypes.POINTER(self.SQLINTEGER),)
    self.SQLNativeSql = self.ODBC_DRIVER.SQLNativeSql
else:
    self.SQLNativeSql = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLNumParams"):
    self.ODBC_DRIVER.SQLNumParams.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLNumParams.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLSMALLINT),)
    self.SQLNumParams = self.ODBC_DRIVER.SQLNumParams
else:
    self.SQLNumParams = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLParamOptions"):
    self.ODBC_DRIVER.SQLParamOptions.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLParamOptions.argtypes = (self.SQLHSTMT, self.SQLULEN, ctypes.POINTER(self.SQLULEN),)
    self.SQLParamOptions = self.ODBC_DRIVER.SQLParamOptions
else:
    self.SQLParamOptions = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLPrimaryKeys"):
    self.ODBC_DRIVER.SQLPrimaryKeys.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLPrimaryKeys.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
    self.SQLPrimaryKeys = self.ODBC_DRIVER.SQLPrimaryKeys
else:
    self.SQLPrimaryKeys = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLProcedureColumns"):
    self.ODBC_DRIVER.SQLProcedureColumns.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLProcedureColumns.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
    self.SQLProcedureColumns = self.ODBC_DRIVER.SQLProcedureColumns
else:
    self.SQLProcedureColumns = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLProcedures"):
    self.ODBC_DRIVER.SQLProcedures.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLProcedures.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
    self.SQLProcedures = self.ODBC_DRIVER.SQLProcedures
else:
    self.SQLProcedures = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLSetPos"):
    self.ODBC_DRIVER.SQLSetPos.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLSetPos.argtypes = (self.SQLHSTMT, self.SQLSETPOSIROW, self.SQLUSMALLINT, self.SQLUSMALLINT,)
    self.SQLSetPos = self.ODBC_DRIVER.SQLSetPos
else:
    self.SQLSetPos = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLTablePrivileges"):
    self.ODBC_DRIVER.SQLTablePrivileges.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLTablePrivileges.argtypes = (self.SQLHSTMT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT,)
    self.SQLTablePrivileges = self.ODBC_DRIVER.SQLTablePrivileges
else:
    self.SQLTablePrivileges = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLDrivers"):
    self.ODBC_DRIVER.SQLDrivers.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLDrivers.argtypes = (self.SQLHENV, self.SQLUSMALLINT, ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT), ctypes.POINTER(self.SQLCHAR), self.SQLSMALLINT, ctypes.POINTER(self.SQLSMALLINT),)
    self.SQLDrivers = self.ODBC_DRIVER.SQLDrivers
else:
    self.SQLDrivers = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLBindParameter"):
    self.ODBC_DRIVER.SQLBindParameter.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLBindParameter.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLSMALLINT, self.SQLULEN, self.SQLSMALLINT, self.SQLPOINTER, self.SQLLEN, ctypes.POINTER(self.SQLLEN),)
    self.SQLBindParameter = self.ODBC_DRIVER.SQLBindParameter
else:
    self.SQLBindParameter = self.UnimplementedSQLFunction
################################################################################
####Deprecated function prototypes##############################################
################################################################################
if hasattr(self.ODBC_DRIVER, "SQLSetScrollOptions"):
    self.ODBC_DRIVER.SQLSetScrollOptions.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLSetScrollOptions.argtypes = (self.SQLHSTMT, self.SQLUSMALLINT, self.SQLLEN, self.SQLUSMALLINT,)
    self.SQLSetScrollOptions = self.ODBC_DRIVER.SQLSetScrollOptions
else:
    self.SQLSetScrollOptions = self.UnimplementedSQLFunction
if hasattr(self.ODBC_DRIVER, "SQLAllocHandleStd"):
    self.ODBC_DRIVER.SQLAllocHandleStd.restype = self.SQLRETURN
    self.ODBC_DRIVER.SQLAllocHandleStd.argtypes = (self.SQLSMALLINT, self.SQLHANDLE, ctypes.POINTER(self.SQLHANDLE),)
    self.SQLAllocHandleStd = self.ODBC_DRIVER.SQLAllocHandleStd
else:
    self.SQLAllocHandleStd = self.UnimplementedSQLFunction
################################################################################
####Internal type subcodes######################################################
################################################################################
self.SQL_YEAR = self.SQL_CODE_YEAR
self.SQL_MONTH = self.SQL_CODE_MONTH
self.SQL_DAY = self.SQL_CODE_DAY
self.SQL_HOUR = self.SQL_CODE_HOUR
self.SQL_MINUTE = self.SQL_CODE_MINUTE
self.SQL_SECOND = self.SQL_CODE_SECOND
self.SQL_YEAR_TO_MONTH = self.SQL_CODE_YEAR_TO_MONTH
self.SQL_DAY_TO_HOUR = self.SQL_CODE_DAY_TO_HOUR
self.SQL_DAY_TO_MINUTE = self.SQL_CODE_DAY_TO_MINUTE
self.SQL_DAY_TO_SECOND = self.SQL_CODE_DAY_TO_SECOND
self.SQL_HOUR_TO_MINUTE = self.SQL_CODE_HOUR_TO_MINUTE
self.SQL_HOUR_TO_SECOND = self.SQL_CODE_HOUR_TO_SECOND
self.SQL_MINUTE_TO_SECOND = self.SQL_CODE_MINUTE_TO_SECOND

# --- vendor/tensorboxresnet/setup.py (Julen-Lujambio/deepfigures_open, Apache-2.0) ---
#!/usr/bin/env python
import sys
import os

from setuptools import setup, Extension, find_packages

tf_include = '/'.join(sys.executable.split('/')[:-2]) + \
    '/lib/python%d.%d/site-packages/tensorflow/include' % sys.version_info[:2]

extra_defs = []
if os.uname().sysname == 'Darwin':
    extra_defs.append('-D_GLIBCXX_USE_CXX11_ABI=0')
else:
    os.environ['CC'] = 'g++'
    os.environ['CXX'] = 'g++'

setup(
    name='tensorboxresnet',
    version='0.20',
    packages=find_packages(),
    setup_requires=['Cython'],
    ext_modules=[
        Extension(
            'tensorboxresnet.utils.stitch_wrapper',
            [
                './tensorboxresnet/utils/stitch_wrapper.pyx',
                './tensorboxresnet/utils/stitch_rects.cpp',
                './tensorboxresnet/utils/hungarian/hungarian.cpp'
            ],
            language='c++',
            extra_compile_args=[
                '-std=c++11', '-Itensorbox/utils',
                '-I%s' % tf_include
            ] + extra_defs,
        )
    ]
)
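
# A typical in-place build (assumes Cython and a C++11 toolchain are
# available on the machine):
#     python setup.py build_ext --inplace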

# --- epub_writer.py (bytesapart/LeetcodeScraping, Apache-2.0) ---
from ebooklib import epub
import colorama
from colorama import Back, Fore

colorama.init()


def write(file_name, title, author, chapters):
    # Ebook
    book = epub.EpubBook()

    # set metadata
    book.set_identifier('id123456')
    book.set_title(title)
    book.set_language('en')
    book.add_author(author)
    book.add_author('Anonymous', file_as='Anonymous', role='ill', uid='coauthor')

    with open('LeetCode_Sharing.png', 'rb') as logo:
        book.add_item(
            epub.EpubItem('cover-image', file_name='LeetCode_Sharing.png', media_type='image/png', content=logo.read()))

    c1 = epub.EpubHtml(title='Cover', file_name='cover_2.html', lang='en')
    with open('cover.html', 'r') as cover_html:
        content = cover_html.read()
        book.set_cover('cover.html', content)
        c1.content = content
    book.add_item(c1)

    toc = []
    spine = [c1, 'nav']
    # For each chapter add chapter to the book, TOC and spine
    for chapter in chapters:
        book.add_item(chapter)
        toc.append(epub.Link(chapter.file_name, chapter.title, chapter.title))
        spine.append(chapter)

    # define Table Of Contents
    book.toc = tuple(toc)

    # add default NCX and Nav file
    book.add_item(epub.EpubNcx())
    book.add_item(epub.EpubNav())

    # define CSS style (a missing ';' between 'border-radius:3px' and 'margin-top:0' is fixed here)
    style = 'pre{white-space:pre-wrap;background:#f7f9fa;padding:10px 15px;color:#263238;line-height:1.6;font-size:13px;border-radius:3px;margin-top:0;margin-bottom:1em;overflow:auto}b,strong{font-weight:bolder}#title{font-size:16px;color:#212121;font-weight:600;margin-bottom:10px}hr{height:10px;border:0;box-shadow:0 10px 10px -10px #8c8b8b inset}'
    nav_css = epub.EpubItem(uid="style_nav", file_name="style/nav.css", media_type="text/css", content=style)

    # add CSS file
    book.add_item(nav_css)

    # basic spine
    book.spine = spine

    # write to the file
    epub.write_epub(file_name, book, {})
    print(Back.GREEN + Fore.BLACK + " File " + Back.YELLOW + f" {file_name} " + Back.GREEN + " Successfully Written ")
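

# Minimal usage sketch (not part of the original module). The chapter content
# below is hypothetical; write() expects ebooklib EpubHtml objects and assumes
# 'cover.html' and 'LeetCode_Sharing.png' exist in the working directory.
if __name__ == '__main__':
    demo = epub.EpubHtml(title='Two Sum', file_name='two_sum.xhtml', lang='en')
    demo.content = '<div id="title">Two Sum</div><pre>def two_sum(nums, target): ...</pre>'
    write('leetcode.epub', 'LeetCode Solutions', 'Anonymous', [demo])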
| 35.859649 | 351 | 0.677593 | 298 | 2,044 | 4.543624 | 0.416107 | 0.041359 | 0.048744 | 0.033235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033473 | 0.181507 | 2,044 | 56 | 352 | 36.5 | 0.775852 | 0.091977 | 0 | 0 | 0 | 0.028571 | 0.299025 | 0.15818 | 0 | 0 | 0 | 0 | 0 | 1 | 0.028571 | false | 0 | 0.085714 | 0 | 0.114286 | 0.028571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6820bcda8187ef62292cf5f4b1da91de1ce1282b | 1,411 | py | Python | pyETM/httpclient/delete.py | robcalon/PyETM | 323418ad57b2df7d47f2495919c943db28ca55cc | [
"MIT"
] | null | null | null | pyETM/httpclient/delete.py | robcalon/PyETM | 323418ad57b2df7d47f2495919c943db28ca55cc | [
"MIT"
] | null | null | null | pyETM/httpclient/delete.py | robcalon/PyETM | 323418ad57b2df7d47f2495919c943db28ca55cc | [
"MIT"
] | null | null | null | import json
import asyncio
import aiohttp


class Delete:

    async def _async_delete(self, post, proxy, **kwargs):
        """asynchronous wrapper for clientsession's delete function."""

        # construct url
        url = self._make_url(post)

        # delete request at url
        async with aiohttp.ClientSession() as session:
            async with session.delete(url, proxy=proxy, **kwargs) as response:

                # check response
                valid = await self._check_response(response)

                # check validity
                if valid is not True:
                    raise ValueError('check failed without raising error.')

                return response

    def delete(self, post, **kwargs):
        """Run async_delete definition without async statement.

        Parameters
        ----------
        post : str
            String of the subdomain that is requested.

        Returns
        -------
        response : dict
            Returns the decoded async response.

        **kwargs are passed to aiohttp's delete function."""

        # pass proxy
        proxy = self.proxy

        # evaluate coroutine
        process = self._async_delete(post, proxy=proxy, **kwargs)
        response = asyncio.run(process)

        return response
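

# Usage sketch (not part of the original module): Delete is written as a
# mixin, so the host class must supply _make_url, _check_response and a
# proxy attribute. The host class below is an assumption for illustration:
#
#     class Client(Delete):
#         proxy = None
#
#         def _make_url(self, post):
#             return 'https://example.com' + post
#
#         async def _check_response(self, response):
#             return response.status < 400
#
#     Client().delete('/some/resource')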
| 29.395833 | 94 | 0.528703 | 133 | 1,411 | 5.541353 | 0.473684 | 0.044776 | 0.037992 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.39759 | 1,411 | 48 | 95 | 29.395833 | 0.867059 | 0.256556 | 0 | 0.117647 | 0 | 0 | 0.039773 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.176471 | 0 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6822e375dbfee4177b6f505d2510b41a8d49ea78 | 2,961 | py | Python | codes/exp_real.py | xindi-dumbledore/NetInfoAccEqua | f4b52166afb5504566b6e6bcfcfb9d01612ce30a | [
"MIT"
] | null | null | null | codes/exp_real.py | xindi-dumbledore/NetInfoAccEqua | f4b52166afb5504566b6e6bcfcfb9d01612ce30a | [
"MIT"
] | null | null | null | codes/exp_real.py | xindi-dumbledore/NetInfoAccEqua | f4b52166afb5504566b6e6bcfcfb9d01612ce30a | [
"MIT"
] | null | null | null | import networkx as nx
from fairness_measure import *
from simulation_param import *
import random
import pickle
# Explicit import added for clarity; defaultdict is used below and may
# otherwise only be available through the star imports above.
from collections import defaultdict


def run_exp_real(G, key="gender", group_info=["male", "female"]):
    G = G.to_undirected()
    groups = nx.get_node_attributes(G, key)
    nodes_maj = [n for n in groups if groups[n] == group_info[0]]
    nodes_min = [n for n in groups if groups[n] == group_info[1]]
    G = G.subgraph(nodes_maj + nodes_min)
    largest_cc = max(nx.connected_components(G), key=len)
    G = G.subgraph(largest_cc)
    nodes_maj = [n for n in nodes_maj if n in largest_cc]
    nodes_min = [n for n in nodes_min if n in largest_cc]
    print(len(G), len(nodes_maj), len(nodes_min))
    seed_num = int(len(G) * 0.002)
    if seed_num < 5:
        seed_num = 5
    # degree parity
    degree_parity_dict = centrality_fairness(
        G, centrality="degree", key=key, group_info=group_info)
    # structural hole
    information_access_dict = defaultdict(list)
    for beta_array_name, beta_array in zip(["asy", "sym"], [BETA_ARRAY_ASY, BETA_ARRAY_SYM]):
        for minority_seeding_portion_type in ["low", "mid", "high"]:
            for threshold in [None, 0.1]:  # 0.1 is activation threshold
                for trial in range(N_TRIALS):
                    low_end, high_end = MINORITY_SEEDING_PORTION_DICT[
                        minority_seeding_portion_type]
                    minority_seeding_portion = random.uniform(
                        low_end, high_end)
                    seed_min = int(minority_seeding_portion * seed_num)
                    seed_maj = seed_num - seed_min
                    seed_nodes = random.sample(
                        nodes_min, seed_min) + random.sample(nodes_maj, seed_maj)
                    R_M, R_m, R = SIR_network(
                        G, beta_array, threshold, GAMMA, seed_nodes, seed_num, key=key, group_info=group_info)
                    information_access_dict[(beta_array_name, minority_seeding_portion, threshold)].append(
                        (R_M, R_m, R))
    return degree_parity_dict, information_access_dict


if __name__ == '__main__':
    key_dict = {"Github": "gender",
                "DBLP": "gender",
                "APS": "pacs",
                }
    group_dict = {"Github": ["male", "female"],
                  "DBLP": ["m", "f"],
                  "APS": ['05.30.-d', '05.20.-y']}
    for G_name, G_file_name in zip(["Github", "DBLP", "APS"], ["github_mutual_follower_ntw", "DBLP_graph", "sampled_APS_pacs052030"]):
        G = nx.read_gexf("datasets/%s.gexf" % G_file_name)
        degree_parity_dict, information_access_dict = run_exp_real(
            G, key=key_dict[G_name], group_info=group_dict[G_name])
        pickle.dump(degree_parity_dict, open(
            "exp_results/exp_real/%s_degree_parity_dict.pickle" % G_name, "wb"))
        pickle.dump(information_access_dict, open(
            "exp_results/exp_real/%s_information_access_dict.pickle" % G_name, "wb"))
| 48.540984 | 134 | 0.614319 | 406 | 2,961 | 4.137931 | 0.285714 | 0.042857 | 0.075 | 0.016667 | 0.233333 | 0.167857 | 0.067857 | 0.036905 | 0.036905 | 0.036905 | 0 | 0.012048 | 0.271192 | 2,961 | 60 | 135 | 49.35 | 0.76645 | 0.01925 | 0 | 0 | 0 | 0 | 0.106897 | 0.052069 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018182 | false | 0 | 0.090909 | 0 | 0.127273 | 0.018182 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6824cbdf847cb38a7087393b8c65381d5b40af84 | 2,110 | py | Python | server/routes/overview.py | nondanee/idol-dev | 1214618125f1ed55437c092991e76bf82300e1e7 | [
"MIT"
] | 4 | 2018-08-27T15:56:48.000Z | 2020-09-08T15:10:09.000Z | server/routes/overview.py | nondanee/idol-dev | 1214618125f1ed55437c092991e76bf82300e1e7 | [
"MIT"
] | null | null | null | server/routes/overview.py | nondanee/idol-dev | 1214618125f1ed55437c092991e76bf82300e1e7 | [
"MIT"
] | null | null | null | import asyncio
from . import tool
from aiohttp import web
from aiohttp_session import get_session


@asyncio.coroutine
def route(request):
    session = yield from get_session(request)
    parameters = request.rel_url.query

    if 'uid' not in session:
        return web.HTTPUnauthorized()
    else:
        uid = session['uid']

    try:
        mid = int(parameters['mid'])
    except:
        return web.HTTPBadRequest()

    with (yield from request.app['pool']) as connect:
        cursor = yield from connect.cursor()
        yield from cursor.execute('''
            select
                overview.id,
                overview.romaji,
                overview.name,
                overview.affiliation,
                overview.introduction,
                overview.follows,
                overview.subscribes,
                follow.uid,
                subscription.uid
            from (
                select
                    member.id,
                    member.romaji,
                    member.name,
                    member.affiliation,
                    member.introduction,
                    member.follows,
                    member.subscribes
                from member
                where member.id = %s
            ) overview
            left join follow on follow.uid = %s and follow.mid = overview.id
            left join subscription on subscription.uid = %s and subscription.mid = overview.id
        ''', (mid, uid, uid))
        data = yield from cursor.fetchone()
        yield from cursor.close()
        connect.close()

    if not data:
        return web.HTTPNotFound()

    json_back = {
        "mid": str(data[0]).zfill(4),
        "avatar": "/avatar/{}.jpg".format(data[1]),
        "name": data[2],
        "romaji": data[1],
        "affiliation": data[3],
        "introduction": data[4],
        "follows": data[5],
        "subscribes": data[6],
        "followed": True if data[7] else False,
        "subscribed": True if data[8] else False
    }

    return web.Response(text=tool.jsonify(json_back), content_type="application/json", charset="utf-8") | 28.90411 | 105 | 0.534597 | 216 | 2,110 | 5.189815 | 0.398148 | 0.048171 | 0.040143 | 0.039251 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008922 | 0.362559 | 2,110 | 73 | 105 | 28.90411 | 0.824535 | 0 | 0 | 0.032258 | 0 | 0 | 0.448603 | 0.020369 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016129 | false | 0 | 0.064516 | 0 | 0.145161 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6824d7dc201c493ffd23ef954ff15adb8a4b4467 | 2,392 | py | Python | user_input.py | adiah80/Game-Of-Life | c1922e9cff04b6d1cc567bbeccb6af8e562d3e41 | [
"MIT"
] | null | null | null | user_input.py | adiah80/Game-Of-Life | c1922e9cff04b6d1cc567bbeccb6af8e562d3e41 | [
"MIT"
] | null | null | null | user_input.py | adiah80/Game-Of-Life | c1922e9cff04b6d1cc567bbeccb6af8e562d3e41 | [
"MIT"
] | null | null | null | import tkinter as tk
import numpy as np
import time
import json

from simulate import GameOfLife
from utils import ApplyInitialization
from patterns import patternsDict
from initalizations import initializationsDict as inits

if __name__ == '__main__':
    # Import config from `config.json`
    with open('config.json') as jsonFile:
        config = json.load(jsonFile)

    ### Handle Inputs ###
    print("========================================================================")
    print("NOTE : Simply press 'Enter' for defaults.\n")

    maxGenerations = \
        int(input("(1) Enter number of generations to simulate [Default is 10000] : ") \
            or config['maxGenerations'])
    updateTime = \
        int(input("(2) Enter time (in ms) between updates [Default is 10ms] : ") \
            or config['updateTime'])

    initializationChoices = [
        "StillLife",
        "Oscillators",
        "AcornSpread",
        "EngineSpread",
        "Guns",
        "Pulsars",
        "ShipsSimple",
        "ShipsCollision",
        "ShipsDestroyed",
    ]
    print("\nInitialization choices : ")
    for idx, initializationName in enumerate(initializationChoices):
        print("\t[{:d}] {:s}".format(idx, initializationName))
    initChoice = \
        int(input("\n(3) Choose the Initialization index to simulate [Default is '8'] : ") or 8)
    initialization = initializationChoices[initChoice]

    print("\nLoading simulation...")

    # Load simulation params from config and user input.
    maxGenerations = maxGenerations       # total number of generations
    updateTime = updateTime               # in milliseconds
    numRows = config['numRows']           # number of cell-rows in the grid
    numCols = config['numCols']           # number of cell-columns in the grid
    gridHeight = config['gridHeight']     # total height of the grid
    gridWidth = config['gridWidth']       # total width of the grid

    # Define root window.
    root = tk.Tk()

    # Get simulation object.
    gameOfLife = GameOfLife(
        initialization,
        root,
        numRows,
        numCols,
        gridHeight,
        gridWidth
    )

    # Apply the chosen initialization to the grid.
    ApplyInitialization(inits[initialization], gameOfLife)

    # Run the simulation.
    for generation in range(maxGenerations):
        try:
            gameOfLife.FillGrid()
            gameOfLife.MakeUpdate()
            time.sleep(updateTime / 1000)
        except:
            # If window is closed.
            print("Window closed by user.")
            break

    print("Simulation over.")
    print("========================================================================") | 27.494253 | 90 | 0.660117 | 253 | 2,392 | 6.209486 | 0.466403 | 0.022279 | 0.024188 | 0.024188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008118 | 0.176003 | 2,392 | 87 | 91 | 27.494253 | 0.78894 | 0.161789 | 0 | 0.031746 | 0 | 0 | 0.327291 | 0.072508 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.126984 | 0 | 0.126984 | 0.126984 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68255df099a161e89ff4e124ef109d66a8ebccb0 | 883 | py | Python | highest-voted.py | sinelaw/knesset-votes | ca5f52b3d639d86fff915e5cf724b3aef2ccac09 | [
"MIT"
] | null | null | null | highest-voted.py | sinelaw/knesset-votes | ca5f52b3d639d86fff915e5cf724b3aef2ccac09 | [
"MIT"
] | null | null | null | highest-voted.py | sinelaw/knesset-votes | ca5f52b3d639d86fff915e5cf724b3aef2ccac09 | [
"MIT"
] | null | null | null | import sqlite3
import codecs


# Copied from https://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
def dict_factory(cursor, row):
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d


def render_highest_votes_page():
    num = 200
    with codecs.open('highest-voted.html', 'wb', 'utf-8') as f:
        conn = sqlite3.connect('votes.sqlite3')
        conn.row_factory = dict_factory
        c = conn.cursor()
        for row in c.execute('select * from votes order by votes_count desc limit %d' % (num,)):
            f.write('<tr>')
            titles = ['title', 'votes_count', 'for_votes_count', 'against_votes_count']
            for k in titles:
                f.write('<td class="%s">%s</td>' % (k, row[k]))
            f.write('</tr>\n')


if __name__ == '__main__':
    render_highest_votes_page()
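
# Note (sketch, not part of the original script): the generated file contains
# only <tr> rows, so a viewer of the page would wrap it in a table, e.g.:
#
#     html = '<table>%s</table>' % open('highest-voted.html', encoding='utf-8').read()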
| 32.703704 | 96 | 0.604757 | 124 | 883 | 4.120968 | 0.548387 | 0.078278 | 0.07045 | 0.086106 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022355 | 0.240091 | 883 | 26 | 97 | 33.961538 | 0.739195 | 0.10419 | 0 | 0 | 0 | 0 | 0.231939 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.095238 | 0 | 0.238095 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682a4cdafc275508691e93927329bac2fcd34d7f | 3,092 | py | Python | formatter_for_output.py | giuliapuntoit/RL-framework-iot | 1c0961f10f0477415198bbee94b6eb3272973004 | [
"MIT"
] | 5 | 2021-01-23T20:47:18.000Z | 2021-09-13T14:37:01.000Z | formatter_for_output.py | SmartData-Polito/RL-IoT | d293c8410d6c2e8fcb56f96c346c519dd3a84a28 | [
"MIT"
] | null | null | null | formatter_for_output.py | SmartData-Polito/RL-IoT | d293c8410d6c2e8fcb56f96c346c519dd3a84a28 | [
"MIT"
] | 1 | 2021-02-09T17:34:47.000Z | 2021-02-09T17:34:47.000Z | """
Support classes for coloring the console output
"""
import logging
import sys
from config import FrameworkConfiguration


def format_console_output():
    """
    Format console with a common format and if selected with a colored output
    """
    # Note: logging.basicConfig only configures the root logger on its first
    # call, so only the first of these four calls actually takes effect.
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format='[%(levelname)s]\t(%(threadName)s) %(message)s', )
    logging.basicConfig(stream=sys.stdout, level=logging.ERROR, format='[%(levelname)s]\t(%(threadName)s) %(message)s', )
    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='[%(levelname)s]\t(%(threadName)s) %(message)s', )
    logging.basicConfig(stream=sys.stdout, level=logging.WARNING, format='[%(levelname)s]\t(%(threadName)s) %(message)s', )

    # Set colored output for console
    if FrameworkConfiguration.use_colored_output and FrameworkConfiguration.DEBUG is False:
        LOG = logging.getLogger()
        LOG.setLevel(logging.DEBUG)
        for handler in LOG.handlers:
            LOG.removeHandler(handler)
        LOG.addHandler(ColorHandler())


class _AnsiColorizer(object):
    """
    A colorizer is an object that loosely wraps around a stream, allowing
    callers to write text to the stream in a particular color.

    Colorizer classes must implement C{supported()} and C{write(text, color)}.
    """
    _colors = dict(black=30, red=31, green=32, yellow=33,
                   blue=34, magenta=35, cyan=36, white=37)

    def __init__(self, stream):
        self.stream = stream

    @classmethod
    def supported(cls, stream=sys.stdout):
        """
        A class method that returns True if the current platform supports
        coloring terminal output using this method. Returns False otherwise.
        """
        if not stream.isatty():
            return False  # auto color only on TTYs
        try:
            import curses
        except ImportError:
            return False
        else:
            try:
                try:
                    return curses.tigetnum("colors") > 2
                except curses.error:
                    curses.setupterm()
                    return curses.tigetnum("colors") > 2
            except Exception:
                # guess false in case of error (a stray bare 'raise' in the
                # original made this fall-back unreachable)
                return False


class ColorHandler(logging.StreamHandler):
    def __init__(self, stream=sys.stderr):
        super(ColorHandler, self).__init__(_AnsiColorizer(stream))

    def emit(self, record):
        msg_colors = {
            logging.DEBUG: "green",
            logging.INFO: "blue",
            logging.WARNING: "yellow",
            logging.ERROR: "red"
        }
        color = msg_colors.get(record.levelno, "green")
        self.stream.write(record.msg + "\n", color)
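

# Example (sketch, not part of the original module): once
# format_console_output() has run with colored output enabled in
# FrameworkConfiguration, ordinary logging calls are colorized:
#
#     format_console_output()
#     logging.info("bulb connected")     # rendered in blue
#     logging.warning("slow response")   # rendered in yellow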
| 34.355556 | 123 | 0.613195 | 364 | 3,092 | 5.148352 | 0.381868 | 0.028815 | 0.040021 | 0.057631 | 0.226254 | 0.208111 | 0.172892 | 0.148879 | 0.129669 | 0.129669 | 0 | 0.009826 | 0.275873 | 3,092 | 89 | 124 | 34.741573 | 0.827155 | 0.230272 | 0 | 0.153846 | 0 | 0 | 0.104517 | 0.058459 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.096154 | 0 | 0.365385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682aee6e109beb26c251cc7f6835c8e207af0b3b | 1,485 | py | Python | src/__init__.py | swerwath/monitor_mapper | 59a1f2977212e68abd6bc4906cd6d10b0430529e | [
"BSD-2-Clause"
] | null | null | null | src/__init__.py | swerwath/monitor_mapper | 59a1f2977212e68abd6bc4906cd6d10b0430529e | [
"BSD-2-Clause"
] | null | null | null | src/__init__.py | swerwath/monitor_mapper | 59a1f2977212e68abd6bc4906cd6d10b0430529e | [
"BSD-2-Clause"
] | null | null | null | from flask import Flask, render_template, request, redirect, url_for
from geopy.geocoders import Nominatim
from .airnow_worker import AirNowWorker, group_monitor_data, get_nearest_monitor_by_category
from .community_database import CommunityDatabase
from .util import get_bbox
from .tri_database import TRIDatabase
from .copy import get_copy
import os
app = Flask(__name__)
KEY = os.environ['GOOGLE_MAPS_KEY']
worker = AirNowWorker()
tri = TRIDatabase()
cd = CommunityDatabase()
geolocator = Nominatim(timeout=5)
@app.route('/')
def index():
return render_template("index.html")
@app.route('/results')
def results():
lat = float(request.args.get('lat'))
long = float(request.args.get('long'))
bbox = get_bbox(lat, long)
monitors = worker.get_monitors(bbox)
grouped_monitors = group_monitor_data(monitors)
nearest_monitors = get_nearest_monitor_by_category(lat, long, grouped_monitors)
facilities = tri.get_facilities(bbox)
facilities_dicts = [f.to_json_dict() for f in facilities]
orgs = cd.get_organizations(bbox)
return render_template("results.html", lat=lat, long=long, KEY=KEY, monitors=grouped_monitors, nearest_monitors=nearest_monitors, facilities=facilities_dicts, orgs=orgs, get_copy=get_copy)
@app.route('/place_results')
def place_results():
place_name = request.args.get('place')
location = geolocator.geocode(place_name)
return redirect(url_for('results', lat=location.latitude, long=location.longitude))
| 33 | 192 | 0.767003 | 198 | 1,485 | 5.510101 | 0.338384 | 0.025665 | 0.038497 | 0.03483 | 0.049496 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000769 | 0.124579 | 1,485 | 44 | 193 | 33.75 | 0.838462 | 0 | 0 | 0 | 0 | 0 | 0.053199 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088235 | false | 0 | 0.235294 | 0.029412 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682bd7c48d23aef11a9bae521b0afa1322b70529 | 2,080 | py | Python | tests/test_graphiqlview.py | agritheory/quart-graphql | f4a1398fca60a1c9134e669e81e4f4ae07556cb1 | [
"MIT"
] | null | null | null | tests/test_graphiqlview.py | agritheory/quart-graphql | f4a1398fca60a1c9134e669e81e4f4ae07556cb1 | [
"MIT"
] | null | null | null | tests/test_graphiqlview.py | agritheory/quart-graphql | f4a1398fca60a1c9134e669e81e4f4ae07556cb1 | [
"MIT"
] | null | null | null | import typing
import pytest
from quart import Quart, url_for
from quart.testing import QuartClient
from tests.app import create_app
@pytest.fixture
async def app() -> Quart:
app = create_app(graphiql=True)
ctx = app.app_context()
await ctx.push()
return app
@pytest.fixture
def client(app: Quart) -> QuartClient:
return app.test_client()
@pytest.mark.asyncio
async def test_graphiql_is_enabled(app: Quart, client: QuartClient) -> typing.NoReturn:
async with app.test_request_context("/"):
response = await client.get(
url_for("graphql", externals=False), headers={"Accept": "text/html"}
)
assert response.status_code == 200
@pytest.mark.asyncio
async def test_graphiql_renders_pretty(
app: Quart, client: QuartClient
) -> typing.NoReturn:
async with app.test_request_context("/"):
response = await client.get(
url_for("graphql", query="{test}"), headers={"Accept": "text/html"}
)
assert response.status_code == 200
pretty_response = (
'{\n'
' "data": {\n'
' "test": "Hello World"\n'
' }\n'
'}'
).replace("\"", "\\\"").replace("\n", "\\n")
assert pretty_response in str(await response.get_data(), 'utf-8')
@pytest.mark.asyncio
async def test_graphiql_default_title(
app: Quart, client: QuartClient
) -> typing.NoReturn:
async with app.test_request_context("/"):
response = await client.get(url_for("graphql"), headers={"Accept": "text/html"})
assert "<title>GraphiQL</title>" in str(await response.get_data())
@pytest.mark.parametrize(
"app", [create_app(graphiql=True, graphiql_html_title="Awesome")]
)
@pytest.mark.asyncio
async def test_graphiql_custom_title(
app: Quart, client: QuartClient
) -> typing.NoReturn:
async with app.test_request_context("/"):
response = await client.get(url_for("graphql"), headers={"Accept": "text/html"})
data = str(await response.get_data())
assert "<title>Awesome</title>" in data
| 29.295775 | 88 | 0.646154 | 253 | 2,080 | 5.158103 | 0.241107 | 0.036782 | 0.052107 | 0.067433 | 0.642912 | 0.583908 | 0.545594 | 0.432184 | 0.432184 | 0.358621 | 0 | 0.004261 | 0.210096 | 2,080 | 70 | 89 | 29.714286 | 0.790018 | 0 | 0 | 0.392857 | 0 | 0 | 0.103365 | 0.021635 | 0 | 0 | 0 | 0 | 0.089286 | 1 | 0.017857 | false | 0 | 0.089286 | 0.017857 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682c3f95fb6f311b3a07f2e75ce43bc1ee2e21c9 | 692 | py | Python | tests/utils.py | eerimoq/bunga | 86b3e83f16e70590fc776991c56c05b81a8dabd0 | [
"MIT"
] | 5 | 2020-05-30T15:32:38.000Z | 2021-11-23T11:48:06.000Z | tests/utils.py | eerimoq/bunga | 86b3e83f16e70590fc776991c56c05b81a8dabd0 | [
"MIT"
] | null | null | null | tests/utils.py | eerimoq/bunga | 86b3e83f16e70590fc776991c56c05b81a8dabd0 | [
"MIT"
] | null | null | null | import threading
import socket
class ServerThread(threading.Thread):
def __init__(self, listener, handler):
super().__init__()
self._listener = listener
self._handler = handler
self.daemon = True
self.exception = None
def run(self):
try:
self._handler(self._listener.accept()[0])
except Exception as e:
self.exception = e
raise
self._listener.close()
def start_server(handler):
listener = socket.socket()
listener.bind(('localhost', 0))
listener.listen()
server = ServerThread(listener, handler)
server.start()
return server, listener.getsockname()[1]
| 21.625 | 53 | 0.617052 | 72 | 692 | 5.736111 | 0.458333 | 0.116223 | 0.077482 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006 | 0.277457 | 692 | 31 | 54 | 22.322581 | 0.82 | 0 | 0 | 0 | 0 | 0 | 0.013006 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.130435 | false | 0 | 0.086957 | 0 | 0.304348 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682c6565216ac05cb5a10d9d4cfb45f81c9a439d | 3,892 | py | Python | code/client/munkilib/wrappers.py | backwardn/munki | f1b5162841475a9486983faabff22504e14c06ae | [
"Apache-2.0"
] | 1 | 2020-12-17T19:52:42.000Z | 2020-12-17T19:52:42.000Z | code/client/munkilib/wrappers.py | backwardn/munki | f1b5162841475a9486983faabff22504e14c06ae | [
"Apache-2.0"
] | null | null | null | code/client/munkilib/wrappers.py | backwardn/munki | f1b5162841475a9486983faabff22504e14c06ae | [
"Apache-2.0"
] | null | null | null | # encoding: utf-8
#
# Copyright 2019-2020 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
wrappers.py
Created by Greg Neagle on 2018-05-29.
Some wrappers to paper over the differences between Python 2 and Python 3
"""
import plistlib
# plistlib wrappers
class PlistError(Exception):
"""Base error for plists"""
pass
class PlistReadError(PlistError):
"""Error when reading plists"""
pass
class PlistWriteError(PlistError):
"""Error when writing plists"""
pass
# Disable PyLint complaining about 'invalid' camelCase names
# pylint: disable=C0103
def readPlist(filepath):
'''Wrapper for the differences between Python 2 and Python 3's plistlib'''
try:
with open(filepath, "rb") as fileobj:
return plistlib.load(fileobj)
except AttributeError:
# plistlib module doesn't have a load function (as in Python 2)
try:
return plistlib.readPlist(filepath)
except BaseException as err:
raise PlistReadError(err)
except Exception as err:
raise PlistReadError(err)
def readPlistFromString(bytestring):
'''Wrapper for the differences between Python 2 and Python 3's plistlib'''
try:
return plistlib.loads(bytestring)
except AttributeError:
# plistlib module doesn't have a loads function (as in Python 2)
try:
return plistlib.readPlistFromString(bytestring)
except BaseException as err:
raise PlistReadError(err)
except Exception as err:
raise PlistReadError(err)
def writePlist(data, filepath):
'''Wrapper for the differences between Python 2 and Python 3's plistlib'''
try:
with open(filepath, "wb") as fileobj:
plistlib.dump(data, fileobj)
except AttributeError:
# plistlib module doesn't have a dump function (as in Python 2)
try:
plistlib.writePlist(data, filepath)
except BaseException as err:
raise PlistWriteError(err)
except Exception as err:
raise PlistWriteError(err)
def writePlistToString(data):
'''Wrapper for the differences between Python 2 and Python 3's plistlib'''
try:
return plistlib.dumps(data)
except AttributeError:
# plistlib module doesn't have a dumps function (as in Python 2)
try:
return plistlib.writePlistToString(data)
except BaseException as err:
raise PlistWriteError(err)
except Exception as err:
raise PlistWriteError(err)
# pylint: enable=C0103
# Python 2 and 3 wrapper for raw_input/input
try:
# Python 2
get_input = raw_input # pylint: disable=raw_input-builtin
except NameError:
# Python 3
get_input = input # pylint: disable=input-builtin
# remap basestring in Python 3
try:
_ = basestring
except NameError:
basestring = str
def is_a_string(something):
'''Wrapper for basestring vs str'''
return isinstance(something, basestring)
def unicode_or_str(something, encoding="UTF-8"):
'''Wrapper for unicode vs str'''
try:
# Python 2
if isinstance(something, str):
return unicode(something, encoding)
return unicode(something)
except NameError:
# Python 3
if isinstance(something, bytes):
return str(something, encoding)
return str(something)
| 27.8 | 78 | 0.679856 | 482 | 3,892 | 5.46888 | 0.302905 | 0.031866 | 0.030349 | 0.051214 | 0.400607 | 0.400607 | 0.386191 | 0.386191 | 0.296662 | 0.257208 | 0 | 0.017371 | 0.245632 | 3,892 | 139 | 79 | 28 | 0.88045 | 0.423433 | 0 | 0.544118 | 0 | 0 | 0.004165 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088235 | false | 0.044118 | 0.014706 | 0 | 0.308824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682d63fff52ab17d91dd71fccc20eef479ef0bea | 6,939 | py | Python | cryptofeed/rest/ftx.py | DunnCreativeSS/cash_carry_leveraged_futures_arbitrageur | 1120ebfb487ce4987fe70e6645b36e0d7ce041ec | [
"Apache-2.0"
] | 1 | 2021-09-06T00:09:11.000Z | 2021-09-06T00:09:11.000Z | cryptofeed/rest/ftx.py | DunnCreativeSS/cash_carry_leveraged_futures_arbitrageur | 1120ebfb487ce4987fe70e6645b36e0d7ce041ec | [
"Apache-2.0"
] | null | null | null | cryptofeed/rest/ftx.py | DunnCreativeSS/cash_carry_leveraged_futures_arbitrageur | 1120ebfb487ce4987fe70e6645b36e0d7ce041ec | [
"Apache-2.0"
] | null | null | null | '''
Copyright (C) 2017-2019 Bryant Moscon - bmoscon@gmail.com
Please see the LICENSE file for the terms and conditions
associated with this software.
'''
import logging
from time import sleep
import pandas as pd
import requests
from sortedcontainers.sorteddict import SortedDict as sd
from cryptofeed.defines import BID, ASK, BUY
from cryptofeed.defines import FTX as FTX_ID
from cryptofeed.defines import SELL
from cryptofeed.rest.api import API, request_retry
from cryptofeed.standards import pair_std_to_exchange
LOG = logging.getLogger('rest')
RATE_LIMIT_SLEEP = 0.2
class FTX(API):
ID = FTX_ID
api = "https://ftx.com/api"
def _get(self, command: str, params=None, retry=None, retry_wait=0):
url = f"{self.api}{command}"
@request_retry(self.ID, retry, retry_wait)
def helper():
resp = requests.get(url, params={} if not params else params)
self._handle_error(resp, LOG)
return resp.json()
return helper()
def ticker(self, symbol: str, retry=None, retry_wait=0):
sym = pair_std_to_exchange(symbol, self.ID)
data = self._get(f"/markets/{sym}", retry=retry, retry_wait=retry_wait)
return {'pair': symbol,
'feed': self.ID,
'bid': data['result']['bid'],
'ask': data['result']['ask']
}
def l2_book(self, symbol: str, retry=None, retry_wait=0):
sym = pair_std_to_exchange(symbol, self.ID)
data = self._get(f"/markets/{sym}/orderbook", {'depth': 100}, retry=retry, retry_wait=retry_wait)
return {
BID: sd({
u[0]: u[1]
for u in data['result']['bids']
}),
ASK: sd({
u[0]: u[1]
for u in data['result']['asks']
})
}
def trades(self, symbol: str, start=None, end=None, retry=None, retry_wait=10):
symbol = pair_std_to_exchange(symbol, self.ID)
for data in self._get_trades_hist(symbol, start, end, retry, retry_wait):
yield data
def funding(self, symbol: str, start_date=None, end_date=None, retry=None, retry_wait=10):
last = []
start = None
end = None
if end_date and not start_date:
start_date = '2019-01-01'
if start_date:
if not end_date:
end_date = pd.Timestamp.utcnow()
start = API._timestamp(start_date)
end = API._timestamp(end_date)
start = int(start.timestamp())
end = int(end.timestamp())
@request_retry(self.ID, retry, retry_wait)
def helper(start, end):
if start and end:
return requests.get(f"{self.api}/funding_rates?future={symbol}&start_time={start}&end_time={end}")
else:
return requests.get(f"{self.api}/funding_rates?symbol={symbol}")
while True:
r = helper(start, end)
if r.status_code == 429:
sleep(RATE_LIMIT_SLEEP)
continue
elif r.status_code == 500:
LOG.warning("%s: 500 for URL %s - %s", self.ID, r.url, r.text)
sleep(retry_wait)
continue
elif r.status_code != 200:
self._handle_error(r, LOG)
else:
sleep(RATE_LIMIT_SLEEP)
data = r.json()['result']
if data == []:
LOG.warning("%s: No data for range %d - %d", self.ID, start, end)
else:
end = int(API._timestamp(data[-1]["time"]).timestamp()) + 1
orig_data = list(data)
# data = self._dedupe(data, last)
# last = list(orig_data)
data = [self._funding_normalization(x, symbol) for x in data]
return data
@staticmethod
def _dedupe(data, last):
if len(last) == 0:
return data
ids = set([data['id'] for data in last])
ret = []
for d in data:
if d['id'] in ids:
continue
ids.add(d['id'])
ret.append(d)
return ret
def _get_trades_hist(self, symbol, start_date, end_date, retry, retry_wait):
last = []
start = None
end = None
if end_date and not start_date:
start_date = '2019-01-01'
if start_date:
if not end_date:
end_date = pd.Timestamp.utcnow()
start = API._timestamp(start_date)
end = API._timestamp(end_date)
start = int(start.timestamp())
end = int(end.timestamp())
@request_retry(self.ID, retry, retry_wait)
def helper(start, end):
if start and end:
return requests.get(f"{self.api}/markets/{symbol}/trades?limit=100&start_time={start}&end_time={end}")
else:
return requests.get(f"{self.api}/markets/{symbol}/trades")
while True:
r = helper(start, end)
if r.status_code == 429:
sleep(RATE_LIMIT_SLEEP)
continue
elif r.status_code == 500:
LOG.warning("%s: 500 for URL %s - %s", self.ID, r.url, r.text)
sleep(retry_wait)
continue
elif r.status_code != 200:
self._handle_error(r, LOG)
else:
sleep(RATE_LIMIT_SLEEP)
data = r.json()['result']
if data == []:
LOG.warning("%s: No data for range %d - %d", self.ID, start, end)
else:
end = int(API._timestamp(data[-1]["time"]).timestamp()) + 1
orig_data = list(data)
data = self._dedupe(data, last)
last = list(orig_data)
data = [self._trade_normalization(x, symbol) for x in data]
yield data
if len(orig_data) < 100:
break
def _trade_normalization(self, trade: dict, symbol: str) -> dict:
return {
'timestamp': API._timestamp(trade['time']).timestamp(),
'pair': symbol,
'id': trade['id'],
'feed': self.ID,
'side': SELL if trade['side'] == 'sell' else BUY,
'amount': trade['size'],
'price': trade['price']
}
def _funding_normalization(self, funding: dict, symbol: str) -> dict:
ts = pd.to_datetime(funding['time'], format="%Y-%m-%dT%H:%M:%S%z")
return {
'timestamp': API._timestamp(funding['time']).timestamp(),
'pair': funding['future'],
'feed': self.ID,
'rate': funding['rate']
}
| 32.886256 | 119 | 0.516933 | 829 | 6,939 | 4.185766 | 0.18456 | 0.041499 | 0.028242 | 0.025937 | 0.563977 | 0.557349 | 0.543516 | 0.498271 | 0.491354 | 0.468588 | 0 | 0.017183 | 0.362588 | 6,939 | 210 | 120 | 33.042857 | 0.767352 | 0.029255 | 0 | 0.530864 | 0 | 0.006173 | 0.096363 | 0.038361 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.061728 | 0.006173 | 0.234568 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682dd8077861d49fc5c94c9b61b7556121f43cb9 | 13,662 | py | Python | src/grokcore/view/templatereg.py | zopefoundation/grokcore.view | c574c0d041130ac607c95feb610a2b75bfc30abf | [
"ZPL-2.1"
] | null | null | null | src/grokcore/view/templatereg.py | zopefoundation/grokcore.view | c574c0d041130ac607c95feb610a2b75bfc30abf | [
"ZPL-2.1"
] | 8 | 2016-02-02T13:42:20.000Z | 2022-02-16T07:06:52.000Z | src/grokcore/view/templatereg.py | zopefoundation/grokcore.view | c574c0d041130ac607c95feb610a2b75bfc30abf | [
"ZPL-2.1"
] | 5 | 2015-04-03T05:01:45.000Z | 2018-06-13T08:41:30.000Z | ##############################################################################
#
# Copyright (c) 2006-2007 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import os
import warnings
import re

import zope.interface
import zope.component

import grokcore.component
import grokcore.view

from martian.scan import module_info_from_dotted_name
from martian.error import GrokError

from grokcore.view.interfaces import ITemplate, ITemplateFileFactory
from grokcore.view.interfaces import TemplateLookupError
from grokcore.view.components import PageTemplate


class InlineTemplateRegistry(object):
    """Registry managing all inline template files.
    """

    _reg = None
    _unassociated = None

    def __init__(self):
        self.clear()

    def clear(self):
        self._reg = {}
        self._unassociated = set()

    def register_inline_template(self, module_info, template_name, template):
        # verify no file template got registered with the same name
        try:
            file_template_registry.lookup(module_info, template_name)
        except TemplateLookupError:
            pass
        else:
            template_dir = file_template_registry.get_template_dir(module_info)
            raise GrokError("Conflicting templates found for name '%s': "
                            "the inline template in module '%s' conflicts "
                            "with the file template in directory '%s'" %
                            (template_name, module_info.dotted_name,
                             template_dir), None)
        # register the inline template
        self._reg[(module_info.dotted_name, template_name)] = template
        self._unassociated.add((module_info.dotted_name, template_name))

    def associate(self, module_info, template_name):
        # Two views in the same module should be able to use the same
        # inline template
        try:
            self._unassociated.remove((module_info.dotted_name, template_name))
        except KeyError:
            pass

    def lookup(self, module_info, template_name, mark_as_associated=False):
        result = self._reg.get((module_info.dotted_name, template_name))
        if result is None:
            raise TemplateLookupError(
                "inline template '%s' in '%s' cannot be found" % (
                    template_name, module_info.dotted_name))
        if mark_as_associated:
            self.associate(module_info, template_name)
        return result

    def unassociated(self):
        return self._unassociated


class FileTemplateRegistry(object):
    """Registry managing all template files.
    """

    _reg = None
    _unassociated = None
    _registered_directories = None
    _ignored_patterns = None

    def __init__(self):
        self.clear()

    def clear(self):
        self._reg = {}
        self._unassociated = set()
        self._registered_directories = set()
        self._ignored_patterns = []

    def ignore_templates(self, pattern):
        self._ignored_patterns.append(re.compile(pattern))

    def register_directory(self, module_info):
        # we cannot register a templates dir for a package
        if module_info.isPackage():
            return
        template_dir = self.get_template_dir(module_info)
        # we can only register for directories
        if not os.path.isdir(template_dir):
            return
        # we don't want associated templates become unassociated again
        if template_dir in self._registered_directories:
            return
        for template_file in os.listdir(template_dir):
            template_path = os.path.join(template_dir, template_file)
            if os.path.isfile(template_path):
                self._register_template_file(module_info, template_path)
        self._registered_directories.add(template_dir)

    def _register_template_file(self, module_info, template_path):
        template_dir, template_file = os.path.split(template_path)
        for pattern in self._ignored_patterns:
            if pattern.search(template_file):
                return
        template_name, extension = os.path.splitext(template_file)
        if (template_dir, template_name) in self._reg:
            raise GrokError("Conflicting templates found for name '%s' "
                            "in directory '%s': multiple templates with "
                            "the same name and different extensions." %
                            (template_name, template_dir), None)
        # verify no inline template exists with the same name
        try:
            inline_template_registry.lookup(module_info, template_name)
        except TemplateLookupError:
            pass
        else:
            raise GrokError("Conflicting templates found for name '%s': "
                            "the inline template in module '%s' conflicts "
                            "with the file template in directory '%s'" %
                            (template_name, module_info.dotted_name,
                             template_dir), None)
        extension = extension[1:]  # Get rid of the leading dot.
        template_factory = zope.component.queryUtility(
            grokcore.view.interfaces.ITemplateFileFactory,
            name=extension)
        if template_factory is None:
            # Warning when importing files. This should be
            # allowed because people may be using editors that generate
            # '.bak' files and such.
            if extension == 'pt':
                warnings.warn("You forgot to embed the zcml slug for "
                              "grokcore.view. It provides a renderer "
                              "for pt files. Now the file '%s' in '%s' "
                              "cannot be rendered" %
                              (template_file, template_dir), UserWarning, 2)
            elif extension == '':
                """Don't choke on subdirs or files without extensions."""
                return
            else:
                warnings.warn("File '%s' has an unrecognized extension in "
                              "directory '%s'" %
                              (template_file, template_dir), UserWarning, 2)
            return
        template = template_factory(template_file, template_dir)
        template._annotateGrokInfo(template_name, template_path)
        self._reg[(template_dir, template_name)] = template
        self._unassociated.add(template_path)

    def associate(self, template_path):
        # Two views in different module should be able to use the same template
        try:
            self._unassociated.remove(template_path)
        except KeyError:
            pass

    def lookup(self, module_info, template_name, mark_as_associated=False):
        template_dir = self.get_template_dir(module_info)
        result = self._reg.get((template_dir, template_name))
        if result is None:
            raise TemplateLookupError(
                "template '%s' in '%s' cannot be found" % (
                    template_name, template_dir))
        if mark_as_associated:
            registered_template_path = self._reg.get(
                (template_dir, template_name)).__grok_location__
            self.associate(registered_template_path)
        return result

    def unassociated(self):
        return self._unassociated

    def get_template_dir(self, module_info):
        template_dir_name = grokcore.view.templatedir.bind().get(
            module_info.getModule())
        if template_dir_name is None:
            template_dir_name = module_info.name + '_templates'
        template_dir = module_info.getResourcePath(template_dir_name)
        return template_dir


inline_template_registry = InlineTemplateRegistry()
file_template_registry = FileTemplateRegistry()


def register_inline_template(module_info, template_name, template):
    return inline_template_registry.register_inline_template(
        module_info, template_name, template)


def register_directory(module_info):
    return file_template_registry.register_directory(module_info)


def _clear():
    """Remove the registries (for use by tests)."""
    inline_template_registry.clear()
    file_template_registry.clear()


try:
    from zope.testing.cleanup import addCleanUp
except ImportError:
    # don't have that part of Zope
    pass
else:
    addCleanUp(_clear)
    del addCleanUp


def lookup(module_info, template_name, mark_as_associated=False):
    try:
        return file_template_registry.lookup(
            module_info, template_name, mark_as_associated)
    except TemplateLookupError as e:
        try:
            return inline_template_registry.lookup(
                module_info, template_name, mark_as_associated)
        except TemplateLookupError:
            # re-raise first error again
            raise e


def check_unassociated():
    unassociated = inline_template_registry.unassociated()
    if unassociated:
        for dotted_name, template_name in unassociated:
            msg = (
                "Found the following unassociated template "
                "after configuration in %r: %s." % (
                    dotted_name, template_name))
            warnings.warn(msg, UserWarning, 1)
    unassociated = file_template_registry.unassociated()
    for template_name in unassociated:
        msg = (
            "Found the following unassociated template "
            "after configuration: %s" % (
                template_name))
        warnings.warn(msg, UserWarning, 1)


def associate_template(module_info, factory, component_name,
                       has_render, has_no_render):
    """Associate a template to a factory located in the module
    described by module_info.
    """
    explicit_template = False
    factory_name = factory.__name__.lower()
    module_name, template_name = grokcore.view.template.bind(
        default=(None, None)).get(factory)
    if template_name is None:
        # We didn't used grok.template. Default the template name to
        # the factory name.
        template_name = factory_name
    else:
        # We used grok.template. Use the same module_info to fetch the
        # template that the module in which the directive have been
        # used (to get the grok.templatedir value).
        assert module_name is not None, \
            u"module_name cannot be None if template_name is specified."
        module_info = module_info_from_dotted_name(module_name)
        explicit_template = True

    # We used grok.template, to specify a template which is different
    # than the class name. Check if there is no template with the same
    # name as the view
    if factory_name != template_name:
        try:
            lookup(module_info, factory_name)
            raise GrokError("Multiple possible templates for %s %r. It "
                            "uses grok.template('%s'), but there is also "
                            "a template called '%s'."
                            % (component_name, factory, template_name,
                               factory_name), factory)
        except TemplateLookupError:
            pass

    # Check if view already have a template set with template =
    factory_have_template = (
        getattr(factory, 'template', None) is not None and
        ITemplate.providedBy(factory.template))

    # Lookup for a template in the registry
    try:
        factory.template = lookup(
            module_info, template_name, mark_as_associated=True)
        # If we associate a template, set the static_name to use to
        # the same package name as where the template is found.
        factory.__static_name__ = module_info.package_dotted_name
        # We now have a template.
        factory_have_template = True
    except TemplateLookupError:
        pass

    if not factory_have_template:
        # If a template was explicitly asked, error.
        if explicit_template:
            raise GrokError(
                "Template %s for %s %r cannot be found." %
                (template_name, component_name.title(), factory), factory)
        # Check for render or error.
        if has_no_render(factory):
            raise GrokError(
                "%s %r has no associated template or 'render' method." %
                (component_name.title(), factory), factory)

    if has_render(factory):
        # Check for have both render and template
        if factory_have_template:
            raise GrokError(
                "Multiple possible ways to render %s %r. "
                "It has both a 'render' method as well as "
                "an associated template." %
                (component_name, factory), factory)

    # Set static_name to use if no template are found.
    if getattr(factory, '__static_name__', None) is None:
        factory.__static_name__ = module_info.package_dotted_name

    if factory_have_template:
        factory.template._initFactory(factory)


@zope.interface.implementer(ITemplateFileFactory)
class PageTemplateFileFactory(grokcore.component.GlobalUtility):

    grokcore.component.name('pt')

    def __call__(self, filename, _prefix=None):
        return PageTemplate(filename=filename, _prefix=_prefix)
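

# Sketch (not part of the original module): a factory for another template
# extension can be registered the same way; the utility name must match the
# file extension without the leading dot. 'txt' and TextTemplate here are
# hypothetical:
#
#     @zope.interface.implementer(ITemplateFileFactory)
#     class TextTemplateFileFactory(grokcore.component.GlobalUtility):
#         grokcore.component.name('txt')
#
#         def __call__(self, filename, _prefix=None):
#             return TextTemplate(filename=filename, _prefix=_prefix)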
| 37.327869 | 79 | 0.632191 | 1,525 | 13,662 | 5.442623 | 0.175082 | 0.050602 | 0.034699 | 0.034458 | 0.328313 | 0.280241 | 0.239277 | 0.223373 | 0.139518 | 0.130602 | 0 | 0.001541 | 0.287659 | 13,662 | 365 | 80 | 37.430137 | 0.851315 | 0.153345 | 0 | 0.368421 | 0 | 0 | 0.105212 | 0 | 0 | 0 | 0 | 0 | 0.004049 | 1 | 0.089069 | false | 0.02834 | 0.05668 | 0.020243 | 0.246964 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
682fad2fc662319af2ddf907f8c98e6739a1c984 | 10,379 | py | Python | main.py | Emiyalzn/EI328-hw1 | d4716e9cd9de0a0c5174844e55fed377aa894d54 | [
"MIT"
] | null | null | null | main.py | Emiyalzn/EI328-hw1 | d4716e9cd9de0a0c5174844e55fed377aa894d54 | [
"MIT"
] | null | null | null | main.py | Emiyalzn/EI328-hw1 | d4716e9cd9de0a0c5174844e55fed377aa894d54 | [
"MIT"
] | 1 | 2022-03-21T03:36:37.000Z | 2022-03-21T03:36:37.000Z | import time
import pickle
import os
import numpy as np
import argparse
from utils import load_data, mse_loss, plot_boundaries, partition_data, plot_minmax_boundaries, result_dir, plot_datapoints
from model import MLP, MLQP
from copy import deepcopy
from multiprocessing import Pool


def parse_arguments():
    parser = argparse.ArgumentParser("Training hyperparameters.")
    parser.add_argument('--lr_1', type=float, default=1e-2, help="learning rate for v and b")
    parser.add_argument('--lr_2', type=float, default=1e-2, help="learning rate for u")
    parser.add_argument('--alpha_1', type=float, default=0., help="momentum rate for v and b")
    parser.add_argument('--alpha_2', type=float, default=0., help="momentum rate for u")
    parser.add_argument('--n_hid', type=int, default=32, help="size of hidden layer")
    parser.add_argument('--n_epoch', type=int, default=30000, help="Max training epochs")
    parser.add_argument('--type', choices=['vanilla', 'minmax'], default='vanilla', help="choose the training model type")
    parser.add_argument('--model', choices=['mlp', 'mlqp'], default='mlp', help="use mlp or mlqp for training")
    parser.add_argument('--partition_num', type=int, default=2, help="number of training set partitions")
    parser.add_argument('--partition_mode', choices=['random', 'yaxis', 'yaxis+overlap'], default='random', help="data partition method")
    parser.add_argument('--train_mode', choices=['sequential', 'parallel'], default='parallel', help="how to train the submodules")
    parser.add_argument('--seed', type=int, default=None, help="fix the random seed")
    args = parser.parse_args()

    return args


def train_vanillanet(args):
    xs, ys, labels = load_data('train')
    x_s, y_s, label_s = load_data('test')
    train_data = (xs, ys, labels)
    test_data = (x_s, y_s, label_s)
    train_accs, test_accs, losses, times = [], [], [], []
    best_epoch, best_loss = 0, 1e6

    # initialize models
    if args.model == 'mlp':
        model = MLP([2, args.n_hid, 1])
    else:
        model = MLQP([2, args.n_hid, 1])

    # start training
    start_time = time.time()
    for epoch in range(args.n_epoch):
        for i in range(len(xs)):
            pred = model.forward(np.array([xs[i], ys[i]]))
            model.backward(labels[i])
            if args.model == 'mlp':
                model.update(args.alpha_1, args.lr_1)
            else:
                model.update(args.alpha_1, args.alpha_2, args.lr_1, args.lr_2)
        curr_time = time.time() - start_time
        times.append(curr_time)

        # predict on train and test data
        train_acc, train_loss = predict(model, train_data)
        test_acc, _ = predict(model, test_data)
        losses.append(train_loss)
        train_accs.append(train_acc)
        test_accs.append(test_acc)
        if epoch % 10 == 0:
            print(f"Epoch {epoch}, Time {curr_time:.2f}, Loss {train_loss:.4f}, Train acc {train_acc:.4f}, Test acc {test_acc:.4f}")

        # early stopping
        if best_loss - train_loss > 0.0001:
            best_loss = train_loss
            best_epoch = epoch
        elif epoch - best_epoch >= 200:
            break

    # save files
    plot_boundaries(model, args.model, args.lr_1, args.lr_2, args.alpha_1)
    save_file = open(os.path.join(result_dir, f"{args.model}_data.pkl"), 'wb')
    pickle.dump(times, save_file)
    pickle.dump(losses, save_file)
    pickle.dump(train_accs, save_file)
    pickle.dump(test_accs, save_file)
    save_file.close()


def train_minmax_sequential(args):
    xs, ys, labels = load_data('train')
    x_s, y_s, label_s = load_data('test')
    train_data = (xs, ys, labels)
    test_data = (x_s, y_s, label_s)
    white_subsets, black_subsets = partition_data(train_data, args.partition_mode, args.partition_num)
    plot_datapoints(white_subsets, black_subsets)
    train_accs, test_accs, losses, times = [], [], [], []
    best_epoch, best_loss = 0, 1e6

    # initialize models
    models = []
    for i in range(args.partition_num):
        models.append([])
        for j in range(args.partition_num):
            if args.model == 'mlp':
                models[i].append(MLP([2, args.n_hid, 1]))
            else:
                models[i].append(MLQP([2, args.n_hid, 1]))

    # start training
    start_time = time.time()
    for epoch in range(args.n_epoch):
        for i in range(args.partition_num):
            for j in range(args.partition_num):
                x_train = np.concatenate((white_subsets[i][0], black_subsets[j][0]), axis=0)
                y_train = np.concatenate((white_subsets[i][1], black_subsets[j][1]), axis=0)
                labels_train = np.concatenate((white_subsets[i][2], black_subsets[j][2]), axis=0)
                for k in range(len(x_train)):
                    pred = models[i][j].forward(np.array([x_train[k], y_train[k]]))
                    models[i][j].backward(labels_train[k])
                    if args.model == 'mlp':
                        models[i][j].update(args.alpha_1, args.lr_1)
                    else:
                        models[i][j].update(args.alpha_1, args.alpha_2, args.lr_1, args.lr_2)
        curr_time = time.time() - start_time
        times.append(curr_time)

        # predict on train and test data
        train_acc, train_loss = predict_minmax(models, train_data)
        test_acc, _ = predict_minmax(models, test_data)
        losses.append(train_loss)
        train_accs.append(train_acc)
        test_accs.append(test_acc)
        if epoch % 10 == 0:
            print(f"Epoch {epoch}, Time {curr_time:.2f}, Loss {train_loss:.4f}, Train acc {train_acc:.4f}, Test acc {test_acc:.4f}")

        # early stopping
        if best_loss - train_loss > 0.0001:
            best_loss = train_loss
            best_epoch = epoch
        elif epoch - best_epoch >= 200:
            break

    # Visualization and save files
    plot_minmax_boundaries(models, args.model, args.partition_mode)
    save_file = open(os.path.join(result_dir, f"minmax_{args.model}_data.pkl"), 'wb')
    pickle.dump(times, save_file)
    pickle.dump(losses, save_file)
    pickle.dump(train_accs, save_file)
    pickle.dump(test_accs, save_file)
    save_file.close()


def train_one_model(model, data, args):
    xs, ys, labels = data
    best_epoch, best_loss = 0, 1e6
    for epoch in range(args.n_epoch):
        for k in range(len(xs)):
            pred = model.forward(np.array([xs[k], ys[k]]))
            model.backward(labels[k])
            if args.model == 'mlp':
                model.update(args.alpha_1, args.lr_1)
            else:
                model.update(args.alpha_1, args.alpha_2, args.lr_1, args.lr_2)
        _, train_loss = predict(model, data)
        # early stopping
        if best_loss - train_loss > 0.0001:
            best_loss = train_loss
            best_epoch = epoch
        elif epoch - best_epoch >= 200:
            break
    return model


def step_func_feeder(args, models, white_subsets, black_subsets):
    partition_num = len(white_subsets)
    for i in range(partition_num):
        for j in range(partition_num):
            x_train = np.concatenate((white_subsets[i][0], black_subsets[j][0]), axis=0)
            y_train = np.concatenate((white_subsets[i][1], black_subsets[j][1]), axis=0)
            labels_train = np.concatenate((white_subsets[i][2], black_subsets[j][2]), axis=0)
            data_train = (x_train, y_train, labels_train)
            yield models[i][j], data_train, args


def train_minmax_parallel(args):
    num_workers = args.partition_num * args.partition_num
    mp_pool = Pool(num_workers)

    xs, ys, labels = load_data('train')
    x_s, y_s, label_s = load_data('test')
    train_data = (xs, ys, labels)
    test_data = (x_s, y_s, label_s)
    white_subsets, black_subsets = partition_data(train_data, args.partition_mode, args.partition_num)
    plot_datapoints(white_subsets, black_subsets)

    # initialize models
    models = []
    for i in range(args.partition_num):
        models.append([])
        for j in range(args.partition_num):
            if args.model == 'mlp':
                models[i].append(MLP([2, args.n_hid, 1]))
            else:
                models[i].append(MLQP([2, args.n_hid, 1]))

    start_time = time.time()
    pool_map = mp_pool.starmap_async(train_one_model, step_func_feeder(args, models, white_subsets, black_subsets))
    results = pool_map.get()
    mp_pool.close()
    mp_pool.join()
    for i in range(args.partition_num):
        for j in range(args.partition_num):
            models[i][j] = results[i*args.partition_num+j]

    train_acc, train_loss = predict_minmax(models, train_data)
    test_acc, _ = predict_minmax(models, test_data)
    curr_time = time.time() - start_time
    print(f"Time {curr_time:.2f}, Loss {train_loss:.4f}, Train acc {train_acc:.4f}, Test acc {test_acc:.4f}")

    # Visualization
    plot_minmax_boundaries(models, args.model, args.partition_mode)


def predict(model, data):
    xs, ys, labels = data
    preds = []
    for i in range(len(xs)):
        preds.append(model.forward(np.array([xs[i], ys[i]])))
    preds = np.squeeze(np.array(preds))
    loss = mse_loss(preds, labels)
    acc = ((preds > 0.5) == labels).mean()
    return acc, loss


def predict_minmax(models, data):
    num_partitions = len(models)
    xs, ys, labels = data
    preds = []
    min_results = []
    single_pred = np.zeros(len(xs))
    for i in range(num_partitions):
        preds.append([])
        for j in range(num_partitions):
            for k in range(len(xs)):
                single_pred[k] = models[i][j].forward(np.array([xs[k], ys[k]]))
            preds[i].append(deepcopy(single_pred))
    for i in range(num_partitions):
        min_result = np.min(preds[i], axis=0)
        min_results.append(deepcopy(min_result))
    max_result = np.max(min_results, axis=0)
    loss = mse_loss(max_result, labels)
    acc = ((max_result > 0.5) == labels).mean()
    return acc, loss


if __name__ == '__main__':
    args = parse_arguments()
    if args.seed is not None:
        np.random.seed(args.seed)
    if args.type == 'vanilla':
        train_vanillanet(args)
    elif args.type == 'minmax':
        if args.train_mode == 'parallel':
            train_minmax_parallel(args)
        else:
            train_minmax_sequential(args)
    else:
        raise ValueError("Unknown model type!")
6830af0c1046d72a73ee2630c491f3d73288b0de | 9,742 | py | Python | test/persistence/test_artifact_manager.py | bavard-ai/bavard-ml-utils | 00b6575d5c74c66f57ca1da753b91d3ceef6d1e4 | [
"MIT"
] | 1 | 2022-02-24T21:25:02.000Z | 2022-02-24T21:25:02.000Z | test/persistence/test_artifact_manager.py | bavard-ai/bavard-ml-utils | 00b6575d5c74c66f57ca1da753b91d3ceef6d1e4 | [
"MIT"
] | null | null | null | test/persistence/test_artifact_manager.py | bavard-ai/bavard-ml-utils | 00b6575d5c74c66f57ca1da753b91d3ceef6d1e4 | [
"MIT"
] | null | null | null | import sys
import time
import typing as t
from unittest import TestCase
import numpy as np
from bavard_ml_utils.persistence.artifact_manager import (
BaseArtifactManager,
BaseArtifactRecord,
BaseDatasetRecord,
ServiceVersionMetadata,
)
from bavard_ml_utils.persistence.record_store.firestore import FirestoreRecordStore
from test.utils import clear_firestore
class ArtifactRecord(BaseArtifactRecord):
A: np.ndarray
b: float
class DatasetRecord(BaseDatasetRecord):
examples: t.List[str]
class ArtifactManager(BaseArtifactManager):
def create_artifact_from_dataset(self, dataset: BaseDatasetRecord) -> BaseArtifactRecord:
# Simulate the artifact being a deterministic output of the dataset's digest and
# the service version.
# Source: https://stackoverflow.com/a/18766695
seed = hash((dataset.digest, self.version)) % ((sys.maxsize + 1) * 2)
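        # hash() of strings is randomized per process (PYTHONHASHSEED), so this
        # seed is only stable within a single interpreter run -- sufficient here,
        # since each test builds all of its artifacts in one process.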
rng = np.random.default_rng(seed)
return ArtifactRecord(
agent_id=dataset.agent_id,
dataset_digest=dataset.digest,
service_version=self.version,
updated_at=time.time(),
A=rng.normal(size=(5, 5)),
b=rng.random(),
)
class TestArtifactManager(TestCase):
def setUp(self):
clear_firestore()
self.artifacts: FirestoreRecordStore[ArtifactRecord] = FirestoreRecordStore[ArtifactRecord](
"artifacts", ArtifactRecord
)
self.datasets: FirestoreRecordStore[DatasetRecord] = FirestoreRecordStore[DatasetRecord](
"datasets", DatasetRecord
)
self.versions: FirestoreRecordStore[ServiceVersionMetadata] = FirestoreRecordStore[ServiceVersionMetadata](
"versions", ServiceVersionMetadata
)
self.dataset_records = [
DatasetRecord(examples=["a", "b", "c"], agent_id="1", updated_at=time.time()),
DatasetRecord(examples=["d", "e"], agent_id="2", updated_at=time.time()),
]
def test_sync_new_version(self):
# Simulates the scenario where a new version of the service is being released,
# and checks to make sure all the proper artifacts are re-indexed for the new version.
# Setup: create a couple datasets and artifacts for this version.
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, "v1")
for dataset_record in self.dataset_records:
artifact = mgr.create_artifact_from_dataset(dataset_record)
mgr.save_artifact(artifact, dataset_record)
# There should be 2 artifacts saved for the first service version, and none for the second.
v1_artifacts = list(self.artifacts.get_all(service_version="v1"))
self.assertEqual(len(v1_artifacts), 2)
v2_artifacts = list(self.artifacts.get_all(service_version="v2"))
self.assertEqual(len(v2_artifacts), 0)
# Act: sync artifacts for a new version of the artifact manager.
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, "v2")
mgr.sync()
# There should now be two artifacts for this new version, produced from the two datasets added
# by the previous version.
v2_artifacts = list(self.artifacts.get_all(service_version="v2"))
self.assertEqual(len(v2_artifacts), 2)
# The new service version's artifacts should be for the same dataset versions that the
# previous version saved.
self.assertSetEqual(
{artifact.dataset_digest for artifact in v1_artifacts},
{artifact.dataset_digest for artifact in v2_artifacts},
)
# The data should be the same as well.
v1_artifacts_by_digest = {artifact.dataset_digest: artifact for artifact in v1_artifacts}
for v2_artifact in v2_artifacts:
v1_artifact = v1_artifacts_by_digest[v2_artifact.dataset_digest]
self.assertEqual(v1_artifact.agent_id, v2_artifact.agent_id)
self.assertEqual(v1_artifact.dataset_digest, v2_artifact.dataset_digest)
# The service version and updated_at fields should be different, since they were produced at different
# times for different versions. The same goes for the data.
self.assertNotEqual(v1_artifact.updated_at, v2_artifact.updated_at)
self.assertNotEqual(v1_artifact.service_version, v2_artifact.service_version)
self.assertNotEqual(v1_artifact.b, v2_artifact.b)
self.assertFalse((v1_artifact.A == v2_artifact.A).all())
def test_sync_same_version(self):
# Simulates the scenario where a new service instance is spun up with the same version of
# the currently deployed service. For example, when cloud auto-scaling scales up the service.
# Setup: create a dataset and artifact for this version.
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, "v1")
artifact = mgr.create_artifact_from_dataset(self.dataset_records[0])
mgr.save_artifact(artifact, self.dataset_records[0])
artifacts1 = list(self.artifacts.get_all())
datasets1 = list(self.datasets.get_all())
self.assertEqual(len(artifacts1), 1)
self.assertEqual(len(datasets1), 1)
self.assertEqual(artifacts1[0].service_version, "v1")
        # Act: simulate a new instance being spun up and synced.
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, "v1") # same version
n_indexed = mgr.sync()
# `sync` didn't need to do anything.
self.assertEqual(n_indexed, 0)
artifacts2 = list(self.artifacts.get_all())
        self.assertEqual(len(artifacts2), 1)
# The indexed artifact should not have changed.
artifact1, artifact2 = artifacts1[0], artifacts2[0]
self.assertEqual(artifact1.service_version, artifact2.service_version)
self.assertEqual(artifact1.dataset_digest, artifact2.dataset_digest)
self.assertEqual(artifact1.agent_id, artifact2.agent_id)
self.assertEqual(artifact1.updated_at, artifact2.updated_at)
self.assertEqual(artifact1.b, artifact2.b)
self.assertTrue((artifact1.A == artifact2.A).all())
# The saved dataset should not have changed either.
datasets2 = list(self.datasets.get_all())
self.assertEqual(len(datasets2), 1)
self.assertEqual(datasets1[0], datasets2[0])
def test_should_sync_missed_artifacts_on_the_fly(self):
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, "v1")
# Simulate a dataset being saved to a prior service version (no artifact data exists for it for the current
# service version).
dataset_record = self.dataset_records[0]
self.datasets.save(dataset_record)
# The artifact should not exist in the database for the current service version.
artifact = self.artifacts.get(ArtifactRecord.make_id("v1", dataset_record.agent_id))
self.assertIsNone(artifact)
artifact = mgr.load_artifact(dataset_record.agent_id) # should compute and save the artifact on the fly
self.assertIsNotNone(artifact)
# The artifact should now exist in the database for the current service version, and will no longer
# need to be computed on the fly.
artifact = self.artifacts.get(ArtifactRecord.make_id("v1", dataset_record.agent_id))
self.assertIsNotNone(artifact)
self.assertEqual(artifact.agent_id, dataset_record.agent_id)
self.assertEqual(artifact.service_version, "v1")
self.assertEqual(artifact.dataset_digest, dataset_record.digest)
self.assertIsNotNone(artifact.A)
self.assertIsNotNone(artifact.b)
def test_remove_old_versions(self):
max_service_versions = 5
n_old = 2
version_names = [f"v{i}" for i in range(max_service_versions + n_old)]
# Create artificial metadata and a couple artifacts for each version.
for version_name in version_names:
self.versions.save(ServiceVersionMetadata(name=version_name, synced_at=time.time()))
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, version_name)
for dataset_record in self.dataset_records:
artifact = mgr.create_artifact_from_dataset(dataset_record)
mgr.save_artifact(artifact, dataset_record)
mgr = ArtifactManager(self.artifacts, self.datasets, self.versions, version_names[-1])
mgr._remove_old_service_versions()
# Metadata for old versions should have been removed, and data for the newer versions should have been kept.
versions = list(self.versions.get_all())
new_version_names = {v.name for v in versions}
expected_version_names = set(version_names[n_old:])
self.assertSetEqual(new_version_names, expected_version_names)
# Artifacts for newer versions should have been preserved.
for version_name in new_version_names:
tasks_for_version = list(self.artifacts.get_all(service_version=version_name))
self.assertSetEqual(
{task.dataset_digest for task in tasks_for_version}, {rec.digest for rec in self.dataset_records}
)
self.assertSetEqual(
{task.agent_id for task in tasks_for_version}, {rec.agent_id for rec in self.dataset_records}
)
# Artifacts for old versions should have been removed.
for removed_version_name in set(version_names) - new_version_names:
n_artifacts_for_version = sum(1 for _ in self.artifacts.get_all(service_version=removed_version_name))
self.assertEqual(n_artifacts_for_version, 0)
| 49.451777 | 116 | 0.697187 | 1,192 | 9,742 | 5.526846 | 0.175336 | 0.047814 | 0.021858 | 0.032939 | 0.316333 | 0.26017 | 0.224347 | 0.192927 | 0.153309 | 0.103522 | 0 | 0.013449 | 0.221515 | 9,742 | 196 | 117 | 49.704082 | 0.855222 | 0.209916 | 0 | 0.152174 | 0 | 0 | 0.00783 | 0 | 0 | 0 | 0 | 0 | 0.253623 | 1 | 0.043478 | false | 0 | 0.057971 | 0 | 0.15942 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6832f4fad5795bb5e1049c8a59ee7b24d6204ec0 | 2,383 | py | Python | lib/usfm/handler.py | silnrsi/python-usfm | eb3c194ab995c7064bcfa435c2d178318f069fde | [
"MIT"
] | null | null | null | lib/usfm/handler.py | silnrsi/python-usfm | eb3c194ab995c7064bcfa435c2d178318f069fde | [
"MIT"
] | null | null | null | lib/usfm/handler.py | silnrsi/python-usfm | eb3c194ab995c7064bcfa435c2d178318f069fde | [
"MIT"
] | null | null | null | '''
The original callback based SFM parser API implemented ontop of the new SFM
parser system.
'''
__version__ = '20101011'
__date__ = '11 October 2010'
__author__ = 'Tim Eves <tim_eves@sil.org>'
__history__ = '''
20101026 - tse - rewrote to use new palaso.sfm module
'''
from . import sfm
import warnings
from functools import reduce
class Handler(object):
def __init__(self):
self.errors = []
def start(self, pos, ctag, tag, params):
return ' '.join([tag]+params)
def text(self, pos, ctag, text):
return text
def end(self, pos, ctag, tag):
return ''
def error(self, *warn_msg):
self.errors.append(warnings.WarningMessage(*warn_msg))
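# transduce() re-serializes the parsed SFM element tree back to text, routing
# every start tag, text node and end tag through the handler so each event can
# be observed or rewritten on the way out; parse() below walks the same tree
# purely for the handler's side effects.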
def transduce(parser, handler, source):
def _g(line, e):
if isinstance(e, str):
return line + handler.text(e.pos, e.parent, e)
line += '\\' + handler.start(e.pos, e.parent and e.parent.name,
e.name, e.args)
body = reduce(_g, e, '')
        no_space = not body or (not body.startswith('\\\\')
                                and body.startswith(('\r\n', '\n', '\\')))
        line += body if no_space else ' ' + body
tag = handler.end(e.pos, e.parent and e.parent.name, e.name)
if tag:
line += f'\\{tag}'
return line
with warnings.catch_warnings():
warnings.showwarning = handler.error
warnings.resetwarnings()
warnings.simplefilter("always", SyntaxWarning)
doc = parser(source)
return reduce(_g, doc, '').splitlines(True)
def parse(parser, handler, source):
    def _g(acc, e):
        if isinstance(e, str):
            handler.text(e.pos, e.parent, e)
        else:
            handler.start(e.pos, e.parent.name, e.name, e.args)
            reduce(_g, e, '')
            handler.end(e.pos, e.parent.name, e.name)
        return acc  # preserve the accumulator so reduce() returns a str, not None
with warnings.catch_warnings():
warnings.showwarning = handler.error
warnings.resetwarnings()
warnings.simplefilter("always", SyntaxWarning)
doc = parser(source)
return reduce(_g, doc, '').splitlines(True)
if __name__ == '__main__':
import palaso.sfm.usfm as usfm
import sys
import codecs
mat = codecs.open(sys.argv[1], 'rb', encoding='utf-8_sig')
out = codecs.open(sys.argv[2], 'wb', encoding='utf-8', buffering=1)
out.writelines(transduce(usfm.parser, sfm.handler(), mat))
| 28.710843 | 75 | 0.595888 | 304 | 2,383 | 4.536184 | 0.351974 | 0.040609 | 0.021755 | 0.047861 | 0.436548 | 0.378535 | 0.346628 | 0.277012 | 0.277012 | 0.277012 | 0 | 0.015376 | 0.263114 | 2,383 | 82 | 76 | 29.060976 | 0.769932 | 0.037768 | 0 | 0.233333 | 0 | 0 | 0.074398 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15 | false | 0 | 0.1 | 0.05 | 0.383333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
683782ad1e13830da00f95e9662bd01792657d34 | 7,366 | py | Python | library/network.py | saxtouri/kamaki-ansible-role | b228506b715a47ef89f408406f255ec86d426eca | [
"Apache-2.0"
] | 1 | 2021-04-26T08:46:17.000Z | 2021-04-26T08:46:17.000Z | library/network.py | saxtouri/kamaki-ansible-role | b228506b715a47ef89f408406f255ec86d426eca | [
"Apache-2.0"
] | null | null | null | library/network.py | saxtouri/kamaki-ansible-role | b228506b715a47ef89f408406f255ec86d426eca | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright 2018 Stavros Sachtouris <saxtouri@grnet.gr>
from kamaki.clients import ClientError
from kamaki.clients.cyclades import CycladesClient, CycladesNetworkClient
from kamaki.clients.network import NetworkClient
from kamaki.cli import logging
from kamaki.clients.utils import https
from ansible.module_utils.basic import AnsibleModule
class SNFPrivateNetwork(AnsibleModule):
"""Synnefo network class, based on kamaki
    Create, delete, connect and disconnect a private network
"""
_cyclades, _network = None, None
def __init__(self, *args, **kw):
super(SNFPrivateNetwork, self).__init__(*args, **kw)
self.cloud = self.params.get('cloud').get('cloud')
ca_certs = self.cloud.get('ca_certs')
if ca_certs:
try:
https.patch_with_certs(ca_certs)
except Exception as e:
self.fail_json(
msg="Certificates (ca_certs) failed to patch kamaki",
msg_details=e.message)
else:
https.patch_ignore_ssl()
@property
def network(self):
if not self._network:
url, token = self.cloud.get('network_url'), self.cloud.get('token')
try:
self._network = CycladesNetworkClient(url, token)
except ClientError as e:
self.fail_json(
msg="Network Client initialization failed",
msg_details=e.message)
return self._network
def discover(self):
id_, name = self.params.get('id'), self.params.get('name')
if id_:
try:
return self.network.get_network_details(id_)
except ClientError as e:
if e.status in (404, ):
return None
self.fail_json(
msg='Error while looking for network',
msg_details=e.message)
elif name:
for net in self.network.list_networks(detail=True):
if name == net['name']:
return net
return None
def create_subnet(self, id_):
        cidr, dhcp = self.params.get('cidr'), self.params.get('dhcp')
try:
return self.network.create_subnet(id_, cidr, enable_dhcp=dhcp)
except ClientError as e:
self.fail_json(
msg="Failed to create subnet=", msg_details=e.message)
def create(self):
name = self.params.get('name')
try:
return self.network.create_network(
type='MAC_FILTERED', name=name,
project_id=self.cloud.get('project_id'))
except ClientError as e:
self.fail_json(
msg="Failed to create network with name {}".format(name),
msg_details=e.message)
def discover_port(self, net_id):
try:
ports = self.network.list_ports()
except ClientError as e:
self.fail_json(
msg='Failed to list ports', msg_details=e.message)
vm_id = self.params.get('vm_id')
for port in ports:
if all((
port['device_id'] == vm_id,
port['network_id'] == net_id)):
return port
# state functions
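    # Each state method mirrors an Ansible desired-state:
    #   absent       -- delete the network if it exists
    #   present      -- create/rename the network, attaching a subnet if cidr given
    #   connected    -- ensure a port links the network to vm_id
    #   disconnected -- remove that port
    # All of them return the dict that AnsibleModule.exit_json() expects.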
def absent(self):
"""Make sure a given network does not exist
Networks are identified by id or name, in that order
"""
net = self.discover()
if net:
try:
self.network.delete_network(net['id'])
return dict(changed=True, msg='Network deleted')
except ClientError as e:
if e.status not in (404, ):
self.fail_json(
msg="Error deleting network", msg_details=e.message)
return dict(changed=False, msg="No such network")
def present(self):
"""Make sure a network exists (create it, if not)
If an id and a name are given, the network is identified by the id
and then its name changes to match the new name.
If no id is provided, we make sure there exists a network with this
name
"""
changed = False
name = self.params.get('name')
net = self.discover()
if not net:
net = self.create()
changed = True
if name and net['name'] != name:
try:
net = self.network.update_network(net['id'], name=name)
except ClientError as e:
self.fail_json(
msg="Failed to update network", msg_details=e.message)
changed = True
if self.params.get('cidr') and not net['subnets']:
subnet = self.create_subnet(net['id'])
net['subnets'].append(subnet['id'])
changed = True
return dict(changed=changed, network=net)
def connected(self):
net, vm_id = self.discover(), self.params.get('vm_id')
if not net:
self.fail_json(msg='Network does not exist')
port = self.discover_port(net['id'])
if port:
return dict(changed=False, port=port)
try:
port = self.network.create_port(net['id'], vm_id)
except ClientError as e:
self.fail_json(
msg='Failed to connect network', msg_details=e.message)
if self.params.get('wait'):
try:
port = self.network.wait_port_until(port['id'], 'ACTIVE')
            except ClientError:
pass
return dict(changed=True, port=port)
def disconnected(self):
net = self.discover()
if not net:
self.fail_json(msg='Network does not exist')
port = self.discover_port(net['id'])
if not port:
return dict(changed=False, msg='No connection')
try:
self.network.delete_port(port['id'])
except ClientError as e:
self.fail_json(msg='Failed to delete port', msg_details=e.message)
if self.params.get('wait'):
try:
self.network.wait_port_while(port['id'], 'ACTIVE')
except ClientError:
pass
        return dict(changed=True, msg='Disconnected successfully')
if __name__ == '__main__':
module = SNFPrivateNetwork(
argument_spec={
'state': {
'default': 'present',
'choices': ['absent', 'present', 'connected', 'disconnected']},
'cloud': {'required': True, 'type': 'dict'},
'id': {'required': False, 'type': 'str'},
'name': {'required': False, 'type': 'str'},
'cidr': {'required': False, 'type': 'str'},
'dhcp': {'required': False, 'type': 'bool'},
'vm_id': {'required': False, 'type': 'str'},
'wait': {'default': True, 'type': 'bool'},
},
required_if=(
('dhcp', True, ('cidr', )),
('state', 'connected', ('vm_id', )),
('state', 'disconnected', ('vm_id', )),
),
)
result = {
'absent': module.absent,
'present': module.present,
'connected': module.connected,
'disconnected': module.disconnected,
}[module.params['state']]()
module.exit_json(**result)
| 37.015075 | 79 | 0.545479 | 836 | 7,366 | 4.688995 | 0.202153 | 0.039286 | 0.039796 | 0.045918 | 0.314031 | 0.177806 | 0.149745 | 0.134949 | 0.12602 | 0.12602 | 0 | 0.002052 | 0.338447 | 7,366 | 198 | 80 | 37.20202 | 0.80238 | 0.069916 | 0 | 0.331361 | 0 | 0 | 0.127593 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.059172 | false | 0.011834 | 0.035503 | 0 | 0.189349 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
683a2fd3a094f4d4627ba57cf4c80fc8ff0cfe9b | 283 | py | Python | research/validator-sampling/random/getcosmosvp.py | vutting4221/bandchain | 47d6cbb2923d9e8651a3ff03ae1d979458067c5d | [
"Apache-2.0"
] | 251 | 2018-08-03T04:07:16.000Z | 2022-03-27T09:12:38.000Z | research/validator-sampling/random/getcosmosvp.py | Sherwana/bandchain | 7e9c9b72a642461e88a8c5b98dc5e69d30f6c42b | [
"Apache-2.0"
] | 2,935 | 2018-08-03T08:59:20.000Z | 2022-03-25T23:44:55.000Z | research/validator-sampling/random/getcosmosvp.py | Sherwana/bandchain | 7e9c9b72a642461e88a8c5b98dc5e69d30f6c42b | [
"Apache-2.0"
] | 57 | 2018-08-16T08:10:04.000Z | 2022-03-30T06:16:11.000Z |
import requests
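# Quick data pull: fetch the current validator set from the public Cosmos RPC
# endpoint and collect each validator's voting power and address. Note the
# /validators endpoint may paginate large sets; this script reads only the
# first page returned.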
url = "https://rpc.cosmos.network/validators"
r = requests.get(url)
jsonData = r.json()["result"]["validators"]
l = []
ll = []
for validator in jsonData:
    l.append(int(validator["voting_power"]))
    ll.append(validator["address"])
print(l)
print(ll)
print(len(l))
| 15.722222 | 45 | 0.650177 | 44 | 283 | 4.159091 | 0.568182 | 0.043716 | 0.065574 | 0.153005 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.14841 | 283 | 17 | 46 | 16.647059 | 0.759336 | 0 | 0 | 0.153846 | 0 | 0 | 0.255319 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.076923 | 0.230769 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
683f7ac8ec64f9c43860413257ec3e52b0f953f7 | 2,553 | py | Python | app/api/v2/utils.py | ValerieRono/iReporter | db79bfc2f25bee93b495a9163d96df985646c756 | [
"MIT"
] | null | null | null | app/api/v2/utils.py | ValerieRono/iReporter | db79bfc2f25bee93b495a9163d96df985646c756 | [
"MIT"
] | null | null | null | app/api/v2/utils.py | ValerieRono/iReporter | db79bfc2f25bee93b495a9163d96df985646c756 | [
"MIT"
] | 2 | 2018-12-01T17:05:50.000Z | 2019-01-13T14:21:27.000Z | from flask import request, jsonify, make_response
from marshmallow import Schema, fields
from datetime import datetime, timedelta
from functools import wraps
import jwt
import os
SECRET_KEY = os.getenv('SECRET_KEY')
def generate_token(self, user_id, is_admin):
""" Generates the access token"""
try:
# set up a payload with an expiration time
payload = {
'exp': datetime.utcnow() + timedelta(minutes=120),
'iat': datetime.utcnow(),
'user': {
'user_id': user_id,
'is_admin': is_admin
}
}
# create the byte string token using the payload and the SECRET key
jwt_string = jwt.encode(
payload,
SECRET_KEY,
algorithm='HS256'
)
return jwt_string
except Exception as e:
# return an error in string format if an exception occurs
return str(e)
def decode_token(token):
"""Decodes the access token from the Authorization header."""
try:
# try to decode the token using our SECRET variable
payload = jwt.decode(token, SECRET_KEY, algorithms=['HS256'])
return payload['user']
except jwt.ExpiredSignatureError:
# the token is expired, return an error string
return "Expired token. Please login to get a new token"
except jwt.InvalidTokenError:
# the token is invalid, return an error string
return "Invalid token"
def token_required(f):
@wraps(f)
def decorated(*args, **kwargs):
access = request.headers.get('Authorization')
if not access:
return make_response(jsonify({
"message": "Authorization required!",
}), 401)
        # ensure the header has the form "Bearer <token>"
        parts = access.split(" ")
        if len(parts) != 2 or not parts[1]:
            return make_response(jsonify({
                "message": "token is missing!",
            }), 401)
        token = parts[1]
user = decode_token(token)
if isinstance(user, str):
return make_response(jsonify({
"message": "unsuccessful",
"error": user
}), 401)
return f(user=user, *args, **kwargs)
return decorated
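# A minimal usage sketch for the decorator (the route handler name here is
# illustrative, not part of this module). The decorator injects the decoded
# payload as the `user` keyword argument:
#
#     @token_required
#     def get_records(user):
#         if not user['is_admin']:
#             return make_response(jsonify({"message": "forbidden"}), 403)
#         ...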
# for serialization
class IncidentSchema(Schema):
id = fields.Int()
createdBy = fields.Int()
createdOn = fields.DateTime()
type_of_incident = fields.Str()
location = fields.Str()
status = fields.Str()
images = fields.Str()
videos = fields.Str()
comment = fields.Str() | 27.75 | 75 | 0.589503 | 285 | 2,553 | 5.203509 | 0.385965 | 0.036413 | 0.026298 | 0.050573 | 0.098449 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010888 | 0.31649 | 2,553 | 92 | 76 | 27.75 | 0.838968 | 0.167646 | 0 | 0.125 | 0 | 0 | 0.094922 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.09375 | 0 | 0.46875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68405c394273dee79f8ca4c0b7f5621251ed30fc | 705 | py | Python | tests/test_yahoo.py | portfolioplus/pysymbolscanner | 6d78e93e21d0db0760151dd9dd08bd3b3177e44a | [
"MIT"
] | 3 | 2021-02-09T23:17:50.000Z | 2021-06-11T01:35:26.000Z | tests/test_yahoo.py | portfolioplus/pysymbolscanner | 6d78e93e21d0db0760151dd9dd08bd3b3177e44a | [
"MIT"
] | 56 | 2020-12-17T13:17:06.000Z | 2022-03-10T17:11:59.000Z | tests/test_yahoo.py | portfolioplus/pysymbolscanner | 6d78e93e21d0db0760151dd9dd08bd3b3177e44a | [
"MIT"
] | 1 | 2021-11-15T09:05:05.000Z | 2021-11-15T09:05:05.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" pysymbolscanner
Copyright 2020 Slash Gordon
Use of this source code is governed by an MIT-style license that
can be found in the LICENSE file.
"""
import unittest
from pysymbolscanner.yahoo import YahooSearch
class TestYahooSearch(unittest.TestCase):
def test_yahoo_search(self):
"""
Test the yahoo search
:return:
"""
search = YahooSearch()
result = search.get_symbols('INDUS Holding Aktiengesellschaft')
self.assertTrue(result)
result2 = search.get_symbols('Borussia Dortmund GmbH & Co. KGaA')
self.assertTrue(result2)
if __name__ == "__main__":
unittest.main()
| 25.178571 | 73 | 0.669504 | 82 | 705 | 5.609756 | 0.731707 | 0.047826 | 0.069565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012939 | 0.232624 | 705 | 27 | 74 | 26.111111 | 0.837338 | 0.306383 | 0 | 0 | 0 | 0 | 0.164786 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.090909 | false | 0 | 0.181818 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68425a602e2fe9ff60b4655e6dd3c05d4677606d | 476 | py | Python | cursoemvideo/exercicios/ex081.py | mrqssjeff/project-python | b3b08f2acfe825640a5ee92cf9d6fa45ab580384 | [
"MIT"
] | null | null | null | cursoemvideo/exercicios/ex081.py | mrqssjeff/project-python | b3b08f2acfe825640a5ee92cf9d6fa45ab580384 | [
"MIT"
] | null | null | null | cursoemvideo/exercicios/ex081.py | mrqssjeff/project-python | b3b08f2acfe825640a5ee92cf9d6fa45ab580384 | [
"MIT"
] | null | null | null | numeros = list()
while True:
    n = int(input('Enter a number: '))
numeros.append(n)
    per = ' '
    while per not in ('Y', 'N'):
        per = str(input('Do you want to quit? [Y/N]: ')).strip().upper()[:1]
    if per == 'Y':
        break
print(f'You entered {len(numeros)} numbers')
numeros.sort(reverse=True)
print(f'The values in descending order are {numeros}')
if 5 in numeros:
    print('The value 5 is in the list.')
else:
    print('The value 5 is not in the list.')
| 28 | 67 | 0.613445 | 75 | 476 | 3.893333 | 0.626667 | 0.041096 | 0.075342 | 0.082192 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01087 | 0.226891 | 476 | 16 | 68 | 29.75 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0.386555 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68427eac80368597c7f25eb04ebd416159cbdfdf | 3,516 | py | Python | rhucrl/policy/action_robust_policy.py | sebascuri/rhucrl | 27663e1302f3bbc636dff28495c6f2667bb7c1da | [
"MIT"
] | 1 | 2021-11-19T11:46:48.000Z | 2021-11-19T11:46:48.000Z | rhucrl/policy/action_robust_policy.py | sebascuri/rhucrl | 27663e1302f3bbc636dff28495c6f2667bb7c1da | [
"MIT"
] | 1 | 2021-11-22T07:48:03.000Z | 2021-11-22T07:48:03.000Z | rhucrl/policy/action_robust_policy.py | sebascuri/rhucrl | 27663e1302f3bbc636dff28495c6f2667bb7c1da | [
"MIT"
] | 1 | 2022-03-26T10:18:01.000Z | 2022-03-26T10:18:01.000Z | """Action Robust policies.."""
from abc import ABCMeta
import torch
from rllib.policy.nn_policy import NNPolicy
from .split_policy import SplitPolicy
class ActionRobustPolicy(SplitPolicy, metaclass=ABCMeta):
"""Action Robust Abstract Policy class.
Parameters
----------
alpha: float
Action robust parameter.
References
----------
Tessler, C., Efroni, Y., & Mannor, S. (2019).
Action robust reinforcement learning and applications in continuous control. ICML.
"""
def __init__(self, alpha, *args, **kwargs):
self.alpha = alpha
super().__init__(*args, **kwargs)
def forward(self, state):
"""Forward compute the policy."""
raise NotImplementedError
@classmethod
def default(
cls, environment, hallucinate_protagonist=True, alpha=None, *args, **kwargs
):
"""See `NNPolicy.default'."""
protagonist_policy = NNPolicy(
dim_state=environment.dim_state, dim_action=environment.dim_action
)
antagonist_policy = NNPolicy(
dim_state=environment.dim_state, dim_action=environment.dim_action
)
hallucination_policy = NNPolicy(
dim_state=environment.dim_state, dim_action=environment.dim_state
)
if alpha is None:
alpha = environment.alpha
return cls(
alpha=alpha,
dim_state=environment.dim_state,
dim_action=environment.dim_action,
protagonist_policy=protagonist_policy,
antagonist_policy=antagonist_policy,
hallucination_policy=hallucination_policy,
hallucinate_protagonist=hallucinate_protagonist,
)
class NoisyActionRobustPolicy(ActionRobustPolicy):
"""Noisy Action Robust Abstract Policy class.
    It mixes the two policies, weighting the protagonist by (1 - alpha)
    and the antagonist by alpha.
References
----------
Tessler, C., Efroni, Y., & Mannor, S. (2019).
Action robust reinforcement learning and applications in continuous control. ICML.
"""
def forward(self, state):
"""Compute policy."""
p_mean, p_scale_tril = self.protagonist_policy(state)
a_mean, a_scale_tril = self.antagonist_policy(state)
h_mean, h_scale_tril = self.hallucination_policy(state)
p_std = p_scale_tril.diagonal(dim1=-1, dim2=-2)
a_std = a_scale_tril.diagonal(dim1=-1, dim2=-2)
h_std = h_scale_tril.diagonal(dim1=-1, dim2=-2)
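        # Parameter-wise Gaussian blend: means and stds are mixed with weights
        # (1 - alpha) and alpha; the hallucination head is passed through as-is.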
mean = (1 - self.alpha) * p_mean + self.alpha * a_mean
std = (1 - self.alpha) * p_std + self.alpha * a_std
return self.stack_policies((mean, h_mean), (std, h_std))
class ProbabilisticActionRobustPolicy(ActionRobustPolicy):
"""Noisy Action Robust Abstract Policy class.
It samples the protagonist with probability 1-alpha and the antagonist with alpha.
References
----------
Tessler, C., Efroni, Y., & Mannor, S. (2019).
Action robust reinforcement learning and applications in continuous control. ICML.
"""
def forward(self, state):
"""Compute policy."""
h_mean, h_scale_tril = self.hallucination_policy(state)
if torch.rand(1).item() < self.alpha:
mean, scale_tril = self.antagonist_policy(state)
else:
mean, scale_tril = self.protagonist_policy(state)
std = scale_tril.diagonal(dim1=-1, dim2=-2)
h_std = h_scale_tril.diagonal(dim1=-1, dim2=-2)
return self.stack_policies((mean, h_mean), (std, h_std))
| 31.963636 | 86 | 0.650455 | 403 | 3,516 | 5.473945 | 0.225806 | 0.044878 | 0.035358 | 0.047597 | 0.56573 | 0.551677 | 0.489121 | 0.476881 | 0.426111 | 0.387126 | 0 | 0.013889 | 0.242321 | 3,516 | 109 | 87 | 32.256881 | 0.814189 | 0.253129 | 0 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.072727 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68437dce8e3af7427b84bb6abed9b7c7399026a4 | 6,007 | py | Python | aws.py | chaordic/aws-resource-tagger | eb4ba6fed9f705da99ab7c07fa3c89c16efc371b | [
"Apache-2.0"
] | null | null | null | aws.py | chaordic/aws-resource-tagger | eb4ba6fed9f705da99ab7c07fa3c89c16efc371b | [
"Apache-2.0"
] | null | null | null | aws.py | chaordic/aws-resource-tagger | eb4ba6fed9f705da99ab7c07fa3c89c16efc371b | [
"Apache-2.0"
] | 2 | 2021-08-15T02:41:10.000Z | 2021-09-17T17:55:21.000Z | import os
import json
import boto3
defaults = {
"query_instances": """
SELECT
resourceId,
configuration.imageId,
configuration.blockDeviceMappings,
configuration.vpcId,
tags,
relationships
WHERE
resourceType = 'AWS::EC2::Instance'
AND configuration.state.name = 'running'
OR configuration.state.name = 'stopped'
""",
"query_vpcs": """
SELECT
resourceId,
tags
WHERE
resourceType = 'AWS::EC2::VPC'
"""
}
class AWS(object):
def __init__(self):
self.clients = {}
self.config_queries = {}
self.metric_namespace = "aws_resource_tagger"
self.metrics_data = []
self.setup()
def setup(self):
self.metrics_data.clear()
self.clients["ec2"] = config = boto3.client('ec2')
self.clients["config"] = config = boto3.client('config')
self.clients["cloudwatch"] = config = boto3.client('cloudwatch')
self.config_queries["instances"] = os.getenv("CONFIG_QUERY_INSTANCES", defaults["query_instances"])
def run_Config_query(self, query):
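        """Run an AWS Config advanced query (select_resource_config),
        following NextToken pagination and returning every result row
        parsed from its JSON string into a dict."""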
response = []
response.clear()
page = ''
next_page = True
while next_page:
if page == '':
resp = self.clients["config"].select_resource_config(Expression=query)
else:
resp = self.clients["config"].select_resource_config(Expression=query, NextToken=page)
if 'NextToken' in resp:
page = resp["NextToken"]
else:
next_page = False
for r in resp['Results']:
response.append(json.loads(r))
return response
def get_instances(self):
return self.run_Config_query(
self.config_queries["instances"]
)
def get_instance_tags_Config(self, instance_id):
"""
        AWS Config ingests resources with a delay, so events for new
        resources are not instantly available in Config. Therefore,
        AWS Config is not a good source for retrieving NEW resources.
"""
query = """
SELECT
resourceId,
tags,
relationships
WHERE
resourceType = 'AWS::EC2::Instance'
and resourceId = '{}'
""".format(instance_id)
return self.run_Config_query(query)
def get_instance_tags_api(self, instance_id):
"""
        Get the instance directly from the EC2 DescribeInstances API.
        Since an instance_id is unique across all AWS resources, a
        single dictionary is returned.
"""
instance_resp = {}
instance_resp.clear()
try:
resp = self.clients["ec2"].describe_instances(
InstanceIds=[instance_id]
)
if 'Reservations' not in resp:
return instance_resp
if len(resp["Reservations"]) <= 0:
return instance_resp
if len(resp["Reservations"][0]['Instances']) <= 0:
return instance_resp
instance = resp["Reservations"][0]['Instances'][0]
if 'Tags' not in instance:
tags = []
else:
tags = instance["Tags"]
dm = instance["BlockDeviceMappings"] or []
try:
vpc_id = instance["VpcId"] or ''
except KeyError:
vpc_id = ''
instance_resp = {
"InstanceId": instance["InstanceId"],
"Tags": tags,
"BlockDeviceMappings": dm,
"VpcId": vpc_id
}
        except Exception:
raise
return instance_resp
def get_volume_tags_api(self, resource_id):
"""
        Get the volume directly from the EC2 DescribeVolumes API.
        Since a VolumeId is unique across all AWS resources, a single
        dictionary is returned.
"""
resource_resp = {}
resource_resp.clear()
try:
resp = self.clients["ec2"].describe_volumes(
VolumeIds=[resource_id]
)
if 'Volumes' not in resp:
return resource_resp
if len(resp["Volumes"]) <= 0:
return resource_resp
resource = resp["Volumes"][0]
tags = []
if 'Tags' in resource:
tags = resource["Tags"]
at = []
if len(resource["Attachments"]) > 0:
at = resource["Attachments"][0]
resource_resp = {
"VolumeId": resource["VolumeId"],
"Tags": tags,
"Attachments": at
}
        except Exception:
raise
return resource_resp
def get_vpc_tags(self, vpc_id):
query = """
SELECT
resourceId,
tags
WHERE
resourceType = 'AWS::EC2::VPC'
AND resourceId = '{}'
""".format(vpc_id)
return self.run_Config_query(query)
def get_volumes(self):
volumes = {}
volumes.clear()
for response in self.clients["ec2"].get_paginator('describe_volumes').paginate():
volumes.update([(volume['VolumeId'], volume) for volume in response['Volumes']])
return volumes
def add_metrics(self, data={}):
self.metrics_data.append({
"MetricName": data["name"],
"Dimensions": [data["dimensions"]],
"Value": data["value"],
"Unit": "Count"
})
def push_metrics(self):
self.clients["cloudwatch"].put_metric_data(
Namespace=self.metric_namespace,
MetricData=self.metrics_data
)
self.metrics_data.clear()
| 28.879808 | 107 | 0.519394 | 565 | 6,007 | 5.376991 | 0.238938 | 0.036208 | 0.024687 | 0.030283 | 0.254444 | 0.234365 | 0.215273 | 0.215273 | 0.100066 | 0.038841 | 0 | 0.006424 | 0.378059 | 6,007 | 207 | 108 | 29.019324 | 0.806745 | 0.080406 | 0 | 0.310127 | 0 | 0 | 0.25727 | 0.023338 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06962 | false | 0.006329 | 0.018987 | 0.006329 | 0.170886 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6843cab7a27354e1507539504c0ae341a74f0881 | 1,161 | py | Python | noisify/recipes/default_recipes.py | dstl/Noisify | d776a74a66fa71c3f6aacd64f8c21af4f41f1714 | [
"MIT"
] | 11 | 2019-05-03T17:25:05.000Z | 2020-05-20T16:23:00.000Z | noisify/recipes/default_recipes.py | dstl/Noisify | d776a74a66fa71c3f6aacd64f8c21af4f41f1714 | [
"MIT"
] | null | null | null | noisify/recipes/default_recipes.py | dstl/Noisify | d776a74a66fa71c3f6aacd64f8c21af4f41f1714 | [
"MIT"
] | 2 | 2021-04-10T22:02:12.000Z | 2021-11-29T08:46:50.000Z | """
.. Dstl (c) Crown Copyright 2019
Default recipes, these are extremely simple and are mainly to provide examples for developing your own code.
"""
from noisify.faults import TypographicalFault, ScrambleAttributes, GaussianNoise, InterruptionFault
from noisify.reporters import Noisifier, Reporter
def human_error(scale):
"""
Simple example Noisifier recipe, applies typos and attribute scrambling to the input depending
on the scale given, recommended scale range from 1-10
"""
return Noisifier(
reporter=Reporter(
faults=[TypographicalFault(likelihood=min(1, 0.1*scale), severity=0.1*scale),
ScrambleAttributes(likelihood=0.1 * scale)]
),
faults=None
)
def machine_error(scale):
"""
Simple example Noisifier recipe, applies gaussian noise and occasional interruptions to the input
depending on the scale given, recommended scale range from 1-10
"""
return Noisifier(
reporter=Reporter(
faults=[GaussianNoise(sigma=0.1*scale),
InterruptionFault(likelihood=min(1, 0.01*scale))]
),
faults=None
)
| 33.171429 | 108 | 0.682171 | 133 | 1,161 | 5.93985 | 0.481203 | 0.010127 | 0.035443 | 0.058228 | 0.364557 | 0.364557 | 0.364557 | 0.250633 | 0.250633 | 0.250633 | 0 | 0.025959 | 0.236865 | 1,161 | 34 | 109 | 34.147059 | 0.865688 | 0.38932 | 0 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6845c45236694916e706cb4506cb72472bf772fc | 651 | py | Python | tests/old_tests/Classify.py | xaviermouy/ecosound | 25d333807e090e737b3ac910c2dcf8e5850b91b4 | [
"BSD-3-Clause"
] | 3 | 2020-06-05T19:09:05.000Z | 2021-02-20T22:16:03.000Z | tests/old_tests/Classify.py | xaviermouy/ecosound | 25d333807e090e737b3ac910c2dcf8e5850b91b4 | [
"BSD-3-Clause"
] | 8 | 2020-09-02T17:27:45.000Z | 2022-02-21T16:00:30.000Z | tests/old_tests/Classify.py | xaviermouy/ecosound | 25d333807e090e737b3ac910c2dcf8e5850b91b4 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 16 15:51:43 2020
@author: xavier.mouy
"""
import pickle
model_filename = r'C:\Users\xavier.mouy\Documents\PhD\Projects\Dectector\results\Classification\LDA_model.sav'
import matplotlib.pyplot as plt
import numpy as np
# load the model from disk
#loaded_model = pickle.load(open(model_filename, 'rb'))
#result = loaded_model.score(X_test, Y_test)
#print(result)
y=np.random.rand(100,1)
x=range(0,100,1)
fig, ax = plt.subplots(1,1)
ax.plot(x,y)
#style = dict(size=10, color='gray')
bbox_props = dict(boxstyle="square", fc="w", ec="w", alpha=0.5)
ax.text(1, 1, "New Year's Day", size=5, bbox=bbox_props) | 27.125 | 110 | 0.714286 | 117 | 651 | 3.897436 | 0.675214 | 0.04386 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053448 | 0.109063 | 651 | 24 | 111 | 27.125 | 0.732759 | 0.384025 | 0 | 0 | 0 | 0.1 | 0.287918 | 0.231362 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.3 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6847c7931ade87d3aa380d7b80a3dd2694e53f25 | 6,889 | py | Python | model/deeplab_v3_plus.py | haifangong/TRFE-Net-for-thyroid-nodule-segmentation | 5fd0da2bee39072fe1c74d6d6e07b1cb1e7e3da1 | [
"MIT"
] | 14 | 2021-02-12T10:18:06.000Z | 2021-09-14T07:51:43.000Z | model/deeplab_v3_plus.py | haifangong/TRFE-Net-for-thyroid-nodule-segmentation | 5fd0da2bee39072fe1c74d6d6e07b1cb1e7e3da1 | [
"MIT"
] | 3 | 2021-03-06T04:21:28.000Z | 2021-12-09T14:09:30.000Z | model/deeplab_v3_plus.py | haifangong/TRFE-Net-for-thyroid-nodule-segmentation | 5fd0da2bee39072fe1c74d6d6e07b1cb1e7e3da1 | [
"MIT"
] | 2 | 2021-05-21T12:28:45.000Z | 2022-03-25T13:02:49.000Z | '''
deeplab_v3+ :
"Encoder-Decoder with Atrous Separable Convolution for Semantic Image
Segmentation"
Liang-Chieh Chen, Yukun Zhu, George Papandreou, Florian Schroff, Hartwig Adam.
(https://arxiv.org/abs/1802.02611)
according to [mobilenetv2_coco_voc_trainaug / mobilenetv2_coco_voc_trainval]
https://github.com/lizhengwei1992/models/tree/master/research/deeplab
we use MobileNet_v2 as the feature extractor
This code is modified from https://github.com/jfzhang95/pytorch-deeplab-xception/blob/master/networks/deeplab_xception.py
Author: Zhengwei Li
Date: July 1 2018
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from model.ResNet101 import ResNet101, ResNet50
from model.ResNet34 import ResNet34
from model.SPP import ASPP_simple, ASPP
from model.utils import load_pretrain_model
# -------------------------------------------------------------------------------------------------
# Deeplabv3plus
#
# feature extractor : MobileNet_v2, Xception, VggNet, ResNet
# -------------------------------------------------------------------------------------------------
class Deeplabv3plus(nn.Module):
def __init__(self, nInputChannels, n_classes, os, backbone_type):
super(Deeplabv3plus, self).__init__()
# mobilenetv2 feature
self.os = os
self.backbone_type = backbone_type
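        # Atrous rates follow DeepLabv3+: (6, 12, 18) at output stride 16 and
        # doubled, (12, 24, 36), at output stride 8; os=32 reuses the latter
        # because forward() first upsamples the feature map 4x.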
if os == 16:
rates = [1, 6, 12, 18]
elif os == 8 or os == 32:
rates = [1, 12, 24, 36]
else:
raise NotImplementedError
if backbone_type == 'resnet101':
self.backbone_features = ResNet101(nInputChannels, os, pretrained=True)
asppInputChannels = 2048
asppOutputChannels = 256
lowInputChannels = 256
lowOutputChannels = 48
self.aspp = ASPP(asppInputChannels, asppOutputChannels, rates)
self.last_conv = nn.Sequential(
nn.Conv2d(asppOutputChannels + lowOutputChannels,
256, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, n_classes, kernel_size=1, stride=1)
)
elif backbone_type == 'resnet50':
self.backbone_features = ResNet50()
asppInputChannels = 2048
asppOutputChannels = 256
lowInputChannels = 256
lowOutputChannels = 48
self.aspp = ASPP(asppInputChannels, asppOutputChannels, rates)
self.last_conv = nn.Sequential(
nn.Conv2d(asppOutputChannels + lowOutputChannels,
256, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, n_classes, kernel_size=1, stride=1)
)
elif backbone_type == 'resnet34':
self.backbone_features = ResNet34()
asppInputChannels = 512
asppOutputChannels = 256
lowInputChannels = 64
lowOutputChannels = 48
self.aspp = ASPP(asppInputChannels, asppOutputChannels, rates)
self.last_conv = nn.Sequential(
nn.Conv2d(asppOutputChannels + lowOutputChannels,
256, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, n_classes, kernel_size=1, stride=1)
)
else:
raise NotImplementedError
# low_level_features to 48 channels
self.conv2 = nn.Conv2d(lowInputChannels, lowOutputChannels, 1, bias=False)
self.bn2 = nn.BatchNorm2d(lowOutputChannels)
# init weights
if backbone_type == 'mobilenetv2':
self._init_weight()
## You CANNOT use this to init xception.
def _init_weight(self):
for m in self.modules():
if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):
nn.init.xavier_normal_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
def forward(self, input):
# x : 1/1 512 x 512
x, low_level_features = self.backbone_features(input)
# x : 1/os 512/os x 512/os
if self.os == 32:
x = F.interpolate(x, scale_factor=4, mode='bilinear', align_corners=True)
x = self.aspp(x)
# 1/4 128 x 128
low_level_features = self.conv2(low_level_features)
low_level_features = self.bn2(low_level_features)
x = F.interpolate(x, low_level_features.size()[2:], mode='bilinear', align_corners=True)
x = torch.cat((x, low_level_features), dim=1)
x = self.last_conv(x)
x = F.interpolate(x, input.size()[2:], mode='bilinear', align_corners=True)
return x
def load_backbone(self, model_path):
if self.backbone_type == 'mobilenetv2':
self.backbone_features = load_pretrain_model(self.backbone_features, torch.load(model_path))
elif self.backbone_type == 'xception':
self.backbone_features.load_xception_pretrained(model_path)
elif self.backbone_type == 'resnet101':
self.backbone_features = load_pretrain_model(self.backbone_features, torch.load('/media/SecondDisk/chenguanqi/thyroid_seg/pre_train/resnet101-5d3b4d8f.pth'))
print('Already load the backbone of resnet101')
elif self.backbone_type == 'resnet50':
self.backbone_features = load_pretrain_model(self.backbone_features, torch.load('/media/SecondDisk/chenguanqi/thyroid_seg/pre_train/resnet50-19c8e357.pth'))
print('Already load the backbone of resnet50')
elif self.backbone_type == 'resnet34':
self.backbone_features = load_pretrain_model(self.backbone_features, torch.load(model_path))
print('Already load the backbone of resnet34')
else:
raise NotImplementedError
if __name__ == "__main__":
model = Deeplabv3plus(3, 1, 32, 'resnet101')
print("wnet have {}M paramerters in total".format(sum(x.numel() for x in model.parameters())/1e6))
# indata = torch.rand(4, 3, 224, 224)
# _ = wnet(indata)
| 39.820809 | 169 | 0.597039 | 769 | 6,889 | 5.192458 | 0.26788 | 0.0571 | 0.065114 | 0.021037 | 0.466817 | 0.466817 | 0.393188 | 0.360631 | 0.360631 | 0.360631 | 0 | 0.058776 | 0.27885 | 6,889 | 172 | 170 | 40.052326 | 0.744968 | 0.159965 | 0 | 0.434783 | 0 | 0 | 0.071515 | 0.025169 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034783 | false | 0 | 0.06087 | 0 | 0.113043 | 0.034783 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68486357663dfc1112181cf79018cc2ded43f4fe | 5,238 | py | Python | Quizzy.py | mchtter/mucoTraining | 4f304a4fb1bdc1a63313ae31358306ce957fe6c0 | [
"MIT"
] | 1 | 2021-08-01T17:11:28.000Z | 2021-08-01T17:11:28.000Z | Quizzy.py | mchtter/PracticePython | 4f304a4fb1bdc1a63313ae31358306ce957fe6c0 | [
"MIT"
] | null | null | null | Quizzy.py | mchtter/PracticePython | 4f304a4fb1bdc1a63313ae31358306ce957fe6c0 | [
"MIT"
] | null | null | null | class Questions:
def __init__(self, questionText, questionAnswer, choiceList):
self.questionText = questionText
self.questionAnswer = questionAnswer
self.choiceList = choiceList
def answerCheck(self, answer):
return self.questionAnswer == answer
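# Note that Questions.answerCheck compares answers by exact, case-sensitive
# string equality, so input must match the stored answer verbatim (e.g. 'Ankara').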
class Quiz:
def __init__(self, questionText):
self.questionText = questionText
self.score = 0
self.questionIndex = 0
def getQuestion(self):
return self.questionText[self.questionIndex]
def displayQuestion(self):
question = self.getQuestion()
print(f'Question {self.questionIndex + 1}: {question.questionText}')
for q in question.choiceList:
print("-" + q)
answer = input("Answer: ")
self.guess(answer)
self.loadQuestion()
def guess(self, answer):
question = self.getQuestion()
if question.answerCheck(answer):
self.score += 1
self.questionIndex += 1
def loadQuestion(self):
if len(self.questionText) == self.questionIndex:
self.showScore()
else:
self.displayProgress()
self.displayQuestion()
def showScore(self):
print(f"Score: {self.score}")
def displayProgress(self):
totalQuestion = len(self.questionText)
questionNumber = self.questionIndex + 1
if questionNumber > totalQuestion:
print("Quiz OVER!")
else:
print(
f"Question {questionNumber} of {totalQuestion}".center(100, '*'))
q01 = Questions('Where is the capital of Turkey?', 'Ankara', [
'Izmir', 'Istanbul', 'Ankara', 'Diyarbakır', 'Erzurum'])
q02 = Questions('Where is the capital of United States?', 'Washington DC', [
'Washington DC', 'New York', 'Seattle', 'Miami', 'Los Angeles'])
q03 = Questions('Where is the capital of United Kingdom?', 'London', [
'Oxford', 'Manchester', 'London', 'Cambridge', 'Liverpool'])
q04 = Questions('Where is the capital of Germany?', 'Berlin', [
'Munih', 'Berlin', 'Frankfurt', 'Hamburg', 'Köln'])
q05 = Questions('Where is the capital of Belgium?', 'Brussels', [
'Hasselt', 'Brugge', 'Anvers', 'Brussels', 'Halle'])
q06 = Questions('Where is the capital of Brazil?', 'Brasilia', [
'Brasilia', 'Salvador', 'Rio de Janeiro', 'São Paulo', 'Santo Andrê'])
q07 = Questions('Where is the capital of Egypt?', 'Cairo', [
'Ismailiye', 'Luksor', 'Asuan', 'Dimyat', 'Cairo'])
q08 = Questions('Where is the capital of Iraq?', 'Baghdad', [
'Erbil', 'Musul', 'Kerkuk', 'Baghdad', 'Basra'])
q09 = Questions('Where is the capital of Israel?', 'Tel Aviv', [
'Netanya', 'Tel Aviv', 'Hayfa', 'Tiberya', 'Akka'])
q10 = Questions('Where is the capital of Mexico?', 'Mexico City', [
'Mexico City', 'Guadalajara', 'Cancùn', 'Monterrey', 'Tijuana'])
q11 = Questions('Where is the capital of Netherlands?', 'Amsterdam', [
'Lahey', 'Rotterdam', 'Amsterdam', 'Utrecht', 'Maastricht'])
q12 = Questions('Where is the capital of Russia?', 'Moscow', [
'Petersburg', 'Soçi', 'Vladivostok', 'Moscow', 'Volgograd'])
q13 = Questions('Where is the capital of Uzbekistan?', 'Tashkent', [
'Buhara', 'Tashkent', 'Semekand', 'Tirmiz', 'Namangan'])
q14 = Questions('Where is the capital of Sweden?', 'Stockholm', [
'Visby', 'Göteborg', 'Malmö', 'Helsingborg', 'Stockholm'])
q15 = Questions('Where is the capital of Turkmenistan?', 'Ashgabat', [
'Daşoğuz', 'Türkmenabat', 'Ashgabat', 'Balkanabat', 'Atamurat'])
q16 = Questions('Where is the capital of Ukraine?', 'Kiev', [
'Lviv', 'Odessa', 'Harkov', 'Kiev', 'Çernivtsi'])
q17 = Questions('Where is the capital of United Arab Emirates?', 'Abu Dhabi', [
'Dubai', 'Sharjah', 'Abu Dhabi', 'Qaiwain', 'Fujairah'])
q18 = Questions('Where is the capital of Philippines?', 'Manila', [
'Manila', 'Quezon City', 'Makati', 'Cebu City', 'Baguio'])
q19 = Questions('Where is the capital of Pakistan?', 'Islamabad', [
'Lahor', 'Islamabad', 'Peşaver', 'Multan', 'Ketta'])
q20 = Questions('Where is the capital of Lebanon?', 'Beirut', [
'Trablusşam', 'Sayda', 'Beirut', 'Sur', 'Tyre'])
questionText = [q01, q02, q03, q04, q05, q06, q07, q08, q09,
q10, q11, q12, q13, q14, q15, q16, q17, q18, q19, q20]
quiz = Quiz(questionText)
test = quiz.getQuestion()
index = quiz.questionIndex
quiz.loadQuestion()
| 48.5 | 93 | 0.526155 | 476 | 5,238 | 5.773109 | 0.409664 | 0.101892 | 0.116448 | 0.138282 | 0.210335 | 0.210335 | 0.037118 | 0 | 0 | 0 | 0 | 0.025656 | 0.337724 | 5,238 | 107 | 94 | 48.953271 | 0.766503 | 0 | 0 | 0.067416 | 0 | 0 | 0.323215 | 0.004391 | 0 | 0 | 0 | 0 | 0 | 1 | 0.101124 | false | 0 | 0 | 0.022472 | 0.146067 | 0.05618 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
684b7345f47d9aef1e7152a2861b4672edd5d0ae | 956 | py | Python | slackutil/slackwrapper.py | ayukatawago/slack-notification | 4dcd13c3f4bcaccf468a1854186b6e604c8ea884 | [
"MIT"
] | null | null | null | slackutil/slackwrapper.py | ayukatawago/slack-notification | 4dcd13c3f4bcaccf468a1854186b6e604c8ea884 | [
"MIT"
] | 1 | 2021-04-30T21:14:24.000Z | 2021-04-30T21:14:24.000Z | slackutil/slackwrapper.py | ayukatawago/slack_notification | 0694671e966eacbae559c7782a33515a420cc6f5 | [
"MIT"
] | null | null | null | from slack import WebClient
class SlackApiWrapper(WebClient):
def __init__(self, api_token):
super().__init__(api_token)
def post_message(self, channel, message):
response = self.chat_postMessage(
channel=channel,
text=message)
assert response["ok"]
def post_attachment_message(self, channel, blocks, attachments):
response = self.api_call(
'chat.postMessage',
json=dict(
channel=channel,
blocks=blocks,
attachments=attachments
)
)
assert response["ok"]
def update_message(self, channel, ts, blocks, attachments):
response = self.api_call(
'chat.update',
json=dict(
channel=channel,
ts=ts,
blocks=blocks,
attachments=attachments
)
)
assert response["ok"]
| 26.555556 | 68 | 0.538703 | 85 | 956 | 5.858824 | 0.341176 | 0.136546 | 0.108434 | 0.076305 | 0.361446 | 0.361446 | 0.361446 | 0 | 0 | 0 | 0 | 0 | 0.374477 | 956 | 35 | 69 | 27.314286 | 0.832776 | 0 | 0 | 0.466667 | 0 | 0 | 0.034519 | 0 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0.133333 | false | 0 | 0.033333 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
684c9d7e52e0323bde95bce76512ec1c95915e5c | 34,358 | py | Python | gpMgmt/bin/gppylib/operations/backup_utils.py | asimrp/gpdb | 635d75556c15c3a42e09e74735f90263526bd881 | [
"PostgreSQL",
"Apache-2.0"
] | null | null | null | gpMgmt/bin/gppylib/operations/backup_utils.py | asimrp/gpdb | 635d75556c15c3a42e09e74735f90263526bd881 | [
"PostgreSQL",
"Apache-2.0"
] | null | null | null | gpMgmt/bin/gppylib/operations/backup_utils.py | asimrp/gpdb | 635d75556c15c3a42e09e74735f90263526bd881 | [
"PostgreSQL",
"Apache-2.0"
] | null | null | null | import fnmatch
import glob
import os
import re
import tempfile
from gppylib import gplog
from gppylib.commands.base import WorkerPool, Command, REMOTE
from gppylib.commands.unix import Scp
from gppylib.db import dbconn
from gppylib.db.dbconn import execSQL
from gppylib.gparray import GpArray
from pygresql import pg
from gppylib.operations.utils import DEFAULT_NUM_WORKERS
logger = gplog.get_default_logger()
def expand_partitions_and_populate_filter_file(dbname, partition_list, file_prefix):
expanded_partitions = expand_partition_tables(dbname, partition_list)
dump_partition_list = list(set(expanded_partitions + partition_list))
return create_temp_file_from_list(dump_partition_list, file_prefix)
def populate_filter_tables(table, rows, non_partition_tables, partition_leaves):
if not rows:
non_partition_tables.append(table)
else:
for (schema_name, partition_leaf_name) in rows:
partition_leaf = schema_name.strip() + '.' + partition_leaf_name.strip()
partition_leaves.append(partition_leaf)
return (non_partition_tables, partition_leaves)
def get_all_parent_tables(dbname):
SQL = "SELECT DISTINCT (schemaname || '.' || tablename) FROM pg_partitions"
data = []
with dbconn.connect(dbconn.DbURL(dbname=dbname)) as conn:
curs = dbconn.execSQL(conn, SQL)
data = curs.fetchall()
return set([d[0] for d in data])
def list_to_quoted_string(filter_tables):
filter_string = "'" + "', '".join([pg.escape_string(t) for t in filter_tables]) + "'"
return filter_string
def convert_parents_to_leafs(dbname, parents):
partition_leaves_sql = """
SELECT x.partitionschemaname || '.' || x.partitiontablename
FROM (
SELECT distinct schemaname, tablename, partitionschemaname, partitiontablename, partitionlevel
FROM pg_partitions
WHERE schemaname || '.' || tablename in (%s)
) as X,
(SELECT schemaname, tablename maxtable, max(partitionlevel) maxlevel
FROM pg_partitions
group by (tablename, schemaname)
) as Y
WHERE x.schemaname = y.schemaname and x.tablename = Y.maxtable and x.partitionlevel = Y.maxlevel;
"""
if not parents:
return []
conn = dbconn.connect(dbconn.DbURL(dbname=dbname))
partition_sql = partition_leaves_sql % list_to_quoted_string(parents)
curs = dbconn.execSQL(conn, partition_sql)
rows = curs.fetchall()
return [r[0] for r in rows]
#input: list of tables to be filtered
#output: same list but parent tables converted to leafs
def expand_partition_tables(dbname, filter_tables):
if not filter_tables or len(filter_tables) == 0:
return filter_tables
parent_tables = list()
non_parent_tables = list()
expanded_list = list()
all_parent_tables = get_all_parent_tables(dbname)
for table in filter_tables:
if table in all_parent_tables:
parent_tables.append(table)
else:
non_parent_tables.append(table)
expanded_list += non_parent_tables
local_batch_size = 1000
for (s, e) in get_batch_from_list(len(parent_tables), local_batch_size):
tmp = convert_parents_to_leafs(dbname, parent_tables[s:e])
expanded_list += tmp
return expanded_list
def get_batch_from_list(length, batch_size):
indices = []
for i in range(0, length, batch_size):
indices.append((i, i+batch_size))
return indices
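# e.g. get_batch_from_list(5, 2) -> [(0, 2), (2, 4), (4, 6)]; the last slice
# may extend past the end of the list, which Python slicing handles safely.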
def create_temp_file_from_list(entries, prefix):
"""
When writing the entries into temp file, don't do any strip as there might be
white space in schema name and table name.
"""
if len(entries) == 0:
return None
fd = tempfile.NamedTemporaryFile(mode='w', prefix=prefix, delete=False)
for entry in entries:
fd.write(entry + '\n')
tmp_file_name = fd.name
fd.close()
verify_lines_in_file(tmp_file_name, entries)
return tmp_file_name
def create_temp_file_with_tables(table_list):
return create_temp_file_from_list(table_list, 'table_list_')
def create_temp_file_with_schemas(schema_list):
return create_temp_file_from_list(schema_list, 'schema_file_')
def validate_timestamp(timestamp):
if not timestamp:
return False
if len(timestamp) != 14:
return False
if timestamp.isdigit():
return True
else:
return False
def check_successful_dump(report_file_contents):
for line in report_file_contents:
if line.strip() == 'gp_dump utility finished successfully.':
return True
return False
def get_ddboost_backup_directory():
"""
    The gpddboost --show-config command prints the full ddboost
    configuration. The line starting with "Default Backup Directory:"
    tells us the backup directory configured with ddboost.
"""
cmd_str = 'gpddboost --show-config'
cmd = Command('Get the ddboost backup directory', cmd_str)
cmd.run(validateAfter=True)
config = cmd.get_results().stdout.splitlines()
for line in config:
if line.strip().startswith("Default Backup Directory:"):
ddboost_dir = line.split(':')[-1].strip()
if ddboost_dir is None or ddboost_dir == "":
logger.error("Expecting format: Default Backup Directory:<dir>")
raise Exception("DDBOOST default backup directory is not configured. Or the format of the line has changed")
return ddboost_dir
logger.error("Could not find Default Backup Directory:<dir> in stdout")
raise Exception("Output: %s from command %s not in expected format." % (config, cmd_str))
# May raise an exception for bad data (callers such as check_cdatabase_exists catch it)
def convert_reportfilename_to_cdatabasefilename(report_file, dump_prefix, ddboost=False):
(dirname, fname) = os.path.split(report_file)
timestamp = fname[-18:-4]
if ddboost:
dirname = get_ddboost_backup_directory()
dirname = "%s/%s" % (dirname, timestamp[0:8])
return "%s/%sgp_cdatabase_1_1_%s" % (dirname, dump_prefix, timestamp)
def get_lines_from_dd_file(filename):
cmd = Command('DDBoost copy of master dump file',
'gpddboost --readFile --from-file=%s'
% (filename))
cmd.run(validateAfter=True)
contents = cmd.get_results().stdout.splitlines()
return contents
def check_cdatabase_exists(dbname, report_file, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
try:
filename = convert_reportfilename_to_cdatabasefilename(report_file, dump_prefix, ddboost)
except Exception:
return False
if ddboost:
cdatabase_contents = get_lines_from_dd_file(filename)
elif netbackup_service_host:
restore_file_with_nbu(netbackup_service_host, netbackup_block_size, filename)
cdatabase_contents = get_lines_from_file(filename)
else:
cdatabase_contents = get_lines_from_file(filename)
dbname = escapeDoubleQuoteInSQLString(dbname, forceDoubleQuote=False)
for line in cdatabase_contents:
if 'CREATE DATABASE' in line:
dump_dbname = get_dbname_from_cdatabaseline(line)
if dump_dbname is None:
continue
else:
if dbname == checkAndRemoveEnclosingDoubleQuote(dump_dbname):
return True
return False
def get_dbname_from_cdatabaseline(line):
"""
Line format: CREATE DATABASE "DBNAME" WITH TEMPLATE = template0 ENCODING = 'UTF8' OWNER = gpadmin;
    To get the dbname:
    take the substring between the end of the leading 'CREATE DATABASE ' and
    the start of the first ' WITH TEMPLATE = ' that is not inside any double
    quotes. A double quote inside a name is escaped by doubling it, so exactly
    one ' WITH TEMPLATE = ' lies outside all quotes, which means the text
    before and after it must each contain an even number of double quotes.
    Note: the OWNER name can also contain special characters and double quotes.
"""
cdatabase = "CREATE DATABASE "
try:
start = line.index(cdatabase)
except Exception as e:
logger.error('Failed to find substring %s in line %s, error: %s' % (cdatabase, line, str(e)))
return None
with_template = " WITH TEMPLATE = "
all_positions = get_all_occurrences(with_template, line)
    if all_positions is not None:
for pos in all_positions:
pre_string = line[:pos]
post_string = line[pos + len(with_template):]
double_quotes_before = get_all_occurrences('"', pre_string)
double_quotes_after = get_all_occurrences('"', post_string)
num_double_quotes_before = 0 if double_quotes_before is None else len(double_quotes_before)
num_double_quotes_after = 0 if double_quotes_after is None else len(double_quotes_after)
if num_double_quotes_before % 2 == 0 and num_double_quotes_after % 2 == 0:
dbname = line[start+len(cdatabase) : pos]
return dbname
return None
def get_all_occurrences(substr, line):
    if substr is None or line is None or len(substr) > len(line):
        return None
    # Escape the substring so that regex metacharacters are matched literally
    return [m.start() for m in re.finditer('(?=%s)' % re.escape(substr), line)]
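# Illustrative sketch (not part of the original module): given
#   line = 'CREATE DATABASE "a"" WITH TEMPLATE = b" WITH TEMPLATE = template0;'
# the first ' WITH TEMPLATE = ' lies inside the quoted name (an odd number of
# '"' characters precedes it) and is skipped; the second occurrence has an
# even quote count on both sides, so get_dbname_from_cdatabaseline(line)
# returns '"a"" WITH TEMPLATE = b"'.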
def get_type_ts_from_report_file(dbname, report_file, backup_type, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
report_file_contents = get_lines_from_file(report_file)
if not check_successful_dump(report_file_contents):
return None
if not check_cdatabase_exists(dbname, report_file, dump_prefix, ddboost, netbackup_service_host, netbackup_block_size):
return None
if check_backup_type(report_file_contents, backup_type):
return get_timestamp_val(report_file_contents)
return None
def get_full_ts_from_report_file(dbname, report_file, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
return get_type_ts_from_report_file(dbname, report_file, 'Full', dump_prefix, ddboost, netbackup_service_host, netbackup_block_size)
def get_incremental_ts_from_report_file(dbname, report_file, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
return get_type_ts_from_report_file(dbname, report_file, 'Incremental', dump_prefix, ddboost, netbackup_service_host, netbackup_block_size)
def get_timestamp_val(report_file_contents):
for line in report_file_contents:
if line.startswith('Timestamp Key'):
timestamp = line.split(':')[-1].strip()
if not validate_timestamp(timestamp):
raise Exception('Invalid timestamp value found in report_file')
return timestamp
return None
def check_backup_type(report_file_contents, backup_type):
for line in report_file_contents:
if line.startswith('Backup Type'):
if line.split(':')[-1].strip() == backup_type:
return True
return False
def get_lines_from_file(fname, ddboost=None):
    """
    Do not strip whitespace here, as it may be part of the schema or table name.
    """
    if ddboost:
        return get_lines_from_dd_file(fname)
    content = []
    with open(fname) as fd:
        for line in fd:
            content.append(line.strip('\n'))
    return content
def write_lines_to_file(filename, lines):
"""
    Do not strip the lines; whitespace may be part of the schema or table name.
"""
with open(filename, 'w') as fp:
for line in lines:
fp.write("%s\n" % line.strip('\n'))
def verify_lines_in_file(fname, expected):
lines = get_lines_from_file(fname)
if lines != expected:
raise Exception("After writing file '%s' contents not as expected.\n"
"Lines read from file %s\n"
"Lines expected from file %s\n"
"Suspected IO error" % (fname, lines, expected))
def check_dir_writable(directory):
fp = None
try:
tmp_file = os.path.join(directory, 'tmp_file')
fp = open(tmp_file, 'w')
    except IOError as e:
        raise Exception('No write access permission on %s: %s' % (directory, e))
except Exception as e:
raise Exception(str(e))
finally:
if fp is not None:
fp.close()
if os.path.isfile(tmp_file):
os.remove(tmp_file)
def execute_sql(query, master_port, dbname):
    dburl = dbconn.DbURL(port=master_port, dbname=dbname)
    with dbconn.connect(dburl) as conn:
        cursor = execSQL(conn, query)
        return cursor.fetchall()
def get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp):
if backup_dir:
use_dir = backup_dir
elif master_data_dir:
use_dir = master_data_dir
else:
raise Exception("Can not locate backup directory with existing parameters")
if not timestamp:
raise Exception("Can not locate backup directory without timestamp")
if not validate_timestamp(timestamp):
raise Exception('Invalid timestamp: "%s"' % timestamp)
return "%s/%s/%s" % (use_dir, dump_dir, timestamp[0:8])
def generate_schema_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_schema" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_schema" % (use_dir, dump_prefix, timestamp)
def generate_report_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s.rpt" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s.rpt" % (use_dir, dump_prefix, timestamp)
def generate_increments_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_increments" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_increments" % (use_dir, dump_prefix, timestamp)
def generate_pgstatlastoperation_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_last_operation" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_last_operation" % (use_dir, dump_prefix, timestamp)
def generate_dirtytable_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_dirty_list" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_dirty_list" % (use_dir, dump_prefix, timestamp)
def generate_plan_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_restore_%s_plan" % (use_dir, dump_prefix, timestamp)
def generate_metadata_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_1_1_%s.gz" % (use_dir, dump_prefix, timestamp)
def generate_partition_list_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_table_list" % (use_dir, dump_prefix, timestamp)
def generate_ao_state_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_ao_state_file" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_ao_state_file" % (use_dir, dump_prefix, timestamp)
def generate_co_state_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_co_state_file" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_co_state_file" % (use_dir, dump_prefix, timestamp)
def generate_files_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%sgp_dump_%s_regular_files' % (use_dir, dump_prefix, timestamp)
def generate_pipes_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%sgp_dump_%s_pipes' % (use_dir, dump_prefix, timestamp)
def generate_master_config_filename(dump_prefix, timestamp):
return '%sgp_master_config_files_%s.tar' % (dump_prefix, timestamp)
def generate_segment_config_filename(dump_prefix, segid, timestamp):
return '%sgp_segment_config_files_0_%d_%s.tar' % (dump_prefix, segid, timestamp)
def generate_filter_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%s%s_filter' % (use_dir, generate_dbdump_prefix(dump_prefix), timestamp)
def generate_global_prefix(dump_prefix):
return '%sgp_global_1_1_' % (dump_prefix)
def generate_master_dbdump_prefix(dump_prefix):
return '%sgp_dump_1_1_' % (dump_prefix)
def generate_master_status_prefix(dump_prefix):
return '%sgp_dump_status_1_1_' % (dump_prefix)
def generate_seg_dbdump_prefix(dump_prefix):
return '%sgp_dump_0_' % (dump_prefix)
def generate_seg_status_prefix(dump_prefix):
return '%sgp_dump_status_0_' % (dump_prefix)
def generate_dbdump_prefix(dump_prefix):
return '%sgp_dump_' % (dump_prefix)
def generate_createdb_prefix(dump_prefix):
return '%sgp_cdatabase_1_1_' % (dump_prefix)
def generate_stats_prefix(dump_prefix):
return '%sgp_statistics_1_1_' % (dump_prefix)
def generate_createdb_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return '%s/%s/%s/%s%s' % (master_data_dir, dump_dir, timestamp[0:8], generate_createdb_prefix(dump_prefix), timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%s%s' % (use_dir, generate_createdb_prefix(dump_prefix), timestamp)
def get_dump_dirs(dump_dir_base, dump_dir):
dump_path = os.path.join(dump_dir_base, dump_dir)
if not os.path.isdir(dump_path):
return []
initial_list = os.listdir(dump_path)
initial_list = fnmatch.filter(initial_list, '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]')
dirnames = []
for d in initial_list:
pth = os.path.join(dump_path, d)
if os.path.isdir(pth):
dirnames.append(pth)
if len(dirnames) == 0:
return []
dirnames = sorted(dirnames, key=lambda x: int(os.path.basename(x)), reverse=True)
return dirnames
def get_latest_report_timestamp(backup_dir, dump_dir, dump_prefix):
dump_dirs = get_dump_dirs(backup_dir, dump_dir)
for d in dump_dirs:
latest = get_latest_report_in_dir(d, dump_prefix)
if latest:
return latest
return None
def get_latest_report_in_dir(report_dir, dump_prefix):
files = os.listdir(report_dir)
if len(files) == 0:
return None
dump_report_files = fnmatch.filter(files, '%sgp_dump_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].rpt' % dump_prefix)
if len(dump_report_files) == 0:
return None
dump_report_files = sorted(dump_report_files, key=lambda x: int(x.split('_')[-1].split('.')[0]), reverse=True)
return dump_report_files[0][-18:-4]
def get_timestamp_from_increments_filename(filename, dump_prefix):
fname = os.path.basename(filename)
parts = fname.split('_')
    # Expect 4 underscore-separated parts when there is no prefix,
    # or more than 4 parts when there is a prefix
if not ((not dump_prefix and len(parts) == 4) or (dump_prefix and len(parts) > 4)):
raise Exception("Invalid increments file '%s' passed to get_timestamp_from_increments_filename" % filename)
return parts[-2].strip()
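# For example, the increments file 'gp_dump_20130101010101_increments' splits
# into ['gp', 'dump', '20130101010101', 'increments'], so parts[-2] is the
# timestamp of the full backup that the increments belong to.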
def get_full_timestamp_for_incremental(backup_dir, dump_dir, dump_prefix, incremental_timestamp):
pattern = '%s/%s/[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]/%sgp_dump_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]_increments' % (backup_dir, dump_dir, dump_prefix)
increments_files = glob.glob(pattern)
for increments_file in increments_files:
increment_ts = get_lines_from_file(increments_file)
if incremental_timestamp in increment_ts:
full_timestamp = get_timestamp_from_increments_filename(increments_file, dump_prefix)
return full_timestamp
return None
# backup_dir will be either MDD or some other directory depending on call
def get_latest_full_dump_timestamp(dbname, backup_dir, dump_dir, dump_prefix, ddboost=False):
if not backup_dir:
raise Exception('Invalid None param to get_latest_full_dump_timestamp')
dump_dirs = get_dump_dirs(backup_dir, dump_dir)
for dump_dir in dump_dirs:
files = sorted(os.listdir(dump_dir))
if len(files) == 0:
logger.warn('Dump directory %s is empty' % dump_dir)
continue
dump_report_files = fnmatch.filter(files, '%sgp_dump_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].rpt' % dump_prefix)
if len(dump_report_files) == 0:
logger.warn('No dump report files found in dump directory %s' % dump_dir)
continue
dump_report_files = sorted(dump_report_files, key=lambda x: int(x.split('_')[-1].split('.')[0]), reverse=True)
for dump_report_file in dump_report_files:
logger.debug('Checking for latest timestamp in report file %s' % os.path.join(dump_dir, dump_report_file))
timestamp = get_full_ts_from_report_file(dbname, os.path.join(dump_dir, dump_report_file), dump_prefix, ddboost)
logger.debug('Timestamp = %s' % timestamp)
if timestamp is not None:
return timestamp
raise Exception('No full backup found for incremental')
def get_all_segment_addresses(master_port):
gparray = GpArray.initFromCatalog(dbconn.DbURL(port=master_port), utility=True)
addresses = [seg.getSegmentAddress() for seg in gparray.getDbList() if seg.isSegmentPrimary(current_role=True)]
return list(set(addresses))
def scp_file_to_hosts(host_list, filename, batch_default):
pool = WorkerPool(numWorkers=min(len(host_list), batch_default))
for hname in host_list:
pool.addCommand(Scp('Copying table_filter_file to %s' % hname,
srcFile=filename,
dstFile=filename,
dstHost=hname))
pool.join()
pool.haltWork()
pool.check_results()
def run_pool_command(host_list, cmd_str, batch_default, check_results=True):
pool = WorkerPool(numWorkers=min(len(host_list), batch_default))
for host in host_list:
cmd = Command(host, cmd_str, ctxt=REMOTE, remoteHost=host)
pool.addCommand(cmd)
pool.join()
pool.haltWork()
if check_results:
pool.check_results()
def check_funny_chars_in_names(names, is_full_qualified_name=True):
    """
    A '\\n' inside a table name makes it hard to specify the object name on the
    shell command line; this could be worked around by using a table file, but
    we currently read input line by line.
    A '!' inside a table name interferes with shell history expansion.
    A ',' is used to separate tables in the plan file during incremental restore.
    A '.' is currently used in fully qualified table names of the form schema.table.
    """
    if names:
        for name in names:
            if ('\t' in name or '\n' in name or '!' in name or ',' in name or
                    (is_full_qualified_name and name.count('.') > 1) or
                    (not is_full_qualified_name and name.count('.') > 0)):
                raise Exception('Name contains an invalid character "\\t" "\\n" "!" "," ".": "%s"' % name)
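# For example, 'public.my,table' is rejected because of the comma, and 'a.b.c'
# is rejected as a fully qualified name because it contains more than one dot.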
# Form and run the command line to back up an individual file with NBU
def backup_file_with_nbu(netbackup_service_host, netbackup_policy, netbackup_schedule, netbackup_block_size, netbackup_keyword, netbackup_filepath, hostname=None):
command_string = "cat %s | gp_bsa_dump_agent --netbackup-service-host %s --netbackup-policy %s --netbackup-schedule %s --netbackup-filename %s" % (netbackup_filepath, netbackup_service_host, netbackup_policy, netbackup_schedule, netbackup_filepath)
if netbackup_block_size is not None:
command_string += " --netbackup-block-size %s" % netbackup_block_size
if netbackup_keyword is not None:
command_string += " --netbackup-keyword %s" % netbackup_keyword
logger.debug("Command string inside 'backup_file_with_nbu': %s\n", command_string)
if hostname is None:
Command("dumping metadata files from master", command_string).run(validateAfter=True)
else:
Command("dumping metadata files from segment", command_string, ctxt=REMOTE, remoteHost=hostname).run(validateAfter=True)
logger.debug("Command ran successfully\n")
# Form and run the command line to restore an individual file with NBU
def restore_file_with_nbu(netbackup_service_host, netbackup_block_size, netbackup_filepath, hostname=None):
command_string = "gp_bsa_restore_agent --netbackup-service-host %s --netbackup-filename %s > %s" % (netbackup_service_host, netbackup_filepath, netbackup_filepath)
if netbackup_block_size is not None:
command_string += " --netbackup-block-size %s" % netbackup_block_size
logger.debug("Command string inside 'restore_file_with_nbu': %s\n", command_string)
if hostname is None:
Command("restoring metadata files to master", command_string).run(validateAfter=True)
else:
Command("restoring metadata files to segment", command_string, ctxt=REMOTE, remoteHost=hostname).run(validateAfter=True)
def check_file_dumped_with_nbu(netbackup_service_host, netbackup_filepath, hostname=None):
command_string = "gp_bsa_query_agent --netbackup-service-host %s --netbackup-filename %s" % (netbackup_service_host, netbackup_filepath)
logger.debug("Command string inside 'check_file_dumped_with_nbu': %s\n", command_string)
if hostname is None:
cmd = Command("Querying NetBackup server to check for dumped file", command_string)
else:
cmd = Command("Querying NetBackup server to check for dumped file", command_string, ctxt=REMOTE, remoteHost=hostname)
cmd.run(validateAfter=True)
    return cmd.get_results().stdout.strip() == netbackup_filepath
def generate_global_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, dump_date, timestamp):
if backup_dir is not None:
dir_path = backup_dir
else:
dir_path = master_data_dir
return os.path.join(dir_path, dump_dir, dump_date, "%s%s" % (generate_global_prefix(dump_prefix), timestamp))
def generate_cdatabase_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_cdatabase_1_1_%s" % (use_dir, dump_prefix, timestamp)
def generate_stats_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, dump_date, timestamp):
if backup_dir is not None:
dir_path = backup_dir
else:
dir_path = master_data_dir
return os.path.join(dir_path, dump_dir, dump_date, "%s%s" % (generate_stats_prefix(dump_prefix), timestamp))
def get_full_timestamp_for_incremental_with_nbu(dump_prefix, incremental_timestamp, netbackup_service_host, netbackup_block_size):
if dump_prefix:
get_inc_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=%sgp_dump_*_increments" % (netbackup_service_host, dump_prefix)
else:
get_inc_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=gp_dump_*_increments" % netbackup_service_host
cmd = Command("Query NetBackup server to get the list of increments files backed up", get_inc_files_cmd)
cmd.run(validateAfter=True)
files_list = cmd.get_results().stdout.split('\n')
for line in files_list:
fname = line.strip()
restore_file_with_nbu(netbackup_service_host, netbackup_block_size, fname)
contents = get_lines_from_file(fname)
if incremental_timestamp in contents:
full_timestamp = get_timestamp_from_increments_filename(fname, dump_prefix)
return full_timestamp
return None
def get_latest_full_ts_with_nbu(dbname, backup_dir, dump_prefix, netbackup_service_host, netbackup_block_size):
if dump_prefix:
get_rpt_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=%sgp_dump_*.rpt" % (netbackup_service_host, dump_prefix)
else:
get_rpt_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=gp_dump_*.rpt" % netbackup_service_host
cmd = Command("Query NetBackup server to get the list of report files backed up", get_rpt_files_cmd)
cmd.run(validateAfter=True)
files_list = cmd.get_results().stdout.split('\n')
for line in files_list:
fname = line.strip()
if fname == '':
continue
if backup_dir not in fname:
continue
if ("No object matched the specified predicate" in fname) or ("No objects of the format" in fname):
return None
restore_file_with_nbu(netbackup_service_host, netbackup_block_size, fname)
timestamp = get_full_ts_from_report_file(dbname, fname, dump_prefix, netbackup_service_host=netbackup_service_host, netbackup_block_size=netbackup_block_size)
logger.debug('Timestamp = %s' % timestamp)
if timestamp is not None:
return timestamp
raise Exception('No full backup found for given incremental on the specified NetBackup server')
def getRows(dbname, exec_sql):
with dbconn.connect(dbconn.DbURL(dbname=dbname)) as conn:
curs = dbconn.execSQL(conn, exec_sql)
results = curs.fetchall()
return results
def check_schema_exists(schema_name, dbname):
schemaname = pg.escape_string(schema_name)
schema_check_sql = "select * from pg_catalog.pg_namespace where nspname='%s';" % schemaname
    return len(getRows(dbname, schema_check_sql)) >= 1
def isDoubleQuoted(string):
if len(string) > 2 and string[0] == '"' and string[-1] == '"':
return True
return False
def checkAndRemoveEnclosingDoubleQuote(string):
if isDoubleQuoted(string):
string = string[1 : len(string) - 1]
return string
def checkAndAddEnclosingDoubleQuote(string):
if not isDoubleQuoted(string):
string = '"' + string + '"'
return string
def escapeDoubleQuoteInSQLString(string, forceDoubleQuote=True):
"""
    Accept a raw database, schema, or table name; escape any double quotes
    inside the name, and add enclosing double quotes by default.
"""
string = string.replace('"', '""')
if forceDoubleQuote:
string = '"' + string + '"'
return string
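# For example, escapeDoubleQuoteInSQLString('my"table') returns '"my""table"';
# removeEscapingDoubleQuoteInSQLString (below) reverses the escaping.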
def removeEscapingDoubleQuoteInSQLString(string, forceDoubleQuote=True):
"""
Remove the escaping double quote in database/schema/table name.
"""
if string is None:
return string
string = string.replace('""', '"')
if forceDoubleQuote:
string = '"' + string + '"'
return string
def formatSQLString(rel_file, isTableName=False):
"""
    Read the fully qualified schema or table names; if the entries are table
    names, split each one into schema and table, and properly escape any
    double quotes inside the names.
"""
relnames = []
if rel_file and os.path.exists(rel_file):
with open(rel_file, 'r') as fr:
lines = fr.read().strip('\n').split('\n')
for line in lines:
if isTableName:
schema, table = split_fqn(line)
schema = escapeDoubleQuoteInSQLString(schema)
table = escapeDoubleQuoteInSQLString(table)
relnames.append(schema + '.' + table)
else:
schema = escapeDoubleQuoteInSQLString(line)
relnames.append(schema)
if len(relnames) > 0:
tmp_file = create_temp_file_from_list(relnames, os.path.basename(rel_file))
return tmp_file
def split_fqn(fqn_name):
"""
    Split a fully qualified table name into schema and table on the '.' separator.
"""
try:
schema, table = fqn_name.split('.')
except Exception as e:
logger.error("Failed to split name %s into schema and table, please check the format is schema.table" % fqn_name)
raise Exception('%s' % str(e))
return schema, table
def remove_file_on_segments(master_port, filename, batch_default=DEFAULT_NUM_WORKERS):
addresses = get_all_segment_addresses(master_port)
try:
cmd = 'rm -f %s' % filename
run_pool_command(addresses, cmd, batch_default, check_results=False)
except Exception as e:
logger.error("cleaning up file failed: %s" % e.__str__())
| 43.601523 | 252 | 0.703039 | 4,726 | 34,358 | 4.82628 | 0.100931 | 0.040773 | 0.006971 | 0.009295 | 0.533824 | 0.467666 | 0.399667 | 0.342496 | 0.305406 | 0.279363 | 0 | 0.007576 | 0.200943 | 34,358 | 787 | 253 | 43.656925 | 0.823232 | 0.067524 | 0 | 0.292517 | 0 | 0.017007 | 0.157926 | 0.050244 | 0.001701 | 0 | 0 | 0 | 0 | 1 | 0.137755 | false | 0.001701 | 0.022109 | 0.02381 | 0.346939 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
684ce6b1bc8b5970f8856fccafd6154a767ac668 | 1,082 | py | Python | src/hyapi/decayingcounter.py | Neefs/hyapi | 7d5ccbe649b27f54ea45d8291c9bd35d66ff5b3f | [
"MIT"
] | 1 | 2021-06-29T03:34:25.000Z | 2021-06-29T03:34:25.000Z | src/hyapi/decayingcounter.py | Neefs/hyapi | 7d5ccbe649b27f54ea45d8291c9bd35d66ff5b3f | [
"MIT"
] | 1 | 2021-11-27T01:26:19.000Z | 2021-11-28T01:49:19.000Z | src/hyapi/decayingcounter.py | Neefs/hyapi | 7d5ccbe649b27f54ea45d8291c9bd35d66ff5b3f | [
"MIT"
] | 1 | 2021-11-28T00:48:57.000Z | 2021-11-28T00:48:57.000Z | """
An object that tracks X number of actions within N seconds
Author: Preocts <Preocts#8196>
"""
import time
from typing import List
class DecayingCounter:
"""Tracks number of events within a given life_span of seconds"""
def __init__(self, max_count: int, life_span: int) -> None:
"""Define the max_count number of events allowed within life_span seconds"""
self._max = max_count
self._events: List[float] = []
self._life_span = life_span
@property
def count(self) -> int:
"""Returns count of events"""
self._clean()
return len(self._events)
def inc(self) -> bool:
"""Increments event count by 1 unless max is reached, then returns false"""
if self.count < self._max:
self._events.append(time.time())
return True
else:
return False
def _clean(self) -> None:
"""Removes expired events from front of list"""
while len(self._events) and (time.time() - self._events[0]) > self._life_span:
self._events.pop(0)
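if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module): allow at most
    # three events within any rolling one-second window.
    counter = DecayingCounter(max_count=3, life_span=1)
    print([counter.inc() for _ in range(4)])  # [True, True, True, False]
    time.sleep(1.1)  # let the recorded events decay
    print(counter.inc())  # True again once the old events have expired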
| 29.243243 | 86 | 0.621072 | 143 | 1,082 | 4.524476 | 0.447552 | 0.074189 | 0.043277 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008895 | 0.272643 | 1,082 | 36 | 87 | 30.055556 | 0.813215 | 0.329945 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.1 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
684d8263bff7794d036a79aa8aadd1d5284d66ff | 644 | py | Python | tapis_cli/clients/services/taccapis/v2/bearer_refresh.py | bpachev/tapis-cli | c3128fb5b63ef74e06b737bbd95ef28fb24f0d32 | [
"BSD-3-Clause"
] | 8 | 2020-10-18T22:48:23.000Z | 2022-01-10T09:16:14.000Z | tapis_cli/clients/services/taccapis/v2/bearer_refresh.py | bpachev/tapis-cli | c3128fb5b63ef74e06b737bbd95ef28fb24f0d32 | [
"BSD-3-Clause"
] | 238 | 2019-09-04T14:37:54.000Z | 2020-04-15T16:24:24.000Z | tapis_cli/clients/services/taccapis/v2/bearer_refresh.py | bpachev/tapis-cli | c3128fb5b63ef74e06b737bbd95ef28fb24f0d32 | [
"BSD-3-Clause"
] | 5 | 2019-09-20T04:23:49.000Z | 2020-01-16T17:45:14.000Z | from .bearer import TaccApisBearer
__all__ = ['TaccApisBearerRefresh']
class TaccApisBearerRefresh(TaccApisBearer):
"""Base class for Tapis API commands both an access token and a refresh token
"""
def add_common_parser_arguments(self, parser):
parser = super(TaccApisBearer,
self).add_common_parser_arguments(parser)
parser.add_argument(
'-z',
'--refresh-token',
dest='refresh_token',
type=str,
help="{0} {1}".format(self.constants.PLATFORM,
self.constants.REFRESH_TOKEN))
return parser
| 32.2 | 81 | 0.600932 | 63 | 644 | 5.936508 | 0.603175 | 0.128342 | 0.080214 | 0.128342 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004474 | 0.305901 | 644 | 19 | 82 | 33.894737 | 0.832215 | 0.114907 | 0 | 0 | 0 | 0 | 0.103757 | 0.037567 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.071429 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
684e621b761c1a66ff0516291a3ccdf10baa8980 | 1,919 | py | Python | cook/core/system.py | jachris/cook | dd451e11f9aef05ba54bd57cf03e941526ffceef | [
"MIT"
] | 130 | 2017-07-27T15:29:50.000Z | 2021-10-04T22:10:23.000Z | cook/core/system.py | jachris/cook | dd451e11f9aef05ba54bd57cf03e941526ffceef | [
"MIT"
] | 25 | 2017-07-27T19:54:25.000Z | 2020-02-22T16:15:06.000Z | cook/core/system.py | jachris/cook | dd451e11f9aef05ba54bd57cf03e941526ffceef | [
"MIT"
] | 2 | 2017-08-02T02:52:28.000Z | 2017-08-03T06:27:31.000Z | import os
import shutil
import sys
from os.path import normpath, relpath, join
from . import log, misc
build_dir = None
intermediate_dir = None
temporary_dir = None
def initialize(destination):
global build_dir, intermediate_dir, temporary_dir
build_dir = os.path.abspath(destination)
cook = os.path.join(build_dir, '.cook/')
intermediate_dir = os.path.join(cook, 'intermediate/')
temporary_dir = os.path.join(cook, 'temporary/')
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
elif os.listdir(build_dir) and not os.path.isdir(cook):
log.error(
'The build directory "{}" is not empty and does not seem to be '
'the location of a previous build.'.format(build_dir)
)
sys.exit(1)
if not os.path.isdir(cook):
os.mkdir(cook)
if not os.path.isdir(intermediate_dir):
os.mkdir(intermediate_dir)
if os.path.isdir(temporary_dir):
shutil.rmtree(temporary_dir)
os.mkdir(temporary_dir)
def build(path_or_paths):
if isinstance(path_or_paths, misc.Marked):
return path_or_paths
elif isinstance(path_or_paths, str):
return misc.Marked(normpath(relpath(join(build_dir, path_or_paths))))
else:
return list(map(build, path_or_paths))
def intermediate(path_or_paths):
"""..."""
if isinstance(path_or_paths, misc.Marked):
return path_or_paths
elif isinstance(path_or_paths, str):
return misc.Marked(normpath(relpath(
join(intermediate_dir, path_or_paths))))
else:
return list(map(intermediate, path_or_paths))
def temporary(path_or_paths):
"""..."""
if isinstance(path_or_paths, misc.Marked):
return path_or_paths
elif isinstance(path_or_paths, str):
return misc.Marked(normpath(join(temporary_dir, path_or_paths)))
else:
return list(map(temporary, path_or_paths))
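# Illustrative sketch (hypothetical paths): after initialize('out'),
# build('app.o') returns a Marked path pointing into out/ (relative to the
# current working directory), intermediate('app.o') points into
# out/.cook/intermediate/, and temporary('t.tmp') returns an absolute path
# under out/.cook/temporary/.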
| 28.220588 | 77 | 0.677957 | 267 | 1,919 | 4.655431 | 0.209738 | 0.086887 | 0.159292 | 0.101368 | 0.448914 | 0.365245 | 0.365245 | 0.365245 | 0.290426 | 0.290426 | 0 | 0.000663 | 0.214174 | 1,919 | 67 | 78 | 28.641791 | 0.823607 | 0.003648 | 0 | 0.235294 | 0 | 0 | 0.065229 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078431 | false | 0 | 0.098039 | 0 | 0.352941 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6850578562aad7851e4f644d17162e3ebbd674ab | 882 | py | Python | tests/plugins/reject.py | DanielWeigl/lightning | 3852704c5c76ddc6e7eca97ffd6fd406dca9cf6e | [
"MIT"
] | 1 | 2021-01-28T12:23:50.000Z | 2021-01-28T12:23:50.000Z | tests/plugins/reject.py | DanielWeigl/lightning | 3852704c5c76ddc6e7eca97ffd6fd406dca9cf6e | [
"MIT"
] | null | null | null | tests/plugins/reject.py | DanielWeigl/lightning | 3852704c5c76ddc6e7eca97ffd6fd406dca9cf6e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Simple plugin to test the connected_hook.
It can mark some node_ids as rejects and it'll check for each
connection if it should be disconnected immediately or if it can
continue.
"""
from lightning import Plugin
plugin = Plugin()
@plugin.hook('peer_connected')
def on_connected(peer, plugin):
if peer['id'] in plugin.reject_ids:
print("{} is in reject list, disconnecting".format(peer['id']))
return {'result': 'disconnect'}
print("{} is allowed".format(peer['id']))
return {'result': 'continue'}
@plugin.init()
def init(configuration, options, plugin):
plugin.reject_ids = []
@plugin.method('reject')
def reject(node_id, plugin):
"""Mark a given node_id as reject for future connections.
"""
print("Rejecting connections from {}".format(node_id))
plugin.reject_ids.append(node_id)
plugin.run()
| 22.615385 | 71 | 0.69161 | 123 | 882 | 4.869919 | 0.495935 | 0.080134 | 0.075125 | 0.0601 | 0.080134 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00137 | 0.172336 | 882 | 38 | 72 | 23.210526 | 0.819178 | 0.295918 | 0 | 0 | 0 | 0 | 0.21875 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0 | 0.058824 | 0 | 0.352941 | 0.176471 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68506bc318a944d2f67d31be5502a2c63f7eb33e | 1,182 | py | Python | src/matcher.py | salu133445/lakh-pianoroll-dataset | a8a8582254ff5f3225e792fa1ba1f939f0423080 | [
"MIT"
] | 59 | 2018-05-03T17:17:26.000Z | 2022-03-21T09:54:07.000Z | src/matcher.py | salu133445/lakh-pianoroll-dataset | a8a8582254ff5f3225e792fa1ba1f939f0423080 | [
"MIT"
] | 4 | 2018-04-25T13:15:23.000Z | 2021-01-24T11:16:50.000Z | src/matcher.py | salu133445/lakh-pianoroll-dataset | a8a8582254ff5f3225e792fa1ba1f939f0423080 | [
"MIT"
] | 8 | 2019-05-15T00:46:10.000Z | 2022-03-28T11:07:14.000Z | """This script writes the IDs of songs that have been matched to Million Song
Dataset (MSD) to a file.
"""
import argparse
import os.path
import json
def parse_args():
"""Return the parsed command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('filepath', help="path to save the resulting list")
parser.add_argument('src', help="root path to the source dataset")
parser.add_argument('match_scores_path',
help="path to the match scores file")
args = parser.parse_args()
return args.filepath, args.src, args.match_scores_path
def main():
"""Main function."""
filepath, src, match_scores_path = parse_args()
with open(match_scores_path) as f:
match_score_dict = json.load(f)
with open(filepath, 'w') as f:
for msd_id in match_score_dict:
for midi_md5 in match_score_dict[msd_id]:
npz_path = os.path.join(src, midi_md5[0], midi_md5 + '.npz')
if os.path.isfile(npz_path):
f.write("{} {}\n".format(midi_md5, msd_id))
print("Matched ID list successfully saved.")
if __name__ == "__main__":
main()
| 32.833333 | 77 | 0.647208 | 168 | 1,182 | 4.333333 | 0.428571 | 0.075549 | 0.082418 | 0.043956 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005549 | 0.237733 | 1,182 | 35 | 78 | 33.771429 | 0.802442 | 0.13198 | 0 | 0 | 0 | 0 | 0.175421 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.125 | 0 | 0.25 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6858833d6fbe9b788d26aca5d8bd33fd58af7816 | 1,134 | py | Python | pytak/__init__.py | sniporbob/pytak | e8a1478c88c0b9597933e30f78a56b78bcb6dfeb | [
"Apache-2.0"
] | null | null | null | pytak/__init__.py | sniporbob/pytak | e8a1478c88c0b9597933e30f78a56b78bcb6dfeb | [
"Apache-2.0"
] | null | null | null | pytak/__init__.py | sniporbob/pytak | e8a1478c88c0b9597933e30f78a56b78bcb6dfeb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python Team Awareness Kit (PyTAK) Module.
"""
Python Team Awareness Kit (PyTAK) Module.
~~~~
:author: Greg Albrecht W2GMD <oss@undef.net>
:copyright: Copyright 2021 Orion Labs, Inc.
:license: Apache License, Version 2.0
:source: <https://github.com/ampledata/pytak>
"""
from .constants import (LOG_LEVEL, LOG_FORMAT, DEFAULT_COT_PORT, # NOQA
DEFAULT_BACKOFF, DEFAULT_SLEEP,
DEFAULT_ATAK_PORT, DEFAULT_BROADCAST_PORT,
DOMESTIC_AIRLINES, DEFAULT_HEX_RANGES,
DEFAULT_COT_STALE, ICAO_RANGES, DEFAULT_FIPS_CIPHERS)
from .classes import (Worker, EventWorker, MessageWorker, # NOQA
EventTransmitter, EventReceiver, TCClient)
from .functions import (split_host, udp_client, parse_cot_url, # NOQA
faa_to_cot_type, multicast_client, eventworker_factory,
protocol_factory)
__author__ = "Greg Albrecht W2GMD <oss@undef.net>"
__copyright__ = "Copyright 2021 Orion Labs, Inc."
__license__ = "Apache License, Version 2.0"
| 32.4 | 79 | 0.660494 | 128 | 1,134 | 5.539063 | 0.59375 | 0.028209 | 0.053597 | 0.062059 | 0.366714 | 0.366714 | 0.273625 | 0.273625 | 0.273625 | 0.273625 | 0 | 0.017544 | 0.246032 | 1,134 | 34 | 80 | 33.352941 | 0.811696 | 0.283069 | 0 | 0 | 0 | 0 | 0.116395 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.230769 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6858a5b4ff2066c3af4082b0fd9add216e0cf57d | 4,420 | py | Python | utils/sync/sync_files.py | saridut/BROWNPAK | 8a3aef5f393826e8b5d8c374a190e6e6e1140221 | [
"MIT"
] | null | null | null | utils/sync/sync_files.py | saridut/BROWNPAK | 8a3aef5f393826e8b5d8c374a190e6e6e1140221 | [
"MIT"
] | null | null | null | utils/sync/sync_files.py | saridut/BROWNPAK | 8a3aef5f393826e8b5d8c374a190e6e6e1140221 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#BE CAREFUL WITH THIS ROUTINE.
#
#Syncs fn_traj and fn_stats to make sure that the last record corresponds to
#nts <= nts_revive (read from fn_revive).
import argparse
import os
import struct
parser = argparse.ArgumentParser()
parser.add_argument("fr", help="Name of the revive file")
parser.add_argument("--ft", help="Name of the trajectory file")
parser.add_argument("--fs", help="Name of the statistics file")
args = parser.parse_args()
#Name of revive file
assert os.path.exists(args.fr)
fn_revive = args.fr
print('fn_revive = %s'%fn_revive)
#Name of trajectory file.
if args.ft:
    assert os.path.exists(args.ft)
fn_traj = args.ft
print('fn_traj = %s'%fn_traj)
else:
fn_traj = None
#Name of stats file.
if args.fs:
assert os.path.exists(args.fs)
fn_stats = args.fs
print('fn_stats = %s'%fn_stats)
else:
fn_stats = None
with open(fn_revive, 'rb') as fh_revive:
fh_revive.read(4) #This is leql (4 bytes), throw away
buf = fh_revive.read(8) #This is nts, we need to decode this as int64 (long long).
ubuf = struct.unpack('<q', buf)
nts_revive = ubuf[0]
print('nts_revive = %d'%nts_revive)
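# For example, the 8 bytes b'\x0f\x27\x00\x00\x00\x00\x00\x00' decode via
# struct.unpack('<q', ...) to (9999,): a little-endian 64-bit integer.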
if fn_traj:
print('Checking if fn_traj is good.')
with open(fn_traj, 'rb') as fh_traj:
#Get header size
buf = fh_traj.read(4)
ubuf = struct.unpack('<i', buf)
hs = ubuf[0] #Header size
print('fn_traj: Header size (bytes) = %d'%hs)
#Get frame size
buf = fh_traj.read(4)
ubuf = struct.unpack('<i', buf)
fs = ubuf[0] #Frame size
print('fn_traj: Frame size (bytes) = %d'%fs)
#Number of frames
num_frames = (os.path.getsize(fn_traj)-hs)//fs
print('fn_traj: Number of frames = %d'%num_frames)
if num_frames == 0:
#No frames, hence no need to truncate
trunc_traj = False
print('fn_traj is OK.')
else:
#Check if the last frame has nts <= nts_revive
trunc_traj = True
offset = hs + (num_frames-1)*fs
fh_traj.seek(offset, 0)
buf = fh_traj.read(8)
ubuf = struct.unpack('<q', buf)
nts = ubuf[0]
print('fn_traj: Last frame, nts = %d'%nts)
if nts <= nts_revive:
#Last frame has nts <= nts_revive, hence no need to truncate
trunc_traj = False
print('fn_traj is OK.')
else:
print('fn_traj is bad, will truncate.')
#Truncate fn_traj
if trunc_traj:
#Find out offset for truncation. num_frames is > 0.
fh_traj = open(fn_traj, 'r+b')
    #Loop backwards over the number of frames. The lower bound is 1 in case
#the file needs to be truncated just after the header.
iframe = num_frames
while iframe > 0:
if (iframe == 1) or (nts <= nts_revive):
#Truncate the file after current position.
print('Truncating fn_traj')
fh_traj.truncate()
#Close the file.
fh_traj.close()
break
offset = hs + (iframe-1)*fs
fh_traj.seek(offset, 0)
buf = fh_traj.read(8)
ubuf = struct.unpack('<q', buf)
nts = ubuf[0]
iframe -= 1
if fn_stats:
#Check fn_stats
print('Checking if fn_stats is good.')
with open(fn_stats,'r') as fh_stats:
all_lines = fh_stats.readlines()
hdr = all_lines[0].rstrip('\n')
num_hdr_cols = len(hdr.split())
#Check last line
records = all_lines[-1].rstrip('\n').split()
num_cols = len(records)
if num_cols == num_hdr_cols:
nts = int(records[0])
if nts <= nts_revive:
print('fn_stats OK.')
create_fs = False
else:
print('fn_stats is bad, will chop off.')
create_fs = True
else:
print('fn_stats is bad, will chop off.')
create_fs = True
if create_fs:
with open(fn_stats,'w') as fh_stats:
fh_stats.write(hdr+'\n')
for line in all_lines[1:]:
records = line.rstrip('\n').split()
num_cols = len(records)
if num_cols == num_hdr_cols:
nts = int(records[0])
if nts <= nts_revive:
fh_stats.write(line)
| 31.347518 | 86 | 0.56267 | 633 | 4,420 | 3.777251 | 0.224329 | 0.047679 | 0.036805 | 0.021748 | 0.30322 | 0.280636 | 0.250941 | 0.229193 | 0.229193 | 0.229193 | 0 | 0.009705 | 0.323982 | 4,420 | 140 | 87 | 31.571429 | 0.790495 | 0.177376 | 0 | 0.372549 | 0 | 0 | 0.138227 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 1 | 0 | false | 0 | 0.029412 | 0 | 0.029412 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6859329d31323a1a762b92df9638a4ced3ea723d | 5,346 | py | Python | logger/logger.py | foo290/Ai-ChessEngine | 2dd897dd356e8e63d52bc50d239752598b2d359b | [
"MIT"
] | null | null | null | logger/logger.py | foo290/Ai-ChessEngine | 2dd897dd356e8e63d52bc50d239752598b2d359b | [
"MIT"
] | null | null | null | logger/logger.py | foo290/Ai-ChessEngine | 2dd897dd356e8e63d52bc50d239752598b2d359b | [
"MIT"
] | null | null | null | import logging
import os
import sys
from logger import configs as cfg
__all__ = [
'get_custom_logger'
]
class CustomFormatter(logging.Formatter):
"""
A custom formatter class which set colors and format.
"""
FORMATS = {
logging.DEBUG: cfg.LEVEL_FORMATS["DEBUG"],
logging.INFO: cfg.LEVEL_FORMATS["INFO"],
logging.WARNING: cfg.LEVEL_FORMATS["WARNING"],
logging.ERROR: cfg.LEVEL_FORMATS["ERROR"],
logging.CRITICAL: cfg.LEVEL_FORMATS["CRITICAL"],
}
def format(self, record) -> str:
log_fmt = self.FORMATS.get(record.levelno)
formatter = logging.Formatter(log_fmt, datefmt=cfg.DATE_FORMAT)
return formatter.format(record)
def _get_valid_filepath(file_name: str, file_path: str) -> str:
"""
This function creates the directory and file (if not exist) for log file.
"""
if not file_name.endswith('.log'):
        # Append the .log extension if it is missing
file_name: str = f"{file_name}.log"
full_path: str = os.path.join(file_path, file_name)
try:
if os.path.exists(full_path):
# If path is valid, means file is already there, return path
return full_path
if not os.path.exists(file_path):
# This block creates folder (if specified and not exist).
os.makedirs(file_path)
if not os.path.exists(full_path):
# This block create empty log file by given name in specified directory.
open(full_path, 'w').close()
return full_path
except (OSError, PermissionError):
return os.path.join(cfg.BACKUP_FILE_PATH, file_name)
def _get_local_file_handler(valid_file_path, level, formatter) -> logging.FileHandler:
local_file_handler = logging.FileHandler(valid_file_path)
local_file_handler.setLevel(level)
local_file_handler.setFormatter(formatter)
return local_file_handler
def _get_global_file_handler(valid_file_path, level, formatter) -> logging.FileHandler:
global_file_handler = logging.FileHandler(valid_file_path)
global_file_handler.setLevel(level)
global_file_handler.setFormatter(formatter)
return global_file_handler
class _Logger(logging.Logger):
def __init__(self, name):
super().__init__(name)
def ylog(self, msg, prefix="LOG"):
print(f"{cfg.high_yellow}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def glog(self, msg, prefix="LOG"):
print(f"{cfg.high_green}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def plog(self, msg, prefix="LOG"):
print(f"{cfg.high_purple}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def clog(self, msg, prefix="LOG"):
print(f"{cfg.high_cyan}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def blog(self, msg, prefix="LOG"):
print(f"{cfg.high_blue}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def rlog(self, msg, prefix="LOG"):
print(f"{cfg.high_red}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def wlog(self, msg, prefix="LOG"):
print(f"{cfg.high_white}[{prefix.upper()}] [{self.name}] : {msg}{cfg.reset}")
def get_custom_logger(name, level=logging.DEBUG, console_output: bool = True,
make_combined_logs: bool = cfg.COMBINED_LOGGING,
make_individual_logs: bool = cfg.INDIVIDUAL_LOGGING
) -> logging.Logger:
"""
This function is supposed to be called whenever you want to make a logger.
    :param name: name of the module; set it to __name__
    :param level: logging level; the default is DEBUG.
    :param console_output: If True, will also display logs on the console.
    :param make_combined_logs: If True, will write a single file collecting logs from every module of the project.
    :param make_individual_logs: If True, each log file will be named after the python module that produces it.
    :return: an instance of the Logger class.
"""
formatter = CustomFormatter()
_logger = _Logger(name)
log_file_path: str = cfg.LOG_FILE_PATH
if log_file_path == '' or log_file_path is None or log_file_path == '.':
# if absolute path is not given, File will be created in backup location specified.
log_file_path = cfg.BACKUP_FILE_PATH
# Adding file handlers
if make_combined_logs:
# Here, the name of the file is same for each instance
combined_file_name: str = cfg.COMBINED_LOG_FILE_NAME
valid_file_path: str = _get_valid_filepath(combined_file_name, log_file_path)
global_file_handler = _get_global_file_handler(valid_file_path, level, formatter)
_logger.addHandler(global_file_handler)
if make_individual_logs:
# Here, the name of the file is same as name of the python module logger is used in.
individual_file_name: str = name
valid_file_path: str = _get_valid_filepath(individual_file_name, log_file_path)
local_file_handler = _get_local_file_handler(valid_file_path, level, formatter)
_logger.addHandler(local_file_handler)
if console_output:
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setLevel(level)
stream_handler.setFormatter(formatter)
_logger.addHandler(stream_handler)
return _logger
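if __name__ == "__main__":
    # Minimal usage sketch (assumes the settings shipped in logger/configs.py):
    log = get_custom_logger(__name__, console_output=True)
    log.info("standard logging methods still work")
    log.glog("extra colored helper provided by _Logger")  # printed in green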
| 34.941176 | 112 | 0.676581 | 729 | 5,346 | 4.721536 | 0.218107 | 0.051133 | 0.037188 | 0.032539 | 0.33527 | 0.27484 | 0.262638 | 0.238234 | 0.091226 | 0 | 0 | 0 | 0.216236 | 5,346 | 152 | 113 | 35.171053 | 0.82148 | 0.206322 | 0 | 0.023529 | 0 | 0.082353 | 0.133221 | 0.056649 | 0 | 0 | 0 | 0 | 0 | 1 | 0.152941 | false | 0 | 0.047059 | 0 | 0.317647 | 0.082353 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6859420a0a97ca55c131258804eeb821dc7f5c24 | 3,022 | py | Python | espei/mcmc.py | jwsiegel2510/ESPEI | cb72f676138c96d560d8b83cea6b7ca2da100078 | [
"MIT"
] | null | null | null | espei/mcmc.py | jwsiegel2510/ESPEI | cb72f676138c96d560d8b83cea6b7ca2da100078 | [
"MIT"
] | null | null | null | espei/mcmc.py | jwsiegel2510/ESPEI | cb72f676138c96d560d8b83cea6b7ca2da100078 | [
"MIT"
] | null | null | null | """
Legacy module for running MCMC in ESPEI
"""
import warnings
from espei.utils import database_symbols_to_fit
from espei.optimizers.opt_mcmc import EmceeOptimizer
def mcmc_fit(dbf, datasets, iterations=1000, save_interval=1, chains_per_parameter=2,
chain_std_deviation=0.1, scheduler=None, tracefile=None, probfile=None,
restart_trace=None, deterministic=True, prior=None, mcmc_data_weights=None):
"""
Run MCMC via the EmceeOptimizer class
Parameters
----------
dbf : Database
A pycalphad Database to fit with symbols to fit prefixed with `VV`
followed by a number, e.g. `VV0001`
datasets : PickleableTinyDB
A database of single- and multi-phase data to fit
iterations : int
Number of trace iterations to calculate in MCMC. Default is 1000 iterations.
    save_interval : int
interval of iterations to save the tracefile and probfile
chains_per_parameter : int
number of chains for each parameter. Must be an even integer greater or
equal to 2. Defaults to 2.
chain_std_deviation : float
standard deviation of normal for parameter initialization as a fraction
of each parameter. Must be greater than 0. Default is 0.1, which is 10%.
scheduler : callable
Scheduler to use with emcee. Must implement a map method.
tracefile : str
filename to store the trace with NumPy.save. Array has shape
(chains, iterations, parameters)
probfile : str
filename to store the log probability with NumPy.save. Has shape (chains, iterations)
restart_trace : np.ndarray
ndarray of the previous trace. Should have shape (chains, iterations, parameters)
deterministic : bool
If True, the emcee sampler will be seeded to give deterministic sampling
draws. This will ensure that the runs with the exact same database,
chains_per_parameter, and chain_std_deviation (or restart_trace) will
produce exactly the same results.
prior : str
Prior to use to generate priors. Defaults to 'zero', which keeps
backwards compatibility. Can currently choose 'normal', 'uniform',
'triangular', or 'zero'.
mcmc_data_weights : dict
Dictionary of weights for each data type, e.g. {'ZPF': 20, 'HM': 2}
"""
warnings.warn("The mcmc convenience function will be removed in ESPEI 0.8")
all_symbols = database_symbols_to_fit(dbf)
optimizer = EmceeOptimizer(dbf, scheduler=scheduler)
optimizer.save_interval = save_interval
optimizer.fit(all_symbols, datasets, prior=prior, iterations=iterations,
chains_per_parameter=chains_per_parameter,
chain_std_deviation=chain_std_deviation,
deterministic=deterministic, restart_trace=restart_trace,
tracefile=tracefile, probfile=probfile,
mcmc_data_weights=mcmc_data_weights)
optimizer.commit()
return optimizer.dbf, optimizer.sampler
| 44.441176 | 93 | 0.701191 | 392 | 3,022 | 5.285714 | 0.390306 | 0.012066 | 0.043436 | 0.019305 | 0.02027 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012158 | 0.237922 | 3,022 | 67 | 94 | 45.104478 | 0.887538 | 0.589345 | 0 | 0 | 0 | 0 | 0.054358 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.166667 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
685947cd3edfcf08d33ed27ecfd166f3d297852c | 20,920 | py | Python | SourceCode/latest/Final_Destination-v1.0.SourceCode.py | NapoII/Final-Destination | 7573b366840841e7770d0d2441ecee84f305ee91 | [
"CC0-1.0"
] | 1 | 2021-09-24T17:53:58.000Z | 2021-09-24T17:53:58.000Z | SourceCode/latest/Final_Destination-v1.0.SourceCode.py | NapoII/Final-Destination | 7573b366840841e7770d0d2441ecee84f305ee91 | [
"CC0-1.0"
] | null | null | null | SourceCode/latest/Final_Destination-v1.0.SourceCode.py | NapoII/Final-Destination | 7573b366840841e7770d0d2441ecee84f305ee91 | [
"CC0-1.0"
] | null | null | null | ####################################################################################################
#Intro
v = "1.0"
f0=("""
,:++.
%#::. .. .. .. .. .
%+ :# .@. :# %# M.
%+ :# ,@@, :% .##: M.
%@%#. :# ,#:@, :% #:,@. M.
%%.. :# :% ,@,:% ,@,,#+ M.
%+ :# :% ,@%+ #%+++M. M.
%+ :# :% ,@+ :# +# M...
++ ,% :+ ,: %, .#..##@%
.+++:.
,@:+%@#. ,::+ ,,.,,,,,,, ,. ,. ,. , ,,,,,,,.., ,:+, ., .,
,@ .%@. %%:: ,#%,.%%#@%%+ @, %#. @, .M: +%%@%%%.:# .%#::##. .M: ,#
,@ .M, %: @: ,# @, %##. @, %+@. @, :# %% #+ .@@: ,#
,@ .M. %+,, %#, ,# #, #,%#. @. ,# %+ @, :# .M. :# ,#,@: :%
,@ %% %#+: :@% ,# #, #, +#.@. #+:+M, @, :# .M :# ,# .@::%
,@ .%#. %: ,@ :@ #, @, +#@. :#:::%# @, :# .M, #+ :# .@#%
,@+#@+ %+,,. .:#: :@ #, @, +M..@, .@: @: :# +@,..%#. :# .@+
,%+, :%%#. :%, ,+ +, %. : .+ :: +, ,+ ,%##: ,+ .:
- created by Napo_II
- """ + v + """
- python 3.7
- https://github.com/NapoII/Final-Destination
""")
print(f0)
####################################################################################################
#Import
import os  # for the folder structure
from os import listdir
from os.path import isfile, join
import shutil
import win32con, win32api  # to set the ico image for the folders
from datetime import date
import time
from configparser import ConfigParser
####################################################################################################
aktuellesDatum = date.today()
script_path = os.path.dirname(os.path.realpath(__file__))
ico_dir = (script_path.replace('\\','/')) + "/ico/"
FD_input = input("What is the drive letter of the desired Final Destination disk: ")
FD_path = str(FD_input)+":\\"
path = r"C:/Users/space/Desktop/Final Destination/"
dir = r"E:/Backup/desktop.ini"
ico = r"E:/Bilder/Icons/icons8-kategorisieren-64.ico,0"
Datei_Doppelt = []
skip_LIST = ["desktop.ini", "Log_Files"]
Datei_Art_Bilder = r"Bilder"
Filter_liste_Bilder = [".png",".jpeg",".gif",".tif",".jpg"]
Datei_Art_Dokumente = r"Dokumente"
Filter_liste_Dokumente = [".txt",".ui",".pdf",".docx",".xlsx",".ini"]
Datei_Art_Audio = r"Audio"
Filter_liste_Audio = [".wav",".mp3"]
Datei_Art_Projekte = r"Projekte"
Filter_liste_Projekte = [".py"]
Datei_Art_Pr0grame = r"Pr0grame"
Filter_liste_Pr0grame = [".exe"]
Datei_Art_Cheack_to_delt = r"Cheack_to_delt"
Filter_liste_Cheack_to_delt = [".url",".lnk"]
Datei_Art_In_welchen_Ordner = r"In_welchen_Ordner"
Filter_liste_In_welchen_Ordner = [".zip",".tar.xz",".xz"]
####################################################################################################
#Config file
path_config = "E:/Pr0grame/My_ Pyhton/work_in_progress/Final Destination/config_Final_Destination.ini"
if os.path.exists(path_config):  # checks whether the file path exists (True/False)
    print("Final Destination config has been loaded.")
else:
    print("Default config is being created --> [" + str(path_config) + "] ")
config = ConfigParser()
config['Bilder'] = {
'Filter_liste_Bilder': ".png " ".jpeg " ".gif " ".tif " ".jpg ",
}
config['Dokumente'] = {
'Filter_liste_Dokumente': ".txt " ".ui " ".pdf " ".docx " ".xlsx " ".ini ",
}
config['Audio'] = {
'Filter_liste_Audio': ".wav " ".mp3 ",
}
config['Projekte'] = {
'Filter_liste_Projekte': ".py ",
}
config['Pr0grame'] = {
'Filter_liste_Pr0grame': ".exe ",
}
config['Cheack_to_delt'] = {
'Filter_liste_Cheack_to_delt': ".url " ".lnk ",
}
config['skip_LIST'] = {
'Filter_liste_skip_LIST': "desktop.ini " "Log_Files ",
}
config['In_welchen_Ordner'] = {
'Filter_liste_In_welchen_Ordner': ".zip " ".tar.xz " ".xz ",
}
with open(path_config,'w') as f:
config.write(f)
def get_config_Filter_key(Datei_Art):
    parser = ConfigParser()
    parser.read(path_config)
    Filter_liste_ = "Filter_liste_" + str(Datei_Art)
    Config_sections = parser.get(Datei_Art, Filter_liste_)
    def stringToList(string):
        # split() without an argument drops the trailing empty entry that
        # split(" ") would produce, since the config values end with a space
        listRes = string.split()
        return listRes
    return (stringToList(Config_sections))
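# Example (assuming the default config written above):
# get_config_Filter_key("Bilder") -> ['.png', '.jpeg', '.gif', '.tif', '.jpg']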
####################################################################################################
# Folder structure is created / checked
print("\n")
print("Folder structure is being checked and created if necessary...\n")
folder = "Final Destination"
dir = "~/Desktop/" + str(folder)  # desired folder path
full_path = os.path.expanduser(dir)  # expands the path with the PC user name
if os.path.exists(full_path):  # checks whether the path exists (True/False)
    print("Folder structure already exists")
    print(" -> " + str(full_path))
else:  # creates the folder if it does not exist
    os.makedirs(full_path)
    print("The folder [" + folder + "] was created in the directory:")
    print(" -> " + str(full_path))
print("\n")
########## Folder properties are set
def ini_datei(full_path, Datei_Art):
    File_name = "desktop.ini"  # file name
    complete_pathName = os.path.join(full_path, File_name)  # path + ini file name
    if os.path.exists(complete_pathName):  # checks whether the file already exists (True/False)
        print("desktop.ini already exists")
        print(" -> " + str(complete_pathName))
    else:
        print("desktop.ini [" + str(File_name) + "] is being created...")
        file1 = open(complete_pathName, "w")  # create the file
        full_ico = ico_dir + Datei_Art + ".ico"  # build the icon path
        toFile = "[.ShellClassInfo]\n" + "IconResource=" + full_ico + ",0" + "\n" + "[ViewState]\n" + "Mode=\n" + "Vid=\n" + "FolderType=Generic"
        file1.write(toFile)  # fill the file with the content
        file1.close()
        win32api.SetFileAttributes(complete_pathName, win32con.FILE_ATTRIBUTE_HIDDEN)  # hide the file
        win32api.SetFileAttributes(complete_pathName, win32con.FILE_ATTRIBUTE_SYSTEM)  # mark it as a system file
        win32api.SetFileAttributes(complete_pathName, win32con.FILE_ATTRIBUTE_HIDDEN)  # hide again so the ini is picked up by the system
ini_datei(full_path, "Final Destination")
#####################################################################################################
# Log file
# folder structure is created / checked
print("\n")
print("Checking folder structure and creating it if necessary...\n")
folder = "Log_Files"
dir = "~/Desktop/Final Destination/" + str(folder)  # desired target path
full_path = os.path.expanduser(dir)                 # expands the path with the PC user name
if os.path.exists(full_path):                       # checks whether the path exists (True/False)
    print("Folder structure already exists")
    print(" -> " + str(full_path))
else:                                               # creates the folder if it does not exist
    os.makedirs(full_path)
    print("The folder [" + folder + "] was created in:")
    print(" -> " + str(full_path))
print("\n")
## a subfolder named after the current month is created
folder = aktuellesDatum.strftime("%m_%Y")
full_path = str(full_path) + "/" + str(folder)      # desired target path
if os.path.exists(full_path):                       # checks whether the path exists (True/False)
    print("Folder structure already exists")
    print(" -> " + str(full_path))
else:                                               # creates the folder if it does not exist
    os.makedirs(full_path)
    print("The folder [" + folder + "] was created in:")
    print(" -> " + str(full_path))
print("\n")
####################################################################################################
# create a text file named after date and time
start_time = time.time()
datei_Date = time.strftime("%d_%m_%Y - %H.%M")                 # generates the date format
name_of_file = "Final_Destination - Log - " + datei_Date       # generates the file name
save_path = full_path
completeName = os.path.join(save_path, name_of_file + ".txt")  # path + text file name
print("Text file [" + str(name_of_file) + "] is being created...")
file1 = open(completeName, "a")                                # create the file
file1.close()                                                  # file is saved and closed
####################################################################################################
### data is appended to the log file
def Fill_log(fill_input):
    toFile = fill_input                   # content that gets appended
    file1 = open(completeName, "a")       # file is opened in append mode
    print("File [" + str(name_of_file) + "] is being written and saved...")
    file1.write(str(toFile) + "\n")       # file is filled with the input
    file1.close()                         # file is closed and saved
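# e.g. Fill_log("Run started: " + datei_Date) appends a single line to the log file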
#####################################################################################################
def Datei_Filter(Datei_Art):
    Fill_log("----------------------------------------------------------------------------------------------------")
    Fill_log("File type: " + str(Datei_Art))
    Filter_liste = get_config_Filter_key(Datei_Art)
    Fill_log("Filtering for: " + str(Filter_liste) + "\n")
    Filter = Filter_liste
    destination = FD_path + Datei_Art
    # folder structure is created / checked
    print("\n")
    print("Checking folder structure and creating it if necessary...\n")
    folder = Datei_Art
    dir = destination                     # desired target path
    full_path = os.path.expanduser(dir)   # expands the path with the PC user name
    if os.path.exists(full_path):         # checks whether the path exists (True/False)
        print("Folder structure already exists")
        print(" -> " + str(full_path))
    else:                                 # creates the folder if it does not exist
        os.makedirs(full_path)
        print("The folder [" + folder + "] was created in:")
        print(" -> " + str(full_path))
    print("\n")
    ini_datei(full_path, Datei_Art)
    File_list = [f for f in listdir(path) if isfile(join(path, f))]  # collects the file names in a list
    # filters out the files that should be skipped
    skip_LIST_1 = skip_LIST
    skip_len = len(skip_LIST_1) + 1
    while True:
        skip_len = skip_len - 1
        if skip_len == 0:
            break
        Skip_datei = str(skip_LIST_1.pop())
        try:
            File_list.remove(Skip_datei)
            skip_LIST_1.append(Skip_datei)
        except ValueError:                # file to skip was not present
            skip_LIST_1.append(Skip_datei)
    print("\n")
    Fill_log("The following files were found: ")
    print("The following files were found: ")
    Fill_log(File_list)
    Fill_log("\n")
    print(File_list)
    print("\n")
    len_a = len(Filter) + 1
    while True:
        len_a = len_a - 1
        if len_a == 0:
            break
        datei_end = Filter.pop()
        datei_end_match = list(filter(lambda x: datei_end in x, File_list))
        len_b = len(datei_end_match) + 1
        while True:
            len_b = len_b - 1
            if len_b == 0:
                break
            datei_end_name = datei_end_match.pop()
            try:
                shutil.move(f"{path}/{datei_end_name}", destination)
                print(" File [ " + str(datei_end_name) + " ] moved to -> " + str(destination))
                Fill_log(" File [ " + str(datei_end_name) + " ] moved to -> " + str(destination))
            except shutil.Error:          # a file of that name already exists at the destination
                Datei_Doppelt.append(datei_end_name)
                print("\n")
                print("Checking folder structure and creating it if necessary...\n")
                folder = Datei_Art
                folder_2 = "Dublikate"
                dir = destination + "/" + folder_2   # desired target path
                full_path = os.path.expanduser(dir)  # expands the path with the PC user name
                if os.path.exists(full_path):        # checks whether the path exists (True/False)
                    print("Folder structure already exists")
                    print(" -> " + str(full_path))
                else:                                # creates the folder if it does not exist
                    os.makedirs(full_path)
                    print("The folder [" + folder_2 + "] was created in:")
                    print(" -> " + str(full_path))
                print("\n")
                print(" The file [ " + str(datei_end_name) + " ] already exists in --> |" + str(destination) + "|")
                ini_datei(full_path, folder_2)
                try:
                    shutil.move(f"{path}/{datei_end_name}", dir)
                    print(" File [ " + str(datei_end_name) + " ] moved to -> " + str(dir))
                    Fill_log(" File [ " + str(datei_end_name) + " ] moved to -> " + str(dir))
                except shutil.Error:      # already present in the duplicates folder as well
                    print("The file [ " + str(datei_end_name) + " ] already exists as a duplicate ")
                    Fill_log("The file [ " + str(datei_end_name) + " ] already exists as a duplicate ")
    Fill_log("\n")
def Ordner_Filter(Datei_Art):
    # note: the original classified entries with os.path.isdir(file), which is
    # resolved relative to the working directory; with the join() fix below the
    # loop can iterate the collected folders directly
    Fill_log("----------------------------------------------------------------------------------------------------")
    Fill_log("File type: folder\n")
    files = []
    paths = []
    for file in os.listdir(path):
        if os.path.isdir(os.path.join(path, file)):
            paths.append(file)
        else:
            files.append(file)
    # filters out the entries that should be skipped
    skip_LIST_2 = skip_LIST
    skip_len = len(skip_LIST_2) + 1
    while True:
        skip_len = skip_len - 1
        if skip_len == 0:
            break
        Skip_datei = skip_LIST_2.pop()
        try:
            paths.remove(Skip_datei)
            skip_LIST_2.append(Skip_datei)
        except ValueError:                # entry to skip was not present
            skip_LIST_2.append(Skip_datei)
    len_d = len(paths) + 1
    while True:
        len_d = len_d - 1
        if len_d == 0:
            break
        files_x = paths.pop()
        path_c = path + files_x
        datei_end_name = files_x
        destination = FD_path + Datei_Art
        # folder structure is created / checked
        print("\n")
        print("Checking folder structure and creating it if necessary...\n")
        folder = Datei_Art
        dir = destination                     # desired target path
        full_path = os.path.expanduser(dir)   # expands the path with the PC user name
        if os.path.exists(full_path):         # checks whether the path exists (True/False)
            print("Folder structure already exists")
            print(" -> " + str(full_path))
        else:                                 # creates the folder if it does not exist
            os.makedirs(full_path)
            print("The folder [" + folder + "] was created in:")
            print(" -> " + str(full_path))
        print("\n")
        ini_datei(full_path, Datei_Art)
        try:
            shutil.move(f"{path_c}", destination)
            print(" Folder [ " + str(datei_end_name) + " ] moved to -> " + str(destination))
            Fill_log(" Folder [ " + str(datei_end_name) + " ] moved to -> " + str(destination))
            Fill_log("\n")
        except shutil.Error:                  # a folder of that name already exists at the destination
            Datei_Doppelt.append(datei_end_name)
            print("\n")
            print("Checking folder structure and creating it if necessary...\n")
            folder = Datei_Art
            folder_2 = "Dublikate"
            dir = destination + "/" + folder_2   # desired target path
            full_path = os.path.expanduser(dir)  # expands the path with the PC user name
            if os.path.exists(full_path):        # checks whether the path exists (True/False)
                print("Folder structure already exists")
                print(" -> " + str(full_path))
            else:                                # creates the folder if it does not exist
                os.makedirs(full_path)
                print("The folder [" + folder_2 + "] was created in:")
                print(" -> " + str(full_path))
            print("\n")
            print(" The folder [ " + str(datei_end_name) + " ] already exists in --> |" + str(destination) + "|")
            ini_datei(full_path, folder_2)
            try:
                shutil.move(f"{path}/{datei_end_name}", dir)
                print(" Folder [ " + str(datei_end_name) + " ] moved to -> " + str(dir))
                Fill_log(" Folder [ " + str(datei_end_name) + " ] moved to -> " + str(dir))
                Fill_log("\n")
            except shutil.Error:              # duplicate already exists in the duplicates folder too
                print("The folder [" + str(datei_end_name) + "] already exists as a duplicate ")
                Fill_log("The folder [" + str(datei_end_name) + "] already exists as a duplicate ")
    Fill_log("\n")
def Folder_cheack(path):
    # returns True if the drop-in folder still contains a sub-folder, otherwise
    # (implicitly) None -- the caller below maps None to False
    for file in os.listdir(path):
        # join with the given path; a bare isdir(file) would be checked
        # relative to the current working directory
        if os.path.isdir(os.path.join(path, file)):
            return True
def if_to_Datei_Art_cheack(Datei_Art_In_welchen_Ordner):
    # returns True if an archive file matching one of the filters is present
    File_list = [f for f in listdir(path) if isfile(join(path, f))]  # collects the file names in a list
    Filter = get_config_Filter_key(Datei_Art_In_welchen_Ordner)
    Len_e = len(Filter) + 1
    while True:
        Len_e = Len_e - 1
        if Len_e == 0:
            break
        datei_end = Filter.pop()
        datei_end_match = list(filter(lambda x: datei_end in x, File_list))
        print(datei_end_match)
        # the original returned inside the loop, so only the last extension was
        # ever checked; report a hit for any matching extension instead
        if len(datei_end_match) >= 1:
            return True
    return False
f0 = "----------------------------------------------------------------------------------------------------"  # separator; f0 was used but never defined in the original
Fill_log(f0)
Fill_log("The drop-in folder is located at --> " + str(path) + "\n" + "The files/folders found were moved to --> " + str(FD_path) + "\n")
Datei_Filter(Datei_Art_Bilder)
Datei_Filter(Datei_Art_Dokumente)
Datei_Filter(Datei_Art_Audio)
Datei_Filter(Datei_Art_Projekte)
Datei_Filter(Datei_Art_Pr0grame)
Datei_Filter(Datei_Art_Cheack_to_delt)
Cheack_da = if_to_Datei_Art_cheack(Datei_Art_In_welchen_Ordner)
Datei_Filter(Datei_Art_In_welchen_Ordner)
Folder_da = (Folder_cheack(path))
if Folder_da == None:
Folder_da = False
if Folder_da == True or Cheack_da == True:
Manuel_cheack = True
else:
Manuel_cheack = False
Ordner_Filter(Datei_Art_In_welchen_Ordner)
print(f0)
os.startfile(FD_path)
print(str(completeName))
os.system("\""+str(completeName)+"\"")
XY = input("Press [ ENTER ] to exit the program.")
#####################################################################################################
# To do:
# -> handle .ini file shortcuts
#####################################################################################################
| 40.153551 | 149 | 0.493117 | 2,144 | 20,920 | 4.600746 | 0.132463 | 0.035685 | 0.025547 | 0.022709 | 0.67133 | 0.617295 | 0.570661 | 0.545823 | 0.520681 | 0.500608 | 0 | 0.006065 | 0.32218 | 20,920 | 521 | 150 | 40.153551 | 0.689563 | 0.101769 | 0 | 0.492268 | 0 | 0 | 0.300681 | 0.032709 | 0.002577 | 0 | 0 | 0 | 0 | 1 | 0.020619 | false | 0 | 0.020619 | 0 | 0.054124 | 0.180412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68597bef85b0dda83a4d08365703a436cc87189b | 892 | py | Python | evap/grades/migrations/0017_remove_grade_documents_from_evaluations.py | felixrindt/EvaP | fe65fcc511cc942695ce1edbaab170894f0d37b1 | ["MIT"] | 29 | 2020-02-28T23:03:41.000Z | 2022-02-19T09:29:36.000Z | evap/grades/migrations/0017_remove_grade_documents_from_evaluations.py | felixrindt/EvaP | fe65fcc511cc942695ce1edbaab170894f0d37b1 | ["MIT"] | 737 | 2015-01-02T17:43:25.000Z | 2018-12-10T20:45:10.000Z | evap/grades/migrations/0017_remove_grade_documents_from_evaluations.py | felixrindt/EvaP | fe65fcc511cc942695ce1edbaab170894f0d37b1 | ["MIT"] | 83 | 2015-01-14T12:39:41.000Z | 2018-10-29T16:36:43.000Z |
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('grades', '0016_move_grade_documents_to_course'),
]
operations = [
migrations.AlterField(
model_name='gradedocument',
name='course',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='grade_documents', to='evaluation.Course', verbose_name='course'),
),
migrations.AlterUniqueTogether(
name='gradedocument',
unique_together={('course', 'description_en'), ('course', 'description_de')},
),
migrations.RemoveField(
model_name='gradedocument',
name='evaluation',
),
migrations.AlterModelTable(
name='gradedocument',
table=None,
),
]
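# To apply this migration (standard Django usage, not part of the original file):
#   python manage.py migrate grades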
| 29.733333 | 160 | 0.608744 | 76 | 892 | 6.960526 | 0.526316 | 0.128544 | 0.05293 | 0.083176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006154 | 0.2713 | 892 | 29 | 161 | 30.758621 | 0.807692 | 0 | 0 | 0.32 | 0 | 0 | 0.209641 | 0.039238 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.08 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6859ac768978328d2e515e67eba45978155bf1eb | 2,424 | py | Python | primrose/pipelines/transformer_pipeline.py | astro313/primrose | 891f001e4e198096edb74eea951d27c9ae2a278f | ["Apache-2.0"] | 38 | 2019-09-04T17:39:31.000Z | 2021-11-09T21:20:24.000Z | primrose/pipelines/transformer_pipeline.py | astro313/primrose | 891f001e4e198096edb74eea951d27c9ae2a278f | ["Apache-2.0"] | 66 | 2019-09-05T15:55:19.000Z | 2021-11-21T05:36:54.000Z | primrose/pipelines/transformer_pipeline.py | astro313/primrose | 891f001e4e198096edb74eea951d27c9ae2a278f | ["Apache-2.0"] | 6 | 2019-12-02T09:05:30.000Z | 2021-12-09T16:12:36.000Z |
"""Module to run any sequence of transformers, both custom transformers and sklearn preprocessors.
Author(s):
Carl Anderson (carl.anderson@weightwatchers.com)
Brian Graham (brian.graham@ww.com)
"""
import importlib
import inspect
from primrose.base.transformer_sequence import TransformerSequence
from primrose.base.transformer import AbstractTransformer
from primrose.pipelines.train_test_split import TrainTestSplit
class TransformerPipeline(TrainTestSplit):
@staticmethod
def necessary_config(node_config):
"""Return the necessary configuration keys for the TransformerPipeline object
Returns:
set of keys
"""
return set(["transformer_sequence"])
@staticmethod
def optional_config(node_config):
"""Return the optional configuration keys for the TransformerPipeline object
Returns:
set of keys
"""
return TrainTestSplit.necessary_config(node_config)
def init_pipeline(self):
"""create the pipeline's TransformerSequence
Returns:
a TransformerSequence
"""
self.transformer_sequence = TransformerSequence()
for transformer in self.node_config["transformer_sequence"]:
p = self._instantiate_transformer(transformer)
self.transformer_sequence.add(p)
return self.transformer_sequence
@staticmethod
def _instantiate_transformer(transformer):
"""Import and validate user-defined transformer either from primrose or a custom codebase
Args:
transformer (AbstractTransformer): a subclass of primrose AbstractTransformer
Returns:
AbstractTransformer
"""
classname = transformer["class"]
path_sequence = classname.split(".")
target_class_name = path_sequence.pop(-1)
module = importlib.import_module(".".join(path_sequence))
try:
t = getattr(module, target_class_name)
except AttributeError:
raise Exception(
f'Transformer {target_class_name} not found in {".".join(path_sequence)} module'
)
class_args = {k: v for k, v in transformer.items() if k != "class"}
params = [p for p in inspect.signature(t).parameters]
t_args = [class_args.pop(p) for p in params if p in class_args.keys()]
return t(*t_args, **class_args)
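    # A minimal sketch of the node_config this pipeline expects; the class paths
    # and keyword arguments below are illustrative assumptions, not part of primrose:
    #   node_config = {
    #       "transformer_sequence": [
    #           {"class": "sklearn.preprocessing.StandardScaler"},
    #           {"class": "sklearn.preprocessing.MinMaxScaler", "feature_range": (0, 1)},
    #       ]
    #   }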
| 30.683544 | 98 | 0.672442 | 257 | 2,424 | 6.198444 | 0.357977 | 0.071563 | 0.030132 | 0.033898 | 0.119272 | 0.087884 | 0.087884 | 0.087884 | 0.087884 | 0.087884 | 0 | 0.00055 | 0.25 | 2,424 | 78 | 99 | 31.076923 | 0.875688 | 0.283828 | 0 | 0.088235 | 0 | 0 | 0.081234 | 0.015743 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0 | 0.176471 | 0 | 0.441176 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
685b5013ecddbf782edf424c6a879243e2a21199 | 1,547 | py | Python | examples/mqtt.py | eva121304/Adafruit_Python_DHT | c113941f1f6e1942df65114027aa851c65a6f2f9 | ["MIT"] | null | null | null | examples/mqtt.py | eva121304/Adafruit_Python_DHT | c113941f1f6e1942df65114027aa851c65a6f2f9 | ["MIT"] | null | null | null | examples/mqtt.py | eva121304/Adafruit_Python_DHT | c113941f1f6e1942df65114027aa851c65a6f2f9 | ["MIT"] | null | null | null |
#!/usr/bin/python3
import paho.mqtt.client as mqtt
import time
import sys
import json
import Adafruit_DHT
deviceId = "DWC50qSX"
deviceKey="i9X6Pky0S7bgpjS5"
dataChnId1="Humidity"
dataChnId2="Temperature"
MQTT_SERVER="mqtt.mcs.mediatek.com"
MQTT_PORT=1883
MQTT_ALIVE=60
MQTT_TOPIC1="mcs/" + deviceId + "/" + deviceKey + "/"+dataChnId1
MQTT_TOPIC2="mcs/" + deviceId + "/" + deviceKey + "/"+dataChnId2
def on_publish(client,userdata,result):
print("success")
pass
mqtt_client = mqtt.Client()
mqtt_client.on_publish = on_publish
mqtt_client.connect(MQTT_SERVER, MQTT_PORT, MQTT_ALIVE)
mqtt_client.loop_start()  # run the network loop so QoS 1/2 publishes can complete
sensor_args = { '11': Adafruit_DHT.DHT11,
'22': Adafruit_DHT.DHT22,
'2302': Adafruit_DHT.AM2302 }
if len(sys.argv) == 3 and sys.argv[1] in sensor_args:
sensor = sensor_args[sys.argv[1]]
pin = sys.argv[2]
else:
print('Usage: sudo ./Adafruit_DHT.py [11|22|2302] <GPIO pin number>')
print('Example: sudo ./Adafruit_DHT.py 2302 4 - Read from an AM2302 connected to GPIO pin #4')
sys.exit(1)
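# Example invocation (assuming a DHT22 sensor wired to GPIO pin 4):
#   sudo ./mqtt.py 22 4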
while True:
    h0, t0 = Adafruit_DHT.read_retry(sensor, pin)
    if h0 is None or t0 is None:  # read_retry returns None after repeated sensor failures
        continue
    print('Temp={0:0.1f}*C  Humidity={1:0.1f}%'.format(t0, h0))
    payload = {"datapoints": [{"dataChnId": "Humidity", "values": {"value": h0}}]}
    ret = mqtt_client.publish(MQTT_TOPIC1, json.dumps(payload), qos=1)  # capture the result before printing it
    print(ret)
    payload = {"datapoints": [
        {"dataChnId": "Temperature", "values": {"value": t0}}]}
    ret = mqtt_client.publish(MQTT_TOPIC2, json.dumps(payload), qos=2)
    print(ret)
    time.sleep(1)
| 28.648148 | 98 | 0.713639 | 226 | 1,547 | 4.743363 | 0.40708 | 0.083955 | 0.047575 | 0.058769 | 0.050373 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057037 | 0.127343 | 1,547 | 53 | 99 | 29.188679 | 0.737037 | 0.010989 | 0 | 0.044444 | 0 | 0 | 0.228908 | 0.013734 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022222 | false | 0.022222 | 0.155556 | 0 | 0.177778 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
685edbb89e4cd316edc9382f3c0f6dc1028847e1 | 579 | py | Python | src/salt_client.py | Oskari-Tuormaa/SaltBot | 64d29a4c80fda0781ae929dd6ad2b0a04d04fb6c | ["MIT"] | 1 | 2020-05-27T22:10:50.000Z | 2020-05-27T22:10:50.000Z | src/salt_client.py | Oskari-Tuormaa/SaltBot | 64d29a4c80fda0781ae929dd6ad2b0a04d04fb6c | ["MIT"] | 1 | 2021-12-05T20:45:21.000Z | 2021-12-05T20:45:21.000Z | src/salt_client.py | Oskari-Tuormaa/SaltBot | 64d29a4c80fda0781ae929dd6ad2b0a04d04fb6c | ["MIT"] | 2 | 2021-09-13T17:17:05.000Z | 2021-12-05T20:41:17.000Z |
import logging
import logging.handlers
import discord
from cmd_handler import is_valid_command, execute_commands
exception_format = """event: {event}
args: {args}
message: {message}
"""
class SaltClient(discord.Client):
async def on_ready(self):
print("Bot ready!")
async def on_message(self, message: discord.Message):
if is_valid_command(message):
await execute_commands(message)
async def on_error(self, event, *args, **kwargs):
logging.exception(exception_format.format(event=event, args=args, message=args[0].content))
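# Note: on_error above assumes args[0] is a Message (true for on_message errors);
# other event types would raise inside the handler itself.
#
# Minimal startup sketch (token handling is an assumption; the original file does
# not show how the client is launched, and discord.py 2.x would also need intents):
#   if __name__ == "__main__":
#       import os
#       SaltClient().run(os.environ["DISCORD_TOKEN"])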
| 24.125 | 99 | 0.716753 | 74 | 579 | 5.445946 | 0.445946 | 0.066998 | 0.074442 | 0.099256 | 0.153846 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0.002088 | 0.172712 | 579 | 23 | 100 | 25.173913 | 0.839248 | 0 | 0 | 0 | 0 | 0 | 0.098446 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.3125 | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
685fb118f3dc07d9c9461cdc6fe079047a9bc8a0 | 3,637 | py | Python | utils/bm25.py | zoesunsun/Forexer | 861137b323907ed8c06c79ec9cba682c8868344e | ["MIT"] | null | null | null | utils/bm25.py | zoesunsun/Forexer | 861137b323907ed8c06c79ec9cba682c8868344e | ["MIT"] | null | null | null | utils/bm25.py | zoesunsun/Forexer | 861137b323907ed8c06c79ec9cba682c8868344e | ["MIT"] | null | null | null |
import rank_bm25
class BM25(rank_bm25.BM25Okapi):
"""BM25Okapi modified to maintain a corpus pool
"""
RELATIVE_THRESHOLD = 0.8 # A relative duplication threshold compared in the pool
ABSOLUTE_THRESHOLD = 1.5
def __init__(
self,
pool_size=50,
corpus=["This is only intended for initialization.".split(" ")],
tokenizer=None,
k1=1.5,
b=0.75,
epsilon=0.25,
):
self.pool_size = pool_size
self.is_full = pool_size <= len(corpus)
self.pool_ptr = 0
super().__init__(corpus, tokenizer=tokenizer, k1=k1, b=b, epsilon=epsilon)
# self.corpus = corpus
def _initialize(self, corpus):
self.nd = super()._initialize(corpus)
return self.nd
def update(self, new_corpus):
"""
Add an new corpus to the documents pool.
If it has been full, the oldest one will be replaced.
"""
num_doc = int(self.avgdl * self.corpus_size)
if self.is_full:
# Clean legacy influences
# old = self.corpus[self.pool_ptr]
num_doc += len(new_corpus) - self.doc_len[self.pool_ptr]
self.doc_len[self.pool_ptr] = len(new_corpus)
frequencies = {}
for word in new_corpus:
if word not in frequencies:
frequencies[word] = 0
frequencies[word] += 1
# Clean legacy freqs
for word, freq in self.doc_freqs[self.pool_ptr].items():
self.nd[word] -= 1
if self.nd[word] == 0:
del self.nd[word]
# Update new one
self.doc_freqs[self.pool_ptr] = frequencies
for word, freq in frequencies.items():
if word not in self.nd:
self.nd[word] = 0
self.nd[word] += 1
self.pool_ptr = (self.pool_ptr + 1) % self.pool_size
else:
self.doc_len.append(len(new_corpus))
num_doc += len(new_corpus)
frequencies = {}
for word in new_corpus:
if word not in frequencies:
frequencies[word] = 0
frequencies[word] += 1
self.doc_freqs.append(frequencies)
for word, freq in frequencies.items():
if word not in self.nd:
self.nd[word] = 0
self.nd[word] += 1
self.corpus_size += 1
if self.corpus_size >= self.pool_size:
self.is_full = True
self.avgdl = num_doc / self.corpus_size
self._calc_idf(self.nd)
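    # Sketch of the ring-buffer behaviour above (pool_size=2 is an assumption
    # for the illustration, not a recommended setting):
    #   bm = BM25(pool_size=2)
    #   bm.update("hello world".split())
    #   bm.update("goodbye world".split())     # pool is now full
    #   bm.update("new document here".split()) # overwrites the oldest entry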
def __contains__(self, new_doc: str):
score = self.get_scores(new_doc.split())
# print("score:", score.max())
_sum = max(score.sum(), 0.001)
normalized_score = score / _sum
return ((normalized_score.max() >= self.RELATIVE_THRESHOLD).any()) and (score.max() > self.ABSOLUTE_THRESHOLD)
if __name__ == "__main__":
bm = BM25()
print(bm.get_scores("workers of the world, rise up!".split(" ")))
bm.update("Workers of the world, unite!".split(" "))
bm.update("Hello there good man!".split())
bm.update("It is quite windy in London".split())
bm.update("How is the weather today?".split())
print(bm.get_scores("This is a test".split(" ")))
print(bm.get_scores("workers of the world, rise up!".split(" ")))
print(bm.get_scores("windy London".split()))
print("workers of the world, rise up!" in bm)
| 34.971154 | 119 | 0.54138 | 454 | 3,637 | 4.162996 | 0.251101 | 0.050794 | 0.046561 | 0.02328 | 0.347619 | 0.293651 | 0.234921 | 0.234921 | 0.234921 | 0.234921 | 0 | 0.01936 | 0.346714 | 3,637 | 103 | 120 | 35.31068 | 0.776094 | 0.093484 | 0 | 0.27027 | 0 | 0 | 0.086004 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0 | 0.013514 | 0 | 0.135135 | 0.067568 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
686226c27ced659a12abbd329dabb8b47b4fa741 | 437 | py | Python | utils/nadd.py | Tiago-Minuzzi/phd-proj | e6820e1ab939d1c1191b66f5980438ed7be86499 | ["MIT"] | null | null | null | utils/nadd.py | Tiago-Minuzzi/phd-proj | e6820e1ab939d1c1191b66f5980438ed7be86499 | ["MIT"] | null | null | null | utils/nadd.py | Tiago-Minuzzi/phd-proj | e6820e1ab939d1c1191b66f5980438ed7be86499 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
import sys
in_fasta = sys.argv[1]
out_fasta = sys.argv[2]
ntail = 'n' * 6  # six lowercase n's appended to every sequence line
def nadd(in_fasta, out_fasta):
with open(in_fasta, 'r') as fasta, open(out_fasta,"w") as fas_out:
for line in fasta:
line = line.strip()
if not line.startswith('>'):
line = line.lower() + ntail
fas_out.write(line + "\n")
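# Usage sketch (hypothetical file names):
#   ./nadd.py reads.fasta reads_with_tails.fasta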
if __name__ == '__main__':
nadd(in_fasta, out_fasta)
| 23 | 70 | 0.585812 | 67 | 437 | 3.552239 | 0.492537 | 0.147059 | 0.10084 | 0.117647 | 0.159664 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012461 | 0.265446 | 437 | 18 | 71 | 24.277778 | 0.728972 | 0.048055 | 0 | 0 | 0 | 0 | 0.033735 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.076923 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68626d05993aca7e17a54307fd1c89c1629097b3 | 2,274 | py | Python | check_stock.py | imLightSpeed/RTX-3070-bot | d6abb59c719c5ff9907ffa350e02a3ed88160d67 | ["MIT"] | null | null | null | check_stock.py | imLightSpeed/RTX-3070-bot | d6abb59c719c5ff9907ffa350e02a3ed88160d67 | ["MIT"] | null | null | null | check_stock.py | imLightSpeed/RTX-3070-bot | d6abb59c719c5ff9907ffa350e02a3ed88160d67 | ["MIT"] | null | null | null |
from mail import mail
import os
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import logging
from selenium.webdriver.remote.remote_connection import LOGGER
from selenium.common.exceptions import NoSuchElementException
from threading import Thread  # the bare "import threading" was unused and dropped
class stock(Thread):
def __init__(self, item, url):
Thread.__init__(self)
self.item = item
self.url =url
def best_buy(self):
self.html_class = 'add-to-cart-button'
self.store_name = 'Best Buy'
self.cart = 'Add to Cart'
def newegg(self):
self.html_class = 'product-buy'
self.store_name = 'Newegg'
self.cart = 'ADD TO CART'
def run(self):
url = self.url
item = self.item
# options = Options()
# options.headless = True
# PATH = "chromedriver.exe"
# driver = webdriver.Chrome(PATH, options=options)
GOOGLE_CHROME_BIN = os.environ.get('GOOGLE_CHROME_BIN')
CHROMEDRIVER_PATH = os.environ.get('CHROMEDRIVER_PATH')
chrome_options = Options()
chrome_options.binary_location = GOOGLE_CHROME_BIN
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument('--no-sandbox')
driver = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, chrome_options=chrome_options)
try:
driver.get(self.url)
time.sleep(6)
is_instock = driver.find_element_by_class_name(self.html_class).text
driver.close()
driver.quit()
print(is_instock, self.item)
if is_instock == self.cart:
x = mail()
x.send_mail(self.store_name,self.item)
except NoSuchElementException:
print('NoSuchElementException', self.item)
driver.quit()
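# Hypothetical input for main() below -- item names mapped to product URLs
# (placeholder URLs, not from the original script):
#   bb = {"RTX 3070": "https://www.bestbuy.com/site/rtx-3070-example"}
#   ne = {"RTX 3070": "https://www.newegg.com/p/rtx-3070-example"}
#   main(bb, ne)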
def main(bb, ne):
threads = []
for key,value in bb.items():
t1 = stock(key,value)
t1.best_buy()
t1.start()
threads.append(t1)
for i in threads:
i.join()
threads = []
for key,value in ne.items():
t1 = stock(key,value)
t1.newegg()
t1.start()
threads.append(t1)
for i in threads:
i.join()
| 32.485714 | 99 | 0.615215 | 273 | 2,274 | 4.96337 | 0.311355 | 0.067159 | 0.028782 | 0.025092 | 0.150554 | 0.121033 | 0.059041 | 0.059041 | 0.059041 | 0.059041 | 0 | 0.005538 | 0.2854 | 2,274 | 69 | 100 | 32.956522 | 0.828308 | 0.051891 | 0 | 0.222222 | 0 | 0 | 0.067875 | 0.010228 | 0 | 0 | 0 | 0 | 0 | 1 | 0.079365 | false | 0 | 0.15873 | 0 | 0.253968 | 0.031746 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6863e3729bd27dee454b1f00c6f7d3cbf51d21e7 | 1,368 | py | Python | study_cases/bootstrap/plot.py | RedPointyJackson/tfg | 20e25150e749849ef0efea95e2d6d053614cb08d | ["MIT"] | null | null | null | study_cases/bootstrap/plot.py | RedPointyJackson/tfg | 20e25150e749849ef0efea95e2d6d053614cb08d | ["MIT"] | null | null | null | study_cases/bootstrap/plot.py | RedPointyJackson/tfg | 20e25150e749849ef0efea95e2d6d053614cb08d | ["MIT"] | null | null | null |
#!/usr/bin/env python3
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib as mpl
import numpy as np
import pandas as pd
def cm2inch(value): return value/2.54
plt.style.use('custom')
colors = mpl.rcParams['axes.prop_cycle'].by_key()['color']
primary = colors[0]
jdf = pd.read_csv('data_jack.csv')
bdf = pd.read_csv('data_boot.csv')
# General
fig = plt.figure(figsize=(cm2inch(12), cm2inch(6)))
ax1 = fig.add_subplot(131)
ax1.errorbar(x=bdf['i'], y=bdf['μ'], yerr=bdf['σ'], fmt='o',
color=primary, ms=3, lw=1, capsize=3)
ax1.set_xlabel('Bootstrap')
ax1.set_ylabel('μ ± σ')
ax2 = fig.add_subplot(132)
ax2.errorbar(x=jdf['i'], y=jdf['μ'], yerr=jdf['σ'], fmt='o',
color=primary, ms=3, lw=1, capsize=3)
ax2.set_xlabel('Jackknife')
ax2.set_ylabel('')
ax1.set_xlim(0,15.5)
ax2.set_xlim(0,15.5)
ax1.set_ylim(-2,10)
ax2.set_ylim(-2,10)
ax1.set_xticks([])
ax2.set_xticks([])
ax1.set_yticks([-2,0,1,2,4,6,8,10])
ax2.set_yticks([-2,0,1,2,4,6,8,10])
ax2.set_yticklabels([])
# Comparison
ax3 = fig.add_subplot(133)
xx = jdf['μ']
yy = bdf['μ']
# Pearson correlation coefficient between the jackknife and bootstrap means
ρ = np.mean((xx - np.mean(xx)) * (yy - np.mean(yy))) / np.std(xx) / np.std(yy)
print("ρ =", ρ)
ax3.scatter(xx, yy, alpha=0.3, edgecolor='none')
ax3.set_xlabel('μ (Jackknife)')
ax3.set_ylabel('μ (Bootstrap)')
ax3.set_xlim(0,6)
ax3.set_ylim(0,6)
fig.savefig('general.pdf')
| 19 | 72 | 0.659357 | 259 | 1,368 | 3.3861 | 0.378378 | 0.047891 | 0.04447 | 0.029647 | 0.147092 | 0.122007 | 0.122007 | 0.122007 | 0.122007 | 0.122007 | 0 | 0.072319 | 0.120614 | 1,368 | 71 | 73 | 19.267606 | 0.65586 | 0.02924 | 0 | 0.047619 | 0 | 0 | 0.095849 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02381 | false | 0 | 0.119048 | 0.02381 | 0.142857 | 0.02381 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68643be9dd2c578e0f2938773481bd92ff280d33 | 7,877 | py | Python | service/image_diff.py | xiaohengdai/AutoPlatformServer | f7826d26186728f3d19bd245ca000ddcd31cd101 | ["MIT"] | null | null | null | service/image_diff.py | xiaohengdai/AutoPlatformServer | f7826d26186728f3d19bd245ca000ddcd31cd101 | ["MIT"] | null | null | null | service/image_diff.py | xiaohengdai/AutoPlatformServer | f7826d26186728f3d19bd245ca000ddcd31cd101 | ["MIT"] | null | null | null |
import cv2
from service.image_similar import HashSimilar
class ImageDiff(object):
def __init__(self, w=9, padding=80, w_scale=850, h_scale=0.08, hash_score=0.85, pixel_value=28):
self.filter_w = w
self.padding = padding
self.size_scale = w_scale
self.head_scale = h_scale
self.hash_score = hash_score
self.pixel_value = pixel_value
def get_line(self, e, f, i=0, j=0):
"""
calculate a path from e to f
:param e: feature input A
:param f: feature input B
:return: operation list of path from e to f
"""
N, M, L, Z = len(e), len(f), len(e)+len(f), 2*min(len(e), len(f))+2
if N > 0 and M > 0:
w, g, p = N-M, [0]*Z, [0]*Z
for h in range(0, (L//2+(L % 2 != 0))+1):
for r in range(0, 2):
c, d, o, m = (g, p, 1, 1) if r == 0 else (p, g, 0, -1)
for k in range(-(h-2*max(0, h-M)), h-2*max(0, h-N)+1, 2):
a = c[(k+1) % Z] if (k == -h or k != h and c[(k-1) % Z] < c[(k+1) % Z]) else c[(k-1) % Z]+1
b = a-k
s, t = a, b
while a < N and b < M and self.get_hash_score(e[(1-o)*N+m*a+(o-1)], f[(1-o)*M+m*b+(o-1)]) > self.hash_score:
a, b = a+1, b+1
c[k % Z], z = a, -(k-w)
if L % 2 == o and -(h-o) <= z <= h-o and c[k % Z]+d[z % Z] >= N:
D, x, y, u, v = (2*h-1, s, t, a, b) if o == 1 else (2*h, N-a, M-b, N-s, M-t)
if D > 1 or (x != u and y != v):
return self.get_line(e[0:x], f[0:y], i, j) + self.get_line(e[u:N], f[v:M], i + u, j + v)
elif M > N:
return self.get_line([], f[N:M], i + N, j + N)
elif M < N:
return self.get_line(e[M:N], [], i + M, j + M)
else:
return []
elif N > 0:
return [{"operation": "delete", "position_old": i+n} for n in range(0, N)]
else:
return [{"operation": "insert", "position_old": i, "position_new": j+n} for n in range(0, M)]
@staticmethod
def get_hash_score(hash1, hash2, precision=8):
"""
calculate similar score with line A and line B
:param hash1: input line A with hash code
:param hash2: input line B with hash code
:return: similar score in 0-1.0
"""
assert len(hash1) == len(hash2)
score = 1 - sum([ch1 != ch2 for ch1, ch2 in zip(hash1, hash2)]) * 1.0 / (precision * precision)
return score
@staticmethod
def get_line_list(op_list):
"""
get line list
:param op_list: op list
:return: line list
"""
line1_list = []
line2_list = []
for op in op_list:
if op["operation"] == "insert":
line1_list.append(op["position_new"])
if op["operation"] == "delete":
line2_list.append(op["position_old"])
return line1_list, line2_list
@staticmethod
def get_line_feature(image, precision=8):
"""
get line feature of input image
:param image: image in numpy shape
:param precision: feature precision
:return: line feature
"""
line_feature = []
for y in range(image.shape[0]):
img = cv2.resize(image[y], (precision, precision))
img_list = img.flatten()
avg = sum(img_list) * 1. / len(img_list)
avg_list = ["0" if i < avg else "1" for i in img_list]
line_feature.append([int(''.join(avg_list[x:x+4]), 2) for x in range(0, precision*precision)])
return line_feature
def get_image_feature(self, img1, img2):
"""
get image feature with padding processing
:param img1: imageA in numpy shape
:param img2: imageB in numpy shape
:return: image feature
"""
h1, w = img1.shape
img1 = img1[:, :w-self.padding]
img2 = img2[:, :w-self.padding]
img1_feature = self.get_line_feature(img1)
img2_feature = self.get_line_feature(img2)
return img1_feature, img2_feature
def line_filter(self, line_list):
"""
calculate line list with param
:param line_list: line list
:return: filtered line list
"""
i = 0
w = self.filter_w
line = []
while i < len(line_list)-w-1:
f = line_list[i:i+w]
s = 0
for j in range(w-1):
s = s + f[j+1] - f[j]
if s - w <= 6:
for l in f:
if l not in line:
line.append(l)
i = i + 1
return line
def get_image(self, image_file):
"""
cv2.read image and 3d to 1d
:param image_file: image file path
:return: image in numpy shape
"""
image = cv2.imread(image_file)
img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
img = cv2.GaussianBlur(img, (5, 5), 1.5)
h, w = img.shape
scale = self.size_scale/w
img = cv2.resize(img, (0, 0), fx=scale, fy=scale)
return img
@staticmethod
def get_pixel(img, x, y):
"""
get pixel value of image
:param img: image in numpy shape
:param x: position x
:param y: position y
:return: pixel value
"""
h, w = img.shape
p = 0
if y < h:
p = img[y][x]
return p
def increment_diff(self, image1, image2, image_show) -> int:
"""
calculate increment image diff
:param image1: input image A
:param image2: input image B
:param image_show: increment diff image for show
:return: points length of image show
"""
img1 = self.get_image(image1)
img2 = self.get_image(image2)
score_list = HashSimilar.get_attention(img1, img2)
img1_feature, img2_feature = self.get_image_feature(img1, img2)
line1, line2 = self.get_line_list(self.get_line(img1_feature, img2_feature))
line = line1 + line2
line = self.line_filter(line)
img_show = img2.copy() if img2.shape[0] > img1.shape[0] else img1.copy()
(h, w) = img_show.shape
img_show = cv2.cvtColor(img_show, cv2.COLOR_GRAY2BGR)
points = []
line_attention = []
for l in line:
i = int((len(score_list) * (l - 1) / h))
i = 0 if i < 0 else i
if score_list[i] < 0.98:
line_attention.append(l)
line = line_attention
for y in range(int(h*0.95)):
if y > int(w * self.head_scale):
if y in line:
for x in range(w-self.padding):
p1 = int(self.get_pixel(img1, x, y))
p2 = int(self.get_pixel(img2, x, y))
if abs(p1 - p2) < self.pixel_value:
pass
else:
points.append([x, y])
for point in points:
cv2.circle(img_show, (point[0], point[1]), 1, (0, 0, 255), -1)
cv2.imwrite(image_show, img_show)
return len(points)
def get_image_score(self, image1, image2, image_diff_name):
score = HashSimilar.get_attention_similar(image1, image2)
print("score:",score)
if score < 1.0:
if score > 0.2:
points_size = self.increment_diff(image1, image2, image_diff_name)
if points_size < 50:
score = 1.0
return score
| 37.870192 | 132 | 0.489907 | 1,124 | 7,877 | 3.323843 | 0.137011 | 0.026231 | 0.023555 | 0.004283 | 0.077891 | 0.019272 | 0.012313 | 0 | 0 | 0 | 0 | 0.042417 | 0.386442 | 7,877 | 207 | 133 | 38.05314 | 0.730602 | 0.137108 | 0 | 0.077465 | 0 | 0 | 0.02006 | 0 | 0 | 0 | 0 | 0 | 0.007042 | 1 | 0.077465 | false | 0.007042 | 0.014085 | 0 | 0.204225 | 0.007042 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
68645335067f1dced1141646911f01f08cdf2583 | 3,318 | py | Python | code/fast_rpsss.py | elimsc/stark-anatomy | 5e316e685a1f87cef1d26590bf1584f6523dc603 | ["Apache-2.0"] | null | null | null | code/fast_rpsss.py | elimsc/stark-anatomy | 5e316e685a1f87cef1d26590bf1584f6523dc603 | ["Apache-2.0"] | null | null | null | code/fast_rpsss.py | elimsc/stark-anatomy | 5e316e685a1f87cef1d26590bf1584f6523dc603 | ["Apache-2.0"] | null | null | null |
from rescue_prime import *
from rdd_fast_stark import *
from hashlib import blake2s
import os
import pickle as pickle
from rdd_fast_stark import FastStark as RddFastStark
from pyspark import SparkContext, SparkConf
conf = (
    SparkConf().setAppName("test_fast_stark").setMaster("spark://zhdeMacBook-Pro:7077")
)
sc = SparkContext(conf=conf)  # pass the config in; it was built but never used before
sc.addPyFile("./algebra.py")
sc.addPyFile("./rdd_ntt.py")
sc.addPyFile("./univariate.py")
sc.addPyFile("./rdd_merkle.py")
sc.addPyFile("./rdd_poly.py")
class SignatureProofStream(ProofStream):
def __init__(self, document):
ProofStream.__init__(self)
self.document = document
self.prefix = blake2s(bytes(document)).digest()
def prover_fiat_shamir(self, num_bytes=32):
return shake_256(self.prefix + self.serialize()).digest(num_bytes)
def verifier_fiat_shamir(self, num_bytes=32):
return shake_256(
self.prefix + pickle.dumps(self.objects[: self.read_index])
).digest(num_bytes)
def deserialize(self, bb):
sps = SignatureProofStream(self.document)
sps.objects = pickle.loads(bb)
return sps
class FastRPSSS:
def __init__(self):
self.field = Field.main()
expansion_factor = 4
num_colinearity_checks = 64
security_level = 2 * num_colinearity_checks
self.rp = RescuePrime()
num_cycles = self.rp.N + 1
state_width = self.rp.m
self.stark = RddFastStark(
self.field,
expansion_factor,
num_colinearity_checks,
security_level,
state_width,
num_cycles,
transition_constraints_degree=3,
sc=sc,
)
(
self.transition_zerofier,
self.transition_zerofier_codeword,
self.transition_zerofier_root,
) = self.stark.preprocess()
def stark_prove(self, input_element, proof_stream):
output_element = self.rp.hash(input_element)
trace = self.rp.trace(input_element)
transition_constraints = self.rp.transition_constraints(self.stark.omicron)
boundary_constraints = self.rp.boundary_constraints(output_element)
proof = self.stark.prove(
trace,
transition_constraints,
boundary_constraints,
self.transition_zerofier,
self.transition_zerofier_codeword,
proof_stream,
)
return proof
def stark_verify(self, output_element, stark_proof, proof_stream):
boundary_constraints = self.rp.boundary_constraints(output_element)
transition_constraints = self.rp.transition_constraints(self.stark.omicron)
return self.stark.verify(
stark_proof,
transition_constraints,
boundary_constraints,
self.transition_zerofier_root,
proof_stream,
)
def keygen(self):
sk = self.field.sample(os.urandom(17))
pk = self.rp.hash(sk)
return sk, pk
def sign(self, sk, document):
sps = SignatureProofStream(document)
signature = self.stark_prove(sk, sps)
return signature
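    # Minimal signing round-trip sketch (the message bytes are an assumption,
    # not part of the original file):
    #   rpsss = FastRPSSS()
    #   sk, pk = rpsss.keygen()
    #   sig = rpsss.sign(sk, b"document bytes")
    #   assert rpsss.verify(pk, b"document bytes", sig)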
def verify(self, pk, document, signature):
sps = SignatureProofStream(document)
        return self.stark_verify(pk, signature, sps)
| 29.891892 | 87 | 0.649186 | 366 | 3,318 | 5.644809 | 0.295082 | 0.029042 | 0.063892 | 0.023233 | 0.287996 | 0.266699 | 0.266699 | 0.166989 | 0.115198 | 0.115198 | 0 | 0.009784 | 0.260699 | 3,318 | 110 | 88 | 30.163636 | 0.83245 | 0 | 0 | 0.2 | 0 | 0 | 0.033153 | 0.008439 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.077778 | 0.022222 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
| 29.891892 | 87 | 0.649186 | 366 | 3,318 | 5.644809 | 0.295082 | 0.029042 | 0.063892 | 0.023233 | 0.287996 | 0.266699 | 0.266699 | 0.166989 | 0.115198 | 0.115198 | 0 | 0.009784 | 0.260699 | 3,318 | 110 | 88 | 30.163636 | 0.83245 | 0 | 0 | 0.2 | 0 | 0 | 0.033153 | 0.008439 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.077778 | 0.022222 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6864ebd0350153df32f01bfaad09d91b7c3d4a97 | 2,330 | py | Python | simulation/test_simulation_cancellation.py | bopopescu/sparrow-mod | 56c601ee3dd852a9f053bffffc2a52ff3da8d2bd | [
"Apache-2.0"
] | 200 | 2015-01-05T07:37:20.000Z | 2022-03-30T03:28:21.000Z | simulation/test_simulation_cancellation.py | bopopescu/sparrow-mod | 56c601ee3dd852a9f053bffffc2a52ff3da8d2bd | [
"Apache-2.0"
] | 1 | 2016-05-13T10:46:32.000Z | 2016-05-13T10:46:32.000Z | simulation/test_simulation_cancellation.py | bopopescu/sparrow-mod | 56c601ee3dd852a9f053bffffc2a52ff3da8d2bd | [
"Apache-2.0"
] | 73 | 2015-01-06T02:00:17.000Z | 2021-11-22T10:04:03.000Z | #
# Copyright 2013 The Regents of The University California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import simulation_cancellation
import util
class TestMultiGetSimulation(unittest.TestCase):
def setUp(self):
simulation_cancellation.CANCELLATION = True
self.simulation = simulation_cancellation.Simulation(5, "", 0.9,
util.TaskDistributions.CONSTANT)
def test_workers_tracked_correctly(self):
job_id = 13
job_start = 10
num_tasks = 2
job = util.Job(num_tasks, job_start, util.TaskDistributions.CONSTANT, 100)
self.assertEqual(job.num_tasks, num_tasks)
self.assertEqual(len(job.unscheduled_tasks), num_tasks)
probe_events = self.simulation.send_probes(job, job_start)
self.assertEqual(len(job.probed_workers), 2*num_tasks)
# Run the first probe event.
time, first_probe = probe_events[0]
# All workers are idle, so should get a task end event back.
events = first_probe.run(time)
self.assertEquals(len(events), 1)
self.assertEquals(len(job.probed_workers), 2*num_tasks - 1)
time, second_probe = probe_events[1]
events = second_probe.run(time)
# This time, should get cancellation events back (in addition to task end event).
self.assertEquals(len(events), 1 + num_tasks)
self.assertEquals(len(job.probed_workers), num_tasks)
# Make sure everything works fine if a worker replies to a probe before realizing it was
# cancelled.
time, third_probe = probe_events[2]
events = third_probe.run(time)
# Should just get a no-op event back.
self.assertEquals(len(events), 1)
if __name__ == '__main__':
unittest.main() | 38.833333 | 96 | 0.684979 | 310 | 2,330 | 5.012903 | 0.43871 | 0.046332 | 0.061133 | 0.03668 | 0.119048 | 0.068855 | 0.036036 | 0 | 0 | 0 | 0 | 0.015722 | 0.235622 | 2,330 | 60 | 97 | 38.833333 | 0.856822 | 0.376824 | 0 | 0.066667 | 0 | 0 | 0.005591 | 0 | 0 | 0 | 0 | 0 | 0.266667 | 1 | 0.066667 | false | 0 | 0.1 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
6867ca86adec09ba95f03c3cc64a39739d408fe3 | 2,340 | py | Python | python/rl_agent_level2.py | mgualti/DeepRLManip | 6b982a319edae51d9c7c740c3a83fe8ce3a97ca7 | [
"MIT"
] | 23 | 2018-10-30T02:34:39.000Z | 2022-01-13T09:58:00.000Z | python/rl_agent_level2.py | mgualti/DeepRLManip | 6b982a319edae51d9c7c740c3a83fe8ce3a97ca7 | [
"MIT"
] | null | null | null | python/rl_agent_level2.py | mgualti/DeepRLManip | 6b982a319edae51d9c7c740c3a83fe8ce3a97ca7 | [
"MIT"
] | 5 | 2018-11-03T19:56:59.000Z | 2020-03-02T08:28:31.000Z | '''RL agent which uses the deictic state/action representation.'''
# python
from copy import copy
# scipy
from numpy.linalg import inv
from numpy.random import choice, rand, randint, uniform
from numpy import absolute, argmax, argmin, array, concatenate, dot, exp, hstack, linspace, isinf, \
ones, pi, repeat, stack, tile, vstack, where, zeros
# openrave
import openravepy
# ros
import tf
# self
import hand_descriptor
from hand_descriptor import HandDescriptor
from rl_agent_hierarchical import RlAgentHierarchical
# AGENT ============================================================================================
class RlAgentLevel2(RlAgentHierarchical):
def __init__(self, rlEnvironment, gpuId, hasHistory, nSamples, caffeDirPostfix=""):
'''Initializes agent in the given environment.'''
RlAgentHierarchical.__init__(self, rlEnvironment, gpuId, hasHistory, nSamples)
# parameters
self.level = 2
# initialize caffe
self.InitializeCaffe(caffeDirPostfix)
def ComposeAction(self, prevDesc, theta):
'''Creates a new action and hand descriptor objects.'''
action = [prevDesc.image, copy(theta)]
R = openravepy.matrixFromAxisAngle(prevDesc.approach, theta[0])[0:3,0:3]
approach = prevDesc.approach
axis = dot(R, prevDesc.axis)
center = prevDesc.center
T = hand_descriptor.PoseFromApproachAxisCenter(approach, axis, center)
desc = HandDescriptor(T)
return action, desc
def SampleActions(self, prevDesc, cloudTree):
'''Samples hand positions in both base frame and image coordinates.'''
theta = zeros((self.nSamples, 6)) # last 3 columns are padded
theta[:, 0] = linspace(-pi, pi, self.nSamples)
return theta
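    # e.g. with nSamples = 3, linspace(-pi, pi, 3) places [-pi, 0, pi] in column 0;
    # only that first column (rotation about the approach axis) is used downstream.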
def SenseAndAct(self, hand, prevDesc, cloudTree, epsilon):
'''Senses the current state, s, and determines the next action, a.'''
# generate image for base frame descriptor
prevDesc.GenerateDepthImage(cloudTree.data, cloudTree)
s = self.emptyState if hand is None else [hand.image, self.emptyStateVector]
# decide which location in the image to zoom into
theta = self.SampleActions(prevDesc, cloudTree)
bestIdx, bestValue = self.SelectIndexEpsilonGreedy(s, prevDesc.image, theta, epsilon)
# compose action
a, desc = self.ComposeAction(prevDesc, theta[bestIdx])
return s, a, desc, bestValue | 32.5 | 100 | 0.703846 | 269 | 2,340 | 6.074349 | 0.475836 | 0.034272 | 0.025704 | 0.031824 | 0.053856 | 0.053856 | 0 | 0 | 0 | 0 | 0 | 0.005128 | 0.166667 | 2,340 | 72 | 101 | 32.5 | 0.832821 | 0.244444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.114286 | false | 0 | 0.257143 | 0 | 0.485714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |