hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f8ac5fabe068225bc64ed71253c98847458bd024 | 813 | py | Python | media_management_lti/settings/local.py | Harvard-ATG/media_management_lti | a4b89c3e1c5f7a8adc31a258413b512507916a94 | [
"BSD-3-Clause"
] | 5 | 2017-09-25T19:55:50.000Z | 2020-11-14T23:39:31.000Z | media_management_lti/settings/local.py | Harvard-ATG/media_management_lti | a4b89c3e1c5f7a8adc31a258413b512507916a94 | [
"BSD-3-Clause"
] | 105 | 2016-01-25T19:00:40.000Z | 2021-02-23T18:57:38.000Z | media_management_lti/settings/local.py | Harvard-ATG/media_management_lti | a4b89c3e1c5f7a8adc31a258413b512507916a94 | [
"BSD-3-Clause"
] | 5 | 2016-03-08T14:28:31.000Z | 2020-11-14T23:39:32.000Z | from .base import *
from logging.config import dictConfig
# Development-only overrides layered on top of the shared base settings
# (pulled in above via `from .base import *`).
ALLOWED_HOSTS = ['*']
# Print outgoing email to the console instead of actually sending it.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# File-backed sessions avoid needing a database/cache locally.
SESSION_ENGINE = 'django.contrib.sessions.backends.file'
#INSTALLED_APPS += ('debug_toolbar',)
#MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
# For Django Debug Toolbar:
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
}
# For enabling HTTPS in local development. See https://github.com/teddziuba/django-sslserver
INSTALLED_APPS += ('sslserver',)
# Logging
# Log to console instead of a file when running locally
# NOTE(review): `LOGGING` and the `logging` module are assumed to be
# re-exported by `from .base import *` -- confirm base defines both.
LOGGING['handlers']['default'] = {
    'level': logging.DEBUG,
    'class': 'logging.StreamHandler',
    'formatter': 'simple',
}
dictConfig(LOGGING)
APP_BUILD_JSON = None
| 23.911765 | 92 | 0.730627 |
c7a6af7c3b468b76f48484949321cf5df195841a | 3,217 | py | Python | unlocode.py | bhaumikmistry/pyairports | 70e482491ca45848cb0c944214fb6c9954359418 | [
"Apache-2.0"
] | null | null | null | unlocode.py | bhaumikmistry/pyairports | 70e482491ca45848cb0c944214fb6c9954359418 | [
"Apache-2.0"
] | null | null | null | unlocode.py | bhaumikmistry/pyairports | 70e482491ca45848cb0c944214fb6c9954359418 | [
"Apache-2.0"
] | null | null | null | from collections import namedtuple
from pyairports import airports
import os
import io
import csv
# Record type for the location entries emitted into pyairports' other_list.py.
other = namedtuple('other', ['iata', 'name', 'country', 'subdiv', 'type', 'lat', 'lon'])
def is_port(f):
    """Decode a UN/LOCODE function classifier string into mode labels.

    The classifier is positional: position 0 holds '1' for a port (as
    defined in Rec. 16), position 1 holds '2' for a rail terminal,
    position 2 holds '3' for a road terminal and position 3 holds '4'
    for an airport.  Later positions (postal exchange, multimodal,
    border crossing, unknown) are ignored here.

    :return: None when the string is too short to inspect, otherwise a
        (possibly empty) list of the matched labels.
    """
    if len(f) < 4:
        return None
    labels = []
    for position, flag, label in (
            (0, '1', 'Port'),
            (1, '2', 'Rail Terminal'),
            (2, '3', 'Road Terminal'),
            (3, '4', 'Airport')):
        if f[position] == flag:
            labels.append(label)
    return labels
def parse_subloc(s):
    """Convert a UN/LOCODE coordinate field to decimal-degree strings.

    The field looks like ``'DDMMN DDDMMW'`` -- degrees, minutes and a
    hemisphere letter for latitude then longitude.  Returns ``(lat, lon)``
    as strings, or ``(None, None)`` for an empty field.

    Bug fix: the old code pasted the minutes after a decimal point
    (``'{deg}.{min}'``), so 40°05' became ``'40.5'`` (i.e. 40.5°) instead
    of the correct 40.0833°, and minutes were never negated for the
    southern/western hemispheres.  Minutes are now converted properly
    (deg + min/60) with the sign applied to the whole value.
    """
    if not s:
        return None, None
    lat, lon = s.split(' ')
    lat_deg = int(lat[:2]) + int(lat[2:-1]) / 60.0
    if lat[-1] == 'S':
        lat_deg = -lat_deg
    lon_deg = int(lon[:3]) + int(lon[3:-1]) / 60.0
    if lon[-1] == 'W':
        lon_deg = -lon_deg
    # Round to ~11 m precision to keep the emitted strings short.
    return str(round(lat_deg, 4)), str(round(lon_deg, 4))
def read_locode_csv(path):
    """Yield dicts describing transport locations from one UN/LOCODE CSV file.

    Each yielded dict carries the country code, the location code (a distinct
    IATA code overrides the UN/LOCODE one when listed), the clean name,
    subdivision, the joined function labels (see is_port) and decimal
    lat/lon strings.
    """
    # Bug fix: csv.reader requires a text-mode file on Python 3; the old
    # io.open(path, 'rb') raised "iterator should return strings, not bytes".
    # NOTE(review): assuming Latin-1 here (the usual UN/LOCODE distribution
    # encoding) -- confirm against the actual files.
    with io.open(path, 'r', encoding='latin-1', newline='') as inf:
        r = csv.reader(inf, delimiter=',', quotechar='"')
        for line in r:
            (changed, locode_country, locode_location, name, clean_name,
             subdiv, func, status, date, iata, subloc, codes) = line
            lat, lon = parse_subloc(subloc)
            if iata and iata != locode_location:
                # A dedicated IATA code takes precedence over the location code.
                print('iata {} != locode {}. Using {}'.format(
                    iata, locode_location, iata))
            functions = is_port(func)  # evaluate once instead of twice
            if functions:
                yield dict(
                    country=locode_country,
                    iata=locode_location if not iata else iata,
                    # name=name,
                    type=', '.join(sorted(functions)),
                    name=clean_name,
                    subdiv=subdiv,
                    lat=lat,
                    lon=lon
                )
def main():
    """Walk the 'UN LOCODE' directory, collect airports known to pyairports,
    and regenerate pyairports/data/other_list.py."""
    alook = airports.Airports()
    results = []
    for dirpath, dirnames, filenames in os.walk('UN LOCODE'):
        for fn in filenames:
            if 'UNLOCODE' in fn and fn.endswith('csv'):
                for port in read_locode_csv(os.path.join(dirpath, fn)):
                    # Only keep airport entries that carry an IATA code.
                    if 'Airport' not in port['type']:
                        continue
                    iata = port['iata']
                    if not iata:
                        continue
                    # Skip codes the pyairports database does not know about.
                    try:
                        alook.airport_iata(iata)
                    except KeyError:
                        continue
                    results.append(other(**port))
    # Bug fix: the file must be opened in text mode ('w'); writing str to a
    # 'wb' handle raises TypeError on Python 3.
    with open('./pyairports/data/other_list.py', 'w') as outf:
        outf.write("""\
#Other location types - from the UN LOCODE database
OTHER_LIST = [""")
        # Guard against an empty result set (the old results[-1] raised
        # IndexError when nothing was collected).
        if results:
            for r in results[:-1]:
                outf.write(str(list(r)) + ',\n')
            outf.write(str(list(results[-1])) + '\n]\n')
        else:
            outf.write(']\n')
    print('wrote to ./pyairports/data/other_list.py')
if __name__ == '__main__':
    # Deliberate tripwire: the output format must be migrated to JSON
    # before this script is run again (see message below).
    assert False, "When next using, update to write to data/<list>.json please!"
    main()
| 25.736 | 126 | 0.507616 |
a5773b7dee292917c9c3ec3feb77c4f0c8c4b985 | 1,720 | py | Python | tests/test_maximum_x_projection.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 2 | 2020-07-01T06:20:44.000Z | 2020-07-01T09:36:48.000Z | tests/test_maximum_x_projection.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | null | null | null | tests/test_maximum_x_projection.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 1 | 2020-06-29T18:40:54.000Z | 2020-06-29T18:40:54.000Z | import pyclesperanto_prototype as cle
import numpy as np
def test_maximum_x_projection():
    """Project a 5x5x5 stack along x and compare against the hand-computed
    reference maxima below."""
    test1 = cle.push(np.asarray([
        [
            [1, 0, 0, 0, 9],
            [0, 2, 0, 8, 0],
            [3, 0, 1, 0, 10],
            [0, 4, 0, 7, 0],
            [5, 0, 6, 0, 10]
        ], [
            [0, 2, 0, 8, 0],
            [1, 0, 0, 0, 9],
            [3, 0, 1, 0, 10],
            [0, 4, 0, 7, 0],
            [5, 0, 6, 0, 10]
        ], [
            [0, 2, 0, 8, 0],
            [3, 0, 1, 0, 10],
            [0, 4, 0, 7, 0],
            [1, 0, 0, 0, 9],
            [5, 0, 6, 0, 10]
        ], [
            [0, 2, 0, 8, 0],
            [1, 0, 0, 0, 9],
            [0, 4, 0, 7, 0],
            [3, 0, 1, 0, 10],
            [5, 0, 6, 0, 10]
        ], [
            [1, 0, 0, 0, 9],
            [0, 4, 0, 7, 0],
            [3, 0, 1, 0, 10],
            [0, 2, 0, 8, 0],
            [5, 0, 6, 0, 10]
        ]
    ]))
    # Expected per-row maxima of the stack above.
    reference = cle.push(np.asarray([
        [9, 8, 8, 8, 9],
        [8, 9, 10, 9, 7],
        [10, 10, 7, 7, 10],
        [7, 7, 9, 10, 8],
        [10, 10, 10, 10, 10]
    ]))
    result = cle.create(reference)
    cle.maximum_x_projection(test1, result)
    # Pull both images back from the device before comparing on the host.
    a = cle.pull(result)
    b = cle.pull(reference)
    print(a)
    assert (np.array_equal(a, b))
def test_maximum_x_projection_of_pointlist():
    """The projection must also reduce a two-row point list to its row maxima."""
    points = cle.push_zyx(np.asarray([
        [0, 0, 2, 3, 5],
        [0, 1, 3, 2, 6]
    ]))
    expected = cle.push_zyx(np.asarray([
        [5],
        [6]
    ]))
    projected = cle.maximum_x_projection(points)
    actual = cle.pull_zyx(projected)
    wanted = cle.pull_zyx(expected)
    print(actual)
    print(wanted)
    assert (np.array_equal(actual, wanted))
| 21.5 | 59 | 0.37907 |
bd597009651752f3e370448c33891d921388c3e4 | 479 | py | Python | examples/simple/simple.py | anisehsani/yahp | 47326c39df9dcc64dcb62071f62898bb52105b21 | [
"Apache-2.0"
] | null | null | null | examples/simple/simple.py | anisehsani/yahp | 47326c39df9dcc64dcb62071f62898bb52105b21 | [
"Apache-2.0"
] | null | null | null | examples/simple/simple.py | anisehsani/yahp | 47326c39df9dcc64dcb62071f62898bb52105b21 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 MosaicML. All Rights Reserved.
import os
from dataclasses import dataclass
from typing import Optional
import yahp as hp
@dataclass
class SimpleExample(hp.Hparams):
    """Minimal yahp hyperparameter example: one required and two optional fields."""

    foo: int = hp.required("foo field")  # must be supplied by the YAML file
    bar: float = hp.optional("bar field", default=1.0)
    baz: Optional[str] = hp.optional("baz", default=None)

# load parameters from simple.yaml
hparams = SimpleExample.create(os.path.join(os.path.dirname(__file__), "simple.yaml"))
print(hparams)
| 22.809524 | 86 | 0.73904 |
f29c35abb1b9b6d5df65f690ae7d4cd169dd3d8b | 3,506 | py | Python | pyreversi/args.py | thib1984/pyreversi | 3e71f86d46c84daeaba35abea743943505f3c643 | [
"MIT"
] | null | null | null | pyreversi/args.py | thib1984/pyreversi | 3e71f86d46c84daeaba35abea743943505f3c643 | [
"MIT"
] | null | null | null | pyreversi/args.py | thib1984/pyreversi | 3e71f86d46c84daeaba35abea743943505f3c643 | [
"MIT"
] | null | null | null | """
pygitscrum argparse gestion
"""
import argparse
import sys
class CustomHelpFormatter(argparse.HelpFormatter):
    """Help formatter rendering options as '-x, --xxx METAVAR'.

    The metavar is printed once after the comma-joined option strings, and
    nargs='+' collapses to a single metavar instead of 'X [X ...]'.
    """

    def _format_action_invocation(self, action):
        # Positionals and zero-argument flags keep the stock rendering.
        if not action.option_strings or action.nargs == 0:
            return super()._format_action_invocation(action)
        metavar = self._get_default_metavar_for_optional(action)
        flags = ", ".join(action.option_strings)
        return flags + " " + self._format_args(action, metavar)

    def _format_args(self, action, default_metavar):
        # ONE_OR_MORE ('+') shows one metavar instead of the repeated form.
        if action.nargs == argparse.ONE_OR_MORE:
            get_metavar = self._metavar_formatter(action, default_metavar)
            return "%s" % get_metavar(1)
        return super(CustomHelpFormatter, self)._format_args(
            action, default_metavar
        )
def compute_args():
    """
    Build the pyreversi command-line parser, parse sys.argv and return
    the resulting namespace.
    """
    parser = argparse.ArgumentParser(
        description="pyreversi is a reversi game in your terminal with IA available.",
        epilog="""
    Full documentation at: <https://github.com/thib1984/pyreversi>.
    Report bugs to <https://github.com/thib1984/pyreversi/issues>.
    MIT Licence.
    Copyright (c) 2021 thib1984.
    This is free software: you are free to change and redistribute it.
    There is NO WARRANTY, to the extent permitted by law.
    Written by thib1984.""",
        formatter_class=CustomHelpFormatter,
    )
    # Declarative option table: (flags, add_argument keyword arguments).
    # Order matters -- it is the order options appear in --help.
    option_table = [
        (("-g", "--games"), dict(
            metavar="X",
            action="store",
            type=int,
            default=0,
            choices=range(1, 1001),
            help="batch mode : number games (in 1->1000). Be patient.",
        )),
        (("-r", "--rules"), dict(
            action="store_true",
            help="display rules of reversi",
        )),
        (("-t", "--tutorial"), dict(
            action="store_true",
            help="display available moves",
        )),
        (("-n", "--nocolor"), dict(
            action="store_true",
            help="no color : use if unicode or color problems (forced option for windows)",
        )),
        (("-b", "--blackbot"), dict(
            metavar="X",
            action="store",
            type=int,
            default=-1,
            choices=range(0, 6),
            help="black player is a bot with a level [X] (in 1->5)",
        )),
        (("-w", "--whitebot"), dict(
            metavar="X",
            action="store",
            type=int,
            default=-1,
            choices=range(0, 6),
            help="white player is a bot with a level [X] (in 1->5)",
        )),
        (("-a", "--auto"), dict(
            action="store_true",
            help="auto mode. Don't ask for uncessary actions",
        )),
        (("-f", "--fix"), dict(
            action="store_true",
            help="disable random in IA. The bot chooses the first in the best choices founds.",
        )),
        (("-v", "--verbose"), dict(
            action="store_true",
            help="verbose mode. Use for debug",
        )),
        (("-s", "--silent"), dict(
            action="store_true",
            help="silent mode. Use for batch mode",
        )),
        (("-u", "--update"), dict(
            action="store_true",
            help="self-update",
        )),
    ]
    for flags, options in option_table:
        parser.add_argument(*flags, **options)
    return parser.parse_args()
| 28.737705 | 91 | 0.564461 |
10d1cddf2d8d42b52012aadf7a7bd5cc17e4096e | 865 | py | Python | tests/test_depth.py | n-Holmes/multiter | d09a11028d4d8ac415adf8be3b7442a675bcc3cb | [
"MIT"
] | null | null | null | tests/test_depth.py | n-Holmes/multiter | d09a11028d4d8ac415adf8be3b7442a675bcc3cb | [
"MIT"
] | null | null | null | tests/test_depth.py | n-Holmes/multiter | d09a11028d4d8ac415adf8be3b7442a675bcc3cb | [
"MIT"
] | null | null | null | """Tests for depth-based iterators."""
import pytest
from multiter import depthflatten, depthenum
def test_correct_depth():
    """Check that depth based iterators stop at the desired depth.

    Given a three-dimensional iterable.
    When depthflatten or depthenum are called on the iterable with specified depth.
    Then only the given number of dimensions should be iterated over.
    """
    nested = [
        ['one', 'two', (3, 4)],
        ['five']
    ]
    # depth -> number of items the iterators should yield.
    for depth, expected_count in ((1, 2), (2, 4), (3, 12)):
        assert len(list(depthflatten(nested, depth))) == expected_count
        assert len(list(depthenum(nested, depth))) == expected_count
| 34.6 | 83 | 0.700578 |
267bd4ba4d424587bc0b619e66ede0e171e4acb6 | 5,238 | py | Python | trans.py | darkchii/English2Chinese | 190e0a7f0c996ec6900c8e41789b566e8935c22a | [
"MIT"
] | 4 | 2018-03-06T15:42:31.000Z | 2018-06-21T10:00:12.000Z | trans.py | darkchii/English2Chinese | 190e0a7f0c996ec6900c8e41789b566e8935c22a | [
"MIT"
] | null | null | null | trans.py | darkchii/English2Chinese | 190e0a7f0c996ec6900c8e41789b566e8935c22a | [
"MIT"
] | null | null | null | from requests import Session
from bs4 import BeautifulSoup
import re
version = 'v0.0.8'
# Baidu web-search endpoint; appending a translation keyword to the query
# coaxes the results page into including a translation card.
url = 'https://www.baidu.com/s'
User_Agent = 'Chrome/64.0.3282.168'  # change this to the browser you actually use
headers = {
    'User-Agent': User_Agent,
}
# One shared requests session reused for every lookup.
s = Session()
'''
利用百度搜索的特点:
把输入的词加上'百度翻译'进行关键字搜索
就可以翻译英文句子了
'''
def English2Chinese(word=''):
    """Look up ``word`` via Baidu search and print its Chinese translation.

    Exits the process with code 4 on repeated timeouts and code 2 when no
    translation can be scraped from the results page.
    """
    params = {
        'wd': word + ' 百度翻译',
    }
    # Retry the request up to 20 times on timeout.
    count = 20
    while count:
        try:
            html = s.get(url=url, params=params, headers=headers, timeout=2, )
        except TimeoutError:
            count -= 1
            continue
        break
    if count == 0:
        # "Request timed out; your network may be unstable."
        print('请求超时,可能是您当前网络环境不稳定,建议到网络良好的地方使用!')
        exit(4)
    soup = BeautifulSoup(html.text, 'lxml')
    # Single-word translations live inside <span> tags.
    tags = soup.find_all('span')
    # Full-sentence translations sit elsewhere in the page, inside <p> tags.
    p_tags = soup.find_all('p', attrs={'class': 'op_sp_fanyi_line_two'})
    r = re.compile(r'"(op_dict.+?)">')
    classAttributeList = r.findall(str(tags))  # class names containing 'op_dict'
    # Narrow the <span> search down to the op_dict class names found above.
    taglist = soup.find_all('span', attrs={
        'class': classAttributeList
    })
    '''
    # 查看获取的标签
    for tag in taglist:
        print(tag)
    '''
    # Country labels.
    r = re.compile(r'"op_dict3_font14 op_dict3_gap_small">(.+?)</span>')
    nation = r.findall(str(taglist))
    # Pronunciations.
    r = re.compile(r'"op_dict3_font16 op_dict3_gap_small">(.+?)</span>')
    pronunciation = r.findall(str(taglist))
    # Parts of speech.
    r = re.compile(r'"op_dict_text1 c-gap-right">(.+?)</span>')
    nature = r.findall(str(taglist))
    # Word / phrase translations.
    r = re.compile(r'op_dict_text2">(.*?)</span>', re.S)
    translatorOfChinese = r.findall(str(taglist))
    # Chinese translation of a long sentence.
    r = re.compile(r'op_sp_fanyi_line_two">(.*?)<', re.S)
    long_sentence_translatorOfChinese = r.findall(str(p_tags))
    print()
    print('原文:' + word)
    print()
    print('译文:')
    print()
    # When the results page carries no translation the list indexing below
    # raises, so IndexError is used to decide whether a translation exists.
    try:
        translatorOfChinese[0]
    except:
        try:
            long_sentence_translatorOfChinese[0]
        except:
            print('------I am sorry!Can not translated!------')
            exit(2)
        else:
            print(long_sentence_translatorOfChinese[0].replace('\n', '').replace(' ', ''))
    else:
        # Multiple parts of speech: print up to eight entries.
        for i in range(8):
            try:
                print(nature[i] + ' ' + translatorOfChinese[i].replace('\n', '').replace(' ', ''))
            except:
                break
'''
可翻译部分中文词语、短句
但没考虑到多音字的情况
虽然也可以输出多音字
但没有显示区分输出
'''
def Chinese2English(word=''):
    """Look up a Chinese word/short phrase via Baidu and print its English.

    Exits with code 4 on repeated timeouts and code 3 when nothing could be
    scraped.  Polyphonic characters are printed without distinction.
    """
    # Section headings Baidu appends after the actual translations; the
    # output loop stops as soon as it meets one of them.
    redundancy = ['双语例句','汉英大词典','中中释义','进行更多翻译']
    params = {
        'wd': word + ' 英文',
    }
    # Retry the request up to 20 times on timeout.
    count = 20
    while count:
        try:
            html = s.get(url=url, params=params, headers=headers, timeout=2, )
        except TimeoutError:
            count -= 1
            continue
        break
    if count == 0:
        # "Request timed out; your network may be unstable."
        print('请求超时,可能是您当前网络环境不稳定,建议到网络良好的地方使用!')
        exit(4)
    soup = BeautifulSoup(html.text, 'lxml')
    # span_tags = soup.find_all('span', attrs={'class':'op_dict_exp'})
    a_tags = soup.find_all('a', attrs={'hidefocus': 'true'})
    p_tags = soup.find_all('p', attrs={'class': 'op_sp_fanyi_line_two'})
    '''
    # 获取单词出自
    r = re.compile(r'op_dict_exp">(.+?)<')
    wordfroms = r.findall(str(span_tags))
    '''
    # Character / word translations.
    r = re.compile(r'<a.*?>(.+?)<')
    translatorOfEnglish = r.findall(str(a_tags))
    # Short-sentence translation.
    r = re.compile(r'op_sp_fanyi_line_two">(.+?)<', re.S)
    short_sentence_translatorOfEnglish = r.findall(str(p_tags))
    print()
    print('原文:' + word)
    print()
    print('译文:')
    print()
    # IndexError on the lookups below is used to decide whether a
    # translation of each kind exists on the page.
    try:
        short_sentence_translatorOfEnglish[0]
    except:
        try:
            translatorOfEnglish[0]
        except:
            print('------对不起!无法翻译!------')
            exit(3)
        else:
            '''修改日志 原本这里是这样写的:
            # 单词类的会匹配到多余的最后3个:[双语例句 汉英大词典 中中释义] 所以截取掉
            if len(translatorOfEnglish) > 4:
                for i in range(len(translatorOfEnglish[:-4])):
                    print(translatorOfEnglish[i] + ';')
            else:
                print(translatorOfEnglish[0] + ';')
            '''
            # The match list sometimes carries trailing section headings
            # (see `redundancy` above), so stop printing at the first one.
            for i in range(len(translatorOfEnglish)):
                if translatorOfEnglish[i] in redundancy:
                    break
                print(translatorOfEnglish[i] + ';')
    else:
        # English sentences contain single spaces, so strip only runs of two
        # spaces to avoid deleting the spaces between words.
        print(short_sentence_translatorOfEnglish[0].replace('\n', '').replace('  ', ''))
'''
判断输入词是否是合法的中文词语
'''
def is_Chinese(word):
    """Return True when the non-empty word consists solely of CJK
    ideographs (U+4E00..U+9FFF); False otherwise (including empty input)."""
    return bool(word) and all(u'\u4e00' <= ch <= u'\u9fff' for ch in word)
'''
判断输入单词是否是合法的英文
'''
def is_English(word):
    """Return True when the non-empty word consists solely of printable
    ASCII characters (space through '~'); False otherwise."""
    return bool(word) and all(' ' <= ch <= '~' for ch in word)
if __name__ == '__main__':
    word = input('Input:')
    # Route by script: printable-ASCII input is translated to Chinese,
    # all-CJK input to English.
    if is_English(word):
        English2Chinese(word=word)
    elif is_Chinese(word):
        Chinese2English(word=word)
    else:
        # Mixed or unsupported input ("invalid input").
        print('输入有误!')
        exit(1)
| 23.176991 | 99 | 0.550019 |
24ab335c8ee50f8ca65fa9961af50d40601212b4 | 9,438 | py | Python | stage_pd.py | nicod-man/Person-Detection-and-Tracking | a76b01792ee43d293c952f2527c0a2c6d2514efe | [
"MIT"
] | null | null | null | stage_pd.py | nicod-man/Person-Detection-and-Tracking | a76b01792ee43d293c952f2527c0a2c6d2514efe | [
"MIT"
] | null | null | null | stage_pd.py | nicod-man/Person-Detection-and-Tracking | a76b01792ee43d293c952f2527c0a2c6d2514efe | [
"MIT"
] | null | null | null | from tensorflow import keras
from keras.preprocessing.image import ImageDataGenerator, load_img, img_to_array
from cv2 import *
import Person_det_track
import detector
import keras
import os, sys, time, socket
import threading
import argparse
import numpy as np
import cv2
# Directory holding saved Keras models (<name>.h5), used by loadModel().
modelsdir = 'models'
# Default TCP port for the detection server.
default_server_port = 9252
"""
Predict class of an image
"""
# camera: parameter to specify the camera is used. If False, a single image is passed to predict people.
def detectPeople(imagefile,camera=False):
if isinstance(imagefile, str):
inp = inputImage(imagefile)
else:
inp = imagefile
if inp is not None:
# It returns:
# - bounding boxes
# - confidence
det = detector.PersonDetector()
# We need the confidence.
# If the image comes from the client, it is already a numpy array, otherwise we must transform it
img_full_np = inp
if isinstance(imagefile, str):
img_full_np = np.asarray(inp)
(_,confidence) = det.get_localization(img_full_np)
img, howManyPeopleDetected = Person_det_track.pipeline(inp,det,camera)
return (img, howManyPeopleDetected, confidence)
else:
return (0, 'error')
"""
Load an image and return input data for the network
"""
def inputImage(imagefile):
try:
#IMREAD_GRAYSCALE
gray = cv2.imread(imagefile, IMREAD_COLOR)
gray = np.array(gray, dtype='uint8')
return gray
except:
return None
"""
Load a trained model
"""
def loadModel(modelname):
global modelsdir
filename = os.path.join(modelsdir, '%s.h5' % modelname)
try:
model = keras.models.load_model(filename)
print("\nModel loaded successfully from file %s\n" % filename)
except OSError:
print("\nModel file %s not found!!!\n" % modelname)
model = None
return model
class ModelServer(threading.Thread):
    """TCP server thread implementing a small line-based protocol:

    - ``GETRESULT``            -> send back the last detection result string
    - ``EVAL <imagefile>``     -> run detection on a server-local image file
    - ``RGB <width> <height>`` -> read width*height*3 raw bytes and detect

    Responses are UTF-8 strings terminated by ``\\n\\r``.
    """

    def __init__(self, port):
        """Bind a listening TCP socket on ``port`` and prepare server state."""
        threading.Thread.__init__(self)
        # Create a TCP/IP socket
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock.settimeout(3)  # timeout when listening (exit with CTRL+C)
        # Bind the socket to the port
        server_address = ('', port)
        self.sock.bind(server_address)
        self.sock.listen(1)
        print("Server running on port %d" % port)
        self.dorun = True  # server running
        self.connection = None  # connection object

    def stop(self):
        """Ask the server loop to terminate."""
        self.dorun = False

    def connect(self):
        """Block until a client connects (or the server is stopped)."""
        connected = False
        while (self.dorun and not connected):
            try:
                self.connection, client_address = self.sock.accept()
                self.connection.settimeout(3)
                connected = True
                print('Connection from %s' % str(client_address))
            except:
                # accept() timeouts are expected; keep polling while dorun.
                pass

    # buf may contain a first chunk of data
    def recvall(self, count, chunk):
        """Receive exactly ``count`` bytes (``chunk`` is a first piece
        already read); return None if the peer closes early."""
        buf = chunk
        count -= len(buf)
        while count > 0:
            newbuf = self.connection.recv(count)
            if not newbuf: return None
            buf += newbuf
            count -= len(newbuf)
        return buf

    def run(self):
        """Accept clients and service protocol commands until stopped."""
        imgsize = -1  # NOTE(review): appears unused
        res = 'none 0.0'
        while (self.dorun):
            self.connect()  # wait for connection
            try:
                # Receive data
                while (self.dorun):
                    try:
                        data = self.connection.recv(256)
                        data = data.strip()
                    except socket.timeout:
                        # Sentinel meaning "no data yet, keep waiting".
                        data = "***"
                    except Exception as e:
                        print(e)
                        data = None
                    buf = b''
                    if (type(data) != str):
                        # Raw bytes: split the first text line from any
                        # binary payload following it (start of RGB image).
                        k = data.find(b'\n')
                        if (k < 0):
                            data = data.decode('utf-8')
                        elif (len(data) > k + 1):
                            buf = data[k + 2:]
                            data = data[0:k].decode('utf-8')
                    if (data != None and data != "" and data != "***"):
                        self.received = data
                        print('Received: %s' % data)
                        v = data.split(' ')
                        if v[0]=='GETRESULT':
                            # Re-send the result of the last detection.
                            ressend = (res + '\n\r').encode('UTF-8')
                            self.connection.send(ressend)
                        elif v[0] == 'EVAL' and len(v) > 1:
                            # Detect people in a server-local image file.
                            (people_detected, howMany, confidence) = detectPeople(v[1])
                            # Without resizing the window, it will fit the whole screen
                            cv2.namedWindow("detected", cv2.WINDOW_NORMAL)
                            cv2.resizeWindow("detected", 959, 1280)
                            cv2.imshow("detected", people_detected)
                            cv2.waitKey(6000)
                            cv2.destroyAllWindows()
                            res = "People detected!"
                            ressend = (res + '\n\r').encode('UTF-8')
                            self.connection.send(ressend)
                        elif v[0]=='RGB' and len(v) >= 3:
                            print("\n---------Predicting faces----------\n")
                            img_width = int(v[1])
                            img_height = int(v[2])
                            img_size = img_height * img_width * 3
                            print("RGB image size: %d" %img_size)
                            # Read the remainder of the raw image payload.
                            buf = self.recvall(img_size, buf)
                            if buf is not None:
                                print("Image received with size: %d" %len(buf))
                                # NOTE(review): np.fromstring is deprecated;
                                # np.frombuffer is the modern equivalent.
                                img_rcv = np.fromstring(buf, dtype='uint8')
                                img_rcv = img_rcv.reshape((img_height, img_width, 3))
                                # Image as array
                                inp = np.asarray(img_rcv)
                                # Prediction
                                (people_detected,howManyPeopleDetected,confidence) = detectPeople(inp)
                                # Without resizing the window, it will fit the whole screen
                                # cv2.namedWindow("detected", cv2.WINDOW_NORMAL)
                                # cv2.resizeWindow("detected", img_width, img_height)
                                #
                                # cv2.imshow("detected", people_detected)
                                # cv2.waitKey(6000)
                                # cv2.destroyAllWindows()
                                # The format of the response will be:
                                # [howManyPeopleDetected confidence[0] confidence[1] .. confidence[N]]
                                if (howManyPeopleDetected > 0):
                                    res = ''.join( (str(howManyPeopleDetected),' '))
                                    for i in range(howManyPeopleDetected):
                                        res += str(confidence[i]) + ' '
                                    # NOTE(review): str.rstrip() returns a new
                                    # string; this call discards its result.
                                    res.rstrip()
                                    ressend = (res + '\n\r').encode('UTF-8')
                                    self.connection.send(ressend)
                        else:
                            print('Received: %s' % data)
                    elif (data == None or data == ""):
                        # Empty read or receive error: the client went away.
                        break
            finally:
                print('Connection closed.')
                # Clean up the connection
                if (self.connection != None):
                    self.connection.close()
                    self.connection = None

    # wait for Keyboard interrupt
    def spin(self):
        """Block the caller until CTRL+C, then flag the loop to stop."""
        while (self.dorun):
            try:
                time.sleep(120)
            except KeyboardInterrupt:
                print("Exit")
                self.dorun = False
"""
Start prediction server
"""
def startServer(port):
print("Starting server on port %d" % port)
mserver = ModelServer(port)
mserver.start()
mserver.spin()
mserver.stop()
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--server', default=False, action='store_true',
                        help='Start in server mode')
    parser.add_argument('-server_port', type=int, default=default_server_port,
                        help='server port (default: %d)' % default_server_port)
    parser.add_argument("-predict", type=str, default=None,
                        help="Image file to predict")
    args = parser.parse_args()
    if (args.server):
        print("\n -----Person detection:------\n")
        startServer(args.server_port)
    elif (args.predict != None):
        print("\n -----Person detection:------\n")
        # NOTE(review): detectPeople returns a tuple; passing it straight
        # to cv2.imshow/imwrite looks wrong -- confirm whether the
        # annotated image should be unpacked first.
        person_detection = detectPeople(args.predict)
        # Without resizing the window, it will fit the whole screen
        cv2.namedWindow("detected",cv2.WINDOW_NORMAL)
        cv2.resizeWindow("detected",(959,1280))
        cv2.imshow("detected",person_detection)
        cv2.waitKey(0)
        cv2.imwrite("prova.jpg",person_detection)
    else:
        print("Please specify a model name and an operation to perform.")
        sys.exit(1)
9efcd7f855f8708f629d904eb7e1e8b6720ec19d | 6,028 | py | Python | python/smqtk/representation/detection_element/_interface.py | joshanderson-kw/SMQTK | 594e7c733fe7f4e514a1a08a7343293a883a41fc | [
"BSD-3-Clause"
] | 82 | 2015-01-07T15:33:29.000Z | 2021-08-11T18:34:05.000Z | python/smqtk/representation/detection_element/_interface.py | joshanderson-kw/SMQTK | 594e7c733fe7f4e514a1a08a7343293a883a41fc | [
"BSD-3-Clause"
] | 230 | 2015-04-08T14:36:51.000Z | 2022-03-14T17:55:30.000Z | python/smqtk/representation/detection_element/_interface.py | joshanderson-kw/SMQTK | 594e7c733fe7f4e514a1a08a7343293a883a41fc | [
"BSD-3-Clause"
] | 65 | 2015-01-04T15:00:16.000Z | 2021-11-19T18:09:11.000Z | import abc
from smqtk.exceptions import NoDetectionError
from smqtk.representation import SmqtkRepresentation
from smqtk.utils.dict import merge_dict
from smqtk.utils.plugin import Pluggable
class DetectionElement (SmqtkRepresentation, Pluggable):
    """
    Representation of a spatial detection.
    """

    # Single slot: instances carry only a UUID (no per-instance __dict__).
    __slots__ = ('_uuid',)

    @classmethod
    def get_default_config(cls):
        # Override from Configurable.
        default = super(DetectionElement, cls).get_default_config()
        # Remove runtime positional argument(s).
        del default['uuid']
        return default

    # noinspection PyMethodOverriding
    @classmethod
    def from_config(cls, config_dict, uuid, merge_default=True):
        """
        Override of
        :meth:`smqtk.utils.configuration.Configurable.from_config` with the
        added runtime argument ``uuid``. See parent method documentation for
        details.

        :param config_dict: JSON compliant dictionary encapsulating
            a configuration.
        :type config_dict: dict

        :param collections.abc.Hashable uuid:
            UUID to assign to the produced DetectionElement.

        :param merge_default: Merge the given configuration on top of the
            default provided by ``get_default_config``.
        :type merge_default: bool

        :return: Constructed instance from the provided config.
        :rtype: DetectionElement
        """
        # Override from Configurable
        # Handle passing of runtime positional argument(s).
        if merge_default:
            config_dict = merge_dict(cls.get_default_config(), config_dict)
        config_dict['uuid'] = uuid
        return super(DetectionElement, cls).from_config(config_dict,
                                                        merge_default=False)

    def __init__(self, uuid):
        """
        Initialize a new detection element with the given ``uuid``.

        All DetectionElement classes will take a ``uuid`` parameter as the
        first positional argument. This parameter is not configurable and is
        only specified at runtime. Implementing classes should not include
        ``uuid`` in ``get_config`` returns.

        :param collections.abc.Hashable uuid:
            Unique ID reference of the detection.
        """
        super(DetectionElement, self).__init__()
        self._uuid = uuid

    # Equality is defined by detection content (see __eq__), so hashing is
    # explicitly disabled for this type.
    __hash__ = None  # type: ignore

    def __eq__(self, other):
        """
        Equality of two detections is defined by their equal spatial overlap
        AND their equivalent classification.

        When one element does not contain detection information but the other
        does, the two elements are of course considered NOT equal.

        If *neither* elements contain detection information, they are defined
        as NOT equal (undefined).

        :param DetectionElement other: Other detection element.
        :return: True if the two detections are equal in spacial overlap and
            classification.
        """
        try:
            s_bb, s_ce = self.get_detection()
            o_bb, o_ce = other.get_detection()
            return s_bb == o_bb and s_ce == o_ce
        except NoDetectionError:
            # At least one side carries no detection payload -> not equal.
            return False

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        # using "{{...}}" to skip .format activation.
        return "{:s}{{uuid: {}}}".format(self.__class__.__name__, self._uuid)

    def __nonzero__(self):
        """
        A DetectionElement is considered non-zero if ``has_detection`` returns
        True. See method documentation for details.

        :return: True if this instance is non-zero (see above), false
            otherwise.
        :rtype: bool
        """
        return self.has_detection()

    # Python 3 truthiness alias for the Python 2 style __nonzero__ above.
    __bool__ = __nonzero__

    @property
    def uuid(self):
        # Read-only access to the detection's unique identifier.
        return self._uuid

    #
    # Abstract methods
    #

    @abc.abstractmethod
    def __getstate__(self):
        return {
            '_uuid': self._uuid,
        }

    @abc.abstractmethod
    def __setstate__(self, state):
        self._uuid = state['_uuid']

    @abc.abstractmethod
    def has_detection(self):
        """
        :return: Whether or not this container currently contains a valid
            detection bounding box and classification element (must be
            non-zero).
        :rtype: bool
        """

    @abc.abstractmethod
    def get_bbox(self):
        """
        :return: The spatial bounding box of this detection.
        :rtype: smqtk.representation.AxisAlignedBoundingBox

        :raises NoDetectionError: No detection AxisAlignedBoundingBox set yet.
        """

    @abc.abstractmethod
    def get_classification(self):
        """
        :return: The classification element of this detection.
        :rtype: smqtk.representation.ClassificationElement

        :raises NoDetectionError: No detection ClassificationElement set yet or
            the element is empty.
        """

    @abc.abstractmethod
    def get_detection(self):
        """
        :return: The paired spatial bounding box and classification element of
            this detection.
        :rtype: (smqtk.representation.AxisAlignedBoundingBox,
                 smqtk.representation.ClassificationElement)

        :raises NoDetectionError: No detection AxisAlignedBoundingBox and
            ClassificationElement set yet.
        """

    @abc.abstractmethod
    def set_detection(self, bbox, classification_element):
        """
        Set a bounding box and classification element to this detection
        element.

        :param smqtk.representation.AxisAlignedBoundingBox bbox:
            Spatial bounding box instance.
        :param smqtk.representation.ClassificationElement classification_element:
            The classification of this detection.

        :raises ValueError: No, or invalid, AxisAlignedBoundingBox or
            ClassificationElement was provided.

        :returns: Self
        :rtype: DetectionElement
        """
| 31.233161 | 81 | 0.640511 |
56b186ba86a6b9a2488d72ea50b0c1eda385af91 | 1,975 | py | Python | lib/controllers/table/Vocabulary.py | dpozorski/TuringMachine | 99bc55d69c644f06c752c1e92e6f3366ec8777fe | [
"MIT"
] | null | null | null | lib/controllers/table/Vocabulary.py | dpozorski/TuringMachine | 99bc55d69c644f06c752c1e92e6f3366ec8777fe | [
"MIT"
] | null | null | null | lib/controllers/table/Vocabulary.py | dpozorski/TuringMachine | 99bc55d69c644f06c752c1e92e6f3366ec8777fe | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Vocabulary Docstring
The Vocabulary class represents a collection
of words that should be considered as the domain
of words that may be written to or present on
the machine's tape.
"""
from lib.controllers.table.Word import Word
from typing import Set
__author__ = "Dylan Pozorski"
__project__ = "TuringMachine"
__class__ = "Vocabulary"
class Vocabulary(object):
	"""
	Vocabulary

	A collection of words constituting the domain of words that may be
	written to, or be present on, the machine's tape.

	Attributes:
		words (:obj:`Set[Word]`): The set of words
			in the tape's vocabulary.

	"""

	def __init__(self, words: Set[Word]):
		"""
		Vocabulary Constructor.

		:param words: The set of words in
			the tape's vocabulary.
		:raises: ValueError if ``words`` is None or empty
			(enforced by the ``words`` setter).

		"""

		self.words = words

	def __len__(self) -> int:
		"""
		Return the size of the vocabulary.

		:return: int

		"""

		return len(self.words)

	def __contains__(self, item: Word) -> bool:
		"""
		Examine whether the vocabulary contains
		the specified word.

		:param item: Word, Value to search for.
		:return: bool

		"""

		return item in self.words

	def __add__(self, other: Word) -> None:
		"""
		Add the specified word to the vocabulary
		if it is not already present.

		:param other: Word, The word to add to
			the vocabulary.
		:return: None

		"""

		if not self.__contains__(item=other):
			# Bug fix: set.add() accepts no keyword arguments, so the old
			# call ``self.words.add(element=other)`` raised TypeError on
			# every insertion attempt.
			self.words.add(other)

	def __delitem__(self, key: str):
		"""
		Remove the specified word from the vocabulary
		if it exists in the vocabulary.

		:param key: str, The name of the word to
			remove from the vocabulary.
		:return: None

		"""

		if self.__contains__(item=Word(name=key)):
			self.words.remove(Word(name=key))

	@property
	def words(self) -> Set[Word]:
		"""
		:obj:`Set[Word]` The tape vocabulary.

		Set the vocabulary.

		:raises: ValueError if an empty vocabulary
			is attempted to be set.

		"""

		return self.__words

	@words.setter
	def words(self, words: Set[Word]) -> None:
		if words is None or len(words) == 0:
			raise ValueError("Empty Vocabulary Provided.")

		self.__words = words
| 17.477876 | 49 | 0.682025 |
afc6b8d4036e0ac300c442bd9c3d6f9222d2c0c8 | 572 | py | Python | tests/conftest.py | georgealton/iam-policy-validator-to-sarif | a753c9b1c41ae1250ed24cdd64f9eab5d98d678d | [
"MIT"
] | 3 | 2021-11-17T10:37:48.000Z | 2022-01-17T11:03:05.000Z | tests/conftest.py | georgealton/iam-policy-validator-to-sarif | a753c9b1c41ae1250ed24cdd64f9eab5d98d678d | [
"MIT"
] | null | null | null | tests/conftest.py | georgealton/iam-policy-validator-to-sarif | a753c9b1c41ae1250ed24cdd64f9eab5d98d678d | [
"MIT"
] | null | null | null | import json
import pytest
from iam_sarif_report.adapters import checks
from iam_sarif_report.domain import converter
@pytest.fixture(name="sarif_schema")
def _sarif_schema(shared_datadir):
    """Yield the parsed SARIF 2.1.0 JSON schema from the shared data dir."""
    raw = (shared_datadir / "sarif-schema-2.1.0.json").read_text()
    yield json.loads(raw)
@pytest.fixture(name="checks_repository")
def _checks_repository():
    """Yield a checks repository backed by the packaged check data."""
    repository = checks.ChecksPackageDataRepository()
    yield repository
@pytest.fixture(name="sarif_converter")
def _sarif_converter(checks_repository):
    """Yield a SarifConverter wired to the checks_repository fixture."""
    sarif_converter = converter.SarifConverter(checks_repository=checks_repository)
    yield sarif_converter
| 24.869565 | 71 | 0.807692 |
095961bb5b76c758e4fb8a209e996656d76beaf8 | 161 | py | Python | genes/views.py | moonso/phizz-genes | 5a907bf3ce544a7d3b5b0054000eb8a91d13de04 | [
"MIT"
] | null | null | null | genes/views.py | moonso/phizz-genes | 5a907bf3ce544a7d3b5b0054000eb8a91d13de04 | [
"MIT"
] | null | null | null | genes/views.py | moonso/phizz-genes | 5a907bf3ce544a7d3b5b0054000eb8a91d13de04 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.http import HttpResponse
def index(request):
    """Placeholder index view for the genes app."""
    greeting = "Hello, world. You're at the genes index."
    return HttpResponse(greeting)
add4bca79b31d50809f7d54f1b77242df977de98 | 1,400 | py | Python | tests/client/lcd/api/auth_test.py | terra-money/terra.py | d66de6bb4d9d78f08ad8ffe05ae72d847fc5099d | [
"MIT"
] | 66 | 2021-10-21T23:29:38.000Z | 2022-03-30T15:58:13.000Z | tests/client/lcd/api/auth_test.py | terra-money/terra.py | d66de6bb4d9d78f08ad8ffe05ae72d847fc5099d | [
"MIT"
] | 50 | 2021-10-19T06:11:56.000Z | 2022-03-31T17:06:57.000Z | tests/client/lcd/api/auth_test.py | terra-money/terra.py | d66de6bb4d9d78f08ad8ffe05ae72d847fc5099d | [
"MIT"
] | 39 | 2021-11-07T17:28:31.000Z | 2022-03-31T15:03:57.000Z | from terra_sdk.client.lcd import LCDClient, PaginationOptions
from terra_sdk.core import Coins
# Shared LCD client used by the tests below, targeting the public
# pisco-1 testnet endpoint.
terra = LCDClient(
    url="https://pisco-lcd.terra.dev/",
    chain_id="pisco-1",
)
def test_account_info():
    """Query auth.account_info for each supported account flavour."""
    # base_account
    base_addr = "terra14c64c9wdmnz9n8e9uyvtg70755zn377eyf0s4x"
    base = terra.auth.account_info(base_addr)
    assert base.address == base_addr
    assert base.account_number == 0
    # delayed_vesting_account
    delayed_addr = "terra1t8mw9mlt28ax6qj88ra89fcv60n8uu7yfqus3r"
    delayed = terra.auth.account_info(delayed_addr)
    assert delayed.base_vesting_account.base_account.address == delayed_addr
    # continuous_vesting_account
    continuous_addr = "terra186wj7gy9q5syg0kls4cctgn23d9xh3d6h3dqq9"
    continuous = terra.auth.account_info(continuous_addr)
    assert continuous.base_vesting_account.base_account.address == continuous_addr
    assert continuous.start_time == "1653300000"
    # periodic_vesting_account
    periodic_addr = "terra1auswfkggetjrhe8jxkzngfd26hugz55d64p0z6"
    periodic = terra.auth.account_info(periodic_addr)
    assert periodic.base_vesting_account.base_account.address == periodic_addr
    assert periodic.start_time == "1660000000"
    first_period = periodic.vesting_periods[0]
    assert first_period.length == 604800
    assert first_period.amount == Coins("1000000000uluna")
| 31.818182 | 84 | 0.754286 |
cb9a9c02048d8448477484e3dc49129bae5d66a5 | 1,461 | py | Python | deep-rl/lib/python2.7/site-packages/OpenGL/GL/EXT/texture_compression_latc.py | ShujaKhalid/deep-rl | 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | [
"MIT"
] | 210 | 2016-04-09T14:26:00.000Z | 2022-03-25T18:36:19.000Z | deep-rl/lib/python2.7/site-packages/OpenGL/GL/EXT/texture_compression_latc.py | ShujaKhalid/deep-rl | 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | [
"MIT"
] | 72 | 2016-09-04T09:30:19.000Z | 2022-03-27T17:06:53.000Z | deep-rl/lib/python2.7/site-packages/OpenGL/GL/EXT/texture_compression_latc.py | ShujaKhalid/deep-rl | 99c6ba6c3095d1bfdab81bd01395ced96bddd611 | [
"MIT"
] | 64 | 2016-04-09T14:26:49.000Z | 2022-03-21T11:19:47.000Z | '''OpenGL extension EXT.texture_compression_latc
This module customises the behaviour of the
OpenGL.raw.GL.EXT.texture_compression_latc to provide a more
Python-friendly API
Overview (from the spec)
This extension introduces four new block-based texture compression
formats suited for unsigned and signed luminance and luminance-alpha
textures (hence the name "latc" for Luminance-Alpha Texture
Compression).
These formats are designed to reduce the storage requirements and
memory bandwidth required for luminance and luminance-alpha textures
by a factor of 2-to-1 over conventional uncompressed luminance and
luminance-alpha textures with 8-bit components (GL_LUMINANCE8 and
GL_LUMINANCE8_ALPHA8).
The compressed signed luminance-alpha format is reasonably suited
for storing compressed normal maps.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/texture_compression_latc.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.EXT.texture_compression_latc import *
from OpenGL.raw.GL.EXT.texture_compression_latc import _EXTENSION_NAME
def glInitTextureCompressionLatcEXT():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    has_extension = extensions.hasGLExtension(_EXTENSION_NAME)
    return has_extension
### END AUTOGENERATED SECTION | 37.461538 | 71 | 0.819302 |
08176a658566134d0bc6398df8568596e270a11e | 1,269 | py | Python | TrackerState.py | BlendOSVR/OSVR-Python-FFI | 30371fdb00f89c28c765f450d0784f469f497711 | [
"Apache-2.0"
] | null | null | null | TrackerState.py | BlendOSVR/OSVR-Python-FFI | 30371fdb00f89c28c765f450d0784f469f497711 | [
"Apache-2.0"
] | 1 | 2015-11-06T20:14:49.000Z | 2015-11-06T20:14:49.000Z | TrackerState.py | BlendOSVR/osvrffi | 30371fdb00f89c28c765f450d0784f469f497711 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Sensics and OSVR community
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from osvrClientKit import *
# Acquire a client context and the head-tracking interface.
ctx = osvrClientInit("com.osvr.exampleclients.TrackerState")
lefthand = osvrClientGetInterface(ctx, "/me/head")

# Pump the client loop; sample and report the pose every 100th iteration.
for i in range(0, 1000000):
    osvrClientUpdate(ctx)
    if i % 100 != 0:
        continue
    try:
        state, timestamp = osvrGetPoseState(lefthand)
    except ReturnError:
        # No pose available yet; keep polling.
        pass
    else:
        print("Got pose state: Position = (%f, %f, %f), orientation = (%f, %f, %f, %f)" % (state.translation.data[0], state.translation.data[1], state.translation.data[2], state.rotation.data[0], state.rotation.data[1], state.rotation.data[2], state.rotation.data[3]))

osvrClientShutdown(ctx)
print("Library shut down, exiting.")
6edf9543ac00f21117f877bd56ba70a6c0fe3bda | 3,208 | py | Python | etw/GUID.py | tyh2333/pywintrace | a8d42b4d705b817810b9640a4bf7ea032792a3a0 | [
"Apache-2.0"
] | 247 | 2017-09-19T20:34:22.000Z | 2022-03-21T21:56:38.000Z | etw/GUID.py | tyh2333/pywintrace | a8d42b4d705b817810b9640a4bf7ea032792a3a0 | [
"Apache-2.0"
] | 29 | 2017-09-29T18:31:38.000Z | 2021-05-10T08:36:10.000Z | etw/GUID.py | tyh2333/pywintrace | a8d42b4d705b817810b9640a4bf7ea032792a3a0 | [
"Apache-2.0"
] | 66 | 2017-09-19T23:21:05.000Z | 2022-03-09T16:13:04.000Z | ########################################################################
# Copyright 2017 FireEye Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
########################################################################
import ctypes as ct
def cmp(a, b):
    """Python-2-style three-way comparison: -1 if a < b, 0 if equal, 1 if a > b."""
    return int(a > b) - int(a < b)
# ctypes aliases for the Win32 primitive types used in GUID._fields_.
BYTE = ct.c_byte
WORD = ct.c_ushort
DWORD = ct.c_ulong
# ole32.dll bindings; ct.oledll wrappers raise OSError on failing HRESULTs.
_ole32 = ct.oledll.ole32
_StringFromCLSID = _ole32.StringFromCLSID
# CoTaskMemFree releases strings COM allocates on our behalf.
_CoTaskMemFree = ct.windll.ole32.CoTaskMemFree
_ProgIDFromCLSID = _ole32.ProgIDFromCLSID
_CLSIDFromString = _ole32.CLSIDFromString
_CLSIDFromProgID = _ole32.CLSIDFromProgID
_CoCreateGuid = _ole32.CoCreateGuid
# python 2
try:
    basestring
except NameError:
    # python 3
    basestring = str
class GUID(ct.Structure):
    """ctypes wrapper around the Win32 GUID structure.

    Instances are mutable but hashable (the hash covers the raw bytes),
    so do not mutate a GUID that is used as a dict key.
    """
    _fields_ = [("Data1", DWORD),
                ("Data2", WORD),
                ("Data3", WORD),
                ("Data4", BYTE * 8)]
    def __init__(self, name=None):
        # When given, *name* must be a "{...}" GUID string parseable by
        # CLSIDFromString; otherwise the structure stays zero-filled.
        if name is not None:
            _CLSIDFromString(str(name), ct.byref(self))
    def __repr__(self):
        return 'GUID("%s")' % str(self)
    def __str__(self):
        p = ct.c_wchar_p()
        _StringFromCLSID(ct.byref(self), ct.byref(p))
        result = p.value
        _CoTaskMemFree(p)  # string was allocated by COM; free it here
        return result
    def __cmp__(self, other):
        # Python 2 rich-comparison fallback; ignored by Python 3.
        if isinstance(other, GUID):
            return cmp(bytes(self), bytes(other))
        return -1
    def __nonzero__(self):
        return self != GUID_null
    # Bug fix: Python 3 looks for __bool__, not __nonzero__; without this
    # alias every GUID (including the null GUID) was truthy under Python 3.
    __bool__ = __nonzero__
    def __eq__(self, other):
        return isinstance(other, GUID) and bytes(self) == bytes(other)
    def __hash__(self):
        # We make GUID instances hashable, although they are mutable.
        return hash(bytes(self))
    def copy(self):
        """Return a new GUID with the same value."""
        return GUID(str(self))
    @classmethod
    def from_progid(cls, progid):
        """Get guid from progid, a "{...}" string, or an object exposing
        a _reg_clsid_ attribute.
        """
        if hasattr(progid, "_reg_clsid_"):
            progid = progid._reg_clsid_
        if isinstance(progid, cls):
            return progid
        elif isinstance(progid, basestring):
            if progid.startswith("{"):
                return cls(progid)
            inst = cls()
            _CLSIDFromProgID(str(progid), ct.byref(inst))
            return inst
        else:
            raise TypeError("Cannot construct guid from %r" % progid)
    def as_progid(self):
        "Convert a GUID into a progid"
        progid = ct.c_wchar_p()
        _ProgIDFromCLSID(ct.byref(self), ct.byref(progid))
        result = progid.value
        _CoTaskMemFree(progid)
        return result
    @classmethod
    def create_new(cls):
        "Create a brand new guid"
        guid = cls()
        _CoCreateGuid(ct.byref(guid))
        return guid
# The all-zero GUID; GUID.__nonzero__ compares against this sentinel.
GUID_null = GUID()
__all__ = ["GUID"]
| 26.957983 | 74 | 0.59788 |
c0e51a4fdac7b7cab77fcb8a42acc7efd368fee3 | 3,123 | py | Python | CSE321-Operating Systems/Lab-4/Task 1 (SJF Scheduling-SRTF )/task1.py | AIM3r4j/BRACU | 8f3973ca8226a496914f825a2d295d25dd5b4a4d | [
"MIT"
] | null | null | null | CSE321-Operating Systems/Lab-4/Task 1 (SJF Scheduling-SRTF )/task1.py | AIM3r4j/BRACU | 8f3973ca8226a496914f825a2d295d25dd5b4a4d | [
"MIT"
] | null | null | null | CSE321-Operating Systems/Lab-4/Task 1 (SJF Scheduling-SRTF )/task1.py | AIM3r4j/BRACU | 8f3973ca8226a496914f825a2d295d25dd5b4a4d | [
"MIT"
] | null | null | null | # Function to calculate the waiting time of each process
def calculateWaiting(processes, n, waiting):
process_check = False
current_time = 0
completed = 0 #completed process count
minm = 99999
shortest_remaining = 0
# Array for remaining times
remaining = [0] * n
for i in range(n):
remaining[i] = processes[i][2]
# Loops till the processes get completed/terminated
while (completed != n):
# Find process with minimum remaining time at the moment
for j in range(n):
if ((processes[j][1] <= current_time) and (remaining[j] < minm) and remaining[j] > 0):
minm = remaining[j]
shortest_remaining = j
process_check = True
if (process_check == False):
current_time += 1
continue
remaining[shortest_remaining] -= 1
minm = remaining[shortest_remaining]
if (minm == 0):
minm = 99999
# Checking if a process is completed
if (remaining[shortest_remaining] == 0):
completed += 1
process_check = False
finish_time = current_time + 1
# Calculate the waiting time
waiting[shortest_remaining] = (finish_time - proc[shortest_remaining][2] - proc[shortest_remaining][1])
if (waiting[shortest_remaining] < 0):
waiting[shortest_remaining] = 0
# Increasing the current time by 1
current_time += 1
# Function to calculate the turnaround time of each process
def calculateTurnaround(processes, n, waiting, turnaround):
for i in range(n):
#turnaround=burst+waiting
turnaround[i] = processes[i][2] + waiting[i]
# Function to calculate the average waiting time and the average turnaround time and also print the outputs
def calculateTheAvgs(processes, n):
waiting = [0] * n
turnaround = [0] * n
calculateWaiting(processes, n, waiting)
calculateTurnaround(processes, n, waiting, turnaround)
# Printing the CT, WT, TAT of each process
print("\nProcesses Arrival Time Burst Time Completion Time Waiting Time Turn-Around Time")
for i in range(n):
print(" P"+str(processes[i][0]), "\t\t",
processes[i][1], "\t\t",
processes[i][2], "\t\t",
processes[i][1]+turnaround[i], "\t\t",
waiting[i], "\t\t\t", turnaround[i])
# Calculating the total to get the average
total_waiting = 0
total_turnaround = 0
for i in range(n):
total_waiting = total_waiting + waiting[i]
total_turnaround = total_turnaround + turnaround[i]
# Printing the avgs
print("\nAverage waiting time = ",(total_waiting / n) )
print("Average turn-around time = ",(total_turnaround / n))
# Process count
process_count = 4
# Each process with their id, burst time and arrival time
proc = [[1, 0, 8], # [process id, arrival time, burst time]
[2, 1, 4],
[3, 2, 9],
[4, 3, 5]]
calculateTheAvgs(proc, process_count) | 32.873684 | 115 | 0.600704 |
e526ace8825d594dddd95adfde7e6993616d12d2 | 9,246 | py | Python | Advantech ADAM 6050 6060 relay module/legacy/pymodbus/datastore/modredis.py | scroix/nodel-recipes | 3aa26de380d13957ae96d85befe00de6289d2687 | [
"MIT"
] | 8 | 2017-04-22T15:34:23.000Z | 2022-02-03T20:00:20.000Z | Advantech ADAM 6050 6060 relay module/legacy/pymodbus/datastore/modredis.py | scroix/nodel-recipes | 3aa26de380d13957ae96d85befe00de6289d2687 | [
"MIT"
] | 48 | 2016-12-01T19:25:53.000Z | 2021-11-24T04:23:53.000Z | Advantech ADAM 6050 6060 relay module/legacy/pymodbus/datastore/modredis.py | scroix/nodel-recipes | 3aa26de380d13957ae96d85befe00de6289d2687 | [
"MIT"
] | 10 | 2017-04-05T09:51:43.000Z | 2021-07-23T04:50:41.000Z | import redis
from pymodbus.exceptions import NotImplementedException, ParameterException
from pymodbus.interfaces import IModbusSlaveContext
from pymodbus.utilities import pack_bitstring, unpack_bitstring
#---------------------------------------------------------------------------#
# Logging
#---------------------------------------------------------------------------#
import logging;
_logger = logging.getLogger(__name__)
#---------------------------------------------------------------------------#
# Context
#---------------------------------------------------------------------------#
class RedisSlaveContext(IModbusSlaveContext):
    '''
    This is a modbus slave context using redis as a backing
    store.

    Datastore layout: each Modbus table ('d'iscrete, 'c'oil, 'h'olding,
    'i'nput) is stored under keys "<prefix>:<table>:<word index>", where a
    word holds a 16-value block.  Dispatch from function code to table is
    built once in __build_mapping().
    '''
    def __init__(self, **kwargs):
        ''' Initializes the datastores
        :param host: The host to connect to
        :param port: The port to connect to
        :param prefix: A prefix for the keys
        '''
        host = kwargs.get('host', 'localhost')
        port = kwargs.get('port', 6379)
        self.prefix = kwargs.get('prefix', 'pymodbus')
        self.client = redis.Redis(host=host, port=port)
        # Build the per-table validate/get/set dispatch dictionaries.
        self.__build_mapping()
    def __str__(self):
        ''' Returns a string representation of the context
        :returns: A string representation of the context
        '''
        return "Redis Slave Context %s" % self.client
    def reset(self):
        ''' Resets all the datastores to their default values '''
        # NOTE(review): flushall() wipes the ENTIRE redis instance, not just
        # keys under self.prefix -- confirm this is intended.
        self.client.flushall()
    def validate(self, fx, address, count=1):
        ''' Validates the request to make sure it is in range
        :param fx: The function we are working with
        :param address: The starting address
        :param count: The number of values to test
        :returns: True if the request in within range, False otherwise
        '''
        address = address + 1 # section 4.4 of specification
        _logger.debug("validate[%d] %d:%d" % (fx, address, count))
        return self.__val_callbacks[self.decode(fx)](address, count)
    def getValues(self, fx, address, count=1):
        ''' Validates the request to make sure it is in range
        :param fx: The function we are working with
        :param address: The starting address
        :param count: The number of values to retrieve
        :returns: The requested values from a:a+c
        '''
        address = address + 1 # section 4.4 of specification
        _logger.debug("getValues[%d] %d:%d" % (fx, address, count))
        return self.__get_callbacks[self.decode(fx)](address, count)
    def setValues(self, fx, address, values):
        ''' Sets the datastore with the supplied values
        :param fx: The function we are working with
        :param address: The starting address
        :param values: The new values to be set
        '''
        address = address + 1 # section 4.4 of specification
        _logger.debug("setValues[%d] %d:%d" % (fx, address,len(values)))
        self.__set_callbacks[self.decode(fx)](address, values)
    #--------------------------------------------------------------------------#
    # Redis Helper Methods
    #--------------------------------------------------------------------------#
    def __get_prefix(self, key):
        ''' This is a helper to abstract getting bit values
        :param key: The key prefix to use
        :returns: The key prefix to redis
        '''
        return "%s:%s" % (self.prefix, key)
    def __build_mapping(self):
        '''
        A quick helper method to build the function
        code mapper.

        Keys are the single-character table codes returned by
        IModbusSlaveContext.decode(fx).
        '''
        self.__val_callbacks = {
            'd' : lambda o,c: self.__val_bit('d', o, c),
            'c' : lambda o,c: self.__val_bit('c', o, c),
            'h' : lambda o,c: self.__val_reg('h', o, c),
            'i' : lambda o,c: self.__val_reg('i', o, c),
        }
        self.__get_callbacks = {
            'd' : lambda o,c: self.__get_bit('d', o, c),
            'c' : lambda o,c: self.__get_bit('c', o, c),
            'h' : lambda o,c: self.__get_reg('h', o, c),
            'i' : lambda o,c: self.__get_reg('i', o, c),
        }
        self.__set_callbacks = {
            'd' : lambda o,v: self.__set_bit('d', o, v),
            'c' : lambda o,v: self.__set_bit('c', o, v),
            'h' : lambda o,v: self.__set_reg('h', o, v),
            'i' : lambda o,v: self.__set_reg('i', o, v),
        }
    #--------------------------------------------------------------------------#
    # Redis discrete implementation
    #--------------------------------------------------------------------------#
    # NOTE(review): 16 % 8 == 0, so __bit_default is the EMPTY string; a
    # zero-filled 16-bit word would need __bit_size // 8 bytes instead.
    # An empty default makes missing keys contribute nothing to the joined
    # bitstring, shifting later words -- confirm intent.
    __bit_size = 16
    __bit_default = '\x00' * (__bit_size % 8)
    def __get_bit_values(self, key, offset, count):
        ''' This is a helper to abstract getting bit values
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param count: The number of bits to read
        '''
        key = self.__get_prefix(key)
        # s..e are the word indices covering [offset, offset+count).
        s = divmod(offset, self.__bit_size)[0]
        e = divmod(offset+count, self.__bit_size)[0]
        request = ('%s:%s' % (key, v) for v in range(s, e+1))
        response = self.client.mget(request)
        return response
    def __val_bit(self, key, offset, count):
        ''' Validates that the given range is currently set in redis.
        If any of the keys return None, then it is invalid.
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param count: The number of bits to read
        '''
        response = self.__get_bit_values(key, offset, count)
        return None not in response
    def __get_bit(self, key, offset, count):
        '''
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param count: The number of bits to read
        '''
        response = self.__get_bit_values(key, offset, count)
        response = (r or self.__bit_default for r in response)
        result = ''.join(response)
        result = unpack_bitstring(result)
        # NOTE(review): `result` starts at word boundary s*16, yet is indexed
        # with the absolute offset; this only lines up when offset is within
        # the first word -- verify against callers.
        return result[offset:offset+count]
    def __set_bit(self, key, offset, values):
        '''
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param values: The values to set
        '''
        count = len(values)
        s = divmod(offset, self.__bit_size)[0]
        e = divmod(offset+count, self.__bit_size)[0]
        value = pack_bitstring(values)
        # Read-modify-write: splice the new bytes into the current blocks.
        current = self.__get_bit_values(key, offset, count)
        current = (r or self.__bit_default for r in current)
        current = ''.join(current)
        current = current[0:offset] + value + current[offset+count:]
        # The genexp variable `s` below has its own scope and does not
        # clobber the word index `s` computed above.
        final = (current[s:s+self.__bit_size] for s in range(0, count, self.__bit_size))
        key = self.__get_prefix(key)
        request = ('%s:%s' % (key, v) for v in range(s, e+1))
        request = dict(zip(request, final))
        self.client.mset(request)
    #--------------------------------------------------------------------------#
    # Redis register implementation
    #--------------------------------------------------------------------------#
    # NOTE(review): same 16 % 8 == 0 concern as __bit_default above.
    __reg_size = 16
    __reg_default = '\x00' * (__reg_size % 8)
    def __get_reg_values(self, key, offset, count):
        ''' This is a helper to abstract getting register values
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param count: The number of bits to read
        '''
        key = self.__get_prefix(key)
        #s = divmod(offset, self.__reg_size)[0]
        #e = divmod(offset+count, self.__reg_size)[0]
        #request = ('%s:%s' % (key, v) for v in range(s, e+1))
        # NOTE(review): range(offset, count+1) treats `count` as an absolute
        # end index; range(offset, offset+count) looks intended -- confirm.
        request = ('%s:%s' % (key, v) for v in range(offset, count+1))
        response = self.client.mget(request)
        return response
    def __val_reg(self, key, offset, count):
        ''' Validates that the given range is currently set in redis.
        If any of the keys return None, then it is invalid.
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param count: The number of bits to read
        '''
        response = self.__get_reg_values(key, offset, count)
        return None not in response
    def __get_reg(self, key, offset, count):
        '''
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param count: The number of bits to read
        '''
        response = self.__get_reg_values(key, offset, count)
        response = [r or self.__reg_default for r in response]
        return response[offset:offset+count]
    def __set_reg(self, key, offset, values):
        '''
        :param key: The key prefix to use
        :param offset: The address offset to start at
        :param values: The values to set
        '''
        count = len(values)
        #s = divmod(offset, self.__reg_size)
        #e = divmod(offset+count, self.__reg_size)
        #current = self.__get_reg_values(key, offset, count)
        key = self.__get_prefix(key)
        # NOTE(review): same range(offset, count+1) concern as
        # __get_reg_values above.
        request = ('%s:%s' % (key, v) for v in range(offset, count+1))
        request = dict(zip(request, values))
        self.client.mset(request)
| 37.738776 | 90 | 0.547372 |
e37c47f431121d7b436b9bc7267d6847aa1ad9f5 | 2,383 | py | Python | particle_world.ut.py | davemenc/ParticleWorld | d518229874f932addac1b26ee140826efc24a045 | [
"Apache-2.0"
] | 1 | 2019-11-12T17:19:30.000Z | 2019-11-12T17:19:30.000Z | particle_world.ut.py | davemenc/ParticleWorld | d518229874f932addac1b26ee140826efc24a045 | [
"Apache-2.0"
] | null | null | null | particle_world.ut.py | davemenc/ParticleWorld | d518229874f932addac1b26ee140826efc24a045 | [
"Apache-2.0"
] | null | null | null | import unittest
import math
import sys
from particle_world import World
from particle_world import Particle
sys.path.insert(0, '../vector3dm')
from vector3dm import Vector3dm
class test_world(unittest.TestCase):
    """Unit tests for the World and Particle classes.

    Bug fix: failure messages were written as ``self.assertX(a, b), "msg"``
    -- a tuple expression whose message part was dead code.  They are now
    passed via ``msg=`` so unittest actually reports them (the keyword form
    is required for assertAlmostEqual, whose third positional argument is
    ``places``).
    """
    def test_aero_acc(self):
        p = Particle(Vector3dm(10, 10, 10, "c"), Vector3dm(0, 10, 0, "c"))
        drag = p.aero_acc().convert_to_cartesian()
        self.assertAlmostEqual(
            drag.get_y(), -5,
            msg="aero drag: drag wrong: is {} should be {}".format(drag, -5))
    def test_random_position(self):
        # Renamed from `max` to avoid shadowing the builtin.
        extent = 20
        w = World(1, extent, extent, extent)
        v = w.random_position()
        x = v.get_x()
        y = v.get_y()
        z = v.get_z()
        # Every coordinate must lie within [-extent, extent].
        self.assertLessEqual(x, extent)
        self.assertGreaterEqual(x, -extent)
        self.assertLessEqual(y, extent)
        self.assertGreaterEqual(y, -extent)
        self.assertLessEqual(z, extent)
        self.assertGreaterEqual(z, -extent)
    def test_create_world(self):
        w = World(10, 1000, 1000, 1000, wall_type=0)
        self.assertEqual(w.width, 1000)
        self.assertEqual(w.height, 1000)
        self.assertEqual(w.depth, 1000)
        self.assertEqual(len(w.particles), 10,
                         msg="world creation wrong particle count")
        self.assertEqual(w.wall_type, 0, msg="world creation wrong wall type")
    def test_create_particle(self):
        p = Particle(Vector3dm(1, 2, 3, "c"), Vector3dm(4, 5, 6, "c"))
        # Position components.
        self.assertAlmostEqual(p.position.get_x(), 1,
            msg="particle init: x pos bad result: is {} should be {}".format(p.position.get_x(), 1))
        self.assertAlmostEqual(p.position.get_y(), 2,
            msg="particle init: y pos bad result: is {} should be {}".format(p.position.get_y(), 2))
        self.assertAlmostEqual(p.position.get_z(), 3,
            msg="particle init: z pos bad result: is {} should be {}".format(p.position.get_z(), 3))
        # Velocity components (messages now format the velocity, which the
        # original dead messages mistakenly took from the position).
        self.assertAlmostEqual(p.velocity.get_x(), 4,
            msg="particle init: x vel bad result: is {} should be {}".format(p.velocity.get_x(), 4))
        self.assertAlmostEqual(p.velocity.get_y(), 5,
            msg="particle init: y vel bad result: is {} should be {}".format(p.velocity.get_y(), 5))
        self.assertAlmostEqual(p.velocity.get_z(), 6,
            msg="particle init: z vel bad result: is {} should be {}".format(p.velocity.get_z(), 6))
    def test_random_goal(self):
        w = World(1, 40, 40, 40)
        pos = Vector3dm(0, 0, 0, "c")
        goal = w.random_goal(pos, 100, 500)
        x, y, z = goal.convert_to_cartesian().vals
        self.assertLessEqual(x, 20)
        self.assertLessEqual(y, 20)
        self.assertLessEqual(z, 40)
        self.assertGreaterEqual(x, -20)
        self.assertGreaterEqual(y, -20)
        self.assertGreaterEqual(z, 0)
# Run the suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 38.435484 | 129 | 0.715485 |
eb8f04d9487aba8df1d2231dd3f5f15e01dead95 | 2,700 | py | Python | pylatex/config.py | vikorbit/PyLaTeX | 9dee695c88857b4894ec110765a4b3c7f84ca853 | [
"MIT"
] | 4 | 2019-10-12T21:36:01.000Z | 2021-12-21T10:03:31.000Z | pylatex/config.py | vikorbit/PyLaTeX | 9dee695c88857b4894ec110765a4b3c7f84ca853 | [
"MIT"
] | null | null | null | pylatex/config.py | vikorbit/PyLaTeX | 9dee695c88857b4894ec110765a4b3c7f84ca853 | [
"MIT"
] | 1 | 2020-11-25T08:47:30.000Z | 2020-11-25T08:47:30.000Z | # -*- coding: utf-8 -*-
"""
This module implements the ability to use of different configurations.
The current active configuration is `pylatex.config.active`. This variable can
simply be changed to another configuration and that will be used.
It is also possible to use the `~.Version1.use` method to do this temporarily
in a specific context.
.. :copyright: (c) 2016 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from contextlib import contextmanager
class Version1:
    """The config used to get the behaviour of v1.x.y of the library.

    The default attributes are::

        indent = True
        booktabs = False
        microtype = False
        row_height = None
    """

    indent = True
    booktabs = False
    microtype = False
    row_height = None

    def __init__(self, **kwargs):
        """
        Args
        ----
        kwargs:
            Key value pairs of the default attributes that should be overridden
        """
        for k, v in kwargs.items():
            setattr(self, k, v)

    @contextmanager
    def use(self):
        """Use the config temporarily in specific context.

        A simple usage example::

            with Version1(indent=False).use():
                # Do stuff where indent should be False
                ...
        """
        global active
        prev = active
        active = self
        try:
            yield
        finally:
            # Bug fix: restore the previous config even when the body
            # raises; previously an exception left this config active.
            active = prev

    @contextmanager
    def change(self, **kwargs):
        """Override some attributes of the config in a specific context.

        A simple usage example::

            with pylatex.config.active.change(indent=False):
                # Do stuff where indent should be False
                ...

        Args
        ----
        kwargs:
            Key value pairs of the default attributes that should be overridden
        """
        old_attrs = {}
        for k, v in kwargs.items():
            old_attrs[k] = getattr(self, k, v)
            setattr(self, k, v)
        try:
            yield self  # allows with ... as ...
        finally:
            # Bug fix: restore the overridden attributes even when the body
            # raises; previously an exception left the overrides in place.
            for k, v in old_attrs.items():
                setattr(self, k, v)
#: The default configuration in this release. Currently the same as
#: `Version1`.
Default = Version1
class Version2(Version1):
    """The config used to get the behaviour of v2.x.y of the library.
    The default attributes are::
        indent = False
        booktabs = True
        microtype = True
        row_height = 1.3
    """
    # Overrides of the Version1 class attributes; the use()/change()
    # behaviour is inherited unchanged.
    indent = False
    booktabs = True
    microtype = True
    row_height = 1.3
#: The default configuration in the next major release. Currently the same as
#: `Version2`.
NextMajor = Version2
#: The currently active configuration consulted by the rest of the library.
active = Default()
| 23.275862 | 79 | 0.595185 |
d967e959057799a6eef695c7cd4d708e0a010909 | 1,991 | py | Python | sharpy/plans/acts/protoss/chrono_unit.py | DuncanDHall/sharpy-sc2 | 7a47a7538ad99214e3f0288b6213cac882551180 | [
"MIT"
] | null | null | null | sharpy/plans/acts/protoss/chrono_unit.py | DuncanDHall/sharpy-sc2 | 7a47a7538ad99214e3f0288b6213cac882551180 | [
"MIT"
] | null | null | null | sharpy/plans/acts/protoss/chrono_unit.py | DuncanDHall/sharpy-sc2 | 7a47a7538ad99214e3f0288b6213cac882551180 | [
"MIT"
] | null | null | null | import warnings
from sc2.ids.ability_id import AbilityId
from sc2.ids.unit_typeid import UnitTypeId
from sc2.ids.buff_id import BuffId
from sc2.unit import Unit, UnitOrder
from sharpy.plans.acts.act_base import ActBase
class ChronoUnit(ActBase):
    """Chrono boosts a building while it is producing the tracked unit type."""

    def __init__(self, name: UnitTypeId, from_building: UnitTypeId, count: int = 0):
        """
        Chrono boosts unit production.

        @param name: Unit type for which to chronoboost
        @param from_building: Which building to chrono
        @param count: Amount of times to cast chronoboost, use 0 for infinite
        """
        assert name is not None and isinstance(name, UnitTypeId)
        assert from_building is not None and isinstance(from_building, UnitTypeId)
        self.unit_type = name
        self.from_building = from_building
        self.count = count
        self.casted = 0
        super().__init__()

    async def start(self, knowledge: "Knowledge"):
        await super().start(knowledge)
        # Resolve the ability that trains the tracked unit type once.
        unit_data = self.ai._game_data.units[self.unit_type.value]
        self.creation_ability = unit_data.creation_ability.id

    async def execute(self) -> bool:
        if self.count > 0 and self.casted >= self.count:
            return True  # Cast budget exhausted.
        for producer in self.cache.own(self.from_building).ready:  # type: Unit
            orders = producer.orders
            if not orders or orders[0].ability.id != self.creation_ability:
                continue  # Not currently producing the tracked unit.
            if producer.has_buff(BuffId.CHRONOBOOSTENERGYCOST):
                continue  # Already boosted.
            # Find a nexus with chrono boost off cooldown and cast it.
            for nexus in self.cache.own(UnitTypeId.NEXUS):
                if not self.cd_manager.is_ready(nexus.tag, AbilityId.EFFECT_CHRONOBOOSTENERGYCOST):
                    continue
                if nexus(AbilityId.EFFECT_CHRONOBOOSTENERGYCOST, producer):
                    self.print(f"Chrono {self.creation_ability.name}")
                    self.casted += 1
                    return True
        return True  # Never block
| 40.632653 | 103 | 0.635861 |
289020e153c5719161bc8c3287414e352e8df689 | 3,154 | py | Python | finalytics/utils/dbhelper.py | indiquant/finalytics | df0d3f715510ecb3a1c2fb41b18d893e448f46ac | [
"MIT"
] | null | null | null | finalytics/utils/dbhelper.py | indiquant/finalytics | df0d3f715510ecb3a1c2fb41b18d893e448f46ac | [
"MIT"
] | null | null | null | finalytics/utils/dbhelper.py | indiquant/finalytics | df0d3f715510ecb3a1c2fb41b18d893e448f46ac | [
"MIT"
] | null | null | null | __author__ = 'indiquant'
import os
import sqlite3
from enum import Enum
from qrymaker import qry_createtable
# NOTE(review): two near-identical sqlite paths are kept; _DB backs the
# codes/bulk helpers while _db backs the options readers -- confirm both
# paths are intentional and not a typo of one another.
_DB = r'C:\temp\strat\webdata.sqlite3'
_db = r'C:\temp\webdata.sqlite3'
class EnumDBOptionTable(Enum):
    """Named positions for the options_intraday columns.

    Values are 1-based ordinals -- presumably matching the table's column
    order; verify against the actual schema.
    """
    undl = 1
    opt_type = 2
    exc_type = 3
    expiry = 4
    strike = 5
    recdate = 6
    rectime = 7
    undl_type = 8
    spot = 9
    bidpx = 10
    bidqty = 11
    askpx = 12
    askqty = 13
    lastpx = 14
    volume = 15
def read_rectimes(undl, recdate):
    """Return the sorted distinct capture times for *undl* on *recdate*.

    Returns None when the query fails or yields no rows.
    """
    # NOTE(review): values are interpolated into the SQL text -- vulnerable
    # to SQL injection if `undl` can come from untrusted input.
    qry = ("SELECT rectime FROM options_intraday WHERE undl = '{undl}'"
           " AND recdate = {recdate}"
           " GROUP BY rectime ORDER BY rectime").format(undl=undl, recdate=recdate)
    rows = execute_r(_db, qry)
    if rows:
        return [row[0] for row in rows]
    return None
def read_options(undl, recdate, rectime):
    """Fetch all option rows for one underlying at one capture timestamp.

    :type recdate: int
    :type rectime: int
    """
    # NOTE(review): string-built SQL -- unsafe if `undl` is untrusted.
    qry = ("SELECT * FROM options_intraday WHERE undl = '{undl}'"
           " AND recdate = {recdate} AND rectime = {rectime}").format(
        undl=undl, recdate=recdate, rectime=rectime)
    return execute_r(_db, qry)
def dbname():
    """Return the path of the primary sqlite database."""
    return _DB
def createfileifmissing(fname):
    """Create an empty file at *fname* if one does not already exist."""
    # Bug fix: the original called open() without closing the handle,
    # leaking a file descriptor until garbage collection.
    if not os.path.exists(fname):
        with open(fname, 'w'):
            pass
def createtable(dbname, tname, colnames, coltypes, pkeys):
    """Create table *tname* in the database at *dbname*.

    The CREATE TABLE statement is built by qry_createtable from the column
    names, types and primary keys.
    """
    conn = sqlite3.connect(dbname)
    try:
        conn.execute(qry_createtable(tname, colnames, coltypes, pkeys))
        conn.commit()
    finally:
        conn.close()  # bug fix: the connection used to be leaked
def create_table(tname, colnames, coltypes, primarykeys):
    """Create table *tname* in the primary database (_DB)."""
    conn = sqlite3.connect(_DB)
    try:
        conn.execute(qry_createtable(tname, colnames, coltypes, primarykeys))
        conn.commit()
    finally:
        conn.close()  # bug fix: the connection used to be leaked
def bulkinsert(dbname, tname, colnames, rows):
    """Insert-or-replace ``rows`` into table ``tname`` of ``dbname``.

    :param colnames: column names matching the order of values in each row.
    :param rows: iterable of value tuples.
    """
    # Idiom: build "INSERT OR REPLACE INTO t (a, b) VALUES (?, ?)" with
    # str.join instead of the original character-by-character concatenation.
    cols = ", ".join(colnames)
    placeholders = ", ".join("?" for _ in colnames)
    qry = "INSERT OR REPLACE INTO {0} ({1}) VALUES ({2})".format(tname, cols, placeholders)
    conn = sqlite3.connect(dbname)
    try:
        # executemany replaces the per-row execute loop (same semantics).
        conn.executemany(qry, rows)
        conn.commit()
    finally:
        # BUG FIX: the original never closed the connection.
        conn.close()
def bulkinsert_codes(tname, colnames, rows):
    """Insert-or-replace ``rows`` into ``tname`` of the default DB ``_DB``.

    Consistency: this was a line-for-line duplicate of :func:`bulkinsert`
    with the database path hard-coded, so it now delegates to it.
    """
    bulkinsert(_DB, tname, colnames, rows)
def execute_r(dbname, qry):
    """Execute a read query against ``dbname`` and return all fetched rows.

    Returns None when the statement fails with sqlite3.OperationalError
    (bad SQL, missing table); the connection is closed in either case.
    """
    connection = sqlite3.connect(dbname)
    try:
        rows = connection.cursor().execute(qry).fetchall()
        connection.close()
        return rows
    except sqlite3.OperationalError:
        connection.close()
        return None
def execute_w(dbname, qry):
    """Execute a write statement against ``dbname`` and commit it.

    Always returns None; an sqlite3.OperationalError is swallowed and the
    connection closed, mirroring :func:`execute_r`.
    """
    connection = sqlite3.connect(dbname)
    try:
        connection.cursor().execute(qry)
        connection.commit()
        connection.close()
    except sqlite3.OperationalError:
        connection.close()
        return None
if __name__ == '__main__':
    # Smoke test: dump all stock rows from the codes table.
    qry = "SELECT * FROM codes WHERE undltype='S'"
    # BUG FIX: execute_r requires the database path as its first argument;
    # the original call execute_r(qry) raised TypeError.
    x = execute_r(_DB, qry)
    # BUG FIX: Python 3 print function (was a Python 2 print statement).
    print('pause')
| 19.836478 | 111 | 0.593849 |
fc9b2d56a76e3c8d9b5cc8625411ea6f23ebb4e1 | 274 | py | Python | tests/artificial/transf_Difference/trend_ConstantTrend/cycle_30/ar_/test_artificial_32_Difference_ConstantTrend_30__20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/artificial/transf_Difference/trend_ConstantTrend/cycle_30/ar_/test_artificial_32_Difference_ConstantTrend_30__20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/artificial/transf_Difference/trend_ConstantTrend/cycle_30/ar_/test_artificial_32_Difference_ConstantTrend_30__20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 30, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 0); | 39.142857 | 169 | 0.737226 |
bc093be8e51d2e9aa7279bd6a23de9981dd04bcc | 1,104 | py | Python | gizmo/test/integration/tinkerpop.py | emehrkay/gizmo | 01db2f51118f7d746061ace0b491237481949bad | [
"MIT"
] | 19 | 2015-10-06T12:55:09.000Z | 2021-01-09T09:53:38.000Z | gizmo/test/integration/tinkerpop.py | emehrkay/Gizmo | 01db2f51118f7d746061ace0b491237481949bad | [
"MIT"
] | 2 | 2016-01-21T02:55:55.000Z | 2020-08-16T23:05:07.000Z | gizmo/test/integration/tinkerpop.py | emehrkay/gizmo | 01db2f51118f7d746061ace0b491237481949bad | [
"MIT"
] | 3 | 2016-01-21T02:18:41.000Z | 2018-04-25T06:06:25.000Z | import asyncio
import unittest
import random
from gremlinpy import Gremlin
from . import ConnectionTestCases, EntityTestCases, MapperTestCases
from gizmo import Mapper, Request, Collection, Vertex, Edge
from gizmo.mapper import EntityMapper
class BaseTests(unittest.TestCase):
    """Shared fixture wiring a gizmo Mapper to a local Gremlin server."""
    def setUp(self):
        # Integration fixture: assumes a Gremlin server on localhost:8182
        # serving a 'gizmo_testing' graph -- TODO confirm in CI setup.
        self.request = Request('localhost', port=8182)
        self.gremlin = Gremlin('gizmo_testing')
        self.mapper = Mapper(self.request, self.gremlin)
        self.ioloop = asyncio.get_event_loop()
        super(BaseTests, self).setUp()
    def tearDown(self):
        super(BaseTests, self).tearDown()
    async def purge(self):
        # Removes every vertex from the test graph; returns the raw result.
        script = "%s.V().map{it.get().remove()}" % self.gremlin.gv
        res = await self.mapper.query(script=script)
        return res
# Concrete suites: each runs the shared mixin test-cases (imported above)
# against the Tinkerpop backend provided by BaseTests.
class ConnectionTests(BaseTests, ConnectionTestCases):
    pass
class EntityTests(EntityTestCases, BaseTests):
    pass
class MapperTests(MapperTestCases, BaseTests):
    pass
# Placeholders: no collection/traversal-specific cases yet.
class CollectionTests(BaseTests):
    pass
class TraversalTests(BaseTests):
    pass
if __name__ == '__main__':
    unittest.main()
| 20.830189 | 67 | 0.703804 |
eb6c36d643a403781f34b9f3161d4e676908e23c | 4,865 | py | Python | rnntutor/lib/python2.7/site-packages/notebook/services/kernels/tests/test_kernels_api.py | lucaskingjade/RNNTutorial | b641e50014b56bb8e0f8762c76f1178a9a7c911f | [
"Apache-2.0"
] | 652 | 2015-07-26T00:00:17.000Z | 2022-02-24T18:30:04.000Z | rnntutor/lib/python2.7/site-packages/notebook/services/kernels/tests/test_kernels_api.py | lucaskingjade/RNNTutorial | b641e50014b56bb8e0f8762c76f1178a9a7c911f | [
"Apache-2.0"
] | 8 | 2015-09-07T03:38:19.000Z | 2021-05-23T03:18:51.000Z | rnntutor/lib/python2.7/site-packages/notebook/services/kernels/tests/test_kernels_api.py | lucaskingjade/RNNTutorial | b641e50014b56bb8e0f8762c76f1178a9a7c911f | [
"Apache-2.0"
] | 40 | 2015-07-24T19:45:08.000Z | 2021-11-01T14:54:56.000Z | """Test the kernels service API."""
import json
import requests
from jupyter_client.kernelspec import NATIVE_KERNEL_NAME
from notebook.utils import url_path_join
from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error
class KernelAPI(object):
    """Wrapper for kernel REST API requests"""
    def __init__(self, base_url):
        self.base_url = base_url
    def _req(self, verb, path, body=None):
        """Issue ``verb`` against /api/kernels/<path>; raise on HTTP errors."""
        response = requests.request(verb,
                url_path_join(self.base_url, 'api/kernels', path), data=body)
        if 400 <= response.status_code < 600:
            # Prefer the server-supplied JSON error message when present.
            # BUG FIX: narrowed the bare ``except:`` -- it also swallowed
            # KeyboardInterrupt/SystemExit.
            try:
                response.reason = response.json()['message']
            except Exception:
                pass
        response.raise_for_status()
        return response
    def list(self):
        return self._req('GET', '')
    def get(self, id):
        return self._req('GET', id)
    def start(self, name=NATIVE_KERNEL_NAME):
        body = json.dumps({'name': name})
        return self._req('POST', '', body)
    def shutdown(self, id):
        return self._req('DELETE', id)
    def interrupt(self, id):
        return self._req('POST', url_path_join(id, 'interrupt'))
    def restart(self, id):
        return self._req('POST', url_path_join(id, 'restart'))
class KernelAPITest(NotebookTestBase):
    """Test the kernels web service API"""
    def setUp(self):
        self.kern_api = KernelAPI(self.base_url())
    def tearDown(self):
        # Shut down any kernels a test left running so suites stay isolated.
        for k in self.kern_api.list().json():
            self.kern_api.shutdown(k['id'])
    def test_no_kernels(self):
        """Make sure there are no kernels running at the start"""
        kernels = self.kern_api.list().json()
        self.assertEqual(kernels, [])
    def test_default_kernel(self):
        # POST request
        # POST with no body starts the server's default kernel spec.
        r = self.kern_api._req('POST', '')
        kern1 = r.json()
        self.assertEqual(r.headers['location'], '/api/kernels/' + kern1['id'])
        self.assertEqual(r.status_code, 201)
        self.assertIsInstance(kern1, dict)
        self.assertEqual(r.headers['Content-Security-Policy'], (
            "frame-ancestors 'self'; "
            "report-uri /api/security/csp-report; "
            "default-src 'none'"
        ))
    def test_main_kernel_handler(self):
        # POST request
        r = self.kern_api.start()
        kern1 = r.json()
        self.assertEqual(r.headers['location'], '/api/kernels/' + kern1['id'])
        self.assertEqual(r.status_code, 201)
        self.assertIsInstance(kern1, dict)
        self.assertEqual(r.headers['Content-Security-Policy'], (
            "frame-ancestors 'self'; "
            "report-uri /api/security/csp-report; "
            "default-src 'none'"
        ))
        # GET request
        r = self.kern_api.list()
        self.assertEqual(r.status_code, 200)
        assert isinstance(r.json(), list)
        self.assertEqual(r.json()[0]['id'], kern1['id'])
        self.assertEqual(r.json()[0]['name'], kern1['name'])
        # create another kernel and check that they both are added to the
        # list of kernels from a GET request
        kern2 = self.kern_api.start().json()
        assert isinstance(kern2, dict)
        r = self.kern_api.list()
        kernels = r.json()
        self.assertEqual(r.status_code, 200)
        assert isinstance(kernels, list)
        self.assertEqual(len(kernels), 2)
        # Interrupt a kernel
        r = self.kern_api.interrupt(kern2['id'])
        self.assertEqual(r.status_code, 204)
        # Restart a kernel
        # A restart keeps the kernel id and name stable.
        r = self.kern_api.restart(kern2['id'])
        self.assertEqual(r.headers['Location'], '/api/kernels/'+kern2['id'])
        rekern = r.json()
        self.assertEqual(rekern['id'], kern2['id'])
        self.assertEqual(rekern['name'], kern2['name'])
    def test_kernel_handler(self):
        # GET kernel with given id
        kid = self.kern_api.start().json()['id']
        r = self.kern_api.get(kid)
        kern1 = r.json()
        self.assertEqual(r.status_code, 200)
        assert isinstance(kern1, dict)
        self.assertIn('id', kern1)
        self.assertEqual(kern1['id'], kid)
        # Request a bad kernel id and check that a JSON
        # message is returned!
        bad_id = '111-111-111-111-111'
        with assert_http_error(404, 'Kernel does not exist: ' + bad_id):
            self.kern_api.get(bad_id)
        # DELETE kernel with id
        r = self.kern_api.shutdown(kid)
        self.assertEqual(r.status_code, 204)
        kernels = self.kern_api.list().json()
        self.assertEqual(kernels, [])
        # Request to delete a non-existent kernel id
        bad_id = '111-111-111-111-111'
        with assert_http_error(404, 'Kernel does not exist: ' + bad_id):
            self.kern_api.shutdown(bad_id)
| 33.784722 | 78 | 0.589928 |
54f66447d132d8d8353a92fb5516f5fc9688de65 | 621 | py | Python | ex069.py | Alisson7Kruger/python-exercises | 3be863613d47a6ed77086c8f0b791822d7c0082f | [
"MIT"
] | 1 | 2021-08-19T20:43:29.000Z | 2021-08-19T20:43:29.000Z | ex069.py | Alisson7Kruger/python-exercises | 3be863613d47a6ed77086c8f0b791822d7c0082f | [
"MIT"
] | null | null | null | ex069.py | Alisson7Kruger/python-exercises | 3be863613d47a6ed77086c8f0b791822d7c0082f | [
"MIT"
] | null | null | null | tot18 = totH = totM20 = 0
# Registration loop: tallies people aged 18+, registered men, and women
# under 20, until the user answers "N" to the continue prompt.
while True:
    idade = int(input("Idade: "))
    sexo = " "
    while sexo not in "MF":
        # BUG FIX: an empty answer made ``[0]`` raise IndexError; now it
        # simply re-prompts.
        resposta = str(input("Sexo: ")).strip().upper()
        sexo = resposta[0] if resposta else " "
    if idade >= 18:
        tot18 += 1
    if sexo == "M":
        totH += 1
    if sexo == "F" and idade < 20:
        totM20 += 1
    resp = " "
    while resp not in "SN":
        # Same empty-input guard as the sex prompt above.
        entrada = str(input("Quer continuar? [S/N] ")).strip().upper()
        resp = entrada[0] if entrada else " "
    if resp == "N":
        break
print(f"Total de pessoas com mais de 18 anos: {tot18}")
print(f"Ao todo temos {totH} homens cadastrados")
print(f"Ao todo temos {totM20} mulheres com menos de 20 anos")
| 27 | 70 | 0.541063 |
c757939fd5285680584dfa0d1d9c2b1e26f0f6d9 | 1,596 | py | Python | Latest/venv/Lib/site-packages/pyface/tree/trait_dict_node_type.py | adamcvj/SatelliteTracker | 49a8f26804422fdad6f330a5548e9f283d84a55d | [
"Apache-2.0"
] | 1 | 2022-01-09T20:04:31.000Z | 2022-01-09T20:04:31.000Z | Latest/venv/Lib/site-packages/pyface/tree/trait_dict_node_type.py | adamcvj/SatelliteTracker | 49a8f26804422fdad6f330a5548e9f283d84a55d | [
"Apache-2.0"
] | 1 | 2022-02-15T12:01:57.000Z | 2022-03-24T19:48:47.000Z | Latest/venv/Lib/site-packages/pyface/tree/trait_dict_node_type.py | adamcvj/SatelliteTracker | 49a8f26804422fdad6f330a5548e9f283d84a55d | [
"Apache-2.0"
] | null | null | null | """ The node type for a trait dictionary. """
# Enthought library imports.
from traits.api import Any, Str
# Local imports.
from .node_type import NodeType
class TraitDictNodeType(NodeType):
    """ The node type for a trait dictionary. """

    #### 'TraitDictNodeType' interface ########################################

    # The type of object that provides the trait dictionary.
    klass = Any

    # The label text.
    text = Str

    # The trait name.
    trait_name = Str

    ###########################################################################
    # 'NodeType' interface.
    ###########################################################################

    def is_type_for(self, node):
        """ Returns True if this node type recognizes a node. """
        return (
            isinstance(node, dict)
            and hasattr(node, 'object')
            and isinstance(node.object, self.klass)
            and node.name == self.trait_name
        )

    def allows_children(self, node):
        """ Does the node allow children (ie. a folder vs a file). """
        return True

    def has_children(self, node):
        """ Returns True if a node has children, otherwise False. """
        return bool(node)

    def get_children(self, node):
        """ Returns the children of a node. """
        return list(node.values())

    def get_text(self, node):
        """ Returns the label text for a node. """
        return self.text
##### EOF #####################################################################
| 26.6 | 79 | 0.494361 |
8f0a4b7f94fbec3a0fe1ab3e3181502f1ccf63a3 | 6,224 | py | Python | modules/analysis/gaussian.py | CoCoMol/CoCoPy | 66bd4deda4b80eca65ceb0660f940214e4b457fb | [
"MIT"
] | 1 | 2017-11-15T14:40:01.000Z | 2017-11-15T14:40:01.000Z | modules/analysis/gaussian.py | CoCoMol/CoCoPy | 66bd4deda4b80eca65ceb0660f940214e4b457fb | [
"MIT"
] | 1 | 2017-11-15T14:39:38.000Z | 2017-11-15T14:39:38.000Z | modules/analysis/gaussian.py | CoCoMol/CoCoPy | 66bd4deda4b80eca65ceb0660f940214e4b457fb | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
#
# CoCoPy - A python toolkit for rotational spectroscopy
#
# Copyright (c) 2013 by David Schmitz (david.schmitz@chasquiwan.de).
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the “Software”), to deal in the
# Software without restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
# THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# MIT Licence (http://mit-license.org/)
#
################################################################################
import re
'''
Todo:
- 1 yo
- 2
- 3
'''
def readout(fname='', calctype='opt'):
    """Parse a Gaussian 03/09 log file and extract computed properties.

    :param fname: path to the Gaussian output file.
    :param calctype: 'opt'/'optfreq' for a single optimization (returns a
        dict of scalar properties), 'optscan' for a relaxed scan (returns
        a dict of lists, one entry per stationary point).
    :returns: the properties dict, or None if the file cannot be opened or
        ``calctype`` is not recognized.
    """
    try:
        f = open(fname, 'r')
    except IOError:
        # BUG FIX: print is a function in Python 3 (was a py2 statement).
        print('Cannot open: ', fname)
        return None
    # BUG FIX: the original f.close() sat after unconditional returns and
    # never ran; the with-block guarantees the handle is closed.
    with f:
        if calctype == 'opt' or calctype == 'optfreq':
            return _readout_opt(f)
        if calctype == 'optscan':
            return _readout_optscan(f)
    return None
def _readout_opt(f):
    """Parse an optimization (+ optional frequency) run into a dict."""
    dip = 0       # 1 -> the next line carries the dipole components
    freq = 0      # 1 -> inside the harmonic-frequency section
    rot = 0       # 1 -> the next 'Rotational constants' line is parsed
    ver = None    # ROBUSTNESS: was unbound until a program banner was seen
    longstr = ''  # accumulated archive block (after the l9999 marker)
    flag = False  # True -> currently accumulating into longstr
    # (the original also tracked an unused Mulliken flag; dropped)
    properties = dict()
    for line in f:
        if 'This is the Gaussian(R) 03 program' in line:
            ver = '03'
        if 'This is part of the Gaussian(R) 09 program.' in line:
            ver = '09'
        # Constants are reported in GHz; stored here multiplied by 1000.
        if 'Rotational constants (GHZ):' in line and rot == 1:
            properties['rotA'] = float(line[30:45]) * 1000.
            properties['rotB'] = float(line[45:60]) * 1000.
            properties['rotC'] = float(line[60:]) * 1000.
            rot = 0
        if '---------------------------------------------------------------------' in line:
            rot = 1
        if dip == 1:
            # Fixed-column layout differs between Gaussian versions.
            if ver == '09':
                properties['dipA'] = float(line[13:26])
                properties['dipB'] = float(line[39:52])
                properties['dipC'] = float(line[65:78])
                properties['dipTot'] = float(line[91:])
            if ver == '03':
                properties['dipA'] = float(line[6:17])
                properties['dipB'] = float(line[23:34])
                properties['dipC'] = float(line[40:51])
                properties['dipTot'] = float(line[57:])
            dip = 0
        if ' Dipole moment (field-independent basis, Debye):' in line:
            dip = 1
        if 'Harmonic frequencies (cm**-1)' in line:
            freq = 1
            properties['freq'] = []
            properties['intens'] = []
        if freq == 1 and 'Frequencies --' in line:
            # Up to three values per line, in fixed columns.
            properties['freq'].append(float(line[15:30]))
            if len(line) > 30:
                properties['freq'].append(float(line[30:55]))
            if len(line) > 55:
                properties['freq'].append(float(line[55:]))
        if freq == 1 and 'IR Inten --' in line:
            properties['intens'].append(float(line[15:30]))
            if len(line) > 30:
                properties['intens'].append(float(line[30:55]))
            if len(line) > 55:
                properties['intens'].append(float(line[55:]))
        if 'SCF Done:' in line:
            properties['energy'] = float(line.split('=')[1].split('A.U.')[0])
        if flag:
            if line != '\n':
                longstr += line
            else:
                flag = False
        if '(Enter /usr/product/gaussian/g09/l9999.exe)' in line:
            flag = True
    # An MP2 energy in the archive block supersedes the SCF energy.
    if r'\MP2=' in longstr:
        h = re.findall(r'\\MP2=[+-]?\d*\.\d*\\', longstr)
        properties['energy'] = float(re.findall(r'[+-]?\d*\.\d*', h[0])[0])
    return properties
def _readout_optscan(f):
    """Parse a relaxed scan: record energy and rotational constants at
    every '-- Stationary point found.' marker."""
    rot = 0
    ver = None
    properties = dict(energy=[], rotA=[], rotB=[], rotC=[])
    for line in f:
        if 'This is the Gaussian(R) 03 program' in line:
            ver = '03'
        if 'This is part of the Gaussian(R) 09 program.' in line:
            ver = '09'
        if 'Rotational constants (GHZ):' in line and rot == 1:
            rotA = float(line[30:45]) * 1000.
            rotB = float(line[45:60]) * 1000.
            rotC = float(line[60:]) * 1000.
            rot = 0
        if '---------------------------------------------------------------------' in line:
            rot = 1
        if 'SCF Done:' in line:
            # Column positions of the energy differ between versions.
            if ver == '09':
                energy = float(line[23:40])
            if ver == '03':
                energy = float(line[26:43])
        if '-- Stationary point found.' in line:
            properties['energy'].append(energy)
            properties['rotA'].append(rotA)
            properties['rotB'].append(rotB)
            properties['rotC'].append(rotC)
    return properties
| 42.630137 | 99 | 0.449068 |
1d4fadd8e6b0e13b1a52053d2385fab4f49532a9 | 4,017 | py | Python | gen_graph.py | tedzhouhk/tgl-1 | 4858e678cf1fed2e1728cf62ef879dcedb0310d1 | [
"Apache-2.0"
] | 9 | 2022-03-16T04:47:45.000Z | 2022-03-31T20:02:26.000Z | gen_graph.py | tedzhouhk/tgl-1 | 4858e678cf1fed2e1728cf62ef879dcedb0310d1 | [
"Apache-2.0"
] | 1 | 2022-03-31T22:34:54.000Z | 2022-03-31T22:34:54.000Z | gen_graph.py | tedzhouhk/tgl-1 | 4858e678cf1fed2e1728cf62ef879dcedb0310d1 | [
"Apache-2.0"
] | 2 | 2022-02-25T23:40:44.000Z | 2022-02-26T00:41:05.000Z | import argparse
import itertools
import pandas as pd
import numpy as np
from tqdm import tqdm
parser=argparse.ArgumentParser()
parser.add_argument('--data', type=str, help='dataset name')
parser.add_argument('--add_reverse', default=False, action='store_true')
args=parser.parse_args()
# Edge list with at least src, dst, time and int_roll columns; int_roll is
# presumably a train/val/test split flag (0 = train) -- TODO confirm.
df = pd.read_csv('DATA/{}/edges.csv'.format(args.data))
# Node ids are assumed dense and 0-based, so max id + 1 is the node count.
num_nodes = max(int(df['src'].max()), int(df['dst'].max())) + 1
print('num_nodes: ', num_nodes)
# CSR-style adjacency buffers for three graph views: train-only interior,
# full interior, and full exterior. indptr is filled later by prefix sums;
# the per-node Python lists are flattened into arrays afterwards.
# BUG FIX: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24, so
# this crashed on modern NumPy; use the explicit fixed-width np.int64.
int_train_indptr = np.zeros(num_nodes + 1, dtype=np.int64)
int_train_indices = [[] for _ in range(num_nodes)]
int_train_ts = [[] for _ in range(num_nodes)]
int_train_eid = [[] for _ in range(num_nodes)]
int_full_indptr = np.zeros(num_nodes + 1, dtype=np.int64)
int_full_indices = [[] for _ in range(num_nodes)]
int_full_ts = [[] for _ in range(num_nodes)]
int_full_eid = [[] for _ in range(num_nodes)]
ext_full_indptr = np.zeros(num_nodes + 1, dtype=np.int64)
ext_full_indices = [[] for _ in range(num_nodes)]
ext_full_ts = [[] for _ in range(num_nodes)]
ext_full_eid = [[] for _ in range(num_nodes)]
# Bucket each edge (and optionally its reverse) into the per-node lists of
# the three views, keyed by int_roll: 0 -> also in train; != 3 -> in
# interior-full; every edge -> exterior-full. The row index doubles as the
# global edge id.
for idx, row in tqdm(df.iterrows(), total=len(df)):
    src = int(row['src'])
    dst = int(row['dst'])
    if row['int_roll'] == 0:
        int_train_indices[src].append(dst)
        int_train_ts[src].append(row['time'])
        int_train_eid[src].append(idx)
        if args.add_reverse:
            int_train_indices[dst].append(src)
            int_train_ts[dst].append(row['time'])
            int_train_eid[dst].append(idx)
    # int_train_indptr[src + 1:] += 1
    if row['int_roll'] != 3:
        int_full_indices[src].append(dst)
        int_full_ts[src].append(row['time'])
        int_full_eid[src].append(idx)
        if args.add_reverse:
            int_full_indices[dst].append(src)
            int_full_ts[dst].append(row['time'])
            int_full_eid[dst].append(idx)
    # int_full_indptr[src + 1:] += 1
    ext_full_indices[src].append(dst)
    ext_full_ts[src].append(row['time'])
    ext_full_eid[src].append(idx)
    if args.add_reverse:
        ext_full_indices[dst].append(src)
        ext_full_ts[dst].append(row['time'])
        ext_full_eid[dst].append(idx)
    # ext_full_indptr[src + 1:] += 1
# Cumulative prefix sums turn the per-node edge counts into CSR indptr.
for node in tqdm(range(num_nodes)):
    int_train_indptr[node + 1] = int_train_indptr[node] + len(int_train_indices[node])
    int_full_indptr[node + 1] = int_full_indptr[node] + len(int_full_indices[node])
    ext_full_indptr[node + 1] = ext_full_indptr[node] + len(ext_full_indices[node])
def _flatten(per_node_lists):
    """Concatenate per-node lists into a single numpy array."""
    return np.array(list(itertools.chain.from_iterable(per_node_lists)))
int_train_indices = _flatten(int_train_indices)
int_train_ts = _flatten(int_train_ts)
int_train_eid = _flatten(int_train_eid)
int_full_indices = _flatten(int_full_indices)
int_full_ts = _flatten(int_full_ts)
int_full_eid = _flatten(int_full_eid)
ext_full_indices = _flatten(ext_full_indices)
ext_full_ts = _flatten(ext_full_ts)
ext_full_eid = _flatten(ext_full_eid)
print('Sorting...')
def tsort(i, indptr, indices, t, eid):
    """Sort node ``i``'s CSR slice in place by ascending timestamp.

    indices, t and eid are permuted together so edges stay aligned.
    """
    lo, hi = indptr[i], indptr[i + 1]
    order = np.argsort(t[lo:hi])
    indices[lo:hi] = indices[lo:hi][order]
    t[lo:hi] = t[lo:hi][order]
    eid[lo:hi] = eid[lo:hi][order]
# Sort every node's neighborhood in all three views by edge timestamp,
# then persist each view as an npz of its four CSR arrays.
for i in tqdm(range(int_train_indptr.shape[0] - 1)):
    tsort(i, int_train_indptr, int_train_indices, int_train_ts, int_train_eid)
    tsort(i, int_full_indptr, int_full_indices, int_full_ts, int_full_eid)
    tsort(i, ext_full_indptr, ext_full_indices, ext_full_ts, ext_full_eid)
# import pdb; pdb.set_trace()
print('saving...')
np.savez('DATA/{}/int_train.npz'.format(args.data), indptr=int_train_indptr, indices=int_train_indices, ts=int_train_ts, eid=int_train_eid)
np.savez('DATA/{}/int_full.npz'.format(args.data), indptr=int_full_indptr, indices=int_full_indices, ts=int_full_ts, eid=int_full_eid)
np.savez('DATA/{}/ext_full.npz'.format(args.data), indptr=ext_full_indptr, indices=ext_full_indices, ts=ext_full_ts, eid=ext_full_eid)
f97b635e47141c84825fac432da8a2336d00cffb | 629 | py | Python | 2021/buckeye-ctf/staff/solve.py | HaroldHH/My-CTF-Solutions | 7baca0df1ca96a00de77a1a113a0011c43ad6ab8 | [
"MIT"
] | null | null | null | 2021/buckeye-ctf/staff/solve.py | HaroldHH/My-CTF-Solutions | 7baca0df1ca96a00de77a1a113a0011c43ad6ab8 | [
"MIT"
] | null | null | null | 2021/buckeye-ctf/staff/solve.py | HaroldHH/My-CTF-Solutions | 7baca0df1ca96a00de77a1a113a0011c43ad6ab8 | [
"MIT"
] | null | null | null | import sys
from pwn import *
# Flag : buckeye{if_0n1y_th15_w0rk3d}
# pwntools exploit script: drives the challenge binary's menu. The exact
# sendline/recvuntil ordering mirrors the binary's prompts -- do not reorder.
elf = ELF("./chall")
proc = elf.process()
# Read and echo the three-line banner.
res = proc.recvuntil("\n").decode()
res += proc.recvuntil("\n").decode()
res += proc.recvuntil("\n").decode()
print(res)
# Menu option 1, then the payload "FLAG 1337".
proc.sendline("1")
print(proc.recvuntil("\n").decode())
payload = b"FLAG 1337"
proc.sendline(payload)
print(proc.recvuntil("\n").decode())
# Next three-line menu block.
res = proc.recvuntil("\n").decode()
res += proc.recvuntil("\n").decode()
res += proc.recvuntil("\n").decode()
print(res)
# Menu option 2, then the payload "Staff".
proc.sendline("2")
print(proc.recvuntil("\n").decode())
payload = b"Staff"
proc.sendline(payload)
print(proc.recvuntil("\n").decode())
| 22.464286 | 37 | 0.674086 |
0fa4c7da36f9a94a877164dfc8177bc6cd7e374f | 26,985 | py | Python | saleor/graphql/order/tests/test_discount_order.py | gustavoarmoa/saleor | f81b2f347e4c7a624cd68a1eca3b0a5611498f6e | [
"CC-BY-4.0"
] | null | null | null | saleor/graphql/order/tests/test_discount_order.py | gustavoarmoa/saleor | f81b2f347e4c7a624cd68a1eca3b0a5611498f6e | [
"CC-BY-4.0"
] | 40 | 2022-01-17T04:39:16.000Z | 2022-03-27T17:08:42.000Z | saleor/graphql/order/tests/test_discount_order.py | gustavoarmoa/saleor | f81b2f347e4c7a624cd68a1eca3b0a5611498f6e | [
"CC-BY-4.0"
] | null | null | null | from decimal import Decimal
from functools import partial
from unittest.mock import patch
import graphene
import pytest
from prices import Money, TaxedMoney, fixed_discount, percentage_discount
from ....core.prices import quantize_price
from ....discount import DiscountValueType
from ....order import OrderEvents, OrderStatus
from ....order.error_codes import OrderErrorCode
from ....order.interface import OrderTaxedPricesData
from ...discount.enums import DiscountValueTypeEnum
from ...tests.utils import get_graphql_content
ORDER_DISCOUNT_ADD = """
mutation OrderDiscountAdd($orderId: ID!, $input: OrderDiscountCommonInput!){
orderDiscountAdd(orderId:$orderId, input:$input){
order{
lines{
id
}
total{
gross{
amount
}
net{
amount
}
}
}
errors{
field
code
message
}
}
}
"""
@pytest.mark.parametrize(
    "value,value_type",
    [
        (Decimal("2222222"), DiscountValueTypeEnum.FIXED.name),
        (Decimal("101"), DiscountValueTypeEnum.PERCENTAGE.name),
    ],
)
def test_add_order_discount_incorrect_values(
    value, value_type, draft_order, staff_api_client, permission_manage_orders
):
    """Out-of-range discount values yield a single INVALID error on `value`."""
    # given
    order_id = graphene.Node.to_global_id("Order", draft_order.pk)
    variables = {
        "orderId": order_id,
        "input": {"valueType": value_type, "value": value},
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    # when
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_ADD, variables)
    content = get_graphql_content(response)
    # then
    errors = content["data"]["orderDiscountAdd"]["errors"]
    assert len(errors) == 1
    first_error = errors[0]
    assert first_error["field"] == "value"
    assert first_error["code"] == OrderErrorCode.INVALID.name
def test_add_fixed_order_discount_order_is_not_draft(
    order_with_lines, staff_api_client, permission_manage_orders
):
    """Adding a discount to a non-editable order fails with CANNOT_DISCOUNT."""
    # given
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    value = Decimal("10")
    variables = {
        "orderId": graphene.Node.to_global_id("Order", order_with_lines.pk),
        "input": {"valueType": DiscountValueTypeEnum.FIXED.name, "value": value},
    }
    # when
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_ADD, variables)
    content = get_graphql_content(response)
    # then
    errors = content["data"]["orderDiscountAdd"]["errors"]
    assert len(errors) == 1
    first_error = errors[0]
    assert first_error["field"] == "orderId"
    assert first_error["code"] == OrderErrorCode.CANNOT_DISCOUNT.name
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_add_fixed_order_discount_to_order(
    status, draft_order, staff_api_client, permission_manage_orders
):
    """A fixed discount reduces gross/net totals by the exact amount and
    records a matching OrderDiscount row and ORDER_DISCOUNT_ADDED event."""
    order = draft_order
    order.status = status
    order.save(update_fields=["status"])
    total_before_order_discount = order.total
    value = Decimal("10")
    variables = {
        "orderId": graphene.Node.to_global_id("Order", order.pk),
        "input": {"valueType": DiscountValueTypeEnum.FIXED.name, "value": value},
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_ADD, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderDiscountAdd"]
    order.refresh_from_db()
    # Fixed discount: both gross and net drop by exactly `value`.
    expected_gross = total_before_order_discount.gross.amount - value
    expected_net = total_before_order_discount.net.amount - value
    errors = data["errors"]
    assert len(errors) == 0
    assert expected_gross == order.total.gross.amount
    assert expected_net == order.total.net.amount
    assert order.undiscounted_total == total_before_order_discount
    assert order.discounts.count() == 1
    order_discount = order.discounts.first()
    assert order_discount.value == value
    assert order_discount.value_type == DiscountValueType.FIXED
    assert order_discount.amount.amount == value
    assert order_discount.reason is None
    # The audit event carries the discount parameters as strings.
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_DISCOUNT_ADDED
    parameters = event.parameters
    discount_data = parameters.get("discount")
    assert discount_data["value"] == str(value)
    assert discount_data["value_type"] == DiscountValueTypeEnum.FIXED.value
    assert discount_data["amount_value"] == str(order_discount.amount.amount)
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_add_percentage_order_discount_to_order(
    status, draft_order, staff_api_client, permission_manage_orders
):
    """A percentage discount scales the whole total and stores the reason
    alongside the OrderDiscount row and the ORDER_DISCOUNT_ADDED event."""
    order = draft_order
    order.status = status
    order.save(update_fields=["status"])
    total_before_order_discount = order.total
    reason = "The reason of the discount"
    value = Decimal("10.000")
    variables = {
        "orderId": graphene.Node.to_global_id("Order", order.pk),
        "input": {
            "valueType": DiscountValueTypeEnum.PERCENTAGE.name,
            "value": value,
            "reason": reason,
        },
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_ADD, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderDiscountAdd"]
    order.refresh_from_db()
    # Recompute the expected total with prices' percentage_discount helper.
    discount = partial(percentage_discount, percentage=value)
    expected_net_total = discount(total_before_order_discount.net)
    expected_gross_total = discount(total_before_order_discount.gross)
    expected_total = TaxedMoney(expected_net_total, expected_gross_total)
    errors = data["errors"]
    assert len(errors) == 0
    assert expected_total == order.total
    assert order.undiscounted_total == total_before_order_discount
    assert order.discounts.count() == 1
    order_discount = order.discounts.first()
    assert order_discount.value == value
    assert order_discount.value_type == DiscountValueType.PERCENTAGE
    assert order_discount.amount == (total_before_order_discount - expected_total).gross
    assert order_discount.reason == reason
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_DISCOUNT_ADDED
    parameters = event.parameters
    discount_data = parameters.get("discount")
    assert discount_data["value"] == str(value)
    assert discount_data["value_type"] == DiscountValueTypeEnum.PERCENTAGE.value
    assert discount_data["amount_value"] == str(order_discount.amount.amount)
ORDER_DISCOUNT_UPDATE = """
mutation OrderDiscountUpdate($discountId: ID!, $input: OrderDiscountCommonInput!){
orderDiscountUpdate(discountId:$discountId, input: $input){
order{
id
total{
gross{
amount
}
}
undiscountedTotal{
gross{
amount
}
}
}
errors{
field
message
code
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_update_percentage_order_discount_to_order(
    status,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Updating an existing fixed discount to a percentage one recomputes
    the total from the undiscounted base and emits ORDER_DISCOUNT_UPDATED."""
    order = draft_order_with_fixed_discount_order
    order.status = status
    order.save(update_fields=["status"])
    order_discount = draft_order_with_fixed_discount_order.discounts.get()
    current_undiscounted_total = order.undiscounted_total
    reason = "The reason of the discount"
    value = Decimal("10.000")
    variables = {
        "discountId": graphene.Node.to_global_id("OrderDiscount", order_discount.pk),
        "input": {
            "valueType": DiscountValueTypeEnum.PERCENTAGE.name,
            "value": value,
            "reason": reason,
        },
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_UPDATE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderDiscountUpdate"]
    order.refresh_from_db()
    # The new percentage applies to the undiscounted total, not the
    # previously discounted one.
    discount = partial(percentage_discount, percentage=value)
    expected_net_total = discount(current_undiscounted_total.net)
    expected_gross_total = discount(current_undiscounted_total.gross)
    expected_total = TaxedMoney(expected_net_total, expected_gross_total)
    errors = data["errors"]
    assert len(errors) == 0
    assert order.undiscounted_total == current_undiscounted_total
    assert expected_total == order.total
    assert order.discounts.count() == 1
    order_discount = order.discounts.first()
    assert order_discount.value == value
    assert order_discount.value_type == DiscountValueType.PERCENTAGE
    assert order_discount.amount == (current_undiscounted_total - expected_total).gross
    assert order_discount.reason == reason
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_DISCOUNT_UPDATED
    parameters = event.parameters
    discount_data = parameters.get("discount")
    assert discount_data["value"] == str(value)
    assert discount_data["value_type"] == DiscountValueTypeEnum.PERCENTAGE.value
    assert discount_data["amount_value"] == str(order_discount.amount.amount)
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_update_fixed_order_discount_to_order(
    status,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Raising the fixed discount value recomputes the order total from the
    undiscounted base and emits an ORDER_DISCOUNT_UPDATED event."""
    order = draft_order_with_fixed_discount_order
    order.status = status
    order.save(update_fields=["status"])
    order_discount = draft_order_with_fixed_discount_order.discounts.get()
    current_undiscounted_total = order.undiscounted_total
    value = Decimal("50.000")
    variables = {
        "discountId": graphene.Node.to_global_id("OrderDiscount", order_discount.pk),
        "input": {
            "valueType": DiscountValueTypeEnum.FIXED.name,
            "value": value,
        },
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_UPDATE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderDiscountUpdate"]
    order.refresh_from_db()
    # Expected total: undiscounted minus the fixed amount in order currency.
    discount = partial(fixed_discount, discount=Money(value, currency=order.currency))
    expected_total = discount(current_undiscounted_total)
    errors = data["errors"]
    assert len(errors) == 0
    assert order.undiscounted_total == current_undiscounted_total
    assert expected_total == order.total
    assert order.discounts.count() == 1
    order_discount = order.discounts.first()
    assert order_discount.value == value
    assert order_discount.value_type == DiscountValueType.FIXED
    assert order_discount.amount == (current_undiscounted_total - expected_total).gross
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_DISCOUNT_UPDATED
    parameters = event.parameters
    discount_data = parameters.get("discount")
    assert discount_data["value"] == str(value)
    assert discount_data["value_type"] == DiscountValueTypeEnum.FIXED.value
    assert discount_data["amount_value"] == str(order_discount.amount.amount)
def test_update_order_discount_order_is_not_draft(
    draft_order_with_fixed_discount_order, staff_api_client, permission_manage_orders
):
    """Updating a discount on an order that is no longer draft is rejected."""
    # given: move the order into a status that forbids manual discounts
    order = draft_order_with_fixed_discount_order
    order.status = OrderStatus.UNFULFILLED
    order.save()
    discount_to_update = order.discounts.get()
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    variables = {
        "discountId": graphene.Node.to_global_id(
            "OrderDiscount", discount_to_update.pk
        ),
        "input": {
            "valueType": DiscountValueTypeEnum.FIXED.name,
            "value": Decimal("50"),
        },
    }
    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(ORDER_DISCOUNT_UPDATE, variables)
    )
    # then: exactly one CANNOT_DISCOUNT error pointing at the order id
    errors = content["data"]["orderDiscountUpdate"]["errors"]
    assert len(errors) == 1
    first_error = errors[0]
    assert first_error["field"] == "orderId"
    assert first_error["code"] == OrderErrorCode.CANNOT_DISCOUNT.name
@pytest.mark.parametrize(
    "value,value_type",
    [
        # Fixed amount far larger than any order total.
        (Decimal("2222222"), DiscountValueTypeEnum.FIXED.name),
        # Percentage above 100 is never valid.
        (Decimal("101"), DiscountValueTypeEnum.PERCENTAGE.name),
    ],
)
def test_update_order_discount_incorrect_values(
    value,
    value_type,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Out-of-range discount values are rejected with an INVALID error."""
    order_discount = draft_order_with_fixed_discount_order.discounts.get()
    variables = {
        "discountId": graphene.Node.to_global_id("OrderDiscount", order_discount.pk),
        "input": {
            "valueType": value_type,
            "value": value,
        },
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_UPDATE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderDiscountUpdate"]
    errors = data["errors"]
    assert len(errors) == 1
    error = errors[0]
    assert error["field"] == "value"
    assert error["code"] == OrderErrorCode.INVALID.name
ORDER_DISCOUNT_DELETE = """
mutation OrderDiscountDelete($discountId: ID!){
orderDiscountDelete(discountId: $discountId){
order{
id
}
errors{
field
message
code
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_delete_order_discount_from_order(
    status,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Deleting an order discount restores the undiscounted total, records an
    ORDER_DISCOUNT_DELETED event and drops the discount from the search
    document."""
    order = draft_order_with_fixed_discount_order
    order.status = status
    order.save(update_fields=["status"])
    order_discount = draft_order_with_fixed_discount_order.discounts.get()
    # Name the discount so we can verify it disappears from search_document.
    name = "discount translated"
    translated_name = "discount translated name"
    order_discount.name = name
    order_discount.translated_name = translated_name
    order_discount.save(update_fields=["name", "translated_name"])
    current_undiscounted_total = order.undiscounted_total
    variables = {
        "discountId": graphene.Node.to_global_id("OrderDiscount", order_discount.pk),
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_DISCOUNT_DELETE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderDiscountDelete"]
    order.refresh_from_db()
    errors = data["errors"]
    assert len(errors) == 0
    assert order.undiscounted_total == current_undiscounted_total
    # With no discount, total equals the undiscounted total again.
    assert order.total == current_undiscounted_total
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_DISCOUNT_DELETED
    assert order.search_document
    assert name not in order.search_document
    assert translated_name not in order.search_document
def test_delete_order_discount_order_is_not_draft(
    draft_order_with_fixed_discount_order, staff_api_client, permission_manage_orders
):
    """Deleting a discount from a non-draft order fails and keeps the discount."""
    # given: an order whose status forbids manual discount changes
    order = draft_order_with_fixed_discount_order
    order.status = OrderStatus.UNFULFILLED
    order.save()
    existing_discount = order.discounts.get()
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    variables = {
        "discountId": graphene.Node.to_global_id(
            "OrderDiscount", existing_discount.pk
        ),
    }
    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(ORDER_DISCOUNT_DELETE, variables)
    )
    # then: one CANNOT_DISCOUNT error and the discount still exists
    errors = content["data"]["orderDiscountDelete"]["errors"]
    assert len(errors) == 1
    assert draft_order_with_fixed_discount_order.discounts.get()
    first_error = errors[0]
    assert first_error["field"] == "orderId"
    assert first_error["code"] == OrderErrorCode.CANNOT_DISCOUNT.name
ORDER_LINE_DISCOUNT_UPDATE = """
mutation OrderLineDiscountUpdate($input: OrderDiscountCommonInput!, $orderLineId: ID!){
orderLineDiscountUpdate(orderLineId: $orderLineId, input: $input){
orderLine{
unitPrice{
gross{
amount
}
}
}
errors{
field
message
code
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_update_order_line_discount(
    status,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Applying a fixed discount to an order line lowers its unit price and
    records an ORDER_LINE_DISCOUNT_UPDATED event with the discount details."""
    order = draft_order_with_fixed_discount_order
    order.status = status
    order.save(update_fields=["status"])
    line_to_discount = order.lines.first()
    # Build the Decimal from a string: Decimal(7.3) would carry the binary
    # float representation error (7.2999999999999998...).
    unit_price = Money(Decimal("7.3"), currency="USD")
    line_to_discount.unit_price = TaxedMoney(unit_price, unit_price)
    line_to_discount.undiscounted_unit_price = line_to_discount.unit_price
    total_price = line_to_discount.unit_price * line_to_discount.quantity
    line_to_discount.total_price = total_price
    line_to_discount.undiscounted_total_price = total_price
    line_to_discount.save()
    line_price_before_discount = line_to_discount.unit_price
    value = Decimal("5")
    reason = "New reason for unit discount"
    variables = {
        "orderLineId": graphene.Node.to_global_id("OrderLine", line_to_discount.pk),
        "input": {
            "valueType": DiscountValueTypeEnum.FIXED.name,
            "value": value,
            "reason": reason,
        },
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_LINE_DISCOUNT_UPDATE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderLineDiscountUpdate"]
    line_to_discount.refresh_from_db()
    errors = data["errors"]
    assert not errors
    # Expected unit price: the fixed discount applied to the original price.
    discount = partial(
        fixed_discount,
        discount=Money(value, currency=order.currency),
    )
    expected_line_price = discount(line_price_before_discount)
    assert line_to_discount.unit_price == quantize_price(expected_line_price, "USD")
    unit_discount = line_to_discount.unit_discount
    assert unit_discount == (line_price_before_discount - expected_line_price).gross
    # The event payload lists the discounted line and the discount as strings.
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_LINE_DISCOUNT_UPDATED
    parameters = event.parameters
    lines = parameters.get("lines", {})
    assert len(lines) == 1
    line_data = lines[0]
    assert line_data.get("line_pk") == str(line_to_discount.pk)
    discount_data = line_data.get("discount")
    assert discount_data["value"] == str(value)
    assert discount_data["value_type"] == DiscountValueTypeEnum.FIXED.value
    assert discount_data["amount_value"] == str(unit_discount.amount)
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_update_order_line_discount_line_with_discount(
    status,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Replacing an existing fixed line discount with a percentage one applies
    the new value to the undiscounted price and logs old/new values in the
    event payload."""
    order = draft_order_with_fixed_discount_order
    order.status = status
    order.save(update_fields=["status"])
    line_to_discount = order.lines.first()
    # Build the Decimal from a string: Decimal(7.3) would carry the binary
    # float representation error (7.2999999999999998...).
    unit_price = quantize_price(Money(Decimal("7.3"), currency="USD"), currency="USD")
    line_to_discount.unit_price = TaxedMoney(unit_price, unit_price)
    # Pre-existing fixed discount of 2.500 that the mutation will replace.
    line_to_discount.unit_discount_amount = Decimal("2.500")
    line_to_discount.unit_discount_type = DiscountValueType.FIXED
    line_to_discount.unit_discount_value = Decimal("2.500")
    line_to_discount.undiscounted_unit_price_gross_amount = (
        line_to_discount.unit_price_gross_amount + line_to_discount.unit_discount_amount
    )
    line_to_discount.undiscounted_unit_price_net_amount = (
        line_to_discount.unit_price_net_amount + line_to_discount.unit_discount_amount
    )
    line_to_discount.undiscounted_total_price_gross_amount = (
        line_to_discount.undiscounted_unit_price_gross_amount
        * line_to_discount.quantity
    )
    line_to_discount.undiscounted_total_price_net_amount = (
        line_to_discount.undiscounted_unit_price_net_amount * line_to_discount.quantity
    )
    line_to_discount.save()
    line_discount_amount_before_update = line_to_discount.unit_discount_amount
    line_discount_value_before_update = line_to_discount.unit_discount_value
    line_undiscounted_price = line_to_discount.undiscounted_unit_price
    value = Decimal("50")
    reason = "New reason for unit discount"
    variables = {
        "orderLineId": graphene.Node.to_global_id("OrderLine", line_to_discount.pk),
        "input": {
            "valueType": DiscountValueTypeEnum.PERCENTAGE.name,
            "value": value,
            "reason": reason,
        },
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_LINE_DISCOUNT_UPDATE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderLineDiscountUpdate"]
    line_to_discount.refresh_from_db()
    errors = data["errors"]
    assert not errors
    # The percentage applies to the undiscounted price, not the old discounted one.
    discount = partial(
        percentage_discount,
        percentage=value,
    )
    expected_line_price = discount(line_undiscounted_price)
    assert line_to_discount.unit_price == expected_line_price
    unit_discount = line_to_discount.unit_discount
    assert unit_discount == (line_undiscounted_price - expected_line_price).gross
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_LINE_DISCOUNT_UPDATED
    parameters = event.parameters
    lines = parameters.get("lines", {})
    assert len(lines) == 1
    line_data = lines[0]
    assert line_data.get("line_pk") == str(line_to_discount.pk)
    discount_data = line_data.get("discount")
    assert discount_data["value"] == str(value)
    assert discount_data["value_type"] == DiscountValueTypeEnum.PERCENTAGE.value
    assert discount_data["amount_value"] == str(unit_discount.amount)
    # The event also preserves the replaced discount for auditing.
    assert discount_data["old_value"] == str(line_discount_value_before_update)
    assert discount_data["old_value_type"] == DiscountValueTypeEnum.FIXED.value
    assert discount_data["old_amount_value"] == str(line_discount_amount_before_update)
def test_update_order_line_discount_order_is_not_draft(
    draft_order_with_fixed_discount_order, staff_api_client, permission_manage_orders
):
    """Line discounts cannot be updated once the order left the draft state."""
    # given: an unfulfilled (non-draft) order and its first line
    order = draft_order_with_fixed_discount_order
    order.status = OrderStatus.UNFULFILLED
    order.save()
    target_line = order.lines.first()
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    variables = {
        "orderLineId": graphene.Node.to_global_id("OrderLine", target_line.pk),
        "input": {
            "valueType": DiscountValueTypeEnum.FIXED.name,
            "value": Decimal("5"),
            "reason": "New reason for unit discount",
        },
    }
    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(ORDER_LINE_DISCOUNT_UPDATE, variables)
    )
    target_line.refresh_from_db()
    # then: one CANNOT_DISCOUNT error and the line remains undiscounted
    errors = content["data"]["orderLineDiscountUpdate"]["errors"]
    assert len(errors) == 1
    first_error = errors[0]
    assert first_error["field"] == "orderId"
    assert first_error["code"] == OrderErrorCode.CANNOT_DISCOUNT.name
    assert target_line.unit_discount_amount == Decimal("0")
ORDER_LINE_DISCOUNT_REMOVE = """
mutation OrderLineDiscountRemove($orderLineId: ID!){
orderLineDiscountRemove(orderLineId: $orderLineId){
orderLine{
id
}
errors{
field
message
code
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
@patch("saleor.plugins.manager.PluginsManager.calculate_order_line_unit")
@patch("saleor.plugins.manager.PluginsManager.calculate_order_line_total")
def test_delete_discount_from_order_line(
    mocked_calculate_order_line_total,
    mocked_calculate_order_line_unit,
    status,
    draft_order_with_fixed_discount_order,
    staff_api_client,
    permission_manage_orders,
):
    """Removing a line discount restores the undiscounted prices and records
    an ORDER_LINE_DISCOUNT_REMOVED event.

    Plugin price calculations are mocked to return the undiscounted prices so
    the assertion does not depend on tax-plugin behaviour.
    """
    order = draft_order_with_fixed_discount_order
    order.status = status
    order.save(update_fields=["status"])
    line = order.lines.first()
    line_undiscounted_price = line.undiscounted_unit_price
    line_undiscounted_total_price = line.undiscounted_total_price
    mocked_calculate_order_line_unit.return_value = OrderTaxedPricesData(
        undiscounted_price=line_undiscounted_price,
        price_with_discounts=line_undiscounted_price,
    )
    mocked_calculate_order_line_total.return_value = OrderTaxedPricesData(
        undiscounted_price=line_undiscounted_total_price,
        price_with_discounts=line_undiscounted_total_price,
    )
    # Give the line an existing fixed discount to remove.
    line.unit_discount_amount = Decimal("2.5")
    line.unit_discount_type = DiscountValueType.FIXED
    line.unit_discount_value = Decimal("2.5")
    line.save()
    variables = {
        "orderLineId": graphene.Node.to_global_id("OrderLine", line.pk),
    }
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    response = staff_api_client.post_graphql(ORDER_LINE_DISCOUNT_REMOVE, variables)
    content = get_graphql_content(response)
    data = content["data"]["orderLineDiscountRemove"]
    errors = data["errors"]
    assert len(errors) == 0
    line.refresh_from_db()
    assert line.unit_price == line_undiscounted_price
    assert line.total_price == line_undiscounted_total_price
    # The stored unit discount is zeroed out after removal.
    unit_discount = line.unit_discount
    currency = order.currency
    assert unit_discount == Money(Decimal(0), currency=currency)
    event = order.events.get()
    assert event.type == OrderEvents.ORDER_LINE_DISCOUNT_REMOVED
    parameters = event.parameters
    lines = parameters.get("lines", {})
    assert len(lines) == 1
    line_data = lines[0]
    assert line_data.get("line_pk") == str(line.pk)
def test_delete_order_line_discount_order_is_not_draft(
    draft_order_with_fixed_discount_order, staff_api_client, permission_manage_orders
):
    """Line discounts cannot be removed once the order left the draft state."""
    # given: a non-draft order whose first line carries a fixed discount
    order = draft_order_with_fixed_discount_order
    order.status = OrderStatus.UNFULFILLED
    order.save()
    discounted_line = order.lines.first()
    discounted_line.unit_discount_amount = Decimal("2.5")
    discounted_line.unit_discount_type = DiscountValueType.FIXED
    discounted_line.unit_discount_value = Decimal("2.5")
    discounted_line.save()
    staff_api_client.user.user_permissions.add(permission_manage_orders)
    variables = {
        "orderLineId": graphene.Node.to_global_id("OrderLine", discounted_line.pk),
    }
    # when
    content = get_graphql_content(
        staff_api_client.post_graphql(ORDER_LINE_DISCOUNT_REMOVE, variables)
    )
    # then: one CANNOT_DISCOUNT error; order discount and line discount remain
    errors = content["data"]["orderLineDiscountRemove"]["errors"]
    assert len(errors) == 1
    assert draft_order_with_fixed_discount_order.discounts.get()
    first_error = errors[0]
    assert first_error["field"] == "orderId"
    assert first_error["code"] == OrderErrorCode.CANNOT_DISCOUNT.name
    assert discounted_line.unit_discount_amount == Decimal("2.5")
| 33.858218 | 88 | 0.731518 |
647e5ef76b23a9f98769a10cd1808aeff4c08aaa | 3,985 | py | Python | usecases/extasy_gromacs_lsdmap_adaptive/custom_kernels/pre_lsdmap.py | chemlove/radical.ensemblemd | 0ec4b127760d2fee88d4eae1768fecec4bdd6b21 | [
"MIT"
] | null | null | null | usecases/extasy_gromacs_lsdmap_adaptive/custom_kernels/pre_lsdmap.py | chemlove/radical.ensemblemd | 0ec4b127760d2fee88d4eae1768fecec4bdd6b21 | [
"MIT"
] | null | null | null | usecases/extasy_gromacs_lsdmap_adaptive/custom_kernels/pre_lsdmap.py | chemlove/radical.ensemblemd | 0ec4b127760d2fee88d4eae1768fecec4bdd6b21 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""A kernel that creates a new ASCII file with a given size and name.
"""
__author__ = "Vivek <vivek.balasubramanian@rutgers.edu>"
__copyright__ = "Copyright 2014, http://radical.rutgers.edu"
__license__ = "MIT"
from copy import deepcopy
from radical.ensemblemd.exceptions import ArgumentError
from radical.ensemblemd.exceptions import NoKernelConfigurationError
from radical.ensemblemd.kernel_plugins.kernel_base import KernelBase
# ------------------------------------------------------------------------------
#
# Kernel descriptor consumed by KernelBase: name, CLI arguments and
# per-resource launch configuration ("machine_configs").
_KERNEL_INFO = {
    "name": "custom.pre_lsdmap",
    "description": "Creates a new file of given size and fills it with random ASCII characters.",
    "arguments": {},
    "machine_configs":
    {
        # "*" is the generic fallback used when no resource-specific entry matches.
        "*": {
            "environment" : {"FOO": "bar"},
            "pre_exec" : [],
            "executable" : ".",
            "uses_mpi" : True
        },
        "xsede.stampede":
        {
            "environment" : {},
            "pre_exec" : [
                "module load TACC",
                "module load intel/15.0.2",
                "module load boost",
                "module load cxx11",
                "module load gromacs",
                "module load python",
            ],
            "executable" : ["python"],
            "uses_mpi" : False
        },
        "epsrc.archer":
        {
            "environment" : {},
            "pre_exec" : [
                "module load packages-archer",
                "module load gromacs",
                "module load python-compute/2.7.6"
            ],
            "executable" : ["python"],
            "uses_mpi" : False
        },
        "futuregrid.india":
        {
            "environment" : {},
            "pre_exec" : [
                "module load openmpi",
                "module load python",
                "export PATH=$PATH:/N/u/vivek91/modules/gromacs-5/bin:/N/u/vivek91/.local/bin"
            ],
            "executable" : ["python"],
            "uses_mpi" : False
        },
        "ncsa.bw":
        {
            "environment" : {},
            "pre_exec" : ["source /projects/sciteam/gkd/virtenvs/lsdmap/20151210_OMPI20151210-DYN/bin/activate",
                          "export PATH=$PATH:/projects/sciteam/gkd/gromacs/5.1.1/20151210-NO_MPI/install-cpu/bin"],
            "executable" : ["python"],
            "uses_mpi" : False
        },
    }
}
# ------------------------------------------------------------------------------
#
class pre_lsdmap_Kernel(KernelBase):
    """Ensemble-MD kernel that runs ``pre_analyze.py`` ahead of LSDMap."""
    # --------------------------------------------------------------------------
    #
    def __init__(self):
        """Register this kernel with the base class using _KERNEL_INFO.
        """
        super(pre_lsdmap_Kernel, self).__init__(_KERNEL_INFO)
    # --------------------------------------------------------------------------
    #
    @staticmethod
    def get_name():
        # The canonical kernel name ("custom.pre_lsdmap").
        return _KERNEL_INFO["name"]
    # --------------------------------------------------------------------------
    #
    def _bind_to_resource(self, resource_key):
        """(PRIVATE) Implements parent class method.

        Selects the machine configuration for *resource_key* (falling back to
        the generic "*" entry) and populates the launch attributes read by the
        base class.
        """
        if resource_key not in _KERNEL_INFO["machine_configs"]:
            if "*" in _KERNEL_INFO["machine_configs"]:
                # Fall-back to generic resource key
                resource_key = "*"
            else:
                raise NoKernelConfigurationError(kernel_name=_KERNEL_INFO["name"], resource_key=resource_key)
        cfg = _KERNEL_INFO["machine_configs"][resource_key]
        # Arguments are fixed: script name, input structure, output directory.
        arguments = ['pre_analyze.py','tmp.gro','.']
        self._executable = cfg["executable"]
        self._arguments = arguments
        self._environment = cfg["environment"]
        self._uses_mpi = cfg["uses_mpi"]
        self._pre_exec = cfg["pre_exec"]
79e0fe165c813827363bffa37ffc109c489318d6 | 1,570 | py | Python | setup.py | svenXY/prometheus-mysql-exporter | a7e5c126fee234f95a8c5b2eb2390c4f9268d5ea | [
"MIT"
] | 35 | 2017-01-25T06:50:01.000Z | 2021-12-23T15:44:46.000Z | setup.py | weiya1990/prometheus-mysql-exporter | 52f2ac4c8dfa30d9dced98a137e89441581d527c | [
"MIT"
] | 17 | 2018-01-18T09:55:45.000Z | 2021-08-03T09:08:21.000Z | setup.py | weiya1990/prometheus-mysql-exporter | 52f2ac4c8dfa30d9dced98a137e89441581d527c | [
"MIT"
] | 18 | 2017-06-12T08:44:11.000Z | 2021-12-23T15:44:58.000Z | from setuptools import setup, find_packages
from os import path
# Use the README as the package's long description on PyPI.
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='prometheus-mysql-exporter',
    version='0.5.0',
    description='MySQL query Prometheus exporter',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/braedon/prometheus-mysql-exporter',
    author='Braedon Vickers',
    author_email='braedon.vickers@gmail.com',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Topic :: System :: Monitoring',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
    keywords='monitoring prometheus exporter mysql',
    packages=find_packages(exclude=['tests']),
    python_requires='>=3.5',
    install_requires=[
        'click',
        'click-config-file',
        'croniter',
        'DBUtils ~= 2.0',
        'jog',
        'PyMySQL',
        'prometheus-client >= 0.6.0',
        'pytz',
    ],
    # Installs the `prometheus-mysql-exporter` CLI entry point.
    entry_points={
        'console_scripts': [
            'prometheus-mysql-exporter=prometheus_mysql_exporter:main',
        ],
    },
)
| 31.4 | 73 | 0.618471 |
f0a6e907a4002adce858b6a2965b85278f09d927 | 33,723 | py | Python | authors/apps/articles/views.py | andela/ah-codeofduty | ab749037dbb08712a2e47848e31a1ccb14de1165 | [
"BSD-3-Clause"
] | null | null | null | authors/apps/articles/views.py | andela/ah-codeofduty | ab749037dbb08712a2e47848e31a1ccb14de1165 | [
"BSD-3-Clause"
] | 41 | 2018-10-23T08:45:43.000Z | 2022-03-11T23:34:18.000Z | authors/apps/articles/views.py | andela/ah-codeofduty | ab749037dbb08712a2e47848e31a1ccb14de1165 | [
"BSD-3-Clause"
] | 3 | 2020-05-01T16:21:13.000Z | 2021-05-11T08:25:11.000Z | '''articles/views.py'''
import django_filters
from django.contrib.postgres.fields import ArrayField
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import send_mail
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django_filters import rest_framework as filters
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import status, viewsets, generics
from rest_framework.exceptions import NotFound, PermissionDenied
from rest_framework.filters import SearchFilter
from rest_framework.generics import CreateAPIView, ListAPIView, UpdateAPIView, GenericAPIView
from rest_framework.permissions import (
IsAuthenticated, IsAuthenticatedOrReadOnly, AllowAny)
from rest_framework.response import Response
from rest_framework.views import APIView
from authors.apps.articles.renderers import ReportJSONRenderer
from authors.apps.authentication.backends import JWTAuthentication
from authors.apps.authentication.models import User
from authors.apps.core.pagination import LimitOffsetPagination
from .exceptions import ArticleDoesNotExist
from .models import Article, Comment
from .models import CommentHistory, Highlight, Report, LikesDislikes, Tag, ArticleStatistics
from .serializers import (ArticleSerializer, CommentSerializer, TagSerializer,
CommentHistorySerializer, HighlightSerializer, ReportSerializer, LikesDislikesSerializer,
ArticleStatSerializer)
class ArticleMetaData:
    """Shared configuration and helpers for article views."""
    permission_classes = (IsAuthenticatedOrReadOnly,)
    serializer_class = ArticleSerializer
    def check_article_exists(self, slug):
        """Return the article with the given slug or raise 404 NotFound."""
        try:
            return Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound('This article doesn\'t exist')
class ArticlesView(ArticleMetaData, viewsets.ModelViewSet):
    """CRUD endpoints for articles, with filtered/sorted listings and
    email notifications to subscribed followers on article creation."""
    permission_classes = (IsAuthenticatedOrReadOnly,)
    serializer_class = ArticleSerializer
    pagination_class = LimitOffsetPagination
    def get_queryset(self):
        ''' Return all articles, optionally filtered by the ``author``,
        ``title`` and ``tag`` query parameters. '''
        queryset = Article.objects.all()
        author = self.request.query_params.get('author', None)
        if author is not None:
            queryset = queryset.filter(author__username=author)
        title = self.request.query_params.get('title', None)
        if title is not None:
            queryset = queryset.filter(title=title)
        tag = self.request.query_params.get('tag', None)
        if tag is not None:
            queryset = queryset.filter(tags__tag=tag)
        return queryset
    def list(self, request):
        ''' Fetch all (filtered) articles, paginated. '''
        serializer_context = {'request': request}
        page = self.paginate_queryset(self.get_queryset())
        serializer = self.serializer_class(
            page, context=serializer_context, many=True)
        return self.get_paginated_response(serializer.data)
    def list_by_recent(self, request):
        # Same listing, newest articles first.
        page = self.paginate_queryset(self.get_queryset().order_by('-time_created'))
        serializer = self.serializer_class(
            page, context={"request": request}, many=True)
        return self.get_paginated_response(serializer.data)
    def list_by_popular(self, request):
        # Same listing, highest average rating first.
        page = self.paginate_queryset(self.get_queryset().order_by('-average_rating'))
        serializer = self.serializer_class(
            page, context={"request": request}, many=True)
        return self.get_paginated_response(serializer.data)
    def create(self, request):
        '''Create a new article (POST) and email every subscribed follower
        of the author a notification containing an unsubscribe link.'''
        serializer = self.serializer_class(
            data=request.data, context={"email": request.user})
        serializer.is_valid(raise_exception=True)
        serializer.save()
        profile = self.request.user.profile
        to = []
        # Collect email addresses of followers who opted into notifications.
        # NOTE(review): two User lookups per follower plus one per recipient
        # below — consider batching these queries.
        my_followers = profile.get_my_followers()
        for follower in my_followers:
            subscribed = User.objects.filter(username=follower).values()[0]['is_subscribed']
            if subscribed:
                follower = User.objects.filter(username=follower).values()[0]['email']
                to.append(follower)
        for recipient in to:
            # Token embedded in the unsubscribe link identifies the recipient.
            pk = User.objects.filter(email=recipient).values()[0]['id']
            token = JWTAuthentication.encode_token(self, pk)
            current_site = get_current_site(request)
            # Setup the content to be sent
            # the url to send with the mail
            link = "http://" + current_site.domain + \
                '/api/notifications/subscription/' + token + '/'
            article_link = "http://" + current_site.domain + \
                '/api/articles/{}/'.format(serializer.data['slug'])
            from_email = 'codeofd@gmail.com'
            # username = request.user.username
            template = 'index.html'
            subject = '"{}" added a new article "{}"'.format(request.user.username, request.data['title'])
            username = User.objects.filter(email=recipient).values()[0]['username']
            html_content = render_to_string(template, context={
                "username": username,
                "author": request.user.username,
                "unsubscribe_url": link,
                "article_link": article_link,
                "article_title": request.data['title']})
            send_mail(subject, '', from_email, [recipient], html_message=html_content)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    def retrieve(self, request, slug):
        '''Retrieve a single article (GET); also records a statistics row
        for the view.'''
        serializer_context = {'request': request}
        article = self.check_article_exists(slug)
        serializer = self.serializer_class(article, context=serializer_context)
        ArticleStatistics.objects.create(article=article)
        return Response(serializer.data, status=status.HTTP_200_OK)
    def update(self, request, slug):
        '''Partially update an article (PUT).'''
        article = self.check_article_exists(slug)
        serializer = self.serializer_class(article, data=request.data, context={
            "email": request.user}, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)
    def destroy(self, request, slug):
        '''Delete an article (DELETE); only its author may do so.'''
        article = self.check_article_exists(slug)
        email = request.user
        if email != article.author:
            raise PermissionDenied
        article.delete()
        # Drop tags that are no longer referenced by any article.
        Tag.edit_tags()
        return Response(dict(message="Article {} deleted successfully".format(slug)), status=status.HTTP_200_OK)
class ArticlesFavoriteAPIView(APIView):
    """
    Implements favoriting and unfavoriting articles.
    Favoriting emails the article's author if they are subscribed to
    notifications.
    """
    permission_classes = (IsAuthenticated,)
    serializer_class = ArticleSerializer
    def post(self, request, slug=None):
        """
        Favorite the article identified by ``slug`` and notify its author.
        """
        profile = self.request.user.profile
        serializer_context = {'request': request}
        try:
            article = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise ArticleDoesNotExist
        profile.favorite(article)
        serializer = self.serializer_class(article, context=serializer_context)
        # NOTE(review): the article/author fields below are re-fetched with
        # several .values() queries although ``article`` is already loaded.
        article_author_id = Article.objects.filter(slug=self.kwargs["slug"]).values()[0]['author_id']
        article_username = User.objects.filter(id=article_author_id).values()[0]['username']
        article_username_pk = User.objects.filter(id=article_author_id).values()[0]['id']
        article_author = User.objects.filter(id=article_author_id).values()[0]['email']
        article_title = Article.objects.filter(slug=self.kwargs["slug"]).values()[0]['title']
        author_notification_subscription = User.objects.filter(id=article_author_id).values()[0]['is_subscribed']
        article_slug = Article.objects.filter(slug=self.kwargs["slug"]).values()[0]['slug']
        favouriter = request.user.username
        # NOTE(review): ``is_favorited`` is assigned but never used.
        is_favorited = serializer.data['favorited']
        # Token for the author's unsubscribe link.
        token = JWTAuthentication.encode_token(self, article_username_pk)
        if author_notification_subscription:
            current_site = get_current_site(request)
            link = "http://" + current_site.domain + \
                '/api/notifications/subscription/' + token + '/'
            article_link = "http://" + current_site.domain + \
                '/api/articles/{}/'.format(article_slug)
            from_email = 'codeofd@gmail.com'
            template = 'favorite.html'
            to = [article_author]
            subject = '"{}" favourited your article, "{}"'.format(favouriter, article_title)
            html_content = render_to_string(template, context={
                "username": article_username,
                "favouriter": favouriter,
                'article_title': article_title,
                'article_link': article_link,
                "unsubscribe_url": link})
            # send Email
            send_mail(subject, '', from_email, to, html_message=html_content)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    def delete(self, request, slug=None):
        """
        Unfavorite the article identified by ``slug``.
        """
        profile = self.request.user.profile
        serializer_context = {'request': request}
        try:
            article = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise ArticleDoesNotExist
        profile.unfavorite(article)
        serializer = self.serializer_class(article, context=serializer_context)
        return Response(serializer.data, status=status.HTTP_200_OK)
class CommentsListCreateAPIView(ArticlesView):
    """Authenticated users can comment on articles; commenting notifies
    subscribed users who favorited the article."""
    queryset = Article.objects.all()
    permission_classes = (IsAuthenticatedOrReadOnly,)
    serializer_class = CommentSerializer
    def create_a_comment(self, request, slug=None):
        """
        Create a comment on the article identified by the ``slug`` url kwarg
        (404 if the article does not exist) and email every subscribed user
        who favorited it.
        """
        article = get_object_or_404(Article, slug=self.kwargs["slug"])
        data = request.data
        serializer = self.serializer_class(data=data)
        if serializer.is_valid():
            serializer.save(author=self.request.user, article=article)
            # NOTE(review): these .values() lookups duplicate data already on
            # ``article``; could be read from the loaded instance instead.
            article_author_id = Article.objects.filter(slug=self.kwargs["slug"]).values()[0]['author_id']
            article_slug = Article.objects.filter(slug=self.kwargs["slug"]).values()[0]['slug']
            article_title = Article.objects.filter(slug=self.kwargs["slug"]).values()[0]['title']
            article_author = User.objects.filter(id=article_author_id).values()[0]['username']
            articles_instance = Article.objects.get(slug=article_slug)
            favouriters = articles_instance.favorited_by.values()
            commenter = request.user.username
            for user_id in favouriters:
                favouriters_name = User.objects.get(id=user_id['user_id'])
                # Per-recipient token for the unsubscribe link.
                token = JWTAuthentication.encode_token(self, favouriters_name.pk)
                author_notification_subscription = User.objects.filter(id=favouriters_name.pk).values()[0][
                    'is_subscribed']
                if author_notification_subscription:
                    current_site = get_current_site(request)
                    link = "http://" + current_site.domain + \
                        '/api/notifications/subscription/' + token + '/'
                    article_link = "http://" + current_site.domain + \
                        '/api/articles/{}/'.format(article_slug)
                    from_email = 'codeofd@gmail.com'
                    template = 'comments.html'
                    to = [favouriters_name.email]
                    subject = 'New comment on one of your favorite articles, "{}" by "{}"'.format(article_title,
                                                                                                  article_author)
                    html_content = render_to_string(template, context={
                        "username": favouriters_name.username,
                        "commenter": commenter,
                        'article_title': article_title,
                        'article_link': article_link,
                        "unsubscribe_url": link})
                    # send Email
                    send_mail(subject, '', from_email, to, html_message=html_content)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    def fetch_all_comments(self, request, slug=None):
        """
        Retrieve all comments of an article (404 if the article does not
        exist). Responds with a plain message when there are no comments,
        and appends a ``commentsCount`` entry when there is more than one.
        """
        article = get_object_or_404(Article, slug=self.kwargs["slug"])
        comments_found = Comment.objects.filter(article__id=article.id)
        comments_list = []
        for comment in comments_found:
            serializer = CommentSerializer(comment)
            comments_list.append(serializer.data)
        response = []
        response.append({'comments': comments_list})
        commentsCount = len(comments_list)
        if commentsCount == 0:
            return Response({"Message": "There are no comments for this article"}, status=status.HTTP_200_OK)
        elif commentsCount == 1:
            # NOTE(review): single-comment responses omit ``commentsCount``;
            # confirm clients expect this asymmetry.
            return Response(response, status=status.HTTP_200_OK)
        else:
            response.append({"commentsCount": commentsCount})
            return Response(response, status=status.HTTP_200_OK)
class CommentRetrieveUpdateDestroy(CommentsListCreateAPIView, CreateAPIView):
    """
    Retrieve, reply to, update and delete a single comment on an article.
    Comments are located via the ``slug`` (article) and ``id`` (comment)
    url kwargs.
    """
    queryset = Article.objects.all()
    permission_classes = (IsAuthenticatedOrReadOnly, IsAuthenticated)
    serializer_class = CommentSerializer
    serializer_history = CommentHistorySerializer
def fetch_comment_obj(self):
"""
This method fetchies comment object
if a comment does not exist a Not found is returned.
"""
article = get_object_or_404(Article, slug=self.kwargs["slug"])
comment_set = Comment.objects.filter(article__id=article.id)
for comment in comment_set:
new_comment = get_object_or_404(Comment, pk=self.kwargs["id"])
if comment.id == new_comment.id:
self.check_object_permissions(self.request, comment)
return comment
def create_a_reply(self, request, slug=None, pk=None, **kwargs):
"""
This method creates a comment reply to a specified comment if it exist
"""
data = request.data
context = {'request': request}
comment = self.fetch_comment_obj()
context['parent'] = comment = Comment.objects.get(pk=comment.id)
serializer = self.serializer_class(data=data, context=context)
serializer.is_valid(raise_exception=True)
serializer.save(author=self.request.user)
return Response(serializer.data, status=status.HTTP_201_CREATED)
def fetch_a_comment(self, request, slug=None, pk=None, **kwargs):
"""
This method will retrieve a comment if it exist
A comment will come with all the replies if exist.
"""
comment = self.fetch_comment_obj()
if comment == None:
return Response({"message": "Comment with the specified id for this article does Not Exist"},
status=status.HTTP_404_NOT_FOUND)
serializer = CommentSerializer(comment)
return Response(serializer.data, status=status.HTTP_200_OK)
def update_a_comment(self, request, slug=None, pk=None, **kwargs):
"""
This method will update a comment
However it cannot update a reply
"""
comment = self.fetch_comment_obj()
if comment == None:
return Response({"message": "Comment with the specified id for this article does Not Exist"},
status=status.HTTP_404_NOT_FOUND)
old_comment = CommentSerializer(comment).data['body']
comment_id = CommentSerializer(comment).data['id']
parent_comment_obj = Comment.objects.only('id').get(id=comment_id)
data = request.data
new_comment = data['body']
if new_comment == old_comment:
return Response(
{"message": "New comment same as the old existing one. "
"Editing rejected"},
status=status.HTTP_400_BAD_REQUEST)
serializer = self.serializer_class(
comment, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
edited_comment = CommentHistory.objects.create(
comment=old_comment,
parent_comment=parent_comment_obj)
edited_comment.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
def delete_a_comment(self, request, slug=None, pk=None, **kwargs):
"""
This method deletes a comment if it exists
Replies attached to a comment to be deleted are also deleted
"""
comment = self.fetch_comment_obj()
if comment == None:
return Response({"message": "Comment with the specified id for this article does Not Exist"},
status=status.HTTP_404_NOT_FOUND)
comment.delete()
return Response({"message": {"Comment was deleted successfully"}}, status.HTTP_200_OK)
class ArticlesFeedAPIView(ListAPIView):
    """
    Returns multiple articles created by followed users, ordered by most recent first.
    """
    permission_classes = (IsAuthenticated,)
    queryset = Article.objects.all()
    serializer_class = ArticleSerializer

    def get_queryset(self):
        """Articles authored by users the requester follows."""
        followed_users = [
            profile.user
            for profile in self.request.user.profile.follows.all()
        ]
        return Article.objects.filter(author__in=followed_users)

    def list(self, request):
        """Serialize the feed queryset and return it."""
        serializer = self.serializer_class(
            self.get_queryset(),
            context={'request': request},
            many=True,
        )
        return Response(serializer.data)
class ArticleFilterAPIView(filters.FilterSet):
    """
    creates a custom filter class for articles
    """
    # Case-insensitive substring matching on the main text fields.
    title = filters.CharFilter(field_name='title', lookup_expr='icontains')
    description = filters.CharFilter(
        field_name='description', lookup_expr='icontains')
    body = filters.CharFilter(field_name='body', lookup_expr='icontains')
    author__username = filters.CharFilter(
        field_name='author__username', lookup_expr='icontains')

    class Meta:
        """
        This class describes the fields to be used in the search.
        Overrides the ArrayField
        """
        model = Article
        fields = [
            'title', 'description', 'body', 'tags', 'author__username'
        ]
        # 'tags' is an ArrayField, which django-filter cannot map to a
        # filter automatically; fall back to an icontains CharFilter.
        filter_overrides = {
            ArrayField: {
                'filter_class': django_filters.CharFilter,
                'extra': lambda f: {
                    'lookup_expr': 'icontains', },
            },
        }
class ArticlesSearchListAPIView(ListAPIView):
    """
    Implements search functionality
    """
    permission_classes = (IsAuthenticatedOrReadOnly,)
    # Fields matched by the ?search= query parameter.
    search_list = ['title', 'body', 'description', 'tags', 'author__username']
    # Fields exposed as individual query-parameter filters.
    filter_list = ['title', 'tags', 'author__username']
    queryset = Article.objects.all()
    serializer_class = ArticleSerializer
    # DjangoFilterBackend class, allows you to easily create filters across relationships,
    # or create multiple filter lookup types for a given field.
    # SearchFilter class supports simple single query parameter based searching
    # It will only be applied if the view has a search_fields attribute set.
    # The search_fields attribute should be a list of names of text type fields on the model,
    # such as CharField or TextField.
    filter_backends = (DjangoFilterBackend, SearchFilter)
    filter_fields = filter_list
    search_fields = search_list
    # Custom FilterSet providing the ArrayField override for 'tags'.
    filterset_class = ArticleFilterAPIView
class CommentHistoryAPIView(generics.ListAPIView):
    """Fetches the edit history of a single comment."""
    lookup_url_kwarg = 'pk'
    serializer_class = CommentHistorySerializer
    permission_classes = (IsAuthenticated,)

    def get(self, request, *args, **kwargs):
        """
        Overrides the default GET request from ListAPIView.
        Returns all comment edits for a particular comment.

        :param request:
        :param args:
        :param kwargs:
        :return: HTTP Code 200
        :return: Response
        """
        try:
            comment = Comment.objects.get(pk=kwargs['id'])
        except Comment.DoesNotExist:
            return Response(
                {"message": "Comment not found"},
                status=status.HTTP_404_NOT_FOUND)
        self.queryset = CommentHistory.objects.filter(parent_comment=comment)
        # self.list() resolves to the same ListAPIView.list through the
        # MRO; the old explicit unbound call was unnecessary.
        return self.list(request, *args, **kwargs)
class HighlightCommentView(ArticleMetaData, viewsets.ModelViewSet):
    """
    View allowing highlighting and commenting on a specific part of an
    article.
    """
    permission_classes = (IsAuthenticatedOrReadOnly,)
    serializer_class = HighlightSerializer

    def list(self, request, slug):
        '''get all highlights of an article'''
        article = self.check_article_exists(slug)
        highlights = article.highlights.values()
        return Response(dict(highlights=highlights))

    def post(self, request, slug):
        '''create a new highlight or comment on article'''
        article = self.check_article_exists(slug)
        serializer = self.serializer_class(data=request.data, context=dict(
            article=article,
            highlighter=request.user))
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def retrieve(self, request, slug, id):
        '''get a particular text highlight'''
        highlight = get_object_or_404(Highlight, id=id)
        # Bug fix: the old code fed request.data (empty on a GET) into the
        # serializer and called is_valid() without raising; on validation
        # failure DRF's .data falls back to the initial data, so a plain
        # GET returned an empty payload. Serialize the instance directly.
        serializer = self.serializer_class(
            highlight, context=dict(user=request.user))
        return Response(serializer.data)

    def put(self, request, slug, id):
        '''update a particular highlight on an article'''
        highlight = get_object_or_404(Highlight, id=id)
        serializer = self.serializer_class(highlight, data=request.data,
                                           context=dict(user=request.user),
                                           partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)

    def delete(self, request, slug, id):
        '''delete a particular highlight or comment on an article'''
        self.check_article_exists(slug=slug)
        highlight = get_object_or_404(Highlight, id=id)
        # Only the author of the highlight may delete it.
        if request.user != highlight.highlighter:
            raise PermissionDenied
        highlight.delete()
        return Response(dict(message="Comment deleted"),
                        status=status.HTTP_200_OK)
class LikeComments(UpdateAPIView):
    """Toggles the requesting user's like on a comment."""
    serializer_class = CommentSerializer

    def update(self, request, *args, **kwargs):
        """
        Like the comment if the user has not liked it yet; remove the
        like otherwise. 404s when the article or comment is missing.
        """
        slug = self.kwargs['slug']
        try:
            Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            return Response({'Error': 'The article does not exist'},
                            status.HTTP_404_NOT_FOUND)
        try:
            comment = Comment.objects.get(id=self.kwargs.get('id'))
        except Comment.DoesNotExist:
            message = {"Error": "A comment with this ID does not exist"}
            return Response(message, status.HTTP_404_NOT_FOUND)
        user = request.user
        # Single EXISTS query instead of loading every liker into memory
        # with `user in comment.likes.all()`.
        if comment.likes.filter(pk=user.pk).exists():
            comment.likes.remove(user.id)
            return Response({"Success": "You un-liked this comment"},
                            status.HTTP_200_OK)
        comment.likes.add(user.id)
        return Response({"Success": "You liked this comment"},
                        status.HTTP_200_OK)
class ReportCreateAPIView(generics.CreateAPIView):
    """Facilitate create reports"""
    slug = 'slug'
    queryset = Report.objects.select_related()
    serializer_class = ReportSerializer
    renderer_classes = (ReportJSONRenderer,)
    permission_classes = (IsAuthenticatedOrReadOnly,)

    def filter_queryset(self, queryset):
        """Handle getting reports on an article."""
        return queryset.filter(**{self.lookup_field: self.kwargs[self.slug]})

    def create(self, request, **kwargs):
        """Create reports to an article"""
        slug = self.kwargs['slug']
        try:
            Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article does not exist")
        serializer = self.serializer_class(
            data=request.data,
            context={"reporter": request.user, "slug": slug},
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class ArticlesLikesDislikes(GenericAPIView):
    """Creates and deletes article likes/dislikes."""
    queryset = LikesDislikes.objects.all()
    serializer_class = LikesDislikesSerializer
    permission_classes = (IsAuthenticatedOrReadOnly,)

    @staticmethod
    def _get_article(slug):
        """
        Return (article, None) or (None, 404 Response).

        Bug fix: the old code called Article.objects.get() unguarded, so a
        missing article raised DoesNotExist and produced a 500; the
        `isinstance(article, dict)` check that followed was dead code.
        """
        try:
            return Article.objects.get(slug=slug), None
        except Article.DoesNotExist:
            return None, Response({'Error': 'The article does not exist'},
                                  status=status.HTTP_404_NOT_FOUND)

    def _create_reaction(self, article, user, like):
        """Persist a first-time like/dislike for this user and article."""
        serializer = self.serializer_class(data={
            'reader': user.id,
            'article': article.id,
            'likes': like
        })
        serializer.is_valid(raise_exception=True)
        serializer.save(article=article, reader=user)
        if like:
            article.likes.add(user)
            verb = 'liked'
        else:
            article.dislikes.add(user)
            verb = 'disliked'
        article.save()
        return Response(
            {
                'Success': 'You have {} this article.'.format(verb)
            }, status=status.HTTP_201_CREATED)

    def _update_reaction(self, article, user, user_likes, like):
        """Flip an existing reaction, rejecting duplicate reactions."""
        if user_likes.likes and like:
            return Response(
                {
                    'detail': 'You have already liked this article.'
                }, status=status.HTTP_400_BAD_REQUEST)
        if not user_likes.likes and not like:
            return Response(
                {
                    'detail': 'You have already disliked this article.'
                }, status=status.HTTP_400_BAD_REQUEST)
        user_likes.likes = like
        user_likes.save()
        if like:
            article.likes.add(user)
            article.dislikes.remove(user)
            verb = 'liked'
        else:
            article.likes.remove(user)
            article.dislikes.add(user)
            verb = 'disliked'
        article.save()
        return Response(
            {
                'Success': 'You have {} this article.'.format(verb)
            }, status=status.HTTP_200_OK)

    def post(self, request, slug):
        """Like (likes=true) or dislike (likes=false) an article."""
        article, error = self._get_article(slug)
        if error is not None:
            return error
        like = request.data.get('likes', None)
        if not isinstance(like, bool):
            return Response(
                {
                    'detail': 'Please indicate whether you '
                              'like/dislike this article.'
                }, status=status.HTTP_400_BAD_REQUEST)
        # Authors may not react to their own articles.
        if article.author == request.user:
            message = {'Error': 'You cannot like/unlike your own article'}
            return Response(message, status=status.HTTP_400_BAD_REQUEST)
        try:
            user_likes = LikesDislikes.objects.get(
                article=article.id, reader=request.user.id)
        except LikesDislikes.DoesNotExist:
            return self._create_reaction(article, request.user, like)
        return self._update_reaction(article, request.user, user_likes, like)

    def delete(self, request, slug):
        """Remove the requesting user's reaction from an article."""
        article, error = self._get_article(slug)
        if error is not None:
            return error
        try:
            user_like = LikesDislikes.objects.get(
                article=article.id, reader=request.user.id)
        except LikesDislikes.DoesNotExist:
            return Response(
                {
                    'Error': 'Likes/dislikes not found.'
                }, status=status.HTTP_404_NOT_FOUND)
        if user_like.likes:
            article.likes.remove(request.user)
        else:
            article.dislikes.remove(request.user)
        article.save()
        user_like.delete()
        return Response(
            {
                'Success': 'Your reaction has been deleted successfully.'
            }, status=status.HTTP_200_OK)
class TagListAPIView(ListAPIView):
    """
    implements taggging
    """
    queryset = Tag.objects.all()
    pagination_class = None
    serializer_class = TagSerializer
    permission_classes = (AllowAny,)

    def list(self, request):
        """
        fetches existing article tags
        """
        tags = self.serializer_class(self.get_queryset(), many=True).data
        payload = {'tags': tags}
        return Response(payload, status=status.HTTP_200_OK)
class BookMarkArticle(ArticleMetaData, ListAPIView):
    """Implements bookmarking an article"""
    # Parenthesized tuple for consistency with the other views here.
    permission_classes = (IsAuthenticated,)

    def put(self, request, slug):
        """Method to either bookmark or remove an article from bookmarks."""
        # (Old docstring started with a stray fourth quote: """"Method...)
        article = self.check_article_exists(slug)
        user = User.objects.get(email=request.user)
        bookmarked_article = user.profile.bookmarks.filter(slug=slug).first()
        if bookmarked_article:
            user.profile.bookmarks.remove(bookmarked_article)
            return Response(dict(message="Article removed from bookmarks!"))
        user.profile.bookmarks.add(article)
        return Response(dict(message="Article bookmarked!"),
                        status=status.HTTP_200_OK)
class BookMarksView(ListAPIView):
    """Class retrieves all user bookmarks"""
    permission_classes = (IsAuthenticated,)
    serializer_class = ArticleSerializer

    def get(self, request):
        """fetch all a users bookmarks"""
        owner = User.objects.get(email=request.user)
        serializer = self.serializer_class(
            owner.profile.bookmarks.all(),
            context={"request": request},
            many=True,
        )
        return Response(serializer.data, status=status.HTTP_200_OK)
class ArticleStatisticsView(ListAPIView):
    """Lists statistics for articles authored by the requesting user."""
    serializer_class = ArticleStatSerializer

    def get_queryset(self):
        """This method filters articles by authors"""
        return Article.objects.filter(author=self.request.user)
| 41.684796 | 115 | 0.62192 |
d97421b4eaa6348a591632b7e918ebe984508a4b | 10,674 | py | Python | tests/unit/remoteworker/test_remote_worker.py | FG-AI4H/health-aiaudit-platform | 8eebe55c2aadcc70719b1288466639b5036f7efb | [
"BSD-3-Clause"
] | 3 | 2021-06-14T11:45:51.000Z | 2022-02-08T09:41:18.000Z | tests/unit/remoteworker/test_remote_worker.py | FG-AI4H/health-aiaudit-platform | 8eebe55c2aadcc70719b1288466639b5036f7efb | [
"BSD-3-Clause"
] | null | null | null | tests/unit/remoteworker/test_remote_worker.py | FG-AI4H/health-aiaudit-platform | 8eebe55c2aadcc70719b1288466639b5036f7efb | [
"BSD-3-Clause"
] | 1 | 2021-08-20T13:38:55.000Z | 2021-08-20T13:38:55.000Z | import mock
import os
import responses
import shutil
import tempfile
from os.path import join
from unittest import TestCase
from scripts.workers.remote_submission_worker import (
create_dir_as_python_package,
make_request,
get_message_from_sqs_queue,
delete_message_from_sqs_queue,
download_and_extract_file,
get_submission_by_pk,
get_challenge_phases_by_challenge_pk,
get_challenge_by_queue_name,
get_challenge_phase_by_pk,
process_submission_callback,
update_submission_data,
update_submission_status,
return_url_per_environment,
)
class BaseTestClass(TestCase):
    """Shared fixtures and URL builders for the remote-worker tests."""

    def setUp(self):
        # Canonical primary keys, payload and auth headers reused by all
        # subclasses.
        self.submission_pk = 1
        self.challenge_pk = 1
        self.challenge_phase_pk = 1
        self.data = {"test": "data"}
        self.headers = {"Authorization": "Token test_token"}
        self.testserver = "http://testserver"

    def make_request_url(self):
        """Generic request path used by the make_request tests."""
        return "/test/url"

    def get_message_from_sqs_queue_url(self, queue_name):
        return "/api/jobs/challenge/queues/{}/".format(queue_name)

    def delete_message_from_sqs_queue_url(self, queue_name):
        return "/api/jobs/queues/{}/".format(queue_name)

    def get_submission_by_pk_url(self, submission_pk):
        return "/api/jobs/submission/{}".format(submission_pk)

    def get_challenge_phases_by_challenge_pk_url(self, challenge_pk):
        return "/api/challenges/{}/phases/".format(challenge_pk)

    def get_challenge_by_queue_name_url(self, queue_name):
        return "/api/challenges/challenge/queues/{}/".format(queue_name)

    def get_challenge_phase_by_pk_url(self, challenge_pk, challenge_phase_pk):
        return "/api/challenges/challenge/{}/challenge_phase/{}".format(
            challenge_pk, challenge_phase_pk
        )

    def update_submission_data_url(self, challenge_pk):
        return "/api/jobs/challenge/{}/update_submission/".format(challenge_pk)
@mock.patch(
    "scripts.workers.remote_submission_worker.AUTH_TOKEN", "test_token"
)
@mock.patch("scripts.workers.remote_submission_worker.requests")
class MakeRequestTestClass(BaseTestClass):
    """Verifies make_request dispatches to the matching requests.* call."""

    def setUp(self):
        super(MakeRequestTestClass, self).setUp()
        self.url = super(MakeRequestTestClass, self).make_request_url()

    def test_make_request_get(self, mock_make_request):
        # GET carries only the auth headers, no body.
        make_request(self.url, "GET")
        mock_make_request.get.assert_called_with(
            url=self.url, headers=self.headers
        )

    def test_make_request_put(self, mock_make_request):
        make_request(self.url, "PUT", data=self.data)
        mock_make_request.put.assert_called_with(
            url=self.url, headers=self.headers, data=self.data
        )

    def test_make_request_patch(self, mock_make_request):
        make_request(self.url, "PATCH", data=self.data)
        mock_make_request.patch.assert_called_with(
            url=self.url, headers=self.headers, data=self.data
        )

    def test_make_request_post(self, mock_make_request):
        make_request(self.url, "POST", data=self.data)
        mock_make_request.post.assert_called_with(
            url=self.url, headers=self.headers, data=self.data
        )
@mock.patch(
    "scripts.workers.remote_submission_worker.QUEUE_NAME",
    "test-ai4h",
)
@mock.patch(
    "scripts.workers.remote_submission_worker.return_url_per_environment"
)
@mock.patch("scripts.workers.remote_submission_worker.make_request")
class APICallsTestClass(BaseTestClass):
    """Checks each API helper builds the right URL and HTTP verb."""

    def test_get_message_from_sqs_queue(self, mock_make_request, mock_url):
        url = self.get_message_from_sqs_queue_url("test-ai4h")
        get_message_from_sqs_queue()
        mock_url.assert_called_with(url)
        # The helper passes the environment-resolved URL on to make_request.
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "GET")

    def test_delete_message_from_sqs_queue(self, mock_make_request, mock_url):
        test_receipt_handle = (
            "MbZj6wDWli+JvwwJaBV+3dcjk2YW2vA3+STFFljTM8tJJg6HRG6PYSasuWXPJB+Cw"
        )
        url = self.delete_message_from_sqs_queue_url("test-ai4h")
        delete_message_from_sqs_queue(test_receipt_handle)
        mock_url.assert_called_with(url)
        url = mock_url(url)
        expected_data = {
            "receipt_handle": "MbZj6wDWli+JvwwJaBV+3dcjk2YW2vA3+STFFljTM8tJJg6HRG6PYSasuWXPJB+Cw"
        }
        mock_make_request.assert_called_with(url, "POST", data=expected_data)

    def test_get_challenge_by_queue_name(self, mock_make_request, mock_url):
        url = self.get_challenge_by_queue_name_url("test-ai4h")
        get_challenge_by_queue_name()
        mock_url.assert_called_with(url)
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "GET")

    def test_get_submission_by_pk(self, mock_make_request, mock_url):
        get_submission_by_pk(self.submission_pk)
        url = self.get_submission_by_pk_url(self.submission_pk)
        mock_url.assert_called_with(url)
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "GET")

    def test_get_challenge_phases_by_challenge_pk(
        self, mock_make_request, mock_url
    ):
        get_challenge_phases_by_challenge_pk(self.challenge_pk)
        url = self.get_challenge_phases_by_challenge_pk_url(self.challenge_pk)
        mock_url.assert_called_with(url)
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "GET")

    def test_get_challenge_phase_by_pk(self, mock_make_request, mock_url):
        get_challenge_phase_by_pk(self.challenge_pk, self.challenge_phase_pk)
        url = self.get_challenge_phase_by_pk_url(
            self.challenge_pk, self.challenge_phase_pk
        )
        mock_url.assert_called_with(url)
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "GET")

    def test_update_submission_data(self, mock_make_request, mock_url):
        update_submission_data(
            self.data, self.challenge_pk, self.submission_pk
        )
        url = self.update_submission_data_url(self.challenge_pk)
        mock_url.assert_called_with(url)
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "PUT", data=self.data)

    def test_update_submission_status(self, mock_make_request, mock_url):
        update_submission_status(self.data, self.challenge_pk)
        url = self.update_submission_data_url(self.challenge_pk)
        mock_url.assert_called_with(url)
        url = mock_url(url)
        mock_make_request.assert_called_with(url, "PATCH", data=self.data)
@mock.patch(
    "scripts.workers.remote_submission_worker.DJANGO_SERVER_PORT", ""
)
@mock.patch(
    "scripts.workers.remote_submission_worker.DJANGO_SERVER", "testserver"
)
class URLFormatTestCase(BaseTestClass):
    """Checks URL composition against the configured Django server."""

    def test_return_url_per_environment(self):
        """A relative path gets the host and port 80 prefixed."""
        path = "/test/url"
        self.assertEqual(
            return_url_per_environment(path),
            "http://testserver:80{}".format(path),
        )
@mock.patch(
    "scripts.workers.remote_submission_worker.process_submission_message"
)
class ProcessSubmissionCallback(BaseTestClass):
    """Covers logging and error handling in process_submission_callback."""

    @mock.patch("scripts.workers.remote_submission_worker.logger.info")
    def test_process_submission_callback(
        self, mock_logger, mock_process_submission_message
    ):
        message = {
            "challenge_pk": self.challenge_pk,
            "phase_pk": self.challenge_phase_pk,
            "submission_pk": self.submission_pk,
        }
        process_submission_callback(message)
        mock_logger.assert_called_with(
            "[x] Received submission message {}".format(message)
        )
        mock_process_submission_message.assert_called_with(message)

    @mock.patch("scripts.workers.remote_submission_worker.logger.exception")
    def test_process_submission_callback_with_exception(
        self, mock_logger, mock_process_submission_message
    ):
        message = {
            "challenge_pk": self.challenge_pk,
            "phase_pk": self.challenge_phase_pk,
            "submission_pk": self.submission_pk,
        }
        error = "Exception"
        # Force the callback's except-branch to run.
        mock_process_submission_message.side_effect = Exception(error)
        process_submission_callback(message)
        mock_logger.assert_called_with(
            "Exception while processing message from submission queue with error {}".format(
                error
            )
        )
class CreateDirAsPythonPackageTest(BaseTestClass):
    """Covers create_dir_as_python_package."""

    def setUp(self):
        super(CreateDirAsPythonPackageTest, self).setUp()
        self.BASE_TEMP_DIR = tempfile.mkdtemp()
        self.temp_directory = join(self.BASE_TEMP_DIR, "temp_dir")

    def tearDown(self):
        # Bug fix: the mkdtemp() scratch directory was never removed, so
        # every run (and any mid-test assertion failure) leaked a temp dir.
        if os.path.exists(self.BASE_TEMP_DIR):
            shutil.rmtree(self.BASE_TEMP_DIR)

    def test_create_dir_as_python_package(self):
        create_dir_as_python_package(self.temp_directory)

        # The directory must exist and contain an empty __init__.py.
        self.assertTrue(
            os.path.isfile(join(self.temp_directory, "__init__.py"))
        )

        with open(join(self.temp_directory, "__init__.py")) as f:
            self.assertEqual(f.read(), "")

        shutil.rmtree(self.temp_directory)
        self.assertFalse(os.path.exists(self.temp_directory))
class DownloadAndExtractFileTest(BaseTestClass):
    """Covers success and failure paths of download_and_extract_file."""

    def setUp(self):
        super(DownloadAndExtractFileTest, self).setUp()
        self.req_url = "{}{}".format(self.testserver, self.make_request_url())
        self.file_content = b"file content"
        self.temp_directory = tempfile.mkdtemp()
        self.download_location = join(self.temp_directory, "dummy_file")

    def tearDown(self):
        # Clean up the scratch directory created in setUp.
        if os.path.exists(self.temp_directory):
            shutil.rmtree(self.temp_directory)

    @responses.activate
    def test_download_and_extract_file_success(self):
        # Stub the HTTP GET so the worker "downloads" self.file_content.
        responses.add(
            responses.GET,
            self.req_url,
            body=self.file_content,
            content_type="application/octet-stream",
            status=200,
        )

        download_and_extract_file(self.req_url, self.download_location)

        self.assertTrue(os.path.exists(self.download_location))
        with open(self.download_location, "rb") as f:
            self.assertEqual(f.read(), self.file_content)

    @responses.activate
    @mock.patch("scripts.workers.remote_submission_worker.logger.error")
    def test_download_and_extract_file_when_download_fails(self, mock_logger):
        error = "ExampleError: Example Error description"
        # responses raises the body when it is an exception instance.
        responses.add(responses.GET, self.req_url, body=Exception(error))
        expected = "Failed to fetch file from {}, error {}".format(
            self.req_url, error
        )

        download_and_extract_file(self.req_url, self.download_location)

        mock_logger.assert_called_with(expected)
        self.assertFalse(os.path.exists(self.download_location))
eba7fdc9bee7f9f1ebdda035d35ae16f2ef5f6ad | 1,661 | py | Python | 6-1.py | xeno14/advent_of_code2018 | 87d0f4dc76ca9cd82d68618255709e446cf09b37 | [
"MIT"
] | null | null | null | 6-1.py | xeno14/advent_of_code2018 | 87d0f4dc76ca9cd82d68618255709e446cf09b37 | [
"MIT"
] | null | null | null | 6-1.py | xeno14/advent_of_code2018 | 87d0f4dc76ca9cd82d68618255709e446cf09b37 | [
"MIT"
] | null | null | null | import re
import numpy as np
def dist(p, q):
    """Manhattan (L1) distance between two 2-D points."""
    dx = np.abs(p[0] - q[0])
    dy = np.abs(p[1] - q[1])
    return dx + dy


assert dist((2, 3), (4, 5)) == 4
assert dist((1, 1), (4, 1)) == 3
if __name__ == '__main__':
    # Advent of Code 2018 day 6 part 1: find the largest finite region of
    # cells closest (Manhattan distance) to a single input coordinate.
    with open("input/6.txt") as f:
        # with open("input/6.test") as f:
        # Each input line looks like "x, y".
        points = []
        for l in f:
            p = re.search("(\d+), (\d+)", l).groups()
            points.append(tuple(map(int, p)))
    # NOTE(review): np.int was removed in NumPy 1.24+; confirm the pinned
    # NumPy version, otherwise this needs plain int / np.int64.
    points = np.array(points, dtype=np.int)
    print(points)

    # Bounding box with a one-cell margin around all coordinates.
    xmin = points[:,0].min()-1
    xmax = points[:,0].max()+1
    ymin = points[:,1].min()-1
    ymax = points[:,1].max()+1

    # field[i, j] holds the index of the nearest point, or -1 on a tie.
    field = np.zeros((xmax-xmin+1, ymax-ymin+1), dtype=np.int)
    for x in range(xmin, xmax+1):
        for y in range(ymin, ymax+1):
            dists = [dist((x,y), p) for p in points]
            #print((x,y), sorted(dists))
            pp = np.argmin(dists)
            # print(x, y, dists, pp)
            d = dists[pp]
            # possibly multiple points are closest
            # NOTE: `dists` is a Python list, but its elements are NumPy
            # scalars (points rows are ndarrays), so `dists == d` resolves
            # via NumPy's reflected __eq__ to an elementwise bool array.
            if (dists==d).sum() > 1:
                pp = -1
            i = x - xmin
            j = y - ymin
            field[i, j] = pp

    # Any region touching the bounding-box edge extends to infinity, so
    # drop every point index that appears on an edge.
    candidates = set(range(len(points)))
    # remove id on edges
    for s in set(field[0,:]):
        if s in candidates:
            candidates.remove(s)
    for s in set(field[-1,:]):
        if s in candidates:
            candidates.remove(s)
    for s in set(field[:,0]):
        if s in candidates:
            candidates.remove(s)
    for s in set(field[:,-1]):
        if s in candidates:
            candidates.remove(s)
    print(candidates)

    # Answer: the size of the largest remaining (finite) region.
    areas = [ (field==c).sum() for c in candidates]
    ans = max(areas)
    print(ans)
e15c07a945216165929ec9a0fcfe69c72450f130 | 4,574 | py | Python | userbot/modules/snips.py | hemantsachdeva/oub-remix | 0db69fb8b7e98dcfaed18ac0583d50ed1f73178d | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/modules/snips.py | hemantsachdeva/oub-remix | 0db69fb8b7e98dcfaed18ac0583d50ed1f73178d | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/modules/snips.py | hemantsachdeva/oub-remix | 0db69fb8b7e98dcfaed18ac0583d50ed1f73178d | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2020-07-06T12:14:07.000Z | 2020-07-06T12:14:07.000Z | # Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
""" Userbot module containing commands for keeping global notes. """
from userbot.events import register
from userbot import CMD_HELP, BOTLOG_CHATID
@register(outgoing=True,
          pattern=r"\+\w*",
          ignore_unsafe=True,
          disable_errors=True)
async def on_snip(event):
    """ Snips logic. """
    try:
        from userbot.modules.sql_helper.snips_sql import get_snip
    except AttributeError:
        # SQL support not configured; snips silently unavailable.
        return
    # Snip name is everything after the leading '+'.
    name = event.text[1:]
    snip = get_snip(name)
    message_id_to_reply = event.message.reply_to_msg_id

    if not message_id_to_reply:
        message_id_to_reply = None

    if snip and snip.f_mesg_id:
        # Media snip: re-fetch the stored message from the bot log chat
        # and re-send its text and media.
        msg_o = await event.client.get_messages(entity=BOTLOG_CHATID,
                                                ids=int(snip.f_mesg_id))
        await event.client.send_message(event.chat_id,
                                        msg_o.message,
                                        reply_to=message_id_to_reply,
                                        file=msg_o.media)
        await event.delete()
    elif snip and snip.reply:
        # Text-only snip.
        await event.client.send_message(event.chat_id,
                                        snip.reply,
                                        reply_to=message_id_to_reply)
        await event.delete()
@register(outgoing=True, pattern="^.snip (\w*)")
async def on_snip_save(event):
    """ For .snip command, saves snips for future use. """
    try:
        from userbot.modules.sql_helper.snips_sql import add_snip
    except AttributeError:
        # Bug fix: this previously read "AtrributeError", which raised a
        # NameError whenever the import actually failed.
        await event.edit("`Running on Non-SQL mode!`")
        return

    keyword = event.pattern_match.group(1)
    string = event.text.partition(keyword)[2]
    msg = await event.get_reply_message()
    msg_id = None
    if msg and msg.media and not string:
        # Media snips are forwarded to the bot log chat and referenced by
        # message id; this requires BOTLOG_CHATID to be configured.
        if BOTLOG_CHATID:
            await event.client.send_message(
                BOTLOG_CHATID,
                f"#SNIP\nKEYWORD: {keyword}\n\n"
                "The following message is saved as the data for the snip, "
                "please do NOT delete it !!"
            )
            msg_o = await event.client.forward_messages(
                entity=BOTLOG_CHATID,
                messages=msg,
                from_peer=event.chat_id,
                silent=True)
            msg_id = msg_o.id
        else:
            await event.edit(
                "`Saving snips with media requires the BOTLOG_CHATID to be set.`"
            )
            return
    elif event.reply_to_msg_id and not string:
        # `msg` is already the replied-to message; the old code fetched it
        # a second time for no reason.
        string = msg.text

    success = "Snip {} successfully. Use `+{}` anywhere to get it"

    if add_snip(keyword, string, msg_id) is False:
        await event.edit(success.format('updated', keyword))
    else:
        await event.edit(success.format('saved', keyword))
@register(outgoing=True, pattern="^.snips$")
async def on_snip_list(event):
    """ For .snips command, lists snips saved by you. """
    try:
        from userbot.modules.sql_helper.snips_sql import get_snips
    except AttributeError:
        await event.edit("`Running on Non-SQL mode!`")
        return

    # Emptiness check instead of the old fragile pattern of comparing the
    # accumulated message against a sentinel string each iteration.
    all_snips = get_snips()
    if not all_snips:
        await event.edit("`No snips available right now.`")
        return

    message = "Available snips:\n"
    for a_snip in all_snips:
        message += f"`+{a_snip.snip}`\n"
    await event.edit(message)
@register(outgoing=True, pattern="^.rmsnip (\w*)")
async def on_snip_delete(event):
    """ For .rmsnip command, deletes a snip. """
    try:
        from userbot.modules.sql_helper.snips_sql import remove_snip
    except AttributeError:
        await event.edit("`Running on Non-SQL mode!`")
        return

    name = event.pattern_match.group(1)
    removed = remove_snip(name) is True
    if removed:
        await event.edit(f"`Successfully deleted snip:` **{name}**")
    else:
        await event.edit(f"`Couldn't find snip:` **{name}**")
# Register this module's help text with the global .help registry.
CMD_HELP.update({
    "snips":
    "\
+<snip_name>\
\nUsage: Gets the specified snip, anywhere.\
\n\n`.snip` <name> <data> or reply to a message with .snip <name>\
\nUsage: Saves the message as a snip (global note) with the name. (Works with pics, docs, and stickers too!)\
\n\n`.snips`\
\nUsage: Gets all saved snips.\
\n\n`.rmsnip` <snip_name>\
\nUsage: Deletes the specified snip.\
"
})
dc281a611632733c1f8b11896da70161437af130 | 25,182 | py | Python | msgraph-cli-extensions/v1_0/directoryobjects_v1_0/azext_directoryobjects_v1_0/vendored_sdks/directoryobjects/operations/_directory_objects_operations.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/v1_0/directoryobjects_v1_0/azext_directoryobjects_v1_0/vendored_sdks/directoryobjects/operations/_directory_objects_operations.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | 22 | 2022-03-29T22:54:37.000Z | 2022-03-29T22:55:27.000Z | msgraph-cli-extensions/v1_0/directoryobjects_v1_0/azext_directoryobjects_v1_0/vendored_sdks/directoryobjects/operations/_directory_objects_operations.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DirectoryObjectsOperations(object):
    """DirectoryObjectsOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~directory_objects.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Class-level alias so callers can reach the generated model classes
    # through the operations-group instance (see :ivar: above).
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    # NOTE: every action method below is generated by AutoRest and follows the
    # same POST pattern: build URL/headers, serialize the body (when one
    # exists), run the client pipeline, map non-success status codes to
    # HttpResponseError carrying a deserialized OdataError model, then
    # deserialize the payload. The duplication is intentional — per the file
    # header, manual edits are lost when the client is regenerated.
    def check_member_groups(
        self,
        directory_object_id,  # type: str
        body,  # type: "models.Paths1Ffes6MDirectoryobjectsDirectoryobjectIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> List[str]
        """Invoke action checkMemberGroups.
        Invoke action checkMemberGroups.
        :param directory_object_id: key: id of directoryObject.
        :type directory_object_id: str
        :param body: Action parameters.
        :type body: ~directory_objects.models.Paths1Ffes6MDirectoryobjectsDirectoryobjectIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.check_member_groups.metadata['url']  # type: ignore
        path_format_arguments = {
            'directoryObject-id': self._serialize.url("directory_object_id", directory_object_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths1Ffes6MDirectoryobjectsDirectoryobjectIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[str]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    check_member_groups.metadata = {'url': '/directoryObjects/{directoryObject-id}/microsoft.graph.checkMemberGroups'}  # type: ignore
    def check_member_objects(
        self,
        directory_object_id,  # type: str
        body,  # type: "models.Paths1B1K3OoDirectoryobjectsDirectoryobjectIdMicrosoftGraphCheckmemberobjectsPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> List[str]
        """Invoke action checkMemberObjects.
        Invoke action checkMemberObjects.
        :param directory_object_id: key: id of directoryObject.
        :type directory_object_id: str
        :param body: Action parameters.
        :type body: ~directory_objects.models.Paths1B1K3OoDirectoryobjectsDirectoryobjectIdMicrosoftGraphCheckmemberobjectsPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.check_member_objects.metadata['url']  # type: ignore
        path_format_arguments = {
            'directoryObject-id': self._serialize.url("directory_object_id", directory_object_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths1B1K3OoDirectoryobjectsDirectoryobjectIdMicrosoftGraphCheckmemberobjectsPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[str]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    check_member_objects.metadata = {'url': '/directoryObjects/{directoryObject-id}/microsoft.graph.checkMemberObjects'}  # type: ignore
    def get_member_groups(
        self,
        directory_object_id,  # type: str
        body,  # type: "models.Paths15Et6VvDirectoryobjectsDirectoryobjectIdMicrosoftGraphGetmembergroupsPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> List[str]
        """Invoke action getMemberGroups.
        Invoke action getMemberGroups.
        :param directory_object_id: key: id of directoryObject.
        :type directory_object_id: str
        :param body: Action parameters.
        :type body: ~directory_objects.models.Paths15Et6VvDirectoryobjectsDirectoryobjectIdMicrosoftGraphGetmembergroupsPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.get_member_groups.metadata['url']  # type: ignore
        path_format_arguments = {
            'directoryObject-id': self._serialize.url("directory_object_id", directory_object_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths15Et6VvDirectoryobjectsDirectoryobjectIdMicrosoftGraphGetmembergroupsPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[str]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_member_groups.metadata = {'url': '/directoryObjects/{directoryObject-id}/microsoft.graph.getMemberGroups'}  # type: ignore
    def get_member_objects(
        self,
        directory_object_id,  # type: str
        body,  # type: "models.Paths16Hhl7EDirectoryobjectsDirectoryobjectIdMicrosoftGraphGetmemberobjectsPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> List[str]
        """Invoke action getMemberObjects.
        Invoke action getMemberObjects.
        :param directory_object_id: key: id of directoryObject.
        :type directory_object_id: str
        :param body: Action parameters.
        :type body: ~directory_objects.models.Paths16Hhl7EDirectoryobjectsDirectoryobjectIdMicrosoftGraphGetmemberobjectsPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of str, or the result of cls(response)
        :rtype: list[str]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List[str]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.get_member_objects.metadata['url']  # type: ignore
        path_format_arguments = {
            'directoryObject-id': self._serialize.url("directory_object_id", directory_object_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths16Hhl7EDirectoryobjectsDirectoryobjectIdMicrosoftGraphGetmemberobjectsPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[str]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_member_objects.metadata = {'url': '/directoryObjects/{directoryObject-id}/microsoft.graph.getMemberObjects'}  # type: ignore
    def restore(
        self,
        directory_object_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphDirectoryObject"
        """Invoke action restore.
        Invoke action restore.
        :param directory_object_id: key: id of directoryObject.
        :type directory_object_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphDirectoryObject, or the result of cls(response)
        :rtype: ~directory_objects.models.MicrosoftGraphDirectoryObject
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphDirectoryObject"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"
        # Construct URL
        url = self.restore.metadata['url']  # type: ignore
        path_format_arguments = {
            'directoryObject-id': self._serialize.url("directory_object_id", directory_object_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Unlike the other actions, restore takes no request body: the POST is
        # sent with URL and headers only.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('MicrosoftGraphDirectoryObject', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    restore.metadata = {'url': '/directoryObjects/{directoryObject-id}/microsoft.graph.restore'}  # type: ignore
    def get_available_extension_properties(
        self,
        body,  # type: "models.Paths1Izu2OlDirectoryobjectsMicrosoftGraphGetavailableextensionpropertiesPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> List["models.MicrosoftGraphExtensionProperty"]
        """Invoke action getAvailableExtensionProperties.
        Invoke action getAvailableExtensionProperties.
        :param body: Action parameters.
        :type body: ~directory_objects.models.Paths1Izu2OlDirectoryobjectsMicrosoftGraphGetavailableextensionpropertiesPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of MicrosoftGraphExtensionProperty, or the result of cls(response)
        :rtype: list[~directory_objects.models.MicrosoftGraphExtensionProperty]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphExtensionProperty"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL (collection-level action: no path parameters)
        url = self.get_available_extension_properties.metadata['url']  # type: ignore
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths1Izu2OlDirectoryobjectsMicrosoftGraphGetavailableextensionpropertiesPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[MicrosoftGraphExtensionProperty]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_available_extension_properties.metadata = {'url': '/directoryObjects/microsoft.graph.getAvailableExtensionProperties'}  # type: ignore
    def get_by_ids(
        self,
        body,  # type: "models.PathsG5Xp0HDirectoryobjectsMicrosoftGraphGetbyidsPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> List["models.MicrosoftGraphDirectoryObject"]
        """Invoke action getByIds.
        Invoke action getByIds.
        :param body: Action parameters.
        :type body: ~directory_objects.models.PathsG5Xp0HDirectoryobjectsMicrosoftGraphGetbyidsPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of MicrosoftGraphDirectoryObject, or the result of cls(response)
        :rtype: list[~directory_objects.models.MicrosoftGraphDirectoryObject]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphDirectoryObject"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL (collection-level action: no path parameters)
        url = self.get_by_ids.metadata['url']  # type: ignore
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'PathsG5Xp0HDirectoryobjectsMicrosoftGraphGetbyidsPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[MicrosoftGraphDirectoryObject]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_by_ids.metadata = {'url': '/directoryObjects/microsoft.graph.getByIds'}  # type: ignore
    def validate_properties(
        self,
        body,  # type: "models.Paths1Re7RfDirectoryobjectsMicrosoftGraphValidatepropertiesPostRequestbodyContentApplicationJsonSchema"
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Invoke action validateProperties.
        Invoke action validateProperties.
        :param body: Action parameters.
        :type body: ~directory_objects.models.Paths1Re7RfDirectoryobjectsMicrosoftGraphValidatepropertiesPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL (collection-level action: no path parameters)
        url = self.validate_properties.metadata['url']  # type: ignore
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths1Re7RfDirectoryobjectsMicrosoftGraphValidatepropertiesPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # This action returns 204 No Content on success, so there is no
        # payload to deserialize.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    validate_properties.metadata = {'url': '/directoryObjects/microsoft.graph.validateProperties'}  # type: ignore
| 48.6139 | 173 | 0.694067 |
e3726772770451c5e851e01698e366f5e571fc55 | 809 | py | Python | Lib/corpuscrawler/crawl_anv.py | cash/corpuscrawler | 8913fe1fb2b6bfdfbf2ba01d2ce88057b3b5ba3d | [
"Apache-2.0"
] | 95 | 2019-06-13T23:34:21.000Z | 2022-03-12T05:22:49.000Z | Lib/corpuscrawler/crawl_anv.py | sahwar/corpuscrawler | 8913fe1fb2b6bfdfbf2ba01d2ce88057b3b5ba3d | [
"Apache-2.0"
] | 31 | 2019-06-02T18:56:53.000Z | 2021-08-10T20:16:02.000Z | Lib/corpuscrawler/crawl_anv.py | sahwar/corpuscrawler | 8913fe1fb2b6bfdfbf2ba01d2ce88057b3b5ba3d | [
"Apache-2.0"
] | 35 | 2019-06-18T08:26:24.000Z | 2022-01-11T13:59:40.000Z | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function, unicode_literals
from corpuscrawler.util import crawl_bibleis
def crawl(crawler):
    """Crawl the Anuak (ISO 639-3: 'anv') Bible.is corpus.

    Obtains the crawler's output sink for language 'anv' and delegates the
    actual fetching to the shared Bible.is helper with bible id 'ANVWBT'.
    """
    crawl_bibleis(crawler, crawler.get_output(language='anv'), bible='ANVWBT')
| 36.772727 | 74 | 0.770087 |
7e880823ee82f82a745a1e472f0308d07c08367c | 681 | py | Python | src/compas_view2/objects/sphereobject.py | selinabitting/compas_view2 | cac8abaf8fbde13ceabe35324be92779ea2e535f | [
"MIT"
] | 5 | 2021-03-03T13:07:31.000Z | 2022-02-05T01:07:31.000Z | src/compas_view2/objects/sphereobject.py | selinabitting/compas_view2 | cac8abaf8fbde13ceabe35324be92779ea2e535f | [
"MIT"
] | 91 | 2021-01-29T14:26:28.000Z | 2022-03-22T17:15:58.000Z | src/compas_view2/objects/sphereobject.py | selinabitting/compas_view2 | cac8abaf8fbde13ceabe35324be92779ea2e535f | [
"MIT"
] | 6 | 2021-01-29T11:13:45.000Z | 2022-02-05T00:56:24.000Z | from compas.datastructures import Mesh
from compas.geometry import Sphere
from .meshobject import MeshObject
class SphereObject(MeshObject):
    """Object for displaying COMPAS sphere geometry."""
    def __init__(self, data, u=16, v=16, **kwargs):
        # Tessellate the sphere into a mesh with u meridians and v parallels
        # and hand that mesh to the MeshObject base class for display.
        super().__init__(Mesh.from_shape(data, u=u, v=v), **kwargs)
        self.u = u  # number of faces around the equator
        self.v = v  # number of faces from pole to pole
        self._data = data  # the underlying compas Sphere, kept for re-tessellation
    def update(self):
        # Rebuild the display mesh from the stored sphere using the current
        # u/v resolution, then refresh the parent's state.
        # NOTE(review): self.init() is defined on the parent; presumably it
        # rebuilds GPU buffers from self._mesh — confirm against MeshObject.
        self._mesh = Mesh.from_shape(self._data, u=self.u, v=self.v)
        self.init()
        super().update()
    @property
    def properties(self):
        # Names of the editable tessellation parameters exposed to the UI.
        return ["u", "v"]
    @classmethod
    def create_default(cls) -> Sphere:
        # Unit sphere centred at the origin.
        return Sphere((0, 0, 0), 1)
| 25.222222 | 68 | 0.615272 |
aab5c97913c980a18f0db9a067f0e500ac83e61b | 408 | py | Python | eu-structural-funds/tests/processors/test_fill_in_constant_values.py | transpresupuestaria/os-data-importers | 929e07aefc98ae4788e75c682d4c3adc014bf6ce | [
"MIT"
] | null | null | null | eu-structural-funds/tests/processors/test_fill_in_constant_values.py | transpresupuestaria/os-data-importers | 929e07aefc98ae4788e75c682d4c3adc014bf6ce | [
"MIT"
] | null | null | null | eu-structural-funds/tests/processors/test_fill_in_constant_values.py | transpresupuestaria/os-data-importers | 929e07aefc98ae4788e75c682d4c3adc014bf6ce | [
"MIT"
] | null | null | null | """Test module for the `fill_in_constant_fields` processor."""
from common.processors.fill_in_constant_fields import fill_columns
def test_fill_columns_without_constants_returns_identical_row():
    """With no constants supplied, the row must come back unchanged."""
    row = {'foo': 'bar'}
    assert fill_columns(row) == row
def test_fill_columns_returns_correct_row():
    """A None field must be filled in from the matching constant."""
    filled = fill_columns({'foo': None}, constants={'foo': 'bar'})
    assert filled == {'foo': 'bar'}
| 31.384615 | 67 | 0.732843 |
b6aec15be2dd3e9cf1592e8bdc312b371d44008b | 352 | py | Python | fibonacci.py | ANITHARANGANATHAN/python | 5f4976341a94aa28b16410349e52cd4fc5865292 | [
"MIT"
] | null | null | null | fibonacci.py | ANITHARANGANATHAN/python | 5f4976341a94aa28b16410349e52cd4fc5865292 | [
"MIT"
] | null | null | null | fibonacci.py | ANITHARANGANATHAN/python | 5f4976341a94aa28b16410349e52cd4fc5865292 | [
"MIT"
] | null | null | null | nterms = int(input("how many terms?"))
# First two Fibonacci numbers and a counter of how many terms were printed.
# (nterms is read from the user above this block.)
n1, n2 = 0, 1
count = 0
if nterms <= 0:
    print("please enter a positive integer")
elif nterms == 1:
    print("fibonacci sequence upto", nterms, ":")
    print(n1)
else:
    print("fibonacci sequence:")
    while count < nterms:
        print(n1)
        # BUG FIX: advance BOTH terms in lockstep. The original computed
        # nth = n1 + n2 but never assigned it back to n2, so the loop
        # printed 0 followed by an endless run of 1s instead of the
        # Fibonacci sequence.
        n1, n2 = n2, n1 + n2
        count += 1
61571e0894dfea56222d0b461debfdf1cd9c3bb2 | 4,945 | py | Python | kolla_mesos/service_definition.py | nhlfr/kolla-mesos | a0ffda04aedf02fb2ed10218b87b4ed4eb5aa5f7 | [
"Apache-2.0"
] | null | null | null | kolla_mesos/service_definition.py | nhlfr/kolla-mesos | a0ffda04aedf02fb2ed10218b87b4ed4eb5aa5f7 | [
"Apache-2.0"
] | null | null | null | kolla_mesos/service_definition.py | nhlfr/kolla-mesos | a0ffda04aedf02fb2ed10218b87b4ed4eb5aa5f7 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import socket
import yaml
import jinja2
from oslo_log import log
from kolla_mesos.common import jinja_utils
from kolla_mesos import exception
# Allowed keys for a config-file entry in a service definition.
CNF_FIELDS = ('source', 'dest', 'owner', 'perm')
# Allowed keys for a command entry.
CMD_FIELDS = ('run_once', 'dependencies', 'command', 'env',
              'delay', 'retries', 'files')
# Allowed keys for a dependency entry, and the values its 'scope' may take.
DEP_FIELDS = ('path', 'scope')
SCOPE_OPTS = ('global', 'local')
LOG = log.getLogger()
def find_service_file(service_name, service_dir):
    """Locate the definition file for *service_name* under *service_dir*.

    The lookup is deliberately forgiving: only the last path segment of the
    name is used, and an '_ansible_tasks' suffix maps to the '-init' file
    naming used on disk.  Raises KollaNotFoundException if the directory
    does not exist or no matching file is found.
    """
    if not os.path.exists(service_dir):
        raise exception.KollaNotFoundException(service_dir,
                                               entity='service directory')
    # e.g. "openstack/keystone/keystone_ansible_tasks" -> "keystone-init"
    short_name = service_name.split('/')[-1].replace('_ansible_tasks', '-init')
    matches = (
        os.path.join(root, filename)
        for root, _dirs, filenames in os.walk(service_dir)
        for filename in filenames
        if short_name in filename
    )
    found = next(matches, None)
    if found is not None:
        return found
    raise exception.KollaNotFoundException(service_name,
                                           entity='service definition')
def inspect(service_name, service_dir):
    """Report which Jinja2 variables a service definition requires.

    :param service_name: name of the service whose definition to inspect
    :param service_dir: directory tree containing service definitions
    :returns: dict with key 'required_variables' mapping to a list of
        variable names used by the template
    :raises exception.KollaNotFoundException: if the service file cannot be
        found, or its template references a missing template
    """
    filename = find_service_file(service_name, service_dir)
    try:
        # The original called the unbound `set.union(...)` on the helper's
        # result, which only works if that result is already a set; plain
        # set() accepts any iterable of names and de-duplicates them.
        required_variables = set(
            jinja_utils.jinja_find_required_variables(filename))
    except jinja2.exceptions.TemplateNotFound:
        raise exception.KollaNotFoundException(filename,
                                               entity='service definition')
    return dict(required_variables=list(required_variables))
def validate(service_name, service_dir, variables=None, deps=None):
    """Validate a service definition and accumulate its dependency graph.

    Renders the service's Jinja2 template with *variables*, parses it as
    YAML, then validates every command (including the service daemon) and
    records registration/waiter edges into *deps*.

    :param service_name: name of the service to validate
    :param service_dir: directory tree containing service definitions
    :param variables: template variables for rendering (default: empty dict)
    :param deps: dependency graph to update in place (default: new dict)
    :returns: the (possibly newly created) deps dict
    :raises exception.KollaNotFoundException: if the template is missing
    """
    # Avoid mutable default arguments: fresh containers per call.
    if variables is None:
        variables = {}
    if deps is None:
        deps = {}
    filename = find_service_file(service_name, service_dir)
    try:
        cnf = yaml.load(jinja_utils.jinja_render(filename, variables))
    except jinja2.exceptions.TemplateNotFound:
        raise exception.KollaNotFoundException(filename,
                                               entity='service definition')
    # Yields (command_name, command_info) for each entry under 'commands',
    # plus the long-running service daemon (under the name 'daemon').
    def get_commands():
        for cmd in cnf.get('commands', {}):
            yield cmd, cnf['commands'][cmd]
        if 'service' in cnf:
            yield 'daemon', cnf['service']['daemon']
    LOG.debug('%s: file found at %s' % (cnf['name'], filename))
    for cmd, cmd_info in get_commands():
        _validate_command(filename, cmd, cmd_info, deps,
                          cnf['name'], service_dir)
    return deps
def _validate_config(filename, conf, service_dir):
    """Validate the 'files' mapping of a command definition.

    Each entry may only use the known config fields, and every relative
    source path must exist on disk relative to the parent of *service_dir*.
    Absolute paths under /etc are runtime paths and are not checked.
    """
    for entry in conf.values():
        for key in entry:
            assert key in CNF_FIELDS, '%s: %s not in %s' % (filename,
                                                            key, CNF_FIELDS)
        sources = entry['source']
        if isinstance(sources, str):
            sources = [sources]
        for source in sources:
            candidate = os.path.join(service_dir, '..', source)
            if not candidate.startswith('/etc'):
                assert os.path.exists(candidate), '%s missing' % candidate
def _validate_command(filename, cmd, cmd_info, deps,
                      service_name, service_dir):
    """Validate one command entry and record its edges in the deps graph.

    Registers the command under both a role-scoped key ('role/cmd') and a
    host-scoped key ('hostname/role/cmd'), then wires every declared
    dependency as a waiter edge pointing back at those registrations.
    Mutates *deps* in place.
    """
    for key in cmd_info:
        assert key in CMD_FIELDS, '%s not in %s' % (key, CMD_FIELDS)
    # service_name is '<project>/<group>/<role>'; only the role is used here
    # (group is deliberately unused).
    _, group, role = service_name.split('/')
    # The two registry keys this command is published under.
    regs = ['%s/%s' % (role, cmd),
            '%s/%s/%s' % (socket.gethostname(), role, cmd)]
    reqs = cmd_info.get('dependencies', [])
    for reg in regs:
        if reg not in deps:
            deps[reg] = {'waiters': {}}
        deps[reg]['registered_by'] = cmd
        deps[reg]['name'] = cmd
        deps[reg]['run_by'] = filename
    for req in reqs:
        for key in req:
            assert key in DEP_FIELDS, '%s: %s not in %s' % (filename,
                                                            key, DEP_FIELDS)
        scope = req.get('scope', 'global')
        assert scope in SCOPE_OPTS, '%s: %s not in %s' % (filename,
                                                          scope, SCOPE_OPTS)
        req_path = req['path']
        # Local-scope dependencies are namespaced by this host's name.
        if scope == 'local':
            req_path = os.path.join(socket.gethostname(), req_path)
        if req_path not in deps:
            deps[req_path] = {'waiters': {}}
        for reg in regs:
            deps[req_path]['waiters'][cmd] = reg
    if 'files' in cmd_info:
        _validate_config(filename, cmd_info['files'], service_dir)
    LOG.debug('%s: command "%s" validated' % (service_name, cmd))
| 37.462121 | 79 | 0.594742 |
9599fa16175bb8321f78e1b6f6862b75d383a7cd | 22,646 | py | Python | src/_pytest/main.py | kalekundert/pytest | 253419316ce09b952862811db229a9e9383a332b | [
"MIT"
] | 1 | 2018-08-01T09:32:16.000Z | 2018-08-01T09:32:16.000Z | src/_pytest/main.py | Jacklibobo/pytest | 253419316ce09b952862811db229a9e9383a332b | [
"MIT"
] | null | null | null | src/_pytest/main.py | Jacklibobo/pytest | 253419316ce09b952862811db229a9e9383a332b | [
"MIT"
] | null | null | null | """ core implementation of testing process: init, session, runtest loop. """
from __future__ import absolute_import, division, print_function
import contextlib
import functools
import os
import pkgutil
import six
import sys
import _pytest
from _pytest import nodes
import _pytest._code
import py
from _pytest.config import directory_arg, UsageError, hookimpl
from _pytest.outcomes import exit
from _pytest.runner import collect_one_node
# exitcodes for the command line
EXIT_OK = 0  # tests were collected and all passed
EXIT_TESTSFAILED = 1  # some collected tests failed
EXIT_INTERRUPTED = 2  # the test run was interrupted
EXIT_INTERNALERROR = 3  # an internal error occurred during the run
EXIT_USAGEERROR = 4  # command line usage error
EXIT_NOTESTSCOLLECTED = 5  # no tests were collected at all
def pytest_addoption(parser):
    """Register core run/collect command line options and ini settings.

    Called once at startup with the pytest argument ``parser``; mutates it
    in place and returns nothing.

    Fix: removed two stray trailing commas after ``_addoption``/``addoption``
    calls which turned those statements into discarded 1-tuples.
    """
    parser.addini(
        "norecursedirs",
        "directory patterns to avoid for recursion",
        type="args",
        default=[".*", "build", "dist", "CVS", "_darcs", "{arch}", "*.egg", "venv"],
    )
    parser.addini(
        "testpaths",
        "directories to search for tests when no files or directories are given in the "
        "command line.",
        type="args",
        default=[],
    )
    # parser.addini("dirpatterns",
    #    "patterns specifying possible locations of test files",
    #    type="linelist", default=["**/test_*.txt",
    #             "**/test_*.py", "**/*_test.py"]
    # )
    group = parser.getgroup("general", "running and selection options")
    group._addoption(
        "-x",
        "--exitfirst",
        action="store_const",
        dest="maxfail",
        const=1,
        help="exit instantly on first error or failed test.",
    )
    group._addoption(
        "--maxfail",
        metavar="num",
        action="store",
        type=int,
        dest="maxfail",
        default=0,
        help="exit after first num failures or errors.",
    )
    group._addoption(
        "--strict",
        action="store_true",
        help="marks not registered in configuration file raise errors.",
    )
    group._addoption(
        "-c",
        metavar="file",
        type=str,
        dest="inifilename",
        help="load configuration from `file` instead of trying to locate one of the implicit "
        "configuration files.",
    )
    group._addoption(
        "--continue-on-collection-errors",
        action="store_true",
        default=False,
        dest="continue_on_collection_errors",
        help="Force test execution even if collection errors occur.",
    )
    group._addoption(
        "--rootdir",
        action="store",
        dest="rootdir",
        help="Define root directory for tests. Can be relative path: 'root_dir', './root_dir', "
        "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: "
        "'$HOME/root_dir'.",
    )

    group = parser.getgroup("collect", "collection")
    group.addoption(
        "--collectonly",
        "--collect-only",
        action="store_true",
        help="only collect tests, don't execute them.",
    )
    group.addoption(
        "--pyargs",
        action="store_true",
        help="try to interpret all arguments as python packages.",
    )
    group.addoption(
        "--ignore",
        action="append",
        metavar="path",
        help="ignore path during collection (multi-allowed).",
    )
    group.addoption(
        "--deselect",
        action="append",
        metavar="nodeid_prefix",
        help="deselect item during collection (multi-allowed).",
    )
    # when changing this to --conf-cut-dir, config.py Conftest.setinitial
    # needs upgrading as well
    group.addoption(
        "--confcutdir",
        dest="confcutdir",
        default=None,
        metavar="dir",
        type=functools.partial(directory_arg, optname="--confcutdir"),
        help="only load conftest.py's relative to specified dir.",
    )
    group.addoption(
        "--noconftest",
        action="store_true",
        dest="noconftest",
        default=False,
        help="Don't load any conftest.py files.",
    )
    group.addoption(
        "--keepduplicates",
        "--keep-duplicates",
        action="store_true",
        dest="keepduplicates",
        default=False,
        help="Keep duplicate tests.",
    )
    group.addoption(
        "--collect-in-virtualenv",
        action="store_true",
        dest="collect_in_virtualenv",
        default=False,
        help="Don't ignore tests in a local virtualenv directory",
    )

    group = parser.getgroup("debugconfig", "test session debugging and configuration")
    group.addoption(
        "--basetemp",
        dest="basetemp",
        default=None,
        metavar="dir",
        help="base temporary directory for this test run.",
    )
def pytest_configure(config):
    # Expose the active config object as ``pytest.config`` for backward
    # compatibility with plugins that still rely on the deprecated global.
    __import__("pytest").config = config # compatibility
def wrap_session(config, doit):
    """Skeleton command line program.

    Runs ``doit(config, session)`` inside a fully configured Session and
    translates every outcome (success, usage error, failed run, interrupt,
    internal error) into one of the EXIT_* status codes.
    """
    session = Session(config)
    session.exitstatus = EXIT_OK
    # initstate tracks how far startup got so teardown can mirror it:
    # 0 = nothing configured, 1 = configured, 2 = sessionstart fired.
    initstate = 0
    try:
        try:
            config._do_configure()
            initstate = 1
            config.hook.pytest_sessionstart(session=session)
            initstate = 2
            session.exitstatus = doit(config, session) or 0
        except UsageError:
            # Usage errors propagate to the caller unchanged.
            raise
        except Failed:
            session.exitstatus = EXIT_TESTSFAILED
        except KeyboardInterrupt:
            excinfo = _pytest._code.ExceptionInfo()
            # pytest.exit() raised before the session started: surface its
            # message directly since no reporting hooks will run.
            if initstate < 2 and isinstance(excinfo.value, exit.Exception):
                sys.stderr.write("{}: {}\n".format(excinfo.typename, excinfo.value.msg))
            config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
            session.exitstatus = EXIT_INTERRUPTED
        except: # noqa
            # Anything else is an internal error; notify and keep going so
            # teardown below still runs.
            excinfo = _pytest._code.ExceptionInfo()
            config.notify_exception(excinfo, config.option)
            session.exitstatus = EXIT_INTERNALERROR
            if excinfo.errisinstance(SystemExit):
                sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
    finally:
        excinfo = None # Explicitly break reference cycle.
        session.startdir.chdir()
        # Only fire sessionfinish if sessionstart actually ran.
        if initstate >= 2:
            config.hook.pytest_sessionfinish(
                session=session, exitstatus=session.exitstatus
            )
        config._ensure_unconfigure()
    return session.exitstatus
def pytest_cmdline_main(config):
    """Command line entry point: run ``_main`` inside a managed session."""
    return wrap_session(config, _main)
def _main(config, session):
    """Default command line protocol: collect, run the test loop, report.

    Returns an EXIT_* status constant for failed/empty runs, or None when
    the run completed successfully with collected tests.
    """
    hook = config.hook
    hook.pytest_collection(session=session)
    hook.pytest_runtestloop(session=session)

    if session.testsfailed:
        return EXIT_TESTSFAILED
    if session.testscollected == 0:
        return EXIT_NOTESTSCOLLECTED
def pytest_collection(session):
    """Default collection hook: delegate to the session's collect protocol."""
    return session.perform_collect()
def pytest_runtestloop(session):
    """Run every collected item in order.

    Aborts up front if collection produced errors (unless
    --continue-on-collection-errors), does nothing under --collect-only,
    and honors ``session.shouldfail`` / ``session.shouldstop`` after each
    item. Returns True when the loop ran (or was skipped deliberately).
    """
    if session.testsfailed and not session.config.option.continue_on_collection_errors:
        raise session.Interrupted("%d errors during collection" % session.testsfailed)

    if session.config.option.collectonly:
        return True

    for index, item in enumerate(session.items):
        # Re-read session.items each iteration so the "next" item reflects
        # any in-flight mutation of the list, exactly as before.
        has_next = index + 1 < len(session.items)
        nextitem = session.items[index + 1] if has_next else None
        item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
        if session.shouldfail:
            raise session.Failed(session.shouldfail)
        if session.shouldstop:
            raise session.Interrupted(session.shouldstop)
    return True
def _in_venv(path):
    """Attempt to detect whether ``path`` is the root of a virtualenv.

    Heuristic: a virtualenv root contains a ``bin`` (``Scripts`` on
    Windows) directory holding one of the known activate scripts.
    """
    bindir = path.join("Scripts" if sys.platform.startswith("win") else "bin")
    if not bindir.isdir():
        return False
    activates = (
        "activate",
        "activate.csh",
        "activate.fish",
        "Activate",
        "Activate.bat",
        "Activate.ps1",
    )
    # Generator instead of a throwaway list inside any() (perf/idiom fix).
    return any(fname.basename in activates for fname in bindir.listdir())
def pytest_ignore_collect(path, config):
    """Return True if ``path`` should be skipped during collection.

    A path is ignored when it appears in a conftest ``collect_ignore``
    list, was given via ``--ignore``, sits inside a virtualenv (unless
    ``--collect-in-virtualenv``), or duplicates an already-seen path
    (unless ``--keep-duplicates``).
    """
    ignore_paths = config._getconftest_pathlist("collect_ignore", path=path.dirpath())
    # Copy before extending: _getconftest_pathlist may hand back a shared
    # (cached) list, and extending it in place would leak --ignore entries
    # into subsequent lookups.
    ignore_paths = list(ignore_paths) if ignore_paths else []
    excludeopt = config.getoption("ignore")
    if excludeopt:
        ignore_paths.extend([py.path.local(x) for x in excludeopt])

    if py.path.local(path) in ignore_paths:
        return True

    allow_in_venv = config.getoption("collect_in_virtualenv")
    if _in_venv(path) and not allow_in_venv:
        return True

    # Skip duplicate paths.
    keepduplicates = config.getoption("keepduplicates")
    duplicate_paths = config.pluginmanager._duplicatepaths
    if not keepduplicates:
        if path in duplicate_paths:
            return True
        else:
            duplicate_paths.add(path)

    return False
def pytest_collection_modifyitems(items, config):
    """Deselect collected items whose node id starts with a --deselect prefix.

    Mutates ``items`` in place and reports the removed items through the
    ``pytest_deselected`` hook; no-op when no prefixes were given or none
    matched.
    """
    prefixes = tuple(config.getoption("deselect") or [])
    if not prefixes:
        return

    deselected = [item for item in items if item.nodeid.startswith(prefixes)]
    if not deselected:
        return

    kept = [item for item in items if not item.nodeid.startswith(prefixes)]
    config.hook.pytest_deselected(items=deselected)
    items[:] = kept
@contextlib.contextmanager
def _patched_find_module():
    """Patch bug in pkgutil.ImpImporter.find_module.

    When using pkgutil.find_loader on python<3.4 it removes symlinks
    from the path due to a call to os.path.realpath. This is not consistent
    with actually doing the import (in these versions, pkgutil and __import__
    did not share the same underlying code). This can break conftest
    discovery for pytest where symlinks are involved.

    The only supported python<3.4 by pytest is python 2.7.
    """
    if six.PY2: # python 3.4+ uses importlib instead

        def find_module_patched(self, fullname, path=None):
            # Note: we ignore 'path' argument since it is only used via meta_path
            subname = fullname.split(".")[-1]
            if subname != fullname and self.path is None:
                return None
            if self.path is None:
                path = None
            else:
                # original: path = [os.path.realpath(self.path)]
                path = [self.path]
            try:
                file, filename, etc = pkgutil.imp.find_module(subname, path)
            except ImportError:
                return None
            return pkgutil.ImpLoader(fullname, file, filename, etc)

        # Temporarily swap in the patched lookup; the finally clause
        # guarantees the original is restored even if the wrapped block
        # raises.
        old_find_module = pkgutil.ImpImporter.find_module
        pkgutil.ImpImporter.find_module = find_module_patched
        try:
            yield
        finally:
            pkgutil.ImpImporter.find_module = old_find_module
    else:
        yield
class FSHookProxy(object):
    """Hook-caller proxy bound to one filesystem path.

    Lazily resolves hook callers that exclude the conftest plugins listed
    in ``remove_mods`` and caches each resolved caller on the instance so
    ``__getattr__`` only runs once per hook name.
    """

    def __init__(self, fspath, pm, remove_mods):
        self.fspath = fspath
        self.pm = pm
        self.remove_mods = remove_mods

    def __getattr__(self, name):
        hook_caller = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
        # Cache on the instance so subsequent lookups bypass __getattr__.
        setattr(self, name, hook_caller)
        return hook_caller
class NoMatch(Exception):
    """Raised internally when matching cannot locate any matching names."""
class Interrupted(KeyboardInterrupt):
    """ signals an interrupted test run. """

    # Present as a builtin in tracebacks on Python 3, hiding the private
    # module path.
    __module__ = "builtins" # for py3
class Failed(Exception):
    """ signals a stop as failed test run. """
class Session(nodes.FSCollector):
    """Root of the collection tree for one test run.

    Owns the collection protocol (parsing command line arguments into
    paths/names, walking directories, matching node ids) and the run-time
    counters (testsfailed/testscollected) plus the shouldstop/shouldfail
    flags other hooks read and write.
    """

    # Re-export the control-flow exceptions so callers can raise/catch
    # them via the session instance.
    Interrupted = Interrupted
    Failed = Failed

    def __init__(self, config):
        nodes.FSCollector.__init__(
            self, config.rootdir, parent=None, config=config, session=self, nodeid=""
        )
        self.testsfailed = 0
        self.testscollected = 0
        # shouldstop/shouldfail hold False or a reason string.
        self.shouldstop = False
        self.shouldfail = False
        self.trace = config.trace.root.get("collection")
        self._norecursepatterns = config.getini("norecursedirs")
        self.startdir = py.path.local()
        # Keep track of any collected nodes in here, so we don't duplicate fixtures
        self._node_cache = {}

        self.config.pluginmanager.register(self, name="session")

    @hookimpl(tryfirst=True)
    def pytest_collectstart(self):
        # Abort collection as early as possible once a stop was requested.
        if self.shouldfail:
            raise self.Failed(self.shouldfail)
        if self.shouldstop:
            raise self.Interrupted(self.shouldstop)

    @hookimpl(tryfirst=True)
    def pytest_runtest_logreport(self, report):
        """Count failures (excluding xfails) and arm shouldfail at --maxfail."""
        if report.failed and not hasattr(report, "wasxfail"):
            self.testsfailed += 1
            maxfail = self.config.getvalue("maxfail")
            if maxfail and self.testsfailed >= maxfail:
                self.shouldfail = "stopping after %d failures" % (self.testsfailed)

    # Collection reports are counted exactly like run-time reports.
    pytest_collectreport = pytest_runtest_logreport

    def isinitpath(self, path):
        """True if ``path`` was given directly on the command line."""
        return path in self._initialpaths

    def gethookproxy(self, fspath):
        """Return a hook caller restricted to the conftests active at ``fspath``."""
        # check if we have the common case of running
        # hooks with all conftest.py files
        pm = self.config.pluginmanager
        my_conftestmodules = pm._getconftestmodules(fspath)
        remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
        if remove_mods:
            # one or more conftests are not in use at this fspath
            proxy = FSHookProxy(fspath, pm, remove_mods)
        else:
            # all plugins are active for this fspath
            proxy = self.config.hook
        return proxy

    def perform_collect(self, args=None, genitems=True):
        """Run the full collection protocol and return the collected items.

        Always fires pytest_collection_finish, even when collection raised.
        """
        hook = self.config.hook
        try:
            items = self._perform_collect(args, genitems)
            self.config.pluginmanager.check_pending()
            hook.pytest_collection_modifyitems(
                session=self, config=self.config, items=items
            )
        finally:
            hook.pytest_collection_finish(session=self)
        self.testscollected = len(items)
        return items

    def _perform_collect(self, args, genitems):
        """Parse ``args`` into (path, names) parts, collect, and gather items."""
        if args is None:
            args = self.config.args
        self.trace("perform_collect", self, args)
        self.trace.root.indent += 1
        self._notfound = []
        self._initialpaths = set()
        self._initialparts = []
        self.items = items = []
        for arg in args:
            parts = self._parsearg(arg)
            self._initialparts.append(parts)
            self._initialpaths.add(parts[0])
        rep = collect_one_node(self)
        self.ihook.pytest_collectreport(report=rep)
        self.trace.root.indent -= 1
        if self._notfound:
            errors = []
            for arg, exc in self._notfound:
                line = "(no name %r in any of %r)" % (arg, exc.args[0])
                errors.append("not found: %s\n%s" % (arg, line))
            # XXX: test this
            raise UsageError(*errors)
        if not genitems:
            return rep.result
        else:
            if rep.passed:
                for node in rep.result:
                    self.items.extend(self.genitems(node))
            return items

    def collect(self):
        """Yield top-level collectors/items for each initial argument."""
        for parts in self._initialparts:
            arg = "::".join(map(str, parts))
            self.trace("processing argument", arg)
            self.trace.root.indent += 1
            try:
                for x in self._collect(arg):
                    yield x
            except NoMatch:
                # we are inside a make_report hook so
                # we cannot directly pass through the exception
                self._notfound.append((arg, sys.exc_info()[1]))

            self.trace.root.indent -= 1

    def _collect(self, arg):
        """Yield collected nodes for a single ``path[::name...]`` argument."""
        names = self._parsearg(arg)
        argpath = names.pop(0)
        paths = []

        root = self
        # Start with a Session root, and delve to argpath item (dir or file)
        # and stack all Packages found on the way.
        # No point in finding packages when collecting doctests
        if not self.config.option.doctestmodules:
            for parent in argpath.parts():
                pm = self.config.pluginmanager
                if pm._confcutdir and pm._confcutdir.relto(parent):
                    continue

                if parent.isdir():
                    pkginit = parent.join("__init__.py")
                    if pkginit.isfile():
                        if pkginit in self._node_cache:
                            root = self._node_cache[pkginit]
                        else:
                            col = root._collectfile(pkginit)
                            if col:
                                root = col[0]
                                self._node_cache[root.fspath] = root

        # If it's a directory argument, recurse and look for any Subpackages.
        # Let the Package collector deal with subnodes, don't collect here.
        if argpath.check(dir=1):
            assert not names, "invalid arg %r" % (arg,)
            for path in argpath.visit(
                fil=lambda x: x.check(file=1), rec=self._recurse, bf=True, sort=True
            ):
                pkginit = path.dirpath().join("__init__.py")
                if pkginit.exists() and not any(x in pkginit.parts() for x in paths):
                    for x in root._collectfile(pkginit):
                        yield x
                        paths.append(x.fspath.dirpath())

                if not any(x in path.parts() for x in paths):
                    for x in root._collectfile(path):
                        if (type(x), x.fspath) in self._node_cache:
                            yield self._node_cache[(type(x), x.fspath)]
                        else:
                            yield x
                            self._node_cache[(type(x), x.fspath)] = x
        else:
            assert argpath.check(file=1)

            if argpath in self._node_cache:
                col = self._node_cache[argpath]
            else:
                col = root._collectfile(argpath)
                if col:
                    self._node_cache[argpath] = col
            for y in self.matchnodes(col, names):
                yield y

    def _collectfile(self, path):
        """Collect a single file, honoring pytest_ignore_collect for non-initial paths."""
        ihook = self.gethookproxy(path)
        if not self.isinitpath(path):
            if ihook.pytest_ignore_collect(path=path, config=self.config):
                return ()
        return ihook.pytest_collect_file(path=path, parent=self)

    def _recurse(self, path):
        """Decide whether directory ``path`` should be descended into."""
        ihook = self.gethookproxy(path.dirpath())
        if ihook.pytest_ignore_collect(path=path, config=self.config):
            return
        # norecursedirs patterns (e.g. ".*", "build") prune the walk.
        for pat in self._norecursepatterns:
            if path.check(fnmatch=pat):
                return False
        ihook = self.gethookproxy(path)
        ihook.pytest_collect_directory(path=path, parent=self)
        return True

    def _tryconvertpyarg(self, x):
        """Convert a dotted module name to path.
        """
        try:
            with _patched_find_module():
                loader = pkgutil.find_loader(x)
        except ImportError:
            return x
        if loader is None:
            return x
        # This method is sometimes invoked when AssertionRewritingHook, which
        # does not define a get_filename method, is already in place:
        try:
            with _patched_find_module():
                path = loader.get_filename(x)
        except AttributeError:
            # Retrieve path from AssertionRewritingHook:
            path = loader.modules[x][0].co_filename
        if loader.is_package(x):
            path = os.path.dirname(path)
        return path

    def _parsearg(self, arg):
        """ return (fspath, names) tuple after checking the file exists. """
        parts = str(arg).split("::")
        if self.config.option.pyargs:
            parts[0] = self._tryconvertpyarg(parts[0])
        relpath = parts[0].replace("/", os.sep)
        path = self.config.invocation_dir.join(relpath, abs=True)
        if not path.check():
            if self.config.option.pyargs:
                raise UsageError(
                    "file or package not found: " + arg + " (missing __init__.py?)"
                )
            else:
                raise UsageError("file not found: " + arg)
        parts[0] = path
        return parts

    def matchnodes(self, matching, names):
        """Return nodes under ``matching`` that match the ``names`` chain.

        Raises NoMatch when nothing matched (caught in collect()).
        """
        self.trace("matchnodes", matching, names)
        self.trace.root.indent += 1
        nodes = self._matchnodes(matching, names)
        num = len(nodes)
        self.trace("matchnodes finished -> ", num, "nodes")
        self.trace.root.indent -= 1
        if num == 0:
            raise NoMatch(matching, names[:1])
        return nodes

    def _matchnodes(self, matching, names):
        """Recursive worker for matchnodes: match one name level at a time."""
        if not matching or not names:
            return matching
        name = names[0]
        assert name
        nextnames = names[1:]
        resultnodes = []
        for node in matching:
            if isinstance(node, nodes.Item):
                if not names:
                    resultnodes.append(node)
                continue
            assert isinstance(node, nodes.Collector)
            # Cache per-collector reports so re-matching does not re-collect.
            if node.nodeid in self._node_cache:
                rep = self._node_cache[node.nodeid]
            else:
                rep = collect_one_node(node)
                self._node_cache[node.nodeid] = rep
            if rep.passed:
                has_matched = False
                for x in rep.result:
                    # TODO: remove parametrized workaround once collection structure contains parametrization
                    if x.name == name or x.name.split("[")[0] == name:
                        resultnodes.extend(self.matchnodes([x], nextnames))
                        has_matched = True
                # XXX accept IDs that don't have "()" for class instances
                # (note: `x` here is the last value from the loop above)
                if not has_matched and len(rep.result) == 1 and x.name == "()":
                    nextnames.insert(0, name)
                    resultnodes.extend(self.matchnodes([x], nextnames))
            else:
                # report collection failures here to avoid failing to run some test
                # specified in the command line because the module could not be
                # imported (#134)
                node.ihook.pytest_collectreport(report=rep)
        return resultnodes

    def genitems(self, node):
        """Recursively yield all Items below ``node``, reporting as we go."""
        self.trace("genitems", node)
        if isinstance(node, nodes.Item):
            node.ihook.pytest_itemcollected(item=node)
            yield node
        else:
            assert isinstance(node, nodes.Collector)
            rep = collect_one_node(node)
            if rep.passed:
                for subnode in rep.result:
                    for x in self.genitems(subnode):
                        yield x
            node.ihook.pytest_collectreport(report=rep)
| 34.260212 | 109 | 0.592423 |
6e4673b7d92e5792fda28b6efd0785c3de795621 | 2,049 | py | Python | novaagent/xenstore/xenstore.py | JourdanClark/nova-agent | cccea98bc5b55982352ab50d751907eb5465e675 | [
"Apache-2.0"
] | 2 | 2017-08-08T21:46:02.000Z | 2018-01-24T21:40:15.000Z | novaagent/xenstore/xenstore.py | JourdanClark/nova-agent | cccea98bc5b55982352ab50d751907eb5465e675 | [
"Apache-2.0"
] | 27 | 2017-08-07T13:51:51.000Z | 2021-05-22T23:46:57.000Z | novaagent/xenstore/xenstore.py | JourdanClark/nova-agent | cccea98bc5b55982352ab50d751907eb5465e675 | [
"Apache-2.0"
] | 13 | 2017-09-16T12:12:05.000Z | 2021-03-04T21:41:16.000Z |
from subprocess import PIPE
from subprocess import Popen
import json
def xenstore_read(path, client, to_json=False):
    """Read a value from xenstore at ``path``.

    Uses ``client`` when provided, otherwise shells out to ``xenstore-read``.
    Returns the stripped string value (JSON-decoded when ``to_json`` is set
    and the value is non-empty), or None if the shell command failed.
    """
    if client is not None:
        value = client.read(path).decode('utf-8').strip()
    else:
        value = None
        proc = Popen(
            ['xenstore-read', path],
            stdout=PIPE,
            stderr=PIPE
        )
        out, _ = proc.communicate()
        if proc.returncode == 0:
            value = out.decode('utf-8').strip()

    if to_json and value:
        return json.loads(value)
    return value
def xenstore_list(path, client):
    """List child entry names under ``path`` in xenstore.

    Uses ``client`` when provided, otherwise shells out to ``xenstore-ls``.
    Returns a list of stripped entry names; empty on shell failure.
    """
    if client is not None:
        return [entry.decode('utf-8').strip() for entry in client.list(path)]

    entries = []
    proc = Popen(
        ['xenstore-ls', path],
        stdout=PIPE,
        stderr=PIPE
    )
    out, _ = proc.communicate()
    if proc.returncode == 0:
        for line in out.decode('utf-8').split('\n'):
            if line:
                # Each line looks like "name = value"; keep only the name.
                entries.append(line.split(' = ')[0])
    return entries
def xenstore_write(write_path, write_value, client):
    """Write ``write_value`` at ``write_path`` in xenstore.

    Uses ``client`` when provided, otherwise shells out to
    ``xenstore-write``. Raises ValueError if the shell command exits
    non-zero. Returns None.
    """
    if client is not None:
        client.write(write_path, write_value)
        return

    proc = Popen(
        ['xenstore-write', write_path, write_value],
        stdout=PIPE,
        stderr=PIPE
    )
    proc.communicate()
    if proc.returncode != 0:
        raise ValueError(
            'Shell to xenstore-write returned invalid code {0}'.format(
                proc.returncode
            )
        )
    return
def xenstore_delete(path, client):
    """Delete the xenstore entry at ``path``.

    Uses ``client`` when provided, otherwise shells out to ``xenstore-rm``.
    Raises ValueError if the shell command exits non-zero. Returns None.
    """
    if client is not None:
        client.delete(path)
        return

    proc = Popen(
        ['xenstore-rm', path],
        stdout=PIPE,
        stderr=PIPE
    )
    proc.communicate()
    if proc.returncode != 0:
        raise ValueError(
            'Shell to xenstore-rm returned invalid code {0}'.format(
                proc.returncode
            )
        )
    return
f31a7d17386531baa5db378d386b993fb404cff8 | 54,893 | py | Python | tensorflow/python/distribute/mirrored_strategy_test.py | vladbataev/tensorflow | 139cc09ea1cee1c69bcb27022199899db5cdd9e5 | [
"Apache-2.0"
] | 1 | 2020-03-02T20:45:18.000Z | 2020-03-02T20:45:18.000Z | tensorflow/python/distribute/mirrored_strategy_test.py | sagol/tensorflow | 04f2870814d2773e09dcfa00cbe76a66a2c4de88 | [
"Apache-2.0"
] | 2 | 2021-08-25T15:58:27.000Z | 2022-02-10T02:04:40.000Z | tensorflow/python/distribute/mirrored_strategy_test.py | sagol/tensorflow | 04f2870814d2773e09dcfa00cbe76a66a2c4de88 | [
"Apache-2.0"
] | 2 | 2021-04-28T20:57:17.000Z | 2022-01-11T13:05:41.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for MirroredStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import sys
from absl.testing import parameterized
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import multi_worker_test_base
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.distribute import strategy_test_lib
from tensorflow.python.distribute import values
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.eager import test
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras.engine import training as keras_training
from tensorflow.python.keras.layers import core as keras_core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow.python.training import server_lib
# True when this test binary was invoked as a GPU variant (argv[0] contains
# "test_gpu").
GPU_TEST = "test_gpu" in sys.argv[0]
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
            strategy_combinations.mirrored_strategy_with_two_gpus,
        ],
        mode=["graph", "eager"]))
class MirroredTwoDeviceDistributionTest(
    strategy_test_lib.DistributionTestBase,
    strategy_test_lib.TwoDeviceDistributionTestBase,
    parameterized.TestCase):
  """MirroredStrategy tests parameterized over two-device configurations.

  Each method receives a two-replica ``distribution`` from the combinations
  decorator (GPU+CPU or two GPUs) in both graph and eager mode; most methods
  delegate to shared checks defined on the strategy_test_lib base classes.
  """

  def testMinimizeLoss(self, distribution):
    if context.executing_eagerly():
      self._test_minimize_loss_eager(distribution)
    else:
      self._test_minimize_loss_graph(distribution)

  def testReplicaId(self, distribution):
    self._test_replica_id(distribution)

  def testNumReplicasInSync(self, distribution):
    self.assertEqual(2, distribution.num_replicas_in_sync)

  def testCallAndMergeExceptions(self, distribution):
    self._test_call_and_merge_exceptions(distribution)

  def testRunRegroupError(self, distribution):
    def run_fn():
      replica_id = int(self.evaluate(_replica_id()))
      # Generates a list with different lengths on different devices.
      # Will fail in _regroup() (if more than one device).
      return list(range(replica_id))

    with distribution.scope(), self.assertRaises(AssertionError):
      distribution.extended.call_for_each_replica(run_fn)

  def testReduceToCpu(self, distribution):
    with distribution.scope():
      result = distribution.extended.call_for_each_replica(_replica_id)
      reduced = distribution.reduce(reduce_util.ReduceOp.SUM, result, axis=None)
      expected = sum(range(distribution.num_replicas_in_sync))
      self.assertEqual(expected, self.evaluate(reduced))

  def reduce_axis_helper(self, distribution, replica_squared_fn):
    """Runs reduce over axis 0 with SUM and MEAN and checks the results."""
    with distribution.scope():
      num_replicas = distribution.num_replicas_in_sync
      result = distribution.extended.call_for_each_replica(replica_squared_fn)

      # sum
      reduced = distribution.reduce(reduce_util.ReduceOp.SUM, result, axis=0)
      expected = sum(x * (x + 1) for x in range(num_replicas))
      self.assertNear(expected, self.evaluate(reduced), 0.00001)

      # mean
      reduced = distribution.reduce(reduce_util.ReduceOp.MEAN, result, axis=0)
      expected /= sum(x + 1 for x in range(num_replicas))
      self.assertNear(expected, self.evaluate(reduced), 0.00001)

  def testReduceAxisToCpu(self, distribution):
    for dtype in (dtypes.float32, dtypes.int32):
      def replica_squared_fn(dtype=dtype):
        # Lists with different lengths on different replicas.
        replica_id = _replica_id_as_int()
        return math_ops.cast([replica_id] * (replica_id + 1), dtype)

      self.reduce_axis_helper(distribution, replica_squared_fn)

  def set_v2_tensorshape(self, v2):
    # Toggle TensorShape v2 behavior globally for the duration of a test.
    if v2:
      tensor_shape.enable_v2_tensorshape()
    else:
      tensor_shape.disable_v2_tensorshape()

  def testReduceAxisToCpuUnknownShape(self, distribution):
    original_v2 = tensor_shape._TENSORSHAPE_V2_OVERRIDE  # pylint: disable=protected-access
    try:
      for v2 in (False, True):
        self.set_v2_tensorshape(v2)
        for dtype in (dtypes.float32, dtypes.int32):
          for shape in ((None,), None):  # Test both unknown size and rank.
            def replica_squared_fn(dtype=dtype, shape=shape):
              # Lists with different lengths on different replicas.
              replica_id = _replica_id_as_int()
              tensor = math_ops.cast([replica_id] * (replica_id + 1), dtype)
              # Erase shape information
              return array_ops.placeholder_with_default(tensor, shape=shape)

            self.reduce_axis_helper(distribution, replica_squared_fn)
    finally:
      # Restore the global TensorShape override no matter what.
      self.set_v2_tensorshape(original_v2)

  def testReplicateDataset(self, distribution):
    dataset_fn = lambda: dataset_ops.Dataset.range(10)
    expected_values = [[i, i+1] for i in range(0, 10, 2)]
    input_fn = self._input_fn_to_test_input_context(
        dataset_fn,
        expected_num_replicas_in_sync=2,
        expected_num_input_pipelines=1,
        expected_input_pipeline_id=0)
    self._test_input_fn_iterable(distribution, input_fn, expected_values)

  def testMakeInputFnIteratorWithDataset(self, distribution):
    dataset_fn = lambda: dataset_ops.Dataset.range(10)
    expected_values = [[i, i+1] for i in range(0, 10, 2)]
    input_fn = self._input_fn_to_test_input_context(
        dataset_fn,
        expected_num_replicas_in_sync=2,
        expected_num_input_pipelines=1,
        expected_input_pipeline_id=0)
    iterator = distribution.make_input_fn_iterator(input_fn)
    self._test_input_fn_iterator(iterator, distribution.extended.worker_devices,
                                 expected_values)

  def testMakeInputFnIteratorWithCallable(self, distribution):
    def fn():
      dataset = dataset_ops.Dataset.range(2).interleave(
          (lambda _: dataset_ops.Dataset.range(10)), cycle_length=2)
      it = dataset_ops.make_one_shot_iterator(dataset)
      return it.get_next

    expected_values = [[i, i] for i in range(0, 10)]
    input_fn = self._input_fn_to_test_input_context(
        fn,
        expected_num_replicas_in_sync=2,
        expected_num_input_pipelines=1,
        expected_input_pipeline_id=0)
    iterator = distribution.make_input_fn_iterator(input_fn)
    self._test_input_fn_iterator(iterator, distribution.extended.worker_devices,
                                 expected_values, test_reinitialize=False,
                                 ignore_order=True)

  def testNumpyDataset(self, distribution):
    self._test_numpy_dataset(distribution)

  def testGlobalStepUpdate(self, distribution):
    self._test_global_step_update(distribution)

  def testRun(self, distribution):
    self._test_run(distribution)

  def testAllReduceSum(self, distribution):
    self._test_all_reduce_sum(distribution)

  def testAllReduceSumGradients(self, distribution):
    self._test_all_reduce_sum_gradients(distribution)

  def testAllReduceSumGradientTape(self, distribution):
    self._test_all_reduce_sum_gradient_tape(distribution)

  def testAllReduceMean(self, distribution):
    self._test_all_reduce_mean(distribution)

  def testAllReduceMeanGradients(self, distribution):
    self._test_all_reduce_mean_gradients(distribution)

  def testAllReduceMeanGradientTape(self, distribution):
    self._test_all_reduce_mean_gradient_tape(distribution)

  def testSummaryForReplicaZeroOnly(self, distribution):
    self._test_summary_for_replica_zero_only(distribution)

  def testTrainableVariables(self, distribution):
    self._test_trainable_variable(distribution)
def one_device_combinations():
  """Test combinations mirroring onto exactly one device (one CPU or one GPU)."""
  return combinations.combine(
      distribution=[
          strategy_combinations.mirrored_strategy_with_one_cpu,
          strategy_combinations.mirrored_strategy_with_one_gpu,
      ],
      mode=["graph", "eager"])
@combinations.generate(one_device_combinations())
class MirroredOneDeviceDistributionTest(
    strategy_test_lib.DistributionTestBase,
    strategy_test_lib.OneDeviceDistributionTestBase,
    parameterized.TestCase):
  """Single-device MirroredStrategy tests; delegates to shared base checks."""

  def testMinimizeLoss(self, distribution):
    if context.executing_eagerly():
      self._test_minimize_loss_eager(distribution)
    else:
      self._test_minimize_loss_graph(distribution)

  def testReplicaId(self, distribution):
    self._test_replica_id(distribution)

  def testCallAndMergeExceptions(self, distribution):
    self._test_call_and_merge_exceptions(distribution)

  def testRun(self, distribution):
    self._test_run(distribution)

  def testAllReduceSum(self, distribution):
    self._test_all_reduce_sum(distribution)

  def testAllReduceSumGradients(self, distribution):
    self._test_all_reduce_sum_gradients(distribution)

  def testAllReduceSumGradientTape(self, distribution):
    self._test_all_reduce_sum_gradient_tape(distribution)

  def testAllReduceMean(self, distribution):
    self._test_all_reduce_mean(distribution)

  def testAllReduceMeanGradients(self, distribution):
    self._test_all_reduce_mean_gradients(distribution)

  def testAllReduceMeanGradientTape(self, distribution):
    self._test_all_reduce_mean_gradient_tape(distribution)
class MirroredStrategyVariableCreatorStackTest(
    test.TestCase, parameterized.TestCase):
  """Checks that variable creator scopes are per-replica-thread, not shared."""

  @combinations.generate(
      combinations.combine(
          distribution=[
              strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
          ],
          mode=["graph"]))
  def testCreatorStacksAreThreadLocal(self, distribution):
    def model_fn():
      replica_id_str = str(self.evaluate(_replica_id()))

      def thread_creator_fn(next_creator, **kwargs):
        # Tag the created value with this replica thread's id so we can
        # verify each thread used its own creator.
        return next_creator(**kwargs) + ":thread_" + replica_id_str

      with variable_scope.variable_creator_scope(thread_creator_fn):
        # Create a variable in this scope.
        v = variable_scope.variable(1.0)

        # This will pause the current thread, and execute the other thread.
        ds_context.get_replica_context().merge_call(lambda _: _)
      return v

    def main_thread_creator(next_creator, **kwargs):
      # We are not using the underlying next_creator for test purposes.
      del next_creator, kwargs
      return "main_thread"

    with context.graph_mode(), \
        distribution.scope(), \
        variable_scope.variable_creator_scope(main_thread_creator):
      result = distribution.extended.call_for_each_replica(model_fn)
      result = distribution.experimental_local_results(result)
      # Each replica thread should have composed the main creator with its
      # own thread-local creator.
      expected = ("main_thread:thread_0", "main_thread:thread_1")
      self.assertEqual(expected, result)
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
        ],
        mode=["graph", "eager"]))
class MirroredStrategyCallForEachReplicaTest(test.TestCase):
  """Tests `extended.call_for_each_replica` with `tf.function`-wrapped fns."""

  def testExecutingEagerlyOutsideFunction(self, distribution):
    """Verify we preserve the value of executing_eagerly_outside_functions()."""

    def model_fn():
      return ops.executing_eagerly_outside_functions()

    originally = ops.executing_eagerly_outside_functions()
    with distribution.scope():
      in_scope = ops.executing_eagerly_outside_functions()
      in_model_fn = distribution.extended.call_for_each_replica(model_fn)
      unwrapped = distribution.experimental_local_results(in_model_fn)
      self.assertEqual(in_scope, unwrapped[0])
      self.assertEqual(in_scope, originally)

    # Verify this all again, but this time in a FuncGraph.
    with func_graph.FuncGraph("fg").as_default(), distribution.scope():
      in_scope = ops.executing_eagerly_outside_functions()
      in_model_fn = distribution.extended.call_for_each_replica(model_fn)
      unwrapped = distribution.experimental_local_results(in_model_fn)
      self.assertEqual(in_scope, unwrapped[0])
      self.assertEqual(in_scope, originally)

  def testFunctionInCallForEachReplica(self, distribution):
    """A tf.function passed to call_for_each_replica traces once per replica."""
    traces = []

    @def_function.function
    def model_fn():
      traces.append(1)
      return ds_context.get_replica_context().replica_id_in_sync_group

    with distribution.scope():
      result = distribution.extended.call_for_each_replica(model_fn)
      self.assertEqual((0, 1), self.evaluate(result.values))
      # One trace per replica.
      self.assertLen(traces, distribution.num_replicas_in_sync)

  def testFunctionInCallForEachReplicaInsideAnotherFunction(self, distribution):
    """Same as above, but call_for_each_replica runs inside a tf.function."""
    traces = []

    @def_function.function
    def model_fn():
      traces.append(1)
      return ds_context.get_replica_context().replica_id_in_sync_group

    @def_function.function
    def step():
      return distribution.extended.call_for_each_replica(model_fn)

    with distribution.scope():
      result = step()
      self.assertEqual((0, 1), self.evaluate(result.values))
      self.assertLen(traces, distribution.num_replicas_in_sync)

  def testNestedFunctionInCallForEachReplicaWithMergeCall(self, distribution):
    """merge_call inside a while_loop body of a tf.function must raise."""

    def merge_fn(_):
      pass

    @def_function.function
    def model_fn():

      def body_fn(i):
        # merge_call here happens while the while_loop is building a new
        # FuncGraph, which is not allowed.
        ds_context.get_replica_context().merge_call(merge_fn)
        return i + 1

      return control_flow_ops.while_loop_v2(lambda i: i < 2, body_fn, [0])

    with distribution.scope():
      with self.assertRaisesRegexp(
          RuntimeError, "`merge_call` called while defining a new graph."):
        distribution.extended.call_for_each_replica(model_fn)

  def testFunctionInCallForEachReplicaWithMergeCall(self, distribution):
    """merge_call at the top level of a tf.function is allowed."""

    def merge_fn(_):
      pass

    @def_function.function
    def model_fn():
      ds_context.get_replica_context().merge_call(merge_fn)
      return 0.

    with distribution.scope():
      self.assertEqual(
          self.evaluate(distribution.extended.call_for_each_replica(model_fn)),
          0.)

  def testFunctionInCallForEachReplicaCached(self, distribution):
    """The traced function is cached; repeated calls do not re-trace."""
    traces = []

    @def_function.function
    def model_fn():
      traces.append(None)

    self.assertEmpty(traces)
    for i in range(10):
      distribution.extended.call_for_each_replica(model_fn)
      if i == 0:
        num_devices = len(traces)
        self.assertGreater(num_devices, 0)
      else:
        # model_fn should not have been re-evaluated so the length should remain
        # the same.
        self.assertLen(traces, num_devices)
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
        ],
        mode=["graph"]))
class MirroredStrategyNameScopeTest(test.TestCase):
  """Checks op/variable names created inside call_for_each_replica."""

  # NOTE(priyag): Names and name scopes are ignored in eager, hence we are not
  # testing this in eager mode.

  def testNameScope(self, distribution):
    """Per-replica ops get a "replica_<n>/" name prefix for replicas n > 0."""

    def model_fn():
      with ops.name_scope("foo"):
        a = constant_op.constant(1.0, name="a")
        ds_context.get_replica_context().merge_call(lambda _: _)
        b = constant_op.constant(1.0, name="b")
        return a, b

    with context.graph_mode(), distribution.scope():
      with ops.name_scope("main"):
        result = distribution.extended.call_for_each_replica(model_fn)
        self.assertEqual(2, len(result))
        for v, name in zip(result, ["a", "b"]):
          self.assertIsInstance(v, values.DistributedValues)
          v0, v1 = distribution.experimental_local_results(v)
          # Replica 0 keeps the plain scope; replica 1 gets "replica_1/".
          self.assertEqual("main/foo/" + name + ":0", v0.name)
          self.assertEqual("main/replica_1/foo/" + name + ":0", v1.name)

  def testWithDefaultName(self, distribution):
    """A default (None) name_scope argument still yields the "foo" scope."""

    def model_fn():
      with ops.name_scope(None, "foo"):
        a = constant_op.constant(1.0, name="a")
        ds_context.get_replica_context().merge_call(lambda _: _)
        b = constant_op.constant(2.0, name="b")
        return a, b

    with context.graph_mode(), distribution.scope():
      result = distribution.extended.call_for_each_replica(model_fn)
      self.assertEqual(2, len(result))
      for v, name in zip(result, ["a", "b"]):
        self.assertIsInstance(v, values.DistributedValues)
        v0, v1 = distribution.experimental_local_results(v)
        self.assertEqual("foo/" + name + ":0", v0.name)
        self.assertEqual("replica_1/foo/" + name + ":0", v1.name)

  # variable_scope.variable() respects name scopes when creating
  # variables. On the other hand variable_scope.get_variable() ignores name
  # scopes but respects variable scope when creating variables. We test both
  # methods of creating variables to make sure that we have the same
  # variable names in both cases.
  def testNameScopeWithVariable(self, distribution):
    """variable_scope.variable() names include the surrounding name scopes."""

    def in_cross_replica(_):
      c = variable_scope.variable(1.0, name="c")
      return c

    def model_fn():
      b = variable_scope.variable(1.0, name="b")
      with ops.name_scope("foo"):
        c = ds_context.get_replica_context().merge_call(in_cross_replica)
      return b, c

    with context.graph_mode(), distribution.scope():
      with ops.name_scope("main"):
        a = variable_scope.variable(1.0, name="a")
        result = distribution.extended.call_for_each_replica(model_fn)
      result_b = result[0]
      result_c = result[1]
      self.assertIsInstance(result_b, values.DistributedValues)
      self.assertIsInstance(result_c, values.DistributedValues)
      a0, a1 = distribution.experimental_local_results(a)
      b0, b1 = distribution.experimental_local_results(result_b)
      c0, c1 = distribution.experimental_local_results(result_c)
      # The replica-1 copy of each variable carries a "/replica_1" suffix.
      self.assertEqual("main/a:0", a0.name)
      self.assertEqual("main/a/replica_1:0", a1.name)
      self.assertEqual("main/b:0", b0.name)
      self.assertEqual("main/b/replica_1:0", b1.name)
      self.assertEqual("main/foo/c:0", c0.name)
      self.assertEqual("main/foo/c/replica_1:0", c1.name)

  def testNameScopeWithGetVariable(self, distribution):
    """variable_scope.get_variable() ignores name scopes for variable names."""

    def in_cross_replica(_):
      c = variable_scope.get_variable("c", [1])
      return c

    def model_fn():
      b = variable_scope.get_variable("b", [1])
      with ops.name_scope("foo"):
        c = ds_context.get_replica_context().merge_call(in_cross_replica)
      return b, c

    with context.graph_mode(), distribution.scope():
      with ops.name_scope("main"):
        a = variable_scope.get_variable("a", [1])
        result = distribution.extended.call_for_each_replica(model_fn)
      result_b = result[0]
      result_c = result[1]
      self.assertIsInstance(result_b, values.DistributedValues)
      self.assertIsInstance(result_c, values.DistributedValues)
      a0, a1 = distribution.experimental_local_results(a)
      b0, b1 = distribution.experimental_local_results(result_b)
      c0, c1 = distribution.experimental_local_results(result_c)
      # No "main/" or "foo/" prefixes: name scopes are ignored here.
      self.assertEqual("a:0", a0.name)
      self.assertEqual("a/replica_1:0", a1.name)
      self.assertEqual("b:0", b0.name)
      self.assertEqual("b/replica_1:0", b1.name)
      self.assertEqual("c:0", c0.name)
      self.assertEqual("c/replica_1:0", c1.name)

  def testVariableScopeWithGetVariable(self, distribution):
    """get_variable() does respect variable_scope (as opposed to name_scope)."""

    def in_cross_replica(_):
      c = variable_scope.get_variable("c", [1])
      return c

    def model_fn():
      b = variable_scope.get_variable("b", [1])
      with variable_scope.variable_scope("foo"):
        c = ds_context.get_replica_context().merge_call(in_cross_replica)
      return b, c

    with context.graph_mode(), distribution.scope():
      with variable_scope.variable_scope("main"):
        a = variable_scope.get_variable("a", [1])
        result = distribution.extended.call_for_each_replica(model_fn)
      result_b = result[0]
      result_c = result[1]
      self.assertIsInstance(result_b, values.DistributedValues)
      self.assertIsInstance(result_c, values.DistributedValues)
      a0, a1 = distribution.experimental_local_results(a)
      b0, b1 = distribution.experimental_local_results(result_b)
      c0, c1 = distribution.experimental_local_results(result_c)
      # variable_scope prefixes appear in the names here.
      self.assertEqual("main/a:0", a0.name)
      self.assertEqual("main/a/replica_1:0", a1.name)
      self.assertEqual("main/b:0", b0.name)
      self.assertEqual("main/b/replica_1:0", b1.name)
      self.assertEqual("main/foo/c:0", c0.name)
      self.assertEqual("main/foo/c/replica_1:0", c1.name)
@combinations.generate(
    combinations.combine(
        distribution=[
            combinations.NamedDistribution(
                "Mirrored3Devices",
                # pylint: disable=g-long-lambda
                lambda: mirrored_strategy.MirroredStrategy(
                    ["/device:GPU:0", "/device:GPU:1", "/device:CPU:0"]),
                required_gpus=2)
        ],
        mode=["graph", "eager"]))
class MirroredThreeDeviceDistributionTest(
    strategy_test_lib.DistributionTestBase,
    parameterized.TestCase):
  """Smoke test of MirroredStrategy over three devices (two GPUs + CPU)."""

  def testThreeDevices(self, distribution):
    """A variable created per replica becomes a single MirroredVariable."""

    def create_var():
      replica_var = variable_scope.variable(1.0, name="foo")
      # Cross-replica synchronization point; the merge fn is an identity no-op.
      ds_context.get_replica_context().merge_call(lambda _: _)
      return replica_var

    with distribution.scope():
      mirrored = distribution.extended.call_for_each_replica(create_var)
      self.assertIsInstance(mirrored, values.MirroredVariable)
      self.assertEqual("foo:0", mirrored.name)
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
        ],
        mode=["graph", "eager"]))
class MirroredVariableUpdateTest(test.TestCase):
  # The following tests check assign, assign_add and assign_sub on Mirrored
  # variables in replica and cross replica context.

  def testAssignMirroredVarReplicaContextWithoutAggregationType(
      self, distribution):
    # Test that we always have an aggregation type set on the mirrored variable
    # if we assign to it in replica mode.
    def var_fn():
      v = variable_scope.variable(1.0, name="foo")
      return v

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())

      def model_fn():
        return mirrored_var.assign(5.0)

      # Assigning in replica context without an aggregation type must fail.
      with self.assertRaisesRegexp(
          ValueError, "You must specify an aggregation method to update a "
          "MirroredVariable in Replica Context. You can do so by"):
        self.evaluate(distribution.experimental_local_results(
            distribution.extended.call_for_each_replica(model_fn)))

  def testAssignMirroredVarReplicaContextWithSum(self, distribution):
    # Test that we don't reduce a non-per-replica value with the "sum"
    # aggregation type.
    def var_fn():
      v = variable_scope.variable(
          1.0, name="foo", aggregation=variable_scope.VariableAggregation.SUM)
      return v

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())

      def model_fn():
        return mirrored_var.assign(5.0)

      with self.assertRaisesRegexp(
          ValueError, "A non-DistributedValues value 5.0 cannot be reduced "
          "with the given reduce op ReduceOp.SUM."):
        self.evaluate(distribution.experimental_local_results(
            distribution.extended.call_for_each_replica(model_fn)))

  def testAssignMirroredVarCrossDeviceContext(self, distribution):
    """assign() in cross-replica context needs no aggregation type."""
    def var_fn():
      return variable_scope.variable(1.0, name="foo")

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(1.0, self.evaluate(mirrored_var))
      mirrored_var_result = self.evaluate(mirrored_var.assign(6.0))
      self.assertEqual(6.0, mirrored_var_result)

  def testAssignMirroredVarReplicaContext(self, distribution):
    """MEAN aggregation averages the per-replica assigned values."""
    def var_fn():
      return variable_scope.variable(
          1.0, name="foo", aggregation=variable_scope.VariableAggregation.MEAN)

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(1.0, self.evaluate(mirrored_var))

      def model_fn():
        value = math_ops.cast(
            ds_context.get_replica_context().replica_id_in_sync_group,
            mirrored_var.dtype)
        return mirrored_var.assign(value)

      self.evaluate(distribution.experimental_local_results(
          distribution.extended.call_for_each_replica(model_fn)))
      # Replicas assign 0.0 and 1.0; the MEAN is 0.5.
      self.assertEqual(0.5, self.evaluate(mirrored_var))

  def testAssignMirroredVarReplicaContextWithSingleValue(self, distribution):
    """All replicas assigning the same value yields that value under MEAN."""
    def var_fn():
      return variable_scope.variable(
          1.0, name="foo", aggregation=variable_scope.VariableAggregation.MEAN)

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(1.0, self.evaluate(mirrored_var))

      def model_fn():
        return mirrored_var.assign(5.0)

      self.evaluate(distribution.experimental_local_results(
          distribution.extended.call_for_each_replica(model_fn)))
      self.assertEqual(5.0, self.evaluate(mirrored_var))

  def testAssignAddMirroredVarCrossDeviceContext(self, distribution):
    """assign_add in cross-replica context updates every device copy."""
    def var_fn():
      return variable_scope.variable(1.0, name="foo")

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(1.0, self.evaluate(mirrored_var))

      # read_value == True
      mirrored_var_result = self.evaluate(
          mirrored_var.assign_add(6.0, read_value=True))
      self.assertEqual(7.0, mirrored_var_result)
      self.assertEqual(7.0, self.evaluate(mirrored_var.values[0]))
      self.assertEqual(7.0, self.evaluate(mirrored_var.values[1]))
      self.assertEqual(
          distribution.extended.worker_devices[0], mirrored_var.devices[0])
      self.assertEqual(
          distribution.extended.worker_devices[1], mirrored_var.devices[1])

      # read_value == False
      self.evaluate(mirrored_var.assign_add(2.0, read_value=False))
      self.assertEqual(9.0, self.evaluate(mirrored_var.values[0]))
      self.assertEqual(9.0, self.evaluate(mirrored_var.values[1]))
      self.assertEqual(
          distribution.extended.worker_devices[0], mirrored_var.devices[0])
      self.assertEqual(
          distribution.extended.worker_devices[1], mirrored_var.devices[1])

  def testAssignAddMirroredVarReplicaContext(self, distribution):
    """assign_add in replica context aggregates the added deltas with MEAN."""
    def var_fn():
      return variable_scope.variable(
          1.0, name="foo", aggregation=variable_scope.VariableAggregation.MEAN)

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(1.0, self.evaluate(mirrored_var))

      def model_fn():
        value = math_ops.cast(
            ds_context.get_replica_context().replica_id_in_sync_group,
            mirrored_var.dtype)
        return mirrored_var.assign_add(value)

      self.evaluate(distribution.experimental_local_results(
          distribution.extended.call_for_each_replica(model_fn)))
      # 1.0 + mean(0.0, 1.0) == 1.5.
      self.assertEqual(1.5, self.evaluate(mirrored_var))

  def testAssignAddMirroredVarReplicaContextWithSingleValue(self, distribution):
    def var_fn():
      return variable_scope.variable(
          1.0, name="foo", aggregation=variable_scope.VariableAggregation.MEAN)

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(1.0, self.evaluate(mirrored_var))

      def model_fn():
        return mirrored_var.assign_add(5.0)

      self.evaluate(distribution.experimental_local_results(
          distribution.extended.call_for_each_replica(model_fn)))
      self.assertEqual(6.0, self.evaluate(mirrored_var))

  def testAssignSubMirroredVarCrossDeviceContext(self, distribution):
    """assign_sub in cross-replica context updates every device copy."""
    def var_fn():
      return variable_scope.variable(5.0, name="foo")

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(5.0, self.evaluate(mirrored_var))
      mirrored_var_result = self.evaluate(mirrored_var.assign_sub(2.0))
      self.assertEqual(3.0, mirrored_var_result)
      self.assertEqual(3.0, self.evaluate(mirrored_var.values[0]))
      self.assertEqual(3.0, self.evaluate(mirrored_var.values[1]))
      self.assertEqual(
          distribution.extended.worker_devices[0], mirrored_var.devices[0])
      self.assertEqual(
          distribution.extended.worker_devices[1], mirrored_var.devices[1])

  def testAssignSubMirroredVarReplicaContext(self, distribution):
    """assign_sub in replica context aggregates the deltas with MEAN."""
    def var_fn():
      return variable_scope.variable(
          5.0, name="foo", aggregation=variable_scope.VariableAggregation.MEAN)

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(5.0, self.evaluate(mirrored_var))

      def model_fn():
        value = math_ops.cast(
            ds_context.get_replica_context().replica_id_in_sync_group,
            mirrored_var.dtype)
        return mirrored_var.assign_sub(value)

      self.evaluate(distribution.experimental_local_results(
          distribution.extended.call_for_each_replica(model_fn)))
      # 5.0 - mean(0.0, 1.0) == 4.5.
      self.assertEqual(4.5, self.evaluate(mirrored_var))

  def testAssignSubMirroredVarReplicaContextWithSingleValue(self, distribution):
    def var_fn():
      return variable_scope.variable(
          5.0, name="foo", aggregation=variable_scope.VariableAggregation.MEAN)

    with distribution.scope():
      mirrored_var = distribution.extended.call_for_each_replica(var_fn)
      self.assertIsInstance(mirrored_var, values.MirroredVariable)
      self.evaluate(variables.global_variables_initializer())
      self.assertEqual(5.0, self.evaluate(mirrored_var))

      def model_fn():
        return mirrored_var.assign_sub(1.0)

      self.evaluate(distribution.experimental_local_results(
          distribution.extended.call_for_each_replica(model_fn)))
      self.assertEqual(4.0, self.evaluate(mirrored_var))
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
        ],
        mode=["graph", "eager"]))
class MirroredAndSyncOnReadVariableInitializerTest(test.TestCase):
  """Initializer-op behavior of mirrored and sync-on-read variables."""

  def testAssignMirroredVarInitializer(self, distribution):
    # This test is not eager compatible since in eager variables are initialized
    # upon construction instead of once the initialization op is run.
    with context.graph_mode():
      def var_fn():
        v = variable_scope.variable(1.0, name="foo")
        return v

      with distribution.scope():
        mirrored_var = distribution.extended.call_for_each_replica(var_fn)
        self.assertIsInstance(mirrored_var, values.MirroredVariable)
        # Uninitialized until its initializer op runs.
        self.assertFalse(self.evaluate(mirrored_var.is_initialized()))
        self.evaluate(mirrored_var.initializer)
        self.assertTrue(self.evaluate(mirrored_var.is_initialized()))

  def testAssignReplicaLocalVarInitializer(self, distribution):
    # This test is not eager compatible since in eager variables are initialized
    # upon construction instead of once the initialization op is run.
    with context.graph_mode():
      def model_fn():
        v_sum = variable_scope.variable(
            1.0,
            synchronization=variable_scope.VariableSynchronization.ON_READ,
            aggregation=variable_scope.VariableAggregation.SUM)
        self.assertIsInstance(v_sum, values.SyncOnReadVariable)
        return v_sum

      with distribution.scope():
        sync_on_read_var = distribution.extended.call_for_each_replica(
            model_fn)
        self.assertIsInstance(sync_on_read_var, values.SyncOnReadVariable)
        self.assertFalse(self.evaluate(sync_on_read_var.is_initialized()))
        self.evaluate(sync_on_read_var.initializer)
        self.assertTrue(self.evaluate(sync_on_read_var.is_initialized()))
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
        ],
        mode=["graph", "eager"]))
class SyncOnReadVariableAssignTest(test.TestCase):
  """assign() semantics for sync-on-read variables (SUM and MEAN)."""

  def testAssignReplicaLocalVarSumAggregation(self, distribution):
    def model_fn():
      v_sum = variable_scope.variable(
          1.0,
          synchronization=variable_scope.VariableSynchronization.ON_READ,
          aggregation=variable_scope.VariableAggregation.SUM)
      return v_sum

    with distribution.scope():
      sync_on_read_var = distribution.extended.call_for_each_replica(model_fn)
      self.assertIsInstance(sync_on_read_var, values.SyncOnReadVariable)
      self.evaluate(variables.global_variables_initializer())
      # Each replica has a value of 1.0 assigned to it in replica context.
      # When we read the value using `read_var` we should see the SUM of each of
      # values on each of the replicas.
      self.assertEqual(2.0, self.evaluate(
          distribution.extended.read_var(sync_on_read_var)))
      # Assigning 6.0 in cross replica context will assign a value of
      # 6.0/num_replicas to each replica.
      tlv_ops = sync_on_read_var.assign(6.0)
      self.evaluate(tlv_ops)
      # On reading the sync on read var we should get the assigned value back.
      # The value on all the replicas are added before being returned by
      # `read_var`.
      self.assertEqual(6.0, self.evaluate(
          distribution.extended.read_var(sync_on_read_var)))

  def testAssignReplicaLocalVarMeanAggregation(self, distribution):
    def model_fn():
      v_sum = variable_scope.variable(
          1.0,
          synchronization=variable_scope.VariableSynchronization.ON_READ,
          aggregation=variable_scope.VariableAggregation.MEAN)
      return v_sum

    with distribution.scope():
      sync_on_read_var = distribution.extended.call_for_each_replica(model_fn)
      self.assertIsInstance(sync_on_read_var, values.SyncOnReadVariable)
      self.evaluate(variables.global_variables_initializer())
      # Each replica has a value of 1.0 assigned to it in replica context.
      # When we read the value using `read_var` we should see the MEAN of values
      # on all replicas which is the value assigned in replica context.
      self.assertEqual(1.0, self.evaluate(
          distribution.extended.read_var(sync_on_read_var)))
      tlv_ops = sync_on_read_var.assign(6.0)
      self.evaluate(tlv_ops)
      # On reading the sync on read var we should get the MEAN of all values
      # which is equal to the value assigned.
      self.assertEqual(6.0, self.evaluate(
          distribution.extended.read_var(sync_on_read_var)))
class MockModel(object):
  """Tiny callable model holding one or two variables for defun tests."""

  def __init__(self, two_variables=False):
    """Creates `dummy_var1` (1.25) and optionally `dummy_var2` (2.0)."""
    first_var = variable_scope.variable(1.25, name="dummy_var1")
    self.variables = [first_var]
    if two_variables:
      second_var = variable_scope.variable(2.0, name="dummy_var2")
      self.variables.append(second_var)

  def __call__(self, factor=2):
    """Returns factor * first variable, plus the second when present."""
    result = factor * self.variables[0]
    if len(self.variables) > 1:
      result += self.variables[1]
    return result
class MiniModel(keras_training.Model):
  """Minimal model for mnist.

  Useful for testing and debugging on slow TPU simulators.
  """

  def __init__(self):
    super(MiniModel, self).__init__(name="")
    # Single dense unit; "ones" initializers make outputs deterministic.
    self.fc = keras_core.Dense(1, name="fc", kernel_initializer="ones",
                               bias_initializer="ones")

  def call(self, inputs, training=True):
    # NOTE: the passed-in `inputs` is ignored and replaced with a fixed
    # all-ones batch of shape [1, 10].
    inputs = array_ops.ones([1, 10])
    return self.fc(inputs)
@combinations.generate(
    combinations.combine(
        distribution=[
            strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
        ],
        mode=["graph", "eager"]))
class MirroredStrategyDefunTest(test.TestCase):
  """Tests defun-wrapped computations under MirroredStrategy."""

  def _call_and_check(self, distribution, model_fn, inputs, expected_result,
                      defuns, two_variables=False):
    """Runs `model_fn` per replica and checks results and defun traces.

    Args:
      distribution: the strategy under test.
      model_fn: function called per replica with (mock_model, *inputs).
      inputs: extra positional args for `model_fn`.
      expected_result: expected per-replica result (scalar or PerReplica).
      defuns: list of `Function`s whose concrete functions are inspected.
      two_variables: whether MockModel should create a second variable.
    """
    cpu_dev = device_util.canonicalize("CPU:0")
    gpu_dev = device_util.canonicalize("GPU:0")
    devices = [cpu_dev, gpu_dev]

    with distribution.scope():
      mock_model = MockModel(two_variables)
      self.evaluate(variables.global_variables_initializer())

      result = distribution.extended.call_for_each_replica(
          model_fn, args=[mock_model] + inputs)
      for r in range(len(devices)):
        device_result = values.select_replica(r, result)
        device_expected_result = values.select_replica(r, expected_result)
        self.assertAllClose(device_expected_result,
                            self.evaluate(device_result))

      for defun in defuns:
        # `Function`s are specialized to the current device stack, so
        # call_for_each has one trace per device. To check that the expected set
        # of variables was accessed on each trace, we first retrieve each
        # device-specific graph function.
        per_replica_graph_functions = (
            distribution.extended.call_for_each_replica(
                defun.get_concrete_function, args=[mock_model] + inputs))
        for i in range(len(devices)):
          graph_function = per_replica_graph_functions.values[i]
          # TODO(b/129555712): re-enable an assertion here that the two sets of
          # variables are the same.
          # self.assertEqual(set(graph_function.graph.variables),
          #                  set(mock_model.variables))
          del graph_function

  def testVariableInDefun(self, distribution):
    """A defun that reads a variable works per replica."""
    @function.defun
    def times_two(mock_model):
      return mock_model()

    def model_fn(mock_model):
      return times_two(mock_model)

    # MockModel's variable is 1.25; default factor 2 gives 2.5.
    self._call_and_check(distribution, model_fn, [], 2.5, [times_two])

  def testVariableInNestedDefun(self, distribution):
    """Variable access through one level of defun nesting."""
    @function.defun
    def times_two(mock_model):
      return mock_model()

    @function.defun
    def two_x_plus_one(mock_model):
      return times_two(mock_model) + 1

    def model_fn(mock_model):
      return two_x_plus_one(mock_model)

    self._call_and_check(distribution, model_fn, [], 3.5,
                         [times_two, two_x_plus_one])

  def testTwoVariablesInNestedDefun(self, distribution):
    """Variable access with two variables through nested defuns."""
    @function.defun
    def fn1(mock_model):
      return mock_model()

    @function.defun
    def fn2(mock_model):
      return fn1(mock_model) + 1

    def model_fn(mock_model):
      return fn2(mock_model)

    # 2 * 1.25 + 2.0 + 1 == 5.5.
    self._call_and_check(distribution, model_fn, [], 5.5, [fn1, fn2],
                         two_variables=True)

  def testGradientTapeOverNestedDefuns(self, distribution):
    """Gradients through nested defuns w.r.t. both variables."""
    @function.defun
    def fn1(mock_model):
      return mock_model()

    @function.defun
    def fn2(mock_model):
      return fn1(mock_model) + 1

    def model_fn(mock_model):
      with backprop.GradientTape(persistent=True) as gtape:
        result = fn2(mock_model)
      grads = gtape.gradient(result,
                             [v._get() for v in mock_model.variables])
      return grads

    # d(2*v1 + v2 + 1)/dv1 == 2, d/dv2 == 1.
    self._call_and_check(distribution, model_fn, [], [2.0, 1.0], [fn1, fn2],
                         two_variables=True)

  def testPassPerReplica(self, distribution):
    """A PerReplica argument is split across replicas."""
    @function.defun
    def fn1(mock_model, factor):
      return mock_model(factor)

    factors = values.PerReplica((5.0, 3.0))
    expected_result = values.PerReplica((5.0 * 1.25, 3.0 * 1.25))
    self._call_and_check(distribution, fn1, [factors], expected_result, [fn1])

  def testTrain(self, distribution):
    """One SGD step through a defun-wrapped keras model, mirrored."""
    with distribution.scope():
      mock_model = MiniModel()
      mock_model.call = function.defun(mock_model.call)

      def loss_fn(ctx):
        del ctx
        return mock_model(array_ops.ones([1, 10]))

      gradients_fn = backprop.implicit_grad(loss_fn)
      gradients_fn = optimizer_lib.get_filtered_grad_fn(gradients_fn)
      grads_and_vars = distribution.extended.call_for_each_replica(
          gradients_fn, args=(None,))

      optimizer = gradient_descent.GradientDescentOptimizer(0.25)
      update_ops = optimizer._distributed_apply(distribution, grads_and_vars)  # pylint: disable=protected-access

      if not context.executing_eagerly():
        self.evaluate(variables.global_variables_initializer())
        self.evaluate(update_ops)

      updated_var_values = self.evaluate(mock_model.variables)
      # All variables start at 1.0 and get two updates of 0.25.
      self.assertAllEqual(0.5 * np.ones([10, 1]), updated_var_values[0])
      self.assertAllEqual([0.5], updated_var_values[1])
@combinations.generate(
    combinations.combine(
        distribution=[
            combinations.NamedDistribution(
                "Mirrored",
                # pylint: disable=g-long-lambda
                lambda: mirrored_strategy.MirroredStrategy(
                    devices=mirrored_strategy.all_local_devices(),
                    cross_device_ops=cross_device_ops_lib.MultiWorkerAllReduce([
                        "/job:worker/task:0", "/job:worker/task:1"
                    ], context.num_gpus())),
                required_gpus=1)
        ],
        mode=["graph"]))
class MultiWorkerMirroredStrategyTest(
    multi_worker_test_base.MultiWorkerTestBase,
    strategy_test_lib.DistributionTestBase):
  """MirroredStrategy tests across a two-worker in-process cluster."""

  def _configure_distribution_strategy(self, distribution):
    # Attach a two-worker cluster spec to the strategy before each test.
    cluster_spec = server_lib.ClusterSpec({
        "worker": ["/job:worker/task:0", "/job:worker/task:1"]
    })
    distribution.configure(cluster_spec=cluster_spec)

  def test_num_replicas_in_sync(self, distribution):
    self._configure_distribution_strategy(distribution)
    # We calculate the total number of gpus across the workers(2) specified in
    # the cluster spec.
    self.assertEqual(context.num_gpus() * 2, distribution.num_replicas_in_sync)

  def testMinimizeLossGraph(self, distribution):
    self._configure_distribution_strategy(distribution)
    self._test_minimize_loss_graph(distribution, learning_rate=0.05)

  def testDeviceScope(self, distribution):
    """Test the device scope of multi-worker MirroredStrategy."""
    self._configure_distribution_strategy(distribution)
    with distribution.scope():
      a = constant_op.constant(1.)
      with ops.device("/cpu:0"):
        b = constant_op.constant(1.)
      # Ops default to worker 0; explicit /cpu:0 pins the device type too.
      self.assertEqual(a.device, "/job:worker/task:0")
      self.assertEqual(b.device, "/job:worker/task:0/device:CPU:0")

  def testMakeInputFnIteratorWithDataset(self, distribution):
    self._configure_distribution_strategy(distribution)
    dataset_fn = lambda: dataset_ops.Dataset.range(100)
    num_gpus = context.num_gpus()
    num_workers = 2

    # Every worker sees the same values; per step each GPU on each worker
    # receives consecutive elements.
    expected_values = [[i+j for j in range(num_gpus)] * num_workers
                       for i in range(0, 100, num_gpus)]

    with context.graph_mode(), self.cached_session() as sess:
      # `expected_input_pipeline_id` is None because the input_fn will be called
      # multiple times, each with a different input_pipeline_id.
      input_fn = self._input_fn_to_test_input_context(
          dataset_fn,
          expected_num_replicas_in_sync=num_workers*num_gpus,
          expected_num_input_pipelines=num_workers,
          expected_input_pipeline_id=None)
      iterator = distribution.make_input_fn_iterator(input_fn)
      self._test_input_fn_iterator(
          iterator, distribution.extended.worker_devices, expected_values, sess)

  def testMakeInputFnIteratorWithCallable(self, distribution):
    self._configure_distribution_strategy(distribution)

    def fn():
      dataset = dataset_ops.Dataset.range(100)
      it = dataset_ops.make_one_shot_iterator(dataset)
      return it.get_next

    num_gpus = context.num_gpus()
    num_workers = 2

    expected_values = []
    for i in range(0, 100, num_gpus):
      expected_values.append([i+j for j in range(num_gpus)] * num_workers)

    with context.graph_mode(), self.cached_session() as sess:
      # `expected_input_pipeline_id` is None because the input_fn will be called
      # multiple times, each with a different input_pipeline_id.
      input_fn = self._input_fn_to_test_input_context(
          fn,
          expected_num_replicas_in_sync=num_workers*num_gpus,
          expected_num_input_pipelines=num_workers,
          expected_input_pipeline_id=None)
      iterator = distribution.make_input_fn_iterator(input_fn)
      # A one-shot iterator cannot be reinitialized and ordering is not
      # guaranteed, hence the relaxed flags.
      self._test_input_fn_iterator(
          iterator, distribution.extended.worker_devices, expected_values, sess,
          test_reinitialize=False, ignore_order=True)

  def testUpdateConfigProto(self, distribution):
    distribution.configure(cluster_spec={"worker": ["fake1", "fake2"]})

    config_proto = config_pb2.ConfigProto()
    new_config = distribution.update_config_proto(config_proto)

    # Verify isolate_session_state
    self.assertTrue(new_config.isolate_session_state)
@combinations.generate(
    combinations.combine(
        distribution=[
            combinations.NamedDistribution(
                "Mirrored",
                # pylint: disable=g-long-lambda
                lambda: mirrored_strategy.MirroredStrategy(
                    devices=["/job:worker/task:0/gpu:{}".format(
                        i) for i in range(context.num_gpus())]),
                required_gpus=1)
        ],
        mode=["graph"]))
class RemoteSingleWorkerMirroredStrategyGraph(
    multi_worker_test_base.SingleWorkerTestBaseGraph,
    strategy_test_lib.RemoteSingleWorkerMirroredStrategyBase):
  """Graph-mode MirroredStrategy tests against a remote single worker.

  Each test method delegates to the shared implementation provided by
  `RemoteSingleWorkerMirroredStrategyBase`.
  """

  def _get_num_gpus(self):
    # One replica per local GPU.
    return context.num_gpus()

  def testNumReplicasInSync(self, distribution):
    self._testNumReplicasInSync(distribution)

  def testMinimizeLoss(self, distribution):
    self._testMinimizeLoss(distribution)

  def testDeviceScope(self, distribution):
    self._testDeviceScope(distribution)

  def testMakeInputFnIteratorWithDataset(self, distribution):
    self._testMakeInputFnIteratorWithDataset(distribution)

  def testMakeInputFnIteratorWithCallable(self, distribution):
    self._testMakeInputFnIteratorWithCallable(distribution)
class MultiWorkerMirroredStrategyTestWithChief(
    multi_worker_test_base.MultiWorkerTestBase,
    strategy_test_lib.DistributionTestBase):
  """MirroredStrategy tests on a cluster that includes a chief task."""

  @classmethod
  def setUpClass(cls):
    """Create a local cluster with 2 workers and 1 chief."""
    cls._cluster_spec = multi_worker_test_base.create_in_process_cluster(
        num_workers=2, num_ps=0, has_chief=True)
    cls._default_target = "grpc://" + cls._cluster_spec["chief"][0]

  def _make_cross_device_ops(self):
    # All-reduce spanning the chief plus both workers.
    return cross_device_ops_lib.MultiWorkerAllReduce(
        ["/job:chief/task:0", "/job:worker/task:0", "/job:worker/task:1"],
        context.num_gpus())

  def testMinimizeLossGraph(self):
    with context.graph_mode():
      strategy = mirrored_strategy.MirroredStrategy(
          cross_device_ops=self._make_cross_device_ops())
      strategy.configure(cluster_spec=self._cluster_spec)
      self._test_minimize_loss_graph(strategy, learning_rate=0.05)

  def testMinimizeLossGraphMirroredStrategy(self):
    with context.graph_mode():
      strategy = mirrored_strategy.MirroredStrategy(
          mirrored_strategy.all_local_devices(),
          cross_device_ops=self._make_cross_device_ops())
      strategy.configure(cluster_spec=self._cluster_spec)
      self._test_minimize_loss_graph(strategy, learning_rate=0.05)

  def testMinimizeLossGraphMirroredStrategyWithOneNode(self):
    """Chief-only cluster: checks which cross-device ops get inferred."""
    with context.graph_mode():
      cluster_spec = {}
      cluster_spec["chief"] = self._cluster_spec["chief"]
      tf_config = {"cluster": cluster_spec}
      with test.mock.patch.dict("os.environ",
                                {"TF_CONFIG": json.dumps(tf_config)}):
        strategy = mirrored_strategy.MirroredStrategy()
        # NCCL is inferred when GPUs are present; otherwise reduce-to-one.
        if context.num_gpus() > 0:
          self.assertIsInstance(strategy.extended._inferred_cross_device_ops,
                                cross_device_ops_lib.NcclAllReduce)
        else:
          self.assertIsInstance(strategy.extended._inferred_cross_device_ops,
                                cross_device_ops_lib.ReductionToOneDevice)
      # The loss-minimization half of this test is disabled pending the
      # referenced bug; skipTest makes the line below unreachable for now.
      self.skipTest("b/130551176, run the following once fixed.")
      self._test_minimize_loss_graph(strategy, learning_rate=0.05)

  def testInitializeFromTFConfig(self):
    with context.graph_mode():
      tf_config = {"cluster": self._cluster_spec}
      with test.mock.patch.dict("os.environ",
                                {"TF_CONFIG": json.dumps(tf_config)}):
        strategy = mirrored_strategy.MirroredStrategy(
            cross_device_ops=self._make_cross_device_ops())
        # 3 tasks (chief + 2 workers), each contributing max(num_gpus, 1)
        # replicas.
        self.assertEqual(
            max(context.num_gpus(), 1) * 3, strategy.num_replicas_in_sync)

  def testSummaryForReplicaZeroOnly(self):
    with context.graph_mode():
      strategy = mirrored_strategy.MirroredStrategy(
          mirrored_strategy.all_local_devices(),
          cross_device_ops=self._make_cross_device_ops())
      strategy.configure(cluster_spec=self._cluster_spec)
      self._test_summary_for_replica_zero_only(strategy)
class MirroredVariableStopGradientTest(test.TestCase, parameterized.TestCase):
  """Checks that a mirrored variable works as a `stop_gradients` target."""
  @combinations.generate(
      combinations.combine(
          distribution=[
              strategy_combinations.mirrored_strategy_with_one_cpu,
              strategy_combinations.mirrored_strategy_with_one_gpu,
          ],
          mode=["graph"]))
  def testMirroredVariableAsStopGradient(self, distribution):
    with distribution.scope():
      inp = constant_op.constant(1.0)
      x = variables.Variable(1.0)
      y = inp*x
      grads = gradients.gradients(x, y, stop_gradients=x)
      # The gradient path is cut at the stopped variable, so None is expected.
      self.assertIsNone(grads[0])
class FunctionTest(test.TestCase):
  """Tests tf.function behavior under MirroredStrategy."""
  def testBackwardFuctionDevicePlacement(self):
    # Verifies that the backward function of a concrete forward function runs
    # on every replica device, not only the device it was traced on.
    if context.num_gpus() < 1:
      self.skipTest("At least one GPU is required.")
    devices = [device_util.resolve("/device:GPU:0"),
               device_util.resolve("/device:CPU:0")]
    ms = mirrored_strategy.MirroredStrategy(devices)
    with ms.scope():
      w = variable_scope.variable([1.5], name="w")
      b = variable_scope.variable([0.5], name="b")
    @def_function.function
    def forward(x, w, b):
      return x * w + b
    x = constant_op.constant([1.0], name="x_useless")
    # Trace once against the primary components of the mirrored variables.
    concrete_forward = forward.get_concrete_function(x, w.primary, b.primary)
    with ms.scope():
      def replica_fn():
        with backprop.GradientTape() as t:
          x = constant_op.constant([1.0], name="x")
          loss = concrete_forward(x, w._get(), b._get()) - [1.0]
          return t.gradient(loss, [w, b])
      def step_fn():
        return ms.experimental_run_v2(replica_fn)
      # Capture run metadata so the partition graphs can be inspected below.
      context.enable_run_metadata()
      g1, g2 = step_fn()
      run_metadata = context.export_run_metadata()
      context.disable_run_metadata()
      self.assertEqual(self.evaluate(g1.primary), 1.0)
      self.assertEqual(self.evaluate(g2.primary), 1.0)
      # Verify that this node runs on both devices.
      node_name = "gradients_mul_grad_mul_1_x"
      devices_for_this_node = set()
      for partition_graph in run_metadata.partition_graphs:
        for node in partition_graph.node:
          if node.name == node_name:
            devices_for_this_node.add(node.device)
      self.assertSetEqual(devices_for_this_node, set(devices))
  def testFuctionPreservesAutoGraph(self):
    # f() asserts it executes inside AutoGraph-generated code when called
    # from a tf.function run by the strategy.
    config.set_logical_device_configuration(
        config.list_physical_devices("CPU")[0],
        [context.LogicalDeviceConfiguration()] * 2)
    ms = mirrored_strategy.MirroredStrategy()
    def f():
      self.assertTrue(converter_testing.is_inside_generated_code())
      return 1
    with ms.scope():
      @def_function.function
      def replica_fn():
        return f()
      ms.experimental_run_v2(replica_fn)
def _replica_id():
  """Return the current replica id, always wrapped as a Tensor."""
  rid = ds_context.get_replica_context().replica_id_in_sync_group
  if isinstance(rid, ops.Tensor):
    return rid
  return constant_op.constant(rid)
def _replica_id_as_int():
  """Return the current replica id as a plain Python value."""
  rid = ds_context.get_replica_context().replica_id_in_sync_group
  if not isinstance(rid, ops.Tensor):
    return rid
  return tensor_util.constant_value(rid)
if __name__ == "__main__":
  # Standard TensorFlow test entry point.
  test.main()
| 39.237312 | 113 | 0.715956 |
41f0a016fcc4f446dc414074472959db2aff82eb | 46,721 | py | Python | shotgun/external_libs/libevent/event_rpcgen.py | tqrg-bot/rubinius | beb0fe3968ea7ff3c09e192605eef066136105c8 | [
"BSD-3-Clause"
] | null | null | null | shotgun/external_libs/libevent/event_rpcgen.py | tqrg-bot/rubinius | beb0fe3968ea7ff3c09e192605eef066136105c8 | [
"BSD-3-Clause"
] | null | null | null | shotgun/external_libs/libevent/event_rpcgen.py | tqrg-bot/rubinius | beb0fe3968ea7ff3c09e192605eef066136105c8 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
#
# Copyright (c) 2005 Niels Provos <provos@citi.umich.edu>
# All rights reserved.
#
# Generates marshaling code based on libevent.
import sys
import re
#
_NAME = "event_rpcgen.py"
_VERSION = "0.1"
# Regex fragment matching a legal struct identifier in the .rpc language.
_STRUCT_RE = '[a-z][a-z_0-9]*'
# Globals
line_count = 0  # current input line number, used in error messages
# NOTE(review): pattern is anchored at '^', so only whitespace at the start
# of the string is collapsed by NormalizeLine -- confirm that is intended.
white = re.compile(r'^\s+')
cppcomment = re.compile(r'\/\/.*$')  # C++-style // comment to end of line
headerdirect = []  # preprocessor lines copied verbatim into the header
cppdirect = []     # preprocessor lines copied verbatim into the .c file
# Holds everything that makes a struct
class Struct:
    """One 'struct' parsed from the .rpc description.

    Holds the struct's entries (fields) and emits the corresponding C code:
    tag enum, forward/struct declarations, accessors, and the marshal /
    unmarshal / clear / free / complete implementations.
    """
    def __init__(self, name):
        self._name = name
        self._entries = []
        self._tags = {}  # tag number -> entry name, for duplicate detection
        print >>sys.stderr, ' Created struct: %s' % name
    def AddEntry(self, entry):
        # Reject duplicate tag numbers within one struct.
        if self._tags.has_key(entry.Tag()):
            print >>sys.stderr, ( 'Entry "%s" duplicates tag number '
                                  '%d from "%s" around line %d' ) % (
                entry.Name(), entry.Tag(),
                self._tags[entry.Tag()], line_count)
            sys.exit(1)
        self._entries.append(entry)
        self._tags[entry.Tag()] = entry.Name()
        print >>sys.stderr, ' Added entry: %s' % entry.Name()
    def Name(self):
        return self._name
    def EntryTagName(self, entry):
        """Creates the name inside an enumeration for distinguishing data
        types."""
        name = "%s_%s" % (self._name, entry.Name())
        return name.upper()
    def PrintIdented(self, file, ident, code):
        """Takes an array, add indentation to each entry and prints it."""
        for entry in code:
            print >>file, '%s%s' % (ident, entry)
    def PrintTags(self, file):
        """Prints the tag definitions for a structure."""
        print >>file, '/* Tag definition for %s */' % self._name
        print >>file, 'enum %s_ {' % self._name.lower()
        for entry in self._entries:
            print >>file, ' %s=%d,' % (self.EntryTagName(entry),
                                       entry.Tag())
        print >>file, ' %s_MAX_TAGS' % (self._name.upper())
        print >>file, '};\n'
    def PrintForwardDeclaration(self, file):
        # Emit 'struct foo;' so generated headers can reference each other.
        print >>file, 'struct %s;' % self._name
    def PrintDeclaration(self, file):
        # Emits the C struct definition plus prototypes for all generated
        # functions (new/free/clear/marshal/unmarshal/complete + accessors).
        print >>file, '/* Structure declaration for %s */' % self._name
        print >>file, 'struct %s {' % self._name
        for entry in self._entries:
            dcl = entry.Declaration()
            dcl.extend(
                entry.AssignDeclaration('(*%s_assign)' % entry.Name()))
            dcl.extend(
                entry.GetDeclaration('(*%s_get)' % entry.Name()))
            if entry.Array():
                dcl.extend(
                    entry.AddDeclaration('(*%s_add)' % entry.Name()))
            self.PrintIdented(file, ' ', dcl)
        print >>file, ''
        # One '<name>_set' flag per entry records whether it has a value.
        for entry in self._entries:
            print >>file, ' uint8_t %s_set;' % entry.Name()
        print >>file, '};\n'
        print >>file, (
            'struct %s *%s_new();\n' % (self._name, self._name) +
            'void %s_free(struct %s *);\n' % (self._name, self._name) +
            'void %s_clear(struct %s *);\n' % (self._name, self._name) +
            'void %s_marshal(struct evbuffer *, const struct %s *);\n' % (
            self._name, self._name) +
            'int %s_unmarshal(struct %s *, struct evbuffer *);\n' % (
            self._name, self._name) +
            'int %s_complete(struct %s *);' % (self._name, self._name)
            )
        print >>file, ('void evtag_marshal_%s(struct evbuffer *, uint8_t, '
                       'const struct %s *);') % ( self._name, self._name)
        print >>file, ('int evtag_unmarshal_%s(struct evbuffer *, uint8_t, '
                       'struct %s *);') % ( self._name, self._name)
        # Write a setting function of every variable
        for entry in self._entries:
            self.PrintIdented(file, '', entry.AssignDeclaration(
                entry.AssignFuncName()))
            self.PrintIdented(file, '', entry.GetDeclaration(
                entry.GetFuncName()))
            if entry.Array():
                self.PrintIdented(file, '', entry.AddDeclaration(
                    entry.AddFuncName()))
        print >>file, '/* --- %s done --- */\n' % self._name
    def PrintCode(self, file):
        # Emits the full C implementation for this struct.
        print >>file, ('/*\n'
                       ' * Implementation of %s\n'
                       ' */\n') % self._name
        # Creation
        print >>file, ( 'struct %s *\n' % self._name +
                        '%s_new()\n' % self._name +
                        '{\n'
                        ' struct %s *tmp;\n' % self._name +
                        ' if ((tmp = malloc(sizeof(struct %s))) == NULL) {\n'
                        ' event_warn("%%s: malloc", __func__);\n'
                        ' return (NULL);\n' % self._name +
                        ' }'
                        )
        for entry in self._entries:
            self.PrintIdented(file, ' ', entry.CodeNew('tmp'))
            print >>file, ' tmp->%s_set = 0;\n' % entry.Name()
        print >>file, (' return (tmp);\n'
                       '}\n')
        # Adding
        for entry in self._entries:
            if entry.Array():
                self.PrintIdented(file, '', entry.CodeAdd())
            print >>file, ''
        # Assigning
        for entry in self._entries:
            self.PrintIdented(file, '', entry.CodeAssign())
            print >>file, ''
        # Getting
        for entry in self._entries:
            self.PrintIdented(file, '', entry.CodeGet())
            print >>file, ''
        # Clearing
        print >>file, ( 'void\n'
                        '%s_clear(struct %s *tmp)\n' % (
            self._name, self._name)+
                        '{'
                        )
        for entry in self._entries:
            self.PrintIdented(file, ' ', entry.CodeClear('tmp'))
        print >>file, '}\n'
        # Freeing
        print >>file, ( 'void\n'
                        '%s_free(struct %s *tmp)\n' % (
            self._name, self._name)+
                        '{'
                        )
        for entry in self._entries:
            self.PrintIdented(file, ' ', entry.CodeFree('tmp'))
        print >>file, (' free(tmp);\n'
                       '}\n')
        # Marshaling
        print >>file, ('void\n'
                       '%s_marshal(struct evbuffer *evbuf, '
                       'const struct %s *tmp)' % (self._name, self._name) +
                       '{')
        for entry in self._entries:
            indent = ' '
            # Optional entries do not have to be set
            if entry.Optional():
                indent += ' '
                print >>file, ' if (tmp->%s_set) {' % entry.Name()
            self.PrintIdented(
                file, indent,
                entry.CodeMarshal('evbuf', self.EntryTagName(entry), 'tmp'))
            if entry.Optional():
                print >>file, ' }'
        print >>file, '}\n'
        # Unmarshaling
        print >>file, ('int\n'
                       '%s_unmarshal(struct %s *tmp, '
                       ' struct evbuffer *evbuf)\n' % (
            self._name, self._name) +
                       '{\n'
                       ' uint8_t tag;\n'
                       ' while (EVBUFFER_LENGTH(evbuf) > 0) {\n'
                       ' if (evtag_peek(evbuf, &tag) == -1)\n'
                       ' return (-1);\n'
                       ' switch (tag) {\n'
                       )
        for entry in self._entries:
            print >>file, ' case %s:\n' % self.EntryTagName(entry)
            if not entry.Array():
                # Scalar entries may appear at most once in the stream.
                print >>file, (
                    ' if (tmp->%s_set)\n'
                    ' return (-1);'
                    ) % (entry.Name())
            self.PrintIdented(
                file, ' ',
                entry.CodeUnmarshal('evbuf',
                                    self.EntryTagName(entry), 'tmp'))
            print >>file, ( ' tmp->%s_set = 1;\n' % entry.Name() +
                            ' break;\n' )
        print >>file, ( ' default:\n'
                        ' return -1;\n'
                        ' }\n'
                        ' }\n' )
        # Check if it was decoded completely
        print >>file, ( ' if (%s_complete(tmp) == -1)\n' % self._name +
                        ' return (-1);')
        # Successfully decoded
        print >>file, ( ' return (0);\n'
                        '}\n')
        # Checking if a structure has all the required data
        print >>file, (
            'int\n'
            '%s_complete(struct %s *msg)\n' % (self._name, self._name) +
            '{' )
        for entry in self._entries:
            self.PrintIdented(
                file, ' ',
                entry.CodeComplete('msg'))
        print >>file, (
            ' return (0);\n'
            '}\n' )
        # Complete message unmarshaling
        print >>file, (
            'int\n'
            'evtag_unmarshal_%s(struct evbuffer *evbuf, uint8_t need_tag, '
            ' struct %s *msg)'
            ) % (self._name, self._name)
        print >>file, (
            '{\n'
            ' uint8_t tag;\n'
            ' int res = -1;\n'
            '\n'
            ' struct evbuffer *tmp = evbuffer_new();\n'
            '\n'
            ' if (evtag_unmarshal(evbuf, &tag, tmp) == -1'
            ' || tag != need_tag)\n'
            ' goto error;\n'
            '\n'
            ' if (%s_unmarshal(msg, tmp) == -1)\n'
            ' goto error;\n'
            '\n'
            ' res = 0;\n'
            '\n'
            ' error:\n'
            ' evbuffer_free(tmp);\n'
            ' return (res);\n'
            '}\n' ) % self._name
        # Complete message marshaling
        print >>file, (
            'void\n'
            'evtag_marshal_%s(struct evbuffer *evbuf, uint8_t tag, '
            'const struct %s *msg)\n' % (self._name, self._name) +
            '{\n'
            ' struct evbuffer *_buf = evbuffer_new();\n'
            ' assert(_buf != NULL);\n'
            ' evbuffer_drain(_buf, -1);\n'
            ' %s_marshal(_buf, msg);\n' % self._name +
            ' evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), '
            'EVBUFFER_LENGTH(_buf));\n'
            ' evbuffer_free(_buf);\n'
            '}\n' )
class Entry:
    """Base class for one field of a struct.

    Stores the wire tag, name and C type of the field and generates the
    generic accessor scaffolding.  Subclasses override the Code* methods to
    emit type-specific marshal/unmarshal/clear/free code.
    """
    def __init__(self, type, name, tag):
        self._type = type
        self._name = name
        self._tag = int(tag)
        self._ctype = type          # C type used in generated code
        self._optional = 0
        self._can_be_array = 0      # only array-capable subclasses set this
        self._array = 0
        self._line_count = -1       # input line, for error reporting
        self._struct = None         # owning Struct, set via SetStruct()
    def SetStruct(self, struct):
        self._struct = struct
    def LineCount(self):
        assert self._line_count != -1
        return self._line_count
    def SetLineCount(self, number):
        self._line_count = number
    def Array(self):
        return self._array
    def Optional(self):
        return self._optional
    def Tag(self):
        return self._tag
    def Name(self):
        return self._name
    def Type(self):
        return self._type
    def MakeArray(self, yes=1):
        self._array = yes
    def MakeOptional(self):
        self._optional = 1
    def GetFuncName(self):
        return '%s_%s_get' % (self._struct.Name(), self._name)
    def GetDeclaration(self, funcname):
        # Prototype of the getter.
        code = [ 'int %s(struct %s *, %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def CodeGet(self):
        # Getter: fails with -1 when the field has not been set.
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s *value)' % (
            self._struct.Name(), self._name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_set != 1)' % self._name,
                 ' return (-1);',
                 ' *value = msg->%s_data;' % self._name,
                 ' return (0);',
                 '}' ]
        return code
    def AssignFuncName(self):
        return '%s_%s_assign' % (self._struct.Name(), self._name)
    def AddFuncName(self):
        return '%s_%s_add' % (self._struct.Name(), self._name)
    def AssignDeclaration(self, funcname):
        # Prototype of the setter.
        code = [ 'int %s(struct %s *, const %s);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def CodeAssign(self):
        # Setter: stores the value and marks the field as set.
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, const %s value)' % (
            self._struct.Name(), self._name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' msg->%s_set = 1;' % self._name,
                 ' msg->%s_data = value;' % self._name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeClear(self, structname):
        code = [ '%s->%s_set = 0;' % (structname, self.Name()) ]
        return code
    def CodeComplete(self, structname):
        # Required (non-optional) fields must have been set.
        if self.Optional():
            return []
        code = [ 'if (!%s->%s_set)' % (structname, self.Name()),
                 ' return (-1);' ]
        return code
    def CodeFree(self, name):
        # Plain value types own no heap memory.
        return []
    def CodeNew(self, name):
        # Wire up the per-field function pointers in <struct>_new().
        code = [ '%s->%s_assign = %s_%s_assign;' % (
            name, self._name, self._struct.Name(), self._name ),
                 '%s->%s_get = %s_%s_get;' % (
            name, self._name, self._struct.Name(), self._name ),
                 ]
        if self.Array():
            code.append(
                '%s->%s_add = %s_%s_add;' % (
                name, self._name, self._struct.Name(), self._name ) )
        return code
    def Verify(self):
        # Sanity checks performed after parsing; all failures are fatal.
        if self.Array() and not self._can_be_array:
            print >>sys.stderr, (
                'Entry "%s" cannot be created as an array '
                'around line %d' ) % (self._name, self.LineCount())
            sys.exit(1)
        if not self._struct:
            print >>sys.stderr, (
                'Entry "%s" does not know which struct it belongs to '
                'around line %d' ) % (self._name, self.LineCount())
            sys.exit(1)
        if self._optional and self._array:
            print >>sys.stderr, ( 'Entry "%s" has illegal combination of '
                                  'optional and array around line %d' ) % (
                self._name, self.LineCount() )
            sys.exit(1)
class EntryBytes(Entry):
    """Fixed-length byte array field ('bytes name[len]').

    The data lives inline in the struct as 'uint8_t name_data[len]'.
    """
    def __init__(self, type, name, tag, length):
        # Init base class
        Entry.__init__(self, type, name, tag)
        self._length = length
        self._ctype = 'uint8_t'
    def GetDeclaration(self, funcname):
        # Getter hands out a pointer to the inline array.
        code = [ 'int %s(struct %s *, %s **);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def Declaration(self):
        dcl = ['uint8_t %s_data[%s];' % (self._name, self._length)]
        return dcl
    def CodeGet(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s **value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_set != 1)' % name,
                 ' return (-1);',
                 ' *value = msg->%s_data;' % name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeAssign(self):
        # Setter copies exactly self._length bytes into the inline array.
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, const %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' msg->%s_set = 1;' % name,
                 ' memcpy(msg->%s_data, value, %s);' % (
            name, self._length),
                 ' return (0);',
                 '}' ]
        return code
    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = [ 'if (evtag_unmarshal_fixed(%s, %s, ' % (buf, tag_name) +
                 '%s->%s_data, ' % (var_name, self._name) +
                 'sizeof(%s->%s_data)) == -1) {' % (
            var_name, self._name),
                 ' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                 ' return (-1);',
                 '}'
                 ]
        return code
    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal(%s, %s, %s->%s_data, sizeof(%s->%s_data));' % (
            buf, tag_name, var_name, self._name, var_name, self._name )]
        return code
    def CodeClear(self, structname):
        # Also zero the inline buffer so stale data cannot leak.
        code = [ '%s->%s_set = 0;' % (structname, self.Name()),
                 'memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
            structname, self._name, structname, self._name)]
        return code
    def CodeNew(self, name):
        code  = ['memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
            name, self._name, name, self._name)]
        code.extend(Entry.CodeNew(self, name))
        return code
    def Verify(self):
        # Fixed-length bytes require an explicit length suffix.
        if not self._length:
            print >>sys.stderr, 'Entry "%s" needs a length around line %d' % (
                self._name, self.LineCount() )
            sys.exit(1)
        Entry.Verify(self)
class EntryInt(Entry):
    """32-bit unsigned integer field ('int name')."""
    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)
        self._ctype = 'uint32_t'
    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_unmarshal_int(%s, %s, &%s->%s_data) == -1) {' % (
            buf, tag_name, var_name, self._name),
                ' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                ' return (-1);',
                '}' ]
        return code
    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_int(%s, %s, %s->%s_data);' % (
            buf, tag_name, var_name, self._name)]
        return code
    def Declaration(self):
        dcl  = ['uint32_t %s_data;' % self._name]
        return dcl
class EntryString(Entry):
    """NUL-terminated string field ('string name'); heap-owned via strdup."""
    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)
        self._ctype = 'char *'
    def CodeAssign(self):
        # Setter frees any previous value and strdup()s the new one.
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, const %s value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_data != NULL)' % name,
                 ' free(msg->%s_data);' % name,
                 ' if ((msg->%s_data = strdup(value)) == NULL)' % name,
                 ' return (-1);',
                 ' msg->%s_set = 1;' % name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_unmarshal_string(%s, %s, &%s->%s_data) == -1) {' % (
            buf, tag_name, var_name, self._name),
                ' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                ' return (-1);',
                '}'
                ]
        return code
    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_string(%s, %s, %s->%s_data);' % (
            buf, tag_name, var_name, self._name)]
        return code
    def CodeClear(self, structname):
        # Clearing releases the heap string.
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 ' free (%s->%s_data);' % (structname, self.Name()),
                 ' %s->%s_data = NULL;' % (structname, self.Name()),
                 ' %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code
    def CodeNew(self, name):
        code  = ['%s->%s_data = NULL;' % (name, self._name)]
        code.extend(Entry.CodeNew(self, name))
        return code
    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL)' % (name, self._name),
                ' free (%s->%s_data); ' % (name, self._name)]
        return code
    def Declaration(self):
        dcl  = ['char *%s_data;' % self._name]
        return dcl
class EntryStruct(Entry):
    """Field referencing another generated struct ('struct[ref] name').

    Stored as a heap pointer; assignment deep-copies the value by
    marshaling it into a temporary evbuffer and unmarshaling it back.
    """
    def __init__(self, type, name, tag, refname):
        # Init base class
        Entry.__init__(self, type, name, tag)
        self._can_be_array = 1   # struct fields may be arrays
        self._refname = refname
        self._ctype = 'struct %s' % refname
    def GetDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, %s **);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def CodeGet(self):
        # Getter lazily allocates the sub-struct on first access.
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s **value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_set != 1) {' % name,
                 ' msg->%s_data = %s_new();' % (name, self._refname),
                 ' if (msg->%s_data == NULL)' % name,
                 ' return (-1);',
                 ' msg->%s_set = 1;' % name,
                 ' }',
                 ' *value = msg->%s_data;' % name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeAssign(self):
        # Deep copy via marshal/unmarshal round-trip through an evbuffer.
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, const %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' struct evbuffer *tmp = NULL;',
                 ' if (msg->%s_set) {' % name,
                 ' %s_clear(msg->%s_data);' % (self._refname, name),
                 ' msg->%s_set = 0;' % name,
                 ' } else {',
                 ' msg->%s_data = %s_new();' % (name, self._refname),
                 ' if (msg->%s_data == NULL) {' % name,
                 ' event_warn("%%s: %s_new()", __func__);' % (
            self._refname),
                 ' goto error;',
                 ' }',
                 ' }',
                 ' if ((tmp = evbuffer_new()) == NULL) {',
                 ' event_warn("%s: evbuffer_new()", __func__);',
                 ' goto error;',
                 ' }',
                 ' %s_marshal(tmp, value); ' % self._refname,
                 ' if (%s_unmarshal(msg->%s_data, tmp) == -1) {' % (
            self._refname, name ),
                 ' event_warnx("%%s: %s_unmarshal", __func__);' % (
            self._refname),
                 ' goto error;',
                 ' }',
                 ' msg->%s_set = 1;' % name,
                 ' evbuffer_free(tmp);',
                 ' return (0);',
                 ' error:',
                 ' if (tmp != NULL)',
                 ' evbuffer_free(tmp);',
                 ' if (msg->%s_data != NULL) {' % name,
                 ' %s_free(msg->%s_data);' % (self._refname, name),
                 ' msg->%s_data = NULL;' % name,
                 ' }',
                 ' return (-1);',
                 '}' ]
        return code
    def CodeComplete(self, structname):
        # Completeness check recurses into the sub-struct.
        if self.Optional():
            code = [ 'if (%s->%s_set && %s_complete(%s->%s_data) == -1)' % (
                structname, self.Name(),
                self._refname, structname, self.Name()),
                     ' return (-1);' ]
        else:
            code = [ 'if (%s_complete(%s->%s_data) == -1)' % (
                self._refname, structname, self.Name()),
                     ' return (-1);' ]
        return code
    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['%s->%s_data = %s_new();' % (
            var_name, self._name, self._refname),
                'if (%s->%s_data == NULL)' % (var_name, self._name),
                ' return (-1);',
                'if (evtag_unmarshal_%s(%s, %s, %s->%s_data) == -1) {' % (
            self._refname, buf, tag_name, var_name, self._name),
                ' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                ' return (-1);',
                '}'
                ]
        return code
    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_%s(%s, %s, %s->%s_data);' % (
            self._refname, buf, tag_name, var_name, self._name)]
        return code
    def CodeClear(self, structname):
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 ' %s_free(%s->%s_data);' % (
            self._refname, structname, self.Name()),
                 ' %s->%s_data = NULL;' % (structname, self.Name()),
                 ' %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code
    def CodeNew(self, name):
        code  = ['%s->%s_data = NULL;' % (name, self._name)]
        code.extend(Entry.CodeNew(self, name))
        return code
    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL)' % (name, self._name),
                ' %s_free(%s->%s_data); ' % (
            self._refname, name, self._name)]
        return code
    def Declaration(self):
        dcl  = ['struct %s *%s_data;' % (self._refname, self._name)]
        return dcl
class EntryVarBytes(Entry):
    """Variable-length byte buffer field ('bytes name' without a length).

    Stored as a heap pointer plus an explicit '<name>_length' member.
    """
    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)
        self._ctype = 'uint8_t *'
    def GetDeclaration(self, funcname):
        # Getter also returns the length via an out-parameter.
        code = [ 'int %s(struct %s *, %s *, uint32_t *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s, uint32_t);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def CodeAssign(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, '
                 'const %s value, uint32_t len)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_data != NULL)' % name,
                 ' free (msg->%s_data);' % name,
                 ' msg->%s_data = malloc(len);' % name,
                 ' if (msg->%s_data == NULL)' % name,
                 ' return (-1);',
                 ' msg->%s_set = 1;' % name,
                 ' msg->%s_length = len;' % name,
                 ' memcpy(msg->%s_data, value, len);' % name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeGet(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s *value, uint32_t *plen)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_set != 1)' % name,
                 ' return (-1);',
                 ' *value = msg->%s_data;' % name,
                 ' *plen = msg->%s_length;' % name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_payload_length(%s, &%s->%s_length) == -1)' % (
            buf, var_name, self._name),
                ' return (-1);',
                # We do not want DoS opportunities
                'if (%s->%s_length > EVBUFFER_LENGTH(%s))' % (
            var_name, self._name, buf),
                ' return (-1);',
                'if ((%s->%s_data = malloc(%s->%s_length)) == NULL)' % (
            var_name, self._name, var_name, self._name),
                ' return (-1);',
                'if (evtag_unmarshal_fixed(%s, %s, %s->%s_data, '
                '%s->%s_length) == -1) {' % (
            buf, tag_name, var_name, self._name, var_name, self._name),
                ' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                ' return (-1);',
                '}'
                ]
        return code
    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal(%s, %s, %s->%s_data, %s->%s_length);' % (
            buf, tag_name, var_name, self._name, var_name, self._name)]
        return code
    def CodeClear(self, structname):
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 ' free (%s->%s_data);' % (structname, self.Name()),
                 ' %s->%s_data = NULL;' % (structname, self.Name()),
                 ' %s->%s_length = 0;' % (structname, self.Name()),
                 ' %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code
    def CodeNew(self, name):
        code  = ['%s->%s_data = NULL;' % (name, self._name),
                 '%s->%s_length = 0;' % (name, self._name) ]
        code.extend(Entry.CodeNew(self, name))
        return code
    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL)' % (name, self._name),
                ' free (%s->%s_data); ' % (name, self._name)]
        return code
    def Declaration(self):
        dcl  = ['uint8_t *%s_data;' % self._name,
                'uint32_t %s_length;' % self._name]
        return dcl
class EntryArray(Entry):
    """Wrapper turning a struct entry into an array of that struct.

    Created by ProcessOneEntry when the 'array' qualifier is present; wraps
    an EntryStruct and stores a growable 'struct ref **' plus a length.
    """
    def __init__(self, entry):
        # Init base class
        Entry.__init__(self, entry._type, entry._name, entry._tag)
        self._entry = entry
        self._refname = entry._refname
        self._ctype = 'struct %s' % self._refname
    def GetDeclaration(self, funcname):
        """Allows direct access to elements of the array."""
        code = [ 'int %s(struct %s *, int, %s **);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, int, const %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code
    def AddDeclaration(self, funcname):
        # The add function grows the array and returns the new element.
        code = [ '%s *%s(struct %s *);' % (
            self._ctype, funcname, self._struct.Name() ) ]
        return code
    def CodeGet(self):
        # Indexed getter with bounds checking against <name>_length.
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, int offset, %s **value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' if (msg->%s_set != 1)' % name,
                 ' return (-1);',
                 ' if (offset >= msg->%s_length)' % name,
                 ' return (-1);',
                 ' *value = msg->%s_data[offset];' % name,
                 ' return (0);',
                 '}' ]
        return code
    def CodeAssign(self):
        # Indexed setter; deep-copies via marshal/unmarshal like EntryStruct.
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, int off, const %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 ' struct evbuffer *tmp = NULL;',
                 ' if (msg->%s_set != 1)' % name,
                 ' return (-1);',
                 ' if (off >= msg->%s_length)' % name,
                 ' return (-1);',
                 '',
                 ' %s_clear(msg->%s_data[off]);' % (self._refname, name),
                 ' if ((tmp = evbuffer_new()) == NULL) {',
                 ' event_warn("%s: evbuffer_new()", __func__);',
                 ' goto error;',
                 ' }',
                 ' %s_marshal(tmp, value); ' % self._refname,
                 ' if (%s_unmarshal(msg->%s_data[off], tmp) == -1) {' % (
            self._refname, name ),
                 ' event_warnx("%%s: %s_unmarshal", __func__);' % (
            self._refname),
                 ' goto error;',
                 ' }',
                 ' evbuffer_free(tmp);',
                 ' return (0);',
                 ' error:',
                 ' if (tmp != NULL)',
                 ' evbuffer_free(tmp);',
                 ' %s_clear(msg->%s_data[off]);' % (self._refname, name),
                 ' return (-1);',
                 '}' ]
        return code
    def CodeAdd(self):
        # Grows the pointer array by one via realloc and news up the element.
        name = self._name
        code = [
            '%s *' % self._ctype,
            '%s_%s_add(struct %s *msg)' % (
            self._struct.Name(), name, self._struct.Name()),
            '{',
            ' msg->%s_length++;' % name,
            ' msg->%s_data = (struct %s**)realloc(msg->%s_data, '
            ' msg->%s_length * sizeof(struct %s*));' % (
            name, self._refname, name, name, self._refname ),
            ' if (msg->%s_data == NULL)' % name,
            ' return (NULL);',
            ' msg->%s_data[msg->%s_length - 1] = %s_new();' % (
            name, name, self._refname),
            ' if (msg->%s_data[msg->%s_length - 1] == NULL) {' % (name, name),
            ' msg->%s_length--; ' % name,
            ' return (NULL);',
            ' }',
            ' msg->%s_set = 1;' % name,
            ' return (msg->%s_data[msg->%s_length - 1]);' % (name, name),
            '}'
            ]
        return code
    def CodeComplete(self, structname):
        # Every element of the array must itself be complete.
        code = []
        if self.Optional():
            code.append( 'if (%s->%s_set)' %  (structname, self.Name()))
        code.extend(['{',
                     ' int i;',
                     ' for (i = 0; i < %s->%s_length; ++i) {' % (
            structname, self.Name()),
                     ' if (%s_complete(%s->%s_data[i]) == -1)' % (
            self._refname, structname, self.Name()),
                     ' return (-1);',
                     ' }',
                     '}'
                     ])
        return code
    def CodeUnmarshal(self, buf, tag_name, var_name):
        # Appends a new element, then unmarshals into it; shrinks on failure.
        code = ['if (%s_%s_add(%s) == NULL)' % (
            self._struct.Name(), self._name, var_name),
                ' return (-1);',
                'if (evtag_unmarshal_%s(%s, %s, '
                '%s->%s_data[%s->%s_length - 1]) == -1) {' % (
            self._refname, buf, tag_name, var_name, self._name,
            var_name, self._name),
                ' %s->%s_length--; ' % (var_name, self._name),
                ' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                ' return (-1);',
                '}'
                ]
        return code
    def CodeMarshal(self, buf, tag_name, var_name):
        # Marshals each element under the same tag.
        code = ['{',
                ' int i;',
                ' for (i = 0; i < %s->%s_length; ++i) {' % (
            var_name, self._name),
                ' evtag_marshal_%s(%s, %s, %s->%s_data[i]);' % (
            self._refname, buf, tag_name, var_name, self._name),
                ' }',
                '}'
                ]
        return code
    def CodeClear(self, structname):
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 ' int i;',
                 ' for (i = 0; i < %s->%s_length; ++i) {' % (
            structname, self.Name()),
                 ' %s_free(%s->%s_data[i]);' % (
            self._refname, structname, self.Name()),
                 ' }',
                 ' free(%s->%s_data);' % (structname, self.Name()),
                 ' %s->%s_data = NULL;' % (structname, self.Name()),
                 ' %s->%s_set = 0;' % (structname, self.Name()),
                 ' %s->%s_length = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code
    def CodeNew(self, name):
        code = ['%s->%s_data = NULL;' % (name, self._name),
                '%s->%s_length = 0;' % (name, self._name)]
        code.extend(Entry.CodeNew(self, name))
        return code
    def CodeFree(self, name):
        # Frees each element, then the pointer array itself.
        code = ['if (%s->%s_data != NULL) {' % (name, self._name),
                ' int i;',
                ' for (i = 0; i < %s->%s_length; ++i) {' % (
            name, self._name),
                ' %s_free(%s->%s_data[i]); ' % (
            self._refname, name, self._name),
                ' %s->%s_data[i] = NULL;' % (name, self._name),
                ' }',
                ' free(%s->%s_data);' % (name, self._name),
                ' %s->%s_data = NULL;' % (name, self._name),
                ' %s->%s_length = 0;' % (name, self._name),
                '}'
                ]
        return code
    def Declaration(self):
        dcl  = ['struct %s **%s_data;' % (self._refname, self._name),
                'int %s_length;' % self._name]
        return dcl
def NormalizeLine(line):
    """Strip //-comments, trim, and collapse whitespace in one input line."""
    # 'global' declarations are unnecessary for read-only access to the
    # module-level compiled patterns.
    stripped = cppcomment.sub('', line).strip()
    return white.sub(' ', stripped)
def ProcessOneEntry(newstruct, entry):
    """Parse one '[optional|array] type name = tag' entry into newstruct.

    Recognizes fixed-length bytes ('bytes name[len]'), variable bytes,
    int, string and struct[ref] types.  Exits the program on any parse
    error.  Returns a list of additional structs created while processing
    (currently always empty); the entry itself is added to newstruct.
    """
    optional = 0
    array = 0
    entry_type = ''
    name = ''
    tag = ''
    tag_set = None
    separator = ''
    fixed_length = ''
    tokens = entry.split(' ')
    while tokens:
        token = tokens[0]
        tokens = tokens[1:]
        if not entry_type:
            # Qualifiers may precede the type keyword.
            # (The original had a redundant nested 'if not entry_type' here.)
            if not optional and token == 'optional':
                optional = 1
                continue
            if not array and token == 'array':
                array = 1
                continue
            entry_type = token
            continue
        if not name:
            # Name with an optional fixed-length suffix, e.g. digest[16].
            res = re.match(r'^([^\[\]]+)(\[.*\])?$', token)
            if not res:
                print >>sys.stderr, 'Cannot parse name: \"%s\" around %d' % (
                    entry, line_count)
                sys.exit(1)
            name = res.group(1)
            fixed_length = res.group(2)
            if fixed_length:
                fixed_length = fixed_length[1:-1]
            continue
        if not separator:
            separator = token
            if separator != '=':
                print >>sys.stderr, 'Expected "=" after name \"%s\" got %s' % (
                    name, token)
                sys.exit(1)
            continue
        if not tag_set:
            tag_set = 1
            if not re.match(r'^[0-9]+$', token):
                print >>sys.stderr, 'Expected tag number: \"%s\"' % entry
                sys.exit(1)
            tag = int(token)
            continue
        print >>sys.stderr, 'Cannot parse \"%s\"' % entry
        sys.exit(1)
    if not tag_set:
        print >>sys.stderr, 'Need tag number: \"%s\"' % entry
        sys.exit(1)
    # Create the right entry
    if entry_type == 'bytes':
        if fixed_length:
            newentry = EntryBytes(entry_type, name, tag, fixed_length)
        else:
            newentry = EntryVarBytes(entry_type, name, tag)
    elif entry_type == 'int' and not fixed_length:
        newentry = EntryInt(entry_type, name, tag)
    elif entry_type == 'string' and not fixed_length:
        newentry = EntryString(entry_type, name, tag)
    else:
        res = re.match(r'^struct\[(%s)\]$' % _STRUCT_RE,
                       entry_type, re.IGNORECASE)
        if res:
            # References another struct defined in our file
            newentry = EntryStruct(entry_type, name, tag, res.group(1))
        else:
            print >>sys.stderr, 'Bad type: "%s" in "%s"' % (entry_type, entry)
            sys.exit(1)
    structs = []
    if optional:
        newentry.MakeOptional()
    if array:
        newentry.MakeArray()
    newentry.SetStruct(newstruct)
    newentry.SetLineCount(line_count)
    newentry.Verify()
    if array:
        # Wrap the entry in an EntryArray.  The wrapper starts with a fresh
        # _array flag, so MakeArray() must be called on it again.
        # (Removed the unused 'newname' local that was computed here.)
        newentry = EntryArray(newentry)
        newentry.SetStruct(newstruct)
        newentry.SetLineCount(line_count)
        newentry.MakeArray()
    newstruct.AddEntry(newentry)
    return structs
def ProcessStruct(data):
    """Parse one normalized struct declaration string into Struct objects.

    *data* has the layout: 'struct' <name> '{' <entry>; <entry>; ... '}'.
    Returns a list of Struct objects; nested struct references discovered
    while processing entries are included before the struct itself.
    """
    words = data.split(' ')
    # Tokens 0..2 are 'struct', the name, and '{'; the last token is '}'.
    parent = Struct(words[1])
    body = ' '.join(words[3:-1])
    results = []
    for raw_entry in body.split(';'):
        cleaned = NormalizeLine(raw_entry)
        if not cleaned:
            continue
        # It's possible that new structs get defined in here.
        results.extend(ProcessOneEntry(parent, cleaned))
    results.append(parent)
    return results
def GetNextStruct(file):
    """Read the next struct definition from *file* as one normalized string.

    Scans forward line by line, stripping C-style comments, until a full
    ``struct NAME { ... }`` block has been consumed.  Along the way,
    ``#include`` and ``#if``/``#ifdef``/``#endif`` lines are collected into
    the module-level ``cppdirect`` list and ``#define`` lines into
    ``headerdirect`` (mutated in place; no ``global`` needed for append).
    Returns the flattened struct text, or '' at end of file.  Exits the
    process on malformed input.
    """
    global line_count
    global cppdirect
    got_struct = 0
    # NOTE(review): processed_lines is never used below -- dead local.
    processed_lines = []
    have_c_comment = 0
    data = ''
    while 1:
        line = file.readline()
        if not line:
            break
        line_count += 1
        # strip the trailing newline
        line = line[:-1]
        if not have_c_comment and re.search(r'/\*', line):
            if re.search(r'/\*.*\*/', line):
                # comment opens and closes on this line: drop it entirely
                line = re.sub(r'/\*.*\*/', '', line)
            else:
                # comment continues onto following lines
                line = re.sub(r'/\*.*$', '', line)
                have_c_comment = 1
        if have_c_comment:
            if not re.search(r'\*/', line):
                continue
            have_c_comment = 0
            line = re.sub(r'^.*\*/', '', line)
        line = NormalizeLine(line)
        if not line:
            continue
        if not got_struct:
            # Outside any struct: collect preprocessor directives, or expect
            # the 'struct NAME {' opener next.
            if re.match(r'#include ["<].*[>"]', line):
                cppdirect.append(line)
                continue
            if re.match(r'^#(if( |def)|endif)', line):
                cppdirect.append(line)
                continue
            if re.match(r'^#define', line):
                headerdirect.append(line)
                continue
            if not re.match(r'^struct %s {$' % _STRUCT_RE,
                            line, re.IGNORECASE):
                print >>sys.stderr, 'Missing struct on line %d: %s' % (
                    line_count, line)
                sys.exit(1)
            else:
                got_struct = 1
                data += line
            continue
        # We are inside the struct
        tokens = line.split('}')
        if len(tokens) == 1:
            data += ' ' + line
            continue
        if len(tokens[1]):
            print >>sys.stderr, 'Trailing garbage after struct on line %d' % (
                line_count )
            sys.exit(1)
        # We found the end of the struct
        data += ' %s}' % tokens[0]
        break
    # Remove any comments, that might be in there
    data = re.sub(r'/\*.*\*/', '', data)
    return data
def Parse(file):
    """
    Parses the input file and returns the list of struct entities found.
    """
    entities = []
    while True:
        # Fetch the next struct, nicely flattened onto one line.
        chunk = GetNextStruct(file)
        if not chunk:
            return entities
        entities.extend(ProcessStruct(chunk))
def GuardName(name):
    """Derive the C include-guard macro for *name* (e.g. 'a/b.rpc' -> '_A_B_RPC_')."""
    sanitized = name.replace('.', '_').replace('/', '_')
    return '_%s_' % sanitized.upper()
def HeaderPreamble(name):
    """Build the top of the generated header for *name*.

    Emits the banner comment, the include guard, <stdint.h>, any collected
    ``#define`` directives (module-level ``headerdirect``), and the EVTAG
    accessor macros.
    """
    guard = GuardName(name)
    parts = [
        '/*\n'
        ' * Automatically generated from %s\n'
        ' */\n\n'
        '#ifndef %s\n'
        '#define %s\n\n' % (name, guard, guard)
    ]
    # insert stdint.h - let's hope everyone has it
    parts.append('#include <stdint.h>\n')
    for statement in headerdirect:
        parts.append('%s\n' % statement)
    if headerdirect:
        parts.append('\n')
    parts.append(
        '#define EVTAG_HAS(msg, member) ((msg)->member##_set == 1)\n'
        '#define EVTAG_ASSIGN(msg, member, args...) '
        '(*(msg)->member##_assign)(msg, ## args)\n'
        '#define EVTAG_GET(msg, member, args...) '
        '(*(msg)->member##_get)(msg, ## args)\n'
        '#define EVTAG_ADD(msg, member) (*(msg)->member##_add)(msg)\n'
        '#define EVTAG_LEN(msg, member) ((msg)->member##_length)\n'
    )
    return ''.join(parts)
def HeaderPostamble(name):
    """Close the generated header's include guard."""
    return '#endif /* %s */' % GuardName(name)
def BodyPreamble(name):
    """Build the top of the generated .c implementation file for *name*.

    Emits the do-not-edit banner (stamped with the module-level generator
    name/version), the fixed system includes, any collected preprocessor
    directives (``cppdirect``), an include of the matching generated header,
    and prototypes for the libevent error/warning helpers.
    """
    global _NAME
    global _VERSION
    # The generated header lives next to the source: foo.rpc -> foo.gen.h
    header_file = '.'.join(name.split('.')[:-1]) + '.gen.h'
    pre = ( '/*\n'
            ' * Automatically generated from %s\n'
            ' * by %s/%s. DO NOT EDIT THIS FILE.\n'
            ' */\n\n' ) % (name, _NAME, _VERSION)
    pre += ( '#include <sys/types.h>\n'
             '#include <sys/time.h>\n'
             '#include <stdlib.h>\n'
             '#include <string.h>\n'
             '#include <assert.h>\n'
             '#include <event.h>\n\n' )
    for statement in cppdirect:
        pre += '%s\n' % statement
    pre += '\n#include "%s"\n\n' % header_file
    pre += 'void event_err(int eval, const char *fmt, ...);\n'
    pre += 'void event_warn(const char *fmt, ...);\n'
    pre += 'void event_errx(int eval, const char *fmt, ...);\n'
    pre += 'void event_warnx(const char *fmt, ...);\n\n'
    return pre
def main(argv):
    """Entry point: read the .rpc file named in argv[1], emit .gen.h and .gen.c.

    Progress and errors go to stderr (byte-identical to the original
    ``print >>`` output); exits with status 1 on bad arguments.
    """
    if len(argv) < 2 or not argv[1]:
        sys.stderr.write('Need RPC description file as first argument.\n')
        sys.exit(1)
    filename = argv[1]
    ext = filename.split('.')[-1]
    if ext != 'rpc':
        sys.stderr.write('Unrecognized file extension: %s\n' % ext)
        sys.exit(1)
    sys.stderr.write('Reading "%s"\n' % filename)
    fp = open(filename, 'r')
    entities = Parse(fp)
    fp.close()
    base = '.'.join(filename.split('.')[:-1])
    header_file = base + '.gen.h'
    impl_file = base + '.gen.c'
    sys.stderr.write('... creating "%s"\n' % header_file)
    header_fp = open(header_file, 'w')
    header_fp.write(HeaderPreamble(filename) + '\n')
    # Forward declarations first: allows structs to reference each other.
    for entity in entities:
        entity.PrintForwardDeclaration(header_fp)
    header_fp.write('\n')
    for entity in entities:
        entity.PrintTags(header_fp)
        entity.PrintDeclaration(header_fp)
    header_fp.write(HeaderPostamble(filename) + '\n')
    header_fp.close()
    sys.stderr.write('... creating "%s"\n' % impl_file)
    impl_fp = open(impl_file, 'w')
    impl_fp.write(BodyPreamble(filename) + '\n')
    for entity in entities:
        entity.PrintCode(impl_fp)
    impl_fp.close()
# Script entry point: generate code for the .rpc file named on the command line.
if __name__ == '__main__':
    main(sys.argv)
| 33.419886 | 79 | 0.452088 |
30478ca1221294a1b71b05f889f991a719892e4f | 3,484 | py | Python | kospeech/models/decoder.py | Rhcsky/KoSpeech | dbff78140d150dcc71d14d65f81c011847e9574d | [
"Apache-2.0"
] | null | null | null | kospeech/models/decoder.py | Rhcsky/KoSpeech | dbff78140d150dcc71d14d65f81c011847e9574d | [
"Apache-2.0"
] | null | null | null | kospeech/models/decoder.py | Rhcsky/KoSpeech | dbff78140d150dcc71d14d65f81c011847e9574d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021, Soohwan Kim. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
from torch import Tensor
from typing import Tuple
class DecoderInterface(nn.Module):
    """Base interface shared by KoSpeech decoder implementations.

    Adds parameter-counting and dropout-updating utilities on top of
    ``nn.Module``.
    """
    def __init__(self):
        super(DecoderInterface, self).__init__()

    def count_parameters(self) -> int:
        """Count parameters of decoder.

        Returns:
            int: total number of scalar parameters.
        """
        # Bug fix: ``numel`` is a method and must be called -- the original
        # summed the bound methods themselves, which raises a TypeError.
        return sum(p.numel() for p in self.parameters())

    def update_dropout(self, dropout_p: float) -> None:
        """Set the dropout probability of every ``nn.Dropout`` child to *dropout_p*."""
        for name, child in self.named_children():
            if isinstance(child, nn.Dropout):
                child.p = dropout_p
class BaseDecoder(DecoderInterface):
    """ASR decoder base class: subclasses implement ``forward`` and ``decode``."""
    def __init__(self):
        super(BaseDecoder, self).__init__()

    def forward(self, targets: Tensor, encoder_outputs: Tensor, **kwargs) -> Tensor:
        """Forward propagate ``encoder_outputs`` for training (teacher forcing).

        Args:
            targets (torch.LongTensor): target sequence passed to the decoder,
                of size ``(batch, seq_length)``
            encoder_outputs (torch.FloatTensor): encoder output sequence, of
                size ``(batch, seq_length, dimension)``

        Returns:
            torch.FloatTensor: log probability of model predictions.
        """
        raise NotImplementedError

    @torch.no_grad()
    def decode(self, encoder_outputs: Tensor, *args) -> Tensor:
        """Decode ``encoder_outputs`` at inference time (gradients disabled).

        Args:
            encoder_outputs (torch.FloatTensor): encoder output sequence, of
                size ``(batch, seq_length, dimension)``

        Returns:
            torch.FloatTensor: log probability of model predictions.
        """
        raise NotImplementedError
class TransducerDecoder(DecoderInterface):
    """ASR transducer (RNN-T style) decoder base class."""
    def __init__(self):
        super(TransducerDecoder, self).__init__()

    def forward(self, inputs: Tensor, input_lengths: Tensor) -> Tuple[Tensor, Tensor]:
        """Forward propagate ``inputs`` (target labels) for training.

        Args:
            inputs (torch.LongTensor): target sequence passed to the decoder,
                of size ``(batch, seq_length)``
            input_lengths (torch.LongTensor): length of each input sequence,
                of size ``(batch)``

        Returns:
            (Tensor, Tensor):
                decoder output sequence and hidden states, each of size
                ``(batch, seq_length, dimension)``.
        """
        raise NotImplementedError
| 37.869565 | 120 | 0.641217 |
73b00b39357e15f4cf1d3bc77bd5ea4f3ad63bc8 | 47,275 | py | Python | Ctrax/tracking_settings.py | johnabender/ctrax-tmp | 3e326ca9d00472dff0dbaea6528f349a5225ad90 | [
"Unlicense"
] | null | null | null | Ctrax/tracking_settings.py | johnabender/ctrax-tmp | 3e326ca9d00472dff0dbaea6528f349a5225ad90 | [
"Unlicense"
] | null | null | null | Ctrax/tracking_settings.py | johnabender/ctrax-tmp | 3e326ca9d00472dff0dbaea6528f349a5225ad90 | [
"Unlicense"
] | null | null | null |
import copy
import os
import sys
import motmot.wxvideo.wxvideo as wxvideo
import motmot.wxvalidatedtext.wxvalidatedtext as wxvt # part of Motmot
import numpy as num
#from scipy.misc.pilutil import imresize
import wx
from wx import xrc
import codedir
import params
import ellipsesk as ell
from ellipses_draw import draw_ellipses
import imagesk
if 'darwin' in sys.platform:
from mac_text_fixer import fix_text_sizes
from matchidentities import cvpred
from version import DEBUG_TRACKINGSETTINGS
# Path to the XRC layout definition for the tracking-settings dialog.
RSRC_FILE = os.path.join(codedir.codedir,'xrc','tracking_settings.xrc')
# NOTE(review): os.path.join never returns None, so this fallback branch is
# dead code -- presumably a guard left over from an older lookup scheme.
if RSRC_FILE is None:
    RSRC_FILE = os.path.join('xrc','tracking_settings.xrc')
# Indices into the "show image" choice control: which overlay to draw on
# the video canvas (see ShowImage's dispatch on self.img_chosen).
SHOW_UNFILTERED_OBSERVATIONS = 0
SHOW_FILTERED_OBSERVATIONS = 1
SHOW_SMALL_OBSERVATIONS = 2
SHOW_LARGE_OBSERVATIONS = 3
SHOW_DELETED_OBSERVATIONS = 4
SHOW_SPLIT_OBSERVATIONS = 5
SHOW_MERGED_OBSERVATIONS = 6
SHOW_LOWERED_OBSERVATIONS = 7
SHOW_MAXJUMP = 8
SHOW_MOTIONMODEL = 9
class StoredObservations:
    """Cache of the observations computed for a single movie frame.

    Holds the raw observation list plus optional intermediate results of
    the filtering pipeline, so a frame can be redrawn without recomputing.
    An entry stays valid until a tracking parameter changes.
    """
    def __init__(self,obs,frame,ellsmall=None,elllarge=None,didlowerthresh=None,
                 didmerge=None,diddelete=None,didsplit=None):
        # Raw observations and the frame index they were computed for.
        self.obs = obs
        self.frame = frame
        # Freshly constructed entries are always valid.
        self.isvalid = True
        # Optional intermediate filtering results.
        self.ellsmall = ellsmall
        self.elllarge = elllarge
        self.didlowerthresh = didlowerthresh
        self.didmerge = didmerge
        self.diddelete = diddelete
        self.didsplit = didsplit
    def SetInvalid(self):
        """Mark this cached entry as stale."""
        self.isvalid = False
    def issame(self,frame):
        """Return True when this entry is still valid and caches *frame*."""
        return self.isvalid and frame == self.frame
    def __str__(self):
        """Readable dump of whichever fields are present and non-None."""
        lines = []
        if getattr(self, 'isvalid', None) is not None:
            lines.append('isvalid = ' + str(self.isvalid))
        if getattr(self, 'frame', None) is not None:
            lines.append('frame = ' + str(self.frame))
        if getattr(self, 'obs', None) is not None:
            lines.append('obs = ' + str(self.obs) + ', len = ' + str(len(self.obs)))
        for field in ('ellsmall', 'elllarge', 'didlowerthresh',
                      'didmerge', 'diddelete', 'didsplit'):
            value = getattr(self, field, None)
            if value is not None:
                lines.append(field + ' = ' + str(value))
        if not lines:
            return ''
        return '\n'.join(lines) + '\n'
class TrackingSettings:
    def __init__(self,parent,bg_imgs,currframe):
        """Build the tracking-settings dialog.

        Loads the frame from the XRC resource, initializes zoom/display
        state, then wires controls, values and callbacks before the first
        redraw.

        Args:
            parent: parent wx window for the dialog.
            bg_imgs: background model (provides movie access and est_shape input).
            currframe (int): movie frame to display initially.
        """
        self.parent = parent
        # background model
        self.bg_imgs = bg_imgs
        self.show_frame = currframe
        # frame index of the cached background image; -1 forces a recompute
        self.bg_img_frame = -1
        # zoom mode (toolbar toggle states)
        self.zoomin = False
        self.zoomout = False
        self.info = False
        # visible axes in image coordinates: [xmin, xmax, ymin, ymax]
        self.zoomaxes = [0,params.params.movie_size[1]-1,0,params.params.movie_size[0]-1]
        self.zoomfactor = 1
        # automatically-computed shape bounds start out stale
        self.shape_uptodate = False
        self.automatic_minshape = params.ShapeParams()
        self.automatic_maxshape = params.ShapeParams()
        self.automatic_meanshape = params.ShapeParams()
        rsrc = xrc.XmlResource( RSRC_FILE )
        self.frame = rsrc.LoadFrame(parent,"trackingframe")
        if 'darwin' in sys.platform:
            fix_text_sizes( self.frame )
        # order matters: handles must exist before values/callbacks are set
        self.InitControlHandles()
        self.InitializeValues()
        self.BindCallbacks()
        self.OnResize()
        self.ShowImage()
def RegisterParamChange(self):
if hasattr(self,'obs_filtered'):
self.obs_filtered.SetInvalid()
if hasattr(self,'obs_unfiltered'):
self.obs_unfiltered.SetInvalid()
if hasattr(self,'obs_prev'):
self.obs_prev.SetInvalid()
    def InitControlHandles(self):
        """Bind member handles to the wx controls defined in the XRC layout.

        Pure lookup plus construction of the video canvas; control values
        are assigned separately in InitializeValues.
        """
        #self.min_ntargets_input = self.control('min_ntarets')
        #self.max_ntargets_input = self.control('max_ntargets')
        # shape-bound controls (automatic vs. manual mode)
        self.automatic_shape_input = self.control('automatic_bounds')
        self.automatic_panel = self.control('automatic_panel')
        self.nstd_shape_input = self.control('nstd_shape')
        self.nframes_shape_input = self.control('nframes_shape')
        self.compute_shape_input = self.control('compute_shape')
        self.automatic_shape_text = self.control('automatic_shape_text')
        self.manual_shape_input = self.control('manual_bounds')
        self.manual_panel = self.control('manual_panel')
        self.min_area_input = self.control('min_area')
        self.mean_area_input = self.control('mean_area')
        self.max_area_input = self.control('max_area')
        self.min_major_input = self.control('min_major')
        self.mean_major_input = self.control('mean_major')
        self.max_major_input = self.control('max_major')
        self.min_minor_input = self.control('min_minor')
        self.mean_minor_input = self.control('mean_minor')
        self.max_minor_input = self.control('max_minor')
        self.min_ecc_input = self.control('min_ecc')
        self.mean_ecc_input = self.control('mean_ecc')
        self.max_ecc_input = self.control('max_ecc')
        self.enforce_shape_input = self.control('enforce_shape_bounds')
        # motion-model controls
        self.angle_weight_input = self.control('angle_weight')
        self.max_jump_input = self.control('max_jump')
        # KB 20120109: allow jumps of distance max_jump_split if an observation is the result of splitting a connected component
        self.max_jump_split_input = self.control('max_jump_split')
        self.min_jump_input = self.control('min_jump')
        self.center_dampen_input = self.control('center_dampen')
        self.angle_dampen_input = self.control('angle_dampen')
        # observation-processing controls
        self.max_area_delete_input = self.control('max_area_delete')
        self.min_area_ignore_input = self.control('min_area_ignore')
        self.max_penalty_merge_input = self.control('max_penalty_merge')
        self.lower_thresh_input = self.control('lower_thresh')
        self.maxclustersperblob_input = self.control('maxclustersperblob')
        self.max_blobs_input = self.control( 'max_blobs_detect' )
        self.done_input = self.control('done')
        self.img_panel = self.control('img_panel')
        self.frame_scrollbar = self.control('frame_scrollbar')
        self.frame_number_text = self.control('frame_number')
        self.show_img_input = self.control('show_img')
        # toolbar: fall back to the frame's own toolbar if the XRC lookup fails
        self.toolbar = xrc.XRCCTRL(self.frame,'toolbar')
        if self.toolbar is None:
            self.toolbar = self.frame.GetToolBar()
        self.zoomin_id = xrc.XRCID('zoomin')
        self.zoomout_id = xrc.XRCID('zoomout')
        self.info_id = xrc.XRCID('moreinfo')
        self.info_text = xrc.XRCCTRL( self.frame, "text_obsinfo" )
        ## if 'win' in sys.platform:
        ##     self.toolbar.AddSeparator()
        ##     self.info_text = wx.TextCtrl(self.toolbar, -1, 'Observation Info', pos=(80,0),size=(300,20),style=wx.TE_READONLY|wx.TE_CENTRE)
        ##     self.toolbar.AddControl(self.info_text)
        ## else:
        ##     self.info_text = wx.TextCtrl(self.toolbar, -1, 'Observation Info', size=(300,20),style=wx.TE_READONLY|wx.TE_CENTRE)
        self.info_text.SetValue('Observation Info')
        #self.info_text.SetEditable(False)
        # hindsight error-correction controls
        self.hindsight_panel = self.control('hindsight_panel')
        self.splitdetection_panel = self.control('splitdetection_panel')
        self.mergeddetection_panel = self.control('mergeddetection_panel')
        self.spuriousdetection_panel = self.control('spuriousdetection_panel')
        self.lostdetection_panel = self.control('lostdetection_panel')
        self.do_fix_split_input = self.control('do_fix_split')
        self.do_fix_merged_input = self.control('do_fix_merged')
        self.do_fix_spurious_input = self.control('do_fix_spurious')
        self.do_fix_lost_input = self.control('do_fix_lost')
        self.splitdetection_length_input = self.control('splitdetection_length')
        self.splitdetection_cost_input = self.control('splitdetection_distance')
        self.mergeddetection_length_input = self.control('mergeddetection_length')
        self.mergeddetection_distance_input = self.control('mergeddetection_distance')
        self.spuriousdetection_length_input = self.control('spuriousdetection_length')
        self.lostdetection_length_input = self.control('lostdetection_length')
        # video display canvas, filling img_panel
        box = wx.BoxSizer( wx.VERTICAL )
        self.img_panel.SetSizer( box )
        self.img_wind = wxvideo.DynamicImageCanvas( self.img_panel, -1 )
        self.img_wind.set_resize(True)
        box.Add( self.img_wind, 1, wx.EXPAND )
        self.img_panel.SetAutoLayout( True )
        self.img_panel.Layout()
    def InitializeValues(self):
        """Seed every control with the current values from params.params."""
        #self.min_ntargets_input.SetValue(str(params.params.min_ntargets))
        #self.max_ntargets_input.SetValue(str(params.params.max_ntargets))
        # default to automatically-computed shape bounds
        self.automatic_shape_input.SetValue(True)
        self.manual_shape_input.SetValue(False)
        self.nstd_shape_input.SetValue(str(params.params.n_std_thresh))
        self.nframes_shape_input.SetValue(str(params.params.n_frames_size))
        self.min_area_input.SetValue(str(params.params.minshape.area))
        self.min_major_input.SetValue(str(params.params.minshape.major))
        self.min_minor_input.SetValue(str(params.params.minshape.minor))
        self.min_ecc_input.SetValue(str(params.params.minshape.ecc))
        self.mean_area_input.SetValue(str(params.params.meanshape.area))
        self.mean_major_input.SetValue(str(params.params.meanshape.major))
        self.mean_minor_input.SetValue(str(params.params.meanshape.minor))
        self.mean_ecc_input.SetValue(str(params.params.meanshape.ecc))
        self.max_area_input.SetValue(str(params.params.maxshape.area))
        self.max_major_input.SetValue(str(params.params.maxshape.major))
        self.max_minor_input.SetValue(str(params.params.maxshape.minor))
        self.max_ecc_input.SetValue(str(params.params.maxshape.ecc))
        self.enforce_shape_input.SetValue(params.params.enforce_minmax_shape)
        # NOTE(review): the next line seeds angle_dampen_input from ang_dist_wt;
        # it is overwritten with params.params.angle_dampen a few lines below,
        # so it looks like a copy/paste slip -- confirm before removing.
        self.angle_dampen_input.SetValue(str(params.params.ang_dist_wt))
        self.max_area_delete_input.SetValue(str(params.params.maxareadelete))
        self.min_area_ignore_input.SetValue(str(params.params.minareaignore))
        self.max_penalty_merge_input.SetValue(str(params.params.maxpenaltymerge))
        self.max_jump_input.SetValue(str(params.params.max_jump))
        # KB 20120109: allow jumps of distance max_jump_split if an observation is the result of splitting a connected component
        self.max_jump_split_input.SetValue(str(params.params.max_jump_split))
        self.min_jump_input.SetValue(str(params.params.min_jump))
        self.angle_weight_input.SetValue(str(params.params.ang_dist_wt))
        self.center_dampen_input.SetValue(str(params.params.dampen))
        self.angle_dampen_input.SetValue(str(params.params.angle_dampen))
        self.lower_thresh_input.SetValue(str(params.params.minbackthresh))
        self.maxclustersperblob_input.SetValue(str(params.params.maxclustersperblob))
        self.max_blobs_input.SetValue( str( params.params.max_n_clusters ) )
        # frame scrollbar and overlay selection
        self.frame_scrollbar.SetThumbPosition(self.show_frame)
        self.frame_scrollbar.SetScrollbar(self.show_frame,0,params.params.n_frames-1,30)
        self.img_chosen = SHOW_UNFILTERED_OBSERVATIONS
        self.show_img_input.SetSelection(self.img_chosen)
        self.toolbar.SetToggle(self.zoomin,self.zoomin_id)
        self.toolbar.SetToggle(self.zoomout,self.zoomout_id)
        self.toolbar.SetToggle(self.info,self.info_id)
        # hindsight error-correction settings
        self.do_fix_split_input.SetValue(params.params.do_fix_split)
        self.do_fix_merged_input.SetValue(params.params.do_fix_merged)
        self.do_fix_spurious_input.SetValue(params.params.do_fix_spurious)
        self.do_fix_lost_input.SetValue(params.params.do_fix_lost)
        self.splitdetection_panel.Enable(params.params.do_fix_split)
        self.mergeddetection_panel.Enable(params.params.do_fix_merged)
        self.spuriousdetection_panel.Enable(params.params.do_fix_spurious)
        self.lostdetection_panel.Enable(params.params.do_fix_lost)
        self.splitdetection_length_input.SetValue(str(params.params.splitdetection_length))
        self.splitdetection_cost_input.SetValue('%.2f'%params.params.splitdetection_cost)
        self.mergeddetection_length_input.SetValue(str(params.params.mergeddetection_length))
        self.mergeddetection_distance_input.SetValue('%.2f'%params.params.mergeddetection_distance)
        self.spuriousdetection_length_input.SetValue(str(params.params.spuriousdetection_length))
        self.lostdetection_length_input.SetValue(str(params.params.lostdetection_length))
    def BindCallbacks(self):
        """Wire every control to its handler (validated text, buttons, mouse)."""
        # number of targets
        #self.bindctrl(('min_ntargets','max_ntargets'),('int','int'),self.SetNTargets)
        # shape
        # automatic computation of bounds on shape
        self.frame.Bind(wx.EVT_RADIOBUTTON,self.OnAutomatic,self.automatic_shape_input)
        self.frame.Bind(wx.EVT_RADIOBUTTON,self.OnAutomatic,self.manual_shape_input)
        # automatic shape
        self.bindctrl(('nstd_shape','nframes_shape'),('float','float'),self.SetAutomatic)
        # compute shape now
        self.frame.Bind(wx.EVT_BUTTON,self.ComputeShapeNow,self.compute_shape_input)
        # manual shape
        self.bindctrl(('min_area','mean_area','max_area',
                       'min_major','mean_major','max_major',
                       'min_minor','mean_minor','max_minor',
                       'min_ecc','mean_ecc','max_ecc'),
                      ('float','float','float',
                       'float','float','float',
                       'float','float','float',
                       'float','float','float'),
                      self.SetManual)
        self.frame.Bind( wx.EVT_CHECKBOX, self.OnEnforceShape, self.enforce_shape_input )
        # motion
        # KB 20120109: allow jumps of distance max_jump_split if an observation is the result of
        # splitting a connected component
        self.bindctrl(('angle_weight','max_jump','max_jump_split','min_jump','center_dampen','angle_dampen'),
                      ('float','float','float','float','float','float'),
                      self.SetMotion)
        # observation
        self.bindctrl(('max_area_delete','min_area_ignore','max_penalty_merge','lower_thresh','maxclustersperblob', 'max_blobs_detect'),
                      ('float','float','float','float','int','int'),self.SetObservation)
        # hindsight
        self.frame.Bind(wx.EVT_CHECKBOX,self.OnSplit,self.do_fix_split_input)
        self.frame.Bind(wx.EVT_CHECKBOX,self.OnMerged,self.do_fix_merged_input)
        self.frame.Bind(wx.EVT_CHECKBOX,self.OnSpurious,self.do_fix_spurious_input)
        self.frame.Bind(wx.EVT_CHECKBOX,self.OnLost,self.do_fix_lost_input)
        self.bindctrl(('splitdetection_length','splitdetection_distance'),
                      ('int','float'),self.SetSplit)
        self.bindctrl(('mergeddetection_length','mergeddetection_distance'),
                      ('int','float'),self.SetMerged)
        self.bindctrl(('spuriousdetection_length',),('int',),self.SetSpurious)
        self.bindctrl(('lostdetection_length',),('int',),self.SetLost)
        # frame scrollbar
        self.frame.Bind(wx.EVT_SCROLL,self.FrameScrollbarMoved,self.frame_scrollbar)
        # img choice
        self.frame.Bind(wx.EVT_CHOICE,self.ImageChosen,self.show_img_input)
        # resize
        self.frame.Bind( wx.EVT_SIZE, self.OnResize )
        #self.frame.Bind( wx.EVT_MOVE, self.OnResizeBG )
        # toolbar
        self.frame.Bind(wx.EVT_TOOL, self.ZoominToggle, id=self.zoomin_id)
        self.frame.Bind(wx.EVT_TOOL, self.ZoomoutToggle, id=self.zoomout_id)
        self.frame.Bind(wx.EVT_TOOL, self.InfoToggle, id=self.info_id)
        # mouse click
        # Draw a blank frame first so the "trackset" child canvas exists,
        # then bind the mouse handlers on that child.
        self.img_size = params.params.movie_size
        img = num.zeros((self.img_size[0],self.img_size[1]),dtype=num.uint8)
        try:
            self.img_wind.update_image_and_drawings( "trackset", img, format="MONO8" )
        except:
            print "tracking_settings passing on redraw error, size:", self.img_wind.GetSize()
        self.img_wind_child = self.img_wind.get_child_canvas("trackset")
        self.img_wind_child.Bind(wx.EVT_LEFT_DOWN,self.MouseClick)
        self.img_wind_child.Bind(wx.EVT_LEFT_DCLICK,self.MouseDoubleClick)
def control(self,ctrlname):
return xrc.XRCCTRL(self.frame,ctrlname)
def bindctrl(self,ctrlnames,type,validatef):
for i,v in enumerate(ctrlnames):
if type[i] == 'int':
wxvt.setup_validated_integer_callback(self.control(v),
xrc.XRCID(v),
validatef,
pending_color=params.params.wxvt_bg)
else:
wxvt.setup_validated_float_callback(self.control(v),
xrc.XRCID(v),
validatef,
pending_color=params.params.wxvt_bg)
    def OnAutomatic(self,evt):
        """Radio-button handler toggling automatic vs. manual shape bounds.

        Switching to automatic re-applies the last automatically computed
        bounds (if any) and redraws; switching to manual registers the
        current text-control values instead.
        """
        isautomatic = self.automatic_shape_input.GetValue()
        # the two radio buttons should always be mutually exclusive
        if isautomatic == self.manual_shape_input.GetValue():
            print 'error: isautomatic == ismanual'
        if isautomatic:
            self.automatic_panel.Enable(True)
            self.manual_panel.Enable(False)
            if not self.shape_uptodate:
                self.automatic_shape_text.SetLabel('Bounds on shape not up to date.')
            else:
                self.automatic_shape_text.SetLabel('')
            # restore the automatically computed bounds
            params.params.minshape = self.automatic_minshape.copy()
            params.params.maxshape = self.automatic_maxshape.copy()
            params.params.meanshape = self.automatic_meanshape.copy()
            self.PrintShape()
            self.RegisterParamChange()
            self.ShowImage()
        else:
            self.automatic_panel.Enable(False)
            self.manual_panel.Enable(True)
            self.automatic_shape_text.SetLabel('')
            self.SetManual( evt ) # register current values
def OnEnforceShape( self, evt ):
"""Checkbox state changed for 'enforce shape bounds'."""
params.params.enforce_minmax_shape = self.enforce_shape_input.GetValue()
def OnSplit(self,evt):
params.params.do_fix_split = self.do_fix_split_input.GetValue()
self.splitdetection_panel.Enable(params.params.do_fix_split)
def OnMerged(self,evt):
params.params.do_fix_merged = self.do_fix_merged_input.GetValue()
self.mergeddetection_panel.Enable(params.params.do_fix_merged)
def OnSpurious(self,evt):
params.params.do_fix_spurious = self.do_fix_spurious_input.GetValue()
self.spuriousdetection_panel.Enable(params.params.do_fix_spurious)
def OnLost(self,evt):
params.params.do_fix_lost = self.do_fix_lost_input.GetValue()
self.lostdetection_panel.Enable(params.params.do_fix_lost)
def SetSplit(self,evt):
params.params.splitdetection_length = int(self.splitdetection_length_input.GetValue())
params.params.splitdetection_cost = float(self.splitdetection_cost_input.GetValue())
def SetMerged(self,evt):
params.params.mergeddetection_length = int(self.mergeddetection_length_input.GetValue())
params.params.mergeddetection_distance = float(self.mergeddetection_distance_input.GetValue())
def SetSpurious(self,evt):
params.params.spuriousdetection_length = int(self.spuriousdetection_length_input.GetValue())
def SetLost(self,evt):
params.params.lostdetection_length = int(self.lostdetection_length_input.GetValue())
def SetAutomatic(self,evt):
nstd = float(self.nstd_shape_input.GetValue())
nframes = int(self.nframes_shape_input.GetValue())
# check to make sure valid
if nstd <= 0:
self.nstd_shape_input.SetValue(str(params.params.n_std_thresh))
nstd = params.params.n_std_thresh
if nframes <= 0:
self.nframes_shape_input.SetValue(str(params.params.n_frames_size))
nframes = params.params.n_frames_size
if (nstd != params.params.n_std_thresh) or (nframes != params.params.n_frames_size):
params.params.n_std_thresh = nstd
params.params.n_frames_size = nframes
self.shape_uptodate = False
self.automatic_shape_text.SetLabel('Bounds on shape not up to date.')
    def ComputeShapeNow(self,evt):
        """Button handler: estimate the shape bounds from the movie now.

        Runs ell.est_shape (which writes its result into params.params),
        snapshots those bounds so they can be restored when toggling back
        to automatic mode, and redraws.
        """
        # estimate shape now; this can take a while, so show a busy cursor
        wx.BeginBusyCursor()
        wx.Yield()
        succeeded = ell.est_shape( self.bg_imgs,self.frame )
        wx.EndBusyCursor()
        if not succeeded:
            return
        # snapshot the computed bounds for later restoration
        self.automatic_minshape = params.params.minshape.copy()
        self.automatic_maxshape = params.params.maxshape.copy()
        self.automatic_meanshape = params.params.meanshape.copy()
        # reflect the new bounds in the text controls
        self.PrintShape()
        # mark the automatic bounds as up to date
        self.shape_uptodate = True
        self.automatic_shape_text.SetLabel('')
        self.RegisterParamChange()
        self.ShowImage()
def PrintShape(self):
self.min_area_input.SetValue(str(params.params.minshape.area))
self.min_major_input.SetValue(str(params.params.minshape.major))
self.min_minor_input.SetValue(str(params.params.minshape.minor))
self.min_ecc_input.SetValue(str(params.params.minshape.ecc))
self.mean_area_input.SetValue(str(params.params.meanshape.area))
self.mean_major_input.SetValue(str(params.params.meanshape.major))
self.mean_minor_input.SetValue(str(params.params.meanshape.minor))
self.mean_ecc_input.SetValue(str(params.params.meanshape.ecc))
self.max_area_input.SetValue(str(params.params.maxshape.area))
self.max_major_input.SetValue(str(params.params.maxshape.major))
self.max_minor_input.SetValue(str(params.params.maxshape.minor))
self.max_ecc_input.SetValue(str(params.params.maxshape.ecc))
def SetManual(self,evt):
minarea = float(self.min_area_input.GetValue())
meanarea = float(self.mean_area_input.GetValue())
maxarea = float(self.max_area_input.GetValue())
if (minarea <= meanarea) and (meanarea <= maxarea) and \
(minarea >= 0):
params.params.minshape.area = minarea
params.params.meanshape.area = meanarea
params.params.maxshape.area = maxarea
else:
self.min_area_input.SetValue(str(params.params.minshape.area))
self.mean_area_input.SetValue(str(params.params.meanshape.area))
self.max_area_input.SetValue(str(params.params.maxshape.area))
minmajor = float(self.min_major_input.GetValue())
meanmajor = float(self.mean_major_input.GetValue())
maxmajor = float(self.max_major_input.GetValue())
if (minmajor <= meanmajor) and (meanmajor <= maxmajor) and \
(minmajor >= 0):
params.params.minshape.major = minmajor
params.params.meanshape.major = meanmajor
params.params.maxshape.major = maxmajor
else:
self.min_major_input.SetValue(str(params.params.minshape.major))
self.mean_major_input.SetValue(str(params.params.meanshape.major))
self.max_major_input.SetValue(str(params.params.maxshape.major))
minminor = float(self.min_minor_input.GetValue())
meanminor = float(self.mean_minor_input.GetValue())
maxminor = float(self.max_minor_input.GetValue())
if (minminor <= meanminor) and (meanminor <= maxminor) and \
(minminor >= 0):
params.params.minshape.minor = minminor
params.params.meanshape.minor = meanminor
params.params.maxshape.minor = maxminor
else:
self.min_minor_input.SetValue(str(params.params.minshape.minor))
self.mean_minor_input.SetValue(str(params.params.meanshape.minor))
self.max_minor_input.SetValue(str(params.params.maxshape.minor))
minecc = float(self.min_ecc_input.GetValue())
meanecc = float(self.mean_ecc_input.GetValue())
maxecc = float(self.max_ecc_input.GetValue())
if (minecc <= meanecc) and (meanecc <= maxecc) and \
(minecc >= 0) and (maxecc <= 1):
params.params.minshape.ecc = minecc
params.params.meanshape.ecc = meanecc
params.params.maxshape.ecc = maxecc
else:
self.min_ecc_input.SetValue(str(params.params.minshape.ecc))
self.mean_ecc_input.SetValue(str(params.params.meanshape.ecc))
self.max_ecc_input.SetValue(str(params.params.maxshape.ecc))
#params.params.meanshape = params.averageshape(params.params.minshape,
# params.params.maxshape)
self.RegisterParamChange()
self.ShowImage()
def SetMotion(self,evt):
angle_weight = float(self.angle_weight_input.GetValue())
if angle_weight >= 0:
params.params.ang_dist_wt = angle_weight
else:
self.angle_weight_input.SetValue(str(params.params.ang_dist_wt))
max_jump = float(self.max_jump_input.GetValue())
if max_jump > 0:
params.params.max_jump = max_jump
else:
self.max_jump_input.SetValue(str(params.params.max_jump))
# KB 20120109: allow jumps of distance max_jump_split if an observation is the result of
# splitting a connected component
max_jump_split = float(self.max_jump_split_input.GetValue())
if max_jump_split > 0:
params.params.max_jump_split = max_jump_split
else:
self.max_jump_split_input.SetValue(str(params.params.max_jump_split))
min_jump = float(self.min_jump_input.GetValue())
if min_jump >= 0:
params.params.min_jump = min_jump
else:
self.min_jump_input.SetValue(str(params.params.min_jump))
center_dampen = float(self.center_dampen_input.GetValue())
if center_dampen >= 0 and center_dampen <= 1:
params.params.dampen = center_dampen
else:
self.center_dampen_input.SetValue(str(params.params.dampen))
angle_dampen = float(self.angle_dampen_input.GetValue())
if angle_dampen >= 0 and angle_dampen <= 1:
params.params.angle_dampen = angle_dampen
else:
self.angle_dampen_input.SetValue(str(params.params.angle_dampen))
self.RegisterParamChange()
self.ShowImage()
def SetObservation(self,evt):
params.params.maxareadelete = float(self.max_area_delete_input.GetValue())
params.params.minareaignore = float(self.min_area_ignore_input.GetValue())
params.params.maxpenaltymerge = float(self.max_penalty_merge_input.GetValue())
minbackthresh = float(self.lower_thresh_input.GetValue())
maxclustersperblob = int(self.maxclustersperblob_input.GetValue())
max_blobs_detect = int( self.max_blobs_input.GetValue() )
if minbackthresh > 0 and minbackthresh <= 1:
params.params.minbackthresh = minbackthresh
else:
self.lower_thresh_input.SetValue( str( params.params.minbackthresh ) )
if maxclustersperblob >= 1:
params.params.maxclustersperblob = maxclustersperblob
else:
self.maxclustersperblob_input.SetValue( str( params.params.maxclustersperblob ) )
if max_blobs_detect >= 1:
params.params.max_n_clusters = max_blobs_detect
else:
self.max_blobs_input.SetValue( str( params.params.max_n_clusters ) )
self.RegisterParamChange()
self.ShowImage()
def FrameScrollbarMoved(self, evt):
    """Jump to the frame selected on the scrollbar and redraw."""
    # The thumb position is the newly requested frame index.
    self.show_frame = self.frame_scrollbar.GetThumbPosition()
    # Keep the last filtered observations around so they can serve as
    # "previous frame" data (e.g. for motion prediction).
    if hasattr(self, 'obs_filtered'):
        self.obs_prev = self.obs_filtered
    self.ShowImage()
def ShowImage(self):
    """Redraw the image panel for the current frame in the chosen mode.

    Depending on self.img_chosen, overlays ellipses for the various
    observation sets (unfiltered/filtered/small/large/deleted/split/
    merged/lowered), max-jump circles, or the motion-model annotation,
    then pushes the image and line segments to the image window.
    """
    if DEBUG_TRACKINGSETTINGS: print 'ShowImage ' + str(self.show_frame)
    try:
        wx.Yield()
    except: pass # can be recursive sometimes in Windows
    wx.BeginBusyCursor()
    # read the raw frame to display
    im, stamp = self.bg_imgs.movie.get_frame( int(self.show_frame) )
    # NOTE(review): windowsize appears unused in the remainder of this method.
    windowsize = [self.img_panel.GetRect().GetHeight(),self.img_panel.GetRect().GetWidth()]
    # make sure background subtraction is up to date for this frame
    self.GetBgImage()
    # Build the overlay (plot_linesegs) for the selected visualization.
    if self.img_chosen == SHOW_UNFILTERED_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_UNFILTERED_OBSERVATIONS'
        obs_unfiltered = self.GetObsUnfiltered()
        plot_linesegs = draw_ellipses(obs_unfiltered)
    elif self.img_chosen == SHOW_FILTERED_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_FILTERED_OBSERVATIONS'
        obs_filtered = self.GetObsFiltered()
        plot_linesegs = draw_ellipses(obs_filtered)
    elif self.img_chosen == SHOW_SMALL_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_SMALL_OBSERVATIONS'
        # observations below the minimum shape area
        obs_unfiltered = self.GetObsUnfiltered()
        obs_small = []
        for obs in obs_unfiltered:
            if obs.area() < params.params.minshape.area:
                obs_small.append(obs)
        plot_linesegs = draw_ellipses(obs_small)
    elif self.img_chosen == SHOW_LARGE_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_LARGE_OBSERVATIONS'
        # observations above the maximum shape area
        obs_unfiltered = self.GetObsUnfiltered()
        obs_large = []
        for obs in obs_unfiltered:
            if obs.area() > params.params.maxshape.area:
                obs_large.append(obs)
        plot_linesegs = draw_ellipses(obs_large)
    elif self.img_chosen == SHOW_DELETED_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_DELETED_OBSERVATIONS'
        (obs_unfiltered,obs_deleted) = self.GetObsUnfiltered('diddelete')
        plot_linesegs = draw_ellipses(obs_deleted)
    elif self.img_chosen == SHOW_SPLIT_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_SPLIT_OBSERVATIONS'
        (obs_unfiltered,obs_split) = self.GetObsUnfiltered('didsplit')
        plot_linesegs = draw_ellipses(obs_split)
    elif self.img_chosen == SHOW_MERGED_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_MERGED_OBSERVATIONS'
        (obs_unfiltered,obs_merge) = self.GetObsUnfiltered('didmerge')
        plot_linesegs = draw_ellipses(obs_merge)
    elif self.img_chosen == SHOW_LOWERED_OBSERVATIONS:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_LOWERED_OBSERVATIONS'
        (obs_unfiltered,obs_lowered) = self.GetObsUnfiltered('didlowerthresh')
        plot_linesegs = draw_ellipses(obs_lowered)
    # MAXJUMP
    elif self.img_chosen == SHOW_MAXJUMP:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_MAXJUMP'
        # either grab or compute observations
        obs_filtered = self.GetObsFiltered()
        plot_linesegs = draw_ellipses(obs_filtered)
        # draw circles: outer circle at 2*max_jump, inner at min_jump,
        # one color per target
        for i,obs in enumerate(obs_filtered):
            plot_new_stuff = imagesk.draw_circle(obs.center.x,
                                                 obs.center.y,params.params.max_jump*2.,
                                                 params.params.colors[i%len(params.params.colors)])
            plot_linesegs.extend(plot_new_stuff)
            plot_new_stuff = imagesk.draw_circle(obs.center.x,
                                                 obs.center.y,params.params.min_jump,
                                                 params.params.colors[i%len(params.params.colors)])
            plot_linesegs.extend(plot_new_stuff)
    elif self.img_chosen == SHOW_MOTIONMODEL:
        if DEBUG_TRACKINGSETTINGS: print 'SHOW_MOTIONMODEL'
        (target_prev,target_curr,target_pred) = self.GetTargetMotion()
        # show the next frame, if there is one
        nextframe = num.minimum(int(self.show_frame+1),params.params.n_frames-1)
        im, stamp = self.bg_imgs.movie.get_frame(nextframe)
        # draw previous positions
        plot_linesegs = draw_ellipses(target_prev)
        # draw current positions
        plot_new_stuff = draw_ellipses(target_curr)
        plot_linesegs.extend(plot_new_stuff)
        # draw predicted positions
        plot_new_stuff = draw_ellipses(target_pred)
        plot_linesegs.extend(plot_new_stuff)
        # length in pixels of the annotation lines
        scaleunit = params.params.max_jump / params.params.DRAW_MOTION_SCALE
        parcolor = [0,255,0]
        perpcolor = [0,255,0]
        anglecolor = [0,0,255]
        for i in target_pred.iterkeys():
            # compute direction of motion
            vx = target_pred[i].center.x - target_curr[i].center.x
            vy = target_pred[i].center.y - target_curr[i].center.y
            thetamotion = num.arctan2(vy,vx)
            # angle
            theta = target_pred[i].angle
            # we want to choose to add pi if that makes difference from motion direction smaller
            dtheta = abs( ((theta - thetamotion + num.pi) % (2.*num.pi)) - num.pi )
            if dtheta > (num.pi/2.):
                theta += num.pi
            # compute end points of parallel motion
            x0 = target_pred[i].center.x + scaleunit*num.cos(theta)
            x1 = target_pred[i].center.x - scaleunit*num.cos(theta)
            y0 = target_pred[i].center.y + scaleunit*num.sin(theta)
            y1 = target_pred[i].center.y - scaleunit*num.sin(theta)
            # add parallel motion annotation line (shifted by one pixel)
            plot_new_stuff = imagesk.draw_line(x0+1,y0+1,x1+1,y1+1,parcolor)
            plot_linesegs.extend(plot_new_stuff)
            # compute end points of perpendicular motion
            x0 = target_pred[i].center.x + scaleunit*num.cos(num.pi/2.+theta)
            x1 = target_pred[i].center.x - scaleunit*num.cos(num.pi/2.+theta)
            y0 = target_pred[i].center.y + scaleunit*num.sin(num.pi/2.+theta)
            y1 = target_pred[i].center.y - scaleunit*num.sin(num.pi/2.+theta)
            # add perpendicular motion annotation line
            plot_new_stuff = imagesk.draw_line(x0+1,y0+1,x1+1,y1+1,perpcolor)
            plot_linesegs.extend(plot_new_stuff)
            # compute end points of angular motion
            if params.params.ang_dist_wt > 0:
                dtheta = scaleunit*num.sqrt(1./params.params.ang_dist_wt)
                if dtheta >= (num.pi/2.):
                    print 'dtheta is more than pi/2'
                else:
                    theta0 = theta - dtheta
                    theta1 = theta + dtheta
                    # draw arc
                    plot_new_stuff = imagesk.draw_arc(target_pred[i].center.x,
                                                      target_pred[i].center.y,
                                                      scaleunit/2,
                                                      theta0,theta1,
                                                      anglecolor)
                    plot_linesegs.extend(plot_new_stuff)
    # NOTE(review): if img_chosen matches none of the branches above,
    # plot_linesegs is unbound and the next call raises -- confirm the
    # selector can only take the values handled here.
    # convert to displayable format and apply the current zoom window
    im = imagesk.double2mono8(im,donormalize=False)
    linesegs,im = imagesk.zoom_linesegs_and_image(plot_linesegs,im,self.zoomaxes)
    (linesegs,linecolors) = imagesk.separate_linesegs_colors(linesegs)
    self.img_wind.update_image_and_drawings('trackset',
                                            im,
                                            format='MONO8',
                                            linesegs=linesegs,
                                            lineseg_colors=linecolors
                                            )
    self.img_wind.Refresh(eraseBackground=False)
    self.frame_number_text.SetLabel('Frame %d'%self.show_frame)
    # refresh the info readout for the currently selected observation
    if self.info == True:
        self.ShowInfo( None, None )
    wx.EndBusyCursor()
    if DEBUG_TRACKINGSETTINGS: sys.stdout.flush()
def ImageChosen(self, evt):
    """Switch to the visualization the user just selected and redraw."""
    selection = self.show_img_input.GetSelection()
    self.img_chosen = selection
    self.ShowImage()
def OnResize(self, evt=None):
    """Re-layout the frame and stretch the scrollbar to the panel width."""
    if evt is not None:
        evt.Skip()
    self.frame.Layout()
    try:
        panel_width = self.img_panel.GetRect().GetWidth()
        bar_height = self.frame_scrollbar.GetRect().GetHeight()
        self.frame_scrollbar.SetMinSize(wx.Size(panel_width, bar_height))
    except AttributeError:
        # some widgets may not exist yet during initialization
        pass
def ZoominToggle(self, evt):
    """Toggle zoom-in mode; it is mutually exclusive with zoom-out and info."""
    self.zoomin = not self.zoomin
    if self.zoomin and self.zoomout:
        self.toolbar.ToggleTool(self.zoomout_id, False)
        self.zoomout = False
    if self.zoomin and self.info:
        self.toolbar.ToggleTool(self.info_id, False)
        self.info = False
def ZoomoutToggle(self, evt):
    """Toggle zoom-out mode; it is mutually exclusive with zoom-in and info."""
    self.zoomout = not self.zoomout
    if self.zoomout and self.zoomin:
        self.toolbar.ToggleTool(self.zoomin_id, False)
        self.zoomin = False
    if self.zoomout and self.info:
        self.toolbar.ToggleTool(self.info_id, False)
        self.info = False
def InfoToggle(self, evt):
    """Toggle info mode; it is mutually exclusive with zoom-in and zoom-out."""
    self.info = not self.info
    if self.info and self.zoomin:
        self.toolbar.ToggleTool(self.zoomin_id, False)
        self.zoomin = False
    if self.info and self.zoomout:
        self.toolbar.ToggleTool(self.zoomout_id, False)
        self.zoomout = False
def MouseDoubleClick(self, evt):
    """A double-click while in zoom-out mode resets the view to full size."""
    if self.zoomout:
        self.zoomfactor = 1
        self.SetZoomAxes()
def MouseClick(self, evt):
    """Dispatch a click to zoom-in, zoom-out, or info, depending on mode."""
    # Convert the window click into image coordinates, flipping y
    # (screen origin is top-left, image origin bottom-left) and
    # offsetting by the current zoom window.
    scale = self.img_wind.get_resize()
    view_height = self.zoomaxes[3] - self.zoomaxes[2] + 1
    x = evt.GetX() / scale + self.zoomaxes[0]
    y = view_height - evt.GetY() / scale + self.zoomaxes[2]
    # ignore clicks that fall outside the image
    if (x > self.img_size[1]) or (y > self.img_size[0]):
        return
    if self.zoomin:
        self.ZoomIn(x, y)
    elif self.zoomout:
        self.ZoomOut()
    elif self.info:
        self.ShowInfo(x, y)
def ZoomIn(self, x, y):
    """Zoom in one step (x1.5), centering the view near (x, y)."""
    self.zoomfactor *= 1.5
    self.zoompoint = [x, y]
    self.SetZoomAxes()
def SetZoomAxes(self):
    """Recompute the visible axes [x1, x2, y1, y2] from zoompoint/zoomfactor."""
    cx, cy = self.zoompoint
    frame_w = params.params.movie_size[1]
    frame_h = params.params.movie_size[0]
    # size of the zoomed window
    win_h = frame_h / self.zoomfactor
    win_w = frame_w / self.zoomfactor
    x1 = cx - win_w / 2
    x2 = cx + win_w / 2
    y1 = cy - win_h / 2
    y2 = cy + win_h / 2
    # shift the window back inside the frame if it spills over an edge
    if x1 < 0:
        x2 -= x1
        x1 = 0
    elif x2 > frame_w - 1:
        x1 -= (x2 - frame_w + 1)
        x2 = frame_w - 1
    if y1 < 0:
        y2 -= y1
        y1 = 0
    elif y2 > frame_h - 1:
        y1 -= (y2 - frame_h + 1)
        y2 = frame_h - 1
    # final clamp, in integer pixel coordinates
    x1 = num.maximum(int(x1), 0)
    x2 = num.minimum(int(x2), frame_w - 1)
    y1 = num.maximum(int(y1), 0)
    y2 = num.minimum(int(y2), frame_h - 1)
    self.zoomaxes = [x1, x2, y1, y2]
    self.ShowImage()
def ZoomOut(self):
    """Zoom out one step; do nothing when already at full view."""
    if self.zoomfactor <= 1:
        return
    self.zoomfactor /= 1.5
    self.SetZoomAxes()
def ShowInfo(self, x, y):
    """Show details of the observation nearest to image point (x, y).

    If x or y is None, falls back to the previously selected observation
    (self.info_mini) when it is still a valid index; otherwise clears
    the info readout.
    """
    # grab targets from whichever observation set is being displayed
    if (self.img_chosen == SHOW_MAXJUMP) or (self.img_chosen == SHOW_FILTERED_OBSERVATIONS):
        obs = self.obs_filtered.obs
    else:
        obs = self.obs_unfiltered.obs
    if (x is None or y is None):
        if hasattr( self, 'info_mini' ) and self.info_mini < len( obs ):
            mini = self.info_mini
            x = obs[mini].center.x
            y = obs[mini].center.y
        else:
            self.ShowObsInfo( None )
            return
    # determine closest target (squared Euclidean distance)
    mind = num.inf
    for i,v in enumerate(obs):
        d = (v.center.x-x)**2 + (v.center.y-y)**2
        if d <= mind:
            mini = i
            mind = d
    # Maximum squared click distance that counts as a hit, scaled to the
    # current zoom window.  BUG FIX: the view height is
    # zoomaxes[3]-zoomaxes[2] (zoomaxes is [x1, x2, y1, y2]); the old
    # code mixed axes and used zoomaxes[2]-zoomaxes[1].
    maxdshowinfo = (num.maximum(self.zoomaxes[1]-self.zoomaxes[0],
                                self.zoomaxes[3]-self.zoomaxes[2])/params.params.MAXDSHOWINFO)**2
    if mind < maxdshowinfo:
        self.ShowObsInfo(obs[mini])
        self.info_mini = mini
    else:
        self.ShowObsInfo( None )
def ShowObsInfo(self, ellipse):
    """Output text describing currently selected ellipse."""
    if ellipse is None:
        self.info_text.SetValue( "" )
        return
    summary = 'area=%.2f, maj=%.2f, min=%.2f, ecc=%.2f'%(ellipse.area(),ellipse.major,ellipse.minor,ellipse.eccentricity())
    self.info_text.SetValue(summary)
def GetObsFiltered(self):
    """Return filtered ellipse observations for the current frame, cached."""
    cached = hasattr(self, 'obs_filtered') and \
             self.obs_filtered.issame(self.show_frame)
    if cached:
        if DEBUG_TRACKINGSETTINGS: print('filtered observations already computed')
    else:
        if DEBUG_TRACKINGSETTINGS: print('computing filtered observations')
        found = ell.find_ellipses(self.bg_imgs.dfore.copy(),
                                  self.bg_imgs.cc.copy(),
                                  self.bg_imgs.ncc, True)
        self.obs_filtered = StoredObservations(found, self.show_frame)
    if DEBUG_TRACKINGSETTINGS: print('obs_filtered:\n' + str(self.obs_filtered))
    return self.obs_filtered.obs
def GetObsUnfiltered(self,*args):
    """Return unfiltered ellipse observations for the current frame.

    With no extra arguments, returns just the observation list.  Each
    extra argument names a diagnostic attribute of StoredObservations
    (e.g. 'diddelete', 'didsplit', 'didmerge', 'didlowerthresh'); the
    return value is then a tuple of the observation list followed by
    those attributes, recomputing with return_vals=True when any of
    them is missing from the cache.
    """
    # do we need to recompute?
    mustcompute = False
    if hasattr(self,'obs_unfiltered') and self.obs_unfiltered.issame(self.show_frame):
        # cached for this frame; recompute only if a requested
        # diagnostic attribute was not stored
        for arg in args:
            if self.obs_unfiltered.__dict__[arg] is None:
                mustcompute = True
                break
    else:
        mustcompute = True
    if DEBUG_TRACKINGSETTINGS: print 'mustcompute = ' + str(mustcompute)
    if DEBUG_TRACKINGSETTINGS and not mustcompute:
        print 'stored obs_unfiltered = ' + str(self.obs_unfiltered)
    # if we are only interested in the unfiltered observation
    if len(args) == 0:
        # if it has not yet been computed for this frame, compute
        if mustcompute:
            obs_unfiltered = ell.find_ellipses(self.bg_imgs.dfore.copy(),self.bg_imgs.cc.copy(),self.bg_imgs.ncc,False)
            self.obs_unfiltered = StoredObservations(obs_unfiltered,self.show_frame)
        return self.obs_unfiltered.obs
    # compute if necessary
    if mustcompute:
        wx.BeginBusyCursor()
        wx.YieldIfNeeded()
        print "findellipsesdisplay"
        # return_vals=True also reports which blobs were recovered by
        # lowering the threshold, merged, deleted, or split
        (obs_unfiltered,
         ellsmall,
         elllarge,
         didlowerthresh,
         didmerge,
         diddelete,
         didsplit) = ell.find_ellipses(self.bg_imgs.dfore.copy(),
                                       self.bg_imgs.cc.copy(),
                                       self.bg_imgs.ncc,
                                       return_vals=True)
        if DEBUG_TRACKINGSETTINGS: print 'computed obs_unfiltered = ' + str(obs_unfiltered) + ', len = ' + str(len(obs_unfiltered))
        if DEBUG_TRACKINGSETTINGS: print 'ellsmall = ' + str(ellsmall)
        if DEBUG_TRACKINGSETTINGS: print 'elllarge = ' + str(elllarge)
        if DEBUG_TRACKINGSETTINGS: print 'didlowerthresh = ' + str(didlowerthresh)
        if DEBUG_TRACKINGSETTINGS: print 'didmerge = ' + str(didmerge)
        if DEBUG_TRACKINGSETTINGS: print 'diddelete = ' + str(diddelete)
        if DEBUG_TRACKINGSETTINGS: print 'didsplit = ' + str(didsplit)
        wx.EndBusyCursor()
        # cache everything so later calls can pull any diagnostic set
        self.obs_unfiltered = StoredObservations(obs_unfiltered,self.show_frame,
                                                 ellsmall,elllarge,didlowerthresh,
                                                 didmerge,diddelete,didsplit)
        if DEBUG_TRACKINGSETTINGS: print 'stored obs_unfiltered: '
        if DEBUG_TRACKINGSETTINGS: print str(self.obs_unfiltered)
    # create return list
    ret = (self.obs_unfiltered.obs,)
    for arg in args:
        ret += (self.obs_unfiltered.__dict__[arg],)
    return ret
def GetBgImage(self):
    """Run background subtraction for the current frame unless cached."""
    if self.bg_img_frame != self.show_frame:
        results = self.bg_imgs.sub_bg(self.show_frame)
        (self.bg_imgs.dfore, self.bg_imgs.bw,
         self.bg_imgs.cc, self.bg_imgs.ncc) = results
        # remember which frame these images correspond to
        self.bg_img_frame = self.show_frame
def GetObsPrev(self):
    """Return filtered observations for the previous frame, caching them.

    BUG FIX: the cache test used to inspect self.obs_filtered instead of
    self.obs_prev, and the recomputed result was tagged with the current
    frame rather than the previous one, so the cache never validated
    (e.g. after FrameScrollbarMoved saved the prior frame's filtered
    observations into obs_prev).
    """
    if not (hasattr(self, 'obs_prev')
            and self.obs_prev.issame(self.show_frame - 1)):
        wx.BeginBusyCursor()
        wx.Yield()
        # clamp so frame 0 uses itself as its own "previous" frame
        prevframe = num.maximum(0, self.show_frame - 1)
        (dfore, bw, cc, ncc) = self.bg_imgs.sub_bg(prevframe)
        obs_filtered = ell.find_ellipses(dfore, cc, ncc, True)
        wx.EndBusyCursor()
        # tag the cache with the frame that was actually computed
        self.obs_prev = StoredObservations(obs_filtered, prevframe)
    return self.obs_prev.obs
def GetTargetMotion(self):
    """Compute (previous, current, predicted) target positions.

    Matches the previous frame's filtered observations to the current
    frame's, drops targets not present in both frames, and predicts
    the next positions via cvpred.

    Returns:
        (target_prev, target_curr, target_pred) tuple of ell.TargetList
        / keyed collections indexed by target identity.
    """
    # get current positions
    obs_curr = self.GetObsFiltered()
    # get previous positions
    obs_prev = self.GetObsPrev()
    # give identities to previous positions
    target_prev = ell.TargetList()
    for i,obs in enumerate(obs_prev):
        obs.identity = i
        target_prev.append(obs)
    # match previous and current targets, no velocity
    # (save the global id counter so matching cannot permanently
    # allocate new identities)
    oldnids = params.params.nids
    target_curr = ell.find_flies(target_prev,target_prev,obs_curr)
    # don't actually assign new identities
    params.params.nids = oldnids
    # delete targets that aren't in both frames
    keyscurr = set(target_curr.keys())
    keysprev = set(target_prev.keys())
    keysremove = keyscurr - keysprev
    for i in keysremove:
        tmp = target_curr.pop(i)
    keysremove = keysprev - keyscurr
    for i in keysremove:
        tmp = target_prev.pop(i)
    # compute predicted positions
    target_pred = cvpred(target_prev,target_curr)
    # store
    targetmotion = (target_prev,target_curr,target_pred)
    # return
    return targetmotion
| 43.21298 | 146 | 0.628683 |
c2687c842573c8e780aca2c0602919b9514fe940 | 4,837 | py | Python | test/functional/mempool_reorg.py | daface45/cerebralcoin | 0ea3caf2b22113c31c8fd3672f9dc6fa092ffd29 | [
"MIT"
] | 1 | 2021-10-07T01:18:40.000Z | 2021-10-07T01:18:40.000Z | test/functional/mempool_reorg.py | daface45/cerebralcoin | 0ea3caf2b22113c31c8fd3672f9dc6fa092ffd29 | [
"MIT"
] | null | null | null | test/functional/mempool_reorg.py | daface45/cerebralcoin | 0ea3caf2b22113c31c8fd3672f9dc6fa092ffd29 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Cerebralcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool re-org scenarios.
Test re-org scenarios with a mempool that contains transactions
that spend (directly or indirectly) coinbase transactions.
"""
from test_framework.blocktools import create_raw_transaction
from test_framework.test_framework import CerebralcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
class MempoolCoinbaseTest(CerebralcoinTestFramework):
    """Exercise mempool behaviour across re-orgs that invalidate coinbase spends."""
    def set_test_params(self):
        # Two nodes: node 0 broadcasts spends, node 1 mines.
        self.num_nodes = 2
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    # NOTE(review): no setup_network override exists in this file; this
    # class attribute looks vestigial -- confirm before relying on it.
    alert_filename = None  # Set by setup_network
    def run_test(self):
        """Run the three coinbase-spend re-org scenarios plus a timelock check."""
        # Start with a 200 block chain
        assert_equal(self.nodes[0].getblockcount(), 200)
        # Mine four blocks. After this, nodes[0] blocks
        # 101, 102, and 103 are spend-able.
        new_blocks = self.nodes[1].generate(4)
        self.sync_all()
        node0_address = self.nodes[0].getnewaddress()
        node1_address = self.nodes[1].getnewaddress()
        # Three scenarios for re-orging coinbase spends in the memory pool:
        # 1. Direct coinbase spend  :  spend_101
        # 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1
        # 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
        # Use invalidatblock to make all of the above coinbase spends invalid (immature coinbase),
        # and make sure the mempool code behaves correctly.
        b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
        coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
        spend_101_raw = create_raw_transaction(self.nodes[0], coinbase_txids[1], node1_address, amount=49.99)
        spend_102_raw = create_raw_transaction(self.nodes[0], coinbase_txids[2], node0_address, amount=49.99)
        spend_103_raw = create_raw_transaction(self.nodes[0], coinbase_txids[3], node0_address, amount=49.99)
        # Create a transaction which is time-locked to two blocks in the future
        timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99})
        # Set the time lock
        # (rewrite the input's nSequence so the locktime is enforced, then
        # splice a little-endian locktime of current height + 2 into the
        # last four bytes of the raw transaction)
        timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1)
        timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
        timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)["hex"]
        # This will raise an exception because the timelock transaction is too immature to spend
        assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
        # Broadcast and mine spend_102 and 103:
        spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
        spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
        self.nodes[0].generate(1)
        # Time-locked transaction is still too immature to spend
        assert_raises_rpc_error(-26, 'non-final', self.nodes[0].sendrawtransaction, timelock_tx)
        # Create 102_1 and 103_1:
        spend_102_1_raw = create_raw_transaction(self.nodes[0], spend_102_id, node1_address, amount=49.98)
        spend_103_1_raw = create_raw_transaction(self.nodes[0], spend_103_id, node1_address, amount=49.98)
        # Broadcast and mine 103_1:
        spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
        last_block = self.nodes[0].generate(1)
        # Time-locked transaction can now be spent
        timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
        # ... now put spend_101 and spend_102_1 in memory pools:
        spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
        spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
        self.sync_all()
        assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, timelock_tx_id})
        for node in self.nodes:
            node.invalidateblock(last_block[0])
        # Time-locked transaction is now too immature and has been removed from the mempool
        # spend_103_1 has been re-orged out of the chain and is back in the mempool
        assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, spend_103_1_id})
        # Use invalidateblock to re-org back and make all those coinbase spends
        # immature/invalid:
        for node in self.nodes:
            node.invalidateblock(new_blocks[0])
        self.sync_all()
        # mempool should be empty.
        assert_equal(set(self.nodes[0].getrawmempool()), set())
if __name__ == '__main__':
    # Run the mempool re-org scenarios when executed as a script.
    test_runner = MempoolCoinbaseTest()
    test_runner.main()
| 47.891089 | 122 | 0.704155 |
8a9bff77db967acf8a1eceba26397dfcade23476 | 896 | py | Python | download_assets.py | nikitablack/vulkan | cac1d2e850bb0ac5d4d6fa6949faabdc57b82c21 | [
"MIT"
] | 11 | 2019-06-27T19:29:09.000Z | 2021-03-03T18:24:49.000Z | download_assets.py | nikitablack/vulkan | cac1d2e850bb0ac5d4d6fa6949faabdc57b82c21 | [
"MIT"
] | 1 | 2020-02-16T03:51:47.000Z | 2020-02-16T03:51:47.000Z | download_assets.py | nikitablack/vulkan | cac1d2e850bb0ac5d4d6fa6949faabdc57b82c21 | [
"MIT"
] | 3 | 2019-08-21T14:04:02.000Z | 2020-11-12T07:27:42.000Z | #!/usr/bin/env python3
import sys
from urllib.request import urlretrieve
from zipfile import ZipFile
ASSET_PACK_URL = 'http://vulkan.gpuinfo.org/downloads/vulkan_asset_pack.zip'
ASSET_PACK_FILE_NAME = 'vulkan_asset_pack.zip'
print("Downloading asset pack from '%s'" % ASSET_PACK_URL)
def reporthook(blocknum, blocksize, totalsize):
bytesread = blocknum * blocksize
if totalsize > 0:
percent = bytesread * 1e2 / totalsize
s = "\r%5.1f%% (%*d / %d bytes)" % (percent, len(str(totalsize)), bytesread, totalsize)
sys.stderr.write(s)
if bytesread >= totalsize:
sys.stderr.write("\n")
else:
sys.stderr.write("read %d\n" % (bytesread,))
urlretrieve(ASSET_PACK_URL, ASSET_PACK_FILE_NAME, reporthook)
print("Download finished")
print("Extracting assets")
zip = ZipFile(ASSET_PACK_FILE_NAME, 'r')
zip.extractall("./")
zip.close()
| 28 | 95 | 0.690848 |
2a3d9131c07795a539b35be2070754add94f6692 | 15,520 | py | Python | python/pyarrow/__init__.py | cool-RR/arrow | e78aa4c986e6f0b58d857b55997955ba3dc10b02 | [
"Apache-2.0"
] | 1 | 2020-06-06T16:16:14.000Z | 2020-06-06T16:16:14.000Z | python/pyarrow/__init__.py | ysriram/arrow | 2bff6138b5e97b5e94ebf6fe6a5a9f61ba2c80a0 | [
"Apache-2.0"
] | null | null | null | python/pyarrow/__init__.py | ysriram/arrow | 2bff6138b5e97b5e94ebf6fe6a5a9f61ba2c80a0 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# flake8: noqa
"""
PyArrow is the python implementation of Apache Arrow.
Apache Arrow is a cross-language development platform for in-memory data.
It specifies a standardized language-independent columnar memory format for
flat and hierarchical data, organized for efficient analytic operations on
modern hardware. It also provides computational libraries and zero-copy
streaming messaging and interprocess communication.
For more information see the official page at https://arrow.apache.org
"""
import gc as _gc
import os as _os
import sys as _sys
# Resolve __version__: prefer the version module generated at build time;
# otherwise derive it from the git tag via setuptools_scm; fall back to
# None when neither source is available.
try:
    from ._generated_version import version as __version__
except ImportError:
    # Package is not installed, parse git tag at runtime
    try:
        import setuptools_scm
        # Code duplicated from setup.py to avoid a dependency on each other
        def parse_git(root, **kwargs):
            """
            Parse function for setuptools_scm that ignores tags for non-C++
            subprojects, e.g. apache-arrow-js-XXX tags.
            """
            from setuptools_scm.git import parse
            kwargs['describe_command'] = \
                "git describe --dirty --tags --long --match 'apache-arrow-[0-9].*'"
            return parse(root, **kwargs)
        __version__ = setuptools_scm.get_version('../',
                                                 parse=parse_git)
    except ImportError:
        # setuptools_scm not available either
        __version__ = None
import pyarrow.compat as compat
# ARROW-8684: Disable GC while initializing Cython extension module,
# to workaround Cython bug in https://github.com/cython/cython/issues/3603
_gc_enabled = _gc.isenabled()
_gc.disable()
try:
    import pyarrow.lib as _lib
finally:
    # Re-enable GC even if the extension import raises, so a caller that
    # catches the ImportError is not left with garbage collection disabled.
    if _gc_enabled:
        _gc.enable()
from pyarrow.lib import cpu_count, set_cpu_count
from pyarrow.lib import (null, bool_,
int8, int16, int32, int64,
uint8, uint16, uint32, uint64,
time32, time64, timestamp, date32, date64, duration,
float16, float32, float64,
binary, string, utf8,
large_binary, large_string, large_utf8,
decimal128,
list_, large_list, map_, struct, union, dictionary,
field,
type_for_alias,
DataType, DictionaryType, StructType,
ListType, LargeListType, MapType, FixedSizeListType,
UnionType,
TimestampType, Time32Type, Time64Type, DurationType,
FixedSizeBinaryType, Decimal128Type,
BaseExtensionType, ExtensionType,
PyExtensionType, UnknownExtensionType,
register_extension_type, unregister_extension_type,
DictionaryMemo,
KeyValueMetadata,
Field,
Schema,
schema,
unify_schemas,
Array, Tensor,
array, chunked_array, record_batch, table,
SparseCOOTensor, SparseCSRMatrix, SparseCSCMatrix,
SparseCSFTensor,
infer_type, from_numpy_dtype,
NullArray,
NumericArray, IntegerArray, FloatingPointArray,
BooleanArray,
Int8Array, UInt8Array,
Int16Array, UInt16Array,
Int32Array, UInt32Array,
Int64Array, UInt64Array,
ListArray, LargeListArray, MapArray,
FixedSizeListArray, UnionArray,
BinaryArray, StringArray,
LargeBinaryArray, LargeStringArray,
FixedSizeBinaryArray,
DictionaryArray,
Date32Array, Date64Array, TimestampArray,
Time32Array, Time64Array, DurationArray,
Decimal128Array, StructArray, ExtensionArray,
ArrayValue, Scalar, NA, _NULL as NULL,
BooleanValue,
Int8Value, Int16Value, Int32Value, Int64Value,
UInt8Value, UInt16Value, UInt32Value, UInt64Value,
HalfFloatValue, FloatValue, DoubleValue,
ListValue, LargeListValue, MapValue, FixedSizeListValue,
BinaryValue, StringValue,
LargeBinaryValue, LargeStringValue,
FixedSizeBinaryValue,
DecimalValue, UnionValue, StructValue, DictionaryValue,
Date32Value, Date64Value,
Time32Value, Time64Value,
TimestampValue, DurationValue)
# Buffers, allocation
from pyarrow.lib import (Buffer, ResizableBuffer, foreign_buffer, py_buffer,
Codec, compress, decompress, allocate_buffer)
from pyarrow.lib import (MemoryPool, LoggingMemoryPool, ProxyMemoryPool,
total_allocated_bytes, set_memory_pool,
default_memory_pool, logging_memory_pool,
proxy_memory_pool, log_memory_allocations,
jemalloc_set_decay_ms)
# I/O
from pyarrow.lib import (HdfsFile, NativeFile, PythonFile,
BufferedInputStream, BufferedOutputStream,
CompressedInputStream, CompressedOutputStream,
FixedSizeBufferWriter,
BufferReader, BufferOutputStream,
OSFile, MemoryMappedFile, memory_map,
create_memory_map, have_libhdfs,
MockOutputStream, input_stream, output_stream)
from pyarrow.lib import (ChunkedArray, RecordBatch, Table,
concat_arrays, concat_tables)
# Exceptions
from pyarrow.lib import (ArrowException,
ArrowKeyError,
ArrowInvalid,
ArrowIOError,
ArrowMemoryError,
ArrowNotImplementedError,
ArrowTypeError,
ArrowSerializationError)
# Serialization
from pyarrow.lib import (deserialize_from, deserialize,
deserialize_components,
serialize, serialize_to, read_serialized,
SerializedPyObject, SerializationContext,
SerializationCallbackError,
DeserializationCallbackError)
from pyarrow.filesystem import FileSystem, LocalFileSystem
from pyarrow.hdfs import HadoopFileSystem
import pyarrow.hdfs as hdfs
from pyarrow.ipc import serialize_pandas, deserialize_pandas
import pyarrow.ipc as ipc
# Module-level singleton for the local filesystem, exposed as pyarrow.localfs.
localfs = LocalFileSystem.get_instance()
from pyarrow.serialization import (default_serialization_context,
register_default_serialization_handlers,
register_torch_serialization_handlers)
import pyarrow.types as types
# Entry point for starting the plasma store
def _plasma_store_entry_point():
"""Entry point for starting the plasma store.
This can be used by invoking e.g.
``plasma_store -s /tmp/plasma -m 1000000000``
from the command line and will start the plasma_store executable with the
given arguments.
"""
import pyarrow
plasma_store_executable = _os.path.join(pyarrow.__path__[0],
"plasma-store-server")
_os.execv(plasma_store_executable, _sys.argv)
# ----------------------------------------------------------------------
# Deprecations
# Each of the following names now lives in the ``pyarrow.ipc`` namespace;
# the wrappers below emit a deprecation warning (since 0.17.0) and forward
# to the ipc implementation.

from pyarrow.util import _deprecate_api # noqa

read_message = _deprecate_api("read_message", "ipc.read_message",
                              ipc.read_message, "0.17.0")

read_record_batch = _deprecate_api("read_record_batch",
                                   "ipc.read_record_batch",
                                   ipc.read_record_batch, "0.17.0")

read_schema = _deprecate_api("read_schema", "ipc.read_schema",
                             ipc.read_schema, "0.17.0")

read_tensor = _deprecate_api("read_tensor", "ipc.read_tensor",
                             ipc.read_tensor, "0.17.0")

write_tensor = _deprecate_api("write_tensor", "ipc.write_tensor",
                              ipc.write_tensor, "0.17.0")

get_record_batch_size = _deprecate_api("get_record_batch_size",
                                       "ipc.get_record_batch_size",
                                       ipc.get_record_batch_size, "0.17.0")

get_tensor_size = _deprecate_api("get_tensor_size",
                                 "ipc.get_tensor_size",
                                 ipc.get_tensor_size, "0.17.0")

open_stream = _deprecate_api("open_stream", "ipc.open_stream",
                             ipc.open_stream, "0.17.0")

open_file = _deprecate_api("open_file", "ipc.open_file", ipc.open_file,
                           "0.17.0")
# TODO: Deprecate these somehow in the pyarrow namespace
from pyarrow.ipc import (Message, MessageReader,
RecordBatchFileReader, RecordBatchFileWriter,
RecordBatchStreamReader, RecordBatchStreamWriter)
# ----------------------------------------------------------------------
# Returning absolute path to the pyarrow include directory (if bundled, e.g. in
# wheels)
def get_include():
    """
    Return absolute path to directory containing Arrow C++ include
    headers. Similar to numpy.get_include
    """
    package_dir = _os.path.dirname(__file__)
    return _os.path.join(package_dir, 'include')
def _get_pkg_config_executable():
return _os.environ.get('PKG_CONFIG', 'pkg-config')
def _has_pkg_config(pkgname):
    """Return True when pkg-config reports that ``pkgname`` is installed."""
    import subprocess
    cmd = [_get_pkg_config_executable(), '--exists', pkgname]
    try:
        return subprocess.call(cmd) == 0
    except FileNotFoundError:
        # pkg-config itself is missing from the system
        return False
def _read_pkg_config_variable(pkgname, cli_args):
    """Run pkg-config for ``pkgname`` with ``cli_args``; return its stdout.

    Raises RuntimeError when pkg-config exits with a non-zero status.
    """
    import subprocess
    cmd = [_get_pkg_config_executable(), pkgname] + cli_args
    proc = subprocess.run(cmd, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
    if proc.returncode != 0:
        raise RuntimeError("pkg-config failed: " + proc.stderr.decode('utf8'))
    return proc.stdout.rstrip().decode('utf8')
def get_libraries():
    """
    Return list of library names to include in the `libraries` argument for C
    or Cython extensions using pyarrow
    """
    libraries = ['arrow', 'arrow_python']
    return libraries
def create_library_symlinks():
    """
    With Linux and macOS wheels, the bundled shared libraries have an embedded
    ABI version like libarrow.so.17 or libarrow.17.dylib and so linking to them
    with -larrow won't work unless we create symlinks at locations like
    site-packages/pyarrow/libarrow.so. This unfortunate workaround addresses
    prior problems we had with shipping two copies of the shared libraries to
    permit third party projects like turbodbc to build their C++ extensions
    against the pyarrow wheels.

    This function must only be invoked once and only when the shared libraries
    are bundled with the Python package, which should only apply to wheel-based
    installs. It requires write access to the site-packages/pyarrow directory
    and so depending on your system may need to be run with root.
    """
    import glob
    if _sys.platform == 'win32':
        # Windows wheels link by import library; no symlinks needed.
        return
    package_cwd = _os.path.dirname(__file__)
    if _sys.platform == 'linux':
        bundled_libs = glob.glob(_os.path.join(package_cwd, '*.so.*'))

        def get_symlink_path(hard_path):
            # libarrow.so.17 -> libarrow.so
            return hard_path.rsplit('.', 1)[0]
    else:
        bundled_libs = glob.glob(_os.path.join(package_cwd, '*.*.dylib'))

        def get_symlink_path(hard_path):
            # libarrow.17.dylib -> libarrow.dylib
            return '.'.join((hard_path.split('.')[0], 'dylib'))
    for lib_hard_path in bundled_libs:
        symlink_path = get_symlink_path(lib_hard_path)
        if _os.path.exists(symlink_path):
            continue
        try:
            _os.symlink(lib_hard_path, symlink_path)
        except PermissionError:
            # BUG FIX: the original message never interpolated its "{}"
            # placeholder and pointed at a nonexistent function
            # (_setup_bundled_symlinks); direct users to this function.
            print("Tried creating symlink {}. If you need to link to "
                  "bundled shared libraries, run "
                  "pyarrow.create_library_symlinks() as root"
                  .format(symlink_path))
def get_library_dirs():
    """
    Return lists of directories likely to contain Arrow C++ libraries for
    linking C or Cython extensions using pyarrow
    """
    package_cwd = _os.path.dirname(__file__)
    library_dirs = [package_cwd]

    def append_library_dir(library_dir):
        # preserve search order while avoiding duplicates
        if library_dir not in library_dirs:
            library_dirs.append(library_dir)

    # Search library paths via pkg-config. This is necessary if the user
    # installed libarrow and the other shared libraries manually and they
    # are not shipped inside the pyarrow package (see also ARROW-2976).
    # The pkg-config executable is resolved inside _has_pkg_config /
    # _read_pkg_config_variable; a dead local duplicating that lookup
    # has been removed.
    for pkgname in ["arrow", "arrow_python"]:
        if _has_pkg_config(pkgname):
            library_dir = _read_pkg_config_variable(pkgname,
                                                    ["--libs-only-L"])
            # pkg-config output could be empty if Arrow is installed
            # as a system package.
            if library_dir:
                if not library_dir.startswith("-L"):
                    raise ValueError(
                        "pkg-config --libs-only-L returned unexpected "
                        "value {!r}".format(library_dir))
                append_library_dir(library_dir[2:])

    if _sys.platform == 'win32':
        # TODO(wesm): Is this necessary, or does setuptools within a conda
        # installation add Library\lib to the linker path for MSVC?
        python_base_install = _os.path.dirname(_sys.executable)
        library_dir = _os.path.join(python_base_install, 'Library', 'lib')
        if _os.path.exists(_os.path.join(library_dir, 'arrow.lib')):
            append_library_dir(library_dir)

    # ARROW-4074: Allow for ARROW_HOME to be set to some other directory
    if _os.environ.get('ARROW_HOME'):
        append_library_dir(_os.path.join(_os.environ['ARROW_HOME'], 'lib'))
    else:
        # Python wheels bundle the Arrow libraries in the pyarrow directory.
        append_library_dir(_os.path.dirname(_os.path.abspath(__file__)))

    return library_dirs
| 40.949868 | 83 | 0.605026 |
9bd58c4dd1d6cf103b6de7779b798cdb5b41f7bf | 7,249 | py | Python | accelbyte_py_sdk/api/iam/operations/users/get_publisher_user.py | encyphered/accelbyte-python-sdk | 09c1e989d7251de308150fdcd3119d662ca2d205 | [
"MIT"
] | null | null | null | accelbyte_py_sdk/api/iam/operations/users/get_publisher_user.py | encyphered/accelbyte-python-sdk | 09c1e989d7251de308150fdcd3119d662ca2d205 | [
"MIT"
] | null | null | null | accelbyte_py_sdk/api/iam/operations/users/get_publisher_user.py | encyphered/accelbyte-python-sdk | 09c1e989d7251de308150fdcd3119d662ca2d205 | [
"MIT"
] | null | null | null | # Auto-generated at 2021-09-27T17:01:24.686781+08:00
# from: Justice Iam Service (4.1.0)
# Copyright (c) 2018 - 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from .....core import Operation
from .....core import HttpResponse
from ...models import ModelGetPublisherUserResponse
from ...models import RestErrorResponse
# Auto-generated SDK operation: do not hand-edit logic; regeneration from the
# service spec will clobber changes. Comments below are review annotations.
class GetPublisherUser(Operation):
    """Get Publisher User (GetPublisherUser)
    Properties:
        url: /iam/namespaces/{namespace}/users/{userId}/publisher
        method: GET
        tags: Users
        consumes: ["application/json"]
        produces: ["application/json"]
        security: bearer
        namespace: (namespace) REQUIRED str in path
        user_id: (userId) REQUIRED str in path
    Responses:
        200: OK - ModelGetPublisherUserResponse (OK)
        400: Bad Request - RestErrorResponse (Error Code: 7239 - Error Message: wrong namespace: required game namespace)
        401: Unauthorized - (Unauthorized access)
        403: Forbidden - (Forbidden)
        404: Not Found - (Data not found)
    """
    # region fields
    _url: str = "/iam/namespaces/{namespace}/users/{userId}/publisher"
    _method: str = "GET"
    _consumes: List[str] = ["application/json"]
    _produces: List[str] = ["application/json"]
    _security: Optional[str] = "bearer"
    _location_query: str = None
    namespace: str # REQUIRED in [path]
    user_id: str # REQUIRED in [path]
    # endregion fields
    # region properties
    @property
    def url(self) -> str:
        return self._url
    @property
    def method(self) -> str:
        return self._method
    @property
    def consumes(self) -> List[str]:
        return self._consumes
    @property
    def produces(self) -> List[str]:
        return self._produces
    @property
    def security(self) -> Optional[str]:
        return self._security
    @property
    def location_query(self) -> str:
        return self._location_query
    # endregion properties
    # region get methods
    def get_full_url(self, base_url: Union[None, str] = None) -> str:
        # Build base_url + path with every {param} placeholder substituted.
        result = base_url if base_url is not None else ""
        # path params
        url = self.url
        for k, v in self.get_path_params().items():
            url = url.replace(f"{{{k}}}", v)
        result += url
        return result
    # noinspection PyMethodMayBeStatic
    def get_all_required_fields(self) -> List[str]:
        return [
            "namespace",
            "user_id",
        ]
    # endregion get methods
    # region get_x_params methods
    def get_all_params(self) -> dict:
        # This operation only carries path parameters (no query/body).
        return {
            "path": self.get_path_params(),
        }
    def get_path_params(self) -> dict:
        # Keys use the wire names (camelCase), not the Python attribute names.
        result = {}
        if hasattr(self, "namespace"):
            result["namespace"] = self.namespace
        if hasattr(self, "user_id"):
            result["userId"] = self.user_id
        return result
    # endregion get_x_params methods
    # region is/has methods
    def is_valid(self) -> bool:
        # Both path parameters are required; unset or None fails validation.
        if not hasattr(self, "namespace") or self.namespace is None:
            return False
        if not hasattr(self, "user_id") or self.user_id is None:
            return False
        return True
    # endregion is/has methods
    # region with_x methods
    def with_namespace(self, value: str) -> GetPublisherUser:
        self.namespace = value
        return self
    def with_user_id(self, value: str) -> GetPublisherUser:
        self.user_id = value
        return self
    # endregion with_x methods
    # region to methods
    def to_dict(self, include_empty: bool = False) -> dict:
        # NOTE(review): truthiness check means empty strings are serialized
        # only when include_empty=True — generated convention, keep as-is.
        result = {}
        if hasattr(self, "namespace") and self.namespace:
            result["namespace"] = str(self.namespace)
        elif include_empty:
            result["namespace"] = str()
        if hasattr(self, "user_id") and self.user_id:
            result["userId"] = str(self.user_id)
        elif include_empty:
            result["userId"] = str()
        return result
    # endregion to methods
    # region response methods
    # noinspection PyMethodMayBeStatic
    def parse_response(self, code: int, content_type: str, content: Any) -> Tuple[Union[None, ModelGetPublisherUserResponse], Union[None, HttpResponse, RestErrorResponse]]:
        """Parse the given response.
        200: OK - ModelGetPublisherUserResponse (OK)
        400: Bad Request - RestErrorResponse (Error Code: 7239 - Error Message: wrong namespace: required game namespace)
        401: Unauthorized - (Unauthorized access)
        403: Forbidden - (Forbidden)
        404: Not Found - (Data not found)
        """
        if code == 200:
            return ModelGetPublisherUserResponse.create_from_dict(content), None
        if code == 400:
            return None, RestErrorResponse.create_from_dict(content)
        if code == 401:
            return None, HttpResponse.create(code, "Unauthorized")
        if code == 403:
            return None, HttpResponse.create(code, "Forbidden")
        if code == 404:
            return None, HttpResponse.create(code, "Not Found")
        # Anything not documented above is mapped to a generic response.
        was_handled, undocumented_response = HttpResponse.try_create_undocumented_response(code, content)
        if was_handled:
            return None, undocumented_response
        return None, HttpResponse.create_unhandled_error()
    # endregion response methods
    # region static methods
    @classmethod
    def create(
        cls,
        namespace: str,
        user_id: str,
    ) -> GetPublisherUser:
        instance = cls()
        instance.namespace = namespace
        instance.user_id = user_id
        return instance
    @classmethod
    def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> GetPublisherUser:
        # Inverse of to_dict(): reads the wire (camelCase) key names.
        instance = cls()
        if "namespace" in dict_ and dict_["namespace"] is not None:
            instance.namespace = str(dict_["namespace"])
        elif include_empty:
            instance.namespace = str()
        if "userId" in dict_ and dict_["userId"] is not None:
            instance.user_id = str(dict_["userId"])
        elif include_empty:
            instance.user_id = str()
        return instance
    @staticmethod
    def get_field_info() -> Dict[str, str]:
        # Maps wire field names to Python attribute names.
        return {
            "namespace": "namespace",
            "userId": "user_id",
        }
    # endregion static methods
| 28.880478 | 172 | 0.620499 |
940ff0a9407c9528e1e47319ca65c67b88467888 | 1,142 | py | Python | config/views.py | jlillest/geodjango-tigerleaflet-example | f1b6ba178193c235f9740fece578deb09b149d98 | [
"MIT"
] | 1 | 2016-12-25T15:32:50.000Z | 2016-12-25T15:32:50.000Z | config/views.py | jlillest/geodjango-tigerleaflet-example | f1b6ba178193c235f9740fece578deb09b149d98 | [
"MIT"
] | null | null | null | config/views.py | jlillest/geodjango-tigerleaflet-example | f1b6ba178193c235f9740fece578deb09b149d98 | [
"MIT"
] | null | null | null | from django.views.generic import TemplateView
from tigerleaflet.models import State
class Index(TemplateView):
    """Landing page showing the country-level tigerleaflet map."""

    template_name = "pages/country.html"

    def get_context_data(self, **kwargs):
        context = {'title': "Welcome to the tigerleaflet demo!"}
        return context
class StateView(TemplateView):
    """Map page for a single US state, addressed by its USPS code."""

    template_name = "pages/state.html"

    def get_context_data(self, **kwargs):
        code = self.kwargs['state']
        # Look up the display name from the tigerleaflet State table.
        state_name = State.objects.get(usps_code=code.upper()).name
        return {
            'title': "Showing " + state_name,
            'state': code,
        }
class CountyView(TemplateView):
    """Map page for a single county within a state."""

    template_name = "pages/county.html"

    def get_context_data(self, **kwargs):
        code = self.kwargs['state']
        county_slug = self.kwargs['county']
        state_name = State.objects.get(usps_code=code.upper()).name
        # URL slugs use underscores; convert to a title-cased display name.
        county_name = county_slug.replace('_', ' ').title()
        return {
            'title': county_name + ", " + state_name,
            'state': code,
            'county': county_slug,
        }
| 33.588235 | 73 | 0.609457 |
1c7853e01dee23841d010c8878e7a6c8a668d467 | 4,575 | py | Python | qa/rpc-tests/multi_rpc.py | r3vcoin-project/r3vcoin | 6f72edb47bae7011a08c9e2621c3955d3bb7ed26 | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | r3vcoin-project/r3vcoin | 6f72edb47bae7011a08c9e2621c3955d3bb7ed26 | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | r3vcoin-project/r3vcoin | 6f72edb47bae7011a08c9e2621c3955d3bb7ed26 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test multiple rpc user config option rpcauth
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import str_to_b64str, assert_equal
import os
import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
    """Check that credentials configured via `rpcauth` are honored."""

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = False
        self.num_nodes = 1

    def setup_chain(self):
        super().setup_chain()
        # Append rpcauth entries (generated with share/rpcuser) to the node
        # config before initialization so the node accepts both users.
        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
        rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
        with open(os.path.join(self.options.tmpdir+"/node0", "r3vcoin.conf"), 'a', encoding='utf8') as f:
            f.write(rpcauth+"\n")
            f.write(rpcauth2+"\n")

    def setup_network(self):
        self.nodes = self.setup_nodes()

    def _request_is_unauthorized(self, url, authpair):
        """POST a getbestblockhash RPC using HTTP basic `authpair`.

        Returns True when the node rejects the credentials (HTTP 401).
        """
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        resp = conn.getresponse()
        unauthorized = resp.status == 401
        conn.close()
        return unauthorized

    def run_test(self):
        ##################################################
        # Check correctness of the rpcauth config option #
        ##################################################
        url = urllib.parse.urlparse(self.nodes[0].url)

        # Original credentials from the node's own configuration.
        authpair = url.username + ':' + url.password
        # Plaintext passwords matching the rpcauth hashes in setup_chain.
        password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
        password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="

        # (authpair, expect_unauthorized) cases — replaces six copy-pasted
        # connection blocks with a single data-driven loop.
        cases = [
            (authpair, False),                     # original credentials
            ("rt:" + password, False),             # first rpcauth user
            ("rtwrong:" + password, True),         # wrong user name
            ("rt:" + password + "wrong", True),    # wrong password for rt
            ("rt2:" + password2, False),           # second rpcauth user
            ("rt2:" + password2 + "wrong", True),  # wrong password for rt2
        ]
        for pair, expect_unauthorized in cases:
            assert_equal(self._request_is_unauthorized(url, pair),
                         expect_unauthorized)
if __name__ == '__main__':
    # Entry point: run the functional test through the framework harness.
    HTTPBasicsTest().main()
| 37.809917 | 129 | 0.645246 |
24b6e9d5f861960514e136f950d7b77a4c2dd0f7 | 13,523 | py | Python | {{cookiecutter.repo_name}}/utilities.py | Bhaskers-Blu-Org2/deep_bait | 8caa5a7b0472be09d8bea700f4c031273682d375 | [
"MIT"
] | 14 | 2017-12-03T15:59:28.000Z | 2019-04-17T12:55:00.000Z | {{cookiecutter.repo_name}}/utilities.py | microsoft/deep_bait | 8caa5a7b0472be09d8bea700f4c031273682d375 | [
"MIT"
] | 6 | 2017-11-29T09:33:59.000Z | 2017-12-05T01:11:17.000Z | {{cookiecutter.repo_name}}/utilities.py | Microsoft/deep_bait | 8caa5a7b0472be09d8bea700f4c031273682d375 | [
"MIT"
] | 7 | 2017-11-29T14:47:22.000Z | 2018-06-21T00:53:32.000Z | from __future__ import print_function
import json
import logging
import os
import pprint
import time
import azure.mgmt.batchai as training
import azure.mgmt.batchai.models as models
import requests
from azure.common.credentials import ServicePrincipalCredentials
from azure.storage.file import FileService
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
POLLING_INTERVAL_SEC = 5
def encode(value):
    """Return str values unchanged; otherwise return value.encode('utf-8').

    BUG FIX (idiom): the original tested ``isinstance(value, type('str'))``,
    which is just an obfuscated spelling of ``str`` — same behavior,
    readable form.
    """
    if isinstance(value, str):
        return value
    return value.encode('utf-8')
class Configuration(object):
    """Attribute-style bag of configuration values."""

    def __init__(self, **kwargs):
        # Expose every keyword argument as an instance attribute.
        for key, value in kwargs.items():
            setattr(self, key, value)

    @staticmethod
    def from_file(filename):
        """Load a Configuration from a JSON file."""
        if not os.path.exists(filename):
            message = 'Cannot find configuration file "{0}"'.format(filename)
            raise ValueError(message)
        with open(filename, 'r') as f:
            return Configuration(**json.load(f))

    @staticmethod
    def from_dict(conf_dict):
        """Build a Configuration from a plain dict."""
        return Configuration(**conf_dict)

    def update(self, **kwargs):
        """Merge additional settings into this configuration."""
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __repr__(self):
        return pprint.pformat(self.__dict__)

    # str() output is intentionally identical to repr().
    __str__ = __repr__
class OutputStreamer:
    """Helper class to stream (tail -f) job's output files."""
    def __init__(self, client, resource_group, workspace, experiment, job_name, output_directory_id,
                 file_name):
        self.client = client
        self.resource_group = resource_group
        self.job_name = job_name
        self.output_directory_id = output_directory_id
        self.file_name = file_name
        # Download URL is resolved lazily on the first tail() call.
        self.url = None
        # Number of bytes already fetched and printed.
        self.downloaded = 0
        self.workspace=workspace
        self.experiment=experiment
        # If no output_directory_id or file_name was given there is nothing
        # to tail, so tail() is replaced with a no-op on this instance.
        if self.output_directory_id is None or self.file_name is None:
            self.tail = lambda: None
    def tail(self):
        # Print any output produced since the last call (like `tail -f`).
        if not self.url:
            # Resolve the download URL by listing the job's output files
            # and matching on the requested file name.
            files = self.client.jobs.list_output_files(
                self.resource_group, self.workspace, self.experiment, self.job_name,
                models.JobsListOutputFilesOptions(
                    outputdirectoryid=self.output_directory_id))
            if not files:
                return
            else:
                for f in list(files):
                    if f.name == self.file_name:
                        self.url = f.download_url
        if self.url:
            # Fetch only the bytes we have not printed yet via an HTTP
            # Range request; any 2xx response is accepted.
            r = requests.get(self.url, headers={
                'Range': 'bytes={0}-'.format(self.downloaded)})
            if int(r.status_code / 100) == 2:
                self.downloaded += len(r.content)
                print(r.content.decode(), end='')
# def client_from(configuration):
# ''' Returns a Batch AI client based on config
# '''
# client = training.BatchAITrainingClient(
# configuration.subscription_id,
# configuration.api_version,
# configuration.url)
# # During private preview we need to setup x-ms-auth-cert manually
# client._client.add_header("x-ms-auth-cert", configuration.access_key)
# client.config.generate_client_request_id = True
# return client
def client_from(configuration):
    """Build a Batch AI management client from service-principal config."""
    credentials = ServicePrincipalCredentials(
        client_id=configuration.client_id,
        secret=configuration.secret,
        tenant=configuration.tenant)
    return training.BatchAIManagementClient(
        credentials=credentials,
        subscription_id=configuration.subscription_id)
def download_file(sas, destination):
    """Download the blob at SAS URL `sas` to the local path `destination`.

    Parent directories are created as needed; the payload is streamed in
    512 KiB chunks so the whole file is never held in memory.
    """
    dir_name = os.path.dirname(destination)
    if dir_name:
        os.makedirs(dir_name, exist_ok=True)
    print('Downloading {0} ...'.format(sas), end='')
    r = requests.get(sas, stream=True)
    with open(destination, 'wb') as f:
        for chunk in r.iter_content(chunk_size=512 * 1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
    # BUG FIX: removed the explicit f.close() that sat inside the `with`
    # block — the context manager already closes the file.
    print('Done')
def print_job_status(job):
    """Print a one-line job status, plus failure details when available."""
    exit_code = 'None'
    if job.execution_info is not None:
        exit_code = job.execution_info.exit_code
    failure_message = None
    if job.execution_state == models.ExecutionState.failed:
        # NOTE: like the original, only the last error's message survives.
        for error in job.execution_info.errors:
            parts = ['\nErrorCode:{0}\nErrorMessage:{1}\n'.format(
                error.code, error.message)]
            if error.details is not None:
                parts.append('Details:\n')
                for detail in error.details:
                    parts.append('{0}:{1}\n'.format(detail.name,
                                                    detail.value))
            failure_message = ''.join(parts)
    print('Job state: {0} ExitCode: {1}'.format(job.execution_state,
                                                exit_code))
    if failure_message:
        print('FailureDetails: {0}'.format(failure_message))
def print_cluster_status(cluster):
    """Print a node-count summary for a cluster plus any allocation errors."""
    counts = cluster.node_state_counts
    print(
        'Cluster state: {0} Target: {1}; Allocated: {2}; Idle: {3}; '
        'Unusable: {4}; Running: {5}; Preparing: {6}; Leaving: {7}'.format(
            cluster.allocation_state,
            cluster.scale_settings.manual.target_node_count,
            cluster.current_node_count,
            counts.idle_node_count,
            counts.unusable_node_count,
            counts.running_node_count,
            counts.preparing_node_count,
            counts.leaving_node_count))
    if not cluster.errors:
        return
    for error in cluster.errors:
        print('Cluster error: {0}: {1}'.format(error.code, error.message))
        if error.details:
            print('Details:')
            for detail in error.details:
                print('{0}: {1}'.format(detail.name, detail.value))
def wait_for_cluster(config, resource_group, workspace, cluster_name, polling_interval=POLLING_INTERVAL_SEC):
    """Poll the cluster until all target nodes are allocated or it errors.

    Returns the cluster object once the target node count is reached with at
    least one idle node and nothing still preparing, or as soon as the
    cluster reports errors. Transient service failures are retried on the
    next poll.
    """
    client = client_from(config)
    while True:
        # BUG FIX: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit, making this infinite loop impossible
        # to interrupt; `except Exception` keeps the best-effort retry.
        try:
            cluster = client.clusters.get(resource_group, workspace, cluster_name)
            print_cluster_status(cluster)
            if (cluster.scale_settings.manual.target_node_count == cluster.current_node_count
                    and cluster.node_state_counts.preparing_node_count == 0 and
                    cluster.node_state_counts.idle_node_count > 0 or
                    cluster.errors):
                return cluster
        except Exception:
            pass
        time.sleep(polling_interval)
def wait_for_job_completion(client, resource_group, workspace, experiment, job_name, cluster_name,
                            output_directory_id=None, file_name=None, polling_interval=POLLING_INTERVAL_SEC):
    """
    Waits for job completion and tails a file specified by output_directory_id
    and file_name.
    """
    # Wait for job to start running
    while True:
        cluster = client.clusters.get(resource_group, workspace, cluster_name)
        print_cluster_status(cluster)
        job = client.jobs.get(resource_group, workspace, experiment, job_name)
        print_job_status(job)
        # Leave the first loop as soon as the job is no longer queued.
        if job.execution_state != models.ExecutionState.queued:
            break
        time.sleep(polling_interval)
    print('Waiting for job output to become available...')
    # Tail the output file and wait for job to complete
    # (OutputStreamer.tail is a no-op when output_directory_id/file_name
    # were not provided).
    streamer = OutputStreamer(client, resource_group, workspace, experiment, job_name,
                              output_directory_id, file_name)
    while True:
        streamer.tail()
        job = client.jobs.get(resource_group, workspace, experiment, job_name)
        # NOTE(review): this loop only exits on `succeeded` — a job that ends
        # in a failed state would keep this polling forever; confirm intended.
        if job.execution_state == models.ExecutionState.succeeded:
            break
        time.sleep(1)
    # One final tail to flush output produced just before completion.
    streamer.tail()
    print_job_status(job)
def upload_scripts(config, job_name, filenames):
    """Upload local script files into the job's directory on the file share.

    Creates the `job_name` directory on the share if it does not exist; each
    uploaded file keeps its base name inside that directory.
    """
    service = FileService(config.storage_account['name'],
                          config.storage_account['key'])
    if not service.exists(config.fileshare_name, directory_name=job_name):
        service.create_directory(config.fileshare_name, job_name, fail_on_exist=False)
    # FIX: replaced the misspelled `trasfer_file` lambda (PEP 8 E731 —
    # never assign a lambda to a name) with a plain loop.
    for filename in filenames:
        service.create_file_from_path(config.fileshare_name, job_name,
                                      os.path.basename(filename), filename)
def create_job(config, cluster_id, workspace, experiment, job_name, image_name, command, number_of_vms=1):
    ''' Submit a single containerized Batch AI job to the given cluster.

    The job mounts the Azure file share for its inputs: SCRIPT points at the
    job's own upload directory and DATASET at the shared `data` directory.
    MODEL and NOTEBOOKS output directories are written back to the share.
    `command` runs inside the `image_name` Docker container on
    `number_of_vms` nodes.
    '''
    input_directories = [
        models.InputDirectory(
            id='SCRIPT',
            path='$AZ_BATCHAI_MOUNT_ROOT/{0}/{1}'.format(config.fileshare_mount_point, job_name)),
        models.InputDirectory(
            id='DATASET',
            path='$AZ_BATCHAI_MOUNT_ROOT/{0}/{1}'.format(config.fileshare_mount_point, 'data'))]
    # stdout/stderr also land on the mounted share.
    std_output_path_prefix = "$AZ_BATCHAI_MOUNT_ROOT/{0}".format(config.fileshare_mount_point)
    output_directories = [
        models.OutputDirectory(
            id='MODEL',
            path_prefix='$AZ_BATCHAI_MOUNT_ROOT/{0}'.format(config.fileshare_mount_point),
            path_suffix="models"),
        models.OutputDirectory(
            id='NOTEBOOKS',
            path_prefix='$AZ_BATCHAI_MOUNT_ROOT/{0}'.format(config.fileshare_mount_point),
            path_suffix="notebooks")
    ]
    parameters = models.JobCreateParameters(
        location=config.location,
        cluster=models.ResourceId(id=cluster_id),
        node_count=number_of_vms,
        input_directories=input_directories,
        std_out_err_path_prefix=std_output_path_prefix,
        output_directories=output_directories,
        container_settings=models.ContainerSettings(image_source_registry=models.ImageSourceRegistry(image=image_name)),
        custom_toolkit_settings=models.CustomToolkitSettings(command_line=command))
    client = client_from(config)
    # Fire-and-forget: the returned poller is intentionally discarded;
    # progress is tracked separately via wait_for_job().
    _ = client.jobs.create(config.group_name, workspace, experiment, job_name, parameters)
def wait_for_job(config, workspace, experiment, job_name):
    """Block until the job finishes, tailing its stdout along the way."""
    batchai_client = client_from(config)
    wait_for_job_completion(batchai_client, config.group_name, workspace,
                            experiment, job_name, config.cluster_name,
                            'stdOuterr', 'stdout.txt')
def setup_cluster(config, workspace):
    """Create the Batch AI cluster described by `config` in `workspace`."""
    client = client_from(config)
    # FIX: replaced the `container_setting_for` lambda assignment
    # (PEP 8 E731) with a single comprehension over the configured images.
    container_settings = [
        models.ContainerSettings(
            image_source_registry=models.ImageSourceRegistry(image=image))
        for image in config.image_names
    ]
    volumes = create_volume(config.storage_account['name'],
                            config.storage_account['key'],
                            config.fileshare_name,
                            config.fileshare_mount_point)
    parameters = cluster_parameters_for(config, container_settings, volumes)
    # Fire-and-forget: the returned poller is intentionally discarded.
    _ = client.clusters.create(config.group_name, workspace, config.cluster_name, parameters)
def write_json_to_file(json_dict, filename):
    """Serialize ``json_dict`` as JSON into ``filename``."""
    serialized = json.dumps(json_dict)
    with open(filename, 'w') as handle:
        handle.write(serialized)
def create_volume(storage_name, storage_key, azure_file_share_name, azure_file_share):
    """Describe the Azure file share mount for Batch AI cluster nodes."""
    file_url = 'https://{0}.file.core.windows.net/{1}'.format(
        storage_name, azure_file_share_name)
    share_reference = models.AzureFileShareReference(
        account_name=storage_name,
        credentials=models.AzureStorageCredentialsInfo(account_key=storage_key),
        azure_file_url=file_url,
        relative_mount_path=azure_file_share)
    return models.MountVolumes(azure_file_shares=[share_reference])
def cluster_parameters_for(config, container_settings, volumes):
    # Build the ClusterCreateParameters: Ubuntu 16.04 nodes of the configured
    # VM size, manually scaled to config.node_count, with the file-share
    # volumes mounted on every node.
    # NOTE(review): `container_settings` is accepted but never referenced in
    # this body — confirm whether it should be wired into the parameters.
    return models.ClusterCreateParameters(
        virtual_machine_configuration=models.VirtualMachineConfiguration(
            image_reference=models.ImageReference(offer='UbuntuServer',
                                                  publisher='Canonical',
                                                  sku='16.04-LTS',
                                                  version='16.04.201708151')),
        location=config.location,
        vm_size=config.vm_type,
        user_account_settings=models.UserAccountSettings(
            admin_user_name=config.admin_user['name'],
            admin_user_password=config.admin_user['password']),
        scale_settings=models.ScaleSettings(
            manual=models.ManualScaleSettings(target_node_count=config.node_count)
        ),
        node_setup=models.NodeSetup(
            mount_volumes=volumes
        )
    )
def jobs_list_for(client, workspace, experiment, resource_group=None):
    """Return every job in the experiment as a plain dict."""
    job_pages = client.jobs.list_by_experiment(resource_group, workspace, experiment)
    return list(job.as_dict() for job in job_pages)
def print_jobs_for( workspace, experiment, client, resource_group=None):
    """Pretty-print every job dict in the experiment."""
    jobs = jobs_list_for(client, workspace, experiment, resource_group=resource_group)
    pprint.pprint(jobs)
def print_jobs_summary_for( workspace, experiment, client, resource_group=None):
    """Print one `name: status | exit-code` line per job in the experiment."""
    for job in jobs_list_for(client, workspace, experiment, resource_group=resource_group):
        # BUG FIX: `job.get('execution_info', dict())` still returns None when
        # the key is present with a None value, crashing on the chained
        # `.get`; `or {}` covers both the missing and the None cases.
        execution_info = job.get('execution_info') or {}
        print('{}: status:{} | exit-code {}'.format(job['name'],
                                                    job['execution_state'],
                                                    execution_info.get('exit_code', None)))
def delete_all_jobs_for(resource_group, workspace, experiment, client):
    """Delete every job in the experiment, logging each deletion."""
    jobs = jobs_list_for(client, workspace, experiment, resource_group=resource_group)
    for job in jobs:
        logger.info('Deleting {}'.format(job['name']))
        client.jobs.delete(resource_group, workspace, experiment, job['name'])
| 39.540936 | 142 | 0.658656 |
3d064b81294e09c43270d3a34f1f507298b9739b | 1,214 | py | Python | run.py | iamsushanth/Automating-Real-World-Tasks-with-Python | eb471866a26043e18cf92f65a26abd8e498b61b2 | [
"MIT"
] | null | null | null | run.py | iamsushanth/Automating-Real-World-Tasks-with-Python | eb471866a26043e18cf92f65a26abd8e498b61b2 | [
"MIT"
] | null | null | null | run.py | iamsushanth/Automating-Real-World-Tasks-with-Python | eb471866a26043e18cf92f65a26abd8e498b61b2 | [
"MIT"
] | 2 | 2020-07-19T10:46:53.000Z | 2020-08-21T08:27:22.000Z | """Automate Updating Catalog Information - run.py"""
#! /usr/bin/env python3
import os
import requests
BASEPATH_SUPPLIER_TEXT_DES = os.path.expanduser('~') + '/supplier-data/descriptions/'
list_text_files = os.listdir(BASEPATH_SUPPLIER_TEXT_DES)
BASEPATH_SUPPLIER_IMAGE = os.path.expanduser('~') + '/supplier-data/images/'
list_image_files = os.listdir(BASEPATH_SUPPLIER_IMAGE)
list_images = [image_name for image_name in list_image_files if '.jpeg' in image_name]
list = []
for text_file in list_text_files:
with open(BASEPATH_SUPPLIER_TEXT_DES + text_file, 'r') as f:
data = {"name":f.readline().rstrip("\n"),
"weight":int(f.readline().rstrip("\n").split(' ')[0]),
"description":f.readline().rstrip("\n")}
for image_file in list_images:
if image_file.split('.')[0] in text_file.split('.')[0]:
data['image_name'] = image_file
list.append(data)
for item in list:
resp = requests.post('http://127.0.0.1:80/fruits/', json=item)
if resp.status_code != 201:
raise Exception('POST error status={}'.format(resp.status_code))
print('Created feedback ID: {}'.format(resp.json()["id"])) | 35.705882 | 86 | 0.652389 |
2453495e645e6fd08d607afd46926c51472350a8 | 1,869 | py | Python | cotaau/urls.py | dezcor/Cotaau | 1914e5fac77734a9e82c3b49110da3ebe079d618 | [
"Apache-2.0"
] | null | null | null | cotaau/urls.py | dezcor/Cotaau | 1914e5fac77734a9e82c3b49110da3ebe079d618 | [
"Apache-2.0"
] | null | null | null | cotaau/urls.py | dezcor/Cotaau | 1914e5fac77734a9e82c3b49110da3ebe079d618 | [
"Apache-2.0"
] | null | null | null | """cotaau URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.contrib.auth.views import LogoutView, PasswordResetView,PasswordResetDoneView,PasswordResetConfirmView,PasswordResetCompleteView
from apps.estudiantes.views import Login,CrearUsuario
from django.views.generic import TemplateView
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Per-app route includes.
    path('estudiantes/',include("apps.estudiantes.urls")),
    path("ponentes/",include("apps.ponentes.urls")),
    # Authentication and registration.
    path("logout/",LogoutView.as_view(),name = 'logout'),
    path("registro/",CrearUsuario.as_view(),name='registro'),
    path("conferencias/",include('apps.conferencias.urls')),
    path("accounts/login/",Login.as_view(),{'template_name':'index.html'},name='login'),
    # Built-in password-reset flow (request -> done -> confirm -> complete).
    path("accounts/password_reset",PasswordResetView.as_view(),name='password_reset'),
    path("accounts/password_reset/done/",PasswordResetDoneView.as_view(),name= 'password_reset_done'),
    path("accounts/reset/<uidb64>/<token>/",PasswordResetConfirmView.as_view(),name= 'password_reset_confirm'),
    path("accounts/reset/done/",PasswordResetCompleteView.as_view(),name= 'password_reset_complete'),
    # Landing page.
    path('', TemplateView.as_view(template_name="principal/main.html"), name='main')
]
| 50.513514 | 140 | 0.742108 |
df99bb7c1c6b4e51138d42504a4807e6fcc0e0c9 | 3,189 | py | Python | toktak_slam/scripts/ObstacleAvoidance.py | chatreejs/assistiverobot.ros | 4644d17cb657d5a72e48e712abf00f190bedfe07 | [
"MIT"
] | null | null | null | toktak_slam/scripts/ObstacleAvoidance.py | chatreejs/assistiverobot.ros | 4644d17cb657d5a72e48e712abf00f190bedfe07 | [
"MIT"
] | null | null | null | toktak_slam/scripts/ObstacleAvoidance.py | chatreejs/assistiverobot.ros | 4644d17cb657d5a72e48e712abf00f190bedfe07 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
class ObstacleAvoidance():
def __init__(self):
rospy.init_node('ObstacleAvoidance', anonymous=False)
rospy.loginfo("Press CTRL+c to stop Kobuki")
rospy.on_shutdown(self.shutdown)
self.cmd_vel = rospy.Publisher(
'/mobile_base/commands/velocity', Twist, queue_size=10)
self.laser_sensor = rospy.Subscriber(
'/laser/scan', LaserScan, self.callback_laser)
rate = rospy.Rate(10)
while not rospy.is_shutdown():
rate.sleep()
def callback_laser(self, msg):
regions = {
"right": min(min(msg.ranges[500:699]), 10),
"fright": min(min(msg.ranges[700:899]), 10),
"front": min(min(msg.ranges[900:1099]), 10),
"fleft": min(min(msg.ranges[1100:1299]), 10),
"left": min(min(msg.ranges[1300:1499]), 10),
}
self.take_action(regions)
def take_action(self, regions):
move_cmd = Twist()
linear = 0
angular = 0
state_description = ""
if regions["front"] > 1 and regions["fleft"] > 1 and regions["fright"] > 1:
state_description = "case 1 - nothing"
linear = 0.4
angular = 0
elif regions["front"] < 1 and regions["fleft"] > 1 and regions["fright"] > 1:
state_description = "case 2 - front"
linear = 0
angular = 0.7
elif regions["front"] > 1 and regions["fleft"] > 1 and regions["fright"] < 1:
state_description = "case 3 - fright"
linear = 0
angular = 0.7
elif regions["front"] > 1 and regions["fleft"] < 1 and regions["fright"] > 1:
state_description = "case 4 - fleft"
linear = 0
angular = -0.7
elif regions["front"] < 1 and regions["fleft"] > 1 and regions["fright"] < 1:
state_description = "case 5 - front and fright"
linear = 0
angular = 0.7
elif regions["front"] < 1 and regions["fleft"] < 1 and regions["fright"] > 1:
state_description = "case 6 - front and fleft"
linear = 0
angular = -0.7
elif regions["front"] < 1 and regions["fleft"] < 1 and regions["fright"] < 1:
state_description = "case 7 - front and fleft and fright"
linear = 0
angular = 0.5
elif regions["front"] > 1 and regions["fleft"] < 1 and regions["fright"] < 1:
state_description = "case 8 - fleft and fright"
linear = 0.3
angular = 0
else:
state_description = "unknown case"
rospy.loginfo(regions)
rospy.loginfo(state_description)
move_cmd.linear.x = linear
move_cmd.angular.z = angular
self.cmd_vel.publish(move_cmd)
def shutdown(self):
rospy.loginfo("Stopping Kobuki")
self.cmd_vel.publish(Twist())
rospy.sleep(1)
if __name__ == '__main__':
    try:
        ObstacleAvoidance()
    except rospy.ROSInterruptException:
        # Fix: the original bare `except:` swallowed *every* exception
        # (including programming errors and KeyboardInterrupt), hiding bugs.
        # Only a ROS-initiated shutdown is an expected way to leave the node;
        # anything else now propagates with a traceback.
        rospy.loginfo("End of the trip for Kobuki")
| 32.540816 | 85 | 0.556914 |
5f82dc4b36d1c6107e54903e90a01ed1523b2ce4 | 632 | py | Python | backend/manage.py | crowdbotics-apps/docter-28901 | 0e51cc9aa99b6ca391431be9c7c1e29dce952371 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/manage.py | crowdbotics-apps/docter-28901 | 0e51cc9aa99b6ca391431be9c7c1e29dce952371 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/manage.py | crowdbotics-apps/docter-28901 | 0e51cc9aa99b6ca391431be9c7c1e29dce952371 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line utility for administrative tasks."""
    # Make sure a settings module is configured before Django is imported.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'docter_28901.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as err:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from err
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    # Script entry point: delegate to Django's command-line dispatcher.
    main()
| 28.727273 | 76 | 0.685127 |
76cb9144fb79a893f119334f33d49725c27a84df | 15,534 | py | Python | transformations/manual_composition.py | eth-sri/3dcertify | bb10f339f80149a9ebc7c07d041b2ef222efb394 | [
"Apache-2.0"
] | 9 | 2021-03-31T20:27:50.000Z | 2022-01-07T21:52:47.000Z | transformations/manual_composition.py | eth-sri/3dcertify | bb10f339f80149a9ebc7c07d041b2ef222efb394 | [
"Apache-2.0"
] | 2 | 2021-06-21T15:38:07.000Z | 2021-11-08T09:10:09.000Z | transformations/manual_composition.py | eth-sri/3dcertify | bb10f339f80149a9ebc7c07d041b2ef222efb394 | [
"Apache-2.0"
] | 4 | 2021-07-17T15:04:14.000Z | 2022-02-09T17:51:39.000Z | from typing import Union, List
import numpy as np
from relaxations import interval as iv
from relaxations.interval import Interval
from transformations.rotation import RotationZ
from transformations.tapering import TaperingZ
from transformations.transformation import Transformation
class TaperingRotation(Transformation):
    """Composed transformation: rotation about the z-axis followed by
    tapering along z, with parameters (a, b, theta) — tapering
    coefficients a, b and rotation angle theta.

    All derivative formulas below are hand-derived for this specific
    composition. They accept either concrete float parameters or
    ``Interval`` parameters; the ``convert=isinstance(a, Interval)`` flag
    passed to ``iv.stack`` mirrors which of the two cases is active.
    The indexing ``points[:, 0..2]`` assumes ``points`` has shape
    (N, 3) — x, y, z columns (TODO confirm against callers).
    """

    def __init__(self):
        # Three free parameters: a, b (tapering) and theta (rotation angle).
        super().__init__(3)

    def transform(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[np.ndarray, Interval]:
        """Apply RotationZ(theta) first, then TaperingZ(a, b)."""
        a, b, theta = params
        return TaperingZ().transform(RotationZ().transform(points, [theta]), [a, b])

    def gradient_params(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[np.ndarray], List[Interval]]:
        """First derivatives of the output points w.r.t. the parameters.

        Returns one per-point array per parameter, ordered [d/da, d/db,
        d/dtheta]; the z output never depends on the parameters, hence the
        trailing ``zero`` column in every entry.
        """
        a, b, theta = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        zero = iv.zeros_like(z)
        return [
            iv.stack([
                (a * z) * (iv.cos(theta) * x - iv.sin(theta) * y),
                (a * z) * (iv.sin(theta) * x + iv.cos(theta) * y),
                zero
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                z * (iv.cos(theta) * x - iv.sin(theta) * y),
                z * (iv.sin(theta) * x + iv.cos(theta) * y),
                zero
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                (0.5 * iv.square(a) * z + b * z + 1) * (-iv.sin(theta) * x - iv.cos(theta) * y),
                (0.5 * iv.square(a) * z + b * z + 1) * (iv.cos(theta) * x - iv.sin(theta) * y),
                zero
            ], axis=1, convert=isinstance(a, Interval)),
        ]

    def gradient_points(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[np.ndarray], List[Interval]]:
        """Jacobian of the output w.r.t. the input coordinates.

        Returns [d(out)/dx, d(out)/dy, d(out)/dz]; the common factor
        (0.5*a^2*z + b*z + 1) is the tapering scale applied to x and y.
        """
        a, b, theta = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        zero = iv.zeros_like(z)
        one = iv.ones_like(z)
        return [
            iv.stack([
                (0.5 * iv.square(a) * z + b * z + 1) * iv.cos(theta),
                (0.5 * iv.square(a) * z + b * z + 1) * iv.sin(theta),
                zero
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                -(0.5 * iv.square(a) * z + b * z + 1) * iv.sin(theta),
                (0.5 * iv.square(a) * z + b * z + 1) * iv.cos(theta),
                zero
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                (0.5 * iv.square(a) + b) * (iv.cos(theta) * x - iv.sin(theta) * y),
                (0.5 * iv.square(a) + b) * (iv.sin(theta) * x + iv.cos(theta) * y),
                one
            ], axis=1, convert=isinstance(a, Interval))
        ]

    def hessian_params(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[List[np.ndarray]], List[List[Interval]]]:
        """Second derivatives w.r.t. parameter pairs.

        Entry [i][j] holds d^2(out)/(dp_i dp_j) for p = (a, b, theta);
        the matrix is symmetric (the [0][2]/[2][0] and [1][2]/[2][1]
        entries repeat the same formulas).
        """
        a, b, theta = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        zero = iv.zeros_like(z)
        return [
            [
                iv.stack([
                    z * (iv.cos(theta) * x - iv.sin(theta) * y),
                    z * (iv.sin(theta) * x + iv.cos(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -a * z * (iv.sin(theta) * x + iv.cos(theta) * y),
                    a * z * (iv.cos(theta) * x - iv.sin(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ],
            [
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -z * (iv.sin(theta) * x + iv.cos(theta) * y),
                    z * (iv.cos(theta) * x - iv.sin(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ],
            [
                iv.stack([
                    -a * z * (iv.sin(theta) * x + iv.cos(theta) * y),
                    a * z * (iv.cos(theta) * x - iv.sin(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -z * (iv.sin(theta) * x + iv.cos(theta) * y),
                    z * (iv.cos(theta) * x - iv.sin(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    (0.5 * iv.square(a) * z + b * z + 1) * (-iv.cos(theta) * x + iv.sin(theta) * y),
                    (0.5 * iv.square(a) * z + b * z + 1) * (-iv.sin(theta) * x - iv.cos(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ]
        ]

    def hessian_points(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[List[np.ndarray]], List[List[Interval]]]:
        """Second derivatives w.r.t. coordinate pairs.

        Only the mixed x-z and y-z entries are non-zero: the tapering
        factor is linear in z and multiplies x and y.
        """
        a, b, theta = params
        z = points[:, 2]
        zero = iv.zeros_like(z)
        one = iv.ones_like(z)
        return [
            [
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    (0.5 * iv.square(a) + b) * iv.cos(theta) * one,
                    (0.5 * iv.square(a) + b) * iv.sin(theta) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
            ],
            [
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -(0.5 * iv.square(a) + b) * iv.sin(theta) * one,
                    (0.5 * iv.square(a) + b) * iv.cos(theta) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ],
            [
                iv.stack([
                    (0.5 * iv.square(a) + b) * iv.cos(theta) * one,
                    (0.5 * iv.square(a) + b) * iv.sin(theta) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -(0.5 * iv.square(a) + b) * iv.sin(theta) * one,
                    (0.5 * iv.square(a) + b) * iv.cos(theta) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ],
        ]

    def hessian_points_params(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[List[np.ndarray]], List[List[Interval]]]:
        """Mixed second derivatives: entry [i][j] is d^2(out)/(dc_i dp_j)
        for coordinates c = (x, y, z) and parameters p = (a, b, theta).
        """
        a, b, theta = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        zero = iv.zeros_like(z)
        return [
            [
                iv.stack([
                    a * z * iv.cos(theta),
                    a * z * iv.sin(theta),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    z * iv.cos(theta),
                    z * iv.sin(theta),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -(0.5 * iv.square(a) * z + b * z + 1) * iv.sin(theta),
                    (0.5 * iv.square(a) * z + b * z + 1) * iv.cos(theta),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
            ],
            [
                iv.stack([
                    -a * z * iv.sin(theta),
                    a * z * iv.cos(theta),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -z * iv.sin(theta),
                    z * iv.cos(theta),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    -(0.5 * iv.square(a) * z + b * z + 1) * iv.cos(theta),
                    -(0.5 * iv.square(a) * z + b * z + 1) * iv.sin(theta),
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ],
            [
                iv.stack([
                    a * (iv.cos(theta) * x - iv.sin(theta) * y),
                    a * (iv.sin(theta) * x + iv.cos(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    iv.cos(theta) * x - iv.sin(theta) * y,
                    iv.sin(theta) * x + iv.cos(theta) * y,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    (0.5 * iv.square(a) + b) * (-iv.sin(theta) * x - iv.cos(theta) * y),
                    (0.5 * iv.square(a) + b) * (iv.cos(theta) * x - iv.sin(theta) * y),
                    zero
                ], axis=1, convert=isinstance(a, Interval))
            ],
        ]
class RotationZX(Transformation):
    """Combined rotation Rz(a)·Rx(b): an x-axis rotation by angle b is
    applied first, followed by a z-axis rotation by angle a (this matches
    the matrix entries in ``transform``).

    Supports both concrete float and ``Interval`` parameters; the
    ``convert=isinstance(a, Interval)`` flag mirrors which case is active.
    Assumes ``points`` has shape (N, 3) — x, y, z columns (TODO confirm).
    """

    def __init__(self):
        # Two free parameters: a (z-rotation angle) and b (x-rotation angle).
        super().__init__(2)

    def transform(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[np.ndarray, Interval]:
        """Apply the composed rotation to every point."""
        a, b = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        return iv.stack([
            iv.cos(a) * x - iv.sin(a) * iv.cos(b) * y + iv.sin(a) * iv.sin(b) * z,
            iv.sin(a) * x + iv.cos(a) * iv.cos(b) * y - iv.cos(a) * iv.sin(b) * z,
            iv.sin(b) * y + iv.cos(b) * z
        ], axis=1, convert=isinstance(a, Interval))

    def gradient_params(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[np.ndarray], List[Interval]]:
        """First derivatives of the output points w.r.t. (a, b).

        The z output does not depend on a, hence the ``zeros_like`` entry.
        """
        a, b = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        return [
            iv.stack([
                -iv.sin(a) * x - iv.cos(a) * iv.cos(b) * y + iv.cos(a) * iv.sin(b) * z,
                iv.cos(a) * x - iv.sin(a) * iv.cos(b) * y + iv.sin(a) * iv.sin(b) * z,
                iv.zeros_like(z)
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                iv.sin(a) * iv.sin(b) * y + iv.sin(a) * iv.cos(b) * z,
                -iv.cos(a) * iv.sin(b) * y - iv.cos(a) * iv.cos(b) * z,
                iv.cos(b) * y - iv.sin(b) * z
            ], axis=1, convert=isinstance(a, Interval)),
        ]

    def gradient_points(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[np.ndarray], List[Interval]]:
        """Jacobian w.r.t. the input coordinates.

        The map is linear in the points, so each entry is a constant
        rotation-matrix column broadcast over all points via ``one``.
        """
        a, b = params
        x = points[:, 0]
        zero = iv.zeros_like(x)
        one = iv.ones_like(x)
        return [
            iv.stack([
                iv.cos(a) * one,
                iv.sin(a) * one,
                zero
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                -iv.sin(a) * iv.cos(b) * one,
                iv.cos(a) * iv.cos(b) * one,
                iv.sin(b) * one
            ], axis=1, convert=isinstance(a, Interval)),
            iv.stack([
                iv.sin(a) * iv.sin(b) * one,
                -iv.cos(a) * iv.sin(b) * one,
                iv.cos(b) * one
            ], axis=1, convert=isinstance(a, Interval)),
        ]

    def hessian_params(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[List[np.ndarray]], List[List[Interval]]]:
        """Second derivatives w.r.t. parameter pairs (symmetric 2x2 layout)."""
        a, b = params
        x = points[:, 0]
        y = points[:, 1]
        z = points[:, 2]
        zero = iv.zeros_like(x)
        return [
            [
                iv.stack([
                    -iv.cos(a) * x + iv.sin(a) * iv.cos(b) * y - iv.sin(a) * iv.sin(b) * z,
                    -iv.sin(a) * x - iv.cos(a) * iv.cos(b) * y + iv.cos(a) * iv.sin(b) * z,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    iv.cos(a) * iv.sin(b) * y + iv.cos(a) * iv.cos(b) * z,
                    iv.sin(a) * iv.sin(b) * y + iv.sin(a) * iv.cos(b) * z,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
            ],
            [
                iv.stack([
                    iv.cos(a) * iv.sin(b) * y + iv.cos(a) * iv.cos(b) * z,
                    iv.sin(a) * iv.sin(b) * y + iv.sin(a) * iv.cos(b) * z,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    iv.sin(a) * iv.cos(b) * y - iv.sin(a) * iv.sin(b) * z,
                    -iv.cos(a) * iv.cos(b) * y + iv.cos(a) * iv.sin(b) * z,
                    -iv.sin(b) * y - iv.cos(b) * z
                ], axis=1, convert=isinstance(a, Interval)),
            ]
        ]

    def hessian_points(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[List[np.ndarray]], List[List[Interval]]]:
        """All zero: the transformation is linear in the point coordinates."""
        a, b = params
        zero = iv.as_interval(iv.zeros_like(points)) if isinstance(a, Interval) else iv.zeros_like(points)
        return [[zero] * 3 for _ in range(3)]

    def hessian_points_params(self, points: np.ndarray, params: Union[List[float], List[Interval]]) -> Union[List[List[np.ndarray]], List[List[Interval]]]:
        """Mixed second derivatives d^2(out)/(dc_i dp_j) for c = (x, y, z),
        p = (a, b); constant per point, broadcast via ``one``.
        """
        a, b = params
        x = points[:, 0]
        zero = iv.zeros_like(x)
        one = iv.ones_like(x)
        return [
            [
                iv.stack([
                    -iv.sin(a) * one,
                    iv.cos(a) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    zero,
                    zero,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
            ],
            [
                iv.stack([
                    -iv.cos(a) * iv.cos(b) * one,
                    -iv.sin(a) * iv.cos(b) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    iv.sin(a) * iv.sin(b) * one,
                    -iv.cos(a) * iv.sin(b) * one,
                    iv.cos(b) * one
                ], axis=1, convert=isinstance(a, Interval)),
            ],
            [
                iv.stack([
                    iv.cos(a) * iv.sin(b) * one,
                    iv.sin(a) * iv.sin(b) * one,
                    zero
                ], axis=1, convert=isinstance(a, Interval)),
                iv.stack([
                    iv.sin(a) * iv.cos(b) * one,
                    -iv.cos(a) * iv.cos(b) * one,
                    -iv.sin(b) * one
                ], axis=1, convert=isinstance(a, Interval)),
            ]
        ]
| 40.036082 | 155 | 0.406785 |
3bfaa99e5056403c2ef3843a8927aa3c9bfe9a2e | 1,552 | py | Python | kratos/apps/task/views.py | cipher-ops/backend-kts | 7ea54d7f56bcb0da54b901ac8f3cfbfbb0b12319 | [
"MIT"
] | 1 | 2020-11-30T09:53:40.000Z | 2020-11-30T09:53:40.000Z | kratos/apps/task/views.py | cipher-ops/backend-kts | 7ea54d7f56bcb0da54b901ac8f3cfbfbb0b12319 | [
"MIT"
] | null | null | null | kratos/apps/task/views.py | cipher-ops/backend-kts | 7ea54d7f56bcb0da54b901ac8f3cfbfbb0b12319 | [
"MIT"
] | null | null | null | from rest_framework import viewsets
from kratos.apps.task.serializers import TaskSerializer
from rest_framework.response import Response
from kratos.apps.task import models
from rest_framework import status
class TaskViewSet(viewsets.GenericViewSet):
    '''
    CRUD endpoints for Task records.
    '''
    serializer_class = TaskSerializer
    queryset = models.Task.objects.all()

    def list(self, request):
        '''
        Return a paginated list of tasks.
        '''
        records = self.paginator.paginate_queryset(self.get_queryset(), self.request, view=self)
        serializer = self.get_serializer(records, many=True)
        return self.paginator.get_paginated_response(serializer.data)

    def retrieve(self, request, pk=None):
        '''
        Return the details of a single task.
        '''
        serializer = self.get_serializer(self.get_object())
        return Response(serializer.data)

    def create(self, request):
        '''
        Create a new task from the request payload (validation errors
        raise and are turned into a 400 by DRF).
        '''
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data)

    def partial_update(self, request, pk=None):
        '''
        Partially update an existing task's fields.
        '''
        serializer = self.get_serializer(self.get_object(), data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data)

    def destroy(self, request, pk=None):
        '''
        Delete a single task record.
        '''
        self.get_object().delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
| 28.740741 | 96 | 0.648196 |
d87cfe480a16ac9102e6af5de87b7bcd0c1a5b09 | 10,090 | py | Python | test_leonardo.py | chahyon1998/sornet-ku | 87a4479368c95e4d3d59863a4329b2b77f184218 | [
"MIT"
] | null | null | null | test_leonardo.py | chahyon1998/sornet-ku | 87a4479368c95e4d3d59863a4329b2b77f184218 | [
"MIT"
] | null | null | null | test_leonardo.py | chahyon1998/sornet-ku | 87a4479368c95e4d3d59863a4329b2b77f184218 | [
"MIT"
] | null | null | null | '''
MIT License
Copyright (c) 2022 Wentao Yuan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import numpy
from matplotlib import pyplot as plt
import datasets
from datasets import LeonardoDataset, build_predicates
from networks import EmbeddingNet, ReadoutNet
from torch.utils.data import DataLoader
from tqdm import tqdm
import argparse
import numpy as np
import torch
unary_pred = [
'on_surface(%s, left)', 'on_surface(%s, right)', 'on_surface(%s, far)',
'on_surface(%s, center)', 'has_obj(robot, %s)', 'top_is_clear(%s)',
'in_approach_region(robot, %s)'
]
binary_pred = ['stacked(%s, %s)', 'aligned_with(%s, %s)']
def calc_accuracy(pred, target):
    """Per-predicate accuracy in percent.

    ``pred``/``target`` are boolean arrays of shape (num_samples,
    num_predicates); returns, for each predicate column, the percentage
    of samples on which prediction and target agree.
    """
    num_samples = target.shape[0]
    correct_per_predicate = (pred == target).sum(axis=0)
    return correct_per_predicate / num_samples * 100
def calc_accuracy_allmatch(pred, target, keys, names):
    """All-match accuracy: percent of samples where *every* predicate agrees.

    Besides the overall score under 'all', one score is reported per
    ``key``, restricted to the predicate columns whose name contains that
    key; keys matching no name get 0.
    """
    num_samples = pred.shape[0]

    def exact_match_pct(p, t):
        # A sample counts only when no selected predicate is mispredicted.
        return ((p != t).sum(axis=1) == 0).sum() / num_samples * 100

    acc = {'all': exact_match_pct(pred, target)}
    for key in keys:
        selected = [key in name for name in names]
        if any(selected):
            acc[key] = exact_match_pct(pred[:, selected], target[:, selected])
        else:
            acc[key] = 0
    return acc
def calc_f1(pred, target):
    """Per-predicate F1 score in percent, computed on the minority class.

    ``pred``/``target`` are boolean arrays of shape (num_samples,
    num_predicates). For columns where the positive label is the majority,
    both prediction and target are flipped so F1 always measures the rarer
    label. Columns whose precision/recall are undefined (no positives)
    yield 0 instead of NaN.

    Fix over the original: the label flip is applied to *copies*, so the
    caller's arrays are no longer mutated in place.
    """
    # Work on copies — the in-place flip below must not leak to the caller.
    pred = pred.copy()
    target = target.copy()
    majority_is_one = target.shape[0] - target.sum(axis=0) < target.sum(axis=0)
    pred[:, majority_is_one] = ~pred[:, majority_is_one]
    target[:, majority_is_one] = ~target[:, majority_is_one]
    tp = (pred & target).sum(axis=0)    # true positives
    fp = (pred & ~target).sum(axis=0)   # false positives
    fn = (~pred & target).sum(axis=0)   # false negatives
    precision = tp / (tp + fp) * 100
    recall = tp / (tp + fn) * 100
    f1 = 2 * precision * recall / (precision + recall)
    # Degenerate columns divide 0/0 above and come out NaN; report 0.
    f1[np.isnan(f1)] = 0
    return f1
def split_avg(data, keys, names):
    """Average ``data`` overall and per key.

    Returns a dict with the overall mean under 'all' plus, for each
    ``key``, the mean over the entries whose name contains that key
    (0 when no name matches).
    """
    result = {'all': np.mean(data)}
    for key in keys:
        selected = [key in name for name in names]
        result[key] = np.mean(data[selected]) if any(selected) else 0
    return result
def create_and_write_image(img, obj_patches, gripper, target):
    """Render a qualitative prediction figure and push it to a summary writer.

    NOTE(review): this function looks like debug code carried over from
    another script and is not runnable as written. It references several
    module-level names that are not defined anywhere in this file:
    ``logits``, ``relations``, ``relation_phrases``, ``io``, ``writer``
    and ``batch_i``. Moreover, ``batch_i += 1`` at the bottom makes
    ``batch_i`` a *local* variable for the whole function, so the earlier
    ``str(batch_i)`` would raise UnboundLocalError even if a global
    existed. The ``gripper`` parameter is unused. Verify intent before
    reviving this code.
    """
    #mask = numpy.array(mask.bool().cpu(), dtype=bool)
    # Highest object index whose patch is not all-black, per image.
    max_obj_i = numpy.zeros(obj_patches.shape[0], dtype=int)
    for img_i in range(obj_patches.shape[0]):
        for obj_i in range(10):
            if (numpy.array(obj_patches[img_i, obj_i].cpu()).swapaxes(-1, -3) != numpy.array([0, 0, 0])).any():
                max_obj_i[img_i] = obj_i
    # Only the first sample of the batch is visualized.
    index = 0
    img_raw = datasets.denormalize_rgb(img[index].cpu())
    fig, (a0, a1, a2) = plt.subplots(
        1, 3, figsize=(15, 10), gridspec_kw={'width_ratios': [7, 2, 4]}
    )
    a0.imshow(img_raw)
    a0.set_title('Input image', fontsize=18)
    a0.axis('off')
    # Stack up to ten 32x32 query-object patches into one vertical strip.
    obj_img = numpy.ones((320, 32, 3)).astype('uint8') * 255
    for i in range(5):
        obj_img[32 * (2 * i):32 * (2 * i + 1), :32] = numpy.array(datasets.denormalize_rgb(obj_patches[index][2 * i]))
        obj_img[32 * (2 * i + 1):32 * (2 * i + 2), :32] = numpy.array(
            datasets.denormalize_rgb(obj_patches[index][2 * i + 1]))
    a1.imshow(obj_img)
    a1.set_title('Query Object', fontsize=18)
    a1.axis('off')
    target = target[index].reshape(len(unary_pred) + len(binary_pred), -1)
    # NOTE(review): `logits` is not defined in this file — NameError here.
    pred = logits[index].reshape(len(unary_pred) + len(binary_pred), -1)
    #mask = mask[index].reshape(len(relations), -1)
    row_count = 0
    pair_count = -1
    for obj1_i in range(max_obj_i[0]):
        for obj2_i in range(max_obj_i[0]):
            if obj1_i == obj2_i:
                continue
            pair_count += 1
            # if (obj_img[32 * obj1_i:32 * (obj1_i + 1)] == numpy.array([122, 116, 104])).all()\
            #         or (obj_img[32 * obj2_i:32 * (obj2_i + 1)] == numpy.array([122, 116, 104])).all():
            #     continue
            for rel_i in range(4):
                #rel_mask = mask[rel_i][pair_count] > 0
                rel_pred = pred[rel_i][pair_count] > 0
                rel_true = target[rel_i][pair_count] > 0
                #if not rel_mask or (not rel_pred and not rel_true):
                #    continue
                # NOTE(review): `relations` / `relation_phrases` are undefined.
                rel = relations[rel_i]
                rel_phrase = relation_phrases[rel]
                pred_text = '' if rel_pred else 'not '
                pred_text = pred_text + rel_phrase
                color = (0, 0, 0)
                if rel_pred and not rel_true: # false positive
                    color = (1, 0, 0)
                elif not rel_pred and rel_true: # false negative
                    color = (0, 0, 1)
                a2.text(0.5, 1 - row_count * 0.025, pred_text, color=color, fontsize=12, ha='center', va='center')
                obj1_axis = a2.inset_axes([0.2, 1 - row_count * 0.025 - 0.0125, 0.1, 0.025])
                obj1_axis.imshow(obj_img[32 * obj1_i:32 * (obj1_i + 1)])
                obj1_axis.axis('off')
                obj2_axis = a2.inset_axes([0.7, 1 - row_count * 0.025 - 0.0125, 0.1, 0.025])
                obj2_axis.imshow(obj_img[32 * obj2_i:32 * (obj2_i + 1)])
                obj2_axis.axis('off')
                row_count += 1
    a2.axis('off')
    plt.tight_layout()
    # NOTE(review): `io` is never imported in this file.
    io_buffer = io.BytesIO()
    fig_size = fig.get_size_inches() * fig.dpi
    fig.savefig(io_buffer, format='raw', dpi=fig.dpi)
    io_buffer.seek(0)
    out_img = numpy.frombuffer(io_buffer.getvalue(), dtype=numpy.uint8)
    out_img = numpy.reshape(out_img, (int(fig_size[1]), int(fig_size[0]), -1))
    # NOTE(review): `writer` is undefined, and `batch_i` is local due to the
    # assignment below — this line raises UnboundLocalError.
    writer.add_image('img' + str(batch_i), out_img, dataformats='HWC')
    batch_i += 1
if __name__ == '__main__':
    # Evaluate a trained SORNet model: run every view of the dataset,
    # ensemble the per-view logits and report accuracy / all-match / F1.
    parser = argparse.ArgumentParser()
    # Data
    parser.add_argument('--data_dir')
    parser.add_argument('--split')
    parser.add_argument('--obj_file')
    parser.add_argument('--img_h', type=int, default=224)
    parser.add_argument('--img_w', type=int, default=224)
    parser.add_argument('--n_views', type=int, default=1)
    parser.add_argument('--n_objects', type=int, default=4)
    parser.add_argument('--objects', nargs='+')
    parser.add_argument('--colors', nargs='+')
    # Model
    parser.add_argument('--patch_size', type=int, default=32)
    parser.add_argument('--width', type=int, default=768)
    parser.add_argument('--layers', type=int, default=12)
    parser.add_argument('--heads', type=int, default=12)
    parser.add_argument('--gripper', action='store_true')
    parser.add_argument('--d_hidden', type=int, default=512)
    # Evaluation
    parser.add_argument('--checkpoint')
    parser.add_argument('--batch_size', type=int, default=128)
    parser.add_argument('--n_worker', type=int, default=0)
    args = parser.parse_args()
    # Default object names are object00..objectNN when none are given.
    if args.objects is None:
        objects = [f'object{i:02d}' for i in range(args.n_objects)]
    else:
        objects = args.objects
    pred_names = build_predicates(objects, unary_pred, binary_pred)
    # One (deterministic, fixed-view) loader per camera view.
    loaders = []
    for v in range(args.n_views):
        data = LeonardoDataset(
            args.data_dir, args.split, pred_names, args.obj_file, args.colors,
            randpatch=False, view=v, randview=False, gripper=args.gripper
        )
        loaders.append(DataLoader(
            data, args.batch_size, pin_memory=True, num_workers=args.n_worker
        ))
    model = EmbeddingNet(
        (args.img_w, args.img_h), args.patch_size, len(objects),
        args.width, args.layers, args.heads
    )
    # One extra embedding dimension carries the gripper open/close scalar.
    out_dim = args.width + 1 if args.gripper else args.width
    head = ReadoutNet(out_dim, args.d_hidden, len(unary_pred), len(binary_pred))
    checkpoint = torch.load(args.checkpoint, map_location='cpu')
    model.load_state_dict(checkpoint['model'])
    head.load_state_dict(checkpoint['head'])
    model = model.cuda().eval()
    head = head.cuda().eval()
    predictions = []
    targets = []
    # Prepend a tqdm range so zip(*loaders) also drives the progress bar.
    loaders.insert(0, tqdm(range(len(loaders[0]))))
    for data in zip(*loaders):
        data = data[1:]
        batch_size = data[0][0].shape[0]
        logits = 0
        for img, obj_patches, gripper, target in data:
            with torch.no_grad():
                img = img.cuda()
                obj_patches = obj_patches.cuda()
                emb, attn = model(img, obj_patches)
                if args.gripper:
                    gripper = gripper.cuda()
                    emb = torch.cat([
                        emb, gripper[:, None, None].expand(-1, len(objects), -1)
                    ], dim=-1)
                # Sum logits over views; the sign of the sum below gives the
                # ensembled prediction.
                logits += head(emb)
        predictions.append((logits > 0).cpu().numpy())
        # Uses the target of the last view — presumably identical across
        # views; TODO confirm against the dataset definition.
        targets.append(target.bool().numpy())
    predictions = np.concatenate(predictions)
    targets = np.concatenate(targets)
    prefixes = [
        'on_surface', 'has_obj', 'top_is_clear',
        'in_approach_region', 'stacked', 'aligned_with'
    ]
    accuracy = split_avg(calc_accuracy(predictions, targets), prefixes, pred_names)
    accuracy_all = calc_accuracy_allmatch(predictions, targets, prefixes, pred_names)
    f1 = split_avg(calc_f1(predictions, targets), prefixes, pred_names)
    print('Accuracy')
    for key in accuracy:
        print(key, accuracy[key])
    print()
    print('All match accuracy')
    for key in accuracy_all:
        print(key, accuracy_all[key])
    print()
    print('F1 score')
    for key in f1:
        print(key, f1[key])
| 38.957529 | 118 | 0.614866 |
49ded194b13efd73afb070c9bbb6d7674b37801b | 5,063 | py | Python | lib/JumpScale/clients/oauth/OauthInstance.py | Jumpscale/jumpscale_core8 | f80ac9b1ab99b833ee7adb17700dcf4ef35f3734 | [
"Apache-2.0"
] | 8 | 2016-04-14T14:04:57.000Z | 2020-06-09T00:24:34.000Z | lib/JumpScale/clients/oauth/OauthInstance.py | Jumpscale/jumpscale_core8 | f80ac9b1ab99b833ee7adb17700dcf4ef35f3734 | [
"Apache-2.0"
] | 418 | 2016-01-25T10:30:00.000Z | 2021-09-08T12:29:13.000Z | lib/JumpScale/clients/oauth/OauthInstance.py | Jumpscale/jumpscale_core8 | f80ac9b1ab99b833ee7adb17700dcf4ef35f3734 | [
"Apache-2.0"
] | 9 | 2016-04-21T07:21:17.000Z | 2022-01-24T10:35:54.000Z | import urllib.request
import urllib.parse
import urllib.error
import string
import requests
import time
import random
from JumpScale import j
class AuthError(Exception):
    """Raised when any step of the OAuth flow fails (token exchange,
    scope check or user-info fetch)."""
    pass
class UserInfo(object):
    """Value object describing an authenticated user.

    Attributes:
        username: login name reported by the identity provider.
        emailaddress: primary e-mail address.
        groups: list of group names the user belongs to.
    """

    def __init__(self, username, emailaddress, groups):
        self.username = username
        self.emailaddress = emailaddress
        self.groups = groups

    def __repr__(self):
        # Added for debuggability; the default object repr hid all fields.
        return 'UserInfo(username=%r, emailaddress=%r, groups=%r)' % (
            self.username, self.emailaddress, self.groups)
class OauthInstance:
    """Generic OAuth2 client used by the portal login flow.

    Endpoints and credentials come either from the explicit constructor
    arguments or, when ``addr`` is falsy, from the 'oauth_client'
    application-instance HRD configuration.
    """

    def __init__(self, addr, accesstokenaddr, id, secret, scope, redirect_url, user_info_url, logout_url, instance):
        # NOTE(review): the parameter name `id` shadows the builtin of the
        # same name; kept as-is for backward compatibility with callers.
        if not addr:
            # No explicit endpoint: pull all settings from the HRD config
            # of the named oauth_client instance.
            hrd = j.application.getAppInstanceHRD('oauth_client', instance)
            self.addr = hrd.get('instance.oauth.client.url')
            self.accesstokenaddress = hrd.get('instance.oauth.client.url2')
            self.id = hrd.get('instance.oauth.client.id')
            self.scope = hrd.get('instance.oauth.client.scope')
            self.redirect_url = hrd.get('instance.oauth.client.redirect_url')
            self.secret = hrd.get('instance.oauth.client.secret')
            self.user_info_url = hrd.get('instance.oauth.client.user_info_url')
            self.logout_url = hrd.get('instance.oauth.client.logout_url')
        else:
            self.addr = addr
            self.id = id
            self.scope = scope
            self.redirect_url = redirect_url
            self.accesstokenaddress = accesstokenaddr
            self.secret = secret
            self.user_info_url = user_info_url
            self.logout_url = logout_url
        # Random `state` value included in the authorization URL (CSRF guard).
        self.state = ''.join(random.choice(
            string.ascii_uppercase + string.digits) for _ in range(30))

    @property
    def url(self):
        """Authorization URL to redirect the user to (code flow)."""
        params = {'client_id': self.id, 'redirect_uri': self.redirect_url,
                  'state': self.state, 'response_type': 'code'}
        if self.scope:
            params.update({'scope': self.scope})
        return '%s?%s' % (self.addr, urllib.parse.urlencode(params))

    def getAccessToken(self, code, state):
        """Exchange an authorization ``code`` for an access token dict.

        Raises AuthError when the endpoint fails or reports an error.
        NOTE(review): when the response is not ok but carries no 'error'
        key, `result.json()['error']` raises KeyError — confirm intent.
        """
        payload = {'code': code, 'client_id': self.id, 'client_secret': self.secret,
                   'redirect_uri': self.redirect_url, 'grant_type': 'authorization_code',
                   'state': state}
        result = requests.post(self.accesstokenaddress, data=payload, headers={
            'Accept': 'application/json'})
        if not result.ok or 'error' in result.json():
            msg = result.json()['error']
            j.logger.log(msg)
            raise AuthError(msg)
        return result.json()

    def getUserInfo(self, accesstoken):
        """Fetch the user profile; expects a JSON body with 'login' and
        'email' keys. Raises AuthError when the request fails.
        """
        params = {'access_token': accesstoken['access_token']}
        userinforesp = requests.get(self.user_info_url, params=params)
        if not userinforesp.ok:
            msg = 'Failed to get user details'
            j.logger.log(msg)
            raise AuthError(msg)
        userinfo = userinforesp.json()
        return UserInfo(userinfo['login'], userinfo['email'], ['user'])
class ItsYouOnline(OauthInstance):
    """OAuth client for ItsYou.online, whose token endpoint returns JWTs
    rather than plain OAuth2 access tokens."""

    def getAccessToken(self, code, state):
        """Exchange an authorization ``code`` for a JWT-based token dict.

        Requests the extra offline_access scope and sets the portal
        organization as the JWT audience. Raises AuthError on failure.
        """
        import jose
        import jose.jwt
        scope = self.scope + ',offline_access'
        organization = j.portal.server.active.cfg['organization']
        payload = {'code': code, 'client_id': self.id, 'client_secret': self.secret,
                   'redirect_uri': self.redirect_url, 'grant_type': '', 'scope': scope,
                   'response_type': 'id_token', 'state': state, 'aud': organization}
        result = requests.post(self.accesstokenaddress, data=payload, headers={
            'Accept': 'application/json'})
        if not result.ok:
            msg = result.text
            j.logger.log(msg)
            raise AuthError(msg)
        token = result.json()
        # Convert the absolute JWT 'exp' claim into an OAuth2-style relative
        # 'expires_in' so callers can treat both token flavours alike.
        jwtdata = jose.jwt.get_unverified_claims(token['access_token'])
        token['expires_in'] = jwtdata['exp'] - time.time()
        return token

    def getUserInfo(self, accesstoken):
        """Resolve the JWT into a UserInfo, verifying the granted scopes.

        Raises AuthError when the requested scopes were not granted or the
        user-info endpoint fails.
        """
        import jose
        import jose.jwt
        jwt = accesstoken['access_token']
        headers = {'Authorization': 'bearer %s' % jwt}
        jwtdata = jose.jwt.get_unverified_claims(jwt)
        scopes = jwtdata['scope']
        requestedscopes = set(self.scope.split(','))
        if set(jwtdata['scope']).intersection(requestedscopes) != requestedscopes:
            # Bug fix: the client id lives on self.id (set in
            # OauthInstance.__init__); there is no `self.client` attribute,
            # so the old `self.client.id` raised AttributeError instead of
            # reporting the scope failure.
            msg = 'Failed to get the requested scope for %s' % self.id
            raise AuthError(msg)
        username = jwtdata['username']
        userinfourl = self.user_info_url.rstrip('/') + "/%s/info" % username
        userinforesp = requests.get(userinfourl, headers=headers)
        if not userinforesp.ok:
            msg = 'Failed to get user details'
            raise AuthError(msg)
        groups = ['user']
        for scope in scopes:
            parts = scope.split(':')
            if len(parts) == 3 and parts[:2] == ['user', 'memberof']:
                # 'user:memberof:org.suborg' -> keep only the leaf group name.
                groups.append(parts[-1].split('.')[-1])
        userinfo = userinforesp.json()
        return UserInfo(userinfo['username'], userinfo['emailaddresses'][0]['emailaddress'], groups)
| 37.783582 | 116 | 0.61031 |
b399b85f64b91f5b0202a535568fe79e5dfa9e53 | 931 | py | Python | sample/_test_sender_request.py | cilame/vredis | 7c884ab29d9e3094003eb96f8ecedfed5ba51079 | [
"MIT"
] | 4 | 2019-01-05T17:51:55.000Z | 2019-11-18T08:12:40.000Z | sample/_test_sender_request.py | cilame/vredis | 7c884ab29d9e3094003eb96f8ecedfed5ba51079 | [
"MIT"
] | 1 | 2021-03-18T10:13:54.000Z | 2021-03-23T06:15:41.000Z | sample/_test_sender_request.py | cilame/vredis | 7c884ab29d9e3094003eb96f8ecedfed5ba51079 | [
"MIT"
] | null | null | null | # 增加环境变量,仅测试使用
import os
import sys
# Test-only path hack: make the parent directory importable (presumably so
# _test_config and the local vredis package resolve — verify).
p = os.path.split(os.getcwd())[0]
sys.path.append(p)
from _test_config import host,password
from vredis import pipe
pipe.connect(host=host,port=6379,password=password)
pipe.DEBUG = True # whether the worker prints to its console (default: False)
#pipe.DUMPING = True # whether to store item data locally (default: False)
pipe.KEEPALIVE = False # whether to keep the link alive; if True, the worker monitors the sender connection and stops the task when the sender disconnects (default: True)
pipe.AUTO_IMPORT = True
# When a wrapped function runs on the worker side and its return value is
# not None:
# 1. plain values are packed into a dict and automatically put into the default table;
# 2. iterables are iterated first, and each item is packed into a dict and put into the default table.
import requests
from lxml import etree
@pipe
def req_baidu(key='123',num=0):
    """Fetch one page of Baidu search results for ``key`` and yield the
    result links.

    Wrapped by ``@pipe``, so it executes on a vredis worker and every
    yielded dict is packed and put into the default table (see the module
    comments above). ``'{}0'.format(num)`` encodes the page offset
    pn = num * 10.
    """
    url = 'http://www.baidu.com/s?wd={}&pn={}0'.format(key,num)
    s = requests.get(url)
    e = etree.HTML(s.content)
    print(num)
    for href in e.xpath('//div[contains(@class,"c-container")]/h3/a/@href'):
        yield {'num':num,'href':href}
# Enqueue a single crawl task; the commented loop below would fan out 400
# pages to the workers instead.
req_baidu(num=1)
# for i in range(400):
#     req_baidu(num=i)
| 22.707317 | 82 | 0.703545 |
3a82b912025f90e1d8d184536da05115cb517380 | 513 | py | Python | 220.py | RafaelHuang87/Leet-Code-Practice | 7754dcee38ffda18a5759113ef06d7becf4fe728 | [
"MIT"
] | null | null | null | 220.py | RafaelHuang87/Leet-Code-Practice | 7754dcee38ffda18a5759113ef06d7becf4fe728 | [
"MIT"
] | null | null | null | 220.py | RafaelHuang87/Leet-Code-Practice | 7754dcee38ffda18a5759113ef06d7becf4fe728 | [
"MIT"
] | null | null | null | class Solution:
def containsNearbyAlmostDuplicate(self, nums: [int], k: int, t: int) -> bool:
lenth = len(nums)
a = set()
for i in range(lenth):
if t == 0:
if nums[i] in a:
return True
else:
for atem in a:
if abs(nums[i] - atem) <= t:
return True
a.add(nums[i])
if len(a) == k + 1:
a.remove(nums[i - k])
return False
| 30.176471 | 81 | 0.397661 |
8627b0fbdee4ed4268e39be6ee734bdeee1fd615 | 8,937 | py | Python | seq2seq_keras/seq2seq.py | vladcioaba/text_simplification | 4f1a56b91c2b7b9212b6b5f3303c1b14e351c855 | [
"MIT"
] | null | null | null | seq2seq_keras/seq2seq.py | vladcioaba/text_simplification | 4f1a56b91c2b7b9212b6b5f3303c1b14e351c855 | [
"MIT"
] | null | null | null | seq2seq_keras/seq2seq.py | vladcioaba/text_simplification | 4f1a56b91c2b7b9212b6b5f3303c1b14e351c855 | [
"MIT"
] | null | null | null | from __future__ import print_function
from keras.models import Model
from keras.models import load_model
from keras.layers import Input, LSTM, Dense
import numpy as np
def initCorpus(num_samples = 10000, data_path = ''):
    """Read parallel text from ``data_path`` and vectorize it for a
    character-level seq2seq model.

    The file is expected to contain groups of three lines: input sentence,
    target sentence, separator. At most ``num_samples`` pairs are read.
    Targets are wrapped in '\\t' (start-of-sequence) and '\\n'
    (end-of-sequence) markers.

    Fixes over the original: the sample counter was decremented *before*
    the break check, so the function always read one pair fewer than
    requested (and zero pairs for num_samples=1); and ``max([...])`` no
    longer crashes on an empty corpus.

    Returns a 13-tuple: (input_texts, target_texts, input_characters,
    target_characters, num_encoder_tokens, num_decoder_tokens,
    max_encoder_seq_length, max_decoder_seq_length, input_token_index,
    target_token_index, encoder_input_data, decoder_input_data,
    decoder_target_data).
    """
    input_texts = []
    target_texts = []
    input_characters = set()
    target_characters = set()
    with open(data_path, 'r', encoding='utf-8') as f:
        lines = f.read().split('\n')
    for i in range(0, len(lines) - 1, 3):
        # Check before consuming so exactly num_samples pairs are read.
        if num_samples <= 0:
            break
        num_samples -= 1
        input_text = lines[i]
        # '\t' / '\n' act as start- and end-of-sequence markers.
        target_text = '\t' + lines[i + 1] + '\n'
        input_texts.append(input_text)
        target_texts.append(target_text)
        input_characters.update(input_text)
        target_characters.update(target_text)
    input_characters = sorted(input_characters)
    target_characters = sorted(target_characters)
    num_encoder_tokens = len(input_characters)
    num_decoder_tokens = len(target_characters)
    max_encoder_seq_length = max((len(txt) for txt in input_texts), default=0)
    max_decoder_seq_length = max((len(txt) for txt in target_texts), default=0)
    print('Number of samples:', len(input_texts))
    print('Number of unique input tokens:', num_encoder_tokens)
    print('Number of unique output tokens:', num_decoder_tokens)
    print('Max sequence length for inputs:', max_encoder_seq_length)
    print('Max sequence length for outputs:', max_decoder_seq_length)
    input_token_index = {char: i for i, char in enumerate(input_characters)}
    target_token_index = {char: i for i, char in enumerate(target_characters)}
    # One-hot tensors: (sample, timestep, token).
    encoder_input_data = np.zeros(
        (len(input_texts), max_encoder_seq_length, num_encoder_tokens),
        dtype='float32')
    decoder_input_data = np.zeros(
        (len(input_texts), max_decoder_seq_length, num_decoder_tokens),
        dtype='float32')
    decoder_target_data = np.zeros(
        (len(input_texts), max_decoder_seq_length, num_decoder_tokens),
        dtype='float32')
    print("encoder_input_data", encoder_input_data.shape)
    print("decoder_input_data", decoder_input_data.shape)
    print("decoder_target_data", decoder_target_data.shape)
    print("**************")
    for i, (input_text, target_text) in enumerate(zip(input_texts, target_texts)):
        for t, char in enumerate(input_text):
            encoder_input_data[i, t, input_token_index[char]] = 1.
        for t, char in enumerate(target_text):
            decoder_input_data[i, t, target_token_index[char]] = 1.
            if t > 0:
                # decoder_target_data is decoder_input_data shifted left by
                # one timestep; it therefore omits the start character '\t'.
                decoder_target_data[i, t - 1, target_token_index[char]] = 1.
    return input_texts, target_texts, input_characters, target_characters, num_encoder_tokens, num_decoder_tokens, max_encoder_seq_length, max_decoder_seq_length, input_token_index, target_token_index, encoder_input_data, decoder_input_data, decoder_target_data
def trainAndSave(input_texts=None,
                 target_texts=None,
                 batch_size=64,
                 epochs=100,
                 latent_dim=512,
                 save_file='s2s.h5',
                 num_encoder_tokens=0,
                 num_decoder_tokens=0,
                 max_encoder_seq_length=0,
                 max_decoder_seq_length=0,
                 encoder_input_data=None,
                 decoder_input_data=None,
                 decoder_target_data=None):
    """Build, train and save a character-level seq2seq (encoder-decoder LSTM) model.

    The encoder reads one-hot encoded input characters and its final LSTM
    states are used as the initial states of the decoder, which is trained
    with teacher forcing to predict ``decoder_target_data`` (the decoder
    input shifted by one timestep).

    :param input_texts: unused; kept for backward compatibility with existing callers.
    :param target_texts: unused; kept for backward compatibility with existing callers.
    :param batch_size: training batch size.
    :param epochs: number of training epochs.
    :param latent_dim: dimensionality of the LSTM hidden state.
    :param save_file: path the trained model is saved to (HDF5).
    :param num_encoder_tokens: size of the input character vocabulary.
    :param num_decoder_tokens: size of the target character vocabulary.
    :param max_encoder_seq_length: unused; kept for backward compatibility.
    :param max_decoder_seq_length: unused; kept for backward compatibility.
    :param encoder_input_data: one-hot array (samples, enc_seq_len, num_encoder_tokens).
    :param decoder_input_data: one-hot array (samples, dec_seq_len, num_decoder_tokens).
    :param decoder_target_data: one-hot array, decoder input shifted by one timestep.
    :return: the trained Keras ``Model`` (also persisted to ``save_file``).
    """
    # NOTE: the previously-unused parameters had mutable defaults ([]); they
    # now default to None to avoid the shared-mutable-default pitfall.
    # Encoder: we only keep the final hidden and cell states.
    encoder_inputs = Input(shape=(None, num_encoder_tokens))
    encoder = LSTM(latent_dim, return_state=True)
    encoder_outputs, state_h, state_c = encoder(encoder_inputs)
    encoder_states = [state_h, state_c]
    # Decoder: returns full output sequences and its internal states. The
    # states are not used during training but are needed for inference.
    decoder_inputs = Input(shape=(None, num_decoder_tokens))
    decoder_lstm = LSTM(latent_dim, return_sequences=True, return_state=True)
    decoder_outputs, _, _ = decoder_lstm(decoder_inputs,
                                         initial_state=encoder_states)
    decoder_dense = Dense(num_decoder_tokens, activation='softmax')
    decoder_outputs = decoder_dense(decoder_outputs)
    # Full training model: (encoder_input_data, decoder_input_data) -> decoder_target_data.
    model = Model([encoder_inputs, decoder_inputs], decoder_outputs)
    model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
    model.fit([encoder_input_data, decoder_input_data], decoder_target_data,
              batch_size=batch_size,
              epochs=epochs,
              validation_split=0.2)
    # Persist the trained model and also return it for immediate use.
    model.save(save_file)
    return model
def load(save_file='s2s.h5',
         latent_dim=512,
         num_encoder_tokens=0,
         num_decoder_tokens=0):
    """Rebuild inference-time encoder and decoder models from a saved seq2seq model.

    The trained layers are extracted from the saved model by index
    (layers[2] = encoder LSTM, layers[3] = decoder LSTM, layers[4] = dense
    softmax), so this only works for models produced by ``trainAndSave``.

    :param save_file: path of the HDF5 file written by ``trainAndSave``.
    :param latent_dim: LSTM hidden size used at training time (must match).
    :param num_encoder_tokens: unused; kept for backward compatibility.
    :param num_decoder_tokens: unused; kept for backward compatibility.
    :return: ``(encoder_model, decoder_model)`` for use by ``decodeSequence``.
    """
    model = load_model(save_file)
    # Encoder inference model: input text -> final (h, c) states.
    encoder_inputs = model.input[0]
    # BUG FIX: the previous version also instantiated a fresh, untrained
    # LSTM layer here that was never used; it has been removed.
    encoder_outputs, state_h, state_c = model.layers[2].output
    encoder_states = [state_h, state_c]
    encoder_model = Model(encoder_inputs, encoder_states)
    # Decoder inference model: (previous char, previous states) -> (next char
    # distribution, new states).
    decoder_state_input_h = Input(shape=(latent_dim,), name='input_3')
    decoder_state_input_c = Input(shape=(latent_dim,), name='input_4')
    decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
    decoder_inputs = model.input[1]
    decoder_lstm = model.layers[3]
    decoder_outputs, state_h, state_c = decoder_lstm(decoder_inputs, initial_state=decoder_states_inputs)
    decoder_states = [state_h, state_c]
    decoder_dense = model.layers[4]
    decoder_outputs = decoder_dense(decoder_outputs)
    decoder_model = Model([decoder_inputs] + decoder_states_inputs,
                          [decoder_outputs] + decoder_states)
    return encoder_model, decoder_model
def decodeSequence(encoder_model,
                   decoder_model,
                   num_encoder_tokens,
                   num_decoder_tokens,
                   input_token_index,
                   target_token_index,
                   reverse_input_char_index,
                   reverse_target_char_index,
                   max_encoder_seq_length,
                   max_decoder_seq_length,
                   input_text):
    """Greedily decode one input string with the inference encoder/decoder pair.

    The input text is one-hot encoded, run through the encoder to obtain the
    initial decoder states, and the decoder is then sampled one character at a
    time (argmax) until it emits '\\n' or exceeds ``max_decoder_seq_length``.

    :return: the decoded output string (usually ending with '\\n').
    """
    # One-hot encode the input text into shape (1, max_len, vocab).
    input_seq = np.zeros((1, max_encoder_seq_length, num_encoder_tokens), dtype='float32')
    for t, char in enumerate(input_text):
        input_seq[0, t, input_token_index[char]] = 1.
    states_value = encoder_model.predict(input_seq)
    # Debug echo: reconstruct the input text from its one-hot encoding.
    decoded_sentence_in = ''
    for token_vector in input_seq[0]:
        hits = np.where(token_vector == 1)
        # BUG FIX: previously `if hits[0]:` relied on array truthiness, so a
        # character whose token index is 0 (array([0]) is falsy) was silently
        # dropped, and empty-array truthiness is deprecated. Test .size instead.
        if hits[0].size:
            decoded_sentence_in += reverse_input_char_index[hits[0][0]]
    print(decoded_sentence_in)
    print("******")
    # Seed the decoder with the start-of-sequence character '\t'.
    target_seq = np.zeros((1, 1, num_decoder_tokens))
    target_seq[0, 0, target_token_index['\t']] = 1.
    # Sampling loop for a batch of size 1.
    stop_condition = False
    decoded_sentence = ''
    while not stop_condition:
        output_tokens, h, c = decoder_model.predict([target_seq] + states_value)
        # Greedy sampling: take the most probable next character.
        sampled_token_index = np.argmax(output_tokens[0, -1, :])
        sampled_char = reverse_target_char_index[sampled_token_index]
        decoded_sentence += sampled_char
        # Stop on the end-of-sequence character or when the output is too long.
        if (sampled_char == '\n' or len(decoded_sentence) > max_decoder_seq_length):
            stop_condition = True
        # Feed the sampled character and updated states back into the decoder.
        target_seq = np.zeros((1, 1, num_decoder_tokens))
        target_seq[0, 0, sampled_token_index] = 1.
        states_value = [h, c]
    print(decoded_sentence)
    return decoded_sentence
| 41.761682 | 261 | 0.668681 |
330e8d16318b9df3d8c1a274cde325ffe1925085 | 5,456 | py | Python | slaves/api.py | Phinnik/slaves_api | 075ce21a917f42974be73845d920a62f5e323ced | [
"MIT"
] | 13 | 2021-03-24T15:53:02.000Z | 2021-03-31T08:37:23.000Z | slaves/api.py | Phinnik/slaves_api | 075ce21a917f42974be73845d920a62f5e323ced | [
"MIT"
] | 4 | 2021-03-26T17:30:40.000Z | 2021-03-31T21:16:11.000Z | slaves/api.py | Phinnik/slaves_api | 075ce21a917f42974be73845d920a62f5e323ced | [
"MIT"
] | 5 | 2021-03-24T20:12:41.000Z | 2021-03-28T15:21:32.000Z | import pydantic
import requests
from typing import List
from slaves import responses
from slaves import exceptions
import json
import time
class Api:
    """Minimal HTTP client for the "Slaves" VK mini-app backend."""

    def __init__(self, authorization: str):
        # Sent verbatim in the "authorization" request header of every call.
        self._authorization = authorization

    def _call(self, method: str, api_method: str, response_type, payload=None):
        """Perform one API request and parse the reply into ``response_type``.

        Retries indefinitely (with a 2 second pause) on connection-level
        failures and maps known API error payloads to typed exceptions.

        :param method: 'post' to send JSON, anything else issues a GET with
            the payload as query parameters.
        :param api_method: API endpoint name appended to the base URL.
        :param response_type: pydantic model the JSON reply is parsed into.
        :param payload: request body / query parameters (defaults to empty).
        """
        url = 'https://pixel.w84.vkforms.ru/HappySanta/slaves/1.0.0/' + api_method
        payload = payload or dict()
        # Browser-like headers copied from the official mini-app so the
        # backend accepts the request.
        headers = {
            "authorization": self._authorization,
            'authority': 'pixel.w84.vkforms.ru',
            'sec-ch-ua': '"Google Chrome";v="89", "Chromium";v="89", ";Not A Brand";v="99"',
            'accept': 'application/json, text/plain, */*',
            'sec-ch-ua-mobile': '?0',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36',
            'origin': 'https://prod-app7794757-29d7bd3253fe.pages-ac.vk-apps.com',
            'sec-fetch-site': 'cross-site',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'referer': 'https://prod-app7794757-29d7bd3253fe.pages-ac.vk-apps.com/',
            'accept-language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7'
        }
        try:
            # BUG FIX: the HTTP request itself now lives inside the try-block.
            # Previously only response.json() was guarded, so the
            # ConnectionError/HTTPError retry below could never trigger
            # (those exceptions are raised by requests.post/get).
            if method == 'post':
                response = requests.post(url, headers=headers, json=payload)
            else:
                response = requests.get(url, headers=headers, params=payload)
            response = response.json()
        except json.decoder.JSONDecodeError as e:
            # Show the raw body that failed to parse before propagating.
            print(response.text)
            raise e
        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError):
            time.sleep(2)
            return self._call(method, api_method, response_type, payload)
        if 'error' in response:
            if response['error'].get('code') == 422:
                raise exceptions.SlaveIsLocked
            if response['error'].get('message') == 'Invalid sign 3':
                raise exceptions.InvalidSign
        try:
            return response_type(**response)
        except pydantic.error_wrappers.ValidationError as e:
            # Chain the pydantic error so the original cause stays visible.
            raise exceptions.UnknownError from e

    def start(self) -> responses.StartResponse:
        """
        Return the start-up information.
        """
        method = 'get'
        api_method = 'start'
        response_type = responses.StartResponse
        payload = None
        return self._call(method, api_method, response_type, payload)

    def user_get(self, user_id: int) -> responses.UserGetResponse:
        """
        Return information about a single user.

        :param user_id: user identifier
        """
        method = 'get'
        api_method = 'user'
        response_type = responses.UserGetResponse
        payload = {'id': user_id}
        return self._call(method, api_method, response_type, payload)

    def users_get(self, user_ids: List[int]) -> responses.UsersGetResponse:
        """
        Return information about several users.

        :param user_ids: list of user identifiers
        """
        method = 'post'
        api_method = 'user'
        response_type = responses.UsersGetResponse
        payload = {'ids': user_ids}
        return self._call(method, api_method, response_type, payload)

    def slave_list(self, user_id: int) -> responses.SlaveListResponse:
        """
        Return the list of a user's slaves.

        :param user_id: user identifier
        """
        method = 'get'
        api_method = 'slaveList'
        response_type = responses.SlaveListResponse
        payload = {'id': user_id}
        return self._call(method, api_method, response_type, payload)

    def buy_slave(self, user_id: int) -> responses.BuySlaveResponse:
        """
        Buy a slave.

        :param user_id: user identifier
        """
        method = 'post'
        api_method = 'buySlave'
        response_type = responses.BuySlaveResponse
        payload = {'slave_id': user_id}
        return self._call(method, api_method, response_type, payload)

    def sale_slave(self, user_id: int) -> responses.SaleSlaveResponse:
        """
        Sell a slave.

        :param user_id: user identifier
        """
        method = 'post'
        api_method = 'saleSlave'
        # BUG FIX: was responses.BuySlaveResponse, contradicting the
        # declared SaleSlaveResponse return type.
        response_type = responses.SaleSlaveResponse
        payload = {'slave_id': user_id}
        return self._call(method, api_method, response_type, payload)

    def buy_fetter(self, slave_id: int) -> responses.BuyFetterResponse:
        """
        Buy fetters for a slave.

        :param slave_id: slave identifier
        """
        method = 'post'
        api_method = 'buyFetter'
        response_type = responses.BuyFetterResponse
        payload = {'slave_id': slave_id}
        return self._call(method, api_method, response_type, payload)

    def job_slave(self, name: str, slave_id: int) -> responses.JobSlaveResponse:
        """
        Send a slave to a job.

        :param name: job name
        :param slave_id: slave identifier
        """
        method = 'post'
        api_method = 'jobSlave'
        response_type = responses.JobSlaveResponse
        payload = {'name': name, 'slave_id': slave_id}
        return self._call(method, api_method, response_type, payload)

    def top_users(self) -> responses.TopUsersResponse:
        """
        Return the user leaderboard.
        """
        method = 'get'
        api_method = 'topUsers'
        response_type = responses.TopUsersResponse
        payload = None
        return self._call(method, api_method, response_type, payload)
3ce5bbd71206babd67103686127870f8795d3eba | 2,464 | bzl | Python | tensorflow_lite_support/tools/build_rules/android_test/generate_instrumentation_tests.bzl | BruceDai/tflite-support | ca36eacbc37b05a9347c428ea6c1e49311505d26 | [
"Apache-2.0"
] | null | null | null | tensorflow_lite_support/tools/build_rules/android_test/generate_instrumentation_tests.bzl | BruceDai/tflite-support | ca36eacbc37b05a9347c428ea6c1e49311505d26 | [
"Apache-2.0"
] | null | null | null | tensorflow_lite_support/tools/build_rules/android_test/generate_instrumentation_tests.bzl | BruceDai/tflite-support | ca36eacbc37b05a9347c428ea6c1e49311505d26 | [
"Apache-2.0"
] | 2 | 2021-06-23T01:14:12.000Z | 2021-06-28T15:12:49.000Z | """Internal helper function for generating instrumentation tests ."""
load(
"//tensorflow_lite_support/tools/build_rules/android_test:android_multidevice_instrumentation_test.bzl",
"android_multidevice_instrumentation_test",
)
def generate_instrumentation_tests(
        name,
        srcs,
        deps,
        target_devices,
        test_java_package_name,
        test_android_package_name,
        instrumentation_target_package,
        instruments,
        binary_args = {},
        **kwargs):
    """Generates an android test binary plus per-device instrumentation tests.

    Concretely this produces:
      - "<name>_binary": an android_binary (soon to be android_application)
        packaging the tests, with a generated test manifest.
      - "<name>_tests": android_instrumentation_test rules, one per entry in
        target_devices.

    Args:
      name: unique prefix to use for generated rules
      srcs: the test sources to generate rules for
      deps: the build dependencies to use for the generated test binary
      target_devices: array of device targets to execute on
      test_java_package_name: the root java package name for the tests.
      test_android_package_name: the android package name to use for the android_binary test app. Typically this is the same as test_java_package_name
      instrumentation_target_package: the android package name to specify as instrumentationTargetPackage in the test_app manifest
      instruments: The android binary the tests instrument.
      binary_args: Optional additional arguments to pass to generated android_binary
      **kwargs: arguments to pass to generated android_instrumentation_test rules
    """
    binary_name = "%s_binary" % name

    # Caller-supplied manifest values override/extend the generated defaults.
    manifest_values = {
        "applicationId": test_android_package_name,
        "instrumentationTargetPackage": instrumentation_target_package,
    }
    manifest_values.update(binary_args.pop("manifest_values", {}))

    native.android_binary(
        name = binary_name,
        testonly = 1,
        instruments = instruments,
        manifest = "//tensorflow_lite_support/tools/build_rules/android_test:AndroidManifest_instrumentation_test_template.xml",
        manifest_values = manifest_values,
        deps = deps + [
            "@android_test_support//runner/android_junit_runner",
        ],
        **binary_args
    )
    android_multidevice_instrumentation_test(
        name = "%s_tests" % name,
        target_devices = target_devices,
        test_app = binary_name,
        **kwargs
    )
2576b0c75e8f9de42b4a0042806c5366c7f26c0e | 2,249 | py | Python | python/GafferSceneUI/SceneWriterUI.py | davidsminor/gaffer | 64f75654ce778105dd93fbaad0e4486a5577cd09 | [
"BSD-3-Clause"
] | null | null | null | python/GafferSceneUI/SceneWriterUI.py | davidsminor/gaffer | 64f75654ce778105dd93fbaad0e4486a5577cd09 | [
"BSD-3-Clause"
] | null | null | null | python/GafferSceneUI/SceneWriterUI.py | davidsminor/gaffer | 64f75654ce778105dd93fbaad0e4486a5577cd09 | [
"BSD-3-Clause"
] | null | null | null | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
# Register a custom widget creator for SceneWriter's "fileName" plug: the
# plug is edited with a file-path chooser rooted at "/", using the standard
# file-system filter and the "sceneCache" bookmark category, restricted to
# leaf (file) selections.
GafferUI.PlugValueWidget.registerCreator(
	GafferScene.SceneWriter.staticTypeId(),
	"fileName",
	lambda plug : GafferUI.PathPlugValueWidget( plug,
		path = Gaffer.FileSystemPath( "/", filter = Gaffer.FileSystemPath.createStandardFilter() ),
		pathChooserDialogueKeywords = {
			"bookmarks" : GafferUI.Bookmarks.acquire( plug, category = "sceneCache" ),
			"leaf" : True,
		},
	),
)
6367608cc755b28f4444470b39345a7b306c54bc | 4,431 | py | Python | self-paced-labs/cloud-hero/optional-kubernetes-engine/tests/test_auth.py | laurenzberger/training-data-analyst | 3e2ef4668c5088ab50ad50a4f29673c88fb1bcd3 | [
"Apache-2.0"
] | 6,140 | 2016-05-23T16:09:35.000Z | 2022-03-30T19:00:46.000Z | self-paced-labs/cloud-hero/optional-kubernetes-engine/tests/test_auth.py | laurenzberger/training-data-analyst | 3e2ef4668c5088ab50ad50a4f29673c88fb1bcd3 | [
"Apache-2.0"
] | 1,384 | 2016-07-08T22:26:41.000Z | 2022-03-24T16:39:43.000Z | self-paced-labs/cloud-hero/optional-kubernetes-engine/tests/test_auth.py | laurenzberger/training-data-analyst | 3e2ef4668c5088ab50ad50a4f29673c88fb1bcd3 | [
"Apache-2.0"
] | 5,110 | 2016-05-27T13:45:18.000Z | 2022-03-31T18:40:42.000Z | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import bookshelf
from conftest import flaky_filter
from flaky import flaky
import mock
from oauth2client.client import OAuth2Credentials
import pytest
@pytest.fixture
def client_with_credentials(app):
    """Provide a factory for Flask test clients whose session already
    contains a signed-in user's profile and OAuth2 credentials."""
    credentials = OAuth2Credentials(
        'access_token',
        'client_id',
        'client_secret',
        'refresh_token',
        '3600',
        None,
        'Test',
        id_token={'sub': '123', 'email': 'user@example.com'},
        scopes=('email', 'profile'))

    @contextlib.contextmanager
    def make_client():
        # Open a test client and pre-populate its session before yielding it.
        with app.test_client() as test_client:
            with test_client.session_transaction() as flask_session:
                flask_session['profile'] = {
                    'email': 'abc@example.com',
                    'name': 'Test User'
                }
                flask_session['google_oauth2_credentials'] = credentials.to_json()
            yield test_client

    return make_client
# Every test case in this class is marked flaky so that transient failures
# (e.g. a temporarily unavailable database) are retried instead of failing
# the run outright.
@flaky(rerun_filter=flaky_filter)
# The 'app' and 'model' fixtures (defined in conftest.py) apply the test
# configuration and clean up any database resources the tests create.
@pytest.mark.usefixtures('app', 'model')
class TestAuth(object):

    def test_not_logged_in(self, app):
        # Anonymous visitors should see a login link on the book list.
        with app.test_client() as client:
            response = client.get('/books/')
            assert response.status == '200 OK'
            page = response.data.decode('utf-8')
            assert 'Login' in page

    def test_logged_in(self, client_with_credentials):
        # Logged-in visitors should see their display name instead.
        with client_with_credentials() as client:
            response = client.get('/books/')
            assert response.status == '200 OK'
            page = response.data.decode('utf-8')
            assert 'Test User' in page

    def test_add_anonymous(self, app):
        book_form = {
            'title': 'Test Book',
        }
        with app.test_client() as client:
            response = client.post('/books/add', data=book_form, follow_redirects=True)
            assert response.status == '200 OK'
            page = response.data.decode('utf-8')
            assert 'Test Book' in page
            assert 'Added by Anonymous' in page

    def test_add_logged_in(self, client_with_credentials):
        book_form = {
            'title': 'Test Book',
        }
        with client_with_credentials() as client:
            response = client.post('/books/add', data=book_form, follow_redirects=True)
            assert response.status == '200 OK'
            page = response.data.decode('utf-8')
            assert 'Test Book' in page
            assert 'Added by Test User' in page

    def test_mine(self, model, client_with_credentials):
        # One book belongs to the logged-in user, the other to someone else.
        model.create({
            'title': 'Book 1',
            'createdById': 'abc@example.com'
        })
        model.create({
            'title': 'Book 2',
            'createdById': 'def@example.com'
        })
        # The "My Books" page must list only the logged-in user's book.
        with client_with_credentials() as client:
            response = client.get('/books/mine')
            assert response.status == '200 OK'
            page = response.data.decode('utf-8')
            assert 'Book 1' in page
            assert 'Book 2' not in page

    @mock.patch("httplib2.Http")
    def test_request_user_info(self, http_mock):
        # Stub out the HTTP transport so no real request is made.
        fake_response = mock.MagicMock(status=200)
        http_instance = mock.MagicMock()
        http_instance.request = mock.MagicMock(
            return_value=(fake_response, b'{"name": "bill"}'))
        http_mock.return_value = http_instance
        credentials = mock.MagicMock()
        bookshelf._request_user_info(credentials)
| 32.343066 | 76 | 0.62469 |
4ca104fa30c16f7861f8d934fef6e8eafd501be2 | 15,069 | py | Python | sdk/python/pulumi_azure_nextgen/network/v20181101/public_ip_address.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/network/v20181101/public_ip_address.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/network/v20181101/public_ip_address.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['PublicIPAddress']
class PublicIPAddress(pulumi.CustomResource):
    """Public IP address resource.

    Auto-generated Pulumi SDK wrapper (see the file header: generated by the
    Pulumi SDK Generator — do not edit by hand) for API version 2018-11-01.
    """
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 ddos_settings: Optional[pulumi.Input[pulumi.InputType['DdosSettingsArgs']]] = None,
                 dns_settings: Optional[pulumi.Input[pulumi.InputType['PublicIPAddressDnsSettingsArgs']]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
                 ip_address: Optional[pulumi.Input[str]] = None,
                 ip_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpTagArgs']]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 provisioning_state: Optional[pulumi.Input[str]] = None,
                 public_ip_address_version: Optional[pulumi.Input[Union[str, 'IPVersion']]] = None,
                 public_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
                 public_ip_prefix: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
                 public_ip_address_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 resource_guid: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[pulumi.InputType['PublicIPAddressSkuArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Public IP address resource.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['DdosSettingsArgs']] ddos_settings: The DDoS protection custom policy associated with the public IP address.
        :param pulumi.Input[pulumi.InputType['PublicIPAddressDnsSettingsArgs']] dns_settings: The FQDN of the DNS record associated with the public IP address.
        :param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input[int] idle_timeout_in_minutes: The idle timeout of the public IP address.
        :param pulumi.Input[str] ip_address: The IP address associated with the public IP address resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpTagArgs']]]] ip_tags: The list of tags associated with the public IP address.
        :param pulumi.Input[str] location: Resource location.
        :param pulumi.Input[str] provisioning_state: The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :param pulumi.Input[Union[str, 'IPVersion']] public_ip_address_version: The public IP address version. Possible values are: 'IPv4' and 'IPv6'.
        :param pulumi.Input[Union[str, 'IPAllocationMethod']] public_ip_allocation_method: The public IP allocation method. Possible values are: 'Static' and 'Dynamic'.
        :param pulumi.Input[pulumi.InputType['SubResourceArgs']] public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated from.
        :param pulumi.Input[str] public_ip_address_name: The name of the public IP address.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] resource_guid: The resource GUID property of the public IP resource.
        :param pulumi.Input[pulumi.InputType['PublicIPAddressSkuArgs']] sku: The public IP address SKU.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] zones: A list of availability zones denoting the IP allocated for the resource needs to come from.
        """
        # Legacy keyword handling: __name__/__opts__ predate resource_name/opts.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: collect every input property into
            # __props__; output-only properties are initialised to None.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['ddos_settings'] = ddos_settings
            __props__['dns_settings'] = dns_settings
            __props__['etag'] = etag
            __props__['id'] = id
            __props__['idle_timeout_in_minutes'] = idle_timeout_in_minutes
            __props__['ip_address'] = ip_address
            __props__['ip_tags'] = ip_tags
            __props__['location'] = location
            __props__['provisioning_state'] = provisioning_state
            __props__['public_ip_address_version'] = public_ip_address_version
            __props__['public_ip_allocation_method'] = public_ip_allocation_method
            __props__['public_ip_prefix'] = public_ip_prefix
            __props__['public_ip_address_name'] = public_ip_address_name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['resource_guid'] = resource_guid
            __props__['sku'] = sku
            __props__['tags'] = tags
            __props__['zones'] = zones
            __props__['ip_configuration'] = None
            __props__['name'] = None
            __props__['type'] = None
        # Register aliases for every other API version of this resource so
        # switching versions does not force a replacement.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/latest:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20150615:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20160330:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20160601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20160901:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20161201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170301:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170801:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170901:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20171001:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20171101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180401:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180701:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180801:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20181001:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20181201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190401:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190701:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190801:PublicIPAddress"),
pulumi.Alias(type_="azure-nextgen:network/v20190901:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20191101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20191201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200301:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200401:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200501:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200701:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200801:PublicIPAddress")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(PublicIPAddress, __self__).__init__(
            'azure-nextgen:network/v20181101:PublicIPAddress',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'PublicIPAddress':
        """
        Get an existing PublicIPAddress resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # NOTE(review): __props__ is intentionally empty here — the actual
        # state appears to be read back from the provider via the id; confirm
        # against the Pulumi SDK generator's conventions.
        __props__ = dict()

        return PublicIPAddress(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="ddosSettings")
    def ddos_settings(self) -> pulumi.Output[Optional['outputs.DdosSettingsResponse']]:
        """
        The DDoS protection custom policy associated with the public IP address.
        """
        return pulumi.get(self, "ddos_settings")

    @property
    @pulumi.getter(name="dnsSettings")
    def dns_settings(self) -> pulumi.Output[Optional['outputs.PublicIPAddressDnsSettingsResponse']]:
        """
        The FQDN of the DNS record associated with the public IP address.
        """
        return pulumi.get(self, "dns_settings")

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[Optional[str]]:
        """
        A unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter(name="idleTimeoutInMinutes")
    def idle_timeout_in_minutes(self) -> pulumi.Output[Optional[int]]:
        """
        The idle timeout of the public IP address.
        """
        return pulumi.get(self, "idle_timeout_in_minutes")

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Output[Optional[str]]:
        """
        The IP address associated with the public IP address resource.
        """
        return pulumi.get(self, "ip_address")

    @property
    @pulumi.getter(name="ipConfiguration")
    def ip_configuration(self) -> pulumi.Output['outputs.IPConfigurationResponse']:
        """
        The IP configuration associated with the public IP address.
        """
        return pulumi.get(self, "ip_configuration")

    @property
    @pulumi.getter(name="ipTags")
    def ip_tags(self) -> pulumi.Output[Optional[Sequence['outputs.IpTagResponse']]]:
        """
        The list of tags associated with the public IP address.
        """
        return pulumi.get(self, "ip_tags")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """
        Resource location.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[Optional[str]]:
        """
        The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="publicIPAddressVersion")
    def public_ip_address_version(self) -> pulumi.Output[Optional[str]]:
        """
        The public IP address version. Possible values are: 'IPv4' and 'IPv6'.
        """
        return pulumi.get(self, "public_ip_address_version")

    @property
    @pulumi.getter(name="publicIPAllocationMethod")
    def public_ip_allocation_method(self) -> pulumi.Output[Optional[str]]:
        """
        The public IP allocation method. Possible values are: 'Static' and 'Dynamic'.
        """
        return pulumi.get(self, "public_ip_allocation_method")

    @property
    @pulumi.getter(name="publicIPPrefix")
    def public_ip_prefix(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
        """
        The Public IP Prefix this Public IP Address should be allocated from.
        """
        return pulumi.get(self, "public_ip_prefix")

    @property
    @pulumi.getter(name="resourceGuid")
    def resource_guid(self) -> pulumi.Output[Optional[str]]:
        """
        The resource GUID property of the public IP resource.
        """
        return pulumi.get(self, "resource_guid")

    @property
    @pulumi.getter
    def sku(self) -> pulumi.Output[Optional['outputs.PublicIPAddressSkuResponse']]:
        """
        The public IP address SKU.
        """
        return pulumi.get(self, "sku")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A list of availability zones denoting the IP allocated for the resource needs to come from.
        """
        return pulumi.get(self, "zones")

    def translate_output_property(self, prop):
        # Map provider camelCase property names to Python snake_case.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        # Map Python snake_case property names back to provider camelCase.
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
f0a57014b9f9c3cdad14a8ee7d64607e8d2e2b67 | 1,151 | py | Python | django_qbe/savedqueries/models.py | maxwell-k/qbe | 7efee0b33130bca81fb903249771480f4c3711ee | [
"MIT"
] | 37 | 2015-01-05T09:28:46.000Z | 2020-10-25T08:35:30.000Z | django_qbe/savedqueries/models.py | maxwell-k/qbe | 7efee0b33130bca81fb903249771480f4c3711ee | [
"MIT"
] | 17 | 2015-01-05T00:54:06.000Z | 2020-12-17T05:00:16.000Z | django_qbe/savedqueries/models.py | maxwell-k/qbe | 7efee0b33130bca81fb903249771480f4c3711ee | [
"MIT"
] | 16 | 2015-01-19T20:16:46.000Z | 2021-10-03T16:48:17.000Z | from builtins import object
import pickle
from django.db import models
from django.utils.translation import ugettext_lazy as _
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
from picklefield.fields import PickledObjectField
class SavedQuery(models.Model):
    """Persists a QBE query (pickled) keyed by its hash for later reuse."""
    # The query hash is the primary key, so saving the same query again
    # overwrites the existing row instead of creating a duplicate.
    query_hash = models.CharField(_("hash"), max_length=32, primary_key=True,
                                  editable=False)
    name = models.CharField(_("name"), max_length=100)
    description = models.TextField(_("description"), blank=True)
    # Query structure is stored pickled via django-picklefield.
    query_data = PickledObjectField(protocol=pickle.HIGHEST_PROTOCOL)
    date_created = models.DateTimeField(_("date created"), default=now,
                                        editable=False)
    # Refreshed in save() on every write (not auto_now).
    date_updated = models.DateTimeField(_("date updated"), editable=False)

    class Meta(object):
        verbose_name = _("Saved query")
        verbose_name_plural = _("Saved queries")

    def __unicode__(self):
        # NOTE(review): __unicode__ is only consulted on Python 2; on
        # Python 3 the default repr is used since __str__ is not defined —
        # confirm whether py3 display matters here.
        return self.name

    def save(self, *args, **kwargs):
        """Stamp date_updated with the current time before saving."""
        self.date_updated = now()
        super(SavedQuery, self).save(*args, **kwargs)
| 32.885714 | 77 | 0.677672 |
408fdb9caebbfcbb0963fdc75234f0e39eafa321 | 6,044 | py | Python | ucsmsdk/mometa/vnic/VnicFcGroupDef.py | thinkitdata/ucsmsdk | da6599e1dbc1207a30eabe548a7e5791af5f476b | [
"Apache-2.0"
] | null | null | null | ucsmsdk/mometa/vnic/VnicFcGroupDef.py | thinkitdata/ucsmsdk | da6599e1dbc1207a30eabe548a7e5791af5f476b | [
"Apache-2.0"
] | null | null | null | ucsmsdk/mometa/vnic/VnicFcGroupDef.py | thinkitdata/ucsmsdk | da6599e1dbc1207a30eabe548a7e5791af5f476b | [
"Apache-2.0"
] | null | null | null | """This module contains the general information for VnicFcGroupDef ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class VnicFcGroupDefConsts:
    """Allowed constant values for VnicFcGroupDef string properties."""
    # intId (see prop_meta["int_id"])
    INT_ID_NONE = "none"
    # policyOwner (see prop_meta["policy_owner"])
    POLICY_OWNER_LOCAL = "local"
    POLICY_OWNER_PENDING_POLICY = "pending-policy"
    POLICY_OWNER_POLICY = "policy"
class VnicFcGroupDef(ManagedObject):
    """This is VnicFcGroupDef class."""

    consts = VnicFcGroupDefConsts()
    naming_props = set([])

    # Managed-object metadata: class/XML names, fixed RN ("fc-group"),
    # minimum UCSM version, category, field mask, required privileges,
    # allowed parent/child MOs and supported operations.
    mo_meta = MoMeta("VnicFcGroupDef", "vnicFcGroupDef", "fc-group", VersionMeta.Version211a, "InputOutput", 0x3ff, [], ["admin", "ls-compute", "ls-config", "ls-server", "ls-storage"], [u'storageIniGroup'], [u'faultInst', u'storageConnectionDef'], ["Add", "Get", "Set"])

    # Per-property metadata: (python name, wire name, type, min version,
    # access mode, mask bit, min/max length, validation regex,
    # allowed enum values, allowed numeric ranges).
    prop_meta = {
        "adaptor_profile_name": MoPropertyMeta("adaptor_profile_name", "adaptorProfileName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x4, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        "ident_pool_name": MoPropertyMeta("ident_pool_name", "identPoolName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
        "max_data_field_size": MoPropertyMeta("max_data_field_size", "maxDataFieldSize", "uint", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], ["256-2112"]),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "nw_templ_name": MoPropertyMeta("nw_templ_name", "nwTemplName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "oper_stats_policy_name": MoPropertyMeta("oper_stats_policy_name", "operStatsPolicyName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
        "oper_storage_conn_policy_name": MoPropertyMeta("oper_storage_conn_policy_name", "operStorageConnPolicyName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
        "policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["local", "pending-policy", "policy"], []),
        "qos_policy_name": MoPropertyMeta("qos_policy_name", "qosPolicyName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x40, 0, 256, None, [], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "stats_policy_name": MoPropertyMeta("stats_policy_name", "statsPolicyName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x80, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x100, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "storage_conn_policy_name": MoPropertyMeta("storage_conn_policy_name", "storageConnPolicyName", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x200, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
    }

    # Wire (camelCase) -> python attribute name mapping.
    prop_map = {
        "adaptorProfileName": "adaptor_profile_name",
        "childAction": "child_action",
        "descr": "descr",
        "dn": "dn",
        "identPoolName": "ident_pool_name",
        "intId": "int_id",
        "maxDataFieldSize": "max_data_field_size",
        "name": "name",
        "nwTemplName": "nw_templ_name",
        "operStatsPolicyName": "oper_stats_policy_name",
        "operStorageConnPolicyName": "oper_storage_conn_policy_name",
        "policyLevel": "policy_level",
        "policyOwner": "policy_owner",
        "qosPolicyName": "qos_policy_name",
        "rn": "rn",
        "sacl": "sacl",
        "statsPolicyName": "stats_policy_name",
        "status": "status",
        "storageConnPolicyName": "storage_conn_policy_name",
    }

    def __init__(self, parent_mo_or_dn, **kwargs):
        # All properties start unset; ManagedObject.__init__ applies any
        # keyword overrides and attaches the MO under its parent.
        self._dirty_mask = 0
        self.adaptor_profile_name = None
        self.child_action = None
        self.descr = None
        self.ident_pool_name = None
        self.int_id = None
        self.max_data_field_size = None
        self.name = None
        self.nw_templ_name = None
        self.oper_stats_policy_name = None
        self.oper_storage_conn_policy_name = None
        self.policy_level = None
        self.policy_owner = None
        self.qos_policy_name = None
        self.sacl = None
        self.stats_policy_name = None
        self.status = None
        self.storage_conn_policy_name = None

        ManagedObject.__init__(self, "VnicFcGroupDef", parent_mo_or_dn, **kwargs)
| 68.681818 | 270 | 0.662641 |
499d92ee41dffb52a4c464216678f7802166e423 | 4,755 | py | Python | sdk/hanaonazure/azure-mgmt-hanaonazure/azure/mgmt/hanaonazure/models/hana_instance.py | mccoyp/azure-keyvault-7.3-preview | da351753a9d3d2bf97c27566865cd88bae7faa55 | [
"MIT"
] | null | null | null | sdk/hanaonazure/azure-mgmt-hanaonazure/azure/mgmt/hanaonazure/models/hana_instance.py | mccoyp/azure-keyvault-7.3-preview | da351753a9d3d2bf97c27566865cd88bae7faa55 | [
"MIT"
] | null | null | null | sdk/hanaonazure/azure-mgmt-hanaonazure/azure/mgmt/hanaonazure/models/hana_instance.py | mccoyp/azure-keyvault-7.3-preview | da351753a9d3d2bf97c27566865cd88bae7faa55 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class HanaInstance(Resource):
    """HANA instance info on Azure (ARM properties and HANA properties).

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID
    :vartype id: str
    :ivar name: Resource name
    :vartype name: str
    :ivar type: Resource type
    :vartype type: str
    :param location: Resource location
    :type location: str
    :ivar tags: Resource tags
    :vartype tags: dict[str, str]
    :param hardware_profile: Specifies the hardware settings for the HANA
     instance.
    :type hardware_profile: ~azure.mgmt.hanaonazure.models.HardwareProfile
    :param storage_profile: Specifies the storage settings for the HANA
     instance disks.
    :type storage_profile: ~azure.mgmt.hanaonazure.models.StorageProfile
    :param os_profile: Specifies the operating system settings for the HANA
     instance.
    :type os_profile: ~azure.mgmt.hanaonazure.models.OSProfile
    :param network_profile: Specifies the network settings for the HANA
     instance.
    :type network_profile: ~azure.mgmt.hanaonazure.models.NetworkProfile
    :ivar hana_instance_id: Specifies the HANA instance unique ID.
    :vartype hana_instance_id: str
    :ivar power_state: Resource power state. Possible values include:
     'starting', 'started', 'stopping', 'stopped', 'restarting', 'unknown'
    :vartype power_state: str or
     ~azure.mgmt.hanaonazure.models.HanaInstancePowerStateEnum
    :ivar proximity_placement_group: Resource proximity placement group
    :vartype proximity_placement_group: str
    :ivar hw_revision: Hardware revision of a HANA instance
    :vartype hw_revision: str
    :param partner_node_id: ARM ID of another HanaInstance that will share a
     network with this HanaInstance
    :type partner_node_id: str
    :ivar provisioning_state: State of provisioning of the HanaInstance.
     Possible values include: 'Accepted', 'Creating', 'Updating', 'Failed',
     'Succeeded', 'Deleting', 'Migrating'
    :vartype provisioning_state: str or
     ~azure.mgmt.hanaonazure.models.HanaProvisioningStatesEnum
    """

    # Server-populated (read-only) attributes; the msrest serializer skips
    # them on outgoing requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'tags': {'readonly': True},
        'hana_instance_id': {'readonly': True},
        'power_state': {'readonly': True},
        'proximity_placement_group': {'readonly': True},
        'hw_revision': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }

    # Python attribute -> JSON path ('properties.xxx' entries live inside the
    # ARM "properties" envelope) and serialization type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'hardware_profile': {'key': 'properties.hardwareProfile', 'type': 'HardwareProfile'},
        'storage_profile': {'key': 'properties.storageProfile', 'type': 'StorageProfile'},
        'os_profile': {'key': 'properties.osProfile', 'type': 'OSProfile'},
        'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfile'},
        'hana_instance_id': {'key': 'properties.hanaInstanceId', 'type': 'str'},
        'power_state': {'key': 'properties.powerState', 'type': 'str'},
        'proximity_placement_group': {'key': 'properties.proximityPlacementGroup', 'type': 'str'},
        'hw_revision': {'key': 'properties.hwRevision', 'type': 'str'},
        'partner_node_id': {'key': 'properties.partnerNodeId', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HanaInstance, self).__init__(**kwargs)
        # Client-settable parameters come from kwargs; read-only ivars are
        # initialized to None and filled in by the service on deserialization.
        self.hardware_profile = kwargs.get('hardware_profile', None)
        self.storage_profile = kwargs.get('storage_profile', None)
        self.os_profile = kwargs.get('os_profile', None)
        self.network_profile = kwargs.get('network_profile', None)
        self.hana_instance_id = None
        self.power_state = None
        self.proximity_placement_group = None
        self.hw_revision = None
        self.partner_node_id = kwargs.get('partner_node_id', None)
        self.provisioning_state = None
| 45.285714 | 98 | 0.651735 |
9470846f6ae9dcf1d43cd6d17f043ec0c931b1a9 | 2,661 | py | Python | rboost/source/document/remark.py | SimoneGasperini/rboost | 5e0108d821077da76964e1e797f0d775b3999f56 | [
"MIT"
] | 1 | 2021-02-06T17:44:00.000Z | 2021-02-06T17:44:00.000Z | rboost/source/document/remark.py | SimoneGasperini/rboost | 5e0108d821077da76964e1e797f0d775b3999f56 | [
"MIT"
] | null | null | null | rboost/source/document/remark.py | SimoneGasperini/rboost | 5e0108d821077da76964e1e797f0d775b3999f56 | [
"MIT"
] | null | null | null | import os
import sys
from stat import S_IREAD, S_IWUSR
from gensim.parsing.preprocessing import strip_punctuation
from gensim.parsing.preprocessing import strip_non_alphanum
from rboost.source.document.base import Document
class Remark (Document):
    """
    Class for the Remark object

    Parameters
    ----------
    date : str, default=None
        Remark date (dd-mm-yyyy)

    user : str, default=None
        Remark author (name-surname)

    path : str
        Remark local path

    name : str
        Remark specific name

    special : str
        Remark special type
    """

    def __init__(self, date, user, path, name, special):
        # Final file name is "remark-<special>_<name>.txt"; the doctype is
        # also recorded on the base Document.
        doctype = 'remark-' + special
        name = doctype + '_' + name + '.txt'
        super(Remark, self).__init__(date=date, user=user,
                                     path=path, name=name,
                                     doctype=doctype)

    @property
    def docname(self):
        """
        Full Remark name (str): "<parent-dir>/<file name>".
        """
        # self.path is assumed to end with a separator; [:-1] drops it so
        # basename() yields the containing directory name — TODO confirm.
        docname = os.path.basename(self.path[:-1]) + '/' + self.name
        return docname

    def open_editor(self):
        """
        Open the document using the system's basic text editor.

        Creates the file (with the date/user header) if it does not exist,
        temporarily makes it writable, and restores read-only afterwards.
        """
        filepath = self.path + self.name
        if not os.path.exists(filepath):
            open(filepath, mode='w').close()
            self.write_date_and_user()
        # Make the file writable for the editing session.
        os.chmod(filepath, S_IWUSR | S_IREAD)
        # NOTE(review): the path is interpolated into a shell command
        # unquoted — paths containing spaces or shell metacharacters will
        # break (or worse); consider subprocess.run with a list argument.
        if sys.platform.startswith('win'):
            os.system('notepad ' + filepath)
        if sys.platform.startswith('linux'):
            os.system('gedit ' + filepath)
        if sys.platform.startswith('darwin'):
            os.system('open -a TextEdit ' + filepath)
        # Back to read-only so the remark is not edited accidentally.
        os.chmod(filepath, S_IREAD)

    def get_text(self):
        """
        Get the pre-processed text extracted from the Remark document.

        The first line (the "#date_user" header) is consumed by readline()
        and therefore excluded from the returned text.

        Returns
        -------
        text : str
            Extracted text (lowercased, punctuation and non-alphanumeric
            characters stripped)
        """
        with open(self.path + self.name, mode='r') as file:
            first_line, raw_text = file.readline(), file.read()
        text = strip_non_alphanum(strip_punctuation(raw_text.lower()))
        return text

    def write_date_and_user(self):
        """
        Write date and user in the first line of the Remark document.
        """
        new_line = f'#{self.date}_{self.user}'
        filepath = self.path + self.name
        # Read everything after the existing first line, then rewrite the
        # file with the fresh header followed by that remainder.
        with open(filepath, mode='r') as file:
            first_line, remainder = file.readline(), file.read()
        os.chmod(filepath, S_IWUSR | S_IREAD)
        with open(filepath, mode='w') as file:
            file.write(new_line + '\n')
            file.write(remainder)
| 24.638889 | 70 | 0.56708 |
f49386e5fa38c1ff28fa7b43406d7bf2ae69402f | 21,900 | py | Python | tests/tests.py | gescheit/fastsnmp | e4a5d2c704c875c598a3001becf392879414a0b4 | [
"MIT"
] | 18 | 2015-04-10T10:10:34.000Z | 2021-05-09T17:22:29.000Z | tests/tests.py | gescheit/fastsnmp | e4a5d2c704c875c598a3001becf392879414a0b4 | [
"MIT"
] | 6 | 2015-06-18T10:51:52.000Z | 2021-11-11T17:11:37.000Z | tests/tests.py | gescheit/fastsnmp | e4a5d2c704c875c598a3001becf392879414a0b4 | [
"MIT"
] | 8 | 2016-02-13T21:02:23.000Z | 2021-12-28T22:43:10.000Z | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import unittest
import pstats
import cProfile
from fastsnmp import snmp_parser
# Base OIDs shared by the parse_varbind test cases below.
OID1 = "1.2.1"
OID2 = "1.2.2"
OID3 = "1.2.3"
class TestSnmpParser(unittest.TestCase):
strs = [
[b'56', b'\x35\x36'], # str
[b'\x00\x80\xeaB^7', b'\x00\x80\xea\x42\x5e\x37'], # bytes
]
ints = [
[-1, b'\xff'],
[-136, b'\xff\x78'],
[-1390, b'\xfa\x92'],
[4294970001, b'\x01\x00\x00\n\x91'],
]
uints = [
[0, b'\x00'],
[1, b'\x01'],
[2, b'\x02'],
[128, b'\x00\x80'],
[136, b'\x00\x88'],
[160, b'\x00\xA0'],
[256, b'\x01\x00'],
[32767, b'\x7f\xff'],
[4294970001, b'\x01\x00\x00\x0a\x91'],
[17179869184, b'\x04\x00\x00\x00\x00'],
[2568068810643379472, b'\x23\xa3\x9c\xfa\x21\x28\x95\x10'],
[18446744073709551615, b'\x00\xff\xff\xff\xff\xff\xff\xff\xff'], # max uint64
[523160, b'\x07\xfb\x98'],
]
object_ids = [
["1.2", b'\x2a'],
# ["2.99.3", b'\x88\x37\x03\x16'], # T-REC-X.690-201508 example
["1.2.128", b'\x2a\x81\x00'],
["1.2.128.128", b'\x2a\x81\x00\x81\x00'],
["1.2.256", b'\x2a\x82\x00'],
["1.2.65536", b'\x2a\x84\x80\x00'],
["1.2.99999", b'\x2a\x86\x8d\x1f'],
['1.3.268633409', b'\x2b\x81\x80\x8c\x8a\x41'],
['1.3.6.1.2.1.3.1.1.3.4.1.192.168.1.255', b'\x2b\x06\x01\x02\x01\x03\x01\x01\x03\x04\x01\x81\x40\x81\x28\x01\x81\x7f'],
]
tags = [
[(67, 1), b'\x43'],
]
length = [
[(15, 1), b'\x0f'],
# [(127, 2), b'\x81\x7f'], # long form
[(127, 1), b'\x7f'],
[(129, 2), b'\x81\x81'],
[(1256, 3), b'\x82\x04\xe8'],
]
def test_integer_encode(self):
for i, enc in self.ints:
int_encoded = snmp_parser.integer_encode(i)
self.assertEqual(int_encoded, enc, "encode %s" % i)
def test_integer_decode(self):
for i, enc in self.ints:
int_decoded = snmp_parser.integer_decode(enc)
self.assertEqual(int_decoded, i, "encode %s" % i)
def test_counter64_encode(self):
for i, enc in self.uints:
int_encoded = snmp_parser.uinteger_encode(i)
self.assertEqual(int_encoded, enc, "encode %s" % i)
def test_counter64_decode(self):
for i, enc in self.uints:
int_decoded = snmp_parser.uinteger_decode(enc)
self.assertEqual(int_decoded, i)
def test_str_decode(self):
for i, enc in self.strs:
str_decoded = snmp_parser.octetstring_decode(enc)
self.assertEqual(str_decoded, i)
def test_oid_encoder(self):
for str_oid, enc in self.object_ids:
oid_encoded = snmp_parser.objectid_encode(str_oid)
self.assertEqual(enc, bytes(oid_encoded), "testing %s" % str_oid)
def test_oid_decoder(self):
for str_oid, enc in self.object_ids:
oid_decoded = snmp_parser.objectid_decode(enc)
self.assertEqual(str_oid, oid_decoded)
def test_tag_decode(self):
for tag, enc in self.tags:
tag_decoded = snmp_parser.tag_decode(enc)
self.assertEqual(tag, tag_decoded)
def test_length_decode(self):
for length, enc in self.length:
length_decoded = snmp_parser.length_decode(enc)
self.assertEqual(length, length_decoded)
length_encoded = snmp_parser.length_encode(length[0])
self.assertEqual(length_encoded, enc)
    def test_decode(self):
        """A captured GetResponse PDU (interface counters) decodes to the
        expected (request_id, error_status, error_index, varbinds) tuple."""
        msg = b'0\x82\x06W\x02\x01\x01\x04\x04test\xa2\x82\x06J\x02\x02\x1f\xc1\x02\x01\x00\x02\x01\x000\x82\x06<0"' \
              b'\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x02\x81\xb0\x80\x88L\x04\x10port-channel11010\x13\x06\x0e+\x06' \
              b'\x01\x02\x01\x02\x02\x01\x0e\x81\xb0\x80\x88LA\x01\x000\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\r' \
              b'\x81\xb0\x80\x88LA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\n\x81\xb0\x80\x88LF\x07\x01' \
              b'\xdd9R\x9b\xd7\xdd0\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x0b\x81\xb0\x80\x88LF\x05\'\xb5+\xec' \
              b'\x0b0\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x14\x81\xb0\x80\x88LA\x01\x000\x1a\x06\x0f+\x06\x01\x02' \
              b'\x01\x1f\x01\x01\x01\x06\x81\xb0\x80\x88LF\x07\x01\xb5\xad\x9b2\x96b0\x13\x06\x0e+\x06\x01\x02\x01\x02' \
              b'\x02\x01\x13\x81\xb0\x80\x88LA\x01\x000\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x07\x81\xb0\x80' \
              b'\x88LF\x05\'\xbd\x11\x1d\xa60"\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x02\x81\xb0\x80\x88M\x04' \
              b'\x10port-channel11020\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x0e\x81\xb0\x80\x88MA\x01\x000\x13\x06' \
              b'\x0e+\x06\x01\x02\x01\x02\x02\x01\r\x81\xb0\x80\x88MA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01' \
              b'\x01\x01\n\x81\xb0\x80\x88MF\x07\x00\xbb\xbf\xe8\xe2\xc7\xef0\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01' \
              b'\x01\x0b\x81\xb0\x80\x88MF\x051\xb5\x7f\xdf"0\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x14\x81\xb0\x80' \
              b'\x88MA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x06\x81\xb0\x80\x88MF\x07\x01?*\xaa\x156' \
              b'\x170\x14\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x13\x81\xb0\x80\x88MA\x02\x07\x860\x18\x06\x0f+\x06\x01' \
              b'\x02\x01\x1f\x01\x01\x01\x07\x81\xb0\x80\x88MF\x055\x8d\x04\xed90"\x06\x0e+\x06\x01\x02\x01\x02\x02\x01' \
              b'\x02\x81\xb0\x80\x88N\x04\x10port-channel11030\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x0e\x81\xb0\x80' \
              b'\x88NA\x01\x000\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\r\x81\xb0\x80\x88NA\x01\x000\x1a\x06\x0f+\x06' \
              b'\x01\x02\x01\x1f\x01\x01\x01\n\x81\xb0\x80\x88NF\x07\x02h\xe4v\xe0Dz0\x18\x06\x0f+\x06\x01\x02\x01\x1f' \
              b'\x01\x01\x01\x0b\x81\xb0\x80\x88NF\x05&\xa8-l\xbe0\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x14\x81\xb0' \
              b'\x80\x88NA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x06\x81\xb0\x80\x88NF\x07\x01\x97' \
              b'\xb5p\xb9\xe2\xe50\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x13\x81\xb0\x80\x88NA\x01\x000\x18\x06' \
              b'\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x07\x81\xb0\x80\x88NF\x05"r\x11\x89\x0f0"\x06\x0e+\x06\x01\x02' \
              b'\x01\x02\x02\x01\x02\x81\xb0\x80\x88O\x04\x10port-channel11040\x13\x06\x0e+\x06\x01\x02\x01\x02\x02' \
              b'\x01\x0e\x81\xb0\x80\x88OA\x01\x000\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\r\x81\xb0\x80\x88OA\x01' \
              b'\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\n\x81\xb0\x80\x88OF\x07\x02h\xd6\xc1\xa2\x19\xcf0' \
              b'\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x0b\x81\xb0\x80\x88OF\x05\'H\x98w&0\x13\x06\x0e+\x06' \
              b'\x01\x02\x01\x02\x02\x01\x14\x81\xb0\x80\x88OA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01' \
              b'\x06\x81\xb0\x80\x88OF\x07\x01\xc1\xc0gn\xcf\x040\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x13\x81' \
              b'\xb0\x80\x88OA\x01\x000\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x07\x81\xb0\x80\x88OF\x05%\r' \
              b'\xe1)\xa00"\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x02\x81\xb0\x80\x88P\x04\x10port-channel11050\x13' \
              b'\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x0e\x81\xb0\x80\x88PA\x01\x000\x13\x06\x0e+\x06\x01\x02\x01' \
              b'\x02\x02\x01\r\x81\xb0\x80\x88PA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\n\x81\xb0\x80' \
              b'\x88PF\x07\x015)\xff\x8f\xf5\xab0\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x0b\x81\xb0\x80\x88PF' \
              b'\x05Q\x03\xf5=\xe90\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x14\x81\xb0\x80\x88PA\x01\x000\x1a\x06' \
              b'\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x06\x81\xb0\x80\x88PF\x07\x02\x0b\x91\xb5E\xd3k0\x14\x06\x0e+' \
              b'\x06\x01\x02\x01\x02\x02\x01\x13\x81\xb0\x80\x88PA\x02ZX0\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01' \
              b'\x01\x07\x81\xb0\x80\x88PF\x05Q\xa0\xbe\xd6\x810"\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x02\x81\xb0' \
              b'\x80\x88Q\x04\x10port-channel11060\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x0e\x81\xb0\x80\x88QA' \
              b'\x01\x000\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\r\x81\xb0\x80\x88QA\x01\x000\x19\x06\x0f+\x06' \
              b'\x01\x02\x01\x1f\x01\x01\x01\n\x81\xb0\x80\x88QF\x06u\x04\xd1:C,0\x18\x06\x0f+\x06\x01\x02\x01' \
              b'\x1f\x01\x01\x01\x0b\x81\xb0\x80\x88QF\x05\x14\x92\xc5\xa8)0\x13\x06\x0e+\x06\x01\x02\x01\x02' \
              b'\x02\x01\x14\x81\xb0\x80\x88QA\x01\x000\x19\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x06\x81\xb0' \
              b'\x80\x88QF\x06%\x150\xbb\x05\x960\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x13\x81\xb0\x80\x88QA' \
              b'\x01\x000\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x07\x81\xb0\x80\x88QF\x05\x16\x05!&+0"' \
              b'\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x02\x81\xb0\x80\x88R\x04\x10port-channel11070\x13\x06\x0e+' \
              b'\x06\x01\x02\x01\x02\x02\x01\x0e\x81\xb0\x80\x88RA\x01\x000\x13\x06\x0e+\x06\x01\x02\x01\x02\x02' \
              b'\x01\r\x81\xb0\x80\x88RA\x01\x000\x1a\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\n\x81\xb0\x80\x88RF' \
              b'\x07\x02\xae\r\x8c\xaaU\x980\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x0b\x81\xb0\x80\x88RF\x05+' \
              b'\xf8lm\xb50\x13\x06\x0e+\x06\x01\x02\x01\x02\x02\x01\x14\x81\xb0\x80\x88RA\x01\x000\x1a\x06\x0f+\x06' \
              b'\x01\x02\x01\x1f\x01\x01\x01\x06\x81\xb0\x80\x88RF\x07\x01\xee\xd7$,\xbb\xce0\x13\x06\x0e+\x06\x01' \
              b'\x02\x01\x02\x02\x01\x13\x81\xb0\x80\x88RA\x01\x000\x18\x06\x0f+\x06\x01\x02\x01\x1f\x01\x01\x01\x07' \
              b'\x81\xb0\x80\x88RF\x05(<i(\xf9'
        encoded = (8129, 0, 0, [['1.3.6.1.2.1.2.2.1.2.369099852', b'port-channel1101'],
                                ['1.3.6.1.2.1.2.2.1.14.369099852', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099852', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099852', 524713245530077],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099852', 170543279115],
                                ['1.3.6.1.2.1.2.2.1.20.369099852', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099852', 481232214464098],
                                ['1.3.6.1.2.1.2.2.1.19.369099852', 0],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099852', 170675740070],
                                ['1.3.6.1.2.1.2.2.1.2.369099853', b'port-channel1102'],
                                ['1.3.6.1.2.1.2.2.1.14.369099853', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099853', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099853', 206432920324079],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099853', 213498453794],
                                ['1.3.6.1.2.1.2.2.1.20.369099853', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099853', 350927451403799],
                                ['1.3.6.1.2.1.2.2.1.19.369099853', 1926],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099853', 229999177017],
                                ['1.3.6.1.2.1.2.2.1.2.369099854', b'port-channel1103'],
                                ['1.3.6.1.2.1.2.2.1.14.369099854', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099854', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099854', 678280409662586],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099854', 166030306494],
                                ['1.3.6.1.2.1.2.2.1.20.369099854', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099854', 448280512815845],
                                ['1.3.6.1.2.1.2.2.1.19.369099854', 0],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099854', 147942639887],
                                ['1.3.6.1.2.1.2.2.1.2.369099855', b'port-channel1104'],
                                ['1.3.6.1.2.1.2.2.1.14.369099855', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099855', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099855', 678221534337487],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099855', 168721676070],
                                ['1.3.6.1.2.1.2.2.1.20.369099855', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099855', 494507089907460],
                                ['1.3.6.1.2.1.2.2.1.19.369099855', 0],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099855', 159146650016],
                                ['1.3.6.1.2.1.2.2.1.2.369099856', b'port-channel1105'],
                                ['1.3.6.1.2.1.2.2.1.14.369099856', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099856', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099856', 339929474266539],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099856', 347958754793],
                                ['1.3.6.1.2.1.2.2.1.20.369099856', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099856', 575670392836971],
                                ['1.3.6.1.2.1.2.2.1.19.369099856', 23128],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099856', 350589212289],
                                ['1.3.6.1.2.1.2.2.1.2.369099857', b'port-channel1106'],
                                ['1.3.6.1.2.1.2.2.1.14.369099857', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099857', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099857', 128663550575404],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099857', 88361773097],
                                ['1.3.6.1.2.1.2.2.1.20.369099857', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099857', 40772942103958],
                                ['1.3.6.1.2.1.2.2.1.19.369099857', 0],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099857', 94575339051],
                                ['1.3.6.1.2.1.2.2.1.2.369099858', b'port-channel1107'],
                                ['1.3.6.1.2.1.2.2.1.14.369099858', 0],
                                ['1.3.6.1.2.1.2.2.1.13.369099858', 0],
                                ['1.3.6.1.2.1.31.1.1.1.10.369099858', 754323171202456],
                                ['1.3.6.1.2.1.31.1.1.1.11.369099858', 188851449269],
                                ['1.3.6.1.2.1.2.2.1.20.369099858', 0],
                                ['1.3.6.1.2.1.31.1.1.1.6.369099858', 544082769001422],
                                ['1.3.6.1.2.1.2.2.1.19.369099858', 0],
                                ['1.3.6.1.2.1.31.1.1.1.7.369099858', 172812216569]])
        msg_decoded = snmp_parser.msg_decode(msg)
        self.assertEqual(encoded, msg_decoded)
    def test_decode2(self):
        """An ARP-table GetResponse (ipNetToMediaPhysAddress rows) decodes
        to the expected varbind list with IP-suffixed OIDs."""
        msg = b'0\x81\xa6\x02\x01\x01\x04\x06public\xa2\x81\x98\x02\x03\x07\xc80\x02\x01\x00\x02\x01\x000\x81\x8a0' \
              b'\x19\x06\x11+\x06\x01\x02\x01\x03\x01\x01\x03\x04\x01\x81@\x81(\x01\x01@\x04\xc0\xa8\x01\x010\x1a' \
              b'\x06\x12+\x06\x01\x02\x01\x03\x01\x01\x03\x04\x01\x81@\x81(\x01\x81\x1a@\x04\xc0\xa8\x01\x9a0\x1a' \
              b'\x06\x12+\x06\x01\x02\x01\x03\x01\x01\x03\x04\x01\x81@\x81(\x01\x815@\x04\xc0\xa8\x01\xb50\x1a\x06' \
              b'\x12+\x06\x01\x02\x01\x03\x01\x01\x03\x04\x01\x81@\x81(\x01\x81<@\x04\xc0\xa8\x01\xbc0\x19\x06\x11+' \
              b'\x06\x01\x02\x01\x03\x01\x01\x03\x04\x01\x81`\x00\x00\x81{@\x04\xe0\x00\x00\xfb'
        encoded = (510000, 0, 0, [
            ['1.3.6.1.2.1.3.1.1.3.4.1.192.168.1.1', b'\xc0\xa8\x01\x01'],
            ['1.3.6.1.2.1.3.1.1.3.4.1.192.168.1.154', b'\xc0\xa8\x01\x9a'],
            ['1.3.6.1.2.1.3.1.1.3.4.1.192.168.1.181', b'\xc0\xa8\x01\xb5'],
            ['1.3.6.1.2.1.3.1.1.3.4.1.192.168.1.188', b'\xc0\xa8\x01\xbc'],
            ['1.3.6.1.2.1.3.1.1.3.4.1.224.0.0.251', b'\xe0\x00\x00\xfb']])
        msg_decoded = snmp_parser.msg_decode(msg)
        self.assertEqual(encoded, msg_decoded)
    def test_decode3(self):
        """A UCD-SNMP load-average GetResponse decodes integers and
        Opaque-float values (1.439..., 2.263...) correctly."""
        msg = b'0\x81\x83\x02\x01\x01\x04\x06public\xa2v\x02\x03?\x8fT\x02\x01\x00\x02\x01\x000i0' \
              b'\x11\x06\x0b+\x06\x01\x04\x01\x8fe\n\x01\x05\x01\x02\x02\x00\x8f0\x11\x06\x0b+\x06' \
              b'\x01\x04\x01\x8fe\n\x01\x05\x02\x02\x02\x00\xe20\x11\x06\x0b+\x06\x01\x04\x01\x8fe' \
              b'\n\x01\x05\x03\x02\x02\x01j0\x16\x06\x0b+\x06\x01\x04\x01\x8fe\n\x01\x06\x01D\x07' \
              b'\x9fx\x04?\xb8@\x000\x16\x06\x0b+\x06\x01\x04\x01\x8fe\n\x01\x06\x02D\x07\x9fx\x04@' \
              b'\x10\xd8\x00'
        encoded = (4165460, 0, 0, [
            ['1.3.6.1.4.1.2021.10.1.5.1', 143], ['1.3.6.1.4.1.2021.10.1.5.2', 226],
            ['1.3.6.1.4.1.2021.10.1.5.3', 362], ['1.3.6.1.4.1.2021.10.1.6.1', 1.439453125],
            ['1.3.6.1.4.1.2021.10.1.6.2', 2.26318359375]]
            )
        msg_decoded = snmp_parser.msg_decode(msg)
        self.assertEqual(encoded, msg_decoded)
    def test_parse_varbind(self):
        """Varbinds for three columns split into (base_oid, index, value)
        triples; the last row per column becomes the next OIDs to poll."""
        result = [['1.2.1.1', 1], ['1.2.2.1', 1], ['1.2.3.1', 1],
                  ['1.2.1.2', 1], ['1.2.2.2', 1], ['1.2.3.2', 1],
                  ['1.2.1.3', 1], ['1.2.2.3', 1], ['1.2.3.3', 1],
                  ['1.2.1.4', 1], ['1.2.2.4', 1], ['1.2.3.4', 1],
                  ]
        main_oids = ('1.2.1', '1.2.2', '1.2.3')
        prev_oids_to_poll = ('1.2.1', '1.2.2', '1.2.3')
        expected_res = [['1.2.1', '1', 1], ['1.2.2', '1', 1], ['1.2.3', '1', 1], ['1.2.1', '2', 1], ['1.2.2', '2', 1],
                        ['1.2.3', '2', 1], ['1.2.1', '3', 1], ['1.2.2', '3', 1], ['1.2.3', '3', 1], ['1.2.1', '4', 1],
                        ['1.2.2', '4', 1], ['1.2.3', '4', 1]]
        expected_oids_to_poll = ('1.2.1.4', '1.2.2.4', '1.2.3.4')
        result, next_oids_to_poll = snmp_parser.parse_varbind(result, main_oids, prev_oids_to_poll)
        self.assertEqual(next_oids_to_poll, expected_oids_to_poll)
        self.assertEqual(result, expected_res)
    def test_parse_varbind2(self):
        """A column that walked past its base OID ('1.2.999...') is dropped
        and its next-poll slot becomes None."""
        # unequal oids len
        result = [['1.2.1.1', 1], ['1.2.2.1', 1], ['1.2.3.1', 1],
                  ['1.2.1.2', 1], ['1.2.2.2', 1], ['1.2.3.2', 1],
                  ['1.2.999.1', 1], ['1.2.2.3', 1], ['1.2.3.3', 1],
                  ['1.2.999.2', 1], ['1.2.2.4', 1], ['1.2.3.4', 1],
                  ]
        main_oids = ('1.2.1', '1.2.2', '1.2.3')
        prev_oids_to_poll = ('1.2.1', '1.2.2', '1.2.3')
        expected_res = [['1.2.1', '1', 1], ['1.2.2', '1', 1], ['1.2.3', '1', 1], ['1.2.1', '2', 1], ['1.2.2', '2', 1],
                        ['1.2.3', '2', 1], ['1.2.2', '3', 1], ['1.2.3', '3', 1], ['1.2.2', '4', 1], ['1.2.3', '4', 1]]
        expected_oids_to_poll = (None, '1.2.2.4', '1.2.3.4')
        result, next_oids_to_poll = snmp_parser.parse_varbind(result, main_oids, prev_oids_to_poll)
        self.assertEqual(next_oids_to_poll, expected_oids_to_poll)
        self.assertEqual(result, expected_res)
    def test_parse_varbind_with_none(self):
        # None values are kept verbatim in the parsed triples and do not
        # stop the walk: the next poll still resumes from index 3 of every
        # main OID (OID1..OID3 are module-level fixture constants).
        result = [[OID1 + '.1', None], [OID2 + '.1', 1], [OID3 + '.1', 1],
                  [OID1 + '.2', 1], [OID2 + '.2', None], [OID3 + '.2', 1],
                  [OID1 + '.3', 1], [OID2 + '.3', 1], [OID3 + '.3', None],
                  ]
        main_oids = (OID1, OID2, OID3)
        prev_oids_to_poll = (OID1, OID2, OID3)
        expected_res = [[OID1, '1', None], [OID2, '1', 1], [OID3, '1', 1],
                        [OID1, '2', 1], [OID2, '2', None], [OID3, '2', 1],
                        [OID1, '3', 1], [OID2, '3', 1], [OID3, '3', None],
                        ]
        expected_oids_to_poll = (OID1 + '.3', OID2 + '.3', OID3 + '.3')
        result, next_oids_to_poll = snmp_parser.parse_varbind(result, main_oids, prev_oids_to_poll)
        self.assertEqual(next_oids_to_poll, expected_oids_to_poll)
        self.assertEqual(result, expected_res)
    def test_parse_varbind_with_end_of_mib(self):
        # Columns that hit end_of_mib_view are dropped from the results and
        # their next-poll OID becomes None; only the still-walking column
        # (OID3) keeps producing entries and a resume OID.
        in_data = [[OID1 + '.1', 1], [OID2 + '.1', 1], [OID3 + '.1', 1],
                   [OID1 + '.1', snmp_parser.end_of_mib_view], [OID2 + '.1', snmp_parser.end_of_mib_view], [OID3 + '.2', 1],
                   ]
        main_oids = (OID1, OID2, OID3)
        prev_oids_to_poll = (OID1, OID2, OID3)
        expected_res = [[OID1, '1', 1],
                        [OID2, '1', 1],
                        [OID3, '1', 1],
                        [OID3, '2', 1],
                        ]
        expected_oids_to_poll = (None, None, OID3 + ".2")
        result, next_oids_to_poll = snmp_parser.parse_varbind(in_data, main_oids, prev_oids_to_poll)
        self.assertEqual(next_oids_to_poll, expected_oids_to_poll)
        self.assertEqual(result, expected_res)
def test_check_is_growing(self):
test_data = [
['1', '2', True],
['123456', '53453', False],
['1.2.1.1', '1.2.2.2', True],
['1.3.1.1', '1.2.2.2', False],
['8.2.1.0', '23.7.0.0', True]
]
for start_oid, finish_oid, exp_res in test_data:
res = snmp_parser.check_is_growing(start_oid, finish_oid)
self.assertEqual(res, exp_res)
def _test_parse_varbind_perf(self):
result = []
for i in range(100):
for y in range(10):
result.append(['1.2.%s.%s' % (y, i), i + 3])
main_oids = tuple(['1.2.%s' % i for i in range(10)])
oids_to_poll = main_oids
snmp_parser.parse_varbind(result, main_oids, oids_to_poll)
    def _test_parse_varbind_prof(self):
        # Manual profiling helper (underscore prefix keeps it out of the
        # normal test discovery): profile 1000 runs of the perf body,
        # dump the stats to "Profile.prof" and print the hottest calls.
        cProfile.runctx("for i in range(1000): self._test_parse_varbind_perf()", globals(), locals(), "Profile.prof")
        s = pstats.Stats("Profile.prof")
        s.strip_dirs().sort_stats("time").print_stats()
# Allow running this test module directly with `python <this_file>`.
if __name__ == "__main__":
    unittest.main()
| 61.344538 | 127 | 0.517032 |
e5a9ce326921245e63a4085bc707aa0820ba3f38 | 1,959 | py | Python | aiohttp_login/decorators.py | kirlf/aiohttp-login | 7e52ea4ef694b6bfe2ad19b0a3d1077b04cfe221 | [
"0BSD"
] | 61 | 2017-01-24T13:13:22.000Z | 2021-09-23T06:39:38.000Z | aiohttp_login/decorators.py | kirlf/aiohttp-login | 7e52ea4ef694b6bfe2ad19b0a3d1077b04cfe221 | [
"0BSD"
] | 13 | 2017-01-28T00:13:37.000Z | 2021-11-25T05:11:59.000Z | aiohttp_login/decorators.py | kirlf/aiohttp-login | 7e52ea4ef694b6bfe2ad19b0a3d1077b04cfe221 | [
"0BSD"
] | 27 | 2017-01-27T21:14:25.000Z | 2021-11-24T14:46:57.000Z | from functools import wraps
from aiohttp.abc import AbstractView
from aiohttp.web import HTTPForbidden, json_response, StreamResponse
try:
import ujson as json
except ImportError:
import json
from .cfg import cfg
from .utils import url_for, redirect, get_cur_user
def _get_request(args):
    """Extract the aiohttp request from a handler's positional args.

    Class-based views (aiohttp.web.View subclasses) receive the view
    instance first, which carries the request; plain function handlers
    receive the request as their last positional argument.
    """
    first = args[0]
    if isinstance(first, AbstractView):
        return first.request
    return args[-1]
def user_to_request(handler):
    """Decorator: resolve the current user (or None) and stash it on the
    request under cfg.REQUEST_USER_KEY before calling the handler."""
    @wraps(handler)
    async def wrapper(*args):
        request = _get_request(args)
        request[cfg.REQUEST_USER_KEY] = await get_cur_user(request)
        return await handler(*args)
    return wrapper
def login_required(handler):
    """Decorator: run the handler only for authenticated users; anonymous
    visitors are redirected to the login page (with a back-URL)."""
    @user_to_request
    @wraps(handler)
    async def wrapper(*args):
        request = _get_request(args)
        if request[cfg.REQUEST_USER_KEY]:
            return await handler(*args)
        return redirect(get_login_url(request))
    return wrapper
def restricted_api(handler):
    """Decorator for JSON API endpoints: anonymous callers get a 403 JSON
    error; non-StreamResponse return values are JSON-encoded."""
    @user_to_request
    @wraps(handler)
    async def wrapper(*args):
        request = _get_request(args)
        if not request[cfg.REQUEST_USER_KEY]:
            return json_response({'error': 'Access denied'}, status=403)
        result = await handler(*args)
        if isinstance(result, StreamResponse):
            return result
        return json_response(result, dumps=json.dumps)
    return wrapper
def admin_required(handler):
    """Decorator: allow only logged-in users whose email is listed in
    cfg.ADMIN_EMAILS; others get HTTPForbidden (anonymous users are first
    redirected by login_required).
    """
    @wraps(handler)
    async def decorator(*args):
        request = _get_request(args)
        response = await login_required(handler)(request)
        # Consistency fix: look the user up under cfg.REQUEST_USER_KEY (the
        # key user_to_request/login_required actually write) instead of the
        # hard-coded 'user' literal, which breaks if the key is reconfigured.
        if request[cfg.REQUEST_USER_KEY]['email'] not in cfg.ADMIN_EMAILS:
            raise HTTPForbidden(reason='You are not admin')
        return response
    return decorator
def get_login_url(request):
    """Build the login-page URL carrying the current path as the back-URL."""
    back_query = {cfg.BACK_URL_QS_KEY: request.path_qs}
    return url_for('auth_login').with_query(back_query)
| 27.985714 | 72 | 0.686064 |
f94bf4a984d178ee95bc5f85dad2f6dfcb01ebde | 584 | py | Python | src/11-plotly-bubble-chart.py | paiboon15721/clusterkit-dash-training | 66012eafc8737f1fdf1e26ada17c2927c3a03922 | [
"MIT"
] | 2 | 2019-11-26T06:38:26.000Z | 2021-05-31T06:28:39.000Z | src/11-plotly-bubble-chart.py | paiboon15721/clusterkit-dash-training | 66012eafc8737f1fdf1e26ada17c2927c3a03922 | [
"MIT"
] | 12 | 2020-02-12T02:55:00.000Z | 2022-02-10T08:51:26.000Z | src/11-plotly-bubble-chart.py | paiboon15721/clusterkit-dash-training | 66012eafc8737f1fdf1e26ada17c2927c3a03922 | [
"MIT"
] | null | null | null | import pandas as pd
import plotly.graph_objects as go
import plotly.offline as pyo
# Load the auto-mpg dataset (one row per car model).
df = pd.read_csv('../dataset/mpg.csv')
data = [go.Scatter(  # start with a normal scatter plot
    x=df['horsepower'],
    y=df['mpg'],
    text=df['name'],  # hover text: car name
    mode='markers',
    marker=dict(size=1.5*df['cylinders'])  # set the marker size (bubble area encodes cylinders)
)]
layout = go.Layout(
    title='Vehicle mpg vs. horsepower',
    xaxis=dict(title='horsepower'),  # x-axis label
    yaxis=dict(title='mpg'),  # y-axis label
    hovermode='closest'
)
fig = go.Figure(data=data, layout=layout)
# Write the chart to a local HTML file and open it in the default browser.
pyo.plot(fig)
| 25.391304 | 64 | 0.643836 |
01a34e2da17902a84bccf85634c5d6fd8ecd7e47 | 4,980 | py | Python | tests/lump/test_csv.py | viaacode/lump | 02f669f7f25c2a123bc118b23dcc3d53d4cdf424 | [
"MIT"
] | 1 | 2021-02-01T08:07:00.000Z | 2021-02-01T08:07:00.000Z | tests/lump/test_csv.py | viaacode/lump | 02f669f7f25c2a123bc118b23dcc3d53d4cdf424 | [
"MIT"
] | 4 | 2020-03-12T18:06:32.000Z | 2022-03-25T10:47:48.000Z | tests/lump/test_csv.py | viaacode/lump | 02f669f7f25c2a123bc118b23dcc3d53d4cdf424 | [
"MIT"
] | null | null | null | import benchmarkstt.csv as csv
import pytest
from io import StringIO
# Code copied from ebu/benchmarkstt (by original author), see
# https://github.com/ebu/benchmarkstt/blob/master/LICENCE.md
example1 = '''
Some line, some other \t \t
dsfgdsg
\n \t \r
\n \r
"stay","togther "
# commented out
fsdss
'''
expected1 = [['Some line', 'some other'], ['dsfgdsg'], ['stay', 'togther '], ['fsdss']]
def get_reader(text, *args, **kwargs):
    """Parse *text* with benchmarkstt's CSV reader; return all rows as a list.

    Extra positional/keyword arguments are forwarded to ``csv.reader``.
    """
    stream = StringIO(text)
    return list(csv.reader(stream, *args, **kwargs))
def test_csv():
    """Exercise the default dialect: quoting, trimming, comments, blank-line
    and whitespace handling."""
    _reader = get_reader
    assert _reader('replace," ","\n"') == [['replace', ' ', '\n']]
    assert type(csv.reader(StringIO(''))) is csv.Reader
    assert type(csv.Reader(StringIO(''), csv.DefaultDialect)) is csv.Reader
    assert _reader('""') == [['']]
    assert _reader('') == []
    assert _reader(example1) == expected1
    # Doubled quotes inside a quoted field decode to a single quote.
    assert _reader('"","test"," quiot"""') == [['', 'test', ' quiot"']]
    # Unquoted fields are trimmed of surrounding whitespace.
    assert _reader(' val1 ,\t val2 \n') == [['val1', 'val2']]
    assert _reader(' ","') == [[',']]
    assert _reader('""') == [['']]
    assert _reader('''
"A,B","""A,B""",
''') == [['A,B', '"A,B"', '']]
    assert _reader('"A,B","""A,B""",') == [['A,B', '"A,B"', '']]
    assert _reader(' A\tB, \t B\tA\t ,') == [['A\tB', 'B\tA', '']]
    # '#' starts a comment only outside quoted fields.
    assert _reader('"#nocomment",#yescomment\n') == [['#nocomment', '']]
    assert _reader('"#nocomment",#here ') == [['#nocomment', '']]
    assert _reader('"#nocomment",#') == [['#nocomment', '']]
    assert _reader('"#nocomment"# test') == [['#nocomment']]
    assert _reader('"#nocomment" # commented') == [['#nocomment']]
    assert _reader('\t t ') == [['t']]
    assert _reader('t') == [['t']]
    assert _reader('replace," ","\n"') == [['replace', ' ', '\n']]
    assert _reader(',') == [['', '']]
    assert _reader('#yescomment,#here \n') == []
    assert _reader(r'''# test
"(?s)<\?xml.*</head>","" # inline comment
# ignore
"<[^>]+>"," " # test
"[,.-\?]", "" # more comments''') == [['(?s)<\\?xml.*</head>', ''], ['<[^>]+>', ' '], ['[,.-\\?]', '']]
    # Blank/whitespace-only lines are skipped entirely.
    assert _reader('test,ok\n\n \n\t\n\t\n.\n\n') == [['test', 'ok'], ['.']]
    assert _reader('"Some", "words" # comment') == [['Some', 'words']]
def test_conf():
def _reader(text):
return list(csv.reader(StringIO(text), 'whitespace'))
assert _reader('replace " " "\n"') == [['replace', ' ', '\n']]
expected = [['Lowercase'],
['regex', 'y t', 'Y T'],
['Replace', 'e', 'a']]
gotten = _reader('''# using a simple config file
Lowercase \n
# it even supports comments
# If there is a space in the argument, make sure you quote it though!
regex "y t" "Y T"
# extraneous whitespaces are ignored
Replace e a''')
assert gotten == expected
expected = [
['Normalizer1', 'arg1', 'arg 2'],
['Normalizer2'],
['Normalizer3', 'This is argument 1\nSpanning multiple lines\n',
'argument 2'],
['Normalizer4', 'argument with double quote (")']
]
assert _reader("""
Normalizer1 arg1 "arg 2"
# This is a comment
Normalizer2
# (Normalizer2 has no arguments)
Normalizer3 "This is argument 1
Spanning multiple lines
" "argument 2"
Normalizer4 "argument with double quote ("")"
""") == expected
assert _reader("lower case ") == [['lower', 'case']]
assert _reader("lower case \n") == [['lower', 'case']]
assert _reader('test "stuff "\t') == [['test', 'stuff ']]
assert _reader('test "stuff "\n') == [['test', 'stuff ']]
assert _reader('test "stuff\n\t"\n\t \t YEs \t \n') == \
[['test', 'stuff\n\t'], ['YEs']]
assert _reader("\n\n\n\nline5")[0].lineno == 5
def test_exceptions():
    """Malformed input must raise the specific parser error subclasses."""
    _reader = get_reader
    with pytest.raises(csv.InvalidDialectError):
        csv.Reader(StringIO(''), dialect=csv.InvalidDialectError)
    with pytest.raises(csv.UnknownDialectError):
        _reader('', dialect='notknown')
    # Quote opening in the middle of an unquoted field.
    with pytest.raises(csv.UnallowedQuoteError) as exc:
        _reader('test "')
    assert "Quote not allowed here" in str(exc)
    with pytest.raises(csv.CSVParserError):
        _reader('stray"quote')
    # Input ends while still inside a quoted field.
    with pytest.raises(csv.UnclosedQuoteError) as exc:
        _reader(' s ,"')
    assert "Unexpected end" in str(exc)
    with pytest.raises(csv.UnallowedQuoteError):
        _reader(' fsd","')
    with pytest.raises(csv.UnallowedQuoteError) as exc:
        _reader('""test,')
    assert "Single quote inside quoted field" in str(exc)
def test_own_dialect():
    """A user-defined Dialect subclass is honoured (';' delimiter here, so
    commas/spaces are plain field content and surrounding spaces survive)."""
    class SemicolonDialect(csv.Dialect):
        delimiter = ';'
    rows = get_reader("Tester \n No Trim ", dialect=SemicolonDialect)
    assert rows == [['Tester '], [' No Trim ']]
def test_debugger(capsys):
    """debug=True must not change the parse result, and its diagnostic
    output on stdout must match the recorded fixture file exactly."""
    gotten = get_reader(example1, debug=True)
    assert gotten == expected1
    # Fixture holds the expected debug trace, captured via pytest's capsys.
    with open('./resources/test/_data/csv.debugging.output.txt', encoding='UTF-8') as f:
        expected_debug = f.read()
    assert capsys.readouterr().out == expected_debug
| 29.642857 | 106 | 0.562651 |
a337dbb65f49a4c85b1c2f42146a8a8c5e3377bd | 1,050 | py | Python | net/data/verify_certificate_chain_unittest/incorrect-trust-anchor/generate-chains.py | metux/chromium-deb | 3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | net/data/verify_certificate_chain_unittest/incorrect-trust-anchor/generate-chains.py | metux/chromium-deb | 3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | net/data/verify_certificate_chain_unittest/incorrect-trust-anchor/generate-chains.py | metux/chromium-deb | 3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Certificate chain where the supposed root certificate is wrong:
* The intermediate's "issuer" does not match the root's "subject"
* The intermediate's signature was not generated using the root's key
"""
import sys
sys.path += ['..']  # make the shared chain-building helpers importable
import common

# Self-signed root certificate, which actually signed the intermediate.
root = common.create_self_signed_root_certificate('Root')

# Intermediate certificate.
intermediate = common.create_intermediate_certificate('Intermediate', root)

# Target certificate.
target = common.create_end_entity_certificate('Target', intermediate)

# Self-signed root certificate that has nothing to do with this chain, but will
# be saved as its root certificate.
bogus_root = common.create_self_signed_root_certificate('BogusRoot')

# Saving bogus_root instead of the real root is the point of this fixture:
# the stored trust anchor neither matches the intermediate's issuer nor
# verifies its signature.
chain = [target, intermediate, bogus_root]
common.write_chain(__doc__, chain, 'chain.pem')
| 32.8125 | 79 | 0.778095 |
cb88b3c2d18e0e7a91660c93511665de09f3b073 | 744 | py | Python | server/conf/secret_settings.py | frastlin/ICAD-map | 014dac99cec5cd25cca08adcc0dee8877790fdb1 | [
"BSD-3-Clause"
] | null | null | null | server/conf/secret_settings.py | frastlin/ICAD-map | 014dac99cec5cd25cca08adcc0dee8877790fdb1 | [
"BSD-3-Clause"
] | null | null | null | server/conf/secret_settings.py | frastlin/ICAD-map | 014dac99cec5cd25cca08adcc0dee8877790fdb1 | [
"BSD-3-Clause"
] | null | null | null | """
This file is meant for when you want to share your game dir with
others but don't want to share all details of your specific game
or local server setup. The settings in this file will override those
in settings.py and is in .gitignore by default.
A good guideline when sharing your game dir is that you want your
game to run correctly also without this file and only use this
to override your public, shared settings.
"""
# The secret key is randomly seeded upon creation. It is used to sign
# Django's cookies and should not be publicly known. It should also
# generally not be changed once people have registered with the game
# since it will invalidate their existing sessions.
SECRET_KEY = 'Ge^&rAQNVx27[EJ%vBT"nO<]sy3o$gdCLI;H+h5S'
| 41.333333 | 69 | 0.778226 |
ce5b329bd53e34d132df93216d2fcaea1bd19e10 | 853 | py | Python | oneflow/python/framework/parallel_conf_util.py | xxg1413/oneflow | f2e3c85a25b8aecfb6c0c0af1737833b1a77e135 | [
"Apache-2.0"
] | 1 | 2020-12-04T03:06:16.000Z | 2020-12-04T03:06:16.000Z | oneflow/python/framework/parallel_conf_util.py | xxg1413/oneflow | f2e3c85a25b8aecfb6c0c0af1737833b1a77e135 | [
"Apache-2.0"
] | null | null | null | oneflow/python/framework/parallel_conf_util.py | xxg1413/oneflow | f2e3c85a25b8aecfb6c0c0af1737833b1a77e135 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def GetDeviceTagAndMachineDeviceIds(parallel_conf):
    """Split a ParallelConf message into its device tag and device names.

    Args:
        parallel_conf: object exposing a ``device_tag`` field and an
            iterable ``device_name`` field (a repeated proto field of
            strings in practice -- TODO confirm against the proto schema).

    Returns:
        Tuple ``(device_tag, machine_device_ids)``; ``machine_device_ids``
        is a fresh list copied from ``parallel_conf.device_name`` so
        callers can mutate it without touching the message.
    """
    # list() replaces the original element-by-element append loop.
    machine_device_ids = list(parallel_conf.device_name)
    return parallel_conf.device_tag, machine_device_ids
| 35.541667 | 72 | 0.785463 |
7051affb00f342601c01459d278827fef952851d | 8,916 | py | Python | themes/default/base16-equilibrium-dark.config.py | knezi/base16-qutebrowser | d1d63d1e2f16fd03cd88ec7006c0d885da06dcb8 | [
"MIT"
] | null | null | null | themes/default/base16-equilibrium-dark.config.py | knezi/base16-qutebrowser | d1d63d1e2f16fd03cd88ec7006c0d885da06dcb8 | [
"MIT"
] | null | null | null | themes/default/base16-equilibrium-dark.config.py | knezi/base16-qutebrowser | d1d63d1e2f16fd03cd88ec7006c0d885da06dcb8 | [
"MIT"
] | null | null | null | # base16-qutebrowser (https://github.com/theova/base16-qutebrowser)
# Base16 qutebrowser template by theova
# Equilibrium Dark scheme by Carlo Abelli
# Equilibrium Dark base16 palette.
# base00..base07: background-to-foreground ramp (darkest to lightest);
# base08..base0F: accent colours used for highlights, states and syntax.
base00 = "#0c1118"
base01 = "#181c22"
base02 = "#22262d"
base03 = "#7b776e"
base04 = "#949088"
base05 = "#afaba2"
base06 = "#cac6bd"
base07 = "#e7e2d9"
base08 = "#f04339"
base09 = "#df5923"
base0A = "#bb8801"
base0B = "#7f8b00"
base0C = "#00948b"
base0D = "#008dd1"
base0E = "#6a7fd2"
base0F = "#e3488e"
# set qutebrowser colors
# Text color of the completion widget. May be a single color to use for
# all columns or a list of three colors, one for each column.
c.colors.completion.fg = base05
# Background color of the completion widget for odd rows.
c.colors.completion.odd.bg = base01
# Background color of the completion widget for even rows.
c.colors.completion.even.bg = base00
# Foreground color of completion widget category headers.
c.colors.completion.category.fg = base0A
# Background color of the completion widget category headers.
c.colors.completion.category.bg = base00
# Top border color of the completion widget category headers.
c.colors.completion.category.border.top = base00
# Bottom border color of the completion widget category headers.
c.colors.completion.category.border.bottom = base00
# Foreground color of the selected completion item.
c.colors.completion.item.selected.fg = base01
# Background color of the selected completion item.
c.colors.completion.item.selected.bg = base0A
# Top border color of the selected completion item.
c.colors.completion.item.selected.border.top = base0A
# Bottom border color of the selected completion item.
c.colors.completion.item.selected.border.bottom = base0A
# Foreground color of the matched text in the selected completion item.
c.colors.completion.item.selected.match.fg = base08
# Foreground color of the matched text in the completion.
c.colors.completion.match.fg = base0B
# Background color of disabled items in the context menu.
c.colors.contextmenu.disabled.bg = base01
# Foreground color of disabled items in the context menu.
c.colors.contextmenu.disabled.fg = base04
# Color of the scrollbar handle in the completion view.
c.colors.completion.scrollbar.fg = base05
# Color of the scrollbar in the completion view.
c.colors.completion.scrollbar.bg = base00
# Background color of the context menu. If set to null, the Qt default is used.
c.colors.contextmenu.menu.bg = base00
# Foreground color of the context menu. If set to null, the Qt default is used.
c.colors.contextmenu.menu.fg = base05
# Background color of the context menu’s selected item. If set to null, the Qt default is used.
c.colors.contextmenu.selected.bg = base0A
#Foreground color of the context menu’s selected item. If set to null, the Qt default is used.
c.colors.contextmenu.selected.fg = base01
# Background color for the download bar.
c.colors.downloads.bar.bg = base00
# Color gradient start for download text.
c.colors.downloads.start.fg = base00
# Color gradient start for download backgrounds.
c.colors.downloads.start.bg = base0D
# Color gradient end for download text.
c.colors.downloads.stop.fg = base00
# Color gradient stop for download backgrounds.
c.colors.downloads.stop.bg = base0C
# Foreground color for downloads with errors.
c.colors.downloads.error.fg = base08
# Font color for hints.
c.colors.hints.fg = base00
# Background color for hints. Note that you can use a `rgba(...)` value
# for transparency.
c.colors.hints.bg = base0A
# Font color for the matched part of hints.
c.colors.hints.match.fg = base05
# Text color for the keyhint widget.
c.colors.keyhint.fg = base05
# Highlight color for keys to complete the current keychain.
c.colors.keyhint.suffix.fg = base05
# Background color of the keyhint widget.
c.colors.keyhint.bg = base00
# Foreground color of an error message.
c.colors.messages.error.fg = base00
# Background color of an error message.
c.colors.messages.error.bg = base08
# Border color of an error message.
c.colors.messages.error.border = base08
# Foreground color of a warning message.
c.colors.messages.warning.fg = base00
# Background color of a warning message.
c.colors.messages.warning.bg = base0E
# Border color of a warning message.
c.colors.messages.warning.border = base0E
# Foreground color of an info message.
c.colors.messages.info.fg = base05
# Background color of an info message.
c.colors.messages.info.bg = base00
# Border color of an info message.
c.colors.messages.info.border = base00
# Foreground color for prompts.
c.colors.prompts.fg = base05
# Border used around UI elements in prompts.
c.colors.prompts.border = base00
# Background color for prompts.
c.colors.prompts.bg = base00
# Background color for the selected item in filename prompts.
c.colors.prompts.selected.bg = base0A
# Foreground color of the statusbar.
c.colors.statusbar.normal.fg = base0B
# Background color of the statusbar.
c.colors.statusbar.normal.bg = base00
# Foreground color of the statusbar in insert mode.
c.colors.statusbar.insert.fg = base00
# Background color of the statusbar in insert mode.
c.colors.statusbar.insert.bg = base0D
# Foreground color of the statusbar in passthrough mode.
c.colors.statusbar.passthrough.fg = base00
# Background color of the statusbar in passthrough mode.
c.colors.statusbar.passthrough.bg = base0C
# Foreground color of the statusbar in private browsing mode.
c.colors.statusbar.private.fg = base00
# Background color of the statusbar in private browsing mode.
c.colors.statusbar.private.bg = base01
# Foreground color of the statusbar in command mode.
c.colors.statusbar.command.fg = base05
# Background color of the statusbar in command mode.
c.colors.statusbar.command.bg = base00
# Foreground color of the statusbar in private browsing + command mode.
c.colors.statusbar.command.private.fg = base05
# Background color of the statusbar in private browsing + command mode.
c.colors.statusbar.command.private.bg = base00
# Foreground color of the statusbar in caret mode.
c.colors.statusbar.caret.fg = base00
# Background color of the statusbar in caret mode.
c.colors.statusbar.caret.bg = base0E
# Foreground color of the statusbar in caret mode with a selection.
c.colors.statusbar.caret.selection.fg = base00
# Background color of the statusbar in caret mode with a selection.
c.colors.statusbar.caret.selection.bg = base0D
# Background color of the progress bar.
c.colors.statusbar.progress.bg = base0D
# Default foreground color of the URL in the statusbar.
c.colors.statusbar.url.fg = base05
# Foreground color of the URL in the statusbar on error.
c.colors.statusbar.url.error.fg = base08
# Foreground color of the URL in the statusbar for hovered links.
c.colors.statusbar.url.hover.fg = base05
# Foreground color of the URL in the statusbar on successful load
# (http).
c.colors.statusbar.url.success.http.fg = base0C
# Foreground color of the URL in the statusbar on successful load
# (https).
c.colors.statusbar.url.success.https.fg = base0B
# Foreground color of the URL in the statusbar when there's a warning.
c.colors.statusbar.url.warn.fg = base0E
# Background color of the tab bar.
c.colors.tabs.bar.bg = base00
# Color gradient start for the tab indicator.
c.colors.tabs.indicator.start = base0D
# Color gradient end for the tab indicator.
c.colors.tabs.indicator.stop = base0C
# Color for the tab indicator on errors.
c.colors.tabs.indicator.error = base08
# Foreground color of unselected odd tabs.
c.colors.tabs.odd.fg = base05
# Background color of unselected odd tabs.
c.colors.tabs.odd.bg = base01
# Foreground color of unselected even tabs.
c.colors.tabs.even.fg = base05
# Background color of unselected even tabs.
c.colors.tabs.even.bg = base00
# Background color of pinned unselected even tabs.
c.colors.tabs.pinned.even.bg = base0C
# Foreground color of pinned unselected even tabs.
c.colors.tabs.pinned.even.fg = base07
# Background color of pinned unselected odd tabs.
c.colors.tabs.pinned.odd.bg = base0B
# Foreground color of pinned unselected odd tabs.
c.colors.tabs.pinned.odd.fg = base07
# Background color of pinned selected even tabs.
c.colors.tabs.pinned.selected.even.bg = base05
# Foreground color of pinned selected even tabs.
c.colors.tabs.pinned.selected.even.fg = base00
# Background color of pinned selected odd tabs.
c.colors.tabs.pinned.selected.odd.bg = base05
# Foreground color of pinned selected odd tabs.
c.colors.tabs.pinned.selected.odd.fg = base0E
# Foreground color of selected odd tabs.
c.colors.tabs.selected.odd.fg = base00
# Background color of selected odd tabs.
c.colors.tabs.selected.odd.bg = base05
# Foreground color of selected even tabs.
c.colors.tabs.selected.even.fg = base00
# Background color of selected even tabs.
c.colors.tabs.selected.even.bg = base05
# Background color for webpages if unset (or empty to use the theme's
# color).
# c.colors.webpage.bg = base00
| 29.919463 | 95 | 0.771534 |
f1e23816e3246a50d9e33537a2721370b34d5224 | 937 | py | Python | src/users/views.py | SerhatTeker/django-rest-filtering-tutorial | b02cd9c14cceddbf604cee83026f7134f8109d75 | [
"BSD-3-Clause"
] | null | null | null | src/users/views.py | SerhatTeker/django-rest-filtering-tutorial | b02cd9c14cceddbf604cee83026f7134f8109d75 | [
"BSD-3-Clause"
] | null | null | null | src/users/views.py | SerhatTeker/django-rest-filtering-tutorial | b02cd9c14cceddbf604cee83026f7134f8109d75 | [
"BSD-3-Clause"
] | null | null | null | from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import viewsets
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from src.users.serializers import User, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
    """CRUD API over all users with filter, search and ordering support."""

    serializer_class = UserSerializer
    queryset = User.objects.all()
    # DjangoFilterBackend -> exact-match query params (?id=, ?username=),
    # OrderingFilter -> ?ordering=<field>, SearchFilter -> ?search=<term>.
    filter_backends = (
        DjangoFilterBackend,
        OrderingFilter,
        SearchFilter,
    )
    filterset_fields = ("id", "username")
    search_fields = ("id", "username")
    ordering_fields = ("username",)
    ordering = "username"  # default sort order

    @action(detail=False, methods=["GET"])
    def me(self, request):
        """Return the serialized profile of the requesting user (GET /me)."""
        serializer = self.serializer_class(request.user, context={"request": request})
        return Response(status=status.HTTP_200_OK, data=serializer.data)
| 33.464286 | 86 | 0.745998 |
a84ea398c60e883f96dbee92755f43bded9e6489 | 167 | py | Python | komtek/asgi.py | afrlv1/Komtek_test | 13afd6e15ebe299f38d968106b8aef66211c01fd | [
"MIT"
] | null | null | null | komtek/asgi.py | afrlv1/Komtek_test | 13afd6e15ebe299f38d968106b8aef66211c01fd | [
"MIT"
] | null | null | null | komtek/asgi.py | afrlv1/Komtek_test | 13afd6e15ebe299f38d968106b8aef66211c01fd | [
"MIT"
] | null | null | null | import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings unless the environment already does.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'komtek.settings')

# Module-level ASGI callable imported by servers such as uvicorn/daphne.
application = get_asgi_application()
| 20.875 | 66 | 0.826347 |
5692f0f919a5b5cfaf81403adee27c0233c683be | 4,735 | py | Python | tool/common.py | shreyas269/Gradient-Adversarial-Transformation-Network | 913cfa904b644c4ba304300402d4130874e467c1 | [
"MIT"
] | null | null | null | tool/common.py | shreyas269/Gradient-Adversarial-Transformation-Network | 913cfa904b644c4ba304300402d4130874e467c1 | [
"MIT"
] | null | null | null | tool/common.py | shreyas269/Gradient-Adversarial-Transformation-Network | 913cfa904b644c4ba304300402d4130874e467c1 | [
"MIT"
] | null | null | null | ''' this file contains functions that may be used by many people '''
import numpy as np
import torch
import torchvision
import matplotlib.pyplot as plt
tv = torchvision
tc = torch
def load_data(dataset_name):
    '''
    USAGE:   X_train, y_train, X_test, y_test = load_data('mnist')
    INPUT:   a string naming 'mnist' or 'cifar10'
    RETURN:  X_train, y_train, X_test, y_test
             X_train / X_test are 4D float torch tensors scaled to [0, 1],
             shaped [data_size, channel, width, height].
             Returns None for an unrecognized dataset name.
    This function downloads the dataset (if needed) under './data/<name>'.
    CONTRIBUTER: henryliu,07.20
    '''
    if dataset_name not in ('mnist', 'cifar10'):
        print("unrecognized dataset, da cuo le ba ?")
        return None
    directory = 'data/' + dataset_name
    if dataset_name == 'mnist':
        train_data = tv.datasets.MNIST(root=directory, train=True, transform=tv.transforms.ToTensor(), download=True)
        test_data = tv.datasets.MNIST(root=directory, train=False, transform=tv.transforms.ToTensor(), download=True)
        # NOTE(review): .train_data/.train_labels are the legacy torchvision
        # attribute names (newer releases expose .data/.targets) -- kept for
        # compatibility with the version this repo pins.
        X_train = train_data.train_data.reshape(-1, 1, 28, 28)
        y_train = train_data.train_labels
        X_test = test_data.test_data.reshape(-1, 1, 28, 28)
        y_test = test_data.test_labels
        # uint8 tensors -> float in [0, 1]; .float() avoids the extra copy
        # and warning that tc.tensor(<tensor>) produces.
        X_train = X_train.float() / 255
        X_test = X_test.float() / 255
    else:  # cifar10
        train_data = tv.datasets.CIFAR10(root=directory, train=True, transform=tv.transforms.ToTensor(), download=True)
        test_data = tv.datasets.CIFAR10(root=directory, train=False, transform=tv.transforms.ToTensor(), download=True)
        y_train = train_data.train_labels
        y_test = test_data.test_labels
        # CIFAR10 ships numpy uint8 arrays in NHWC; move channels first
        # (NCHW) before converting to float tensors in [0, 1].
        X_train = tc.tensor(train_data.train_data.transpose([0, 3, 1, 2]), dtype=tc.float) / 255
        X_test = tc.tensor(test_data.test_data.transpose([0, 3, 1, 2]), dtype=tc.float) / 255
    return X_train, y_train, X_test, y_test
def imshow(image, label):
    '''
    USAGE:   imshow(X_train[0], y_train[0])
    INPUT:   image is a 3-D or 4-D torch tensor shaped [channel, width, height]
             or [1, channel, width, height]; label is a number/str
             Can be used on both mnist and cifar10
    RETURN:  plots the image with its label as title
    CONTRIBUTER: henryliu, 07.23
    '''
    classes = ('plane', 'car', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck')
    plt.axis('off')
    # Collapse an optional leading batch dim to get [channel, W, H].
    # Fix: use the last two dims separately (the original used size(2)
    # twice, which is only correct for square images).
    image = image.reshape(-1, image.size(-2), image.size(-1))
    if image.shape[1] == 28:  # mnist: single-channel 28x28, gray colormap
        plt.imshow(image.reshape(28, 28), cmap='gray')
        plt.title('%i' % label, fontsize=20)
    else:  # cifar10: CHW -> HWC for matplotlib, class name as title
        plt.imshow(image.permute(1, 2, 0))
        plt.title(classes[label])
    plt.show()
class batch_generator():
    '''
    USAGE:   generator = batch_generator(batch_size=50)
             x_batch, y_batch = generator.next_batch(X_train, y_train)
    INPUT:   when initializing, takes an int batch size;
             when generating a batch, takes X and y (anything sliceable)
    RETURNS: (X_batch, y_batch) slices of length batch_size
    The generator keeps an internal counter; the caller decides how many
    rounds make up an epoch, e.g.:
        for epoch in range(total_epoch):
            for _ in range(len(y_train) // batch_size):
                X_batch, y_batch = generator.next_batch(X_train, y_train)
    CONTRIBUTER: henryliu, 07.20
    '''
    def __init__(self, batch_size=100):
        # samples returned per call to next_batch()
        self.batch_size = batch_size
        # index of the next batch within the current epoch
        self.static_counter = 0

    def next_batch(self, X, y):
        """Return the next (X_batch, y_batch) slice, wrapping at epoch end.

        The final batch of an epoch is the *last* batch_size items (it may
        overlap the previous batch), after which the counter resets.
        """
        if self.static_counter is None:  # idiom fix: `is None`, not `== None`
            self.static_counter = 0
        data_size = len(y)
        if (self.static_counter + 1) * self.batch_size >= data_size:
            self.static_counter = 0
            return X[data_size - self.batch_size:], y[data_size - self.batch_size:]
        self.static_counter += 1
        start = self.batch_size * (self.static_counter - 1)
        end = self.batch_size * self.static_counter
        return X[start:end], y[start:end]
| 42.276786 | 120 | 0.636748 |
7637888efa1057be74cd07a337656d827f47312e | 1,826 | py | Python | ftests/test_all.py | tbproject/tbpr02 | da4df9d148222724633de3b66500e80786f19eac | [
"MIT"
] | null | null | null | ftests/test_all.py | tbproject/tbpr02 | da4df9d148222724633de3b66500e80786f19eac | [
"MIT"
] | 1 | 2020-02-12T01:25:15.000Z | 2020-02-12T01:25:15.000Z | ftests/test_all.py | tbproject/tbpr02 | da4df9d148222724633de3b66500e80786f19eac | [
"MIT"
] | null | null | null |
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
# -*- coding: utf-8 -*-
from selenium import webdriver
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import LiveServerTestCase
class HomeNewVisitorTest(LiveServerTestCase):
    """Functional tests: drive a real Firefox against the live test server."""

    def setUp(self):
        # Fresh browser per test; implicit wait gives the page 1s to render.
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(1)

    def tearDown(self):
        self.browser.quit()

    def get_full_url(self, namespace):
        """Resolve a URL name against the live server's base URL."""
        fullUrl = self.live_server_url + reverse(namespace)
        print(fullUrl)
        return fullUrl

    def test_home_title(self):
        # The home page's <title> must mention the project name.
        self.browser.get(self.get_full_url("home"))
        self.assertIn("TaskBuster", self.browser.title)

    def test_h1_css(self):
        # The main heading must carry the styled (purple) text colour.
        self.browser.get(self.get_full_url("home"))
        h1 = self.browser.find_element_by_tag_name("h1")
        self.assertEqual(h1.value_of_css_property("color"),
                         "rgba(200, 50, 255, 1)")
# from selenium import webdriver as wd
# from django.contrib.staticfiles.testing import StaticLiveServerTestCase
# from django.core.urlresolvers import reverse
# class NewHomeVisitorTest(SLST):
# def setUp(self):
# self.browser = wd.Firefox()
# # self.browser.implicitly_wait(1)
# activate('en')
# def tearDown(self):
# self.browser.quit()
#
# def get_full_url(self, namespace):
# print("Live server:", str(self.live_server_url))
# print("Namespace:", str(reverse(namespace)))
# return self.live_server_url + reverse(namespace)
#
# def test_home_title(self):
# print("Testing the home title...")
# url = self.get_full_url("home")
# self.browser.get(url)
# self.browser.implicitly_wait(1)
# self.assertIn("TaskBuster", self.browser.title)
| 22.825 | 73 | 0.658269 |
d2a5c70c4feeb2512ad84fcbdf515622497f30ef | 3,179 | py | Python | starthinker/task/cm_to_dv/cm_account.py | arbrown/starthinker | 1a14664fb1a8f2a757b100363ea8958833b7754c | [
"Apache-2.0"
] | 138 | 2018-11-28T21:42:44.000Z | 2022-03-30T17:26:35.000Z | starthinker/task/cm_to_dv/cm_account.py | arbrown/starthinker | 1a14664fb1a8f2a757b100363ea8958833b7754c | [
"Apache-2.0"
] | 36 | 2019-02-19T18:33:20.000Z | 2022-01-24T18:02:44.000Z | starthinker/task/cm_to_dv/cm_account.py | arbrown/starthinker | 1a14664fb1a8f2a757b100363ea8958833b7754c | [
"Apache-2.0"
] | 54 | 2018-12-06T05:47:32.000Z | 2022-02-21T22:01:01.000Z | ###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from starthinker.util.bigquery import table_create
from starthinker.util.data import get_rows
from starthinker.util.data import put_rows
from starthinker.util.google_api import API_DCM
from starthinker.util.cm import get_profile_for_api
from starthinker.util.discovery_to_bigquery import Discovery_To_BigQuery
from starthinker.util.regexp import lookup_id
from starthinker.util.sheets import sheets_clear
def cm_account_clear(config, task):
  """Reset the BigQuery table and sheet tab that hold CM account rows."""
  # Schema is derived from the CM API discovery document for accounts.list.
  schema = Discovery_To_BigQuery(
    'dfareporting',
    'v3.4'
  ).method_schema(
    'accounts.list',
    iterate=True
  )
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'CM_Accounts',
    schema
  )
  # Wipe the writable region of the 'CM Accounts' tab.
  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'CM Accounts',
    'B2:D'
  )
def cm_account_load(config, task):
  """Loads CM accounts for the sheet-listed profiles into BigQuery, then
  writes a "<name> - <id>" summary of those accounts back to the sheet.
  """
  # load multiple partners from user defined sheet
  def load_multiple():
    # One generator item per account returned by the CM API; profile rows
    # come from column A of the 'CM Profiles' tab and are reduced to an
    # account id via lookup_id().
    for row in get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'CM Profiles',
        'header':False,
        'range': 'A2:A'
      }}
    ):
      if row:
        account_id = lookup_id(row[0])
        is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)
        # superuser calls include an explicit accountId to scope the request
        kwargs = { 'profileId': profile_id, 'accountId': account_id } if is_superuser else { 'profileId': profile_id }
        yield from API_DCM(
          config,
          task['auth_cm'],
          iterate=True,
          internal=is_superuser
        ).accounts().list(**kwargs).execute()
  cm_account_clear(config, task)
  # write accounts to database
  put_rows(
    config,
    task['auth_bigquery'],
    { 'bigquery': {
      'dataset': task['dataset'],
      'table': 'CM_Accounts',
      'schema': Discovery_To_BigQuery(
        'dfareporting',
        'v3.4'
      ).method_schema(
        'accounts.list',
        iterate=True
      ),
      'format':'JSON'
    }},
    load_multiple()
  )
  # write accounts to sheet
  put_rows(
    config,
    task['auth_sheets'],
    { 'sheets': {
      'sheet': task['sheet'],
      'tab': 'CM Accounts',
      'header':False,
      'range': 'B2'
    }},
    get_rows(
      config,
      task['auth_bigquery'],
      { 'bigquery': {
        'dataset': task['dataset'],
        'query': "SELECT CONCAT(name, ' - ', id), active FROM `%s.CM_Accounts`" % task['dataset'],
        'legacy': False
      }}
    )
  )
| 26.272727 | 118 | 0.600189 |
f8962fca2a6957b609af43b68fb3c533a95a3e01 | 119 | py | Python | build/lib/FinanceHub/dataapi/__init__.py | VFermat/FinanceHub | bf55ec39ae75ecdccf35381fd23e5f604115cbf8 | [
"MIT"
] | null | null | null | build/lib/FinanceHub/dataapi/__init__.py | VFermat/FinanceHub | bf55ec39ae75ecdccf35381fd23e5f604115cbf8 | [
"MIT"
] | null | null | null | build/lib/FinanceHub/dataapi/__init__.py | VFermat/FinanceHub | bf55ec39ae75ecdccf35381fd23e5f604115cbf8 | [
"MIT"
] | null | null | null | from .getsgsdata import SGS
from .getfreddata import FRED
from .getimfdata import IMF
__all__ = ['SGS', 'FRED', 'IMF'] | 23.8 | 32 | 0.739496 |
38f9880da430103e4e2d9b9670d0267db7497a0b | 145 | py | Python | ServerDev/init.py | NigelChen/Polar-Server | 06b217584f39f9091fcc8e0f3ce41d3594fdb726 | [
"MIT"
] | 1 | 2017-03-16T17:26:09.000Z | 2017-03-16T17:26:09.000Z | ServerDev/init.py | NigelChen/Polar-Server | 06b217584f39f9091fcc8e0f3ce41d3594fdb726 | [
"MIT"
] | null | null | null | ServerDev/init.py | NigelChen/Polar-Server | 06b217584f39f9091fcc8e0f3ce41d3594fdb726 | [
"MIT"
] | null | null | null | import Server, Client
try:
print "Starting up Polar Chat"
server = Server.server()
except KeyboardInterrupt:
print "Shutting down server..\n" | 20.714286 | 33 | 0.758621 |
1c6acbdd98f022c659390916d7200ffa2aa26768 | 1,032 | py | Python | codingame/utils.py | DevHyperCoder/codingame | 7a370fa47b4eff2c88680c76cd2c6bc795420fff | [
"MIT"
] | null | null | null | codingame/utils.py | DevHyperCoder/codingame | 7a370fa47b4eff2c88680c76cd2c6bc795420fff | [
"MIT"
] | null | null | null | codingame/utils.py | DevHyperCoder/codingame | 7a370fa47b4eff2c88680c76cd2c6bc795420fff | [
"MIT"
] | null | null | null | from typing import get_type_hints
from functools import wraps
def validate_args(func):
    """Decorator that type-checks arguments and the return value at call time.

    Expected types are taken from *func*'s annotations via ``get_type_hints``.
    Argument checks and the return-value check both accept subclasses
    (``issubclass``/``isinstance``); previously the return value required an
    exact type match, which was inconsistent with the argument checks.

    Raises:
        TypeError: if an annotated argument or the return value does not
            match its annotated type.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        hints = get_type_hints(func)
        # co_varnames begins with the parameter names; zip() truncates to
        # len(args), mapping positionals onto their names.
        all_args = dict(zip(func.__code__.co_varnames, args))
        all_args.update(kwargs.copy())
        for arg, arg_type in ((i, type(j)) for i, j in all_args.items()):
            if arg in hints:
                if not issubclass(arg_type, hints[arg]):
                    raise TypeError(
                        "Argument {0!r} needs to be of type {1.__name__!r} "
                        "(got type {2.__name__!r})".format(arg, hints[arg], arg_type)
                    )
        result = func(*args, **kwargs)
        if "return" in hints:
            # isinstance keeps this consistent with the argument checks above.
            if not isinstance(result, hints["return"]):
                raise TypeError(
                    "Return value needs to be of type {0.__name__!r} "
                    "(got type {1.__name__!r})".format(hints["return"], type(result))
                )
        return result
    return wrapper
| 31.272727 | 85 | 0.528101 |
487e4fe8019b6a30f2778a47f71a1900aeb0e72c | 2,450 | py | Python | No_0061_Rotate List/rotate_list_by_link_and_break.py | coderMaruf/leetcode-1 | 20ffe26e43999e44c8acf9800acb371a49bb5853 | [
"MIT"
] | 32 | 2020-01-05T13:37:16.000Z | 2022-03-26T07:27:09.000Z | No_0061_Rotate List/rotate_list_by_link_and_break.py | coderMaruf/leetcode-1 | 20ffe26e43999e44c8acf9800acb371a49bb5853 | [
"MIT"
] | null | null | null | No_0061_Rotate List/rotate_list_by_link_and_break.py | coderMaruf/leetcode-1 | 20ffe26e43999e44c8acf9800acb371a49bb5853 | [
"MIT"
] | 8 | 2020-06-18T16:17:27.000Z | 2022-03-15T23:58:18.000Z | '''
Description:
Given a linked list, rotate the list to the right by k places, where k is non-negative.
Example 1:
Input: 1->2->3->4->5->NULL, k = 2
Output: 4->5->1->2->3->NULL
Explanation:
rotate 1 steps to the right: 5->1->2->3->4->NULL
rotate 2 steps to the right: 4->5->1->2->3->NULL
Example 2:
Input: 0->1->2->NULL, k = 4
Output: 2->0->1->NULL
Explanation:
rotate 1 steps to the right: 2->0->1->NULL
rotate 2 steps to the right: 1->2->0->NULL
rotate 3 steps to the right: 0->1->2->NULL
rotate 4 steps to the right: 2->0->1->NULL
'''
# Definition for singly-linked list.
class ListNode:
    """Singly-linked list node."""
    def __init__(self, x):
        self.val = x  # payload value
        self.next = None  # next node in the list, or None at the tail
class Solution:
    def rotateRight(self, head: ListNode, k: int) -> ListNode:
        """Rotate the list right by k places and return the new head."""
        if head is None:
            return None
        # Measure the list and remember its tail.
        length = 1
        tail = head
        while tail.next:
            tail = tail.next
            length += 1
        # Close the list into a ring so any rotation is just a re-cut.
        tail.next = head
        # The node just before the new head sits (length - k % length - 1)
        # hops away from the old head.
        steps_to_new_tail = length - (k % length) - 1
        new_tail = head
        for _ in range(steps_to_new_tail):
            new_tail = new_tail.next
        new_head = new_tail.next
        new_tail.next = None  # cut the ring open again
        return new_head
# n : the number of elements in linked list
## Time Compleixty: O( n )
#
# The major overhead in time is the while loop iterating on cur, which is of O( n ).
# The minor overhead in time is the for loop iterating on i, which is of O( n-k ).
## Space Complexity: O( 1 )
#
# The major overhead in space is the variable for looping and node operation, which is of O( 1 )
def traverse( node:ListNode ):
    """Print the list's values on one line, space-separated, then a newline."""
    walker = node
    while walker is not None:
        print( walker.val, end = ' ')
        walker = walker.next
    print()
    return
def test_bench():
    """Build 1->2->3->4->5->6, rotate right by 2, and print the result."""
    # expected output:
    '''
    5 6 1 2 3 4
    '''
    head = ListNode( 1 )
    head.next = ListNode( 2 )
    head.next.next = ListNode( 3 )
    head.next.next.next = ListNode( 4 )
    head.next.next.next.next = ListNode( 5 )
    head.next.next.next.next.next = ListNode( 6 )
    head_after_rotation = Solution().rotateRight( head, 2)
    traverse( head_after_rotation )
    return
if __name__ == '__main__':
test_bench() | 20.247934 | 96 | 0.562449 |
9579f5c721a4921a916f95090d3578157afbb138 | 1,585 | py | Python | Pegasus/Stage2/Stage2.py | esha-singh/Stackoverflow_BigData | 20abf97c8325f8a932e4a0238eb1eb9981405441 | [
"MIT"
] | null | null | null | Pegasus/Stage2/Stage2.py | esha-singh/Stackoverflow_BigData | 20abf97c8325f8a932e4a0238eb1eb9981405441 | [
"MIT"
] | null | null | null | Pegasus/Stage2/Stage2.py | esha-singh/Stackoverflow_BigData | 20abf97c8325f8a932e4a0238eb1eb9981405441 | [
"MIT"
] | null | null | null | import os
import sys
import shutil
import xml.etree.ElementTree as xml
from urlparse import urlparse
from pyunpack import Archive
import random
class Stage2:
    """Copies only the required table files from each domain directory in the
    source tree to a mirrored directory under the destination root."""
    def filterUnwantedSources(self, data_source_directory, data_destination_directory, required_file_names):
        # Every directory below the source root (the root itself is dropped).
        # NOTE: paths are split on "//", matching the Windows-style
        # double-slash paths this script is invoked with.
        domain_directories = [dirpath for dirpath, _, _ in os.walk(data_source_directory)][1:]
        for domain_directory in domain_directories:
            target = data_destination_directory + domain_directory.split("//")[-1]
            os.mkdir(target)
            # Files directly inside this domain directory.
            _, _, file_names = next(os.walk(domain_directory))
            for file_name in file_names:
                if file_name in required_file_names:
                    shutil.copy(domain_directory + "//" + file_name, target + "//" + file_name)
if __name__ == "__main__":
stage2 = Stage2()
args = sys.argv
# if ssd argument is passed, use it as a source and destination for data
if len(args) > 1 and args[1] == 'ssd':
stage2.filterUnwantedSources("D://BigData//Stage2_data//", "D://BigData//Stage3_data//", ["Posts.xml"])
else:
stage2.filterUnwantedSources("C://Users//PranayDev//Documents//BigData//ETL//Pegasus//Stage2//Stage2_data//", "C://Users//PranayDev//Documents//BigData//ETL//Pegasus//Stage3//Stage3_data//", ["Posts.xml"]) | 46.617647 | 213 | 0.684543 |
a584ed39af22c509cd74e24379b68539d0814498 | 6,005 | py | Python | kerastuner/engine/hypermodel.py | stefanvasilev/keras-tuner | 5c402b02af9a2a98ab5eece802f1ec7ca5331379 | [
"Apache-2.0"
] | 1 | 2021-05-07T17:12:41.000Z | 2021-05-07T17:12:41.000Z | kerastuner/engine/hypermodel.py | stefanvasilev/keras-tuner | 5c402b02af9a2a98ab5eece802f1ec7ca5331379 | [
"Apache-2.0"
] | null | null | null | kerastuner/engine/hypermodel.py | stefanvasilev/keras-tuner | 5c402b02af9a2a98ab5eece802f1ec7ca5331379 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import gc
import traceback
import numpy as np
from tensorflow import keras
from .. import config as config_module
class HyperModel(object):
    """Defines a searchable space of Models and builds Models from this space.
    # Attributes:
        name: The name of this HyperModel.
        tunable: Whether the hyperparameters defined in this hypermodel
            should be added to search space. If `False`, either the search
            space for these parameters must be defined in advance, or the
            default values will be used.
    """
    def __init__(self, name=None, tunable=True):
        self.name = name
        self.tunable = tunable
        # Route every `build` call through `_build_wrapper`, keeping the
        # subclass implementation reachable as `_build`.
        self._build = self.build
        self.build = self._build_wrapper
    def build(self, hp):
        """Builds a model.
        # Arguments:
            hp: A `HyperParameters` instance.
        # Returns:
            A model instance.
        """
        raise NotImplementedError
    def _build_wrapper(self, hp, *args, **kwargs):
        if self.tunable:
            return self._build(hp, *args, **kwargs)
        # Work on a copy so a fixed (non-tunable) hypermodel cannot add new
        # entries to the shared search space.
        return self._build(hp.copy(), *args, **kwargs)
class DefaultHyperModel(HyperModel):
    """Wraps a plain `build(hp)` callable as a `HyperModel`."""
    def __init__(self, build, name=None, tunable=True):
        # Bug fix: `tunable` was accepted but never forwarded, so every
        # instance reported `tunable=True` regardless of the argument.
        super(DefaultHyperModel, self).__init__(name=name, tunable=tunable)
        # Replaces the wrapper installed by the base __init__ with the
        # user-provided build callable (same behavior as before).
        self.build = build
class KerasHyperModel(HyperModel):
    """Builds and compiles a Keras Model with optional compile overrides."""
    def __init__(
        self,
        hypermodel,
        max_model_size=None,
        optimizer=None,
        loss=None,
        metrics=None,
        distribution_strategy=None,
        **kwargs
    ):
        # hypermodel: a HyperModel instance or a `build(hp)` callable to wrap.
        # max_model_size: optional cap on the trainable parameter count.
        # optimizer/loss/metrics: optional overrides applied at compile time.
        # distribution_strategy: optional strategy whose scope wraps both
        #     model building and compilation.
        super(KerasHyperModel, self).__init__(**kwargs)
        self.hypermodel = get_hypermodel(hypermodel)
        self.max_model_size = max_model_size
        self.optimizer = optimizer
        self.loss = loss
        self.metrics = metrics
        self.distribution_strategy = distribution_strategy
        # Give up after this many consecutive invalid/oversized models.
        self._max_fail_streak = 5
    def build(self, hp):
        """Builds, validates, and compiles a model from the wrapped hypermodel.

        Retries up to `_max_fail_streak` times when building raises or the
        model exceeds `max_model_size`; raises RuntimeError past that.
        """
        for i in range(self._max_fail_streak + 1):
            # clean-up TF graph from previously stored (defunct) graph
            keras.backend.clear_session()
            gc.collect()
            # Build a model, allowing max_fail_streak failed attempts.
            try:
                with maybe_distribute(self.distribution_strategy):
                    model = self.hypermodel.build(hp)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit -- presumably an intentional best-effort retry here,
            # but worth confirming.
            except:
                if config_module.DEBUG:
                    traceback.print_exc()
                print("Invalid model %s/%s" % (i, self._max_fail_streak))
                if i == self._max_fail_streak:
                    raise RuntimeError("Too many failed attempts to build model.")
                continue
            # Stop if `build()` does not return a valid model.
            if not isinstance(model, keras.models.Model):
                raise RuntimeError(
                    "Model-building function did not return "
                    "a valid Keras Model instance, found {}".format(model)
                )
            # Check model size.
            size = maybe_compute_model_size(model)
            if self.max_model_size and size > self.max_model_size:
                print("Oversized model: %s parameters -- skipping" % (size))
                if i == self._max_fail_streak:
                    raise RuntimeError("Too many consecutive oversized models.")
                continue
            break
        return self._compile_model(model)
    def _compile_model(self, model):
        """Applies any optimizer/loss/metrics overrides and compiles `model`."""
        with maybe_distribute(self.distribution_strategy):
            if self.optimizer or self.loss or self.metrics:
                # Start from the model's own compile settings and override
                # only what was explicitly supplied.
                compile_kwargs = {
                    "optimizer": model.optimizer,
                    "loss": model.loss,
                    "metrics": model.metrics,
                }
                if self.loss:
                    compile_kwargs["loss"] = self.loss
                if self.optimizer:
                    compile_kwargs["optimizer"] = self.optimizer
                if self.metrics:
                    compile_kwargs["metrics"] = self.metrics
                model.compile(**compile_kwargs)
        return model
def maybe_compute_model_size(model):
    """Compute the size of a given model, if it has been built."""
    if not model.built:
        # An unbuilt model has no instantiated weights to count.
        return 0
    counts = [keras.backend.count_params(p) for p in model.trainable_weights]
    return int(np.sum(counts))
@contextlib.contextmanager
def maybe_distribute(distribution_strategy):
    """Distributes if distribution_strategy is set."""
    if distribution_strategy is None:
        yield
        return
    # Run the body inside the strategy's scope.
    with distribution_strategy.scope():
        yield
def get_hypermodel(hypermodel):
    """Coerces a `HyperModel` instance or a `build(hp)` callable into a
    `HyperModel`, raising ValueError for anything else."""
    if isinstance(hypermodel, HyperModel):
        return hypermodel
    if callable(hypermodel):
        return DefaultHyperModel(hypermodel)
    raise ValueError(
        "The `hypermodel` argument should be either "
        "a callable with signature `build(hp)` returning a model, "
        "or an instance of `HyperModel`."
    )
| 32.994505 | 82 | 0.615487 |
d7bc74bf7715fefa02b9a0a1a753d2bf0411422f | 1,174 | py | Python | app/report/routes.py | rrsk/hiwayPay | c84b7581475164751f64540a521b803bdf08a9fb | [
"MIT"
] | 31 | 2020-07-01T06:40:16.000Z | 2022-03-30T18:49:02.000Z | app/report/routes.py | rrsk/hiwayPay | c84b7581475164751f64540a521b803bdf08a9fb | [
"MIT"
] | 2 | 2020-11-02T06:21:23.000Z | 2021-06-02T00:31:06.000Z | app/report/routes.py | rrsk/hiwayPay | c84b7581475164751f64540a521b803bdf08a9fb | [
"MIT"
] | 13 | 2020-07-02T07:06:05.000Z | 2022-03-15T11:34:41.000Z | from flask import Blueprint, render_template
from flask import render_template, redirect, url_for, request, session , jsonify
from flask_login import login_user, logout_user, current_user , login_required
from app.report import bp
from app.model import User , Role
from app import db
@bp.route('/salary_slips' , methods=['GET' , 'POST'])
@login_required
def view_slips():
    """Render the salary slips report page."""
    # NOTE(review): the title says 'Salary Sheet' but this view renders the
    # salary *slips* template -- confirm whether that is intentional.
    return render_template('reports/salary_slips.html', title=('Report - Salary Sheet'))
@bp.route('/salary_sheet' , methods=['GET' , 'POST'])
@login_required
def view_sheet():
    """Render the salary sheet report page."""
    return render_template('reports/salary_sheet.html', title=('Report - Salary Sheet'))
@bp.route('/advance' , methods=['GET' , 'POST'])
@login_required
def view_advance():
    """Render the salary advance report page."""
    # Title previously read 'Report - Salary Sheet' (copy-paste from the
    # salary-sheet view); corrected to match this report.
    return render_template('reports/advance.html', title=('Report - Advance'))
@bp.route('/attendence' , methods=['GET' , 'POST'])
@login_required
def view_attendence():
    """Render the attendence report page."""
    # Title previously read 'Report - Salary Sheet' (copy-paste from the
    # salary-sheet view); corrected to match this report.  The 'attendence'
    # spelling follows the existing route/template names.
    return render_template('reports/attendence.html', title=('Report - Attendence'))
@bp.route('/performance' , methods=['GET' , 'POST'])
@login_required
def view_performance():
    """Render the performance report page."""
    # Title previously read 'Report - Salary Sheet' (copy-paste from the
    # salary-sheet view); corrected to match this report.
    return render_template('reports/performance.html', title=('Report - Performance'))
| 33.542857 | 88 | 0.734242 |
60c2dfaad4093db31f5afa89c7e0ba7d1a320896 | 2,677 | py | Python | 2019/day12.py | okomarov/aoc | e6e90d09fd52b8b1b2104b3db8ba5a980c8d6c79 | [
"MIT"
] | null | null | null | 2019/day12.py | okomarov/aoc | e6e90d09fd52b8b1b2104b3db8ba5a980c8d6c79 | [
"MIT"
] | null | null | null | 2019/day12.py | okomarov/aoc | e6e90d09fd52b8b1b2104b3db8ba5a980c8d6c79 | [
"MIT"
] | 1 | 2020-04-03T16:51:43.000Z | 2020-04-03T16:51:43.000Z | from itertools import combinations
import math
with open('data/day12.txt', 'r') as f:
# with open('data/day12_test.txt', 'r') as f:
data = f.read().splitlines()
data = [''.join(c for c in line if c not in set('<>xyz= ')) for line in data]
data = [[int(c) for c in l.split(',')] for l in data]
# Every unordered pair of moon indices (there are exactly four moons).
all_comb = list(combinations(range(4), r=2))
def step(moons, velocity):
    """Advance the simulation one tick: apply gravity, then apply velocity.

    Both lists are mutated in place and also returned.
    """
    # Gravity: for each pair, pull each axis one unit toward the other moon.
    for first, second in all_comb:
        for axis in range(3):
            if moons[first][axis] < moons[second][axis]:
                velocity[first][axis] += 1
                velocity[second][axis] -= 1
            elif moons[first][axis] > moons[second][axis]:
                velocity[first][axis] -= 1
                velocity[second][axis] += 1
    # Velocity: every moon drifts by its velocity on every axis.
    for moon_index in range(4):
        for axis in range(3):
            moons[moon_index][axis] += velocity[moon_index][axis]
    return moons, velocity
# Part 1
# =================================
# What is the total energy in the system after simulating the moons
# given in your scan for 1000 steps?
velocity = [[0,0,0] for _ in range(4)]
for i in range(1000):
    data, velocity = step(data, velocity)
# Total energy = sum over moons of (potential * kinetic), where
# potential = sum(|pos|) and kinetic = sum(|vel|) over the three axes.
total = 0
for moon in range(4):
    potential = 0
    kinetic = 0
    for c in range(3):
        potential += abs(data[moon][c])
        kinetic += abs(velocity[moon][c])
    total += (potential * kinetic)
print('Part 1: ', total)
print()
# Part 2
# =================================
# How many steps does it take to reach the first state that exactly \
# matches a previous state?
# The three axes evolve independently, so find each axis' cycle length
# separately and combine them with an LCM (see below).
with open('data/day12.txt', 'r') as f:
    data = f.read().splitlines()
data = [''.join(c for c in line if c not in set('<>xyz= ')) for line in data]
data = [[int(c) for c in l.split(',')] for l in data]
allx = set()
ally = set()
allz = set()
seenx = False
seeny = False
seenz = False
velocity = [[0,0,0] for _ in range(4)]
i = 0
while True:
    # For each axis, record the (position, velocity) state of every moon;
    # the first repeat marks that axis' cycle length.
    if not seenx:
        xk = str([[m[0], v[0]] for m,v in zip(data,velocity)])
        if xk in allx:
            seenx = i
        else:
            allx.add(xk)
    if not seeny:
        xy = str([[m[1], v[1]] for m,v in zip(data,velocity)])
        if xy in ally:
            seeny = i
        else:
            ally.add(xy)
    if not seenz:
        xz = str([[m[2], v[2]] for m,v in zip(data,velocity)])
        if xz in allz:
            seenz = i
        else:
            allz.add(xz)
    if seenx and seeny and seenz:
        print('X cycle', seenx, '\nY cycle', seeny, '\nZ cycle', seenz)
        break
    data, velocity = step(data, velocity)
    i+=1
def lcm(x, y):
    """Least common multiple, via the gcd identity lcm(x, y) = x/gcd * y."""
    g = math.gcd(x, y)
    return (x // g) * y
# Overall period = LCM of the three independent per-axis cycle lengths.
print('Part 2: ', lcm(lcm(seenx, seeny), seenz))
| 26.77 | 81 | 0.540904 |
4c8b1e56fb1d4c607d70bddc9dc391ef7ac03003 | 14,101 | py | Python | mne/tests/test_bem.py | candleinwindsteve/mne-python | a361dced7663c616ac1fd184f7eed183d2b71580 | [
"BSD-3-Clause"
] | 1 | 2021-03-08T22:53:49.000Z | 2021-03-08T22:53:49.000Z | mne/tests/test_bem.py | frentesteven/mne-python | a361dced7663c616ac1fd184f7eed183d2b71580 | [
"BSD-3-Clause"
] | null | null | null | mne/tests/test_bem.py | frentesteven/mne-python | a361dced7663c616ac1fd184f7eed183d2b71580 | [
"BSD-3-Clause"
] | 1 | 2017-04-20T12:21:15.000Z | 2017-04-20T12:21:15.000Z | # Authors: Marijn van Vliet <w.m.vanvliet@gmail.com>
#
# License: BSD 3 clause
from copy import deepcopy
import os.path as op
import warnings
import numpy as np
from nose.tools import assert_raises, assert_true
from numpy.testing import assert_equal, assert_allclose
from mne import (make_bem_model, read_bem_surfaces, write_bem_surfaces,
make_bem_solution, read_bem_solution, write_bem_solution,
make_sphere_model, Transform, Info)
from mne.preprocessing.maxfilter import fit_sphere_to_headshape
from mne.io.constants import FIFF
from mne.transforms import translation
from mne.datasets import testing
from mne.utils import run_tests_if_main, _TempDir, slow_test, catch_logging
from mne.bem import (_ico_downsample, _get_ico_map, _order_surfaces,
_assert_complete_surface, _assert_inside,
_check_surface_size, _bem_find_surface)
from mne.io import read_info
warnings.simplefilter('always')
fname_raw = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data',
'test_raw.fif')
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
fname_bem_3 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-320-320-bem.fif')
fname_bem_1 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-bem.fif')
fname_bem_sol_3 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-320-320-bem-sol.fif')
fname_bem_sol_1 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-bem-sol.fif')
def _compare_bem_surfaces(surfs_1, surfs_2):
"""Helper to compare BEM surfaces"""
names = ['id', 'nn', 'rr', 'coord_frame', 'tris', 'sigma', 'ntri', 'np']
ignores = ['tri_cent', 'tri_nn', 'tri_area', 'neighbor_tri']
for s0, s1 in zip(surfs_1, surfs_2):
assert_equal(set(names), set(s0.keys()) - set(ignores))
assert_equal(set(names), set(s1.keys()) - set(ignores))
for name in names:
assert_allclose(s0[name], s1[name], rtol=1e-3, atol=1e-6,
err_msg='Mismatch: "%s"' % name)
def _compare_bem_solutions(sol_a, sol_b):
    """Check that two BEM solution dicts match (surfaces and solution data)."""
    # The surfaces the solutions were computed from must match first.
    _compare_bem_surfaces(sol_a['surfs'], sol_b['surfs'])
    numeric_keys = ['bem_method', 'field_mult', 'gamma', 'is_sphere',
                    'nsol', 'sigma', 'source_mult', 'solution']
    # Both dicts must carry exactly the numeric keys plus 'surfs'.
    assert_equal(set(sol_a.keys()), set(sol_b.keys()))
    assert_equal(set(numeric_keys + ['surfs']), set(sol_b.keys()))
    for key in numeric_keys:
        assert_allclose(sol_a[key], sol_b[key], rtol=1e-3, atol=1e-5,
                        err_msg='Mismatch: %s' % key)
@testing.requires_testing_data
def test_io_bem():
    """Test reading and writing of bem surfaces and solutions
    """
    tempdir = _TempDir()
    temp_bem = op.join(tempdir, 'temp-bem.fif')
    # A raw file is not a BEM file, and surface id 10 does not exist.
    assert_raises(ValueError, read_bem_surfaces, fname_raw)
    assert_raises(ValueError, read_bem_surfaces, fname_bem_3, s_id=10)
    surf = read_bem_surfaces(fname_bem_3, patch_stats=True)
    surf = read_bem_surfaces(fname_bem_3, patch_stats=False)
    # Round-trip the first surface through disk.
    write_bem_surfaces(temp_bem, surf[0])
    surf_read = read_bem_surfaces(temp_bem, patch_stats=False)
    _compare_bem_surfaces(surf, surf_read)
    # A surface file is not a solution file.
    assert_raises(RuntimeError, read_bem_solution, fname_bem_3)
    temp_sol = op.join(tempdir, 'temp-sol.fif')
    sol = read_bem_solution(fname_bem_sol_3)
    assert_true('BEM' in repr(sol))
    # Round-trip the solution through disk.
    write_bem_solution(temp_sol, sol)
    sol_read = read_bem_solution(temp_sol)
    _compare_bem_solutions(sol, sol_read)
    sol = read_bem_solution(fname_bem_sol_1)
    # Looking up surface 3 in the single-layer solution must fail.
    assert_raises(RuntimeError, _bem_find_surface, sol, 3)
def test_make_sphere_model():
    """Test making a sphere model"""
    info = read_info(fname_raw)
    # Invalid argument combinations must raise ValueError.
    assert_raises(ValueError, make_sphere_model, 'foo', 'auto', info)
    assert_raises(ValueError, make_sphere_model, 'auto', 'auto', None)
    assert_raises(ValueError, make_sphere_model, 'auto', 'auto', info,
                  relative_radii=(), sigmas=())
    assert_raises(ValueError, make_sphere_model, 'auto', 'auto', info,
                  relative_radii=(1,))  # wrong number of radii
    # here we just make sure it works -- the functionality is actually
    # tested more extensively e.g. in the forward and dipole code
    bem = make_sphere_model('auto', 'auto', info)
    assert_true('3 layers' in repr(bem))
    assert_true('Sphere ' in repr(bem))
    assert_true(' mm' in repr(bem))
    # head_radius=None produces a model whose repr reports no layers.
    bem = make_sphere_model('auto', None, info)
    assert_true('no layers' in repr(bem))
    assert_true('Sphere ' in repr(bem))
@testing.requires_testing_data
def test_bem_model():
    """Test BEM model creation from Python with I/O"""
    tempdir = _TempDir()
    fname_temp = op.join(tempdir, 'temp-bem.fif')
    # Build the default (3-layer) and single-conductivity models and compare
    # each against the corresponding reference file; also round-trip to disk.
    for kwargs, fname in zip((dict(), dict(conductivity=[0.3])),
                             [fname_bem_3, fname_bem_1]):
        model = make_bem_model('sample', ico=2, subjects_dir=subjects_dir,
                               **kwargs)
        model_c = read_bem_surfaces(fname)
        _compare_bem_surfaces(model, model_c)
        write_bem_surfaces(fname_temp, model)
        model_read = read_bem_surfaces(fname_temp)
        _compare_bem_surfaces(model, model_c)
        _compare_bem_surfaces(model_read, model_c)
    assert_raises(ValueError, make_bem_model, 'sample',  # bad conductivity
                  conductivity=[0.3, 0.006], subjects_dir=subjects_dir)
@slow_test
@testing.requires_testing_data
def test_bem_solution():
    """Test making a BEM solution from Python with I/O"""
    # test degenerate conditions
    surf = read_bem_surfaces(fname_bem_1)[0]
    assert_raises(RuntimeError, _ico_downsample, surf, 10)  # bad dec grade
    s_bad = dict(tris=surf['tris'][1:], ntri=surf['ntri'] - 1, rr=surf['rr'])
    assert_raises(RuntimeError, _ico_downsample, s_bad, 1)  # not isomorphic
    s_bad = dict(tris=surf['tris'].copy(), ntri=surf['ntri'],
                 rr=surf['rr'])  # bad triangulation
    s_bad['tris'][0] = [0, 0, 0]
    assert_raises(RuntimeError, _ico_downsample, s_bad, 1)
    s_bad['id'] = 1
    assert_raises(RuntimeError, _assert_complete_surface, s_bad)
    s_bad = dict(tris=surf['tris'], ntri=surf['ntri'], rr=surf['rr'].copy())
    s_bad['rr'][0] = 0.
    assert_raises(RuntimeError, _get_ico_map, surf, s_bad)
    surfs = read_bem_surfaces(fname_bem_3)
    assert_raises(RuntimeError, _assert_inside, surfs[0], surfs[1])  # outside
    surfs[0]['id'] = 100  # bad surfs
    assert_raises(RuntimeError, _order_surfaces, surfs)
    surfs[1]['rr'] /= 1000.
    assert_raises(RuntimeError, _check_surface_size, surfs[1])
    # actually test functionality
    tempdir = _TempDir()
    fname_temp = op.join(tempdir, 'temp-bem-sol.fif')
    # use a model and solution made in Python
    conductivities = [(0.3,), (0.3, 0.006, 0.3)]
    fnames = [fname_bem_sol_1, fname_bem_sol_3]
    for cond, fname in zip(conductivities, fnames):
        for model_type in ('python', 'c'):
            # 'python' builds the model from scratch; 'c' loads the
            # pre-generated surface file, so both paths get exercised.
            if model_type == 'python':
                model = make_bem_model('sample', conductivity=cond, ico=2,
                                       subjects_dir=subjects_dir)
            else:
                model = fname_bem_1 if len(cond) == 1 else fname_bem_3
            solution = make_bem_solution(model)
            solution_c = read_bem_solution(fname)
            _compare_bem_solutions(solution, solution_c)
            # round-trip the solution through disk
            write_bem_solution(fname_temp, solution)
            solution_read = read_bem_solution(fname_temp)
            _compare_bem_solutions(solution, solution_c)
            _compare_bem_solutions(solution_read, solution_c)
def test_fit_sphere_to_headshape():
    """Test fitting a sphere to digitization points"""
    # Create points of various kinds
    # The synthetic digitization is a unit sphere scaled to `rad` and shifted
    # to `center`; `dev_trans` offsets the device frame from the head frame.
    rad = 0.09
    big_rad = 0.12
    center = np.array([0.0005, -0.01, 0.04])
    dev_trans = np.array([0., -0.005, -0.01])
    dev_center = center - dev_trans
    dig = [
        # Left auricular
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'ident': FIFF.FIFFV_POINT_LPA,
         'kind': FIFF.FIFFV_POINT_CARDINAL,
         'r': np.array([-1.0, 0.0, 0.0])},
        # Nasion
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'ident': FIFF.FIFFV_POINT_NASION,
         'kind': FIFF.FIFFV_POINT_CARDINAL,
         'r': np.array([0.0, 1.0, 0.0])},
        # Right auricular
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'ident': FIFF.FIFFV_POINT_RPA,
         'kind': FIFF.FIFFV_POINT_CARDINAL,
         'r': np.array([1.0, 0.0, 0.0])},
        # Top of the head (extra point)
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'kind': FIFF.FIFFV_POINT_EXTRA,
         'r': np.array([0.0, 0.0, 1.0])},
        # EEG points
        # Fz
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'kind': FIFF.FIFFV_POINT_EEG,
         'r': np.array([0, .72, .69])},
        # F3
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'kind': FIFF.FIFFV_POINT_EEG,
         'r': np.array([-.55, .67, .50])},
        # F4
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'kind': FIFF.FIFFV_POINT_EEG,
         'r': np.array([.55, .67, .50])},
        # Cz
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'kind': FIFF.FIFFV_POINT_EEG,
         'r': np.array([0.0, 0.0, 1.0])},
        # Pz
        {'coord_frame': FIFF.FIFFV_COORD_HEAD,
         'kind': FIFF.FIFFV_POINT_EEG,
         'r': np.array([0, -.72, .69])},
    ]
    # Scale the unit-sphere points to `rad` and shift to `center`.
    for d in dig:
        d['r'] *= rad
        d['r'] += center
    # Device to head transformation (rotate .2 rad over X-axis)
    dev_head_t = Transform('meg', 'head', translation(*(dev_trans)))
    info = Info(dig=dig, dev_head_t=dev_head_t)
    # Degenerate conditions
    assert_raises(ValueError, fit_sphere_to_headshape, info,
                  dig_kinds=(FIFF.FIFFV_POINT_HPI,))
    assert_raises(ValueError, fit_sphere_to_headshape, info,
                  dig_kinds='foo', units='m')
    # Points in the wrong coordinate frame must be rejected.
    info['dig'][0]['coord_frame'] = FIFF.FIFFV_COORD_DEVICE
    assert_raises(RuntimeError, fit_sphere_to_headshape, info, units='m')
    info['dig'][0]['coord_frame'] = FIFF.FIFFV_COORD_HEAD
    # # Test with 4 points that match a perfect sphere
    dig_kinds = (FIFF.FIFFV_POINT_CARDINAL, FIFF.FIFFV_POINT_EXTRA)
    with warnings.catch_warnings(record=True):  # not enough points
        r, oh, od = fit_sphere_to_headshape(info, dig_kinds=dig_kinds,
                                            units='m')
    kwargs = dict(rtol=1e-3, atol=1e-5)
    assert_allclose(r, rad, **kwargs)
    assert_allclose(oh, center, **kwargs)
    assert_allclose(od, dev_center, **kwargs)
    # Test with all points
    dig_kinds = ('cardinal', FIFF.FIFFV_POINT_EXTRA, 'eeg')
    kwargs = dict(rtol=1e-3, atol=1e-3)
    with warnings.catch_warnings(record=True):  # not enough points
        r, oh, od = fit_sphere_to_headshape(info, dig_kinds=dig_kinds,
                                            units='m')
    assert_allclose(r, rad, **kwargs)
    assert_allclose(oh, center, **kwargs)
    assert_allclose(od, dev_center, **kwargs)
    # Test with some noisy EEG points only.
    dig_kinds = 'eeg'
    with warnings.catch_warnings(record=True):  # not enough points
        r, oh, od = fit_sphere_to_headshape(info, dig_kinds=dig_kinds,
                                            units='m')
    kwargs = dict(rtol=1e-3, atol=1e-2)
    assert_allclose(r, rad, **kwargs)
    assert_allclose(oh, center, **kwargs)
    assert_allclose(od, center, **kwargs)
    # Test big size
    dig_kinds = ('cardinal', 'extra')
    info_big = deepcopy(info)
    for d in info_big['dig']:
        d['r'] -= center
        d['r'] *= big_rad / rad
        d['r'] += center
    with warnings.catch_warnings(record=True):  # fit
        with catch_logging() as log_file:
            r, oh, od = fit_sphere_to_headshape(info_big, dig_kinds=dig_kinds,
                                                verbose='warning', units='mm')
    log_file = log_file.getvalue().strip()
    assert_equal(len(log_file.split('\n')), 2)
    assert_true('Estimated head size' in log_file)
    assert_allclose(oh, center * 1000, atol=1e-3)
    assert_allclose(r, big_rad * 1000, atol=1e-3)
    del info_big
    # Test offcenter
    dig_kinds = ('cardinal', 'extra')
    info_shift = deepcopy(info)
    shift_center = np.array([0., -0.03, 0.])
    for d in info_shift['dig']:
        d['r'] -= center
        d['r'] += shift_center
    with warnings.catch_warnings(record=True):
        with catch_logging() as log_file:
            r, oh, od = fit_sphere_to_headshape(
                info_shift, dig_kinds=dig_kinds, verbose='warning', units='m')
    log_file = log_file.getvalue().strip()
    assert_equal(len(log_file.split('\n')), 2)
    assert_true('from head frame origin' in log_file)
    assert_allclose(oh, shift_center, atol=1e-6)
    assert_allclose(r, rad, atol=1e-6)
    # Test "auto" mode (default)
    # Should try "extra", fail, and go on to EEG
    with warnings.catch_warnings(record=True):  # not enough points
        r, oh, od = fit_sphere_to_headshape(info, units='m')
    kwargs = dict(rtol=1e-3, atol=1e-3)
    assert_allclose(r, rad, **kwargs)
    assert_allclose(oh, center, **kwargs)
    assert_allclose(od, dev_center, **kwargs)
    # A second call must reproduce the same fit (determinism).
    with warnings.catch_warnings(record=True):  # not enough points
        r2, oh2, od2 = fit_sphere_to_headshape(info, units='m')
    assert_allclose(r, r2, atol=1e-7)
    assert_allclose(oh, oh2, atol=1e-7)
    assert_allclose(od, od2, atol=1e-7)
    # this one should pass, 1 EXTRA point and 3 EEG (but the fit is terrible)
    info = Info(dig=dig[:7], dev_head_t=dev_head_t)
    with warnings.catch_warnings(record=True):  # bad fit
        r, oh, od = fit_sphere_to_headshape(info, units='m')
    # this one should fail, 1 EXTRA point and 3 EEG (but the fit is terrible)
    info = Info(dig=dig[:6], dev_head_t=dev_head_t)
    assert_raises(ValueError, fit_sphere_to_headshape, info, units='m')
# Allows running this module's tests directly.
run_tests_if_main()
| 41.84273 | 78 | 0.642649 |
f72796cfdb731f2805f402a224271fe7a843f908 | 3,858 | py | Python | tests/test_similarity.py | dhimmel/sematch | 7e92b171c27a8b25e844a467554fe4bb2adfb883 | [
"Apache-2.0"
] | 397 | 2015-05-30T11:02:28.000Z | 2022-03-09T01:39:31.000Z | tests/test_similarity.py | dhimmel/sematch | 7e92b171c27a8b25e844a467554fe4bb2adfb883 | [
"Apache-2.0"
] | 32 | 2015-04-27T21:26:29.000Z | 2021-08-19T10:20:45.000Z | tests/test_similarity.py | dhimmel/sematch | 7e92b171c27a8b25e844a467554fe4bb2adfb883 | [
"Apache-2.0"
] | 110 | 2015-11-06T17:01:48.000Z | 2022-02-17T05:09:02.000Z | # -*- coding: utf-8 -*-
def test_word_similarity():
    """Smoke-test WordNet-based similarity across methods and languages.

    Only checks that each call yields a result; the approximate expected
    values from the original test are noted inline for reference.
    """
    from sematch.semantic.similarity import WordNetSimilarity
    wns = WordNetSimilarity()
    dog_synsets = wns.word2synset('dog')
    cat_synsets = wns.word2synset('cat')
    # Concept-level similarity using the Path method (expected ~0.2).
    assert wns.similarity(dog_synsets[0], cat_synsets[0], 'path') is not None
    # English word similarity, Li method (expected ~0.449327301063).
    assert wns.word_similarity('dog', 'cat', 'li') is not None
    # Spanish monolingual similarity, Lin method (expected ~0.876800984373).
    assert wns.monol_word_similarity('perro', 'gato', 'spa', 'lin') is not None
    # Chinese monolingual similarity, Wu & Palmer method (expected ~0.857142857143).
    assert wns.monol_word_similarity('狗', '猫', 'cmn', 'wup') is not None
    # Spanish-English cross-lingual similarity, Resnik method (expected ~7.91166650904).
    assert wns.crossl_word_similarity('perro', 'cat', 'spa', 'eng', 'res') is not None
    # Spanish-Chinese cross-lingual similarity, Jiang & Conrath method (expected ~0.31023804699).
    assert wns.crossl_word_similarity('perro', '猫', 'spa', 'cmn', 'jcn') is not None
    # Chinese-English cross-lingual similarity, WPath method (expected ~0.593666388463).
    assert wns.crossl_word_similarity('狗', 'cat', 'cmn', 'eng', 'wpath') is not None
def test_yago_concept_similarity():
    """Smoke-test YAGO-type mapping and similarity in sematch.

    Covers word-to-YAGO-concept lookup, YAGO-class-to-synset mapping,
    concept-level WPath similarity, and word-level similarity with both
    corpus-based (Brown) and graph-based (DBpedia) information content.
    """
    from sematch.semantic.similarity import YagoTypeSimilarity
    yagosim = YagoTypeSimilarity()
    dancer = yagosim.word2yago('dancer')
    actor = yagosim.word2yago('actor')
    singer = yagosim.word2yago('singer')
    assert yagosim.yago2synset(actor[0]) is not None
    assert yagosim.yago_similarity(dancer[0], actor[0], 'wpath') is not None
    assert yagosim.yago_similarity(singer[0], actor[0], 'wpath') is not None
    assert yagosim.word2yago('university') is not None
    # Each known YAGO class URI should map back to a WordNet synset.
    # (The Organization URI was previously asserted twice verbatim — an
    # apparent copy-paste duplicate — so it appears only once here.)
    yago_class_uris = (
        'http://dbpedia.org/class/yago/EducationalInstitution108276342',
        'http://dbpedia.org/class/yago/Organization108008335',
        'http://dbpedia.org/class/yago/Institution108053576',
    )
    for uri in yago_class_uris:
        assert yagosim.yago2synset(uri) is not None
    # Word similarity using corpus-based IC from the Brown corpus.
    assert yagosim.word_similarity('dancer', 'actor', 'wpath') is not None
    # Word similarity using graph-based IC from DBpedia.
    assert yagosim.word_similarity('dancer', 'actor', 'wpath_graph') is not None
def test_dbpedia_concept_similarity():
    """Smoke-test path similarity between two DBpedia ontology concepts."""
    from sematch.semantic.graph import DBpediaDataTransform, Taxonomy
    from sematch.semantic.similarity import ConceptSimilarity
    taxonomy = Taxonomy(DBpediaDataTransform())
    concept_sim = ConceptSimilarity(taxonomy, 'models/dbpedia_type_ic.txt')
    actor_uri = 'http://dbpedia.org/ontology/Actor'
    film_uri = 'http://dbpedia.org/ontology/Film'
    assert concept_sim.similarity(actor_uri, film_uri, 'path') is not None
def test_synset_expand():
    """Smoke-test expanding a synset into its related synsets."""
    from sematch.semantic.similarity import WordNetSimilarity
    wns = WordNetSimilarity()
    first_cat_synset = wns.word2synset('cat')[0]
    assert wns.synset_expand(first_cat_synset) is not None
def test_entity_similarity():
    """Smoke-test similarity and relatedness between two DBpedia entities."""
    from sematch.semantic.similarity import EntitySimilarity
    entity_sim = EntitySimilarity()
    madrid = 'http://dbpedia.org/resource/Madrid'
    barcelona = 'http://dbpedia.org/resource/Barcelona'
    assert entity_sim.similarity(madrid, barcelona) is not None
    assert entity_sim.relatedness(madrid, barcelona) is not None
def test_language():
    """Smoke-test the language listing and language-code lookup helpers."""
    from sematch.semantic.similarity import WordNetSimilarity
    wns = WordNetSimilarity()
    # Listing the supported languages should produce a result.
    assert wns.languages() is not None
    # Looking up the code for each known language name should also succeed.
    for language_name in ('English', 'chinese_simplified', 'spanish'):
        assert wns.languages(language_name) is not None
| 52.849315 | 126 | 0.736133 |
024ceabe749710cae0bf2e32650291db5d290478 | 4,484 | py | Python | src/climsoft_api/api/physicalfeature/router.py | openclimateinitiative/climsoft-api | 3591d7499dd7777617b8086332dc83fab1af9588 | [
"MIT"
] | null | null | null | src/climsoft_api/api/physicalfeature/router.py | openclimateinitiative/climsoft-api | 3591d7499dd7777617b8086332dc83fab1af9588 | [
"MIT"
] | 2 | 2022-03-01T13:10:22.000Z | 2022-03-24T08:47:20.000Z | src/climsoft_api/api/physicalfeature/router.py | openclimateinitiative/climsoft-api | 3591d7499dd7777617b8086332dc83fab1af9588 | [
"MIT"
] | 2 | 2021-12-22T21:50:19.000Z | 2022-01-28T12:53:32.000Z | import logging
import climsoft_api.api.physicalfeature.schema as physicalfeature_schema
from climsoft_api.api import deps
from climsoft_api.services import physicalfeature_service
from climsoft_api.utils.exception import handle_exceptions
from climsoft_api.utils.response import get_success_response, \
get_success_response_for_query
from climsoft_api.utils.response import translate_schema
from fastapi import APIRouter, Depends
from sqlalchemy.orm.session import Session
# Router collecting the /physical-features endpoints; mounted by the app elsewhere.
router = APIRouter()
# NOTE(review): getLogger(__file__) names the logger after the file path; the
# usual convention is getLogger(__name__) — confirm before changing, since the
# logger name is observable in log output and handler configuration.
logger = logging.getLogger(__file__)
# NOTE(review): basicConfig at import time configures the root logger as a
# module-import side effect; typically the application entry point owns this.
logging.basicConfig(level=logging.INFO)
@router.get(
    "/physical-features"
)
@handle_exceptions
def get_physical_feature(
    associated_with: str = None,
    begin_date: str = None,
    end_date: str = None,
    image: str = None,
    description: str = None,
    classified_into: str = None,
    limit: int = 25,
    offset: int = 0,
    db_session: Session = Depends(deps.get_session),
):
    """Return a paginated list of physical features matching the filters.

    All filter query parameters are optional; omitted ones place no
    constraint on the corresponding column.
    """
    # Collect the optional column filters and hand them to the service layer.
    filters = dict(
        associated_with=associated_with,
        begin_date=begin_date,
        end_date=end_date,
        image=image,
        description=description,
        classified_into=classified_into,
    )
    total, physical_features = physicalfeature_service.query(
        db_session=db_session,
        limit=limit,
        offset=offset,
        **filters,
    )
    return get_success_response_for_query(
        limit=limit,
        total=total,
        offset=offset,
        result=physical_features,
        message=_("Successfully fetched physical feature."),
        schema=translate_schema(
            _,
            physicalfeature_schema.PhysicalFeatureQueryResponse.schema()
        )
    )
@router.get(
    "/physical-features/{associated_with}/{begin_date}/{classified_into}"
)
@handle_exceptions
def get_physical_feature_by_id(
    associated_with: str,
    begin_date: str,
    classified_into: str,
    db_session: Session = Depends(deps.get_session),
):
    """Fetch one physical feature identified by its composite key."""
    # The service returns a single record; the response envelope expects a list.
    record = physicalfeature_service.get(
        db_session=db_session,
        associated_with=associated_with,
        begin_date=begin_date,
        classified_into=classified_into,
    )
    response_schema = translate_schema(
        _,
        physicalfeature_schema.PhysicalFeatureWithStationAndPhysicalFeatureClassResponse.schema()
    )
    return get_success_response(
        result=[record],
        message=_("Successfully fetched physical feature."),
        schema=response_schema,
    )
@router.post("/physical-features")
@handle_exceptions
def create_physical_feature(
    data: physicalfeature_schema.CreatePhysicalFeature,
    db_session: Session = Depends(deps.get_session)
):
    """Create a new physical feature from the posted payload."""
    created = physicalfeature_service.create(db_session=db_session, data=data)
    return get_success_response(
        result=[created],
        message=_("Successfully created physical feature."),
        schema=translate_schema(
            _,
            physicalfeature_schema.PhysicalFeatureResponse.schema()
        )
    )
@router.put(
    "/physical-features/{associated_with}/{begin_date}/{classified_into}"
)
@handle_exceptions
def update_physical_feature(
    associated_with: str,
    begin_date: str,
    classified_into: str,
    data: physicalfeature_schema.UpdatePhysicalFeature,
    db_session: Session = Depends(deps.get_session),
):
    """Update the physical feature identified by its composite key."""
    updated = physicalfeature_service.update(
        db_session=db_session,
        associated_with=associated_with,
        begin_date=begin_date,
        classified_into=classified_into,
        updates=data,
    )
    return get_success_response(
        result=[updated],
        message=_("Successfully updated physical feature."),
        schema=translate_schema(
            _,
            physicalfeature_schema.PhysicalFeatureResponse.schema()
        )
    )
@router.delete(
    "/physical-features/{associated_with}/{begin_date}/{classified_into}"
)
@handle_exceptions
def delete_physical_feature(
    associated_with: str,
    begin_date: str,
    classified_into: str,
    db_session: Session = Depends(deps.get_session),
):
    """Delete the physical feature identified by its composite key."""
    physicalfeature_service.delete(
        db_session=db_session,
        associated_with=associated_with,
        begin_date=begin_date,
        classified_into=classified_into,
    )
    # Deletion returns no entity, so the success payload carries an empty list.
    return get_success_response(
        result=[],
        message=_("Successfully deleted physical feature."),
        schema=translate_schema(
            _,
            physicalfeature_schema.PhysicalFeatureResponse.schema()
        )
    )
| 28.379747 | 101 | 0.676405 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.