text stringlengths 38 1.54M |
|---|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from typing import Optional
from .aws import Action as BaseAction
from .aws import BaseARN
service_name = "Amazon VPC Lattice"  # human-readable AWS service name
prefix = "vpc-lattice"  # IAM service prefix used in action strings and ARNs
class Action(BaseAction):
    """IAM action scoped to the "vpc-lattice" service prefix."""
    def __init__(self, action: Optional[str] = None) -> None:
        super().__init__(prefix, action)
class ARN(BaseARN):
    """ARN builder pinned to the "vpc-lattice" service."""
    def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
        super().__init__(
            service=prefix, resource=resource, region=region, account=account
        )
# Pre-built Action constants, one per VPC Lattice IAM action string.
CreateAccessLogSubscription = Action("CreateAccessLogSubscription")
CreateListener = Action("CreateListener")
CreateRule = Action("CreateRule")
CreateService = Action("CreateService")
CreateServiceNetwork = Action("CreateServiceNetwork")
CreateServiceNetworkServiceAssociation = Action(
    "CreateServiceNetworkServiceAssociation"
)
CreateServiceNetworkVpcAssociation = Action("CreateServiceNetworkVpcAssociation")
CreateTargetGroup = Action("CreateTargetGroup")
DeleteAccessLogSubscription = Action("DeleteAccessLogSubscription")
DeleteAuthPolicy = Action("DeleteAuthPolicy")
DeleteListener = Action("DeleteListener")
DeleteResourcePolicy = Action("DeleteResourcePolicy")
DeleteRule = Action("DeleteRule")
DeleteService = Action("DeleteService")
DeleteServiceNetwork = Action("DeleteServiceNetwork")
DeleteServiceNetworkServiceAssociation = Action(
    "DeleteServiceNetworkServiceAssociation"
)
DeleteServiceNetworkVpcAssociation = Action("DeleteServiceNetworkVpcAssociation")
DeleteTargetGroup = Action("DeleteTargetGroup")
DeregisterTargets = Action("DeregisterTargets")
GetAccessLogSubscription = Action("GetAccessLogSubscription")
GetAuthPolicy = Action("GetAuthPolicy")
GetListener = Action("GetListener")
GetResourcePolicy = Action("GetResourcePolicy")
GetRule = Action("GetRule")
GetService = Action("GetService")
GetServiceNetwork = Action("GetServiceNetwork")
GetServiceNetworkServiceAssociation = Action("GetServiceNetworkServiceAssociation")
GetServiceNetworkVpcAssociation = Action("GetServiceNetworkVpcAssociation")
GetTargetGroup = Action("GetTargetGroup")
ListAccessLogSubscriptions = Action("ListAccessLogSubscriptions")
ListListeners = Action("ListListeners")
ListRules = Action("ListRules")
ListServiceNetworkServiceAssociations = Action("ListServiceNetworkServiceAssociations")
ListServiceNetworkVpcAssociations = Action("ListServiceNetworkVpcAssociations")
ListServiceNetworks = Action("ListServiceNetworks")
ListServices = Action("ListServices")
ListTagsForResource = Action("ListTagsForResource")
ListTargetGroups = Action("ListTargetGroups")
ListTargets = Action("ListTargets")
PutAuthPolicy = Action("PutAuthPolicy")
PutResourcePolicy = Action("PutResourcePolicy")
RegisterTargets = Action("RegisterTargets")
TagResource = Action("TagResource")
UntagResource = Action("UntagResource")
UpdateAccessLogSubscription = Action("UpdateAccessLogSubscription")
UpdateListener = Action("UpdateListener")
UpdateRule = Action("UpdateRule")
UpdateService = Action("UpdateService")
UpdateServiceNetwork = Action("UpdateServiceNetwork")
UpdateServiceNetworkVpcAssociation = Action("UpdateServiceNetworkVpcAssociation")
UpdateTargetGroup = Action("UpdateTargetGroup")
|
import datetime
def printTimeStamp(name):
    """Print the program author and the current timestamp (output labels are in Ukrainian)."""
    print("Автор програми: " + name)
    print("Час компіляції: " + str(datetime.datetime.now()),"\n")
printTimeStamp("Valeriy Neroznak")
q = int(input("Введите число: "))
# Build the binary representation of q by collecting remainders mod 2
# (least-significant digit first), then reversing.
result = ""
while q != 0:
    result = result + str(q % 2)
    q = q // 2
# Bug fix: for q == 0 the loop never runs and the original printed an empty
# string; fall back to "0". (Negative input still loops forever, as before.)
f = result[::-1] or "0"
print(f)
|
from classes.simulation.abstract.device import AbstractDevice
from models import Object
import requests
class Lamp(AbstractDevice):
    """Basic Lamp device.

    Arguments:
        AbstractDevice {abc} -- Device abstraction
    """
    def __init__(self, name, address):
        super().__init__(name, address)
        self.SCRIPT_ID = 1  # Manual script id for now
    def initialize_device_properties(cls):
        # NOTE(review): the receiver is named `cls` but these look like instance
        # methods -- attributes are set on whatever object is passed in; confirm
        # against AbstractDevice's calling convention.
        cls.PROTOCOL = 'PROTOCOL'
        cls.DATA_TYPE = 2  # See documentation on data types
        cls.INTERACTABLE = True
        # Other device properties
        cls.MODEL = ''
        cls.INITIAL_VALUE = True
    def initialize_device_schedule(cls):
        # Lamp has no recurring main loop.
        cls.DO_MAIN_LOOP = False
        cls.MAIN_LOOP_SCHEDULE = False
    def interaction(cls, args):
        """Basic interaction. Checks whether the value is correct and creates an event at the API endpoint.

        Arguments:
            args {dictionary} -- Any arguments you may want to send to this device
        """
        if 'value' in args:
            # Look up the Object row representing this device by its address.
            obj = Object.query \
                .filter_by(address=cls.ADDRESS) \
                .first()
            print(args['value'])
            # Normalize the incoming value to 0/1 (accepts str/bool/int forms).
            data = 0
            if args['value'] == 'True' or args['value'] == '1' or args['value'] == True or args['value'] == 1:
                data = 1
            if args['value'] == 'False' or args['value'] == '0' or args['value'] == False or args['value'] == 0:
                data = 0
            payload = {'usage_id': obj.id, 'data_type': 'TOGGLE', 'data': data}
            events_endpoint = 'http://172.20.10.3:5000/api/v1/events'
            events_response = requests.post(events_endpoint, data=payload)
            # NOTE(review): prints the endpoint, not events_response -- possibly
            # intended to log the response; confirm.
            print(events_endpoint)
|
# -*- coding: utf-8 -*-
from openerp import api, fields, models
import logging
_logger = logging.getLogger(__name__)
class ProductPrice(models.TransientModel):
    """Wizard that bulk-sets list prices as cost plus a markup percentage."""
    _name = 'product.prices'
    # Markup percentage applied on top of the cost (standard_price).
    markup = fields.Float(string='Mark-up', default=10)
    @api.multi
    def set_prices(self):
        """For each active record, set list_price = standard_price * (1 + markup/100)."""
        context = dict(self._context or {})
        active_ids = context.get('active_ids', []) or []
        active_model = context.get('active_model', None)
        for rec in self.env[active_model].browse(active_ids):
            if rec.standard_price != 0:  # skip records with no cost defined
                rec.list_price = rec.standard_price * (1+(self.markup/100))
                _logger.info('%s new price = %s' % (rec.name, rec.list_price))
        return
|
for i in range(101):
result = 0
n = len(str(i))
while(i != 0):
t = i % 10
result = result + t**n
i = i//10
if i == result:
print(i)
|
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import ipdb
# Input files: predicted click probabilities from two runs, compared below.
fileName = "sub_proba.csv"
fileNameFlip = "sub_proba_dp.csv"
mode = 'r'
resultName = "result"  # prefix for the saved comparison plot
def readFile(fileName, mode):
    """Read rows 1000-1100 of a CSV file and return column index 2 as floats.

    Rows whose third field is missing or not numeric are reported and skipped.
    """
    arr = []
    # Bug fix: honor the caller-supplied mode (the original hard-coded 'r'),
    # and close the file via the context manager.
    with open(fileName, mode) as inputfile:
        lines = inputfile.readlines()[1000:1100]
        for line in lines:
            # Catch only conversion/shape errors instead of a bare except,
            # which also swallowed KeyboardInterrupt and real bugs.
            try:
                arr.append(float(line.split(',')[2]))
            except (ValueError, IndexError):
                print("can't convert to float")
    return arr
def drawTwo(y1, y2):
    """Plot two equal-length series on one axis (red vs green) and save the figure.

    Returns None without plotting when the lengths differ.
    """
    if len(y1) != len(y2):
        return None
    xs = np.arange(len(y1))
    # plt.scatter(xs, y1, marker="+")
    # plt.scatter(xs, y2, marker="o")
    plt.plot(xs, y1, color='r')
    plt.plot(xs, y2, color='g')
    plt.savefig(resultName + "1000-1100_v1.png")
    #plt.show()
def draw(y):
    """Plot series y against its index and save the figure as compare.png."""
    idx = np.arange(len(y))
    plt.plot(idx, y)
    plt.savefig("compare.png")
if __name__ == '__main__':
    # Compare the two probability files over rows 1000-1100.
    y1 = readFile(fileName, mode)
    y2 = readFile(fileNameFlip, mode)
    v = list(map(lambda x: x[0]-x[1], zip(y2, y1)))  # element-wise y2 - y1
    #print ("%s\n%s\n%s" %(y1, y2, v))
    #draw(v)
    drawTwo(y1, y2)
|
#-------------------------------------------------------------------------------
# Name: Unit test Reverse list recursively
# Purpose:
#
# Author: mmk and emeka
#
# Created: 04/09/2018
# Copyright: (c) mmk 2018
# Licence: <gloriaconcepto>
#-------------------------------------------------------------------------------
import Recursion3A as reverse
import unittest
class TestForProblem3ARecursion(unittest.TestCase):
    """
    Test to assert that the list is reversed by Recursion3A.return_reverse_list.
    """
    def setUp(self):
        '''Build the expected list and capture the function's actual result.'''
        # NOTE(review): expected has 6 items but the input list has only 5
        # ('mantis shrimp' is one element there) -- these assertions look
        # destined to fail; confirm the intended fixture.
        self.expected = ['anemone', 'mantis', 'shrimp', 'blobfish', 'octopus', 'shark']
        self.result = reverse.return_reverse_list(['shark', 'octopus','blobfish','mantis shrimp', 'anemone'])
    def test_count_eq(self):
        """Same elements regardless of order."""
        self.assertCountEqual(self.result, self.expected)
    def test_list_eq(self):
        """Same elements in the same (reversed) order."""
        self.assertListEqual(self.result, self.expected)
def main():
    # Placeholder entry point; unittest.main() below drives the tests.
    pass
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: utf-8 -*-
from odoo import http
# class XmartsSdp(http.Controller):
# @http.route('/xmarts_sdp/xmarts_sdp/', auth='public')
# def index(self, **kw):
# return "Hello, world"
# @http.route('/xmarts_sdp/xmarts_sdp/objects/', auth='public')
# def list(self, **kw):
# return http.request.render('xmarts_sdp.listing', {
# 'root': '/xmarts_sdp/xmarts_sdp',
# 'objects': http.request.env['xmarts_sdp.xmarts_sdp'].search([]),
# })
# @http.route('/xmarts_sdp/xmarts_sdp/objects/<model("xmarts_sdp.xmarts_sdp"):obj>/', auth='public')
# def object(self, obj, **kw):
# return http.request.render('xmarts_sdp.object', {
# 'object': obj
# }) |
import requests
from lxml import etree
import time
import csv
# Scrape the first 30 products of each JD search-result page.
def crow_first(n):
    """Fetch JD phone-search page 2n-1 and append (name, price, image) rows to JD_Phone.csv."""
    # Build the URL for each page (odd page numbers serve the first half of a page).
    url = 'https://search.jd.com/Search?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&cid2=653&cid3=655&page=' + str(
        2 * n - 1)
    # Browser-like headers (including a captured session cookie) to avoid being blocked.
    head = {'authority': 'search.jd.com',
            'method': 'GET',
            'path': '/s_new.php?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&wq=%E6%89%8B%E6%9C%BA&cid2=653&cid3=655&page=4&s=84&scrolling=y&log_id=1529828108.22071&tpl=3_M&show_items=7651927,7367120,7056868,7419252,6001239,5934182,4554969,3893501,7421462,6577495,26480543553,7345757,4483120,6176077,6932795,7336429,5963066,5283387,25722468892,7425622,4768461',
            'scheme': 'https',
            'referer': 'https://search.jd.com/Search?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&wq=%E6%89%8B%E6%9C%BA&cid2=653&cid3=655&page=3&s=58&click=0',
            'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36',
            'x-requested-with': 'XMLHttpRequest',
            'Cookie': 'qrsc=3; pinId=RAGa4xMoVrs; xtest=1210.cf6b6759; ipLocation=%u5E7F%u4E1C; _jrda=5; TrackID=1aUdbc9HHS2MdEzabuYEyED1iDJaLWwBAfGBfyIHJZCLWKfWaB_KHKIMX9Vj9_2wUakxuSLAO9AFtB2U0SsAD-mXIh5rIfuDiSHSNhZcsJvg; shshshfpa=17943c91-d534-104f-a035-6e1719740bb6-1525571955; shshshfpb=2f200f7c5265e4af999b95b20d90e6618559f7251020a80ea1aee61500; cn=0; 3AB9D23F7A4B3C9B=QFOFIDQSIC7TZDQ7U4RPNYNFQN7S26SFCQQGTC3YU5UZQJZUBNPEXMX7O3R7SIRBTTJ72AXC4S3IJ46ESBLTNHD37U; ipLoc-djd=19-1607-3638-3638.608841570; __jdu=930036140; user-key=31a7628c-a9b2-44b0-8147-f10a9e597d6f; areaId=19; __jdv=122270672|direct|-|none|-|1529893590075; PCSYCityID=25; mt_xid=V2_52007VwsQU1xaVVoaSClUA2YLEAdbWk5YSk9MQAA0BBZOVQ0ADwNLGlUAZwQXVQpaAlkvShhcDHsCFU5eXENaGkIZWg5nAyJQbVhiWR9BGlUNZwoWYl1dVF0%3D; __jdc=122270672; shshshfp=72ec41b59960ea9a26956307465948f6; rkv=V0700; __jda=122270672.930036140.-.1529979524.1529984840.85; __jdb=122270672.1.930036140|85.1529984840; shshshsID=f797fbad20f4e576e9c30d1c381ecbb1_1_1529984840145'
            }
    print(url)
    r = requests.get(url, headers=head)
    # Set the encoding explicitly, otherwise the text comes back garbled.
    r.encoding = 'utf-8'
    html1 = etree.HTML(r.text)
    # Locate each product's <li> tag.
    datas = html1.xpath('//li[contains(@class,"gl-item")]')
    # Append the scraped rows to the local CSV file.
    with open('JD_Phone.csv', 'a', newline='', encoding='utf-8') as f:
        write = csv.writer(f)
        for data in datas:
            p_price = data.xpath('div/div[@class="p-price"]/strong/i/text()')
            p_pic = data.xpath('div/div[@class="p-img"]/a/img/@source-data-lazy-img')
            p_name = data.xpath('div/div[@class="p-name p-name-type-2"]/a/em/text()')
            # Handles items whose displayed price can switch dynamically (e.g. Mi MIX2):
            # for those the lowest price is stored in the data-price attribute instead.
            if len(p_price) == 0:
                p_price = data.xpath('div/div[@class="p-price"]/strong/@data-price')
            # xpath('string(.)') could be used to extract text mixed across nested tags.
            if len(p_name) == 0:
                p_name = ["null"]
            if len(p_pic) == 0:
                p_pic = ["null"]
            write.writerow([p_name[0], p_price[0], p_pic[0]])
        f.close()  # NOTE(review): redundant inside `with`; the context manager already closes f.
# Scrape the last 30 products of each JD search-result page (lazy-loaded half).
def crow_last(n):
    """Fetch the AJAX-loaded second half of JD search page n and append rows to JD_Phone.csv."""
    # Get the current Unix timestamp, keeping 5 decimal places (used as log_id).
    a = time.time()
    b = '%.5f' % a
    url = 'https://search.jd.com/s_new.php?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&wq=%E6%89%8B%E6%9C%BA&cid2=653&cid3=655&page=' + str(
        2 * n) + '&s=' + str(48 * n - 20) + '&scrolling=y&log_id=' + str(b)
    # Browser-like headers (including a captured session cookie) to avoid being blocked.
    head = {'authority': 'search.jd.com',
            'method': 'GET',
            'path': '/s_new.php?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&wq=%E6%89%8B%E6%9C%BA',
            'scheme': 'https',
            'referer': 'https://search.jd.com/Search?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8&qrst=1&rt=1&stop=1&vt=2&wq=%E6%89%8B%E6%9C%BA&cid2=653&cid3=655&page=3&s=58&click=0',
            'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36',
            'x-requested-with': 'XMLHttpRequest',
            'Cookie': 'qrsc=3; pinId=RAGa4xMoVrs; xtest=1210.cf6b6759; ipLocation=%u5E7F%u4E1C; _jrda=5; TrackID=1aUdbc9HHS2MdEzabuYEyED1iDJaLWwBAfGBfyIHJZCLWKfWaB_KHKIMX9Vj9_2wUakxuSLAO9AFtB2U0SsAD-mXIh5rIfuDiSHSNhZcsJvg; shshshfpa=17943c91-d534-104f-a035-6e1719740bb6-1525571955; shshshfpb=2f200f7c5265e4af999b95b20d90e6618559f7251020a80ea1aee61500; cn=0; 3AB9D23F7A4B3C9B=QFOFIDQSIC7TZDQ7U4RPNYNFQN7S26SFCQQGTC3YU5UZQJZUBNPEXMX7O3R7SIRBTTJ72AXC4S3IJ46ESBLTNHD37U; ipLoc-djd=19-1607-3638-3638.608841570; __jdu=930036140; user-key=31a7628c-a9b2-44b0-8147-f10a9e597d6f; areaId=19; __jdv=122270672|direct|-|none|-|1529893590075; PCSYCityID=25; mt_xid=V2_52007VwsQU1xaVVoaSClUA2YLEAdbWk5YSk9MQAA0BBZOVQ0ADwNLGlUAZwQXVQpaAlkvShhcDHsCFU5eXENaGkIZWg5nAyJQbVhiWR9BGlUNZwoWYl1dVF0%3D; __jdc=122270672; shshshfp=72ec41b59960ea9a26956307465948f6; rkv=V0700; __jda=122270672.930036140.-.1529979524.1529984840.85; __jdb=122270672.1.930036140|85.1529984840; shshshsID=f797fbad20f4e576e9c30d1c381ecbb1_1_1529984840145'
            }
    r = requests.get(url, headers=head)
    r.encoding = 'utf-8'  # avoid mojibake in the response text
    html1 = etree.HTML(r.text)
    datas = html1.xpath('//li[contains(@class,"gl-item")]')
    with open('JD_Phone.csv', 'a', newline='', encoding='utf-8') as f:
        write = csv.writer(f)
        for data in datas:
            p_price = data.xpath('div/div[@class="p-price"]/strong/i/text()')
            p_pic = data.xpath('div/div[@class="p-img"]/a/img/@source-data-lazy-img')
            p_name = data.xpath('div/div[@class="p-name p-name-type-2"]/a/em/text()')
            # Handles items whose displayed price can switch dynamically (e.g. Mi MIX2):
            # for those the lowest price is stored in the data-price attribute instead.
            if len(p_price) == 0:
                p_price = data.xpath('div/div[@class="p-price"]/strong/@data-price')
            # xpath('string(.)') could be used to extract text mixed across nested tags.
            if len(p_name) == 0:
                p_name = ["null"]
            if len(p_pic) == 0:
                p_pic = ["null"]
            write.writerow([p_name[0], p_price[0], p_pic[0]])
        f.close()  # NOTE(review): redundant inside `with`; the context manager already closes f.
if __name__ == '__main__':
    # Crawl pages 1..100; the prints make it easy to see which page is in progress.
    for i in range(1, 101):
        print('***************************************************')
        # try:
        print(' First_Page: ' + str(i))
        crow_first(i)
        print(' Finish')
        # except Exception as e:
        #     print("error: ")
        #     print(e)
        #     print('------------------')
        try:
            print(' Last_Page: ' + str(i))
            crow_last(i)
            print(' Finish')
        except Exception as e:
            # Best-effort: log and continue with the next page.
            print(e)
from cart.models import OrderItem
from django.shortcuts import render , redirect
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from .models import Customer,my_balance
from django.utils.text import slugify
from product.models import Product
from .forms import balanceForm
from django.contrib.auth.forms import UserCreationForm
# Create your views here.
def become_customer(request,backend='django.contrib.auth.backends.ModelBackend'):
    """Register a new user, log them in, and create a matching Customer profile."""
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            user = form.save()
            login(request, user,backend='django.contrib.auth.backends.ModelBackend')
            # Side effect: a Customer row named after the new user.
            customer = Customer.objects.create(name=user.username, created_by=user)
            return redirect('home')
    else:
        form = UserCreationForm()
    # Invalid POST falls through here with the bound form so errors are shown.
    return render(request, 'customer/become_customer.html', {'form': form})
@login_required
def customer_admin(request):
    """Render the customer dashboard with the user's profile and their order items."""
    my_user_profile = Customer.objects.filter(name=request.user).first()
    my_orders=OrderItem.objects.filter(owner=my_user_profile)
    #user = my_balance.objects.filter(user_balance=my_user_profile)
    return render(request,'customer/customer_admin.html',{'my_user_profile':my_user_profile,'my_orders':my_orders})
@login_required
def balance(request):
    """Show the balance form; on a valid POST, save it and go back to the home page."""
    if request.method == 'POST':
        form = balanceForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('/')
    else:
        form = balanceForm()
    return render(request, 'customer/balance.html', {'form': form})
@login_required
def updatebalance(request,pk):
    """Edit an existing balance record identified by pk."""
    # NOTE(review): `request.user.customer(id=pk)` calls the related-object
    # descriptor as a function -- this likely raises TypeError at runtime;
    # probably a queryset lookup (filter/get by id) was intended. Confirm
    # against the Customer model's related_name.
    customers=request.user.customer(id=pk)
    form=balanceForm(instance=customers)
    if request.method == 'POST':
        form = balanceForm(request.POST,instance=customers)
        if form.is_valid():
            form.save()
            return redirect('/')
    return render(request, 'customer/balance.html', {'form': form})
@login_required
def wishlist(request):
    """Render the products the logged-in user has put on their wishlist."""
    wished = Product.objects.filter(users_wishlist=request.user)
    return render(request, "customer/user_wish_list.html", {"wishlist": wished})
|
# https://codeforces.com/contest/750/problem/B
#coding: utf-8
# Python 2 script. `cont` tracks the distance (km) travelled from the North
# Pole (valid range 0..20000); the journey must be legal and end back at 0.
n = int(raw_input())
cont = 0
answer = True
for i in range(n):
    coordinate, direction = raw_input().split()
    coordinate = int(coordinate)
    if (direction == "South"):
        cont += coordinate
    elif (direction == "North"):
        cont -= coordinate
    elif (cont == 0 or cont == 20000):
        # East/West movement is invalid exactly at either pole.
        answer = False
    if (cont < 0 or cont > 20000):
        # Walked past a pole.
        answer = False
if (answer == True and cont == 0):
    print "YES"
else:
    print "NO"
|
import numpy as np
import bem2d
from importlib import reload
import bem2d
import matplotlib.pyplot as plt
bem2d = reload(bem2d)  # pick up local edits to the library without restarting
# List of elements for forward model
n_elements = 2
mu = np.array([3e10])  # shear modulus
nu = np.array([0.25])  # Poisson's ratio
elements = []
element = {}
L = 10000  # fault half-extent used to place the discretized line
# x1, y1, x2, y2 = bem2d.discretized_line(-L, 0, L, 0, n_elements)
x1, y1, x2, y2 = bem2d.discretized_line(-L, -L, L, L, n_elements)
# Build one dict per segment endpoint pair (copy() so entries stay distinct).
for i in range(0, x1.size):
    element["x1"] = x1[i]
    element["y1"] = y1[i]
    element["x2"] = x2[i]
    element["y2"] = y2[i]
    elements.append(element.copy())
elements = bem2d.standardize_elements(elements)
# Observation coordinates for far-field calculation
n_pts = 100
width = 20000
x = np.linspace(-width, width, n_pts)
y = np.linspace(-width, width, n_pts)
x, y = np.meshgrid(x, y)
x = x.flatten()
y = y.flatten()
# Just a simple forward model for the volume
displacement_constant_slip = np.zeros((2, x.size))
stress_constant_slip = np.zeros((3, x.size))
displacement_quadratic = np.zeros((2, x.size))
stress_quadratic = np.zeros((3, x.size))
# slip_linear = np.linspace(-1, 1, len(elements))
# Superpose each element's contribution using constant-slip elements.
for i, element in enumerate(elements):
    displacement, stress = bem2d.displacements_stresses_constant_linear(
        x,
        y,
        element["half_length"],
        mu,
        nu,
        "constant",
        "slip",
        0,
        1,
        element["x_center"],
        element["y_center"],
        element["rotation_matrix"],
        element["inverse_rotation_matrix"],
    )
    displacement_constant_slip += displacement
    stress_constant_slip += stress
bem2d.plot_fields(
    elements,
    x.reshape(n_pts, n_pts),
    y.reshape(n_pts, n_pts),
    displacement_constant_slip,
    stress_constant_slip,
    "constant elements (slip)",
)
# Same forward model with quadratic elements carrying uniform slip coefficients.
for element in elements:
    quadratic_coefficients = np.array([1, 1, 1])  # constant slip quadratic element
    # displacement, stress = bem2d.displacements_stresses_quadratic_farfield_coefficients(
    displacement, stress = bem2d.displacements_stresses_quadratic_NEW(
        x,
        y,
        element["half_length"],
        mu,
        nu,
        "slip",
        [0, 0, 0],
        [1, 1, 1],
        element["x_center"],
        element["y_center"],
        element["rotation_matrix"],
        element["inverse_rotation_matrix"],
    )
    displacement_quadratic += displacement
    stress_quadratic += stress
bem2d.plot_fields(
    elements,
    x.reshape(n_pts, n_pts),
    y.reshape(n_pts, n_pts),
    displacement_quadratic,
    stress_quadratic,
    "quadratic elements (constant slip)",
)
# Residual between the constant-element and quadratic-element solutions.
bem2d.plot_fields(
    elements,
    x.reshape(n_pts, n_pts),
    y.reshape(n_pts, n_pts),
    displacement_constant_slip - displacement_quadratic,
    stress_constant_slip - stress_quadratic,
    "residuals",
)
|
"""
<중심 극한 정리>
= 모집단이 어떤 분포든지 상관없이 표본의 크기가 충분히 크다면 모든 가능한 표본 평균은 모평균 주위에서 정규 분포를 따른다.
전체 인구 : 모집단(전체 집합) / 모평균 : 모집단(전체 집합)의 평균
전체 인구 중 일부 : 표본(부분 집합) / 표본 평균 : 표본(부분 집합)의 평균
즉, 부분집합(표본 평균)의 평균은 전체 집합의 평균(모평균) 주위에서 정규 분포를 따른다.
만약 모집단 평균이 'mu'이고, 표준 편차가 'sigma'인 정규 분포를 따른다면,
표본 평균의 분포는 평균이 'mu'이고, 분산이 'sigma/sqrt(n)'인 정규 분포이다.
- 베르누이 확률 변수
= 어떤 시행의 결과가 '성공' / '실패'중 하나로 나타나고,
성공의 확률이 'p' / 실패의 확률이 '1-p'라 할 때,
그 결과가 '성공'이면 '확률 변수'는 '1'을,
결과가 '실패'라면 '확률 변수'는 '0'을 갖는 확률 변수 X
- 베르누이 확률 질량 함수(PMF, Probability Mass Function)
pmf(x) = p if x = 1, 1 - p if x = 0
= (p**x) * ((1-p)**(1-x))
~ 이항 확률 변수(binomial random variable)
n개의 독립적인 베르누의 확률 변수들을 더한 것.
(EX)
베르누이 확률 변수 -> 동전 1개를 던질 때 앞면의 수
이항 확률 변수 -> 동전 1개를 n번 던질 때 앞면의 수
~ 베르누이 확률 변수의 기대값(평균) = p, 표준 편차 = sqrt(p(1-p))
~ 중심 극한 정리
: n이 적당히 크다면, 이항 확률 변수의 분포는 대략 평균이 '(n*p)'이고,
표준 편차가 'sqrt(n*p(1-p))'인 정규 분포의 확률 변수와 같다.
<<<중심 극한 정리의 핵심>>>
~~~> 표본의 평균(np)와 분산(np(1-p))을 알면, 모집단의 평균(p)와 분산(p(1-P))를 예측 가능하다.
"""
import math
import random
from collections import Counter
import matplotlib.pyplot as plt
from scratch06_확률.ex06 import normal_cdf, normal_pdf
def bernoulli_trial(p):
    """Sample a Bernoulli random variable.

    :param p: success probability
    :return: 1 with probability p, otherwise 0
    """
    # random.random() is uniform on [0, 1), so P(draw < p) == p.
    return 1 if random.random() < p else 0
def binomial(n, p):
    """Sample a binomial random variable.

    Runs n Bernoulli(p) trials and returns how many came up 1.

    :param n: number of trials
    :param p: success probability per trial
    :return: number of successes (the binomial random variable)
    """
    return sum(bernoulli_trial(p) for _ in range(n))
if __name__ == '__main__':
    # Ten Bernoulli trials with probability p = 0.5.
    for _ in range(10):
        print(bernoulli_trial(0.5), end='')
    print()  # e.g. 0110000101
    # Ten binomial samples: 10 coins each, P(heads) = 0.5.
    for _ in range(10):
        print(binomial(10, 0.5), end='')
    print()  # e.g. 5862678543 -> heads out of 10 per sample: 5, 8, 6, 2, ...
    trials = 10000
    data = [binomial(1, 0.5) for _ in range(10000)]  # toss 1 coin once, repeated 10000 times; P(heads) = 0.5
    print(data[0:10])  # e.g. [0, 0, 1, 0, 1, 0, 1, 0, 1, 1]
    # To chart the binomial outcomes against their probabilities,
    # pyplot.hist() could draw the histogram directly:
    #plt.hist(data)
    #plt.show()
    # Drawing it manually, without pyplot.hist():
    histogram = Counter(data)
    x_bar = [k for k in histogram.keys()]
    y_bar = [v / trials for v in histogram.values()]
    plt.bar(x_bar, y_bar)
    plt.show()
    trials = 10000
    data_3 = [binomial(3, 0.5) for _ in range(trials)]  # toss 3 coins, repeated 10000 times; P(heads) = 0.5
    histogram = Counter(data_3)
    x_bar = [k for k in histogram.keys()]
    y_bar = [v / trials for v in histogram.values()]
    plt.bar(x_bar, y_bar)
    plt.show()
    trials = 10000
    data_12 = [binomial(12, 0.5) for _ in range(trials)]  # toss 12 coins, repeated 10000 times; P(heads) = 0.5
    histogram = Counter(data_12)
    x_bar = [k for k in histogram.keys()]
    y_bar = [v / trials for v in histogram.values()]
    plt.bar(x_bar, y_bar)
    plt.show()
    # As the number of coins grows, the plot approaches a normal distribution.
    trials = 10000
    n = 4  # number of coin tosses
    p = 0.5  # probability a coin lands heads (1)
    data = [binomial(n, p) for _ in range(trials)]
    histogram = Counter(data)
    x_bar = [k for k in histogram.keys()]
    y_bar = [v / trials for v in histogram.values()]
    plt.bar(x_bar, y_bar)
    # Normal approximation of the binomial random variable:
    # for large enough n the binomial is approximately normal.
    mu = n * p  # mean
    sigma = math.sqrt(n * p * (1 - p))  # standard deviation
    # Overlay the normal curve on the histogram.
    x_line = range(min(data), max(data) + 1)
    y_line = [normal_cdf(x + 0.5, mu, sigma) - normal_cdf(x - 0.5, mu, sigma) for x in x_line]  # probability per outcome via the CDF
    plt.plot(x_line, y_line)
    plt.show()
    # Now increase the coin count to 100.
    trials = 10000
    n = 100  # number of coin tosses
    p = 0.5  # probability a coin lands heads (1)
    data = [binomial(n, p) for _ in range(trials)]
    histogram = Counter(data)
    x_bar = [k for k in histogram.keys()]
    y_bar = [v / trials for v in histogram.values()]
    plt.bar(x_bar, y_bar)
    mu = n * p  # mean
    sigma = math.sqrt(n * p * (1 - p))  # standard deviation
    x_line = range(min(data), max(data) + 1)
    y_line = [normal_cdf(x + 0.5, mu, sigma) - normal_cdf(x - 0.5, mu, sigma) for x in x_line]  # probability per outcome via the CDF
    plt.plot(x_line, y_line)
    plt.show()
    # With n this large the histogram hugs the normal curve closely.
    # Using the PDF directly is simpler than differencing the CDF:
    trials = 10000
    n = 100  # number of coin tosses
    p = 0.5  # probability a coin lands heads (1)
    data = [binomial(n, p) for _ in range(trials)]
    histogram = Counter(data)
    x_bar = [k for k in histogram.keys()]
    y_bar = [v / trials for v in histogram.values()]
    plt.bar(x_bar, y_bar)
    mu = n * p  # mean
    sigma = math.sqrt(n * p * (1 - p))  # standard deviation
    x_line = range(min(data), max(data) + 1)
    y_line = [normal_pdf(x, mu, sigma) for x in x_line]
    plt.plot(x_line, y_line)
    plt.show()
|
def sumoflist(x, y):
    """Return the sum of the numbers in list x plus the extra value y.

    Idiom fix: the original shadowed the builtin `sum` with a local of the
    same name and accumulated manually; the builtin does the same work.
    """
    return sum(x) + y
# Build a demo list and exercise sumoflist once.
l = []
l.append(6)
l.append( 123 )
l.append( 453 )
l.append( 2 )
l.append( 8 )
print( f"Outside l = { l }" )
print("Calling Function sumoflist for 1st time ")
a = sumoflist( l, 5 )  # 6+123+453+2+8 = 592, plus 5 -> 597
print( f"Outside a = { a }" )
# -*- coding: utf-8 -*-
import math
pi = math.pi  # circle constant shared by the area/perimeter helpers below
def circulo (radio):
    """Area of a circle of radius `radio`."""
    resultado = pi*(radio**2)
    return resultado
def circulo_peri (diametro):
    """Perimeter (circumference) of a circle given its diameter."""
    resultado = pi*(diametro)
    return resultado
def triangulo (a,b):
    """Area of a triangle with height a and base b."""
    resultador= b*a/2
    return resultador
def triangulo_peri (lado_a,lado_b,lado_c):
    """Perimeter of a triangle from its three sides."""
    resultador= lado_a+lado_b+lado_c
    return resultador
def cuadrado (c):
    """Area of a square of side c."""
    resultadom= c**2
    return resultadom
def cuadrado_peri (d):
    """Perimeter of a square of side d."""
    resultadow = d+d+d+d
    return resultadow
def rectangulo (e,f):
    """Area of a rectangle with sides e and f."""
    resultadod= e*f
    return resultadod
def rectangulo_peri (e,f):
    """Perimeter of a rectangle with sides e and f."""
    # Bug fix: the original computed `2*e(2*f)`, which *calls* e as a function
    # and raises TypeError; the perimeter is 2e + 2f.
    resultadod = 2*e + 2*f
    return resultadod
# Regular-polygon helpers: area = n * side * apothem / 2, perimeter = n * side.
def pentagono (g,h):
    """Area of a regular pentagon with side g and apothem h."""
    resultadoz= 5*g*h/2
    return resultadoz
def pentagono_peri(i):
    """Perimeter of a regular pentagon of side i."""
    resultadoz= 5*i
    return resultadoz
def hexagono (j,k):
    """Area of a regular hexagon with side j and apothem k."""
    resultadoz= 6*j*k/2
    return resultadoz
def hexagono_peri(l):
    """Perimeter of a regular hexagon of side l."""
    resultadoz= 6*l
    return resultadoz
def heptagono (m,n):
    """Area of a regular heptagon with side m and apothem n."""
    resultadoz= 7*m*n/2
    return resultadoz
def heptgono_peri(o):
    # NOTE(review): name is misspelled (heptgono vs heptagono); kept as-is
    # because callers reference this exact name.
    """Perimeter of a regular heptagon of side o."""
    resultadoz= 7*o
    return resultadoz
def octagono (p,q):
    """Area of a regular octagon with side p and apothem q."""
    resultadoz= 8*p*q/2
    return resultadoz
def octagono_peri(r):
    """Perimeter of a regular octagon of side r."""
    resultadoz= 8*r
    return resultadoz
def nonagono (s,t):
    """Area of a regular nonagon with side s and apothem t."""
    resultadoz= 9*s*t/2
    return resultadoz
def nonagono_peri(u):
    """Perimeter of a regular nonagon of side u."""
    resultadoz= 9*u
    return resultadoz
def decagono (v,x):
    """Area of a regular decagon with side v and apothem x."""
    resultadoz= 10*v*x/2
    return resultadoz
def decagono_peri(y):
    """Perimeter of a regular decagon of side y."""
    resultadoz= 10*y
    return resultadoz
eleccion = input(" Si quieres circulo escoge 1,si quieres triangulo escoge 2,si quieres cuadrado escoge 3,si quieres rectangulo escoge 4,si quiere un pentagono escoge 5,si quiere un hexagono escoge 6,si quiere un hexagono escoge 7,si quiere un hexagono escoge 8,si quiere un hexagono escoge 9,si quiere un hexagono escoge 10 ")
elecciondos = input("Escoge 11 si quieres area ó 12 si quieres perimetro ")
if eleccion == 1:
if elecciondos == 11:
radio = input("Agregame tu radio aqui ")
resultado = circulo(radio)
print resultado
elif elecciondos == 12:
diametro = input ("Agregame tu diametro aqui")
resultado = circulo(diametro)
print resultado
elif eleccion == 2:
if elecciondos == 11:
a= input("Escribe altura ")
b= input("Escribe base ")
resultador = triangulo(a,b)
print resultador
elif elecciondos == 12:
lado_a= input("Escribe lado 1; ")
lado_b= input("Escribe lado 2; ")
lado_c= input("Escribe lado 3; ")
resultador = triangulo(lado_a,lado_b,lado_c)
print resultador
elif eleccion == 3:
if elecciondos == 11:
c= input("Escribe lado ")
resultadom = cuadrado(c)
print resultadom
elif elecciondos == 12:
d= input("Escribe lado ")
resultadow = cuadrado_peri(d)
print resultadow
elif eleccion == 4:
if elecciondos == 11:
e= input("Escribe altura ")
f= input("Escribe base ")
resultadod = rectangulo (e,f)
print resultadod
elif elecciondos == 12:
e= input("Escribe altura ")
f= input("Escribe base ")
resultadod = rectangulo (e,f)
print resultadod
elif eleccion == 5:
if elecciondos == 11:
g= input("Escribe lado ")
h= input("Escribe radio ")
resultadoz = pentagono(g,h)
print resultadoz
elif elecciondos == 12:
i= input("Escribe lado ")
resultadoz = pentagono_peri(i)
print resultadoz
elif eleccion == 6:
if elecciondos == 11:
j= input("Escribe lado ")
k= input("Escribe radio ")
resultadoa = hexagono(j,k)
print resultadoa
elif elecciondos == 12:
l= input("Escribe lado ")
resultadoa = hexagono_peri(l)
print resultadoa
elif eleccion == 7:
if elecciondos == 11:
m= input("Escribe lado ")
n= input("Escribe radio ")
resultadob = hexagono(m,n)
print resultadob
elif elecciondos == 12:
o= input("Escribe lado ")
resultadob = hexagono_peri(o)
print resultadob
elif eleccion == 8:
if elecciondos == 11:
p= input("Escribe lado ")
q= input("Escribe radio ")
resultadoc = hexagono(p,q)
print resultadoc
elif elecciondos == 12:
r= input("Escribe lado ")
resultadoc = hexagono_peri(r)
print resultadoc
elif eleccion == 9:
if elecciondos == 11:
s= input("Escribe lado ")
t= input("Escribe radio ")
resultadoe = nonagono(s,t)
print resultadoe
elif elecciondos == 12:
u= input("Escribe lado ")
resultadoe = nonagono_peri(u)
print resultadoe
elif eleccion == 10:
if elecciondos == 11:
v= input("Escribe lado ")
x= input("Escribe radio ")
resultadof = decagono(v,x)
print resultadof
elif elecciondos == 12:
y= input("Escribe lado ")
resultadof = decagono_peri(y)
print resultadof
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testscenarios
from tuning_box.tests.cli import _BaseCLITest
class TestCreateComponent(testscenarios.WithScenarios, _BaseCLITest):
    """CLI `comp create` against a mocked POST /components endpoint."""
    # Each scenario tuple becomes (name, {mock_url, args, expected_result}).
    scenarios = [
        (s[0], dict(zip(('mock_url', 'args', 'expected_result'), s[1])))
        for s in [
            ('json', ('/components',
                      'comp create --name comp_name --format json',
                      '{\n "a": "b"\n}')),
            ('yaml', ('/components',
                      'comp create -n comp_name -f yaml',
                      'a: b\n')),
        ]
    ]
    # Populated per-scenario by testscenarios.
    mock_url = None
    args = None
    expected_result = None
    def test_post(self):
        # Mock the POST endpoint, run the CLI, and compare formatted stdout.
        self.req_mock.post(
            self.BASE_URL + self.mock_url,
            headers={'Content-Type': 'application/json'},
            json={'a': 'b'},
        )
        self.cli.run(self.args.split())
        self.assertEqual(self.expected_result, self.cli.stdout.getvalue())
class TestListComponents(testscenarios.WithScenarios, _BaseCLITest):
    """CLI `comp list` against a mocked GET /components endpoint."""
    # Each scenario tuple becomes (name, {mock_url, args, expected_result}).
    scenarios = [
        (s[0], dict(zip(('mock_url', 'args', 'expected_result'), s[1])))
        for s in [
            ('json', ('/components', 'comp list -f json', '[]')),
            ('yaml', ('/components', 'comp list --format yaml', '[]\n')),
        ]
    ]
    # Populated per-scenario by testscenarios.
    mock_url = None
    args = None
    expected_result = None
    def test_get(self):
        # Mock the GET endpoint, run the CLI, and compare formatted stdout.
        self.req_mock.get(
            self.BASE_URL + self.mock_url,
            headers={'Content-Type': 'application/json'},
            json=[],
        )
        self.cli.run(self.args.split())
        self.assertEqual(self.expected_result, self.cli.stdout.getvalue())
class TestShowComponent(testscenarios.WithScenarios, _BaseCLITest):
    """CLI `comp show <id>` against a mocked GET /components/<id> endpoint."""
    # Each scenario tuple becomes (name, {mock_url, args, expected_result}).
    scenarios = [
        (s[0], dict(zip(('mock_url', 'args', 'expected_result'), s[1])))
        for s in [
            ('yaml', ('/components/9', 'comp show 9 -f yaml',
                      'id: 1\nname: n\nresource_definitions: []\n')),
        ]
    ]
    # Populated per-scenario by testscenarios.
    mock_url = None
    args = None
    expected_result = None
    def test_get(self):
        # Mock the GET endpoint, run the CLI, and compare formatted stdout.
        self.req_mock.get(
            self.BASE_URL + self.mock_url,
            headers={'Content-Type': 'application/json'},
            json={'id': 1, 'name': 'n', 'resource_definitions': []},
        )
        self.cli.run(self.args.split())
        self.assertEqual(self.expected_result, self.cli.stdout.getvalue())
class TestDeleteComponent(testscenarios.WithScenarios, _BaseCLITest):
    """CLI `comp delete <id>` against a mocked DELETE /components/<id> endpoint."""
    # Each scenario tuple becomes (name, {mock_url, args, expected_result}).
    scenarios = [
        (s[0], dict(zip(('mock_url', 'args', 'expected_result'), s[1])))
        for s in [
            ('', ('/components/9', 'comp delete 9',
                  'Component with id 9 was deleted\n')),
        ]
    ]
    # Populated per-scenario by testscenarios.
    mock_url = None
    args = None
    expected_result = None
    def test_delete(self):
        # Mock the DELETE endpoint, run the CLI, and compare the confirmation text.
        self.req_mock.delete(
            self.BASE_URL + self.mock_url,
            headers={'Content-Type': 'application/json'}
        )
        self.cli.run(self.args.split())
        self.assertEqual(self.expected_result, self.cli.stdout.getvalue())
class TestUpdateComponent(testscenarios.WithScenarios, _BaseCLITest):
    """CLI `comp update <id>` option combinations against a mocked PATCH endpoint."""
    # Each scenario tuple becomes (name, {mock_url, args, expected_result});
    # scenarios cover short/long flags and empty resource-definition lists.
    scenarios = [
        (s[0], dict(zip(('mock_url', 'args', 'expected_result'), s[1])))
        for s in [
            ('no_data', ('/components/9', 'comp update 9', '{}')),
            ('s_name', ('/components/9',
                        'comp update 9 -n comp_name', '{}')),
            ('l_name', ('/components/9',
                        'comp update 9 --name comp_name', '{}')),
            ('s_r_defs', ('/components/9',
                          'comp update 9 -r 1,2 ', '{}')),
            ('l_r_ders', ('/components/9',
                          'comp update 9 --resource-definitions 1,2', '{}')),
            ('empty_s_r_defs', ('/components/9',
                                'comp update 9 -r [] -n comp_name', '{}')),
            ('empty_l_r_defs', ('/components/9',
                                'comp update 9 --resource-definitions []',
                                '{}'))
        ]
    ]
    # Populated per-scenario by testscenarios.
    mock_url = None
    args = None
    expected_result = None
    def test_update(self):
        # Mock the PATCH endpoint, run the CLI, and compare formatted stdout.
        self.req_mock.patch(
            self.BASE_URL + self.mock_url,
            headers={'Content-Type': 'application/json'},
            json={}
        )
        self.cli.run(self.args.split())
        self.assertEqual(self.expected_result, self.cli.stdout.getvalue())
|
from django.shortcuts import render, redirect
from django.contrib import messages
from django.contrib.auth.models import User, auth
# Create your views here.
def register(request):
    """Handle the user registration form.

    GET renders the form; POST validates the submitted fields and either
    creates the account and redirects to the login page, or flashes an
    error and redirects back to the registration page.
    """
    if request.method == 'POST':
        first_name = request.POST['first_name']
        last_name = request.POST['last_name']
        user_name = request.POST['username']
        email = request.POST['email']
        pass1 = request.POST['password1']
        pass2 = request.POST['password2']
        if pass1 != pass2:
            # Message grammar fixed (was "Password do not match").
            messages.info(request, 'Passwords do not match')
            return redirect('register')
        if User.objects.filter(username=user_name).exists():
            messages.info(request, 'Username taken')
            return redirect('register')
        if User.objects.filter(email=email).exists():
            messages.info(request, 'Email taken')
            return redirect('register')
        # create_user() hashes the password and persists the row itself,
        # so the previous explicit save() was redundant; the debug print
        # and the unreachable trailing `return redirect('/')` were removed.
        User.objects.create_user(
            username=user_name,
            email=email,
            first_name=first_name,
            last_name=last_name,
            password=pass1,
        )
        return redirect('login')
    else:
        return render(request, 'register.html')
def login(request):
    """Authenticate a user from the login form.

    On POST, a valid username/password pair starts a session and lands on
    the home page; anything else flashes an error and returns to the form.
    GET simply renders the form.
    """
    if request.method != 'POST':
        return render(request, 'login.html')
    submitted_username = request.POST['username']
    submitted_password = request.POST['password']
    user = auth.authenticate(username=submitted_username,
                             password=submitted_password)
    if user is None:
        messages.info(request, "Check Username and Password!")
        return redirect('login')
    auth.login(request, user)
    return redirect("home")
def logout(request):
    """End the current session and return to the landing page."""
    auth.logout(request)
    return redirect('/')
def intro(request):
    """Render the introductory landing page."""
    template = 'intro.html'
    return render(request, template)
def home(request):
    """Render the authenticated home page."""
    template = "home.html"
    return render(request, template)
def speechsum(request):
    """Render the speech-summarisation page."""
    template = 'speechsum.html'
    return render(request, template)
def textsum(request):
    """Render the text-summarisation page."""
    template = 'textsum.html'
    return render(request, template)
def speechlist(request):
    """Render the speech-to-list page."""
    template = 'speechlist.html'
    return render(request, template)
def textlist(request):
    """Render the text-to-list page."""
    template = 'textlist.html'
    return render(request, template)
def maketextsummary(request):
    """Summarise the submitted text to the requested number of sentences.

    Supports English ("eng": stop-word-filtered word-frequency sentence
    scoring) and Hindi ("hin": suffix-stripping lemmatiser + TF-IDF
    sentence weights). Renders the summary — or an error message — back
    into ``textsum.html``.
    """
    from nltk.corpus import stopwords
    from nltk.tokenize import word_tokenize, sent_tokenize
    text = str(request.POST['summary'])
    lang = str(request.POST['language'])
    if lang == "eng":
        # Score each non-stop word by its frequency in the text.
        stopWords = set(stopwords.words("english"))
        words = word_tokenize(text)
        freqTable = dict()
        for word in words:
            word = word.lower()
            if word in stopWords:
                continue
            freqTable[word] = freqTable.get(word, 0) + 1
        # Score each sentence as the sum of the frequencies of the scored
        # words it contains.
        sentences = sent_tokenize(text)
        sentenceValue = dict()
        for sentence in sentences:
            for word, freq in freqTable.items():
                if word in sentence.lower():
                    sentenceValue[sentence] = sentenceValue.get(sentence, 0) + freq
        # (An unused `average` computation was removed here; it divided by
        # len(sentenceValue) and crashed on empty input.)
        summary = ''
        top = int(request.POST['lines'])
        if top > len(sentenceValue):
            error_msg = ('Cannot be summarised since the requested number '
                         'of sentences is greater than the number of '
                         'sentences in the text')
            return render(request, 'textsum.html', {'result': error_msg})
        else:
            # Keep the `top` highest-scoring sentences, emitted in their
            # original text order.
            ranked = sorted(sentenceValue, key=sentenceValue.get, reverse=True)
            chosen = set(ranked[:top])
            for sentence in sentences:
                if sentence in chosen:
                    summary += " " + sentence
            return render(request, 'textsum.html', {'result': summary})
    if lang == "hin":
        top = int(request.POST['lines'])
        # Hindi stop-word list.
        st=['के','का',',','में','की','है','यह','थे','ही','और','से','हैं','थी','को','पर','इस',
        'होता','कि','जो','कर','मे','गया','करने','किया','लिये','अपने','ने','बनी','नहीं','तो',
        'ही','या',"एवं",'दिया','हो','इसका','था','ही','द्वारा','हुआ','तक','साथ','करना','वाले',
        'बाद','लिए','आप','कुछ','सकते','किसी','ये','इसके','सबसे','इसमें','थे','दो','होने','वह',
        'वे','करते','बहुत','कहा','वर्ग','कई','करें','होती','अपनी','उनके','थी','यदि','हुई','जा','ना',
        'इसे','कहते','जब','होते','कोई','हुए','व','न','अभी','जैसे','सभी','करता','उनकी','तरह','उस',
        'आदि','कुल','एस','रहा','इसकी','सकता','रहे','उनका','इसी','रखें','अपना','पे','उसके']
        new=[]
        ne=[]
        ovr=[]
        # Split into sentences on the danda ('।'), tokenize, and drop
        # punctuation and stop words; `ovr` holds tokens per sentence.
        a=text.split('।')
        for i in a:
            ne.append(word_tokenize(i))
        for j in ne:
            for i in j:
                if '।' not in i and i not in [',','?','!']:
                    t=i
                    if t not in st:
                        new.append(i)
            ovr.append(new)
            new=[]
        # Longest-match suffix table for a simple Hindi stemmer.
        suffixes = {
            1: ["ो", "े", "ू", "ु", "ी", "ि", "ा"],
            2: ["कर", "ाओ", "िए", "ाई", "ाए", "ने", "नी", "ना", "ते", "ीं", "ती", "ता", "ाँ", "ां", "ों", "ें"],
            3: ["ाकर", "ाइए", "ाईं", "ाया", "ेगी", "ेगा", "ोगी", "ोगे", "ाने", "ाना", "ाते", "ाती", "ाता", "तीं", "ाओं", "ाएं", "ुओं", "ुएं", "ुआं"],
            4: ["ाएगी", "ाएगा", "ाओगी", "ाओगे", "एंगी", "ेंगी", "एंगे", "ेंगे", "ूंगी", "ूंगा", "ातीं", "नाओं", "नाएं", "ताओं", "ताएं", "ियाँ", "ियों", "ियां"],
            5: ["ाएंगी", "ाएंगे", "ाऊंगी", "ाऊंगा", "ाइयाँ", "ाइयों", "ाइयां"],
        }
        lemm=[]
        lemm_f=[]
        for new in ovr:
            for word in new:
                c=0
                for L in 5, 4, 3, 2, 1:
                    if len(word)>L+1:
                        for suf in suffixes[L]:
                            if word.endswith(suf):
                                c=1
                                lemm.append(word[:-L])
                                break
                    if c==1:
                        break
                if c==0:
                    lemm.append(word)
            lemm_f.append(lemm)
            lemm=[]
        # TF-IDF over the lemmatised sentences.
        import math
        idf={}
        uni=[]
        freqTable = dict()
        for lemm in lemm_f:
            for word in lemm:
                if word not in uni:
                    uni.append(word)
                if word in freqTable:
                    freqTable[word] += 1
                else:
                    freqTable[word] = 1
        n=len(lemm_f)
        for word in uni:
            idf[word]=0
            for i in lemm_f:
                if word in i:
                    idf[word]+=1
        for word in idf:
            idf[word]=math.log((1+n)/idf[word])
        for word in freqTable:
            freqTable[word]*=idf[word]
        # Sentence weight = sum of its words' TF-IDF scores.
        weight=[]
        s_weight=[]
        d={}
        for i in lemm_f:
            su=0
            for j in i:
                su+=freqTable[j]
            weight.append(su)
        for i in weight:
            s_weight.append(i)
        s_weight.sort(reverse=True)
        # NOTE(review): duplicate weights collapse onto the first matching
        # index here, which can repeat a sentence — confirm intended.
        for i in s_weight:
            d[i]=weight.index(i)
        summary = ''
        if top>len(a):
            # BUG FIX: this error used to be print()ed to the console only,
            # leaving the user with an empty summary; render it instead.
            error_msg = ('Cannot be summarised since the number of sentences '
                         'entered is greater than the number of sentences in '
                         'the text')
            return render(request, 'textsum.html', {'result': error_msg})
        else:
            i=0
            for j in s_weight:
                if i in range(top):
                    summary += a[d[j]]
                i+=1
        return render(request, 'textsum.html', {'result': summary})
    # Unknown language code: previously fell off the end and returned None
    # (a Django 500); show the page with an empty result instead.
    return render(request, 'textsum.html', {'result': ''})
def maketextlist(request):
    """Render the submitted text as a numbered list of sentences with
    stop words removed (English or Hindi).

    (Unused imports and an unused hard-coded Hindi sample string were
    removed from the original implementation.)
    """
    from nltk.corpus import stopwords
    from nltk.tokenize import word_tokenize
    content = str(request.POST['lister'])
    lang = str(request.POST['language'])
    if lang == "eng":
        content = word_tokenize(content)
        blank_list = []
        stop_word = set(stopwords.words('english'))
        sub_list = []
        # Split the token stream into sentences at '.', dropping stop words.
        for w in content:
            if w not in stop_word:
                sub_list.append(w)
            if w == '.':
                blank_list.append(sub_list)
                sub_list = []
        final_list = []
        for i in range(len(blank_list)):
            sentence = str(i+1) + ". "
            for j in range(len(blank_list[i])):
                sentence += blank_list[i][j]
                sentence += " "
            final_list.append(sentence)
        return render(request, 'textlist.html', {'lists': final_list})
    # NOTE(review): the summariser views use the code "hin"; confirm which
    # value textlist.html actually submits for Hindi.
    if lang == "hindi":
        content = word_tokenize(content)
        # Hindi stop-word list.
        st=['के','का',',','में','की','है','यह','थे','ही','और','से','हैं','थी','को','पर','इस','होता','कि','जो','कर','मे','गया','करने','किया','लिये','अपने','ने','बनी','नहीं','तो','ही','या',"एवं",'दिया','हो','इसका','था','ही','द्वारा','हुआ','तक','साथ','करना','वाले','बाद','लिए','आप','कुछ','सकते','किसी','ये','इसके','सबसे','इसमें','थे','दो','होने','वह','वे','करते','बहुत','कहा','वर्ग','कई','करें','होती','अपनी','उनके','थी','यदि','हुई','जा','ना','इसे','कहते','जब','होते','कोई','हुए','व','न','अभी','जैसे','सभी','करता','उनकी','तरह','उस','आदि','कुल','एस','रहा','इसकी','सकता','रहे','उनका','इसी','रखें','अपना','पे','उसके']
        new=[]
        ne=[]
        ovr=[]
        # Rebuild a space-joined string, split into sentences on the danda
        # ('।'), then tokenize and drop punctuation and stop words.
        sent = ''
        for i in content:
            sent += i
            sent += " "
        a=sent.split('।')
        for i in a:
            ne.append(word_tokenize(i))
        for j in ne:
            for i in j:
                if '।' not in i and i not in [',','?','!']:
                    t=i
                    if t not in st:
                        new.append(i)
            ovr.append(new)
            new=[]
        final=[]
        for i in range(len(ovr)):
            sentence=str(i+1)+'. '
            for j in ovr[i]:
                sentence+=' '
                sentence+=j
            final.append(sentence)
        return render(request, 'textlist.html', {'lists': final})
    # Fallback for an unrecognised language code (previously returned None).
    return render(request, 'textlist.html', {'lists': []})
def makespeechsum(request):
    """Capture speech from the microphone, transcribe it, and summarise it.

    English ("eng") uses word-frequency sentence scoring; Hindi ("hin")
    uses a suffix-stripping lemmatiser plus TF-IDF sentence weights.
    Sentence boundaries are dictated verbally ("dot" / "विराम").
    """
    lang = str(request.POST['language'])
    if lang == "eng":
        import nltk
        from nltk.corpus import stopwords
        from nltk.tokenize import word_tokenize, sent_tokenize
        import speech_recognition as sr
        import os
        import pyttsx3
        r=sr.Recognizer()
        mic=sr.Microphone()
        c=0
        i=1
        # Record one utterance and transcribe it with Google's recogniser.
        with mic as source:
            r.adjust_for_ambient_noise(source, duration=2)
            audio=r.listen(source)
            MyText = r.recognize_google(audio)
            MyText = MyText.lower()
            i+=1
        s=''
        # The spoken word "dot" marks a sentence boundary.
        MyText=MyText.split('dot')
        for j in MyText:
            # NOTE(review): BUG — `i` iterates single *characters* of the
            # segment, so it can never equal 'coma'; the comma handling
            # below is dead code. TODO: compare whole words instead.
            for i in j:
                if i=='coma':
                    s+=', '
                else:
                    s+=i
            s+='.'
        print(s)
        # NOTE(review): redundant — `lang` was already read above.
        lang = str(request.POST['language'])
        # Tokenizing the text
        stopWords = set(stopwords.words("english"))
        words = word_tokenize(s)
        # Creating a frequency table to keep the
        # score of each word
        freqTable = dict()
        for word in words:
            word = word.lower()
            if word in stopWords:
                continue
            if word in freqTable:
                freqTable[word] += 1
            else:
                freqTable[word] = 1
        #Creating a dictionary to keep the score
        # of each sentence
        sentences = sent_tokenize(s)
        sentenceValue = dict()
        for sentence in sentences:
            for word, freq in freqTable.items():
                if word in sentence.lower():
                    if sentence in sentenceValue:
                        sentenceValue[sentence] += freq
                    else:
                        sentenceValue[sentence] = freq
        sumValues = 0
        for sentence in sentenceValue:
            sumValues += sentenceValue[sentence]
        # Average value of a sentence from the original text.
        # NOTE(review): `average` is never used, and this divides by zero
        # when no sentence scored — consider removing.
        average = int(sumValues / len(sentenceValue))
        # Storing sentences into our summary.
        summary = ''
        top = int(request.POST['lines'])
        if top>len(sentenceValue):
            error_msg = 'Error: Number of sentences is greater than the number of sentences in the text'
            return render(request, 'speechsum.html', {'result' : error_msg})
        else:
            # Keep the `top` highest-scoring sentences, in original order.
            l=[]
            mm=[]
            for sentence in sorted(sentenceValue.items(), key=lambda item: item[1], reverse=True):
                l.append(sentence[0])
            for i in range(top):
                mm.append(l[i])
            for sentence in sentences:
                if sentence in mm:
                    summary += " " + sentence
            return render(request, 'speechsum.html', {'result': summary})
    if lang == "hin":
        import speech_recognition as sr
        import os
        import pyttsx3
        r=sr.Recognizer()
        mic=sr.Microphone()
        c=0
        i=1
        # Record one utterance; "विराम" (spoken) marks sentence boundaries.
        with mic as source:
            r.adjust_for_ambient_noise(source, duration=2)
            audio=r.listen(source)
            MyText = r.recognize_google(audio, language='hi-IN')
            MyText = MyText.lower()
            i+=1
        s=''
        MyText=MyText.split('विराम')
        print(MyText)
        for i in MyText:
            s+=i
            s+='। '
        top = int(request.POST['lines'])
        from nltk.tokenize import word_tokenize, sent_tokenize
        # Hindi stop-word list.
        st=['के','का',',','में','की','है','यह','थे','ही','और','से','हैं','थी','को','पर','इस',
        'होता','कि','जो','कर','मे','गया','करने','किया','लिये','अपने','ने','बनी','नहीं','तो',
        'ही','या',"एवं",'दिया','हो','इसका','था','ही','द्वारा','हुआ','तक','साथ','करना','वाले',
        'बाद','लिए','आप','कुछ','सकते','किसी','ये','इसके','सबसे','इसमें','थे','दो','होने','वह',
        'वे','करते','बहुत','कहा','वर्ग','कई','करें','होती','अपनी','उनके','थी','यदि','हुई','जा','ना',
        'इसे','कहते','जब','होते','कोई','हुए','व','न','अभी','जैसे','सभी','करता','उनकी','तरह','उस',
        'आदि','कुल','एस','रहा','इसकी','सकता','रहे','उनका','इसी','रखें','अपना','पे','उसके']
        new=[]
        ne=[]
        ovr=[]
        # Split into sentences on the danda ('।'), tokenize, and drop
        # punctuation and stop words; `ovr` holds tokens per sentence.
        a=s.split('।')
        for i in a:
            ne.append(word_tokenize(i))
        for j in ne:
            for i in j:
                if '।' not in i and i not in [',','?','!']:
                    t=i
                    if t not in st:
                        new.append(i)
            ovr.append(new)
            new=[]
        # Longest-match suffix table for a simple Hindi stemmer.
        suffixes = {
            1: ["ो", "े", "ू", "ु", "ी", "ि", "ा"],
            2: ["कर", "ाओ", "िए", "ाई", "ाए", "ने", "नी", "ना", "ते", "ीं", "ती", "ता", "ाँ", "ां", "ों", "ें"],
            3: ["ाकर", "ाइए", "ाईं", "ाया", "ेगी", "ेगा", "ोगी", "ोगे", "ाने", "ाना", "ाते", "ाती", "ाता", "तीं", "ाओं", "ाएं", "ुओं", "ुएं", "ुआं"],
            4: ["ाएगी", "ाएगा", "ाओगी", "ाओगे", "एंगी", "ेंगी", "एंगे", "ेंगे", "ूंगी", "ूंगा", "ातीं", "नाओं", "नाएं", "ताओं", "ताएं", "ियाँ", "ियों", "ियां"],
            5: ["ाएंगी", "ाएंगे", "ाऊंगी", "ाऊंगा", "ाइयाँ", "ाइयों", "ाइयां"],
        }
        lemm=[]
        lemm_f=[]
        for new in ovr:
            for word in new:
                c=0
                for L in 5, 4, 3, 2, 1:
                    if len(word)>L+1:
                        for suf in suffixes[L]:
                            if word.endswith(suf):
                                c=1
                                lemm.append(word[:-L])
                                break
                    if c==1:
                        break
                if c==0:
                    lemm.append(word)
            lemm_f.append(lemm)
            lemm=[]
        # TF-IDF over the lemmatised sentences.
        import math
        idf={}
        uni=[]
        freqTable = dict()
        for lemm in lemm_f:
            for word in lemm:
                if word not in uni:
                    uni.append(word)
                if word in freqTable:
                    freqTable[word] += 1
                else:
                    freqTable[word] = 1
        n=len(lemm_f)
        for word in uni:
            idf[word]=0
            for i in lemm_f:
                if word in i:
                    idf[word]+=1
        for word in idf:
            idf[word]=math.log((1+n)/idf[word])
        for word in freqTable:
            freqTable[word]*=idf[word]
        # Sentence weight = sum of its words' TF-IDF scores.
        weight=[]
        s_weight=[]
        d={}
        for i in lemm_f:
            su=0
            for j in i:
                su+=freqTable[j]
            weight.append(su)
        for i in weight:
            s_weight.append(i)
        s_weight.sort(reverse=True)
        # NOTE(review): duplicate weights collapse onto the first matching
        # index, which can repeat a sentence — confirm intended.
        for i in s_weight:
            d[i]=weight.index(i)
        summary = ''
        if top>len(a):
            # NOTE(review): this error is only printed to the console; the
            # user still gets an empty summary. It should be rendered like
            # the English branch — TODO fix.
            print('Cannot be summarised since the number of sentences entered is greater than the number of sentences in the text')
        else:
            i=0
            for j in s_weight:
                if i in range(top):
                    summary += a[d[j]]
                i+=1
        return render(request, 'speechsum.html', {'result': summary})
def makespeechlist(request):
    """Capture speech and render it as a numbered list of items.

    The spoken separator between items is "next" for English and "विराम"
    for Hindi. NOTE(review): an unrecognised language code falls off the
    end and returns None (a Django 500) — confirm the form restricts it.
    """
    import speech_recognition as sr
    import os
    import pyttsx3
    lang = str(request.POST['language'])
    r=sr.Recognizer()
    mic=sr.Microphone()
    c=0
    l=[]
    if lang == "eng":
        # Record one utterance and transcribe it with Google's recogniser.
        with mic as source:
            r.adjust_for_ambient_noise(source, duration=2)
            #print('speak now for part ')
            audio=r.listen(source)
            MyText = r.recognize_google(audio)
            MyText = MyText.lower()
            l.append(MyText)
        # NOTE(review): `l` holds exactly one transcript; this loop relies
        # on the loop variable leaking after the loop to leave MyText as
        # the list of "next"-separated items.
        for MyText in l:
            MyText=MyText.split("next")
        final_list = []
        for i in range(len(MyText)):
            sentence = ''
            sentence += str(i+1) + ". "
            sentence += MyText[i]
            final_list.append(sentence)
        return render(request, 'speechlist.html', {'lists': final_list})
    if lang == "hin":
        # Same flow as above but with Hindi recognition and separator.
        with mic as source:
            r.adjust_for_ambient_noise(source, duration=2)
            #print('speak now for part ')
            audio=r.listen(source)
            MyText = r.recognize_google(audio, language='hi-IN')
            MyText = MyText.lower()
            l.append(MyText)
        for MyText in l:
            MyText=MyText.split("विराम")
        final_list = []
        for i in range(len(MyText)):
            sentence = ''
            sentence += str(i+1) + ". "
            sentence += MyText[i]
            final_list.append(sentence)
        return render(request, 'speechlist.html', {'lists': final_list})
|
"""
MIT License
Copyright (c) 2023 TheHamkerCat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from io import BytesIO
from os import path, remove
from time import time
import img2pdf
from PIL import Image
from pyrogram import filters
from pyrogram.types import Message
from wbb import app
from wbb.core.decorators.errors import capture_err
from wbb.core.sections import section
async def convert(
    main_message: Message,
    reply_messages,
    status_message: Message,
    start_time: float,
):
    """Download image documents, bundle them into a PDF, and reply with it.

    Each replied message must carry an image document no larger than 5 MB;
    otherwise the status message is edited with the reason and conversion
    aborts. Downloaded files are re-encoded as RGB JPEG (img2pdf requires
    non-alpha input), merged into an in-memory PDF, sent back with a stats
    caption, and the temporary files are removed.
    """
    m = status_message
    documents = []
    for message in reply_messages:
        if not message.document:
            return await m.edit("Not document, ABORTED!")
        if message.document.mime_type.split("/")[0] != "image":
            return await m.edit("Invalid mime type!")
        if message.document.file_size > 5000000:
            return await m.edit("Size too large, ABORTED!")
        documents.append(await message.download())
    # Re-encode every image as RGB JPEG so img2pdf accepts it.
    for img_path in documents:
        img = Image.open(img_path).convert("RGB")
        img.save(img_path, "JPEG", quality=100)
    pdf = BytesIO(img2pdf.convert(documents))
    pdf.name = "wbb.pdf"
    # Optional custom file name: /pdf <name>
    if len(main_message.command) >= 2:
        names = main_message.text.split(None, 1)[1]
        pdf.name = names if names.endswith(".pdf") else names + ".pdf"
    elapsed = round(time() - start_time, 2)
    # BUG FIX: pdf.__sizeof__() reports the BytesIO *object's* memory
    # footprint, not the PDF's size; measure the actual buffer instead.
    size_mb = len(pdf.getbuffer()) / (10 ** 6)
    await main_message.reply_document(
        document=pdf,
        caption=section(
            "IMG2PDF",
            body={
                "Title": pdf.name,
                "Size": f"{size_mb}MB",
                "Pages": len(documents),
                "Took": f"{elapsed}s",
            },
        ),
    )
    await m.delete()
    pdf.close()
    # Clean up the downloaded source images.
    for file in documents:
        if path.exists(file):
            remove(file)
@app.on_message(filters.command("pdf"))
@capture_err
async def img_to_pdf(_, message: Message):
    """/pdf command: build a PDF from a replied image document or album."""
    reply = message.reply_to_message
    if not reply:
        return await message.reply(
            "Reply to an image (as document) or group of images."
        )
    m = await message.reply_text("Converting..")
    start_time = time()
    # Albums: fetch every message in the media group; otherwise convert
    # just the single replied message.
    if reply.media_group_id:
        messages = await app.get_media_group(
            message.chat.id,
            reply.id,
        )
        return await convert(message, messages, m, start_time)
    return await convert(message, [reply], m, start_time)
|
from . import OAuthSignIn
class OutlookSignIn(OAuthSignIn):
    """OAuth2 sign-in provider for Microsoft/Outlook accounts.

    NOTE(review): ``OAuth2Service``, ``redirect``, ``request`` and ``json``
    are not imported in this module — presumably they come from rauth/Flask
    imports elsewhere; verify, otherwise this raises NameError at runtime.
    """
    def __init__(self):
        super(OutlookSignIn, self).__init__('outlook')
        # Placeholder credentials: register the app and supply a real
        # key/secret via configuration before deploying.
        self.service = OAuth2Service(
            'microsoft',
            consumer_key='Register your app at apps.dev.microsoft.com',
            consumer_secret='Register your app at apps.dev.microsoft.com',
            request_token_params={'scope': 'offline_access User.Read'},
            base_url='https://graph.microsoft.com/v1.0/',
            request_token_url=None,
            access_token_method='POST',
            access_token_url='https://login.microsoftonline.com/common/oauth2/v2.0/token',
            authorize_url='https://login.microsoftonline.com/common/oauth2/v2.0/authorize'
        )
    def authorize(self):
        # Start the authorization-code flow; the provider redirects back to
        # our callback URL with a ?code=... parameter.
        return redirect(self.service.get_authorize_url(
            scope='email',
            response_type='code',
            redirect_uri=self.get_callback_url())
        )
    def callback(self):
        """Exchange the auth code for a token and return (social_id, username, email)."""
        def decode_json(payload):
            # The token endpoint responds with JSON bytes.
            return json.loads(payload.decode('utf-8'))
        if 'code' not in request.args:
            return None, None, None
        oauth_session = self.service.get_auth_session(
            data={'code': request.args['code'],
                  'grant_type': 'authorization_code',
                  'redirect_uri': self.get_callback_url()},
            decoder=decode_json
        )
        # NOTE(review): Microsoft Graph's /me does not expose an 'email'
        # field (it uses 'mail'/'userPrincipalName') — confirm this query
        # against the Graph API docs.
        me = oauth_session.get('me?fields=id,email').json()
        return (
            # NOTE(review): the 'facebook$' prefix looks copy-pasted from
            # the Facebook provider; 'microsoft$'/'outlook$' was likely
            # intended — confirm before changing stored social IDs.
            'facebook$' + me['id'],
            me.get('email').split('@')[0], # no username is provided, so
                                           # the email's local part is
                                           # used instead
            me.get('email')
        )
|
import numpy as np
import cv2
import os
# Haar cascade for face detection plus a pre-trained LBPH face recognizer.
face_cascade = cv2.CascadeClassifier('cascades/data/haarcascade_frontalface_alt2.xml')
recognizer = cv2.face.LBPHFaceRecognizer_create()
recognizer.read('trainner.yml')
# Ensures the video is launched at most once per run.
opened_yet = False
cap = cv2.VideoCapture(0)
while True:
    # Grab a frame from the default camera; detection runs on grayscale.
    ret,frame = cap.read()
    gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray,scaleFactor=1.5,minNeighbors=5)
    for (x,y,w,h) in faces:
        #print(x,y,w,h)
        roi_gray = gray[y:y+h,x:x+w]
        roi_color = frame[y:y+h,x:x+w]
        # predict() returns (label, confidence). NOTE(review): for LBPH a
        # LOWER confidence means a closer match — confirm this 45..85 band
        # matches the training setup.
        id_ ,conf = recognizer.predict(roi_gray)
        if conf >= 45 and conf <= 85:
            print(id_)
            # Play the video once when the recognized label is 0.
            if id_ == 0 and not opened_yet:
                opened_yet = True
                os.startfile(os.path.join(os.path.dirname(os.path.abspath(__file__)),'video.mp4'))
        img_item = 'myimage.png'
        #cv2.imwrite(img_item,roi_color)
        # Draw a blue 2px box around the face ("hight" is a typo for
        # height, left unchanged here).
        color = (255,0,0)
        stroke = 2
        width = x + w
        hight = y + h
        cv2.rectangle(frame,(x,y),(width,hight),color,stroke)
    cv2.imshow('frame',frame)
    # Quit on 'q'.
    if cv2.waitKey(20) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
#!/usr/bin/env python
"""
_JobEmulator_
"""
__revision__ = "$Id: "
__version__ = "$Revision: "
__all__ = []
|
import collections
from lanedet.utils import build_from_cfg
from ..registry import PROCESS
class Process(object):
    """Compose multiple process steps and apply them in order.

    Args:
        processes (Sequence[dict | callable]): process objects, or config
            dicts from which processes are built via the PROCESS registry.
        cfg: configuration forwarded as a default argument when a step is
            built from a config dict.
    """
    def __init__(self, processes, cfg):
        assert isinstance(processes, collections.abc.Sequence)
        self.processes = []
        for step in processes:
            if isinstance(step, dict):
                # Build the process object from its config dict.
                built = build_from_cfg(step, PROCESS, default_args=dict(cfg=cfg))
                self.processes.append(built)
            elif callable(step):
                self.processes.append(step)
            else:
                raise TypeError('process must be callable or a dict')

    def __call__(self, data):
        """Apply every step to `data` in order.

        Args:
            data (dict): the data to process.

        Returns:
            dict | None: the processed data, or None as soon as any step
            returns None (short-circuit).
        """
        for step in self.processes:
            data = step(data)
            if data is None:
                return None
        return data

    def __repr__(self):
        body = ''.join('\n    %s' % (step,) for step in self.processes)
        return '%s(%s\n)' % (self.__class__.__name__, body)
|
# encoding: utf-8
"""
gites.proprio
Created by mpeeters
Licensed under the GPL license, see LICENCE.txt for more details.
Copyright by Affinitic sprl
"""
import os
import simplejson
from PIL import Image, ImageFile
import zope.interface
from five import grok
from Products.CMFCore.utils import getToolByName
grok.templatedir('templates')
class MainForm(grok.View):
    """View exposing the image-uploader form/macros.

    ``hasExistingImage`` lets the template know whether a previously
    saved image already exists in a local filesystem storage.
    """
    grok.context(zope.interface.Interface)
    grok.name(u'affinitic.imageuploader.macros')
    grok.require('cmf.ModifyPortalContent')
    grok.template('form')
    def hasExistingImage(self, folder, fileName):
        """Return True if ``<fileName>.png`` exists in the storage named ``folder``."""
        utool = getToolByName(self.context, 'portal_url')
        portal = utool.getPortalObject()
        # `folder` names a local-filesystem storage object on the portal.
        localFS = getattr(portal, folder)
        path = os.path.join(localFS.basepath, '%s.png' % fileName)
        return os.path.exists(path)
class ImageUpload(grok.View):
    """Validate an uploaded image and stage it as a temporary PNG.

    Returns a JSON payload consumed by the uploader widget: status 1 on
    success (with geometry/crop metadata), -1 on validation failure.
    User-facing messages are in French.
    """
    grok.context(zope.interface.Interface)
    grok.name(u'upload-image')
    grok.require('cmf.ModifyPortalContent')
    def render(self):
        # All work happens in __call__; grok still requires render().
        pass
    def __call__(self):
        message = ''
        fields = self.request.form
        fileName = fields.get('filename')
        fileUpload = fields.get('file')
        desiredWidth = fields.get('width')
        desiredHeight = fields.get('height')
        destination = fields.get('dest_localfs')
        redirectUrl = fields.get('redirectUrl')
        layoutMaxWidth = fields.get('layoutmaxwidth')
        # Only common web image formats are accepted.
        extension = fileUpload.filename.split('.')[-1]
        if not extension.lower() in ['jpg', 'jpeg', 'png', 'gif']:
            message = 'Votre image doit être au format JPEG, PNG ou GIF.'
            return simplejson.dumps({'message': message,
                                     'status': -1})
        img = Image.open(fileUpload.name)
        width, height = img.size
        # The source must be at least as large as the crop target.
        if width < int(desiredWidth) or height < int(desiredHeight):
            message = 'Votre image est trop petite : elle doit faire au moins %spx de large et %spx de haut.' % (desiredWidth, desiredHeight)
            return simplejson.dumps({'message': message,
                                     'status': -1})
        utool = getToolByName(self.context, 'portal_url')
        portal = utool.getPortalObject()
        imageStorage = getattr(portal, destination)
        # Stage as "<name>-tmp.png"; ImageSave later crops and finalises it.
        destinationFS = os.path.join(imageStorage.basepath,
                                     '%s-tmp.png' % fileName)
        # Raise PIL's encoder buffer so large PNGs are written in one block.
        ImageFile.MAXBLOCK = width * height
        if os.path.exists(destinationFS):
            os.unlink(destinationFS)
        img.save(destinationFS, "PNG")
        self.request.response.setHeader('content-type', 'text/x-json')
        self.request.response.setHeader('Cache-Control', 'no-cache')
        return simplejson.dumps({'filename': fileName,
                                 'height': height,
                                 'width': width,
                                 'desiredHeight': desiredHeight,
                                 'desiredWidth': desiredWidth,
                                 'destination': destination,
                                 'redirectUrl': redirectUrl,
                                 'layoutmaxwidth': layoutMaxWidth,
                                 'message': message,
                                 'status': 1})
class ImageCrop(grok.View):
    """Display the cropping UI (imagecrop template) for a staged image."""
    grok.context(zope.interface.Interface)
    grok.name(u'crop-image')
    grok.require('cmf.ModifyPortalContent')
    grok.template('imagecrop')
class ImageSave(grok.View):
    """Crop the staged temporary image and save the final PNG.

    Applies the user's crop box (rescaled when the client displayed a
    scaled-down image), resizes to the requested dimensions, removes the
    temporary file and redirects back to the originating page.
    """
    grok.context(zope.interface.Interface)
    grok.name(u'save-image')
    grok.require('cmf.ModifyPortalContent')
    def render(self):
        # All work happens in __call__; grok still requires render().
        pass
    def __call__(self):
        fields = self.request.form
        fileName = fields.get('filename')
        desiredWidth = fields.get('desiredWidth')
        desiredHeight = fields.get('desiredHeight')
        destination = fields.get('destination')
        redirectUrl = fields.get('redirectUrl')
        layoutMaxWidth = fields.get('layoutmaxwidth')
        # Clamp the crop origin to the image bounds.
        coordX = int(round(float(fields.get('x'))))
        if coordX < 0:
            coordX = 0
        coordY = int(round(float(fields.get('y'))))
        if coordY < 0:
            coordY = 0
        width = int(round(float(fields.get('w'))))
        height = int(round(float(fields.get('h'))))
        scale = fields.get('scale', '')
        utool = getToolByName(self.context, 'portal_url')
        portal = utool.getPortalObject()
        imageStorage = getattr(portal, destination)
        origin = os.path.join(imageStorage.basepath, '%s-tmp.png' % fileName)
        destination = os.path.join(imageStorage.basepath, '%s.png' % fileName)
        img = Image.open(origin)
        if scale:
            # The client showed the image scaled to layoutMaxWidth, so the
            # crop coordinates must be scaled back up to the real size.
            imgWidth, imgHeight = img.size
            scaling = imgWidth / float(int(layoutMaxWidth))
            coordX = float(coordX) * scaling
            coordY = float(coordY) * scaling
            width = float(width) * scaling
            height = float(height) * scaling
        box = (int(coordX), int(coordY),
               int(coordX + width), int(coordY + height))
        img = img.crop(box)
        if os.path.exists(destination):
            os.unlink(destination)
        img.save(destination, "PNG")
        # Reload and resize to the exact requested dimensions.
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 — confirm
        # the pinned PIL/Pillow version (Image.LANCZOS on Pillow >= 2.7).
        img = Image.open(destination)
        img = img.resize((int(desiredWidth), int(desiredHeight)),
                         Image.ANTIALIAS)
        img.save(destination, "PNG")
        os.unlink(origin)
        self.request.response.redirect(redirectUrl)
        return ''
|
from go_core.lambda_goboard import LambdaGoBoard
import random
import time
import sys
def _apply_random_move(go_board, color):
    """Play one uniformly-random valid move for `color` on `go_board`.

    Returns True when a move was applied, False when the color has no
    valid move (i.e. it must pass).
    """
    valid_move = go_board.get_valid_move(color)
    move_number = len(valid_move)
    if move_number < 1:
        return False
    random_index = random.randint(0, move_number - 1)
    random_move = list(valid_move)[random_index]
    go_board.apply_move(color, random_move)
    return True

def random_move_in_board(go_board, step_to_run):
    """Alternate random black/white moves on `go_board`.

    Stops after `step_to_run` moves have been played, or when both colors
    pass consecutively. Returns the number of moves actually played.
    (The duplicated per-color move logic was factored into
    _apply_random_move; the unused `is_both_pass` flag was removed.)
    """
    move_step = 0
    white_pass = False
    while True:
        black_pass = not _apply_random_move(go_board, LambdaGoBoard.ColorBlackChar)
        if not black_pass:
            move_step += 1
            if move_step >= step_to_run:
                break
        # `white_pass` still reflects the previous iteration here, exactly
        # as in the original control flow.
        if black_pass and white_pass:
            break
        white_pass = not _apply_random_move(go_board, LambdaGoBoard.ColorWhiteChar)
        if not white_pass:
            move_step += 1
            if move_step >= step_to_run:
                break
        if black_pass and white_pass:
            break
    return move_step
def random_move_with_copy(go_board, step_to_run):
    """Play random alternating moves while cross-checking board copies.

    For every candidate move, compares the board produced by
    simulate_all_valid_move() against a fresh copy on which the move is
    actually applied; also keeps a `parallel_copy` board in lockstep with
    the main board and reports any divergence. Returns the number of
    moves played. (This file is Python 2 — note the comma `except` syntax
    and trailing-comma prints.)
    """
    move_step = 0
    is_both_pass = False
    black_pass = False
    white_pass = False
    parallel_copy = go_board.copy()
    while True:
        # --- Black's turn -------------------------------------------------
        black_pass = False
        valid_move = go_board.get_valid_move(LambdaGoBoard.ColorBlackChar)
        move_number = len(valid_move)
        move_and_result = go_board.simulate_all_valid_move(LambdaGoBoard.ColorBlackChar)
        copy_board = dict()
        has_exception = False
        incorrect_move = None
        # Verify every simulated result against an actually-applied copy.
        for each_move in valid_move:
            copy_board[each_move] = go_board.copy()
            try:
                copy_board[each_move].apply_move(LambdaGoBoard.ColorBlackChar, each_move)
            except Exception, e:
                incorrect_move = each_move
                has_exception = True
                break
            copy_board_result = copy_board[each_move].output_board
            simulate_board_result = move_and_result[each_move]
            if (copy_board_result == simulate_board_result).all() :
                # print ('equal'),
                pass
            else:
                print ('Error, different')
        if has_exception:
            # Dump diagnostic state for the move that blew up on the copy.
            debug_copy_board(go_board, incorrect_move)
        if move_number < 1:
            black_pass = True
        else:
            random_index = random.randint(0,move_number-1)
            random_move = list(valid_move)[random_index]
            go_board.apply_move(LambdaGoBoard.ColorBlackChar, random_move)
            # Apply the same move to the lockstep copy and compare boards.
            parallel_copy.apply_move(LambdaGoBoard.ColorBlackChar, random_move)
            original_board = go_board.output_board
            parallel_board = parallel_copy.output_board
            if (original_board == parallel_board).all():
                print ('.'),
                sys.stdout.flush()
            else:
                print (' !!!!!!!!! inconsisitant board of original board and parallel board')
            temp_board = parallel_board.copy()
            parallel_board = temp_board.copy()
            move_step += 1
            print (str(move_step)),
            sys.stdout.flush()
            if move_step >= step_to_run:
                break
        if black_pass and white_pass:
            is_both_pass = True
            break
        # --- White's turn (mirror of the black logic) ---------------------
        white_pass = False
        valid_move = go_board.get_valid_move(LambdaGoBoard.ColorWhiteChar)
        move_number = len(valid_move)
        move_and_result = go_board.simulate_all_valid_move(LambdaGoBoard.ColorWhiteChar)
        copy_board = dict()
        has_exception = False
        incorrect_move = None
        for each_move in valid_move:
            copy_board[each_move] = go_board.copy()
            try:
                copy_board[each_move].apply_move(LambdaGoBoard.ColorWhiteChar, each_move)
            except Exception, e:
                incorrect_move = each_move
                has_exception = True
                break
            copy_board_result = copy_board[each_move].output_board
            simulate_board_result = move_and_result[each_move]
            if (copy_board_result == simulate_board_result).all() :
                # print ('equal'),
                pass
            else:
                print ('Error, different')
        if has_exception:
            debug_copy_board(go_board, incorrect_move)
        if move_number < 1:
            white_pass = True
        else:
            random_index = random.randint(0,move_number-1)
            random_move = list(valid_move)[random_index]
            go_board.apply_move(LambdaGoBoard.ColorWhiteChar, random_move)
            parallel_copy.apply_move(LambdaGoBoard.ColorWhiteChar, random_move)
            original_board = go_board.output_board
            parallel_board = parallel_copy.output_board
            if (original_board == parallel_board).all():
                print ('.'),
                sys.stdout.flush()
            else:
                print (' !!!!!!!!! inconsisitant board of original board and parallel board')
            temp_board = parallel_board.copy()
            parallel_board = temp_board.copy()
            move_step += 1
            print (str(move_step)),
            sys.stdout.flush()
            if move_step >= step_to_run:
                break
        if black_pass and white_pass:
            is_both_pass = True
            break
    return move_step
def debug_copy_board(go_board, incorrect_move):
    """Diagnose a move that failed on a copied board.

    Prints the stone-group id grids of a fresh copy and of the original
    board side by side, compares every group's stones/liberties between
    the two, then re-applies `incorrect_move` to both boards to see which
    one rejects it.
    """
    print ('Got exception:' + str(incorrect_move))
    print ('Copy a new board to and apply again.')
    new_copy = go_board.copy()
    # Group-id grid of the fresh copy (stone_group is 1-indexed with a
    # border, hence the +1 offsets; -1 marks an empty point).
    print ('==========================================')
    for row in range(new_copy.board_size):
        print_line = ''
        for col in range(new_copy.board_size):
            if new_copy.stone_group[row+1][col+1] == None:
                temp_id = -1
            else:
                temp_id = new_copy.stone_group[row+1][col+1].id
            print_line = print_line + ' '+ str(temp_id)
        print (print_line)
    # Group-id grid of the original board for comparison.
    print ('---------------------------')
    for row in range(go_board.board_size):
        print_line = ''
        for col in range(go_board.board_size):
            if go_board.stone_group[row+1][col+1] == None:
                temp_id = -1
            else:
                temp_id = go_board.stone_group[row+1][col+1].id
            print_line = print_line + ' '+ str(temp_id)
        print (print_line)
    print ('len of original groupdict: ' + str(len(go_board.stone_group_dict)))
    print ('len of copy groupdict: ' + str(len(new_copy.stone_group_dict)))
    # Compare each copied group's stones and liberties with the original
    # group of the same id via symmetric set differences.
    for group in new_copy.stone_group_dict.values():
        original_group = go_board.stone_group_dict[group.id]
        stone_different1 = len(group.stones - original_group.stones)
        stone_different2 = len(original_group.stones - group.stones)
        liberty_different1 = len(group.liberties - original_group.liberties)
        liberty_different2 = len(original_group.liberties - group.liberties)
        if stone_different1 != 0 or \
            stone_different2 != 0 or \
            liberty_different1 != 0 or \
            liberty_different2 != 0:
            print ('Found different group!!!')
            print ('==============================================================')
            print ('Original stones: -----------')
            for each_stone in original_group.stones:
                print (str(each_stone))
            print ('Original libertys: -----------')
            for each_liberty in original_group.liberties:
                print (str(each_liberty))
            print ('------------------------------------------')
            print ('copy stones: -----------')
            for each_stone in group.stones:
                print (str(each_stone))
            print ('copy liberty: -----------')
            for each_liberty in group.liberties:
                print (str(each_liberty))
    # Re-apply the failing move to both boards to localise the fault.
    new_copy.apply_move(LambdaGoBoard.ColorBlackChar, incorrect_move)
    print('Done!, work with new copied baord.')
    print ('Trying to apply the incorrect move to original board.')
    go_board.apply_move(LambdaGoBoard.ColorBlackChar, incorrect_move)
    print ('Done, original board is OK with the incorrect move.')
    print ('Trying to compare copy board and original board')
def display_board(go_board):
    """Clear the terminal and print the board at the top-left corner."""
    clear_screen()
    # Move the cursor to row 0, column 0 before printing the board.
    print('\x1b[0;0f')
    print (str(go_board))
def clear_screen():
    """Clear the terminal using ANSI home + erase-display escape codes."""
    # clear the screen
    print('\033[H\033[J')
def random_play():
    """Play one 800-step random game on a 19x19 board and report timing."""
    board = LambdaGoBoard(19)
    clear_screen()
    started = time.time()
    steps = random_move_in_board(board, 800)
    finished = time.time()
    display_board(board)
    elapsed = finished - started
    print ('Total move steps: ' + str(steps))
    print ('Time used: ' + str(elapsed))
    print ('Time per move:' + str(elapsed / steps))
def random_play_with_copy():
    """Benchmark random play that goes through board copies; screen left as-is."""
    board = LambdaGoBoard(19)
    # clear_screen()
    started = time.time()
    steps = random_move_with_copy(board, 800)
    elapsed = time.time() - started
    display_board(board)
    print('Total move steps: ' + str(steps))
    print('Time used: ' + str(elapsed))
    print('Time per move:' + str(elapsed / steps))
def copy_test():
    """Time LambdaGoBoard.copy_from() and check the copy is still playable.

    Plays 200 random moves on a source board, copies it, then plays a
    further 400 moves on the copy before printing it and the copy timing.
    """
    a_board = LambdaGoBoard(19)
    random_move_in_board(a_board, 200)
    start_time = time.time()
    b_board = LambdaGoBoard(19)
    b_board.copy_from(a_board)
    end_time = time.time()
    random_move_in_board(b_board, 400)
    # Bug fix: was the Python-2-only statement form `print str(b_board)`;
    # every other print in this file uses the function-call form.
    print(str(b_board))
    print('time used to copy: ' + str(end_time - start_time))
def multiple_random_play_with_copy():
    """Run the copy-based random playout benchmark 100 times back to back."""
    for _ in range(100):
        random_play_with_copy()
# random_play()
# copy_test()
# random_play_with_copy()
multiple_random_play_with_copy() |
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
import pandas as pd
from sklearn.svm import SVR
import matplotlib.pyplot as plt
import pickle
# Train a gradient-boosting model that predicts a player's market value,
# persist it with pickle, and sanity-check one prediction.
dataframe = pd.read_csv("FOOTBALLL_DATASET.csv")
dataframe['fpl_sel'] = dataframe['fpl_sel'].astype('string')
dataframe['region'] = dataframe['region'].fillna(0)
# Strip the "%" from fpl_sel; the vectorized .str accessor replaces the old
# per-row chained assignment (`df['fpl_sel'][i] = ...`), which pandas warns
# about and which may not write back to the frame.
dataframe['fpl_sel'] = dataframe['fpl_sel'].str.strip("%").astype('float')
# Drop non-numeric identity columns; the rest are features.
der_df = dataframe.drop(['name', 'club', 'position', 'nationality'], axis=1)
x = der_df.drop('market_value', axis=1)
y = der_df['market_value']
xTrain, xTest, yTrain, yTest = train_test_split(x, y, test_size=1/3, random_state=0)
# Bug fix: kernel/gamma are SVR hyper-parameters; passing them to
# GradientBoostingRegressor raised TypeError at construction time.
regressor = GradientBoostingRegressor(random_state=0)
# Fit on the training split only so the held-out score below is meaningful
# (previously the model was fit on all of x/y despite the split).
regressor.fit(xTrain, yTrain)
# Saving model to disk (context managers close the file handles).
with open('model.pkl', 'wb') as model_file:
    pickle.dump(regressor, model_file)
# Loading model to compare the results
with open('model.pkl', 'rb') as model_file:
    model = pickle.load(model_file)
result = model.score(xTest, yTest)
x = [28, 1, 4329, 12, 17.1, 264, 3, 0, 4, 1, 1, 0]
result1 = model.predict([x])
print(result1)
|
# Generated by Django 3.2.6 on 2021-08-13 19:20
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the ``firstapp`` app.

    Creates the Blog, Profile, Like and Comment tables, then wires up the
    ``Blog.likes`` many-to-many field through the Like model.
    """

    # First migration for this app.
    initial = True

    # The user model is swappable, so depend on whatever AUTH_USER_MODEL
    # points at rather than a hard-coded app/model pair.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Blog: a post with coordinates, weather text and an optional image,
        # authored by a user.
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('latitude', models.FloatField(default=0.0)),
                ('longitude', models.FloatField(default=0.0)),
                ('hashtag', models.TextField(max_length=20)),
                ('weather', models.TextField()),
                ('images', models.ImageField(blank=True, upload_to='images')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('body', models.TextField(max_length=200)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Profile: one-to-one extension of the user model.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nickname', models.CharField(blank=True, max_length=64, null=True)),
                ('description', models.TextField(blank=True)),
                ('profile_photo', models.ImageField(blank=True, upload_to='profile/')),
                ('user', models.OneToOneField(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Like: join table between a user and a blog post.
        migrations.CreateModel(
            name='Like',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='firstapp.blog')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Comment: text attached to a blog post (post FK is nullable).
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField(max_length=200)),
                ('date', models.DateTimeField(default=django.utils.timezone.now)),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='firstapp.blog')),
            ],
        ),
        # Added last because the M2M goes through Like, which must exist first.
        migrations.AddField(
            model_name='blog',
            name='likes',
            field=models.ManyToManyField(related_name='likes', through='firstapp.Like', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
"""
pygluu.kubernetes.kustomize
~~~~~~~~~~~~~~~~~~~~~~~~~~~
License terms and conditions for Gluu Cloud Native Edition:
https://www.apache.org/licenses/LICENSE-2.0
"""
import base64
import contextlib
import os
import shutil
import socket
import time
from ast import literal_eval
from pathlib import Path
from pygluu.kubernetes.helpers import get_logger, copy, exec_cmd, ssh_and_remove
from pygluu.kubernetes.couchbase import Couchbase
from pygluu.kubernetes.kubeapi import Kubernetes
from pygluu.kubernetes.pycert import check_cert_with_private_key
from pygluu.kubernetes.yamlparser import Parser
from pygluu.kubernetes.settings import SettingsHandler
from pygluu.kubernetes.redis import Redis
from pygluu.kubernetes.postgres import Postgres
logger = get_logger("gluu-kustomize ")
# Kustomize overlay directories for the ldap and jackrabbit persistent
# volumes — one (ldap, jcr) pair per platform / storage flavour.

# TEST ENVIRONMENT DEPLOYMENTS (microk8s and minikube: host-local storage)
local_ldap_minikube_folder = Path("./ldap/overlays/minikube/local-storage/")
local_jcr_minikube_folder = Path("./jackrabbit/overlays/minikube/local-storage/")
local_ldap_microk8s_folder = Path("./ldap/overlays/microk8s/local-storage/")
local_jcr_microk8s_folder = Path("./jackrabbit/overlays/microk8s/local-storage/")
# AWS (EKS): local, dynamically-provisioned EBS, or pre-created static EBS
local_ldap_eks_folder = Path("./ldap/overlays/eks/local-storage/")
local_jcr_eks_folder = Path("./jackrabbit/overlays/eks/local-storage/")
dynamic_ldap_eks_folder = Path("./ldap/overlays/eks/dynamic-ebs/")
dynamic_jcr_eks_folder = Path("./jackrabbit/overlays/eks/dynamic-ebs/")
static_ldap_eks_folder = Path("./ldap/overlays/eks/static-ebs/")
static_jcr_eks_folder = Path("./jackrabbit/overlays/eks/static-ebs/")
# GCE (GKE): local, dynamic persistent disk, or static persistent disk
local_ldap_gke_folder = Path("./ldap/overlays/gke/local-storage/")
local_jcr_gke_folder = Path("./jackrabbit/overlays/gke/local-storage/")
dynamic_ldap_gke_folder = Path("./ldap/overlays/gke/dynamic-pd/")
dynamic_jcr_gke_folder = Path("./jackrabbit/overlays/gke/dynamic-pd/")
static_ldap_gke_folder = Path("./ldap/overlays/gke/static-pd/")
static_jcr_gke_folder = Path("./jackrabbit/overlays/gke/static-pd/")
# AZURE (AKS): local, dynamic disk, or static disk
local_ldap_azure_folder = Path("./ldap/overlays/azure/local-storage/")
local_jcr_azure_folder = Path("./jackrabbit/overlays/azure/local-storage/")
dynamic_ldap_azure_folder = Path("./ldap/overlays/azure/dynamic-dn/")
dynamic_jcr_azure_folder = Path("./jackrabbit/overlays/azure/dynamic-dn/")
static_ldap_azure_folder = Path("./ldap/overlays/azure/static-dn/")
static_jcr_azure_folder = Path("./jackrabbit/overlays/azure/static-dn/")
# DIGITAL OCEAN: local, dynamic, or static block storage
local_ldap_do_folder = Path("./ldap/overlays/do/local-storage/")
local_jcr_do_folder = Path("./jackrabbit/overlays/do/local-storage/")
dynamic_ldap_do_folder = Path("./ldap/overlays/do/dynamic-dn/")
dynamic_jcr_do_folder = Path("./jackrabbit/overlays/do/dynamic-dn/")
static_ldap_do_folder = Path("./ldap/overlays/do/static-dn/")
static_jcr_do_folder = Path("./jackrabbit/overlays/do/static-dn/")
# LOCAL DEPLOYMENTS (hostPath volumes)
hostpath_ldap_local_folder = Path("./ldap/overlays/local/hostpath/")
hostpath_jcr_local_folder = Path("./jackrabbit/overlays/local/hostpath/")
class Kustomize(object):
def __init__(self, timeout=300):
    """Load installer settings and precompute every manifest path.

    :param timeout: stored on the instance for later operations; default 300.
    """
    self.settings = SettingsHandler()
    # Apps for which manifests will be built (list stored in settings).
    self.all_apps = self.settings.get("ENABLED_SERVICES_LIST")
    self.kubernetes = Kubernetes()
    self.redis = Redis()
    self.postgres = Postgres()
    self.timeout = timeout
    # Property: "kubectl" or "microk8s.kubectl" depending on the platform.
    self.kubectl = self.detect_kubectl
    # Property: output folder plus the ldap/jackrabbit overlay directories.
    self.output_yaml_directory, self.ldap_kustomize_yaml_directory, self.jcr_kustomize_yaml_directory \
        = self.set_output_yaml_directory
    # Absolute paths of every rendered manifest file.
    self.config_yaml = str(self.output_yaml_directory.joinpath("config.yaml").resolve())
    self.ldap_yaml = str(self.output_yaml_directory.joinpath("ldap.yaml").resolve())
    self.jackrabbit_yaml = str(self.output_yaml_directory.joinpath("jackrabbit.yaml").resolve())
    self.persistence_yaml = str(self.output_yaml_directory.joinpath("persistence.yaml").resolve())
    self.oxauth_yaml = str(self.output_yaml_directory.joinpath("oxauth.yaml").resolve())
    self.fido2_yaml = str(self.output_yaml_directory.joinpath("fido2.yaml").resolve())
    self.scim_yaml = str(self.output_yaml_directory.joinpath("scim.yaml").resolve())
    self.oxtrust_yaml = str(self.output_yaml_directory.joinpath("oxtrust.yaml").resolve())
    self.gluu_upgrade_yaml = str(self.output_yaml_directory.joinpath("upgrade.yaml").resolve())
    self.oxshibboleth_yaml = str(self.output_yaml_directory.joinpath("oxshibboleth.yaml").resolve())
    self.oxpassport_yaml = str(self.output_yaml_directory.joinpath("oxpassport.yaml").resolve())
    self.oxauth_key_rotate_yaml = str(self.output_yaml_directory.joinpath("oxauth-key-rotation.yaml").resolve())
    self.cr_rotate_yaml = str(self.output_yaml_directory.joinpath("cr-rotate.yaml").resolve())
    self.oxd_server_yaml = str(self.output_yaml_directory.joinpath("oxd-server.yaml").resolve())
    self.casa_yaml = str(self.output_yaml_directory.joinpath("casa.yaml").resolve())
    self.update_lb_ip_yaml = str(self.output_yaml_directory.joinpath("update-lb-ip.yaml").resolve())
    self.gluu_istio_ingress_yaml = str(self.output_yaml_directory.joinpath("gluu-istio-ingress.yaml").resolve())
    self.ingress_file = self.output_yaml_directory.joinpath("nginx/nginx.yaml")
    # Mapping filled in while building manifests; consumed by
    # adjust_fqdn_yaml_entries() as (yaml path -> resource kind).
    self.adjust_yamls_for_fqdn_status = dict()
    self.gluu_secret = ""
    self.gluu_config = ""
    self.gluu_install_envs = ""
    if self.settings.get("DEPLOYMENT_ARCH") == "gke":
        # Clusterrolebinding needs to be created for gke with CB (couchbase)
        # or redis installs; gcloud supplies the account to bind.
        if self.settings.get("INSTALL_REDIS") == "Y" or \
                self.settings.get("INSTALL_COUCHBASE") == "Y":
            user_account, stderr, retcode = exec_cmd("gcloud config get-value core/account")
            user_account = str(user_account, "utf-8").strip()
            user, stderr, retcode = exec_cmd("whoami")
            user = str(user, "utf-8").strip()
            cluster_role_binding_name = "cluster-admin-{}".format(user)
            self.kubernetes.create_cluster_role_binding(cluster_role_binding_name=cluster_role_binding_name,
                                                        user_name=user_account,
                                                        cluster_role_name="cluster-admin")
@property
def detect_kubectl(self):
"""Detect kubectl command"""
if self.settings.get("DEPLOYMENT_ARCH") == "microk8s":
kubectl = "microk8s.kubectl"
# Check if running in container and settings.json mounted
if Path("./installer-settings.json").exists():
kubectl = "kubectl"
else:
kubectl = "kubectl"
return kubectl
def analyze_storage_class(self, storageclass):
    """Patch a StorageClass yaml in place to match DEPLOYMENT_ARCH, then save it.

    :param storageclass: path of the storageclasses.yaml file to rewrite.
    """
    parser = Parser(storageclass, "StorageClass")
    if self.settings.get("DEPLOYMENT_ARCH") == "eks":
        # AWS EBS: encrypted volume of the requested type, restricted to
        # the (de-duplicated) zones the nodes run in.
        parser["provisioner"] = "kubernetes.io/aws-ebs"
        parser["parameters"]["encrypted"] = "true"
        parser["parameters"]["type"] = self.settings.get("LDAP_JACKRABBIT_VOLUME")
        unique_zones = list(dict.fromkeys(self.settings.get("NODES_ZONES")))
        parser["allowedTopologies"][0]["matchLabelExpressions"][0]["values"] = unique_zones
        parser.dump_it()
    elif self.settings.get("DEPLOYMENT_ARCH") == "gke":
        # GCE persistent disk; the "encrypted" parameter is EBS-only.
        parser["provisioner"] = "kubernetes.io/gce-pd"
        try:
            del parser["parameters"]["encrypted"]
        except KeyError:
            logger.info("Key not deleted as it does not exist inside yaml.")
        parser["parameters"]["type"] = self.settings.get("LDAP_JACKRABBIT_VOLUME")
        unique_zones = list(dict.fromkeys(self.settings.get("NODES_ZONES")))
        parser["allowedTopologies"][0]["matchLabelExpressions"][0]["values"] = unique_zones
        parser.dump_it()
    elif self.settings.get("DEPLOYMENT_ARCH") == "aks":
        # Azure disk uses "storageaccounttype" instead of "type".
        parser["provisioner"] = "kubernetes.io/azure-disk"
        try:
            del parser["parameters"]["encrypted"]
            del parser["parameters"]["type"]
        except KeyError:
            logger.info("Key not deleted as it does not exist inside yaml.")
        parser["parameters"]["storageaccounttype"] = self.settings.get("LDAP_JACKRABBIT_VOLUME")
        unique_zones = list(dict.fromkeys(self.settings.get("NODES_ZONES")))
        parser["allowedTopologies"][0]["matchLabelExpressions"][0]["values"] = unique_zones
        parser.dump_it()
    elif self.settings.get("DEPLOYMENT_ARCH") == "do":
        # DigitalOcean block storage takes no parameters or topologies.
        parser["provisioner"] = "dobs.csi.digitalocean.com"
        try:
            del parser["parameters"]
            del parser["allowedTopologies"]
        except KeyError:
            logger.info("Key not deleted as it does not exist inside yaml.")
        parser.dump_it()
    elif self.settings.get('DEPLOYMENT_ARCH') == "microk8s":
        # Host-path provisioner; strip cloud-only fields.
        try:
            parser["provisioner"] = "microk8s.io/hostpath"
            del parser["allowedTopologies"]
            del parser["allowVolumeExpansion"]
            del parser["parameters"]
        except KeyError:
            logger.info("Key not deleted as it does not exist inside yaml.")
        parser.dump_it()
    elif self.settings.get('DEPLOYMENT_ARCH') == "minikube":
        # Minikube host-path provisioner; strip cloud-only fields.
        try:
            parser["provisioner"] = "k8s.io/minikube-hostpath"
            del parser["allowedTopologies"]
            del parser["allowVolumeExpansion"]
            del parser["parameters"]
        except KeyError:
            logger.info("Key not deleted as it does not exist inside yaml.")
        parser.dump_it()
@property
def set_output_yaml_directory(self):
    """Choose the output folder and kustomize overlay dirs for the platform.

    Returns a ``(output_yamls_folder, ldap_overlay_dir, jcr_overlay_dir)``
    tuple derived from DEPLOYMENT_ARCH and APP_VOLUME_TYPE.  The microk8s
    overlays are the defaults and are overridden per architecture below;
    the output folder is created if it does not exist.
    """
    output_yamls_folder = Path("gluu_microk8s_yamls")
    ldap_kustomize_yaml_directory = local_ldap_microk8s_folder
    jcr_kustomize_yaml_directory = local_jcr_microk8s_folder
    if self.settings.get("DEPLOYMENT_ARCH") == "minikube":
        copy(local_ldap_microk8s_folder, local_ldap_minikube_folder)
        copy(local_jcr_microk8s_folder, local_jcr_minikube_folder)
        output_yamls_folder = Path("gluu_minikube_yamls")
        ldap_kustomize_yaml_directory = local_ldap_minikube_folder
        jcr_kustomize_yaml_directory = local_jcr_minikube_folder
    elif self.settings.get("DEPLOYMENT_ARCH") == "eks":
        output_yamls_folder = Path("gluu_eks_yamls")
        if self.settings.get("APP_VOLUME_TYPE") == 7:
            # Dynamically provisioned EBS.
            self.analyze_storage_class(dynamic_ldap_eks_folder.joinpath("storageclasses.yaml"))
            self.analyze_storage_class(dynamic_jcr_eks_folder.joinpath("storageclasses.yaml"))
            ldap_kustomize_yaml_directory = dynamic_ldap_eks_folder
            jcr_kustomize_yaml_directory = dynamic_jcr_eks_folder
        elif self.settings.get("APP_VOLUME_TYPE") == 8:
            # Pre-created static EBS.
            ldap_kustomize_yaml_directory = static_ldap_eks_folder
            jcr_kustomize_yaml_directory = static_jcr_eks_folder
        else:
            ldap_kustomize_yaml_directory = local_ldap_eks_folder
            jcr_kustomize_yaml_directory = local_jcr_eks_folder
    elif self.settings.get("DEPLOYMENT_ARCH") == "gke":
        output_yamls_folder = Path("gluu_gke_yamls")
        if self.settings.get("APP_VOLUME_TYPE") == 12:
            # Dynamic persistent disk: refresh the gke overlay from the
            # eks one.
            try:
                shutil.rmtree(dynamic_ldap_gke_folder)
            except FileNotFoundError:
                logger.info("Directory not found. Copying...")
            try:
                shutil.rmtree(dynamic_jcr_gke_folder)
            except FileNotFoundError:
                logger.info("Directory not found. Copying...")
            copy(dynamic_ldap_eks_folder, dynamic_ldap_gke_folder)
            copy(dynamic_jcr_eks_folder, dynamic_jcr_gke_folder)
            # NOTE(review): unlike the aks/do branches below, this branch
            # patches and returns the *eks* overlay folders even after
            # copying them to the gke paths — confirm this is intended.
            self.analyze_storage_class(dynamic_ldap_eks_folder.joinpath("storageclasses.yaml"))
            self.analyze_storage_class(dynamic_jcr_eks_folder.joinpath("storageclasses.yaml"))
            ldap_kustomize_yaml_directory = dynamic_ldap_eks_folder
            jcr_kustomize_yaml_directory = dynamic_jcr_eks_folder
        elif self.settings.get("APP_VOLUME_TYPE") == 13:
            ldap_kustomize_yaml_directory = static_ldap_gke_folder
            jcr_kustomize_yaml_directory = static_jcr_gke_folder
        else:
            ldap_kustomize_yaml_directory = local_ldap_gke_folder
            jcr_kustomize_yaml_directory = local_jcr_gke_folder
    elif self.settings.get("DEPLOYMENT_ARCH") == "aks":
        output_yamls_folder = Path("gluu_aks_yamls")
        if self.settings.get("APP_VOLUME_TYPE") == 17:
            # Dynamic Azure disk: build the azure overlay from the eks one.
            copy(dynamic_ldap_eks_folder, dynamic_ldap_azure_folder)
            copy(dynamic_jcr_eks_folder, dynamic_jcr_azure_folder)
            self.analyze_storage_class(dynamic_ldap_azure_folder.joinpath("storageclasses.yaml"))
            self.analyze_storage_class(dynamic_jcr_azure_folder.joinpath("storageclasses.yaml"))
            ldap_kustomize_yaml_directory = dynamic_ldap_azure_folder
            # Bug fix: this previously pointed at dynamic_ldap_azure_folder,
            # so jackrabbit manifests were built from the ldap overlay.
            jcr_kustomize_yaml_directory = dynamic_jcr_azure_folder
        elif self.settings.get("APP_VOLUME_TYPE") == 18:
            ldap_kustomize_yaml_directory = static_ldap_azure_folder
            jcr_kustomize_yaml_directory = static_jcr_azure_folder
        else:
            ldap_kustomize_yaml_directory = local_ldap_azure_folder
            jcr_kustomize_yaml_directory = local_jcr_azure_folder
    elif self.settings.get("DEPLOYMENT_ARCH") == "local":
        output_yamls_folder = Path("gluu_local_yamls")
        if self.settings.get("APP_VOLUME_TYPE") == 26:
            ldap_kustomize_yaml_directory = hostpath_ldap_local_folder
            jcr_kustomize_yaml_directory = hostpath_jcr_local_folder
    elif self.settings.get("DEPLOYMENT_ARCH") == "do":
        output_yamls_folder = Path("gluu_do_yamls")
        if self.settings.get("APP_VOLUME_TYPE") == 22:
            # Dynamic DO block storage: build the do overlay from the eks one.
            copy(dynamic_ldap_eks_folder, dynamic_ldap_do_folder)
            copy(dynamic_jcr_eks_folder, dynamic_jcr_do_folder)
            self.analyze_storage_class(dynamic_ldap_do_folder.joinpath("storageclasses.yaml"))
            self.analyze_storage_class(dynamic_jcr_do_folder.joinpath("storageclasses.yaml"))
            ldap_kustomize_yaml_directory = dynamic_ldap_do_folder
            jcr_kustomize_yaml_directory = dynamic_jcr_do_folder
        elif self.settings.get("APP_VOLUME_TYPE") == 23:
            ldap_kustomize_yaml_directory = static_ldap_do_folder
            jcr_kustomize_yaml_directory = static_jcr_do_folder
    if not output_yamls_folder.exists():
        os.mkdir(output_yamls_folder)
    return output_yamls_folder, ldap_kustomize_yaml_directory, jcr_kustomize_yaml_directory
def adjust_fqdn_yaml_entries(self):
    """Patch every yaml in ``adjust_yamls_for_fqdn_status`` for the FQDN setup.

    Registered FQDN (or an arch that resolves the host locally): strip the
    update-lb-ip command/volume machinery and either point the pod's
    hostAliases at HOST_EXT_IP or delete them.  Unregistered FQDN behind a
    cloud load balancer: wire the update-lb-ip script into each container
    instead.  In both paths, couchbase secret volumes/mounts are removed
    when persistence is pure ldap.
    """
    if self.settings.get("IS_GLUU_FQDN_REGISTERED") == "Y" or \
            self.settings.get("DEPLOYMENT_ARCH") == "microk8s" or \
            self.settings.get("DEPLOYMENT_ARCH") == "minikube" or \
            self.settings.get("DEPLOYMENT_ARCH") == "gke" or \
            self.settings.get("DEPLOYMENT_ARCH") == "aks" or \
            self.settings.get("DEPLOYMENT_ARCH") == "do":
        for k, v in self.adjust_yamls_for_fqdn_status.items():
            # k is the yaml path, v its kubernetes resource kind.
            parser = Parser(k, v)
            volume_mount_list = parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"]
            volume_list = parser["spec"]["template"]["spec"]["volumes"]
            # cr-rotate and upgrade manifests keep their spec untouched here.
            if k != self.cr_rotate_yaml and k != self.gluu_upgrade_yaml:
                if self.settings.get("DEPLOYMENT_ARCH") == "microk8s" or \
                        self.settings.get("DEPLOYMENT_ARCH") == "minikube" or \
                        self.settings.get("DEPLOYMENT_ARCH") == "gke" or \
                        self.settings.get("DEPLOYMENT_ARCH") == "aks" or \
                        self.settings.get("DEPLOYMENT_ARCH") == "do":
                    # Resolve the FQDN inside the pod to the host's
                    # external IP.
                    parser["spec"]["template"]["spec"]["hostAliases"][0]["hostnames"] = \
                        [self.settings.get("GLUU_FQDN")]
                    parser["spec"]["template"]["spec"]["hostAliases"][0]["ip"] = self.settings.get("HOST_EXT_IP")
                else:
                    try:
                        del parser["spec"]["template"]["spec"]["hostAliases"]
                    except KeyError:
                        logger.info("Key not deleted as it does not exist inside yaml.")
                # The update-lb-ip command override and its volume/mount
                # are not needed when the FQDN already resolves.
                try:
                    del parser["spec"]["template"]["spec"]["containers"][0]["command"]
                except KeyError:
                    logger.info("Key not deleted as it does not exist inside yaml.")
                update_lb_ip_vm_index = next(
                    (index for (index, d) in enumerate(volume_mount_list) if d["name"] == "update-lb-ip"), None)
                if update_lb_ip_vm_index is not None:
                    del volume_mount_list[update_lb_ip_vm_index]
                volume_list = parser["spec"]["template"]["spec"]["volumes"]
                update_lb_ip_v_index = next(
                    (index for (index, d) in enumerate(volume_list) if d["name"] == "update-lb-ip"), None)
                if update_lb_ip_v_index is not None:
                    del volume_list[update_lb_ip_v_index]
            # Pure-ldap persistence needs no couchbase secret volumes/mounts.
            if self.settings.get("PERSISTENCE_BACKEND") == "ldap":
                couchbase_password_v_index = next(
                    (index for (index, d) in enumerate(volume_list) if d["name"] == "cb-pass"), None)
                if couchbase_password_v_index is not None:
                    del volume_list[couchbase_password_v_index]
                couchbase_crt_v_index = next(
                    (index for (index, d) in enumerate(volume_list) if d["name"] == "cb-crt"), None)
                if couchbase_crt_v_index is not None:
                    del volume_list[couchbase_crt_v_index]
                couchbase_password_vm_index = next(
                    (index for (index, d) in enumerate(volume_mount_list) if d["name"] == "cb-pass"), None)
                if couchbase_password_vm_index is not None:
                    del volume_mount_list[couchbase_password_vm_index]
                couchbase_crt_vm_index = next(
                    (index for (index, d) in enumerate(volume_mount_list) if d["name"] == "cb-crt"), None)
                if couchbase_crt_vm_index is not None:
                    del volume_mount_list[couchbase_crt_vm_index]
            parser.dump_it()
    else:
        for k, v in self.adjust_yamls_for_fqdn_status.items():
            parser = Parser(k, v)
            # Check Couchbase entries
            if self.settings.get("PERSISTENCE_BACKEND") == "ldap":
                volume_mount_list = parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"]
                volume_list = parser["spec"]["template"]["spec"]["volumes"]
                couchbase_password_v_index = next(
                    (index for (index, d) in enumerate(volume_list) if d["name"] == "cb-pass"), None)
                if couchbase_password_v_index is not None:
                    del volume_list[couchbase_password_v_index]
                couchbase_crt_v_index = next(
                    (index for (index, d) in enumerate(volume_list) if d["name"] == "cb-crt"), None)
                if couchbase_crt_v_index is not None:
                    del volume_list[couchbase_crt_v_index]
                couchbase_password_vm_index = next(
                    (index for (index, d) in enumerate(volume_mount_list) if d["name"] == "cb-pass"), None)
                if couchbase_password_vm_index is not None:
                    del volume_mount_list[couchbase_password_vm_index]
                couchbase_crt_vm_index = next(
                    (index for (index, d) in enumerate(volume_mount_list) if d["name"] == "cb-crt"), None)
                if couchbase_crt_vm_index is not None:
                    del volume_mount_list[couchbase_crt_vm_index]
            if k != self.cr_rotate_yaml and k != self.gluu_upgrade_yaml:
                # Run the update-lb-ip script alongside the app entrypoint so
                # the pod tracks the load balancer's address.
                parser["spec"]["template"]["spec"]["containers"][0]["command"] = \
                    ['/bin/sh', '-c', '/usr/bin/python3 /scripts/update-lb-ip.py & \n/app/scripts/entrypoint.sh\n']
                volume_mount_list = parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"]
                if {"mountPath": "/scripts", "name": "update-lb-ip"} not in volume_mount_list:
                    parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(
                        {"mountPath": "/scripts", "name": "update-lb-ip"})
                parser["spec"]["template"]["spec"]["hostAliases"][0]["hostnames"] = [self.settings.get("GLUU_FQDN")]
                parser["spec"]["template"]["spec"]["hostAliases"][0]["ip"] = self.settings.get("HOST_EXT_IP")
            parser.dump_it()
def setup_config_kustomization(self):
    """Prepare the ./config/base kustomization.

    Adjusts the resource list (gke cluster-role binding, istio service),
    overrides the config job command for istio, and — when the user drops
    gluu.crt/gluu.key next to the installer — validates the pair and wires
    it in as a secret with matching volumes and mounts.
    """
    config_kustmoization_yaml = Path("./config/base/kustomization.yaml")
    parser = Parser(config_kustmoization_yaml, "Kustomization")
    list_of_config_resource_files = parser["resources"]
    # gke needs the extra cluster-role binding resource; drop it elsewhere.
    if self.settings.get("DEPLOYMENT_ARCH") == "gke":
        if "cluster-role-bindings.yaml" not in list_of_config_resource_files:
            list_of_config_resource_files.append("cluster-role-bindings.yaml")
    else:
        if "cluster-role-bindings.yaml" in list_of_config_resource_files:
            list_of_config_resource_files.remove("cluster-role-bindings.yaml")
    if self.settings.get("USE_ISTIO") == "Y":
        if "service.yaml" not in list_of_config_resource_files:
            list_of_config_resource_files.append("service.yaml")
        # Tell the istio sidecar to quit once the one-shot load job exits.
        jobs_parser = Parser("./config/base/jobs.yaml", "Job")
        jobs_parser["spec"]["template"]["spec"]["containers"][0]["command"] = \
            ["tini", "-g", "--", "/bin/sh", "-c", "\n/app/scripts/entrypoint.sh load\n"
                                                  "curl -X POST http://localhost:15020/quitquitquit"]
        jobs_parser.dump_it()
    parser["resources"] = list_of_config_resource_files
    # if gluu crt and key were provided by user
    custom_gluu_crt = Path("./gluu.crt")
    custom_gluu_key = Path("./gluu.key")
    if custom_gluu_crt.exists() and custom_gluu_key.exists():
        # Bug fix: read via Path.read_text() so the file handles are closed
        # (the previous bare open(...).read() calls leaked them).
        cert = custom_gluu_crt.read_text()
        key = custom_gluu_key.read_text()
        if not check_cert_with_private_key(cert, key):
            logger.error("Custom crt and key were provided but were incorrect")
            raise SystemExit(1)
        shutil.copy(custom_gluu_crt, Path("./config/base"))
        shutil.copy(custom_gluu_key, Path("./config/base"))
        parser.update({"secretGenerator": [{"name": "gluu-cert-key-override", "files": ["gluu.crt", "gluu.key"]}]})
        jobs_parser = Parser("./config/base/jobs.yaml", "Job")
        # Add volume mount
        jobs_parser["spec"]["template"]["spec"]["volumes"].append({"name": "gluu-cert-override", "secret": {
            "secretName": "gluu-cert-key-override", "items": [{"key": "gluu.crt", "path": "gluu_https.crt"}]}})
        jobs_parser["spec"]["template"]["spec"]["volumes"].append({"name": "gluu-key-override", "secret": {
            "secretName": "gluu-cert-key-override", "items": [{"key": "gluu.key", "path": "gluu_https.key"}]}})
        # Add volumeMounts
        jobs_parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(
            {"name": "gluu-cert-override", "mountPath": "/etc/certs/gluu_https.crt", "subPath": "gluu_https.crt"})
        jobs_parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(
            {"name": "gluu-key-override", "mountPath": "/etc/certs/gluu_https.key", "subPath": "gluu_https.key"})
        jobs_parser.dump_it()
    parser.dump_it()
def setup_jackrabbit_volumes(self, app_file, type):
    """Ensure the jackrabbit admin-password secret volume and mount exist.

    :param app_file: yaml file to patch in place.
    :param type: kubernetes resource kind to look up in the file.
    """
    parser = Parser(app_file, type)
    pod_spec = parser["spec"]["template"]["spec"]
    container = pod_spec["containers"][0]
    # NOTE(review): the probe dict lacks subPath while the appended mount
    # has it, so this membership test never matches the mount added below —
    # confirm whether repeat runs are expected to append duplicates.
    probe = {"mountPath": "/etc/gluu/conf/jackrabbit_admin_password",
             "name": "gluu-jackrabbit-admin-pass"}
    if probe not in container["volumeMounts"]:
        logger.info("Adding jackrabbbit admin pass secret volume and volume mount to {}.".format(app_file))
        container["volumeMounts"].append(
            {"mountPath": "/etc/gluu/conf/jackrabbit_admin_password",
             "name": "gluu-jackrabbit-admin-pass",
             "subPath": "jackrabbit_admin_password"})
        pod_spec["volumes"].append(
            {"name": "gluu-jackrabbit-admin-pass",
             "secret": {"secretName": "gluu-jackrabbit-admin-pass"}})
    parser.dump_it()
def adjust_istio_virtual_services_destination_rules(self, app, virtual_service):
    """Point *virtual_service* and *app*'s mTLS destination rule at the
    app's cluster-local service address, when istio ingress is enabled."""
    internal_host = app + "." + self.settings.get("GLUU_NAMESPACE") + "." + "svc.cluster.local"
    mtls_rule_name = "gluu-" + app + "-mtls"
    if self.settings.get("USE_ISTIO_INGRESS") != "Y":
        return
    # Virtual service: external host plus per-route destination host.
    vs_parser = Parser(Path("./gluu-istio/base/gluu-virtual-services.yaml"),
                       "VirtualService", virtual_service)
    vs_parser["spec"]["hosts"] = [self.settings.get("GLUU_FQDN")]
    for idx in range(len(vs_parser["spec"]["http"])):
        vs_parser["spec"]["http"][idx]["route"][0]["destination"]["host"] = internal_host
    vs_parser.dump_it()
    # Destination rule: single host field.
    dr_parser = Parser(Path("./gluu-istio/base/gluu-destination-rules.yaml"),
                       "DestinationRule", mtls_rule_name)
    dr_parser["spec"]["host"] = internal_host
    dr_parser.dump_it()
def parse_configmap(self, app_file):
    """Populate the gluu ConfigMap in *app_file* from installer settings.

    The config app's file holds several ConfigMaps, so its map is looked
    up by name; other apps have a single ConfigMap document.
    """
    if "config" in app_file:
        configmap_parser = Parser(app_file, "ConfigMap", "gluu-config-cm")
    else:
        configmap_parser = Parser(app_file, "ConfigMap")
    # No load-balancer address is needed when the FQDN resolves directly.
    if self.settings.get("IS_GLUU_FQDN_REGISTERED") == "Y" or \
            self.settings.get("DEPLOYMENT_ARCH") in ("microk8s", "minikube", "gke", "aks", "do"):
        try:
            del configmap_parser["data"]["LB_ADDR"]
        except KeyError:
            logger.info("Key not deleted as it does not exist inside yaml.")
    # Core settings shared by every app.
    configmap_parser["data"]["GLUU_CACHE_TYPE"] = self.settings.get("GLUU_CACHE_TYPE")
    configmap_parser["data"]["GLUU_CONFIG_KUBERNETES_NAMESPACE"] = self.settings.get("GLUU_NAMESPACE")
    configmap_parser["data"]["GLUU_SECRET_KUBERNETES_NAMESPACE"] = self.settings.get("GLUU_NAMESPACE")
    configmap_parser["data"]["GLUU_PERSISTENCE_LDAP_MAPPING"] = self.settings.get("HYBRID_LDAP_HELD_DATA")
    configmap_parser["data"]["GLUU_PERSISTENCE_TYPE"] = self.settings.get("PERSISTENCE_BACKEND")
    configmap_parser["data"]["DOMAIN"] = self.settings.get("GLUU_FQDN")
    configmap_parser["data"]["GLUU_COUCHBASE_URL"] = self.settings.get("COUCHBASE_URL")
    configmap_parser["data"]["GLUU_COUCHBASE_USER"] = self.settings.get("COUCHBASE_USER")
    configmap_parser["data"]["GLUU_COUCHBASE_BUCKET_PREFIX"] = self.settings.get("COUCHBASE_BUCKET_PREFIX")
    configmap_parser["data"]["GLUU_COUCHBASE_INDEX_NUM_REPLICA"] = self.settings.get("COUCHBASE_INDEX_NUM_REPLICA")
    configmap_parser["data"]["GLUU_COUCHBASE_SUPERUSER"] = self.settings.get("COUCHBASE_SUPERUSER")
    configmap_parser["data"]["GLUU_JACKRABBIT_URL"] = self.settings.get("JACKRABBIT_URL")
    # Redis cache settings only apply when the cache backend is REDIS.
    if self.settings.get("GLUU_CACHE_TYPE") == "REDIS":
        configmap_parser["data"]["GLUU_REDIS_URL"] = self.settings.get("REDIS_URL")
        configmap_parser["data"]["GLUU_REDIS_TYPE"] = self.settings.get("REDIS_TYPE")
        configmap_parser["data"]["GLUU_REDIS_USE_SSL"] = self.settings.get("REDIS_USE_SSL")
        configmap_parser["data"]["GLUU_REDIS_SSL_TRUSTSTORE"] = self.settings.get("REDIS_SSL_TRUSTSTORE")
        configmap_parser["data"]["GLUU_REDIS_SENTINEL_GROUP"] = self.settings.get("REDIS_SENTINEL_GROUP")
    # Optional-feature flags (string "true"/"false" values from settings).
    configmap_parser["data"]["GLUU_CASA_ENABLED"] = self.settings.get("ENABLE_CASA_BOOLEAN")
    configmap_parser["data"]["GLUU_OXTRUST_API_ENABLED"] = self.settings.get("ENABLE_OXTRUST_API_BOOLEAN")
    configmap_parser["data"]["GLUU_OXTRUST_API_TEST_MODE"] = self.settings.get("ENABLE_OXTRUST_TEST_MODE_BOOLEAN")
    configmap_parser["data"]["GLUU_PASSPORT_ENABLED"] = self.settings.get("ENABLE_OXPASSPORT_BOOLEAN")
    configmap_parser["data"]["GLUU_SAML_ENABLED"] = self.settings.get("ENABLE_SAML_BOOLEAN")
    configmap_parser["data"]["GLUU_JACKRABBIT_ADMIN_ID"] = self.settings.get("JACKRABBIT_ADMIN_ID")
    # Jackrabbit clustering backs onto postgres.
    if self.settings.get("JACKRABBIT_CLUSTER") == "Y":
        configmap_parser["data"]["GLUU_JACKRABBIT_CLUSTER"] = "true"
        configmap_parser["data"]["GLUU_JACKRABBIT_POSTGRES_USER"] = self.settings.get("JACKRABBIT_PG_USER")
        configmap_parser["data"]["GLUU_JACKRABBIT_POSTGRES_PASSWORD_FILE"] = "/etc/gluu/conf/postgres_password"
        configmap_parser["data"]["GLUU_JACKRABBIT_POSTGRES_HOST"] = self.settings.get("POSTGRES_URL")
        configmap_parser["data"]["GLUU_JACKRABBIT_POSTGRES_PORT"] = "5432"
        configmap_parser["data"]["GLUU_JACKRABBIT_POSTGRES_DATABASE"] = self.settings.get("JACKRABBIT_DATABASE")
    # oxAuth
    if self.settings.get("ENABLE_CASA_BOOLEAN") == "true":
        configmap_parser["data"]["GLUU_SYNC_CASA_MANIFESTS"] = "true"
    # oxTrust
    if self.settings.get("ENABLE_OXSHIBBOLETH") == "Y":
        configmap_parser["data"]["GLUU_SYNC_SHIB_MANIFESTS"] = "true"
    # oxdserver
    if self.settings.get("ENABLE_OXD") == "Y":
        configmap_parser["data"]["GLUU_OXD_APPLICATION_CERT_CN"] = self.settings.get("OXD_APPLICATION_KEYSTORE_CN")
        configmap_parser["data"]["GLUU_OXD_ADMIN_CERT_CN"] = self.settings.get("OXD_ADMIN_KEYSTORE_CN")
        # casa
        configmap_parser["data"]["GLUU_OXD_SERVER_URL"] = self.settings.get("OXD_APPLICATION_KEYSTORE_CN") + ":8443"
    configmap_parser.dump_it()
    def kustomize_it(self):
        """Render all Kubernetes manifests for every Gluu app.

        For each app in ``self.all_apps`` this runs ``kubectl kustomize`` on
        the app's ``base`` overlay and then post-processes the generated YAML
        according to the current settings (Istio, persistence backend,
        Jackrabbit, image names/tags, ...).

        NOTE(review): each ``if app == ...`` branch only fires for its own app
        name; the branch order mirrors ``self.all_apps`` but does not depend
        on it. Several branches also require an ``ENABLE_*`` flag to be "Y".
        """
        logger.info("Building manifests...")
        self.setup_config_kustomization()
        for app in self.all_apps:
            app_filename = app + ".yaml"
            kustomization_file = "./{}/base/kustomization.yaml".format(app)
            # Rendered manifest destination for this app.
            app_file = str(self.output_yaml_directory.joinpath(app_filename).resolve())
            # Default kustomize command; ldap/jackrabbit/istio branches override it.
            command = self.kubectl + " kustomize ./{}/base".format(app)
            if app == "config":
                self.build_manifest(app, kustomization_file, command,
                                    "CONFIG_IMAGE_NAME", "CONFIG_IMAGE_TAG", app_file)
                self.parse_configmap(app_file)
            if app == "ldap":
                # Only rendered when LDAP actually backs persistence.
                if self.settings.get("PERSISTENCE_BACKEND") in ("hybrid", "ldap"):
                    command = self.kubectl + " kustomize " "\"" + str(
                        self.ldap_kustomize_yaml_directory.resolve()) + "\""
                    self.build_manifest(app, kustomization_file, command,
                                        "LDAP_IMAGE_NAME", "LDAP_IMAGE_TAG", app_file)
                    self.adjust_ldap_jackrabbit(app_file)
                    self.remove_resources(app_file, "StatefulSet")
            if app == "jackrabbit" and self.settings.get("INSTALL_JACKRABBIT") == "Y":
                command = self.kubectl + " kustomize " "\"" + str(
                    self.jcr_kustomize_yaml_directory.resolve()) + "\""
                self.build_manifest(app, kustomization_file, command,
                                    "JACKRABBIT_IMAGE_NAME", "JACKRABBIT_IMAGE_TAG", app_file)
                self.adjust_ldap_jackrabbit(app_file)
                self.remove_resources(app_file, "StatefulSet")
                self.setup_jackrabbit_volumes(app_file, "StatefulSet")
            if app == "persistence":
                parser = Parser(kustomization_file, "Kustomization")
                list_of_config_resource_files = parser["resources"]
                if self.settings.get("USE_ISTIO") == "Y":
                    # Istio sidecar keeps Jobs alive; the job must POST to
                    # /quitquitquit so the sidecar (and the pod) can exit.
                    if "service.yaml" not in list_of_config_resource_files:
                        list_of_config_resource_files.append("service.yaml")
                    jobs_parser = Parser("./persistence/base/jobs.yaml", "Job")
                    jobs_parser["spec"]["template"]["spec"]["containers"][0]["command"] = \
                        ["tini", "-g", "--", "/bin/sh", "-c", "\n/app/scripts/entrypoint.sh\n"
                         "curl -X POST http://localhost:15020/quitquitquit"]
                    jobs_parser.dump_it()
                parser.dump_it()
                self.build_manifest(app, kustomization_file, command,
                                    "PERSISTENCE_IMAGE_NAME", "PERSISTENCE_IMAGE_TAG", app_file)
                if self.settings.get("PERSISTENCE_BACKEND") == "ldap":
                    # Pure-LDAP persistence needs no couchbase secret mounts.
                    persistence_job_parser = Parser(app_file, "Job")
                    del persistence_job_parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"]
                    del persistence_job_parser["spec"]["template"]["spec"]["volumes"]
                    persistence_job_parser.dump_it()
            if app == "oxauth":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-oxauth")
                self.build_manifest(app, kustomization_file, command,
                                    "OXAUTH_IMAGE_NAME", "OXAUTH_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "Deployment")
                self.setup_jackrabbit_volumes(app_file, "Deployment")
                self.adjust_yamls_for_fqdn_status[app_file] = "Deployment"
            if app == "fido2" and self.settings.get("ENABLE_FIDO2") == "Y":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-fido2-configuration")
                self.build_manifest(app, kustomization_file, command,
                                    "FIDO2_IMAGE_NAME", "FIDO2_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "Deployment")
                self.adjust_yamls_for_fqdn_status[app_file] = "Deployment"
            if app == "scim" and self.settings.get("ENABLE_SCIM") == "Y":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-scim-config")
                self.build_manifest(app, kustomization_file, command,
                                    "SCIM_IMAGE_NAME", "SCIM_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "Deployment")
                self.adjust_yamls_for_fqdn_status[app_file] = "Deployment"
            if app == "oxtrust":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-base")
                self.build_manifest(app, kustomization_file, command,
                                    "OXTRUST_IMAGE_NAME", "OXTRUST_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "StatefulSet")
                self.setup_jackrabbit_volumes(app_file, "StatefulSet")
                self.adjust_yamls_for_fqdn_status[app_file] = "StatefulSet"
            if app == "oxshibboleth" and self.settings.get("ENABLE_OXSHIBBOLETH") == "Y":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-oxshibbioleth")
                self.build_manifest(app, kustomization_file, command,
                                    "OXSHIBBOLETH_IMAGE_NAME", "OXSHIBBOLETH_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "StatefulSet")
                self.setup_jackrabbit_volumes(app_file, "StatefulSet")
                self.adjust_yamls_for_fqdn_status[app_file] = "StatefulSet"
            if app == "oxpassport" and self.settings.get("ENABLE_OXPASSPORT") == "Y":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-passport")
                self.build_manifest(app, kustomization_file, command,
                                    "OXPASSPORT_IMAGE_NAME", "OXPASSPORT_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "Deployment")
                self.adjust_yamls_for_fqdn_status[app_file] = "Deployment"
            if app == "oxauth-key-rotation" and self.settings.get("ENABLE_OXAUTH_KEY_ROTATE") == "Y":
                parser = Parser(kustomization_file, "Kustomization")
                list_of_config_resource_files = parser["resources"]
                cron_job_parser = Parser("./oxauth-key-rotation/base/cronjobs.yaml", "CronJob")
                # Rotation cadence follows the configured key lifetime (hours).
                cron_job_parser["spec"]["schedule"] = "0 */{} * * *".format(self.settings.get("OXAUTH_KEYS_LIFE"))
                cron_job_parser["spec"]["jobTemplate"]["spec"]["template"]["spec"]["containers"][0]["args"] = \
                    ["patch", "oxauth", "--opts", "interval:{}".format(self.settings.get("OXAUTH_KEYS_LIFE"))]
                if self.settings.get("PERSISTENCE_BACKEND") in ("hybrid", "couchbase"):
                    # Couchbase credentials/cert must be mounted into the cron job.
                    cron_job_parser["spec"]["jobTemplate"]["spec"]["template"]["spec"]["volumes"] = \
                        [{"name": "cb-pass", "secret": {"secretName": "cb-pass"}},
                         {"name": "cb-crt", "secret": {"secretName": "cb-crt"}}]
                    cron_job_parser["spec"]["jobTemplate"]["spec"]["template"]["spec"]["containers"][0]["volumeMounts"] \
                        = [{"name": "cb-pass", "mountPath":
                            "/etc/gluu/conf/couchbase_password",
                            "subPath": "couchbase_password"},
                           {"name": "cb-crt", "mountPath":
                            "/etc/certs/couchbase.crt",
                            "subPath": "couchbase.crt"}]
                if self.settings.get("USE_ISTIO") == "Y":
                    if "service.yaml" not in list_of_config_resource_files:
                        list_of_config_resource_files.append("service.yaml")
                    # With Istio, move args into an explicit command that also
                    # terminates the sidecar via /quitquitquit.
                    cron_job_parser["spec"]["jobTemplate"]["spec"]["template"]["spec"]["containers"][0]["command"] = \
                        ["tini", "-g", "--", "/bin/sh", "-c", "\n/app/scripts/entrypoint.sh patch oxauth --opts "
                                                              "interval:{}\ncurl -X POST "
                                                              "http://localhost:15020/quitquitquit"
                         .format(self.settings.get("OXAUTH_KEYS_LIFE"))]
                    try:
                        del cron_job_parser["spec"]["jobTemplate"]["spec"]["template"]["spec"]["containers"][0]["args"]
                    except KeyError:
                        logger.warning("Key arg not found")
                cron_job_parser.dump_it()
                parser.dump_it()
                self.build_manifest(app, kustomization_file, command,
                                    "CERT_MANAGER_IMAGE_NAME", "CERT_MANAGER_IMAGE_TAG", app_file)
                self.remove_resources(app_file, "CronJob")
            if app == "cr-rotate" and self.settings.get("ENABLE_CACHE_REFRESH") == "Y":
                logger.info("Building {} manifests".format(app))
                self.update_kustomization_yaml(kustomization_yaml=kustomization_file,
                                               namespace=self.settings.get("GLUU_NAMESPACE"),
                                               image_name_key="CACHE_REFRESH_ROTATE_IMAGE_NAME",
                                               image_tag_key="CACHE_REFRESH_ROTATE_IMAGE_TAG")
                exec_cmd(command, output_file=app_file)
                self.remove_resources(app_file, "DaemonSet")
                self.adjust_yamls_for_fqdn_status[app_file] = "DaemonSet"
            if app == "oxd-server" and self.settings.get("ENABLE_OXD") == "Y":
                logger.info("Building {} manifests".format(app))
                self.update_kustomization_yaml(kustomization_yaml=kustomization_file,
                                               namespace=self.settings.get("GLUU_NAMESPACE"),
                                               image_name_key="OXD_IMAGE_NAME",
                                               image_tag_key="OXD_IMAGE_TAG")
                exec_cmd(command, output_file=app_file)
                self.remove_resources(app_file, "Deployment")
                # Service name must match the keystore CN so oxd TLS validates.
                oxd_server_service_parser = Parser(app_file, "Service")
                oxd_server_service_parser["metadata"]["name"] = self.settings.get("OXD_APPLICATION_KEYSTORE_CN")
                oxd_server_service_parser.dump_it()
                self.adjust_yamls_for_fqdn_status[app_file] = "Deployment"
            if app == "casa" and self.settings.get("ENABLE_CASA") == "Y":
                self.adjust_istio_virtual_services_destination_rules(app, "gluu-istio-casa")
                logger.info("Building {} manifests".format(app))
                self.update_kustomization_yaml(kustomization_yaml=kustomization_file,
                                               namespace=self.settings.get("GLUU_NAMESPACE"),
                                               image_name_key="CASA_IMAGE_NAME",
                                               image_tag_key="CASA_IMAGE_TAG")
                exec_cmd(command, output_file=app_file)
                self.remove_resources(app_file, "Deployment")
                self.setup_jackrabbit_volumes(app_file, "Deployment")
                self.adjust_yamls_for_fqdn_status[app_file] = "Deployment"
            if app == "update-lb-ip" and self.settings.get("IS_GLUU_FQDN_REGISTERED") == "N":
                # Only needed when the FQDN is not registered in DNS.
                if self.settings.get("DEPLOYMENT_ARCH") in ("eks", "local"):
                    logger.info("Building {} manifests".format(app))
                    parser = Parser(kustomization_file, "Kustomization")
                    parser["namespace"] = self.settings.get("GLUU_NAMESPACE")
                    parser.dump_it()
                    exec_cmd(command, output_file=app_file)
            if self.settings.get("USE_ISTIO_INGRESS") == "Y" and app == "gluu-istio-ingress":
                command = self.kubectl + " kustomize ./gluu-istio/base"
                exec_cmd(command, output_file=app_file)
def build_manifest(self, app, kustomization_file, command, image_name_key, image_tag_key, app_file):
logger.info("Building {} manifests".format(app))
self.update_kustomization_yaml(kustomization_yaml=kustomization_file,
namespace=self.settings.get("GLUU_NAMESPACE"),
image_name_key=image_name_key,
image_tag_key=image_tag_key)
exec_cmd(command, output_file=app_file)
    def kustomize_gluu_upgrade(self):
        """Render the Gluu upgrade Job manifest and fill in its ConfigMap with
        the current persistence/couchbase settings plus source/target versions.
        """
        self.update_kustomization_yaml(kustomization_yaml="upgrade/base/kustomization.yaml",
                                       namespace=self.settings.get("GLUU_NAMESPACE"),
                                       image_name_key="UPGRADE_IMAGE_NAME",
                                       image_tag_key="UPGRADE_IMAGE_TAG")
        command = self.kubectl + " kustomize upgrade/base"
        exec_cmd(command, output_file=self.gluu_upgrade_yaml)
        # Propagate the deployment's settings into the upgrade ConfigMap.
        upgrade_cm_parser = Parser(self.gluu_upgrade_yaml, "ConfigMap")
        upgrade_cm_parser["data"]["DOMAIN"] = self.settings.get("GLUU_FQDN")
        upgrade_cm_parser["data"]["GLUU_CACHE_TYPE"] = self.settings.get("GLUU_CACHE_TYPE")
        upgrade_cm_parser["data"]["GLUU_COUCHBASE_URL"] = self.settings.get("COUCHBASE_URL")
        upgrade_cm_parser["data"]["GLUU_COUCHBASE_USER"] = self.settings.get("COUCHBASE_USER")
        upgrade_cm_parser["data"]["GLUU_COUCHBASE_SUPERUSER"] = self.settings.get("COUCHBASE_SUPERUSER")
        upgrade_cm_parser["data"]["GLUU_PERSISTENCE_LDAP_MAPPING"] = self.settings.get("HYBRID_LDAP_HELD_DATA")
        upgrade_cm_parser["data"]["GLUU_PERSISTENCE_TYPE"] = self.settings.get("PERSISTENCE_BACKEND")
        upgrade_cm_parser["data"]["GLUU_CONFIG_KUBERNETES_NAMESPACE"] = self.settings.get("GLUU_NAMESPACE")
        upgrade_cm_parser["data"]["GLUU_SECRET_KUBERNETES_NAMESPACE"] = self.settings.get("GLUU_NAMESPACE")
        upgrade_cm_parser["data"]["GLUU_COUCHBASE_BUCKET_PREFIX"] = self.settings.get("COUCHBASE_BUCKET_PREFIX")
        upgrade_cm_parser.dump_it()
        # The upgrade container takes explicit source/target version args.
        upgrade_job_parser = Parser(self.gluu_upgrade_yaml, "Job")
        upgrade_job_parser["spec"]["template"]["spec"]["containers"][0]["args"] = \
            ["--source", self.settings.get("GLUU_VERSION"),
             "--target", self.settings.get("GLUU_UPGRADE_TARGET_VERSION")]
        upgrade_job_parser.dump_it()
        # Mark the rendered file for later FQDN-dependent adjustment.
        self.adjust_yamls_for_fqdn_status[self.gluu_upgrade_yaml] = "Job"
def prepare_alb(self):
services = [self.oxauth_yaml, self.oxtrust_yaml, self.casa_yaml,
self.oxpassport_yaml, self.oxshibboleth_yaml, self.fido2_yaml, self.scim_yaml]
for service in services:
if Path(service).is_file():
service_parser = Parser(service, "Service")
service_parser["spec"].update({"type": "NodePort"})
service_parser["spec"]["ports"][0].update({"protocol": "TCP"})
service_parser["spec"]["ports"][0].update({"targetPort": 8080})
if service == self.oxpassport_yaml:
service_parser["spec"]["ports"][0]["targetPort"] = 8090
service_parser.dump_it()
ingress_parser = Parser("./alb/ingress.yaml", "Ingress")
ingress_parser["spec"]["rules"][0]["host"] = self.settings.get("GLUU_FQDN")
ingress_parser["metadata"]["annotations"]["alb.ingress.kubernetes.io/certificate-arn"] = \
self.settings.get("ARN_AWS_IAM")
if not self.settings.get("ARN_AWS_IAM"):
del ingress_parser["metadata"]["annotations"]["alb.ingress.kubernetes.io/certificate-arn"]
for path in ingress_parser["spec"]["rules"][0]["http"]["paths"]:
service_name = path["backend"]["serviceName"]
if self.settings.get("ENABLE_CASA") != "Y" and service_name == "casa":
path_index = ingress_parser["spec"]["rules"][0]["http"]["paths"].index(path)
del ingress_parser["spec"]["rules"][0]["http"]["paths"][path_index]
if self.settings.get("ENABLE_OXSHIBBOLETH") != "Y" and service_name == "oxshibboleth":
path_index = ingress_parser["spec"]["rules"][0]["http"]["paths"].index(path)
del ingress_parser["spec"]["rules"][0]["http"]["paths"][path_index]
if self.settings.get("ENABLE_OXPASSPORT") != "Y" and service_name == "oxpassport":
path_index = ingress_parser["spec"]["rules"][0]["http"]["paths"].index(path)
del ingress_parser["spec"]["rules"][0]["http"]["paths"][path_index]
ingress_parser.dump_it()
def update_kustomization_yaml(self, kustomization_yaml, namespace, image_name_key, image_tag_key):
parser = Parser(kustomization_yaml, "Kustomization")
parser["namespace"] = namespace
parser["images"][0]["name"] = self.settings.get(image_name_key)
parser["images"][0]["newTag"] = self.settings.get(image_tag_key)
parser.dump_it()
    def setup_tls(self, namespace):
        """Wait for the "gluu" secret (written by the config job) and create a
        kubernetes.io/tls secret "tls-certificate" in *namespace* from its
        ssl_cert/ssl_key entries.
        """
        starting_time = time.time()
        while True:
            try:
                ssl_cert = self.kubernetes.read_namespaced_secret("gluu",
                                                                  self.settings.get("GLUU_NAMESPACE")).data["ssl_cert"]
                ssl_key = self.kubernetes.read_namespaced_secret("gluu",
                                                                 self.settings.get("GLUU_NAMESPACE")).data["ssl_key"]
                break
            # NOTE(review): `(KeyError, Exception)` is redundant — Exception
            # already subsumes KeyError, so this catches everything.
            except (KeyError, Exception):
                logger.info("Waiting for Gluu secret...")
                time.sleep(10)
                end_time = time.time()
                running_time = end_time - starting_time
                if running_time > 600:
                    logger.error("Could not read Gluu secret. Please check config job pod logs.")
                    # NOTE(review): only a foreign namespace aborts after the
                    # 10-minute timeout; for the Gluu namespace itself this
                    # loop keeps polling forever — confirm that is intended.
                    if namespace != self.settings.get("GLUU_NAMESPACE"):
                        raise SystemExit(1)
        self.kubernetes.patch_or_create_namespaced_secret(name="tls-certificate",
                                                          namespace=namespace,
                                                          literal="tls.crt",
                                                          value_of_literal=ssl_cert,
                                                          secret_type="kubernetes.io/tls",
                                                          second_literal="tls.key",
                                                          value_of_second_literal=ssl_key)
    def deploy_alb(self):
        """Create the ALB ingress objects and record the load balancer DNS
        name in the LB_ADD setting.

        If the FQDN is not registered, the operator may paste the ALB DNS
        name; an empty answer falls back to polling the ingress status.
        """
        shutil.copy(Path("./alb/ingress.yaml"), self.output_yaml_directory.joinpath("ingress.yaml"))
        self.kubernetes.create_objects_from_dict(self.output_yaml_directory.joinpath("ingress.yaml"),
                                                 self.settings.get("GLUU_NAMESPACE"))
        if self.settings.get("IS_GLUU_FQDN_REGISTERED") != "Y":
            prompt = input("Please input the DNS of the Application load balancer created found on AWS UI: ")
            lb_hostname = prompt
            while True:
                try:
                    # A non-empty answer (manual or from a previous poll) ends the loop.
                    if lb_hostname:
                        break
                    lb_hostname = self.kubernetes.read_namespaced_ingress(
                        name="gluu", namespace="gluu").status.load_balancer.ingress[0].hostname
                except TypeError:
                    # status.load_balancer.ingress is None until AWS assigns an address.
                    logger.info("Waiting for loadbalancer address..")
                    time.sleep(10)
            self.settings.set("LB_ADD", lb_hostname)
def adjust_ldap_jackrabbit(self, app_file):
statefulset_parser = Parser(app_file, "StatefulSet")
statefulset_parser["spec"]["volumeClaimTemplates"][0]["spec"]["resources"]["requests"]["storage"] \
= self.settings.get("JACKRABBIT_STORAGE_SIZE")
if "ldap" in app_file:
statefulset_parser["spec"]["volumeClaimTemplates"][0]["spec"]["resources"]["requests"]["storage"] \
= self.settings.get("LDAP_STORAGE_SIZE")
statefulset_parser.dump_it()
if self.settings.get("APP_VOLUME_TYPE") not in (7, 12, 17, 22, 26):
pv_parser = Parser(app_file, "PersistentVolume")
pv_parser["spec"]["capacity"]["storage"] = self.settings.get("JACKRABBIT_STORAGE_SIZE")
if "ldap" in app_file:
pv_parser["spec"]["capacity"]["storage"] = self.settings.get("LDAP_STORAGE_SIZE")
if self.settings.get("APP_VOLUME_TYPE") == 11:
pv_parser["spec"]["hostPath"]["path"] = self.settings.get("GOOGLE_NODE_HOME_DIR") + "/opendj"
if "ldap" in app_file:
pv_parser["spec"]["hostPath"]["path"] = self.settings.get("GOOGLE_NODE_HOME_DIR") + "/jackrabbit"
pv_parser.dump_it()
def remove_resources(self, app_file, kind):
if self.settings.get("DEPLOYMENT_ARCH") in ("microk8s", "minikube") \
or self.settings.get("TEST_ENVIRONMENT") == "Y":
parser = Parser(app_file, kind)
try:
logger.info("Removing resources limits and requests from {}".format(app_file))
del parser["spec"]["template"]["spec"]["containers"][0]["resources"]
except KeyError:
logger.info("Key not deleted as it does not exist inside yaml.")
parser.dump_it()
def set_lb_address(self):
"""
Sets LB address in configMap
:return:
"""
if self.settings.get("DEPLOYMENT_ARCH") in ("eks", "local"):
cm_parser = Parser(self.config_yaml, "ConfigMap", "gluu-config-cm")
cm_parser["data"]["LB_ADDR"] = self.settings.get("LB_ADD")
cm_parser.dump_it()
    def wait_for_nginx_add(self):
        """Poll the ingress-nginx Service until it has an external address and
        store it in the settings (LB_ADD for eks/local, HOST_EXT_IP otherwise).
        """
        hostname_ip = None
        while True:
            try:
                # A previously captured address ends the loop.
                if hostname_ip:
                    break
                if self.settings.get("DEPLOYMENT_ARCH") == "eks":
                    hostname_ip = self.kubernetes.read_namespaced_service(
                        name="ingress-nginx", namespace="ingress-nginx").status.load_balancer.ingress[0].hostname
                    self.settings.set("LB_ADD", hostname_ip)
                    if self.settings.get("AWS_LB_TYPE") == "nlb":
                        # For an NLB, additionally wait until DNS resolves.
                        try:
                            ip_static = socket.gethostbyname(str(hostname_ip))
                            if ip_static:
                                break
                        except socket.gaierror:
                            logger.info("Address has not recieved an ip yet.")
                elif self.settings.get("DEPLOYMENT_ARCH") == "local":
                    # Local deployments use the in-cluster service DNS name directly.
                    self.settings.set("LB_ADD", "ingress-nginx.ingress-nginx.svc.cluster.local")
                    break
                else:
                    hostname_ip = self.kubernetes.read_namespaced_service(
                        name="ingress-nginx", namespace="ingress-nginx").status.load_balancer.ingress[0].ip
                    self.settings.set("HOST_EXT_IP", hostname_ip)
            except (TypeError, AttributeError):
                # load_balancer.ingress is None/empty until an address is assigned.
                logger.info("Waiting for address..")
                time.sleep(10)
    def deploy_nginx(self):
        """Deploy the ingress-nginx controller for the current architecture,
        wait for its external address, and create the Gluu ingress objects.

        eks: NLB or L7 ELB service (optionally annotated with an ACM cert ARN);
        gke/aks/do/local: the generic cloud service manifest.
        """
        copy(Path("./nginx"), self.output_yaml_directory.joinpath("nginx"))
        self.kubernetes.create_objects_from_dict(self.output_yaml_directory.joinpath("nginx/mandatory.yaml"))
        if self.settings.get("DEPLOYMENT_ARCH") == "eks":
            if self.settings.get("AWS_LB_TYPE") == "nlb":
                if self.settings.get("USE_ARN") == "Y":
                    # Annotate the NLB service with the cert ARN and TLS policy.
                    svc_nlb_yaml = self.output_yaml_directory.joinpath("nginx/nlb-service.yaml")
                    svc_nlb_yaml_parser = Parser(svc_nlb_yaml, "Service")
                    svc_nlb_yaml_parser["metadata"]["annotations"].update(
                        {"service.beta.kubernetes.io/aws-load-balancer-ssl-cert": self.settings.get("ARN_AWS_IAM")})
                    svc_nlb_yaml_parser["metadata"]["annotations"].update(
                        {"service.beta.kubernetes.io/aws-load-balancer-cross-zone-load-balancing-enabled": '"true"'})
                    svc_nlb_yaml_parser["metadata"]["annotations"].update({
                        "service.beta.kubernetes.io/aws-load-balancer-ssl-negotiation-policy":
                            "ELBSecurityPolicy-TLS-1-1-2017-01"})
                    svc_nlb_yaml_parser["metadata"]["annotations"].update(
                        {"service.beta.kubernetes.io/aws-load-balancer-backend-protocol": "http"})
                    svc_nlb_yaml_parser["metadata"]["annotations"].update(
                        {"service.beta.kubernetes.io/aws-load-balancer-ssl-ports": "https"})
                    svc_nlb_yaml_parser.dump_it()
                self.kubernetes.create_objects_from_dict(self.output_yaml_directory.joinpath("nginx/nlb-service.yaml"))
            else:
                if self.settings.get("USE_ARN") == "Y":
                    # L7 ELB terminating TLS with the configured cert ARN.
                    svc_l7_yaml = self.output_yaml_directory.joinpath("nginx/service-l7.yaml")
                    svc_l7_yaml_parser = Parser(svc_l7_yaml, "Service")
                    svc_l7_yaml_parser["metadata"]["annotations"][
                        "service.beta.kubernetes.io/aws-load-balancer-ssl-cert"] = self.settings.get("ARN_AWS_IAM")
                    svc_l7_yaml_parser.dump_it()
                    self.kubernetes.create_objects_from_dict(svc_l7_yaml)
                    # Replace the default configmap with the L7 patch.
                    self.kubernetes.delete_config_map_using_name("nginx-configuration", "ingress-nginx")
                    time.sleep(5)
                    self.kubernetes.create_objects_from_dict(self.output_yaml_directory.
                                                             joinpath("nginx/patch-configmap-l7.yaml"))
                else:
                    # No ARN: plain L4 service + L4 configmap patch.
                    self.kubernetes.delete_config_map_using_name("nginx-configuration", "ingress-nginx")
                    time.sleep(5)
                    self.kubernetes.create_objects_from_dict(self.output_yaml_directory.
                                                             joinpath("nginx/service-l4.yaml"))
                    self.kubernetes.create_objects_from_dict(self.output_yaml_directory.
                                                             joinpath("nginx/patch-configmap-l4.yaml"))
            self.wait_for_nginx_add()
        if self.settings.get("DEPLOYMENT_ARCH") in ("gke", "aks", "do", "local"):
            self.kubernetes.create_objects_from_dict(self.output_yaml_directory.joinpath("nginx/cloud-generic.yaml"))
            self.wait_for_nginx_add()
        if self.settings.get("DEPLOYMENT_ARCH") in ("eks", "local"):
            # NOTE(review): this waits again even though the eks/local paths
            # above already waited — presumably harmless since the address is
            # cached, but confirm.
            self.wait_for_nginx_add()
            self.set_lb_address()
        self.update_ingress_fqdn()
        self.kubernetes.create_objects_from_dict(self.ingress_file, self.settings.get("GLUU_NAMESPACE"))
def update_ingress_fqdn(self):
ingress_name_list = ["gluu-ingress-base", "gluu-ingress-openid-configuration",
"gluu-ingress-uma2-configuration", "gluu-ingress-webfinger",
"gluu-ingress-simple-web-discovery", "gluu-ingress-scim-configuration",
"gluu-ingress-fido-u2f-configuration", "gluu-ingress", "gluu-ingress-stateful",
"gluu-casa", "gluu-ingress-fido2-configuration", "gluu-ingress-scim"]
for ingress_name in ingress_name_list:
parser = Parser(self.ingress_file, "Ingress", ingress_name)
parser["spec"]["tls"][0]["hosts"][0] = self.settings.get("GLUU_FQDN")
parser["spec"]["rules"][0]["host"] = self.settings.get("GLUU_FQDN")
parser.dump_it()
def deploy_config(self):
self.kubernetes.create_objects_from_dict(self.config_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=config-init-load",
self.timeout)
def deploy_ldap(self):
self.kubernetes.create_objects_from_dict(self.ldap_yaml)
logger.info("Deploying LDAP.Please wait..")
time.sleep(10)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=opendj", self.timeout)
    def def_jackrabbit_secret(self):
        """Create/patch the Jackrabbit secrets (admin password and, for a
        clustered setup, the postgres password) and mount the postgres
        password into the Jackrabbit StatefulSet.
        """
        if self.settings.get("JACKRABBIT_CLUSTER") == "Y":
            # Secret values must be base64-encoded for the Kubernetes API.
            encoded_jackrabbit_pg_pass_bytes = base64.b64encode(
                self.settings.get("JACKRABBIT_PG_PASSWORD").encode("utf-8"))
            encoded_jackrabbit_pg_pass_string = str(encoded_jackrabbit_pg_pass_bytes, "utf-8")
            self.kubernetes.patch_or_create_namespaced_secret(name="gluu-jackrabbit-postgres-pass",
                                                              namespace=self.settings.get("GLUU_NAMESPACE"),
                                                              literal="postgres_password",
                                                              value_of_literal=encoded_jackrabbit_pg_pass_string)
            # Mount the secret at the path GLUU_JACKRABBIT_POSTGRES_PASSWORD_FILE
            # points to in the configmap.
            jackrabbit_parser = Parser(self.jackrabbit_yaml, "StatefulSet")
            jackrabbit_parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(
                {"mountPath": "/etc/gluu/conf/postgres_password",
                 "name": "jackrabbit-postgres-pass", "subPath": "postgres_password"})
            jackrabbit_parser["spec"]["template"]["spec"]["volumes"].append(
                {"name": "jackrabbit-postgres-pass", "secret": {"secretName": "gluu-jackrabbit-postgres-pass"}})
            jackrabbit_parser.dump_it()
        encoded_jackrabbit_admin_pass_bytes = base64.b64encode(
            self.settings.get("JACKRABBIT_ADMIN_PASSWORD").encode("utf-8"))
        encoded_jackrabbit_admin_pass_string = str(encoded_jackrabbit_admin_pass_bytes, "utf-8")
        self.kubernetes.patch_or_create_namespaced_secret(name="gluu-jackrabbit-admin-pass",
                                                          namespace=self.settings.get("GLUU_NAMESPACE"),
                                                          literal="jackrabbit_admin_password",
                                                          value_of_literal=encoded_jackrabbit_admin_pass_string)
def deploy_jackrabbit(self):
self.def_jackrabbit_secret()
self.kubernetes.create_objects_from_dict(self.jackrabbit_yaml)
logger.info("Deploying Jackrabbit content repository.Please wait..")
time.sleep(10)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=jackrabbit", self.timeout)
def deploy_persistence(self):
self.kubernetes.create_objects_from_dict(self.persistence_yaml)
logger.info("Trying to import ldifs...")
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=persistence-load",
self.timeout)
if self.settings.get("PERSISTENCE_BACKEND") in ("hybrid", "ldap"):
self.kubernetes.patch_namespaced_stateful_set_scale(name="opendj",
replicas=self.settings.get("LDAP_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=opendj", self.timeout)
    def deploy_update_lb_ip(self):
        """Create the update-lb-ip objects (keeps /etc/hosts entries in sync
        with the load balancer address on unregistered-FQDN deployments).
        """
        self.kubernetes.create_objects_from_dict(self.update_lb_ip_yaml)
def deploy_oxauth(self):
self.kubernetes.create_objects_from_dict(self.oxauth_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=oxauth", self.timeout)
self.kubernetes.patch_namespaced_deployment_scale(name="oxauth", replicas=self.settings.get("OXAUTH_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_fido2(self):
self.kubernetes.create_objects_from_dict(self.fido2_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=fido2", self.timeout)
self.kubernetes.patch_namespaced_deployment_scale(name="fido2", replicas=self.settings.get("FIDO2_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_scim(self):
self.kubernetes.create_objects_from_dict(self.scim_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=scim", self.timeout)
self.kubernetes.patch_namespaced_deployment_scale(name="scim", replicas=self.settings.get("SCIM_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_oxd(self):
self.kubernetes.create_objects_from_dict(self.oxd_server_yaml)
self.kubernetes.create_objects_from_dict(Path("./oxd-server/base/networkpolicy.yaml"),
self.settings.get("GLUU_NAMESPACE"))
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=oxd-server", self.timeout)
self.kubernetes.patch_namespaced_deployment_scale(name="oxd-server",
replicas=self.settings.get("OXD_SERVER_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_casa(self):
self.kubernetes.create_objects_from_dict(self.casa_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=casa", self.timeout)
self.kubernetes.patch_namespaced_deployment_scale(name="casa", replicas=self.settings.get("CASA_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_oxtrust(self):
self.kubernetes.create_objects_from_dict(self.oxtrust_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=oxtrust", self.timeout)
self.kubernetes.patch_namespaced_stateful_set_scale(name="oxtrust",
replicas=self.settings.get("OXTRUST_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_oxshibboleth(self):
self.kubernetes.create_objects_from_dict(self.oxshibboleth_yaml)
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=oxshibboleth", self.timeout)
self.kubernetes.patch_namespaced_stateful_set_scale(name="oxshibboleth",
replicas=self.settings.get("OXSHIBBOLETH_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
def deploy_oxpassport(self):
self.kubernetes.create_objects_from_dict(self.oxpassport_yaml)
if not self.settings.get("AWS_LB_TYPE") == "alb":
self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=oxpassport", self.timeout)
self.kubernetes.patch_namespaced_deployment_scale(name="oxpassport",
replicas=self.settings.get("OXPASSPORT_REPLICAS"),
namespace=self.settings.get("GLUU_NAMESPACE"))
    def deploy_oxauth_key_rotation(self):
        """Create the oxAuth key-rotation CronJob objects."""
        self.kubernetes.create_objects_from_dict(self.oxauth_key_rotate_yaml)
def deploy_cr_rotate(self):
self.kubernetes.delete_role("gluu-role", self.settings.get("GLUU_NAMESPACE"))
self.kubernetes.delete_role_binding("gluu-rolebinding", self.settings.get("GLUU_NAMESPACE"))
self.kubernetes.delete_cluster_role_binding("gluu-rolebinding")
time.sleep(10)
self.kubernetes.create_objects_from_dict(self.cr_rotate_yaml)
    def deploy_gluu_istio_ingress(self):
        """Create the Gluu Istio ingress objects in the Gluu namespace."""
        self.kubernetes.create_objects_from_dict(self.gluu_istio_ingress_yaml,
                                                 namespace=self.settings.get("GLUU_NAMESPACE"))
    def copy_configs_before_restore(self):
        """Snapshot the current Gluu secret and configmaps into instance
        attributes so they can be saved and re-applied around a restore.
        """
        self.gluu_secret = self.kubernetes.read_namespaced_secret("gluu", self.settings.get("GLUU_NAMESPACE")).data
        self.gluu_config = self.kubernetes.read_namespaced_configmap("gluu", self.settings.get("GLUU_NAMESPACE")).data
        self.gluu_install_envs = self.kubernetes.read_namespaced_configmap("gluu-config-cm",
                                                                           self.settings.get("GLUU_NAMESPACE")).data
def save_a_copy_of_config(self):
self.kubernetes.patch_or_create_namespaced_secret(name="copy-of-secret-params-before-restore", literal=None,
value_of_literal=None,
namespace=self.settings.get("GLUU_NAMESPACE"),
data=self.gluu_secret)
self.kubernetes.patch_or_create_namespaced_configmap(name="copy-of-config-params-before-restore",
namespace=self.settings.get("GLUU_NAMESPACE"),
data=self.gluu_config)
self.kubernetes.patch_or_create_namespaced_configmap(name="copy-of-install-config-params-before-restore",
namespace=self.settings.get("GLUU_NAMESPACE"),
data=self.gluu_install_envs)
def mount_config(self):
self.kubernetes.patch_or_create_namespaced_secret(name="gluu", literal=None, value_of_literal=None,
namespace=self.settings.get("GLUU_NAMESPACE"),
data=self.gluu_secret)
self.kubernetes.patch_or_create_namespaced_configmap(name="gluu",
namespace=self.settings.get("GLUU_NAMESPACE"),
data=self.gluu_config)
def run_backup_command(self):
try:
exec_ldap_command = ["/opt/opendj/bin/import-ldif", "--hostname", "localhost", "--port", "4444",
"--bindDN", "cn=Directory manager", "--backendID", "userRoot", "--trustAll",
"--ldifFile", "/opt/opendj/ldif/backup-this-copy.ldif",
"--bindPassword", self.settings.get("LDAP_PW")]
self.kubernetes.connect_get_namespaced_pod_exec(exec_command=exec_ldap_command,
app_label="app=opendj",
container="opendj",
namespace=self.settings.get("GLUU_NAMESPACE"))
except (ConnectionError, Exception) as e:
exec_ldap_command = ["/opt/opendj/bin/import-ldif", "--hostname", "localhost", "--port", "4444",
"--bindDN", "cn=Directory manager", "--backendID", "userRoot", "--trustAll",
"--ldifFile", "/opt/opendj/ldif/backup-this-copy.ldif",
"--bindPassword", "YOURPASSWORD"]
logger.error(e)
logger.info("An error has occured during importing the marked ldif. Please run the following command "
"manually inside the opendj container:\n {}".format(" ".join(exec_ldap_command)))
input("Press Enter once import has run successfully...")
    def setup_backup_ldap(self):
        """Create the ldap-auth secret and the LDAP backup CronJob.

        Renders ldap/backup with the current namespace, environment literals
        and LDAP_BACKUP_SCHEDULE, then applies the result.
        """
        # Secret values must be base64-encoded for the Kubernetes API.
        encoded_ldap_pw_bytes = base64.b64encode(self.settings.get("LDAP_PW").encode("utf-8"))
        encoded_ldap_pw_string = str(encoded_ldap_pw_bytes, "utf-8")
        self.kubernetes.patch_or_create_namespaced_secret(name="ldap-auth",
                                                          namespace=self.settings.get("GLUU_NAMESPACE"),
                                                          literal="password",
                                                          value_of_literal=encoded_ldap_pw_string)
        kustomize_parser = Parser("ldap/backup/kustomization.yaml", "Kustomization")
        kustomize_parser["namespace"] = self.settings.get("GLUU_NAMESPACE")
        # NOTE(review): GLUU_LDAP_AUTO_REPLICATE is populated from
        # GLUU_CACHE_TYPE — that looks like the wrong settings key; confirm
        # against the backup image's expected environment.
        kustomize_parser["configMapGenerator"][0]["literals"] = ["GLUU_LDAP_AUTO_REPLICATE=" + self.settings.get(
            "GLUU_CACHE_TYPE"), "GLUU_CONFIG_KUBERNETES_NAMESPACE=" + self.settings.get("GLUU_NAMESPACE"),
            "GLUU_SECRET_KUBERNETES_NAMESPACE=" +
            self.settings.get("GLUU_NAMESPACE"),
            "GLUU_CONFIG_ADAPTER=kubernetes",
            "GLUU_SECRET_ADAPTER=kubernetes",
            "GLUU_CERT_ALT_NAME='opendj'",
            "GLUU_PERSISTENCE_LDAP_MAPPING=" + self.settings.get(
                "HYBRID_LDAP_HELD_DATA"),
            "GLUU_PERSISTENCE_TYPE=" + self.settings.get(
                "PERSISTENCE_BACKEND")]
        kustomize_parser.dump_it()
        # Backup cadence comes straight from the settings (cron expression).
        cron_job_parser = Parser("ldap/backup/cronjobs.yaml", "CronJob")
        cron_job_parser["spec"]["schedule"] = self.settings.get("LDAP_BACKUP_SCHEDULE")
        cron_job_parser.dump_it()
        command = self.kubectl + " kustomize ldap/backup"
        exec_cmd(command, output_file="./ldap-backup.yaml")
        self.kubernetes.create_objects_from_dict("./ldap-backup.yaml")
def upgrade(self):
    """Upgrade an existing Gluu deployment in place.

    Flow (as implemented below):
      1. For ldap/hybrid persistence: prompt the operator to manually delete
         the obsolete ``oxAuthExpiration`` backend index inside the opendj
         pod, then delete the opendj stateful set.
      2. For couchbase/hybrid persistence: promote the current Couchbase
         credentials to superuser settings and prompt for a new Gluu user.
      3. Regenerate kustomized manifests, run the gluu-upgrade job, then
         redeploy LDAP (with the 101-ox schema mounted) or patch the
         Couchbase secrets, recreate oxTrust, and refresh the ingress.

    NOTE(review): this method is interactive (``input``/``click.prompt``)
    and shells out to kubectl; it is not safe to run unattended.
    """
    if self.settings.get("PERSISTENCE_BACKEND") in ("hybrid", "ldap"):
        # The oxAuthExpiration index must be removed by the operator inside
        # the opendj pod before the stateful set can be deleted.
        exec_delete_command = ["/opt/opendj/bin/dsconfig", "delete-backend-index", "--backend-name", "userRoot",
                               "--index-name", "oxAuthExpiration", "--hostName", "0.0.0.0", "--port", "4444",
                               "--bindDN",
                               "'cn=Directory Manager'", "--trustAll", "-f"]
        manual_exec_delete_command = " ".join(exec_delete_command)
        logger.warning("Please delete backend index manually by calling\n kubectl exec -ti opendj-0 -n {} "
                       "-- {}".format(self.settings.get("GLUU_NAMESPACE"), manual_exec_delete_command))
        # Blocks until the operator confirms the manual step.
        input("Press Enter once index has been deleted...")
        self.kubernetes.delete_stateful_set(self.settings.get("GLUU_NAMESPACE"), "app=opendj")
    if self.settings.get("PERSISTENCE_BACKEND") in ("hybrid", "couchbase"):
        import click
        from pygluu.kubernetes.helpers import prompt_password
        # The pre-upgrade credentials become the superuser credentials; a
        # fresh, lower-privileged Gluu user/password is prompted for.
        self.settings.set("COUCHBASE_SUPERUSER_PASSWORD", self.settings.get("COUCHBASE_PASSWORD"))
        self.settings.set("COUCHBASE_SUPERUSER", self.settings.get("COUCHBASE_USER"))
        self.settings.set("COUCHBASE_USER", click.prompt("Please enter gluu couchbase username.", default="gluu"))
        self.settings.set("COUCHBASE_PASSWORD", prompt_password("Couchbase Gluu user"))
    self.kustomize_gluu_upgrade()
    self.kustomize_it()
    self.adjust_fqdn_yaml_entries()
    # ALB readiness is handled by AWS itself, so pod status checks are
    # skipped for that load balancer type.
    if not self.settings.get("AWS_LB_TYPE") == "alb":
        self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=gluu-upgrade", self.timeout)
    self.kubernetes = Kubernetes()
    self.def_jackrabbit_secret()
    if self.settings.get("PERSISTENCE_BACKEND") in ("hybrid", "ldap"):
        # Mount the 101-ox schema into opendj via a config map before
        # re-applying the stateful set.
        self.kubernetes.create_objects_from_dict("ldap/base/101-ox.yaml",
                                                 self.settings.get("GLUU_NAMESPACE"))
        ldap_parser = Parser(self.ldap_yaml, "StatefulSet")
        ldap_parser["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(
            {"mountPath": "/opt/opendj/config/schema/101-ox.ldif",
             "name": "ox-ldif-cm", "subPath": "101-ox.ldif"})
        ldap_parser["spec"]["template"]["spec"]["volumes"].append(
            {"name": "ox-ldif-cm", "configMap": {"name": "oxldif"}})
        ldap_parser.dump_it()
        exec_cmd("kubectl apply -f {} --record".format(self.config_yaml), silent=True)
        exec_cmd("kubectl apply -f {} --record".format(self.ldap_yaml), silent=True)
        logger.info("Deploying LDAP.Please wait..")
        time.sleep(10)
        if not self.settings.get("AWS_LB_TYPE") == "alb":
            self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=opendj", self.timeout)
    else:
        # Couchbase-only persistence: refresh both the superuser and the
        # Gluu-user password secrets with base64-encoded values.
        encoded_cb_super_pass_bytes = base64.b64encode(
            self.settings.get("COUCHBASE_SUPERUSER_PASSWORD").encode("utf-8"))
        encoded_cb_super_pass_string = str(encoded_cb_super_pass_bytes, "utf-8")
        self.kubernetes.patch_or_create_namespaced_secret(name="cb-super-pass",
                                                          namespace=self.settings.get("GLUU_NAMESPACE"),
                                                          literal="couchbase_superuser_password",
                                                          value_of_literal=encoded_cb_super_pass_string)
        encoded_cb_pass_bytes = base64.b64encode(self.settings.get("COUCHBASE_PASSWORD").encode("utf-8"))
        encoded_cb_pass_string = str(encoded_cb_pass_bytes, "utf-8")
        # Patch old password
        self.kubernetes.patch_or_create_namespaced_secret(name="cb-pass",
                                                          namespace=self.settings.get("GLUU_NAMESPACE"),
                                                          literal="couchbase_password",
                                                          value_of_literal=encoded_cb_pass_string)
    self.kubernetes.create_objects_from_dict(self.gluu_upgrade_yaml)
    if not self.settings.get("AWS_LB_TYPE") == "alb":
        self.kubernetes.check_pods_statuses(self.settings.get("GLUU_NAMESPACE"), "app=gluu-upgrade", self.timeout)
    logger.info("Updating manifests and Gluu version...")
    # oxTrust must be recreated from the new manifests, so delete first.
    self.kubernetes.delete_stateful_set(self.settings.get("GLUU_NAMESPACE"), "app=oxtrust")
    logger.info("Waiting for oxTrust to be removed...")
    time.sleep(30)
    stdout, stderr, retcode = exec_cmd("kubectl apply -f {}/. --record --force".format(self.output_yaml_directory),
                                       silent=True)
    copy(Path("./nginx"), self.output_yaml_directory.joinpath("nginx"))
    # Recreate the ingress from the updated definition.
    self.update_ingress_fqdn()
    self.uninstall_ingress()
    self.kubernetes.create_objects_from_dict(self.ingress_file, self.settings.get("GLUU_NAMESPACE"))
def install(self, install_couchbase=True, restore=False):
    """Deploy the full Gluu stack onto the cluster.

    Orchestrates, in order: namespace creation, manifest kustomization,
    Couchbase handling, ingress/load-balancer setup, config generation (or
    re-mounting when restoring), Jackrabbit/Redis, LDAP, persistence
    loading, and every enabled ox* service.

    Args:
        install_couchbase (bool): when True, Couchbase is installed or its
            secrets are created here (skipped for pure-ldap persistence).
        restore (bool): when True, an existing deployment is being restored
            from backup; config is re-mounted instead of regenerated and
            namespace/ingress creation is skipped.
    """
    if not restore:
        labels = {"app": "gluu"}
        if self.settings.get("USE_ISTIO") == "Y":
            # Label enables Istio sidecar injection for the namespace.
            labels = {"app": "gluu", "istio-injection": "enabled"}
        self.kubernetes.create_namespace(name=self.settings.get("GLUU_NAMESPACE"), labels=labels)
    if restore:
        logger.info("Waiting for resources to be removed...")
        time.sleep(30)
        self.kubernetes.patch_or_create_namespaced_configmap(name="gluu-config-cm",
                                                             namespace=self.settings.get("GLUU_NAMESPACE"),
                                                             data=self.gluu_install_envs)
    self.kustomize_it()
    self.adjust_fqdn_yaml_entries()
    if install_couchbase:
        # Pure-ldap persistence has no Couchbase component at all.
        if self.settings.get("PERSISTENCE_BACKEND") != "ldap":
            if self.settings.get("INSTALL_COUCHBASE") == "Y":
                # Fresh install: tear down any existing deployment first.
                couchbase_app = Couchbase()
                couchbase_app.uninstall()
                couchbase_app = Couchbase()
                couchbase_app.install()
            else:
                # Existing/external Couchbase: only create the cert and
                # password secrets (values are base64-encoded).
                encoded_cb_pass_bytes = base64.b64encode(self.settings.get("COUCHBASE_PASSWORD").encode("utf-8"))
                encoded_cb_pass_string = str(encoded_cb_pass_bytes, "utf-8")
                encoded_cb_super_pass_bytes = base64.b64encode(
                    self.settings.get("COUCHBASE_SUPERUSER_PASSWORD").encode("utf-8"))
                encoded_cb_super_pass_string = str(encoded_cb_super_pass_bytes, "utf-8")
                couchbase_app = Couchbase()
                couchbase_app.create_couchbase_gluu_cert_pass_secrets(self.settings.get("COUCHBASE_CRT"),
                                                                      encoded_cb_pass_string,
                                                                      encoded_cb_super_pass_string)
    if not restore:
        # Pick exactly one ingress mechanism: ALB, Istio ingress, or nginx.
        self.kubernetes = Kubernetes()
        if self.settings.get("AWS_LB_TYPE") == "alb":
            self.prepare_alb()
            self.deploy_alb()
        elif self.settings.get("USE_ISTIO_INGRESS") == "Y":
            self.deploy_gluu_istio_ingress()
            self.set_lb_address()
        else:
            self.deploy_nginx()
    self.adjust_fqdn_yaml_entries()
    if self.settings.get("DEPLOY_MULTI_CLUSTER") != "Y":
        self.kubernetes = Kubernetes()
        if restore:
            self.mount_config()
            self.save_a_copy_of_config()
        else:
            self.deploy_config()
    if self.settings.get("USE_ISTIO_INGRESS") == "Y":
        # TLS secret must live in the istio-system namespace for the gateway.
        self.setup_tls(namespace=self.settings.get("ISTIO_SYSTEM_NAMESPACE"))
    if self.settings.get("INSTALL_JACKRABBIT") == "Y" and not restore:
        self.kubernetes = Kubernetes()
        if self.settings.get("JACKRABBIT_CLUSTER") == "Y":
            # Clustered Jackrabbit needs PostgreSQL first.
            self.postgres.install_postgres()
        self.deploy_jackrabbit()
    if not self.settings.get("AWS_LB_TYPE") == "alb":
        self.setup_tls(namespace=self.settings.get("GLUU_NAMESPACE"))
    if self.settings.get("INSTALL_REDIS") == "Y":
        self.kubernetes = Kubernetes()
        self.redis.install_redis()
    if self.settings.get("PERSISTENCE_BACKEND") == "hybrid" or \
            self.settings.get("PERSISTENCE_BACKEND") == "ldap":
        self.kubernetes = Kubernetes()
        if restore:
            # Restore LDAP data from backup rather than deploying fresh.
            self.run_backup_command()
            self.mount_config()
            if self.settings.get("DEPLOYMENT_ARCH") not in ("microk8s", "minikube"):
                self.wait_for_nginx_add()
        else:
            self.deploy_ldap()
            if self.settings.get("DEPLOYMENT_ARCH") not in ("microk8s", "minikube"):
                self.setup_backup_ldap()
    if not restore:
        self.kubernetes = Kubernetes()
        self.deploy_persistence()
    if self.settings.get("IS_GLUU_FQDN_REGISTERED") != "Y":
        if self.settings.get("DEPLOYMENT_ARCH") in ("eks", "local"):
            # Keep /etc/hosts entries in sync with the LB IP.
            self.kubernetes = Kubernetes()
            self.deploy_update_lb_ip()
    self.kubernetes = Kubernetes()
    self.deploy_oxauth()
    if self.settings.get("ENABLE_FIDO2") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_fido2()
    if self.settings.get("ENABLE_SCIM") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_scim()
    if self.settings.get("ENABLE_OXD") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_oxd()
    if self.settings.get("ENABLE_CASA") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_casa()
    self.kubernetes = Kubernetes()
    self.deploy_oxtrust()
    if self.settings.get("ENABLE_OXSHIBBOLETH") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_oxshibboleth()
    if restore:
        self.mount_config()
    if self.settings.get("ENABLE_OXPASSPORT") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_oxpassport()
    if self.settings.get("ENABLE_CACHE_REFRESH") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_cr_rotate()
    if self.settings.get("ENABLE_OXAUTH_KEY_ROTATE") == "Y":
        self.kubernetes = Kubernetes()
        self.deploy_oxauth_key_rotation()
    if restore:
        self.mount_config()
def uninstall_ingress(self):
    """Delete every Gluu-related nginx ingress resource from the Gluu namespace."""
    ingress_names = (
        "gluu-ingress-base",
        "gluu-ingress-openid-configuration",
        "gluu-ingress-uma2-configuration",
        "gluu-ingress-webfinger",
        "gluu-ingress-simple-web-discovery",
        "gluu-ingress-scim-configuration",
        "gluu-ingress-fido-u2f-configuration",
        "gluu-ingress",
        "gluu-ingress-scim",
        "gluu-ingress-stateful",
        "gluu-casa",
        "gluu-ingress-fido2-configuration",
    )
    for ingress_name in ingress_names:
        self.kubernetes.delete_ingress(ingress_name, self.settings.get("GLUU_NAMESPACE"))
def uninstall(self, restore=False):
    """Tear down the Gluu deployment and its supporting cluster resources.

    Deletes services, network policies, deployments, stateful sets, jobs,
    secrets, config maps, PVs/PVCs, storage classes, RBAC objects and the
    ingress, then removes on-node data directories and archives the current
    yaml folders / TLS files / settings as ``previous*`` copies.

    Args:
        restore (bool): when True, resources that must survive a restore
            (opendj, jackrabbit, their storage and secrets, nginx, RBAC,
            node data) are kept.
    """
    gluu_service_names = ["casa", "cr-rotate", "opendj", "oxauth", "oxpassport",
                          "oxshibboleth", "oxtrust", "oxd-server",
                          "jackrabbit", "fido2", "scim", "config-init-load-job"]
    gluu_storage_class_names = ["opendj-sc", "jackrabbit-sc"]
    nginx_service_name = "ingress-nginx"
    gluu_deployment_app_labels = ["app=casa", "app=oxauth", "app=fido2", "app=scim", "app=oxd-server",
                                  "app=oxpassport", "app=oxauth-key-rotation"]
    nginx_deployemnt_app_name = "nginx-ingress-controller"
    stateful_set_labels = ["app=opendj", "app=oxtrust", "app=oxshibboleth", "app=jackrabbit"]
    jobs_labels = ["app=config-init-load", "app=persistence-load", "app=gluu-upgrade"]
    secrets = ["oxdkeystorecm", "gluu", "tls-certificate",
               "gluu-jackrabbit-admin-pass", "gluu-jackrabbit-postgres-pass"]
    cb_secrets = ["cb-pass", "cb-crt", "cb-super-pass"]
    daemon_set_label = "app=cr-rotate"
    all_labels = gluu_deployment_app_labels + stateful_set_labels + jobs_labels + [daemon_set_label]
    gluu_config_maps_names = ["casacm", "updatelbip", "gluu"]
    nginx_config_maps_names = ["nginx-configuration", "tcp-services", "udp-services"]
    gluu_cluster_role_bindings_name = "cluster-admin-binding"
    nginx_roles_name = "nginx-ingress-role"
    nginx_cluster_role_name = "nginx-ingress-clusterrole"
    nginx_role_bindings_name = "nginx-ingress-role-nisa-binding"
    nginx_cluster_role_bindings_name = "nginx-ingress-clusterrole-nisa-binding"
    nginx_service_account_name = "nginx-ingress-serviceaccount"
    network_policies = ["oxd-server-policy"]
    minkube_yamls_folder = Path("./gluuminikubeyamls")
    microk8s_yamls_folder = Path("./gluumicrok8yamls")
    eks_yamls_folder = Path("./gluueksyamls")
    gke_yamls_folder = Path("./gluugkeyamls")
    aks_yamls_folder = Path("./gluuaksyamls")
    if restore:
        # During a restore the data-bearing components (opendj, jackrabbit)
        # and their secrets/storage must be preserved.
        # TODO: Remove pop method
        gluu_service_names.remove("opendj")
        gluu_service_names.remove("jackrabbit")
        gluu_storage_class_names = []
        stateful_set_labels.remove("app=opendj")
        stateful_set_labels.remove("app=jackrabbit")
        secrets.remove("gluu-jackrabbit-admin-pass")
        secrets.remove("gluu-jackrabbit-postgres-pass")
        # Rebuild after the removals above so the label sweep matches.
        all_labels = gluu_deployment_app_labels + stateful_set_labels + jobs_labels + [daemon_set_label]
    for service in gluu_service_names:
        self.kubernetes.delete_service(service, self.settings.get("GLUU_NAMESPACE"))
    for network_policy in network_policies:
        self.kubernetes.delete_network_policy(network_policy, self.settings.get("GLUU_NAMESPACE"))
    if not restore:
        if self.settings.get("INSTALL_REDIS") == "Y":
            self.redis.uninstall_redis()
        elif self.settings.get("JACKRABBIT_CLUSTER") == "Y":
            self.postgres.uninstall_postgres()
        self.kubernetes.delete_service(nginx_service_name, "ingress-nginx")
    self.kubernetes.delete_cronjob(self.settings.get("GLUU_NAMESPACE"), "app=oxauth-key-rotation")
    for deployment in gluu_deployment_app_labels:
        self.kubernetes.delete_deployment_using_label(self.settings.get("GLUU_NAMESPACE"), deployment)
    if not restore:
        self.kubernetes.delete_deployment_using_name(nginx_deployemnt_app_name, "ingress-nginx")
    for stateful_set in stateful_set_labels:
        self.kubernetes.delete_stateful_set(self.settings.get("GLUU_NAMESPACE"), stateful_set)
    for job in jobs_labels:
        self.kubernetes.delete_job(self.settings.get("GLUU_NAMESPACE"), job)
    for secret in secrets:
        self.kubernetes.delete_secret(secret, self.settings.get("GLUU_NAMESPACE"))
    if not restore:
        for secret in cb_secrets:
            self.kubernetes.delete_secret(secret, self.settings.get("GLUU_NAMESPACE"))
    self.kubernetes.delete_daemon_set(self.settings.get("GLUU_NAMESPACE"), daemon_set_label)
    for config_map in gluu_config_maps_names:
        self.kubernetes.delete_config_map_using_name(config_map, self.settings.get("GLUU_NAMESPACE"))
    if not restore:
        for config_map in nginx_config_maps_names:
            self.kubernetes.delete_config_map_using_name(config_map, "ingress-nginx")
    # One label can match a config map, a PV and a PVC; sweep all three.
    for cm_pv_pvc in all_labels:
        self.kubernetes.delete_config_map_using_label(self.settings.get("GLUU_NAMESPACE"), cm_pv_pvc)
        self.kubernetes.delete_persistent_volume(cm_pv_pvc)
        self.kubernetes.delete_persistent_volume_claim(self.settings.get("GLUU_NAMESPACE"), cm_pv_pvc)
    for storage_class in gluu_storage_class_names:
        self.kubernetes.delete_storage_class(storage_class)
    if not restore:
        # RBAC and ingress cleanup, then host-side data removal per arch.
        self.kubernetes.delete_role("gluu-role", self.settings.get("GLUU_NAMESPACE"))
        self.kubernetes.delete_role_binding("gluu-rolebinding", self.settings.get("GLUU_NAMESPACE"))
        self.kubernetes.delete_role(nginx_roles_name, "ingress-nginx")
        self.kubernetes.delete_cluster_role_binding("gluu-rolebinding")
        self.kubernetes.delete_cluster_role_binding(gluu_cluster_role_bindings_name)
        self.kubernetes.delete_role_binding(nginx_role_bindings_name, "ingress-nginx")
        self.kubernetes.delete_cluster_role_binding(nginx_cluster_role_bindings_name)
        self.kubernetes.delete_service_account(nginx_service_account_name, "ingress-nginx")
        self.kubernetes.delete_cluster_role(nginx_cluster_role_name)
        self.uninstall_ingress()
        if minkube_yamls_folder.exists() or microk8s_yamls_folder.exists():
            shutil.rmtree('/data', ignore_errors=True)
        else:
            for node_ip in self.settings.get("NODES_IPS"):
                if self.settings.get("DEPLOYMENT_ARCH") == "minikube":
                    exec_cmd("minikube ssh 'sudo rm -rf /data'")
                elif self.settings.get("DEPLOYMENT_ARCH") == "microk8s":
                    shutil.rmtree('/data', ignore_errors=True)
                else:
                    # APP_VOLUME_TYPE 6/16: host-path volumes on cloud nodes;
                    # clean each node over SSH.
                    if self.settings.get("APP_VOLUME_TYPE") in (6, 16):
                        if self.settings.get("DEPLOYMENT_ARCH") == "eks":
                            ssh_and_remove(self.settings.get("NODE_SSH_KEY"), "ec2-user", node_ip, "/data")
                        elif self.settings.get("DEPLOYMENT_ARCH") == "aks":
                            ssh_and_remove(self.settings.get("NODE_SSH_KEY"), "opc", node_ip, "/data")
        if self.settings.get("APP_VOLUME_TYPE") == 11:
            if self.settings.get("DEPLOYMENT_ARCH") == "gke":
                for node_name in self.settings.get("NODES_NAMES"):
                    for zone in self.settings.get("NODES_ZONES"):
                        exec_cmd("gcloud compute ssh user@{} --zone={} --command='sudo rm -rf $HOME/opendj'".
                                 format(node_name, zone))
                        exec_cmd("gcloud compute ssh user@{} --zone={} --command='sudo rm -rf $HOME/jackrabbit'".
                                 format(node_name, zone))
    if not restore:
        # Archive the generated yaml folders, TLS files and settings so the
        # previous deployment can be inspected after teardown.
        shutil.rmtree(Path("./previousgluuminikubeyamls"), ignore_errors=True)
        shutil.rmtree(Path("./previousgluumicrok8yamls"), ignore_errors=True)
        shutil.rmtree(Path("./previousgluueksyamls"), ignore_errors=True)
        shutil.rmtree(Path("./previousgluuaksyamls"), ignore_errors=True)
        shutil.rmtree(Path("./previousgluugkeyamls"), ignore_errors=True)
        with contextlib.suppress(FileNotFoundError):
            shutil.copytree(minkube_yamls_folder, Path("./previousgluuminikubeyamls"))
        with contextlib.suppress(FileNotFoundError):
            shutil.copytree(microk8s_yamls_folder, Path("./previousgluumicrok8yamls"))
        with contextlib.suppress(FileNotFoundError):
            shutil.copytree(eks_yamls_folder, Path("./previousgluueksyamls"))
        with contextlib.suppress(FileNotFoundError):
            shutil.copytree(aks_yamls_folder, Path("./previousgluuaksyamls"))
        with contextlib.suppress(FileNotFoundError):
            shutil.copytree(gke_yamls_folder, Path("./previousgluugkeyamls"))
        with contextlib.suppress(FileNotFoundError):
            shutil.move(Path("./ingress.crt"), Path("./previous-ingress.crt"))
        with contextlib.suppress(FileNotFoundError):
            shutil.move(Path("./ingress.key"), Path("./previous-ingress.key"))
        with contextlib.suppress(FileNotFoundError):
            time_str = time.strftime("_created_%d-%m-%Y_%H-%M-%S")
            shutil.copy(Path("./settings.json"), Path("./settings" + time_str + ".json"))
|
"""Scrape the IIIT-B Moodle dashboard and log course names/deadlines to info.txt.

Intended to be scheduled hourly from cron:
    0 * * * * python /path/to/file/lms.py
"""
import datetime

import requests
import urllib3
from bs4 import BeautifulSoup

# The Moodle server uses a certificate requests cannot verify, so TLS
# verification is disabled and the resulting warnings are silenced.
urllib3.disable_warnings()

# Fill in your Moodle credentials before running.
USERNAME = ""  # enter your username
PASSWORD = ""  # enter your password

LOGIN_URL = 'https://lms.iiitb.ac.in/moodle/login/index.php'
DASHBOARD_URL = 'https://lms.iiitb.ac.in/moodle/my/'

# Truncate and rewrite info.txt on every run; the context manager
# guarantees the file is flushed and closed.
with open("info.txt", "w") as out:
    out.write(str(datetime.datetime.now()) + "\n")
    with requests.Session() as c:
        # Prime the session cookie, then log in.
        c.get(LOGIN_URL, verify=False)
        login_data = dict(username=USERNAME, password=PASSWORD, next='/')
        c.post(LOGIN_URL, data=login_data)
        page = c.get(DASHBOARD_URL)
        soup = BeautifulSoup(page.content, "html.parser")
        courses = soup.find_all('div', attrs={'class': 'box coursebox'})
        for course in courses:
            # Course titles are the <h2> headings inside each course box.
            for title in course.find_all('h2'):
                out.write(">")
                out.write(title.text + "\n")
            # Collapsible region captions hold the upcoming-items text.
            prompts = course.find_all('div', attrs={'class': 'collapsibleregioncaption'})
            for prompt in prompts:
                out.write(prompt.text + "\n")
            out.write("\n")
|
from intel_extension_for_pytorch.nn.utils import _weight_prepack
from intel_extension_for_pytorch.nn.utils import _lstm_convert
from . import _model_convert, _weight_cast
from ._weight_prepack import Apply_TPPLinear_weight_prepack
|
import feature_extraction
import transforming
import numpy as np
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import GridSearchCV
class KNN:
    """K-nearest-neighbors classifier over vectorized question/answer pairs.

    Wraps sklearn's KNeighborsClassifier; inputs are DataFrames with
    'question' and 'text' columns that are vectorized and concatenated by
    the project's ``transforming`` helpers before fitting/predicting.
    """

    def __init__(self, vectorizer='tfidf', n_neighbors=3):
        # The vectorizer is resolved by name through the project's
        # feature_extraction registry (e.g. 'tfidf').
        self.vectorizer = feature_extraction.get(vectorizer)
        self.classifier = self.build_classifier(n_neighbors)

    def build_classifier(self, n_neighbors):
        """Return a fresh KNeighborsClassifier with the given neighbor count."""
        return KNeighborsClassifier(n_neighbors=n_neighbors)

    def fit(self, X, y):
        """Vectorize X (fitting the vectorizer) and fit the classifier on it."""
        X = transforming.vectorize_and_concatenate_qa(X, self.vectorizer)
        self.classifier.fit(X, y)

    def _prepare(self, X):
        """Coerce raw input to a DataFrame and vectorize without refitting.

        Accepts either a DataFrame or any array-like that can be reshaped
        into (n, 2) question/text rows.
        """
        # isinstance (not `type(...) is`) so DataFrame subclasses pass too.
        if not isinstance(X, pd.DataFrame):
            X = pd.DataFrame(np.reshape(X, (-1, 2)), columns=['question', 'text'])
        return transforming.vectorize_and_concatenate_qa(X, self.vectorizer, do_fit_vectorizer=False)

    def predict(self, X):
        """Return predicted labels for X."""
        return self.classifier.predict(self._prepare(X))

    def predict_proba(self, X):
        """Return class-membership probability estimates for X."""
        return self.classifier.predict_proba(self._prepare(X))

    def get_classifier_params(self):
        """Return the underlying classifier's parameters."""
        return self.classifier.get_params()

    def grid_search(self, X, y, param_grid, scoring=None, n_jobs=-2):
        """Run a 3-fold grid search over param_grid and return the best params."""
        X = transforming.vectorize_and_concatenate_qa(X, self.vectorizer)
        gs = GridSearchCV(estimator=self.classifier, param_grid=param_grid,
                          scoring=scoring, cv=3, verbose=1, n_jobs=n_jobs)
        gs.fit(X, y)
        return gs.best_params_
# -*- coding: utf-8 -*-
import scrapy
from ..items import Track
from datetime import datetime
from urllib.parse import urljoin
class NewReleaseSpider(scrapy.Spider):
    """Spider for DJCity's record-pool page listing newly released tracks."""

    name = 'new-release'
    allowed_domains = ['www.djcity.com']
    start_urls = ['http://www.djcity.com/digital/record-pool.aspx']

    def parse(self, response):
        """Yield one Track item per song; publish date comes from the
        day_time heading that precedes each listing block."""
        day_times = response.xpath('//div[contains(@class, "day_time")]/text()').extract()
        listings = response.xpath('//ul[@class="record_pool_listing"]')
        for index, listing in enumerate(listings):
            artist_texts = listing.xpath('descendant::div[@class="player_txt"]/text()').extract()
            title_texts = listing.xpath('descendant::div[@class="player_txt"]/h2/a/text()').extract()
            href_texts = listing.xpath('descendant::div[@class="player_txt"]/h2/a/@href').extract()
            for track_artist, track_name, track_href in zip(artist_texts, title_texts, href_texts):
                item = Track()
                item['name'] = track_name
                item['artist'] = track_artist
                item['url'] = urljoin(self.start_urls[0].strip(), track_href.strip())
                item['publish'] = self.parse_publish(day_times[index])
                yield item

    @staticmethod
    def parse_publish(publish_str):
        """Parse a date heading like 'Monday, January 01, 2018' into a datetime."""
        return datetime.strptime(publish_str, '%A, %B %d, %Y')
|
"""
With this concept of default parameters in mind, the goal of this assignment is to write a single function we are going to name randInt() that takes up to 2 arguments.
If no arguments are provided, the function should return a random integer between 0 and 100.
If only a max number is provided, the function should return a random integer between 0 and the max number.
If only a min number is provided, the function should return a random integer between the min number and 100
If both a min and max number are provided, the function should return a random integer between those 2 values.
Here are a couple of important notes about using random.random() and rounding. (Create this function without using random.randInt() -- we are trying to build that method ourselves for this assignment!)
random.random() returns a random floating number between 0.000 and 1.000
random.random() * 50 returns a random floating number between 0.000 and 50.000, i.e. scaling the range of random numbers.
random.random() * 25 + 10 returns a random floating number between 10.000 and 35.000, i.e. scaling the range of random numbers but adding in an offset as well. Our range of random numbers is 25, but the + 10 offsets or shifts the beginning of the random numbers to start at 10.
round(num) returns the rounded integer value of num
"""
import random
def randInt(min=0, max=100):
    """Return a random integer between min and max (both bounds reachable).

    Built on random.random() per the assignment (no random.randint).
    Defaults give [0, 100]; supplying only max gives [0, max]; only min
    gives [min, 100]; both give [min, max].

    Note: the parameter names shadow the builtins `min`/`max`, but they are
    part of the public keyword interface (callers use min=/max=) and so
    are kept.
    """
    # Scale random() by the span (max - min), then shift by min, so the
    # result lies in [min, max]. The original `* max + min` wrongly
    # produced values in [min, min + max] (e.g. min=50, max=500 -> up to 550).
    num = random.random() * (max - min) + min
    return round(num)
#print(randInt()) # should print a random integer between 0 to 100
#print(randInt(max=50)) # should print a random integer between 0 to 50
#print(randInt(min=50)) # should print a random integer between 50 to 100
#print(randInt(min=50, max=500)) # should print a random integer between 50 and 500
|
import tempfile
import argparse
import logging
import datetime
import threading
import os
import re
from botocore.exceptions import ClientError
from ocs_ci.framework import config
from ocs_ci.ocs.constants import (
CLEANUP_YAML,
TEMPLATE_CLEANUP_DIR,
AWS_CLOUDFORMATION_TAG,
)
from ocs_ci.ocs.exceptions import CommandFailed
from ocs_ci.utility.utils import get_openshift_installer, destroy_cluster
from ocs_ci.utility import templating
from ocs_ci.utility.aws import (
AWS,
delete_cluster_buckets,
destroy_volumes,
get_rhel_worker_instances,
StackStatusError,
terminate_rhel_workers,
)
from ocs_ci.cleanup.aws import defaults
FORMAT = "%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s"
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
logger = logging.getLogger(__name__)
def cleanup(cluster_name, cluster_id, upi=False, failed_deletions=None):
    """
    Cleanup existing cluster in AWS

    Args:
        cluster_name (str): Name of the cluster
        cluster_id (str): Cluster id to cleanup
        upi (bool): True for UPI cluster, False otherwise
        failed_deletions (list): list of clusters we failed to delete, used
            for reporting purposes

    """
    # Render a minimal metadata.json into a fresh temp dir so
    # openshift-install can destroy the cluster without the original
    # install-time artifacts.
    data = {"cluster_name": cluster_name, "cluster_id": cluster_id}
    template = templating.Templating(base_path=TEMPLATE_CLEANUP_DIR)
    cleanup_template = template.render_template(CLEANUP_YAML, data)
    cleanup_path = tempfile.mkdtemp(prefix="cleanup_")
    cleanup_file = os.path.join(cleanup_path, "metadata.json")
    with open(cleanup_file, "w") as temp:
        temp.write(cleanup_template)
    bin_dir = os.path.expanduser(config.RUN["bin_dir"])
    oc_bin = os.path.join(bin_dir, "openshift-install")
    if upi:
        # UPI clusters: CloudFormation stacks and RHEL workers must be
        # removed explicitly before/after the installer-driven destroy.
        aws = AWS()
        rhel_workers = get_rhel_worker_instances(cleanup_path)
        logger.info(f"{cluster_name}'s RHEL workers: {rhel_workers}")
        if rhel_workers:
            terminate_rhel_workers(rhel_workers)
        # Destroy extra volumes
        destroy_volumes(cluster_name)
        aws.delete_apps_record_set(cluster_name)
        stack_names = list()
        # Get master, bootstrap and security group stacks
        for stack_type in ["ma", "bs", "sg"]:
            try:
                stack_names.append(
                    aws.get_cloudformation_stacks(
                        pattern=f"{cluster_name}-{stack_type}"
                    )[0]["StackName"]
                )
            except ClientError:
                # Stack of this type does not exist; skip it.
                continue
        # Get the worker stacks
        worker_index = 0
        worker_stack_exists = True
        while worker_stack_exists:
            try:
                stack_names.append(
                    aws.get_cloudformation_stacks(
                        pattern=f"{cluster_name}-no{worker_index}"
                    )[0]["StackName"]
                )
                worker_index += 1
            except ClientError:
                # First missing index means there are no more worker stacks.
                worker_stack_exists = False
        logger.info(f"Deleting stacks: {stack_names}")
        aws.delete_cloudformation_stacks(stack_names)
        # Destroy the cluster
        logger.info(f"cleaning up {cluster_id}")
        destroy_cluster(installer=oc_bin, cluster_path=cleanup_path)
        # Infra and VPC stacks can only be deleted after the cluster itself.
        for stack_type in ["inf", "vpc"]:
            try:
                stack_names.append(
                    aws.get_cloudformation_stacks(
                        pattern=f"{cluster_name}-{stack_type}"
                    )[0]["StackName"]
                )
            except ClientError:
                continue
        try:
            aws.delete_cloudformation_stacks(stack_names)
        except StackStatusError:
            logger.error("Failed to fully destroy cluster %s", cluster_name)
            # NOTE(review): truthiness check — an empty list passed by the
            # caller is skipped here; presumably intentional, but confirm.
            if failed_deletions:
                failed_deletions.append(cluster_name)
            raise
    else:
        logger.info(f"cleaning up {cluster_id}")
        try:
            destroy_cluster(installer=oc_bin, cluster_path=cleanup_path)
        except CommandFailed:
            logger.error("Failed to fully destroy cluster %s", cluster_name)
            if failed_deletions:
                failed_deletions.append(cluster_name)
            raise
    delete_cluster_buckets(cluster_name)
def get_clusters(
    time_to_delete, region_name, prefixes_hours_to_spare, cluster_pattern=None
):
    """
    Get all cluster names that their EC2 instances running time is greater
    than the specified time to delete

    Args:
        time_to_delete (int): The maximum time in seconds that is allowed
            for clusters to continue running
        region_name (str): The name of the AWS region to delete the resources from
        prefixes_hours_to_spare (dict): Dictionaries of the cluster prefixes to spare
            along with the maximum time in hours that is allowed for spared
            clusters to continue running
        cluster_pattern (str): The name of the ec2 instances

    Returns:
        tuple: List of the cluster names (e.g ebenahar-cluster-gqtd4) to be provided to the
            ci-cleanup script, a list of VPCs that are part of cloudformation,
            and a list of remaining clusters

    """

    def determine_cluster_deletion(ec2_instances, cluster_name):
        # Returns True when any running instance of the cluster has exceeded
        # its allowed running time (special prefix rules take precedence).
        for instance in ec2_instances:
            allowed_running_time = time_to_delete
            do_not_delete = False
            if instance.state["Name"] == "running":
                for prefix, hours in prefixes_hours_to_spare.items():
                    # case insensitive 'startswith'
                    if bool(re.match(prefix, cluster_name, re.I)):
                        if hours == "never":
                            do_not_delete = True
                        else:
                            allowed_running_time = int(hours) * 60 * 60
                        break
                if do_not_delete:
                    logger.info(
                        "%s marked as 'do not delete' and will not be " "destroyed",
                        cluster_name,
                    )
                    return False
                else:
                    # launch_time is timezone-aware; use its tzinfo so the
                    # subtraction doesn't mix naive and aware datetimes.
                    launch_time = instance.launch_time
                    current_time = datetime.datetime.now(launch_time.tzinfo)
                    running_time = current_time - launch_time
                    logger.info(
                        f"Instance {[tag['Value'] for tag in instance.tags if tag['Key'] == 'Name'][0]} "
                        f"(id: {instance.id}) running time is {running_time} hours while the allowed"
                        f" running time for it is {allowed_running_time/3600} hours"
                    )
                    if running_time.total_seconds() > allowed_running_time:
                        return True
        return False

    def determine_cluster_deletion_base_name(ec2_instance_objs, vpc_id):
        """
        Determine cluster deletion base on name

        Args:
            ec2_instance_objs (list): list of ec2 instance obj
            vpc_id (str): vpc id

        Returns:
            bool: True if vpc_id exist and all ec2 instances on same vpc otherwise False

        """
        # Get all instances
        vpc_ids = [
            ec2_instance.get("Instances")[0].get("VpcId")
            for ec2_instance in ec2_instance_objs
        ]
        # Verify vpc_id exist and all ec2 instances on same vpc
        return True if vpc_id in vpc_ids and len(set(vpc_ids)) == 1 else False

    aws = AWS(region_name=region_name)
    clusters_to_delete = list()
    remaining_clusters = list()
    cloudformation_vpc_names = list()
    vpcs = aws.ec2_client.describe_vpcs()["Vpcs"]
    vpc_ids = [vpc["VpcId"] for vpc in vpcs]
    vpc_objs = [aws.ec2_resource.Vpc(vpc_id) for vpc_id in vpc_ids]
    ec2_instance_objs = None
    if cluster_pattern:
        # Pre-fetch the reservations matching the pattern once; reused by
        # determine_cluster_deletion_base_name for every VPC.
        worker_filter = [{"Name": "tag:Name", "Values": [f"{cluster_pattern}*"]}]
        ec2_instance_objs = aws.ec2_client.describe_instances(
            Filters=worker_filter
        ).get("Reservations")
    for vpc_obj in vpc_objs:
        vpc_tags = vpc_obj.tags
        if vpc_tags:
            # CloudFormation-tagged VPCs belong to UPI clusters and are
            # handled separately in the second loop below.
            cloudformation_vpc_name = [
                tag["Value"] for tag in vpc_tags if tag["Key"] == AWS_CLOUDFORMATION_TAG
            ]
            if cloudformation_vpc_name:
                cloudformation_vpc_names.append(cloudformation_vpc_name[0])
                continue
            vpc_name = [tag["Value"] for tag in vpc_tags if tag["Key"] == "Name"][0]
            cluster_name = vpc_name.replace("-vpc", "")
            vpc_instances = vpc_obj.instances.all()
            if not vpc_instances:
                # A VPC with no instances is an orphan; delete its cluster.
                clusters_to_delete.append(cluster_name)
                continue
            # Append to clusters_to_delete if cluster should be deleted
            if cluster_pattern is not None:
                if determine_cluster_deletion_base_name(ec2_instance_objs, vpc_obj.id):
                    clusters_to_delete.append(cluster_name)
                else:
                    remaining_clusters.append(cluster_name)
            else:
                if determine_cluster_deletion(vpc_instances, cluster_name):
                    clusters_to_delete.append(cluster_name)
                else:
                    remaining_clusters.append(cluster_name)
        else:
            logger.info("No tags found for VPC")
    # Get all cloudformation based clusters to delete
    cf_clusters_to_delete = list()
    for vpc_name in cloudformation_vpc_names:
        instance_dicts = aws.get_instances_by_name_pattern(
            f"{vpc_name.replace('-vpc', '')}*"
        )
        ec2_instances = [
            aws.get_ec2_instance(instance_dict["id"])
            for instance_dict in instance_dicts
        ]
        if not ec2_instances:
            continue
        # The cluster name is recovered from any instance's
        # kubernetes.io/cluster/<name> tag.
        cluster_io_tag = None
        for instance in ec2_instances:
            cluster_io_tag = [
                tag["Key"]
                for tag in instance.tags
                if "kubernetes.io/cluster" in tag["Key"]
            ]
            if cluster_io_tag:
                break
        if not cluster_io_tag:
            logger.warning(
                "Unable to find valid cluster IO tag from ec2 instance tags "
                "for VPC %s. This is probably not an OCS cluster VPC!",
                vpc_name,
            )
            continue
        cluster_name = cluster_io_tag[0].replace("kubernetes.io/cluster/", "")
        logger.info(f"cluster_name={cluster_name}")
        if cluster_pattern is not None:
            if cluster_pattern in cluster_name:
                cf_clusters_to_delete.append(cluster_name)
            else:
                remaining_clusters.append(cluster_name)
        else:
            if determine_cluster_deletion(ec2_instances, cluster_name):
                cf_clusters_to_delete.append(cluster_name)
            else:
                remaining_clusters.append(cluster_name)
    return clusters_to_delete, cf_clusters_to_delete, remaining_clusters
def cluster_cleanup():
    """CLI entry point: destroy the clusters named via --cluster, in parallel.

    Each --cluster value is a full cluster id (e.g. name-suffix); the name
    part is derived by stripping the final '-' segment. One cleanup thread
    is started per cluster and all are joined before returning.
    """
    parser = argparse.ArgumentParser(description="Cleanup AWS Resource")
    parser.add_argument(
        "--cluster", nargs=1, action="append", required=True, help="Cluster name tag"
    )
    parser.add_argument(
        "--upi", action="store_true", required=False, help="For UPI cluster deletion"
    )
    logging.basicConfig(level=logging.DEBUG)
    args = parser.parse_args()
    workers = []
    # args.cluster is a list of single-element lists (nargs=1 + append).
    for cluster_arg in args.cluster:
        cluster_id = cluster_arg[0]
        cluster_name = cluster_id.rsplit("-", 1)[0]
        logger.info(f"cleaning up {cluster_id}")
        worker = threading.Thread(
            target=cleanup, args=(cluster_name, cluster_id, args.upi)
        )
        worker.start()
        workers.append(worker)
    for worker in workers:
        worker.join()
def aws_cleanup():
    """CLI entry point: delete AWS clusters that exceeded their running time.

    Prompts for confirmation unless --force is given, merges --prefix
    special rules into the defaults, gathers IPI and UPI (cloudformation)
    clusters via get_clusters(), destroys them in parallel threads, and
    writes any failures to failed_cluster_deletions.txt.
    """
    parser = argparse.ArgumentParser(
        description="AWS overall resources cleanup according to running time",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        "--hours",
        type=hour_valid,
        action="store",
        required=False,
        help="""
            Maximum running time of the cluster (in hours).
            Clusters older than this will be deleted.
            The minimum is 10 hours
            """,
    )
    parser.add_argument(
        "--region",
        action="store",
        required=False,
        help="The name of the AWS region to delete the resources from",
    )
    parser.add_argument(
        "--prefix",
        action="append",
        required=False,
        type=prefix_hour_mapping,
        help="""
            Additional prefix:hour combo to treat as a special rule.
            Clusters starting with this prefix will only be cleaned up if
            their runtime exceeds the provided hour(this takes precedence
            over the value provided to --hours). Note: if you want to skip
            cleanup of a cluster entirely you can use 'never' for the hour.
            Example: --prefix foo:24 --prefix bar:48 --prefix foobar:never
            """,
    )
    parser.add_argument(
        "--force",
        action="store_true",
        required=False,
        help="""
            Force cluster cleanup.
            User will not be prompted for confirmation.
            WARNING: this utility is destructive, only use this option if
            you know what you are doing.
            """,
    )
    parser.add_argument(
        "--cluster-name",
        action="store",
        required=False,
        help="The name of the cluster to delete from AWS",
    )
    args = parser.parse_args()
    if not args.force:
        # Destructive operation: require the exact confirmation answer.
        confirmation = input(
            "Careful! This action could be highly destructive. "
            "Are you sure you want to proceed? "
        )
        assert (
            confirmation == defaults.CONFIRMATION_ANSWER
        ), "Wrong confirmation answer. Exiting"
    prefixes_hours_to_spare = defaults.CLUSTER_PREFIXES_SPECIAL_RULES
    if args.prefix:
        # User-supplied rules override/extend the defaults.
        for prefix, hours in args.prefix:
            logger.info(
                "Adding special rule for prefix '%s' with hours %s", prefix, hours
            )
            prefixes_hours_to_spare.update({prefix: hours})
    # NOTE(review): without --hours, time_to_delete is None; presumably only
    # valid together with --cluster-name — confirm before relying on it.
    time_to_delete = args.hours * 60 * 60 if args.hours else None
    region = defaults.AWS_REGION if not args.region else args.region
    clusters_to_delete, cf_clusters_to_delete, remaining_clusters = get_clusters(
        time_to_delete=time_to_delete,
        region_name=region,
        prefixes_hours_to_spare=prefixes_hours_to_spare,
        cluster_pattern=args.cluster_name,
    )
    if not clusters_to_delete:
        logger.info("No clusters to delete")
    else:
        logger.info("Deleting clusters: %s", clusters_to_delete)
        get_openshift_installer()
    procs = []
    failed_deletions = []
    # IPI clusters (upi=False).
    for cluster in clusters_to_delete:
        cluster_name = cluster.rsplit("-", 1)[0]
        logger.info(f"Deleting cluster {cluster_name}")
        proc = threading.Thread(
            target=cleanup, args=(cluster_name, cluster, False, failed_deletions)
        )
        proc.start()
        procs.append(proc)
    for p in procs:
        p.join()
    # UPI / cloudformation clusters (upi=True).
    for cluster in cf_clusters_to_delete:
        cluster_name = cluster.rsplit("-", 1)[0]
        logger.info(f"Deleting UPI cluster {cluster_name}")
        proc = threading.Thread(
            target=cleanup, args=(cluster_name, cluster, True, failed_deletions)
        )
        proc.start()
        procs.append(proc)
    for p in procs:
        p.join()
    logger.info("Remaining clusters: %s", remaining_clusters)
    # Persist the failure report for CI consumption.
    filename = "failed_cluster_deletions.txt"
    content = "None\n"
    if failed_deletions:
        logger.error("Failed cluster deletions: %s", failed_deletions)
        content = ""
        for cluster in failed_deletions:
            content += f"{cluster}\n"
    with open(filename, "w") as f:
        f.write(content)
def prefix_hour_mapping(string):
    """
    Validate that the string provided to --prefix is properly formatted

    Args:
        string (str): input provided to --prefix, expected as "prefix:hours"

    Raises:
        argparse.ArgumentTypeError: if the provided string is not
            correctly formatted

    Returns:
        str, str: prefix, hours ("never" is the only non-int hours value)
    """
    msg = (
        f"{string} is not a properly formatted prefix:hour combination. "
        f"See the --help for more information."
    )
    parts = string.split(":")
    if len(parts) != 2 or not all(parts):
        raise argparse.ArgumentTypeError(msg)
    prefix, hours = parts
    if hours != "never":
        try:
            int(hours)
        except ValueError:
            raise argparse.ArgumentTypeError(msg)
    return prefix, hours
def hour_valid(string):
    """
    Validate that the hour value provided is an int and not lower than the
    minimum allowed running time

    Args:
        string: input provided to --hours

    Raises:
        argparse.ArgumentTypeError: if the provided hours value is not an int
            or lower than the minimum allowed running time

    Returns:
        int: valid hour value
    """
    try:
        hours = int(string)
    except ValueError:
        msg = f"{string} is not an int, please provide an int value"
        raise argparse.ArgumentTypeError(msg)
    # Explicit check instead of `assert`: asserts are stripped under
    # `python -O`, which would silently disable this validation.
    if hours < defaults.MINIMUM_CLUSTER_RUNNING_TIME:
        msg = (
            f"Number of hours ({hours}) is lower than the required minimum "
            f"({defaults.MINIMUM_CLUSTER_RUNNING_TIME})."
        )
        raise argparse.ArgumentTypeError(msg)
    return hours
|
import csv
import re
from converter.binance import BinanceConverter
from strategy.base import BaseStrategy
BASE_MARKET_CURRENCIES = ['BTC', 'ETH', 'BNB']
class BinanceStrategy(BaseStrategy):
    """Converts Binance trade-history CSV rows into converter transactions.

    Each raw trade row is split into a BUY leg, a SELL leg and, when the
    fee is charged in a third coin, a separate fee transaction.
    """

    DATE = 'Date(UTC)'
    MARKET = 'Market'
    TYPE = 'Type'
    PRICE = 'Price'
    AMOUNT = 'Amount'
    TOTAL = 'Total'
    FEE = 'Fee'
    FEE_COIN = 'Fee Coin'
    CURRENCY = 'Currency'

    TRANSACTION_TYPE_BUY = 'BUY'
    TRANSACTION_TYPE_SELL = 'SELL'

    def __init__(self):
        self.converter = BinanceConverter()

    def convert_data(self, filename):
        """Read the Binance CSV at *filename* and convert every trade row."""
        converted = []
        with open(filename) as csvfile:
            for row in csv.DictReader(csvfile):
                for part in self._split_transaction(row):
                    converted.append(self.converter.convert(part))
        return converted

    def _split_transaction(self, transaction):
        """Split one raw trade row into its buy/sell (and maybe fee) parts."""
        date = transaction.get(self.DATE)
        market = transaction.get(self.MARKET)
        transaction_type = transaction.get(self.TYPE)
        amount = transaction.get(self.AMOUNT)
        total = transaction.get(self.TOTAL)
        fee = transaction.get(self.FEE)
        fee_currency = transaction.get(self.FEE_COIN)
        # Market symbols end in one of the base currencies,
        # e.g. "XRPBTC" -> ("XRP", "BTC").
        pattern = '(.*)({})'.format('|'.join(BASE_MARKET_CURRENCIES))
        traded, base = re.findall(pattern, market)[0]
        if transaction_type == self.TRANSACTION_TYPE_BUY:
            parts = self._split_buy_transaction(date, traded, amount, fee, fee_currency)
            parts.append(self._build_sell_transaction(date, base, total))
        else:
            parts = self._split_buy_transaction(date, base, total, fee, fee_currency)
            parts.append(self._build_sell_transaction(date, traded, amount))
        return parts

    def _split_buy_transaction(self, date, currency, amount, fee, fee_currency):
        """Build the BUY leg; emit a separate fee record for third-coin fees."""
        buy = {
            self.DATE: date,
            self.CURRENCY: currency,
            self.TYPE: self.TRANSACTION_TYPE_BUY,
            self.AMOUNT: amount,
        }
        parts = []
        if fee_currency == currency:
            buy[self.FEE] = fee
        else:
            buy[self.FEE] = '0'
            parts.append(self._build_fee_transaction(date, fee, fee_currency))
        parts.append(buy)
        return parts

    def _build_fee_transaction(self, date, fee, fee_currency):
        """Zero-amount BUY carrying only the fee in its own currency."""
        return {
            self.DATE: date,
            self.CURRENCY: fee_currency,
            self.TYPE: self.TRANSACTION_TYPE_BUY,
            self.AMOUNT: '0',
            self.FEE: fee,
        }

    def _build_sell_transaction(self, date, currency, amount):
        """SELL leg for *amount* of *currency*; fee is always zero here."""
        return {
            self.DATE: date,
            self.CURRENCY: currency,
            self.TYPE: self.TRANSACTION_TYPE_SELL,
            self.AMOUNT: amount,
            self.FEE: '0',
        }
|
from tkinter import *
import tkinter as tk
import matplotlib.pyplot
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
#matplotlib.use('TkAgg')
import Adafruit_DHT
from PIL import ImageTk, Image
from datetime import datetime
class Generate_plot():
    """Generates and updates the temperature and humidity graph on page 3.

    Reads the module-level ``indoor_temp_hist`` / ``indoor_hum_hist``
    histories and embeds the figure in the ``page_3`` frame.
    """
    def __init__(self):
        # Create plot of temp and humidity data.
        self.f = Figure(figsize=(5, 4), dpi=100)
        self.a = self.f.add_subplot(111)
        self._plot_series()
        self.canvas = FigureCanvasTkAgg(self.f, master=page_3)
        # FigureCanvasTkAgg.show() was deprecated and removed in
        # matplotlib 3.x; draw() is the supported equivalent.
        self.canvas.draw()
        #canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        self.canvas._tkcanvas.pack(side=tk.TOP, fill=tk.BOTH, expand=True)

    def _plot_series(self):
        """Plot both history series and (re)apply title/labels/legend."""
        self.a.plot(indoor_temp_hist)
        self.a.plot(indoor_hum_hist)
        self.a.set_title("Temperature and humidity last 24h")
        self.a.set_xlabel("Time")
        self.a.set_ylabel("Degrees C / % Relative Humidity")
        self.a.legend(["Indoor temperature", "Indoor humidity"], loc="upper left")

    def clear_plot(self):
        """Clears graph."""
        self.a.clear()
        self.canvas.draw()

    def update_plot(self):
        """Updates graph with current data."""
        self._plot_series()
        self.canvas.draw()
def exit():
    """Function for quitting: stops the tk mainloop.

    NOTE(review): shadows the builtin ``exit``; only used as the Exit
    button callback on page 4.
    """
    root.quit()
def raise_frame(frame):
    """Function for navigating to a page: raises *frame* above its siblings."""
    frame.tkraise()

    # if frame=='page_3':
    #     # Regenerates graph
    #     plot.clear_plot()
    #     plot.update_plot()
def clock():
    """Gets current time for the display.

    Re-schedules itself every second, then pushes HH:MM:SS into the
    ``time`` StringVar shown on page 1.
    """
    root.after(1000, clock)
    currenttime = datetime.now().strftime("%H:%M:%S")
    time.set(currenttime)
def get_temp():
    """Gets current temperature and humidity from the DHT22 sensor.

    Re-schedules itself every ``temp_interval`` ms, appends formatted
    readings to the history lists (bounded to 24h) and updates the labels.
    """
    root.after(temp_interval, get_temp)
    humidity, temperature = Adafruit_DHT.read_retry(22, 4)
    # temp_interval is in milliseconds, so 24h worth of samples is
    # 86400 s / (temp_interval/1000 s per sample). The previous
    # `86400/temp_interval` kept only ~8 samples instead of 24 hours.
    max_samples = 86400 / (temp_interval / 1000)
    temp = "{0:0.1f}".format(temperature)
    indoor_temp_hist.append(temp)
    if len(indoor_temp_hist) > max_samples:
        indoor_temp_hist.pop(0)
    temperature_in.set(temp + "°C")
    hum = "{0:0.1f}".format(humidity)
    # Make a sanity check on humidity to avoid crazy readings
    if float(hum) >= 0 and float(hum) <= 100:
        indoor_hum_hist.append(hum)
        if len(indoor_hum_hist) > max_samples:
            indoor_hum_hist.pop(0)
        humidity_in.set(hum + "%RH")
def plot_page(frame):
    """Activates plot page (3) and regenerates plot.

    The docstring previously sat after a ``global`` statement, so it was
    not recognized as a docstring; the ``global plot`` was also unneeded
    since ``plot`` is only read, never rebound.
    """
    frame.tkraise()
    plot.clear_plot()
    plot.update_plot()
# Definitions
# Colour palette and font for the whole UI
menu_color = "#004060"
main_color = "#111111"
text_color = "#D0D0D0"
highlight_color = "#000000"
main_font = "Dejavu Sans"
# Rolling histories of formatted sensor readings, filled by get_temp()
indoor_temp_hist = []
indoor_hum_hist = []
# Sensor poll period in milliseconds
temp_interval = 10000

# TKinter
root = tk.Tk()
root.geometry("1024x600")
root.resizable(0,0)
root.overrideredirect(True) # Overrides window manager
root.wm_attributes("-topmost", 1) # Makes window top
root.config(cursor="none") # Hides cursor

# Top bar holding the four navigation buttons
back = tk.Frame(master=root, bg=menu_color)
back.pack_propagate(0)
back.pack(fill=tk.BOTH, expand=False)

# Icons
temp_icon = ImageTk.PhotoImage(file="temp.png")
forecast_icon = ImageTk.PhotoImage(file="forecast.png")
history_icon = ImageTk.PhotoImage(file="history.png")
settings_icon = ImageTk.PhotoImage(file="settings.png")

# Main menu buttons
button_1 = Button(master=back, image=temp_icon, bg=menu_color,
    highlightbackground=highlight_color, command=lambda:raise_frame(page_1),
    width=250, height=80, padx=5, pady=3)
button_1.grid(row=0, column=0)
button_2 = Button(master=back, image=forecast_icon, bg=menu_color,
    highlightbackground=highlight_color, command=lambda:raise_frame(page_2),
    width=250, height=80, padx=5, pady=3)
button_2.grid(row=0, column=1)
# History button regenerates the plot before raising page 3
button_3 = Button(master=back, image=history_icon, bg=menu_color,
    highlightbackground=highlight_color, command=lambda:plot_page(page_3),
    width=250, height=80, padx=5, pady=3)
button_3.grid(row=0, column=2)
button_4 = Button(master=back, image=settings_icon, bg=menu_color,
    highlightbackground=highlight_color, command=lambda:raise_frame(page_4),
    width=250, height=80, padx=5, pady=3)
button_4.grid(row=0, column=3)

# Frame for pages
container = tk.Frame()
container.pack(side="top", fill="both", expand=True)

# Individual pages, stacked in the same spot; tkraise() selects one
page_1 = tk.Frame(bg=main_color)
page_1.place(in_=container, x=0, y=0, relwidth=1, relheight=1)
page_2 = tk.Frame(bg=main_color)
page_2.place(in_=container, x=0, y=0, relwidth=1, relheight=1)
page_3 = tk.Frame(bg=main_color)
page_3.place(in_=container, x=0, y=0, relwidth=1, relheight=1)
page_4 = tk.Frame(bg=main_color)
page_4.place(in_=container, x=0, y=0, relwidth=1, relheight=1)

# Background image
#image = ImageTk.PhotoImage(file="background.jpg")
#background = Label(page_1, image=image)
#background.place(x=0, y=0, relwidth=1, relheight=1)

"""Page 1 contents"""
# Clock
# NOTE(review): this StringVar shadows the stdlib `time` module name
# (that module is not imported here, so it is harmless)
time = StringVar()
time.set("--:--:--")
clockLabel = Label(master=page_1, fg=text_color, bg=main_color,
    textvariable=time, font=(main_font, 40))
clockLabel.place(x=385, y=60)

# Indoor/outdoor static labels
indoor = StringVar()
indoor.set("Indoor")
indoorLabel = Label(master=page_1, fg=text_color, bg=main_color,
    textvariable=indoor, font=(main_font, 30))
indoorLabel.place(x=100, y=350)
outdoor = StringVar()
outdoor.set("Outdoor")
outdoorLabel = Label(master=page_1, fg=text_color, bg=main_color,
    textvariable=outdoor, font=(main_font, 30))
outdoorLabel.place(x=600, y=350)

# Indoor temperature label (updated by get_temp)
temperature_in = StringVar()
temperature_in.set('---' + "°C")
temperature_inLabel = Label(master=page_1, fg=text_color, bg=main_color,
    textvariable=temperature_in, font=(main_font, 80))
temperature_inLabel.place(x=100, y=150)

# Indoor humidity label (updated by get_temp)
humidity_in = StringVar()
humidity_in.set('---' + "%RH")
humidity_inLabel = Label(master=page_1, fg=text_color, bg=main_color,
    textvariable=humidity_in, font=(main_font, 48))
humidity_inLabel.place(x=100, y=250)

# Outdoor temperature label
# NOTE(review): nothing in this file updates temperature_out — presumably
# an outdoor sensor is planned; it stays at "---°C".
temperature_out = StringVar()
temperature_out.set("---" + "°C")
temperature_outLabel = Label(master=page_1, fg=text_color, bg=main_color,
    textvariable=temperature_out, font=(main_font, 80))
temperature_outLabel.place(x=600, y=150)

# Page 2 contents
page_2text = StringVar()
page_2text.set("Weather Forecast")
page_2textLabel = Label(master=page_2, fg=text_color, bg=main_color,
    textvariable=page_2text, font=(main_font, 30, "bold"))
page_2textLabel.place(x=20, y=20)

# Page 3 contents

# Page 4 contents
page_4text = StringVar()
page_4text.set("Settings")
page_4textLabel = Label(master=page_4, fg=text_color, bg=main_color,
    textvariable=page_4text, font=(main_font, 30, "bold"))
page_4textLabel.place(x=20, y=20)

# Exit button
button_exit = Button(master=page_4, text="Exit", bg=menu_color,
    highlightbackground=highlight_color, command=exit, width=10, height=3)
button_exit.place(x=880, y=420)

# Show first page on startup
raise_frame(page_1)

# Create graph
plot = Generate_plot()

# Read clock and sensors
root.after(1000, clock)
root.after(temp_interval, get_temp)

# Run main interface
root.mainloop()
|
from celery import Celery
from django.core.mail import send_mail
from dailyfresh import settings
# Create the celery client
# arg 1: app name (free-form)
# arg 2: message broker (redis database 1 on localhost)
app = Celery('dailyfresh', broker='redis://127.0.0.1:6379/1')
@app.task
def send_active_mail(username, email, token):
    """Send the account-activation email (celery task).

    Args:
        username: display name inserted into the email body.
        email: recipient address.
        token: activation token embedded in the activation link.
    """
    subject = '天天生鲜用户激活'  # subject must be non-empty or send_mail raises
    message = ''  # plain-text body; overridden by html_message when provided
    sender = settings.EMAIL_FROM
    receivers = [email]
    # HTML body carrying the activation link
    html_message = '<h2>尊敬的 %s, 感谢注册天天生鲜</h2>' \
                   '<p>请点击此链接激活您的帐号: ' \
                   '<a href="http://127.0.0.1:8000/users/active/%s">' \
                   'http://127.0.0.1:8000/users/active/%s</a>' \
                   % (username, token, token)
    # The original called send_mail twice back-to-back, so every user
    # received the activation email in duplicate; send it exactly once.
    send_mail(subject, message, sender, receivers, html_message=html_message)
from django.conf.urls import url
from rest_framework import routers
from talentmap_api.common.urls import get_retrieve, patch_update
from talentmap_api.administration.views import aboutpage as views
router = routers.SimpleRouter()

urlpatterns = [
    # Single about-page resource: GET retrieves it, PATCH updates it.
    url(r'^$', views.AboutPageView.as_view({**get_retrieve, ** patch_update}), name='administration.aboutpage'),
]

urlpatterns += router.urls
|
from urllib.request import urlopen
from urllib.request import Request
from bs4 import BeautifulSoup
# Humor University (humoruniv.com) main page
headers={
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:65.0) Gecko/20100101 Firefox/65.0'
}
req = Request(url="http://web.humoruniv.com/main.html", headers=headers)
response = urlopen(req)
html = response.read()#.decode("ms949")
bs = BeautifulSoup(html, 'html.parser')
# "best_right" panel holds the ranked post list
rankSection = bs.find('div',{'class':'best_right'})
# NOTE(review): find('li') returns only the FIRST <li>; if every entry has
# its own <li>, this should probably be find_all('li') — confirm markup.
ranks = rankSection.find('li').find_all('a')
for i, rank in enumerate(ranks):
    href = rank.attrs['href']
    text = rank.string
    # rebinds the loop variable to the 1-based rank number
    rank = i + 1
    # NOTE(review): ":" "http://..." is implicit string concatenation —
    # possibly a missing comma; as written it prints ":http://..." joined.
    print(str(rank)+"위", text, ":" "http://web.humoruniv.com" +str(href))
|
# This one times out. Will have to look for another way. Maybe use Binary Index Trees.
# If you have n intersections, you need to do n shifts to sort the list.
# If we are able to find the number of intersections, we will be able to find the nubmer of shifts that we need.
# If i < j and A[i] > A[j], then this is an arc or an inversion.
# You are also going to have to implement merge sort in order for this to work.
import math
import os
import random
import re
import sys
def _count_inversions(arr):
    """Return (sorted copy of *arr*, number of inversions) via merge sort."""
    if len(arr) <= 1:
        return arr, 0
    mid = len(arr) // 2
    left, left_inv = _count_inversions(arr[:mid])
    right, right_inv = _count_inversions(arr[mid:])
    merged = []
    inversions = left_inv + right_inv
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            # Everything left in `left` is greater than right[j]:
            # each of those pairs is one inversion (one shift).
            inversions += len(left) - i
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged, inversions


def insertionSort(arr):
    """Return the number of shifts insertion sort would perform on *arr*.

    The shift count equals the number of inversions (pairs i < j with
    arr[i] > arr[j]); counting them with merge sort is O(n log n), which
    avoids the O(n^2) simulation the header comments say times out.
    """
    return _count_inversions(arr)[1]


if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    t = int(input().strip())

    for t_itr in range(t):
        n = int(input().strip())

        arr = list(map(int, input().rstrip().split()))

        result = insertionSort(arr)

        fptr.write(str(result) + '\n')

    fptr.close()
|
class TreeNode(object):
    """Binary tree node with a value and optional left/right children."""
    def __init__(self, x):
        self.val = x        # node value
        self.left = None    # left child (TreeNode or None)
        self.right = None   # right child (TreeNode or None)
class Solution:
    """Two-sum over a binary tree: do two distinct nodes sum to k?"""

    def findTarget(self, root, k):
        """
        :type root: TreeNode
        :type k: int
        :rtype: bool
        """
        if root is None or k is None:
            return False
        # Level-order walk; `seen` holds values of nodes already visited,
        # so a hit means two *distinct* nodes sum to k.
        seen = set()
        level = [root]
        while level:
            next_level = []
            for node in level:
                if node.left is not None:
                    next_level.append(node.left)
                if node.right is not None:
                    next_level.append(node.right)
                if k - node.val in seen:
                    return True
                seen.add(node.val)
            level = next_level
        return False
if __name__ == '__main__':
    solution = Solution()
    # Build the sample tree:
    #        5
    #       / \
    #      3   6
    #     / \   \
    #    2   4   7
    root = TreeNode(5)
    left = TreeNode(3)
    right = TreeNode(6)
    left_left = TreeNode(2)
    left_right = TreeNode(4)
    right_right = TreeNode(7)
    root.left = left
    root.right = right
    left.left = left_left
    left.right = left_right
    right.right = right_right
    # 9 = 2 + 7, so this should print True
    print(solution.findTarget(root, 9))

    # root = TreeNode(2)
    # left = TreeNode(1)
    # right = TreeNode(3)
    # root.left = left
    # root.right = right
    # print(solution.findTarget(root, 4))
from django.shortcuts import render
from django.http import HttpResponse
from django.core.mail import EmailMessage
from django.shortcuts import redirect
# Create your views here.
def index(request):
    """Render the site's main page."""
    return render(request, 'main/index.html')
def ajax_send_mail(request):
    """AJAX endpoint: forward a contact-form message to the site owner.

    Reads ``name``, ``email`` and ``text`` from the query string and mails
    them on.  Returns "ok" on success and an empty body on any failure
    (the client treats any non-"ok" response as an error).
    """
    try:
        name = request.GET.get('name')
        email = request.GET.get('email')
        text = request.GET.get('text')
        message = ("Name: "+name+ "\nEmail: "+email+ "\n\n "+ text)
        emailMessage = EmailMessage('Mail from site', message, to=['director@aits.ua'])
        emailMessage.send()
        return HttpResponse("ok")
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Still deliberately best-effort for the client.
        return HttpResponse("")
import networkx as nx
def load_data(file):
    """Build an undirected graph from an edge-list file.

    Each line of *file* holds whitespace-separated integer node ids
    forming one edge.
    """
    graph = nx.Graph()
    with open(file, "r") as handle:
        for line in handle:
            graph.add_edge(*map(int, line.split()))
    return graph
def main():
    """Load the ego network, print node count, average degree and a
    10-step branching-style size estimate."""
    G = load_data("348.edges.txt")
    print(G.number_of_nodes())
    # G.neighbors(node) returns an iterator in networkx >= 2.0, so
    # len() on it raises TypeError; summing the degree view gives the
    # same total (sum of neighbor-list lengths) on both 1.x and 2.x.
    b = sum(d for _, d in G.degree()) / float(G.number_of_nodes())
    p = G.number_of_nodes()
    total = 1
    for i in range(10):
        total += total*(p - i*b)
    print(b)
    print(total)


if __name__ == '__main__':
    main()
|
import sqlite3
# Simple interactive voter-registration check against the AADHAR table.
conn = sqlite3.connect('my_database.sqlite')
cursor = conn.cursor()

print('hi, are you looking for vote')
response = input("enter Y or N ")
if response == 'N':
    print("Thank you")
else:
    aadhar = int(input("Give your adhar number "))
    name = input("enter your name")
    # Parameterized lookup: does this aadhar number already exist?
    cursor.execute("SELECT AADHAR from AADHAR where aadhar = ?", (aadhar,))
    rows = cursor.fetchall()
    for row in rows:
        if row[0] == aadhar:
            print('aadhar exists')
            break
    if not rows:
        # Not registered yet: insert the new record.
        cursor.execute("INSERT INTO AADHAR (NAME,AADHAR) VALUES (?, ?)",
                       (name, aadhar))
        print("aadhar added")

cursor.close()
conn.commit()
conn.close()
|
# Common utils.
import os
import random
import time
import subprocess
from subprocess import call
import logging
# Log line formats: verbose (file/line/function) vs. plain
FORMAT_DBG = "%(levelname)-7s **: (%(filename)s:%(lineno)s:%(funcName)s) - %(message)s"
FORMAT_INFO = "%(levelname)-7s **: %(message)s"

# Loggers
logging.basicConfig()
logger = logging.getLogger()
# First handler installed by basicConfig (console handler)
hndlr = logger.handlers[0]
# Mirror all log output into a file as well
filehndlr = logging.FileHandler("tvss_test.log")
logger.addHandler(filehndlr)
def random_sleep(min, max):
    """Sleep for a random whole number of seconds in [min, max]."""
    duration = random.randint(min, max)
    logger.debug("Random timeout %s", duration)
    time.sleep(duration)
def random_true():
    """Return 0 or 1 with equal probability (truthy half the time)."""
    return random.randint(0, 1)
def check_hostname(hostname):
    """Ping *hostname* once and return ping's exit status (0 = reachable).

    Uses an argument list with shell=False so a hostname containing shell
    metacharacters cannot inject commands, and subprocess.DEVNULL instead
    of the previous never-closed handle on os.devnull.
    """
    return call(["ping", hostname, "-c", "1"],
                stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Named entity recognition fine-tuning: utilities to work with CoNLL-2003 task. """
import logging
import os
import json
from utils import get_labels, write_file
logger = logging.getLogger(__name__)
class InputExample(object):
    """A single training/test example for token classification."""

    def __init__(self, id, words, start_labels, end_labels, event_type=None, role=None):
        """Construct an InputExample.

        Args:
            id: unique id for the example.
            words: list of tokens in the sequence.
            start_labels: per-token 0/1 flags marking span starts.
            end_labels: per-token 0/1 flags marking span ends.
            event_type: optional event type this example queries for.
            role: optional argument role this example queries for.
        """
        self.id = id
        self.words = words
        self.start_labels = start_labels
        self.end_labels = end_labels
        self.event_type = event_type
        self.role = role
class InputFeatures(object):
    """A single set of model-ready features for one example."""

    def __init__(self, input_ids, attention_mask, token_type_ids, start_label_ids, end_label_ids):
        # Encoder inputs (ids, mask, segment ids) plus the binary
        # start/end span targets aligned with the token ids.
        self.start_label_ids = start_label_ids
        self.end_label_ids = end_label_ids
        self.input_ids = input_ids
        self.attention_mask = attention_mask
        self.token_type_ids = token_type_ids
## ccks格式
def trigger_process_bin_ccks(input_file, schema_file, is_predict=False):
    """Build binary trigger-span examples from CCKS-format data.

    For every event type listed in the schema, emits one example per input
    row whose 0/1 start/end label vectors mark trigger spans of that type.

    Args:
        input_file: JSON-lines file; each row has "id", "content" and
            (unless predicting) "events".
        schema_file: JSON-lines schema; each row carries "event_type".
        is_predict: when True, emit a single all-zero-label example per
            row with event_type None.

    Returns:
        list of dicts with keys id/words/start_labels/end_labels/event_type,
        ready to build InputExample objects.
    """
    event_type_list = []
    rows = open(schema_file, encoding='utf-8').read().splitlines()
    for row in rows:
        row = json.loads(row)
        event_type = row['event_type']
        event_type_list.append(event_type)

    rows = open(input_file, encoding='utf-8').read().splitlines()
    results = []
    for row in rows:
        if len(row)==1: print(row)  # flag suspicious one-character lines
        row = json.loads(row)
        start_labels = [0]*len(row["content"])
        end_labels = [0]*len(row["content"])
        if is_predict:
            results.append({"id":row["id"], "words":list(row["content"]), "start_labels":start_labels, "end_labels":end_labels, "event_type":None})
            continue
        for gold_event_type in event_type_list:
            # fresh label vectors for each candidate event type
            start_labels = [0]*len(row["content"])
            end_labels = [0]*len(row["content"])
            for event in row["events"]:
                event_type = event["type"]
                if event_type != gold_event_type: continue
                for mention in event["mentions"]:
                    if mention["role"]=="trigger":
                        trigger = mention["word"]
                        trigger_start_index, trigger_end_index = mention["span"]
                        trigger_end_index -= 1  # spans appear end-exclusive; make inclusive
                        start_labels[trigger_start_index]= 1
                        end_labels[trigger_end_index]= 1
                # NOTE(review): stops after the first event matching the
                # gold type — later events of the same type are ignored;
                # confirm this is intended.
                break
            results.append({"id":row["id"], "words":list(row["content"]), "start_labels":start_labels, "end_labels":end_labels, "event_type":gold_event_type})
    # write_file(results,output_file)
    return results
## lic格式
def trigger_process_bin_lic(input_file, schema_file, is_predict=False):
    """Build binary trigger-span examples from LIC-format data.

    For every event type in the schema, emits one example per input row
    whose 0/1 start/end label vectors mark trigger spans of that type.

    Args:
        input_file: JSON-lines file; each row has "id", "text" and
            (unless predicting) "event_list".
        schema_file: JSON-lines schema; each row carries "event_type".
        is_predict: when True, emit a single all-zero-label example per
            row with event_type None.

    Returns:
        list of dicts ready to build InputExample objects.
    """
    event_type_list = []
    rows = open(schema_file, encoding='utf-8').read().splitlines()
    for row in rows:
        row = json.loads(row)
        event_type_list.append(row['event_type'])

    rows = open(input_file, encoding='utf-8').read().splitlines()
    results = []
    for row in rows:
        if len(row) == 1: print(row)  # flag suspicious one-character lines
        row = json.loads(row)
        start_labels = [0]*len(row["text"])
        end_labels = [0]*len(row["text"])
        if is_predict:
            results.append({"id":row["id"], "words":list(row["text"]), "start_labels":start_labels, "end_labels":end_labels, "event_type":None})
            continue
        for gold_event_type in event_type_list:
            # BUG FIX: these were sized with row["content"] (the CCKS key);
            # LIC rows carry their sentence under "text", so this raised
            # KeyError on every non-predict row.
            start_labels = [0]*len(row["text"])
            end_labels = [0]*len(row["text"])
            for event in row["event_list"]:
                trigger = event["trigger"]
                event_type = event["event_type"]
                if event_type != gold_event_type: continue
                trigger_start_index = event["trigger_start_index"]
                trigger_end_index = trigger_start_index + len(trigger) - 1
                start_labels[trigger_start_index] = 1
                end_labels[trigger_end_index] = 1
            results.append({"id":row["id"], "words":list(row["text"]), "start_labels":start_labels, "end_labels":end_labels, "event_type":gold_event_type})
    return results
## ccks格式
def role_process_bin_ccks(input_file, schema_file, is_predict=False):
    """Build binary argument-role examples from CCKS-format data.

    Assuming the event types are correct, emits one example per (event,
    role-of-that-event-type) pair, with 0/1 start/end label vectors
    marking that role's argument spans.

    Args:
        input_file: JSON-lines file; each row has "content", optionally
            "id", and (unless predicting) "events".
        schema_file: JSON-lines schema; each row carries "event_type"
            and "role_list".
        is_predict: when True, emit a single all-zero-label example per row.

    Returns:
        list of dicts ready to build InputExample objects.
    """
    role_dict = {}
    rows = open(schema_file, encoding='utf-8').read().splitlines()
    for row in rows:
        row = json.loads(row)
        event_type = row['event_type']
        role_dict[event_type] = []
        for role in row["role_list"]:
            role_dict[event_type].append(role["role"])

    rows = open(input_file, encoding='utf-8').read().splitlines()
    results = []
    count = 0
    for row in rows:
        if len(row) == 1: print(row)  # flag suspicious one-character lines
        row = json.loads(row)
        count += 1
        if "id" not in row:
            row["id"] = count
        if is_predict:
            # BUG FIX: start_labels/end_labels were referenced here without
            # ever being assigned, raising NameError on every predict run;
            # emit all-zero label vectors instead.
            start_labels = [0]*len(row["content"])
            end_labels = [0]*len(row["content"])
            results.append({"id":row["id"], "words":list(row["content"]), "start_labels":start_labels, "end_labels":end_labels})
            continue
        # Assume the event types are all correct: for each gold event, emit
        # one example per role defined for its event type.
        for event in row["events"]:
            event_type = event["type"]
            for gold_role in role_dict[event_type]:
                start_labels = [0]*len(row["content"])
                end_labels = [0]*len(row["content"])
                for arg in event["mentions"]:
                    role = arg['role']
                    if role == "trigger": continue
                    if role != gold_role: continue
                    argument_start_index, argument_end_index = arg["span"]
                    argument_end_index -= 1  # make span end inclusive
                    start_labels[argument_start_index] = 1
                    end_labels[argument_end_index] = 1
                results.append({"id":row["id"], "words":list(row["content"]), "event_type":event_type, "role":gold_role, \
                    "start_labels":start_labels, "end_labels":end_labels})
    return results
## lic格式
def role_process_bin_lic(input_file, schema_file, is_predict=False):
    """Build binary argument-role examples from LIC-format data.

    Assuming the event types are correct, emits one example per (event,
    role-of-that-event-type) pair, with 0/1 start/end label vectors
    marking that role's argument spans.

    Args:
        input_file: JSON-lines file; each row has "text", optionally "id",
            and (unless predicting) "event_list".
        schema_file: JSON-lines schema; each row carries "event_type"
            and "role_list".
        is_predict: when True, emit a single all-zero-label example per row.

    Returns:
        list of dicts ready to build InputExample objects.
    """
    role_dict = {}
    rows = open(schema_file, encoding='utf-8').read().splitlines()
    for row in rows:
        row = json.loads(row)
        event_type = row['event_type']
        role_dict[event_type] = []
        for role in row["role_list"]:
            role_dict[event_type].append(role["role"])

    rows = open(input_file, encoding='utf-8').read().splitlines()
    results = []
    count = 0
    for row in rows:
        if len(row) == 1: print(row)  # flag suspicious one-character lines
        row = json.loads(row)
        count += 1
        if "id" not in row:
            row["id"] = count
        if is_predict:
            # BUG FIX: start_labels/end_labels were referenced here without
            # ever being assigned (NameError); emit all-zero label vectors.
            start_labels = [0]*len(row["text"])
            end_labels = [0]*len(row["text"])
            results.append({"id":row["id"], "words":list(row["text"]), "start_labels":start_labels, "end_labels":end_labels})
            continue
        # Assume the event types are all correct.
        for event in row["event_list"]:
            event_type = event["event_type"]
            for gold_role in role_dict[event_type]:
                start_labels = [0]*len(row["text"])
                end_labels = [0]*len(row["text"])
                for arg in event["arguments"]:
                    role = arg['role']
                    if role != gold_role: continue
                    argument = arg['argument']
                    argument_start_index = arg["argument_start_index"]
                    argument_end_index = argument_start_index + len(argument) - 1
                    start_labels[argument_start_index] = 1
                    end_labels[argument_end_index] = 1
                results.append({"id":row["id"], "words":list(row["text"]), "event_type":event_type, "role":gold_role, \
                    "start_labels":start_labels, "end_labels":end_labels})
    return results
## ace格式
def role_process_bin_ace(input_file, schema_file, is_predict=False):
    """Build binary argument-role examples from ACE-format data.

    Assuming the event types are correct, emits one example per (event,
    role-of-that-event-type) pair, with 0/1 start/end label vectors
    marking that role's argument head spans.

    Args:
        input_file: single JSON array; each item has "words", "entities",
            optionally "id", and (unless predicting) "event-mentions".
        schema_file: JSON-lines schema; each row carries "event_type"
            and "role_list".
        is_predict: when True, emit a single all-zero-label example per row.

    Returns:
        list of dicts ready to build InputExample objects.
    """
    role_dict = {}
    rows = open(schema_file, encoding='utf-8').read().splitlines()
    for row in rows:
        row = json.loads(row)
        event_type = row['event_type']
        role_dict[event_type] = []
        for role in row["role_list"]:
            role_dict[event_type].append(role["role"])

    results = []
    count = 0
    # `with` closes the handle (it previously leaked until GC).
    with open(input_file, 'r', encoding='utf-8') as file:
        rows = json.load(file)
    for row in rows:
        count += 1
        if "id" not in row:
            row["id"] = count
        if is_predict:
            # BUG FIX: start_labels/end_labels were referenced here without
            # ever being assigned (NameError); emit all-zero label vectors.
            start_labels = [0]*len(row["words"])
            end_labels = [0]*len(row["words"])
            results.append({"id":row["id"], "words":list(row["words"]), "start_labels":start_labels, "end_labels":end_labels})
            continue
        entities = row['entities']
        # Assume the event types are all correct.
        for event in row["event-mentions"]:
            event_type = event["event_type"]
            for gold_role in role_dict[event_type]:
                start_labels = [0]*len(row["words"])
                end_labels = [0]*len(row["words"])
                # arguments[i] names the role filled by entities[i]
                for i, role in enumerate(event["arguments"]):
                    if role != gold_role: continue
                    entity = entities[i]
                    # head span token indices; "end" appears exclusive,
                    # hence the -1 to make it inclusive
                    argument_start_index = entity['head']["start"]
                    argument_end_index = entity['head']["end"] - 1
                    start_labels[argument_start_index] = 1
                    end_labels[argument_end_index] = 1
                results.append({"id":row["id"], "words":list(row["words"]), "event_type":event_type, "role":gold_role, \
                    "start_labels":start_labels, "end_labels":end_labels})
    return results
def read_examples_from_file(data_dir, schema_file, mode, task, dataset="ccks"):
    """Load ``<mode>.json`` from *data_dir* and convert rows to InputExamples.

    Args:
        data_dir: directory holding the dataset splits.
        schema_file: event-schema file for the dataset.
        mode: split name ("train"/"dev"/"test"), mapped to "<mode>.json".
        task: "trigger" or "role".
        dataset: "ccks", "lic" or "ace".

    Raises:
        ValueError: for an unsupported dataset/task combination (previously
            this fell through to a NameError on ``items``).
    """
    file_path = os.path.join(data_dir, "{}.json".format(mode))
    items = None
    if dataset == "ccks":
        if task == 'trigger': items = trigger_process_bin_ccks(file_path, schema_file,)
        if task == 'role': items = role_process_bin_ccks(file_path, schema_file,)
    elif dataset == "lic":
        if task == 'trigger': items = trigger_process_bin_lic(file_path, schema_file,)
        if task == 'role': items = role_process_bin_lic(file_path, schema_file,)
    elif dataset == "ace":
        if task == 'role': items = role_process_bin_ace(file_path, schema_file,)
    if items is None:
        raise ValueError(
            "unsupported dataset/task combination: {}/{}".format(dataset, task))
    return [InputExample(**item) for item in items]
def get_query_templates_trigger(dataset):
    """Load trigger-query templates from ./query_template/trigger/<dataset>.csv.

    Each CSV row is "event_type,description"; four query variants are built
    per event type, indexed 0-3 and selected later via ``nth_query``.
    """
    query_file = "./query_template/trigger/"+dataset+".csv"
    query_templates = dict()
    with open(query_file, "r", encoding='utf-8') as f:
        next(f)  # skip the CSV header line
        for line in f:
            # NOTE(review): both branches parse identically today; kept
            # separate in case the two formats diverge.
            if dataset == "ccks":
                event_type, description = line.strip().split(",")
            elif dataset == 'lic':
                event_type, description = line.strip().split(",")

            if event_type not in query_templates:
                query_templates[event_type] = list()
            # 0: bare event type
            query_templates[event_type].append(event_type)
            # 1: event type + description
            query_templates[event_type].append(event_type + " "+ description)
            # 2: natural-language question ("what is the trigger of ...?")
            query_templates[event_type].append(event_type + "的触发词是什么?" + "(" + description + ")" )
            # 3: event type + description twice
            query_templates[event_type].append(event_type + " " + description+ " "+ description)

            # query_templates[event_type][role].append(role + " in [trigger]")
            # query_templates[event_type][role].append(query[:-1] + " in [trigger]?")
    return query_templates
def get_query_templates_role(dataset):
    """Load role-query templates from ./query_template/role/<dataset>.csv.

    Builds six query variants per (event_type, role) pair, indexed 0-5 and
    selected later via ``nth_query``.  LIC rows carry only
    "event_type,role"; the Chinese name/description fields default to the
    role itself and empty strings.
    """
    query_file = "./query_template/role/"+dataset+".csv"
    query_templates = dict()
    with open(query_file, "r", encoding='utf-8') as f:
        next(f)  # skip the CSV header line
        for line in f:
            if dataset == "ccks":
                event_type, role, role_chinese, description, role_type = line.strip().split(",")
            elif dataset == 'lic':
                event_type, role = line.strip().split(",")
                role_chinese, description, role_type = role, "", ""

            if event_type not in query_templates:
                query_templates[event_type] = dict()
            if role not in query_templates[event_type]:
                query_templates[event_type][role] = list()
            # 0: bare role name
            query_templates[event_type][role].append(role_chinese)
            # 1: event type + role
            query_templates[event_type][role].append(event_type + " "+ role_chinese)
            # 2: role + description
            query_templates[event_type][role].append(role_chinese+ " "+ description)
            # 3: event type + role + description
            query_templates[event_type][role].append(event_type + " " + role_chinese+ " "+ description)
            # 4: natural-language question form
            query_templates[event_type][role].append(event_type + "中的" + role_chinese+ " "+ description+ " 是什么?")
            # 5: token-list form with [unusedN] separator markers
            query_templates[event_type][role].append(["[unused2]", "[unused3]"] +list(event_type) + ["[unused4]", "[unused5]"] + list(role_chinese)+ ["[unused6]", "[unused7]"]+ list(description) + ["[unused8]", "[unused9]"])

            # query_templates[event_type][role].append(role + " in [trigger]")
            # query_templates[event_type][role].append(query[:-1] + " in [trigger]?")
    return query_templates
def get_query_templates(dataset, task):
    """Return the query-template table for *task* ("trigger" or "role")."""
    loaders = {
        "role": get_query_templates_role,
        "trigger": get_query_templates_trigger,
    }
    loader = loaders.get(task)
    if loader is not None:
        return loader(dataset)
def convert_examples_to_features(
    examples,
    label_list,
    max_seq_length,
    tokenizer,
    cls_token_at_end=False,
    cls_token="[CLS]",
    cls_token_segment_id=1,
    sep_token="[SEP]",
    sep_token_extra=False,
    pad_on_left=False,
    pad_token=0,
    pad_token_segment_id=0,
    pad_token_label_id=-100,
    sequence_a_segment_id=0,
    sequence_b_segment_id=1,
    mask_padding_with_zero=True,
    nth_query=2,
    dataset='ccks',
    task='trigger'
):
    """ Loads a data file into a list of `InputBatch`s
        `cls_token_at_end` define the location of the CLS token:
            - False (Default, BERT/XLM pattern): [CLS] + A + [SEP] + B + [SEP]
            - True (XLNet/GPT pattern): A + [SEP] + B + [SEP] + [CLS]
        `cls_token_segment_id` define the segment id associated to the CLS token (0 for BERT, 2 for XLNet)

        Each example is encoded as "[CLS] query [SEP] passage [SEP]": the query
        is template ``nth_query`` for the example's event type (and role, when
        task='role'); the passage is ``example.words``.  Query tokens receive
        ``pad_token_label_id``; passage tokens keep their span start/end labels.
        Words that tokenize to more than one word-piece are truncated to the
        first piece so that tokens and labels stay aligned one-to-one.
    """
    query_templates = get_query_templates(dataset, task)
    features = []
    for (ex_index, example) in enumerate(examples):
        if ex_index % 10000 == 0:
            logger.info("Writing example %d of %d", ex_index, len(examples))
        # print(example.words, example.labels)
        # print(len(example.words), len(example.labels))
        tokens = []
        start_label_ids = []
        end_label_ids = []
        token_type_ids = []
        # query
        if task=='role':
            event_type, role = example.event_type, example.role
            query = query_templates[event_type][role][nth_query]
        elif task=='trigger':
            event_type = example.event_type
            query = query_templates[event_type][nth_query]
        for i in range(len(query)):
            word = query[i]
            if 'unused' in word:
                # reserved [unusedN] marker tokens must survive tokenization verbatim
                word_tokens = [word]
            else:
                word_tokens = tokenizer.tokenize(word)
            if len(word_tokens)==1:
                tokens.extend(word_tokens)
            if len(word_tokens)>1:
                # keep only the first word-piece so tokens/labels stay one-to-one
                print(word,">1")
                tokens.extend(word_tokens[:1])
                pass
            if len(word_tokens)<1:
                # tokenizer produced nothing (these are almost always whitespace);
                # emit a placeholder so alignment is preserved
                tokens.extend(["[unused1]"])
                # continue
            # query tokens never carry span labels
            start_label_ids.append(pad_token_label_id)
            end_label_ids.append(pad_token_label_id)
        # [SEP]
        tokens += [sep_token]
        start_label_ids += [pad_token_label_id]
        end_label_ids += [pad_token_label_id]
        token_type_ids = [sequence_a_segment_id] * len(tokens)
        # paragraph
        for word, start_label, end_label in zip(example.words, example.start_labels, example.end_labels):
            word_tokens = tokenizer.tokenize(word)
            if len(word_tokens)==1:
                tokens.extend(word_tokens)
            if len(word_tokens)>1:
                print(word,">1")
                tokens.extend(word_tokens[:1])
                # tokens.extend(word_tokens)
                # label_ids.extend([label_map[label]] + [pad_token_label_id] * (len(word_tokens) - 1))
            if len(word_tokens)<1:
                # tokenizer produced nothing (almost always whitespace); placeholder keeps alignment
                tokens.extend(["[unused1]"])
                # continue
            start_label_ids.append(start_label)
            end_label_ids.append(end_label)
            token_type_ids.append(sequence_b_segment_id)
            # if len(tokens)!= len(label_ids):
            # print(word, word_tokens, tokens, label_ids)
            # print(len(tokens),len(label_ids))
        # Account for [CLS] and [SEP] with "- 2" and with "- 3" for RoBERTa.
        special_tokens_count = 3 if sep_token_extra else 2
        if len(tokens) > max_seq_length - special_tokens_count:
            tokens = tokens[: (max_seq_length - special_tokens_count)]
            start_label_ids = start_label_ids[: (max_seq_length - special_tokens_count)]
            end_label_ids = end_label_ids[: (max_seq_length - special_tokens_count)]
            token_type_ids = token_type_ids[: (max_seq_length - special_tokens_count)]
        # [SEP]
        tokens += [sep_token]
        start_label_ids += [pad_token_label_id]
        end_label_ids += [pad_token_label_id]
        token_type_ids += [sequence_b_segment_id]
        # The convention in BERT is:
        # (a) For sequence pairs:
        #  tokens:   [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
        #  type_ids:   0   0  0    0    0     0       0   0   1  1  1  1   1   1
        # (b) For single sequences:
        #  tokens:   [CLS] the dog is hairy . [SEP]
        #  type_ids:   0   0   0   0  0     0   0
        #
        # Where "type_ids" are used to indicate whether this is the first
        # sequence or the second sequence. The embedding vectors for `type=0` and
        # `type=1` were learned during pre-training and are added to the wordpiece
        # embedding vector (and position vector). This is not *strictly* necessary
        # since the [SEP] token unambiguously separates the sequences, but it makes
        # it easier for the model to learn the concept of sequences.
        #
        # For classification tasks, the first vector (corresponding to [CLS]) is
        # used as as the "sentence vector". Note that this only makes sense because
        # the entire model is fine-tuned.
        if sep_token_extra:
            # roberta uses an extra separator b/w pairs of sentences
            tokens += [sep_token]
            start_label_ids += [pad_token_label_id]
            end_label_ids += [pad_token_label_id]
            token_type_ids += [sequence_b_segment_id]
        if cls_token_at_end:
            tokens += [cls_token]
            start_label_ids += [pad_token_label_id]
            end_label_ids += [pad_token_label_id]
            token_type_ids += [cls_token_segment_id]
        else:
            tokens = [cls_token] + tokens
            start_label_ids = [pad_token_label_id] + start_label_ids
            end_label_ids = [pad_token_label_id] + end_label_ids
            token_type_ids = [cls_token_segment_id] + token_type_ids
        input_ids = tokenizer.convert_tokens_to_ids(tokens)
        # print(len(tokens), len(input_ids), len(label_ids))
        # The mask has 1 for real tokens and 0 for padding tokens. Only real
        # tokens are attended to.
        attention_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
        # Zero-pad up to the sequence length.
        padding_length = max_seq_length - len(input_ids)
        if pad_on_left:
            input_ids = ([pad_token] * padding_length) + input_ids
            attention_mask = ([0 if mask_padding_with_zero else 1] * padding_length) + attention_mask
            token_type_ids = ([pad_token_segment_id] * padding_length) + token_type_ids
            start_label_ids = ([pad_token_label_id] * padding_length) + start_label_ids
            end_label_ids = ([pad_token_label_id] * padding_length) + end_label_ids
        else:
            input_ids += [pad_token] * padding_length
            attention_mask += [0 if mask_padding_with_zero else 1] * padding_length
            token_type_ids += [pad_token_segment_id] * padding_length
            start_label_ids += [pad_token_label_id] * padding_length
            end_label_ids += [pad_token_label_id] * padding_length
        # print(len(label_ids), max_seq_length)
        assert len(input_ids) == max_seq_length
        assert len(attention_mask) == max_seq_length
        assert len(token_type_ids) == max_seq_length
        assert len(start_label_ids) == max_seq_length
        assert len(end_label_ids) == max_seq_length
        if ex_index < 5:
            logger.info("*** Example ***")
            logger.info("id: %s", example.id)
            logger.info("tokens: %s", " ".join([str(x) for x in tokens]))
            logger.info("input_ids: %s", " ".join([str(x) for x in input_ids]))
            logger.info("attention_mask: %s", " ".join([str(x) for x in attention_mask]))
            logger.info("token_type_ids: %s", " ".join([str(x) for x in token_type_ids]))
            logger.info("start_label_ids: %s", " ".join([str(x) for x in start_label_ids]))
            logger.info("end_label_ids: %s", " ".join([str(x) for x in end_label_ids]))
        features.append(
            InputFeatures(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, \
                start_label_ids=start_label_ids, end_label_ids= end_label_ids)
        )
    return features
|
#All the classes for pieces
#Simply describes how they are drawn
from vpython import*
import numpy as np
import fvector as fvec
class Piece:
    """Parent class for all the piece subclasses.

    Subclasses build ``self.base`` (a vpython object or compound); this class
    provides movement, selection, legal-move highlighting, and conversion from
    drawn coordinates back to 4D board coordinates via the module-level
    ytoy/ztoz/ztod helpers.
    """
    def __init__(self):
        self.base = None     # the drawn vpython object (set by subclasses)
        self.ogcolor = None  # original color, restored when a move completes
        self.boxes = []      # green highlight boxes created by glowlegal()
    def move(self,newPos):
        """Move the drawn piece to *newPos*, refresh the cached board
        coordinates, restore the original color, and return True."""
        self.base.pos = newPos
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x,self.y,self.z,self.d)
        self.base.color = self.ogcolor
        self.started = True  # the piece has made at least one move
        return True
    def __eq__(self, obj):
        # NOTE(review): *obj* is dereferenced as ``obj.pos``, so callers appear
        # to compare against a vpython object, not a Piece -- confirm call sites.
        if obj is None:
            # Fix: previously fell through and returned None implicitly; __eq__
            # should return a bool (truthiness for callers is unchanged).
            return False
        return self.base.pos.equals(obj.pos)
    def setvisible(self,state):
        'Makes more complex shapes invisible'
        if hasattr(self.base,'objects'):
            # Compound shapes: toggle each sub-object individually.
            for obj in self.base.objects:
                obj.visible = state
        else:
            self.base.visible = state
    def select(self):
        """Toggle this piece's selected flag."""
        self.selected = not self.selected
    def completepositions(self,x,y,z,d,kingpos,board):
        """Return legal destinations from (x, y, z, d) after removing moves
        that would leave the king in check."""
        nowvec = fvec.Fvector(x,y,z,d)
        return nowvec.removecheckpos(self,board,kingpos)
    def glowlegal(self,kingpos,board):
        """Draw a green box on every square this piece may legally move to."""
        x = round(self.base.pos.x)
        y = round(ytoy(self.base.pos.y))
        z = round(ztoz(self.base.pos.z))
        d = round(ztod(self.base.pos.z))
        positions = self.completepositions(x,y,z,d,kingpos,board)
        for boxx in positions:
            # Drawn coords: y is scaled by 2.5 per level, z offset by 5 per dimension.
            self.boxes.append(box(pos=vec(boxx.x,(boxx.y*2.5),boxx.z+(5*boxx.d)),length=1,height=0.1,width=1,color=color.green))
    def unglow(self):
        """Repaint the highlighted squares in their checkerboard colors and
        forget the highlight boxes."""
        for boxx in self.boxes:
            x = round(boxx.pos.x)
            y = round(ytoy(boxx.pos.y))
            z = round(ztoz(boxx.pos.z))
            d = round(ztod(boxx.pos.z))
            # Checkerboard parity over all four board coordinates.
            if (x+y+z+d) % 2 == 1:
                sColor = color.blue
            else:
                sColor = color.white
            box(pos=vector(x,(y*2.5),z+(5*d)),length=1,height=0.1,width=1,color=sColor)
        self.boxes.clear()
    def getpos(self):
        """Return this piece's current board position as an Fvector."""
        x = round(self.base.pos.x)
        y = round(ytoy(self.base.pos.y))
        z = round(ztoz(self.base.pos.z))
        d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(x,y,z,d)
        return self.posvec
class Pawn(Piece):
    """A pawn, drawn as a single cone."""
    def __init__(self, spos, sColor, board):
        self.board = board
        self.score = 1
        self.started = False
        self.selected = False
        self.base = cone(pos=(spos), radius=0.4, axis=vector(0, 1, 0), color=sColor)
        self.ogcolor = sColor
        self.boxes = []
        self.positions = None
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x, self.y, self.z, self.d)
    def __repr__(self):
        return 'p'
    def getpositions(self, x, y, z, d, board):
        """Collect pawn moves: forward push(es) plus the 4D capture probes."""
        self.posvec = fvec.Fvector(x, y, z, d)
        # White pawns advance in +z, the other side in -z.
        forward = 1 if self.ogcolor.equals(color.white) else -1
        self.positions = None
        moves = self.posvec.pawn(0, 0, forward, 0, True, board)
        if not self.started:
            # The double-step is only available before the pawn's first move.
            moves += self.posvec.pawn(0, 0, forward, 0, False, board)
        for probe in ((1, 0, forward, 0), (-1, 0, forward, 0),
                      (0, 1, forward, 0), (0, -1, forward, 0),
                      (0, 0, forward, forward)):
            moves += self.posvec.pawn(*probe, True, board)
        self.positions = moves
        return moves
class Rook(Piece):
    """A rook, drawn as a cylinder body topped by a wider cap."""
    def __init__(self, spos, sColor, board):
        self.board = board
        self.score = 5
        self.started = False
        self.selected = False
        self.ogcolor = sColor
        self.boxes = []
        body = cylinder(pos=spos+vector(0,0,0), radius=0.3, length=0.8, axis=vector(0,1,0), color=sColor)
        cap = cylinder(pos=spos+vector(0,0.8,0), length=0.2, radius=0.4, axis=vector(0,1,0), color=sColor)
        self.base = compound([body, cap])
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x, self.y, self.z, self.d)
    def __repr__(self):
        return 'r'
    def getpositions(self, x, y, z, d, board):
        """Collect sliding moves along all 8 single-axis directions in 4D."""
        self.posvec = fvec.Fvector(x, y, z, d)
        moves = []
        # Probe order matches the original per-call sequence: +/- per axis.
        for step in ((1, 0, 0, 0), (-1, 0, 0, 0), (0, 1, 0, 0), (0, -1, 0, 0),
                     (0, 0, 1, 0), (0, 0, -1, 0), (0, 0, 0, 1), (0, 0, 0, -1)):
            moves += self.posvec.getPosArray(*step, False, board)
        self.positions = moves
        return moves
class Knight(Piece):
    """A knight, drawn as a body box with a smaller head box facing forward."""
    def __init__(self, spos, sColor, board):
        self.board = board
        self.score = 3
        self.started = False
        self.selected = False
        self.ogcolor = sColor
        self.boxes = []
        # Red knights face the opposite way along z.
        face = -0.1 if sColor == color.red else 0.1
        body = box(pos=spos+(vector(0,0.4,0)), width=0.4, length=0.8, height=0.4, axis=vector(0,1,0), color=sColor)
        head = box(pos=spos+(vector(0,0.8,face)), width=0.6, length=0.4, height=0.4, axis=vector(0,1,0), color=sColor)
        self.base = compound([body, head])
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x, self.y, self.z, self.d)
    def __repr__(self):
        return 'k'
    def getpositions(self, x, y, z, d, board):
        """Collect all 48 (±2, ±1) knight jumps over the four axes.

        The offsets are generated in the same order as the original explicit
        call sequence: sign pairs (+2,+1), (-2,+1), (+2,-1), (-2,-1), and within
        each pair the 2-step axis varies slowest.
        """
        self.posvec = fvec.Fvector(x, y, z, d)
        offsets = []
        for two, one in ((2, 1), (-2, 1), (2, -1), (-2, -1)):
            for axis2 in range(4):
                for axis1 in range(4):
                    if axis1 == axis2:
                        continue
                    step = [0, 0, 0, 0]
                    step[axis2] = two
                    step[axis1] = one
                    offsets.append(step)
        moves = []
        for dx, dy, dz, dd in offsets:
            moves += self.posvec.getPosArray(dx, dy, dz, dd, True, board)
        self.positions = moves
        return moves
class Bishop(Piece):
    """A bishop, drawn as a cylinder body topped by a cone."""
    def __init__(self, spos, sColor, board):
        self.board = board
        self.score = 3
        self.started = False
        self.selected = False
        self.ogcolor = sColor
        self.boxes = []
        body = cylinder(pos=spos+vector(0,0,0), radius=0.2, length=0.8, axis=vector(0,1,0), color=sColor)
        tip = cone(pos=spos+vector(0,0.8,0), radius=0.2, axis=vector(0,1,0), color=sColor)
        self.base = compound([body, tip])
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x, self.y, self.z, self.d)
    def __repr__(self):
        return 'b'
    def getpositions(self, x, y, z, d, board):
        """Collect sliding moves along all 24 two-axis diagonals in 4D.

        Offsets are generated in the original probe order: sign pairs
        (+,+), (-,+), (+,-), (-,-) over each unordered axis pair.
        """
        self.posvec = fvec.Fvector(x, y, z, d)
        pairs = [(a, b) for a in range(4) for b in range(4) if a < b]
        moves = []
        for sa, sb in ((1, 1), (-1, 1), (1, -1), (-1, -1)):
            for a, b in pairs:
                step = [0, 0, 0, 0]
                step[a] = sa
                step[b] = sb
                moves += self.posvec.getPosArray(*step, False, board)
        self.positions = moves
        return moves
class Queen(Piece):
    """A queen, drawn as a cylinder body topped by a sphere."""
    def __init__(self, spos, sColor, board):
        self.board = board
        self.score = 9
        self.started = False
        self.selected = False
        self.ogcolor = sColor
        self.boxes = []
        body = cylinder(pos=spos+vector(0,0,0), radius=0.4, length=1.0, axis=vector(0,1,0), color=sColor)
        crown = sphere(radius=0.4, pos=spos+vector(0,1.4,0), color=sColor)
        self.base = compound([body, crown])
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x, self.y, self.z, self.d)
    def __repr__(self):
        return 'q'
    def getpositions(self, x, y, z, d, board):
        """Collect rook-style axis slides plus bishop-style diagonal slides.

        Offsets follow the original probe order: the 8 single-axis directions
        first, then the 24 two-axis diagonals by sign pair (+,+), (-,+),
        (+,-), (-,-).
        """
        self.posvec = fvec.Fvector(x, y, z, d)
        offsets = []
        for axis in range(4):
            for sign in (1, -1):
                step = [0, 0, 0, 0]
                step[axis] = sign
                offsets.append(step)
        pairs = [(a, b) for a in range(4) for b in range(4) if a < b]
        for sa, sb in ((1, 1), (-1, 1), (1, -1), (-1, -1)):
            for a, b in pairs:
                step = [0, 0, 0, 0]
                step[a] = sa
                step[b] = sb
                offsets.append(step)
        moves = []
        for dx, dy, dz, dd in offsets:
            moves += self.posvec.getPosArray(dx, dy, dz, dd, False, board)
        self.positions = moves
        return moves
class King(Piece):
    """A king, drawn as a cylinder body topped by a cube."""
    def __init__(self, spos, sColor, board):
        self.board = board
        self.score = 1e8
        self.started = False
        self.selected = False
        self.ogcolor = sColor
        self.boxes = []
        body = cylinder(pos=spos+vector(0,0,0), radius=0.4, length=1.2, axis=vector(0,1,0), color=sColor)
        crown = box(height=0.6, width=0.6, length=0.6, pos=spos+vector(0,1.5,0), color=sColor)
        self.base = compound([body, crown])
        self.x = round(self.base.pos.x)
        self.y = round(ytoy(self.base.pos.y))
        self.z = round(ztoz(self.base.pos.z))
        self.d = round(ztod(self.base.pos.z))
        self.posvec = fvec.Fvector(self.x, self.y, self.z, self.d)
    def __repr__(self):
        return 'K'
    def getpositions(self, x, y, z, d, board):
        """Collect single-step moves in the same 32 directions a queen slides.

        The True flag limits each probe to one step.  Offsets follow the
        original probe order: the 8 single-axis directions, then the 24
        two-axis diagonals by sign pair (+,+), (-,+), (+,-), (-,-).
        """
        self.posvec = fvec.Fvector(x, y, z, d)
        offsets = []
        for axis in range(4):
            for sign in (1, -1):
                step = [0, 0, 0, 0]
                step[axis] = sign
                offsets.append(step)
        pairs = [(a, b) for a in range(4) for b in range(4) if a < b]
        for sa, sb in ((1, 1), (-1, 1), (1, -1), (-1, -1)):
            for a, b in pairs:
                step = [0, 0, 0, 0]
                step[a] = sa
                step[b] = sb
                offsets.append(step)
        moves = []
        for dx, dy, dz, dd in offsets:
            moves += self.posvec.getPosArray(dx, dy, dz, dd, True, board)
        self.positions = moves
        return moves
def ztoz(z):
    """Fold a drawn z coordinate (boards are offset by 5 per dimension)
    back into a 0-4 board coordinate."""
    for offset in (15, 10, 5):
        if z >= offset:
            return z - offset
    return z
def ztod(d):
    """Map a drawn z coordinate to its board (dimension) index: one board
    every 5 units."""
    quotient = d / 5
    return int(quotient)
def ytoy(y):
    """Map a drawn y coordinate to its board level (levels are 2.5 apart)."""
    level = y / 2.5
    return level
import numpy as np
from graphblas import Matrix, Vector, binary, indexunary, monoid, replace, select, unary
from graphblas.semiring import any_pair, min_plus
from .._bfs import _bfs_level, _bfs_levels, _bfs_parent, _bfs_plain
from ..exceptions import NoPath, Unbounded
# Public API: the Bellman-Ford shortest-path routines exported by this module.
__all__ = [
    "single_source_bellman_ford_path_length",
    "bellman_ford_path",
    "bellman_ford_path_length",
    "bellman_ford_path_lengths",
    "negative_edge_cycle",
]
def _bellman_ford_path_length(G, source, target=None, *, cutoff=None, name):
    """Shared Bellman-Ford driver.

    Returns a Vector of path lengths from *source* to every reachable node
    when *target* is None, otherwise the scalar length to *target*.

    Raises
    ------
    NoPath
        If *target* is given but not reachable from *source*.
    Unbounded
        If a negative cycle is detected.
    """
    # No need for `is_weighted=` keyword, b/c this is assumed to be weighted (I think)
    src_id = G._key_to_id[source]
    if target is not None:
        dst_id = G._key_to_id[target]
    else:
        dst_id = None
    if G.get_property("is_iso"):
        # If the edges are iso-valued (and positive), then we can simply do level BFS
        is_negative, iso_value = G.get_properties("has_negative_edges+ iso_value")
        if not is_negative:
            if cutoff is not None:
                # BFS counts hops, so convert the weighted cutoff to a hop count.
                cutoff = int(cutoff // iso_value)
            d = _bfs_level(G, source, target, cutoff=cutoff, dtype=iso_value.dtype)
            if dst_id is not None:
                d = d.get(dst_id)
                if d is None:
                    raise NoPath(f"node {target} not reachable from {source}")
            if iso_value != 1:
                # Scale hop counts back to weighted distances.
                d *= iso_value
            return d
        # It's difficult to detect negative cycles with BFS
        if G._A[src_id, src_id].get() is not None:
            # Negative self-loop on the source is itself a negative cycle.
            raise Unbounded("Negative cycle detected.")
        if not G.is_directed() and G._A[src_id, :].nvals > 0:
            # For undirected graphs, any negative edge is a cycle
            raise Unbounded("Negative cycle detected.")
    # Use `offdiag` instead of `A`, b/c self-loops don't contribute to the result,
    # and negative self-loops are easy negative cycles to avoid.
    # We check if we hit a self-loop negative cycle at the end.
    if dst_id is None:
        A, has_negative_diagonal = G.get_properties("offdiag has_negative_diagonal")
    else:
        A, is_negative, has_negative_diagonal = G.get_properties(
            "offdiag has_negative_edges- has_negative_diagonal"
        )
    if A.dtype == bool:
        # Should we upcast e.g. INT8 to INT64 as well?
        dtype = int
    else:
        dtype = A.dtype
    n = A.nrows
    d = Vector(dtype, n, name="single_source_bellman_ford_path_length")
    d[src_id] = 0
    cur = d.dup(name="cur")
    mask = Vector(bool, n, name="mask")
    one = unary.one[bool]
    for _i in range(n - 1):
        # This is a slightly modified Bellman-Ford algorithm.
        # `cur` is the current frontier of values that improved in the previous iteration.
        # This means that in this iteration we drop values from `cur` that are not better.
        cur << min_plus(cur @ A)
        if cutoff is not None:
            cur << select.valuele(cur, cutoff)
        # Mask is True where cur not in d or cur < d
        mask << one(cur)
        mask(binary.second) << binary.lt(cur & d)
        # Drop values from `cur` that didn't improve
        cur(mask.V, replace) << cur
        if cur.nvals == 0:
            break
        # Update `d` with values that improved
        d(cur.S) << cur
        if dst_id is not None and not is_negative:
            # Limit exploration if we have a target
            cutoff = cur.get(dst_id, cutoff)
    else:
        # Check for negative cycle when for loop completes without breaking
        cur << min_plus(cur @ A)
        if cutoff is not None:
            cur << select.valuele(cur, cutoff)
        mask << binary.lt(cur & d)
        if dst_id is None and mask.reduce(monoid.lor) or dst_id is not None and mask.get(dst_id):
            raise Unbounded("Negative cycle detected.")
    if has_negative_diagonal:
        # We removed diagonal entries above, so check if we visited one with a negative weight
        diag = G.get_property("diag")
        cur << select.valuelt(diag, 0)
        if any_pair(d @ cur):
            raise Unbounded("Negative cycle detected.")
    if dst_id is not None:
        d = d.get(dst_id)
        if d is None:
            raise NoPath(f"node {target} not reachable from {source}")
    return d
def single_source_bellman_ford_path_length(
    G, source, *, cutoff=None, name="single_source_bellman_ford_path_length"
):
    """Bellman-Ford path lengths from *source* to every reachable node."""
    return _bellman_ford_path_length(G, source, None, cutoff=cutoff, name=name)
def bellman_ford_path_length(G, source, target):
    """Bellman-Ford length of the shortest path from *source* to *target*."""
    length = _bellman_ford_path_length(G, source, target, name="bellman_ford_path_length")
    return length
def bellman_ford_path_lengths(G, nodes=None, *, expand_output=False):
    """Extra parameter: expand_output

    Compute Bellman-Ford path lengths from every node in *nodes* (all nodes
    when None) to every other node; raises Unbounded on a negative cycle.

    Parameters
    ----------
    expand_output : bool, default False
        When False, the returned Matrix has one row per node in nodes.
        When True, the returned Matrix has the same shape as the input Matrix.
    """
    # Same algorithms as in `single_source_bellman_ford_path_length`, but with
    # `Cur` as a Matrix with each row corresponding to a source node.
    if G.get_property("is_iso"):
        # Iso-valued positive weights: multi-source BFS levels scaled by the weight.
        is_negative, iso_value = G.get_properties("has_negative_edges+ iso_value")
        if not is_negative:
            D = _bfs_levels(G, nodes, dtype=iso_value.dtype)
            if iso_value != 1:
                D *= iso_value
            if nodes is not None and expand_output and D.ncols != D.nrows:
                # Scatter rows back to their original node ids to match A's shape.
                ids = G.list_to_ids(nodes)
                rv = Matrix(D.dtype, D.ncols, D.ncols, name=D.name)
                rv[ids, :] = D
                return rv
            return D
        if not G.is_directed():
            # For undirected graphs, any negative edge is a cycle
            if nodes is not None:
                ids = G.list_to_ids(nodes)
                if G._A[ids, :].nvals > 0:
                    raise Unbounded("Negative cycle detected.")
            elif G._A.nvals > 0:
                raise Unbounded("Negative cycle detected.")
    A, has_negative_diagonal = G.get_properties("offdiag has_negative_diagonal")
    if A.dtype == bool:
        dtype = int
    else:
        dtype = A.dtype
    n = A.nrows
    if nodes is None:
        # TODO: `D = Vector.from_scalar(0, n, dtype).diag()`
        D = Vector(dtype, n, name="bellman_ford_path_lengths_vector")
        D << 0
        D = D.diag(name="bellman_ford_path_lengths")
    else:
        # One row per requested source, seeded with distance 0 at that source.
        ids = G.list_to_ids(nodes)
        D = Matrix.from_coo(
            np.arange(len(ids), dtype=np.uint64),
            ids,
            0,
            dtype,
            nrows=len(ids),
            ncols=n,
            name="bellman_ford_path_lengths",
        )
    Cur = D.dup(name="Cur")
    Mask = Matrix(bool, D.nrows, D.ncols, name="Mask")
    one = unary.one[bool]
    for _i in range(n - 1):
        # Matrix analogue of the single-source loop: relax all frontiers at once.
        Cur << min_plus(Cur @ A)
        Mask << one(Cur)
        Mask(binary.second) << binary.lt(Cur & D)
        Cur(Mask.V, replace) << Cur
        if Cur.nvals == 0:
            break
        D(Cur.S) << Cur
    else:
        # Loop ran to completion: one more relaxation that still improves
        # any distance proves a negative cycle.
        Cur << min_plus(Cur @ A)
        Mask << binary.lt(Cur & D)
        if Mask.reduce_scalar(monoid.lor):
            raise Unbounded("Negative cycle detected.")
    if has_negative_diagonal:
        # The diagonal was excluded above; a visited negative self-loop is a cycle.
        diag = G.get_property("diag")
        cur = select.valuelt(diag, 0)
        if any_pair(D @ cur).nvals > 0:
            raise Unbounded("Negative cycle detected.")
    if nodes is not None and expand_output and D.ncols != D.nrows:
        rv = Matrix(D.dtype, n, n, name=D.name)
        rv[ids, :] = D
        return rv
    return D
def _reconstruct_path_from_parents(G, parents, src, dst):
indices, values = parents.to_coo(sort=False)
d = dict(zip(indices.tolist(), values.tolist()))
if dst not in d:
return []
cur = dst
path = [cur]
while cur != src:
cur = d[cur]
path.append(cur)
return G.list_to_keys(reversed(path))
def bellman_ford_path(G, source, target):
    """Return the shortest (Bellman-Ford) path from *source* to *target*
    as a list of node keys; raises Unbounded on a negative cycle."""
    src_id = G._key_to_id[source]
    dst_id = G._key_to_id[target]
    if G.get_property("is_iso"):
        # If the edges are iso-valued (and positive), then we can simply do level BFS
        is_negative = G.get_property("has_negative_edges+")
        if not is_negative:
            p = _bfs_parent(G, source, target)
            return _reconstruct_path_from_parents(G, p, src_id, dst_id)
        raise Unbounded("Negative cycle detected.")
    A, is_negative, has_negative_diagonal = G.get_properties(
        "offdiag has_negative_edges- has_negative_diagonal"
    )
    if A.dtype == bool:
        # Should we upcast e.g. INT8 to INT64 as well?
        dtype = int
    else:
        dtype = A.dtype
    cutoff = None
    n = A.nrows
    d = Vector(dtype, n, name="bellman_ford_path_length")
    d[src_id] = 0
    # p[v] holds the id of v's parent on the current best path from src.
    p = Vector(int, n, name="bellman_ford_path_parent")
    p[src_id] = src_id
    prev = d.dup(name="prev")
    cur = Vector(dtype, n, name="cur")
    indices = Vector(int, n, name="indices")
    mask = Vector(bool, n, name="mask")
    B = Matrix(dtype, n, n, name="B")
    Indices = Matrix(int, n, n, name="Indices")
    cols = prev.to_coo(values=False)[0]
    one = unary.one[bool]
    for _i in range(n - 1):
        # This is a slightly modified Bellman-Ford algorithm.
        # `cur` is the current frontier of values that improved in the previous iteration.
        # This means that in this iteration we drop values from `cur` that are not better.
        cur << min_plus(prev @ A)
        if cutoff is not None:
            cur << select.valuele(cur, cutoff)
        # Mask is True where cur not in d or cur < d
        mask << one(cur)
        mask(binary.second) << binary.lt(cur & d)
        # Drop values from `cur` that didn't improve
        cur(mask.V, replace) << cur
        if cur.nvals == 0:
            break
        # Update `d` with values that improved
        d(cur.S) << cur
        if not is_negative:
            # Limit exploration if we have a target
            cutoff = cur.get(dst_id, cutoff)
        # Now try to find the parents!
        # This is also not standard. Typically, UDTs and UDFs are used to keep
        # track of both the minimum element and the parent id at the same time.
        # Only include rows and columns that were used this iteration.
        rows = cols
        cols = cur.to_coo(values=False)[0]
        B.clear()
        B[rows, cols] = A[rows, cols]
        # Reverse engineer to determine parent
        B << binary.plus(prev & B)
        B << binary.iseq(B & cur)
        B << select.valuene(B, False)
        Indices << indexunary.rowindex(B)
        indices << Indices.reduce_columnwise(monoid.min)
        p(indices.S) << indices
        prev, cur = cur, prev
    else:
        # Check for negative cycle when for loop completes without breaking
        cur << min_plus(prev @ A)
        if cutoff is not None:
            cur << select.valuele(cur, cutoff)
        mask << binary.lt(cur & d)
        if mask.get(dst_id):
            raise Unbounded("Negative cycle detected.")
    path = _reconstruct_path_from_parents(G, p, src_id, dst_id)
    if has_negative_diagonal and path:
        # The diagonal was excluded above; check for a negative self-loop on
        # or reachable from the computed path.
        mask.clear()
        mask[G.list_to_ids(path)] = True
        diag = G.get_property("diag", mask=mask.S)
        if diag.nvals > 0:
            raise Unbounded("Negative cycle detected.")
        mask << binary.first(mask & cur)  # mask(cur.S, replace) << mask
        if mask.nvals > 0:
            # Is there a path from any visited node with negative self-loop to target?
            # We could actually stop as soon as any from `path` is visited
            # NOTE(review): `to_coo(values=False)[0]` is already the index array
            # (cf. `cols = prev.to_coo(values=False)[0]` above), so unpacking it
            # into two names here looks wrong -- confirm against python-graphblas.
            indices, _ = mask.to_coo(values=False)[0]
            q = _bfs_plain(G, target=target, index=indices, cutoff=_i)
            if dst_id in q:
                raise Unbounded("Negative cycle detected.")
    return path
def negative_edge_cycle(G):
    """Return True if graph ``G`` contains a cycle whose total weight is negative.

    Runs Bellman-Ford-style relaxation with GraphBLAS min-plus products:
    distances are relaxed at most ``n - 1`` times starting from every node with
    edges; if one extra relaxation still improves some distance, a negative
    cycle must exist.
    """
    # TODO: use a heuristic to try to stop early
    if G.is_directed():
        deg = "total_degrees-"
    else:
        deg = "degrees-"
    A, degrees, has_negative_diagonal, has_negative_edges = G.get_properties(
        f"offdiag {deg} has_negative_diagonal has_negative_edges-"
    )
    # A negative self-loop is itself a negative cycle.
    if has_negative_diagonal:
        return True
    # Without any negative edge there can be no negative cycle.
    if not has_negative_edges:
        return False
    if A.dtype == bool:
        # Should we upcast e.g. INT8 to INT64 as well?
        dtype = int
    else:
        dtype = A.dtype
    n = A.nrows
    # Begin from every node that has edges
    d = Vector(dtype, n, name="negative_edge_cycle")
    d(degrees.S) << 0
    cur = d.dup(name="cur")
    mask = Vector(bool, n, name="mask")
    one = unary.one[bool]
    for _i in range(n - 1):
        # Relax all edges once: candidate distances via min-plus semiring.
        cur << min_plus(cur @ A)
        # mask is True where cur improved on d (or d has no value there).
        mask << one(cur)
        mask(binary.second) << binary.lt(cur & d)
        # Keep only the improved entries as the next frontier.
        cur(mask.V, replace) << cur
        if cur.nvals == 0:
            # No distance improved; relaxation converged, so no negative cycle.
            return False
        d(cur.S) << cur
    # One extra relaxation: any further improvement implies a negative cycle.
    cur << min_plus(cur @ A)
    mask << binary.lt(cur & d)
    if mask.reduce(monoid.lor):
        return True
    return False
|
import os, sys
import numpy as np
import pathlib
import glob
import scipy.io as sio
def env():
    """Return the absolute directory containing this module.

    Fixed to use ``os.path.dirname`` instead of manually splitting on '/',
    which broke on Windows path separators.
    """
    return os.path.dirname(os.path.abspath(__file__))
class Reader:
    """Resolves dataset/relative-pose file locations and loads .mat files."""

    def __init__(self):
        home = env()
        self.home = home
        # Path templates; the '{}' placeholders are filled by str.format.
        self.PATH_PC = home + '/processed_dataset/{}/{}/{}.mat'
        self.PATH_SUMMARY = home + '/relative_pose/summary/{}/{}/{}.mat'
        self.PATH_SCENE = home + '/processed_dataset/{}'
        self.PATH_REL = home + '/relative_pose/{}/{}/{}_{}.mat'
        self.PATH_SCAN = home + '/processed_dataset/{}/{}'

    def get_scanids(self, dataset, sceneid):
        """Return the sorted integer scan ids found in the scene folder."""
        folder = self.PATH_SCAN.format(dataset, sceneid)
        mat_files = glob.glob('%s/*.mat' % folder)
        return sorted(int(f.split('/')[-1].split('.')[0]) for f in mat_files)

    def read_scan(self, dataset, sceneid, scanid, variable_names=None):
        """Load one scan's .mat, optionally restricting the loaded variables."""
        path = self.PATH_PC.format(dataset, sceneid, scanid)
        return sio.loadmat(path, variable_names=variable_names)

    def read_summary(self, dataset, source, sceneid):
        """Load the relative-pose summary .mat for a scene."""
        return sio.loadmat(self.PATH_SUMMARY.format(dataset, source, sceneid))

    def list_scenes(self, dataset):
        """List the names of all scene folders for a dataset."""
        return os.listdir('%s/processed_dataset/%s/' % (env(), dataset))

    def list_relative_poses(self, dataset, source, sceneid):
        """Glob all relative-pose .mat files for a scene from `source`."""
        return glob.glob(self.PATH_REL.format(dataset, sceneid, '*', source))
def inverse(T):
    """Return the inverse of a 4x4 rigid-body transform.

    For T = [R | t; 0 | 1] the inverse is [R^T | -R^T t; 0 | 1].
    """
    rot, trans = T[:3, :3], T[:3, 3]
    out = np.zeros((4, 4))
    out[:3, :3] = rot.T
    out[:3, 3] = -rot.T.dot(trans)
    out[3, 3] = 1
    return out
def pack(R, t):
    """Assemble rotation R (3x3) and translation t (3,) into a 4x4 transform."""
    T = np.eye(4)
    T[:3, :3] = R
    T[:3, 3] = t
    return T
def decompose(T):
    """Split a 4x4 homogeneous transform into (rotation, translation) views."""
    return T[:3, :3], T[:3, 3]
"""
Find a matrix Q \in O(n) such that \|A Q - B\|_F is minimized
equivalent to maximize trace of (Q^T A^T B)
"""
def project(A, B):
X = A.T.dot(B)
U, S, VT = np.linalg.svd(X)
Q = U.dot(VT)
return Q
"""
Find a matrix Q \in SO(n) such that \|Q - X\|_F is minimized
equivalent to project(I, X)
"""
def project_so(X):
d = X.shape[0]
assert X.shape[1] == d
Q = project(np.eye(d), X)
Q = Q * np.linalg.det(Q)
return Q
def make_dirs(path):
    """Ensure the parent directory of `path` exists (like `mkdir -p`)."""
    parent = os.path.dirname(path)
    pathlib.Path(parent).mkdir(parents=True, exist_ok=True)
def angular_distance_np(R_hat, R):
    """Angular distance in degrees between batched rotation matrices.

    R_hat, R: arrays of shape [n, 3, 3]; both must be proper rotations
    (positive determinant is asserted).
    """
    assert (np.linalg.det(R_hat) > 0).all()
    assert (np.linalg.det(R) > 0).all()
    # angle = arccos((trace(R_hat R^T) - 1) / 2), clipped for float safety.
    prod = np.matmul(R_hat, R.transpose(0, 2, 1))
    trace = np.trace(prod, axis1=1, axis2=2)
    cos_theta = ((trace - 1) / 2).clip(-1, 1)
    return np.arccos(cos_theta) / np.pi * 180.0
def read_super4pcs(rel):
    """Parse a Super4PCS result file into a 4x4 transform.

    Returns the identity when the file is missing or empty. Header lines
    containing 'MATRIX' or 'VERSION' and blank lines are skipped.
    """
    if not os.path.exists(rel):
        return np.eye(4)
    with open(rel, 'r') as fin:
        lines = fin.readlines()
    if len(lines) == 0:
        return np.eye(4)
    rows = []
    for line in lines:
        stripped = line.strip()
        if 'MATRIX' in line or 'VERSION' in line or len(stripped) < 1:
            continue
        rows.append([float(tok) for tok in stripped.split(' ') if len(tok) > 0])
    T = np.array(rows)
    assert T.shape == (4, 4)
    return T
if __name__ == '__main__':
    # Smoke test: print the resolved module directory when run as a script.
    print('home dir = %s' % env())
|
import numpy as np
import pandas as pd
import pickle
from flask import Flask, jsonify, render_template, request
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import h5py
from keras.models import load_model
# load the dataset but only keep the top n words, zero the rest
top_words = 90000  # vocabulary size kept by the tokenizer
max_words = 500  # reviews are padded/truncated to this token length
#load the csv file saved
df = pd.read_csv('C:/Users/vivek/OneDrive/Desktop/flaskapps/Sentiment_Analysis/food_delivery_reviews.csv', encoding='utf-8')
# Re-fit the tokenizer on the corpus so inference token ids match training.
# NOTE(review): this assumes the CSV is exactly the training data -- confirm;
# pickling the fitted tokenizer alongside the model would be more robust.
tokenizer_obj = Tokenizer(num_words=top_words)
tokenizer_obj.fit_on_texts(df['content'].values)
def pred(userreview):
    """Classify one review string as "Negative", "Neutral", or "Positive".

    Fixes two defects of the original: the Keras model is now loaded once and
    cached on the function object (it was reloaded from disk on every request),
    and an unexpected class index no longer raises UnboundLocalError.
    """
    review_tokens = tokenizer_obj.texts_to_sequences([userreview])
    review_tokens_pad = pad_sequences(review_tokens, maxlen=max_words)
    print("call predict")
    if not hasattr(pred, '_model'):
        # Load the pretrained model only on the first call.
        pred._model = load_model('C:/Users/vivek/OneDrive/Desktop/flaskapps/Sentiment_Analysis/Model/model.h5')
        print("Loaded model from disk")
    probabilities = pred._model.predict(x=review_tokens_pad)
    predict_class = np.argmax(probabilities, axis=1)
    print(predict_class)
    # Map class index -> label; "Unknown" guards against unexpected indices
    # (the original raised UnboundLocalError in that case).
    labels = {0: "Negative", 1: "Neutral", 2: "Positive"}
    return labels.get(int(predict_class[0]), "Unknown")
#sentiment = loaded_model.predict(x=review_tokens_pad)
# webapp
app = Flask(__name__, template_folder='C:/Users/vivek/OneDrive/Desktop/flaskapps/Sentiment_Analysis/')


@app.route('/predict', methods=['POST'])
def prediction():
    """Handle the form POST and return the predicted sentiment as JSON."""
    message = request.form['message']
    print(message)
    response = pred(message)
    print(response)
    return jsonify(response)


# BUG FIX: the index page was registered under a filesystem-like URL
# ('/C:/Users/...'), which no browser ever requests; serve it at '/'.
@app.route('/')
def main():
    """Render the landing page."""
    return render_template('index.html')


if __name__ == '__main__':
    app.run(debug=True)
from common.Converter.FeatureConVerTer import FeatureConVerTer
class FeatureConverTerTest:
    """Thin manual-test harness around FeatureConVerTer."""

    def __init__(self):
        # Keep the converter instance so repeated calls reuse its state.
        self.fCverter = FeatureConVerTer()

    def TestWordsToFeatures(self, words, messages):
        """Delegate to the converter's message-to-feature-dict transformation."""
        converter = self.fCverter
        return converter.cVertMessagesToDict(words, messages)
if __name__ == '__main__':
    # Manual smoke test: convert two messages against a tiny word-weight map.
    featureCvert = FeatureConverTerTest()
    words = {'Get': 0.5, 'POST': 0.4, 'link': 0.1}
    msgs = ['Get it down', 'POST link Get']
    print(featureCvert.TestWordsToFeatures(words, msgs))
# Record of board snapshots, consumed by the optional pygame visualizer.
assignments = []


def cross(A, B):
    """Return every concatenation a+b for a in A and b in B."""
    return [a + b for a in A for b in B]
"""Global Variables used in following functions"""
rows = 'ABCDEFGHI'
cols = '123456789'
boxes = cross(rows, cols)
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
diag_units = [[i[0] + i[1] for i in zip(rows, cols)], [i[0] + i[1] for i in zip(rows[::-1], cols)]]
unitlist = row_units + column_units + square_units + diag_units
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)
def assign_value(values, box, value):
    """Set values[box] = value, recording solved boards for visualization.

    Use this instead of plain assignment so the pygame visualizer can replay
    the solve: a snapshot of the board is appended to the global `assignments`
    whenever a box collapses to a single digit.
    """
    if values[box] == value:
        # Nothing changed -- don't waste memory on a redundant snapshot.
        return values
    values[box] = value
    if len(value) == 1:
        assignments.append(values.copy())
    return values
def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    # Upstream reducers may pass through False (contradiction); propagate it.
    if type(values) is bool:
        return values
    # Find all instances of naked twins
    for unit in unitlist:
        # get all boxes with two values in this unit
        possibletwins = [box for box in unit if len(values[box]) == 2]
        twins = []
        twinvalues = []
        if len(possibletwins) > 1:
            cmptwins = possibletwins.copy()
            # we have to do n * (n - 1) / 2 comparisons with n = len(possibleTwins)
            for b1 in possibletwins:
                # compare b1 just with following boxes
                cmptwins = cmptwins[1:].copy()
                for b2 in cmptwins:
                    # check if values are equal, if True save boxes and value
                    # there may be more than one twin pair in an unit
                    if values[b1] == values[b2]:
                        twins.append(b1)
                        twins.append(b2)
                        twinvalues.append(values[b1])
            # now eliminate the naked twins as possibilities from their peers
            if len(twins) > 0:
                # loop over peers
                for u in unit:
                    # check if box is a twin. As we want to eliminate twin values, we have to be sure that we do not
                    # delete the values in a twin box
                    if u not in twins:
                        # eliminate twin values from boxes in box u
                        for v in twinvalues:
                            for vv in v:
                                # Remove each twin digit one at a time so the
                                # visualizer records intermediate states.
                                values = assign_value(values, u, values[u].replace(vv, ""))
    return values
def grid_values(grid):
    """
    Convert grid into a dict of {square: char} with '123456789' for empties.
    Args:
        grid(string) - A grid in string form.
    Returns:
        A grid in dictionary form
        Keys: The boxes, e.g., 'A1'
        Values: The value in each box, e.g., '8'. If the box has no value,
        then the value will be '123456789'.
    """
    digits = '123456789'
    # '.' means "anything possible"; characters outside digits/'.' are ignored.
    chars = [digits if c == '.' else c for c in grid if c in digits or c == '.']
    assert len(chars) == 81
    return dict(zip(boxes, chars))
def display(values):
    """
    Display the values as a 2-D grid.
    Args:
        values(dict): The sudoku in dictionary form
    """
    # Column width adapts to the longest candidate string so cells line up.
    width = 1+max(len(values[s]) for s in boxes)
    line = '+'.join(['-'*(width*3)]*3)
    for r in rows:
        # Vertical separators after columns 3 and 6 delimit the 3x3 squares.
        print(''.join(values[r+c].center(width)+('|' if c in '36' else '')
                      for c in cols))
        # Horizontal separators after rows C and F.
        if r in 'CF': print(line)
    return
def eliminate(values):
    """Remove each solved box's digit from the candidates of all its peers."""
    solved = [box for box, val in values.items() if len(val) == 1]
    for box in solved:
        digit = values[box]
        for peer in peers[box]:
            values = assign_value(values, peer, values[peer].replace(digit, ''))
    return values
def only_choice(values):
    """Assign a digit to the box that is the only one in its unit able to take it."""
    for unit in unitlist:
        for digit in '123456789':
            candidates = [box for box in unit if digit in values[box]]
            if len(candidates) == 1:
                values = assign_value(values, candidates[0], digit)
    return values
def reduce_puzzle(values):
    """Apply eliminate / only_choice / naked_twins until no further progress.

    Returns False when a box ends up with zero candidates (contradiction),
    otherwise the (partially) reduced values dict.
    """
    progress = True
    while progress:
        solved_before = sum(1 for box in values.keys() if len(values[box]) == 1)
        values = eliminate(values)
        values = only_choice(values)
        values = naked_twins(values)
        solved_after = sum(1 for box in values.keys() if len(values[box]) == 1)
        progress = solved_before != solved_after
        if any(len(values[box]) == 0 for box in values.keys()):
            return False
    return values
def search(values):
    """Depth-first search with constraint propagation.

    Returns the solved values dict, or False when no solution exists.
    Cleaned up from the original: the unreachable trailing ``return values``
    after ``return False`` is removed, and the manual minimum scan is replaced
    with ``min`` (first minimum wins, matching the original scan order).
    """
    # First, reduce the puzzle using constraint propagation.
    values = reduce_puzzle(values)
    if type(values) is bool:
        # reduce_puzzle signalled a contradiction (False).
        return values
    if any(len(v) == 0 for v in values.values()):
        return False
    # Choose one of the unfilled squares with the fewest possibilities.
    unfilled = [(len(v), k, v) for k, v in values.items() if len(v) > 1]
    if not unfilled:
        # Every box is solved.
        return values
    _, kMin, vValues = min(unfilled, key=lambda item: item[0])
    # Try each candidate digit on a copy; recurse until one branch solves.
    for v in vValues:
        valuesCP = assign_value(values.copy(), kMin, v)
        searchReturn = search(valuesCP)
        if type(searchReturn) is dict:
            return searchReturn
    return False
def solve(grid):
    """
    Find the solution to a Sudoku grid.
    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no solution exists.
    """
    return search(grid_values(grid))
if __name__ == '__main__':
    # Diagonal sudoku demo puzzle.
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))
    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)
    except SystemExit:
        pass
    except:
        # Deliberately broad: visualization is optional and any pygame
        # import/setup failure must not make the solver look broken.
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
|
import linecache
# Sample one value every 12 lines of the generator output and report
# min / max / average of the collected values.
MAX_LINE = 194  # number of samples to read
date = []  # collected floats (NOTE(review): name suggests dates but holds numeric samples)
count = -2  # chosen so the first read hits line 10 (-2 + 12), then 22, 34, ...
filename='flo_gen_output.txt'
for i in range(MAX_LINE):
    count = count + 12
    # The value is the last whitespace-separated token on the line.
    date.append(float(linecache.getline(filename, count).split()[-1]))
print("Minimum: " + str(min(date)))
print("Maximum: " + str(max(date)))
print("Average: " + "{0:.3f}".format(sum(date)/len(date)))
def remove_dollar_sign(s):
    """Return `s` with every '$' character stripped out."""
    return ''.join(ch for ch in s if ch != '$')
# Interactive demo: strip '$' from user input, then self-check the function
# against a known example.
m = str(input("Nhập chuỗi : "))
t=remove_dollar_sign(m)
print(t)
string_with_no_dollars = remove_dollar_sign("$80% percent of $life is to show $up")
if string_with_no_dollars == "80% percent of life is to show up":
    print("Your function is correct")
else:
    print("Oops, there's a bug")
#!/usr/bin/env python3
# BUG FIX: the original called `laba.return_text_value()` before any import
# and misspelled the module (`laba` vs `lab3a`), raising NameError on launch.
import lab3a

text = lab3a.return_text_value()
print(text)
print(lab3a.return_number_value())
|
# Boolean practice expressions. The originals were syntax errors: a string
# literal cannot directly follow an expression on the same line, and one line
# used assignment (=) where comparison (==) was intended. Rewritten as valid
# Python with the expected results in comments.
True or False  # This should be True
False and True  # This should be False
1 == 1 and 2 == 1  # True and False >>> False
"test" == "test"  # This should be True (original had `=`, which is invalid)
1 == 1 or 2 != 1  # True or True >>> True
True and 1 == 1  # True and True >>> True
False and 0 != 0  # False and False >>> False
True or 1 == 1  # True or True >>> True
"test" == "testing"  # This should be False
1 != 0 and 2 != 1  # True and True >>> True
"test" != "testing"  # This should be True
|
# Reads n (stall count), m (current 1-indexed position), k (budget) and the
# price list a[]; prints 10x the distance to the nearest stall that is both
# stocked (price != 0) and affordable (price <= k). Prints nothing otherwise.
n, m, k = map(int, input().split(" "))
a = list(map(int, input().split(" ")))
for i in range(1, n+1):
    # Check i positions to the left (m-i) and right (m+i), guarding bounds.
    if (m-i-1 >= 0 and a[m-i-1] != 0 and k >= a[m-i-1]) or \
            (m+i-1 < n and a[m+i-1] != 0 and k >= a[m+i-1]):
        print(i*10)
        exit()
|
# Shopping-cart exercise: show priced items, let the user buy until the
# salary runs out or a non-numeric choice is entered.
Shop_list=[
    ("苹果",20),
    ("香蕉",30),
    ("草莓",50)
]
User_list=[]  # names of purchased items
Salary=input("请输入工资:")
if Salary.isdigit():
    Salary=int(Salary)
    while True:
        for index, shop_list in enumerate(Shop_list):
            print(index, shop_list)
        buy_list = input("请输入要购买的商品序号:")
        if buy_list.isdigit():
            buy_list = int(buy_list)
            if buy_list < len(Shop_list) and buy_list >= 0:
                p_item=Shop_list[buy_list] # index into the list to get the (name, price) tuple
                if p_item[1]<=Salary:
                    Salary=Salary-p_item[1]
                    User_list.append(p_item[0])
                    print(User_list)
                    # \033[31;1m ... \033[0m prints the remaining balance in red.
                    print("您目前还剩下 \033[31;1m%s\033[0m 元钱"%Salary)
                else:
                    print("你的钱不够啦")
            else:
                print("商品序号不存在,请重新选择")
        else:
            # NOTE(review): this bare comparison is a no-op -- ANY non-numeric
            # input (not just 'q') falls through to the break below.
            buy_list=='q'
            break
exit()
|
# https://en.wiktionary.org/wiki/%C2%BD
# Static content schedules for the nutrition skill: recipe cards and
# daily-topic cards rendered to the user.
RECIPE_SCHEDULE = [
    {
        'title': 'Vegetable Lasagna',
        'id': 'vegetable_lasagna',
        'link': 'https://www.aicr.org/assets/docs/pdf/her/vegetable_lasagna.pdf',
        'category': ['Entrée', 'Vegetarian'],
        # NOTE(review): the last four nutrition entries duplicate earlier
        # ones -- possibly an artifact of the IP redaction; confirm.
        "recipe_nutrition":
            [
                {
                    "primaryText": "360 calories"
                },
                {
                    "primaryText": "11 g total fat (5 g saturated fat)"
                },
                {
                    "primaryText": "45 g carbohydrate"
                },
                {
                    "primaryText": "23 g protein"
                },
                {
                    "primaryText": "11 g dietary fiber"
                },
                {
                    "primaryText": "11 g total fat (5 g saturated fat)"
                },
                {
                    "primaryText": "45 g carbohydrate"
                },
                {
                    "primaryText": "23 g protein"
                },
                {
                    "primaryText": "11 g dietary fiber"
                }
            ]
    }
    # the rest dictionaries in the RECIPE_SCHEDULE are deleted to protect Intellectual Property
]
TOPIC_OF_DAY_SCHEDULE = [
    {
        'title': 'Theme of the Week',
        'subtitle': 'General Introduction About Nutrition',
        'text': 'Welcome! Over the next few months, we\'re going to talk about topics related to cancer and nutrition. '
                'I\'m going to provide you with trustworthy resources and recipes to help you get the nutrition you need'
                ' during treatment. The American Institute for Cancer Research is a great resource that has a lot of '
                'information that you might find helpful! For more information, say \"Alexa, more information\". '
                'To proceed to the intervention activities, say \"Alexa, start intervention\".',
        'if_link': 'true',
        'link': 'https://www.aicr.org/'
    }
    # the rest dictionaries in the TOPIC_OF_DAY_SCHEDULE are deleted to protect Intellectual Property
]
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
# URL routes in the legacy Django (<= 1.7) string-view `patterns()` style.
# NOTE(review): `patterns()` and string view references were removed in
# Django 1.10 -- migrating means a plain `urlpatterns` list with imported views.
urlpatterns = patterns('',
    # Admin site is mounted under a non-obvious prefix ('vpusti').
    url(r'^vpusti/', include(admin.site.urls)),
    url(r'^operator/(?P<washing_id>\d+)/$', 'orders.views.operator'),
    # JSON/data endpoints used by the operator screen.
    url(r'^operator/data/byday/(?P<day>\d+)/month/(?P<month>\d+)/year/(?P<year>\d+)/washing/(?P<washing_id>\d+)', 'orders.views.operator_viewmodel'),
    url(r'^operator/data/delete/(?P<order_id>\d+)$', 'orders.views.operator_deleteorder'),
    url(r'^operator/data/update/(?P<order_id>\d+)$', 'orders.views.operator_updateorder'),
    url(r'^operator/data/create/washing/(?P<washing_id>\d+)$', 'orders.views.operator_createorder'),
    url(r'^operator/data/changepostnumber/order/(?P<order_id>\d+)/newpostnumber/(?P<new_post_number>\d+)$', 'orders.views.operator_changeorderpost'),
    url(r'^partners/', 'orders.views.login_view', name='login'),
    url(r'^logout/', 'orders.views.logout_view'),
    url(r'^washings/', include('orders.urls')),
    url(r'^nomobile/', 'orders.views.nomobile'),
    url(r'^mobile/', 'orders.views.mobile'),
    url(r'^$', 'orders.views.index'),
)
|
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib import style
import time
def pid_plot(ax1, setPoint, processVariable):
    """Plot the process-variable series on `ax1` and show the figure.

    Cleanup: the original copied `processVariable` element-by-element into a
    list and bound `setPoint` to an unused local.
    NOTE(review): `setPoint` is never drawn -- kept in the signature for
    compatibility; presumably it should appear as a horizontal reference line.
    """
    ax1.clear()
    ax1.plot(list(processVariable))
    plt.show()
def animate(fig, plot, interval=1000):
    """Run `plot` periodically via FuncAnimation and show the figure.

    BUG FIX: the original passed `interval` as the third positional argument
    of FuncAnimation, which is `frames` -- not `interval` -- so the refresh
    period was silently wrong. It is now passed by keyword.
    """
    # Keep a reference to the animation so it is not garbage-collected
    # before plt.show() returns.
    ani = animation.FuncAnimation(fig, plot, interval=interval)
    plt.show()
|
import cv2 as cv
# Webcam face detector: mirror the feed, draw green boxes around detected
# faces, show each face crop, quit on 'q' or Esc.
haar_cascades = cv.CascadeClassifier('haarcascade_frontalface_default.xml')

video = cv.VideoCapture(0)
video.set(cv.CAP_PROP_FRAME_WIDTH, 1024)
video.set(cv.CAP_PROP_FRAME_HEIGHT, 576)
while True:
    ret, frame = video.read()
    if frame is None:
        # Camera disconnected or stream ended.
        break
    frame = cv.flip(frame, 1)  # mirror for a selfie-style view
    grayscaled_frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
    # Haar cascade detection: scaleFactor=1.1, minNeighbors=4.
    faces = haar_cascades.detectMultiScale(grayscaled_frame, 1.1, 4)
    copy_frame = frame.copy()
    for x, y, w, h in faces:
        cv.rectangle(copy_frame, (x, y), (x+w, y+h), color=(0, 255, 0), thickness=10)
        roi = frame[y:y+h, x:x+w]
        cv.imshow('roi', roi)
    cv.imshow('video stream', copy_frame)
    key = cv.waitKey(30)
    if key == ord('q') or key == 27:  # 'q' or Esc
        break
video.release()
cv.destroyAllWindows()  # BUG FIX: windows were left open after release
"""
Test cases for generate facial landmark localization training data
"""
import os
import sys
import random
import unittest
import cv2
from mtcnn.datasets import get_by_name
import mtcnn.train.gen_landmark as gl
from mtcnn.utils import draw
DEFAULT_DATASET = 'CelebA'
here = os.path.dirname(__file__)
class TestGenLandmarks(unittest.TestCase):
    """Generate facial-landmark training data from a dataset sample, read it
    back, and dump annotated sample images for visual inspection."""

    def setUp(self):
        # Fresh dataset handle and output folder per test; only `top` samples
        # are used to keep the run fast.
        self.datasets = get_by_name(DEFAULT_DATASET)
        self.output_folder = os.path.join(here, '../output/test/pnet')
        self.top = 100

    def test_gen_landmark_data(self):
        # NOTE(review): test_get_landmark_data reads this test's output --
        # an inter-test dependency relying on alphabetical run order.
        meta = self.datasets.get_train_meta()
        meta = random.choices(meta, k=self.top)
        gl.gen_landmark_data(meta, 12, self.output_folder, argument=True)

    def test_get_landmark_data(self):
        images, landmarks = gl.get_landmark_data(self.output_folder)
        self.assertEqual(len(images), len(landmarks))
        # Random sampling 10 pictures and draw landmark points on them.
        output_folder = os.path.join(self.output_folder, 'sample_images')
        if not os.path.isdir(output_folder):
            os.makedirs(output_folder)
        for i, (im, lm) in enumerate(zip(images[:10], landmarks[:10])):
            # NOTE(review): for OpenCV arrays shape[0] is height and shape[1]
            # is width, so these names look swapped; harmless for square
            # crops but confirm before reusing elsewhere.
            w = im.shape[0]
            h = im.shape[1]
            # Landmarks are stored normalized; scale back to pixel coords.
            lm[:, 0] *= w
            lm[:, 1] *= h
            lm = lm.astype(int)
            draw.draw_landmarks(im, lm)
            cv2.imwrite(os.path.join(output_folder, '%d.jpg' % i), im)
|
import datetime
import hashlib
import conector.conexion as conexion
# Module-level DB handles shared by all Producto instances.
connect = conexion.conectar()
database = connect[0]  # connection object (commit/rollback)
cursor = connect[1]  # shared cursor for all queries
class Producto:
    """A product row: supplier id, name, price, and stock quantity."""

    def __init__(self, proveedor_id, nombre, precio, cantidad):
        self.proveedor_id = proveedor_id
        self.nombre = nombre
        self.precio = precio
        self.cantidad = cantidad

    def registrarProducto(self):
        """Insert this product (timestamped now) into the `productos` table.

        Returns:
            [rows_affected, self] -- rows_affected is 0 when the insert fails.
        """
        fecha = datetime.datetime.now()
        sql = "INSERT INTO productos VALUES(null, %s, %s, %s, %s, %s)"
        producto = (self.proveedor_id, self.nombre, self.precio, self.cantidad, fecha)
        try:
            cursor.execute(sql, producto)
            database.commit()
            result = [cursor.rowcount, self]
        except Exception:
            # BUG FIX: was a bare `except:` that also swallowed SystemExit /
            # KeyboardInterrupt. Roll back so the shared connection is not
            # left in an aborted transaction.
            database.rollback()
            result = [0, self]
        return result
print("This program calculates your GPA...have fun")
course_titles = []
credit_load = []
Grade = []
cl = []
name = input('Hi, what is your name? ')
matriculation_number = input('what is your matriculation number? ')
course_num = int(input('How many courses are you offering? Please state: '))
for Courses in range(course_num):
course_title = input('Input course title: ')
course_titles.append(course_title)
Credit_load = int(input(f'input the credit load of {course_title}: '))
credit_load.append(Credit_load)
grade = (input(f'input grade for {course_title}: '))
Grade.append(grade)
if grade == 'A' or 'a':
cl.append(Credit_load * 5)
elif grade == 'B' or 'b':
cl.append(Credit_load * 4)
elif grade == 'C' or 'c':
cl.append(Credit_load * 3)
elif grade == 'D' or 'd':
cl.append(Credit_load * 2)
ans = sum(cl) / sum(credit_load)
print("""
""")
print(name)
print(matriculation_number)
print('COURSE_TITLE CREDIT_LOAD GRADE')
for i in range(course_num):
print(course_titles[i], credit_load[i], Grade[i])
print('Your GPA is : ', ans)
|
import numpy as np
from scipy.ndimage import interpolation
from ocear.preprocess.utils import clip_borders
MAX_SKEW = 3
SKEW_STEPS = 32
def _skew_angle(image):
"""
Estimate skew angle where the horizontal variance in pixel intensity is
highest; the higher the variance, the "straighter up" the letters should
stand.
"""
estimates = []
for angle in np.linspace(-MAX_SKEW, MAX_SKEW, SKEW_STEPS + 1):
variance = np.mean(
interpolation.rotate(image, angle, order=0, mode='constant'),
axis=1
).var()
estimates.append((variance, angle))
return max(estimates)[1]
def skew(image):
    """
    Rotate `image` by its estimated skew angle.
    """
    # Work on an inverted, zero-based copy: dark text on light paper becomes
    # high-intensity foreground, which helps the variance-based estimate.
    inverted = np.amax(image) - image
    inverted = inverted - np.amin(inverted)
    # Estimate on the border-clipped image, rotate the full one in place.
    angle = _skew_angle(clip_borders(inverted))
    rotated = interpolation.rotate(inverted, angle, reshape=False)
    # Undo the inversion so the output matches the input's polarity.
    return np.amax(rotated) - rotated
|
from a10sdk.common.A10BaseClass import A10BaseClass
class CostCfg(A10BaseClass):
    """Per-instance OSPFv3 interface-cost entry.

    No standalone CRUD operations; managed through the parent object.
    Fields: cost (1-65535), instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "cost-cfg"
        self.DeviceProxy = ""
        self.cost = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class HelloIntervalCfg(A10BaseClass):
    """Per-instance OSPFv3 HELLO-interval entry.

    No standalone CRUD operations; managed through the parent object.
    Fields: hello_interval (seconds, 1-65535, default 10),
    instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "hello-interval-cfg"
        self.DeviceProxy = ""
        self.hello_interval = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class PriorityCfg(A10BaseClass):
    """Per-instance OSPFv3 router-priority entry.

    No standalone CRUD operations; managed through the parent object.
    Fields: priority (0-255, default 1), instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "priority-cfg"
        self.DeviceProxy = ""
        self.priority = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class MtuIgnoreCfg(A10BaseClass):
    """Per-instance flag to ignore the MTU in DBD packets.

    No standalone CRUD operations; managed through the parent object.
    Fields: mtu_ignore (flag, default 0), instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "mtu-ignore-cfg"
        self.DeviceProxy = ""
        self.mtu_ignore = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class RetransmitIntervalCfg(A10BaseClass):
    """Per-instance interval between LSA retransmissions.

    No standalone CRUD operations; managed through the parent object.
    Fields: retransmit_interval (seconds, 1-65535, default 5),
    instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "retransmit-interval-cfg"
        self.DeviceProxy = ""
        self.retransmit_interval = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class NetworkList(A10BaseClass):
    """Per-instance OSPFv3 network-type entry.

    No standalone CRUD operations; managed through the parent object.
    Fields: broadcast_type (one of 'broadcast', 'non-broadcast',
    'point-to-point', 'point-to-multipoint'), p2mp_nbma (flag, default 0,
    non-broadcast point-to-multipoint), network_instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "network-list"
        self.DeviceProxy = ""
        self.broadcast_type = ""
        self.p2mp_nbma = ""
        self.network_instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class TransmitDelayCfg(A10BaseClass):
    """Per-instance link-state transmit delay.

    No standalone CRUD operations; managed through the parent object.
    Fields: transmit_delay (seconds, 1-65535, default 1),
    instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "transmit-delay-cfg"
        self.DeviceProxy = ""
        self.transmit_delay = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class NeighborCfg(A10BaseClass):
    """Per-instance OSPFv3 neighbor entry.

    No standalone CRUD operations; managed through the parent object.
    Fields: neighbor (IPv6 address, default '::'), neighbor_priority (0-255),
    neighbor_poll_interval (seconds, 0-4294967295), neighbor_cost (1-65535),
    neig_inst (interface instance id, 0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "neighbor-cfg"
        self.DeviceProxy = ""
        self.neighbor_priority = ""
        self.neig_inst = ""
        self.neighbor_poll_interval = ""
        self.neighbor_cost = ""
        self.neighbor = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class DeadIntervalCfg(A10BaseClass):
    """Per-instance neighbor dead interval.

    No standalone CRUD operations; managed through the parent object.
    Fields: dead_interval (seconds, 1-65535, default 40),
    instance_id (0-255, default 0).
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "dead-interval-cfg"
        self.DeviceProxy = ""
        self.dead_interval = ""
        self.instance_id = ""
        for attr, val in kwargs.items():
            setattr(self, attr, val)
class Ospf(A10BaseClass):
"""Class Description::
Open Shortest Path First for IPv6 (OSPFv3).
Class ospf supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param bfd: {"default": 0, "optional": true, "type": "number", "description": "Bidirectional Forwarding Detection (BFD)", "format": "flag"}
:param cost_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"cost": {"description": "Interface cost", "minimum": 1, "type": "number", "maximum": 65535, "format": "number"}, "optional": true, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param hello_interval_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "hello-interval": {"description": "Time between HELLO packets (Seconds)", "format": "number", "default": 10, "maximum": 65535, "minimum": 1, "type": "number"}, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param disable: {"default": 0, "optional": true, "type": "number", "description": "Disable BFD", "format": "flag"}
:param priority_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"priority": {"description": "Router priority", "format": "number", "default": 1, "maximum": 255, "minimum": 0, "type": "number"}, "optional": true, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param mtu_ignore_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"mtu-ignore": {"default": 0, "type": "number", "description": "Ignores the MTU in DBD packets", "format": "flag"}, "optional": true, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param retransmit_interval_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"retransmit-interval": {"description": "Time between retransmitting lost link state advertisements (Seconds)", "format": "number", "default": 5, "maximum": 65535, "minimum": 1, "type": "number"}, "optional": true, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param network_list: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"broadcast-type": {"enum": ["broadcast", "non-broadcast", "point-to-point", "point-to-multipoint"], "type": "string", "description": "'broadcast': Specify OSPF broadcast multi-access network; 'non-broadcast': Specify OSPF NBMA network; 'point-to-point': Specify OSPF point-to-point network; 'point-to-multipoint': Specify OSPF point-to-multipoint network; ", "format": "enum"}, "optional": true, "p2mp-nbma": {"default": 0, "type": "number", "description": "Specify non-broadcast point-to-multipoint network", "format": "flag"}, "network-instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param transmit_delay_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"optional": true, "transmit-delay": {"description": "Link state transmit delay (Seconds)", "format": "number", "default": 1, "maximum": 65535, "minimum": 1, "type": "number"}, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param neighbor_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"neighbor-priority": {"description": "OSPF priority of non-broadcast neighbor", "minimum": 0, "type": "number", "maximum": 255, "format": "number"}, "neig-inst": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}, "neighbor-poll-interval": {"description": "OSPF dead-router polling interval (Seconds)", "minimum": 0, "type": "number", "maximum": 4294967295, "format": "number"}, "neighbor-cost": {"description": "OSPF cost for point-to-multipoint neighbor (metric)", "minimum": 1, "type": "number", "maximum": 65535, "format": "number"}, "neighbor": {"default": "::", "type": "string", "description": "OSPFv3 neighbor (Neighbor IPv6 address)", "format": "ipv6-address"}, "optional": true}}]}
:param dead_interval_cfg: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"dead-interval": {"description": "Interval after which a neighbor is declared dead (Seconds)", "format": "number", "default": 40, "maximum": 65535, "minimum": 1, "type": "number"}, "optional": true, "instance-id": {"description": "Specify the interface instance ID", "format": "number", "default": 0, "maximum": 255, "minimum": 0, "type": "number"}}}]}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/interface/trunk/{ifnum}/ipv6/ospf`.
"""
def __init__(self, **kwargs):
    """Initialize the OSPFv3 trunk-interface config object.

    Every supported axapi field starts at its empty default (string
    options as "", list options as []); any keyword argument then
    overrides or extends those attributes verbatim.
    """
    # Defaults in declaration order; list literals are re-evaluated per
    # call, so instances never share mutable state.
    defaults = {
        "ERROR_MSG": "",
        "required": [],
        "b_key": "ospf",
        "a10_url": "/axapi/v3/interface/trunk/{ifnum}/ipv6/ospf",
        "DeviceProxy": "",
        "bfd": "",
        "cost_cfg": [],
        "hello_interval_cfg": [],
        "disable": "",
        "priority_cfg": [],
        "mtu_ignore_cfg": [],
        "retransmit_interval_cfg": [],
        "network_list": [],
        "transmit_delay_cfg": [],
        "neighbor_cfg": [],
        "dead_interval_cfg": [],
    }
    defaults.update(kwargs)
    for attr_name, attr_value in defaults.items():
        setattr(self, attr_name, attr_value)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# **********************************************************
# Filename: 172_format_number.py
# Author: Andrew Wang - shuguang.wang1990@gmail.com
# Description: demonstrates printf-style integer formatting flags
# Create: 2016-11-01 22:48:59
# Last Modified: 2016-11-01 22:48:59
# **********************************************************
# Ported from Python 2 "print" statements to the Python 3 print()
# function; the format strings themselves are unchanged. (Note: under
# Python 3, "%#o" renders as "0o144" rather than Python 2's "0144".)
print("%d" % 2)        # plain decimal
print("%3d" % 2)       # minimum width 3, right-aligned
print("%03d" % 2)      # zero-padded to width 3
print("%-8d" % 2)      # minimum width 8, left-aligned
print("%.8d" % 2)      # precision zero-pads to 8 digits
print("%10.8d" % 2)    # width 10 around the 8-digit zero-padded value
print("%-10.8d" % 2)   # same, left-aligned
print("%x, %#x" % (100, 100))    # hex, and hex with "0x" prefix
print("%o, %#o" % (100, 100))    # octal, and octal with prefix
# print("%b, %#b" % (10, 10))    # %b is not a valid printf conversion
print("%x, %#x" % (-100, -100))  # negatives keep their sign
print("%o, %#o" % (-100, -100))
|
#encoding: UTF-8
#Autor: Omar Israel Galván García A01745810
#Este programa pregunta al usuario cuantos boletos quiere comprar de cada tipo e imprime el total a pagar.
def calcularPago(asientosA, asientosB, asientosC):
    """Return the total price for the tickets: class A seats cost 400,
    class B 250 and class C 135 each."""
    precios = (400, 250, 135)
    cantidades = (asientosA, asientosB, asientosC)
    return sum(precio * cantidad for precio, cantidad in zip(precios, cantidades))
def main():
    """Prompt for the number of tickets of each class and print the
    total amount to pay."""
    numeroBoletosA = int(input("Número de boletos de clase A: "))
    numeroBoletosB = int(input("Número de boletos de clase B: "))
    numeroBoletosC = int(input("Número de boletos de clase C: "))
    total = calcularPago(numeroBoletosA, numeroBoletosB, numeroBoletosC)
    print("El costo total es: $%.2f" % (total))

# Guard the entry point so importing this module no longer blocks on
# input() — previously main() ran unconditionally at import time.
if __name__ == "__main__":
    main()
import pygame
import time
class Player:
    """Holds one player's identity and state for the current game:
    their display name, the color their pieces are drawn in, and the
    board squares they have claimed so far."""

    def __init__(self, name, color):
        self.name = name
        self.color = color
        # board squares (numbered 1-9) occupied by this player, in the
        # order they were claimed
        self.piece_loc = []

    def add_piece(self, piece):
        """Record that this player now occupies board square *piece*."""
        self.piece_loc.append(piece)
class Board:
    """Tic-tac-toe board helper.

    Maps mouse clicks onto the 3x3 grid of squares (numbered 1-9),
    tracks which squares are still free, and detects winning lines.
    """

    def __init__(self):
        # every winning line, expressed as a triple of square numbers
        self.win_states = [
            [1, 2, 3],
            [4, 5, 6],
            [7, 8, 9],
            [1, 4, 7],
            [2, 5, 8],
            [3, 6, 9],
            [1, 5, 9],
            [3, 5, 7]
        ]
        self.avalible_move = {1, 2, 3, 4, 5, 6, 7, 8, 9}
        # pixel centre of each square, indexed by square number - 1
        self.click_map = [(150 + 100 * col, 150 + 100 * row)
                          for row in range(3) for col in range(3)]
        self.turn_count = 0

    def check_win(self, p_loc):
        """Return True when *p_loc* matches some winning line.

        Occurrences are counted (not de-duplicated) and the hit count
        must be exactly three, mirroring the original list-comprehension
        length test.
        """
        for state in self.win_states:
            if sum(1 for loc in p_loc if loc in state) == 3:
                return True
        return False

    def calc_space(self, x, y):
        """Translate a click at pixel (x, y) into a square number 1-9.

        Returns 0 when the click falls outside the grid or exactly on a
        grid line. A recognised square is removed from avalible_move;
        the square number is returned either way.
        """
        space = 0
        bands = ((100, 200), (200, 300), (300, 400))
        for row, (y_lo, y_hi) in enumerate(bands):
            for col, (x_lo, x_hi) in enumerate(bands):
                if x_lo < x < x_hi and y_lo < y < y_hi:
                    space = 3 * row + col + 1
        # 0 is never a member, so discard() is a no-op for misses
        self.avalible_move.discard(space)
        return space
def Start():
    """Run one complete game of tic-tac-toe in a pygame window.

    Builds the board UI and two players, then loops over pygame events:
    a left click places the current player's piece, a completed line or
    a full board ends the game, and the result message is displayed for
    five seconds before the function returns.
    """
    # Initilizing game definitions
    size = [500, 600]
    white = 255, 255, 255
    black = 0, 0, 0
    blue = 0, 0, 255
    red = 255, 0, 0
    green = 0, 255, 0  # NOTE(review): unused
    left = 1  # pygame mouse-button code for a left click
    pygame.init()
    pygame.font.init()
    title_font = pygame.font.SysFont("monospace", 20)
    ply_font = pygame.font.SysFont("monospace", 15)
    pygame.display.set_caption("Tic Tac Toe")
    screen = pygame.display.set_mode(size)
    # Defining Player and the Gameboard
    Player1 = Player("Player 1", red)
    Player2 = Player("Player 2", blue)
    game_board = Board()
    player_que = [Player1, Player2]
    # Player 1 will always go first
    player_turn = 0
    # render text
    title = title_font.render("Tic Tac Toe", True, blue)
    ply_1 = ply_font.render(Player1.name, True, red)
    ply_2 = ply_font.render(Player2.name, True, blue)
    done = False
    win = False
    # Drawing game board
    screen.fill(white)
    screen.blit(title, (182, 30))
    screen.blit(ply_1, (50, 450))
    screen.blit(ply_2, (380, 450))
    # Vertical lines
    pygame.draw.line(screen, black, [100, 100], [100, 400], 5)
    pygame.draw.line(screen, black, [200, 100], [200, 400], 5)
    pygame.draw.line(screen, black, [300, 100], [300, 400], 5)
    pygame.draw.line(screen, black, [400, 100], [400, 400], 5)
    # Horizontal lines
    pygame.draw.line(screen, black, [100, 100], [400, 100], 5)
    pygame.draw.line(screen, black, [100, 200], [400, 200], 5)
    pygame.draw.line(screen, black, [100, 300], [400, 300], 5)
    pygame.draw.line(screen, black, [100, 400], [400, 400], 5)
    while not done:
        for event in pygame.event.get():
            # the player whose move is being processed this event
            player_move = player_que[player_turn]
            # mouse click event
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == left:
                x, y = event.pos
                space = game_board.calc_space(x, y)
                # NOTE(review): calc_space returns the square number even
                # when it was already taken, so a square can be claimed
                # twice — confirm whether repeat clicks should be rejected.
                if space != 0:
                    # place player piece on the game board
                    player_que[player_turn].add_piece(space)
                    color = player_que[player_turn].color
                    loc = game_board.click_map[space - 1]
                    pygame.draw.circle(screen, color, loc, 25, 0)
                    # checks for a win (earliest possible win is turn 4)
                    if 9 >= game_board.turn_count >= 4:
                        win = game_board.check_win(player_move.piece_loc)
                        if win == True:
                            outtxt = player_que[player_turn].name + " has won!"
                            out = title_font.render(outtxt, True, black)
                            done = True
                    # change players turn update piece
                    if player_turn == 0:
                        player_turn = 1
                        ply_2 = ply_font.render(Player2.name, True, black)
                        ply_1 = ply_font.render(Player1.name, True, red)
                        pygame.display.update()
                    elif player_turn == 1:
                        player_turn = 0
                        ply_1 = ply_font.render(Player1.name, True, black)
                        ply_2 = ply_font.render(Player2.name, True, blue)
                        pygame.display.update()
                    game_board.turn_count = game_board.turn_count + 1
                    # board full without a winner -> draw
                    if game_board.turn_count == 9 and win == False:
                        out = title_font.render("Cats game!", True, black)
                        done = True
            if event.type == pygame.QUIT:
                # hard exit: never reaches the final blit, so `out` is
                # only read on the win/draw paths where it is assigned
                pygame.quit()
                exit(0)
        pygame.display.flip()
    # show the result banner for five seconds before returning
    screen.blit(out, (145, 500))
    pygame.display.flip()
    time.sleep(5)
# Run the game only when executed as a script, not on import.
if __name__ == "__main__":
    Start()
|
# Copyright (C) 2023 Intel Labs
#
# BSD-3-Clause License
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
# Linear Flipout Layers with flipout weight estimator to perform
# variational inference in Bayesian neural networks. Variational layers
# enables Monte Carlo approximation of the distribution over the weights
#
# @authors: Ranganath Krishnan, Piero Esposito
#
# ======================================================================================
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.distributions.normal import Normal
from torch.distributions.uniform import Uniform
from ..base_variational_layer import BaseVariationalLayer_
from torch.quantization.observer import HistogramObserver, PerChannelMinMaxObserver, MinMaxObserver
from torch.quantization.qconfig import QConfig
__all__ = ["LinearFlipout"]
class LinearFlipout(BaseVariationalLayer_):
    def __init__(self,
                 in_features,
                 out_features,
                 prior_mean=0,
                 prior_variance=1,
                 posterior_mu_init=0,
                 posterior_rho_init=-3.0,
                 bias=True):
        """
        Implements Linear layer with Flipout reparameterization trick.
        Ref: https://arxiv.org/abs/1803.04386
        Inherits from bayesian_torch.layers.BaseVariationalLayer_
        Parameters:
            in_features: int -> size of each input sample,
            out_features: int -> size of each output sample,
            prior_mean: float -> mean of the prior arbitrary distribution to be used on the complexity cost,
            prior_variance: float -> variance of the prior arbitrary distribution to be used on the complexity cost,
            posterior_mu_init: float -> init trainable mu parameter representing mean of the approximate posterior,
            posterior_rho_init: float -> init trainable rho parameter representing the sigma of the approximate posterior through softplus function,
            bias: bool -> if set to False, the layer will not learn an additive bias. Default: True,
        """
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.prior_mean = prior_mean
        self.prior_variance = prior_variance
        self.posterior_mu_init = posterior_mu_init
        self.posterior_rho_init = posterior_rho_init
        # variational weight parameters: mu is the mean, rho parameterizes
        # sigma via softplus (sigma = log1p(exp(rho)), see kl_loss/forward)
        self.mu_weight = nn.Parameter(torch.Tensor(out_features, in_features))
        self.rho_weight = nn.Parameter(torch.Tensor(out_features, in_features))
        # non-persistent buffers: sampling noise and the (constant) prior
        # moments — excluded from state_dict
        self.register_buffer('eps_weight',
                             torch.Tensor(out_features, in_features),
                             persistent=False)
        self.register_buffer('prior_weight_mu',
                             torch.Tensor(out_features, in_features),
                             persistent=False)
        self.register_buffer('prior_weight_sigma',
                             torch.Tensor(out_features, in_features),
                             persistent=False)
        if bias:
            self.mu_bias = nn.Parameter(torch.Tensor(out_features))
            self.rho_bias = nn.Parameter(torch.Tensor(out_features))
            self.register_buffer('prior_bias_mu', torch.Tensor(out_features), persistent=False)
            self.register_buffer('prior_bias_sigma',
                                 torch.Tensor(out_features),
                                 persistent=False)
            self.register_buffer('eps_bias', torch.Tensor(out_features), persistent=False)
        else:
            # register None placeholders so attribute access stays uniform
            self.register_buffer('prior_bias_mu', None, persistent=False)
            self.register_buffer('prior_bias_sigma', None, persistent=False)
            self.register_parameter('mu_bias', None)
            self.register_parameter('rho_bias', None)
            self.register_buffer('eps_bias', None, persistent=False)

        self.init_parameters()
        self.quant_prepare = False

    def prepare(self):
        """Attach quantization stubs (qint8 for weights, quint8 for
        activations) so forward() can record observer statistics for
        post-training quantization."""
        self.qint_quant = nn.ModuleList([torch.quantization.QuantStub(
            QConfig(weight=MinMaxObserver.with_args(dtype=torch.qint8, qscheme=torch.per_tensor_symmetric), activation=MinMaxObserver.with_args(dtype=torch.qint8, qscheme=torch.per_tensor_symmetric))) for _ in range(4)])
        self.quint_quant = nn.ModuleList([torch.quantization.QuantStub(
            QConfig(weight=MinMaxObserver.with_args(dtype=torch.quint8), activation=MinMaxObserver.with_args(dtype=torch.quint8))) for _ in range(8)])
        self.dequant = torch.quantization.DeQuantStub()
        self.quant_prepare = True

    def init_parameters(self):
        """Fill the prior buffers with the configured constants and draw
        initial posterior parameters (mu, rho) from N(init, 0.1)."""
        # init prior mu
        self.prior_weight_mu.fill_(self.prior_mean)
        self.prior_weight_sigma.fill_(self.prior_variance)

        # init weight and base perturbation weights
        self.mu_weight.data.normal_(mean=self.posterior_mu_init, std=0.1)
        self.rho_weight.data.normal_(mean=self.posterior_rho_init, std=0.1)

        if self.mu_bias is not None:
            self.prior_bias_mu.fill_(self.prior_mean)
            self.prior_bias_sigma.fill_(self.prior_variance)
            self.mu_bias.data.normal_(mean=self.posterior_mu_init, std=0.1)
            self.rho_bias.data.normal_(mean=self.posterior_rho_init, std=0.1)

    def kl_loss(self):
        """Return KL(q || prior) summed over the weight (and, when
        present, bias) distributions; kl_div comes from the base class."""
        sigma_weight = torch.log1p(torch.exp(self.rho_weight))
        kl = self.kl_div(self.mu_weight, sigma_weight, self.prior_weight_mu, self.prior_weight_sigma)
        if self.mu_bias is not None:
            sigma_bias = torch.log1p(torch.exp(self.rho_bias))
            kl += self.kl_div(self.mu_bias, sigma_bias, self.prior_bias_mu, self.prior_bias_sigma)
        return kl

    def forward(self, x, return_kl=True):
        """Flipout forward pass.

        Computes the mean output F.linear(x, mu, mu_bias) plus a
        per-example pseudo-independent perturbation obtained by sign
        flips on input and output of a sampled delta_W. Returns `out`,
        or `(out, kl)` when *return_kl* is true (forced off when the
        layer was converted via dnn_to_bnn — flag set by the base class).
        """
        if self.dnn_to_bnn_flag:
            return_kl = False
        # sampling delta_W
        sigma_weight = torch.log1p(torch.exp(self.rho_weight))
        eps_weight = self.eps_weight.data.normal_()
        delta_weight = sigma_weight * eps_weight
        # delta_weight = (sigma_weight * self.eps_weight.data.normal_())

        # get kl divergence
        if return_kl:
            kl = self.kl_div(self.mu_weight, sigma_weight, self.prior_weight_mu,
                             self.prior_weight_sigma)

        bias = None
        if self.mu_bias is not None:
            sigma_bias = torch.log1p(torch.exp(self.rho_bias))
            bias = (sigma_bias * self.eps_bias.data.normal_())
            if return_kl:
                kl = kl + self.kl_div(self.mu_bias, sigma_bias, self.prior_bias_mu,
                                      self.prior_bias_sigma)

        # linear outputs
        outputs = F.linear(x, self.mu_weight, self.mu_bias)
        # random +/-1 masks implementing the flipout trick
        sign_input = x.clone().uniform_(-1, 1).sign()
        sign_output = outputs.clone().uniform_(-1, 1).sign()
        x_tmp = x * sign_input
        perturbed_outputs_tmp = F.linear(x_tmp, delta_weight, bias)
        perturbed_outputs = perturbed_outputs_tmp * sign_output
        out = outputs + perturbed_outputs

        if self.quant_prepare:
            # feed intermediate tensors through the observers (no-op for
            # values; records ranges for later quantization)
            # quint8 quantstub
            x = self.quint_quant[0](x) # input
            outputs = self.quint_quant[1](outputs) # output
            sign_input = self.quint_quant[2](sign_input)
            sign_output = self.quint_quant[3](sign_output)
            x_tmp = self.quint_quant[4](x_tmp)
            perturbed_outputs_tmp = self.quint_quant[5](perturbed_outputs_tmp) # output
            perturbed_outputs = self.quint_quant[6](perturbed_outputs) # output
            out = self.quint_quant[7](out) # output

            # qint8 quantstub
            sigma_weight = self.qint_quant[0](sigma_weight) # weight
            mu_weight = self.qint_quant[1](self.mu_weight) # weight
            eps_weight = self.qint_quant[2](eps_weight) # random variable
            delta_weight = self.qint_quant[3](delta_weight) # multiply activation

        # returning outputs + perturbations
        if return_kl:
            return out, kl
        return out
|
import datetime
import hashlib
import re
import time
import requests
import redis
# NOTE(review): Redis and Baidu API credentials are hard-coded in source;
# move them to environment variables or a config file.
conn = redis.Redis(host='120.78.122.64', password='790623',db=7)
# md5 hex digest of a UTF-8 string; used both for API request signing
# and as the Redis cache key for translations.
md5 = lambda x:hashlib.md5(x.encode('utf-8')).hexdigest()
url = 'http://api.fanyi.baidu.com/api/trans/vip/translate'
appid = '20200530000477540'
appkey = 'ZpDXdRwvjmGEyo3G2XSO'
def genSign(q):
    """Build the Baidu translate API request signature for query *q*.

    Returns (salt, sign): salt is the current unix timestamp and sign
    is md5(appid + q + salt + appkey), as the API requires.
    """
    salt = int(datetime.datetime.now().timestamp())
    payload = '%s%s%s%s' % (appid, q, salt, appkey)
    return salt, md5(payload)
def baidu_translate_to_english(q):
    """Translate Chinese text *q* to English via the Baidu API.

    Returns the translated string, or '' when the response cannot be
    parsed or contains no translation (best-effort, as before). Network
    errors from requests.post still propagate, matching the original
    try-scope.
    """
    salt, sign = genSign(q)
    data = {
        'q': q,
        'from': 'zh',
        'to': 'en',
        'appid': appid,
        'salt': salt,
        'sign': sign
    }
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    response = requests.post(url, headers=headers, data=data)
    try:
        return response.json()['trans_result'][0]['dst']
    except (ValueError, KeyError, IndexError, TypeError):
        # was a bare except: narrowed to "malformed JSON or an error
        # payload without trans_result", so real bugs are not swallowed
        return ''
def translate(word, retry=0):
    """Translate *word* to English with a Redis cache in front of the
    Baidu API.

    Looks up md5(word) in Redis first; on a miss, calls the API and
    caches the result. Empty results or errors trigger up to three
    retries (with a 1s pause); after that '' is returned.
    """
    if not word:
        return ''
    if retry==3:
        # retry budget exhausted — give up quietly
        return ''
    key = md5(word)
    try:
        ret = conn.get(key)
        if ret and ret.decode('utf-8').strip():
            return ret.decode('utf-8').strip()
        else:
            pass
    except:
        # NOTE(review): bare except deliberately treats any Redis failure
        # as a cache miss, but also hides real bugs — consider narrowing.
        pass
    try:
        ret = baidu_translate_to_english(word)
        if ret:
            conn.set(key, ret)
            return ret
        else:
            time.sleep(1)  # brief back-off before retrying the API
            return translate(word, retry+1)
    except:
        # NOTE(review): bare except — even a failing conn.set() after a
        # successful translation triggers a full retry.
        return translate(word, retry+1)
def get_key(key):
    """Translate a Chinese identifier to English and render it in
    several naming conventions.

    Returns [original, lowerCamel, UpperCamel, hungarian (m-prefixed),
    snake_case, UPPER_SNAKE].
    """
    eng = translate(key)
    eng = re.sub(r'[/]',' or ',eng)          # a slash reads as "or"
    eng = re.sub(r'[\'\",.?\(\)]','',eng)    # strip punctuation
    # drop empty fragments before joining (''.capitalize() was a no-op
    # in the original, so camel case is unchanged by filtering first)
    words = [w for w in eng.lower().split(' ') if w.strip()]
    snake = '_'.join(words)
    big_camel = ''.join(w.capitalize() for w in words)
    little_camel = big_camel[0].lower() + big_camel[1:]
    hungary = 'm' + big_camel
    return [key, little_camel, big_camel, hungary, snake, snake.upper()]
if __name__ == '__main__':
    # Smoke test: derive the naming variants for "contact name".
    ret = get_key('联系人姓名')
    print(ret)
|
__author__ = "Gil Ortiz"
# Fixed typo: was "__version" (missing trailing underscores), which hid
# the value from tools that look for the conventional __version__ name.
__version__ = "1.0"
__date_last_modification__ = "12/7/2019"
__notes__ = ''' timecheck_synchronous.py - routine executed in standard synchronous mode
when executed, get_data() will be called synchronously 6x'''

import time
import random
import string

# reference point for the elapsed-time stamps printed by get_data()
start = time.time()
def get_data():
    """Generate and print one random 10-character password, stamped with
    the elapsed time since module import, then sleep 2s to simulate the
    function's "processing time"."""
    # random_num = random.randint(1,100)
    alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits
    random_password = ''.join(random.choice(alphabet) for _ in range(10))
    elapsed = time.time() - start
    print("Random generated password is %s ------ %.2f seconds" % (random_password, elapsed))
    time.sleep(2)  # this simulates "processing time" in the get_data() function.
def main():
    """Call get_data() six times back-to-back — the traditional
    synchronous baseline this timing demo measures against."""
    for _ in range(6):
        get_data()
# Entry point: run the synchronous timing demo.
if __name__ == '__main__':
    main()
from django.contrib import admin
from django.urls import path
# from GilioInventario.views import login, administrador
from superSu.views import *
from login.views import *
from administrador.views import *
from django.conf import settings
from django.conf.urls.static import static
# URL routing for the inventory site. View callables come from the
# star-imported superSu/login/administrador view modules above.
urlpatterns = [
    path('admin/', admin.site.urls),
    # path('', views.login, name="login"),
    # --- authentication ---
    path('', login),
    path('logout/', logout),
    path('login_ajax/', login_ajax),
    path('login_validar/', ValidarUsuario),
    # --- superuser pages ---
    path('super-usuario/', SuperSuHome),
    path('nuevo-usuario/', Nuevo_usuario),
    # --- administrator pages ---
    path('administrador/', AdministradorHome),
    path('administrador/inventario', AdministradorInventario),
    path('administrador/nuevo-articulo', AdministradorNuevoArticulo),
    path('administrador/areas', AdministradorAreas),
    path('administrador/nueva-area', AdministradorNuevaArea),
    path('administrador/guardar-area', Guardar_area),
    # path('administrador/', administrador),
    # --- misc / AJAX endpoints ---
    path('prueba/', prueba_get),
    path('prueba_up/', subir),
    path('buscar/', busqueda),
    path('buscar_ajax/', busqueda_ajax),
    path('altas/', altas),
    path('altas_ajax/', altas_ajax),
    path('cambios/', Cambios),
    path('cambios_ajax/', Cambios_ajax),
    path('eliminar/', Eliminar),
    path('eliminar_ajax/', Eliminar_ajax),
]
# Serve uploaded media through Django itself only during development.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# Scrape KBO baseball scores from scorecenterlive.com for the configured
# date range and write team pairs plus encoded results to an Excel file.
year_start = 2017
year_end = 2018
month_start = 4
month_end = 11
file_name = 'data.xlsx'

import requests
import utils
import numpy
from bs4 import BeautifulSoup
from openpyxl import Workbook

wb = Workbook()
sheet = wb.active
x_data, y_data = [],[]
# NOTE(review): the same month window is applied to every year, and days
# 29-31 are requested even for shorter months — presumably the site just
# returns no games for nonexistent dates; confirm.
for year in range(year_start, year_end + 1):
    for month in range(month_start, month_end + 1):
        for day in range(1 , 32):
            print('Year %d Month %d Day %d' %(year, month, day))
            # NOTE(review): the stray space after '?' is preserved from the
            # original URL; utils.numtoten presumably zero-pads to 2 digits.
            url = 'http://www.scorecenterlive.com/ko/sports-livescore.html? sports=bs&nation=ko&date='+str(year)+'-'+utils.numtoten(month)+'-'+utils.numtoten(day)
            source_code = requests.get(url).text
            soup = BeautifulSoup(source_code, 'lxml')
            name, score = [], []
            # min/max bracket the indices of KBO teams within all listed
            # teams (NOTE(review): these shadow the builtins min()/max())
            min, max = -1, -1
            index = 0
            #Parsing Data
            for data in soup.find_all("li", {"class":"team_n"}):
                # utils.KBO_num maps a team name to a numeric id; ids < 10
                # appear to denote KBO teams — TODO confirm in utils
                if int(utils.KBO_num(data.string)) < 10:
                    if min == -1:
                        min = index
                    max = index
                    name.append(data.string)
                index = index + 1
            index = 0
            index2 = 0  # index used to blank out entries in `name`
            for data in soup.find_all("li", {"class":"score"}):
                if index >= min and index <= max and len(name) > 1:
                    if data.string == None:
                        # no score posted: mark this team for removal
                        name[index2] = ""
                    else:
                        score.append(int(data.string))
                    index2 = index2 + 1
                index = index + 1
            #Processing Data
            # drop teams whose games had no score; iterate backwards so
            # deletions do not shift the indices still to be visited
            index = len(name) - 1
            while index >= 0:
                if name[index] == "":
                    del name[index]
                index = index - 1
            if len(score) > 1:
                for i in range(len(name)):
                    name[i] = utils.KBO_num(name[i])
                # collapse each score pair into 0 / 1 / 2 = first-listed
                # team won / lost / draw — TODO confirm listing order
                tmp = []
                for i in range(0, len(score), 2):
                    if score[i] > score[i + 1]:
                        tmp.append(0)
                    elif score[i] < score[i + 1]:
                        tmp.append(1)
                    else:
                        tmp.append(2)
                score = tmp
                print(name, score)
                x_data.extend(name)
                y_data.extend(score)
# one spreadsheet row per game: both team ids plus the encoded result
x_data = numpy.reshape(numpy.array(x_data), [int(len(x_data)/2), 2])
for i in range(x_data.shape[0]):
    sheet.cell(row=i+1, column=1, value=x_data[i, 0])
    sheet.cell(row=i+1, column=2, value=x_data[i, 1])
    sheet.cell(row=i+1, column=3, value=y_data[i])
wb.save(filename=file_name)
|
"""Variational Auto-Encoder
arXiv:1312.6114v10
"""
import matplotlib.pyplot as plt
import torch
import torch.nn.functional as F
from datasets import MyDataset, get_mnist
from functools import reduce
from torch.utils.data import DataLoader
from tqdm import tqdm
from utils import compose
class AutoEncoder(torch.nn.Module):
    """Auto-Encoder with Bernoulli output"""

    def __init__(self, input_dim, latent_dim=2, num_noise_samples=1):
        """
        :param input_dim: flattened input size (e.g. 784 for MNIST)
        :param latent_dim: dimensionality of the latent code
        :param num_noise_samples: latent samples drawn per forward pass
        """
        super().__init__()
        self.num_noise_samples = num_noise_samples
        self.latent_dim = latent_dim
        self.enc_fc1 = torch.nn.Linear(input_dim, 512)
        # two heads on the shared encoder layer: mean and log-variance
        self.enc_fc2 = torch.nn.Linear(512, latent_dim)
        self.enc_fc3 = torch.nn.Linear(512, latent_dim)
        self.dec_fc1 = torch.nn.Linear(latent_dim, 512)
        self.dec_fc2 = torch.nn.Linear(512, input_dim)

    def encode(self, x):
        """Return (mean, logvar) of the approximate posterior q(z|x)."""
        h = torch.tanh(self.enc_fc1(x))
        return self.enc_fc2(h), self.enc_fc3(h)

    def decode(self, mean, var):
        """Decode one reparameterized sample z = mean + var * eps.

        NOTE(review): *var* multiplies eps directly, i.e. it acts as the
        noise scale here; forward() passes exp(logvar) — confirm whether
        exp(0.5 * logvar) (a std-dev) was intended.
        """
        eps = torch.randn_like(var)
        # pipeline dec_fc1 -> relu -> dec_fc2 -> sigmoid (assumes the
        # project's `compose` chains left-to-right — verify in utils)
        f = reduce(compose,
                   [self.dec_fc1, torch.relu, self.dec_fc2, torch.sigmoid])
        return f(mean + var * eps)

    def forward(self, x):
        """Return (mean, var, samples); samples is stacked on dim 1 with
        shape (batch, num_noise_samples, input_dim)."""
        mean, logvar = self.encode(x)
        var = torch.exp(logvar)
        outs = [self.decode(mean, var) for _ in range(self.num_noise_samples)]
        return mean, var, torch.stack(outs, dim=1)
def neg_elbo(mu, sigma2, x, x_gen, weight=.5):
    """Negative evidence lower bound for a Gaussian prior and Bernoulli
    likelihood.

    *mu*/*sigma2* are the approximate posterior mean and variance;
    *x_gen* holds one reconstruction per latent sample along dim 1;
    *weight* trades the KL term off against the reconstruction term.
    """
    # KL(q(z|x) || N(0, I)), summed over batch and latent dimensions
    kl = -0.5 * torch.sum(1 + torch.log(sigma2) - mu ** 2 - sigma2)
    # tile the target so it matches x_gen's (batch, samples, dim) layout
    n_samples = x_gen.shape[1]
    target = x.repeat(n_samples, 1, 1).transpose(0, 1)
    recon = F.binary_cross_entropy(x_gen, target, reduction='sum')
    return weight * kl + (1 - weight) * recon
class VAE:
    """Trainer wrapper around AutoEncoder implementing the VAE
    objective (negative ELBO) with Adam."""

    def __init__(self,
                 data_dim,
                 latent_dim=2,
                 num_noise_samples=1,
                 learning_rate=1e-4,
                 device=None):
        self.data_dim = data_dim
        self.latent_dim = latent_dim
        self.num_noise_samples = num_noise_samples
        self.ae = AutoEncoder(data_dim, latent_dim, num_noise_samples)
        # NOTE(review): only the exact string 'cuda' selects the GPU;
        # devices like 'cuda:0' silently fall back to CPU.
        if device is None or str(device) != 'cuda':
            self.dev = torch.device('cpu')
        else:
            self.dev = device
            self.ae = self.ae.cuda()
        self.optimizer = torch.optim.Adam(
            self.ae.parameters(), lr=learning_rate)

    def fit(self,
            dataset,
            epochs=40,
            batch_size=64,
            loss_weight=.5,
            verbose=False):
        """Train for *epochs* passes over *dataset*, minimizing the
        negative ELBO; prints per-epoch cumulative loss when verbose."""
        loader = DataLoader(dataset, batch_size)
        for epoch in range(epochs):
            cum_loss = 0.
            if verbose:
                # NOTE(review): rebinds `loader` to a tqdm wrapper each
                # epoch — wraps a wrapper from epoch 2 onward
                loader = tqdm(loader)
            for x in loader:
                x = x.to(self.dev)
                mus, sigma2s, x_gen = self.ae(x)
                # scale the batch loss up to a full-dataset estimate
                loss = len(dataset) / len(x) * neg_elbo(
                    mus, sigma2s, x, x_gen, weight=loss_weight)
                cum_loss += loss.item()
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
            if verbose:
                print('epoch', epoch, cum_loss)

    @torch.no_grad()
    def generate(self, mean, var):
        """Decode a (mean, var) latent pair into a data-space sample."""
        return self.ae.decode(mean, var)

    @torch.no_grad()
    def latent_representation(self, x):
        """Return the encoder's (mean, logvar) for *x*."""
        return self.ae.encode(x)
if __name__ == '__main__':
    # Demo: train briefly on MNIST, then decode one random latent point.
    train_x, train_y, test_x, test_y = get_mnist()
    # scale pixels into [0, 1] to match the Bernoulli (sigmoid) decoder
    train_x /= 255
    test_x /= 255
    if torch.cuda.is_available():
        dev = torch.device('cuda')
    else:
        dev = torch.device('cpu')
    dataset = MyDataset(train_x)
    m = VAE(28 * 28, device=dev)
    m.fit(dataset, epochs=5, verbose=True)
    # pick an arbitrary latent (mean, var) pair and visualize the decode
    mean = torch.randn(1, m.latent_dim)
    var = 10 * torch.rand(1, m.latent_dim)
    im = m.generate(mean, var)
    plt.imshow(im.detach().numpy().reshape(28, 28))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# Create your models here.
#class Student(models.Model):
# name = models.CharField(max_length = 128,verbose_name = "name")
# work_time = models.CharField(max_length = 128,verbose_name = "work time")
# work_location= models.CharField(max_length = 128,verbose_name = "work location")
## work_content= models.CharField(max_length = 128,verbose_name = "work content")
## work_requirment= models.CharField(max_length = 128,verbose_name = "work requirment")
## payment= models.CharField(max_length = 128,verbose_name = "payment")
## roll_number= models.CharField(max_length = 128,verbose_name = "roll number")
## remarks = models.CharField(max_length = 128,verbose_name = "remarks")
## work_time= models.DateTimeField(auto_now_add = True,editable = False, verbose_name = "work time")
#
# def __unicode__(self):
# return '<Student: {}>'.format(self.name)
#
# class Meta:
# verbose_name = verbose_name_plural = "information of students"
#
class Work(models.Model):
    """Part-time job posting shown on the notice board (兼职公告)."""
    # poster's name / working hours / location (verbose_name in Chinese)
    name = models.CharField(max_length = 128,verbose_name = "名字")
    work_time = models.CharField(max_length = 128,verbose_name = "工作时间")
    work_location= models.CharField(max_length = 128,verbose_name = "工作地点")
    #slug = models.CharField(max_length = 128,verbose_name = "slug")
    # work_content= models.CharField(max_length = 128,verbose_name = "work content")
    # work_requirment= models.CharField(max_length = 128,verbose_name = "work requirment")
    # payment= models.CharField(max_length = 128,verbose_name = "payment")
    # roll_number= models.CharField(max_length = 128,verbose_name = "roll number")
    # remarks = models.CharField(max_length = 128,verbose_name = "remarks")
    # work_time= models.DateTimeField(auto_now_add = True,editable = False, verbose_name = "work time")

    def __unicode__(self):
        """Display the posting by name (Python 2 style Django model)."""
        #return '<Work: {}>'.format(self.name)
        return self.name

    class Meta:
        verbose_name = verbose_name_plural = "兼职公告"
class User(models.Model):
    """Site account: name, email, password and an activation flag."""
    name = models.CharField(max_length=20, null=False)
    email = models.EmailField()
    # NOTE(review): password stored as plain text — consider Django's
    # auth framework / password hashing.
    password = models.CharField(max_length=20, null=False)
    enable = models.BooleanField(default=False)  # False until activated

    def __unicode__(self):
        return self.name
# Read n, then print every 4-digit number with no zero digit whose first
# two digits and last two digits have equal sums, where that common sum
# divides n evenly.
n = int(input())
special_numbers = []
for number in range(1111, 10000):
    digits = [int(ch) for ch in str(number)]
    if 0 in digits:
        continue
    left_sum = digits[0] + digits[1]
    right_sum = digits[2] + digits[3]
    if left_sum == right_sum and n % left_sum == 0:
        special_numbers.append(number)
print(*special_numbers)
|
import base64
import logging
import os
import asyncio
import signal
import ipaddress
from configparser import ConfigParser
import click
import jinja2
import aiohttp_jinja2
from aiohttp import web
from aiohttp_session import session_middleware
from aiohttp_session.cookie_storage import EncryptedCookieStorage
from cryptography.fernet import Fernet
from concurrent.futures import ThreadPoolExecutor
from . import views
def config_load(config_file):
    """Parse *config_file* as INI (values may be bare keys) and return
    the populated ConfigParser instance."""
    parser = ConfigParser(allow_no_value=True)
    parser.read(config_file)
    return parser
def config_logging(config, log_level=None):
    """Configure root logging from the [log] config section.

    An explicit *log_level* (e.g. from the CLI) overrides the config
    value; both default sensibly when absent.
    """
    level_name = log_level or config.get('log', 'level', fallback='info')
    fmt = config.get('log', 'format', fallback='%(asctime)s %(levelname)-8s %(message)s')
    logging.basicConfig(level=getattr(logging, level_name.upper()), format=fmt)
def root_package_name():
    """Return the top-level package this module belongs to (the part of
    __name__ before the first dot)."""
    return __name__.partition('.')[0]
def root_package_path(relative_path=None):
    """Absolute path of the root package directory, optionally joined
    with *relative_path*."""
    # root_package_name() inlined: the root package is everything before
    # the first dot of this module's __name__
    root_module = __import__(__name__.split('.')[0])
    base = os.path.dirname(os.path.abspath(root_module.__file__))
    return base if relative_path is None else os.path.join(base, relative_path)
class WebServer:
    """aiohttp application wrapper: builds the app from the INI config,
    wires sessions, Jinja templates, routes and static files, and owns
    the listening socket."""

    def __init__(self, config, loop=None):
        self._loop = loop        # asyncio event loop the server runs on
        self._srv = None         # listening server, created by start()
        self._handler = None     # aiohttp request handler factory
        self._app = None         # the aiohttp Application
        self._cfg = config       # parsed ConfigParser instance

    async def start(self):
        """Create the aiohttp app and start listening on [http] bind."""
        # Fernet key must be 32 bytes.
        cookie_secret = self._cfg.get('http', 'cookie_secret', fallback=None)
        if cookie_secret is None:
            # no configured secret: generate one per process, so sessions
            # do not survive a restart
            cookie_secret = base64.urlsafe_b64decode(Fernet.generate_key())
        middlewares = [
            session_middleware(EncryptedCookieStorage(cookie_secret)),
        ]
        self._app = web.Application(middlewares=middlewares)
        self._app.data_dir = self._cfg.get('http', 'data', fallback='./data')
        default_creds = os.path.join(self._app.data_dir, 'credentials')
        self._app.credentials_file = self._cfg.get('http', 'credentials', fallback=default_creds)
        self._app.ioloop = self._loop
        self._app.som_url = self._cfg.get('http', 'som-url', fallback=None)
        self._app.meta_data_cache = {}
        # networks allowed to reach the server; defaults to localhost only
        net_whitelist = self._cfg.get('http', 'whitelist', fallback='127.0.0.1/32')
        if net_whitelist is not None:
            self._app.net_whitelist = [ipaddress.ip_network(net) for net in net_whitelist.split()]
        # thread pool for blocking work kicked off from request handlers
        self._executor = ThreadPoolExecutor(4)
        self._loop.set_default_executor(self._executor)
        def jinja_url_helper(route_name, *args, **kwargs):
            # lets templates build URLs from named routes
            return self._app.router[route_name].url(*args, **kwargs)
        jinja_env = aiohttp_jinja2.setup(
            self._app,
            loader=jinja2.FileSystemLoader(root_package_path('templates')))
        jinja_env.globals['url'] = jinja_url_helper
        # views.handlers holds (handler, args, kwargs) tuples declared by
        # the views module; args carries exactly the path
        for handler, args, kwargs in views.handlers:
            path, = args
            methods = kwargs.get('methods', ['GET'])
            name = kwargs.get('name')
            for method in methods:
                self._app.router.add_route(method, path, handler, name=name)
        self._app.router.add_static('/images', self._app.data_dir)
        self._app.router.add_static('/', root_package_path('web-static'), name='static')
        host, port = self._cfg.get('http', 'bind', fallback='127.0.0.1:8000').split(':')
        self._handler = self._app.make_handler()
        self._srv = await self._loop.create_server(self._handler, host, int(port))

    async def stop(self):
        """Drain connections (1s grace), close the socket and the app."""
        await self._handler.finish_connections(1.0)
        self._srv.close()
        await self._srv.wait_closed()
        await self._app.finish()
@click.command()
@click.option('-c', '--config', 'config_file', required=True, type=click.Path(exists=True, dir_okay=False))
@click.option('-l', '--log-level', 'log_level')
def cli(config_file, log_level):
    """Entry point: load config, set up logging, then run the web
    server until SIGTERM / Ctrl-C and shut it down cleanly."""
    config = config_load(config_file)
    config_logging(config, log_level)
    loop = asyncio.get_event_loop()
    try:
        loop.add_signal_handler(signal.SIGTERM, loop.stop)
    except NotImplementedError:
        # signals are not available on Windows
        pass
    webserver = WebServer(config, loop=loop)
    loop.run_until_complete(webserver.start())
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    finally:
        loop.run_until_complete(webserver.stop())
        # wait for all remaining tasks to finish, then exit.
        # NOTE(review): asyncio.Task.all_tasks() was removed in Python
        # 3.9 — modern code should call asyncio.all_tasks(loop).
        pending = asyncio.Task.all_tasks()
        loop.run_until_complete(asyncio.gather(*pending))
        loop.close()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-09-25 19:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 1.9): creates the Teams
    table and drops the obsolete Stock model. Avoid hand-editing."""

    dependencies = [
        ('companies', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Teams',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('team_name', models.CharField(max_length=20)),
                ('mentor', models.CharField(max_length=20)),
                ('mem1', models.CharField(max_length=20)),
                ('mem2', models.CharField(max_length=20)),
                ('mem3', models.CharField(max_length=20)),
                ('member_nos', models.IntegerField()),
            ],
        ),
        migrations.DeleteModel(
            name='Stock',
        ),
    ]
|
from PIL import Image
def decrypt(image, out='decrypted.png'):
    """
    Decode a steganographic image: every pixel whose red channel is even
    becomes white and every odd-red pixel becomes black; the result is
    written to *out*.

    :param image: Path to image-like (png, jpeg, ...) to be decrypted
    :type image: String
    :param out: Name of the file to store the decrypted image in
    :type out: String
    """
    img = Image.open(image)
    pixels = img.load()
    width, height = img.size
    for x in range(width):
        for y in range(height):
            red_is_even = pixels[x, y][0] % 2 == 0
            # even red -> white pixel, odd red -> black pixel
            pixels[x, y] = (255, 255, 255) if red_is_even else (0, 0, 0)
    img.save(out)
if __name__ == '__main__':
    # Demo: decode a sample image from the test-case folder.
    decrypt('./Test cases/111111.png')
|
import streamlit as st
import tweepy
from wordcloud import WordCloud
import pandas as pd
import numpy as np
import re
import matplotlib.pyplot as plt
from PIL import Image
import seaborn as sns
#import config
from functions import *
import json
# Twitter API credentials come from Streamlit's secrets store
# (.streamlit/secrets.toml) rather than being hard-coded here.
consumerKey = st.secrets["consumerKey"]
consumerSecret = st.secrets["consumerSecret"]
accessToken = st.secrets["accessToken"]
accessTokenSecret =st.secrets["accessTokenSecret"]
try:
    #Create the authentication object
    authenticate = tweepy.OAuthHandler(consumerKey, consumerSecret)
    # Set the access token and access token secret
    authenticate.set_access_token(accessToken, accessTokenSecret)
    # Creating the API object while passing in auth information
    api = tweepy.API(authenticate, wait_on_rate_limit = True)
    api.verify_credentials()
    st.write("Connected to twitter")
except:
    # NOTE(review): bare except hides the actual auth/network error;
    # consider catching tweepy.TweepyException and logging it.
    st.write("Error during connection with twitter")
#plt.style.use('fivethirtyeight')
def app(choice):
    """Render the Streamlit page selected by ``choice``.

    choice: one of "Tweet Analyzer", "Generate Twitter Data" or
    "Watch trending". Any other value renders only the title and the
    footer. Uses the module-level tweepy ``api`` object and the text
    helpers (cleanTxt, getSubjectivity, getPolarity, getAnalysis)
    imported from ``functions``.
    """
    st.title("Twitter Activity Analyzer")
    if choice=="Tweet Analyzer":
        st.subheader("Analyze the tweets of some famous personalities")
        st.subheader("Following are the tasks performed: ")
        st.write("1. Fetches the 5 most recent tweets from the provided twitter handel")
        st.write("2. Generates a Word Cloud from the recent tweets")
        st.write("3. Performs Sentiment Analysis")
        raw_text = st.text_area("Enter the exact twitter handle of the Personality (with or without @)")
        st.markdown("You can select different activities from the selectbox")
        Analyzer_choice = st.selectbox("Select the Activities", ["Show Recent Tweets","Generate WordCloud" ,"Visualize the Sentiment Analysis"])
        if st.button("Analyze"):
            if Analyzer_choice == "Show Recent Tweets":
                st.success("Fetching last 5 Tweets")
                def Show_Recent_Tweets(raw_text):
                    # Extract 5 tweets from the twitter user
                    posts = api.user_timeline(screen_name=raw_text, count = 5, lang ="en", tweet_mode="extended")
                    def get_tweets():
                        # Number each tweet "1. ...", "2. ..." for display.
                        l=[]
                        i=1
                        for tweet in posts[:5]:
                            l.append(str(i)+". "+tweet.full_text+"\n")
                            i+=1
                        return l
                    recent_tweets=get_tweets()
                    return recent_tweets
                recent_tweets= Show_Recent_Tweets(raw_text)
                st.write("\n".join(recent_tweets))
            elif Analyzer_choice=="Generate WordCloud":
                st.success("Generating the Word Cloud")
                def gen_wordcloud():
                    posts = api.user_timeline(screen_name=raw_text, count = 100, lang ="en", tweet_mode="extended")
                    # Create a dataframe with a column called Tweets
                    df = pd.DataFrame([tweet.full_text for tweet in posts], columns=['Tweets'])
                    # Clean the tweets
                    df['Tweets'] = df['Tweets'].apply(cleanTxt)
                    # word cloud visualization
                    allWords = ' '.join([twts for twts in df['Tweets']])
                    wordCloud = WordCloud(width=500, height=300, random_state=21, max_font_size=110).generate(allWords)
                    plt.imshow(wordCloud, interpolation="bilinear")
                    plt.axis('off')
                    # Round-trip through a file so Streamlit can display it.
                    plt.savefig('WC.jpg')
                    img= Image.open("WC.jpg")
                    return img
                img=gen_wordcloud()
                st.image(img)
            else:
                def Plot_Analysis():
                    st.success("Generating Visualization for Sentiment Analysis")
                    posts = api.user_timeline(screen_name=raw_text, count = 100, lang ="en", tweet_mode="extended")
                    df = pd.DataFrame([tweet.full_text for tweet in posts], columns=['Tweets'])
                    # Clean the tweets by applying functions from functions module
                    df['Tweets'] = df['Tweets'].apply(cleanTxt)
                    # Create two new columns 'Subjectivity' & 'Polarity' by applying functions from functions module
                    df['Subjectivity'] = df['Tweets'].apply(getSubjectivity)
                    df['Polarity'] = df['Tweets'].apply(getPolarity)
                    # create a new column based on polarity score by applying functions from functions module
                    df['Analysis'] = df['Polarity'].apply(getAnalysis)
                    return df
                df= Plot_Analysis()
                st.write(sns.countplot(x=df["Analysis"],data=df))
                # NOTE(review): st.pyplot() without an explicit figure relies
                # on the implicit global figure, which newer Streamlit
                # versions reject — confirm installed Streamlit version.
                st.pyplot(use_container_width=True)
    elif choice=="Generate Twitter Data":
        st.subheader("This tool fetches the tweets from the twitter handel & Performs the following tasks")
        st.write("1. Converts it into a DataFrame")
        st.write("2. Cleans the text")
        st.write("3. Analyzes Subjectivity of tweets and adds an additional column for it")
        st.write("4. Analyzes Polarity of tweets and adds an additional column for it")
        st.write("5. Analyzes Sentiments of tweets and adds an additional column for it")
        user_name = st.text_area("*Enter the exact twitter handle of the Personality (without @)*")
        st.markdown("<-------- Also Do checkout the another cool tool from the sidebar")
        count=st.slider("How many tweets to fetch",min_value=10,max_value=100)
        def get_data(user_name,count):
            # Fetch, clean and annotate `count` tweets as a DataFrame.
            posts = api.user_timeline(screen_name=user_name, count = count, lang ="en", tweet_mode="extended")
            df = pd.DataFrame([tweet.full_text for tweet in posts], columns=['Tweets'])
            # Clean the tweets
            df['Tweets'] = df['Tweets'].apply(cleanTxt)
            # Create two new columns 'Subjectivity' & 'Polarity' by applying functions from functions module
            df['Subjectivity'] = df['Tweets'].apply(getSubjectivity)
            df['Polarity'] = df['Tweets'].apply(getPolarity)
            # create a new column based on polarity score by applying functions from functions module
            df['Analysis'] = df['Polarity'].apply(getAnalysis)
            return df
        if st.button("Show Data"):
            st.success(f"Fetching Last {count} Tweets")
            df=get_data(user_name,count)
            st.write(df)
    # if watch trending is used
    elif choice=="Watch trending":
        st.subheader("To see what is trending around")
        # country name -> Yahoo WOEID used by the twitter trends endpoint
        country={"World":1,"India":23424848,"US":23424977,"Germany":23424829,"Australia":23424748,"UK":23424975}
        choosen_country=st.selectbox("Choose location",list(country.keys()))
        location=country[choosen_country]
        trend_count=st.slider("How many top tweets to fetch",min_value=5,max_value=20)
        # NOTE(review): api.trends_place was renamed get_place_trends in
        # tweepy v4 — confirm the installed tweepy version supports this.
        trends_result = api.trends_place(location)
        if st.button("Show top {} trending".format(trend_count)):
            li=list()
            i=1
            for trend in trends_result[0]["trends"][:trend_count]:
                li.append(str(i)+". "+cleanTxt(trend["name"]))
                i+=1
            st.write("\n\n".join(li))
        def genWordCloud():
            li=list()
            for trend in trends_result[0]["trends"][:50]:
                li.append(cleanTxt(trend["name"]))
            # NOTE(review): this local name shadows the builtin `str`
            # for the rest of this function.
            str=" ".join(li)
            # Split CamelCase hashtags into separate words.
            res_list = [s for s in re.split("([A-Z][^A-Z]*)", str) if s]
            # word cloud visualization
            allWords = ' '.join(res_list)
            wordCloud = WordCloud(width=500, height=300, random_state=21, max_font_size=110).generate(allWords)
            plt.imshow(wordCloud, interpolation="bilinear")
            plt.axis('off')
            plt.savefig('WC.jpg')
            img= Image.open("WC.jpg")
            return img
        if st.button("Make word cloud of trending topics"):
            img=genWordCloud()
            st.image(img)
    st.subheader(':sunglasses: ------------------- Made By SM,PKS and TT ----------------- :sunglasses:')
if __name__ == "__main__":
    # Bug fix: `app` requires a page choice; calling app() with no
    # argument raised TypeError. Default to the tweet-analyzer page.
    app("Tweet Analyzer")
|
import pandas as pd
import os
# Directory holding one candidate prediction file per parameter set.
folder = "data/"
files = os.listdir(folder)
print("\n", len(files), " files found\n")
# Read one prediction file just to learn the state column names.
states = pd.read_csv(folder + files[0], delimiter=" ", header=1)
del states["Day"]
states = states.columns
# errors.loc[file, state] accumulates that file's sum of squared errors
# for that state (float zeros instead of NaN + fillna).
errors = pd.DataFrame(0.0, index=files, columns=states)
# Ground truth: daily counts per state from covid19india's export.
state_wise_daily = pd.read_csv('state_wise_daily.csv')
del state_wise_daily["DN"]
del state_wise_daily["DD"]
del state_wise_daily["ML"]
del state_wise_daily["MZ"]
del state_wise_daily["NL"]
# Rename column TT as Total and move it to the end.
state_wise_daily.rename(columns={"TT" : "Total"}, inplace=True)
column_total = state_wise_daily.pop("Total")
state_wise_daily["Total"] = column_total
# Keep only the "Confirmed" rows, then drop bookkeeping columns.
state_wise_daily = state_wise_daily[state_wise_daily.Status == "Confirmed"]
del state_wise_daily["Status"]
del state_wise_daily["Date"]
state_wise_daily.reset_index(drop=True, inplace=True)
# Turn daily counts into cumulative counts. Replaces the original
# quadratic Python loop (`.loc[i] += .loc[i-1]`) with a vectorized call.
state_wise_daily = state_wise_daily.cumsum()
# Skip the first 9 days and renumber the rows from 0.
state_wise_daily = state_wise_daily.iloc[9:].reset_index(drop=True)
for file in files:
    df = pd.read_csv(folder + file, delimiter=" ", header=1)
    for state in states:
        # Sum of squared differences over the overlapping date range.
        n = min(len(state_wise_daily.index), len(df.index))
        diff = state_wise_daily.loc[:n - 1, state] - df.loc[:n - 1, state]
        errors.loc[file, state] = (diff * diff).sum()
# For every state, pick the file with the smallest error.
# (DataFrame.append was removed in pandas 2.0; idxmin gives the same
# per-column winners directly.)
best_files = errors.idxmin(axis=0)
best_parameters = pd.DataFrame()
best_parameters["state"] = list(errors.columns)
best_parameters["file"] = list(best_files)
best_parameters.to_csv('best_parameters.data', sep=" ", index=False)
print("\nData written to best_parameters.data\n")
class Board():
    """A ``size`` x ``size`` grid of single-character cells (tic-tac-toe style).

    Cells hold " " when empty. Public coordinates are 1-based.
    """

    def __init__(self, size=3):
        """Create an empty size x size board."""
        self.size = size
        self.board = []
        for r in range(self.size):
            self.board.append([])
            for c in range(self.size):
                self.board[r].append(" ")

    def print_board(self):
        """Pretty-print the board with ASCII cell separators."""
        print("----"*len(self.board[0]))
        for line in self.board:
            for column in line:
                print("| {} ".format(column), end="")
            print("|")
        print("----"*len(self.board[0]))

    def clear_board(self):
        """Reset every cell back to a blank space.

        Bug fix: the original iterated over ``self.row`` / ``self.column``,
        attributes that were never defined (AttributeError at runtime);
        the board dimensions live in ``self.size``.
        """
        for r in range(self.size):
            for c in range(self.size):
                self.board[r][c]=" "

    def add_symbol(self,symbol,row,column):
        """Place ``symbol`` at 1-based (row, column); return True on success."""
        if self.is_in_boundaries(row,column):
            self.board[row-1][column-1]=symbol
            return True
        else:
            return False

    def is_full(self):
        """Return True when no blank cell remains."""
        for r in range(len(self.board)):
            for c in range(len(self.board[0])):
                if self.board[r][c]==' ':
                    return False
        return True

    def is_position_taken(self,row,column):
        """Return True iff the 1-based (row, column) cell is occupied.

        Bug fix: the original implicitly returned None for a free
        in-bounds cell; now every path returns an explicit boolean.
        """
        if self.is_in_boundaries(row,column):
            if self.board[row-1][column-1]!=" ":
                print("This position is taken")
                return True
            return False
        else:
            print("This position is not existing")
            return False

    def is_in_boundaries(self,row,column):
        """Check that the 1-based (row, column) lies on the board."""
        return 0<row and row<=len(self.board) and 0<column and column<=len(self.board[0])

    def is_there_a_winner(self):
        """Scan rows, columns and both diagonals for a completed line.

        Returns (True, symbol) when one symbol fills a whole line,
        otherwise (False, ' ').
        """
        #Step 1 find if there a winner in the row
        for r in range(self.size):
            potential_winner_symbol=self.board[r][0]
            if potential_winner_symbol==" ":
                continue
            is_winner=True
            for c in range(self.size):
                # Check winner in a row
                if self.board[r][c]!=potential_winner_symbol:
                    is_winner=False
                    break
            if is_winner:
                print("winner step 1")
                return (True,potential_winner_symbol)
        #Step 2: Check if there is a winner in the column
        for c in range(self.size):
            potential_winner_symbol = self.board[0][c]
            if potential_winner_symbol==" ":
                continue
            is_winner = True
            for r in range(self.size):
                if self.board[r][c] != potential_winner_symbol:
                    is_winner = False
                    break
            if is_winner:
                print("winner step 2")
                return (True,potential_winner_symbol)
        #Step 3: Check the diagonal from top-left to bottom-right
        potential_winner_symbol = self.board[0][0]
        if potential_winner_symbol != " ":
            is_winner = True
            for e in range(self.size):
                if self.board[e][e] != potential_winner_symbol:
                    is_winner = False
                    break
            if is_winner:
                print("winner step 3")
                return (True,potential_winner_symbol)
        #Step 4: Check the diagonal from top-right to bottom-left
        potential_winner_symbol = self.board[0][len(self.board)-1]
        if potential_winner_symbol != " ":
            is_winner = True
            for e in range(self.size):
                if self.board[e][len(self.board)-1-e] != potential_winner_symbol:
                    is_winner = False
                    break
            if is_winner:
                print("winner step 4")
                return (True,potential_winner_symbol)
        #we test all cases
        print("No winner step 5")
        return (False,' ')
|
import sys, os, csv, matplotlib, numpy
from operator import itemgetter
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib.backends.backend_pdf import PdfPages
sys.path.append(os.getenv("BC") + '-old')
from constants import CATEGORY_COLORS, ALL_COLORS, DISPLAY_LABELS
from plotting import set_common_rc_params
from matplotlib import rcParams
set_common_rc_params(rcParams)
TITLE_FONT_SIZE = 36
XAXIS_FONT_SIZE = 36
YAXIS_FONT_SIZE = 28
def read_data(filepath):
hs = {}
with open(filepath, 'r') as f:
reader = csv.reader(f, delimiter="\t")
for row in reader:
try:
cat = row[0]
h = float(row[2])
if cat not in hs:
hs[cat] = []
hs[cat].append(h)
except:
print 'Could not read:', row
exit()
return hs
def plot_pbias(dest, user_bias, cat_excludes=set(), xline=None,
        title=None, xlabel=None, ylabel=None, show_legend=False, elinewidth=2,
        xticksfont=XAXIS_FONT_SIZE, yticksfont=YAXIS_FONT_SIZE):
    """Save a horizontal bar chart of mean per-category bias to a PDF.

    dest: output PDF path (parent directories are created as needed).
    user_bias: dict mapping category -> list of per-user bias values.
    cat_excludes: categories to leave out of the plot.
        NOTE(review): mutable default argument; harmless here because it
        is only read, never mutated.
    xline: optional x position for a vertical baseline marker.
    """
    if not os.path.exists(os.path.dirname(dest)):
        os.makedirs(os.path.dirname(dest))
    # plot using bars, ordered by ascending mean bias
    cats_means = {cat: numpy.mean(h) for cat, h in user_bias.items() if cat not in cat_excludes}
    ordered_cats_means = sorted(cats_means.items(), key=itemgetter(1))
    cats, means = zip(*ordered_cats_means)
    # Error bars: 2 * standard error of the mean (~95% CI).
    errs = [2 * numpy.std(user_bias[c]) / numpy.sqrt(len(user_bias[c])) for c in cats]
    error_config = {'linewidth': elinewidth, 'capsize': 0, 'ecolor': '#000000'}
    x = range(len(means))
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.barh(x, means,
        alpha=1, align='center', linewidth=1,
        color=[ALL_COLORS[c] for c in cats],
        xerr=errs, error_kw=error_config)
    if show_legend:
        # One colored patch per category for the legend.
        patches = []
        for cat in sorted(CATEGORY_COLORS):
            p = mpatches.Patch(
                edgecolor='#000000',
                linewidth=1,
                facecolor=CATEGORY_COLORS[cat],
                label=DISPLAY_LABELS[cat]
            )
            patches.append(p)
        plt.legend(handles=patches, loc='lower left')
    ax.set_ylim(bottom=x[0] - 1, top=x[-1] + 1)
    yvals = numpy.linspace(x[0] - 1, x[-1] + 1, 50)
    if xline is not None:
        # Vertical reference line + annotation for the baseline.
        yvals = numpy.linspace(x[0] - 1, x[-1] + 1, 50)
        ax.plot([xline for _ in range(len(yvals))], yvals, color='#000000')
        ax.annotate('random walker\nbaseline',
            xy=(xline - .005, x[0] - .7), xycoords='data',
            xytext=(50, 10), textcoords='offset points',
            arrowprops=dict(
                arrowstyle="->",
                connectionstyle="angle3,angleA=90,angleB=0"
            )
        )
    plt.yticks(x, [DISPLAY_LABELS[c] if c in DISPLAY_LABELS else c for c in cats])
    # NOTE(review): this bare plt.legend() replaces the handle-based
    # legend built above when show_legend is True — looks unintentional.
    plt.legend()
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(yticksfont)
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(xticksfont)
    if title is not None:
        plt.title(title, fontsize=TITLE_FONT_SIZE, y=1.02)
    if xlabel is not None:
        ax.set_xlabel(xlabel, fontsize=TITLE_FONT_SIZE)
    if ylabel is not None:
        ax.set_ylabel(ylabel, fontsize=TITLE_FONT_SIZE)
    # NOTE(review): duplicate of the directory check at the top of the
    # function; redundant but harmless.
    if not os.path.exists(os.path.dirname(dest)):
        os.makedirs(os.path.dirname(dest))
    plt.tight_layout()
    pp = PdfPages(dest)
    fig.savefig(pp, format='pdf')
    pp.close()
    plt.close()
if __name__ == "__main__":
    print "Processing random walks"
    # The random-walker baseline is currently disabled; re-enable these
    # lines (and the `xline=` arguments below) to draw the baseline marker.
    #random_walker_hs = read_data(os.path.join(os.getenv('BR'), 'random-walks', 'random-walks-c100.tab'))
    #assert len(random_walker_hs) == 1
    #rwh = numpy.mean(random_walker_hs['random'])
    # Per-category bias plot for the news-targets dataset.
    basename = 'news-targets-with-pageranks-gte10-cat-dl10'
    print 'Processing', basename
    plot_pbias(
        os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
        read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
        cat_excludes=set(['earthlink', 'comcast', 'gmail', 'yelp', 'linkedin', 'timewarner', 'googleplus', 'hi5']),
        #xline=rwh,
        elinewidth=2, xticksfont=22, yticksfont=14,
        xlabel='$B_p$',
        show_legend=True
    )
    # Per-site bias plot for the October 2014 news dataset.
    basename = 'oct2014-news4-gte10-site-dl10'
    print 'Processing', basename
    plot_pbias(
        os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
        read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
        cat_excludes=set(['hi5', 'timewarner', 'earthlink', 'comcast', 'gmail', 'linkedin', 'yelp', 'duckduckgo', 'aolmail', 'livemail', 'tumblr', 'yahoomail']),
        #cat_excludes=set(['earthlink', 'comcast', 'gmail', 'yelp', 'linkedin', 'timewarner', 'googleplus', 'hi5']),
        #xline=rwh,
        elinewidth=2, xticksfont=22, yticksfont=14,
        xlabel='$B_p$',
        show_legend=True
    )
'''
basename = 'u150-c100-site-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
cat_excludes=set(['earthlink', 'comcast']),
#xline=rwh,
elinewidth=2, xticksfont=22, yticksfont=14,
xlabel='$B_p$',
show_legend=True
)
basename = 'u500-c100-site-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
cat_excludes=set(['earthlink', 'comcast']),
#xline=rwh,
elinewidth=2, xticksfont=22, yticksfont=14,
xlabel='$B_p$',
show_legend=True
)
basename = 'u500-c1000-site-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
cat_excludes=set(['earthlink', 'comcast']),
#xline=rwh,
elinewidth=2, xticksfont=22, yticksfont=14,
xlabel='$B_p$',
show_legend=True
)
basename = 'u1200-c100-cat-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
#xline=rwh,
elinewidth=7, xticksfont=22, yticksfont=22,
xlabel='$B_p$'
)
basename = 'u750-c1000-cat-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
#xline=rwh,
elinewidth=7, xticksfont=22, yticksfont=22,
xlabel='$B_p$'
)
basename = 'feb2015-01to05-gte10-cat-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
#xline=rwh,
elinewidth=7, xticksfont=22, yticksfont=22,
xlabel='$B_p$'
)
basename = 'feb2015-01to05-gte10-site-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
cat_excludes=set(['earthlink', 'comcast', 'duckduckgo', 'gmail', 'googleplus', 'hi5', 'linkedin', 'timewarner', 'yelp']),
#xline=rwh,
elinewidth=2, xticksfont=22, yticksfont=14,
xlabel='$B_p$',
show_legend=True
)
basename = 'oct2014-gte10-cat-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
#xline=rwh,
elinewidth=7, xticksfont=22, yticksfont=22,
xlabel='$B_p$'
)
basename = 'oct2014-gte10-site-dl10'
print 'Processing', basename
plot_pbias(
os.path.join(os.getenv('BP'), 'pbias', 'pbias-%s.pdf' % basename),
read_data(os.path.join(os.getenv('BR'), 'pbias', '%s.tab' % basename)),
cat_excludes=set(['earthlink', 'comcast', 'duckduckgo', 'gmail', 'googleplus', 'hi5', 'linkedin', 'timewarner', 'yelp']),
#xline=rwh,
elinewidth=2, xticksfont=22, yticksfont=14,
xlabel='$B_p$',
show_legend=True
)'''
|
import torch
from torch import nn , optim
import torch.nn.functional as F
from torchvision import datasets, transforms, models
import json
from collections import OrderedDict
def transformations():
    """Build the torchvision transform pipelines.

    Returns a dict with two entries: ``train_transform`` (random
    rotation / crop / flip augmentation) and
    ``validation_or_test_transform`` (deterministic resize + center
    crop). Both convert to tensors and normalize with the ImageNet
    channel statistics.
    """
    channel_means = [0.485, 0.456, 0.406]
    channel_stds = [0.229, 0.224, 0.225]
    augmented = transforms.Compose([
        transforms.RandomRotation(30),
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(channel_means, channel_stds),
    ])
    deterministic = transforms.Compose([
        transforms.Resize(255),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(channel_means, channel_stds),
    ])
    return {
        "train_transform": augmented,
        "validation_or_test_transform": deterministic,
    }
def create_datasets(source_dir):
    """Create ImageFolder datasets for the train/valid/test splits.

    ``source_dir`` must contain ``train``, ``valid`` and ``test``
    sub-directories. Training data gets the augmented transform; the
    other two splits get the deterministic one.
    """
    pipelines = transformations()
    split_specs = {
        "train_datasets": ('/train', "train_transform"),
        "validation_dataset": ('/valid', "validation_or_test_transform"),
        "test_dataset": ('/test', "validation_or_test_transform"),
    }
    return {
        name: datasets.ImageFolder(source_dir + subdir, transform=pipelines[tfm])
        for name, (subdir, tfm) in split_specs.items()
    }
|
import re
text = """one two
three four
five six seven
eight nine
ten"""
# Demo: how re.S and re.M change what ^ . $ match in multi-line text.
# By default `.` does not match a newline and ^/$ anchor only at the
# start/end of the whole string, so this matches just the first line.
pat = r"^.*$"
m = re.search(pat, text)
if m :
    print(m.group())
else:
    print("no match")
# pickup the whole paragraph
# re.S (DOTALL): make . match a newline too, so ^.*$ spans everything
pat = r"^.*$"
m = re.search(pat, text, re.S)
if m :
    print(m.group()) # the whole text as a single match
else:
    print("no match")
print("-" * 40)
# re.M (MULTILINE): make $ and ^ match at each line boundary;
# search() still returns only the first match (the first line)
pat = r"^.*$"
m = re.search(pat, text, re.M)
if m :
    print("matched : ", m.group())
else:
    print("no match")
print("-" * 40)
# get all lines at once
print(re.findall(pat, text, re.M))
print("-" * 40)
# walk through the matches one by one
for m in re.finditer(pat, text, re.M):
    print(m.group())
print("-" * 40)
|
'''
logWithGpsOneXTRA.py - This is a GNSS test using Sixfab's HAT with gpsOneXTRA Assistance Function from Qualcomm that logs the GPS info in a file
Created by Marc Leroy (Pix4D), May 8th 2019
'''
from cellulariot import cellulariot
import time
def main():
    """Exercise the Sixfab CellularIoT HAT GNSS with gpsOneXTRA assistance.

    Power-cycles the modem, enables Qualcomm's gpsOneXTRA assistance via
    AT commands, turns the GNSS on and polls the fix roughly once per
    second (up to 1000 iterations or until Ctrl-C), then turns the GNSS
    back off.
    """
    node = cellulariot.CellularIoT()
    node.setupGPIO()
    # Power-up sequence: make sure the modem is off before enabling it.
    print("Power up sequence - disabling first")
    node.disable()
    print("Disable done\n")
    time.sleep(1)
    print("Starting enable")
    node.enable()
    print("Enable done\n")
    time.sleep(1)
    print("Starting power up")
    node.powerUp()
    print("Power up done\n")
    print("Getting HW info")
    node.getHardwareInfo()
    print("Done")
    time.sleep(1)
    # Disabled debug step: enable AT command echo.
    # node.sendATComm("ATE1", "OK")
    print("Enabling gpsOneXTRA Assistance")
    node.sendATComm("AT+QGPSXTRA=1", "OK")
    node.sendATCommOnce("AT+QGPSXTRADATA?")
    print("Enabled\n")
    time.sleep(0.5)
    print("Turning GNSS on")
    node.turnOnGNSS()
    time.sleep(1)
    print("Starting loop")
    ctr = 0
    # Poll the fix up to 1000 times; Ctrl-C exits the loop early.
    while ctr<1000:
        ctr+=1
        try:
            # Disabled: re-query assistance-data validity each iteration.
            # node.sendATCommOnce("AT+QGPSXTRADATA?")
            node.getFixedLocation()
            time.sleep(1)
        except KeyboardInterrupt:
            break
    print("\nExited loop, will turn GNSS off and quit.")
    node.turnOffGNSS()
    print("Done. Quiting.\n")
if __name__=='__main__':
    main()
|
#coding:utf8
import wx
def openFile(x):
    """Load the file named in the path box into the text area.

    Bug fix: the original left the file handle open
    (``open(...).read()``); a context manager closes it promptly.
    """
    with open(filepath.GetValue(), 'r') as f:
        contents.SetValue(f.read())
def savefile(x):
    """Write the text area's contents to the file named in the path box.

    Improvement: a context manager guarantees the handle is flushed and
    closed even if the write raises.
    """
    with open(filepath.GetValue(), 'w') as fc:
        fc.write(contents.GetValue())
app = wx.App()
win = wx.Frame(None,title="xyb's Notepad") # main window
bkg = wx.Panel(win)
# Create the widgets (button labels are Chinese: "Save" / "Open")
savebutton = wx.Button(bkg,label="保存")
openbutton = wx.Button(bkg,label="打开")
filepath = wx.TextCtrl(bkg)
contents = wx.TextCtrl(bkg,style=wx.TE_MULTILINE|wx.HSCROLL) # multi-line, scrollable editor area
# Event handling: wire the buttons to the open/save callbacks
openbutton.Bind(wx.EVT_BUTTON,openFile)
savebutton.Bind(wx.EVT_BUTTON,savefile)
# Layout: path box + buttons in a top row, editor filling the rest
headbox = wx.BoxSizer()
headbox.Add(filepath,proportion=1,flag=wx.EXPAND)
headbox.Add(openbutton,proportion=0,flag=wx.LEFT,border=5)
headbox.Add(savebutton,proportion=0,flag=wx.LEFT,border=5)
allbox = wx.BoxSizer(wx.VERTICAL)
allbox.Add(headbox,proportion=0,flag=wx.EXPAND|wx.ALL,border=5)
allbox.Add(contents,proportion=1,flag=wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM,border=5)
bkg.SetSizer(allbox)
win.Show()
app.MainLoop()
|
# ------------------------------------------------------------
# Copyright (c) 2017-present, SeetaTech, Co.,Ltd.
#
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
# <https://opensource.org/licenses/BSD-2-Clause>
#
# ------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from dragon.core import mpi
from dragon.vm.torch.tensor import Tensor, _LeafTensor, _Device
from dragon.vm.torch.ops.primitive import MakeDevice, WrapScalar
from dragon.vm.torch.ops.factory import get_module
from dragon.vm.torch.ops.modules.control_flow import Compare
from dragon.vm.torch.ops.modules.arithmetic import (
Fundamental, Log, Exp, Sqrt,
Accumulate,
MM, FullyConnected,
Maximum, Minimum, Clamp,
)
from dragon.vm.torch.ops.modules.init import (
Fill, RandomUniform, RandomNormal,
)
from dragon.vm.torch.ops.modules.array import (
Reshape, Squeeze, UnSqueeze, Permute,
Indexing, Assigning, Repeat, Concat, Gather,
Reduce, ArgReduce, OneHot, Multinomial,
)
from dragon.vm.torch.ops.modules.update import (
Accumulate as _Accumulate, Collective, Update,
)
from dragon.vm.torch.ops.modules.vision import (
Resize2d, RoIPool, RoIAlign,
)
__all__ = [
'add', 'sub', 'mul', 'div',
'accumulate',
'maximum', 'minimum', 'clamp',
'log', 'exp', 'sqrt',
'mm', 'xw_plus_b',
'squeeze', 'unsqueeze',
'mean', 'sum', 'min', 'max', 'topk',
'argmin', 'argmax',
'gt', 'lt', 'eq', 'ge', 'le',
'cat', 'gather', 'narrow',
'one_hot', 'multinomial', 'rand', 'randn',
'zeros', 'zeros_like', 'ones', 'ones_like',
'nn_resize', 'bilinear_resize', 'roi_pool', 'roi_align',
]
##############################################
# #
# Arithmetic #
# #
##############################################
def _fundamental(input, value, op='Add', out=None):
    """Run a cached element-wise binary module computing ``input op value``."""
    if not isinstance(value, Tensor):
        # Promote plain numbers to a tensor on the input's device.
        value = WrapScalar(value, input.dtype, input.device)
    device = MakeDevice(inputs=[input, value])
    cache_key = '{}/{}'.format(op, device)
    op_module = get_module(Fundamental, cache_key, device, op_type=op)
    return op_module.forward(input, value, out)
def _rfundamental(input, value, op='RAdd', out=None):
    """Run a cached reflected binary module computing ``value op input``."""
    if not isinstance(value, Tensor):
        # Promote plain numbers to a tensor on the input's device.
        value = WrapScalar(value, input.dtype, input.device)
    device = MakeDevice(inputs=[input, value])
    cache_key = '{}/{}'.format(op, device)
    op_module = get_module(Fundamental, cache_key, device, op_type=op)
    # Reflected form: the scalar/value comes first.
    return op_module.forward(value, input, out)
def add(input, value, out=None):
    """Compute element-wise ``input + value``.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    value : dragon.vm.torch.Tensor, number
        The value to add.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    return _fundamental(input, value, op='Add', out=out)
def sub(input, value, out=None):
    """Subtract the ``input`` and ``value`` into the output tensor.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    value : dragon.vm.torch.Tensor or number
        The value tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    # Doc fix: Returns previously said "torch.Tensor", inconsistent with
    # the sibling add/mul/div docstrings.
    return _fundamental(input, value, out=out, op='Sub')
def mul(input, value, out=None):
    """Compute element-wise ``input * value``.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    value : dragon.vm.torch.Tensor or number
        The multiplier.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    return _fundamental(input, value, op='Mul', out=out)
def div(input, value, out=None):
    """Compute element-wise ``input / value``.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    value : dragon.vm.torch.Tensor or number
        The divisor.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    return _fundamental(input, value, op='Div', out=out)
def maximum(input, other, out=None):
    """Compute the element-wise maximum of the two inputs.
    Either operand may be a plain number; it is promoted to a tensor
    matching the other operand's dtype and device.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor or number
        The first operand.
    other : dragon.vm.torch.Tensor or number
        The second operand.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    if not isinstance(input, Tensor):
        input = WrapScalar(input, other.dtype, other.device)
    elif not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    device = MakeDevice(inputs=[input])
    op_module = get_module(Maximum, 'Maximum/{}'.format(device), device)
    return op_module.forward(input, other, out)
def minimum(input, other, out=None):
    """Compute the element-wise minimum of the two inputs.
    Either operand may be a plain number; it is promoted to a tensor
    matching the other operand's dtype and device.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor or number
        The first operand.
    other : dragon.vm.torch.Tensor or number
        The second operand.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    if not isinstance(input, Tensor):
        input = WrapScalar(input, other.dtype, other.device)
    elif not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    device = MakeDevice(inputs=[input])
    op_module = get_module(Minimum, 'Minimum/{}'.format(device), device)
    return op_module.forward(input, other, out)
def clamp(input, min=None, max=None, out=None):
    """Limit every element of ``input`` to the range [min, max].
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    min : number, optional
        The lower bound; ``None`` leaves it open.
    max : number, optional
        The upper bound; ``None`` leaves it open.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    device = MakeDevice(inputs=[input])
    cache_key = 'Clamp/{}/min:{}/max:{}'.format(device, min, max)
    op_module = get_module(Clamp, cache_key, device, min=min, max=max)
    return op_module.forward(input, out)
def log(input, out=None):
    """Compute the element-wise natural logarithm of ``input``.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    device = MakeDevice(inputs=[input])
    op_module = get_module(Log, 'Log/{}'.format(device), device)
    return op_module.forward(input, out)
def exp(input, out=None):
    """Compute the element-wise exponential of ``input``.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    device = MakeDevice(inputs=[input])
    op_module = get_module(Exp, 'Exp/{}'.format(device), device)
    return op_module.forward(input, out)
def sqrt(input, out=None):
    """Compute the element-wise square root of ``input``.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    device = MakeDevice(inputs=[input])
    op_module = get_module(Sqrt, 'Sqrt/{}'.format(device), device)
    return op_module.forward(input, out)
def accumulate(input, alpha=1., beta=1., out=None):
    """Blend ``input`` into ``out``: *out = alpha * input + beta * out*.
    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    alpha : float, optional, default=1.
        The scale applied to ``input``.
    beta : float, optional, default=1.
        The scale applied to ``out``.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    device = MakeDevice(inputs=[input])
    cache_key = 'Accumulate/{}/alpha:{}/beta:{}'.format(device, alpha, beta)
    op_module = get_module(Accumulate, cache_key, device, alpha=alpha, beta=beta)
    return op_module.forward(input, out)
def mm(mat1, mat2, transA=False, transB=False, out=None):
    """Performs a matrix multiplication of the matrices ``mat1`` and ``mat2``.
    Parameters
    ----------
    mat1 : dragon.vm.torch.Tensor
        The matrix A.
    mat2 : dragon.vm.torch.Tensor
        The matrix B.
    transA : boolean
        Whether to transpose the ``mat1``.
    transB : boolean
        Whether to transpose the ``mat2``.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    # Doc fix: the `out` parameter was undocumented and the summary had
    # misplaced backticks.
    dev = MakeDevice(inputs=[mat1, mat2])
    key = 'Matmul/{}/transA:{}/transB:{}'.format(dev, transA, transB)
    module = get_module(MM, key, dev, transA=transA, transB=transB)
    return module.forward(mat1, mat2, out)
def xw_plus_b(x, w, bias=None, transW=True, out=None):
    """Compute *matmul(x, w) + bias*.
    Parameters
    ----------
    x : dragon.vm.torch.Tensor
        The x.
    w : dragon.vm.torch.Tensor
        The w.
    bias : dragon.vm.torch.Tensor, optional
        The bias.
    transW : boolean
        Whether to transpose the ``w``.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.
    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.
    """
    # Bug fix: `if bias` truth-tested a Tensor, which is ambiguous for
    # multi-element tensors; test identity against None instead.
    dev = MakeDevice(inputs=[x, w] + ([bias] if bias is not None else []))
    key = 'FullyConnected/{}/transW:{}'.format(dev, transW)
    module = get_module(FullyConnected, key, dev, transW=transW)
    return module.forward(x, w, bias, out)
##############################################
# #
# Array #
# #
##############################################
def _reshape(input, shape, shape_like=None):
    """Reshape ``input``; a ``shape_like`` tensor overrides ``shape``."""
    if shape_like is not None:
        shape = shape_like.shape
    device = MakeDevice(inputs=[input])
    rank = len(shape)
    cache_key = 'Reshape/{}/n_dim:{}'.format(device, rank)
    op_module = get_module(Reshape, cache_key, device, n_dim=rank)
    return op_module.forward(input, shape)
def _permute(input, perm):
    """Permute the dimensions of ``input`` according to ``perm``."""
    device = MakeDevice(inputs=[input])
    rank = len(perm)
    cache_key = 'Permute/{}/n_perm:{}'.format(device, rank)
    op_module = get_module(Permute, cache_key, device, n_perm=rank)
    return op_module.forward(input, perm)
def _repeat(input, times):
    """Tile ``input`` along each dimension by the factors in ``times``."""
    device = MakeDevice(inputs=[input])
    n_times = len(times)
    cache_key = 'Repeat/{}/n_times:{}'.format(device, n_times)
    op_module = get_module(Repeat, cache_key, device, n_times=n_times)
    return op_module.forward(input, times)
def _fill(input, shape, value):
    """Fill a tensor of ``shape`` with the constant ``value``."""
    device = MakeDevice(inputs=[input])
    rank = len(shape)
    cache_key = 'Fill/{}/dtype:{}/n_dim:{}/value:{}'.format(
        device, input.dtype, rank, value)
    op_module = get_module(
        Fill, cache_key, device,
        n_dim=rank, value=value, dtype=input.dtype)
    return op_module.forward(input, shape)
def _uniform(input, shape, low, high):
    """Fill a tensor of ``shape`` with samples from U(low, high)."""
    device = MakeDevice(inputs=[input])
    rank = len(shape)
    cache_key = 'Uniform/{}/dtype:{}/n_dim:{}/low:{}/high:{}'.format(
        device, input.dtype, rank, float(low), float(high))
    op_module = get_module(
        RandomUniform, cache_key, device,
        n_dim=rank, low=low, high=high, dtype=input.dtype)
    return op_module.forward(input, shape)
def _normal(input, shape, mean, std):
    """Fill a tensor of ``shape`` with samples from N(mean, std)."""
    device = MakeDevice(inputs=[input])
    rank = len(shape)
    cache_key = 'Normal/{}/dtype:{}/n_dim:{}/mean:{}/std:{}'.format(
        device, input.dtype, rank, float(mean), float(std))
    op_module = get_module(
        RandomNormal, cache_key, device,
        n_dim=rank, mean=mean, std=std, dtype=input.dtype)
    return op_module.forward(input, shape)
def _reduce(input, operation, dim=None, keepdim=False, out=None):
    """Dispatch a value reduction (SUM/MEAN/...) over ``input``."""
    if dim is None:
        # A full reduction collapses every dim; keepdim is meaningless then.
        keepdim = False
    device = MakeDevice(inputs=[input])
    cache_key = '{}/{}/dim:{}/keepdim:{}'.format(
        operation, device, dim, int(keepdim))
    module = get_module(
        Reduce, cache_key, device,
        operation=operation, dim=dim, keepdim=keepdim)
    return module.forward(input, out)
def _arg_reduce(input, operation, dim=None, keepdim=False, top_k=1, out=None):
    """Dispatch an index/value reduction (ARGMAX/ARGMIN/MAX/MIN) over ``input``."""
    if dim is None:
        # Full reduction: keepdim has no effect.
        keepdim = False
    device = MakeDevice(inputs=[input])
    cache_key = '{}/{}/dim:{}/keepdim:{}/top_k:{}'.format(
        operation, device, dim, int(keepdim), top_k)
    module = get_module(
        ArgReduce, cache_key, device,
        operation=operation, axis=dim,
        keepdim=keepdim, top_k=top_k)
    return module.forward(input, out)
def _indexing(input, starts, sizes):
    """Slice ``input``: take ``sizes[d]`` elements from offset ``starts[d]``."""
    device = MakeDevice(inputs=[input])
    nstarts, nsizes = len(starts), len(sizes)
    cache_key = 'Index/{}/n_starts:{}/n_sizes:{}'.format(device, nstarts, nsizes)
    module = get_module(Indexing, cache_key, device,
                        n_starts=nstarts, n_sizes=nsizes)
    return module.forward(input, starts, sizes)
def _assigning(output, input, starts, sizes):
    """Write ``input`` into the region of ``output`` described by starts/sizes."""
    if not isinstance(input, Tensor):
        # Promote plain sequences / scalars to tensors on the output's device.
        if isinstance(input, (tuple, list)):
            input = Tensor(input, dtype=output.dtype, device=output.device)
        else:
            input = WrapScalar(input, output.dtype, output.device)
    device = MakeDevice(inputs=[input])
    nstarts, nsizes = len(starts), len(sizes)
    cache_key = 'Assign/{}/n_starts:{}/n_sizes:{}'.format(device, nstarts, nsizes)
    module = get_module(Assigning, cache_key, device,
                        n_starts=nstarts, n_sizes=nsizes)
    return module.forward(input, output, starts, sizes)
def _compare(input, other, operation, out=None):
    """Element-wise comparison dispatcher (GT/GE/LT/LE/EQ)."""
    if not isinstance(other, Tensor):
        # Scalars are wrapped to match input's dtype and device.
        other = WrapScalar(other, input.dtype, input.device)
    device = MakeDevice(inputs=[input, other])
    cache_key = 'Compare/{}/{}'.format(operation, device)
    module = get_module(Compare, cache_key, device, operation=operation)
    return module.forward(input, other, out)
def squeeze(input, dim=None, out=None):
    """Return a tensor with the dimensions of size 1 removed.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The optional dim to remove.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice(inputs=[input])
    # BUG FIX: ``dim if dim else 'None'`` mapped dim=0 onto the same cache
    # key as dim=None, returning a cached module built with dim=None.
    key = 'Squeeze/{}/dim:{}'.format(dev, dim if dim is not None else 'None')
    module = get_module(Squeeze, key, dev, dim=dim)
    return module.forward(input, out=out)
def unsqueeze(input, dim, out=None):
    """Return a tensor with a dimension of size 1 inserted at the given position.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int
        The position to insert the new dim at.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice(inputs=[input])
    # BUG FIX: ``dim if dim else 'None'`` collapsed dim=0 into the 'None'
    # cache key; compare against None explicitly (dim=0 is a valid position).
    key = 'Unsqueeze/{}/dim:{}'.format(dev, dim if dim is not None else 'None')
    module = get_module(UnSqueeze, key, dev, dim=dim)
    return module.forward(input, out=out)
def mean(input, dim=None, keepdim=False, out=None):
    """Compute the mean over all elements, or along ``dim`` when given.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The axis to reduce over; ``None`` reduces every element.
    keepdim : bool, optional
        Keep the reduced dim with size 1 if True.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The mean-reduced tensor.

    """
    return _reduce(input, 'MEAN', dim, keepdim, out)
def sum(input, dim=None, keepdim=False, out=None):
    """Compute the sum over all elements, or along ``dim`` when given.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The axis to reduce over; ``None`` reduces every element.
    keepdim : bool, optional
        Keep the reduced dim with size 1 if True.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The sum-reduced tensor.

    """
    return _reduce(input, 'SUM', dim, keepdim, out)
def argmax(input, dim=None, keepdim=False, out=None):
    """Return the indices of the maximum elements along ``dim``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The axis to reduce over; ``None`` reduces every element.
    keepdim : bool, optional
        Keep the reduced dim with size 1 if True.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The maximum indices.

    """
    return _arg_reduce(input, 'ARGMAX', dim, keepdim, 1, out)
def max(input, dim=None, keepdim=False, out=None):
    """Return the maximum values and their indices along ``dim``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The axis to reduce over; ``None`` reduces every element.
    keepdim : bool, optional
        Keep the reduced dim with size 1 if True.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    tuple
        The maximum values and indices.

    """
    return _arg_reduce(input, 'MAX', dim, keepdim, 1, out)
def argmin(input, dim=None, keepdim=False, out=None):
    """Return the indices of the minimum elements along ``dim``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The axis to reduce over; ``None`` reduces every element.
    keepdim : bool, optional
        Keep the reduced dim with size 1 if True.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The minimum indices.

    """
    return _arg_reduce(input, 'ARGMIN', dim, keepdim, 1, out)
def min(input, dim=None, keepdim=False, out=None):
    """Return the minimum values and their indices along ``dim``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The axis to reduce over; ``None`` reduces every element.
    keepdim : bool, optional
        Keep the reduced dim with size 1 if True.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    tuple
        The minimum values and indices.

    """
    return _arg_reduce(input, 'MIN', dim, keepdim, 1, out)
def topk(input, k, dim=None, largest=True, sorted=True, out=None):
    """Return the ``k`` largest (or smallest) values and indices along ``dim``.

    When ``dim`` is None the last dimension of the input is used; when
    ``largest`` is False the k smallest elements are returned instead.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    k : int
        The top k.
    dim : int, optional
        The axis to search along; defaults to the last dimension.
    largest : bool, optional
        Whether to return largest or smallest elements.
    sorted : bool, optional
        Whether to return in the sorted order.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    tuple
        The values and indices.

    """
    if dim is None:
        dim = input.ndimension() - 1
    mode = 'MAX' if largest else 'MIN'
    return _arg_reduce(input, mode, dim, True, k, out)
def gt(input, other, out=None):
    """Element-wise test of ``input > other``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    other : dragon.vm.torch.Tensor, number
        The value to compare against.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output byte tensor.

    """
    return _compare(input, other, 'GT', out)
def ge(input, other, out=None):
    """Element-wise test of ``input >= other``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    other : dragon.vm.torch.Tensor, number
        The value to compare against.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output byte tensor.

    """
    return _compare(input, other, 'GE', out)
def lt(input, other, out=None):
    """Element-wise test of ``input < other``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    other : dragon.vm.torch.Tensor, number
        The value to compare against.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output byte tensor.

    """
    return _compare(input, other, 'LT', out)
def le(input, other, out=None):
    """Element-wise test of ``input <= other``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    other : dragon.vm.torch.Tensor, number
        The value to compare against.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output byte tensor.

    """
    return _compare(input, other, 'LE', out)
def eq(input, other, out=None):
    """Element-wise test of ``input == other``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    other : dragon.vm.torch.Tensor, number
        The value to compare against.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output byte tensor.

    """
    return _compare(input, other, 'EQ', out)
def cat(seq, dim=0, out=None):
    """Concatenate a sequence of tensors along ``dim``.

    Parameters
    ----------
    seq : sequence of dragon.vm.torch.Tensor
        The tensors to concatenate.
    dim : int, optional
        The dim to concatenate along.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    device = MakeDevice(inputs=seq, outputs=[out] if out else [])
    cache_key = 'Concat/{}/dim:{}'.format(device, dim)
    module = get_module(Concat, cache_key, device, axis=dim)
    return module.forward(seq, out)
def gather(input, dim, index, out=None):
    """Gather values of ``input`` along ``dim`` using vector ``index``.

    This is tensorflow-style gather: a vector of indices is taken and the
    remaining dimensions are copied through unchanged.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The values.
    dim : int
        The dim to gather along.
    index : dragon.vm.torch.Tensor
        The indices.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    device = MakeDevice(
        inputs=[input, index],
        outputs=[out] if out else [])
    cache_key = 'Gather/{}/dim:{}'.format(device, dim)
    module = get_module(Gather, cache_key, device, axis=dim)
    return module.forward(input, index, out)
def narrow(input, dimension, start, length):
    """Return a narrowed version of ``input`` along one dimension.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dimension : int
        The dimension to narrow.
    start : int
        The starting position.
    length : int
        The number of elements to keep from ``start``.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    sizes = list(input.shape[:])
    starts = [0] * len(sizes)
    starts[dimension] = start
    sizes[dimension] = length
    return _indexing(input, starts, sizes)
def one_hot(input, depth):
    """Return a one-hot encoding of ``input`` with ``depth`` channels.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    depth : int
        The depth of channels.

    Returns
    -------
    dragon.vm.torch.FloatTensor
        The output tensor.

    """
    device = MakeDevice(inputs=[input])
    cache_key = 'OneHot/{}/depth:{}'.format(device, depth)
    module = get_module(OneHot, cache_key, device, depth=depth)
    return module.forward(input)
def multinomial(input, num_samples, normalize=False, out=None):
    """Sample ``num_samples`` indices per row from a multinomial distribution.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor (per-row event weights).
    num_samples : int
        The number of samples per row.
    normalize : boolean, optional, default=False
        Whether to normalize the inputs.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    device = MakeDevice(inputs=[input])
    cache_key = 'Multinomial/{}/num_samples:{}/normalize:{}'.format(
        device, num_samples, normalize)
    module = get_module(
        Multinomial, cache_key, device,
        num_samples=num_samples, normalize=normalize)
    return module.forward(input, out)
##############################################
# #
# Creation #
# #
##############################################
def _get_leaf_tensor(sizes, kwargs):
    """Build a leaf tensor from ``sizes`` plus creation-op keyword args.

    Idiom: use ``dict.get`` with defaults instead of the in-test ternary
    that the original used for ``requires_grad``.
    """
    return _LeafTensor(
        sizes,
        requires_grad=kwargs.get('requires_grad', False),
        dtype=kwargs.get('dtype', 'float32'),
        device=kwargs.get('device', _Device()))
def zeros(*sizes, **kwargs):
    """Return a float tensor of the given shape filled with ``0``.

    Parameters
    ----------
    sizes : tuple, list or int
        The sizes indicating the shape of the output tensor.
    out : dragon.vm.torch.Tensor
        The optional output tensor (keyword only).

    Returns
    -------
    vm.torch.FloatTensor
        The output tensor.

    """
    out = kwargs.get('out')
    if out is None:
        out = _get_leaf_tensor(sizes, kwargs)
    return _fill(out, shape=sizes, value=0)
def zeros_like(input, out=None, **kwargs):
    """Return a float tensor of ``0`` with the same shape as ``input``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The tensor whose shape is copied.
    out : dragon.vm.torch.Tensor
        The optional output tensor.

    Returns
    -------
    vm.torch.FloatTensor
        The output tensor.

    """
    if not hasattr(input, 'shape'):
        raise ValueError('Input does not have the shape attribute.')
    if out is None:
        out = _get_leaf_tensor(input.shape, kwargs)
    return _fill(out, shape=input.shape, value=0)
def ones(*sizes, **kwargs):
    """Return a float tensor of the given shape filled with ``1``.

    Parameters
    ----------
    sizes : tuple, list or int
        The sizes indicating the shape of the output tensor.
    out : dragon.vm.torch.Tensor
        The optional output tensor (keyword only).

    Returns
    -------
    vm.torch.FloatTensor
        The output tensor.

    """
    out = kwargs.get('out')
    if out is None:
        out = _get_leaf_tensor(sizes, kwargs)
    return _fill(out, shape=sizes, value=1)
def ones_like(input, out=None, **kwargs):
    """Return a float tensor of ``1`` with the same shape as ``input``.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The tensor whose shape is copied.
    out : dragon.vm.torch.Tensor
        The optional output tensor.

    Returns
    -------
    vm.torch.FloatTensor
        The output tensor.

    """
    if not hasattr(input, 'shape'):
        raise ValueError('Input does not have the shape attribute.')
    if out is None:
        out = _get_leaf_tensor(input.shape, kwargs)
    return _fill(out, shape=input.shape, value=1)
def rand(*sizes, **kwargs):
    """Return a float tensor sampled from the uniform distribution U(0, 1).

    Parameters
    ----------
    sizes : tuple, list or int
        The sizes indicating the shape of the output tensor.
    out : dragon.vm.torch.Tensor
        The optional output tensor (keyword only).

    Returns
    -------
    vm.torch.FloatTensor
        The output tensor.

    """
    out = kwargs.get('out')
    if out is None:
        out = _get_leaf_tensor(sizes, kwargs)
    return _uniform(out, sizes, low=0, high=1)
def randn(*sizes, **kwargs):
    """Return a float tensor sampled from the normal distribution N(0, 1).

    Parameters
    ----------
    sizes : tuple, list or int
        The sizes indicating the shape of the output tensor.
    out : dragon.vm.torch.Tensor
        The optional output tensor (keyword only).

    Returns
    -------
    vm.torch.FloatTensor
        The output tensor.

    """
    out = kwargs.get('out')
    if out is None:
        out = _get_leaf_tensor(sizes, kwargs)
    return _normal(out, sizes, mean=0, std=1)
##############################################
# #
# Update #
# #
##############################################
def _accumulate(grads):
    """Accumulate a gradient tensor (or list of them) in place.

    BUG FIX: the original called ``len(grads)`` before normalizing a
    single tensor argument into a list; normalize first so both call
    shapes take the same path.
    """
    if not isinstance(grads, (list, tuple)):
        grads = [grads]
    if len(grads) == 0:
        return
    dev = MakeDevice(inputs=grads)
    key = 'Accumulate/{}/alpha:1./beta:1.'.format(dev)
    module = get_module(_Accumulate, key, dev)
    return module.forward(grads)
def _allreduce(grads):
    """All-reduce a gradient tensor (or list of them) across MPI ranks."""
    if not isinstance(grads, (list, tuple)):
        grads = [grads]
    device = MakeDevice(inputs=grads)
    mode = mpi.GetParallelMode() + '_ALLREDUCE'
    cache_key = 'Collective/{}/{}'.format(device, mode.lower())
    module = get_module(Collective, cache_key, device, mode=mode)
    return module.forward(grads)
def _update(param, grad, op_type, slot,
            lr_mult=1.0, decay_mult=1.0):
    """Apply one optimizer update of kind ``op_type`` to ``param`` with ``grad``."""
    device = MakeDevice(inputs=[param])
    # Cache per (op, device, slot, parameter) so each param keeps its state.
    cache_key = '{}/{}/{}/{}'.format(op_type, device, slot, param.name)
    module = get_module(
        Update, cache_key, device, op_type=op_type,
        lr_mult=lr_mult, decay_mult=decay_mult, slot=slot)
    return module.forward(param, grad)
##############################################
# #
# Vision #
# #
##############################################
def _resize_2d(input, op_type, dsize, fx, fy):
    """Common driver for the 2d resize ops (nearest-neighbor / bilinear).

    Either an explicit output size ``dsize`` (2 elements) or both scale
    factors ``fx``/``fy`` (>= 0) must be supplied.
    """
    if dsize is None:
        # Without an explicit output size both scale factors are required.
        if fx < 0 or fy < 0:
            raise ValueError('Set fx and fy if dsize is None.')
    else:
        if len(dsize) != 2:
            raise ValueError('The dsize should be a list with 2 elements.')
    # NOTE: removed an unreachable re-check of dsize/fx/fy here — the branch
    # above already raises for every case it covered (fx/fy == -1.0 < 0).
    dev = MakeDevice(inputs=[input])
    key = '{}/{}/dsize:{}/fx:{}/fy:{}'.format(
        op_type, dev, '2' if dsize else 'none', fx, fy)
    module = get_module(Resize2d, key, dev,
        op_type=op_type, dsize=dsize, fx=fx, fy=fy)
    return module.forward(input, dsize)
def nn_resize(input, dsize, fx=-1.0, fy=-1.0):
    """Resize ``input`` spatially with nearest-neighbor interpolation."""
    return _resize_2d(input, 'NNResize', dsize, fx, fy)
def bilinear_resize(input, dsize, fx=-1.0, fy=-1.0):
    """Resize ``input`` spatially with bilinear interpolation."""
    return _resize_2d(input, 'BilinearResize', dsize, fx, fy)
def roi_pool(feature, rois, pooled_h, pooled_w, spatial_scale):
    """Apply RoI pooling over ``feature`` for the given ``rois``."""
    device = MakeDevice(inputs=[feature])
    cache_key = 'RoIPool/{}/pool_h:{}/pool_w:{}/spatial_scale:{}'.format(
        device, pooled_h, pooled_w, spatial_scale)
    module = get_module(
        RoIPool, cache_key, device,
        pooled_h=pooled_h, pooled_w=pooled_w,
        spatial_scale=spatial_scale)
    return module.forward(feature, rois)
def roi_align(feature, rois, pooled_h, pooled_w,
              spatial_scale, sampling_ratio=2):
    """Apply RoI align over ``feature`` for the given ``rois``."""
    device = MakeDevice(inputs=[feature])
    cache_key = 'RoIAlign/{}/pool_h:{}/pool_w:{}/' \
                'spatial_scale:{}/sampling_ratio:{}'.format(
        device, pooled_h, pooled_w, spatial_scale, sampling_ratio)
    module = get_module(
        RoIAlign, cache_key, device,
        pooled_h=pooled_h, pooled_w=pooled_w,
        spatial_scale=spatial_scale,
        sampling_ratio=sampling_ratio)
    return module.forward(feature, rois)
from copy import deepcopy
################################
### Heap Traversal Functions ###
################################
def left_child_idx(i):
    """Index of the left child of heap index ``i``."""
    return 2 * i + 1
def right_child_idx(i):
    """Index of the right child of heap index ``i``."""
    return 2 * i + 2
def parent_idx(i):
    """Index of the parent of heap index ``i`` (yields -1 for the root)."""
    return (i - 1) // 2
def left_child(i, aheap):
    """Value of the left child of heap index ``i``, or None if out of range."""
    child = 2 * i + 1
    return aheap[child] if child < len(aheap) else None
def right_child(i, aheap):
    """Value of the right child of heap index ``i``, or None if out of range."""
    child = 2 * i + 2
    return aheap[child] if child < len(aheap) else None
def parent(i, aheap):
    """Value of the parent of heap index ``i``; the root (i == 0) has none."""
    return None if i == 0 else aheap[(i - 1) // 2]
def push_max(val, aheap):
    """Insert ``val`` into the max-heap ``aheap`` and return the heap.

    Standard sift-up: append at the end, then move larger ancestors down
    until the correct slot for ``val`` is found.
    """
    aheap.append(val)
    pos = len(aheap) - 1
    while pos > 0:
        up = (pos - 1) >> 1
        if aheap[up] < val:
            # Ancestor is smaller: shift it down and keep climbing.
            aheap[pos] = aheap[up]
            pos = up
        else:
            break
    aheap[pos] = val
    return aheap
def max_heapify(l, i, size):
    """Restore the max-heap order of the subtree of ``l`` rooted at index ``i``.

    Assumes both child subtrees of ``i`` already satisfy the heap property
    (the classic sift-down precondition). ``size`` bounds the live portion
    of the list so a partially-consumed heap can be repaired in place.

    BUG FIX: the original body was entirely commented out ("TODO Broken
    fix plz"), making this a silent no-op; this is the intended algorithm.
    """
    left = (i << 1) + 1
    right = (i << 1) + 2
    largest = i
    # Pick the largest of the node and its in-bounds children.
    if left < size and l[left] > l[largest]:
        largest = left
    if right < size and l[right] > l[largest]:
        largest = right
    # If a child wins, swap it up and continue sifting down from there.
    if largest != i:
        l[i], l[largest] = l[largest], l[i]
        max_heapify(l, largest, size)
def build_max_heap(l):
    """Re-order list ``l`` in place into a max heap.

    Sift down every internal node, starting from the deepest parent
    (index ``(size >> 1) - 1``) up to the root.

    BUG FIX: the original passed ``i >> 1`` to ``max_heapify``; ``i`` is
    already the node index produced by the range, so it was being halved
    twice and most subtrees were never heapified.
    """
    size = len(l)
    for i in range((size >> 1) - 1, -1, -1):
        max_heapify(l, i, size)
def pop_max(aheap):
    """Remove and return the root (maximum) of max-heap ``aheap``.

    The last element replaces the root, then a sift-down restores the
    heap order. Returns ``[]`` for an empty heap (preserving the
    original's edge-case behavior).

    BUG FIX: the original heapified with the pre-pop ``size``, which
    reads one slot past the shrunken list once ``max_heapify`` works.
    """
    size = len(aheap)
    # Edge case for 0/1-element heaps: nothing to restructure.
    if size <= 1:
        result = []
        if size == 1:
            result = aheap.pop()
        return result
    root = aheap[0]
    # Move the last element into the root slot, shrinking the heap by one.
    aheap[0] = aheap.pop()
    # Restore heap order over the remaining elements only.
    max_heapify(aheap, 0, len(aheap))
    return root
# TODO Min heaps
# def min_heapify(l):
# """ Takes list 'l' and converts to a properly min ordered heap list """
# return []
# class MaxHeap():
# def __init__(self, l):
# """ Initializes the heap, if a list is given, initializes using heapify
# """
# self.size = 0
# self.data = []
# if type(l) != 'list':
# raise ValueError("Can only initialize heap with no arguments or a list")
# else:
# self.data = deepcopy(l)
# build_max_heap(self.data)
# def __repr__(self):
# return '<MaxHeap: {}'.format(self.data)
# # def build(self, l):
# # build_max_heap(l)
# # def push(self, x):
# # self.data = push_max(x, self.data)
# # def pop()
|
def show(name):
    """Overwrite slots 0..2 of ``name`` in place and log before/after.

    Demonstrates that the parameter references the caller's object: the
    mutation is visible outside and the id stays the same.
    """
    print(f'Przed modyfikacją: {name}')
    for idx, nowy in enumerate(('Beata', 'Barbara', 'Bogdan')):
        name[idx] = nowy
    print(f'Po modyfikacji: {name}')
    print(f'Id po modyfikacji: {id(name)}')
# Demonstrates mutable-argument semantics: `show` receives a reference to
# this list, so its in-place changes are visible here (same id throughout).
data = ['Anna', 'Agnieszka', 'Andrzej']
print(f'Przed wywoładniem funkcji show: {id(data)}')
print()
show(data)
print(f'Po wywołaniu funkcji show: {data}')
############### słownik ####################
# Dictionaries are mutable too: show() assigns keys 0..2, adding key 2.
print('slownik:')
data1 = {0: 'Artur', 1: 'Andrzej'}
print(f'Id przed modyfikacją: {id(data1)}')
show(data1)
#przekierowywanie argumentu przez wartosc
def show1(city):
    """Overwrite slots 0..1 of ``city`` in place and log before/after."""
    print(f'Przed modyfikacją: {city}')
    for idx, nowe in enumerate(('Berlin', 'Londyn')):
        city[idx] = nowe
    print(f'Po modyfikacji: {city}')
    print(f'Id po modyfikacji: {id(city)}')
# A shallow copy (`list(miasto)`) is passed, so the original list is left
# untouched — the "call by value" counter-example for the container itself.
miasto = ['Gniezno', 'Poznan']
print(f'Przed wywołaniem funkcji show1: {miasto}')
print(f'Id miasto show1 {id(miasto)}')
show1(list(miasto))
print(f'Po wywołaniu funkcji show1: {miasto}')
########################################
miasto1 = {
    0: 'Gniezno',
    1: 'Poznań'
}
# NOTE(review): the next two prints are identical and show1 is never
# actually called on miasto1 before the "after" print — likely an
# unfinished demo step; confirm intent before changing.
print(f'\n\n Przed wywołaniem funkcji show1: {miasto1}')
print(f'\n\n Przed wywołaniem funkcji show1: {miasto1}')
print(f'Po wywołaniu funkcji show1: {miasto1}')
|
from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.core.urlresolvers import reverse_lazy
from . import views
# URL namespace used for reversing, e.g. reverse('weddings:event-list').
app_name='weddings'
# NOTE: order matters — Django resolves against the first matching regex.
urlpatterns = [
    # event model
    url(r'^$', views.Invite1View.as_view(), name='invite1'),
    # url(r'^invite/(?P<pk>[0-9]+)/$', views.Invite2View.as_view(), name='invite2'),
    url(r'^invite/(?P<slug>\w+)/$', views.Invite2View.as_view(), name="invite2"),
    url(r'^invite/(?P<slug>\w+)/detail/$', views.InviteDetailView.as_view(), name="invite-detail"),
    # Event CRUD views, keyed by primary key.
    url(r'^events/$', views.EventListView.as_view(), name='event-list'),
    url(r'^events/new/$', views.EventNewView.as_view(), name='event-new'),
    url(r'^events/(?P<pk>[0-9]+)/delete/$', views.EventDeleteView.as_view(), name='event-delete'),
    url(r'^events/(?P<pk>[0-9]+)/edit/$', views.EventEditView.as_view(), name='event-edit'),
    url(r'^events/(?P<pk>[0-9]+)/$', views.EventDetailView.as_view(), name='event-detail'),
    # guest model
    url(r'^events/(?P<pk>[0-9]+)/guests/$', views.GuestListView.as_view(), name='guest-list'),
    url(r'^guest/(?P<pk>[0-9]+)/$', views.GuestDetailView.as_view(), name='guest-detail'),
    url(r'^events/(?P<pk>[0-9]+)/guest/new/$', views.GuestNewView.as_view(),name='guest-new'),
    url(r'^guest/(?P<pk>[0-9]+)/delete/$', views.GuestDeleteView.as_view(), name='guest-delete'),
    url(r'^guest/(?P<pk>[0-9]+)/edit/$', views.GuestEditView.as_view(), name='guest-edit'),
    # Auth views using the app's templates; logout redirects to the event list.
    url(r'^login/$', auth_views.login, {'template_name': 'weddings/login.html'},name='login'),
    url(r'^logout/$', auth_views.logout, {'next_page': reverse_lazy('weddings:event-list')}, name='logout'),
]
# REST Endpoint URL: https://ftx.com/api
# Local-Time vs FTX-Servers: https://otc.ftx.com/api/time
# from datetime import datetime
# from ciso8601 import
import os
import time
import hmac
from queue import Queue
from typing import Optional, Dict, Any, List
from requests import Request, Session, Response
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
__all__ = ['FtxClientREST', 'time', 'Dict', 'Queue']
class FtxClientREST:
    """Minimal authenticated REST client for the FTX exchange.

    Credentials come from the environment (FTX_DATA_KEY / FTX_DATA_SEC);
    results are pushed onto ``self.response_data`` rather than returned.
    Local-Time vs FTX-Servers: https://otc.ftx.com/api/time
    """
    _ENDPOINT = 'https://ftx.com/api/'
    def __init__(self) -> None:
        self._session = Session()
        self._api_key = os.getenv('FTX_DATA_KEY')
        self._api_secret = os.getenv('FTX_DATA_SEC')
        # Consumers poll this queue for the 'result' payload of each call.
        self.response_data = Queue()
    def _process_response(self, response: Response) -> Any:
        """Validate a JSON response and enqueue its 'result' payload."""
        try:
            data = response.json()
        except ValueError:
            # Non-JSON body: surface the HTTP error (or re-raise the decode error).
            response.raise_for_status()
            raise
        else:
            if not data['success']:
                raise Exception(data['error'])
            self.response_data.put(data['result'])
            # return data['result']
    def _authorize_request(self, request: Request) -> None:
        """Attach the FTX HMAC-SHA256 auth headers to ``request``."""
        timestamp = int(time.time() * 1000)
        prepared = request.prepare()
        sig_payload = f"{timestamp}{prepared.method}{prepared.path_url}".encode()
        signature = hmac.new(self._api_secret.encode(), sig_payload, 'sha256').hexdigest()
        request.headers = {
            'FTX-KEY': self._api_key, 'FTX-SIGN': signature, 'FTX-TS': str(timestamp)
        }
    def _request(self, method: str, path: str, **kwargs) -> Any:
        """Sign and send a request; enqueue the parsed result."""
        # BUG FIX: the original assert parsed as `key and (secret is not None)`;
        # verify both credentials are present explicitly.
        assert self._api_key is not None and self._api_secret is not None, 'need keys foo'
        request = Request(method, self._ENDPOINT + path, **kwargs)
        self._authorize_request(request)
        response = self._session.send(request.prepare())
        response.close()
        return self._process_response(response)
    def _get(self, path: str, params: Optional[Dict[str, Any]] = None) -> Any:
        return self._request('GET', path, params=params)
    def _post(self, path: str, params: Optional[Dict[str, Any]] = None) -> Any:
        """ TODO complete _post method """
        pass
    def _delete(self, path: str, params: Optional[Dict[str, Any]] = None) -> Any:
        """ TODO complete delete method """
        pass
    # ################ ACCT/ORDERS ################ #
    ##################################################
    def get_account_info(self) -> dict:
        # Dropped the pointless f-string prefixes on these constant paths.
        return self._get('account')
    def get_positions(self):
        return self._get('positions')
    def get_open_orders(self, market: str) -> List[dict]:
        return self._get('orders', {'market': market})
    def get_open_triggers(self, market: str) -> List[dict]:
        return self._get('conditional_orders', {'market': market})
    def get_trigger_triggers(self, cond_ord_id: str) -> dict:
        return self._get(f'conditional_orders/{cond_ord_id}/triggers')
    def get_trigger_history(self, market: str) -> dict:
        return self._get('conditional_orders/history', {'market': market})
    # ################## MARKETS ################## #
    ##################################################
    def list_markets(self) -> List[dict]:
        """
        Examples for each type are:
        'BTC/USD', 'BTC-PERP', 'BTC-0626', and 'BTC-MOVE-1005'.
        For futures that expired in 2019, prepend a 2019 to the date, like so:
        BTC-20190628 or BTC-MOVE-20190923.
        :return: List[dict] containing all the markets available on FTX
        """
        return self._get('markets')
    def get_single_market(self, market: str) -> List[dict]:
        """
        Get market data snapshot report on a single asset
        :param market: Market name; str('BTC-0628')
        :return: Dictionary of the assets metrics
        """
        return self._get(f'markets/{market}')
    def get_orderbook(self, market: str, depth: int = None) -> dict:
        """
        :param market: Name of the market; str('BTC-0628')
        :param depth: depth of search; int( default(20) - max(100) )
        :return: dict[list] w/ keys being; 'bid' and 'ask'
        """
        return self._get(f'markets/{market}/orderbook', {'depth': depth})
    def get_trades(self, market: str, limit: int = None, start: int = None, end: int = None) -> List:
        """
        Market is the only required param, others are optional
        :param market: Name of the market; str('BTC-0628')
        :param limit: Depth of search; int( default(20) - max(100) )
        :param start: Start time; int(1559881511)
        :param end: End time; int(1559881511)
        :return: List[dict] containing tradeID, price, time, etc
        """
        return self._get(f'markets/{market}/trades',
                         {'limit': limit,
                          'start_time': start, 'end_time': end})
    def get_historical(self,
                       market: str,
                       resolution: int,
                       limit: int = None,
                       start: int = None,
                       end: int = None) -> List[dict]:
        """
        TODO start/end_time formatting???
        :param market: Name of the market; str('BTC-0628')
        :param resolution: Interval(sec); int(15|60|300|900|3600|14400|86400)
        :param limit: depth of search int(max(5000))
        :param start: int(1559881511)
        :param end: int(1559881511)
        :return: Dictionary of historical price data
        """
        return self._get(f'markets/{market}/candles',
                         {'resolution': resolution, 'limit': limit,
                          'start_time': start, 'end_time': end})
    # ################ SPOT Margin ################ #
    ##################################################
    def spot_margin_mkt_info(self, market):
        # BUG FIX: path had a leading '/', producing 'https://ftx.com/api//...'.
        return self._get('spot_margin/market_info',
                         {'market': market})
    # ################## FUTURES ################## #
    ##################################################
    def list_futures(self) -> List[dict]:
        return self._get('futures')
    def get_future(self, future: str) -> dict:
        return self._get(f'futures/{future}')
    def get_future_stats(self, future: str) -> dict:
        return self._get(f'futures/{future}/stats')
    def get_funding_rates(self, start: int, end: int, future: str) -> List[dict]:
        return self._get('funding_rates',
                         {'start_time': start, 'end_time': end, 'future': future})
    def get_hist_index(self,
                       market: str,
                       resolution: int,
                       limit: int,
                       start: int,
                       end: int) -> List[dict]:
        return self._get(f'indexes/{market}/candles',
                         {'resolution': resolution, 'limit': limit,
                          'start_time': start, 'end_time': end})
    # ################## OPTIONS ################## #
    ##################################################
    def get_pub_options_trades(self) -> List[dict]:
        return self._get('options/trades')
    def get_options_fills(self,
                          start: int = None,
                          end: int = None,
                          limit: int = None) -> List[dict]:
        return self._get('options/fills',
                         {'start_time': start, 'end_time': end, 'limit': limit})
    def get_options_vol_24hr(self) -> List[dict]:
        return self._get('stats/24h_options_volume')
    def get_options_vol_hist_btc(self):
        return self._get('options/historical_volumes/BTC')
    def get_options_open_interest(self):
        # BUG FIX: dropped the leading '/' (double slash against _ENDPOINT).
        return self._get('options/open_interest/BTC')
    def get_options_hist_open_interest(self):
        # BUG FIX: dropped the leading '/' (double slash against _ENDPOINT).
        return self._get('options/historical_open_interest/BTC')
if __name__ == '__main__':
    # from pprint import pprint
    # foo = FtxClientREST()
    #
    # print('foo.')
    # print(foo.get_options_open_interest())
    print(dir(FtxClientREST))
else:
    # NOTE(review): this prints on every import of the module — a side
    # effect at import time; consider removing or switching to logging.
    print(f'>>> Running FTX_REST.py as {__name__}')
|
import pygame
from pygame.locals import *
class Puntos(pygame.sprite.Sprite):
    """A toggleable dot drawn on a pygame surface.

    Black when unpressed, red when pressed; toggled via ``presionar`` and
    redrawn via ``pintarPunto``.
    """
    def __init__(self, valor, x, y, pantalla):
        super(Puntos, self).__init__()
        self.valor = valor          # payload value carried by the dot
        self.presionada = False     # pressed/toggled state
        self.x = x
        self.y = y
        self.pantalla = pantalla    # surface the dot is drawn onto
        self.circulo = pygame.draw.circle(self.pantalla, (0, 0, 0),
                                          (self.x, self.y), 15)
    def presionar(self):
        """Toggle the pressed state."""
        self.presionada = not self.presionada
    def pintarPunto(self):
        """Redraw the dot in the color matching its current state.

        Idiom fixes: truth-test the flag directly instead of ``is True``,
        and collapse the duplicated draw call into one with a chosen color.
        """
        color = (255, 0, 0) if self.presionada else (0, 0, 0)
        self.circulo = pygame.draw.circle(self.pantalla, color,
                                          (self.x, self.y), 15)
import os
import numpy as np
import json
import detectron2
from detectron2.utils.logger import setup_logger
setup_logger()
from detectron2.structures import BoxMode
import itertools
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.engine import DefaultTrainer, DefaultPredictor
from detectron2.config import get_cfg
from detectron2.utils.visualizer import Visualizer
import cv2
import glob
from PIL import Image, ImageDraw
import sys
sys.path.append("/mnt/data/2D-3D-Semantics")
import assets.utils as utils
import matplotlib.pyplot as plt
import random
import argparse
from subprocess import Popen, PIPE
# Category-id mapping for the five furniture classes used by this model.
class_dict = {'table': 0, 'chair': 1, 'sofa': 2, 'bookcase': 3, 'board': 4}
# Index-ordered class names; positions must agree with class_dict values.
class_list = ['table', 'chair', 'sofa', 'bookcase', 'board']
def get_stanford_dicts(img_dir, json_path):
    """Load an annotation json and convert it into detectron2 dataset dicts.

    Each record carries the absolute image path, image size, and a list of
    XYXY_ABS bounding-box annotations with category ids.
    """
    with open(json_path) as f:
        imgs_anns = json.load(f)

    dataset_dicts = []
    for v in imgs_anns.values():
        annotations = [
            {
                "bbox": anno['bbox'],
                "bbox_mode": BoxMode.XYXY_ABS,
                "category_id": anno['category_id'],
                "iscrowd": 0,
            }
            for anno in v["objects"].values()
        ]
        dataset_dicts.append({
            "file_name": os.path.join(img_dir, v["file_name"]),
            "height": v["height"],
            "width": v["width"],
            "annotations": annotations,
        })
    return dataset_dicts
# --- Command-line interface -------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument('--area_dir', type=str, help='Path of the 2D-3D-S Area directory.')
parser.add_argument('--area_json', type=str, help='Path of the area json file. These are included in data folder.')
parser.add_argument('--model_path', type=str, help='Path of the trained detectron2 model.')
parser.add_argument('--model_config_path', type=str, help='Path of the trained model\'s detectron config yaml file.')
parser.add_argument('--detection_dir', type=str, help='Output directory path to create detection and ground truth files.')
args = parser.parse_args()

# --- Detectron2 inference configuration -------------------------------------
cfg = get_cfg()
#cfg.merge_from_file("/home/ubuntu/detectron2_repo/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml")
cfg.merge_from_file(args.model_config_path)
cfg.DATALOADER.NUM_WORKERS = 2
#cfg.MODEL.WEIGHTS = "detectron2://COCO-Detection/faster_rcnn_R_50_FPN_3x/137849458/model_final_280758.pkl" # initialize from model zoo
#cfg.TEST.EXPECTED_RESULTS = [['bbox', 'AP', 38.5, 0.2]]
#cfg.TEST.EVAL_PERIOD = 5
cfg.SOLVER.IMS_PER_BATCH = 1
#cfg.SOLVER.BASE_LR = 0.001
#cfg.SOLVER.MAX_ITER = 1000000000 # 300 iterations seems good enough, but you can certainly train longer
cfg.MODEL.ROI_HEADS.NUM_CLASSES = 5  # five classes: table, chair, sofa, bookcase, board
cfg.MODEL.WEIGHTS = args.model_path
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.7  # confidence threshold for reported detections
predictor = DefaultPredictor(cfg)
img_dir = os.path.join(args.area_dir, 'data', 'rgb')
pose_dir = os.path.join(args.area_dir, 'data', 'pose')

# Create output directories. Previously done via os.system("mkdir ..."),
# which failed silently when the directory already existed.
os.makedirs(os.path.join(args.detection_dir, 'detections'), exist_ok=True)
os.makedirs(os.path.join(args.detection_dir, 'groundtruths'), exist_ok=True)

dataset_dicts = get_stanford_dicts(img_dir, args.area_json)
for d in dataset_dicts:
    img_base_path = '_'.join(d['file_name'].split('_')[:-1])
    pose_file_path = os.path.join(pose_dir, img_base_path + '_pose.json')
    img_file_path = d['file_name']
    det_file_name = "{}.txt".format(os.path.basename(d['file_name']).split('.')[0])
    det_file_path = os.path.join(args.detection_dir, 'detections', det_file_name)
    gt_file_path = os.path.join(args.detection_dir, 'groundtruths', det_file_name)

    # Ground-truth file: one "<class> <x1> <y1> <x2> <y2>" line per object.
    # Written directly instead of shelling out to `echo` (fragile with shell
    # metacharacters). As before, the file is only created when there are
    # annotations.
    if d['annotations']:
        with open(gt_file_path, 'a') as gt_file:
            for ann in d['annotations']:
                gt_file.write("{} {} {} {} {}\n".format(
                    class_list[ann['category_id']],
                    ann['bbox'][0], ann['bbox'][1],
                    ann['bbox'][2], ann['bbox'][3]))

    im = cv2.imread(d["file_name"])
    outputs = predictor(im)
    detections = outputs["instances"].to("cpu").get_fields()
    detection_boxes = detections['pred_boxes'].tensor
    detection_classes = detections['pred_classes']
    detection_scores = detections['scores']
    detection_num = detection_boxes.shape[0]

    # Detection file: "<class> <score> <x1> <y1> <x2> <y2>" per detection.
    # Opened unconditionally so an empty file exists even when nothing is
    # detected (matches the old `touch`).
    with open(det_file_path, 'a') as det_file:
        for i in range(detection_num):
            det_file.write("{} {} {} {} {} {}\n".format(
                class_list[detection_classes[i].item()],
                detection_scores[i],
                detection_boxes[i][0], detection_boxes[i][1],
                detection_boxes[i][2], detection_boxes[i][3]))
|
import re
import requests
from bs4 import BeautifulSoup
# King County food-safety inspection search endpoint.
INSPECTION_DOMAIN = 'http://info.kingcounty.gov'
INSPECTION_PATH = '/health/ehs/foodsafety/inspections/Results.aspx'
# Default query parameters mirroring the search form's fields; empty strings
# appear to mean "no filter" — confirm against the live form if in doubt.
INSPECTION_PARAMS = {
    'Output': 'W',
    'Business_Name': '',
    'Business_Address': '',
    'Longitude': '',
    'Latitude': '',
    'City': '',
    'Zip_Code': '',
    'Inspection_Type': 'All',
    'Inspection_Start': '',
    'Inspection_End': '',
    'Inspection_Closed_Business': 'A',
    'Violation_Points': '',
    'Violation_Red_Points': '',
    'Violation_Descr': '',
    'Fuzzy_Search': 'N',
    'Sort': 'H'
}
def get_inspection_page(**kwargs):
    """
    Given a set of inspection parameters, return an inspection page.

    Accepts keyword arguments for each possible query value, overlays them
    on the INSPECTION_PARAMS defaults (unknown keywords are ignored), sends
    the query to the inspection search page, and returns the response text.
    """
    # Merge caller overrides over the defaults in a single pass.
    arguments = {
        key: kwargs.get(key, default)
        for key, default in INSPECTION_PARAMS.items()
    }
    url = INSPECTION_DOMAIN + INSPECTION_PATH
    resp = requests.get(url, params=arguments)
    return resp.text
def parse_source(html):
    """Parse the given html string into a BeautifulSoup tree (html5lib parser)."""
    soup = BeautifulSoup(html, "html5lib")
    return soup
def restaurant_data_generator(html):
    """
    Given a BeautifulSoup instance return a find_all generator
    with only the restaurant data divs (ids like "PR123~...").
    """
    restaurant_id = re.compile(r'PR[\d]+~')
    return html.find_all('div', id=restaurant_id)
def has_two_tds(elem):
    """
    Predicate which reports if a BeautifulSoup element is a table
    row which has exactly two tds.

    Implemented (was a bare ``pass`` stub): checks the tag name is 'tr'
    and that it has exactly two direct-child <td> elements.
    """
    return elem.name == 'tr' and len(elem.find_all('td', recursive=False)) == 2
def clean_data(td):
    """
    Given a td, return its text, after stripping away newlines, spaces,
    colons, and dashes.

    Implemented (was a bare ``pass`` stub): strips those characters from
    both ends of the element's text.
    """
    return td.text.strip(" \n:-")
def extract_restaurant_metadata(elem):
    # TODO: implement — presumably extracts name/address metadata from a
    # restaurant data div; confirm the intended fields with the assignment spec.
    pass


def is_inspection_data_row(elem):
    # TODO: implement — predicate over BeautifulSoup elements; judging by the
    # name it should detect table rows carrying inspection data.
    pass


def get_score_data(elem):
    # TODO: implement — presumably extracts inspection score values from a row.
    pass


def result_generator(count):
    # TODO: implement — presumably yields up to *count* parsed results.
    pass
if __name__ == '__main__':
    # Manual smoke test: fetch a year's worth of inspection results
    # (performs a live HTTP request) and print the restaurant divs.
    results = get_inspection_page(Inspection_Start="12/7/2001",
                                  Inspection_End="12/7/2002"
                                  )
    parsed = parse_source(results)
    info_divs = restaurant_data_generator(parsed)
    print(info_divs)
|
import inspect
import taichi.lang
from taichi._lib import core as _ti_core
from taichi.lang import impl, ops
from taichi.lang._texture import RWTextureAccessor, TextureSampler
from taichi.lang.any_array import AnyArray
from taichi.lang.expr import Expr
from taichi.lang.matrix import MatrixType
from taichi.lang.struct import StructType
from taichi.lang.util import cook_dtype
from taichi.types.primitive_types import RefType, u64
from taichi.types.compound_types import CompoundType
class KernelArgument:
    """Bundle of one kernel parameter: annotation, name and default value."""

    def __init__(self, _annotation, _name, _default=inspect.Parameter.empty):
        # Mirror the constructor inputs onto public attributes; the default
        # sentinel is inspect.Parameter.empty, meaning "no default supplied".
        self.annotation, self.name, self.default = _annotation, _name, _default
class SparseMatrixEntry:
    """Proxy for one (i, j) slot of a sparse matrix being assembled."""

    def __init__(self, ptr, i, j, dtype):
        self.ptr = ptr
        self.i = i
        self.j = j
        self.dtype = dtype

    def _augassign(self, value, op):
        """Route '+=' / '-=' on this entry to an internal triplet insert."""
        assert op in ("Add", "Sub"), "Only operations '+=' and '-=' are supported on sparse matrices."
        contribution = ops.cast(value, self.dtype)
        if op == "Sub":
            contribution = -contribution
        taichi.lang.impl.call_internal(
            f"insert_triplet_{self.dtype}", self.ptr, self.i, self.j, contribution
        )
class SparseMatrixProxy:
    """Kernel-side handle to a sparse matrix, addressed through subscript()."""

    def __init__(self, ptr, dtype):
        self.ptr = ptr
        self.dtype = dtype

    def subscript(self, i, j):
        """Return an entry proxy for position (i, j) of this matrix."""
        return SparseMatrixEntry(self.ptr, i, j, self.dtype)
def decl_scalar_arg(dtype, name, arg_depth):
    """Declare a scalar (or by-reference scalar) kernel parameter.

    Returns the Expr that loads the argument inside the kernel body.
    """
    # A RefType wrapper means the argument is passed by reference.
    is_ref = isinstance(dtype, RefType)
    if is_ref:
        dtype = dtype.tp
    dtype = cook_dtype(dtype)

    compiling_callable = impl.get_runtime().compiling_callable
    if is_ref:
        arg_id = compiling_callable.insert_pointer_param(dtype, name)
    else:
        arg_id = compiling_callable.insert_scalar_param(dtype, name)

    argload_di = _ti_core.DebugInfo(impl.get_runtime().get_current_src_info())
    arg_load = _ti_core.make_arg_load_expr(
        arg_id, dtype, is_ref, create_load=True, arg_depth=arg_depth, dbg_info=argload_di
    )
    return Expr(arg_load)
def get_type_for_kernel_args(dtype, name):
    """Lower a compound kernel-arg type to a backend-friendly struct type.

    Matrices are flattened into one struct field per element (matrix-typed
    arguments are not yet supported on SPIR-V based backends); struct types
    recurse member by member; primitive types pass through unchanged.
    """
    if isinstance(dtype, MatrixType):
        if dtype.ndim == 1:
            elements = [(dtype.dtype, f"{name}_{i}") for i in range(dtype.n)]
        else:
            elements = [
                (dtype.dtype, f"{name}_{i}_{j}")
                for i in range(dtype.n)
                for j in range(dtype.m)
            ]
        return _ti_core.get_type_factory_instance().get_struct_type(elements)

    if isinstance(dtype, StructType):
        members = []
        for member_name, member_type in dtype.members.items():
            if isinstance(member_type, CompoundType):
                member_type = get_type_for_kernel_args(member_type, member_name)
            members.append([member_type, member_name])
        return _ti_core.get_type_factory_instance().get_struct_type(members)

    # Primitive types need no lowering.
    return dtype
def decl_matrix_arg(matrixtype, name, arg_depth):
    """Declare a matrix kernel parameter and return it as a matrix object."""
    lowered_type = get_type_for_kernel_args(matrixtype, name)
    arg_id = impl.get_runtime().compiling_callable.insert_scalar_param(lowered_type, name)
    argload_di = _ti_core.DebugInfo(impl.get_runtime().get_current_src_info())
    raw_load = _ti_core.make_arg_load_expr(
        arg_id, lowered_type, create_load=False, arg_depth=arg_depth, dbg_info=argload_di
    )
    return matrixtype.from_taichi_object(Expr(raw_load))
def decl_struct_arg(structtype, name, arg_depth):
    """Declare a struct kernel parameter and return it as a struct object."""
    lowered_type = get_type_for_kernel_args(structtype, name)
    arg_id = impl.get_runtime().compiling_callable.insert_scalar_param(lowered_type, name)
    argload_di = _ti_core.DebugInfo(impl.get_runtime().get_current_src_info())
    raw_load = _ti_core.make_arg_load_expr(
        arg_id, lowered_type, create_load=False, arg_depth=arg_depth, dbg_info=argload_di
    )
    return structtype.from_taichi_object(Expr(raw_load))
def push_argpack_arg(name):
    """Open a new argpack parameter scope on the callable being compiled."""
    runtime = impl.get_runtime()
    runtime.compiling_callable.insert_argpack_param_and_push(name)
def decl_argpack_arg(argpacktype, member_dict):
    """Close the current argpack scope and build the argpack object from its members."""
    runtime = impl.get_runtime()
    runtime.compiling_callable.pop_argpack_stack()
    return argpacktype.from_taichi_object(member_dict)
def decl_sparse_matrix(dtype, name):
    """Declare a sparse-matrix kernel parameter (passed as a u64 base pointer)."""
    value_type = cook_dtype(dtype)
    ptr_type = cook_dtype(u64)
    # Only the base pointer crosses the kernel boundary, so a plain scalar
    # parameter is sufficient.
    arg_id = impl.get_runtime().compiling_callable.insert_scalar_param(ptr_type, name)
    argload_di = _ti_core.DebugInfo(impl.get_runtime().get_current_src_info())
    handle = _ti_core.make_arg_load_expr(arg_id, ptr_type, is_ptr=False, dbg_info=argload_di)
    return SparseMatrixProxy(handle, value_type)
def decl_ndarray_arg(element_type, ndim, name, needs_grad, boundary):
    """Declare an external ndarray kernel parameter wrapped as an AnyArray."""
    compiling_callable = impl.get_runtime().compiling_callable
    arg_id = compiling_callable.insert_ndarray_param(element_type, ndim, name, needs_grad)
    tensor_expr = _ti_core.make_external_tensor_expr(element_type, ndim, arg_id, needs_grad, 0, boundary)
    return AnyArray(tensor_expr)
def decl_texture_arg(num_dimensions, name):
    """Declare a sampled-texture kernel parameter and return its sampler."""
    # FIXME: texture_arg doesn't have element_shape so better separate them
    arg_id = impl.get_runtime().compiling_callable.insert_texture_param(num_dimensions, name)
    dbg_info = _ti_core.DebugInfo(impl.get_runtime().get_current_src_info())
    texture_ptr = _ti_core.make_texture_ptr_expr(arg_id, num_dimensions, 0, dbg_info)
    return TextureSampler(texture_ptr, num_dimensions)
def decl_rw_texture_arg(num_dimensions, buffer_format, lod, name):
    """Declare a read-write texture kernel parameter and return its accessor."""
    # FIXME: texture_arg doesn't have element_shape so better separate them
    arg_id = impl.get_runtime().compiling_callable.insert_rw_texture_param(num_dimensions, buffer_format, name)
    dbg_info = _ti_core.DebugInfo(impl.get_runtime().get_current_src_info())
    texture_ptr = _ti_core.make_rw_texture_ptr_expr(arg_id, num_dimensions, 0, buffer_format, lod, dbg_info)
    return RWTextureAccessor(texture_ptr, num_dimensions)
def decl_ret(dtype):
    """Register *dtype* as a return type of the callable being compiled."""
    if isinstance(dtype, StructType):
        # Unwrap struct return types to their underlying dtype first; the
        # result may itself be a MatrixType, handled below.
        dtype = dtype.dtype
    if isinstance(dtype, MatrixType):
        tensor_shape = [dtype.n, dtype.m]
        lowered = _ti_core.get_type_factory_instance().get_tensor_type(tensor_shape, dtype.dtype)
    else:
        lowered = cook_dtype(dtype)
    impl.get_runtime().compiling_callable.insert_ret(lowered)
|
class file:
    """
    Represents a downloaded file's metadata.

    Attributes:
        Id (str): Identifier of the file.
        name (str): Name of the file.
        date (str): Date the file was created.
        size (str): Size of the file.
        url (str): URL the file was downloaded from.
    """
    # NOTE(review): the Java-style getters/setters and the lowercase class
    # name are kept as-is for caller compatibility.
    Id = ""
    name = ""
    date = ""
    size = ""
    url = ""

    #https://homes.cs.washington.edu/~djg/teachingMaterials/spac/sophomoricParallelismAndConcurrency.pdf

    def __init__(self, ID, Name, Url):
        """Initialize the Id, name and url."""
        self.Id = ID
        self.name = Name
        self.url = Url

    def getName(self):
        """Get the name of the file."""
        # Bug fix: all getters previously returned bare names (e.g. `return
        # name`), which raised NameError at runtime; they now read from self.
        return self.name

    def getSize(self):
        """Get the size of the file."""
        return self.size

    def getDate(self):
        """Get the date of the file."""
        return self.date

    def getUrl(self):
        """Get the url from which the file was downloaded."""
        return self.url

    def setName(self, Name):
        """Set the name of the file."""
        self.name = Name

    def setSize(self, Size):
        """Set the size of the file."""
        self.size = Size

    def setDate(self, Date):
        """Set the date of the file."""
        self.date = Date

    def setUrl(self, Url):
        """Set the url which the file can be downloaded from."""
        self.url = Url
# Write a Python program to generate all permutations of a list in Python.
class generatePermutation:
    def permutationList(self, List):
        """Print every permutation of *List*, one per line, as "[ a b c ]".

        Generalized from the original triple-nested loop, which only worked
        for 3-element lists of distinct values. itertools.permutations
        handles any length (and duplicate values) and, for a 3-element list
        of distinct values, emits the same lines in the same order.
        """
        from itertools import permutations
        for perm in permutations(List):
            print("[", *perm, "]")
# Demo run: print all permutations of [1, 2, 3].
List=[1,2,3]
permu=generatePermutation()
permu.permutationList(List)
'''
def double_it(number):
return(2 * number)
print(double_it(2))
print(double_it(2.2))
print(double_it("hello"))
'''
def calc_hypo(a, b):
    """Return the hypotenuse of a right triangle with legs *a* and *b*.

    Both legs must be exactly int or float (bools are rejected, matching
    the original exact-type check) and non-negative. On invalid input a
    message is printed and False is returned.
    """
    # Bug fix: the original condition `(type(a) and type(b)) in (int, float)`
    # evaluated to just `type(b)`, so `a` was never type-checked and
    # calc_hypo("x", 4) crashed on `"x" >= 0`.
    if type(a) in (int, float) and type(b) in (int, float) and a >= 0 and b >= 0:
        print("a and b are either of type float or int, and positive numbers.")
        c = (a * a + b * b) ** 0.5
        return c
    else:
        print("A string cannot be calculated, neither can a negative number.")
        return False
# Demo: a negative leg prints the rejection message and returns False.
print(calc_hypo(-3, 4))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.