index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
9,000 | 5044b8bc8cabd7762df6a0327828df4546ab8d96 | import cv2
import imutils
import detect
def detectByPathVideo(path, writer):
video = cv2.VideoCapture(path)
check, frame = video.read()
    if not check:
        print('Video not found. Please enter a valid path (the full path to the video must be provided).')
return
print('Detecting people...')
while video.isOpened():
#check is True if reading was successful
check, frame = video.read()
if check:
frame = imutils.resize(frame , width=min(800,frame.shape[1]))
frame = detect.detect(frame)
if writer is not None:
writer.write(frame)
key = cv2.waitKey(1)
if key== ord('q'):
break
else:
break
video.release()
cv2.destroyAllWindows()
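# A minimal usage sketch (hypothetical file names; detect.detect is assumed
# to return the annotated frame):
#     writer = cv2.VideoWriter('output.avi', cv2.VideoWriter_fourcc(*'XVID'), 10, (800, 600))
#     detectByPathVideo('video.mp4', writer)
#     writer.release()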
def detectByCamera(writer):
video = cv2.VideoCapture(0)
print('Detecting people...')
while True:
        check, frame = video.read()
        if not check:
            break
        frame = detect.detect(frame)
if writer is not None:
writer.write(frame)
key = cv2.waitKey(1)
if key == ord('q'):
break
video.release()
cv2.destroyAllWindows() |
9,001 | f5e57c95e2c86aeb83872b29324b0b73a41caa47 | #!/usr/bin/python
from PyQt4 import QtCore, QtGui
import sys
import json
import re
from Interface_Recommended_Results import obtain_list
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
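# (The try/except fallbacks above are the standard pyuic4-generated shims for
# PyQt4 builds where QString.fromUtf8 or QApplication.UnicodeUTF8 is absent.)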
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1122, 672)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.rest_table = QtGui.QTableWidget(self.centralwidget)
self.rest_table.setGeometry(QtCore.QRect(20, 250, 710, 371))
self.rest_table.setObjectName(_fromUtf8("rest_table"))
self.rest_table.setColumnCount(18)
self.rest_table.setRowCount(10)
self.rest_table.setHorizontalHeaderLabels(["City","Review Count","Name","Type","ID","Address","State", "Stars","Categories"])
self.user_select_button = QtGui.QPushButton(self.centralwidget)
self.user_select_button.setGeometry(QtCore.QRect(450, 90, 121, 28))
self.user_select_button.setObjectName(_fromUtf8("user_select_button"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(140, 10, 281, 41))
font = QtGui.QFont()
font.setFamily(_fromUtf8("MS Sans Serif"))
font.setPointSize(18)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName(_fromUtf8("label"))
self.user_box = QtGui.QTextEdit(self.centralwidget)
self.user_box.setGeometry(QtCore.QRect(120, 90, 291, 31))
self.user_box.setObjectName(_fromUtf8("user_box"))
self.label_2 = QtGui.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(30, 100, 72, 15))
font = QtGui.QFont()
font.setPointSize(11)
self.label_2.setFont(font)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.rest_name = QtGui.QTextEdit(self.centralwidget)
self.rest_name.setGeometry(QtCore.QRect(190, 140, 291, 31))
self.rest_name.setObjectName(_fromUtf8("rest_name"))
self.label_3 = QtGui.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(30, 140, 141, 16))
font = QtGui.QFont()
font.setPointSize(11)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_4 = QtGui.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(30, 190, 61, 16))
font = QtGui.QFont()
font.setPointSize(11)
self.label_4.setFont(font)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.stars_box = QtGui.QComboBox(self.centralwidget)
self.stars_box.setGeometry(QtCore.QRect(120, 180, 141, 31))
self.stars_box.setObjectName(_fromUtf8("stars_box"))
self.label_5 = QtGui.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(360, 190, 91, 16))
font = QtGui.QFont()
font.setPointSize(11)
self.label_5.setFont(font)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.category_box = QtGui.QComboBox(self.centralwidget)
self.category_box.setGeometry(QtCore.QRect(490, 180, 191, 31))
self.category_box.setEditable(False)
self.category_box.setObjectName(_fromUtf8("category_box"))
self.user_pic = QtGui.QLabel(self.centralwidget)
self.user_pic.setGeometry(QtCore.QRect(760, 270, 321, 321))
self.user_pic.setScaledContents(True)
self.user_pic.setObjectName(_fromUtf8("user_pic"))
self.word_count_pic = QtGui.QLabel(self.centralwidget)
self.word_count_pic.setGeometry(QtCore.QRect(750, 20, 351, 221))
self.word_count_pic.setScaledContents(True)
self.word_count_pic.setObjectName(_fromUtf8("word_count_pic"))
self.find_rest_button = QtGui.QPushButton(self.centralwidget)
self.find_rest_button.setGeometry(QtCore.QRect(530, 140, 171, 28))
self.find_rest_button.setObjectName(_fromUtf8("find_rest_button"))
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1122, 26))
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.user_select_button.setText(_translate("MainWindow", "Select User", None))
self.label.setText(_translate("MainWindow", "Yelp Recommender", None))
self.label_2.setText(_translate("MainWindow", "User ID:", None))
self.label_3.setText(_translate("MainWindow", "Restaurant Name:", None))
self.label_4.setText(_translate("MainWindow", "Stars:", None))
self.label_5.setText(_translate("MainWindow", "Category:", None))
#self.user_pic.setPixmap(QtGui.QPixmap('../picture/13916.png'))
self.find_rest_button.setText(_translate("MainWindow", "Find Restaruants", None))
class MyApp(QtGui.QMainWindow, Ui_MainWindow):
def __init__(self):
QtGui.QMainWindow.__init__(self)
Ui_MainWindow.__init__(self)
self.setupUi(self)
star_list=["1 Star and above",
"2 Stars and above",
"3 Stars and above",
"4 stars and above",
"5 Stars and above"]
for star in star_list:
self.stars_box.addItem(star)
category_list=["Chinese",
"Italian",
"American (Traditional)",
"American (New)",
"Bars",
"Pizza",
"Vegetarian"]
for category in category_list:
self.category_box.addItem(category)
self.user_select_button.clicked.connect(self.ChangeUser)
self.find_rest_button.clicked.connect(self.findRest)
def DisplayRest(self, rest_out_list):
rest_table_list=[]
for rest in rest_out_list:
temp_list=[]
for key in rest.keys():
key_str=key.encode("ascii","ignore")
if( type(rest[key]) == unicode ):
content=rest[key].encode("ascii","ignore")
temp_list.append(content)
elif( key_str!='longitude' and key_str!='latitude' and type(rest[key]) in [int, float] ):
content=str(rest[key])
temp_list.append(content)
elif( key_str=='categories'):
for item in rest[key]:
item_str=item.encode("ascii","ignore")
temp_list.append(item_str)
rest_table_list.append(temp_list)
self.rest_table.clearContents()
for row in range(10):
for column in range(18):
try:
newitem = QtGui.QTableWidgetItem(rest_table_list[row][column])
self.rest_table.setItem(row, column, newitem)
                except IndexError:
                    # fewer rows/columns of data than table cells; leave the cell empty
                    pass
def ShowPic(self, user_id):
pic=QtGui.QPixmap('../picture/'+str(user_id)+'lay1.png')
wc_pic=QtGui.QPixmap('../picture/'+str(user_id)+'.png')
self.user_pic.setPixmap(pic)
self.word_count_pic.setPixmap(wc_pic)
def ChangeUser(self):
user=int(self.user_box.toPlainText())
user_rest_list=obtain_list(user)
file1=open("rest_pitt.json")
rest_list=[]
for line in file1.readlines():
rest_list.append(json.loads(line))
rest_out_list=[]
for rest in rest_list:
rest_id=rest[unicode("business_id")].decode("ascii","ignore")
if( rest_id in user_rest_list ):
rest_out_list.append(rest)
self.DisplayRest(rest_out_list)
self.ShowPic(user)
def findRest(self):
file1=open("rest_pitt.json")
rest_list=[]
for line in file1.readlines():
rest_list.append(json.loads(line))
filter_stars=self.stars_box.currentIndex()+1
filter_category=unicode(self.category_box.currentText())
filter_name=str(self.rest_name.toPlainText())
count=0
rest_out_list=[]
for rest in rest_list:
if(count<10):
rest_name=rest[unicode("name")].encode('ascii', 'ignore')
rest_stars=rest[unicode("stars")]
rest_category=rest[unicode("categories")]
match=re.search(filter_name, rest_name)
if( match and rest_stars>=filter_stars and (filter_category in rest_category)):
rest_out_list.append(rest)
count+=1
self.DisplayRest(rest_out_list)
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
window = MyApp()
window.show()
sys.exit(app.exec_())
|
9,002 | 06161b1f45e435d0273dd193229ad2ecfd46c625 | from ob import *
if __name__ == "__main__":
# Game starts
print('New game!')
# Deal
deck = Deck()
deck.shuffle()
players = deck.deal()
# Bid
auction = Auction(players)
auction.bid()
# Play
tricks = Tricks(auction)
tricks.play()
|
9,003 | a139042d0c6fa4941b7149a33b0a48018e9f511b | from django.contrib.auth.models import User
from django.core import validators
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import Group
from django.conf import settings
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def assign_group(sender, instance, created, **kwargs):
"""Сигнал, добавляющий созданного пользователя в группу editors"""
if created:
editors_group = Group.objects.get(name='editors')
instance.groups.add(editors_group)
class Employee(models.Model):
"""Сотрудники"""
name = models.CharField("Имя", max_length=100)
age = models.PositiveSmallIntegerField("Возраст", validators=[validators.MaxValueValidator(120),
validators.MinValueValidator(18)])
position = models.CharField("Должность", max_length=60)
photo = models.ImageField("Фото", upload_to="employees/")
achievements = models.TextField("Достижения", max_length=2000,
help_text="Информация об образовании, опыте, квалификации и профессиональных достижениях")
def __str__(self):
return self.name
class Meta:
verbose_name = "Сотрудник"
verbose_name_plural = "Сотрудники"
class Category(models.Model):
"""Категории"""
name = models.CharField("Категория", max_length=150)
url = models.SlugField(max_length=160, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = "Категория"
verbose_name_plural = "Категории"
class Service(models.Model):
"""Услуга"""
PERIOD = (
(0, ''),
(1, '6'),
(2, '12'),
(3, '24'),
)
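    # e.g. a Service saved with warranty=2 stores 2 in the database, while
    # service.get_warranty_display() returns the human-readable choice "12" (months).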
title = models.CharField("Название", max_length=100)
description = models.TextField("Описание")
image = models.ImageField("Фото", upload_to="services/", null=True, blank=True)
employee = models.ManyToManyField(Employee, verbose_name="Cотрудник", related_name="service_employee")
category = models.ForeignKey(Category, verbose_name="Категория", on_delete=models.SET_NULL, null=True)
warranty = models.PositiveSmallIntegerField("Гарантийный срок", choices=PERIOD, help_text="Указать в месяцах")
price = models.DecimalField("Стоимость услуги", max_digits=9, decimal_places=2, default=0,
help_text="Указывать сумму в рублях", validators=[validators.MinValueValidator(0)])
url = models.SlugField(max_length=130, unique=True)
def __str__(self):
return self.title
class Meta:
verbose_name = "Услуга"
verbose_name_plural = "Услуги"
|
9,004 | 39bc90f34cccebe9a8b1475e396caa1c14f6b2df | import unittest
import sys
from tests.jep_pipe import jep_pipe
from tests.jep_pipe import build_java_process_cmd
import jep
@unittest.skipIf(sys.platform.startswith("win"), "subprocess complications on Windows")
class TestSharedModules(unittest.TestCase):
def setUp(self):
pass
def test_shared_modules(self):
jep_pipe(build_java_process_cmd('jep.test.TestSharedModules'))
@unittest.skipIf(not jep.JEP_NUMPY_ENABLED, 'Jep library built without numpy support')
def test_numpy_prod_succeeds(self):
jep_pipe(build_java_process_cmd('jep.test.numpy.TestNumpyProdShared'))
@unittest.skipIf(not jep.JEP_NUMPY_ENABLED, 'Jep library built without numpy support')
def test_numpy_array_to_string(self):
jep_pipe(build_java_process_cmd(
'jep.test.numpy.TestNumpyArrayToString'))
|
9,005 | 531d1cab3d0860de38f8d1fefee28f10fc018bdb | from django.shortcuts import get_object_or_404, render
from django.http import Http404
from django.urls import reverse
# Create your views here.
from django.template import loader
from django.http import HttpResponse, HttpResponseRedirect
from .models import Categories, News, SalesSentences
from .models_gfl import Informations, Sentences, SalestalkReports, UserBrowseHistories
from django.db import connection
import json
def index(request):
listTopNews = News.objects.filter(category__category='topside-news')
#SELECT * FROM tsukapota_news join tsukapota_categories on tsukapota_news.category_id = tsukapota_categories.id WHERE tsukapota_categories.category = 'topside-news'
listBottomNews = News.objects.filter(category__category='bottomside-news')
listLeftNews = News.objects.filter(category__category='leftside-news')
listRightNews = News.objects.filter(category__category='rightside-news')
cursor = connection.cursor()
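    # The query below picks the property the user dwelt on longest among the 10
    # most recent browse-history rows, then returns one random sales sentence for it.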
cursor.execute("SELECT informations.id, informations.train_subway_station_1, TRUNCATE(informations.price_1 /10000, 0), informations.floor_plan, informations.property_category, REPLACE(REPLACE(sentences.sentence_content, '<span>', ''), '</span>', '') FROM `gfl-testgfl`.informations left join `gfl-testgfl`.sentences on sentences.property_no = informations.property_id WHERE informations.property_id = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) and sentences.factor_code > 16 and sentences.sentence_id in ('v2_s12', 'v2_s22', 'v2_s32') order by RAND() limit 1;")
salesSentences = cursor.fetchall()[0]
print(salesSentences)
# cursor = connection.cursor()
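    # The next two queries fetch up to five sales-talk snippets whose case
    # conditions match the property's highest- and second-highest-ranked chart
    # factor (age/closeness/size/convenient/security/comfortable).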
cursor.execute("SELECT REPLACE(REPLACE(salestalk, '<span>', ''), '</span>', '') FROM `gfl-testgfl`.`salestalk_reports` WHERE common_property_no=(SELECT `common_property_no` from `gfl-testgfl`.informations WHERE property_id = (SELECT `property_no` FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1)) and case_conditions LIKE CONCAT('%', (select max(case when rank=1 then name end) as `highest value` from (select common_property_no, @rownum := @rownum + 1 AS rank, name, amt from (select common_property_no, age as amt, 'age' as name from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, closeness, 'closeness' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, size, 'size' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, convenient, 'convenient' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, security, 'security' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, comfortable, 'comfortable' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) ) amounts, (SELECT @rownum :=0) r order by amt desc limit 2 ) top2 group by common_property_no), '%') limit 5")
salesComment1 = cursor.fetchall()
print(salesComment1)
cursor.execute("SELECT REPLACE(REPLACE(salestalk, '<span>', ''), '</span>', '') FROM `gfl-testgfl`.`salestalk_reports` WHERE common_property_no=(SELECT `common_property_no` from `gfl-testgfl`.informations WHERE property_id = (SELECT `property_no` FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1)) and case_conditions LIKE CONCAT('%', (select max(case when rank=2 then name end) as `second value` from (select common_property_no, @rownum := @rownum + 1 AS rank, name, amt from (select common_property_no, age as amt, 'age' as name from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, closeness, 'closeness' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, size, 'size' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, convenient, 'convenient' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, security, 'security' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) union select common_property_no, comfortable, 'comfortable' from `gfl-testgfl`.charts where property_no = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1) ) amounts, (SELECT @rownum :=0) r order by amt desc limit 2 ) top2 group by common_property_no), '%') limit 5")
salesComment2 = cursor.fetchall()
print(salesComment2)
# js_data = simplejson.dumps(my_dict)
context = {
'listTopNews': listTopNews,
'listBottomNews': listBottomNews,
'listLeftNews': listLeftNews,
'listRightNews': listRightNews,
'salesSentences': salesSentences,
'salesComment1': salesComment1,
'salesComment2': salesComment2,
}
return render(request, 'tsukapota/index-pc.html', context)
# SELECT informations.train_subway_station_1, informations.price_1 /10000, informations.floor_plan, informations.property_category, sentences.sentence_content
# FROM `gfl-testgfl`.informations left join
# `gfl-testgfl`.sentences on sentences.property_no = informations.property_id
# WHERE
# informations.property_id = (SELECT property_no FROM (SELECT * FROM `gfl-testgfl`.user_browse_histories ORDER BY user_browse_histories.created_at desc limit 10) as A ORDER BY stay_time desc limit 1)
# and sentences.factor_code > 16
# and sentences.sentence_id in ('v2_s12', 'v2_s22', 'v2_s32') order by RAND() limit 1; |
9,006 | 094e7c150456888389c764d4dd7bf3c9a87a022c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
import json
import requests
import time
import logging
import numpy as np
from matplotlib import path
from geometry_msgs.msg import PoseWithCovarianceStamped
from std_msgs.msg import String
from people_msgs.msg import People
import rsb
import sys
# -busy state (bool)
# -CALL (bool)
robotname = "pepper"
serverurl = "http://warp1337.com:5000/"
current_location = ""
current_number_of_people = 0
last_people_update = time.time()
def pointInPoly(point, polygon):
poly = path.Path(polygon)
ret = poly.contains_point(point)
return ret
p1 = (16.3588726504, 6.30925045713)
p2 = (13.3668566439, 9.40414529184)
p3 = (9.9387360579, 13.0877831966)
p4 = (20.1978948252, 9.74892582772)
p5 = (17.198790766, 12.8383869617)
p6 = (13.6412316995, 16.646593463)
p7 = (24.0390379612, 13.6097755096)
p8 = (21.1956458327, 16.5876670178)
p9 = (17.4775239932, 20.1390011251)
p10 = (21.7561656767, 11.3351165489)
p11 = (18.6364057548, 14.3130282238)
oldkitchen = [p1, p2, p5, p4, p1]
oldlivingroom = [p4, p5, p8, p7, p4]
kitchen = [p1, p2, p11, p10, p1]
livingroom = [p10, p11, p8, p7, p10]
diningroom = [p9, p8, p5, p6, p9]
bedroom = [p2, p3, p6, p5, p2]
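# Example (hypothetical coordinate): pointInPoly((18.0, 12.0), kitchen) is True
# only if that point falls inside the kitchen polygon defined above.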
def resetPeople():
global last_people_update
last_people_update = time.time()
headers = {'Content-type': 'application/json'}
payload = "0"
r = requests.put(serverurl + robotname + "/numberOfPersons", headers=headers, data=json.dumps(payload))
print "reset people"
def updateRobotInfo():
global robotname
# print 'Number of arguments:', len(sys.argv), 'arguments.'
# print 'Argument List:', str(sys.argv)
if len(sys.argv) == 2:
robotname = str(sys.argv[1])
print "set robotname to " + robotname
print "init updater"
rospy.init_node('updateRobotInfos', anonymous=True)
print "ros connection established"
rsb_informer = rsb.createInformer("/tobi", dataType=str)
rate = rospy.Rate(1)
def checkForNavGoal():
headers = {'Content-type': 'application/json'}
r = requests.get(serverurl + robotname + "/setlocation", headers=headers)
# pos = r.json().split(',')
# x = pos[0]
# y = pos[1]
# theta = pos[2]
if r.json() == 'called':
print "navgoal detected"
headers = {'Content-type': 'application/json'}
payload = 'none'
r = requests.put(serverurl + robotname + "/setlocation", headers=headers, data=json.dumps(payload))
# robot_call = "komm"
# pub_called.publish(robot_call)
# Create an informer for strings on scope "/example/informer".
rsb_informer.publishData("")
def positionCB(data):
global current_location
x = data.pose.pose.position.x
y = data.pose.pose.position.y
print "callback: " + str(x) + ", " + str(y)
# get location from Position (e.g "kitchen")
if (pointInPoly((x, y), kitchen)):
location = "er ist in der kche"
elif (pointInPoly((x, y), livingroom)):
location = "er ist im wohnzimmer"
elif (pointInPoly((x, y), bedroom)):
location = "er ist im schlafzimmer"
elif (pointInPoly((x, y), diningroom)):
location = "er ist im esszimmer"
else:
location = "er ist ausserhalb der arena"
if current_location != location:
print location
current_location = location
headers = {'Content-type': 'application/json'}
payload = location
r = requests.put(serverurl + robotname + "/location", headers=headers, data=json.dumps(payload))
print r.json()
def personsCB(data):
global last_people_update
global current_number_of_people
last_people_update = time.time()
numberOfPeople = str(len(data.people))
# get number of persons from Persons (e.g. "2")
# send curl -i -H 'Content-Type: application/json' -X PUT -d '"2"' http://localhost:5000/pepper/persons
if numberOfPeople != current_number_of_people:
current_number_of_people = numberOfPeople
headers = {'Content-type': 'application/json'}
payload = numberOfPeople
r = requests.put(serverurl + robotname + "/numberOfPersons", headers=headers, data=json.dumps(payload))
print r.json()
position_sub = rospy.Subscriber('/amcl_pose', PoseWithCovarianceStamped, positionCB)
person_sub = rospy.Subscriber('/people_tracker/people', People, personsCB)
resetPeople()
while not rospy.is_shutdown():
rate.sleep()
checkForNavGoal()
if time.time() - last_people_update > 1:
resetPeople()
if __name__ == '__main__':
try:
updateRobotInfo()
except rospy.ROSInterruptException:
pass
|
9,007 | 2e8d39d6d72672de8e4eac8295b90d68b1dff938 | '''
A linear regression learning algorithm example using TensorFlow library.
Author: Aymeric Damien
Project: https://github.com/aymericdamien/TensorFlow-Examples/
'''
from __future__ import print_function
import tensorflow as tf
import argparse
import numpy
rng = numpy.random
#"python tf_cnn_benchmarks.py --device=cpu --data_format=NHWC --num_warmup_batches=0 --model=lenet --batch_size=32 --num_intra_threads=19 --num_batches=3750"
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', help='batch_size', required=False, default=32)
parser.add_argument('--data_size', help='data_size', required=False, default=1700)
parser.add_argument('--num_intra_threads', help='num_intra_threads', required=False, default=19)
parser.add_argument('--num_batches', help='num_batches', required=False, default=5000000)
parser.add_argument('--device', help='device', required=False, default='gpu')
args = vars(parser.parse_args())
batch_size = int(args['batch_size'])
data_size = int(args['data_size'])
num_intra_threads = int(args['num_intra_threads'])
num_batches = int(args['num_batches'])
device = args['device']
# Parameters
learning_rate = 0.01
training_epochs = num_batches
display_step = 50
# Training Data
#train_X = numpy.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167, 7.042,10.791,5.313,7.997,5.654,9.27,3.1])
#train_Y = numpy.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221, 2.827,3.465,1.65,2.904,2.42,2.94,1.3])
#n_samples = train_X.shape[0]
n_samples = data_size
# 1-D vectors so the zip() below iterates sample-by-sample
train_X = rng.rand(n_samples)
train_Y = rng.rand(n_samples)
with tf.device('/'+device+':0'):
# tf Graph Input
X = tf.placeholder("float")
Y = tf.placeholder("float")
# Set model weights
W = tf.Variable(rng.randn(), name="weight")
b = tf.Variable(rng.randn(), name="bias")
# Construct a linear model
pred = tf.add(tf.multiply(X, W), b)
# Mean squared error
cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples)
# Gradient descent
# Note, minimize() knows to modify W and b because Variable objects are trainable=True by default
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
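    # Each step applies the updates W <- W - lr * dC/dW and b <- b - lr * dC/db,
    # where C is the mean-squared-error cost defined above.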
# Initializing the variables
init = tf.global_variables_initializer()
# gpu share
#gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.2)
# Launch the graph
newConfig = tf.ConfigProto()
newConfig.intra_op_parallelism_threads = num_intra_threads
with tf.Session(config=newConfig) as sess:
# with tf.Session() as sess:
sess.run(init)
# Fit all training data
for epoch in range(training_epochs):
for (x, y) in zip(train_X, train_Y):
sess.run(optimizer, feed_dict={X: x, Y: y}) |
9,008 | 2876c9f8db0395143b165b855b22e364e3cc8121 | import sys
a = 3
b = 4
c = 5.66
d = 8.0
e = complex(c,d)
f = complex(float(a),float(b))
print("a is type:",type(a))
print("c is type:",type(c))
print("e is type:",type(e))
print(a + b)
print(d / c)
print(b / a)
# floor division of two ints truncates the result to an int
print(b // a)
print(e)
print(e + f)
print(sys.float_info) |
9,009 | de4e14a4fa8520c1aae60805084224337dd9620c | # -*- coding:utf-8 -*-
# Random forest hyperparameter tuning
# RandomizedSearchCV: randomized search for the best parameters
# GridSearchCV: exhaustive (grid) search for the best parameters
import pandas as pd
features = pd.read_csv('data/temps_extended.csv')
features = pd.get_dummies(features)
labels = features['actual']
features = features.drop('actual', axis = 1)
feature_list = list(features.columns)
import numpy as np
features = np.array(features)
labels = np.array(labels)
from sklearn.model_selection import train_test_split
train_features, test_features, train_labels, test_labels = train_test_split(features, labels,
test_size = 0.25, random_state = 42)
print('Training Features Shape:', train_features.shape)
print('Training Labels Shape:', train_labels.shape)
print('Testing Features Shape:', test_features.shape)
print('Testing Labels Shape:', test_labels.shape)
################# Rebuild the training set from the six most important features ##############################
important_feature_names = ['temp_1', 'average', 'ws_1', 'temp_2', 'friend', 'year']
important_indices = [feature_list.index(feature) for feature in important_feature_names]
important_train_features = train_features[:, important_indices]
important_test_features = test_features[:, important_indices]
print('Important train features shape:', important_train_features.shape)
print('Important test features shape:', important_test_features.shape)
train_features = important_train_features[:]
test_features = important_test_features[:]
feature_list = important_feature_names[:]
################# Rebuild the training set from the six most important features ##############################
######## Build the random forest model ###################
from sklearn.ensemble import RandomForestRegressor
rf = RandomForestRegressor(random_state = 42)
from pprint import pprint
# print all model parameters
pprint(rf.get_params())
# {'bootstrap': True,               # whether samples are drawn with replacement
# 'criterion': 'mse',               # loss used to measure split quality (MSE for regression)
# 'max_depth': None,                # maximum tree depth (important)
# 'max_features': 'auto',
# 'max_leaf_nodes': None,           # maximum number of leaf nodes (important)
# 'min_impurity_decrease': 0.0,
# 'min_impurity_split': None,
# 'min_samples_leaf': 1,            # minimum samples per leaf (important)
# 'min_samples_split': 2,           # minimum samples required to split a node (important)
# 'min_weight_fraction_leaf': 0.0,
# 'n_estimators': 'warn',
# 'n_jobs': None,                   # number of CPU cores to use
# 'oob_score': False,
# 'random_state': 42,
# 'verbose': 0,
# 'warm_start': False}
from sklearn.model_selection import RandomizedSearchCV  # randomized search
# number of trees to build
n_estimators = [int(x) for x in np.linspace(start = 200, stop = 2000, num = 10)]
# how the number of features considered per split is chosen
max_features = ['auto', 'sqrt']
# maximum tree depth: 10, 20, or None
max_depth = [int(x) for x in np.linspace(10, 20, num = 2)]
max_depth.append(None)
# minimum number of samples required to split a node
min_samples_split = [2, 5, 10]
# minimum samples per leaf; no split may leave a child node with fewer samples
min_samples_leaf = [1, 2, 4]
# whether bootstrap sampling is used
bootstrap = [True, False]
# Random grid
random_grid = {'n_estimators': n_estimators,
'max_features': max_features,
'max_depth': max_depth,
'min_samples_split': min_samples_split,
'min_samples_leaf': min_samples_leaf,
'bootstrap': bootstrap}
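# The grid above spans 10 * 2 * 3 * 3 * 3 * 2 = 1,080 combinations;
# RandomizedSearchCV samples 100 of them (n_iter=100) below.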
rf = RandomForestRegressor()  # create the model
# randomized parameter search; cv: cross-validation folds, n_iter: 100 random draws,
# scoring: evaluation metric, verbose: logging level, n_jobs=-1: use all CPU cores
rf_random = RandomizedSearchCV(estimator=rf, param_distributions=random_grid,
n_iter = 100, scoring='neg_mean_absolute_error',
cv = 3, verbose=2, random_state=42, n_jobs=-1)
# run the search (left commented out; best_params below records its result)
# rf_random.fit(train_features, train_labels)
# print(rf_random.best_params_)
best_params = {'n_estimators': 1800, 'min_samples_split': 10, 'min_samples_leaf': 4, 'max_features': 'auto', 'max_depth': None, 'bootstrap': True}
def evaluate(model, test_features, test_labels):  # evaluation helper
predictions = model.predict(test_features)
errors = abs(predictions - test_labels)
mape = 100 * np.mean(errors / test_labels)
accuracy = 100 - mape
    print('Mean temperature error:', np.mean(errors))
print('Accuracy = {:0.2f}%.'.format(accuracy))
################# With default parameters ##########################
# Mean temperature error: 3.91697080292
# Accuracy = 93.36%.
base_model = RandomForestRegressor(random_state=42)  # default parameters
base_model.fit(train_features, train_labels)
print('Default parameters')
evaluate(base_model, test_features, test_labels)
################# With default parameters ##########################
################# With the best randomized-search parameters ##########################
# Mean temperature error: 3.7141472957
# Accuracy = 93.73%.
best_random = RandomForestRegressor(n_estimators=1800,min_samples_split=10,random_state = 42,min_samples_leaf=4,max_features='auto',max_depth=None,bootstrap=True)
best_random.fit(train_features, train_labels)
print('Best from randomized search')
evaluate(best_random, test_features, test_labels)
################# With the best randomized-search parameters ##########################
################ Fine-tune around the best randomized-search parameters ######################
# Mean temperature error: 3.69222090145
# Accuracy = 93.77%.
from sklearn.model_selection import GridSearchCV  # exhaustive grid search
param_grid = {'n_estimators': [1000, 1200, 1400, 1600],
'min_samples_split': [3, 5, 7],
'min_samples_leaf': [2,3, 4, 5,6],
'max_features': ['auto'],
'max_depth': [None],
'bootstrap': [True]}
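# 4 * 3 * 5 * 1 * 1 * 1 = 60 parameter combinations; with cv=3 this trains
# 180 forests in total.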
rf = RandomForestRegressor()
# grid search
grid_search = GridSearchCV(estimator = rf, param_grid = param_grid,
scoring = 'neg_mean_absolute_error', cv = 3,
n_jobs = -1, verbose = 2)
grid_search.fit(train_features, train_labels)
best_grid = grid_search.best_estimator_
evaluate(best_grid, test_features, test_labels)
################ Fine-tune around the best randomized-search parameters ######################
######## Build the random forest model ###################
9,010 | d8ba2557e20920eaadd2fd35f0ebdf1b4a5b33da | """Unit tests for misc. ticket functions."""
from pdm_utils.classes import bundle
from pdm_utils.classes import genome
from pdm_utils.classes import ticket
from pdm_utils.classes import eval
from pdm_utils.functions import tickets
from pdm_utils.constants import constants
import unittest
class TestTicketFunctions1(unittest.TestCase):
def setUp(self):
self.required_keys = constants.IMPORT_TABLE_STRUCTURE["required"]
self.optional_keys = constants.IMPORT_TABLE_STRUCTURE["optional"]
self.keywords = constants.IMPORT_TABLE_STRUCTURE["keywords"]
self.ticket_dict1 = {}
self.ticket_dict1["type"] = "add"
self.ticket_dict1["phage_id"] = "Trixie"
self.ticket_dict1["description_field"] = "product"
self.ticket_dict1["eval_mode"] = "final"
self.ticket_dict1["host_genus"] = "retrieve"
self.ticket_dict1["cluster"] = "retain"
self.ticket_dict1["subcluster"] = "A2"
self.ticket_dict1["accession"] = "parse"
self.ticket_dict2 = {}
self.ticket_dict3 = {}
self.ticket_dict3["type"] = "ADD"
self.ticket_dict3["phage_id"] = "Trixie"
self.ticket_dict3["description_field"] = "PRODUCT"
self.ticket_dict3["eval_mode"] = "FINAL"
self.ticket_dict3["host_genus"] = "RETRIEVE"
self.ticket_dict3["subcluster"] = None
self.ticket_dict3["accession"] = "PARSE"
self.ticket_dict3["retrieve_record"] = "RETAIN"
self.ticket_dict4 = {}
self.ticket_dict4["type"] = "ADD"
self.ticket_dict4["phage_id"] = "Trixie"
def test_modify_import_data_1(self):
"""Verify returns False if there are missing required keys."""
result = tickets.modify_import_data(self.ticket_dict2,
self.required_keys, self.optional_keys, self.keywords)
self.assertFalse(result)
def test_modify_import_data_2(self):
"""Verify returns False if there are extra keys."""
self.ticket_dict3["extra"] = "extra"
result = tickets.modify_import_data(self.ticket_dict3,
self.required_keys, self.optional_keys, self.keywords)
self.assertFalse(result)
def test_modify_import_data_3(self):
"""Verify returns True with keywords identified and values lowercased."""
result = tickets.modify_import_data(self.ticket_dict3,
self.required_keys, self.optional_keys, self.keywords)
with self.subTest():
self.assertTrue(result)
with self.subTest():
self.assertEqual(self.ticket_dict3["host_genus"], "retrieve")
with self.subTest():
self.assertEqual(self.ticket_dict3["retrieve_record"], "retain")
with self.subTest():
self.assertEqual(self.ticket_dict3["subcluster"], "retrieve")
with self.subTest():
self.assertEqual(self.ticket_dict3["accession"], "parse")
with self.subTest():
self.assertEqual(self.ticket_dict3["type"], "add")
with self.subTest():
self.assertEqual(self.ticket_dict3["description_field"], "product")
with self.subTest():
self.assertEqual(self.ticket_dict3["eval_mode"], "final")
def test_modify_import_data_4(self):
"""Verify returns True with completed dictionary from a
minimal add ticket."""
self.ticket_dict4["description_field"] = "product"
self.ticket_dict4["eval_mode"] = "final"
result = tickets.modify_import_data(self.ticket_dict4,
self.required_keys, self.optional_keys, self.keywords)
with self.subTest():
self.assertTrue(result)
with self.subTest():
self.assertEqual(self.ticket_dict4["host_genus"], "retrieve")
with self.subTest():
self.assertEqual(self.ticket_dict4["cluster"], "retrieve")
with self.subTest():
self.assertEqual(self.ticket_dict4["subcluster"], "retrieve")
with self.subTest():
self.assertEqual(self.ticket_dict4["annotation_author"], "1")
with self.subTest():
self.assertEqual(self.ticket_dict4["retrieve_record"], "1")
with self.subTest():
self.assertEqual(self.ticket_dict4["annotation_status"], "draft")
with self.subTest():
self.assertEqual(self.ticket_dict4["accession"], "")
def test_modify_import_data_5(self):
"""Verify returns True with completed dictionary from a
minimal replace ticket."""
self.ticket_dict4["type"] = "replace"
self.ticket_dict4["description_field"] = "product"
self.ticket_dict4["eval_mode"] = "final"
result = tickets.modify_import_data(self.ticket_dict4,
self.required_keys, self.optional_keys, self.keywords)
with self.subTest():
self.assertTrue(result)
with self.subTest():
self.assertEqual(self.ticket_dict4["host_genus"], "retain")
with self.subTest():
self.assertEqual(self.ticket_dict4["cluster"], "retain")
with self.subTest():
self.assertEqual(self.ticket_dict4["subcluster"], "retain")
with self.subTest():
self.assertEqual(self.ticket_dict4["annotation_author"], "retain")
with self.subTest():
self.assertEqual(self.ticket_dict4["retrieve_record"], "retain")
with self.subTest():
self.assertEqual(self.ticket_dict4["annotation_status"], "final")
with self.subTest():
self.assertEqual(self.ticket_dict4["accession"], "retain")
def test_parse_import_ticket_data_1(self):
"""Verify ticket is generated from correct data dictionary."""
tkt = tickets.parse_import_ticket_data(self.ticket_dict1)
with self.subTest():
self.assertEqual(tkt.type, "add")
with self.subTest():
self.assertEqual(tkt.phage_id, "Trixie")
with self.subTest():
self.assertEqual(tkt.description_field, "product")
with self.subTest():
self.assertEqual(tkt.eval_mode, "final")
with self.subTest():
self.assertEqual(len(tkt.data_dict.keys()), 8)
with self.subTest():
self.assertEqual(tkt.data_retrieve, set(["host_genus"]))
with self.subTest():
self.assertEqual(tkt.data_retain, set(["cluster"]))
with self.subTest():
self.assertEqual(tkt.data_parse, set(["accession"]))
with self.subTest():
self.assertEqual(tkt.data_add, set(["subcluster"]))
def test_parse_import_ticket_data_2(self):
"""Verify ticket is generated from correct data dictionary with
no data in 'retain', 'retrieve', or 'parse' sets."""
self.ticket_dict1["host_genus"] = "Mycobacterium"
self.ticket_dict1["cluster"] = "A"
self.ticket_dict1["subcluster"] = "A2"
self.ticket_dict1["accession"] = "ABC123"
tkt = tickets.parse_import_ticket_data(self.ticket_dict1)
with self.subTest():
self.assertEqual(tkt.type, "add")
with self.subTest():
self.assertEqual(tkt.phage_id, "Trixie")
with self.subTest():
self.assertEqual(tkt.description_field, "product")
with self.subTest():
self.assertEqual(tkt.eval_mode, "final")
with self.subTest():
self.assertEqual(len(tkt.data_dict.keys()), 8)
with self.subTest():
self.assertEqual(tkt.data_retrieve, set())
with self.subTest():
self.assertEqual(tkt.data_retain, set())
with self.subTest():
self.assertEqual(tkt.data_parse, set())
with self.subTest():
self.assertEqual(tkt.data_add, set(["subcluster", "host_genus",
"cluster", "accession"]))
def test_parse_import_ticket_data_3(self):
"""Verify ticket is generated from correct data dictionary with
no data in 'add' sets."""
self.ticket_dict1["host_genus"] = "retrieve"
self.ticket_dict1["cluster"] = "retrieve"
self.ticket_dict1["subcluster"] = "retrieve"
self.ticket_dict1["accession"] = "retrieve"
tkt = tickets.parse_import_ticket_data(self.ticket_dict1)
with self.subTest():
self.assertEqual(tkt.type, "add")
with self.subTest():
self.assertEqual(tkt.phage_id, "Trixie")
with self.subTest():
self.assertEqual(tkt.description_field, "product")
with self.subTest():
self.assertEqual(tkt.eval_mode, "final")
with self.subTest():
self.assertEqual(len(tkt.data_dict.keys()), 8)
with self.subTest():
self.assertEqual(tkt.data_retrieve, set(["subcluster", "host_genus",
"cluster", "accession"]))
with self.subTest():
self.assertEqual(tkt.data_retain, set())
with self.subTest():
self.assertEqual(tkt.data_parse, set())
with self.subTest():
self.assertEqual(tkt.data_add, set())
def test_set_empty_1(self):
"""Verify one None value is set to ''."""
data_dict = {"type":"add","cluster":None}
tickets.set_empty(data_dict)
with self.subTest():
self.assertEqual(data_dict["type"], "add")
with self.subTest():
self.assertEqual(data_dict["cluster"], "")
def test_set_keywords_1(self):
"""Verify one value is lowercased."""
data_dict = {"type":"ADD",
"cluster":"RETRIEVE",
"subcluster": "NONE",
"host_genus": "PARSE",
"retrieve_record": "RETAIN"}
keywords = set(["retrieve", "retain"])
tickets.set_keywords(data_dict, self.keywords)
with self.subTest():
self.assertEqual(data_dict["type"], "ADD")
with self.subTest():
self.assertEqual(data_dict["cluster"], "retrieve")
with self.subTest():
self.assertEqual(data_dict["subcluster"], "none")
with self.subTest():
self.assertEqual(data_dict["host_genus"], "parse")
with self.subTest():
self.assertEqual(data_dict["retrieve_record"], "retain")
def test_set_missing_keys_1(self):
"""Verify one missing key is added."""
data_dict = {"type":"add", "cluster":""}
key_set = set(["type", "host_genus"])
tickets.set_missing_keys(data_dict, key_set)
with self.subTest():
self.assertEqual(len(data_dict.keys()), 3)
with self.subTest():
self.assertEqual(data_dict["host_genus"], "")
def test_set_missing_keys_2(self):
"""Verify no missing key is added."""
data_dict = {"type":"add", "cluster":""}
key_set = set(["type", "cluster"])
tickets.set_missing_keys(data_dict, key_set)
self.assertEqual(len(data_dict.keys()), 2)
def test_set_dict_value_1(self):
"""Verify empty value is replaced with first value."""
data_dict = {"type":"add", "cluster":""}
tickets.set_dict_value(data_dict, "cluster", "A", "B")
self.assertEqual(data_dict["cluster"], "A")
def test_set_dict_value_2(self):
"""Verify empty value is replaced with second value."""
data_dict = {"type":"replace", "cluster":""}
tickets.set_dict_value(data_dict, "cluster", "A", "B")
self.assertEqual(data_dict["cluster"], "B")
def test_set_dict_value_3(self):
"""Verify non-empty value is not replaced."""
data_dict = {"type":"replace", "cluster":"C"}
tickets.set_dict_value(data_dict, "cluster", "A", "B")
self.assertEqual(data_dict["cluster"], "C")
def test_construct_tickets_1(self):
"""Verify two tickets are constructed correctly.
The first ticket contains all required and optional fields.
The second ticket contains all required fields."""
dict_list = [self.ticket_dict1, self.ticket_dict4]
eval_data_dict = {"eval_mode": "custom_eval_mode",
"eval_flag_dict": {"check_locus_tag": False}}
list_of_tickets = tickets.construct_tickets(dict_list,
eval_data_dict, "function", self.required_keys,
self.optional_keys, self.keywords)
with self.subTest():
self.assertEqual(len(list_of_tickets), 2)
with self.subTest():
self.assertEqual(list_of_tickets[0].id, 1)
with self.subTest():
self.assertEqual(list_of_tickets[0].eval_mode, "final")
with self.subTest():
self.assertEqual(list_of_tickets[0].description_field, "product")
with self.subTest():
self.assertTrue(list_of_tickets[0].eval_flags["check_locus_tag"])
with self.subTest():
self.assertEqual(list_of_tickets[1].id, 2)
with self.subTest():
self.assertEqual(list_of_tickets[1].eval_mode, "custom_eval_mode")
with self.subTest():
self.assertEqual(list_of_tickets[1].description_field, "function")
with self.subTest():
self.assertFalse(list_of_tickets[1].eval_flags["check_locus_tag"])
def test_construct_tickets_2(self):
"""Verify one ticket is constructed correctly. The second data
dictionary is not structured correctly."""
dict_list = [self.ticket_dict1, self.ticket_dict2]
eval_data_dict = {"eval_mode": "custom_eval_mode",
"eval_flag_dict": {}}
list_of_tickets = tickets.construct_tickets(dict_list,
eval_data_dict, "function", self.required_keys,
self.optional_keys, self.keywords)
with self.subTest():
self.assertEqual(len(list_of_tickets), 1)
def test_construct_tickets_3(self):
"""Verify four tickets constructed correctly. The first two tickets
contain all required and optional fields. The second two tickets
contain all required fields. Verify that each eval_flag dictionary
is a separate object that can be modified without impacting the other
eval_flag dictionaries."""
tkt_dict1 = {}
tkt_dict1["type"] = "add"
tkt_dict1["phage_id"] = "Trixie"
tkt_dict1["description_field"] = "product"
tkt_dict1["eval_mode"] = "final"
tkt_dict2 = {}
tkt_dict2["type"] = "add"
tkt_dict2["phage_id"] = "L5"
tkt_dict2["description_field"] = "product"
tkt_dict2["eval_mode"] = "final"
tkt_dict3 = {}
tkt_dict3["type"] = "add"
tkt_dict3["phage_id"] = "RedRock"
tkt_dict4 = {}
tkt_dict4["type"] = "add"
tkt_dict4["phage_id"] = "Bxb1"
dict_list = [tkt_dict1, tkt_dict2, tkt_dict3, tkt_dict4]
eval_data_dict = {"eval_mode": "custom_eval_mode",
"eval_flag_dict": {"check_locus_tag": False}}
tkt_list = tickets.construct_tickets(dict_list,
eval_data_dict, "function", self.required_keys,
self.optional_keys, self.keywords)
tkt_list[0].eval_flags["check_locus_tag"] = 0
tkt_list[1].eval_flags["check_locus_tag"] = 1
tkt_list[2].eval_flags["check_locus_tag"] = 2
tkt_list[3].eval_flags["check_locus_tag"] = 3
with self.subTest():
self.assertEqual(tkt_list[0].eval_flags["check_locus_tag"], 0)
with self.subTest():
self.assertEqual(tkt_list[1].eval_flags["check_locus_tag"], 1)
with self.subTest():
self.assertEqual(tkt_list[2].eval_flags["check_locus_tag"], 2)
with self.subTest():
self.assertEqual(tkt_list[3].eval_flags["check_locus_tag"], 3)
def test_identify_duplicates_1(self):
"""Verify no duplicates are produced."""
ticket1 = ticket.ImportTicket()
ticket1.id = 1
ticket1.type = "replace"
ticket1.phage_id = "Trixie"
ticket2 = ticket.ImportTicket()
ticket2.id = 2
ticket2.type = "replace"
ticket2.phage_id = "L5"
null_set = set(["none"])
list_of_tickets = [ticket1, ticket2]
id_dupes, phage_id_dupes = \
tickets.identify_duplicates(list_of_tickets, null_set=null_set)
with self.subTest():
self.assertEqual(len(id_dupes), 0)
with self.subTest():
self.assertEqual(len(phage_id_dupes), 0)
def test_identify_duplicates_2(self):
"""Verify two tickets with 'none' duplicates
do not generate an error."""
ticket1 = ticket.ImportTicket()
ticket1.id = "none"
ticket1.type = "replace"
ticket1.phage_id = "none"
ticket2 = ticket.ImportTicket()
ticket2.id = "none"
ticket2.type = "replace"
ticket2.phage_id = "none"
null_set = set(["none"])
list_of_tickets = [ticket1, ticket2]
id_dupes, phage_id_dupes = \
tickets.identify_duplicates(list_of_tickets, null_set=null_set)
with self.subTest():
self.assertEqual(len(id_dupes), 0)
with self.subTest():
self.assertEqual(len(phage_id_dupes), 0)
def test_identify_duplicates_3(self):
"""Verify two tickets with id duplicates
do generate an error."""
ticket1 = ticket.ImportTicket()
ticket1.id = 1
ticket1.type = "replace"
ticket1.phage_id = "L5"
ticket2 = ticket.ImportTicket()
ticket2.id = 1
ticket2.type = "replace"
ticket2.phage_id = "Trixie"
null_set = set(["none"])
list_of_tickets = [ticket1, ticket2]
id_dupes, phage_id_dupes = \
tickets.identify_duplicates(list_of_tickets, null_set=null_set)
with self.subTest():
self.assertEqual(len(id_dupes), 1)
with self.subTest():
self.assertEqual(len(phage_id_dupes), 0)
def test_identify_duplicates_4(self):
"""Verify two tickets with Primary Phage ID duplicates
do generate an error."""
ticket1 = ticket.ImportTicket()
ticket1.id = 1
ticket1.type = "replace"
ticket1.phage_id = "Trixie"
ticket2 = ticket.ImportTicket()
ticket2.id = 2
ticket2.type = "replace"
ticket2.phage_id = "Trixie"
null_set = set(["none"])
list_of_tickets = [ticket1, ticket2]
id_dupes, phage_id_dupes = \
tickets.identify_duplicates(list_of_tickets, null_set=null_set)
with self.subTest():
self.assertEqual(len(id_dupes), 0)
with self.subTest():
self.assertEqual(len(phage_id_dupes), 1)
def test_identify_duplicates_6(self):
"""Verify two tickets with multiple duplicates
do generate multiple errors."""
ticket1 = ticket.ImportTicket()
ticket1.id = 1
ticket1.type = "replace"
ticket1.phage_id = "Trixie"
ticket2 = ticket.ImportTicket()
ticket2.id = 1
ticket2.type = "replace"
ticket2.phage_id = "Trixie"
null_set = set(["none"])
list_of_tickets = [ticket1, ticket2]
id_dupes, phage_id_dupes = \
tickets.identify_duplicates(list_of_tickets, null_set=null_set)
with self.subTest():
self.assertEqual(len(id_dupes), 1)
with self.subTest():
self.assertEqual(len(phage_id_dupes), 1)
class TestTicketFunctions2(unittest.TestCase):
def setUp(self):
self.ticket1 = ticket.ImportTicket()
self.ticket2 = ticket.ImportTicket()
self.ticket1.phage_id = "Trixie"
self.ticket2.phage_id = "L5"
self.bundle1 = bundle.Bundle()
self.bundle2 = bundle.Bundle()
self.bundle1.ticket = self.ticket1
self.bundle2.ticket = self.ticket2
class TestTicketFunctions3(unittest.TestCase):
def setUp(self):
self.data_dict = {}
self.data_dict["host_genus"] = "Mycobacterium smegmatis"
self.data_dict["accession"] = "ABC123.1"
self.data_dict["annotation_status"] = "final"
self.data_dict["cluster"] = "A"
self.data_dict["subcluster"] = "A2"
self.data_dict["annotation_author"] = 1
self.data_dict["retrieve_record"] = 1
self.tkt1 = ticket.ImportTicket()
self.tkt1.phage_id = "Trixie_Draft"
self.tkt1.data_dict = self.data_dict
def test_get_genome_1(self):
"""Verify no data from ticket is added to genome."""
self.tkt1.data_add = set([""])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.id, "Trixie")
with self.subTest():
self.assertEqual(gnm.name, "Trixie_Draft")
with self.subTest():
self.assertEqual(gnm.type, "add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.cluster, "")
with self.subTest():
self.assertEqual(gnm.subcluster, "")
with self.subTest():
self.assertEqual(gnm.annotation_status, "")
with self.subTest():
self.assertEqual(gnm.annotation_author, -1)
with self.subTest():
self.assertEqual(gnm.retrieve_record, -1)
with self.subTest():
self.assertEqual(gnm.accession, "")
def test_get_genome_2(self):
"""Verify host_genus data from ticket is added to genome."""
self.tkt1.data_add = set(["host_genus"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "Mycobacterium")
with self.subTest():
self.assertEqual(gnm.cluster, "")
def test_get_genome_3(self):
"""Verify cluster data from ticket is added to genome."""
self.tkt1.data_add = set(["cluster"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.cluster, "A")
def test_get_genome_4(self):
"""Verify subcluster data from ticket is added to genome."""
self.tkt1.data_add = set(["subcluster"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.subcluster, "A2")
def test_get_genome_5(self):
"""Verify annotation_status data from ticket is added to genome."""
self.tkt1.data_add = set(["annotation_status"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.annotation_status, "final")
def test_get_genome_6(self):
"""Verify annotation_author data from ticket is added to genome."""
self.tkt1.data_add = set(["annotation_author"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.annotation_author, 1)
def test_get_genome_7(self):
"""Verify retrieve_record data from ticket is added to genome."""
self.tkt1.data_add = set(["retrieve_record"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.retrieve_record, 1)
def test_get_genome_8(self):
"""Verify accession data from ticket is added to genome."""
self.tkt1.data_add = set(["accession"])
gnm = tickets.get_genome(self.tkt1, gnm_type="add")
with self.subTest():
self.assertEqual(gnm.host_genus, "")
with self.subTest():
self.assertEqual(gnm.accession, "ABC123")
if __name__ == '__main__':
unittest.main()
|
9,011 | fcf4cb5c47e4aa51d97b633ecdfec65246e82bd8 | from tkinter import *
from tkinter.scrolledtext import ScrolledText
def load():
with open(filename.get()) as file:
        # delete everything from line 1, character 0, through END
        # (INSERT is the current insertion point)
contents.delete('1.0', END)
contents.insert(INSERT, file.read())
def save():
with open(filename.get(), 'w') as file:
file.write(contents.get('1.0', END))
# initialize window widget
top = Tk()
top.title("Simple Editor")
# initialize text field with scroll bar
contents = ScrolledText()
# pack manager
contents.pack(side=BOTTOM, expand=True, fill=BOTH)
# text field
filename = Entry()
filename.pack(side=LEFT, expand=True, fill=X)
Button(text='Open', command=load).pack(side=LEFT)
Button(text='Save', command=save).pack(side=LEFT)
mainloop()
|
9,012 | 6f53702d9265a7fc57d2ec2e47dc35a0bc7a9f87 | from pydub import AudioSegment
import sys
import tensorflow as tf
import numpy as np
from adwtmk.audio import Audio
from adwtmk.encoder import *
from adwtmk.decoder import *
class DAE(object):
def __init__(self,model_name):
self.model_name = model_name
self.process = 0
self.loss = 0
self.origin_loss = 0
self.core_size = 3
self.batch_size = 600
self.Epoches = 100
def _get_batches(self,batch_size,data,core_size):
assert batch_size % core_size == 0
dim_0 = len(data)
#print("dim_0:",dim_0)
length = len(data[0])
num_batches = length // batch_size
remainder_length = length % batch_size
res = list()
for i in range(num_batches):
res.append(data[:,i*batch_size:(i+1)*batch_size])
res = [np.array(x,np.float64).reshape(dim_0,batch_size//core_size,core_size) for x in res]
remainder = data[:,-remainder_length:]
return res,remainder
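    # Example (hypothetical shapes): with batch_size=6, core_size=3 and data of
    # shape (2, 14), this returns two batches of shape (2, 2, 3) plus a (2, 2) remainder.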
#np.set_printoptions(threshold=1e6)
#def _my_config():
#core_size = 5
#batch_size = 500
#Epoches = 200
def fast_training(self,sound):
self.core_size = 100
self.batch_size = 1000
self.Epoches = 50
self._main(sound,100,1000,50)
def medium_training(self,sound):
self.core_size = 5
self.batch_size = 500
self.Epoches = 100
self._main(sound,5,500,100)
def slow_training(self,sound):
self.core_size = 3
self.batch_size = 300
self.Epoches = 100
self._main(sound,3,300,150)
def get_train_result_music_file(self):
        if self.new_sound:
            return self.new_sound
        else:
            raise Exception("You must run the training first!")
def get_current_training_process(self):
return self.process
def test(self,sound):
audio_matrix = sound.get_reshaped_samples()
#max_value = np.max(audio_matrix)
#min_value = np.min(audio_matrix)
#audio_matrix = (audio_matrix-min_value) / (max_value-min_value)
mean_value = np.mean(audio_matrix)
std_value = np.std(audio_matrix)
audio_matrix = (audio_matrix-mean_value) / std_value
channels = len(audio_matrix)
batches,remainder = self._get_batches(batch_size=self.batch_size,core_size=self.core_size,data=audio_matrix)
losses = list()
for i in range(len(batches)):
dropout_indicator = np.random.rand()
if (dropout_indicator <= 0.2):
losses.append(np.sum(abs(batches[i])))
batches[i] *= 0.00
losses.append(0)
sum_losses = np.sum(np.array(losses).reshape(-1))
#print("losses:")
#print(np.array(losses).reshape(-1))
#print(sum_losses)
test_batches = np.array(batches,np.float64).reshape(channels,-1)
test_batches = np.concatenate((test_batches,remainder),axis=1)
count = audio_matrix.shape
count = count[0]*count[1]
        self.origin_loss = sum_losses / float(count)
test_batches = test_batches * std_value + mean_value
test_sound = sound.spawn(test_batches)
self._main(test_sound,self.core_size,self.batch_size,1,1.0)
return test_sound,self.new_sound
def _main(self,sound,core_size,batch_size,Epoches,drop_out_rate=0.9):
self.new_sound = None
self.process = 0
self.loss = 0
#print(sound.frame_rate,sound.duration_seconds, len(sound.get_array_of_samples()))
audio_matrix = sound.get_reshaped_samples()
#max_value = np.max(audio_matrix)
#min_value = np.min(audio_matrix)
#audio_matrix = (audio_matrix-min_value) / (max_value-min_value)
mean_value = np.mean(audio_matrix)
std_value = np.std(audio_matrix)
audio_matrix = (audio_matrix-mean_value) / std_value
batches,remainder = self._get_batches(batch_size=batch_size,core_size=core_size,data=audio_matrix)
steps = batch_size // core_size
channels = len(audio_matrix)
best_output = ""
with tf.Session() as sess:
fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.BasicLSTMCell(core_size),drop_out_rate)
fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell]*2)
bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.BasicLSTMCell(core_size),drop_out_rate)
bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell]*2)
input_data = tf.placeholder(shape=[channels,steps,core_size],dtype=tf.float64)
in_weights = tf.get_variable(name="in_weight",shape=[steps*core_size,steps*core_size],dtype=tf.float64)
in_bias = tf.get_variable(name="in_bias",shape=[core_size*steps],dtype=tf.float64)
hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data,(channels,-1)),in_weights,in_bias))
hidden_data_out = tf.reshape(hidden_data,[channels,steps,core_size])
bi_outputs,last_state = tf.nn.bidirectional_dynamic_rnn(fw_rnn_cell,bw_rnn_cell,hidden_data_out,dtype=tf.float64)
out_weights = tf.get_variable(name="out_weight",shape=[steps*core_size*2,steps*core_size],dtype=tf.float64)
out_bias = tf.get_variable(name="out_bias",shape=[core_size*steps],dtype=tf.float64)
outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs,2),(channels,-1)),out_weights,out_bias)
#outputs,last_state = tf.nn.dynamic_rnn(fw_rnn_cell,input_data,dtype=tf.float64)
loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(input_data,(channels,-1)),outputs)))
train = tf.train.AdamOptimizer(0.001).minimize(loss)
saver = tf.train.Saver()
train_loss = float('inf')
try:
saver.restore(sess,self.model_name)
print("model restored")
except Exception:
sess.run(tf.global_variables_initializer())
print("restore failed, initializing randomly")
for i in range(Epoches):
loss_temp = 0
outputs_temp = list()
for item in batches:
if (drop_out_rate < 1):
epoch_outputs,epoch_loss,_ = sess.run([outputs,loss,train],feed_dict={
input_data:item
})
else:
epoch_outputs,epoch_loss = sess.run([outputs,loss],feed_dict={
input_data:item
})
loss_temp += epoch_loss
outputs_temp.append(epoch_outputs)
loss_temp /= len(batches)
if (i == 0 and drop_out_rate<1):
self.origin_loss = loss_temp
self.process = i/Epoches
self.loss = loss_temp
#print("process:%f,loss:%f" % (i/Epoches,loss_temp))
if (loss_temp < train_loss):
train_loss = loss_temp
if (drop_out_rate < 1):
saver.save(sess,self.model_name)
best_output = outputs_temp
#best_output = best_output.append(remainder)
best_output = np.array(best_output,np.float64).reshape(channels,-1)
best_output = np.concatenate((best_output,remainder),axis=1)
#best_output = best_output.T
#best_output = best_output.reshape(-1)
best_output = best_output*std_value+mean_value
#best_output *= max_value-min_value
#best_output += min_value
self.new_sound = sound.spawn(best_output)
#new_sound.export("test.flac","flac")
#ex.add_artifact(filename="./test.flac")
#ex.add_artifact(filename="./rnn_model_key_multirnn_bi_input.ckpt*")
#audio_matrix = np.array(audio_matrix,np.float64).reshape(channels,-1)
#audio_matrix = audio_matrix.T
#audio_matrix = audio_matrix.reshape(-1)
#audio_matrix = audio_matrix * (max_value-min_value)+min_value
audio_matrix = audio_matrix * std_value + mean_value
new_sound = sound.spawn(audio_matrix)
#new_sound.export("test2.flac","flac")
#sound = Audio.from_file("./mark.flac", format="flac")
#fast_training(sound)
|
9,013 | 8aacc8dbfdd70d24689ae17b9c29b1ffc80fb231 |
from Models.AdminPageModel import AdminPageModel
class StudentDebtsController:
def __init__(self, master, model, view):
self._master = master
self._model = model
self._view = view
def BackToAdminPage(self):
from Views.AdminPage import AdminPage
self._master.switch_frame(AdminPage, AdminPageModel)
def GetStudentsInfo(self, text):
studentsList = self._model.GetStudentsList(text)
return studentsList
def GetStudentDebtsAndShowTable(self, text):
self._view.HideUserInfo()
if (not text):
self._view.ClearTable()
self._view.ShowNoDataLabelWithText("No issues found. Select user first.")
else:
self._view.ClearTable()
info = self._model.GetStudentInfo(text)
self._view.ShowUserInfo(info[0], info[1], info[2], info[3])
studentDebts = self._model.GetStudentDebts(text)
if (len(studentDebts) > 0):
self._view.HideNoDataLabel()
else:
self._view.ShowNoDataLabelWithText("Student don't have issues yet.")
self._view.FillTable(studentDebts)
def ReturnBooks(self, idCopies):
if (len(idCopies) > 0):
try:
student_id = self._model.GetStudentId(idCopies[0])
self._model.ReturnBooks(idCopies)
self._view.ClearTable()
studentDebts = self._model.GetStudentDebts(student_id)
self._view.FillTable(studentDebts)
singOrPlural = 'Book'
if (len(idCopies) > 1):
singOrPlural = 'Books'
self._view.SetMessageLabel(singOrPlural + " successfully returned", "green")
except Exception as e:
print(e)
self._view.SetMessageLabel("Something went wrong", "red")
else:
self._view.SetMessageLabel("0 books have been selected. Nothing to return", "red")
|
9,014 | e221b840239b6e9af735238760fd1157f333c1a4 |
def filter_long_words(word_lng, words_list):
return [word for word in words_list if len(word) > word_lng]
assert filter_long_words(5, ['piwo', 'wino', 'czasopisma', 'ubrania', 'napoje']) == ['czasopisma', 'ubrania', 'napoje']
|
9,015 | e307bcc28526081141f1f2204c225d8e5f0100a8 | # Generated by Django 3.0.3 on 2020-04-24 14:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('HMS', '0009_auto_20200329_0911'),
]
operations = [
migrations.CreateModel(
name='mess_timetable',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.CharField(choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner', 'Dinner')], max_length=10)),
('Monday', models.CharField(max_length=100)),
('Tuesday', models.CharField(max_length=100)),
('Wednesday', models.CharField(max_length=100)),
('Thursday', models.CharField(max_length=100)),
('Friday', models.CharField(max_length=100)),
('Saturday', models.CharField(max_length=100)),
('Sunday', models.CharField(max_length=100)),
],
),
]
|
9,016 | 5d568c5ac9040ad93749c27bd6fe1a956e7456f7 | #!/usr/bin/env python3
# crits_ldap.py
# This connects to an LDAP server and pulls data about all users.
# Then, it either updates existing targets in CRITS or creates a new entry.
import json
import sys
import datetime
import logging
import logging.config
from configparser import ConfigParser
from ldap3 import (Server, Connection, SIMPLE, SYNC, ASYNC, SUBTREE, ALL,
ALL_ATTRIBUTES)
from pymongo import MongoClient
config = ConfigParser()
# ConfigParser.read() silently returns an empty list when the file is missing,
# so check its return value instead of catching an exception that never fires
if not config.read('etc/ldap.ini'):
raise SystemExit('ldap.ini was not found or was not accessible.')
try:
logging.config.fileConfig('etc/logging.ini')
log = logging.getLogger("ldap")
except Exception as e:
raise SystemExit("unable to load logging configuration file {0}: {1}"
.format('logging.ini', str(e)))
# load ldap settings from configuration file
ldap_server = config.get('ldap', 'ldap_server')
ldap_bind_user = config.get('ldap', 'ldap_bind_user')
ldap_bind_password = config.get('ldap', 'ldap_bind_password')
ldap_base_dn = config.get('ldap', 'ldap_base_dn')
crits_user = config.get('crits', 'user')
crits_server = config.get('crits', 'server')
TARGET_SCHEMA_VERSION = 3
def ldap_paged_query(query):
response_entries = []
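# Simple Paged Results control (OID 1.2.840.113556.1.4.319): fetch 100 entries
# per page and feed the opaque cookie from each result back into the next
# search until the server returns an empty cookie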
try:
server = Server(ldap_server, port = 389, get_info = ALL)
with Connection(
server,
client_strategy = SYNC,
user=ldap_bind_user,
password=ldap_bind_password,
authentication=SIMPLE,
check_names=True) as c:
log.debug("running ldap query for ({0})".format(query))
c.search(ldap_base_dn, '({0})'.format(query), SUBTREE, attributes =
ALL_ATTRIBUTES, paged_criticality=True, paged_size=100)
cookie = c.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
# a little hack to move the result into json
response = json.loads(c.response_to_json())
if len(response['entries']) < 1:
return None
for entry in response['entries']:
response_entries.append(entry)
while cookie:
c.search(ldap_base_dn, '({0})'.format(query), SUBTREE,
attributes = ALL_ATTRIBUTES, paged_criticality=True,
paged_size=100, paged_cookie=cookie)
# a little hack to move the result into json
cookie = c.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
response = json.loads(c.response_to_json())
if len(response['entries']) < 1:
return None
for entry in response['entries']:
response_entries.append(entry)
return response_entries
except Exception as e:
log.warning("unable to perform ldap query: {0}".format(str(e)))
return response_entries
def add_results_to_crits(entries):
"""Adds LDAP data to CRITS targets.
Args:
entries: dict with all the entry data from LDAP
"""
client = MongoClient(crits_server, 27017)
db = client.crits
targets = db.targets
for result in entries:
firstname = ''
lastname = ''
if 'givenName' in result['attributes']:
firstname = result['attributes']['givenName']
if 'sn' in result['attributes']:
lastname = result['attributes']['sn']
department = ''
if 'department' in result['attributes']:
department = result['attributes']['department']
orgid = ''
if 'cn' in result['attributes']:
orgid = result['attributes']['cn']
company = ''
if 'company' in result['attributes']:
company = result['attributes']['company']
title = ''
if 'title' in result['attributes']:
title = result['attributes']['title']
mail = ''
if 'mail' in result['attributes']:
mail = result['attributes']['mail']
tmpmail = mail.strip()
if tmpmail == '':
continue
mongo_result = targets.find_one( { 'email_address' : mail.lower() } )
if mongo_result:
log.debug('Found id of {} for the target {}'.format(
mongo_result['_id'], mail))
modified = datetime.datetime.now()
data = {
'firstname' : firstname,
'lastname' : lastname,
'division' : company,
'department' : department,
'organization_id' : orgid,
'title' : title,
'modified' : modified
}
# The user is already in crits, do we need to
# update any information?
update_information = False
for key in data.keys():
if key == 'modified':
continue
if key in mongo_result:
if mongo_result[key] != data[key]:
update_information = True
else:
update_information = True
if update_information:
update_result = targets.update_one( { 'email_address' :
mail.lower() }, { '$set' : data } )
log.info("Records matched: {}, modified: {}, email_address: {}"
.format(update_result.matched_count,
update_result.modified_count, mail.lower()))
else:
# The user is not in CRITS, let's add the information
created = datetime.datetime.now()
data = {
"status" : "New",
"created" : created,
"modified" : created,
"schema_version" : TARGET_SCHEMA_VERSION,
"actions" : [ ],
"tickets" : [ ],
"bucket_list" : [ ],
"campaign" : [ ],
"locations" : [ ],
"objects" : [ ],
"relationships" : [ ],
"releasability" : [ ],
"screenshots" : [ ],
"sectors" : [ ],
"email_address" : mail.lower(),
"email_count" : 0,
'firstname' : firstname,
'lastname' : lastname,
'division' : company,
'department' : department,
'organization_id' : orgid,
'title' : title,
'note' : ''
}
insert_result = targets.insert_one( data )
if insert_result:
log.info("Record inserted: {}".format(
insert_result.inserted_id ))
else:
log.error("Insert failed for {}".format(mail.lower()))
log.info('Beginning LDAP update.')
# Before we do anything, we need to connect to the crits server and make sure
# the schema version is the same for our target collection
client = MongoClient(crits_server, 27017)
db = client.crits
targets = db.targets
tmp_target = targets.find_one()
if 'schema_version' not in tmp_target:
log.error("schema_version not found in target result.")
sys.exit(1)
if tmp_target['schema_version'] != TARGET_SCHEMA_VERSION:
log.error("schema_version has changed (found {}, expected {}). Check "
"CRITS target table.".format(tmp_target['schema_version'],
TARGET_SCHEMA_VERSION))
sys.exit(1)
log.info('Running LDAP query.')
results = ldap_paged_query("mail=*")
if results is not None:
add_results_to_crits(results)
else:
log.info("No results returned from LDAP")
log.info('LDAP update complete.')
|
9,017 | 4bdff51a4e277889f4d54d4ace7a0f5384e74f1e | import argparse, os, joblib, json, torch
import pandas as pd
from utils import regression, dataset, lstm
PREDICT_X_SKIP_COLS = ["date", "weight", "ts_id", "resp", "resp_1", "resp_2", "resp_3", "resp_4"]
X_COLS = ["resp_1", "resp_2", "resp_3", "resp_4"]
Y_OUTPUT_COLS = ["date", "ts_id"]
Y_COL = ["resp"]
METRICS_INFO = ["mse", "r2", "mape"]
DROPOUT = 0.25
HIDDEN_SIZE = 20
def get_prediction_data(data, model_path):
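# the pre-trained regression model (a joblib pickle) predicts the four resp_*
# columns from the remaining features; these predictions later serve as an
# alternative input for the LSTM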
x = data.drop(PREDICT_X_SKIP_COLS, axis=1)
y = data[X_COLS]
model = joblib.load(model_path)
(y_pred, metrics) = regression.evaluate(model, x, y, METRICS_INFO)
y_pred = pd.DataFrame(data=y_pred, columns=X_COLS)
return (y_pred, metrics)
def prepare_data(data_folder, model_path):
(train, test, na_value) = dataset.read_data(data_folder)
x_train = train[X_COLS]
y_train = train[Y_COL]
x_test = test[X_COLS]
y_test = test[Y_COL]
out_train = train[Y_OUTPUT_COLS]
out_test = test[Y_OUTPUT_COLS]
(x_pred_train , metrics_train) = get_prediction_data(train, model_path)
(x_pred_test, metrics_test) = get_prediction_data(test, model_path)
train = { "x": x_train, "y": y_train, "x_pred": x_pred_train, "out": out_train}
test = { "x": x_test, "y": y_test, "x_pred": x_pred_test, "out": out_test}
metrics = {
"reg_train_pred": metrics_train,
"reg_test_pred": metrics_test
}
return (train, test, metrics, na_value)
def postprocess_data(out_data, y_pred):
y_output = out_data.copy()
y_output[Y_COL] = y_pred
return y_output
def train_evaluate(data_folder, output_folder, model_path):
model = lstm.get_model(DROPOUT, len(X_COLS), HIDDEN_SIZE)
print("Preparing data...")
(train, test, metrics, na_value) = prepare_data(data_folder, model_path)
print("Training...")
model = lstm.train(model, train["x"], train["y"])
model = lstm.train(model, train["x_pred"], train["y"])
print("Evaluating...")
(y_pred, metrics_lstm) = lstm.evaluate(model, test["x"],
test["y"], METRICS_INFO)
(y_pred_reg, metrics_reg_lstm) = lstm.evaluate(model,
test["x_pred"], test["y"], METRICS_INFO)
metrics["lstm_pred"] = metrics_lstm
metrics["reg_lstm_pred"] = metrics_reg_lstm
print("Postprocessing data...")
y_output = postprocess_data(test["out"], y_pred)
y_output_reg = postprocess_data(test["out"], y_pred_reg)
output_path = os.path.join(output_folder, "pred.csv")
y_output.to_csv(output_path, index=False)
output_path = os.path.join(output_folder, "pred_reg.csv")
y_output_reg.to_csv(output_path, index=False)
result = { "metrics": metrics, "na_value": na_value }
result_path = os.path.join(output_folder, "result.json")
json_config = json.dumps(result, indent=4)
with open(result_path, "w") as result_file:
result_file.write(json_config)
model_path = os.path.join(output_folder, "lstm.mdl")
torch.save(model, model_path)
print("Output files (model, result, prediction) saved to {}".format(
output_folder))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--data_path", type=str, help="specifies the data folder path",
required=True)
parser.add_argument(
"--output_path", type=str, help="specifies the output folder path",
required=True)
parser.add_argument(
"--regression_model_path", type=str, required = True,
help="specifies the regression model path")
return vars(parser.parse_args())
def main():
args = parse_args()
print("Args: {}".format(args))
data_path = os.path.abspath(args["data_path"])
output_path = os.path.abspath(args["output_path"])
model_path = os.path.abspath(args["regression_model_path"])
train_evaluate(data_path, output_path, model_path)
if __name__ == "__main__":
main()
|
9,018 | da062dfe494b363c8ef3ec9f19af912736aaf77b | """DogGroom URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url
from django.urls import path
from django.contrib.auth import views as auth_views
from .views import *
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^$', IndexView.as_view(), name = 'index'),
url(r'^register/', RegistrationView.as_view(), name = 'register'),
url(r'^home/', HomeView.as_view(), name = 'home'),
url(r'^home_adddog_form/', AddDogFormView.as_view(), name = 'home_adddog_form'),
url(r'^booking_delete_view/', DeleteBookingView.as_view(), name = 'booking_delete_view'),
url(r'^dog_delete_view/', DeleteDogView.as_view(), name='dog_delete_view'),
url(r'^fetch_date_view/', FetchDateView.as_view()),
url(r'^login/$', auth_views.LoginView.as_view(template_name='login.html'), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url(r'^profile/$', UserUpdateView.as_view(), name='profile'),
url(r'^booking_update_view/', BookingUpdateView.as_view(), name='bookingmodify'),
url(r'^dog_update_view/', DogUpdateView.as_view(), name='dogupdateview'),
]
|
9,019 | 5961c593b46a8d3a0f7c62d862cce9a2814e42f4 | from typing import List
import uvicorn
from fastapi import Depends, FastAPI, HTTPException
from sqlalchemy.orm import Session
from . import crud, models, schemas
from .database import SessionLocal, engine
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
def get_db():
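# FastAPI dependency: opens one SQLAlchemy session per request and guarantees
# it is closed once the response has been sent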
db = SessionLocal()
try:
yield db
finally:
db.close()
@app.post("/users/", response_model=schemas.UserCreate)
def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):
db_user = crud.get_user_by_mail(db, user.mail)
if db_user:
raise HTTPException(status_code=400, detail="Email already registered")
return crud.create_user(db=db, user=user)
@app.get("/users/{user_id}", response_model=schemas.User)
def read_user(user_id: int, db: Session = Depends(get_db)):
db_user = crud.get_user(db, user_id)
if db_user is None:
raise HTTPException(status_code=404, detail="User not found")
return db_user
@app.delete("/users/{user_id}", response_model=schemas.User)
def delete_user(user_id: int, db: Session = Depends(get_db)):
db_user = crud.delete_user(db, user_id) # pass the session first, assuming the same signature as the other crud helpers
if db_user is None:
raise HTTPException(status_code=404, detail="User not found")
return db_user
# @app.post("/cars/", response_model=schemas.Car)
# def create_user(car: schemas.CarCreate, db: Session = Depends(get_db)):
#
# if db_car:
# raise HTTPException(status_code=400, detail="License already registered")
# return crud.create_car(db=db, car=car)
@app.get("/demand", response_model=schemas.Demand)
def place_demand(demand: schemas.DemandCreate, db: Session = Depends(get_db)):
db_demand = crud.get_active_demand_user(db, demand.user_id)
if db_demand:
raise HTTPException(status_code=400, detail="The user already has an open demand")
db_demand = crud.create_demand(db, demand)
# TODO: trigger the scheduler
return db_demand
if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000) |
9,020 | 644d0a0d88f1a051e004d271359dcc3df855bd77 | # string exceptions were removed from Python long ago; a minimal Exception
# subclass keeps the original control-flow demonstration working
class Speak(Exception): pass
def hacker():
try:
raise Speak # go to hacker's except
print('not reached')
except Speak:
print('Hello world!')
raise Speak # go to primate's except
def primate():
try:
hacker()
print('not reached')
except Speak:
print('Huh?')
raise Speak # go to mammal's except
def mammal():
try:
primate()
print('not reached')
except Speak:
print('Spam!')
|
9,021 | 265c594b12ea45a2dda12e1157e5ea040f4d6ce4 | from Logic.ProperLogic.helper_classes.reducer import MaxReducer
from Logic.ProperLogic.misc_helpers import log_error
import torch
from itertools import count
import logging
logging.basicConfig(level=logging.INFO)
class Cluster:
metric = 2
def __init__(self, cluster_id, embeddings=None, embeddings_ids=None, label=None, center_point=None):
"""
embeddings must be a flat iterable of embeddings that supports len()
:param embeddings:
:param embeddings_ids:
"""
if label is None:
label = 'Unknown Person'
self.label = label
self.max_id_reducer = MaxReducer()
if embeddings is None:
self.embeddings_dict = dict()
self.num_embeddings = 0
self.center_point = None
self.max_embedding_id = 0
self.max_id_reducer(self.max_embedding_id)
else:
if embeddings_ids is None:
embeddings_ids = count(1)
# cast embeddings to dict
self.embeddings_dict = dict(zip(embeddings_ids, embeddings))
self.num_embeddings = len(self.embeddings_dict)
if center_point is not None:
self.center_point = center_point
else:
self.center_point = self.sum_embeddings(embeddings) / self.num_embeddings
self.max_id_reducer.process_iterable(self.embeddings_dict.keys())
self.max_embedding_id = self.max_id_reducer.get_state()
self.cluster_id = cluster_id
def __len__(self):
return len(self.embeddings_dict)
def set_label(self, label):
self.label = label
def set_cluster_id(self, cluster_id):
self.cluster_id = cluster_id
@classmethod
def set_metric(cls, metric):
cls.metric = metric
def get_embeddings(self, with_embeddings_ids=False, as_dict=False, as_list=False):
if with_embeddings_ids or as_dict:
if as_dict:
return self.embeddings_dict
return self.embeddings_dict.items()
embeddings = self.embeddings_dict.values()
if as_list:
return list(embeddings)
return embeddings
def get_embeddings_ids(self):
return self.embeddings_dict.keys()
def get_size(self):
return len(self.embeddings_dict)
def add_embedding(self, new_embedding, new_embedding_id=None, overwrite=False):
return self.add_embeddings([new_embedding], [new_embedding_id], overwrite)
def add_embeddings(self, new_embeddings, new_embeddings_ids=None, overwrite=False):
if not new_embeddings:
return
if new_embeddings_ids is None:
next_embedding_id = self.max_embedding_id + 1
new_embeddings_ids = count(start=next_embedding_id)
new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))
if overwrite:
self.embeddings_dict.update(new_embeddings_dict)
else:
new_embeddings_dict.update(self.embeddings_dict)
self.embeddings_dict = new_embeddings_dict
self.num_embeddings = len(self.embeddings_dict)
embeddings = self.get_embeddings(as_list=True)
embeddings_sum = self.sum_embeddings(embeddings)
# embeddings_sum covers old and new points alike, so dividing by the current
# count gives the exact centroid; the previous incremental update based on the
# old centroid would have double-counted the old sum
self.center_point = embeddings_sum / self.num_embeddings
def remove_embedding_by_id(self, embedding_id):
try:
embedding = self.embeddings_dict.pop(embedding_id)
except KeyError:
log_error(f'embedding with id {embedding_id} not found.')
return
old_num_embeddings = self.num_embeddings
self.num_embeddings -= 1
# old_center * old_n equals the sum of all stored embeddings, so subtracting
# the removed embedding and dividing by the new count is exact
try:
self.center_point = (old_num_embeddings * self.center_point - embedding) / self.num_embeddings
except ZeroDivisionError: # num_embeddings is 0
self.center_point = None
def get_center_point(self):
return self.center_point
def get_embedding(self, embedding_id):
return self.embeddings_dict[embedding_id]
def contains_embedding(self, embedding_id):
return self.embeddings_dict.get(embedding_id) is not None
def compute_dist_to_center(self, embedding):
return self.compute_dist(self.center_point, embedding)
@classmethod
def compute_dist(cls, embedding1, embedding2, metric=None):
if metric is None:
metric = cls.metric
return float(torch.dist(embedding1, embedding2, p=metric))
@staticmethod
def sum_embeddings(embeddings):
# return reduce(torch.add, embeddings)
return torch.sum(torch.stack(embeddings), dim=0)
|
9,022 | 14ce803e3deb529b489c150c7ecc702118448acb | from typing import Union, Tuple
import numpy as np
from UQpy.utilities.kernels.baseclass.GrassmannianKernel import GrassmannianKernel
class ProjectionKernel(GrassmannianKernel):
def __init__(self, kernel_parameter: Union[int, float] = None):
"""
:param kernel_parameter: Number of independent p-planes of each Grassmann point.
"""
super().__init__(kernel_parameter)
def element_wise_operation(self, xi_j: Tuple) -> float:
"""
Compute the Projection kernel entry for a tuple of points on the Grassmann manifold.
:param xi_j: Tuple of orthonormal matrices representing the Grassmann points.
"""
xi, xj = xi_j
r = np.dot(xi.T, xj)
n = np.linalg.norm(r, "fro")
return n * n
|
9,023 | 485729398b51bebd16f38800c6100289b7b0b347 |
import sys
if sys.hexversion < 0x03000000:
from .foo import foo
|
9,024 | d7e24730ce9f2835d55d3995abec2a7d00eb05ef | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the
# Pystacho Project (https://github.com/aruderman/pystacho/).
# Copyright (c) 2021, Francisco Fernandez, Benjamin Marcologno, Andrés Ruderman
# License: MIT
# Full Text: https://github.com/aruderman/pystacho/blob/master/LICENSE
# =====================================================================
# DOCS
# =====================================================================
"""This file is for distribute and install Pystacho"""
# ======================================================================
# IMPORTS
# ======================================================================
import os
import pathlib
from setuptools import setup
# =============================================================================
# CONSTANTS
# =============================================================================
PATH = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
REQUIREMENTS = [
"diskcache",
"numpy",
"pandas",
"matplotlib",
"pymatgen",
"seaborn",
"lightgbm",
"matminer",
"scikit-learn",
]
with open(PATH / "pystacho" / "__init__.py") as fp:
for line in fp.readlines():
if line.startswith("__version__ = "):
VERSION = line.split("=", 1)[-1].replace('"', "").strip()
break
with open("README.md") as fp:
LONG_DESCRIPTION = fp.read()
# =============================================================================
# FUNCTIONS
# =============================================================================
setup(
name="Pystacho",
version=VERSION,
description="ESCRIBIR DESCRIPCION DEL PROYECTO",
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
author=["Francisco Fernandez", "Benjamin Marcologno", "Andrés Ruderman"],
author_email="andres.ruderman@gmail.com",
url="https://github.com/aruderman/pystacho",
packages=["pystacho"],
license="The MIT License",
install_requires=REQUIREMENTS,
keywords=["pystacho"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Scientific/Engineering",
],
# include_package_data=True,
)
|
9,025 | 4830da6bee6b19a5e5a82a73d2f3b220ca59d28b | from .linked_list import LinkedList
class Queue:
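# FIFO queue backed by a singly linked list: enqueue appends at the tail,
# dequeue removes from the head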
def __init__(self):
self.list = LinkedList()
def enqueue(self, value):
self.list.insert_last(value)
def dequeue(self):
element = self.list.get_head()
self.list.remove_first()
return element
def front(self):
# front is where elements leave the queue: the head of the linked list
return self.list.get_head()
def rear(self):
# rear is where new elements arrive: the tail of the linked list
return self.list.get_tail() |
9,026 | 4bbd97942023370e053ccf4b5c1496c7247c7bf2 | #!/usr/bin/env python
# encoding: utf-8
from rest_client import PY2
from tornado.testing import gen_test
from tornado.web import Application, RequestHandler
from .server import AsyncRESTTestCase
class Handler(RequestHandler):
if PY2:
S = '\xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82 \xd0\xbc\xd0\xb8\xd1\x80'.decode('utf-8')
else:
S = 'Привет мир'
def get(self):
self.set_header('Content-Type', 'text/plain; charset=utf-8')
self.write(self.S.encode('utf-8'))
class TestCopy(AsyncRESTTestCase):
def get_app(self):
return Application(handlers=[
('/', Handler),
])
@gen_test
def test_get(self):
response = yield self.http_client.get(self.api_url.format("/"))
assert response.body == Handler.S |
9,027 | 63391b31d1746f9b3583df5353ae160a430943a9 | a, b = input().split()
def test_input_text(expected_result, actual_result):
assert expected_result == actual_result, \
f'expected {expected_result}, got {actual_result}'
test_input_text(a,b)
|
9,028 | c8f899958ce19e7e2bf1307a685e65873695f140 | from utils import *
import math
class State:
"This class represents the search state that will be used for ARA* search"
def __init__(self, x, y, theta, parent=None, parent_action=None, g=float('inf'), h=float('inf')):
self.x = x
self.y = y
self.theta = theta % (2*math.pi)
self.g = g
self.h = h
self.parent = parent
self.parent_action = parent_action
def __eq__(self, other):
if not isinstance(other, State):
return False
return (self.x == other.x) and (self.y == other.y) and (almostEqual(self.theta, other.theta))
def __hash__(self):
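# theta is discretized to whole degrees so that states considered equal by
# almostEqual() in __eq__ fall into the same hash bucket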
deg = round(math.degrees(self.theta))
return hash((self.x, self.y, deg))
def __lt__(self, other):
return self.g < other.g
def setG(self, g):
self.g = g
def setH(self, h):
self.h = h
def setParent(self, parent):
self.parent = parent
def setParentAction(self, parent_action):
self.parent_action = parent_action
|
9,029 | 5ed91b98ece3ac9525e9d2c42db9c9d9912d5ed2 | import random
'''
Common request headers; a User-Agent is picked at random for each browser request.
'''
user_agent = [
"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
"Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
"Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
"Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
"Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
"Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
"Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
"MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
"Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10",
"Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13",
"Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+",
"Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0",
"Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)",
"UCWEB7.0.2.37/28/999",
"NOKIA5700/ UCWEB7.0.2.37/28/999",
"Openwave/ UCWEB7.0.2.37/28/999",
"Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999",
# iPhone 6:
"Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25"
]
def get_user_agent():
return {'User-Agent': random.choice(user_agent)}
|
9,030 | 8262d8b5bbb156eccae021c1c9333d3cd1a6260f | import requests, csv, configuration
headers = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}
url = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'
date = configuration.DATE
i = 1
params = {
'limit': '1',
'order': 'date_stop asc',
'filter': f'date_stop ge {date}'
}
r = requests.get(url, headers=headers, params=params)
dr_items = r.json()['items']
if len(dr_items):
with open('calls.csv', 'w', encoding='UTF8') as csv_file:
csv_writer = csv.writer(csv_file)
csv_header = ['dr_sid','date_start','number_src','number_dst','direction','duration','price']
csv_writer.writerow(csv_header)
dr_sid = dr_items[0]['dr_sid']
csv_row = [dr_items[0]['dr_sid'],dr_items[0]['date_start'],dr_items[0]['number_src'],dr_items[0]['number_dst'],dr_items[0]['direction'],dr_items[0]['duration'],dr_items[0]['price']]
csv_writer.writerow(csv_row)
print(f"{i}. {dr_items[0]['dr_sid']}")
while True:
params = {
'limit': '100',
'order': 'date_stop asc',
'after': dr_sid
}
r = requests.get(url, headers=headers, params=params)
if len(r.json()['items']):
dr_items = r.json()['items']
for item in dr_items:
i += 1
dr_sid = item['dr_sid'] # remember the last dr_sid seen as the cursor for the next page
csv_row = [item['dr_sid'],item['date_start'],item['number_src'],item['number_dst'],item['direction'],item['duration'],item['price']]
csv_writer.writerow(csv_row)
print(f"{i}. {item['dr_sid']}")
else:
print('No more new calls')
break
else:
print(f'No calls since {date}') |
9,031 | a14c23398bbf42832a285d29c1b80aefc5fdaf6c | import numpy as np
import cv2
import datetime
import random
# from random import randint
import time
import logging
def GetDateTimeString():
dt = str(datetime.datetime.now()).split(".")[0]
clean = dt.replace(" ","_").replace(":","_")
return clean
def GetBackground(bgNumber):
# bgImage = './backgrounds/' + str(new_img_nums[bgNumber]) + '.jpg'
bgImage = '/home/pi/pibooth/backgrounds/space.jpg'
return cv2.imread(bgImage)
def GetImage(bg):
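# chroma-key compositing: pixels of the webcam frame that match the key color
# in HSV are replaced by the background image, everything else is kept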
ret, frame = cam.read()
sensitivity = 1 # play with sensitivity to get rid of noise...
lowerRange = np.array([0, 0, 255 - sensitivity]) # this is currently set to white
upperRange = np.array([255, sensitivity, 255]) # this is currently set to white
#Mask the green screen
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
image_mask = cv2.inRange(hsv, lowerRange, upperRange)
bg_mask = cv2.bitwise_and(bg, bg, mask = image_mask)
fg_mask = cv2.bitwise_and(frame, frame, mask = cv2.bitwise_not(image_mask))
img = cv2.add(bg_mask, fg_mask)
return img
# Set up window for full screen
cv2.namedWindow("Photobooth", cv2.WND_PROP_FULLSCREEN)
# cv2.setWindowProperty("Photobooth", cv2.WND_PROP_FULLSCREEN, 1)
# options for countdown timer
fontFace = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 1
thickness = 4
countdownSeconds = 5
displayPhotoSeconds = 5
# Set up WebCam
width = 640
height = 480
cam = cv2.VideoCapture(0)
cam.set(3,width)
cam.set(4,height)
bgNumber = 0
new_img_nums = random.sample(range(1,9), 4)
bg = GetBackground(bgNumber)
clicked = False
clickedTime = None
while(True):
img = GetImage(bg) #get masked image from webcam
key = cv2.waitKey(1) #check for keypress
if clicked == True : # if countdown timer started
elapsed = datetime.datetime.now() - clickedTime
secs = int(elapsed.total_seconds())
if secs > countdownSeconds : # if five seconds are up, save the current image
clicked = False
cv2.imwrite('/home/pi/pibooth/newImages/img_' + GetDateTimeString() + '.jpg',img)
# cv2.imwrite('./newImages/img_' + GetDateTimeString() + '.jpg',img)
cv2.imshow('Photobooth',img)
time.sleep(displayPhotoSeconds) # show the photo for 5 seconds
bgNumber += 1
bg = GetBackground(bgNumber) # get a new background
else : # show the countdown timer
if countdownSeconds - secs == 1: # show this during the final countdown second
text = 'Say cheese!'
else:
text = str(countdownSeconds - secs) + "..."
textSize, base = cv2.getTextSize(text, fontFace, fontScale, thickness)
textWidth = int((width - textSize[0]) / 2)
textHeight = int((height + textSize[1]) / 2)
cv2.putText(img, text, (textWidth, textHeight), fontFace, fontScale, (255, 255, 255), thickness)
elif key == 32 : # on spacebar pressed, start the countdown timer
clickedTime = datetime.datetime.now()
clicked = True
elif key == 27 : # on escape, close the program
break
elif bgNumber == 4:
# assemble photos into strip
# print strip
# reset app
break
cv2.imshow('Photobooth',img) #display masked image
cv2.destroyAllWindows()
cam.release()
|
9,032 | 5ec2ac3e0d66026da1b0c957d10c95e95c201f8f | '''
Useful constants.
Inspired by pyatspi:
http://live.gnome.org/GAP/PythonATSPI
@author: Eitan Isaacson
@copyright: Copyright (c) 2008, Eitan Isaacson
@license: LGPL
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
'''
# Child ID.
CHILDID_SELF = 0
# IAccessibleText Constants
IA2_TEXT_OFFSET_LENGTH = -1
IA2_TEXT_OFFSET_CARET = -2
# Accessible Roles
# TODO: Is there a way to retrieve this at runtime or build time?
#
ROLE_SYSTEM_ALERT = 8
ROLE_SYSTEM_ANIMATION = 54
ROLE_SYSTEM_APPLICATION = 14
ROLE_SYSTEM_BORDER = 19
ROLE_SYSTEM_BUTTONDROPDOWN = 56
ROLE_SYSTEM_BUTTONDROPDOWNGRID = 58
ROLE_SYSTEM_BUTTONMENU = 57
ROLE_SYSTEM_CARET = 7
ROLE_SYSTEM_CELL = 29
ROLE_SYSTEM_CHARACTER = 32
ROLE_SYSTEM_CHART = 17
ROLE_SYSTEM_CHECKBUTTON = 44
ROLE_SYSTEM_CLIENT = 10
ROLE_SYSTEM_CLOCK = 61
ROLE_SYSTEM_COLUMN = 27
ROLE_SYSTEM_COLUMNHEADER = 25
ROLE_SYSTEM_COMBOBOX = 46
ROLE_SYSTEM_CURSOR = 6
ROLE_SYSTEM_DIAGRAM = 53
ROLE_SYSTEM_DIAL = 49
ROLE_SYSTEM_DIALOG = 18
ROLE_SYSTEM_DOCUMENT = 15
ROLE_SYSTEM_DROPLIST = 47
ROLE_SYSTEM_EQUATION = 55
ROLE_SYSTEM_GRAPHIC = 40
ROLE_SYSTEM_GRIP = 4
ROLE_SYSTEM_GROUPING = 20
ROLE_SYSTEM_HELPBALLOON = 31
ROLE_SYSTEM_HOTKEYFIELD = 50
ROLE_SYSTEM_INDICATOR = 39
ROLE_SYSTEM_LINK = 30
ROLE_SYSTEM_LIST = 33
ROLE_SYSTEM_LISTITEM = 34
ROLE_SYSTEM_MENUBAR = 2
ROLE_SYSTEM_MENUITEM = 12
ROLE_SYSTEM_MENUPOPUP = 11
ROLE_SYSTEM_OUTLINE = 35
ROLE_SYSTEM_OUTLINEITEM = 36
ROLE_SYSTEM_PAGETAB = 37
ROLE_SYSTEM_PAGETABLIST = 60
ROLE_SYSTEM_PANE = 16
ROLE_SYSTEM_PROGRESSBAR = 48
ROLE_SYSTEM_PROPERTYPAGE = 38
ROLE_SYSTEM_PUSHBUTTON = 43
ROLE_SYSTEM_RADIOBUTTON = 45
ROLE_SYSTEM_ROW = 28
ROLE_SYSTEM_ROWHEADER = 26
ROLE_SYSTEM_SCROLLBAR = 3
ROLE_SYSTEM_SEPARATOR = 21
ROLE_SYSTEM_SLIDER = 51
ROLE_SYSTEM_SOUND = 5
ROLE_SYSTEM_SPINBUTTON = 52
ROLE_SYSTEM_STATICTEXT = 41
ROLE_SYSTEM_STATUSBAR = 23
ROLE_SYSTEM_TABLE = 24
ROLE_SYSTEM_TEXT = 42
ROLE_SYSTEM_TITLEBAR = 1
ROLE_SYSTEM_TOOLBAR = 22
ROLE_SYSTEM_TOOLTIP = 13
ROLE_SYSTEM_WHITESPACE = 59
ROLE_SYSTEM_WINDOW = 9
IA2_ROLE_UNKNOWN = 0
IA2_ROLE_CANVAS = 0x401
IA2_ROLE_CAPTION = 0x402
IA2_ROLE_CHECK_MENU_ITEM = 0x403
IA2_ROLE_COLOR_CHOOSER = 0x404
IA2_ROLE_DATE_EDITOR = 0x405
IA2_ROLE_DESKTOP_ICON = 0x406
IA2_ROLE_DESKTOP_PANE = 0x407
IA2_ROLE_DIRECTORY_PANE = 0x408
IA2_ROLE_EDITBAR = 0x409
IA2_ROLE_EMBEDDED_OBJECT = 0x40a
IA2_ROLE_ENDNOTE = 0x40b
IA2_ROLE_FILE_CHOOSER = 0x40c
IA2_ROLE_FONT_CHOOSER = 0x40d
IA2_ROLE_FOOTER = 0x40e
IA2_ROLE_FOOTNOTE = 0x40f
IA2_ROLE_FORM = 0x410
IA2_ROLE_FRAME = 0x411
IA2_ROLE_GLASS_PANE = 0x412
IA2_ROLE_HEADER = 0x413
IA2_ROLE_HEADING = 0x414
IA2_ROLE_ICON = 0x415
IA2_ROLE_IMAGE_MAP = 0x416
IA2_ROLE_INPUT_METHOD_WINDOW = 0x417
IA2_ROLE_INTERNAL_FRAME = 0x418
IA2_ROLE_LABEL = 0x419
IA2_ROLE_LAYERED_PANE = 0x41a
IA2_ROLE_NOTE = 0x41b
IA2_ROLE_OPTION_PANE = 0x41c
IA2_ROLE_PAGE = 0x41d
IA2_ROLE_PARAGRAPH = 0x41e
IA2_ROLE_RADIO_MENU_ITEM = 0x41f
IA2_ROLE_REDUNDANT_OBJECT = 0x420
IA2_ROLE_ROOT_PANE = 0x421
IA2_ROLE_RULER = 0x422
IA2_ROLE_SCROLL_PANE = 0x423
IA2_ROLE_SECTION = 0x424
IA2_ROLE_SHAPE = 0x425
IA2_ROLE_SPLIT_PANE = 0x426
IA2_ROLE_TEAR_OFF_MENU = 0x427
IA2_ROLE_TERMINAL = 0x428
IA2_ROLE_TEXT_FRAME = 0x429
IA2_ROLE_TOGGLE_BUTTON = 0x42a
IA2_ROLE_VIEW_PORT = 0x42b
IA2_ROLE_COMPLEMENTARY_CONTENT = 0x42c
IA2_ROLE_LANDMARK = 0x42d
# Unlocalized role strings
UNLOCALIZED_ROLE_NAMES = {
1: u'ROLE_SYSTEM_TITLEBAR',
2: u'ROLE_SYSTEM_MENUBAR',
3: u'ROLE_SYSTEM_SCROLLBAR',
4: u'ROLE_SYSTEM_GRIP',
5: u'ROLE_SYSTEM_SOUND',
6: u'ROLE_SYSTEM_CURSOR',
7: u'ROLE_SYSTEM_CARET',
8: u'ROLE_SYSTEM_ALERT',
9: u'ROLE_SYSTEM_WINDOW',
10: u'ROLE_SYSTEM_CLIENT',
11: u'ROLE_SYSTEM_MENUPOPUP',
12: u'ROLE_SYSTEM_MENUITEM',
13: u'ROLE_SYSTEM_TOOLTIP',
14: u'ROLE_SYSTEM_APPLICATION',
15: u'ROLE_SYSTEM_DOCUMENT',
16: u'ROLE_SYSTEM_PANE',
17: u'ROLE_SYSTEM_CHART',
18: u'ROLE_SYSTEM_DIALOG',
19: u'ROLE_SYSTEM_BORDER',
20: u'ROLE_SYSTEM_GROUPING',
21: u'ROLE_SYSTEM_SEPARATOR',
22: u'ROLE_SYSTEM_TOOLBAR',
23: u'ROLE_SYSTEM_STATUSBAR',
24: u'ROLE_SYSTEM_TABLE',
25: u'ROLE_SYSTEM_COLUMNHEADER',
26: u'ROLE_SYSTEM_ROWHEADER',
27: u'ROLE_SYSTEM_COLUMN',
28: u'ROLE_SYSTEM_ROW',
29: u'ROLE_SYSTEM_CELL',
30: u'ROLE_SYSTEM_LINK',
31: u'ROLE_SYSTEM_HELPBALLOON',
32: u'ROLE_SYSTEM_CHARACTER',
33: u'ROLE_SYSTEM_LIST',
34: u'ROLE_SYSTEM_LISTITEM',
35: u'ROLE_SYSTEM_OUTLINE',
36: u'ROLE_SYSTEM_OUTLINEITEM',
37: u'ROLE_SYSTEM_PAGETAB',
38: u'ROLE_SYSTEM_PROPERTYPAGE',
39: u'ROLE_SYSTEM_INDICATOR',
40: u'ROLE_SYSTEM_GRAPHIC',
41: u'ROLE_SYSTEM_STATICTEXT',
42: u'ROLE_SYSTEM_TEXT',
43: u'ROLE_SYSTEM_PUSHBUTTON',
44: u'ROLE_SYSTEM_CHECKBUTTON',
45: u'ROLE_SYSTEM_RADIOBUTTON',
46: u'ROLE_SYSTEM_COMBOBOX',
47: u'ROLE_SYSTEM_DROPLIST',
48: u'ROLE_SYSTEM_PROGRESSBAR',
49: u'ROLE_SYSTEM_DIAL',
50: u'ROLE_SYSTEM_HOTKEYFIELD',
51: u'ROLE_SYSTEM_SLIDER',
52: u'ROLE_SYSTEM_SPINBUTTON',
53: u'ROLE_SYSTEM_DIAGRAM',
54: u'ROLE_SYSTEM_ANIMATION',
55: u'ROLE_SYSTEM_EQUATION',
56: u'ROLE_SYSTEM_BUTTONDROPDOWN',
57: u'ROLE_SYSTEM_BUTTONMENU',
58: u'ROLE_SYSTEM_BUTTONDROPDOWNGRID',
59: u'ROLE_SYSTEM_WHITESPACE',
60: u'ROLE_SYSTEM_PAGETABLIST',
61: u'ROLE_SYSTEM_CLOCK'}
# Unlocalized role strings
UNLOCALIZED_IA2_ROLE_NAMES = {
0x000: u'IA2_ROLE_UNKNOWN',
0x401: u'IA2_ROLE_CANVAS',
0x402: u'IA2_ROLE_CAPTION',
0x403: u'IA2_ROLE_CHECK_MENU_ITEM',
0x404: u'IA2_ROLE_COLOR_CHOOSER',
0x405: u'IA2_ROLE_DATE_EDITOR',
0x406: u'IA2_ROLE_DESKTOP_ICON',
0x407: u'IA2_ROLE_DESKTOP_PANE',
0x408: u'IA2_ROLE_DIRECTORY_PANE',
0x409: u'IA2_ROLE_EDITBAR',
0x40a: u'IA2_ROLE_EMBEDDED_OBJECT',
0x40b: u'IA2_ROLE_ENDNOTE',
0x40c: u'IA2_ROLE_FILE_CHOOSER',
0x40d: u'IA2_ROLE_FONT_CHOOSER',
0x40e: u'IA2_ROLE_FOOTER',
0x40f: u'IA2_ROLE_FOOTNOTE',
0x410: u'IA2_ROLE_FORM',
0x411: u'IA2_ROLE_FRAME',
0x412: u'IA2_ROLE_GLASS_PANE',
0x413: u'IA2_ROLE_HEADER',
0x414: u'IA2_ROLE_HEADING',
0x415: u'IA2_ROLE_ICON',
0x416: u'IA2_ROLE_IMAGE_MAP',
0x417: u'IA2_ROLE_INPUT_METHOD_WINDOW',
0x418: u'IA2_ROLE_INTERNAL_FRAME',
0x419: u'IA2_ROLE_LABEL',
0x41a: u'IA2_ROLE_LAYERED_PANE',
0x41b: u'IA2_ROLE_NOTE',
0x41c: u'IA2_ROLE_OPTION_PANE',
0x41d: u'IA2_ROLE_PAGE',
0x41e: u'IA2_ROLE_PARAGRAPH',
0x41f: u'IA2_ROLE_RADIO_MENU_ITEM',
0x420: u'IA2_ROLE_REDUNDANT_OBJECT',
0x421: u'IA2_ROLE_ROOT_PANE',
0x422: u'IA2_ROLE_RULER',
0x423: u'IA2_ROLE_SCROLL_PANE',
0x424: u'IA2_ROLE_SECTION',
0x425: u'IA2_ROLE_SHAPE',
0x426: u'IA2_ROLE_SPLIT_PANE',
0x427: u'IA2_ROLE_TEAR_OFF_MENU',
0x428: u'IA2_ROLE_TERMINAL',
0x429: u'IA2_ROLE_TEXT_FRAME',
0x42a: u'IA2_ROLE_TOGGLE_BUTTON',
0x42b: u'IA2_ROLE_VIEW_PORT',
0x42c: u'IA2_ROLE_COMPLEMENTARY_CONTENT',
0x42d: u'IA2_ROLE_LANDMARK'}
# Navigation constants
NAVDIR_DOWN = 2
NAVDIR_FIRSTCHILD = 7
NAVDIR_LASTCHILD = 8
NAVDIR_LEFT = 3
NAVDIR_NEXT = 5
NAVDIR_PREVIOUS = 6
NAVDIR_RIGHT = 4
NAVDIR_UP = 1
STATE_SYSTEM_UNAVAILABLE = 0x1
STATE_SYSTEM_SELECTED = 0x2
STATE_SYSTEM_FOCUSED = 0x4
STATE_SYSTEM_PRESSED = 0x8
STATE_SYSTEM_CHECKED = 0x10
STATE_SYSTEM_MIXED = 0x20
STATE_SYSTEM_READONLY = 0x40
STATE_SYSTEM_HOTTRACKED = 0x80
STATE_SYSTEM_DEFAULT = 0x100
STATE_SYSTEM_EXPANDED = 0x200
STATE_SYSTEM_COLLAPSED = 0x400
STATE_SYSTEM_BUSY = 0x800
STATE_SYSTEM_FLOATING = 0x1000
STATE_SYSTEM_MARQUEED = 0x2000
STATE_SYSTEM_ANIMATED = 0x4000
STATE_SYSTEM_INVISIBLE = 0x8000
STATE_SYSTEM_OFFSCREEN = 0x10000
STATE_SYSTEM_SIZEABLE = 0x20000
STATE_SYSTEM_MOVEABLE = 0x40000
STATE_SYSTEM_SELFVOICING = 0x80000
STATE_SYSTEM_FOCUSABLE = 0x100000
STATE_SYSTEM_SELECTABLE = 0x200000
STATE_SYSTEM_LINKED = 0x400000
STATE_SYSTEM_TRAVERSED = 0x800000
STATE_SYSTEM_MULTISELECTABLE = 0x1000000
STATE_SYSTEM_EXTSELECTABLE = 0x2000000
STATE_SYSTEM_HASSUBMENU = 0x4000000
STATE_SYSTEM_ALERT_LOW = 0x4000000
STATE_SYSTEM_ALERT_MEDIUM = 0x8000000
STATE_SYSTEM_ALERT_HIGH = 0x10000000
STATE_SYSTEM_PROTECTED = 0x20000000
STATE_SYSTEM_HASPOPUP = 0x40000000
STATE_SYSTEM_VALID = 0x1fffffff
# Unlocalized state strings
UNLOCALIZED_STATE_NAMES = {
1: u'STATE_SYSTEM_UNAVAILABLE',
2: u'STATE_SYSTEM_SELECTED',
4: u'STATE_SYSTEM_FOCUSED',
8: u'STATE_SYSTEM_PRESSED',
16: u'STATE_SYSTEM_CHECKED',
32: u'STATE_SYSTEM_MIXED',
64: u'STATE_SYSTEM_READONLY',
128: u'STATE_SYSTEM_HOTTRACKED',
256: u'STATE_SYSTEM_DEFAULT',
512: u'STATE_SYSTEM_EXPANDED',
1024: u'STATE_SYSTEM_COLLAPSED',
2048: u'STATE_SYSTEM_BUSY',
4096: u'STATE_SYSTEM_FLOATING',
8192: u'STATE_SYSTEM_MARQUEED',
16384: u'STATE_SYSTEM_ANIMATED',
32768: u'STATE_SYSTEM_INVISIBLE',
65536: u'STATE_SYSTEM_OFFSCREEN',
131072: u'STATE_SYSTEM_SIZEABLE',
262144: u'STATE_SYSTEM_MOVEABLE',
524288: u'STATE_SYSTEM_SELFVOICING',
1048576: u'STATE_SYSTEM_FOCUSABLE',
2097152: u'STATE_SYSTEM_SELECTABLE',
4194304: u'STATE_SYSTEM_LINKED',
8388608: u'STATE_SYSTEM_TRAVERSED',
16777216: u'STATE_SYSTEM_MULTISELECTABLE',
33554432: u'STATE_SYSTEM_EXTSELECTABLE',
67108864: u'STATE_SYSTEM_ALERT_LOW',
134217728: u'STATE_SYSTEM_ALERT_MEDIUM',
268435456: u'STATE_SYSTEM_ALERT_HIGH',
536870912: u'STATE_SYSTEM_PROTECTED',
1073741824: u'STATE_SYSTEM_HASPOPUP',
0x1fffffff: u'STATE_SYSTEM_VALID'}
IA2_STATE_ACTIVE = 0x1
IA2_STATE_ARMED = 0x2
IA2_STATE_DEFUNCT = 0x4
IA2_STATE_EDITABLE = 0x8
IA2_STATE_HORIZONTAL = 0x10
IA2_STATE_ICONIFIED = 0x20
IA2_STATE_INVALID_ENTRY = 0x40
IA2_STATE_MANAGES_DESCENDANTS = 0x80
IA2_STATE_MODAL = 0x100
IA2_STATE_MULTI_LINE = 0x200
IA2_STATE_OPAQUE = 0x400
IA2_STATE_REQUIRED = 0x800
IA2_STATE_SELECTABLE_TEXT = 0x1000
IA2_STATE_SINGLE_LINE = 0x2000
IA2_STATE_STALE = 0x4000
IA2_STATE_SUPPORTS_AUTOCOMPLETION = 0x8000
IA2_STATE_TRANSIENT = 0x10000
IA2_STATE_VERTICAL = 0x20000
IA2_STATE_CHECKABLE = 0x40000
IA2_STATE_PINNED = 0x80000
UNLOCALIZED_IA2_STATE_NAMES = {
1: u'IA2_STATE_ACTIVE',
2: u'IA2_STATE_ARMED',
4: u'IA2_STATE_DEFUNCT',
8: u'IA2_STATE_EDITABLE',
16: u'IA2_STATE_HORIZONTAL',
32: u'IA2_STATE_ICONIFIED',
64: u'IA2_STATE_INVALID_ENTRY',
128: u'IA2_STATE_MANAGES_DESCENDANTS',
256: u'IA2_STATE_MODAL',
512: u'IA2_STATE_MULTI_LINE',
1024: u'IA2_STATE_OPAQUE',
2048: u'IA2_STATE_REQUIRED',
4096: u'IA2_STATE_SELECTABLE_TEXT',
8192: u'IA2_STATE_SINGLE_LINE',
16384: u'IA2_STATE_STALE',
32768: u'IA2_STATE_SUPPORTS_AUTOCOMPLETION',
65536: u'IA2_STATE_TRANSIENT',
131072: u'IA2_STATE_VERTICAL',
262144: u'IA2_STATE_CHECKABLE',
524288: u'IA2_STATE_PINNED'}
UNLOCALIZED_IA2_RELATION_TYPES = {
u'containingApplication' : u'IA2_RELATION_CONTAINING_APPLICATION',
u'containingDocument' : u'IA2_RELATION_CONTAINING_DOCUMENT',
u'containingTabPane' : u'IA2_RELATION_CONTAINING_TAB_PANE',
u'containingWindow' : u'IA2_RELATION_CONTAINING_WINDOW',
u'controlledBy' : u'IA2_RELATION_CONTROLLED_BY',
u'controllerFor' : u'IA2_RELATION_CONTROLLER_FOR',
u'describedBy' : u'IA2_RELATION_DESCRIBED_BY',
u'descriptionFor' : u'IA2_RELATION_DESCRIPTION_FOR',
u'details' : u'IA2_RELATION_DETAILS',
u'detailsFor' : u'IA2_RELATION_DETAILS_FOR',
u'embeddedBy' : u'IA2_RELATION_EMBEDDED_BY',
u'embeds' : u'IA2_RELATION_EMBEDS',
u'errorMessage' : u'IA2_RELATION_ERROR_MESSAGE',
u'errorFor' : u'IA2_RELATION_ERROR_FOR',
u'flowsFrom' : u'IA2_RELATION_FLOWS_FROM',
u'flowsTo' : u'IA2_RELATION_FLOWS_TO',
u'labelFor' : u'IA2_RELATION_LABEL_FOR',
# IA2 defines both IA2_RELATION_LABELED_BY and IA2_RELATION_LABELLED_BY for the
# same 'labelledBy' value; keep a single key so one entry is not silently shadowed
u'labelledBy' : u'IA2_RELATION_LABELLED_BY',
u'memberOf' : u'IA2_RELATION_MEMBER_OF',
u'nextTabbable' : u'IA2_RELATION_NEXT_TABBABLE',
u'nodeChildOf' : u'IA2_RELATION_NODE_CHILD_OF',
u'nodeParentOf' : u'IA2_RELATION_NODE_PARENT_OF',
u'parentWindowOf' : u'IA2_RELATION_PARENT_WINDOW_OF',
u'popupFor' : u'IA2_RELATION_POPUP_FOR',
u'previousTabbable' : u'IA2_RELATION_PREVIOUS_TABBABLE',
u'subwindowOf' : u'IA2_RELATION_SUBWINDOW_OF'}
# SetWinEventHook() flags
WINEVENT_OUTOFCONTEXT = 0x0
WINEVENT_SKIPOWNTHREAD = 0x1
WINEVENT_SKIPOWNPROCESS = 0x2
WINEVENT_INCONTEXT = 0x4
#win events
EVENT_SYSTEM_SOUND = 0x1
EVENT_SYSTEM_ALERT = 0x2
EVENT_SYSTEM_FOREGROUND = 0x3
EVENT_SYSTEM_MENUSTART = 0x4
EVENT_SYSTEM_MENUEND = 0x5
EVENT_SYSTEM_MENUPOPUPSTART = 0x6
EVENT_SYSTEM_MENUPOPUPEND = 0x7
EVENT_SYSTEM_CAPTURESTART = 0x8
EVENT_SYSTEM_CAPTUREEND = 0x9
EVENT_SYSTEM_MOVESIZESTART = 0xa
EVENT_SYSTEM_MOVESIZEEND = 0xb
EVENT_SYSTEM_CONTEXTHELPSTART = 0xc
EVENT_SYSTEM_CONTEXTHELPEND = 0xd
EVENT_SYSTEM_DRAGDROPSTART = 0xe
EVENT_SYSTEM_DRAGDROPEND = 0xf
EVENT_SYSTEM_DIALOGSTART = 0x10
EVENT_SYSTEM_DIALOGEND = 0x11
EVENT_SYSTEM_SCROLLINGSTART = 0x12
EVENT_SYSTEM_SCROLLINGEND = 0x13
EVENT_SYSTEM_SWITCHSTART = 0x14
EVENT_SYSTEM_SWITCHEND = 0x15
EVENT_SYSTEM_MINIMIZESTART = 0x16
EVENT_SYSTEM_MINIMIZEEND = 0x17
EVENT_OBJECT_CREATE = 0x8000
EVENT_OBJECT_DESTROY = 0x8001
EVENT_OBJECT_SHOW = 0x8002
EVENT_OBJECT_HIDE = 0x8003
EVENT_OBJECT_REORDER = 0x8004
EVENT_OBJECT_FOCUS = 0x8005
EVENT_OBJECT_SELECTION = 0x8006
EVENT_OBJECT_SELECTIONADD = 0x8007
EVENT_OBJECT_SELECTIONREMOVE = 0x8008
EVENT_OBJECT_SELECTIONWITHIN = 0x8009
EVENT_OBJECT_STATECHANGE = 0x800a
EVENT_OBJECT_LOCATIONCHANGE = 0x800b
EVENT_OBJECT_NAMECHANGE = 0x800c
EVENT_OBJECT_DESCRIPTIONCHANGE = 0x800d
EVENT_OBJECT_VALUECHANGE = 0x800e
EVENT_OBJECT_PARENTCHANGE = 0x800f
EVENT_OBJECT_HELPCHANGE = 0x8010
EVENT_OBJECT_DEFACTIONCHANGE = 0x8011
EVENT_OBJECT_ACCELERATORCHANGE = 0x8012
EVENT_CONSOLE_CARET = 0x4001
EVENT_CONSOLE_UPDATE_REGION = 0x4002
EVENT_CONSOLE_UPDATE_SIMPLE = 0x4003
EVENT_CONSOLE_UPDATE_SCROLL = 0x4004
EVENT_CONSOLE_LAYOUT = 0x4005
EVENT_CONSOLE_START_APPLICATION = 0x4006
EVENT_CONSOLE_END_APPLICATION = 0x4007
# IAccessible2 events
IA2_EVENT_ACTION_CHANGED = 0x101
IA2_EVENT_ACTIVE_DECENDENT_CHANGED = 0x102
IA2_EVENT_ACTIVE_DESCENDANT_CHANGED = 0x102
IA2_EVENT_DOCUMENT_ATTRIBUTE_CHANGED = 0x103
IA2_EVENT_DOCUMENT_CONTENT_CHANGED = 0x104
IA2_EVENT_DOCUMENT_LOAD_COMPLETE = 0x105
IA2_EVENT_DOCUMENT_LOAD_STOPPED = 0x106
IA2_EVENT_DOCUMENT_RELOAD = 0x107
IA2_EVENT_HYPERLINK_END_INDEX_CHANGED = 0x108
IA2_EVENT_HYPERLINK_NUMBER_OF_ANCHORS_CHANGED = 0x109
IA2_EVENT_HYPERLINK_SELECTED_LINK_CHANGED = 0x10a
IA2_EVENT_HYPERTEXT_LINK_ACTIVATED = 0x10b
IA2_EVENT_HYPERTEXT_LINK_SELECTED = 0x10c
IA2_EVENT_HYPERLINK_START_INDEX_CHANGED = 0x10d
IA2_EVENT_HYPERTEXT_CHANGED = 0x10e
# 0x11f/0x120 collided with IA2_EVENT_TEXT_REMOVED/TEXT_UPDATED; the IA2 values
# are 0x10f/0x110 (cf. UNLOCALIZED_EVENT_NAMES below)
IA2_EVENT_HYPERTEXT_NLINKS_CHANGED = 0x10f
IA2_EVENT_OBJECT_ATTRIBUTE_CHANGED = 0x110
IA2_EVENT_PAGE_CHANGED = 0x111
IA2_EVENT_SECTION_CHANGED = 0x112
IA2_EVENT_TABLE_CAPTION_CHANGED = 0x113
IA2_EVENT_TABLE_COLUMN_DESCRIPTION_CHANGED = 0x114
IA2_EVENT_TABLE_COLUMN_HEADER_CHANGED = 0x115
IA2_EVENT_TABLE_MODEL_CHANGED = 0x116
IA2_EVENT_TABLE_ROW_DESCRIPTION_CHANGED = 0x117
IA2_EVENT_TABLE_ROW_HEADER_CHANGED = 0x118
IA2_EVENT_TABLE_SUMMARY_CHANGED = 0x119
IA2_EVENT_TEXT_ATTRIBUTE_CHANGED = 0x11a
IA2_EVENT_TEXT_CARET_MOVED = 0x11b
IA2_EVENT_TEXT_CHANGED = 0x11c
IA2_EVENT_TEXT_COLUMN_CHANGED = 0x11d
IA2_EVENT_TEXT_INSERTED = 0x11e
IA2_EVENT_TEXT_REMOVED = 0x11f
IA2_EVENT_TEXT_UPDATED = 0x120
IA2_EVENT_TEXT_SELECTION_CHANGED = 0x121
IA2_EVENT_VISIBLE_DATA_CHANGED = 0x122
UNLOCALIZED_EVENT_NAMES = {
0x1: u'EVENT_SYSTEM_SOUND',
0x2: u'EVENT_SYSTEM_ALERT',
0x3: u'EVENT_SYSTEM_FOREGROUND',
0x4: u'EVENT_SYSTEM_MENUSTART',
0x5: u'EVENT_SYSTEM_MENUEND',
0x6: u'EVENT_SYSTEM_MENUPOPUPSTART',
0x7: u'EVENT_SYSTEM_MENUPOPUPEND',
0x8: u'EVENT_SYSTEM_CAPTURESTART',
0x9: u'EVENT_SYSTEM_CAPTUREEND',
0xa: u'EVENT_SYSTEM_MOVESIZESTART',
0xb: u'EVENT_SYSTEM_MOVESIZEEND',
0xc: u'EVENT_SYSTEM_CONTEXTHELPSTART',
0xd: u'EVENT_SYSTEM_CONTEXTHELPEND',
0xe: u'EVENT_SYSTEM_DRAGDROPSTART',
0xf: u'EVENT_SYSTEM_DRAGDROPEND',
0x10: u'EVENT_SYSTEM_DIALOGSTART',
0x11: u'EVENT_SYSTEM_DIALOGEND',
0x12: u'EVENT_SYSTEM_SCROLLINGSTART',
0x13: u'EVENT_SYSTEM_SCROLLINGEND',
0x14: u'EVENT_SYSTEM_SWITCHSTART',
0x15: u'EVENT_SYSTEM_SWITCHEND',
0x16: u'EVENT_SYSTEM_MINIMIZESTART',
0x17: u'EVENT_SYSTEM_MINIMIZEEND',
0x101: u'IA2_EVENT_ACTION_CHANGED',
0x102: u'IA2_EVENT_ACTIVE_DESCENDANT_CHANGED',
0x103: u'IA2_EVENT_DOCUMENT_ATTRIBUTE_CHANGED',
0x104: u'IA2_EVENT_DOCUMENT_CONTENT_CHANGED',
0x105: u'IA2_EVENT_DOCUMENT_LOAD_COMPLETE',
0x106: u'IA2_EVENT_DOCUMENT_LOAD_STOPPED',
0x107: u'IA2_EVENT_DOCUMENT_RELOAD',
0x108: u'IA2_EVENT_HYPERLINK_END_INDEX_CHANGED',
0x109: u'IA2_EVENT_HYPERLINK_NUMBER_OF_ANCHORS_CHANGED',
0x10a: u'IA2_EVENT_HYPERLINK_SELECTED_LINK_CHANGED',
0x10b: u'IA2_EVENT_HYPERTEXT_LINK_ACTIVATED',
0x10c: u'IA2_EVENT_HYPERTEXT_LINK_SELECTED',
0x10d: u'IA2_EVENT_HYPERLINK_START_INDEX_CHANGED',
0x10e: u'IA2_EVENT_HYPERTEXT_CHANGED',
0x10f: u'IA2_EVENT_HYPERTEXT_NLINKS_CHANGED',
0x110: u'IA2_EVENT_OBJECT_ATTRIBUTE_CHANGED',
0x111: u'IA2_EVENT_PAGE_CHANGED',
0x112: u'IA2_EVENT_SECTION_CHANGED',
0x113: u'IA2_EVENT_TABLE_CAPTION_CHANGED',
0x114: u'IA2_EVENT_TABLE_COLUMN_DESCRIPTION_CHANGED',
0x115: u'IA2_EVENT_TABLE_COLUMN_HEADER_CHANGED',
0x116: u'IA2_EVENT_TABLE_MODEL_CHANGED',
0x117: u'IA2_EVENT_TABLE_ROW_DESCRIPTION_CHANGED',
0x118: u'IA2_EVENT_TABLE_ROW_HEADER_CHANGED',
0x119: u'IA2_EVENT_TABLE_SUMMARY_CHANGED',
0x11a: u'IA2_EVENT_TEXT_ATTRIBUTE_CHANGED',
0x11b: u'IA2_EVENT_TEXT_CARET_MOVED',
0x11c: u'IA2_EVENT_TEXT_CHANGED',
0x11d: u'IA2_EVENT_TEXT_COLUMN_CHANGED',
0x11e: u'IA2_EVENT_TEXT_INSERTED',
0x11f: u'IA2_EVENT_TEXT_REMOVED',
0x120: u'IA2_EVENT_TEXT_UPDATED',
0x121: u'IA2_EVENT_TEXT_SELECTION_CHANGED',
0x122: u'IA2_EVENT_VISIBLE_DATA_CHANGED',
0x4001: u'EVENT_CONSOLE_CARET',
0x4002: u'EVENT_CONSOLE_UPDATE_REGION',
0x4003: u'EVENT_CONSOLE_UPDATE_SIMPLE',
0x4004: u'EVENT_CONSOLE_UPDATE_SCROLL',
0x4005: u'EVENT_CONSOLE_LAYOUT',
0x4006: u'EVENT_CONSOLE_START_APPLICATION',
0x4007: u'EVENT_CONSOLE_END_APPLICATION',
0x8000: u'EVENT_OBJECT_CREATE',
0x8001: u'EVENT_OBJECT_DESTROY',
0x8002: u'EVENT_OBJECT_SHOW',
0x8003: u'EVENT_OBJECT_HIDE',
0x8004: u'EVENT_OBJECT_REORDER',
0x8005: u'EVENT_OBJECT_FOCUS',
0x8006: u'EVENT_OBJECT_SELECTION',
0x8007: u'EVENT_OBJECT_SELECTIONADD',
0x8008: u'EVENT_OBJECT_SELECTIONREMOVE',
0x8009: u'EVENT_OBJECT_SELECTIONWITHIN',
0x800a: u'EVENT_OBJECT_STATECHANGE',
0x800b: u'EVENT_OBJECT_LOCATIONCHANGE',
0x800c: u'EVENT_OBJECT_NAMECHANGE',
0x800d: u'EVENT_OBJECT_DESCRIPTIONCHANGE',
0x800e: u'EVENT_OBJECT_VALUECHANGE',
0x800f: u'EVENT_OBJECT_PARENTCHANGE',
0x8010: u'EVENT_OBJECT_HELPCHANGE',
0x8011: u'EVENT_OBJECT_DEFACTIONCHANGE',
0x8012: u'EVENT_OBJECT_ACCELERATORCHANGE'}
winEventIDsToEventNames={}
# snapshot locals() into a list first: binding the loop variables would otherwise
# mutate the module namespace while it is being iterated (RuntimeError on Python 3)
for _sym, _val in list(locals().items()):
if _sym.startswith('EVENT_') or _sym.startswith('IA2_EVENT_'):
winEventIDsToEventNames[_val] = _sym
|
9,033 | bde37f3b41c810ab465de5e0ae374703af9f01f3 | # -*- coding: utf-8 -*-
def create_map(rows):
maze = []
for row in rows:
row = row[:-1]
subarr = []
for i in row:
subarr.append(i)
maze.append(subarr)
return maze
def print_map(chart):
for subarr in chart:
print(subarr)
def find_start(chart):
for y in range(len(chart)):
row = chart[y]
for x in range(len(row)):
if row[x] == 'S':
return (y, x)
def find_exit(y, x, chart, path):
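# recursive flood fill: reachable corridor cells are marked '0' (visited), a
# border cell becomes 'E' (exit), and path maps each cell to its predecessor
# so the route can be walked backwards afterwards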
h = len(chart)
w = len(chart[0])
# left
if x-1 == 0 and chart[y][x-1] == ' ':
chart[y][x-1] = 'E'
path[(y, x-1)] = [y, x]
return
elif x-1 > 0 and chart[y][x-1] == ' ':
chart[y][x-1] = '0'
path[(y, x - 1)] = [y, x]
find_exit(y, x-1, chart, path)
# up
if y-1 == 0 and chart[y-1][x] == ' ':
chart[y-1][x] = 'E'
path[(y-1, x)] = [y, x]
return
elif y-1 > 0 and chart[y-1][x] == ' ':
chart[y-1][x] = '0'
path[(y - 1, x)] = [y, x]
find_exit(y-1, x, chart, path)
# right
if x+1 == w-1 and chart[y][x+1] == ' ':
chart[y][x+1] = 'E'
path[(y, x+1)] = [y, x]
return
elif x+1 < w - 1 and chart[y][x+1] == ' ':
chart[y][x+1] = '0'
path[(y, x + 1)] = [y, x]
find_exit(y, x+1, chart, path)
# down
if y+1 == h-1 and chart[y+1][x] == ' ':
chart[y+1][x] = 'E'
path[(y+1, x)] = [y, x]
return
elif y+1 < h - 1 and chart[y+1][x] == ' ':
chart[y+1][x] = '0'
path[(y + 1, x)] = [y, x]
find_exit(y+1, x, chart, path)
def check_exit(chart):
height = len(chart)
width = len(chart[0])
for x in range(width):
v = chart[0][x]
if v == 'E':
return True, 0, x
v = chart[height-1][x]
if v == 'E':
return True, height-1, x
for y in range(height):
v = chart[y][0]
if v == 'E':
return True, y, 0
v = chart[y][width-1]
if v == 'E':
return True, y, width-1
return False, -1, -1
if __name__ == '__main__':
file = open('../00_text_files/01_labyrinth.txt', 'rt')
labyrinth = file.readlines()
file.close()
maze = create_map(labyrinth)
start = find_start(maze)
maze[start[0]][start[1]] = '0'
path = {}
find_exit(start[0], start[1], maze, path)
print_map(maze)
ex = check_exit(maze)
if ex[0]:
y = ex[1]
x = ex[2]
print([y, x, maze[y][x]])
while True:
coord = (y, x)
if coord in path:
y, x = path[coord]
print([y, x, maze[y][x]])
else:
break
else:
print("NO WAY")
|
9,034 | dd4892c5a0b675d1c97fb91a5ca8115801a2bbca |
import sys
import datetime
training = sys.argv[1] != '0'
def read_file(filename):
    with open(filename) as f:
        f.readline()  # skip the header line
        array = []
for line in f: # read rest of lines
s=line.split()
array2=[s[0]] + [float(x) for x in s[1:]]
array.append(array2)
return array[::-1]
def operar(datos):
new_datos = []
for x in datos:
n=[]
d=datetime.datetime.strptime(x[0], "%d/%m/%y").date()
n.append(d.day)
n.append(d.month)
n.append(d.weekday())
n.append(x[1]-x[2])
n.append(x[4]-x[2])
n.append(x[5]-x[2])
n.append(0 if (x[3]<0) else 1)
new_datos.append(n)
return new_datos
def imprimir(fname, outname, datos, num):
fp = open('datos/'+outname+str(num-1), 'w+')
i=num-1
for x in datos[num-1:]:
for a in datos[i-num+1:i]:
for b in a[3:]:
if(isinstance(b, float)):
fp.write(str(round(b, 3)))
else:
fp.write(str(b))
fp.write(' ')
for c in datos[i][:3]:
if(isinstance(c, float)):
fp.write(str(round(c, 3)))
else:
fp.write(str(c))
fp.write(' ')
if(training):
fp.write(str(datos[i][6]))
fp.write('\n')
i+=1
fp.close()
fname = sys.argv[3]
comb = int(sys.argv[2])
datos = read_file(fname)
print len(datos), 'datos'
datos=operar(datos)
for i in list(range(2, comb+1)):
imprimir(fname, sys.argv[4], datos, i)
|
9,035 | eb8aec947cc1eeeb56b3884286b46ec7468dcc23 | import requests
from app.main.model.location import Location
from app.main.util.db_util import save_changes
key = 'a544aecdde85a1f52a56292f77ecde6e'
def save_location(ip_addr):
try:
existing_location = Location.query.filter_by(ip=ip_addr).first()
if existing_location:
location_data = existing_location.location
else:
location_data = get_location(ip_addr=ip_addr)
location = Location(
ip=ip_addr,
location=location_data
)
save_changes(location)
except Exception as e:
if 'UNIQUE constraint failed: location.ip' not in str(e):
response_object = {
'status': 'fail',
'message': e
}
return response_object, 400
response_object = {
'status': 'success',
'message': 'Successfully saved location.',
'location': location_data
}
return response_object, 200
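# Hedged usage sketch (hypothetical address; needs an app context and DB):
#
#   body, status = save_location('93.184.216.34')
#   # the first call hits ipstack via get_location() and caches the result
#   # in the Location table; later calls for the same IP reuse the row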
def get_location(ip_addr):
r = requests.get('http://api.ipstack.com/{ip}?access_key={key}'.format(ip=ip_addr, key=key))
return r.text |
9,036 | 70de2bed00aabe3805c3a19da004713d4109568a | ##Extras
def permissao():
    # returns True when the logged-in user belongs to the "gerenciador"
    # (manager) or "administrador" (administrator) group
    editor = False
for row in session.auth.user_groups:
grupo = session.auth.user_groups[row]
if (grupo == "gerenciador") or (grupo == "administrador"):
editor = True
return editor |
9,037 | 57ab0421d5234caf7a97ce93908cd07e23f53a0b | import copy
import time
import random
from twisted.python import log, failure
from twisted.internet import defer, error, protocol, reactor
from twisted.protocols import basic, policies
from pn.util import url
from pn.core import stream as stream_mod
try:
from collections import deque
except ImportError:
class deque(list):
def popleft(self):
return self.pop(0)
class BaseClientRequest(object):
"""Base Client Request"""
def __init__(self, cmd, headers, stream=None):
self.cmd = cmd
self.headers = headers
self.stream = stream
if stream is not None:
self.stream = stream_mod.IByteStream(stream)
else:
self.stream = None
def __str__(self):
return '<%s:%s>' % (self.__class__.__name__, self.cmd)
class BaseClientChannelRequest(object):
length = None
finished = False
finishedWriting = False
finishedReading = False
channel = None
stream = None
responseDefer = None
def __init__(self, channel, request):
self.channel = channel
self.request = request
self.responseDefer = defer.Deferred()
def lineReceived(self, line):
raise NotImplementedError, "must be implemented in subclass"
def rawDataReceived(self, data):
"""Handle incoming content."""
datalen = len(data)
if datalen < self.length:
self.handleContentChunk(data)
self.length = self.length - datalen
else:
self.handleContentChunk(data[:self.length])
extraneous = data[self.length:]
self.allContentReceived()
self.channel.setLineMode(extraneous)
def allContentReceived(self):
self.finishedReading = True
if self.stream is not None and not self.stream.closed:
self.stream.finish()
def finishRequest(self):
self.finished = True
self.channel.requestFinished(self)
def submit(self):
self.submitHeaders()
if self.request.stream:
d = stream_mod.StreamProducer(self.request.stream).beginProducing(self)
d.addCallback(self.finishWriting).addErrback(self.abortWithError)
else:
self.finishWriting(None)
def submitHeaders(self):
"""Write request headers"""
r = self.request
self.channel.write("%s %s\r\n" % (r.cmd, url.encode_url_string(r.headers)))
def write(self, data):
if not data:
return
self.channel.write(data)
def finishWriting(self, x=None):
"""We are finished writing data."""
self.finishedWriting = True
def abortWithError(self, err):
if self.stream is not None:
self.stream.finish(err)
if self.responseDefer:
d = self.responseDefer
del self.responseDefer
d.errback(err)
self.finishRequest()
def connectionLost(self, reason):
if not self.finished:
self.abortWithError(reason)
def createResponse(self):
if self.length:
self.stream = stream_mod.ProducerStream()
self.response = self.channel.createResponse(self)
self.stream.registerProducer(self, True)
else:
self.response = self.channel.createResponse(self)
def processResponse(self, result=None):
if result is None:
result = self.response
if self.responseDefer:
d = self.responseDefer
del self.responseDefer
d.callback(result)
def handleContentChunk(self, data):
if self.stream:
self.stream.write(data)
def registerProducer(self, producer, streaming):
"""Register a producer.
"""
self.channel.registerProducer(producer, streaming)
def unregisterProducer(self):
self.channel.unregisterProducer()
# producer interface
def pauseProducing(self):
if not self.finishedReading:
self.channel.pauseProducing()
def resumeProducing(self):
if not self.finishedReading:
self.channel.resumeProducing()
def stopProducing(self):
if not self.finishedReading:
self.channel.stopProducing()
class BaseClientProtocol(basic.LineReceiver, policies.TimeoutMixin, object):
"""Base Client Protocol"""
timeOut = 60
chanRequest = None
ChannelRequest = BaseClientChannelRequest
pool = None
def __init__(self):
self._requests = deque()
def submitRequest(self, request, *args, **kwargs):
req = self.ChannelRequest(self, request, *args, **kwargs)
if self.chanRequest is not None:
self._requests.append(req)
else:
self.chanRequest = req
req.submit()
return req.responseDefer
def write(self, data):
self.setTimeout(self.timeOut)
self.transport.write(data)
def writeSequence(self, sequence):
self.setTimeout(self.timeOut)
self.transport.writeSequence(sequence)
def lineReceived(self, line):
if not self.chanRequest:
# server sending random unrequested data.
self.transport.loseConnection()
return
self.setTimeout(None)
try:
self.chanRequest.lineReceived(line)
self.setTimeout(self.timeOut)
except Exception, err:
self.chanRequest.abortWithError(failure.Failure(err))
def rawDataReceived(self, data):
"""Handle incoming content."""
if not self.chanRequest:
# server sending random unrequested data.
self.transport.loseConnection()
return
self.setTimeout(None)
try:
self.chanRequest.rawDataReceived(data)
self.setTimeout(self.timeOut)
except Exception, err:
self.chanRequest.abortWithError(failure.Failure(err))
def createResponse(self, chanRequest):
raise NotImplementedError, "must be implemented in subclass"
def requestFinished(self, request):
"""Request done."""
if self.chanRequest is not None:
del self.chanRequest
self.setTimeout(None)
if self._requests:
self.chanRequest = self._requests.popleft()
self.chanRequest.submit()
return
if self.pool and not self.transport.disconnecting:
self.pool.freeProtocol(self)
def connectionLost(self, reason):
self.setTimeout(None)
# Tell all requests to abort.
if self.chanRequest is not None:
req = self.chanRequest
del self.chanRequest
req.connectionLost(reason)
while self._requests:
self._requests.popleft().connectionLost(reason)
if self.pool:
self.pool.protocolConnectionLost(self, reason)
def loseConnection(self):
self.transport.loseConnection()
def makeConnection(self, transport):
basic.LineReceiver.makeConnection(self, transport)
if self.pool:
self.pool.protocolCreated(self)
def registerProducer(self, producer, streaming):
"""Register a producer."""
self.transport.registerProducer(producer, streaming)
def unregisterProducer(self):
self.transport.unregisterProducer()
class ClientProtocolPool(object):
def __init__(self, addr, factory, maxConn=50, maxIdleTime=600):
self.addr = addr
self.factory = factory
self.maxConn = maxConn
self.maxIdleTime = maxIdleTime
self._busy = []
self._idle = []
self._size = 0
self.dead = False
self.deferredRequests = deque()
def protocolCreated(self, protocol):
if self.dead:
self.dead = False
self._size += 1
self.touch(protocol)
if self.deferredRequests: # if there's deferred requests, return this protocol
self._busy.append(protocol)
self.deferredRequests.popleft().callback(protocol)
else:
self._idle.append(protocol)
protocol.busy = False
def deferRequest(self):
d = defer.Deferred()
self.deferredRequests.append(d)
return d
def markDead(self, reason):
log.msg('Host[%s:%s] is dead' % self.addr)
if self.dead:
return
self.dead = True
while self.deferredRequests:
self.deferredRequests.popleft().errback(reason)
self._busy = []
self._idle = []
self._size = 0
def create(self):
self.factory.createProtocol(self.addr)
return self.deferRequest()
def get(self, wait=True):
try:
p = self._idle.pop(0)
self._busy.append(p)
self.touch(p)
return p
except IndexError:
if not wait:
return None
if self._size < self.maxConn:
return self.create()
elif self._busy:
# wait busy conn to be idle
return self.deferRequest()
return None # should not happen if maxConn > 0
def touch(self, p):
p.last_access = int(time.time())
p.busy = True
def free(self, protocol):
assert protocol.addr == self.addr
if self.deferredRequests: # if there's deferred requests, return this protocol
self.touch(protocol)
self.deferredRequests.popleft().callback(protocol)
return
try:
self._busy.remove(protocol)
except:
log.err()
self._idle.append(protocol)
protocol.busy = False
def remove(self, protocol):
assert protocol.addr == self.addr
if protocol.busy:
ls = (self._busy, self._idle)
else:
ls = (self._idle, self._busy)
try:
ls[0].remove(protocol)
self._size -= 1
except:
try:
ls[1].remove(protocol)
self._size -= 1
except: # already removed
pass
def maintain(self):
expire = int(time.time()) - self.maxIdleTime
idles = copy.copy(self._idle)
for p in idles:
if not p.connected:
log.msg('removing disconnected protocol %s from idle pool' % str(p))
self.remove(p)
elif p.last_access < expire:
log.msg('removing expired protocol %s' % str(p))
p.loseConnection()
self.remove(p)
busies = copy.copy(self._busy)
for p in busies:
if not p.connected:
log.msg('removing disconnected protocol %s from busy pool' % str(p))
self.remove(p)
class PooledClientFactory(protocol.ClientFactory):
protocol = BaseClientProtocol
def __init__(self, pool):
self.pool = pool
def buildProtocol(self, addr):
p = protocol.ClientFactory.buildProtocol(self, addr)
p.addr = (addr.host, addr.port)
p.pool = self.pool
return p
def clientConnectionLost(self, connector, reason):
addr = (connector.host, connector.port)
self.pool.connectionLost(addr, reason)
def clientConnectionFailed(self, connector, reason):
addr = (connector.host, connector.port)
self.pool.connectionFailed(addr, reason)
class BaseClient(object):
FactoryClass = PooledClientFactory
def __init__(self, hosts=None, connector=None, connTimeout=30, maintTime=300, deadRetryTime=5, retry=0, **kwargs):
self.factory = self.FactoryClass(self)
self.connector = connector or reactor
self.connTimeout = connTimeout
self.maintTime = maintTime
self.deadRetryTime = deadRetryTime
self.retry = retry
self.hosts = []
self.hostsPool = {}
self.hostsDead = {}
if hosts is not None:
for host in hosts:
ip, port = host.split(":")
port = int(port)
self.addHost((ip, port))
self.maintID = reactor.callLater(self.maintTime, self._selfMaintain)
def addHost(self, addr):
pool = self.getPool(addr)
self.hosts.append(pool)
def protocolCreated(self, protocol):
addr = protocol.addr
pool = self.getPool(addr)
pool.protocolCreated(protocol)
        if addr in self.hostsDead:
            # dict has no remove(); drop the dead-host timestamp instead
            del self.hostsDead[addr]
def getPool(self, addr):
if self.hostsPool.has_key(addr):
return self.hostsPool[addr]
pool = self.hostsPool[addr] = ClientProtocolPool(addr, self)
return pool
def protocolConnectionLost(self, protocol, reason):
addr = protocol.addr
pool = self.getPool(addr)
pool.remove(protocol)
def connectionLost(self, addr, reason):
self._maybeDead(addr, reason)
def connectionFailed(self, addr, reason):
self._maybeDead(addr, reason)
def _maybeDead(self, addr, reason):
if reason.check(error.ConnectionDone, error.ConnectionLost):
return
pool = self.getPool(addr)
if pool.dead:
return
#if reason.check(ConnectionRefusedErrr,...):
pool.markDead(reason)
def createProtocol(self, addr):
self.connector.connectTCP(addr[0], addr[1], self.factory, self.connTimeout)
def freeProtocol(self, protocol):
pool = self.getPool(protocol.addr)
pool.free(protocol)
def getProtocol(self, addr=None):
if addr is not None:
now = time.time()
# try dead hosts every 5 seconds
# if host is down and last down time is
# less than 5 seconds, ignore
if addr in self.hostsDead and self.hostsDead[addr] > now - self.deadRetryTime:
return None
return self.getPool(addr).get()
else:
p = self._getRandomProtocol(wait=False)
if p is not None:
return p
# no idle protocol found
return self._getRandomProtocol(wait=True)
def _getRandomProtocol(self, wait=True):
size = len(self.hostsPool)
if size == 0:
return None
if size > 15:
tries = 15
else:
tries = size
pools = self.hostsPool.values()
idx = random.randint(1, size)
for t in xrange(tries):
pool = pools[idx % size]
idx += 1
p = pool.get(wait)
if p is not None:
return p
return None
def _selfMaintain(self):
self.maintID = None
pools = self.hostsPool.values()
for pool in pools:
pool.maintain()
self.maintID = reactor.callLater(self.maintTime, self._selfMaintain)
def doRequest(self, request):
if hasattr(request, 'addr'):
d = self.getProtocol(getattr(request, 'addr'))
else:
d = self.getProtocol()
if d is None:
raise error.ConnectError, "Can not connect to host"
if self.retry and not hasattr(request, 'retry'):
setattr(request, 'retry', self.retry)
if isinstance(d, defer.Deferred):
d.addCallback(self._doRequest, request)
d.addErrback(self._errConn, request)
return d
else:
return self._doRequest(d, request)
def _doRequest(self, protocol, request):
return protocol.submitRequest(request)
def _errConn(self, fail, request):
log.err(fail)
fail.trap(error.ConnectError) # I only retry when ConnectError happened
if self.retry and hasattr(request, 'retry'):
if request.retry:
request.retry -= 1
return self.doRequest(request)
fail.raiseException()
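# Hedged sketch of a concrete client (names are illustrative, not part of this
# module): a subclass supplies the parsing hooks left abstract above.
#
#   class EchoChannelRequest(BaseClientChannelRequest):
#       def lineReceived(self, line):
#           self.length = 0             # no body expected
#           self.createResponse()
#           self.processResponse(line)  # hand the line back as the result
#           self.finishRequest()
#
#   class EchoProtocol(BaseClientProtocol):
#       ChannelRequest = EchoChannelRequest
#       def createResponse(self, chanRequest):
#           return chanRequest.stream   # body stream, if any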
|
9,038 | b0f0bcfb5739d46de54cbe46614e82bf5a2d13fb | """
* author - kajol
* date - 12/24/2020
* time - 1:24 PM
* package - com.bridgelabz.basicprograms
* Title - Print a table of the powers of 2 that are less than or equal to 2^N
"""
try:
number = int(input("Enter number: "))
#print power of 2 within given range
if number < 31:
for num in range(1, number+1):
print("2 ^", num, "=", 2**num)
else:
print("Enter number in valid range")
except Exception as e:
    print("Exception occurred:", e)
|
9,039 | d3b55863c6e3a1b6cbdcec37db81ee42b769938d | from setuptools import setup
import sys
if sys.version_info < (3, 6, 0):
msg = 'Unsupported version %s' % sys.version
raise Exception(msg)
def get_version(filename):
import ast
version = None
with open(filename) as f:
for line in f:
if line.startswith('__version__'):
version = ast.parse(line).body[0].value.s
break
else:
raise ValueError('No version found in %r.' % filename)
if version is None:
raise ValueError(filename)
return version
version = get_version(filename='src/zuper_nodes/__init__.py')
line = 'z5'
setup(
name=f'zuper-nodes-{line}',
version=version,
keywords='',
package_dir={'': 'src'},
packages=[
'zuper_nodes',
'zuper_nodes_tests',
'zuper_nodes_wrapper',
'zuper_nodes_wrapper_tests',
],
install_requires=[
'compmake',
'pyparsing',
'PyContracts',
'networkx<=2.2',
'termcolor',
'zuper-ipce-z5',
'cbor2',
'base58',
],
entry_points={
'console_scripts': [
'zuper-node-identify=zuper_nodes_wrapper.identify:identify_main',
],
},
)
|
9,040 | c2ba60a321eff63f6321831093d7254f6939549b | #encoding:utf-8
x="There are %d types of peopel."%10
#定义字符串变量x,将10以%d方式输出
binary="binary"
do_not="don't"
#定义字符串变量binary和do_not
y="Those who know %s and those who %s."%(binary,do_not)
#使用binary和do_not定义字符串变量y
print x
print y
#打印以上两个变量
print "I said:%r"%x
print "I also said:%r."%y
#用%r的格式输出以上两个变量
hilarious=False
joke_evaluation="Isn't that joke funny?!%r"
#定义两个变量hilarious和joke_evaluation
print joke_evaluation%hilarious
#把变量joke_evaluation中的格式化字符用hilarious打印出
w="This is the left side of ..."
a="a string with the right side."
#定义字符串变量w和a
print w+a
#使用加号连接w和a联合输出
#因为+作为操作符,可以将两个字符串变量连接后输出 |
9,041 | 8675deb69eae04a722073432eaf69ce3d24a11ad |
# coding: utf-8
from mrcnn import utils
import numpy as np
import os
import skimage
class SlicesDataset(utils.Dataset):
""" Extension of maskrcnn dataset class to be used with our provided data. """
def load_slices(self, dataset_dir, n_images, n_patches, channels = ["base"]):
"""Load a subset of the Slices dataset.
dataset_dir: Root directory of the dataset.
n_images: number of images to load. Will load in os.listdir list order.
n_patches: number of patches to load per image.
channels: list of strings indicating channels to be stacked in the image.
currently "base", "mf", "edges" and "none" can be arbitrarily stacked.
"""
# add classes to be trained on
self.add_class("slices", 1, "tissue")
self.add_class("slices", 2, "mag")
# collect image list and initialize counter
image_list = os.listdir(dataset_dir)
image_counter = 0
patch_counter = 0
# cycle over images and save patches to database.
for i in range(n_images):
image_path = os.path.join(dataset_dir,image_list[i])
patch_list = os.listdir(image_path)
print(f"processing: image {i}")
for j in range(n_patches):
patch_path = os.path.join(image_path, patch_list[j])
patch_image_path = os.path.join(patch_path,"images")
file_list = os.listdir(patch_image_path)
image_file_path = os.path.join(patch_image_path,file_list[0])
image = skimage.io.imread(image_file_path)
height, width = image.shape
self.add_image(
"slices",
image_id = patch_counter,
path = patch_path,
width = width, height = height,
channels = channels,
)
patch_counter += 1
def load_image(self, image_id):
"""Returns an image with a given id."""
# load image infos
info = self.image_info[image_id]
patch_path = info['path']
width = info['width']
height = info['height']
impath = os.path.join(patch_path,"images")
file_list = os.listdir(impath)
channels = info['channels']
image = []
# stack channels to be loaded.
for channel in channels:
if channel == "none":
channel_image = skimage.img_as_ubyte(np.zeros( (height,width) ) )
else:
channel_image_name = [x for x in file_list if channel in x][0]
channel_image_path = os.path.join(impath, channel_image_name)
channel_image = skimage.io.imread(channel_image_path)
channel_image = skimage.img_as_ubyte(channel_image)
image.append(channel_image)
image = np.stack(image, axis=2)
return image
def load_mask(self, image_id):
"""Loads masks from dataset.
"""
# load image infos
info = self.image_info[image_id]
patch_path = info['path']
height = info['height']
width = info['width']
mag_path = os.path.join(patch_path,"mag")
tissue_path = os.path.join(patch_path,"tissue")
# collect mask names
mag_mask_list = os.listdir(mag_path)
tissue_mask_list = os.listdir(tissue_path)
classes = []
masks = []
# append masks and ids in list
if mag_mask_list:
for filename in mag_mask_list:
a = os.path.join(mag_path,filename)
masks.append(skimage.io.imread(a).astype(bool))
classes.append(2)
if tissue_mask_list:
for filename in tissue_mask_list:
a = os.path.join(tissue_path,filename)
masks.append(skimage.io.imread(a).astype(bool))
classes.append(1)
return np.stack(masks,axis=2), np.asarray(classes).astype(int)
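# Hedged usage sketch (directory layout as described in load_slices; the path
# below is hypothetical):
#
#   dataset = SlicesDataset()
#   dataset.load_slices("data/slices", n_images=2, n_patches=4,
#                       channels=["base", "mf"])
#   dataset.prepare()                       # standard mrcnn bookkeeping step
#   image = dataset.load_image(0)           # (H, W, 2) uint8 channel stack
#   masks, class_ids = dataset.load_mask(0)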
|
9,042 | 39312ec60c9ef1c9c95cf4206b6d0bbdb0aedf94 | from rest_framework import serializers
from .models import SensorValue
class SensorValueSerializer(serializers.ModelSerializer):
timestamp = serializers.DateTimeField(required=False)
class Meta:
model = SensorValue
fields = ("id", "timestamp", "sensor_type", "value")
|
9,043 | 93baa6ba14d06661731dce3e34ea93d49c06001b | my_func = lambda x, y: x**y
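# Hedged usage demo: my_func(x, y) raises x to the power y.
if __name__ == "__main__":
    print(my_func(2, 3))    # 8
    print(my_func(9, 0.5))  # 3.0 (square root)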
|
9,044 | f73cbc25152a63bb6552e2cd8272c67a1f4277ba | def main():
a, b = map(int, input().split())
    diff = abs(a - b)
if diff % 2 != 0:
print("IMPOSSIBLE")
else:
bigger = max(a, b)
ans = bigger - (diff//2)
print(ans)
if __name__ == "__main__":
main()
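# Worked example (illustrative): a=2, b=8 -> diff=6 is even, so the answer is
# max(2, 8) - 6 // 2 = 5, the midpoint both values reach in equally many unit
# steps; a=2, b=5 -> diff=3 is odd, so IMPOSSIBLE is printed.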
|
9,045 | 1dec7a997b0bef3226fb17e4039b053c7a2e457e | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-03 19:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mybus', '0007_auto_20160104_0053'),
]
operations = [
migrations.RemoveField(
model_name='businfo',
name='description',
),
migrations.AlterField(
model_name='businfo',
name='title',
field=models.CharField(max_length=255, verbose_name=b'Bus Info'),
),
migrations.AlterField(
model_name='businfo',
name='url',
field=models.CharField(max_length=255, verbose_name=b'Bus No'),
),
]
|
9,046 | fd6a32652b845b2a6d6d8934c0dde91afdddd9f3 | from django import urls
from django.urls import path
from genius.views import (home, Class_create, Class_Update, Class_Delete, Class_Detail, Classes, Add_name,
Student_Main, Student_Create, Student_Update, Student_Delete, Student_Detail, Search)
app_name = 'genius'
urlpatterns = [
path('', home, name='home'),
path('class/', Classes, name='class'),
path('class/add-name', Add_name, name='add-name'),
path('class/create', Class_create, name='create-class'),
path('class/<int:id>', Class_Detail, name='detail'),
path('class/<int:id>/edit/', Class_Update, name='update'),
path('class/<int:id>/delete/', Class_Delete, name='delete'),
path('stds/', Student_Main, name='stds'),
path('stds/create', Student_Create, name='stds-new'),
path('stds/<int:id>',Student_Detail , name='std-detail'),
path('stds/search/',Search , name='std-search'),
path('stds/<int:id>/edit/', Student_Update, name='std-update'),
path('stds/<int:id>/delete/', Student_Delete, name='std-delete'),
]
|
9,047 | 8c17f2c770c24bbf8c73628c6740c0b866e6b1c0 | from liver_tumor_segmentation.CGBS_Net import *
from liver_tumor_segmentation.loss import *
from keras.optimizers import *
from liver_tumor_segmentation.CGBS_data_generator import *
from keras.callbacks import *
import os
from keras.callbacks import ReduceLROnPlateau
from keras import losses
from configuration import *
def get_lr_metric(optimizer):
    # expose the optimizer's current learning rate as a Keras metric so it
    # shows up in the training logs alongside the loss
    def lr(y_true, y_pred):
        return optimizer.lr
    return lr
def train():
batch_size = 4 #4 for single GPU; 8 for two GPUs
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
trainGene = trainGenerator(batch_size, data_path='/data',
folder='train', aug_dict=aug_args, seed = 1, interaction='RECIST')
devGene = trainGenerator(batch_size, data_path='/data',
folder='dev', aug_dict=no_aug_args, seed = 1, interaction='RECIST')
testGene = testGenerator(test_path='test_path', interaction='RECIST')
model = CGBS_Net(input_shape=(256, 256, 4),rate=3)
model.summary()
# GPU_COUNT = 2
# model = multi_gpu_model(original_model, GPU_COUNT)
opt=SGD(lr=4e-4, decay=1e-6, momentum=0.9, nesterov=True)
lr_metric = get_lr_metric(opt)
model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss, 'out_shape': losses.binary_crossentropy},
loss_weights={'out_seg': 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])
csv_logger = CSVLogger('./Models/'+'CGBS_Net.csv', append=True) # ss-0.01
# tensorboard = TensorBoard(log_dir='./tmp/graph', write_graph=True, write_images=True)
# earlystopping = EarlyStopping(monitor='val_loss', patience=0, verbose=0, mode='auto')
model_checkpoint = ModelCheckpoint(
'./Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5',
monitor='val_out_seg_loss',
verbose=0, save_best_only=True, save_weights_only=True, mode='auto', period=1)
reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1, patience=50, mode='auto')
model.fit_generator(generator=trainGene, steps_per_epoch=int(5000/batch_size),
epochs=500, validation_data=devGene,
validation_steps=int(5000/batch_size), verbose=2,
callbacks=[model_checkpoint, csv_logger, reduce_lr])
train()
|
9,048 | 8a2cf1d550a593beae579104413b424e007d511f | '''
"MAIN" module
All operations are added to the defaultgraph.
Network functions are found in module network_functions_2
Display the graph in TensorBoard by opening a new terminal and running
"tensorboard --logdir=tensorboard/debug/01/", where the last number depends on
which directory the current graph was saved in (see where the FileWriter is
created below in this module). Then open the local web page shown in the
terminal (it looks something like http://OSCAR-LENOVO-LAPTOP:6006), but with
your own hostname.
'''
import network_functions_2_elin as nf
import tensorflow as tf
import numpy as np
import read_data as rd
with tf.name_scope("input_data"):
# import images
(iterate_data, sub_images, sub_depths, sub_images_placeholder, sub_depths_placeholder) = rd.read_debug_data()
    sub_images_coarse = tf.constant(value = sub_images[:, 0:223, 0:303, :], dtype = tf.float32, name = "images_coarse")
    sub_images_fine = tf.constant(value = sub_images[:, 0:227, 0:303, :], dtype = tf.float32, name = "images_fine")
    depthmaps_groundtruth = tf.constant(value = np.moveaxis(sub_depths[:, 0:55, 0:74, :], -1, 0), dtype = tf.float32, name = "depthmaps_groundtruth")
# print sample images to tensorboard
tf.summary.image(name = "images_coarse", tensor = sub_images_coarse, max_outputs = 1)
tf.summary.image(name = "images_fine", tensor = sub_images_fine, max_outputs = 1)
# define coarse and fine networks
coarse_depthmap_predictions = nf.get_coarse_network(input_placeholder = sub_images_coarse)
fine_depthmap_predictions = nf.get_fine_network(input_placeholder = sub_images_fine, coarse_prediction = coarse_depthmap_predictions)
# Session: tensorflow calculates all values using the input
with tf.Session() as sess:
# tensorboard writer CHANGE THE DIR NUMBER EVERY RUN (27 -> 28 -> 29 etc.)
# tensorboard/* in .gitignore
writer = tf.summary.FileWriter("./tensorboard/debug/07", sess.graph)
sess.run(tf.global_variables_initializer())
sess.run(fine_depthmap_predictions)
# compute cost function
fine_cost = nf.get_cost_function(depthmaps_predicted = fine_depthmap_predictions,
depthmaps_groundtruth = depthmaps_groundtruth)
# calculate and run optimizer
optimizer_fine = nf.get_fine_optimizer(fine_cost)
sess.run(tf.global_variables_initializer())
sess.run(optimizer_fine)
# this code makes sure that all info gets written to tensorboard
merged_summary = sess.run(tf.summary.merge_all())
writer.add_summary(merged_summary)
writer.close()
|
9,049 | 8b49aa63cc6e4490b7b22cd304dbba132962c870 | from abc import abstractmethod
from suzieq.shared.sq_plugin import SqPlugin
class InventoryAsyncPlugin(SqPlugin):
"""Plugins which inherit this class will have methods 'run'
Once the controller check that the object inherit this class, it launches
a new task executing the run method.
"""
async def run(self):
"""Background task to launch in order to execute the plugin"""
try:
await self._execute()
finally:
await self._stop()
@abstractmethod
async def _execute(self):
"""Launch the backuground task
"""
async def _stop(self):
"""Actions to execute before terminating the task
"""
return
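# Hedged sketch of a concrete plugin (illustrative only, so it is kept as a
# comment rather than defined at import time):
#
#   class MyPoller(InventoryAsyncPlugin):
#       async def _execute(self):
#           ...  # poll the inventory source until the task is cancelled
#       async def _stop(self):
#           ...  # close sessions/sockets before the task terminates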
|
9,050 | 3b1b3cab1fa197f75812ca5b1f044909914212c0 | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import seaborn as sns
# In[2]:
df = pd.read_csv("ipl_matches.csv")
df.head()
# In[3]:
## -----data cleaning------
## remove unwanted columns
columns_to_remove = ['mid','batsman','bowler','striker','non-striker']
df.drop(labels=columns_to_remove,axis=1,inplace=True)
# In[4]:
df.head()
# In[5]:
df['bat_team'].unique()
# In[6]:
### keeping only consistent teams
consistent_team = ['Kolkata Knight Riders','Chennai Super Kings','Rajasthan Royals', 'Mumbai Indians',
                   'Kings XI Punjab', 'Royal Challengers Bangalore','Delhi Daredevils','Sunrisers Hyderabad']
# In[7]:
df = df[(df['bat_team'].isin(consistent_team)) & (df['bowl_team'].isin(consistent_team))]
# In[8]:
df.head()
# In[9]:
df = df[df['overs']>=5.0]
# In[10]:
df.head()
# In[11]:
### converting the 'date' column from string to datetime object
from datetime import datetime
df['date'] = df['date'].apply(lambda x: datetime.strptime(x, '%d-%m-%Y'))
# In[12]:
df.head()
# In[13]:
print(df['bat_team'].unique())
print(df['bowl_team'].unique())
# In[14]:
###-------data processing-------
### converting the categorical features using one-hot encoding
encoded_df = pd.get_dummies(data=df,columns=['venue','bat_team','bowl_team'])
encoded_df.head()
# In[15]:
encoded_df.columns
# In[16]:
### rearranging the columns
encoded_df = encoded_df[['date','runs', 'wickets', 'overs', 'runs_last_5', 'wickets_last_5',
'venue_Barabati Stadium', 'venue_Brabourne Stadium',
'venue_Buffalo Park', 'venue_De Beers Diamond Oval',
'venue_Dr DY Patil Sports Academy',
'venue_Dr. Y.S. Rajasekhara Reddy ACA-VDCA Cricket Stadium',
'venue_Dubai International Cricket Stadium', 'venue_Eden Gardens',
'venue_Feroz Shah Kotla',
'venue_Himachal Pradesh Cricket Association Stadium',
'venue_Holkar Cricket Stadium',
'venue_JSCA International Stadium Complex', 'venue_Kingsmead',
'venue_M Chinnaswamy Stadium', 'venue_MA Chidambaram Stadium, Chepauk',
'venue_Maharashtra Cricket Association Stadium',
'venue_New Wanderers Stadium', 'venue_Newlands',
'venue_OUTsurance Oval',
'venue_Punjab Cricket Association IS Bindra Stadium, Mohali',
'venue_Punjab Cricket Association Stadium, Mohali',
'venue_Rajiv Gandhi International Stadium, Uppal',
'venue_Sardar Patel Stadium, Motera', 'venue_Sawai Mansingh Stadium',
'venue_Shaheed Veer Narayan Singh International Stadium',
'venue_Sharjah Cricket Stadium', 'venue_Sheikh Zayed Stadium',
"venue_St George's Park", 'venue_Subrata Roy Sahara Stadium',
'venue_SuperSport Park', 'venue_Wankhede Stadium',
'bat_team_Chennai Super Kings', 'bat_team_Delhi Daredevils',
'bat_team_Kings XI Punjab', 'bat_team_Kolkata Knight Riders',
'bat_team_Mumbai Indians', 'bat_team_Rajasthan Royals',
'bat_team_Royal Challengers Bangalore', 'bat_team_Sunrisers Hyderabad',
'bowl_team_Chennai Super Kings', 'bowl_team_Delhi Daredevils',
'bowl_team_Kings XI Punjab', 'bowl_team_Kolkata Knight Riders',
'bowl_team_Mumbai Indians', 'bowl_team_Rajasthan Royals',
'bowl_team_Royal Challengers Bangalore',
'bowl_team_Sunrisers Hyderabad', 'total']]
# In[17]:
encoded_df.head()
# In[18]:
### Splitting the data into train and test dataset
x_train = encoded_df.drop(labels=['total'],axis=1)[encoded_df['date'].dt.year <=2016]
x_test = encoded_df.drop(labels=['total'],axis=1)[encoded_df['date'].dt.year >=2017]
# In[19]:
y_train = encoded_df[encoded_df['date'].dt.year <=2016]['total'].values
y_test = encoded_df[encoded_df['date'].dt.year >=2017]['total'].values
# In[20]:
### removing the 'date' column
x_train.drop(labels='date',axis=1,inplace=True)
x_test.drop(labels='date',axis=1,inplace=True)
# In[25]:
### -----Model Building-----
### Linear Regression
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(x_train,y_train)
# In[26]:
### creating a pickle file for the regression model
import pickle
filename = 'model.pkl'
pickle.dump(regressor, open(filename, 'wb'))
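# In[27]:
### Hedged check: reload the pickled regressor and score it on the test set
loaded_model = pickle.load(open(filename, 'rb'))
print(loaded_model.score(x_test, y_test))  # R^2 on the 2017+ hold-out split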
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
|
9,051 | 80a397b0974e41c4669f07638b5b38830b58cb37 | import pytest
import mock
from awx.main.models import (
UnifiedJob,
WorkflowJob,
WorkflowJobNode,
Job
)
def test_unified_job_workflow_attributes():
with mock.patch('django.db.ConnectionRouter.db_for_write'):
job = UnifiedJob(id=1, name="job-1", launch_type="workflow")
job.unified_job_node = WorkflowJobNode(workflow_job=WorkflowJob(pk=1))
assert job.spawned_by_workflow is True
assert job.workflow_job_id == 1
@pytest.fixture
def unified_job(mocker):
mocker.patch.object(UnifiedJob, 'can_cancel', return_value=True)
j = UnifiedJob()
j.status = 'pending'
j.cancel_flag = None
j.save = mocker.MagicMock()
j.websocket_emit_status = mocker.MagicMock()
return j
def test_cancel(unified_job):
unified_job.cancel()
assert unified_job.cancel_flag is True
assert unified_job.status == 'canceled'
assert unified_job.job_explanation == ''
# Note: the websocket emit status check is just reflecting the state of the current code.
# Some more thought may want to go into only emitting canceled if/when the job record
# status is changed to canceled. Unlike, currently, where it's emitted unconditionally.
unified_job.websocket_emit_status.assert_called_with("canceled")
unified_job.save.assert_called_with(update_fields=['cancel_flag', 'status'])
def test_cancel_job_explanation(unified_job):
job_explanation = 'giggity giggity'
unified_job.cancel(job_explanation=job_explanation)
assert unified_job.job_explanation == job_explanation
unified_job.save.assert_called_with(update_fields=['cancel_flag', 'status', 'job_explanation'])
def test_log_representation():
'''
Common representation used inside of log messages
'''
uj = UnifiedJob(status='running', id=4)
job = Job(status='running', id=4)
assert job.log_format == 'job 4 (running)'
assert uj.log_format == 'unified_job 4 (running)'
|
9,052 | c2c51dcd05c21e91e591de25fc2de034c88c48a1 | #!/usr/bin/env python
from django import template
from django.conf import settings
from django.utils.html import format_html
register = template.Library()
@register.simple_tag
def website_title():
return settings.WEBSITE_TITLE
def split_page(result_obj):
"""
分页模块,后台传入一个分页结果集就可以
:param result_obj:
:return:
"""
return_str = "<nav>"
return_str += "<ul class='pagination pull-right'>"
if result_obj.has_previous():
return_str += "<li>"
return_str += "<a href='?page=" + str(result_obj.previous_page_number()) + "' aria-label='Previous'>"
return_str += "<span aria-hidden='true'>«</span>"
return_str += "</a></li>"
for i in result_obj.paginator.page_range:
# print(i,result_obj.paginator.page_range,result_obj.number)
hide_page_num = abs(result_obj.number - i)
        if hide_page_num <= 3:  # show 3 page links on each side of the current page
return_str += "<li "
if i == result_obj.number:
return_str += "class='active'><a href='?page=" + str(i) + "'>" + str(i) + "</a></li>"
else:
return_str += "><a href='?page=" + str(i) + "'>" + str(i) + "</a></li>"
if result_obj.has_next():
return_str += "<li><a href='?page=" + str(result_obj.next_page_number()) + "' aria-label='Next'>"
return_str += "<span aria-hidden='true'>»</span></a></li></ul></nav>"
#return format_html(return_str)
return return_str
@register.simple_tag
def test(string):
return string
|
9,053 | 70325d0e5eb9dcd7a065f83eaf14647bc30bd7f3 |
#----------- writing our for loop
""" number = [1,2,3,4,5]
friends = ['ahmet', 'mehmet','ayşe']
# for n in number:
# print(n)
# for n in friends:
# print(n)
def my_for_loop(my_iterable):
my_iterator = iter(my_iterable)
while True:
try:
print(next(my_iterator))
except StopIteration:
break
my_for_loop(number)
my_for_loop(friends) """
#--------------to show the third power of numbers in a given range with an iterator class
""" class CubeNumbers:
def __init__(self, start, end):
self.start = start
self.end = end
def __iter__(self):
return self
def __next__(self):
if self.start <= self.end:
result = self.start ** 3
self.start += 1
return result
else:
raise StopIteration
cubed = CubeNumbers(0, 5)
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed)) """
#--------to show the third power of numbers in a given range with a generator
""" cubed = (x**3 for x in range(0, 5))
print(type(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed)) """
#---------------fibonacci numbers with generator function
""" def fibo(limit):
x = 0
y = 1
while x < limit:
yield x
x, y = y, x + y
my_fib = fibo(1000)
for fib in my_fib:
print(fib) """
#-------------to show index and value together
""" friends = ['john', 'walter', 'henry']
# i = 0
# while i < len(friends):
# v = friends[i]
# print(i, v)
# i += 1
# for n in range(len(friends)):
# v = friends[n]
# print(n, v)
for i, v in enumerate(friends):
print(i, v) """ |
9,054 | 93eafb5b23bac513fc5dcc177a4e8a080b2a49b4 | #-*-coding:utf-8 -*-
import subprocess
def get_audio(text):
stat = subprocess.call(['./tts', text])
if stat == 0:
return "Success"
else:
print "Failed"
if __name__ == '__main__':
text = "我是聊天机器人"
get_audio(text) |
9,055 | b28bada020ac593783ac62994bb45311ebb78813 | """
Test cases
"""
import unittest
import jsonpath
import requests
from apiunittest.lib.loadIni import LoadIni
from apiunittest.keyword.keyword import Keyword
from apiunittest.lib.log import logger
from ddt import ddt, file_data
@ddt
class ApiTest(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.keyword = Keyword()
cls.cookie = None
cls.confData = LoadIni('config.ini')
        logger.info('---------- test run started ----------')
    # log in
@file_data('../data/data.yaml')
def test_1_login(self, username, password):
s = requests.Session()
loginUrl = self.confData.getConfig('urlConfig', 'login')
data = {
'uname': username,
'upass': password,
'encode': 1
}
res = s.post(url=loginUrl, data=data)
logger.info(res.text)
cookie = dict(res.cookies)
sess = jsonpath.jsonpath(cookie, '$..{0}'.format('PHPSESSION'))
phpSession = 'PHP_SESSION=' + sess[0]
ApiTest.cookie = phpSession
        logger.info('test case passed')
if __name__ == '__main__':
unittest.main()
|
9,056 | 2e571e3412bf9f3a42bf87976ea9a5ec68d5815c | import requests
from bs4 import BeautifulSoup
url = 'http://www.dytt8.net/'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
}
# fetch with requests so the User-Agent header is actually sent
resp = requests.get(url, headers=headers)
resp.encoding = 'utf-8'
soup = BeautifulSoup(resp.text, 'html.parser')
# print the href of every anchor on the front page
for i in soup.find_all('a'):
    if 'href' in i.attrs:
        print(i.attrs['href'])
|
9,057 | cf931da4c06e16fe6f6da5eb1826d8b7a59c1f7b | # Copyright 2013 Rackspace Hosting Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from neutron.common import exceptions as n_exc_ext
from neutron_lib import exceptions as n_exc
from quark import exceptions as q_exc
from quark.plugin_modules import ip_policies as ippol
from quark.tests import test_base
from quark.tests import test_quark_plugin
class TestQuarkGetIpPolicies(test_quark_plugin.TestQuarkPlugin):
@contextlib.contextmanager
def _stubs(self, ip_policy):
db_mod = "quark.db.api"
with mock.patch("%s.ip_policy_find" % db_mod) as ip_policy_find:
ip_policy_find.return_value = ip_policy
yield
def test_get_ip_policy_not_found(self):
with self._stubs(None):
with self.assertRaises(q_exc.IPPolicyNotFound):
self.plugin.get_ip_policy(self.context, 1)
def test_get_ip_policy(self):
ip_policy = dict(
id=1,
tenant_id=1,
name="foo",
subnets=[dict(id=1)],
networks=[dict(id=2)],
exclude=[dict(cidr="0.0.0.0/32")])
with self._stubs(ip_policy):
resp = self.plugin.get_ip_policy(self.context, 1)
self.assertEqual(len(resp.keys()), 6)
self.assertEqual(resp["id"], 1)
self.assertEqual(resp["name"], "foo")
self.assertEqual(resp["subnet_ids"], [1])
self.assertEqual(resp["network_ids"], [2])
self.assertEqual(resp["exclude"], ["0.0.0.0/32"])
self.assertEqual(resp["tenant_id"], 1)
def test_get_ip_policies(self):
ip_policy = dict(
id=1,
tenant_id=1,
name="foo",
subnets=[dict(id=1)],
networks=[dict(id=2)],
exclude=[dict(cidr="0.0.0.0/32")])
with self._stubs([ip_policy]):
resp = self.plugin.get_ip_policies(self.context)
self.assertEqual(len(resp), 1)
resp = resp[0]
self.assertEqual(len(resp.keys()), 6)
self.assertEqual(resp["id"], 1)
self.assertEqual(resp["subnet_ids"], [1])
self.assertEqual(resp["network_ids"], [2])
self.assertEqual(resp["exclude"], ["0.0.0.0/32"])
self.assertEqual(resp["name"], "foo")
self.assertEqual(resp["tenant_id"], 1)
class TestQuarkCreateIpPolicies(test_quark_plugin.TestQuarkPlugin):
@contextlib.contextmanager
def _stubs(self, ip_policy, subnets=None, nets=None):
db_mod = "quark.db.api"
with contextlib.nested(
mock.patch("%s.subnet_find" % db_mod),
mock.patch("%s.network_find" % db_mod),
mock.patch("%s.ip_policy_create" % db_mod),
mock.patch("%s.route_find" % db_mod)
) as (subnet_find, net_find, ip_policy_create, route_find):
subnet_find.return_value = subnets if subnets else None
net_find.return_value = nets if nets else None
ip_policy_create.return_value = ip_policy
route_find.return_value = [{"nexthop": "1.2.3.4"}]
yield ip_policy_create
def test_create_ip_policy_invalid_body_missing_exclude(self):
with self._stubs(None):
with self.assertRaises(n_exc.BadRequest):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict()))
def test_create_ip_policy_with_both_network_and_subnet_ids(self):
with self._stubs(None):
with self.assertRaises(n_exc.BadRequest):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(network_ids=[1], subnet_ids=[1])))
def test_create_ip_policy_invalid_body_missing_netsubnet(self):
with self._stubs(None):
with self.assertRaises(n_exc.BadRequest):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(exclude=["1.1.1.1/24"])))
def test_create_ip_policy_invalid_subnet(self):
with self._stubs(None):
with self.assertRaises(n_exc.SubnetNotFound):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[1],
exclude=["1.1.1.1/24"])))
def test_create_ip_policy_invalid_network(self):
with self._stubs(None):
with self.assertRaises(n_exc.NetworkNotFound):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(network_ids=[1],
exclude=["1.1.1.1/24"])))
def test_create_ip_policy_network_ip_policy_already_exists(self):
with self._stubs(None, nets=[dict(id=1, ip_policy=dict(id=2),
subnets=[dict(id=1,
cidr="1.1.1.1/16")])]):
with self.assertRaises(q_exc.IPPolicyAlreadyExists):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(network_ids=[1],
exclude=["1.1.1.1/24"])))
def test_create_ip_policy_subnet_ip_policy_already_exists(self):
with self._stubs(None, subnets=[dict(id=1, ip_policy=dict(id=2),
cidr="1.1.1.1/16")]):
with self.assertRaises(q_exc.IPPolicyAlreadyExists):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[1],
exclude=["1.1.1.1/24"])))
def test_create_ip_policy_network(self):
ipp = dict(subnet_id=None, network_id=1,
exclude=["1.1.1.1/24"])
with self._stubs(ipp, nets=[dict(id=1, ip_policy=dict(id=2),
subnets=[dict(id=1,
cidr="1.1.1.1/16")])]):
with self.assertRaises(q_exc.IPPolicyAlreadyExists):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(network_ids=[ipp["network_id"]],
exclude=ipp["exclude"])))
def test_create_ip_policy_subnet(self):
ipp = dict(subnet_id=1, network_id=None,
exclude=["1.1.1.1/24"])
with self._stubs(ipp, subnets=[dict(id=1, ip_policy=dict(id=2),
cidr="1.1.1.1/16")]):
with self.assertRaises(q_exc.IPPolicyAlreadyExists):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[ipp["subnet_id"]],
exclude=ipp["exclude"])))
def test_create_ip_policy_with_cidr_that_does_not_fit_into_subnet(self):
ipp = dict(
subnets=[dict(id=1, version=4, cidr="192.168.1.1/24")],
networks=[],
id=1,
tenant_id=1,
exclude=["10.10.10.100/32"],
name="foo")
with self._stubs(ipp,
subnets=[dict(id=1, ip_policy=None,
version=ipp["subnets"][0]["version"],
cidr=ipp["subnets"][0]["cidr"])]):
with self.assertRaises(n_exc.BadRequest):
self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[1],
exclude=ipp["exclude"])))
def test_create_ip_policy_with_ipv6_subnet_cidr(self):
ipp = dict(
subnets=[dict(id=1, version=6, cidr='::/64')],
networks=[],
id=1,
tenant_id=1,
exclude=[dict(cidr="::/128")],
name="foo")
with self._stubs(ipp,
subnets=[dict(id=1, ip_policy=None,
version=ipp["subnets"][0]["version"],
cidr=ipp["subnets"][0]["cidr"])]):
exclude = [ippc["cidr"] for ippc in ipp["exclude"]]
resp = self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[1], exclude=exclude)))
self.assertEqual(len(resp.keys()), 6)
self.assertEqual(resp["subnet_ids"], [1])
self.assertEqual(resp["network_ids"], [])
# NOTE(jmeridth): below is mocked that way, so it won't get
# additional default policies in exclude
# ippol.ensure_default_policy is tested below in this file
self.assertEqual(resp["exclude"], ["::/128"])
self.assertEqual(resp["name"], "foo")
self.assertEqual(resp["tenant_id"], 1)
def test_create_ip_policy(self):
ipp = dict(
subnets=[dict(id=1, cidr='0.0.0.0/16')],
networks=[],
id=1,
tenant_id=1,
exclude=[dict(cidr="0.0.0.0/24")],
name="foo")
with self._stubs(ipp, subnets=[dict(
id=1, ip_policy=None, cidr=ipp["subnets"][0]["cidr"])]):
exclude = [ippc["cidr"] for ippc in ipp["exclude"]]
resp = self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[1], exclude=exclude)))
self.assertEqual(len(resp.keys()), 6)
self.assertEqual(resp["subnet_ids"], [1])
self.assertEqual(resp["network_ids"], [])
# NOTE(jmeridth): below is mocked that way, so it won't get
# additional default policies in exclude
# ippol.ensure_default_policy is tested below in this file
self.assertEqual(resp["exclude"], ["0.0.0.0/24"])
self.assertEqual(resp["name"], "foo")
self.assertEqual(resp["tenant_id"], 1)
def test_create_ip_policy_only_called_once_with_multiple_networks(self):
ipp = dict(
subnets=[],
networks=[dict(id=1, subnets=[dict(id=1,
ip_policy=None, cidr='0.0.0.0/24')]),
dict(id=2, subnets=[dict(id=2,
ip_policy=None, cidr='0.0.0.0/24')])],
id=1,
tenant_id=1,
exclude=[dict(cidr="0.0.0.1/32")],
name="foo")
with self._stubs(ipp, nets=ipp["networks"]) as (ip_policy_create):
resp = self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(network_ids=[1, 2], exclude=["0.0.0.1/32"])))
exclude = ['0.0.0.1/32', '0.0.0.0/32', '0.0.0.255/32']
ip_policy_create.assert_called_once_with(
self.context, exclude=exclude,
networks=[{'subnets':
[{'cidr': '0.0.0.0/24', 'ip_policy': None,
'id': 1}], 'id': 1},
{'subnets':
[{'cidr': '0.0.0.0/24', 'ip_policy': None,
'id': 2}], 'id': 2}])
self.assertEqual(len(resp.keys()), 6)
self.assertEqual(resp["subnet_ids"], [])
self.assertEqual(resp["network_ids"], [1, 2])
# NOTE(jmeridth): below is mocked that way, so it won't get
# additional default policies in exclude
# ippol.ensure_default_policy is tested below in this file
self.assertEqual(resp["exclude"], ["0.0.0.1/32"])
self.assertEqual(resp["name"], "foo")
self.assertEqual(resp["tenant_id"], 1)
def test_create_ip_policy_only_called_once_with_multiple_subnets(self):
ipp = dict(
subnets=[dict(id=3, cidr='0.0.0.0/16'),
dict(id=4, cidr='0.0.0.0/16')],
networks=[],
id=1,
tenant_id=1,
exclude=[dict(cidr="0.0.0.1/32")],
name="foo")
with self._stubs(ipp, subnets=ipp["subnets"]) as (ip_policy_create):
resp = self.plugin.create_ip_policy(self.context, dict(
ip_policy=dict(subnet_ids=[3, 4], exclude=["0.0.0.1/32"])))
exclude = ['0.0.0.1/32', '0.0.0.0/32', '0.0.255.255/32']
ip_policy_create.assert_called_once_with(
self.context, exclude=exclude,
subnets=[{'cidr': '0.0.0.0/16', 'id': 3},
{'cidr': '0.0.0.0/16', 'id': 4}])
self.assertEqual(len(resp.keys()), 6)
self.assertEqual(resp["subnet_ids"], [3, 4])
self.assertEqual(resp["network_ids"], [])
# NOTE(jmeridth): below is mocked that way, so it won't get
# additional default policies in exclude
# ippol.ensure_default_policy is tested below in this file
self.assertEqual(resp["exclude"], ["0.0.0.1/32"])
self.assertEqual(resp["name"], "foo")
self.assertEqual(resp["tenant_id"], 1)
class TestQuarkUpdateIpPolicies(test_quark_plugin.TestQuarkPlugin):
@contextlib.contextmanager
def _stubs(self, ip_policy, subnets=None, networks=None):
if not subnets:
subnets = []
if not networks:
networks = []
db_mod = "quark.db.api"
with contextlib.nested(
mock.patch("%s.ip_policy_find" % db_mod),
mock.patch("%s.subnet_find" % db_mod),
mock.patch("%s.network_find" % db_mod),
mock.patch("%s.ip_policy_update" % db_mod),
) as (ip_policy_find, subnet_find, network_find, ip_policy_update):
ip_policy_find.return_value = ip_policy
subnet_find.return_value = subnets
network_find.return_value = networks
yield ip_policy_update
def test_update_ip_policy_not_found(self):
with self._stubs(None):
with self.assertRaises(q_exc.IPPolicyNotFound):
self.plugin.update_ip_policy(self.context, 1,
dict(ip_policy=None))
def test_update_ip_policy_with_both_network_and_subnet_ids(self):
ipp = dict(id=1, subnets=[])
with self._stubs(ipp):
with self.assertRaises(n_exc.BadRequest):
self.plugin.update_ip_policy(self.context, 1, dict(
ip_policy=dict(network_ids=[1], subnet_ids=[1])))
def test_update_ip_policy_subnets_not_found(self):
ipp = dict(id=1, subnets=[])
with self._stubs(ipp):
with self.assertRaises(n_exc.SubnetNotFound):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(subnet_ids=[100])))
def test_update_ip_policy_subnets_already_exists(self):
ipp = dict(id=1, subnets=[dict()])
with self._stubs(
ipp, subnets=[dict(id=1, ip_policy=dict(id=1))]
):
with self.assertRaises(q_exc.IPPolicyAlreadyExists):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(subnet_ids=[100])))
def test_update_ip_policy_subnets(self):
ipp = dict(id=1, subnets=[dict()],
exclude=["0.0.0.0/24"],
name="foo", tenant_id=1)
with self._stubs(
ipp, subnets=[dict(id=1, ip_policy=None)]
) as (ip_policy_update):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(subnet_ids=[100])))
self.assertEqual(ip_policy_update.called, 1)
def test_update_ip_policy_subnets_empty_exclude(self):
ipp = dict(id=1, subnets=[dict()],
exclude=["0.0.0.40/32"],
name="foo", tenant_id=1)
with self._stubs(
ipp, subnets=[dict(id=1, cidr="0.0.0.0/16", ip_policy=None)]
) as (ip_policy_update):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(subnet_ids=[100], exclude=[])))
ip_policy_update.assert_called_once_with(
self.context, ipp, subnet_ids=[100], exclude=[
"0.0.0.0/32", "0.0.255.255/32"])
def test_update_ip_policy_subnets_empty_exclude_without_subnet_ids(self):
ipp = dict(id=1, subnets=[dict(cidr="0.0.0.0/16")],
exclude=["0.0.0.40/32"],
name="foo", tenant_id=1)
with self._stubs(ipp) as (ip_policy_update):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(exclude=[])))
ip_policy_update.assert_called_once_with(
self.context, ipp, exclude=["0.0.0.0/32", "0.0.255.255/32"])
def test_update_ip_policy_networks_not_found(self):
ipp = dict(id=1, networks=[])
with self._stubs(ipp):
with self.assertRaises(n_exc.NetworkNotFound):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(network_ids=[100])))
def test_update_ip_policy_networks(self):
ipp = dict(id=1, networks=[dict()],
exclude=["0.0.0.0/24"],
name="foo", tenant_id=1)
with self._stubs(
ipp, networks=[dict(id=1, ip_policy=None)]
) as (ip_policy_update):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(network_ids=[100])))
self.assertEqual(ip_policy_update.called, 1)
def test_update_ip_policy_exclude_v4(self):
subnets = [dict(id=100, cidr="0.0.0.0/16")]
ipp = dict(id=1, subnets=subnets,
exclude=["0.0.0.0/24"],
name="foo", tenant_id=1)
with self._stubs(ipp, subnets=subnets) as (ip_policy_update):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(subnet_ids=[100], exclude=["0.0.0.1/32"])))
ip_policy_update.assert_called_once_with(
self.context,
ipp,
subnet_ids=[100],
exclude=["0.0.0.1/32", "0.0.0.0/32", "0.0.255.255/32"])
def test_update_ip_policy_exclude_v6(self):
subnets = [dict(id=100, cidr="::/64")]
ipp = dict(id=1, subnets=subnets,
exclude=["::/128"],
name="foo", tenant_id=1)
with self._stubs(ipp, subnets=subnets) as (ip_policy_update):
self.plugin.update_ip_policy(
self.context,
1,
dict(ip_policy=dict(subnet_ids=[100], exclude=["::1/128"])))
ip_policy_update.assert_called_once_with(
self.context,
ipp,
subnet_ids=[100],
exclude=["::1/128", "::/128", "::ffff:ffff:ffff:ffff/128"])
class TestQuarkDeleteIpPolicies(test_quark_plugin.TestQuarkPlugin):
@contextlib.contextmanager
def _stubs(self, ip_policy):
db_mod = "quark.db.api"
with contextlib.nested(
mock.patch("%s.ip_policy_find" % db_mod),
mock.patch("%s.ip_policy_delete" % db_mod),
) as (ip_policy_find, ip_policy_delete):
ip_policy_find.return_value = ip_policy
yield ip_policy_find, ip_policy_delete
def test_delete_ip_policy_not_found(self):
with self._stubs(None):
with self.assertRaises(q_exc.IPPolicyNotFound):
self.plugin.delete_ip_policy(self.context, 1)
def test_delete_ip_policy_in_use(self):
with self._stubs(dict(networks=True)):
with self.assertRaises(q_exc.IPPolicyInUse):
self.plugin.delete_ip_policy(self.context, 1)
def test_delete_ip_policy(self):
ip_policy = dict(
id=1,
networks=[],
subnets=[])
with self._stubs(ip_policy) as (ip_policy_find, ip_policy_delete):
self.plugin.delete_ip_policy(self.context, 1)
self.assertEqual(ip_policy_find.call_count, 1)
self.assertEqual(ip_policy_delete.call_count, 1)
class TestQuarkUpdatePolicySubnetWithRoutes(test_quark_plugin.TestQuarkPlugin):
@contextlib.contextmanager
def _stubs(self, ip_policy, subnets=None, routes=None):
subnets = subnets or []
db_mod = "quark.db.api"
with contextlib.nested(
mock.patch("%s.ip_policy_find" % db_mod),
mock.patch("%s.subnet_find" % db_mod),
mock.patch("%s.route_find" % db_mod),
mock.patch("%s.ip_policy_update" % db_mod),
) as (ip_policy_find, subnet_find, route_find, ip_policy_update):
ip_policy_find.return_value = ip_policy
subnet_find.return_value = subnets
route_find.return_value = routes
yield ip_policy_update
def test_update_ip_policy_has_route_conflict_raises(self):
subnet = dict(id=1, cidr="192.168.0.0/24")
ipp = dict(id=1, subnets=[subnet], exclude=["192.168.0.1/32"],
name="foo", tenant_id=1)
route = {"gateway": "192.168.0.1", "subnet_id": subnet["id"]}
with self._stubs(ipp, subnets=[subnet], routes=[route]):
with self.assertRaises(
n_exc_ext.GatewayConflictWithAllocationPools):
self.plugin.update_ip_policy(
self.context, 1,
dict(ip_policy=dict(subnet_ids=[1], exclude=[])))
def test_update_ip_policy_no_route_conflict(self):
subnet = dict(id=1, cidr="192.168.0.0/24")
ipp = dict(id=1, subnets=[subnet], exclude=["192.168.0.1/32"],
name="foo", tenant_id=1)
route = {"gateway": "192.168.0.1", "subnet_id": subnet["id"]}
with self._stubs(ipp, subnets=[subnet], routes=[route]):
try:
self.plugin.update_ip_policy(
self.context, 1,
dict(ip_policy=dict(subnet_ids=[1],
exclude=["192.168.0.0/24"])))
except Exception as e:
self.fail("This shouldn't have raised: %s" % e)
class TestQuarkValidateCIDRsFitsIntoSubnets(test_quark_plugin.TestQuarkPlugin):
def test_normal_cidr_and_valid_subnet(self):
try:
ippol._validate_cidrs_fit_into_subnets(
["192.168.0.100/32"],
[dict(id=1, cidr="192.168.0.0/24")])
except Exception:
self.fail("Should not have failed")
def test_normal_ipv4_cidr_and_valid_ipv6_subnet(self):
try:
ippol._validate_cidrs_fit_into_subnets(
["192.168.0.100/32"], [dict(id=1, cidr="::/96")])
except Exception:
self.fail("Should not have failed")
def test_normal_ipv6_cidr_and_valid_ipv6_subnet(self):
try:
ippol._validate_cidrs_fit_into_subnets(
["::/128"], [dict(id=1, cidr="::/96")])
except Exception:
self.fail("Should not have failed")
def test_normal_ipv6_cidr_and_valid_ipv4_subnet(self):
try:
ippol._validate_cidrs_fit_into_subnets(
["::/128"], [dict(id=1, cidr="192.168.0.0/24")])
except Exception:
self.fail("Should not have failed")
def test_normal_cidr_and_multiple_valid_subnet(self):
try:
ippol._validate_cidrs_fit_into_subnets(
["192.168.0.100/32"],
[dict(id=1, cidr="192.168.0.0/24"),
dict(id=2, cidr="192.168.0.0/16")])
except Exception:
self.fail("Should not have failed")
def test_normal_ipv6_cidr_and_multiple_valid_ipv6_subnet(self):
try:
ippol._validate_cidrs_fit_into_subnets(
["::/128"],
[dict(id=1, cidr="::/96"),
dict(id=2, cidr="::/64")])
except Exception:
self.fail("Should not have failed")
def test_normal_cidr_and_invalid_subnet(self):
with self.assertRaises(n_exc.BadRequest):
ippol._validate_cidrs_fit_into_subnets(
["192.168.0.100/32"],
[dict(id=1, cidr="10.10.10.0/24")])
def test_normal_ipv6_cidr_and_invalid_ipv6_subnet(self):
with self.assertRaises(n_exc.BadRequest):
ippol._validate_cidrs_fit_into_subnets(
["::/64"], [dict(id=1, cidr="::/96")])
def test_normal_cidr_and_one_invalid_and_one_valid_subnet(self):
with self.assertRaises(n_exc.BadRequest):
ippol._validate_cidrs_fit_into_subnets(
["192.168.0.100/32"],
[dict(id=1, cidr="10.10.10.0/24"),
dict(id=1, cidr="192.168.0.0/24")])
def test_normal_ipv6_cidr_and_one_invalid_and_one_valid_ipv6_subnet(self):
with self.assertRaises(n_exc.BadRequest):
ippol._validate_cidrs_fit_into_subnets(
["::/127"],
[dict(id=1, cidr="::/96"),
dict(id=1, cidr="::/128")])
class TestQuarkEnsureDefaultPolicy(test_base.TestBase):
def test_no_cidrs_no_subnets(self):
cidrs = []
subnets = []
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, [])
self.assertEqual(subnets, [])
def test_no_cidrs_v4(self):
cidrs = []
subnets = [dict(cidr="192.168.10.1/24")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["192.168.10.0/32", "192.168.10.255/32"])
self.assertEqual(subnets, [dict(cidr="192.168.10.1/24")])
def test_no_subnets_v4(self):
cidrs = ["192.168.10.0/32", "192.168.10.255/32"]
subnets = []
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["192.168.10.0/32", "192.168.10.255/32"])
self.assertEqual(subnets, [])
def test_cidrs_without_default_cidrs_v4(self):
cidrs = ["192.168.10.20/32", "192.168.10.40/32"]
subnets = [dict(cidr="192.168.10.1/24")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["192.168.10.20/32", "192.168.10.40/32",
"192.168.10.0/32", "192.168.10.255/32"])
self.assertEqual(subnets, [dict(cidr="192.168.10.1/24")])
def test_cidrs_with_default_cidrs_v4(self):
cidrs = ["192.168.10.0/32", "192.168.10.255/32"]
subnets = [dict(cidr="192.168.10.1/24")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["192.168.10.0/32", "192.168.10.255/32"])
self.assertEqual(subnets, [dict(cidr="192.168.10.1/24")])
def test_no_cidrs_v6(self):
cidrs = []
subnets = [dict(cidr="::/64")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["::/128", "::ffff:ffff:ffff:ffff/128"])
self.assertEqual(subnets, [dict(cidr="::/64")])
def test_no_subnets_v6(self):
cidrs = ["::/128", "::ffff:ffff:ffff:ffff/128"]
subnets = []
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["::/128", "::ffff:ffff:ffff:ffff/128"])
self.assertEqual(subnets, [])
def test_cidrs_without_default_cidrs_v6(self):
cidrs = ["::10/128", "::20/128"]
subnets = [dict(cidr="::/64")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["::10/128", "::20/128",
"::/128", "::ffff:ffff:ffff:ffff/128"])
self.assertEqual(subnets, [dict(cidr="::/64")])
def test_cidrs_with_default_cidrs_v6(self):
cidrs = ["::/128", "::ffff:ffff:ffff:ffff/128"]
subnets = [dict(cidr="::/64")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["::/128", "::ffff:ffff:ffff:ffff/128"])
self.assertEqual(subnets, [dict(cidr="::/64")])
def test_no_duplicates_in_result_when_called_twice(self):
cidrs = ["192.168.10.10/32"]
subnets = [dict(cidr="192.168.10.0/24")]
self.assertIsNone(ippol.ensure_default_policy(cidrs, subnets))
self.assertEqual(cidrs, ["192.168.10.10/32", "192.168.10.0/32",
"192.168.10.255/32"])
cidrs2 = ["192.168.10.10/32"]
self.assertIsNone(ippol.ensure_default_policy(cidrs2, subnets))
self.assertEqual(cidrs, ["192.168.10.10/32", "192.168.10.0/32",
"192.168.10.255/32"])
self.assertEqual(subnets, [dict(cidr="192.168.10.0/24")])
|
9,058 | df60d3b829c5702385f59fdefaea04f569fb7db2 | #!/usr/bin/python
# Original code found at:
# https://github.com/zzeromin/raspberrypi/tree/master/i2c_lcd
# requires I2C_LCD_driver.py
import I2C_LCD_driver
from time import *
import os
mylcd = I2C_LCD_driver.lcd()
mylcd.lcd_clear()
mylcd.lcd_display_string("RAS Hi-Pi shutdown", 1)
mylcd.lcd_display_string(" See you again ~", 2)
mylcd.lcd_display_string("http://rasplay.org", 3)
mylcd.lcd_display_string("RaspberryPi Village", 4)
sleep(2) # 2 sec delay
os.system("shutdown now -h")
|
9,059 | 1a166a08c835caa8dd308d59227051751aff7c0f |
# coding=utf-8
from numpy import *
""" 1
函数loadDataSet()创建了一些实验样本。 该函数返回的第一个变量是进行词条切分后的文档集合, 这些文档来自斑点犬爱好者留言
板。 这些留言文本被切分成一系列的词条集合, 标点符号从文本中去掉,
loadDataSet( )函数返回的第二个
变量是一个类别标签的集合。 这里有两类, 侮辱性和非侮辱性。 这些文本的类别由人工标注, 这些标注信息用于训练程序以便自动检测侮辱性留言
"""
def loadDataSet():
postingList=[['my', 'dog', 'has', 'flea', 'problems', 'help', 'please'],
['maybe', 'not', 'take', 'him', 'to', 'dog', 'park', 'stupid'],
['my', 'dalmation', 'is', 'so', 'cute', 'I', 'love', 'him'],
['stop', 'posting', 'stupid', 'worthless', 'garbage'],
['mr', 'licks', 'ate', 'my', 'steak', 'how', 'to', 'stop', 'him'],
['quit', 'buying', 'worthless', 'dog', 'food', 'stupid']]
classVec = [0,1,0,1,0,1] #1 is abusive, 0 not
return postingList,classVec
""" 2
函数createVocabList()会创建一个包含在所有文档中出现的不重复词的列表, 为此使用了Python的set数据类型。 将词条列表输给
set构造函数, set就会返回一个不重复词表。 首先, 创建一个空集合❶, 然后将每篇文档返回的新词集合添加到该集合中❷。 操作符|用于
求两个集合的并集, 这也是一个按位或(OR) 操作符在数学符号表示上, 按位或操作与集合求并操作使用相同记号
"""
def createVocabList(dataSet):
vocabSet = set([]) #create empty set
for document in dataSet:
vocabSet = vocabSet | set(document) #union of the two sets
return list(vocabSet)
""" 3
获得词汇表后, 便可以使用函数setOfWords2Vec(), 该函数的输入参数为词汇表及某个文档, 输出的是文档向量, 向量的每一元素为1或0,
分别表示词汇表中的单词在输入文档中是否出现。 函数首先创建一个和词汇表等长的向量, 并将其元素都设置为0❸。 接着, 遍历文档中的所有单词,
如果出现了词汇表中的单词, 则将输出的文档向量中的对应值
设为1。 一切都顺利的话, 就不需要检查某个词是否还在vocabList中, 后边可能会用到这一操作
"""
def setOfWords2Vec(vocabList, inputSet):
returnVec = [0]*len(vocabList)
for word in inputSet:
if word in vocabList:
returnVec[vocabList.index(word)] = 1
return returnVec
"""
先看看前三个函数的执行效果
"""
def test1():
listPosts, listClass = loadDataSet()
mVocabList = createVocabList(listPosts)
print mVocabList
setOfWords2Vec(mVocabList, listPosts[0])
# test1()
# --------------------------- Training: computing probabilities from word vectors --------------------------
"""
函数中的输入参数为文档矩阵trainMatrix, 以及由每篇文档类别标签所构成的向量trainCategory。 首先, 计算文档属于侮辱性文档
(class=1) 的概率, 即P(1)。 因为这是一个二类分类问题, 所以可以通过1-P(1)得到P(0)。 对于多于两类的分类问题, 则需要对代码稍加
修改。计算p(wi|c1) 和p(wi|c0), 需要初始化程序中的分子变量和分母变量❶。 由于w中元素如此众多, 因此可以使用NumPy数组快速计算这些
值。 上述程序中的分母变量是一个元素个数等于词汇表大小的NumPy数组。 在for循环中, 要遍历训练集trainMatrix中的所有文档。 一旦某
个词语(侮辱性或正常词语) 在某一文档中出现, 则该词对应的个数(p1Num或者p0Num) 就加1, 而且在所有的文档中, 该文档的总词数也
相应加1❷。 对于两个类别都要进行同样的计算处理。最后, 对每个元素除以该类别中的总词数❸。 利用NumPy可以很好实现,
用一个数组除以浮点数即可, 若使用常规的Python列表则难以完成这种任务, 读者可以自己尝试一下。 最后, 函数会返回两个向量和一个概率。
"""
def trainNB0(trainMatrix,trainCategory):
numTrainDocs = len(trainMatrix)
numWords = len(trainMatrix[0])
pAbusive = sum(trainCategory)/float(numTrainDocs)
    # 1. (next two lines) initialize the probabilities
"""
利用贝叶斯分类器对文档进行分类时, 要计算多个概率的乘积以获得文档属于某个类别的概率, 即计算p(w0|1)p(w1|1)p(w2|1)。 如果其中一
个概率值为0, 那么最后的乘积也为0。 为降低这种影响, 可以将所有词的出现数初始化为1, 并将分母初始化为2
"""
p0Num = ones(numWords); p1Num = ones(numWords) # change to ones()
p0Denom = 2.0; p1Denom = 2.0 # change to 2.0
for i in range(numTrainDocs):
if trainCategory[i] == 1:
            # 2. (next two lines) vector addition
p1Num += trainMatrix[i]
p1Denom += sum(trainMatrix[i])
else:
p0Num += trainMatrix[i]
p0Denom += sum(trainMatrix[i])
    # 3. divide each element
    """
    Another problem is underflow, caused by multiplying together many very small numbers. When computing
    p(w0|ci)p(w1|ci)p(w2|ci)...p(wN|ci), most of the factors are tiny, so the program underflows or produces a wrong
    answer. (Try multiplying many tiny numbers in Python; after rounding you eventually get 0.) One remedy is to
    take the natural log of the product: since ln(a*b) = ln(a) + ln(b), working with logs avoids both the underflow
    and the floating-point round-off errors, and nothing is lost by doing so. Figure 4-4 plots f(x) against
    ln(f(x)): the two curves increase and decrease together over the same regions and attain their extrema at the
    same points, so although the values differ, the final result is unaffected. The two lines before the return
    apply this to the classifier:
    """
p1Vect = log(p1Num/p1Denom) # change to log()
p0Vect = log(p0Num/p0Denom) # change to log()
return p0Vect,p1Vect,pAbusive
def test2():
listPosts, listClass = loadDataSet()
    # build a list mVocabList containing all the words
mVocabList = createVocabList(listPosts)
setOfWords2Vec(mVocabList, listPosts[0])
trainMat = []
for postinDoc in listPosts:
temp = setOfWords2Vec(mVocabList, postinDoc)
trainMat.append(temp)
    # pAb is the probability that a document is abusive
p0v, p1v, pAb = trainNB0(trainMat, listClass)
print pAb
"""
接下来看一看在给定文档类别条件下词汇表中单词的出现概率, 看看是否正确。 词汇表中的第一个词是cute, 其在类别0中出现1次, 而在类别1中
从未出现。 对应的条件概率分别为0.041 666 67与0.0。 该计算是正确的。 我们找找所有概率中的最大值, 该值出现在P(1)数组第26个下标位
置, 大小为0.157 894 74。 在myVocabList的第26个下标位置上可以查到该单词是stupid。 这意味着stupid是最能表征类别1(侮辱性文档类)的单词。
"""
"""
代码有4个输入: 要分类的向量vec2Classify以及使用函数trainNB0()计算得到的三个概率。 使用NumPy的数组来计算两个
向量相乘的结果❶。 这里的相乘是指对应元素相乘, 即先将两个向量中的第1个元素相乘, 然后将第2个元素相乘, 以此类推。 接下来将词汇表
中所有词的对应值相加, 然后将该值加到类别的对数概率上。 最后, 比较类别的概率返回大概率对应的类别标签。 这一切不是很难, 对吧?
"""
def classifyNB(vec2Classify, p0Vec, p1Vec, pClass1):
    # 1. element-wise multiplication -- the core of the classification computation
p1 = sum(vec2Classify * p1Vec) + log(pClass1) # element-wise mult
p0 = sum(vec2Classify * p0Vec) + log(1.0 - pClass1)
if p1 > p0:
return 1
else:
return 0
"""
对文本做一些修改, 看看分类器会输出什么结果。 这个例子非常简单,但是它展示了朴素贝叶斯分类器的工作原理。
接下来,我们会对代码做些修改, 使分类器工作得更好。
函数setOfWords2Vec()稍加修改, 修改后的函数称为bagOfWords2Vec()
-----------------------------------准备数据: 文档词袋模型---------------------------------------
"""
def bagOfWords2VecMN(vocabList, inputSet):
returnVec = [0]*len(vocabList)
for word in inputSet:
if word in vocabList:
            # increment the count for this word (the only change vs. setOfWords2Vec)
returnVec[vocabList.index(word)] += 1
return returnVec
"""
函数是一个便利函数(convenience function) , 该函数封装所有操作, 以节省输入
"""
def testingNB():
listOPosts,listClasses = loadDataSet()
myVocabList = createVocabList(listOPosts)
trainMat=[]
for postinDoc in listOPosts:
trainMat.append(setOfWords2Vec(myVocabList, postinDoc))
p0V,p1V,pAb = trainNB0(array(trainMat),array(listClasses))
testEntry = ['love', 'my', 'dalmation']
thisDoc = array(setOfWords2Vec(myVocabList, testEntry))
print (testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb))
testEntry = ['stupid', 'garbage']
thisDoc = array(setOfWords2Vec(myVocabList, testEntry))
print (testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb))
# testingNB()
# -----------------------------------使用朴素贝叶斯过滤垃圾邮件----------------------------
"""
准备数据: 切分文本
可以看到, 切分的结果不错, 但是标点符号也被当成了词的一部分。 可以使用正则表示式来切分句子, 其中分隔符是除单词、 数字外的任意字符串
"""
def textParse(bigString): #input is big string, #output is word list
import re
    listOfTokens = re.split(r'\W+', bigString)  # \W+ rather than \W*: a pattern that can match the empty string mis-splits the text
return [tok.lower() for tok in listOfTokens if len(tok) > 2]
""""
函数spamTest()对贝叶斯垃圾邮件分类器进行自动化处理。 导入文件夹spam与ham下的文本文件, 并将它们解析为词列表❶。 接下来
构建一个测试集与一个训练集, 两个集合中的邮件都是随机选出的。 本例中共有50封电子邮件, 并不是很多, 其中的10封电子邮件被随机选择
为测试集。 分类器所需要的概率计算只利用训练集中的文档来完成。Python变量trainingSet是一个整数列表, 其中的值从0到49。 接下
来, 随机选择其中10个文件❷。 选择出的数字所对应的文档被添加到测试集, 同时也将其从训练集中剔除。 这种随机选择数据的一部分作为训
练集, 而剩余部分作为测试集的过程称为留存交叉验证(hold-out crossvalidation) 。 假定现在只完成了一次迭代, 那么为了更精确地估计分类
器的错误率, 就应该进行多次迭代后求出平均错误率。接下来的for循环遍历训练集的所有文档, 对每封邮件基于词汇表并使
用setOfWords2Vec()函数来构建词向量。 这些词在traindNB0()函数中用于计算分类所需的概率。 然后遍历测试集, 对其中每封电子邮件进
行分类❸。 如果邮件分类错误, 则错误数加1, 最后给出总的错误百分比
"""
def spamTest():
docList=[]; classList = []; fullText =[]
for i in range(1,26):
wordList = textParse(open('email/spam/%d.txt' % i).read())
docList.append(wordList)
fullText.extend(wordList)
classList.append(1)
wordList = textParse(open('email/ham/%d.txt' % i).read())
docList.append(wordList)
fullText.extend(wordList)
classList.append(0)
vocabList = createVocabList(docList)# create vocabulary
trainingSet = range(50); testSet=[] # create test set
    # (next four lines) randomly partition the indices into training and test sets
for i in range(10):
randIndex = int(random.uniform(0,len(trainingSet)))
testSet.append(trainingSet[randIndex])
        # remove the chosen index from the training set
del(trainingSet[randIndex])
trainMat=[]; trainClasses = []
    # (next lines) build the training matrix and train the classifier
for docIndex in trainingSet:#train the classifier (get probs) trainNB0
trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))
trainClasses.append(classList[docIndex])
    # compute the model parameters from the training matrix
p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))
errorCount = 0
for docIndex in testSet: #classify the remaining items
wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])
if classifyNB(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:
errorCount += 1
print ("classification error",docList[docIndex])
print ('the error rate is: ',float(errorCount)/len(testSet))
#return vocabList,fullText
# ------------------------ automated run ----------------------------------------
spamTest()
# ----------------------------- 4.7. Example: revealing regional attitudes from personal ads with naive Bayes ---------------------------
""""
RSS源分类器及高频词去除函数
函数calcMostFreq() ❶。 该函数遍历词汇表中的每个词并统计它在文本中出现的次数, 然后根据出现次数从高到低对词典进行排序,
最后返回排序最高的30个单词。 你很快就会明白这个函数的重要性
以下四行) 计算出现频率
"""
def calcMostFreq(vocabList,fullText):
import operator
freqDict = {}
for token in vocabList:
freqDict[token]=fullText.count(token)
sortedFreq = sorted(freqDict.iteritems(), key=operator.itemgetter(1), reverse=True)
return sortedFreq[:30]
""""
函数localWords()使用两个RSS源作为参数。 RSS源要在函数外
导入, 这样做的原因是RSS源会随时间而改变。 如果想通过改变代码来
比较程序执行的差异, 就应该使用相同的输入。 重新加载RSS源就会得
到新的数据, 但很难确定是代码原因还是输入原因导致输出结果的改
变。 函数localWords()与程序清单4-5中的spamTest()函数几乎相
同, 区别在于这里访问的是RSS源❷而不是文件。 然后调用函
数calcMostFreq()来获得排序最高的30个单词并随后将它们移除❸。
函数的剩余部分与spamTest()基本类似, 不同的是最后一行要返回下
面要用到的值。
"""
def localWords(feed1,feed0):
import feedparser
docList=[]; classList = []; fullText =[]
minLen = min(len(feed1['entries']),len(feed0['entries']))
for i in range(minLen):
        # 2. read one RSS entry at a time
wordList = textParse(feed1['entries'][i]['summary'])
docList.append(wordList)
fullText.extend(wordList)
classList.append(1) #NY is class 1
wordList = textParse(feed0['entries'][i]['summary'])
docList.append(wordList)
fullText.extend(wordList)
classList.append(0)
    # (next four lines) remove the most frequently occurring words
vocabList = createVocabList(docList)#create vocabulary
top30Words = calcMostFreq(vocabList,fullText) #remove top 30 words
for pairW in top30Words:
if pairW[0] in vocabList: vocabList.remove(pairW[0])
trainingSet = range(2*minLen); testSet=[] #create test set
for i in range(20):
randIndex = int(random.uniform(0,len(trainingSet)))
testSet.append(trainingSet[randIndex])
del(trainingSet[randIndex])
trainMat=[]; trainClasses = []
for docIndex in trainingSet:#train the classifier (get probs) trainNB0
trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))
trainClasses.append(classList[docIndex])
p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))
errorCount = 0
for docIndex in testSet: #classify the remaining items
wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])
if classifyNB(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:
errorCount += 1
print ('the error rate is: ',float(errorCount)/len(testSet))
return vocabList,p0V,p1V
def getTopWords(ny,sf):
import operator
vocabList,p0V,p1V=localWords(ny,sf)
topNY=[]; topSF=[]
for i in range(len(p0V)):
if p0V[i] > -6.0 : topSF.append((vocabList[i],p0V[i]))
if p1V[i] > -6.0 : topNY.append((vocabList[i],p1V[i]))
sortedSF = sorted(topSF, key=lambda pair: pair[1], reverse=True)
print ("SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**")
for item in sortedSF:
print (item[0])
sortedNY = sorted(topNY, key=lambda pair: pair[1], reverse=True)
print ("NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**")
for item in sortedNY:
print (item[0])
|
9,060 | 0c297e6f79682896e98c7a2933a4da6d9af7d7fe | # trivia game made by mayu xD
print('¡hola! te invito a jugar mi juego trivia, trataremos temas como termux xd y entre otras cosas')
n1 = input('\n por favor dime como te llamas:')
print('\nmucho gusto', n1, ',empecemos')
puntaje = 0
print('me puedes decir con que comando en linux puedo listar la informacion de un directorio?')
print('a)cd')
print('b) ls')
print('c) cat')
print('d) mv')
print('e) rm')
respuesta_1 = input('\n tu respuesta: ')
while respuesta_1 not in ('a', 'b', 'c', 'd', 'e'):
respuesta_1 = input("debes volver a ingresar tu respuesta:")
if respuesta_1 == "b":
puntaje += 10
print("Muy bien", n1, "!")
else:
puntaje -= 5
print("Incorrecto", n1, "!")
print('\nsiguiente pregunta')
print('\ncual de estos comandos sirve para mover un archivo en termux')
print('a) cd')
print('b) cp')
print('c) mv')
print('d) cat')
print('e) chmod')
respuesta_2 = input('tu respuesta: ')
while respuesta_2 not in ('a', 'b', 'c', 'd', 'e'):
respuesta_2 = input("debes volver a ingresar tu respuesta:")
if respuesta_2 == "b":
puntaje -= 5
print('incorrecto', n1, '!')
elif respuesta_2 == "a":
puntaje -= 5
    print('mal', n1, ', incorrecto')
elif respuesta_2 == "d":
puntaje -= 5
print('no', n1, '! incorrecto')
elif respuesta_2 == "e":
puntaje -= 5
print('mal', n1, '! incorrecto')
else:
puntaje += 10
print('correcto', n1, '!!!!')
print('\nsiguiente pregunta')
print('\nque comando puede dar permisos?')
print('a) chmod')
print('b) cal')
print('c) rm')
print('d) mkdir')
print('e) ls -l')
respuesta_3 = input('\n tu respuesta: ')
while respuesta_3 not in ('a', 'b', 'c', 'd', 'e'):
respuesta_3 = input("debes volver a ingresar tu respuesta:")
if respuesta_3 == "a":
puntaje += 10
print("Muy bien", n1, "!")
else:
puntaje -= 5
print("Incorrecto", n1, "!")
print('\nsiguiente pregunta')
print('\ncual de estos comandos puede crear un directorio?')
print('a) rm')
print('b) mv')
print('c) cp')
print('d) mkdir')
print('e) exit')
respuesta_4 = input('\n tu respuesta: ')
while respuesta_4 not in ('a', 'b', 'c', 'd', 'e'):
respuesta_4 = input("debes volver a ingresar tu respuesta:")
if respuesta_4 == "d":
puntaje += 10
print("Muy bien", n1, "!")
else:
puntaje -= 5
print("Incorrecto", n1, "!")
print('\nsiguiente pregunta')
print('\ncon que comando puedo dar permisos de almacenamiento a termux?')
print('a) pwd')
print('b) ls -a')
print('c) lstree')
print('d) termux setup-storage')
print('e) rm -rf')
respuesta_5 = input('\n tu respuesta: ')
while respuesta_5 not in ('a', 'b', 'c', 'd', 'e'):
respuesta_5 = input("debes volver a ingresar tu respuesta:")
if respuesta_5 == "d":
puntaje += 10
print("Muy bien", n1, "!")
else:
puntaje -= 5
print("Incorrecto", n1, "!")
print('\ngracias por jugar', n1, '!')
print('\neste es tu puntaje:')
print('tienes', puntaje , 'puntos')
print('\nchao, chuidate xd') |
9,061 | 035de226c2d2ee85cb7e319de35fb09b21bc523d | from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from analyze import views
#from lecture import views
urlpatterns = patterns('',
url(r'^$', 'analyze.views.analyze', name='analyze'),
)
|
9,062 | 24187284ff3e03cf79b8545415005c71f9355ddc | import sys
flags = []
sourcefiles: list = []
headerfiles: list = []
mainfile: str = ""
outfilename = "a.out"
assemblyfilename = "a.asm"
includedfilenames = []
class Variables:
    size: int  # size in bytes
    name: str
class cstruct:
    structname: str
def claprocessor():
    global outfilename, assemblyfilename
    print(sys.argv)
    i = 0
    for stri in sys.argv:
        if stri == "-o":
            outfilename = sys.argv[i + 1]
        if stri == "-ASM" or stri == "-asm":
            assemblyfilename = sys.argv[i + 1]
        if ".c" in stri:
            sourcefiles.append(stri)
        if ".h" in stri:
            headerfiles.append(stri)
        i += 1
    return
def cpreprosscssor():
    # NOTE: lexer() is not defined in this file; it is assumed to be provided elsewhere
    maintokens = lexer(mainfile)
    return
def cprocessor():
    return
if __name__ == '__main__':
    claprocessor()
|
9,063 | 3240310653930662dcc4d79646b1a75c2994cda7 | #coding: utf-8
#!/usr/bin/python
__author__='julia sayapina'
### Use db_reset.py to drop the db and recreate it, then use 'migrate' --> 'createsuperuser' --> 'makemigrations' --> 'migrate' as usual.
### This will create the DB structure as it has to be from django
### Then use test_db_fullfill.py to fill the db with test data. if you don't need to create tables manually don't use db_create()
from warnings import filterwarnings
import MySQLdb as db
import os
import shutil
import sys
from subprocess import Popen, PIPE, STDOUT
import uuid
from decimal import *
from datetime import date
from random import randint
# Create or open the database file and create the schema
filterwarnings('ignore', category = db.Warning)
db_name = 'ved3'
def db_create(): # creates tables manually (doesn't create AO and AB tables)
cur.execute("""
create table if not exists Offshores_asset (
id INTEGER PRIMARY KEY AUTO_INCREMENT,
asset_name VARCHAR(100),
asset_link VARCHAR(200),
slug CHAR(200),
uuid CHAR(36)
);
""")
cur.execute("""
create table if not exists Offshores_offshore (
id INTEGER PRIMARY KEY AUTO_INCREMENT,
off_name VARCHAR(50),
off_jurisdiction VARCHAR(50),
file VARCHAR(100),
image VARCHAR(100),
off_parent VARCHAR(50),
off_link VARCHAR(300),
slug VARCHAR(200),
uuid CHAR(36)
);
""")
cur.execute("""
create table if not exists Offshores_beneficiary (
id INTEGER PRIMARY KEY AUTO_INCREMENT,
ben_name VARCHAR(50),
ben_lastname VARCHAR(100),
ben_midname VARCHAR(30),
ben_holding VARCHAR(70),
ben_link VARCHAR(300),
slug VARCHAR(200),
uuid CHAR(36)
);
""")
cur.execute("""
create table if not exists Offshores_beneficiariesoffshores (
id INTEGER PRIMARY KEY AUTO_INCREMENT,
share DECIMAL,
rel_date DATE,
source VARCHAR(150),
link VARCHAR(200),
beneficiary_id INT,
offshore_id INT,
uuid CHAR(36)
);
""")
conn.commit()
print('tables created')
def db_insert(numrows):
# inserts test data into tables
for x in xrange(0,numrows): #creates test data for tables
num = str(x)
a_name = 'Asset' + num
a_link = 'http://somelink/'+a_name
a_uuid = uuid.uuid4().hex
a_slug = a_name + '-' + str(a_uuid)
o_name = 'Offshore' + num
o_jur = 'Cyprus'
o_file = 'offshores/favicon.xcf'
o_image = 'offshores/favicon.png'
o_prnt = 'parent' + num
o_link = 'http://' + o_name + '-' + num + '.com'
o_uuid = uuid.uuid4().hex
o_slug = o_name + str(o_uuid)
b_name = 'Michael' + num
b_lname = 'Prohorov' + num
b_mname = 'Dmitrievich' + num
b_holding = 'Onexim' + num
b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'
b_uuid = uuid.uuid4().hex
b_slug = b_lname + str(b_uuid)
        try:  # inserts test data into the tables via SQL; still produces weird errors for Beneficiariesoffshores, not sure why
cur.execute("""INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)""",(a_name, a_link, a_slug, a_uuid))
cur.execute("""INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)""",(o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid))
cur.execute("""INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)""",(b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid))
conn.commit()
except Exception as e:
print ("Exception 1:", type(e), e)
def db_insert_linktables(numrows):
# inserts test data into linking tables; has to be called after db_insert(), as first basic tables need to be generated to produce links between
# them using random numbers
for x in xrange(0,numrows): #creates test data for tables
num = str(x)
bo_share = Decimal(x)
bo_date = date(2016, randint(1, 12), randint(1, 28))
bo_source = 'source' + num
bo_link = 'http://bo.ru/' + bo_source + '-' + num
bo_ben = randint(1, numrows)
bo_off = randint(1, numrows)
bo_uuid = uuid.uuid4().hex
oa_uuid = uuid.uuid4().hex
oa_share = Decimal(x)
oa_date = date(2016, randint(1, 12), randint(1, 28))
oa_source = 'source' + num
oa_link = 'http://oa.ru/' + oa_source + '-' + num
oa_asset = randint(1, numrows)
oa_off = randint(1, numrows)
ab_uuid = uuid.uuid4().hex
ab_share = Decimal(x)
ab_date = date(2016, randint(1, 12), randint(1, 28))
ab_source = 'source' + num
ab_link = 'http://ab.ru/' + oa_source + '-' + num
ab_asset = randint(1, numrows)
ab_ben = randint(1, numrows)
        try:  # inserts test data into the tables via SQL; still produces weird errors for Beneficiariesoffshores, not sure why
cur.execute("""INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)""",(bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid))
cur.execute("""INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)""",(oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off))
cur.execute("""INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)""",(ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben))
conn.commit()
except Exception as e:
print ("Exception 1:", type(e), e)
numrows = 20
try:
conn = db.connect("localhost","root","0013Tau","ved2" )
cur = conn.cursor()
# db_create() #<-- to create tables manually uncomment this
db_insert(numrows)
db_insert_linktables(numrows) # IMPORTANT! has to be called ONLY after db_insert()!
except Exception as e:
    print ("Exception 0:", type(e), e)
    conn.rollback()
conn.commit()
conn.close()
print ('DB filled')
# def main():
# if len(sys.argv) != 2:
# print('usage: python3 db_fullfill.py [numrows]')
# sys.exit(1)
# if len(sys.argv) == 2:
# numrows = sys.argv[1]
# else:
# numrows = 15
# print (numrows)
# return numrows
# sys.exit(1)
# if __name__ == '__main__':
# main()
|
9,064 | 6aa74826f9ca0803fa8c1d5af1d4cec4980e2ce6 | import numpy as np
from scipy.stats import multivariate_normal
from functions.io_data import read_data, write_data
np.random.seed(0)
class IsingModel():
def __init__(self, image, J, rate, sigma):
self.width = image.shape[0]
self.height = image.shape[1]
self._J = J
self._rate = rate
self._sigma = sigma
self.image, self.logodds = self.presenting_image(image)
def presenting_image(self, image):
logodds = multivariate_normal.logpdf(image.flatten(), mean=+1, cov=self._sigma ** 2) - multivariate_normal.logpdf(image.flatten(), mean=-1, cov=self._sigma ** 2)
logodds = np.reshape(logodds, image.shape)
pr_plus1 = 1 / (1 + np.exp(-1*logodds)) # sigmoid(logodds) # plus 1 -> +1 -> 1 / (1 + exp(logodds)) -> sigmoid(x) = 1 / (1 + exp{x})
return 2 * pr_plus1 - 1, logodds
def neighbors(self, x, y):
nbrs = []
if x == 0:
nbrs.append(self.image[self.width - 1, y])
else:
nbrs.append(self.image[x - 1, y])
if x == self.width - 1:
nbrs.append(self.image[0, y])
else:
nbrs.append(self.image[x + 1, y])
if y == 0:
nbrs.append(self.image[x, self.height - 1])
else:
nbrs.append(self.image[x, y - 1])
if y == self.height - 1:
nbrs.append(self.image[x, 0])
else:
nbrs.append(self.image[x, y + 1])
return nbrs
def interaction_potentials(self, x, y):
nbrs = self.neighbors(x, y)
return sum(nbrs)
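    # Mean-field (variational) update used below: each node's mean is relaxed toward
    # the tanh of its local field,
    #   mu <- (1 - rate) * mu + rate * tanh(J * sum(neighbour means) + 0.5 * logodds)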
def variational_inference(self, x, y):
E = self._J * self.interaction_potentials(x, y)
self.image[x, y] = (1 - self._rate) * self.image[x, y] + self._rate * np.tanh(E + 0.5 * self.logodds[x, y])
def denoising(image, iterations, rate, sigma, J=3):
ising = IsingModel(image, J=J, rate=rate, sigma=sigma)
for i in range(iterations):
for x in range(image.shape[0]):
for y in range(image.shape[1]):
ising.variational_inference(x, y)
return ising.image
if __name__ == "__main__":
for img in range(1,5):
print("Denoising for image " + str(img))
data, image = read_data("../a1/"+str(img)+"_noise.txt", True)
print(data.shape)
print(image.shape)
image[image == 0] = -1
image[image == 255] = 1
iterations = 15
J = 3
sigma = 2
rate = 0.5
d_img = denoising(image, iterations=iterations, rate=rate, sigma=sigma)
d_img[d_img >= 0] = 255
d_img[d_img < 0] = 0
print(d_img.shape)
height = d_img.shape[0]
width = d_img.shape[1]
counter = 0
for i in range(0, width):
for j in range(0, height):
data[counter][2] = d_img[j][i][0]
counter = counter + 1
write_data(data, "../output/vi/"+str(img)+"_denoise.txt")
read_data("../output/vi/"+str(img)+"_denoise.txt", True, save=True, save_name="../output/vi/"+str(img)+"_denoise.png")
print("Finished writing data. Please check "+str(img)+"_denoise.png \n") |
9,065 | a8d13c3fbf6051eba392bcdd6dcb3e946696585f | import itertools
def zbits(n,k):
zeros = "0" * k
ones = "1" * (n-k)
binary = ones+zeros
string = {''.join(i) for i in itertools.permutations(binary, n)}
return(string)
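# Note: permutations() enumerates n! arrangements before the set deduplicates them;
# choosing the k zero positions directly is equivalent and far cheaper, e.g.
#   {''.join('0' if i in zs else '1' for i in range(n))
#    for zs in itertools.combinations(range(n), k)}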
assert zbits(4, 3) == {'0100', '0001', '0010', '1000'}
assert zbits(4, 1) == {'0111', '1011', '1101', '1110'}
assert zbits(5, 4) == {'00001', '00100', '01000', '10000', '00010'} |
9,066 | 68bade5767d4f418bcae07485a179df5e47e652c | DEBUG = True
ADMINS = frozenset(["briandowe@gmail.com"]) |
9,067 | e92a738d3233450b255605619dafadd4d829604b | #!/usr/bin/python3
from optparse import OptionParser
from urllib import request, parse
from urllib.error import URLError, HTTPError
import ssl
import re
ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE  # disable certificate verification
class Settings:
SINGLETON = None
def __init__(self):
self.url_pattern = r'href="((http[s]?://|/)(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)"'
self.crawl_content_type_whitelist_pattern = r'text/html'
self.crawl_url_blacklist_pattern = r'/activity/'
self.no_color = False
self.verbosity = 2
# Options singleton
@classmethod
def instance(cls):
if Settings.SINGLETON is None:
Settings.SINGLETON = Settings()
return Settings.SINGLETON
class Style:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
@classmethod
def colored(cls, string, color):
return string if Settings.instance().no_color else ("%s%s%s" % (color, string, Style.ENDC))
def info(string):
    return Style.colored(string, Style.OKBLUE)
def warn(string):
return Style.colored(string, Style.WARNING)
def error(string):
return Style.colored(string, Style.FAIL)
def result(string):
return Style.colored(string, Style.OKGREEN)
def log(priority, string):
if priority <= Settings.instance().verbosity:
print(string)
class MapItem:
def __init__(self):
self.code = 200
self.children = []
def main():
settings = Settings.instance()
usage = "usage: %prog [options] site_url"
parser = OptionParser(usage=usage)
parser.add_option("-c", "--content-type-regex", metavar="REGEX", dest="content_type_regex",
help="whitelist regex for content-type of crawled nodes",
default=settings.crawl_content_type_whitelist_pattern)
parser.add_option("-b", "--url-blacklist-regex", metavar="REGEX", dest="url_blacklist_regex",
help="blacklist regex for URLs of crawled nodes",
default=settings.crawl_url_blacklist_pattern)
parser.add_option("-v", action="count", dest="verbosity", default=0,
help="output verbosity")
parser.add_option("--no-color", action="store_true", dest="no_color")
(opts, args) = parser.parse_args()
if len(args) != 1:
parser.print_help()
return
site_url = args[0]
settings.crawl_content_type_whitelist_pattern = opts.content_type_regex
settings.crawl_url_blacklist_pattern = opts.url_blacklist_regex
settings.verbosity = opts.verbosity
settings.no_color = opts.no_color
site_map = map_site(site_url)
external_urls = get_external_children(site_url, site_map)
for url in sorted(external_urls):
try:
request.urlopen(url)
log(1, Style.result('EXT: %s' % url))
except HTTPError as e:
referrers = find_referrers(url, site_map)
log(0, Style.error('EXT-ERROR-HTTP: %d %s' % (e.code, url)))
log(0, Style.error('\n'.join(' REF: %s' % r for r in referrers)))
except URLError as e:
referrers = find_referrers(url, site_map)
log(0, Style.error('EXT-ERROR: %s %s' % (e.reason, url)))
log(0, Style.error('\n'.join(' REF: %s' % r for r in referrers)))
def find_referrers(url, site_map):
results = []
for referrer, item in site_map.items():
for child in item.children:
if child == url:
results.append(referrer)
break
return results
def get_external_children(site_url, site_map):
site_host = strip_path(site_url)
external = set()
for item in site_map.values():
for child in item.children:
if not child.startswith(site_host):
external.add(child)
return external
def map_site(site_url):
site_map = {}
settings = Settings.instance()
crawl_url_blacklist_regex = re.compile(settings.crawl_url_blacklist_pattern)
crawl_content_type_whitelist_regex = re.compile(settings.crawl_content_type_whitelist_pattern)
url_regex = re.compile(settings.url_pattern)
to_crawl = [site_url]
while to_crawl:
next_crawl = to_crawl.pop(0)
new_crawl = crawl(next_crawl, site_url, site_map,
crawl_url_blacklist_regex,
crawl_content_type_whitelist_regex,
url_regex)
to_crawl += new_crawl
return site_map
def crawl(url, site_host, site_map,
crawl_url_blacklist_regex,
crawl_content_type_whitelist_regex,
url_regex):
if url in site_map:
return []
log(2, 'CRAWL: %s' % url)
map_item = MapItem()
site_map[url] = map_item
if not url.startswith(site_host):
log(2, Style.warn('SKIP: external %s' % url))
return []
if crawl_url_blacklist_regex.search(url):
log(2, Style.warn('SKIP: blacklist %s' % url))
return []
try:
response = request.urlopen(url, context=ssl_context)
    except HTTPError as e:
        log(2, Style.error('ERROR: %d %s' % (e.code, url)))
        map_item.code = e.code
        return []
    except URLError as e:
        # URLError (e.g. a DNS failure) has .reason but no .code
        log(2, Style.error('ERROR: %s %s' % (e.reason, url)))
        return []
if not crawl_content_type_whitelist_regex.search(response.info().get('content-type')):
log(2, Style.warn('SKIP: content-type %s' % url))
return []
content = (
response.read()
.decode('utf-8')
.replace(r'\n', '\n')
.replace(r'\r', '\r'))
for match in url_regex.finditer(content):
child = match.group(1)
if child.startswith('/'):
child = parse.urljoin(site_host, child)
child = strip_query(child)
if child not in map_item.children:
map_item.children.append(child)
return map_item.children
def strip_path(url):
parsed = parse.urlparse(url)
return parse.urlunparse((parsed.scheme, parsed.netloc, '', '', '', ''))
def strip_query(url):
parsed = parse.urlparse(url)
return parse.urlunparse((parsed.scheme, parsed.netloc, parsed.path, '', '', ''))
if __name__ == '__main__':
main()
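# Example invocation (script and host names are hypothetical):
#   ./sitemap_check.py -v -v --no-color https://example.com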
|
9,068 | 4774c1f4eafc0132bab0073b60c4bcad6b69380d | import shlex
class MockSOLR(object):
class MockHits(list):
@property
def hits(self):
return len(self)
@property
def docs(self):
return self
def __init__(self):
self.db = {}
def add(self, objects):
for o in objects:
o['text'] = ''.join(o['text'])
self.db[o['id']] = o
def commit(self):
pass
def search(self, q, fq=None, **kw):
if isinstance(q, unicode):
q = q.encode('latin-1')
# Parse query
preds = []
q_parts = shlex.split(q)
if fq:
q_parts += fq
for part in q_parts:
if part == '&&':
continue
if ':' in part:
field, value = part.split(':', 1)
preds.append((field, value))
else:
preds.append(('text', part))
result = self.MockHits()
for obj in self.db.values():
for field, value in preds:
neg = False
if field[0] == '!':
neg = True
field = field[1:]
if field == 'text' or field.endswith('_t'):
if (value not in str(obj.get(field, ''))) ^ neg:
break
else:
if (value != str(obj.get(field, ''))) ^ neg:
break
else:
result.append(obj)
return result
def delete(self, *args, **kwargs):
if kwargs.get('q', None) == '*:*':
self.db = {}
elif kwargs.get('id', None):
del self.db[kwargs['id']]
elif kwargs.get('q', None):
for doc in self.search(kwargs['q']):
self.delete(id=doc['id'])
|
9,069 | 8020bac94de3e68193c9891a628a48c537c5afa0 | from menu_sun_integration.application.adapters.customer_adapter import CustomerAdapter
from menu_sun_integration.infrastructure.brf.builders.brf_base_builder import BRFBaseBuilder
from menu_sun_integration.infrastructure.brf.translators.brf_customer_translator import BRFCustomerTranslator
class BRFCustomerBuilder(BRFBaseBuilder):
def define_translator(self) -> None:
self._translator = BRFCustomerTranslator()
def build_adapter(self) -> None:
self._adapter = CustomerAdapter(client=self._client, translator=self._translator)
|
9,070 | 4b3de2d817aa6f8b92d513bcdba612362becefdc | #!/usr/bin/env python
from bumblebee.motion import *
from simulation.path import *
from simulation.settings import *
import tf.transformations
from geometry_msgs.msg import TransformStamped,Transform,Quaternion,Vector3
from bumblebee.baseTypes import basicGraph,slidingGraph
from simulation.dataset import stereo_simulator_node
import pickle
import os
import rospy
import time
import scipy.stats.mstats as stat
from scipy.stats import norm,cauchy
import matplotlib.pyplot as plt
import matplotlib.style as sty
from mpl_toolkits.mplot3d import Axes3D
sty.use("seaborn")
from tf import TransformListener,TransformBroadcaster
from tf.transformations import *
import numpy as np
out="/home/ryan/recording/poseGraph/ORB/summary"
inNet="/home/ryan/recording/poseGraph/ORB"
#["5000_A1","5000_A2","5000_A3",
replayFiles=["5000_A5","5000_A6","5000_A12","5000_A13","5000_A14"]#,"/media/ryan/EXTRA/Simulation/50/G_0.3.gauss"]#,"/home/ryan/recording/poseGraph/5000_A2_full.pose"]
rospy.init_node("graph_poses_extract")
for f in replayFiles:
print("new SLiding Graph")
inlierData=[]
rmsData=[]
inlierRatio=[]
inFile=inNet+"/"+f+".pose"
    with open(inFile,"rb") as fread:
        print(f)
        data=pickle.load(fread)
        print("Loaded")
        with open(out+"/"+f+".inlier",'wb') as outFile:
            pickle.dump(data.getInlierMotion(),outFile)
            print("1")
        with open(out+"/"+f+".inlierRMS",'wb') as outFile:
            pickle.dump(data.getInlierRMS(),outFile)
            print("extracted2")
        with open(out+"/"+f+".tracks",'wb') as outFile:
            pickle.dump(data.getTotalTracks(),outFile)
            print("extracted3")
        with open(out+"/"+f+".delta",'wb') as outFile:
            pickle.dump(data.getDeltaMotion(),outFile)
            print("extracted4")
# pickle.data.getInlierMotion())
# print("inlier")
# rmsData.append(data.getInlierRMS())
# print("rms")
# inlierRatio.append(data.getTotalTracks())
# print("totalTrc") |
9,071 | 97720baab961d50ceae832d52350b9871c552c84 | n,k=map(int,input().split())
k_list=[]
for i in range(k):
l,r=map(int,input().split())
k_list.append([l,r])
dp=[0]*(n+1)  # dp[i]: number of ways to reach cell i
dp[1]=1
dpsum=[0]*(n+1)  # dpsum[i]: prefix sum dp[1]+...+dp[i]
dpsum[1]=1
for i in range(1,n):
dpsum[i]=dp[i]+dpsum[i-1]
    for j in range(k):
        l,r=k_list[j]
        # over all i this adds dpsum[i] at i+l and subtracts it at i+r+1, so for each
        # segment dp[m] ends up equal to the sum of dp over the window [m-r, m-l]
        li=i+l
        ri=i+r+1
if li<=n:
dp[li]+=dpsum[i]
dp[li]=dp[li]%998244353
if ri<=n:
dp[ri]-=dpsum[i]
dp[ri]=dp[ri]%998244353
print(dp[n])
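# Example: input "5 1" with segment "1 2" (steps of 1 or 2 cells) prints 5,
# the number of ways to move from cell 1 to cell 5.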
|
9,072 | 9f6e5c219f7b668720b5379dde912ff22ef434d1 | #!/usr/bin/env python3
import json
import sqlite3
import sys
from scorelib import *
#from .scorelib import *
from collections import defaultdict
def __map2list(mp):
if len(mp.keys()) == 0:
return []
lst = [None] * max(mp.keys())
for idx in mp.keys():
lst[idx-1] = mp[idx]
return lst
def __translate_keys(translation_schema):
def f(obj):
schema = translation_schema.get(type(obj))
if schema is None:
return obj.__dict__
res = {}
for key in obj.__dict__:
res[schema.get(key, key)] = obj.__dict__[key]
return res
return f
def __to_bool(val):
if val == 'Y':
return True
elif val == 'N':
return False
else:
return None
def search(substr):
connection = sqlite3.connect('scorelib.dat')
result = defaultdict(lambda: [])
for person_id, person_name in connection.execute(r"SELECT id, name FROM person WHERE name LIKE '%' || ? || '%'", (substr, )):
root_composer = person_name
for (score_id, score_name, score_genre, score_incipit, score_key, score_year) in connection.execute(r"SELECT score.id, score.name, score.genre, score.incipit, score.key, score.year FROM score JOIN score_author a on score.id = a.score WHERE a.composer = ?", (person_id, )):
voicesMap = {}
for voice_name, voice_range, voice_number in connection.execute(r"SELECT name, range, number FROM voice WHERE score = ?", (score_id, )):
voicesMap[voice_number] = Voice(voice_name, voice_range)
composers = []
for c_name, c_born, c_died in connection.execute(r"SELECT person.name, person.born, person.died FROM score_author JOIN person ON score_author.composer = person.id WHERE score_author.score = ?", (score_id,)):
composers.append(Person(c_name, c_born, c_died))
composition = Composition(score_name, score_incipit, score_key, score_genre, score_year, __map2list(voicesMap), composers)
for edition_id, edition_name, edition_year in connection.execute(r"SELECT id, name, year FROM edition WHERE score = ?", (score_id,)):
editors = []
for e_name, e_born, e_died in connection.execute(r"SELECT person.name, person.born, person.died FROM edition_author JOIN person ON edition_author.editor = person.id WHERE edition_author.edition = ?", (edition_id,)):
editors.append(Person(e_name, e_born, e_died))
edition = Edition(composition, editors, edition_name)
for print_id, print_part in connection.execute(r"SELECT id, partiture FROM print WHERE edition = ?", (edition_id, )):
                    prnt = Print(edition, print_id, __to_bool(print_part))  # 'prnt' keeps the builtin print usable
                    result[root_composer].append({"Print Number": prnt.print_id,
                                                  "Composer": composition.authors,
                                                  "Title": composition.name,
                                                  "Genre": composition.genre,
                                                  "Key": composition.key,
                                                  "Composition Year": composition.year,
                                                  "Edition": edition.name,
                                                  "Voices": __map2list(voicesMap),
                                                  "Editor": edition.authors,
                                                  "Partiture": prnt.partiture,
                                                  "Incipit": composition.incipit})
json.dump(result,
sys.stdout,
default=__translate_keys({Print: {"print_id": "Print Number", "partiture": "Partiture", "edition": "Edition"},
Edition: {"authors": "Editors", "name": "Name", "composition": "Composition"},
Composition: {"name": "Name", "incipit": "Incipit", "key": "Key", "genre": "Genre", "year": "Composition Year", "voices": "Voices", "authors": "Composer"},
Voice: {"name": "Name", "range": "Range"},
Person: {"name": "Name", "born": "Born", "died": "Died"}}),
indent=4,
ensure_ascii=False)
return
def main(args):
text = ' '.join(args).strip()
if text == '':
json.dump({}, sys.stdout)
return
search(text)
main(sys.argv[1:]) |
9,073 | c1f432ff70b21064f36cf9651f8cff9c69361d5c | # from django.contrib.auth import forms
# class UserRegister(froms.M):
# class Meta:
# fields = []
|
9,074 | d88485e37d4df4cb0c8d79124d4c9c9ba18d124e | #!/usr/bin/python
from Tkinter import *
root = Tk()
root.title("Simple Graph")
root.resizable(0,0)
points = []
spline = 0
tag1 = "theline"
def point(event):
c.create_oval(event.x, event.y, event.x+1, event.y+1, fill="black", width="10.0")
points.append(event.x)
points.append(event.y)
print(event.x)
print(event.y)
return points
def canxy(event):
print("Getting the coordinates")
print event.x, event.y
c.create_oval(event.x, event.y, event.x+1, event.y+1, fill="red", width="20.0")
def graph(event):
    c.create_line(points, tags="theline")
def toggle(event):
global spline
if spline == 0:
c.itemconfigure(tag1, smooth=1)
spline = 1
elif spline == 1:
c.itemconfigure(tag1, smooth=0)
spline = 0
return spline
c = Canvas(root, bg="white", width=300, height= 300)
c.configure(cursor="crosshair")
c.pack()
c.bind("<Button-1>", point)
#c.bind("<Button-3>", graph)
c.bind("<Button-3>", canxy)
#c.bind("<Button-2>", toggle)
root.mainloop()
|
9,075 | a5e693a79211570f2d27575657496992f8fee164 | import random
def less(i1, i2):
return i1[0] * i2[1] < i2[0] * i1[1]
def equal(i1, i2):
return i1[0] * i2[1] == i2[0] * i1[1]
def more(i1, i2):
return i1[0] * i2[1] > i2[0] * i1[1]
def partition(x, l, r, pivot):
il = l
ir = l
for i in range(l, r):
        if less(x[i], pivot) and ir < r:  # compare by value/weight ratio, not tuple order
x[il], x[i] = x[i], x[il]
if il != ir:
x[ir], x[i] = x[i], x[ir]
il += 1
ir += 1
        elif equal(x[i], pivot) and ir < r:
x[ir], x[i] = x[i], x[ir]
ir += 1
return il, ir
def qsort(x, l=0, r=None):
if r is None:
r = len(x)
if (r - l) > 1:
pivot = x[random.randint(l, r - 1)]
il, ir = partition(x, l, r, pivot)
qsort(x, l, il)
qsort(x, ir, r)
N, w = list(map(int, input().split()))
x = []
for i in range(N):
x.append(tuple(map(int, input().split())))
qsort(x)
x = x[::-1]
s = 0
i = 0
while (i < N) and (w >= x[i][1]):
s += x[i][0]
w -= x[i][1]
i += 1
if i < N:
s += (x[i][0] * w // x[i][1])
print(s)
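# Example: input "3 50" with items "60 20", "100 50", "120 30" prints 180
# (take all of the 120/30 and 60/20 items; capacity runs out before the third).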
|
9,076 | 0fdbdfe98496ebedb112c85b79836292ffa3a5a9 | """
If you are using MultiScript Editor make sure to set PYTHONPATH to Winexs' editor.
You can use set PYTHONPATH=c:/users/username/myscripts
Set paths according to your project!
"""
CHROME_WEBDRIVER = 'c:/users/username/project/chromedriver.exe'
WEBSITE_PDF_CONVERTER = 'https://www.ilovepdf.com/merge_pdf'
PDF_FILES = 'c:/users/username/project' |
9,077 | 7f131e17f4fbd7d6b333a51dae557ddb07c30046 | #!/usr/bin/env python
# -*-coding:utf-8-*-
# Author:SemaseMing <blog.v-api.cn>
# Email: admin@v-api.cn
# Time: 2016-10-19 11:56
import gevent
def foo():
print('Running in foo')
gevent.sleep(0)
    print('Explicit context switch to foo again')
def bar():
print('Explicit context to bar')
gevent.sleep(0)
    print('Implicit context switch back to bar')
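# gevent.sleep(0) makes the two greenlets alternate; expected output:
#   Running in foo
#   Explicit context to bar
#   Explicit context switch to foo again
#   Implicit context switch back to bar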
gevent.joinall([gevent.spawn(foo), gevent.spawn(bar)]) |
9,078 | b9e78629fe094d933fdc0ffa2f9d9d1880e78c12 | import pandas as pd
import numpy as np
import sys
#Best Mean Test
if len(sys.argv) <= 3:
print("Not enough args usage: anova.py <*.csv> <rv1,rv2> <target to beat>")
print("ex: best-mean.py testdata.csv nicdrop 95000")
print("<rv> is response variable")
exit()
target_to_beat = int(sys.argv[3])  # target mean to beat
rv = sys.argv[2].split(',')
data = pd.read_csv(sys.argv[1], header=[0,1])
response_var = data[[rv[0],'factors']]
response_var.columns = response_var.columns.get_level_values(1)
print("Re-run factor means")
print(response_var.groupby('code')[rv[1]].mean())
print("Lowest observed sample mean (target to beat)")
print(response_var.groupby('code')[rv[1]].mean().min())
#print factors still remaining as viable
candidate_factors_index = response_var.groupby('code')[rv[1]].mean().index.array.to_numpy() #all factors from csv
improved_factors_bools = (response_var.groupby('code')[rv[1]].mean() < target_to_beat).to_numpy() #boolean series
effects = ""
i=0
for y in candidate_factors_index:
    if improved_factors_bools[i]:
        effects = effects + y + ","
    i=i+1
print("Effects")
if len(effects) == 0:
    print("NONE")
    exit()
print(effects.rstrip(','))
|
9,079 | ad09880b9e06a129b9623be2a086ebcc8dc55c2c | """Module containing class `Station`."""
from zoneinfo import ZoneInfo
import datetime
from vesper.util.named import Named
class Station(Named):
"""Recording station."""
def __init__(
self, name, long_name, time_zone_name,
latitude=None, longitude=None, elevation=None):
super().__init__(name)
self._long_name = long_name
self._time_zone = ZoneInfo(time_zone_name)
self._latitude = latitude
self._longitude = longitude
self._elevation = elevation
@property
def long_name(self):
return self._long_name
@property
def time_zone(self):
return self._time_zone
@property
def latitude(self):
return self._latitude
@property
def longitude(self):
return self._longitude
@property
def elevation(self):
return self._elevation
def get_night(self, time):
"""
Gets the station-local night that includes the specified time.
:Parameters:
time : `datetime`
the time whose night is to be gotten.
The time may be either naive or aware. If the time
is naive, it is assumed to be in the station's
time zone.
:Returns:
the station-local night that includes the specified time, a `date`.
The station-local night of a time is the starting date of the
local 24-hour period starting at noon that contains the time.
"""
if time.tzinfo is not None:
# time is aware
# convert time to station time zone
time = time.astimezone(self.time_zone)
if time.hour < 12:
time -= datetime.timedelta(hours=12)
return time.date()
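        # e.g. an aware 2024-05-02 03:00 in the station's zone is shifted back
        # 12 hours to 2024-05-01 15:00, so its night is the date 2024-05-01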
|
9,080 | dab5e7ee1d14cba485cbaece1354ec8d686ca4ab | # coding=utf-8
while True:
a,b=input().split()
a=float(a)
b=float(b)
if b==0:
print("error")
else:
c=a/b+0.5
c=int(c)
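        # int() truncates, so a/b + 0.5 rounds half up for nonnegative quotients (e.g. 7/2 -> 4)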
print(c) |
9,081 | 480e595c54da7426951d750187712fecdcb6d8c7 | ## SOLVED
## One way to solve this is to assign each player's hand a score that encodes
## the winning category and the card values
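## Worked example of the encoding: a full house, kings over twos, scores
## (6 << 10) + (11 << 2) + 0 = 6188, which outranks any flush, since a flush
## scores at most (5 << 10) + 346 = 5466 (346 is the largest get_best_high value)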
import csv
value = ['2','3','4','5','6','7','8','9','T','J','Q','K','A']
val_order = {k:v for v,k in enumerate(value)}
def compute():
poker_hand = load_data()
ans = sum(1 for x in poker_hand if p1_wins(x))
return ans
## is player1 wins?
def p1_wins(hands):
p1 = [(a[0], a[1]) for a in hands[:5]] # (value, suit)
p2 = [(a[0], a[1]) for a in hands[5:]]
return get_score(p1) > get_score(p2)
## return the score of the hand of a player
## score based on encoded value, using shift bit
def get_score(hand):
val = [x[0] for x in hand]
suit = [x[1] for x in hand]
val_int = [val_order[x] for x in val]
straight = get_straight(val_int)
flush = get_flush(suit)
# list of how many times 'n' occurs
val_count = [sum(1 for x in val_int if x==i) for i in range(13)]
# the histogram, how many times a count occurs
# so if there is one triple, vc_hist[3] = 1
vc_hist = [0] + [val_count.count(i) for i in range(1, 6)]
if straight == 12: # royal flush
return (9 << 10)
elif straight >= 0 and flush == 1: # straight flush
return (8 << 10) + straight
elif vc_hist[4] == 1: # four of a kind
return (7 << 10) + (val_count.index(4) << 2) + val_count.index(1)
elif vc_hist[3] == 1 and vc_hist[2] == 1: # full house
return (6 << 10) + (val_count.index(3) << 2) + val_count.index(2)
elif flush == 1: # flush
return (5 << 10) + get_best_high(val_int)
elif straight >= 0: # straight
return (4 << 10) + straight
elif vc_hist[3] == 1: # three of a kind
return (3 << 10) + (val_count.index(3) << 2) + get_best_high(val_int)
elif vc_hist[2] == 2: # two pair
return (2 << 10) + ((12 - val_count[::-1].index(2)) << 2) + val_count.index(2) + min(val_int)
elif vc_hist[2] == 1: # one pair
return (1 << 10) + (val_count.index(2) << 2) + get_best_high(val_int)
else:
return get_best_high(val_int)
## encode the sorted value
def get_best_high(value):
val_enc = [v << i for (i,v) in enumerate(sorted(value))]
return sum(val_enc)
## returns the highest value in straight, or -1 if not straight
## 'value' in order form, not hand form
def get_straight(value):
v = min(value)
if min(value) == 0 and max(value) == 12: # if the highest value is Ace, ace = -1
v = -1
for i in sorted(value)[1:]:
if i == v+1:
v = i
else:
return -1
return v
## return 1 if flush, -1 if not
def get_flush(suit):
res = 1 if suit.count(suit[0]) == 5 else -1
return res
def load_data():
data_file = open('prob054_poker.txt', newline='\n')
row = csv.reader(data_file, delimiter=' ')
return [r for r in row]
if __name__ == "__main__":
print(compute()) |
9,082 | c0e94a0d20397ebbbdddf726307b19b6c5c85ae6 | # -*- coding: utf-8 -*-
import graphviz
import fa_util
class Graph:
def draw(self, directory, filename, rules, start_state, accept_states):
g = graphviz.Digraph(format="svg", graph_attr={'rankdir': 'LR'})
self.add_start_edge(g, start_state)
edges = {}
for rule in rules:
from_state = self.state_to_str(self.get_state(rule))
to_state = self.state_to_str(self.get_next_state(rule))
self.add_graph_node(g, self.get_state(rule), from_state, accept_states)
self.add_graph_node(g, self.get_next_state(rule), to_state, accept_states)
label = self.make_label(rule)
edge_labels = edges.get((from_state, to_state))
            if edge_labels is None:
edges[(from_state, to_state)] = [label]
else:
edge_labels.append(label)
self.add_edges(g, edges)
g.render(filename=filename, directory=directory, format="png", view=True)
# Supposed to be extended
def make_label(self, rule):
return "ε" if rule._character == None else rule._character
# Supposed to be extended
def format_labels(self, labels):
return ','.join(labels)
# Supposed to be extended
def get_state(self, rule):
return rule._state
# Supposed to be extended
def get_next_state(self, rule):
return rule._next_state
# Supposed to be extended
def add_start_edge(self, graph, start_state):
dummy_node = fa_util.random_str(8)
graph.node(dummy_node, style="invis", shape="point")
graph.edge(dummy_node, self.state_to_str(start_state), style="bold")
def add_graph_node(self, graph, state, state_str, accept_states):
attr = {'root': 'true', 'shape': 'circle'}
if state in accept_states:
attr['shape'] = 'doublecircle'
graph.node(state_str, **attr)
def add_edges(self, graph, edges):
for (_from, to), labels in edges.items():
graph.edge(_from, to, self.format_labels(labels))
def state_to_str(self, state):
if isinstance(state, str):
return state
try:
iter(state)
### state is iterable ###
if len(state) == 0:
return 'Ø'
# converting list object directly to set object break the order of elements in string
list_str = str([self.state_to_str(e) for e in sorted(state)])
return list_str.replace('[', '{').replace(']', '}')
except TypeError:
### state is not iterable ###
return str(state)
|
9,083 | 599da0f045ab5c2b3f568def3d89452b56cac029 | #!/usr/bin/env python
'''
Generate tree search dot file
'''
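# Typical invocation (file names are arbitrary): the script writes dot source to
# stdout, so pipe it through Graphviz, e.g.
#   python jug_search.py > tree.dot && dot -Tpng tree.dot -o tree.png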
import copy
# Colors supported by graphviz, in some pleasing order
colors = {
"fa": "brown",
"fb": "brown1",
"ea": "cadetblue",
"eb": "cadetblue1",
"pa": "orange",
"pb": "orange4"
}
curId = 1
capAset = 4
capBset = 7
goal = 2
def export_dot():
# helper functions
def getColor(node):
(a, b) = node["state"]
if a == goal or b == goal:
return "red"
return "black"
def getLabel(node):
if node["leaf"]:
return "{} \n cost:{}".format(node["state"], node["cost"])
else:
return node["state"]
print """digraph searchTree {
size = "8,8";
node [ shape=oval, style=filled, fillcolor=lightblue2 ] ;
edge [fontname="Helvetica"];
splines=curved;
"""
(nodes, edges) = getGraph()
for n in nodes:
print "{} [label=\"{}\", color={}, penwidth=2];".format(
n["id"], getLabel(n), getColor(n))
for (x, y, action) in edges:
print "{} -> {} [xlabel=\"{}\",color={}]".format(
x, y, action, colors[action])
print "}"
def getGraph():
tree = bfs(capAset, capBset, goal)
nodes = [v for k, v in tree.items()]
edges = [(n["parent"], n["id"], n["action"]) for n in nodes
if n["parent"] != -1]
hasChild = set()
for node in nodes:
hasChild.add(node["parent"])
for node in nodes:
if node["id"] in hasChild:
node["leaf"] = False
else:
node["leaf"] = True
return (nodes, edges)
def bfs(capA, capB, goal):
#helper functions
def fillA(state):
(a, b) = state["state"]
global curId
curId += 1
ans = {
"state": (capA, b),
"cost": state["cost"] + capA - a,
"id": curId,
"parent": state["id"],
"action": "fa",
"visited": copy.deepcopy(state["visited"])
}
return ans
def fillB(state):
(a, b) = state["state"]
global curId
curId += 1
ans = {
"state": (a, capB),
"cost": state["cost"] + capB - b,
"id": curId,
"parent": state["id"],
"action": "fb",
"visited": copy.deepcopy(state["visited"])
}
return ans
def emptyA(state):
(a, b) = state["state"]
global curId
curId += 1
ans = {
"state": (0, b),
"cost": state["cost"],
"id": curId,
"parent": state["id"],
"action": "ea",
"visited": copy.deepcopy(state["visited"])
}
return ans
def emptyB(state):
(a, b) = state["state"]
global curId
curId += 1
ans = {
"state": (a, 0),
"cost": state["cost"],
"id": curId,
"parent": state["id"],
"action": "eb",
"visited": copy.deepcopy(state["visited"])
}
return ans
def pourA(state):
(a, b) = state["state"]
global curId
curId += 1
ans = {
"state": (lambda x, y: (x + y - capB, capB)
if x + y > capB else (0, x + y))
(a, b),
"cost": state["cost"],
"id": curId,
"parent": state["id"],
"action": "pa",
"visited": copy.deepcopy(state["visited"])
}
return ans
def pourB(state):
(a, b) = state["state"]
global curId
curId += 1
ans = {
"state": (lambda x, y: (capA, x + y - capA)
if x + y > capA else (x + y, 0))
(a, b),
"cost": state["cost"],
"id": curId,
"parent": state["id"],
"action": "pb",
"visited": copy.deepcopy(state["visited"])
}
return ans
initState = {
"state": (0, 0),
"cost": 0,
"id": 0,
"parent": -1,
"action": "Nothing",
"visited": set()
}
queue = []
queue.append(initState)
tree = dict()
while queue:
state = queue.pop(0)
(a, b) = state["state"]
#check if visited
if ((a, b) == (0, 0) and curId != 1) or (a, b) == (capA, capB):
continue
if (a, b) in state["visited"]:
#tree[state["id"]] = state
continue
if a == goal or b == goal:
tree[state["id"]] = state
break
else:
tree[state["id"]] = state
state["visited"].add((a, b))
# fill A
if a != capA:
queue.append(fillA(state))
# fill B
if b != capB:
queue.append(fillB(state))
# empty A
if a > 0:
queue.append(emptyA(state))
# empty B
if b > 0:
queue.append(emptyB(state))
# pour A to B
if a > 0 and b != capB:
queue.append(pourA(state))
# pour B to A
if b > 0 and a != capA:
queue.append(pourB(state))
return tree
def main():
export_dot()
if __name__ == '__main__':
main()
|
9,084 | 6fdc9b2091652b05d6c1207d2f78b75c880fadda | __author__ = 'Administrator'
class People:
def __init__(self,name,age):
self.name = name
self.age = age
    def eat(self):
        print("%s is eating..." % self.name)
def sleep(self):
print("%s is sleeping..." % self.name)
def talk(self):
print("%s is talking..." % self.name)
class Man(People):
def __init__(self,name,age,money):
# People.__init__(self,name,age)
super(Man,self).__init__(name,age)
self.money = money
print("%s 一出生就有%s money..." % (name,money))
def piao(self):
print("%s is piaoing...20s...isdone" % self.name)
def sleep(self):
#People.sleep(self)
print("man is sleeping")
class Women(People):
    def get_birth(self):
print("%s is born a baby...." % self.name)
m1 = Man("chenronghua",22,10000)
m1.eat()
m1.sleep()
m1.talk()
m1.piao()
w1 = Women("ronghua",26)
w1.get_birth()
|
9,085 | 7de06772a1024a81193ac69a1110ad2e8b7f64ac | # Given an integer, convert it to a roman numeral.
# Input is guaranteed to be within the range from 1 to 3999.
class Solution:
# @param {integer} num
# @return {string}
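    # e.g. Solution().intToRoman(1994) returns "MCMXCIV"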
def intToRoman(self, num):
normalDic = {
1000: 'M',
500: 'D',
100: 'C',
50: 'L',
10: 'X',
5: 'V',
1: 'I'
}
specialDic = {
'41': 'IV', # 4
'91': 'IX', # 9
'42': 'XL', # 40
'92': 'XC', # 90
'43': 'CD', # 400
'93': 'CM', # 900
}
roman = ""
remainders = ['4', '9']
divisors = [1000, 500, 100, 50, 10, 5, 1]
for i, divisor in enumerate(divisors):
            quotient = num // divisor  # floor division, correct on both Python 2 and 3
if quotient > 0:
roman += normalDic[divisor] * quotient
num = num % divisor
if str(num)[0] in remainders:
roman += specialDic[str(num)[0] + str(len(str(num)))]
num -= int(str(num)[0]) * (10 ** (len(str(num)) - 1))
return roman |
9,086 | f78f8f560b7eb70232658be762e2058535a68122 | # -*- coding: utf-8 -*-
"""
Created on Tue Jul 11 11:11:32 2017
@author: lindseykitchell
"""
import pandas as pd
import numpy as np
from scipy.stats.stats import pearsonr
import matplotlib.pylab as plt
import glob
import os
pwd = os.getcwd()
df_dict = {}
subj_list = []
for fname in glob.glob(pwd + "/*spectrum.json"):
    subj_name = os.path.basename(fname)[0:6]
    subj_list.append(subj_name)
    df_dict[subj_name] = pd.read_json(fname)
all_tracts = list(df_dict[subj_list[0]])[:-1]
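# Each *spectrum.json is assumed to hold one subject's per-tract spectra,
# keyed by tract name, with a non-tract final column (dropped above). The
# loop below builds one subject-by-subject Pearson correlation matrix per
# tract and tiles them into a 5x4 grid of panels sharing one colorbar.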
fig = plt.figure(figsize=(18,18))
all_corrs = []
fig_num = 1
for tract in all_tracts:
corr = np.zeros([len(subj_list), len(subj_list)])
for num in range(len(subj_list)):
for num2 in range(len(subj_list)):
corrval, pval = pearsonr(df_dict[subj_list[num]][tract], df_dict[subj_list[num2]][tract])
corr[num, num2] = corrval
all_corrs.append(corr)
ax = fig.add_subplot(5,4,fig_num)
ax.set_aspect('equal')
ax.set_title(tract)
im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.cm.viridis, aspect='equal')
#ocean hot
fig_num += 1
cax = fig.add_axes([0.9, 0.1, 0.03, 0.8])
plt.colorbar(im, cax)
plt.savefig('alltractcorrelations.png', bbox_inches='tight')
plt.show() |
9,087 | 58bd14d240242ed58dcff35fe91cebeae4899478 | """
time: X * Y
space: worst case X * Y
"""
from typing import List


class Solution:
def numIslands(self, grid: List[List[str]]) -> int:
if not grid:
return 0
Y = len(grid)
X = len(grid[0])
def dfs(y, x):
if y < 0 or x < 0 or y > Y-1 or x > X-1:
return
if grid[y][x] == "1":
grid[y][x] = "0"
dfs(y, x-1)
dfs(y, x+1)
dfs(y-1, x)
dfs(y+1, x)
ans = 0
for y in range(Y):
for x in range(X):
if grid[y][x] == "1":
dfs(y, x)
ans += 1
return ans
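
    # Second take on the same problem; because it is defined under the same
    # name, this version shadows the one above when the class is used.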
def numIslands(self, grid : List[List[str]]) -> int:
        if not grid or not grid[0]:
            return 0
        R = len(grid)
        C = len(grid[0])
def dfs(r, c):
if r < 0 or c < 0 or r >= R or c >= C:
return
if grid[r][c] == '1':
grid[r][c] = '0'
dfs(r-1,c)
dfs(r+1,c)
dfs(r,c-1)
dfs(r,c+1)
rtn = 0
for r in range(R):
for c in range(C):
if grid[r][c] == '1':
rtn += 1
dfs(r,c)
return rtn
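
# A quick hand-rolled sanity check (not from any judge's test suite):
if __name__ == '__main__':
    grid = [["1", "1", "0", "0"],
            ["1", "0", "0", "1"],
            ["0", "0", "1", "1"]]
    print(Solution().numIslands(grid))  # expected: 2 (note: mutates grid)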
|
9,088 | de665735f02c7569ab382fdc3e910d5d3ac05bb5 | import enter
import loginout
import roleinfo
import zhanyi
import package
#import matrix |
9,089 | eff8b6a282ac73a116587e7ed04f386927c9f826 | import torch
import torch.nn as nn
class MLPNet(nn.Module):
def __init__(self, num_classes):
super(MLPNet, self).__init__()
self.fc1 = nn.Linear(32 * 32 * 3, 512)
self.fc2 = nn.Linear(512, num_classes)
def forward(self, x):
x = x.view(x.size(0), -1)
x = self.fc1(x)
x = torch.sigmoid(x)
x = self.fc2(x)
return x
def zero_weights(self):
self.fc1.weight.data.fill_(0.0)
self.fc1.bias.data.fill_(0.0)
self.fc2.weight.data.fill_(0.0)
self.fc2.bias.data.fill_(0.0)
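
# A minimal usage sketch with CIFAR-10-shaped input (the 32 * 32 * 3 input
# size above suggests CIFAR; num_classes=10 is an assumption):
if __name__ == '__main__':
    net = MLPNet(num_classes=10)
    x = torch.randn(4, 3, 32, 32)  # batch of 4 fake RGB images
    logits = net(x)
    print(logits.shape)  # torch.Size([4, 10])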
|
9,090 | 3c01ca27a5eef877b606b93b04ffe6f73168cd6b | #Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/paperDoll/SkinRaytracing.py
import trinity
import blue
import telemetry
import ctypes
import math
import time
import geo2
import struct
import itertools
import weakref
import uthread
import paperDoll as PD
import log
import random
mylog = log.Channel('optix', 'python')
def LogInfo(text, *args):
for arg in args:
text += ' ' + str(arg)
mylog.Log(text, log.LGINFO)
def LogWarn(text, *args):
for arg in args:
text = text + ' ' + str(arg)
mylog.Log(text, log.LGWARN)
class SkinRaytracingTools():
__guid__ = 'paperDoll.SkinRaytracingTools'
@staticmethod
def SetOptixMatrixFromTrinity(optix, matrixName, ratio = None):
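        # Rebuilds the clip-space -> world-space transform by inverting
        # Trinity's current projection and view matrices, then uploads it
        # (and the raw view transform) to the OptiX scope under matrixName
        # and 'viewTransform'.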
proj = trinity.TriProjection()
view = trinity.TriView()
view.transform = trinity.GetViewTransform()
proj.PerspectiveFov(trinity.GetFieldOfView(), trinity.GetAspectRatio() if ratio is None else ratio, trinity.GetFrontClip(), trinity.GetBackClip())
projToView = geo2.MatrixInverse(proj.transform)
viewToWorld = geo2.MatrixInverse(view.transform)
projToWorld = geo2.MatrixMultiply(projToView, viewToWorld)
r0 = projToWorld[0]
r1 = projToWorld[1]
r2 = projToWorld[2]
r3 = projToWorld[3]
mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
optix.SetMatrix4x4(matrixName, mat)
r0 = view.transform[0]
r1 = view.transform[1]
r2 = view.transform[2]
r3 = view.transform[3]
mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
optix.SetMatrix4x4('viewTransform', mat)
return mat
@staticmethod
def CreateSamplerForTexture(name, map, waitForFinish):
rt = trinity.Tr2RenderTarget(map.width, map.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
job = trinity.CreateRenderJob()
job.PushRenderTarget(rt)
job.PushDepthStencil(None)
job.SetStdRndStates(trinity.RM_FULLSCREEN)
job.RenderTexture(map)
job.PopDepthStencil()
job.PopRenderTarget()
job.ScheduleOnce()
if waitForFinish:
job.WaitForFinish()
sampler = trinity.Tr2OptixTextureSampler()
if True:
res = trinity.TriTextureRes()
res.CreateAndCopyFromRenderTarget(rt)
sampler.CreateFromTexture(res)
else:
sampler.CreateFromRenderTarget(rt)
sampler.SetNormalizedIndexingMode(True)
if True:
return (sampler, res)
else:
return (sampler, rt)
@staticmethod
def ConvertCubeToTextures(cube):
names = ['PX',
'NX',
'PY',
'NY',
'PZ',
'NZ']
viewVec = [(1, 0, 0),
(-1, 0, 0),
(0, 1, 0),
(0, -1, 0),
(0, 0, 1),
(0, 0, -1)]
upVec = [(0, 1, 0),
(0, 1, 0),
(0, 0, 1),
(0, 0, -1),
(0, 1, 0),
(0, 1, 0)]
spaceScene = trinity.EveSpaceScene()
spaceScene.envMap1ResPath = str(cube.resourcePath)
spaceScene.envMapScaling = (1, 1, -1)
spaceScene.backgroundRenderingEnabled = True
spaceScene.backgroundEffect = trinity.Load('res:/dx9/scene/starfield/bakeNebula.red')
blue.resMan.Wait()
node = PD.FindParameterByName(spaceScene.backgroundEffect, 'NebulaBrightness')
if node is None:
node = trinity.Tr2FloatParameter()
node.name = 'NebulaBrightness'
spaceScene.backgroundEffect.parameters.append(node)
if node is not None:
node.value = 100
node = PD.FindResourceByName(spaceScene.backgroundEffect, 'NebulaMap')
if node is None:
node = trinity.TriTexture2DParam()
node.name = 'NebulaMap'
spaceScene.backgroundEffect.resources.append(node)
node.SetResource(cube.resource)
blue.resMan.Wait()
mipmapped = []
useTexture = True
for i in xrange(len(names)):
name = names[i]
rt = PD.SkinLightmapRenderer.CreateRenderTarget(cube.resource.width, cube.resource.height, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM, useRT=True)
job = trinity.CreateRenderJob(name=name)
job.PushRenderTarget(rt)
job.PushDepthStencil(None)
job.Clear([(1, 0, 0),
(0.2, 0, 0),
(0, 1, 0),
(0, 0.2, 0),
(0, 0, 1),
(0, 0, 0.2)][i], None)
proj = trinity.TriProjection()
proj.PerspectiveFov(math.pi * 0.5, 1, 0.1, 1000)
view = trinity.TriView()
view.SetLookAtPosition((0, 0, 0), viewVec[i], upVec[i])
viewport = trinity.TriViewport(0, 0, cube.resource.width, cube.resource.height, 0.0, 1.0)
job.SetView(view)
job.SetProjection(proj)
job.SetViewport(viewport)
job.Update(spaceScene)
job.RenderScene(spaceScene)
job.PopDepthStencil()
job.PopRenderTarget()
if useTexture:
tex = trinity.TriTextureRes(cube.resource.width, cube.resource.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
if True:
job.ScheduleOnce()
job.WaitForFinish()
if useTexture:
mipmapped.append(tex)
else:
mipmapped.append(rt)
else:
job.ScheduleRecurring()
return (mipmapped, names)
@staticmethod
def FindAllTextureResourcesFromEffect(effect, scope):
textures = {}
samplers = []
cubemaps = []
if effect is not None:
for r in effect.resources:
if type(r) == trinity.TriTexture2DParameter and r.resource is not None:
textures[r.name] = r.resource
elif type(r) == trinity.TriTextureCubeParameter and r.resource is not None:
if r.name in cubemaps:
continue
LogInfo('', r.name, ': Converting to individual textures')
cubemaps.append(r.name)
mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(r)
for i in range(len(names)):
if i < len(mipmaps):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(mipmaps[i])
sampler.SetNormalizedIndexingMode(True)
scope.SetSampler(r.name + names[i], sampler)
LogInfo('No-Copy Cube Side Interop for ' + r.name + names[i])
samplers.append(mipmaps[i])
samplers.append(sampler)
return (textures, samplers)
@staticmethod
def FindAllTextureResources(dynamic, scope):
textures = {}
samplers = []
cubemaps = []
def ProcessMesh(mesh):
for area in itertools.chain(mesh.opaqueAreas, mesh.decalAreas, mesh.transparentAreas):
newTextures, newSamplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(area.effect, scope)
textures.update(newTextures)
samplers.extend(newSamplers)
if type(dynamic) == trinity.Tr2IntSkinnedObject:
for mesh in dynamic.visualModel.meshes:
ProcessMesh(mesh)
elif type(dynamic) == trinity.EveShip2:
ProcessMesh(dynamic.highDetailMesh.object)
elif type(dynamic) == trinity.EveStation2:
ProcessMesh(dynamic.highDetailMesh.object)
return (textures, samplers)
@staticmethod
def InteropTexture(name, texture, waitForFinish, scope):
if texture.format == trinity.PIXEL_FORMAT.B8G8R8A8_UNORM:
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(texture)
sampler.SetNormalizedIndexingMode(True)
scope.SetSampler(name, sampler)
LogInfo('No-Copy Interop for', name)
return (sampler, None)
if texture.type == trinity.TRIRTYPE_CUBETEXTURE:
LogInfo('Copy-Interop for cubes not supported, skipping', name)
return
sampler_rt = SkinRaytracingTools.CreateSamplerForTexture(name, texture, waitForFinish)
if sampler_rt is None or len(sampler_rt) < 1:
LogInfo('InteropTexture failed for', name)
else:
scope.SetSampler(name, sampler_rt[0])
LogInfo('Interop for', name)
return sampler_rt
@staticmethod
def InteropAllTexturesFromEffect(optix, effect, waitForFinish, nameTranslation = None, scope = None, cache = None):
if scope is None:
scope = optix
textures, samplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(effect, scope)
for name, texture in textures.iteritems():
if 'spotlight' in name.lower():
continue
if nameTranslation is not None:
name = nameTranslation.get(name, name)
if cache is not None and texture in cache:
sampler = cache[texture]
scope.SetSampler(name, sampler[0])
LogInfo('Interop cache for', name)
else:
sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
if sampler and cache is not None:
cache[texture] = sampler
if sampler is not None:
samplers.append(sampler)
return samplers
@staticmethod
def InteropAllTextures(optix, dynamic, waitForFinish, nameTranslation = None, scope = None):
if scope is None:
scope = optix
textures, samplers = SkinRaytracingTools.FindAllTextureResources(dynamic, scope)
for name, texture in textures.iteritems():
if 'spotlight' in name.lower():
continue
if nameTranslation is not None:
name = nameTranslation.get(name, name)
sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
if sampler is not None:
samplers.append(sampler)
return samplers
@staticmethod
def SafeLinearize(values):
peak = max(1, max(values[0], max(values[1], values[2])))
return (peak * math.pow(values[0] / peak, 2.2),
peak * math.pow(values[1] / peak, 2.2),
peak * math.pow(values[2] / peak, 2.2),
values[3])
@staticmethod
def CopyParametersToContext(effect, instance, linearNames = None):
for p in effect.parameters:
if type(p) is trinity.Tr2Vector4Parameter:
value = SkinRaytracingTools.SafeLinearize(p.value) if linearNames is not None and p.name in linearNames else p.value
instance.SetFloat4(p.name, value[0], value[1], value[2], value[3])
elif type(p) is trinity.TriFloatParameter or type(p) is trinity.Tr2FloatParameter:
instance.SetFloat4(p.name, p.value, 0, 0, 0)
@staticmethod
def CreateBufferForLights(lights, leaveEmpty = False, preserveAlpha = False):
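        # Each light is packed as 16 floats (64 bytes): position.xyz + radius,
        # gamma-linearized RGB + falloff (or the raw alpha), cone direction +
        # cos(outer angle), cos(inner angle) and three floats of padding --
        # matching the struct.pack('16f', ...) call below.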
bufEveLights = trinity.Tr2OptixBuffer()
bufEveLights.CreateUserData(64, len(lights), trinity.OPTIX_BUFFER_OUTPUT, False)
bufEveLights.MapUser()
buffer = ''
if leaveEmpty:
lights = []
for light in lights:
innerAngle = light.coneAlphaInner
outerAngle = light.coneAlphaOuter
if innerAngle + 1.0 > outerAngle:
innerAngle = outerAngle - 1.0
innerAngle = math.cos(innerAngle * 3.1415927 / 180.0)
outerAngle = math.cos(outerAngle * 3.1415927 / 180.0)
coneDir = geo2.Vec3Normalize((light.coneDirection[0], light.coneDirection[1], light.coneDirection[2]))
            buffer += struct.pack('16f', light.position[0], light.position[1], light.position[2], light.radius, math.pow(light.color[0], 2.2), math.pow(light.color[1], 2.2), math.pow(light.color[2], 2.2), light.falloff if not preserveAlpha else light.color[3], coneDir[0], coneDir[1], coneDir[2], outerAngle, innerAngle, 0, 0, 0)
bufEveLights.SetUserDataFromStruct(buffer)
bufEveLights.UnmapUser()
return bufEveLights
@staticmethod
def CreateUInt1Buffer(optix, name):
buffer = trinity.Tr2OptixBuffer()
buffer.CreateUInt1(1, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
buffer.Map()
buffer.SetUserDataI(0, 0)
buffer.Unmap()
optix.SetBuffer(name, buffer)
return buffer
@staticmethod
def matEqual(m1, m2):
return m1._11 == m2._11 and m1._12 == m2._12 and m1._13 == m2._13 and m1._14 == m2._14 and m1._21 == m2._21 and m1._22 == m2._22 and m1._23 == m2._23 and m1._24 == m2._24 and m1._31 == m2._31 and m1._32 == m2._32 and m1._33 == m2._33 and m1._34 == m2._34 and m1._41 == m2._41 and m1._42 == m2._42 and m1._43 == m2._43 and m1._44 == m2._44
@staticmethod
def FuncWrapper(weakSelf, func):
if weakSelf():
func(weakSelf())
class OitHelper():
def __init__(self, optix):
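        # Scratch space for order-independent transparency: a single atomic
        # allocation counter plus a ~1M-entry node pool (64 + 112 bytes per
        # node) that the OptiX hit programs append into.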
self.oitAllocatorBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'oit_allocator')
oitPoolBuffer = trinity.Tr2OptixBuffer()
oitPoolBuffer.CreateUserData(64 + 112, 1048576, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
optix.SetBuffer('oit_pool', oitPoolBuffer)
self.oitPoolBuffer = oitPoolBuffer
def ResetAllocationCount(self):
self.oitAllocatorBuffer.Map()
self.oitAllocatorBuffer.SetUserDataI(0, 0)
self.oitAllocatorBuffer.Unmap()
def GetAllocationCount(self):
self.oitAllocatorBuffer.Map()
count = self.oitAllocatorBuffer.GetUserDataI(0)
self.oitAllocatorBuffer.Unmap()
return count
class RayCountHelper():
def __init__(self, optix):
self.rayCountBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'ray_count')
def ResetCount(self):
self.rayCountBuffer.Map()
self.rayCountBuffer.SetUserDataI(0, 0)
self.rayCountBuffer.Unmap()
def GetCount(self):
self.rayCountBuffer.Map()
count = self.rayCountBuffer.GetUserDataI(0)
self.rayCountBuffer.Unmap()
return count
class CaptureHelper():
def __init__(self, width, height):
self.capture = trinity.Tr2RenderTarget(width, height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
def SaveSurfaceToFile(self, filename):
trinity.SaveRenderTarget(filename, self.capture)
LogInfo('Saved to', filename)
def CreateRenderSteps(self, rj, blitfx):
rj.PushRenderTarget(self.capture).name = 'Begin screenshot capture'
rj.PushDepthStencil(None).name = ' push depth'
rj.RenderEffect(blitfx).name = ' Blit to screenshot'
rj.PopDepthStencil().name = ' pop depth'
rj.PopRenderTarget().name = 'End screenshot capture'
class FullScreenBlitter():
def __init__(self, width, height):
self.effect = trinity.Tr2Effect()
self.effect.effectFilePath = 'res:/graphics/effect/optix/shaders/gammaBlit.fx'
if self.effect.effectResource is None:
LogWarn('Failed to load effect 1')
return
self.highpassEffect = trinity.Tr2Effect()
self.highpassEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/highpassFilter.fx'
if self.highpassEffect.effectResource is None:
            LogWarn('Failed to load effect 2')
return
self.gaussianHorizEffect = trinity.Tr2Effect()
self.gaussianHorizEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
if self.gaussianHorizEffect.effectResource is None:
LogWarn('Failed to load effect 3')
return
self.gaussianVertEffect = trinity.Tr2Effect()
self.gaussianVertEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
if self.gaussianVertEffect.effectResource is None:
            LogWarn('Failed to load effect 4')
return
for effect in [self.effect,
self.highpassEffect,
self.gaussianHorizEffect,
self.gaussianVertEffect]:
while effect.effectResource.isLoading:
PD.Yield()
self.blitcolor = trinity.Tr2Vector4Parameter()
self.blitcolor.name = 'Color'
for effect in [self.effect,
self.highpassEffect,
self.gaussianHorizEffect,
self.gaussianVertEffect]:
effect.PopulateParameters()
effect.RebuildCachedData()
effect.parameters.append(self.blitcolor)
sizesParam = trinity.Tr2Vector4Parameter()
sizesParam.name = 'InvSize'
sizesParam.value = (1.0 / width,
1.0 / height,
0,
0)
for effect in [self.effect, self.highpassEffect]:
effect.parameters.append(sizesParam)
sizesHorizParam = trinity.Tr2Vector4Parameter()
sizesHorizParam.name = 'invTexelSize'
sizesHorizParam.value = (1.0 / width,
0.0,
0,
0)
self.gaussianHorizEffect.parameters.append(sizesHorizParam)
sizesVertParam = trinity.Tr2Vector4Parameter()
sizesVertParam.name = 'invTexelSize'
sizesVertParam.value = (0.0,
1.0 / height,
0,
0)
self.gaussianVertEffect.parameters.append(sizesVertParam)
def SetTexture(self, optixOutputTexture, highpassTexture, filteredTexture):
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(optixOutputTexture)
for effect in [self.effect, self.highpassEffect]:
effect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(highpassTexture)
self.gaussianHorizEffect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(filteredTexture)
self.gaussianVertEffect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'BloomTexture'
tex.SetResource(highpassTexture)
self.effect.resources.append(tex)
def UpdateFrameCount(self, framecount):
invFC = 1.0 / framecount if framecount > 0 else 1.0
self.blitcolor.value = (invFC,
invFC,
invFC,
invFC)
class FullOptixRenderer():
__guid__ = 'paperDoll.FullOptixRenderer'
instance = None
def AddCallback(self, func, name, rj):
cb = trinity.TriStepPythonCB()
weakSelf = weakref.ref(self)
cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
cb.name = name
rj.steps.append(cb)
def GetFrameCount(self):
return self.framecount
def SaveScreenshot(self, filename):
self.capture.SaveSurfaceToFile(filename)
def AddRenderPreviewStep(self, renderJob):
renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
renderJob.PushDepthStencil(None).name = ' [optix] push depth'
renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
renderJob.PopDepthStencil().name = ' [optix] pop depth'
def RefreshMatrices(self):
model = self.skinnedObject
self.optix.RefreshMatrices(model, self.skinnedOptix)
self.RunSkinningAndTesselation()
self.ApplySettings()
print 'Refreshed'
@staticmethod
def RaytraceFrame(selfRef):
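        # Progressive accumulation: if the camera (clip-to-world) matrix
        # changed since the last frame, clear the accumulation buffer and
        # re-derive depth of field from the eyeball bone distances; otherwise
        # accumulate one more sample per pixel into output_buffer.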
start = time.time()
VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
selfRef.previousVP = VP
selfRef.outputBuffer.Clear()
selfRef.framecount = 0
model = selfRef.skinnedObject
pos1 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballLeft'))
pos2 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballRight'))
dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
autodof = min(dist1, dist2)
dof = selfRef.settings.get('lens_focal_distance', autodof)
print 'Auto-depth-of-field is at', autodof, ', actual focal distance is', dof
selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
else:
selfRef.framecount += 1
selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
selfRef.oit.ResetAllocationCount()
selfRef.rayCounter.ResetCount()
time1 = time.time()
selfRef.optix.Run(0, selfRef.width, selfRef.height)
time2 = time.time()
sec = time2 - time1
raycount = selfRef.rayCounter.GetCount()
raysec = 0
if sec > 0:
raysec = raycount / float(sec)
time3 = time.time()
if selfRef.framecount % 32 == 0:
stop = time.time()
print selfRef.oit.GetAllocationCount(), 'oit allocations'
selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
print 'time %05.3f / %05.3f / %05.3f / %05.3f msec' % (float(time1 - start) * 1000,
float(time2 - time1) * 1000,
float(time3 - time2) * 1000,
float(stop - time3) * 1000),
print '%d rays in %05.3f ms / %10d Krays/sec / %d rays per pixel' % (raycount,
sec * 1000,
raysec / 1000,
selfRef.framecount)
@telemetry.ZONE_METHOD
def OnBeforeOptixPositionsUV(self):
PD.SkinLightmapRenderer.DoChangeEffect('oxPosWorldUVEffect', self.oxMeshes)
if self.skinnedObject is not None and self.skinnedObject.visualModel is not None:
self.savedMeshes = self.skinnedObject.visualModel.meshes[:]
filteredMeshes = [ ref.object for ref in self.oxMeshes.iterkeys() if ref.object is not None ]
PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, filteredMeshes)
self.scene.filterList.removeAt(-1)
self.scene.filterList.append(self.skinnedObject)
self.scene.useFilterList = True
@telemetry.ZONE_METHOD
def OnBeforeOptixNormalsUV(self):
PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', self.oxMeshes)
def OnAfterOptix(self):
PD.SkinLightmapRenderer.DoRestoreShaders(meshes=self.oxMeshes)
PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, self.savedMeshes)
del self.savedMeshes
self.scene.useFilterList = False
self.scene.filterList.removeAt(-1)
def _InitUVUnwrap(self):
self.oxMeshes = {}
self.scatterFX = set()
self.unwrapSize = 1024
posUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_POSWORLD_UV_EFFECT)
normalUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_NORMALWORLD_UV_EFFECT)
deriv = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.STRETCHMAP_RENDERER_EFFECT)
self.oxDepth = trinity.Tr2DepthStencil(self.unwrapSize, self.unwrapSize, trinity.DEPTH_STENCIL_FORMAT.D24S8, 1, 0)
for mesh in self.skinnedObject.visualModel.meshes:
if PD.SkinLightmapRenderer.IsScattering(mesh):
m = PD.SkinLightmapRenderer.Mesh()
m.ExtractOrigEffect(mesh)
m.CreateOptixEffects(includeStretchMap=True)
PD.AddWeakBlue(self, 'oxMeshes', mesh, m)
fx = PD.GetEffectsFromMesh(mesh)
for f in fx:
self.scatterFX.add(f)
self.oxWorldPosMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
self.oxWorldNormalMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
self.stretchMap = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize / 2, self.unwrapSize / 2, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
rj = trinity.CreateRenderJob('Optix UV Unwrap')
rj.PushRenderTarget(self.oxWorldPosMapUV)
rj.PushDepthStencil(self.oxDepth)
rj.Clear((0, 0, 0, 0), 1.0)
rj.SetStdRndStates(trinity.RM_FULLSCREEN)
vp = trinity.TriViewport()
vp.x = 0
vp.y = 0
vp.width = self.unwrapSize
vp.height = self.unwrapSize
rj.SetViewport(vp)
PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnBeforeOptixPositionsUV, 'onBeforeOptixPositionsUV', rj)
rj.RenderScene(self.scene).name = 'Optix WorldPos (UV space)'
PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', meshes=weakSelf.oxMeshes), '', rj)
rj.SetRenderTarget(self.oxWorldNormalMapUV)
rj.Clear((0, 0, 0, 0), 1.0)
rj.RenderScene(self.scene).name = 'Optix Normals (UV space)'
rj.SetRenderTarget(self.stretchMap)
rj.Clear((0, 0, 0, 0), 1.0)
vp2 = trinity.TriViewport()
vp2.x = 0
vp2.y = 0
vp2.width = self.unwrapSize / 2
vp2.height = self.unwrapSize / 2
rj.SetViewport(vp2)
PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('stretchmapRenderEffect', meshes=weakSelf.oxMeshes), '', rj)
rj.RenderScene(self.scene).name = 'Stretchmap'
PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnAfterOptix, 'onAfterOptix', rj)
rj.PopRenderTarget()
rj.PopDepthStencil()
rj.ScheduleOnce()
rj.WaitForFinish()
if False:
PD.SkinLightmapRenderer.SaveTarget(self.oxWorldPosMapUV, 'c:/depot/oxworldposuv2.dds', isRT=True)
PD.SkinLightmapRenderer.SaveTarget(self.oxWorldNormalMapUV, 'c:/depot/oxworldnormaluv2.dds', isRT=True)
PD.SkinLightmapRenderer.SaveTarget(self.stretchMap, 'c:/depot/stretchmap2.dds', isRT=True)
print '** MAPS SAVED **'
def RunSkinningAndTesselation(self):
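        # Runs the CUDA skinning kernels once per vertex stride (72 and 64
        # bytes per vertex), writing skinned -- and, for the detailed hair
        # shader, 4x tesselated -- vertices into fresh buffers that then
        # replace each batch's vertex buffer.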
print '*** Tesselation phase ***'
batchTypes = self.skinnedOptix[0]
optix = self.optix
ptx = {}
ptx[72] = self.path + 'eve_skinning_kernel72.ptx'
ptx[64] = self.path + 'eve_skinning_kernel64.ptx'
for bytes, ptxfile in ptx.iteritems():
LogInfo('Processing ', bytes, 'bytes/vertex')
skinningProgram = trinity.Tr2OptixProgram(ptxfile, 'kernel_no_tesselation')
skinningProgramTesselate = trinity.Tr2OptixProgram(ptxfile, 'kernel_tesselation')
optix.SetEntryPointCount(2)
optix.SetRayGenerationProgram(0, skinningProgram)
optix.SetRayGenerationProgram(1, skinningProgramTesselate)
for batchType in range(len(batchTypes)):
batches = batchTypes[batchType]
out = []
def needsTesselation(fx):
return 'skinnedavatarhair_detailed.fx' in fx.effectFilePath.lower()
for batch in batches:
if 'furshell' in batch[1].effectFilePath.lower():
out.append(None)
continue
tesselate = needsTesselation(batch[1])
triangle_count = batch[6]
bytes_per_vertex = batch[8]
if bytes_per_vertex != bytes:
out.append(None)
continue
vertex_buffer_output = trinity.Tr2OptixBuffer()
vertex_buffer_output.CreateUserData(bytes_per_vertex, triangle_count * 3 * 4 if tesselate else triangle_count * 3, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
out.append(vertex_buffer_output)
for i, batch in enumerate(batches):
if 'furshell' in batch[1].effectFilePath.lower():
continue
triangle_count = batch[6]
tesselate = needsTesselation(batch[1])
bytes_per_vertex = batch[8]
if bytes_per_vertex != bytes:
continue
if tesselate:
LogInfo('Tesselating geometry ', batch, ' of type ', batchType)
else:
LogInfo('Skinning geometry ', batch, ' of type ', batchType)
optix.SetBuffer('vertex_buffer', batch[2])
optix.SetBuffer('index_buffer', batch[3])
optix.SetBuffer('vertex_buffer_output', out[i])
optix.SetUInt('first_index_index', batch[5])
optix.SetBuffer('matrix_buffer', batch[7])
program = int(tesselate)
optix.Run(program, triangle_count, 1)
batch[0].SetBuffer('vertex_buffer', out[i])
if tesselate:
batch[0].SetPrimitiveCount(triangle_count * 4)
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetRayGenerationProgram(1, self.raygen)
def RemoveBadGeometry(self, model):
self.haveBeard = False
self.beardFx = None
for mesh in model.visualModel.meshes:
for area in mesh.decalAreas:
if PD.IsBeard(area):
self.haveBeard = True
self.beardFx = area.effect
area.debugIsHidden = True
break
for mesh in model.visualModel.meshes:
for area in mesh.transparentAreas:
lname = area.name.lower()
if lname.startswith('eyeshadow_'):
mesh.transparentAreas.removeAt(-1)
break
if False:
for mesh in model.visualModel.meshes:
for area in mesh.opaqueAreas:
lname = area.name.lower()
if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
mesh.opaqueAreas.removeAt(-1)
break
for area in mesh.transparentAreas:
lname = area.name.lower()
if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
mesh.transparentAreas.removeAt(-1)
break
if False:
print 'raytracing', len(model.visualModel.meshes), 'meshes'
for mesh in model.visualModel.meshes:
lname = mesh.name.lower()
if not lname.startswith('hair'):
print 'removing', lname
mesh.opaqueAreas.removeAt(-1)
mesh.decalAreas.removeAt(-1)
mesh.transparentAreas.removeAt(-1)
elif False:
print 'removing', lname
for a in mesh.opaqueAreas:
print 'opaque', a.name
for a in mesh.decalAreas:
print 'decal', a.name
for a in mesh.transparentAreas:
print 'transp', a.name
mesh.opaqueAreas.removeAt(-1)
mesh.decalAreas.removeAt(-1)
mesh.transparentAreas.removeAt(-1)
else:
print 'keeping', lname
def TransferBeardParameters(self, optix):
if self.haveBeard:
LogInfo('Beard found')
beardLength = self.settings['beardLength']
optix.SetFloat3('beardOptions', beardLength[0], beardLength[1], self.settings['beardGravity'])
floatMap = {'FurLength': 'beard_fur_length',
'UVScale': 'beard_uv_scale',
'AlphaMultiplier': 'beard_alpha_multiplier',
'CombStrength': 'beard_comb_strength',
'FurGrainRotation': 'beard_fur_grain_rotation',
'MirrorGrain': 'beard_mirror_grain',
'FurParallax': 'beard_fur_parallax'}
float3Map = {'gravityOffset': 'beard_gravity_offset',
'MaterialDiffuseColor': 'beard_diffuse_color'}
for param in self.beardFx.parameters:
optixName = floatMap.get(param.name, None)
if optixName is not None:
optix.SetFloat(optixName, param.value)
else:
optixName = float3Map.get(param.name, None)
if optixName is not None:
optix.SetFloat3(optixName, param.value[0], param.value[1], param.value[2])
def GenerateBeardGeometry(self, optix, path, any_hit_shadow):
if not self.haveBeard:
return None
LogInfo('generating beard splines')
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld')
beardProgram = trinity.Tr2OptixProgram(path + 'eve_beard_kernel.ptx', 'kernel')
curveOutputBuffer = trinity.Tr2OptixBuffer()
curveCount = 512
curveOutputBuffer.CreateUserData(80, curveCount * curveCount, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
optix.SetBuffer('output', curveOutputBuffer)
rayTypeCount = optix.GetRayTypeCount()
optix.SetRayTypeCount(1)
optix.SetEntryPointCount(2)
optix.SetRayGenerationProgram(0, beardProgram)
optix.SetRayGenerationProgram(1, beardProgram)
optix.SetEntryPointCount(1)
LogInfo('beard: about to Run')
optix.Run(0, curveCount, curveCount)
LogInfo('beard: Run done')
optix.SetRayTypeCount(rayTypeCount)
hairGeometry = trinity.Tr2OptixGeometry()
hairGeometry.InitializeFromProgram(path + 'bezier_curves.ptx', 'intersect', 'bounds')
subdivideDepth = 2
hairGeometry.SetPrimitiveCount(curveCount * curveCount * (1 << subdivideDepth))
optix.SetUInt('presubdivide_depth', subdivideDepth)
optix.SetBuffer('curves', curveOutputBuffer)
LogInfo('beard: geometry setup done')
beardInstance = trinity.Tr2OptixGeometryInstance()
beardInstance.SetGeometry(hairGeometry)
closest_hit_BeardShader = trinity.Tr2OptixProgram(path + 'eve_beard_shader.ptx', 'closest_hit_BeardShader')
beardMaterial = trinity.Tr2OptixMaterial()
beardMaterial.SetClosestHit(0, closest_hit_BeardShader)
beardMaterial.SetAnyHit(1, any_hit_shadow)
beardInstance.SetMaterial(beardMaterial)
LogInfo('beard: geometry instance setup done')
return beardInstance
def _DoInit(self, scene = None):
model = None
if scene is None:
scene = PD.SkinLightmapRenderer.Scene()
self.scene = scene
self.previousVP = trinity.TriMatrix()
self.framecount = 1
self.useOIT = True
if scene is None:
LogWarn('No scene!')
return
for dynamic in scene.dynamics:
if dynamic.__typename__ == 'Tr2IntSkinnedObject':
model = dynamic
break
else:
LogWarn('No Tr2IntSkinnedObject found')
return
if model is None:
LogWarn('No Tr2IntSkinnedObject found')
return
self.skinnedObject = model
if self.skinnedObject.visualModel is None:
LogWarn('skinnedObject has no visualMeshes')
return
bg = trinity.renderContext.GetDefaultBackBuffer()
step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
if step is not None:
bg = step.renderTarget
self.width = self.settings.get('outputWidth', bg.width)
self.height = self.settings.get('outputHeight', bg.height)
self.blitfx = FullScreenBlitter(self.width, self.height)
self.RemoveBadGeometry(model)
outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.outputTexture = outputTexture
self.capture = CaptureHelper(self.width, self.height)
self._InitUVUnwrap()
for steps in trinity.renderJobs.recurring:
if steps.name == 'FullOptixRenderer':
steps.UnscheduleRecurring()
start = time.clock()
optix = trinity.Tr2Optix()
self.optix = optix
optix.SetInteropDevice()
optix.SetRayTypeCount(4)
optix.SetEntryPointCount(1)
if False:
optix.EnableAllExceptions()
optix.SetPrintEnabled(True)
optix.SetPrintBufferSize(16384)
optix.SetUInt('radiance_ray_type', 0)
optix.SetUInt('shadow_ray_type', 1)
optix.SetUInt('translucency_ray_type', 2)
        optix.SetUInt('bounce_ray_type', 3)  # assumed name: the source set 'translucency_ray_type' twice; ray type 3 is the bounce ray used below
optix.SetFloat('scene_epsilon', 0.001)
optix.SetUInt('frameIteration', 0)
self.outputBuffer = trinity.Tr2OptixBuffer()
self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('output_buffer', self.outputBuffer)
self.ApplySettings()
path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/NCC/'))
self.path = path
LogInfo('Getting files from', path)
everything = []
any_hit_shadow = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow')
any_hit_shadow_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow_blend')
shader_diffuse_only_feeler = trinity.Tr2OptixProgram(path + 'eve_bounce.ptx', 'closest_hit_DiffuseOnlyFeeler2')
any_hit_cutout = trinity.Tr2OptixProgram(path + 'eve_cutout.ptx', 'any_hit_CutoutMask')
any_hit_diffuse_feeler_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_diffuse_feeler_blend')
everything.append(any_hit_shadow)
everything.append(any_hit_shadow_blend)
everything.append(shader_diffuse_only_feeler)
everything.append(any_hit_cutout)
mainRay = 0
shadowRay = 1
bounceRay = 3
def MakeMaterialWithShader(shader):
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(mainRay, shader)
material.SetAnyHit(shadowRay, any_hit_shadow)
material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(material)
return (material, shader)
def MakeMaterial(ptxFile, shaderName):
shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)
everything.append(shader)
return MakeMaterialWithShader(shader)
def MakeDecal(material):
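            # Decals keep their closest-hit shader but add alpha-test any-hit
            # programs, so cutout texels neither shade nor cast shadows.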
material.SetAnyHit(mainRay, any_hit_cutout)
material.SetAnyHit(shadowRay, any_hit_shadow_blend)
material.SetAnyHit(bounceRay, any_hit_cutout)
skin_single_material, skin_single_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single2')
skin_single_material_scatter = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single_Scatter2')[0]
skin_single_material_decal = MakeMaterialWithShader(skin_single_shade)[0]
MakeDecal(skin_single_material_decal)
glasses_shade = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shade')
glasses_shadow = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shadow')
glass_material = trinity.Tr2OptixMaterial()
glass_material.SetAnyHit(mainRay, glasses_shade)
glass_material.SetAnyHit(shadowRay, glasses_shadow)
glass_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(glasses_shade)
everything.append(glasses_shadow)
vizNames = ['closest_hit_VizNormal',
'closest_hit_VizUV',
'closest_hit_VizConstantColor',
'closest_hit_VizDiffuse']
vizualizer, vizualizer_shade = MakeMaterial('eve_basic', vizNames[0])
vizualizer_decal = MakeMaterialWithShader(vizualizer_shade)[0]
MakeDecal(vizualizer_decal)
skin_double_material, skin_double_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2')
skin_double_material_decal = MakeMaterialWithShader(skin_double_shade)[0]
MakeDecal(skin_double_material_decal)
skin_double_material_transparent = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2_Blend')[0]
skin_double_material_transparent.SetAnyHit(mainRay, any_hit_cutout)
skin_double_material_transparent.SetAnyHit(shadowRay, any_hit_shadow_blend)
skin_double_material_transparent.SetAnyHit(bounceRay, any_hit_cutout)
avatar_brdf_material, avatar_brdf_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Single2')
avatar_brdf_material_decal = MakeMaterialWithShader(avatar_brdf_shade)[0]
MakeDecal(avatar_brdf_material_decal)
avatar_brdf_double_material, avatar_brdf_double_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Double2')
avatar_brdf_double_material_decal = MakeMaterialWithShader(avatar_brdf_double_shade)[0]
MakeDecal(avatar_brdf_double_material_decal)
avatar_hair_material = trinity.Tr2OptixMaterial()
avatar_hair_shade = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'closest_hit_ShadeAvatarHair2' if self.useOIT else 'closest_hit_ShadeAvatarHair2_Blend')
avatar_hair_material.SetClosestHit(mainRay, avatar_hair_shade)
if self.useOIT:
avatar_hair_oit = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'any_hit_HairOIT')
avatar_hair_material.SetAnyHit(mainRay, avatar_hair_oit)
avatar_hair_material.SetAnyHit(shadowRay, any_hit_shadow_blend)
avatar_hair_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(avatar_hair_shade)
everything.append(avatar_hair_material)
avatar_hair_material_decal = trinity.Tr2OptixMaterial()
avatar_hair_material_decal.SetClosestHit(mainRay, avatar_hair_shade)
avatar_hair_material_decal.SetAnyHit(mainRay, avatar_hair_oit if self.useOIT else any_hit_cutout)
avatar_hair_material_decal.SetAnyHit(shadowRay, any_hit_shadow_blend)
avatar_hair_material_decal.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
avatar_hair_material_decal.SetAnyHit(bounceRay, any_hit_cutout)
everything.append(avatar_hair_material_decal)
eye_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEye')
eye_material = trinity.Tr2OptixMaterial()
eye_material.SetClosestHit(mainRay, eye_shade)
eye_material.SetAnyHit(shadowRay, any_hit_shadow)
everything.append(eye_shade)
everything.append(eye_material)
eye_wetness_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEyeWetness')
eye_wetness_material = trinity.Tr2OptixMaterial()
eye_wetness_material.SetClosestHit(mainRay, eye_wetness_shade)
eye_wetness_material.SetAnyHit(shadowRay, any_hit_shadow)
everything.append(eye_wetness_shade)
everything.append(eye_wetness_material)
portrait_basic_material, portrait_basic_shade = MakeMaterial('eve_basic', 'closest_hit_ShadePortraitBasic')
portrait_basic_material_decal = MakeMaterialWithShader(portrait_basic_shade)[0]
MakeDecal(portrait_basic_material_decal)
LogInfo('global setup OK', time.clock() - start, 'seconds')
def MakeSamplerFromMap(texture, name):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromSurface(texture)
sampler.SetNormalizedIndexingMode(True)
optix.SetSampler(name, sampler)
LogInfo('No-Copy Interop for ', name)
everything.append(sampler)
MakeSamplerFromMap(self.oxWorldPosMapUV, 'world_pos_uv_buffer')
MakeSamplerFromMap(self.oxWorldNormalMapUV, 'world_normal_uv_buffer')
MakeSamplerFromMap(self.stretchMap, 'stretchmap_buffer')
useHdrProbe = False
if useHdrProbe:
optix.SetSamplerFromProbe('hdr_probe_sampler', 'c:/depot/optix/data/Japan_subway2_FINAL.hdr')
start = time.clock()
self.skinnedOptix = optix.CreateFromSkinnedModel(model, 72, path + 'triangle72.ptx', 'mesh_intersect', 'mesh_bounds', 64, path + 'triangle64.ptx', 'mesh_intersect', 'mesh_bounds')
optixBatches = self.skinnedOptix[0]
self.TransferBeardParameters(optix)
group = trinity.Tr2OptixGeometryGroup()
groupChildren = []
self.rayCounter = RayCountHelper(self.optix)
self.oit = OitHelper(self.optix)
self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')
self.RunSkinningAndTesselation()
start = time.clock()
samplers = SkinRaytracingTools.InteropAllTextures(optix, model, waitForFinish=True)
everything.append(samplers)
backdrop = trinity.TriTexture2DParameter()
backdrop.resourcePath = self.settings['backgroundBitmap']
skinmap = trinity.TriTexture2DParameter()
skinmap.resourcePath = 'res:/Graphics/Character/female/paperdoll/head/head_generic/SkinMap.png'
blue.resMan.Wait()
everything.append(SkinRaytracingTools.InteropTexture('BackgroundEnvMap', backdrop.resource, waitForFinish=True, scope=optix))
everything.append(SkinRaytracingTools.InteropTexture('SkinMap', skinmap.resource, waitForFinish=True, scope=optix))
LogInfo('texture interop OK', time.clock() - start, 'seconds')
splines = self.GenerateBeardGeometry(optix, path, any_hit_shadow)
if splines is not None:
groupChildren.append(splines)
print '*** Raytracing phase ***'
def SetAlphaRef(instance, batchType):
if batchType == 1:
instance.SetFloat4('alphaRef', 0.75, 0, 0, 0)
elif batchType == 2:
instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)
haveGlasses = False
for batchType in range(len(optixBatches)):
isOpaque = batchType == 0
batches = optixBatches[batchType]
for batch in batches:
if 'furshell' in batch[1].effectFilePath.lower():
continue
instance = trinity.Tr2OptixGeometryInstance()
everything.append(instance)
instance.SetGeometry(batch[0])
r = random.random()
g = random.random()
b = random.random()
instance.SetFloat4('viz_constant_color', r, g, b, 1.0)
fxpath = batch[1].effectFilePath.lower()
if False:
instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)
elif 'glassshader' in fxpath:
instance.SetMaterial(glass_material)
if not haveGlasses:
haveGlasses = True
elif 'skinnedavatarbrdfsinglepassskin_single.fx' in fxpath:
if batch[1] in self.scatterFX:
instance.SetMaterial(skin_single_material_scatter)
else:
instance.SetMaterial(skin_single_material if isOpaque else skin_single_material_decal)
SetAlphaRef(instance, batchType)
elif 'skinnedavatarbrdfsinglepassskin_double.fx' in fxpath:
instance.SetMaterial([skin_double_material, skin_double_material_decal, skin_double_material_transparent][batchType])
SetAlphaRef(instance, batchType)
elif 'skinnedavatarbrdflinear.fx' in fxpath:
instance.SetMaterial(avatar_brdf_material if isOpaque else avatar_brdf_material_decal)
elif 'skinnedavatarbrdfdoublelinear.fx' in fxpath:
instance.SetMaterial(avatar_brdf_double_material if isOpaque else avatar_brdf_double_material_decal)
elif 'skinnedavatarhair_detailed.fx' in fxpath:
instance.SetMaterial(avatar_hair_material if isOpaque else avatar_hair_material_decal)
instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)
instance.SetUInt('enableCulling', 0)
elif 'eyeshader.fx' in fxpath:
instance.SetMaterial(eye_material)
elif 'eyewetnessshader.fx' in fxpath:
instance.SetMaterial(eye_wetness_material)
elif 'portraitbasic.fx' in fxpath:
instance.SetMaterial(portrait_basic_material if isOpaque else portrait_basic_material_decal)
else:
instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)
SkinRaytracingTools.CopyParametersToContext(batch[1], instance)
groupChildren.append(instance)
group.SetChildCount(len(groupChildren))
for x in xrange(len(groupChildren)):
group.SetChild(x, groupChildren[x])
everything.append(group)
group.SetAcceleration('Bvh', 'Bvh')
LogInfo('scene interop OK', time.clock() - start, 'seconds')
start = time.clock()
bufEveLights = SkinRaytracingTools.CreateBufferForLights(scene.lights, useHdrProbe)
optix.SetBuffer('trinity_lights', bufEveLights)
LogInfo('lights interop OK', time.clock() - start, 'seconds')
start = time.clock()
optix.SetGeometryGroup('top_scene', group)
optix.SetGeometryGroup('shadow_casters', group)
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetEntryPointCount(1)
miss = None
if not useHdrProbe:
miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')
else:
miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')
optix.SetMissProgram(3, miss)
optix.SetFloat3('bg_color', 1.0, 0, 0)
everything.append(miss)
if False:
exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')
optix.SetExceptionProgram(0, exception)
everything.append(exception)
optix.SetStackSize(4096)
self.everything = everything
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))
LogInfo('general setup OK', time.clock() - start, 'seconds')
optix.ReportObjectCounts()
start = time.clock()
optix.Compile()
LogInfo('compile OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Validate()
LogInfo('validate OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Run(0, 0, 0)
LogInfo('BVH OK', time.clock() - start, 'seconds')
start = time.clock()
self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)
rj = trinity.CreateRenderJob('FullOptixRenderer')
rj.PushRenderTarget(self.outputRT)
rj.PushDepthStencil(None)
self.AddCallback(FullOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)
rj.CopyRtToTexture(outputTexture).name = 'cuda -> outputTexture'
rj.PopDepthStencil()
rj.PopRenderTarget()
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen quad'
rj.RenderEffect(self.blitfx.effect).name = ' blit'
self.capture.CreateRenderSteps(rj, self.blitfx.effect)
rj.steps.append(trinity.TriStepRenderFps())
rj.ScheduleRecurring(insertFront=False)
self.renderJob = rj
LogInfo('final setup OK', time.clock() - start, 'seconds')
model.display = False
self.EnablePaperDollJobs(False)
@staticmethod
def EnablePaperDollJobs(enable):
if False:
for job in trinity.renderJobs.recurring:
if 'paperdollrenderjob' in job.name.lower():
for step in job.steps:
step.enabled = enable
if enable:
trinity.device.tickInterval = 10
else:
trinity.device.tickInterval = 0
def ApplySettings(self):
self.optix.SetFloat('light_size', self.settings['light_size'])
self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)
self.optix.SetFloat('HairShadows', self.settings['HairShadows'])
self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)
self.previousVP.Identity()
def SetLensRadius(self, lens_radius):
self.settings['lens_radius'] = lens_radius
self.ApplySettings()
def SetLensFocalDistance(self, lens_focal_distance):
if lens_focal_distance <= 0:
self.settings.pop('lens_focal_distance', 0)
else:
self.settings['lens_focal_distance'] = lens_focal_distance
self.ApplySettings()
def SetLightSize(self, light_size):
self.settings['light_size'] = light_size
self.ApplySettings()
def SetHairShadowsEnabled(self, enabled):
self.settings['HairShadows'] = float(enabled)
self.ApplySettings()
def SetBackgroundIntensity(self, intensity):
self.settings['EnvMapBoost'] = intensity
self.ApplySettings()
def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):
LogInfo('init', self)
blue.motherLode.maxMemUsage = 0
blue.resMan.ClearAllCachedObjects()
self.framecount = 0
self.listenForUpdate = listenForUpdate
if memento is not None:
self.settings = memento
else:
self.settings = {}
self.settings['light_size'] = 0.125
self.settings['lens_radius'] = 0.001
self.settings['HairShadows'] = 1.0
self.settings['EnvMapBoost'] = 1.0
self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'
self.settings['beardLength'] = beardLength
self.settings['beardGravity'] = beardGravity
if outputWidth is not None:
self.settings['outputWidth'] = outputWidth
if outputHeight is not None:
self.settings['outputHeight'] = outputHeight
if asyncSetup:
uthread.new(self._DoInit, scene=scene)
else:
self._DoInit(scene=scene)
def GetMemento(self):
return self.settings
def __del__(self):
LogInfo('deleting', self)
if hasattr(self, 'renderJob'):
self.renderJob.UnscheduleRecurring()
self.renderJob = None
del self.raygen
del self.rayCounter
del self.oit
del self.outputBuffer
del self.skinnedOptix
del self.everything
LogInfo('Post-cleanup leak check:')
self.optix.ReportObjectCounts()
self.EnablePaperDollJobs(True)
@staticmethod
def Pause():
if FullOptixRenderer.instance is not None:
FullOptixRenderer.instance.renderJob.UnscheduleRecurring()
@staticmethod
def NotifyUpdate():
if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:
LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)
memento = FullOptixRenderer.instance.GetMemento()
FullOptixRenderer.instance = None
FullOptixRenderer.instance = FullOptixRenderer(memento=memento)
class ShipOptixRenderer():
__guid__ = 'paperDoll.ShipOptixRenderer'
instance = None
def AddCallback(self, func, name, rj):
cb = trinity.TriStepPythonCB()
weakSelf = weakref.ref(self)
cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
cb.name = name
rj.steps.append(cb)
def GetFrameCount(self):
return self.framecount
def SaveScreenshot(self, filename):
self.capture.SaveSurfaceToFile(filename)
def AddRenderPreviewStep(self, renderJob):
renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
renderJob.PushDepthStencil(None).name = ' [optix] push depth'
renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
renderJob.PopDepthStencil().name = ' [optix] pop depth'
@staticmethod
def RaytraceFrame(selfRef):
start = time.time()
VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
selfRef.previousVP = VP
selfRef.outputBuffer.Clear()
selfRef.framecount = 0
pos1 = (0, 0, 0)
pos2 = pos1
dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
autodof = min(dist1, dist2)
dof = selfRef.settings.get('lens_focal_distance', autodof)
LogInfo('Auto-depth-of-field is at', autodof, ', actual focal distance is', dof)
selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
else:
selfRef.framecount += 1
selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
selfRef.oit.ResetAllocationCount()
selfRef.rayCounter.ResetCount()
time1 = time.time()
selfRef.optix.Run(0, selfRef.width, selfRef.height)
time2 = time.time()
traceTime = time2 - time1
raycount = selfRef.rayCounter.GetCount()
raysec = 0
if traceTime > 0:
raysec = raycount / float(traceTime)
time3 = time.time()
if selfRef.framecount % 32 == 0:
oit = selfRef.oit.GetAllocationCount()
if oit > 0:
print oit, 'oit allocations'
selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
stop = time.time()
message = 'time: call %05.3f / trace %05.3f / read %05.3f ms' % (float(time1 - start) * 1000, float(time2 - time1) * 1000, float(stop - time3) * 1000)
message += '// traced %d rays in %05.3f ms / %10d Krays/sec / %d frames' % (raycount,
traceTime * 1000,
raysec / 1000,
selfRef.framecount)
LogInfo(message)
def ConvertCubeMapToSH(self, optix, ptxPath, cubeResPath):
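        # Projects the scene's nebula cube map onto the first nine spherical
        # harmonic coefficients (bands 0-2) on the GPU; the result is left in
        # sh_buffer for the shaders to use as ambient lighting.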
self.shBuffer = trinity.Tr2OptixBuffer()
self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('sh_buffer', self.shBuffer)
self.shBuffer.Clear()
program = trinity.Tr2OptixProgram(ptxPath + 'cubemapsh.ptx', 'kernel')
optix.SetRayGenerationProgram(0, program)
optix.ReportObjectCounts()
cube = trinity.TriTextureCubeParameter()
cube.resourcePath = cubeResPath
cube.name = 'Nebula'
blue.resMan.Wait()
mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(cube)
for i in range(len(names)):
if i < len(mipmaps):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(mipmaps[i])
sampler.SetNormalizedIndexingMode(True)
optix.SetSampler(cube.name + names[i], sampler)
LogInfo('No-Copy Cube Side Interop for ' + cube.name + names[i])
optix.Run(0, cube.resource.width, cube.resource.width)
if False:
names = ['Y00',
'Y1m1',
'Y10',
'Y11',
'Y2m2',
'Y2m1',
'Y20',
'Y21',
'Y22']
self.shBuffer.Map()
ofs = 0
for name in names:
print name, ': (',
print self.shBuffer.GetUserDataF(ofs), ',',
ofs = ofs + 4
print self.shBuffer.GetUserDataF(ofs), ',',
ofs = ofs + 4
print self.shBuffer.GetUserDataF(ofs), ')'
ofs = ofs + 4
self.shBuffer.Unmap()
def CachedCreateMaterial(self, path, effect):
material = self.materialCache.get(effect, None)
if material is not None:
return material
shader = None
if effect in ('tripleglowv3', 'doubleglowv3', 'singleglowv3'):
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_' + effect)
elif effect in ('singleheatv3',):
shader = trinity.Tr2OptixProgram(path + 'v3ship_heat.ptx', 'closest_hit_' + effect)
elif effect in ('tripleglowoilv3',):
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow_oil.ptx', 'closest_hit_' + effect)
elif effect == 'skinned_tripleglowv3':
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_tripleglowv3')
if shader is None:
return
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(0, shader)
material.SetAnyHit(1, self.any_hit_shadow)
return material
def _DoInit(self, scene = None):
if scene is None:
scene = trinity.device.scene
self.scene = scene
self.previousVP = trinity.TriMatrix()
self.framecount = 1
self.materialCache = {}
self.useOIT = True
if scene is None:
LogWarn('No scene!')
return
bg = trinity.renderContext.GetDefaultBackBuffer()
step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
if step is not None:
bg = step.renderTarget
self.width = self.settings.get('outputWidth', bg.width)
self.height = self.settings.get('outputHeight', bg.height)
self.blitfx = FullScreenBlitter(self.width, self.height)
bloomScale = 4
if False:
self.highpassRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.filteredRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.outputTexture = outputTexture
self.capture = CaptureHelper(self.width, self.height)
for steps in trinity.renderJobs.recurring:
if steps.name == 'ShipOptixRenderer':
steps.UnscheduleRecurring()
path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/ship/'))
self.path = path
LogInfo('Getting files from', path)
start = time.clock()
optix = trinity.Tr2Optix()
self.optix = optix
optix.SetInteropDevice()
optix.SetRayTypeCount(4)
optix.SetEntryPointCount(1)
if False:
optix.EnableAllExceptions()
if False:
optix.SetPrintEnabled(True)
optix.SetPrintBufferSize(16384)
optix.SetFloat('scene_epsilon', 0.01)
optix.SetUInt('frameIteration', 0)
nebula = PD.FindResourceByName(scene.backgroundEffect, 'NebulaMap') if scene.backgroundEffect is not None else None
if nebula is not None:
LogInfo('Converting to SH ', nebula.resourcePath)
self.ConvertCubeMapToSH(optix, path, nebula.resourcePath)
else:
self.shBuffer = trinity.Tr2OptixBuffer()
self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('sh_buffer', self.shBuffer)
self.shBuffer.Clear()
self.outputBuffer = trinity.Tr2OptixBuffer()
self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('output_buffer', self.outputBuffer)
self.ApplySettings()
everything = []
mainRay = 0
shadowRay = 1
bounceRay = 3
        def MakeMaterialWithShader(shader):
            # Reconstructed body (lost in the decompiled source): mirrors the
            # FullOptixRenderer helper, minus the shadow/bounce hooks that are
            # wired up separately for ships.
            material = trinity.Tr2OptixMaterial()
            material.SetClosestHit(mainRay, shader)
            everything.append(material)
            return (material, shader)

        def MakeMaterial(ptxFile, shaderName):
            shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)
            everything.append(shader)
            return MakeMaterialWithShader(shader)
LogInfo('global setup OK', time.clock() - start, 'seconds')
useHdrProbe = False
start = time.clock()
self.rayCounter = RayCountHelper(self.optix)
self.oit = OitHelper(self.optix)
self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')
shader = trinity.Tr2OptixProgram(path + 'vizualizer.ptx', 'closest_hit_VizGreen')
viz_material = trinity.Tr2OptixMaterial()
viz_material.SetClosestHit(0, shader)
everything.append(viz_material)
if False:
any_hit_shadow = trinity.Tr2OptixProgram(path + 'shadow.ptx', 'any_hit_shadow')
viz_material.SetAnyHit(1, any_hit_shadow)
self.any_hit_shadow = any_hit_shadow
else:
self.any_hit_shadow = None
start = time.clock()
nameTranslation = {'GlowNormalSpecularMap': 'NormalMap'}
def GroupByVertexBuffer(optixBatches):
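            # Regroup the batches so that all batches sharing a vertex buffer
            # land in the same sub-list; each sub-list becomes one geometry
            # group (and one BVH) below.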
output = []
for batchType in range(len(optixBatches)):
batches = optixBatches[batchType]
vbDict = {}
for batch in batches:
vb = batch[2]
list = vbDict.get(vb, None)
if list is not None:
list.append(batch)
else:
vbDict[vb] = [batch]
list = []
for vb in vbDict.iterkeys():
list.append(vbDict[vb])
output.append(list)
return output
cache = {}
programs = {'skinned_tripleglowv3_48': 'triangle48',
'singlev3_48': 'triangle48',
'singleheatv3_48': 'triangle48',
'tripleglowv3_40': 'triangle40',
'singleheatv3_40': 'triangle40',
'singlefresnelreflectionwithglow_56': 'triangle56',
'doublefresnelreflectionwithglow_56': 'triangle56',
'tripleglowoilv3_80': 'triangle80'}
        # These must exist unconditionally: batches with no matching PTX
        # program are pointed at them below.
        nullintersect = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'intersect')
        nullbounds = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'bounds')
        everything.append(nullintersect)
        everything.append(nullbounds)
mylogOK = set({})
mylogFail = set({})
linearNames = set({})
linearNames.add('MaterialDiffuseColor')
linearNames.add('MaterialReflectionColor')
linearNames.add('MaskDiffuseColor')
linearNames.add('MaskReflectionColor')
linearNames.add('SubMaskDiffuseColor')
linearNames.add('SubMaskReflectionColor')
linearNames.add('GlowColor')
topScene = trinity.Tr2OptixGroup()
interopSamplerCache = {}
for dynamic in scene.objects:
if dynamic.__typename__ not in ('EveShip2', 'EveStation2'):
continue
model = dynamic
if model.highDetailMesh is None or model.highDetailMesh.object is None:
LogWarn('ship has no high detail meshes')
continue
skinnedOptix = optix.CreateFromEveSpaceObject2(model, 0, '', '', '')
everything.append(skinnedOptix)
optixBatches = skinnedOptix[0]
self.objectsToRefresh[model] = skinnedOptix
sorted = GroupByVertexBuffer(optixBatches)
groups = []
for batchType in range(len(optixBatches)):
isOpaque = batchType == 0
vbBatches = sorted[batchType]
for batches in vbBatches:
groupChildren = []
for batch in batches:
effect = batch[1].effectFilePath.lower()
effect = effect[effect.rfind('/') + 1:]
effect = effect[:effect.rfind('.fx')]
ptx = programs.get(effect + '_' + str(batch[8]), '')
if ptx == '':
mylogFail.add(effect)
batch[0].SetIntersectProgram(nullintersect)
batch[0].SetBoundsProgram(nullbounds)
continue
mylogOK.add(effect)
intersect, bounds = cache.get(ptx, (None, None))
if intersect is None:
intersect = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'intersect')
bounds = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'bounds')
cache[ptx] = (intersect, bounds)
batch[0].SetIntersectProgram(intersect)
batch[0].SetBoundsProgram(bounds)
batchGeometryInstance = trinity.Tr2OptixGeometryInstance()
everything.append(batchGeometryInstance)
batchGeometryInstance.SetGeometry(batch[0])
if True:
material = self.CachedCreateMaterial(path, effect)
if material is None:
material = viz_material
else:
material = viz_material
batchGeometryInstance.SetMaterial(material)
SkinRaytracingTools.CopyParametersToContext(batch[1], batchGeometryInstance, linearNames)
groupChildren.append(batchGeometryInstance)
samplers = SkinRaytracingTools.InteropAllTexturesFromEffect(optix, batch[1], waitForFinish=True, nameTranslation=nameTranslation, scope=batchGeometryInstance, cache=interopSamplerCache)
everything.append(samplers)
group = trinity.Tr2OptixGeometryGroup()
group.SetChildCount(len(groupChildren))
for x in xrange(len(groupChildren)):
group.SetChild(x, groupChildren[x])
group.SetAcceleration('Bvh', 'Bvh')
self.objectsToMarkDirty.append(group)
groups.append(group)
everything.append(cache)
baseOffset = topScene.GetChildCount()
topScene.SetChildCount(baseOffset + len(groups))
for x in xrange(len(groups)):
topScene.SetChild(baseOffset + x, groups[x])
everything.append(groups)
if False:
sphereGeometry = trinity.Tr2OptixGeometry()
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')
sphereGeometry.SetPrimitiveCount(1)
everything.append(sphereGeometry)
sphereInstance = trinity.Tr2OptixGeometryInstance()
sphereInstance.SetGeometry(sphereGeometry)
sphereInstance.SetMaterial(viz_material)
sphereInstance.SetFloat4('pos_r', 0, 0, 0, 100)
sphereInstance.SetFloat4('color_watt', 1, 0, 0, 1)
everything.append(sphereInstance)
group = trinity.Tr2OptixGeometryGroup()
group.SetChildCount(1)
group.SetChild(0, sphereInstance)
group.SetAcceleration('Bvh', 'Bvh')
topScene.SetChildCount(topScene.GetChildCount() + 1)
topScene.SetChild(topScene.GetChildCount() - 1, group)
everything.append(topScene)
topScene.SetAcceleration('Bvh', 'Bvh')
self.objectsToMarkDirty.append(topScene)
optix.SetGroup('top_scene', topScene)
optix.SetGroup('shadow_casters', topScene)
if len(mylogOK) > 0:
LogInfo('Converted successfully:', str(mylogOK))
else:
LogWarn('No effects converted successfully!')
if len(mylogFail) > 0:
LogWarn('Failed to convert:', str(mylogFail))
if type(scene) == trinity.EveSpaceScene:
c = SkinRaytracingTools.SafeLinearize(scene.sunDiffuseColor)
optix.SetFloat4('SunDiffuseColor', c[0], c[1], c[2], c[3])
c = scene.sunDirection
optix.SetFloat4('SunDirWorld', -c[0], -c[1], -c[2], 0)
c = SkinRaytracingTools.SafeLinearize(scene.ambientColor)
optix.SetFloat4('SceneAmbientColor', c[0], c[1], c[2], c[3])
c = SkinRaytracingTools.SafeLinearize(scene.fogColor)
optix.SetFloat4('SceneFogColor', c[0], c[1], c[2], c[3])
LogInfo('scene interop OK', time.clock() - start, 'seconds')
start = time.clock()
light = trinity.Tr2InteriorLightSource()
if True:
wattage = 2000000
light.color = (1,
1,
1,
wattage)
light.radius = 50
light.position = (200, 500, -300)
else:
wattage = 10000000
light.color = (1,
1,
1,
wattage)
light.radius = 1000
light.position = (0, 0, 0)
bufEveLights = SkinRaytracingTools.CreateBufferForLights([], useHdrProbe, preserveAlpha=True)
optix.SetBuffer('trinity_lights', bufEveLights)
LogInfo('lights interop OK', time.clock() - start, 'seconds')
if False:
sphereGeometry = trinity.Tr2OptixGeometry()
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')
sphereGeometry.SetPrimitiveCount(1)
sphereMaterial = trinity.Tr2OptixMaterial()
sphereShader = trinity.Tr2OptixProgram(path + 'sphere_program.ptx', 'closest_hit_radiance')
sphereMaterial.SetClosestHit(0, sphereShader)
sphereInstance = trinity.Tr2OptixGeometryInstance()
sphereInstance.SetGeometry(sphereGeometry)
sphereInstance.SetMaterial(sphereMaterial)
sphereInstance.SetFloat4('pos_r', light.position[0], light.position[1], light.position[2], light.radius)
sphereInstance.SetFloat4('color_watt', light.color[0], light.color[1], light.color[2], light.color[3])
n = topScene.GetChildCount()
topScene.SetChildCount(n + 1)
sphereGroup = trinity.Tr2OptixGeometryGroup()
sphereGroup.SetChildCount(1)
sphereGroup.SetChild(0, sphereInstance)
sphereGroup.SetAcceleration('Bvh', 'Bvh')
topScene.SetChild(n, sphereGroup)
start = time.clock()
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetEntryPointCount(1)
miss = None
if not useHdrProbe:
miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')
else:
miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')
optix.SetMissProgram(3, miss)
optix.SetFloat3('bg_color', 1.0, 0, 0)
everything.append(miss)
if False:
exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')
optix.SetExceptionProgram(0, exception)
everything.append(exception)
optix.SetStackSize(4096)
self.everything = everything
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))
LogInfo('general setup OK', time.clock() - start, 'seconds')
optix.ReportObjectCounts()
start = time.clock()
optix.Compile()
LogInfo('compile OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Validate()
LogInfo('validate OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Run(0, 0, 0)
LogInfo('BVH OK', time.clock() - start, 'seconds')
start = time.clock()
if False:
self.blitfx.SetTexture(outputTexture, self.highpassRT, self.filteredRT)
else:
self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)
rj = trinity.CreateRenderJob('ShipOptixRenderer')
self.AddCallback(ShipOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'
if False:
rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'
rj.RenderEffect(self.blitfx.highpassEffect).name = ' high pass'
rj.SetRenderTarget(self.filteredRT.wrappedRenderTarget).name = ' SetRT filteredRT'
rj.RenderEffect(self.blitfx.gaussianHorizEffect).name = ' horizontal blur'
rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'
rj.RenderEffect(self.blitfx.gaussianVertEffect).name = ' vertical blur'
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'
rj.RenderEffect(self.blitfx.effect).name = ' blit'
tp2 = None
for job in trinity.renderJobs.recurring:
if job.name == 'TrinityPanel:View1':
tp2 = job
if tp2 is None:
rj.ScheduleRecurring(insertFront=False)
else:
final = None
for step in tp2.steps:
if step.name == 'SET_FINAL_RT':
final = step
break
if final is not None:
tp2.steps.insert(tp2.steps.index(final), trinity.TriStepRunJob(rj))
else:
tp2.steps.append(trinity.TriStepRunJob(rj))
self.renderJob = rj
LogInfo('final setup OK', time.clock() - start, 'seconds')
FullOptixRenderer.EnablePaperDollJobs(False)
def ApplySettings(self):
self.optix.SetFloat('light_size', self.settings['light_size'])
self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)
self.optix.SetFloat('HairShadows', self.settings['HairShadows'])
self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)
self.previousVP.Identity()
def SetLensRadius(self, lens_radius):
self.settings['lens_radius'] = lens_radius
self.ApplySettings()
def SetLensFocalDistance(self, lens_focal_distance):
if lens_focal_distance <= 0:
self.settings.pop('lens_focal_distance', 0)
else:
self.settings['lens_focal_distance'] = lens_focal_distance
self.ApplySettings()
def SetLightSize(self, light_size):
self.settings['light_size'] = light_size
self.ApplySettings()
def SetHairShadowsEnabled(self, enabled):
self.settings['HairShadows'] = float(enabled)
self.ApplySettings()
def SetBackgroundIntensity(self, intensity):
self.settings['EnvMapBoost'] = intensity
self.ApplySettings()
def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):
LogInfo('init', self)
blue.motherLode.maxMemUsage = 0
blue.resMan.ClearAllCachedObjects()
self.framecount = 0
self.listenForUpdate = listenForUpdate
self.everything = None
self.objectsToRefresh = {}
self.objectsToMarkDirty = []
if memento is not None:
self.settings = memento
else:
self.settings = {}
self.settings['light_size'] = 0.125
self.settings['lens_radius'] = 0.001
self.settings['HairShadows'] = 1.0
self.settings['EnvMapBoost'] = 1.0
self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'
self.settings['beardLength'] = beardLength
self.settings['beardGravity'] = beardGravity
if outputWidth is not None:
self.settings['outputWidth'] = outputWidth
if outputHeight is not None:
self.settings['outputHeight'] = outputHeight
if asyncSetup:
uthread.new(self._DoInit, scene=scene)
else:
self._DoInit(scene=scene)
def GetMemento(self):
return self.settings
def __del__(self):
LogInfo('deleting', self)
if hasattr(self, 'renderJob'):
self.renderJob.UnscheduleRecurring()
self.renderJob = None
del self.any_hit_shadow
del self.raygen
del self.rayCounter
del self.oit
del self.shBuffer
del self.outputBuffer
del self.everything
del self.objectsToRefresh
del self.objectsToMarkDirty
self.optix.ClearObjects()
LogInfo('Post-cleanup leak check:')
self.optix.ReportObjectCounts()
FullOptixRenderer.EnablePaperDollJobs(True)
def RefreshMatrices(self):
for ship, optixList in self.objectsToRefresh.iteritems():
self.optix.RefreshMatrices(ship, optixList)
for dirty in self.objectsToMarkDirty:
dirty.MarkDirty()
self.ApplySettings()
LogInfo('Refreshed')
@staticmethod
def Pause():
if FullOptixRenderer.instance is not None:
FullOptixRenderer.instance.renderJob.UnscheduleRecurring()
@staticmethod
def NotifyUpdate():
if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:
LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)
memento = FullOptixRenderer.instance.GetMemento()
FullOptixRenderer.instance = None
FullOptixRenderer.instance = FullOptixRenderer(memento=memento) |
9,091 | dc51ca86a49dbec6f714753782494f21d4b1591d | import numpy as np
import pandas as pd
import logging
import matplotlib.pyplot as plt
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler, RobustScaler
from sklearn.compose import ColumnTransformer
from sklearn.pipeline import Pipeline, make_pipeline
from sklearn.linear_model import LinearRegression
import datetime
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.model_selection import KFold, cross_val_score, train_test_split
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
# ignore warnings
import warnings
warnings.filterwarnings(action="ignore")
df_train = pd.read_csv('./Scripts/pages/train.csv')
df_store = pd.read_csv('./Scripts/pages/store.csv')
df_test = pd.read_csv('./Scripts/pages/test.csv')
merged_train = pd.merge(left = df_train, right = df_store, how = 'inner', left_on = 'Store', right_on = 'Store')
merged_test = pd.merge(left = df_test, right = df_store, how = 'inner', left_on = 'Store', right_on = 'Store')
def preprocess_data(train, test):
# '''preprocessing'''
global train_features, test_features, train_target, categorical, numerical
# train and target features
train_features = train.drop(['Sales', 'Customers'], axis = 1) #drop the target feature + customers (~ will not be used for prediction)
train_target = train[['Sales']]
test_features = test.drop(['Id'], axis = 1) #drop id, it's required only during submission
#feature generation + transformations
try:
train_features['Date'] = pd.to_datetime(train_features.Date)
train_features['Month'] = train_features.Date.dt.month.to_list()
train_features['Year'] = train_features.Date.dt.year.to_list()
train_features['Day'] = train_features.Date.dt.day.to_list()
train_features['WeekOfYear'] = train_features.Date.dt.weekofyear.to_list()
train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list()
train_features['weekday'] = 1 # Initialize the column with default value of 1
train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0
train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0
train_features = train_features.drop(['Store'], axis = 1)
test_features['Date'] = pd.to_datetime(test_features.Date)
test_features['Month'] = test_features.Date.dt.month.to_list()
test_features['Year'] = test_features.Date.dt.year.to_list()
test_features['Day'] = test_features.Date.dt.day.to_list()
test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list()
test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()
test_features['weekday'] = 1 # Initialize the column with default value of 1
test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0
test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0
test_features = test_features.drop(['Store'], axis = 1)
except KeyError:
print("Column couldn't be found")
# numerical and categorical columns (train set)
categorical = []
numerical = []
timestamp = []
for col in train_features.columns:
if train_features[col].dtype == object:
categorical.append(col)
elif train_features[col].dtype in ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']:
numerical.append(col)
else:
timestamp.append(col)
# Keep selected columns only
my_cols = categorical + numerical + timestamp
train_features = train_features[my_cols].copy()
test_features = test_features[my_cols].copy()
features = pd.concat([train_features, test_features]) #merge the features columns for uniform preprocessing
# change dtypes for uniformity in preprocessing
features.CompetitionOpenSinceMonth = features.CompetitionOpenSinceMonth.astype('Int64')
features.CompetitionOpenSinceYear = features.CompetitionOpenSinceYear.astype('Int64')
features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64')
features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')
features["StateHoliday"].loc[features["StateHoliday"] == 0] = "0"
# ''' actual preprocessing: the mighty pipeline '''
# numeric
for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth', 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:
features[col] = features[col].fillna((int(features[col].mean())))
features.PromoInterval = features.PromoInterval.fillna(features.PromoInterval.mode()[0])
features.Open = features.Open.fillna(features.Open.mode()[0])
features = pd.get_dummies(features, columns=['StoreType', 'Assortment', 'PromoInterval', 'StateHoliday'])
scaler = RobustScaler()
# note: `c` is a reference list of numeric columns and is not used below; scaling is applied to every column in `numerical`
c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday', 'CompetitionDistance', 'CompetitionOpenSinceMonth', 'CompetitionOpenSinceYear',
'Promo2', 'Promo2SinceWeek', 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day', 'weekday']
features[numerical] = scaler.fit_transform(features[numerical].values)
return features
features = preprocess_data(merged_train, merged_test)
features = features.drop(['Date'], axis = 1)
# reconstruct train and test sets
def reconstruct_sets(features):
global x_train, x_val, y_train, y_val
# global train_set
# original train and test sets
x_train = features.iloc[:len(train_features), :]
x_test = features.iloc[len(train_features):, :]
y_train = train_target
# train_set = pd.concat([x_train, y_train], axis=1)
# updated train and validation sets
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size = .20, random_state = 0)
return x_train, x_val, y_train, y_val, x_test
x_train, x_val, y_train, y_val, x_test = reconstruct_sets(features)
clf = RandomForestRegressor(n_estimators=14)
clf.fit(x_train, y_train.values.ravel())  # ravel to a 1-D target to silence sklearn's column-vector warning
y_pred = clf.predict(x_val)
print("MSE =", mean_squared_error(y_val, y_pred))
print("Mean R2 score =", r2_score(y_val, y_pred))
print("MAE =", mean_absolute_error(y_val, y_pred))
|
9,092 | 8f960ad465d0a7bf48752db35c73169be6da27d8 | from numpy import array, zeros, arange, concatenate, searchsorted, where, unique
from pyNastran.bdf.fieldWriter import print_card_8
from pyNastran.bdf.bdfInterface.assign_type import (integer, integer_or_blank,
double_or_blank, integer_double_or_blank, blank)
class PBAR(object):
type = 'PBAR'
def __init__(self, model):
"""
Defines the PCOMP object.
:param self: the PCOMP object
:param model: the BDF object
:param cards: the list of PCOMP cards
"""
self.model = model
self.n = 0
self._cards = []
self._comments = []
def add(self, card, comment):
self._cards.append(card)
self._comments.append(comment)
def build(self):
cards = self._cards
ncards = len(cards)
self.n = ncards
if ncards:
#: Property ID
self.property_id = zeros(ncards, 'int32')
self.material_id = zeros(ncards, 'int32')
self.area = zeros(ncards, 'float64')
self.I1 = zeros(ncards, 'float64')
self.I2 = zeros(ncards, 'float64')
self.J = zeros(ncards, 'float64')
self.nsm = zeros(ncards, 'float64')
for i, card in enumerate(cards):
#: property ID
self.property_id[i] = integer(card, 1, 'property_id')
#: material ID
self.material_id[i] = integer(card, 2, 'material_id')
#: area
self.area[i] = double_or_blank(card, 3, 'area', 0.0)
#: I1
self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)
#: I2
self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)
#: Polar Moment of Inertia J -> use J()
#: default=1/2(I1+I2) for SOL=600, otherwise 0.0
#: .. todo:: support SOL 600 default
Jdefault = 0.5 * (self.I1[i] + self.I2[i])
self.J[i] = double_or_blank(card, 6, 'J', Jdefault)
self.nsm[i] = double_or_blank(card, 7, 'non-structural_mass', 0.0)
if 0:  # disabled legacy scalar-parsing block; retains non-vectorized names (self.A, self.i12) and never runs
self.C1 = double_or_blank(card, 9, 'C1', 0.0)
self.C2 = double_or_blank(card, 10, 'C2', 0.0)
self.D1 = double_or_blank(card, 11, 'D1', 0.0)
self.D2 = double_or_blank(card, 12, 'D2', 0.0)
self.E1 = double_or_blank(card, 13, 'E1', 0.0)
self.E2 = double_or_blank(card, 14, 'E2', 0.0)
self.F1 = double_or_blank(card, 15, 'F1', 0.0)
self.F2 = double_or_blank(card, 16, 'F2', 0.0)
#: default=infinite; assume 1e8
self.K1 = double_or_blank(card, 17, 'K1', 1e8)
#: default=infinite; assume 1e8
self.K2 = double_or_blank(card, 18, 'K2', 1e8)
#: I12 -> use I12()
self.i12 = double_or_blank(card, 19, 'I12', 0.0)
if self.A == 0.0 and self.i12 == 0.0:
assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (self.A, self.i12, self.K1)
assert self.K2 is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (self.A, self.i12, self.K2)
assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)
i = self.property_id.argsort()
self.property_id = self.property_id[i]
self.material_id = self.material_id[i]
self.area = self.area[i]
self.I1 = self.I1[i]
self.I2 = self.I2[i]
self.J = self.J[i]
self.nsm = self.nsm[i]
unique_pids = unique(self.property_id)
if len(unique_pids) != len(self.property_id):
raise RuntimeError('There are duplicate PBAR IDs...')
self._cards = []
self._comments = []
#=========================================================================
def get_index(self, property_ids):
if isinstance(property_ids, int):
property_ids = array([property_ids])
if property_ids is None:
return arange(self.n)
indexs = searchsorted(self.property_id, property_ids)
assert len(indexs) == len(property_ids), 'indexs=%s pids=%s' % (indexs, property_ids)
return indexs
#=========================================================================
def write_bdf(self, f, size=8, property_ids=None):
if self.n:
if property_ids is None:
i = arange(self.n)
else:
i = searchsorted(self.property_id, property_ids)
for (pid, mid, area, I1, I2, J) in zip(self.property_id[i], self.material_id[i],
self.area[i], self.I1[i], self.I2[i], self.J[i]):
card = ['PBAR', pid, mid, area, I1, I2, J]
f.write(print_card_8(card))
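# Minimal usage sketch (hypothetical driver; `model` is whatever BDF container
# routes PBAR cards to this class, and `card`/`comment` come from the parser):
#   pbar = PBAR(model)
#   pbar.add(card, comment)   # once per PBAR card encountered
#   pbar.build()              # pack the queued cards into the arrays above
#   with open('pbar.bdf', 'w') as f:
#       pbar.write_bdf(f, size=8, property_ids=None)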
|
9,093 | ecbcd023b8fec5763c6ff7f4cd0999426fae4a50 | from Receiver import Receiver
import time
import Image
class Sender:
ACK = []
size = None
windowSize = None
tableOfFrames = []
ChosenSumAlgorithm = None
def __init__(self, receiver):
    self.receiver = receiver
    # keep ACK flags and the frame table per instance (added safeguard): the
    # class-level lists above would otherwise be shared by every Sender object
    self.ACK = []
    self.tableOfFrames = []
def send_frame(self, frame):
self.receiver.receiver_frame(frame)
pass
def send_frame_selective(self):
# build the table of frames (payload split + checksum)
self.tableOfFrames = Image.gruop_into_frames(self.image, self.size, self.ChosenSumAlgorithm)
# record the number of frames for the sending loop
sizeoftable = len(self.tableOfFrames)
# one ACK/NAK flag per frame
for i in range(0, sizeoftable):
self.ACK.append(False)
# hand the receiver the values it needs
Receiver.numberOfFrames = sizeoftable
Receiver.reset_Data(Receiver)
endOfWindow = self.windowSize - 1
i = 0
# main loop: send frames according to the selective-repeat rules
while i < sizeoftable:
isCorrectFrame = True
# window loop: (re)send only the frames not yet acknowledged
for j in range(i, endOfWindow + 1):
if j == sizeoftable:
break
if self.ACK[j] == False:
# time.sleep(0.2)
print(f'SENDER: sent frame no. "{j}"')
self.ACK[j] = self.receiver.recieve_frame(self.tableOfFrames[j], j)
else:
pass
# check whether the whole window arrived without errors
for j in range(i, endOfWindow + 1):
if j == sizeoftable:
break
if self.ACK[j] == False:
isCorrectFrame = False
# slide the window: a full step when the window was clean, otherwise only past the leading acknowledged frames
if isCorrectFrame:
if (endOfWindow + self.windowSize) >= sizeoftable:
endOfWindow = sizeoftable
else:
endOfWindow += self.windowSize
i += self.windowSize
else:
count = 0
for j in range(i, endOfWindow + 1):
if self.ACK[j] == True:
count += 1
else:
break
endOfWindow += count
i += count
def send_frame_go_back_n(self, delay):
# self.image = interfere(self.image)
# prepare frames for sending
# 1. build the table of frames with checksums
self.tableOfFrames = Image.gruop_into_frames(self.image, self.size, self.ChosenSumAlgorithm)
# number of frames
size_of_table = len(self.tableOfFrames)
# one ACK/NAK flag per frame
for i in range(0, size_of_table):
self.ACK.append(False)
# hand the receiver the values it needs
self.receiver.numberOfValues = self.image.size
self.receiver.numberOfFrames = len(self.tableOfFrames)
self.receiver.reset_Data()
# start transmitting
i = 0
win_start = i
win_end = i + self.windowSize
length_table_of_frames = len(self.tableOfFrames)
while i < length_table_of_frames:
while i < win_end and i < length_table_of_frames:
# fetch the frame to send
data = self.tableOfFrames[i]
sequence_number = i
# send the frame
print(f'\nSENDER: sent frame no. "{i}"')
self.ACK[i] = self.receiver.recieve_frame(frame=data, sequence_number=sequence_number)
time.sleep(delay)
if self.ACK[win_start]:
print(f'SENDER: received ACK "{win_start}"\n')
win_end += 1
win_start += 1
# i = win_start
else:
if win_end > length_table_of_frames:
win_end = length_table_of_frames
for k in range(win_start + 1, win_end):
if self.ACK[k]:
print(f'SENDER: received ACK "{k}", skipped ACK "{win_start}"\n')
i = win_start - 1
break
i += 1
time.sleep(delay)
pass
pass
time.sleep(delay)
if i == win_end:
i = win_start
pass
print('SENDER: finished sending\n')
pass
# Sending method for the stop-and-wait protocol
def send_frame_stop_and_wait(self):
# test
# print(self.image)
self.tableOfFrames = Image.gruop_into_frames(self.image, self.size, self.ChosenSumAlgorithm)
# show the table holding all frames
print(self.tableOfFrames)
# record the number of frames
sizeoftable = len(self.tableOfFrames)
# one ACK/NAK flag per frame
for i in range(0, sizeoftable):
self.ACK.append(False)
# hand the receiver the values it needs
Receiver.numberOfValues = self.image.size  # fix (an assumption): original referenced an undefined `number`; mirrors send_frame_go_back_n
Receiver.numberOfFrames = sizeoftable
Receiver.reset_Data(Receiver)
i = 0
endOfWindow = self.windowSize - 1
print("Frame table size:")
print(sizeoftable)
# send frames one at a time, resending until each is acknowledged
while i < sizeoftable:
self.ACK[i] = self.receiver.receive_frame_stop_and_wait(self.tableOfFrames[i], i)
if self.ACK[i]:
i += 1
else:
self.ACK[i] = False
continue
class Frame:
value = None
seq_number = 0
pass
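# Usage sketch (hypothetical: assumes the local Image module provides the image
# payload and gruop_into_frames(), and Receiver implements the recieve_* methods):
#   receiver = Receiver()
#   sender = Sender(receiver)
#   sender.image = some_image_data
#   sender.size = 64              # payload size per frame
#   sender.windowSize = 4
#   sender.ChosenSumAlgorithm = some_checksum
#   sender.send_frame_go_back_n(delay=0.1)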
|
9,094 | 879bb8d67c0e1e8b125ac5994fcb142e3366c9d8 | import logging
import datetime
import numpy as np
def log_players(game_map):
logging.debug("------Players Info------")
for player in game_map.all_players():
logging.debug("-----Player ID: {}-----".format(player.id))
for ship in player.all_ships():
logging.debug("----Ship ID: {}----".format(ship.id))
logging.debug("X: {}".format(ship.x))
logging.debug("Y: {}".format(ship.y))
logging.debug("Health: {}".format(ship.health))
logging.debug("Docking status: {}".format(ship.docking_status)) ## UNDOCKED, DOCKED, DOCKING, UNDOCKING
logging.debug(" ")
def log_planets(game_map):
logging.debug("------Planet Info------")
for planet in game_map.all_planets():
logging.debug("----Planet Id: {}----".format(planet.id))
logging.debug("X: {}".format(planet.x))
logging.debug("Y: {}".format(planet.y))
logging.debug("Num of docking spots: {}".format(planet.num_docking_spots))
logging.debug("Current production: {}".format(planet.current_production))
logging.debug("docked_ship_ids: {}".format(planet._docked_ship_ids))
logging.debug("Health: {}".format(planet.health))
logging.debug("Radius: {}".format(planet.radius))
logging.debug("Owner: {}".format(planet.owner))
logging.debug("Owned: {}".format(planet.is_owned()))
logging.debug(" ")
def log_all_ships(myMap):
logging.debug("Logging all ships:")
# for player_id, dict in myMap.data_ships.items():
# logging.debug("Player id: {}".format(player_id))
# for ship_id, ship in dict.items():
# #logging.debug("ship_id: {} with data:{}".format(ship_id,ship))
# logging.debug("ship_id: {}".format(ship_id))
# for k,v in ship.items():
# logging.debug(" {}: {}".format(k,v))
for ship_id, ship in myMap.data_ships[myMap.my_id].items():
logging.debug("ship_id: {}".format(ship_id))
for k, v in ship.items():
logging.debug(" {}: {}".format(k,v))
def log_all_planets(myMap):
logging.debug("Logging all planets:")
for planet_id, dict in myMap.data_planets.items():
logging.debug("Planet id: {} with data: {}".format(planet_id, dict))
def log_myMap_ships(myMap):
logging.debug("------myMap Ships------")
logging.debug("Ships (enemy): {}".format(myMap.ships_enemy))
logging.debug("Ships (mine): {}".format(myMap.ships_owned))
logging.debug("Ships (new): {}".format(myMap.ships_new))
logging.debug("Ships (died): {}".format(myMap.ships_died))
logging.debug("Ships (mining) (mine): {}".format(myMap.ships_mining_ally))
logging.debug("Ships (mining) (enemy): {}".format(myMap.ships_mining_enemy))
logging.debug("Ships (attacking_frontline): {}".format(myMap.ships_attacking_frontline))
logging.debug("Ships (attacking): {}".format(myMap.ships_attacking))
logging.debug("Ships (evading): {}".format(myMap.ships_evading))
logging.debug("Ships (supporting): {}".format(myMap.ships_supporting))
logging.debug("Ships (defending): {}".format(myMap.ships_defending))
logging.debug("Ships (expanding): {}".format(myMap.ships_expanding))
logging.debug("Ships (running): {}".format(myMap.ships_running))
logging.debug("Ships (sniping): {}".format(myMap.ships_sniping))
logging.debug("Ships (battling): {}".format(myMap.ships_battling))
def log_myMap_planets(myMap):
logging.debug("------myMap Planets------")
logging.debug("Planets (mine): {}".format(myMap.planets_owned))
logging.debug("Planets (enemy): {}".format(myMap.planets_enemy))
logging.debug("Planets (unowned): {}".format(myMap.planets_unowned))
def log_myShip(ship):
logging.debug("My ship id: {}, x: {}, y: {}".format(ship.id, ship.x, ship.y))
logging.debug(" ")
def log_dimensions(game_map):
logging.debug("Width: {} x Height: {}".format(game_map.width,game_map.height))
logging.debug(" ")
def log_myID(game_map):
logging.debug("My ID: {}".format(game_map.my_id))
logging.debug(" ")
def log_numPlayers(game_map):
logging.debug("Number of players: {}".format(len(game_map._players)))
logging.debug(" ")
|
9,095 | e748261d1e5fd7921a022afefe5a5bea1fbfc67c | ## Arithmetic Progression
a = int(input('Enter first number: '))
d = int(input('Enter common difference: '))
n = int(input('Number of term: '))
tn = a
# iterate a term counter instead of comparing tn, so the loop also handles a
# zero or negative common difference and always prints exactly n terms
for _ in range(n):
    print(tn, end=" ")
    tn += d
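
# Closed-form check (added sketch): the n-th term and the series sum via the
# standard AP identities t_n = a + (n-1)d and S_n = n*(2a + (n-1)d)/2.
nth_term = a + (n - 1) * d
total = n * (2 * a + (n - 1) * d) // 2
print("\nnth term:", nth_term, "| sum of", n, "terms:", total)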
|
9,096 | 874668d5f3ea61b6aabde7b784078b431961a9c9 | #!/usr/bin/python3
"""HAWK GUI interface Selenium test: tests hawk GUI with Selenium using firefox or chrome"""
import argparse, re, sys, hawk_test_driver, hawk_test_ssh, hawk_test_results
### MAIN
# Command line argument parsing
parser = argparse.ArgumentParser(description='HAWK GUI interface Selenium test')
parser.add_argument('-b', '--browser', type=str, required=True,
help='Browser to use in the test. Can be: firefox, chrome, chromium')
parser.add_argument('-H', '--host', type=str, default='localhost',
help='Host or IP address where HAWK is running')
parser.add_argument('-P', '--port', type=str, default='7630',
help='TCP port where HAWK is running')
parser.add_argument('-p', '--prefix', type=str, default='',
help='Prefix to add to Resources created during the test')
parser.add_argument('-t', '--test-version', type=str, default='', required=True,
help='Test version. Ex: 12-SP3, 12-SP4, 15, 15-SP1')
parser.add_argument('-s', '--secret', type=str, default='',
help='root SSH Password of the HAWK node')
parser.add_argument('-r', '--results', type=str, default='',
help='Generate hawk_test.results file')
args = parser.parse_args()
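# Example invocation (hypothetical file name and host; a driver for the chosen
# browser must be on PATH):
#   ./hawk_test.py -b chrome -H 10.0.2.15 -P 7630 -t 15-SP1 -s linux -r hawk_test.results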
# Create driver instance
browser = hawk_test_driver.hawkTestDriver(addr=args.host.lower(), port=args.port,
browser=args.browser.lower(),
version=args.test_version.lower())
# Initialize results set
results = hawk_test_results.resultSet()
# Establish SSH connection to verify status only if SSH password was supplied
if args.secret:
ssh = hawk_test_ssh.hawkTestSSH(args.host.lower(), args.secret)
results.add_ssh_tests()
# Resources to create
if args.prefix and not re.match(r"^\w+$", args.prefix.lower()):
print("ERROR: Prefix must contain only numbers and letters. Ignoring")
args.prefix = ''
mycluster = args.prefix.lower() + 'Anderes'
myprimitive = args.prefix.lower() + 'cool_primitive'
myclone = args.prefix.lower() + 'cool_clone'
mygroup = args.prefix.lower() + 'cool_group'
# Tests to perform
browser.test('test_set_stonith_maintenance', results)
if args.secret:
ssh.verify_stonith_in_maintenance(results)
browser.test('test_disable_stonith_maintenance', results)
browser.test('test_view_details_first_node', results)
browser.test('test_clear_state_first_node', results)
browser.test('test_set_first_node_maintenance', results)
if args.secret:
ssh.verify_node_maintenance(results)
browser.test('test_disable_maintenance_first_node', results)
browser.test('test_add_new_cluster', results, mycluster)
browser.test('test_remove_cluster', results, mycluster)
browser.test('test_click_on_history', results)
browser.test('test_generate_report', results)
browser.test('test_click_on_command_log', results)
browser.test('test_click_on_status', results)
browser.test('test_add_primitive', results, myprimitive)
if args.secret:
ssh.verify_primitive(myprimitive, args.test_version.lower(), results)
browser.test('test_remove_primitive', results, myprimitive)
if args.secret:
ssh.verify_primitive_removed(results)
browser.test('test_add_clone', results, myclone)
browser.test('test_remove_clone', results, myclone)
browser.test('test_add_group', results, mygroup)
browser.test('test_remove_group', results, mygroup)
browser.test('test_click_around_edit_conf', results)
# Save results if run with -r or --results
if args.results:
results.logresults(args.results)
sys.exit(results.get_failed_tests_total())  # exit code = number of failed tests
|
9,097 | 65ff3b5137c94890c3293a2ae3f57dee1f60a54c | import cv2
import dlib
import faceBlendCommon as face
from matplotlib import pyplot as plt
from scipy.spatial import distance as dist
import numpy as np
import cmapy
import math
def eye_aspect_ratio(eye):
A = dist.euclidean(eye[1], eye[5])
B = dist.euclidean(eye[2], eye[4])
C = dist.euclidean(eye[0], eye[3])
rad=(A+B)/2
return int(rad)
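# Note: despite the EAR-style name, this returns the mean vertical eye opening
# (A+B)/2 as an integer pixel "radius"; createIrisMask below derives the iris
# radius from it. The horizontal span C is computed but currently unused.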
# Load Image
im = cv2.imread("imgs/2.jpg")
# Detect face landmarks
PREDICTOR_PATH = r"./model/shape_predictor_68_face_landmarks.dat"
faceDetector = dlib.get_frontal_face_detector()
landmarkDetector = dlib.shape_predictor(PREDICTOR_PATH)
landmarks = face.getLandmarks(faceDetector, landmarkDetector, im)
def createEyeMask(eyeLandmarks, im):
leftEyePoints = eyeLandmarks
eyeMask = np.zeros_like(im)
cv2.fillConvexPoly(eyeMask, np.int32(leftEyePoints), (255, 255, 255))
eyeMask = np.uint8(eyeMask)
return eyeMask
def findIris(eyeMask, im, thresh):
r = im[:,:,2]
_, binaryIm = cv2.threshold(r, thresh, 255, cv2.THRESH_BINARY_INV)
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (4,4))
morph = cv2.dilate(binaryIm, kernel, 1)
morph = cv2.merge((morph, morph, morph))
morph = morph.astype(float)/255
eyeMask = eyeMask.astype(float)/255
iris = cv2.multiply(eyeMask, morph)
return iris
def findCentroid(iris):
M = cv2.moments(iris[:,:,0])
cX = int(M["m10"] / M["m00"])
cY = int(M["m01"] / M["m00"])
centroid = (cX,cY)
return centroid
def createIrisMask(iris, centroid,rad):
cnts, _ = cv2.findContours(np.uint8(iris[:,:,0]), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
flag = 10000
final_cnt = None
for cnt in cnts:
(x,y),radius = cv2.minEnclosingCircle(cnt)
distance = abs(centroid[0]-x)+abs(centroid[1]-y)
if distance < flag :
flag = distance
final_cnt = cnt
else:
continue
(x,y),radius = cv2.minEnclosingCircle(final_cnt)
center = (int(x),int(y))
center = centroid
# radius = int(radius-(radius//4))
radius=(rad//2)+2
print('iris radius:', radius)
irisMask = np.zeros_like(iris)
inverseIrisMask = np.ones_like(iris)*255
cv2.circle(irisMask,center,radius,(255, 255, 255),-1)
cv2.circle(inverseIrisMask,center,radius,(0, 0, 0),-1)
# irisMask = cv2.GaussianBlur(irisMask, (5,5), cv2.BORDER_DEFAULT)
# inverseIrisMask = cv2.GaussianBlur(inverseIrisMask, (5,5), cv2.BORDER_DEFAULT)
return irisMask, inverseIrisMask
def changeEyeColor(im, irisMask, inverseIrisMask):
imCopy = cv2.applyColorMap(im, cmapy.cmap('Blues_r'))
imCopy = imCopy.astype(float)/255
irisMask = irisMask.astype(float)/255
inverseIrisMask = inverseIrisMask.astype(float)/255
im = im.astype(float)/255
faceWithoutEye = cv2.multiply(inverseIrisMask, im)
newIris = cv2.multiply(irisMask, imCopy)
result = faceWithoutEye + newIris
return result
def float642Uint8(im):
im2Convert = im.astype(np.float64) / np.amax(im)
im2Convert = 255 * im2Convert
convertedIm = im2Convert.astype(np.uint8)
return convertedIm
# Create eye mask using eye landmarks from facial landmark detection
leftEyeMask = createEyeMask(landmarks[36:42], im)
rightEyeMask = createEyeMask(landmarks[42:48], im)
# Find the iris by thresholding the red channel of the image within the boundaries of the eye mask
leftIris = findIris(leftEyeMask, im, 100)
rightIris = findIris(rightEyeMask, im, 50)
# Find the centroid of the binary image of the eye
leftIrisCentroid = findCentroid(leftIris)
rightIrisCentroid = findCentroid(rightIris)
# Generate the iris mask and its inverse mask
rad_left=eye_aspect_ratio(landmarks[36:42])
rad_right=eye_aspect_ratio(landmarks[42:48])
rightIrisMask, rightInverseIrisMask = createIrisMask(rightIris, rightIrisCentroid,rad_right)
leftIrisMask, leftInverseIrisMask = createIrisMask(leftIris, leftIrisCentroid,rad_left)
# Change the eye color and merge it to the original image
coloredEyesLady = changeEyeColor(im, rightIrisMask, rightInverseIrisMask)
coloredEyesLady = float642Uint8(coloredEyesLady)
coloredEyesLady = changeEyeColor(coloredEyesLady, leftIrisMask, leftInverseIrisMask)
coloredEyesLady = float642Uint8(coloredEyesLady)
# Present results
cv2.imwrite("3.jpg", coloredEyesLady)
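# Also show the result inline (a convenience sketch; matplotlib expects RGB
# while OpenCV stores BGR, hence the conversion):
plt.imshow(cv2.cvtColor(coloredEyesLady, cv2.COLOR_BGR2RGB))
plt.axis('off')
plt.show()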
|
9,098 | 48a970b35aa7fd677828f5d7bd5f1dcf24511b01 | short_train <- read.csv('short_train.csv', header=TRUE)
#delete unnecessary columns
short_train[1] <- NULL
# remove @user mentions (handles) from tweets
regexp <- "@[a-zA-Z0-9_]*"
gsubtry <- gsub(pattern = regexp, replacement = "", x = short_train$Tweet)
#merge gsubtry back into short_train, rename as Tweet
short_train_clean <- cbind(short_train, gsubtry)
short_train_clean[2] <- NULL
names(short_train_clean)[3] <- "Tweet" |
9,099 | b2fecadbd99edb89379f82a935aa1622f043eeac | #!/usr/bin/env python3
print(sum([row[lineNumber * 3 % len(row)] == '#' for lineNumber, row in enumerate(open('input.txt').read().splitlines())])) |