index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
9,600 | 92f59612b2697db155da1bdc625fdabc115867b0 | # 5. Write a program to implement polymorphism.
class Honda:
    """A Honda car described by its model name and colour."""

    def __init__(self, name, color):
        self.name, self.color = name, color

    def display(self):
        """Print this car's name and colour to stdout."""
        print("Honda car name is : ", self.name, " and color is : ", self.color)
class Audi:
    """An Audi car described by its model name and colour."""

    def __init__(self, name, color):
        self.name, self.color = name, color

    def display(self):
        """Print this car's name and colour to stdout."""
        print("Audi car name is : ", self.name, " and color is : ", self.color)
# Duck-typed polymorphism: both objects answer display() even though the
# classes share no base class.
HondaCar = Honda("Honda City", "White")
AudiCar = Audi("A6", "Black")
for vehicle in [HondaCar, AudiCar]:
    vehicle.display()
|
9,601 | 98940c898d58917e652fe1514ea758768b048dbc | import pygame
def play_file(name, loop=0, time=0.0):
    """Load a music file from data/audio/ and start playback.

    Falls back to data/audio/error.aud when the requested file cannot be
    loaded; exits the program if even the fallback fails.

    name -- file name inside data/audio/
    loop -- repeat count passed to pygame (0 plays once)
    time -- position (seconds) at which playback starts
    """
    try:
        # BUGFIX: pygame raises pygame.error on load/play failure; the
        # original caught ZeroDivisionError, which can never occur here,
        # so the fallback path was dead code.
        path = 'data/audio/' + name
        pygame.mixer.music.load(path)
        pygame.mixer.music.play(loop, time)
    except pygame.error:
        print('AudioLoading: failed to load ' + name)
        try:
            path = 'data/audio/error.aud'
            pygame.mixer.music.load(path)
            pygame.mixer.music.play(loop, time)
        except pygame.error:
            print('Can not load file: ' + name)
            raise SystemExit()
|
9,602 | 028b38a07c71232eb42bedecd734cf7188550239 | from config import Config
def test_stf_3_2_1_pos(fixture):
    # Positive path: opening the solution URL with the exercise's own seed
    # must display the configured pass text.
    seed = fixture.common.get_seed()
    fixture.stf.open_stf_exercise('3-2-1', seed)
    fixture.stf.open_solution_url(seed)
    assert fixture.stf.get_solution() == Config.test_pass_text
    # Return to the main page so subsequent tests start from a known state.
    fixture.common.back_to_main_page()
def test_stf_3_2_1_neg(fixture):
    # Negative path: a bogus seed ('test') in the solution URL must display
    # the configured fail text.
    seed = fixture.common.get_seed()
    fixture.stf.open_stf_exercise('3-2-1', seed)
    fixture.stf.open_solution_url('test')
    assert fixture.stf.get_solution() == Config.test_fail_text
    # Return to the main page so subsequent tests start from a known state.
    fixture.common.back_to_main_page()
__author__ = 'GiSDeCain'  # module author metadata
|
9,603 | bbff797fab4ac7dc7e6adb81c0eeda561f8ee147 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from unittest import TestCase
from pydsf.exceptions import DSFServiceError
from pydsf.service.response import parse_response
from pydsf.service.translations import translate_input_fields, translate_output_fields
class MockMessage(object):
    """Stand-in for a DSF service message carrying an error code."""

    SUMMARY = "Error summary"

    def __init__(self, error_code):
        # Mirror the service's upper-case field name for the error code.
        self.CODE = error_code
class MockError(object):
    """Stand-in for the service error object ('feilmelding' = error message)."""

    feilmelding = "Error message here."
class MockErrorDetail(object):
    """Wraps a MockError the way the SOAP fault detail does ('feil' = error)."""

    feil = MockError()
class MockResponseOK(object):
    """Mock HTTP-200 response: carries RESULT on success, MESSAGE otherwise."""

    def __init__(self, error_code=None, result=True):
        if not result:
            self.MESSAGE = MockMessage(error_code)
        else:
            self.RESULT = "Results"
class MockResponseError(object):
    """Mock SOAP fault response.

    The original defined a no-op ``__init__`` containing only ``pass``;
    the default object initialiser already does nothing, so it is removed.
    """

    faultstring = "Fault string"
    detail = MockErrorDetail()
class MockResponseParsed(object):
    """Mock of a parsed DSF response holding (field, value) pairs in HOV.

    BUGFIX: the original appended to a class-level list inside __init__,
    so every new instance grew the single shared list (10 entries after
    two instantiations). HOV is now a fresh per-instance list.
    """

    HOV = []  # class-level default kept for interface compatibility

    def __init__(self):
        self.HOV = [
            ("FODT", "010107"),
            ("PERS", "50160"),
            ("NAVN-F", "TOMAS"),
            ("NAVN-M", ""),
            ("UKJENTFELT", "something"),
        ]
class PersonTests(TestCase):
    """Placeholder tests for person lookups (bodies not yet implemented)."""

    def test_bad_search_fields(self):
        pass

    def test_error_response(self):
        pass

    def test_empty_response(self):
        # BUGFIX: renamed from 'tets_empty_response' — the typo meant the
        # test runner never discovered or ran this test.
        pass

    def test_result(self):
        pass
class ResponseTests(TestCase):
def test_has_result(self):
result = MockResponseOK()
parsed = parse_response((200, result))
self.assertEqual(parsed, "Results")
def test_no_result(self):
result = MockResponseOK(error_code="1", result=False)
parsed = parse_response((200, result))
self.assertIsNone(parsed)
@staticmethod
def test_result_with_error():
result = MockResponseOK(error_code="2", result=False)
with pytest.raises(DSFServiceError):
parse_response((200, result))
@staticmethod
def test_uknown_error():
result = MockResponseError()
with pytest.raises(DSFServiceError):
parse_response((500, result))
class TranslationTests(TestCase):
def test_output_translation(self):
response = MockResponseParsed()
translated = translate_output_fields(response)
self.assertIsInstance(translated, dict)
self.assertTrue("date_of_birth" in translated)
self.assertTrue("person_number" in translated)
self.assertTrue("first_name" in translated)
self.assertTrue("middle_name" in translated)
# Verify capitalisation and None
self.assertEqual(translated["first_name"], "Tomas")
self.assertEqual(translated["UKJENTFELT"], "Something")
# Verify that empty strings are translated to None
self.assertIsNone(translated["middle_name"])
def test_input_translation(self):
valid_input = {
"end_user": "unicornis-test",
"first_name": "tomas",
"last_name": "topstad"
}
invalid_input_invalid_field = {
"end_user": "unicornis_test",
"invalidfield": "somevalue"
}
with pytest.raises(ValueError):
translate_input_fields(**invalid_input_invalid_field)
translated = translate_input_fields(**valid_input)
self.assertTrue("saksref" in translated)
self.assertTrue("fornavn" in translated)
self.assertTrue("etternavn" in translated)
|
9,604 | 14e247b7b586242bfc17507fece3c60b7b8a3025 |
"""
A wrapper around openid's fetcher to be used in Django.
"""
from openid import fetchers
class UrlfetchFetcher(fetchers.HTTPFetcher):
    """HTTPFetcher implementation delegating to openid's module-level fetch."""

    def fetch(self, url, body=None, headers=None):
        """Fetch *url* via openid.fetchers.fetch.

        BUGFIX: the original called fetchers.fetch(body, headers), dropping
        the url argument entirely and passing the body where the url belongs.
        """
        return fetchers.fetch(url, body, headers)
|
9,605 | 49887a3914fa0021a03d89721aa47cded95d54f6 | #!/usr/bin/env python
#python
import os
import math
import sys
import time
import re
import cPickle
import random
#eman
try:
import EMAN
except:
print "EMAN module did not get imported"
#scipy
import numpy
#appion
from appionlib import appionScript
from appionlib import appiondata
from appionlib import apDisplay
from appionlib import apStack
from appionlib import apRecon
from appionlib import apEMAN
from appionlib import apSymmetry
from appionlib import apFile
#=====================
#=====================
class satAverageScript(appionScript.AppionScript):
    """Appion script that rebuilds per-class averages (and optionally
    even/odd test volumes) from a refinement run, keeping only particles
    listed in an external keep-list file. Python 2 / EMAN1 pipeline."""
    #=====================
    def makeEvenOddClasses(self, listfile, outputstack, classdata, maskrad):
        """Split a class .lst file into even/odd halves and average each half.

        Writes <rootname>-even.lst / <rootname>-odd.lst, averages whichever
        halves are non-empty into params['evenstack'] / params['oddstack'],
        then removes the temporary list files.
        """
        f=open(listfile,'r')
        f.readline()  # skip the '#LST' header line
        lines = f.readlines()
        f.close()
        # NOTE(review): randstr is computed but never used in this method.
        randstr = str(int(random.random()*10e5))
        evenfile = self.rootname+"-even.lst"
        evenf = open(evenfile,'w')
        oddfile = self.rootname+"-odd.lst"
        oddf = open(oddfile,'w')
        evenf.write("#LST\n")
        oddf.write("#LST\n")
        neven=0
        nodd=0
        # alternate lines between the odd and even list files
        for i in range(0, len(lines)):
            if i%2:
                nodd+=1
                oddf.write(lines[i])
            else:
                neven+=1
                evenf.write(lines[i])
        evenf.close()
        oddf.close()
        if neven>0:
            self.makeClassAverages(evenfile, self.params['evenstack'], classdata, maskrad)
        if nodd>0:
            self.makeClassAverages(oddfile, self.params['oddstack'], classdata, maskrad)
        apFile.removeFile(evenfile)
        apFile.removeFile(oddfile)
    #=====================
    def getParticleInfo(self, reconid, iteration):
        """
        Get all particle data for given recon and iteration.

        Results are cached on disk (cPickle) per (reconid, iteration) so
        repeated runs skip the expensive database query.
        """
        t0 = time.time()
        cachefile = os.path.join(self.params['rundir'],
            "refineparticledata-r"+str(reconid)+"-i"+str(iteration)+".cache")
        if os.path.isfile(cachefile):
            apDisplay.printColor("loading refineparticledata from cache file", "cyan")
            f = open(cachefile, 'r')
            refineparticledata = cPickle.load(f)
            f.close()
        else:
            refinerundata = appiondata.ApRefineRunData.direct_query(reconid)
            if not refinerundata:
                apDisplay.printError("Could not find refinerundata for reconrun id="+str(reconid))
            refineq = appiondata.ApRefineIterData()
            refineq['refineRun'] = refinerundata
            refineq['iteration'] = iteration
            refinedata = refineq.query(results=1)
            if not refinedata:
                apDisplay.printError("Could not find refinedata for reconrun id="
                    +str(reconid)+" iter="+str(iteration))
            refinepartq=appiondata.ApRefineParticleData()
            refinepartq['refineIter']=refinedata[0]
            apDisplay.printMsg("querying particles on "+time.asctime())
            refineparticledata = refinepartq.query()
            apDisplay.printMsg("saving refineparticledata to cache file")
            f = open(cachefile, 'w')
            cPickle.dump(refineparticledata, f)
            f.close()
        apDisplay.printMsg("received "+str(len(refineparticledata))+" particles in "+apDisplay.timeString(time.time()-t0))
        return refineparticledata
    #=====================
    def procKeepList(self):
        """
        Build the list of particle numbers to KEEP from an external file.
        Requirements:
            the input file has one particle per line
            the first piece of data is the particle number from the db
        NOTE(review): entries are stored as file-value+1 while start()
        compares against DB particleNumber-1 — confirm the two offsets are
        intended to line up with the EMAN 0-based convention.
        """
        keeplist = []
        f = open(self.params['keeplist'], 'r')
        lines = f.readlines()
        f.close()
        for n in lines:
            words = n.split()
            keeplist.append(int(words[0])+1)
        return keeplist
    #=====================
    def makeClassAverages(self, classlist, outputstack, classdata, maskrad):
        """Align the particles in one class .lst file, median-average them,
        stamp the class Euler angles, mask, and append to outputstack."""
        #align images in class
        #print classlist
        images = EMAN.readImages(classlist, -1, -1, 0)
        for image in images:
            image.rotateAndTranslate()
            if image.isFlipped():
                image.hFlip()
        #make class average
        avg = EMAN.EMData()
        avg.makeMedian(images)
        #write class average
        e = EMAN.Euler()
        alt = classdata['euler1']*math.pi/180
        az = classdata['euler2']*math.pi/180
        phi = 0.0 #classdata['euler3']*math.pi/180
        e.setAngle(alt, az, phi)
        avg.setRAlign(e)
        avg.setNImg(len(images))
        avg.applyMask(maskrad, 0)
        avg.writeImage(outputstack,-1)
    #=====================
    def determineClasses(self, particles):
        """
        Takes refineparticledata and returns a dictionary of classes,
        keyed by "euler1_euler2" (3-decimal strings), plus a stats dict of
        the quality-factor distribution.
        """
        apDisplay.printMsg("sorting refineparticledata into classes")
        t0 = time.time()
        classes={}
        class_stats={}
        quality=numpy.zeros(len(particles))
        for partnum in range(len(particles)):
            quality[partnum] = particles[partnum]['quality_factor']
            key = ("%.3f_%.3f"%(particles[partnum]['euler1'], particles[partnum]['euler2']))
            if key not in classes.keys():
                classes[key]={}
                classes[key]['particles']=[]
                classes[key]['euler1'] = particles[partnum]['euler1']
                classes[key]['euler2'] = particles[partnum]['euler2']
                #classes have no inplane rotation
                classes[key]['euler3'] = 0.0 #particles[partnum]['euler3']
            classes[key]['particles'].append(particles[partnum])
        class_stats['meanquality']=quality.mean()
        class_stats['stdquality']=quality.std()
        class_stats['max']=quality.max()
        class_stats['min']=quality.min()
        apDisplay.printMsg("sorted %d particles into %d classes"%(len(particles), len(classes)))
        ### print stats
        print "-- quality factor stats --"
        print ("mean/std :: "+str(round(class_stats['meanquality'],2))+" +/- "
            +str(round(class_stats['stdquality'],2)))
        print ("min/max :: "+str(round(class_stats['min'],2))+" <> "
            +str(round(class_stats['max'],2)))
        apDisplay.printMsg("finished sorting in "+apDisplay.timeString(time.time()-t0))
        return classes, class_stats
    #=====================
    def getClassData(self, reconid, iternum):
        """Return the class dictionary for (reconid, iternum), using an
        on-disk cPickle cache to avoid re-querying the database."""
        t0 = time.time()
        cachefile = os.path.join(self.params['rundir'],
            "partclassdata-r"+str(reconid)+"-i"+str(iternum)+".cache")
        if os.path.isfile(cachefile):
            apDisplay.printColor("loading particle class data from cache file", "cyan")
            f = open(cachefile, 'r')
            classes = cPickle.load(f)
            f.close()
        else:
            apDisplay.printMsg("determine particle class data from database")
            particles = self.getParticleInfo(reconid, iternum)
            classes, cstats = self.determineClasses(particles)
            f = open(cachefile, 'w')
            apDisplay.printMsg("saving particle class data to cache file")
            cPickle.dump(classes, f)
            f.close()
        apDisplay.printMsg("received "+str(len(classes))+" classes in "+apDisplay.timeString(time.time()-t0))
        return classes
    #######################################################
    #### ITEMS BELOW CAN BE SPECIFIED IN A NEW PROGRAM ####
    #######################################################
    #=====================
    def setupParserOptions(self):
        """Declare the command-line options this script accepts."""
        self.parser.set_usage("Usage: %prog --reconid=<DEF_id> --iter=<iter> --mask=<radius>\n\t "
            +"[ --stackname=<name> "
            +" --avgjump=<avg> --sigma=<sigma> --eotest ]")
        self.parser.add_option("-r", "--reconid", dest="reconid", type="int",
            help="Reconstruction run id", metavar="INT")
        self.parser.add_option("-m", "--mask", dest="mask", type="int",
            help="Mask radius in pixels", metavar="INT")
        self.parser.add_option("-i", "--iter", dest="iter", type="int",
            help="Final eulers applied to particles will come from this iteration", metavar="INT")
        self.parser.add_option("--stackname", dest="stackname", default="goodavgs.hed",
            help="Name of the stack to write the averages", metavar="file.hed")
        self.parser.add_option("--keep-list", dest="keeplist",
            help="Keep particles in the specified text file, EMAN style 0,1,...", metavar="TEXT")
        self.parser.add_option("--eotest", dest="eotest", default=False,
            action="store_true", help="Perform even/odd test")
    #=====================
    def checkConflicts(self):
        """Validate required options and derive dependent parameters
        (stackid, normalized .hed stack name, symmetry)."""
        if self.params['reconid'] is None:
            apDisplay.printError("enter a reconstruction ID from the database")
        if self.params['mask'] is None:
            apDisplay.printError("enter a mask radius")
        if self.params['iter'] is None:
            apDisplay.printError("enter an iteration for the final Eulers")
        if self.params['keeplist'] is None:
            apDisplay.printError("enter an keep list file")
        self.params['keeplist'] = os.path.abspath(self.params['keeplist'])
        if not os.path.isfile(self.params['keeplist']):
            apDisplay.printError("could not find list file")
        self.params['stackid'] = apStack.getStackIdFromRecon(self.params['reconid'])
        # force the output stack to use the IMAGIC .hed extension
        if self.params['stackname'][-4:] != ".hed":
            s = os.path.splitext(self.params['stackname'])[0]
            s += ".hed"
            self.params['stackname'] = s
        apDisplay.printMsg("Stack name: "+self.params['stackname'])
        self.params['symmetry'] = apSymmetry.getSymmetryFromReconRunId(self.params['reconid'])
        self.params['symmname'] = self.params['symmetry']['eman_name']
    #=====================
    def setRunDir(self):
        """Place the run directory under the refinement's satEuler tree."""
        refdata = appiondata.ApRefineRunData.direct_query(self.params['reconid'])
        if not refdata:
            apDisplay.printError("reconid "+str(self.params['reconid'])+" does not exist in the database")
        refpath = refdata['path']['path']
        rundir = os.path.join(refpath, "../../satEuler/sat-recon%d/volumes"%(self.params['reconid']))
        self.params['rundir'] = os.path.abspath(rundir)
    #=====================
    def start(self):
        """Main entry point: write a per-class EMAN .lst file containing only
        kept particles, average each class, then reconstruct a volume (and
        even/odd volumes + FSC resolution when --eotest is given)."""
        self.rootname = self.params['stackname'].split(".")[0]
        self.params['outputstack'] = os.path.join(self.params['rundir'], self.params['stackname'])
        # remove stale output stacks from any previous run
        if os.path.isfile(self.params['outputstack']):
            apFile.removeStack(self.params['outputstack'])
        if self.params['eotest'] is True:
            self.params['evenstack'] = os.path.splitext(self.params['outputstack'])[0]+'.even.hed'
            if os.path.isfile(self.params['evenstack']):
                apFile.removeStack(self.params['evenstack'])
            self.params['oddstack'] = os.path.splitext(self.params['outputstack'])[0]+'.odd.hed'
            if os.path.isfile(self.params['oddstack']):
                apFile.removeStack(self.params['oddstack'])
        classes = self.getClassData(self.params['reconid'], self.params['iter'])
        stackid = apStack.getStackIdFromRecon(self.params['reconid'])
        stackdata = apStack.getOnlyStackData(stackid)
        stackpath = os.path.join(stackdata['path']['path'], stackdata['name'])
        classkeys = classes.keys()
        classkeys.sort()
        classnum=0
        keeplist = self.procKeepList()
        finallist = []
        apDisplay.printMsg("Processing "+str(len(classes))+" classes")
        #loop through classes
        for key in classkeys:
            classnum+=1
            # crude in-place progress counter on stderr, updated every 10 classes
            if classnum%10 == 1:
                sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
                sys.stderr.write(str(classnum)+" of "+(str(len(classkeys))))
            # loop through particles in class
            classfile = self.rootname+"-class.lst"
            classf = open(classfile, 'w')
            classf.write('#LST\n')
            nptcls=0
            for ptcl in classes[key]['particles']:
                # translate DB into EMAN
                partnum = ptcl['particle']['particleNumber'] - 1
                if partnum in keeplist:
                    if ptcl['mirror']:
                        mirror=1
                    else:
                        mirror=0
                    rot = ptcl['euler3']*math.pi/180.0
                    classf.write(
                        "%d\t%s\t%f,\t%f,%f,%f,%d\n" %
                        (partnum, stackpath, ptcl['quality_factor'],
                        rot, ptcl['shiftx'], ptcl['shifty'], mirror))
                    nptcls+=1
                    finallist.append(partnum)
            classf.close()
            # skip classes with no kept particles
            if nptcls<1:
                continue
            self.makeClassAverages(classfile, self.params['outputstack'], classes[key], self.params['mask'])
            if self.params['eotest'] is True:
                self.makeEvenOddClasses(classfile, self.params['outputstack'], classes[key], self.params['mask'])
            apFile.removeFile(classfile)
        sys.stderr.write("\n")
        # record the sorted list of kept particle numbers
        finalfilename = self.rootname+"-keep.lst"
        finalf = open(finalfilename, 'w')
        finallist.sort()
        for partnum in finallist:
            finalf.write('%d\n' % (partnum,) )
        finalf.close()
        # NOTE(review): stackstr and reconstr are computed but never used below.
        stackstr = str(stackdata.dbid)
        reconstr = str(self.params['reconid'])
        ### recon 3d volumes
        threedname = os.path.join(self.params['rundir'], self.rootname+"."+str(self.params['iter'])+"a.mrc")
        emancmd = ( "make3d "+self.params['outputstack']+" out="
            +threedname+" hard=50 sym="+self.params['symmname']+" pad=240 mask="+str(self.params['mask'])+"; echo ''" )
        #print emancmd
        apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True, logfile=self.rootname+"-eman.log")
        # post-process: normalize, low-pass filter, re-mask
        # NOTE(review): apix=1.63 and pad=240 are hard-coded for a specific dataset.
        threednameb = os.path.join(self.params['rundir'], self.rootname+"."+str(self.params['iter'])+"b.mrc")
        emancmd = ( "proc3d "+threedname+" "+threednameb
            +" apix=1.63 norm=0,1 lp=8 origin=0,0,0 mask="+str(self.params['mask'])+"; echo '' " )
        apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True, logfile=self.rootname+"-eman.log")
        if self.params['eotest'] is True:
            # even
            evenname = os.path.join(self.params['rundir'], self.rootname+"-even."+str(self.params['iter'])+"a.mrc")
            if os.path.isfile(self.params['evenstack']):
                evenemancmd = ( "make3d "+self.params['evenstack']+" out="
                    +evenname+" hard=50 sym="+self.params['symmname']+" pad=240 mask="+str(self.params['mask'])+"; echo ''" )
                #print evenemancmd
                apEMAN.executeEmanCmd(evenemancmd, verbose=False, showcmd=True, logfile=self.rootname+"-eveneman.log")
            else:
                apDisplay.printWarning("file "+self.params['evenstack']+" does not exist")
            # odd
            oddname = os.path.join(self.params['rundir'], self.rootname+"-odd."+str(self.params['iter'])+"a.mrc")
            if os.path.isfile(self.params['oddstack']):
                oddemancmd = ( "make3d "+self.params['oddstack']+" out="
                    +oddname+" hard=50 sym="+self.params['symmname']+" pad=240 mask="+str(self.params['mask'])+"; echo ''" )
                #print oddemancmd
                apEMAN.executeEmanCmd(oddemancmd, verbose=False, showcmd=True, logfile=self.rootname+"-oddeman.log")
            else:
                apDisplay.printWarning("file "+self.params['oddstack']+" does not exist")
            #eotest: FSC between the even and odd reconstructions
            fscout = os.path.join(self.params['rundir'], self.rootname+"-fsc.eotest")
            if os.path.isfile(oddname) and os.path.isfile(evenname):
                eotestcmd = "proc3d "+oddname+" "+evenname+" fsc="+fscout
                apEMAN.executeEmanCmd(eotestcmd, verbose=True, showcmd=True)
            else:
                apDisplay.printWarning("could not perform eotest")
            if os.path.isfile(fscout):
                # NOTE(review): 160.0 (box size) and 1.63 (apix) are hard-coded
                # here as well — confirm they match the stack being processed.
                res = apRecon.getResolutionFromFSCFile(fscout, 160.0, 1.63)
                apDisplay.printColor( ("resolution: %.5f" % (res)), "cyan")
                resfile = self.rootname+"-res.txt"
                f = open(resfile, 'a')
                f.write("[ %s ]\nresolution: %.5f\n" % (time.asctime(), res))
                f.close()
#=====================
#=====================
if __name__ == '__main__':
    # Script entry point: start() does the work, close() finalizes the run.
    satavg = satAverageScript()
    satavg.start()
    satavg.close()
|
9,606 | d3dcef6a1a6bcfc1161c4de46081703b8fe7016d | from docutils import nodes
from docutils.parsers.rst import directives, Directive
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.lexers.special import TextLexer
from pygments.formatters.html import HtmlFormatter
class Pygments(Directive):
    """Source code syntax highlighting directive for docutils."""

    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {
        'anchorlinenos': directives.flag,
        'classprefix': directives.unchanged,
        'hl_lines': directives.unchanged,
        'lineanchors': directives.unchanged,
        'linenos': directives.unchanged,
        'linenospecial': directives.nonnegative_int,
        'linenostart': directives.nonnegative_int,
        'linenostep': directives.nonnegative_int,
        'lineseparator': directives.unchanged,
        'linespans': directives.unchanged,
        'nobackground': directives.flag,
        'nowrap': directives.flag,
        'tagsfile': directives.unchanged,
        'tagurlformat': directives.unchanged,
    }
    has_content = True

    def run(self):
        """Highlight the directive content and return it as a raw HTML node."""
        self.assert_has_content()
        # Fall back to plain text when the requested lexer is unknown.
        try:
            lexer = get_lexer_by_name(self.arguments[0])
        except ValueError:
            lexer = TextLexer()
        # Normalise 'linenos': 'none' disables it, any value other than
        # 'table'/'inline' becomes 'table'.
        linenos_value = self.options.get('linenos')
        if 'linenos' in self.options and linenos_value not in ('table', 'inline'):
            if linenos_value == 'none':
                del self.options['linenos']
            else:
                self.options['linenos'] = 'table'
        # docutils flags parse as None; HtmlFormatter expects booleans.
        for flag_name in ('nowrap', 'nobackground', 'anchorlinenos'):
            if flag_name in self.options:
                self.options[flag_name] = True
        # noclasses should already default to False, but just in case...
        formatter = HtmlFormatter(noclasses=False, **self.options)
        html = highlight('\n'.join(self.content), lexer, formatter)
        return [nodes.raw('', html, format='html')]
def register():
    """Register the highlighting directive under both conventional names."""
    for directive_name in ('code-block', 'sourcecode'):
        directives.register_directive(directive_name, Pygments)
|
9,607 | a550b9406e9dd301b863744bb28bc81fac0cd80c | from django.contrib import admin
from TestApp.models import Parcel
# Register your models here.
class ParcelAdmin(admin.ModelAdmin):
    # Columns shown on the Parcel change-list page in the Django admin.
    list_display = ['billno','shippername','rate']
admin.site.register(Parcel,ParcelAdmin)  # expose Parcel in the admin site
|
9,608 | 6d1b882af2a027f2eecaa3a881dbcab1e3a3b92b | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Spotlight Volume configuration plist plugin."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import plist as plist_formatter
from plaso.parsers import plist
from plaso.parsers.plist_plugins import spotlight_volume
from tests.parsers.plist_plugins import test_lib
class SpotlightVolumePluginTest(test_lib.PlistPluginTestCase):
    """Tests for the Spotlight Volume configuration plist plugin."""

    def setUp(self):
        """Creates the plugin and parser objects used by each test."""
        self._plugin = spotlight_volume.SpotlightVolumePlugin()
        self._parser = plist.PlistParser()

    def testProcess(self):
        """Tests the Process function."""
        plist_name = u'VolumeConfiguration.plist'
        event_queue_consumer = self._ParsePlistFileWithPlugin(
            self._parser, self._plugin, [plist_name], plist_name)
        event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)

        self.assertEqual(len(event_objects), 2)

        # Collect the timestamps with a comprehension; order is irrelevant
        # because the comparison below is set-based.
        timestamps = [event_object.timestamp for event_object in event_objects]
        expected_timestamps = frozenset([1372139683000000, 1369657656000000])
        self.assertTrue(set(timestamps) == expected_timestamps)

        event_object = event_objects[0]
        self.assertEqual(event_object.key, u'')
        self.assertEqual(event_object.root, u'/Stores')

        expected_desc = (u'Spotlight Volume 4D4BFEB5-7FE6-4033-AAAA-'
                         u'AAAABBBBCCCCDDDD (/.MobileBackups) activated.')
        self.assertEqual(event_object.desc, expected_desc)

        expected_string = u'/Stores/ {0:s}'.format(expected_desc)
        expected_short = expected_string[:77] + u'...'
        self._TestGetMessageStrings(
            event_object, expected_string, expected_short)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
9,609 | 98f234ca0cbec419466de0504fd8d5c68fd07627 | from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QLineEdit, QRadioButton, QPushButton, QTableWidgetItem, QTableWidget, QApplication, QMainWindow, QDateEdit, QLabel, QDialog, QTextEdit, QCheckBox
from PyQt5.QtCore import QDate, QTime, QDateTime, Qt
from OOPCourseWorkTwo.GUI.SingleAnswerQuestionDialog import Ui_SingleAnswerQuestionDialog
from OOPCourseWorkTwo.GUI.MultipleAnswersQuestionDialog import Ui_MultipleAnswersQuestionDialog
from OOPCourseWorkTwo.GUI.EssayQuestionDialog import Ui_EssayQuestionDialog
from OOPCourseWorkTwo.GUI.MarkingEssayQuestionDialog import Ui_MarkingEssayQuestionDialog
class TeacherGUI():
    def __init__(self):
        # All state lives at class level; instances carry nothing.
        pass
    @classmethod
    def setup(cls, ui_mainwindow):
        # Store the generated main-window UI object used by every method.
        cls.__ui_mainwindow = ui_mainwindow
@classmethod
def display_all_active_school_classes(cls, school_classes):
cls.__ui_mainwindow.tableWidget_14.clear()
row = 0
col = 0
for (school_class_id, ) in school_classes:
school_class_text = "Class " + str(school_class_id)
school_class_item = QTableWidgetItem(school_class_text)
cls.__ui_mainwindow.tableWidget_14.setItem(row, col, school_class_item)
if (col >= 4):
col = 0
row += 1
else:
col += 1
@classmethod
def display_all_exams(cls, all_exams):
cls.__ui_mainwindow.tableWidget_5.clear()
row = 0
col = 0
for (exam_id, ) in all_exams:
exam_text = "Exam " + str(exam_id)
exam_item = QTableWidgetItem(exam_text)
cls.__ui_mainwindow.tableWidget_5.setItem(row, col, exam_item)
if (col >= 9):
col = 0
row += 1
else:
col += 1
@classmethod
def display_not_completed_exams(cls, not_completed_exams):
cls.__ui_mainwindow.tableWidget_16.clear()
row = 0
col = 0
for (exam_id, ) in not_completed_exams:
exam_text = "Exam " + str(exam_id)
exam_item = QTableWidgetItem(exam_text)
cls.__ui_mainwindow.tableWidget_16.setItem(row, col, exam_item)
if (col >= 6):
col = 0
row += 1
else:
col += 1
@classmethod
def display_ready_to_be_marked_exams(cls, ready_to_be_marked_exams):
cls.__ui_mainwindow.tableWidget_17.clear()
row = 0
col = 0
for (exam_id, ) in ready_to_be_marked_exams:
exam_text = "Exam " + str(exam_id)
exam_item = QTableWidgetItem(exam_text)
cls.__ui_mainwindow.tableWidget_17.setItem(row, col, exam_item)
if (col >= 3):
col = 0
row += 1
else:
col += 1
@classmethod
def display_single_answer_question_dialog_preview(cls):
question_body = cls.__ui_mainwindow.textEdit.toPlainText()
option_A_text = cls.__ui_mainwindow.textEdit_2.toPlainText()
option_B_text = cls.__ui_mainwindow.textEdit_3.toPlainText()
option_C_text = cls.__ui_mainwindow.textEdit_6.toPlainText()
option_D_text = cls.__ui_mainwindow.textEdit_4.toPlainText()
option_E_text = cls.__ui_mainwindow.textEdit_5.toPlainText()
cls.__dialog = QtWidgets.QDialog()
cls.__ui_dialog = Ui_SingleAnswerQuestionDialog()
cls.__ui_dialog.setupUi(cls.__dialog)
cls.__ui_dialog.label.setText(question_body)
cls.__ui_dialog.label_3.setText("A " + option_A_text)
cls.__ui_dialog.label_4.setText("B " + option_B_text)
cls.__ui_dialog.label_5.setText("C " + option_C_text)
cls.__ui_dialog.label_6.setText("D " + option_D_text)
cls.__ui_dialog.label_7.setText("E " + option_E_text)
cls.__dialog.show()
cls.__ui_dialog.pushButton.clicked.connect(cls.close_dialog)
@classmethod
def display_multiple_answers_question_dialog_preview(cls):
question_body = cls.__ui_mainwindow.textEdit_14.toPlainText()
option_A_text = cls.__ui_mainwindow.textEdit_13.toPlainText()
option_B_text = cls.__ui_mainwindow.textEdit_15.toPlainText()
option_C_text = cls.__ui_mainwindow.textEdit_16.toPlainText()
option_D_text = cls.__ui_mainwindow.textEdit_17.toPlainText()
option_E_text = cls.__ui_mainwindow.textEdit_18.toPlainText()
cls.__dialog = QtWidgets.QDialog()
cls.__ui_dialog = Ui_MultipleAnswersQuestionDialog()
cls.__ui_dialog.setupUi(cls.__dialog)
cls.__ui_dialog.label.setText(question_body)
cls.__ui_dialog.label_3.setText(option_A_text)
cls.__ui_dialog.label_4.setText(option_B_text)
cls.__ui_dialog.label_5.setText(option_C_text)
cls.__ui_dialog.label_6.setText(option_D_text)
cls.__ui_dialog.label_7.setText(option_E_text)
cls.__dialog.show()
cls.__ui_dialog.pushButton.clicked.connect(cls.close_dialog)
@classmethod
def display_essay_question_dialog_preview(cls):
question_body = cls.__ui_mainwindow.textEdit_19.toPlainText()
cls.__dialog = QtWidgets.QDialog()
cls.__ui_dialog = Ui_EssayQuestionDialog()
cls.__ui_dialog.setupUi(cls.__dialog)
if (question_body == ""):
cls.__ui_dialog.label.setText("Question Body")
else:
cls.__ui_dialog.label.setText(question_body)
cls.__dialog.show()
cls.__ui_dialog.pushButton.clicked.connect(cls.close_dialog)
    @classmethod
    def close_dialog(cls):
        # Close whichever preview dialog is currently open.
        cls.__dialog.close()
@classmethod
def get_single_answer_question_details(cls):
question_body = cls.__ui_mainwindow.textEdit.toPlainText()
if (question_body == ""):
return None
option_A_text = cls.__ui_mainwindow.textEdit_2.toPlainText()
if (option_A_text == ""):
return None
option_B_text = cls.__ui_mainwindow.textEdit_3.toPlainText()
if (option_B_text == ""):
return None
option_C_text = cls.__ui_mainwindow.textEdit_6.toPlainText()
if (option_C_text == ""):
return None
option_D_text = cls.__ui_mainwindow.textEdit_4.toPlainText()
if (option_D_text == ""):
return None
option_E_text = cls.__ui_mainwindow.textEdit_5.toPlainText()
if (option_E_text == ""):
return None
year_level_text = cls.__ui_mainwindow.lineEdit_3.text()
if (year_level_text == ""):
return None
try:
year_level = int(year_level_text)
except:
return None
phrase_tag_text = cls.__ui_mainwindow.lineEdit_4.text()
if (phrase_tag_text == ""):
return None
correct_answers_list = []
if (cls.__ui_mainwindow.radioButton.isChecked()):
correct_answers_list.append("A")
if (cls.__ui_mainwindow.radioButton_2.isChecked()):
correct_answers_list.append("B")
if (cls.__ui_mainwindow.radioButton_5.isChecked()):
correct_answers_list.append("C")
if (cls.__ui_mainwindow.radioButton_3.isChecked()):
correct_answers_list.append("D")
if (cls.__ui_mainwindow.radioButton_4.isChecked()):
correct_answers_list.append("E")
if (correct_answers_list == []):
return None
if (len(correct_answers_list) > 1):
return None
return (question_body, option_A_text, option_B_text, option_C_text, option_D_text, option_E_text, year_level, phrase_tag_text, correct_answers_list)
@classmethod
def get_multiple_answers_question_details(cls):
question_body = cls.__ui_mainwindow.textEdit_14.toPlainText()
if (question_body == ""):
return None
option_A_text = cls.__ui_mainwindow.textEdit_13.toPlainText()
if (option_A_text == ""):
return None
option_B_text = cls.__ui_mainwindow.textEdit_15.toPlainText()
if (option_B_text == ""):
return None
option_C_text = cls.__ui_mainwindow.textEdit_16.toPlainText()
if (option_C_text == ""):
return None
option_D_text = cls.__ui_mainwindow.textEdit_17.toPlainText()
if (option_D_text == ""):
return None
option_E_text = cls.__ui_mainwindow.textEdit_18.toPlainText()
if (option_E_text == ""):
return None
year_level_text = cls.__ui_mainwindow.lineEdit_25.text()
if (year_level_text == ""):
return None
try:
year_level = int(year_level_text)
except:
return None
phrase_tag_text = cls.__ui_mainwindow.lineEdit_7.text()
if (phrase_tag_text == ""):
return None
correct_answers_list = []
if (cls.__ui_mainwindow.checkBox.isChecked()):
correct_answers_list.append("A")
if (cls.__ui_mainwindow.checkBox_2.isChecked()):
correct_answers_list.append("B")
if (cls.__ui_mainwindow.checkBox_3.isChecked()):
correct_answers_list.append("C")
if (cls.__ui_mainwindow.checkBox_4.isChecked()):
correct_answers_list.append("D")
if (cls.__ui_mainwindow.checkBox_5.isChecked()):
correct_answers_list.append("E")
if (correct_answers_list == []):
return None
if (len(correct_answers_list) > 4):
return None
return (question_body, option_A_text, option_B_text, option_C_text, option_D_text, option_E_text, year_level, phrase_tag_text, correct_answers_list)
@classmethod
def get_essay_question_details(cls):
question_body = cls.__ui_mainwindow.textEdit_19.toPlainText()
if (question_body == ""):
return None
year_level_text = cls.__ui_mainwindow.lineEdit_26.text()
if (year_level_text == ""):
return None
try:
year_level = int(year_level_text)
except:
return None
phrase_tag_text = cls.__ui_mainwindow.lineEdit_27.text()
if (phrase_tag_text == ""):
return None
return (question_body, year_level, phrase_tag_text)
@classmethod
def display_all_active_questions(cls, active_questions_tuple):
row = 0
col = 0
for question_pk_tuple in active_questions_tuple:
question_pk = question_pk_tuple[0]
question_text = "Question " + str(question_pk)
question_item = QTableWidgetItem(question_text)
cls.__ui_mainwindow.tableWidget.setItem(row, col, question_item)
if (col >= 7):
col = 0
row += 1
else:
col += 1
@classmethod
def display_create_single_answer_question_success(cls):
cls.__ui_mainwindow.label_4.setText("Create Single Answer Question Success")
@classmethod
def display_invalid_single_answer_question_creation_message(cls):
cls.__ui_mainwindow.label_4.setText("Invalid Single Answer Question Creation")
@classmethod
def display_create_multiple_answers_question_success(cls):
cls.__ui_mainwindow.label_11.setText("Create Multiple Answers Question Success")
@classmethod
def display_invalid_multiple_answers_question_creation_message(cls):
cls.__ui_mainwindow.label_11.setText("Invalid Multiple Answers Question Creation")
@classmethod
def display_invalid_essay_question_creation_message(cls):
cls.__ui_mainwindow.label_42.setText("Invalid Essay Question Creation")
@classmethod
def display_create_essay_question_success(cls):
cls.__ui_mainwindow.label_42.setText("Create Essay Question Success")
@classmethod
def display_invalid_modification_message(cls):
cls.__ui_mainwindow.label_57.setText("Invalid Modification")
@classmethod
def refresh_create_single_answer_question_page(cls):
    """Blank every field and untick every option on the create-single-answer page."""
    cls.__ui_mainwindow.textEdit.clear()
    cls.__ui_mainwindow.textEdit_2.clear()
    cls.__ui_mainwindow.textEdit_3.clear()
    cls.__ui_mainwindow.textEdit_4.clear()
    cls.__ui_mainwindow.textEdit_5.clear()
    cls.__ui_mainwindow.textEdit_6.clear()
    cls.__ui_mainwindow.lineEdit_3.clear()
    cls.__ui_mainwindow.lineEdit_4.clear()
    cls.__ui_mainwindow.radioButton.setChecked(False)
    cls.__ui_mainwindow.radioButton_2.setChecked(False)
    cls.__ui_mainwindow.radioButton_3.setChecked(False)
    cls.__ui_mainwindow.radioButton_4.setChecked(False)
    cls.__ui_mainwindow.radioButton_5.setChecked(False)
@classmethod
def refresh_create_multiple_answers_question_page(cls):
    """Blank every field and untick every checkbox on the create-multiple-answers page."""
    cls.__ui_mainwindow.textEdit_14.clear()
    cls.__ui_mainwindow.textEdit_13.clear()
    cls.__ui_mainwindow.textEdit_15.clear()
    cls.__ui_mainwindow.textEdit_16.clear()
    cls.__ui_mainwindow.textEdit_17.clear()
    cls.__ui_mainwindow.textEdit_18.clear()
    cls.__ui_mainwindow.lineEdit_25.clear()
    cls.__ui_mainwindow.lineEdit_7.clear()
    cls.__ui_mainwindow.checkBox.setChecked(False)
    cls.__ui_mainwindow.checkBox_2.setChecked(False)
    cls.__ui_mainwindow.checkBox_3.setChecked(False)
    cls.__ui_mainwindow.checkBox_4.setChecked(False)
    cls.__ui_mainwindow.checkBox_5.setChecked(False)
@classmethod
def refresh_view_or_modify_question_page(cls):
    """Reset the view/modify page: clear fields, re-enable option widgets, untick radios."""
    cls.__ui_mainwindow.lineEdit_5.clear()
    cls.__ui_mainwindow.label_45.setText("Question ID: ")
    cls.__ui_mainwindow.label_47.setText("Question Type: ")
    cls.__ui_mainwindow.label_57.clear()
    cls.__ui_mainwindow.label_12.clear()
    cls.__ui_mainwindow.textEdit_7.clear()
    cls.__ui_mainwindow.textEdit_8.clear()
    cls.__ui_mainwindow.textEdit_9.clear()
    cls.__ui_mainwindow.textEdit_10.clear()
    cls.__ui_mainwindow.textEdit_11.clear()
    cls.__ui_mainwindow.textEdit_20.clear()
    cls.__ui_mainwindow.lineEdit_6.clear()
    cls.__ui_mainwindow.lineEdit_8.clear()
    cls.__ui_mainwindow.lineEdit_28.clear()
    # Re-enable the option widgets; load_essay_question_details disables them.
    cls.__ui_mainwindow.radioButton_6.setDisabled(False)
    cls.__ui_mainwindow.radioButton_7.setDisabled(False)
    cls.__ui_mainwindow.radioButton_8.setDisabled(False)
    cls.__ui_mainwindow.radioButton_9.setDisabled(False)
    cls.__ui_mainwindow.radioButton_10.setDisabled(False)
    cls.__ui_mainwindow.textEdit_8.setDisabled(False)
    cls.__ui_mainwindow.textEdit_9.setDisabled(False)
    cls.__ui_mainwindow.textEdit_10.setDisabled(False)
    cls.__ui_mainwindow.textEdit_11.setDisabled(False)
    cls.__ui_mainwindow.textEdit_20.setDisabled(False)
    # Auto-exclusivity must be off while unticking, otherwise Qt keeps one
    # radio button in the group checked.
    cls.__ui_mainwindow.radioButton_6.setAutoExclusive(False)
    cls.__ui_mainwindow.radioButton_6.setChecked(False)
    cls.__ui_mainwindow.radioButton_7.setAutoExclusive(False)
    cls.__ui_mainwindow.radioButton_7.setChecked(False)
    cls.__ui_mainwindow.radioButton_8.setAutoExclusive(False)
    cls.__ui_mainwindow.radioButton_8.setChecked(False)
    cls.__ui_mainwindow.radioButton_9.setAutoExclusive(False)
    cls.__ui_mainwindow.radioButton_9.setChecked(False)
    cls.__ui_mainwindow.radioButton_10.setAutoExclusive(False)
    cls.__ui_mainwindow.radioButton_10.setChecked(False)
@classmethod
def refresh_create_essay_question_page(cls):
    """Blank every field on the create-essay-question page."""
    cls.__ui_mainwindow.textEdit_19.clear()
    cls.__ui_mainwindow.lineEdit_26.clear()
    cls.__ui_mainwindow.lineEdit_27.clear()
@classmethod
def refresh_create_exam_page(cls):
    """Blank the question/class tables and id fields on the create-exam page."""
    cls.__ui_mainwindow.tableWidget_3.clear()
    cls.__ui_mainwindow.tableWidget_4.clear()
    cls.__ui_mainwindow.lineEdit_10.clear()
    cls.__ui_mainwindow.lineEdit_11.clear()
    cls.__ui_mainwindow.lineEdit_12.clear()
    cls.__ui_mainwindow.lineEdit_13.clear()
@classmethod
def get_question_id_to_load(cls):
    """Return the question id typed on the load-question field, or None if not an int."""
    question_id_text = cls.__ui_mainwindow.lineEdit_5.text()
    try:
        return int(question_id_text)
    except ValueError:  # narrowed from a bare except: only bad user input is expected
        return None
@classmethod
def load_single_answer_question_details(cls, question_details):
    """Populate the view/modify page with a single-answer question's fields.

    question_details is the 12-field record (id, type, points, year level,
    tag, body, options A-E, correct answer letter).
    """
    (question_id, question_type, points, year_level, question_tag,
     question_body, option_A_text, option_B_text, option_C_text,
     option_D_text, option_E_text, correct_answer) = question_details
    ui = cls.__ui_mainwindow
    ui.label_45.setText("Question ID: " + str(question_id))
    ui.label_47.setText("Question Type: " + str(question_type))
    ui.textEdit_7.setText(question_body)
    ui.textEdit_8.setText(option_A_text)
    ui.textEdit_9.setText(option_B_text)
    ui.textEdit_10.setText(option_C_text)
    ui.textEdit_11.setText(option_D_text)
    ui.textEdit_20.setText(option_E_text)
    ui.lineEdit_6.setText(str(year_level))
    ui.lineEdit_8.setText(question_tag)
    ui.lineEdit_28.setText(str(points))
    # Tick the radio button that corresponds to the stored correct answer.
    button_for_answer = {
        "A": ui.radioButton_6,
        "B": ui.radioButton_7,
        "C": ui.radioButton_8,
        "D": ui.radioButton_9,
        "E": ui.radioButton_10,
    }
    button = button_for_answer.get(correct_answer)
    if button is not None:
        button.setChecked(True)
@classmethod
def load_multiple_answers_question_details(cls, question_details):
    """Populate the view/modify page with a multiple-answers question's fields.

    question_details is the 12-field record (id, type, points, year level,
    tag, body, options A-E, correct answers string such as "ACD").
    """
    (question_id, question_type, points, year_level, question_tag,
     question_body, option_A_text, option_B_text, option_C_text,
     option_D_text, option_E_text, correct_answers) = question_details
    ui = cls.__ui_mainwindow
    ui.label_45.setText("Question ID: " + str(question_id))
    ui.label_47.setText("Question Type: " + str(question_type))
    ui.textEdit_7.setText(question_body)
    ui.textEdit_8.setText(option_A_text)
    ui.textEdit_9.setText(option_B_text)
    ui.textEdit_10.setText(option_C_text)
    ui.textEdit_11.setText(option_D_text)
    ui.textEdit_20.setText(option_E_text)
    ui.lineEdit_6.setText(str(year_level))
    ui.lineEdit_8.setText(question_tag)
    ui.lineEdit_28.setText(str(points))
    # Tick every option whose letter appears exactly once in correct_answers
    # (preserves the original count(...) == 1 semantics).
    for letter, button in (("A", ui.radioButton_6), ("B", ui.radioButton_7),
                           ("C", ui.radioButton_8), ("D", ui.radioButton_9),
                           ("E", ui.radioButton_10)):
        if correct_answers.count(letter) == 1:
            button.setChecked(True)
@classmethod
def load_essay_question_details(cls, question_details):
    """Populate the view/modify page for an essay question.

    Essay questions have no options, so every option widget is disabled.
    question_details is (id, type, points, year level, tag, body).
    """
    (question_id, question_type, points,
     year_level, question_tag, question_body) = question_details
    ui = cls.__ui_mainwindow
    ui.label_45.setText("Question ID: " + str(question_id))
    ui.label_47.setText("Question Type: " + str(question_type))
    ui.textEdit_7.setText(question_body)
    # Grey out every answer-option control; essays are free-text only.
    for option_widget in (ui.radioButton_6, ui.radioButton_7, ui.radioButton_8,
                          ui.radioButton_9, ui.radioButton_10,
                          ui.textEdit_8, ui.textEdit_9, ui.textEdit_10,
                          ui.textEdit_11, ui.textEdit_20):
        option_widget.setDisabled(True)
    ui.lineEdit_6.setText(str(year_level))
    ui.lineEdit_8.setText(question_tag)
    ui.lineEdit_28.setText(str(points))
@classmethod
def display_question_id_invalid_to_load_message(cls):
    """Warn that the id typed on the load-question field was invalid."""
    cls.__ui_mainwindow.label_12.setText("Invalid Question ID To Load")
@classmethod
def display_modification_success_message(cls):
    """Confirm the question modification on the view/modify page."""
    cls.__ui_mainwindow.label_57.setText("Modification Success")
@classmethod
def display_invalid_school_class_id_message(cls):
    """Warn that the school class id was invalid and blank the students table."""
    cls.__ui_mainwindow.label_14.setText("Invalid School Class ID")
    cls.__ui_mainwindow.tableWidget_15.clear()
@classmethod
def get_question_type_to_modify(cls):
    """Map the 'Question Type: ...' label back to the bare type name.

    Returns None for any unrecognised label, matching the original
    if/elif chain's implicit fall-through.
    """
    type_for_label = {
        "Question Type: Single Answer": "Single Answer",
        "Question Type: Multiple Answers": "Multiple Answers",
        "Question Type: Essay": "Essay",
    }
    return type_for_label.get(cls.__ui_mainwindow.label_47.text())
@classmethod
def get_single_answer_question_details_to_modify(cls):
    """Gather the edited single-answer question fields from the modify page.

    Returns a 12-tuple (pk, type, points, year level, tag, body, options A-E,
    correct answer letter), or None when points/year level are not integers
    or no single correct answer is ticked.
    """
    question_pk = cls.get_question_id_to_modify()
    question_type = cls.get_question_type_to_modify()
    try:
        # Guard the conversions like get_essay_question_details_to_modify does;
        # previously non-numeric input raised an unhandled ValueError.
        points = int(cls.__ui_mainwindow.lineEdit_28.text())
        year_level = int(cls.__ui_mainwindow.lineEdit_6.text())
    except ValueError:
        return None
    question_tag = cls.__ui_mainwindow.lineEdit_8.text()
    question_body = cls.__ui_mainwindow.textEdit_7.toPlainText()
    option_A_text = cls.__ui_mainwindow.textEdit_8.toPlainText()
    option_B_text = cls.__ui_mainwindow.textEdit_9.toPlainText()
    option_C_text = cls.__ui_mainwindow.textEdit_10.toPlainText()
    option_D_text = cls.__ui_mainwindow.textEdit_11.toPlainText()
    option_E_text = cls.__ui_mainwindow.textEdit_20.toPlainText()
    correct_answer = cls.get_single_correct_answer_to_modify()
    if (correct_answer == None):
        return None
    return (question_pk, question_type, points, year_level, question_tag,question_body, option_A_text, option_B_text, option_C_text, option_D_text, option_E_text, correct_answer)
@classmethod
def get_multiple_answers_question_details_to_modify(cls):
    """Gather the edited multiple-answers question fields from the modify page.

    Returns a 12-tuple (pk, type, points, year level, tag, body, options A-E,
    correct answers string), or None when points/year level are not integers
    or the ticked answers are invalid.
    """
    question_pk = cls.get_question_id_to_modify()
    question_type = cls.get_question_type_to_modify()
    try:
        # Guard the conversions like get_essay_question_details_to_modify does;
        # previously non-numeric input raised an unhandled ValueError.
        points = int(cls.__ui_mainwindow.lineEdit_28.text())
        year_level = int(cls.__ui_mainwindow.lineEdit_6.text())
    except ValueError:
        return None
    question_tag = cls.__ui_mainwindow.lineEdit_8.text()
    question_body = cls.__ui_mainwindow.textEdit_7.toPlainText()
    option_A_text = cls.__ui_mainwindow.textEdit_8.toPlainText()
    option_B_text = cls.__ui_mainwindow.textEdit_9.toPlainText()
    option_C_text = cls.__ui_mainwindow.textEdit_10.toPlainText()
    option_D_text = cls.__ui_mainwindow.textEdit_11.toPlainText()
    option_E_text = cls.__ui_mainwindow.textEdit_20.toPlainText()
    correct_answers = cls.get_multiple_correct_answers_to_modify()
    if (correct_answers == None):
        return None
    return (question_pk, question_type, points, year_level, question_tag,question_body, option_A_text, option_B_text, option_C_text, option_D_text, option_E_text, correct_answers)
@classmethod
def get_essay_question_details_to_modify(cls):
    """Gather the edited essay question fields from the modify page.

    Returns (pk, type, points, year level, tag, body), or None when points or
    year level are not integers, or tag/body are empty.
    """
    question_pk = cls.get_question_id_to_modify()
    question_type = cls.get_question_type_to_modify()
    try:
        points = int(cls.__ui_mainwindow.lineEdit_28.text())
    except ValueError:  # narrowed from a bare except: only bad user input is expected
        return None
    try:
        year_level = int(cls.__ui_mainwindow.lineEdit_6.text())
    except ValueError:  # narrowed from a bare except
        return None
    question_tag = cls.__ui_mainwindow.lineEdit_8.text()
    if (question_tag == ""):
        return None
    question_body = cls.__ui_mainwindow.textEdit_7.toPlainText()
    if (question_body == ""):
        return None
    return (question_pk, question_type, points, year_level, question_tag, question_body)
@classmethod
def get_question_id_to_modify(cls):
    """Parse the integer id out of the 'Question ID: <n>' label."""
    return int(cls.__ui_mainwindow.label_45.text().split()[-1])
@classmethod
def get_single_correct_answer_to_modify(cls):
    """Return the single ticked option letter, or None unless exactly one is ticked."""
    ui = cls.__ui_mainwindow
    ticked = [letter for letter, button in (("A", ui.radioButton_6),
                                            ("B", ui.radioButton_7),
                                            ("C", ui.radioButton_8),
                                            ("D", ui.radioButton_9),
                                            ("E", ui.radioButton_10))
              if button.isChecked()]
    if len(ticked) != 1:
        return None
    return ticked[0]
@classmethod
def get_multiple_correct_answers_to_modify(cls):
    """Return the ticked option letters concatenated in A..E order (e.g. "ACD").

    None when no option is ticked or all five are (the original allowed at
    most four correct answers).
    """
    ui = cls.__ui_mainwindow
    ticked = "".join(letter for letter, button in (("A", ui.radioButton_6),
                                                   ("B", ui.radioButton_7),
                                                   ("C", ui.radioButton_8),
                                                   ("D", ui.radioButton_9),
                                                   ("E", ui.radioButton_10))
                     if button.isChecked())
    if not 1 <= len(ticked) <= 4:
        return None
    return ticked
@classmethod
def get_school_class_id_to_view_students(cls):
    """School class id typed on the view-students page, or None when not an int."""
    school_class_id_text = cls.__ui_mainwindow.lineEdit_9.text()
    try:
        return int(school_class_id_text)
    except ValueError:  # narrowed from a bare except: only bad user input is expected
        return None
@classmethod
def display_school_class_details(cls, school_class_details):
    """List the class's student names in the details table, two per row."""
    cls.__ui_mainwindow.tableWidget_15.clear()
    for index, (student,) in enumerate(school_class_details):
        table_row, table_col = divmod(index, 2)
        cls.__ui_mainwindow.tableWidget_15.setItem(table_row, table_col,
                                                   QTableWidgetItem(student))
@classmethod
def refresh_view_school_class_details_page(cls):
    """Clear the error label on the view-school-class-details page."""
    cls.__ui_mainwindow.label_14.clear()
@classmethod
def get_number_of_questions_in_current_exam(cls):
    """Count filled rows in column 0 of the current-exam questions table (max 10)."""
    table = cls.__ui_mainwindow.tableWidget_3
    return sum(1 for row in range(10) if table.item(row, 0) is not None)
@classmethod
def get_number_of_school_classes_in_current_exam(cls):
    """Count filled rows in column 0 of the current-exam classes table (max 5)."""
    table = cls.__ui_mainwindow.tableWidget_4
    return sum(1 for row in range(5) if table.item(row, 0) is not None)
@classmethod
def display_number_of_questions_full_in_current_exam_message(cls):
    """Warn that the exam already holds the maximum number of questions."""
    cls.__ui_mainwindow.label_17.setText("Questions Are Full In Current Exam")
@classmethod
def display_number_of_school_classes_full_in_current_exam_message(cls):
    """Warn that the exam already holds the maximum number of classes."""
    cls.__ui_mainwindow.label_17.setText("School Classes Are Full In Current Exam")
@classmethod
def display_no_question_in_current_exam_message(cls):
    """Warn that the exam has no questions yet."""
    cls.__ui_mainwindow.label_17.setText("No Question In Current Exam")
@classmethod
def display_no_school_class_in_current_exam_message(cls):
    """Warn that the exam has no school classes yet."""
    cls.__ui_mainwindow.label_17.setText("No School Class In Current Exam")
@classmethod
def display_question_id_already_added_to_current_exam_message(cls):
    """Warn about adding a duplicate question id."""
    cls.__ui_mainwindow.label_17.setText("Question ID Already Added To Current Exam")
@classmethod
def display_school_class_id_already_added_to_current_exam_message(cls):
    """Warn about adding a duplicate school class id."""
    cls.__ui_mainwindow.label_17.setText("School Class ID Already Added To Current Exam")
@classmethod
def display_question_id_invalid_message(cls):
    """Warn that the question id typed on the create-exam page is invalid."""
    cls.__ui_mainwindow.label_17.setText("Question ID Invalid")
@classmethod
def display_school_class_id_invalid_message(cls):
    """Warn that the school class id typed on the create-exam page is invalid."""
    # Fixed user-visible typo: "School CLass" -> "School Class".
    cls.__ui_mainwindow.label_17.setText("School Class ID Invalid")
@classmethod
def display_question_id_not_already_in_current_exam_message(cls):
    """Warn that the question id to remove is not part of the exam."""
    # Fixed user-visible typo: "Aleady" -> "Already".
    cls.__ui_mainwindow.label_17.setText("Question ID Not Already In Current Exam")
@classmethod
def display_school_class_id_not_already_in_current_exam_message(cls):
    """Warn that the school class id to remove is not part of the exam."""
    # Fixed user-visible typo: "Aleady" -> "Already".
    cls.__ui_mainwindow.label_17.setText("School Class ID Not Already In Current Exam")
@classmethod
def display_create_exam_success_message(cls):
    """Confirm exam creation on the create-exam page."""
    cls.__ui_mainwindow.label_17.setText("Create Exam Success")
@classmethod
def refresh_mark_exam_drop_box(cls):
    """Empty the drag-and-drop box used to pick an exam for marking."""
    cls.__ui_mainwindow.tableWidget_19.clear()
@classmethod
def get_question_id_to_add_to_exam(cls):
    """Question id typed into the add-question field, or None when not an int."""
    try:
        return int(cls.__ui_mainwindow.lineEdit_10.text())
    except ValueError:  # narrowed from a bare except: only bad user input is expected
        return None
@classmethod
def get_school_class_id_to_add_to_exam(cls):
    """School class id typed into the add-class field, or None when not an int."""
    try:
        return int(cls.__ui_mainwindow.lineEdit_11.text())
    except ValueError:  # narrowed from a bare except
        return None
@classmethod
def get_question_id_to_remove_from_exam(cls):
    """Question id typed into the remove-question field, or None when not an int."""
    try:
        return int(cls.__ui_mainwindow.lineEdit_12.text())
    except ValueError:  # narrowed from a bare except
        return None
@classmethod
def get_school_class_id_to_remove_from_exam(cls):
    """School class id typed into the remove-class field, or None when not an int."""
    try:
        return int(cls.__ui_mainwindow.lineEdit_13.text())
    except ValueError:  # narrowed from a bare except
        return None
@classmethod
def add_question_id_to_current_exam(cls, question_id):
    """Put the question id into the first empty row of the exam's question table.

    Silently does nothing when all 10 rows are occupied (callers are expected
    to check with get_number_of_questions_in_current_exam first).
    """
    table = cls.__ui_mainwindow.tableWidget_3
    for row in range(10):
        if table.item(row, 0) is None:
            table.setItem(row, 0, QTableWidgetItem("Question " + str(question_id)))
            cls.__ui_mainwindow.lineEdit_10.clear()
            cls.__ui_mainwindow.label_17.clear()
            return
@classmethod
def add_school_class_id_to_current_exam(cls, school_class_id):
    """Put the school class id into the first empty row of the exam's class table.

    Silently does nothing when all rows are occupied.
    """
    table = cls.__ui_mainwindow.tableWidget_4
    for row in range(10):
        if table.item(row, 0) is None:
            # Fixed user-visible typo "CLass" -> "Class".  The removal/lookup
            # helpers only parse the trailing id token, so this is safe.
            table.setItem(row, 0, QTableWidgetItem("Class " + str(school_class_id)))
            cls.__ui_mainwindow.lineEdit_11.clear()
            cls.__ui_mainwindow.label_17.clear()
            return
@classmethod
def remove_question_id_from_current_exam(cls, question_id):
    """Remove the first row whose trailing id token matches question_id."""
    table = cls.__ui_mainwindow.tableWidget_3
    for row in range(10):
        item = table.item(row, 0)
        if item is None:
            continue
        if int(item.text().split(" ")[-1]) == question_id:
            table.takeItem(row, 0)
            cls.__ui_mainwindow.lineEdit_12.clear()
            cls.__ui_mainwindow.label_17.clear()
            return
@classmethod
def remove_school_class_id_from_current_exam(cls, school_class_id):
    """Remove the first row whose trailing id token matches school_class_id."""
    table = cls.__ui_mainwindow.tableWidget_4
    for row in range(5):
        item = table.item(row, 0)
        if item is None:
            continue
        if int(item.text().split(" ")[-1]) == school_class_id:
            table.takeItem(row, 0)
            cls.__ui_mainwindow.lineEdit_13.clear()
            cls.__ui_mainwindow.label_17.clear()
            return
@classmethod
def is_question_id_already_added_to_current_exam(cls, question_id):
    """True when the id occurs exactly once among the exam's question ids."""
    ids = cls.get_string_of_question_ids_in_current_exam().split(" ")
    return ids.count(str(question_id)) == 1
@classmethod
def is_school_class_id_already_added_to_current_exam(cls, school_class_id):
    """True when the id occurs exactly once among the exam's school class ids."""
    ids = cls.get_string_of_school_classes_ids_in_current_exam().split(" ")
    return ids.count(str(school_class_id)) == 1
@classmethod
def get_string_of_question_ids_in_current_exam(cls):
    """Space-separated question ids currently listed in the exam table."""
    table = cls.__ui_mainwindow.tableWidget_3
    ids = [table.item(row, 0).text().split(" ")[-1]
           for row in range(10) if table.item(row, 0) is not None]
    return " ".join(ids)
@classmethod
def get_string_of_school_classes_ids_in_current_exam(cls):
    """Space-separated school class ids currently listed in the exam table."""
    table = cls.__ui_mainwindow.tableWidget_4
    # NOTE(review): scans 10 rows although the class table elsewhere is
    # treated as 5 rows — preserved as-is; confirm the table's real row count.
    ids = [table.item(row, 0).text().split(" ")[-1]
           for row in range(10) if table.item(row, 0) is not None]
    return " ".join(ids)
@classmethod
def get_exam_id_to_mark(cls):
    """Parse the exam id out of the dropped 'Exam <n>' item in the marking box."""
    exam_text = cls.__ui_mainwindow.tableWidget_20.item(0, 0).text()
    return int(exam_text.split(" ")[-1])
@classmethod
def display_exam_id_on_marking_exam_page(cls, exam_id):
    """Show which exam is currently being marked."""
    cls.__ui_mainwindow.label_49.setText(f"Exam ID: {exam_id}")
@classmethod
def display_students_full_names_with_questions_ready_to_be_marked(cls, students_names_list):
    """Show students whose answers await marking, five per row."""
    cls.__ui_mainwindow.tableWidget_6.clear()
    for index, student_name in enumerate(students_names_list):
        table_row, table_col = divmod(index, 5)
        cls.__ui_mainwindow.tableWidget_6.setItem(table_row, table_col,
                                                  QTableWidgetItem(student_name))
@classmethod
def get_student_name_to_mark_answers(cls):
    """Name of the student dropped into the marking box."""
    return cls.__ui_mainwindow.tableWidget_19.item(0, 0).text()
@classmethod
def get_exam_id_to_mark_student_answers(cls):
    """Parse the exam id from the 'Exam ID: <n>' marking label."""
    return int(cls.__ui_mainwindow.label_49.text().split(" ")[-1])
@classmethod
def display_exam_id_on_mark_student_answers_page(cls, exam_id):
    """Show the exam id on the mark-student-answers page."""
    cls.__ui_mainwindow.label_62.setText(f"Exam ID: {exam_id}")
@classmethod
def display_student_id_on_mark_student_answers_page(cls, student_id):
    """Show the student id on the mark-student-answers page."""
    cls.__ui_mainwindow.label_63.setText(f"Student ID: {student_id}")
@classmethod
def display_student_name_on_mark_student_answers_page(cls, student_name):
    """Show the student name on the mark-student-answers page."""
    cls.__ui_mainwindow.label_50.setText(f"Student Name: {student_name}")
@classmethod
def display_questions_ready_to_be_marked(cls, questions_ids_tuple):
    """List the question ids awaiting marking, one per row."""
    cls.__ui_mainwindow.tableWidget_25.clear()
    for table_row, (question_id,) in enumerate(questions_ids_tuple):
        cls.__ui_mainwindow.tableWidget_25.setItem(
            table_row, 0, QTableWidgetItem(f"Question {question_id}"))
@classmethod
def get_question_id_to_mark(cls):
    """Id of the question dropped into the marking box, or None when empty."""
    item = cls.__ui_mainwindow.tableWidget_26.item(0, 0)
    if item is None:
        return None
    return int(item.text().split(" ")[-1])
@classmethod
def get_exam_id_on_marking_question_page(cls):
    """Parse the exam id from the marking page's 'Exam ID: <n>' label."""
    return int(cls.__ui_mainwindow.label_62.text().split(" ")[-1])
@classmethod
def get_student_id_on_marking_question_page(cls):
    """Parse the student id from the marking page's 'Student ID: <n>' label."""
    return int(cls.__ui_mainwindow.label_63.text().split(" ")[-1])
@classmethod
def setup_essay_question_ui_dialog_to_mark(cls, question_details):
    """Build and show the essay-marking dialog; return its UI object.

    question_details is (question_body, student_answer, available_points).
    The dialog and its UI are stored on the class so other handlers (e.g.
    get_essay_question_marked_points) can read the entered mark later.
    """
    question_body = question_details[0]
    student_answer = question_details[1]
    available_points = question_details[2]
    cls.__dialog = QtWidgets.QDialog()
    cls.__ui_dialog = Ui_MarkingEssayQuestionDialog()
    cls.__ui_dialog.setupUi(cls.__dialog)
    cls.__ui_dialog.label_2.setText(question_body)
    cls.__ui_dialog.label_3.setText(student_answer)
    cls.__ui_dialog.label_4.setText("Total Available Points: " + str(available_points))
    # close_dialog is assumed to be defined elsewhere on this class — TODO confirm.
    cls.__ui_dialog.pushButton.clicked.connect(cls.close_dialog)
    cls.__dialog.show()
    return cls.__ui_dialog
@classmethod
def get_essay_question_marked_points(cls):
    """Points the marker typed into the essay-marking dialog (must be numeric)."""
    return int(cls.__ui_dialog.lineEdit.text())
@classmethod
def refresh_drop_question_to_mark_box(cls):
    """Empty the drag-and-drop box used to pick a question for marking."""
    cls.__ui_mainwindow.tableWidget_26.clear()
@classmethod
def refresh_mark_student_questions_answers_page(cls):
    """Clear the exam/student identity labels on the mark-answers page."""
    cls.__ui_mainwindow.label_62.clear()
    cls.__ui_mainwindow.label_63.clear()
    cls.__ui_mainwindow.label_50.clear()
@classmethod
def display_no_more_questions_to_mark_message(cls):
    """Tell the marker that every question for this student is done."""
    cls.__ui_mainwindow.label_66.setText("No More Questions To Mark")
@classmethod
def display_marked_exams(cls, marked_exams_ids):
    """Show every fully marked exam in the marked-exams table, five per row."""
    cls.__ui_mainwindow.tableWidget_18.clear()
    for index, (exam_id,) in enumerate(marked_exams_ids):
        table_row, table_col = divmod(index, 5)
        cls.__ui_mainwindow.tableWidget_18.setItem(table_row, table_col,
                                                   QTableWidgetItem(f"Exam {exam_id}"))
@classmethod
def display_no_question_selected_to_mark_message(cls):
    """Warn that nothing was dropped into the question-marking box."""
    cls.__ui_mainwindow.label_66.setText("No Question Selected To Mark")
@classmethod
def refresh_drop_student_to_mark_questions_box(cls):
    """Empty the drag-and-drop box used to pick a student for marking."""
    cls.__ui_mainwindow.tableWidget_19.clear()
@classmethod
def get_exam_id_to_release_result(cls):
    """Id of the exam dropped into the release-result box, or None when empty."""
    item = cls.__ui_mainwindow.tableWidget_21.item(0, 0)
    if item is None:
        return None
    return int(item.text().split(" ")[-1])
@classmethod
def display_result_released_exams(cls, result_released_exams_ids):
    """Show every released exam result, ten per row."""
    cls.__ui_mainwindow.tableWidget_11.clear()
    for index, (exam_id,) in enumerate(result_released_exams_ids):
        table_row, table_col = divmod(index, 10)
        cls.__ui_mainwindow.tableWidget_11.setItem(
            table_row, table_col, QTableWidgetItem(f"Exam {exam_id} Result"))
@classmethod
def refresh_drop_exam_to_release_result_box(cls):
    """Empty the drag-and-drop box used to release an exam result."""
    cls.__ui_mainwindow.tableWidget_21.clear()
@classmethod
def display_exam_results(cls, exam_results_ids):
    """Show all available exam results, ten per row."""
    cls.__ui_mainwindow.tableWidget_11.clear()
    for index, (exam_result_id,) in enumerate(exam_results_ids):
        table_row, table_col = divmod(index, 10)
        cls.__ui_mainwindow.tableWidget_11.setItem(
            table_row, table_col, QTableWidgetItem(f"Exam {exam_result_id} Result"))
@classmethod
def get_exam_result_id_to_load_details(cls):
    """Exam-result id typed by the user, or None when not an integer.

    Previously an unhandled ValueError escaped on non-numeric input; returning
    None matches the other id getters on this page (e.g.
    get_school_class_id_to_view_exam_result).
    """
    try:
        return int(cls.__ui_mainwindow.lineEdit_22.text())
    except ValueError:
        return None
@classmethod
def display_school_classes_to_view_exam_result_details(cls, school_classes_ids):
    """List the classes (one per row) that belong to this exam result."""
    cls.__ui_mainwindow.tableWidget_12.clear()
    for table_row, class_id in enumerate(cls.make_string_to_list(school_classes_ids)):
        cls.__ui_mainwindow.tableWidget_12.setItem(
            table_row, 0, QTableWidgetItem(f"Class {class_id}"))
@classmethod
def display_exam_result_id_on_view_exam_result_details_page(cls, exam_result_id):
    """Show which exam result is currently being viewed."""
    cls.__ui_mainwindow.label_33.setText(f"Exam Result ID: {exam_result_id}")
@classmethod
def get_school_class_id_to_view_exam_result(cls):
    """School class id typed on the view-exam-result page, or None when not an int."""
    school_class_id_text = cls.__ui_mainwindow.lineEdit_23.text()
    try:
        return int(school_class_id_text)
    except ValueError:  # narrowed from a bare except: only bad user input is expected
        return None
@classmethod
def display_students_full_names_to_view_exam_result(cls, students_full_names):
    """List students (one per row) whose exam results can be viewed."""
    cls.__ui_mainwindow.tableWidget_13.clear()
    for table_row, (student_full_name,) in enumerate(students_full_names):
        cls.__ui_mainwindow.tableWidget_13.setItem(
            table_row, 0, QTableWidgetItem(student_full_name))
@classmethod
def get_student_full_name_to_view_exam_result(cls):
    """Name of the student dropped into the view-result box, or None when empty.

    Added a None guard: the original raised AttributeError when nothing had
    been dropped, unlike sibling drop-box getters (e.g.
    get_exam_id_to_release_result).
    """
    student_item = cls.__ui_mainwindow.tableWidget_22.item(0, 0)
    if student_item is None:
        return None
    return student_item.text()
@classmethod
def get_exam_result_id_on_view_exam_result_page(cls):
    """Parse the id out of the 'Exam Result ID: <n>' label; None when absent."""
    last_token = cls.__ui_mainwindow.label_33.text().split(" ")[-1]
    try:
        return int(last_token)
    except ValueError:  # narrowed from a bare except: label may not hold an id yet
        return None
@classmethod
def display_student_exam_result_details(cls, exam_result_details):
    """Fill the per-student result panel from an 8-field result record.

    exam_result_details is (student id, full name, date of birth, class id,
    exam id, total available points, total points gained, average % mark).
    """
    (student_id, student_full_name, date_of_birth, school_class_id,
     exam_id, total_available_points, total_points_gained,
     average_percentage_mark) = exam_result_details
    ui = cls.__ui_mainwindow
    ui.label_58.setText(str(student_id))
    ui.label_72.setText(str(student_full_name))
    ui.label_75.setText(str(date_of_birth))
    ui.label_76.setText(str(school_class_id))
    ui.label_77.setText(str(exam_id))
    ui.label_78.setText(str(total_available_points))
    ui.label_79.setText(str(total_points_gained))
    ui.label_80.setText(str(average_percentage_mark) + " %")
@classmethod
def get_exam_id_to_view_details(cls):
    """Exam id typed on the view-exam-details page, or None when blank/non-numeric."""
    exam_id_text = cls.__ui_mainwindow.lineEdit_14.text()
    if (exam_id_text == ""):
        return None
    try:
        return int(exam_id_text)
    except ValueError:  # narrowed from a bare except: only bad user input is expected
        return None
@classmethod
def diaplay_exam_id_on_view_exam_details_page(cls, exam_id):
    """Show the exam id on the view-exam-details page.

    NOTE(review): the method name keeps the historical 'diaplay' typo —
    renaming would break callers elsewhere in the project.
    """
    cls.__ui_mainwindow.label_18.setText(f"Exam ID: {exam_id}")
@classmethod
def display_questions_on_view_exam_details_page(cls, questions_ids):
    """List the exam's question ids, one per row."""
    cls.__ui_mainwindow.tableWidget_7.clear()
    for table_row, question_id in enumerate(cls.make_string_to_list(questions_ids)):
        cls.__ui_mainwindow.tableWidget_7.setItem(
            table_row, 0, QTableWidgetItem(f"Question {question_id}"))
@classmethod
def display_first_school_class_details_on_view_exam_details_page(cls, school_class_id, students_full_names):
    """Show the first class's id label and its students, one per row."""
    cls.display_first_school_class_id_on_view_exam_details_page(school_class_id)
    cls.__ui_mainwindow.tableWidget_27.clear()
    for table_row, (student_name,) in enumerate(students_full_names):
        cls.__ui_mainwindow.tableWidget_27.setItem(
            table_row, 0, QTableWidgetItem(student_name))
@classmethod
def display_first_school_class_id_on_view_exam_details_page(cls, school_class_id):
    """Label the first class column.  Fixed user-visible typo "CLass" -> "Class"."""
    cls.__ui_mainwindow.label_67.setText("Class " + str(school_class_id))
@classmethod
def display_second_school_class_details_on_view_exam_details_page(cls, school_class_id, students_full_names):
    """Show the second class's id label and its students, one per row."""
    cls.display_second_school_class_id_on_view_exam_details_page(school_class_id)
    cls.__ui_mainwindow.tableWidget_28.clear()
    for table_row, (student_name,) in enumerate(students_full_names):
        cls.__ui_mainwindow.tableWidget_28.setItem(
            table_row, 0, QTableWidgetItem(student_name))
@classmethod
def display_second_school_class_id_on_view_exam_details_page(cls, school_class_id):
    """Label the second class column.  Fixed user-visible typo "CLass" -> "Class"."""
    cls.__ui_mainwindow.label_68.setText("Class " + str(school_class_id))
@classmethod
def display_third_school_class_details_on_view_exam_details_page(cls, school_class_id, students_full_names):
    """Show the third class's id label and its students, one per row."""
    cls.display_third_school_class_id_on_view_exam_details_page(school_class_id)
    cls.__ui_mainwindow.tableWidget_29.clear()
    for table_row, (student_name,) in enumerate(students_full_names):
        cls.__ui_mainwindow.tableWidget_29.setItem(
            table_row, 0, QTableWidgetItem(student_name))
@classmethod
def display_third_school_class_id_on_view_exam_details_page(cls, school_class_id):
    """Label the third class column.  Fixed user-visible typo "CLass" -> "Class"."""
    cls.__ui_mainwindow.label_69.setText("Class " + str(school_class_id))
@classmethod
def display_fourth_school_class_details_on_view_exam_details_page(cls, school_class_id, students_full_names):
    """Show the fourth class's id label and its students, one per row."""
    cls.display_fourth_school_class_id_on_view_exam_details_page(school_class_id)
    cls.__ui_mainwindow.tableWidget_30.clear()
    for table_row, (student_name,) in enumerate(students_full_names):
        cls.__ui_mainwindow.tableWidget_30.setItem(
            table_row, 0, QTableWidgetItem(student_name))
@classmethod
def display_fourth_school_class_id_on_view_exam_details_page(cls, school_class_id):
    """Label the fourth class column.  Fixed user-visible typo "CLass" -> "Class"."""
    cls.__ui_mainwindow.label_70.setText("Class " + str(school_class_id))
@classmethod
def display_fifth_school_class_details_on_view_exam_details_page(cls, school_class_id, students_full_names):
    """Show the fifth class's id label and its students, one per row."""
    cls.display_fifth_school_class_id_on_view_exam_details_page(school_class_id)
    cls.__ui_mainwindow.tableWidget_31.clear()
    for table_row, (student_name,) in enumerate(students_full_names):
        cls.__ui_mainwindow.tableWidget_31.setItem(
            table_row, 0, QTableWidgetItem(student_name))
@classmethod
def display_fifth_school_class_id_on_view_exam_details_page(cls, school_class_id):
    """Label the fifth class column.  Fixed user-visible typo "CLass" -> "Class"."""
    cls.__ui_mainwindow.label_71.setText("Class " + str(school_class_id))
@classmethod
def make_string_to_list(cls, any_string):
    """Coerce *any_string* to str and split it on single spaces.

    :return: list of space-separated tokens (empty fields preserved).
    """
    return str(any_string).split(" ")
@classmethod
def refresh_drop_student_to_view_exam_result_details_box(cls):
    """Clear the drop-student table (tableWidget_22) on the exam-result page."""
    cls.__ui_mainwindow.tableWidget_22.clear()
@classmethod
def display_exam_result_id_invalid_message(cls):
    """Show the "Exam Result ID Invalid" error text in label_32."""
    cls.__ui_mainwindow.label_32.setText("Exam Result ID Invalid")
@classmethod
def refresh_load_exam_result_details_page(cls):
    """Reset every widget on the load-exam-result-details page."""
    ui = cls.__ui_mainwindow
    # Clear in the same order the widgets were cleared before.
    for widget in (ui.label_33, ui.tableWidget_12, ui.lineEdit_23,
                   ui.tableWidget_13, ui.tableWidget_22, ui.label_58,
                   ui.label_72, ui.label_75, ui.label_76, ui.label_77,
                   ui.label_78, ui.label_79, ui.label_80):
        widget.clear()
@classmethod
def refresh_exam_result_id_validity_error_message(cls):
    """Erase the exam-result-id validity error text (label_32)."""
    cls.__ui_mainwindow.label_32.clear()
@classmethod
def display_school_class_id_invalid_to_view_result_message(cls):
    """Show the "School Class ID Invalid To View" error text in label_81."""
    cls.__ui_mainwindow.label_81.setText("School Class ID Invalid To View")
@classmethod
def refresh_school_class_details_table_on_view_exam_result_page(cls):
    """Clear the class-details table (tableWidget_13) on the exam-result page."""
    cls.__ui_mainwindow.tableWidget_13.clear()
@classmethod
def refresh_school_class_id_invalid_to_view_exam_result_error_label(cls):
    """Erase the invalid-class-id error text (label_81)."""
    cls.__ui_mainwindow.label_81.clear()
@classmethod
def refresh_student_exam_result_details(cls):
    """Blank all per-student exam-result labels (label_58, label_72, label_75..label_80)."""
    ui = cls.__ui_mainwindow
    for label in (ui.label_58, ui.label_72, ui.label_75, ui.label_76,
                  ui.label_77, ui.label_78, ui.label_79, ui.label_80):
        label.clear()
@classmethod
def display_no_exam_result_id_selected_message(cls):
    """Show the "No Exam Result ID Selected" error text in label_81."""
    cls.__ui_mainwindow.label_81.setText("No Exam Result ID Selected")
@classmethod
def refresh_school_class_id_input_box_on_view_exam_result_details_page(cls):
    """Clear the class-id input box (lineEdit_23) on the exam-result-details page."""
    cls.__ui_mainwindow.lineEdit_23.clear()
@classmethod
def refresh_view_exam_details_by_id_page(cls):
    """Reset the view-exam-details-by-id page: restore the id caption and clear all tables/labels."""
    ui = cls.__ui_mainwindow
    ui.label_18.setText("Exam ID : ")
    for widget in (ui.tableWidget_7, ui.label_67, ui.label_68, ui.label_69,
                   ui.label_70, ui.label_71, ui.tableWidget_27, ui.tableWidget_28,
                   ui.tableWidget_29, ui.tableWidget_30, ui.tableWidget_31):
        widget.clear()
@classmethod
def refresh_students_table_on_view_exam_result_details_page(cls):
    """Clear the students table (tableWidget_13) on the exam-result-details page."""
    cls.__ui_mainwindow.tableWidget_13.clear()
@classmethod
def refresh_school_classes_table_on_view_exam_result_details_page(cls):
    """Clear the classes table (tableWidget_12) on the exam-result-details page."""
    cls.__ui_mainwindow.tableWidget_12.clear()
def __str__(self):
return ("This is TeacherGUI Object") |
9,610 | 8cc0314d48f81ceead863245443548297e8188f8 | import time
from numpy import empty
from src.utils import normalize_input_sentence, evaluate, add_begin_and_trailing_tag, check_for_terminal_argument
from classes.BaseTagger import BaseTagger
from src.CONSTANT import POS_TAG_KEYNAME, WORD_KEYNAME, TRUETAG_KEYNAME, DEFAULT_TRAINING_FILENAME
import sys
import os
# TODO: review and complete all documentation in this module
class ForwardBackward(BaseTagger):
    """
    For Learning: Calculate probability of an observation sequence given a HMM: P(O | lambda)
    """

    def __init__(self):
        """
        Constructor
        """
        # TODO Need to seperate input reading into whether a class method or static function
        self.path_to_file = check_for_terminal_argument()
        BaseTagger.__init__(self)
        raise NotImplementedError

    def probabilities(self):
        """
        Return the probabilities of a hidden state sequence given observed output sequence
        :return:
        """
        raise NotImplementedError

    def prob_given_state(self, start=1, end=None):
        """
        Return the probabilities of output from "start" to "end" given current (hidden) state
        :param start: start of observing time
        :param end: end of observing time; defaults to len(self.T), resolved per call
        :return: probabilities.
        """
        # BUG FIX: the original signature read ``end=len(self.T)``. Default
        # arguments are evaluated at class-definition time, where ``self`` does
        # not exist, so importing this module raised NameError before the class
        # could even be created. A None sentinel resolved per call preserves
        # the intended default while making the class definable.
        if end is None:
            end = len(self.T)
        raise NotImplementedError

    def tag(self):
        """
        alpha_t_i: probability of state S[i] at time t with the observed sequence O={o1, ..., oT} with lambda
        model
        """
        self.alpha = self.prob_given_state()
        raise NotImplementedError
|
# Sum a small list of mixed ints and floats and print the total.
lista = [2, 3.2, 4, 52, 6.25]
s = sum(lista)  # built-in sum handles the int/float mix
print(s)
9,612 | 7369d5a463b0f41c17d5648739d4730256e611f9 | #!/usr/bin/python3
import RPi.GPIO as GPIO
import time
# motor_EN_A: Pin7 | motor_EN_B: Pin11
# motor_A: Pin8,Pin10 | motor_B: Pin13,Pin12
#Motor_A_EN = 7
Motor_B_EN = 11      # PWM enable pin for motor B (BOARD numbering)
#Motor_A_Pin1 = 8
#Motor_A_Pin2 = 10
Motor_B_Pin1 = 13    # motor B direction pin 1
Motor_B_Pin2 = 12    # motor B direction pin 2
Dir_forward = 0      # direction codes accepted by motorStart()
Dir_backward = 1
#pwm_A = 0
pwm_B = 0            # placeholder; replaced by a GPIO.PWM instance in setup()
def setup():#Motor initialization
    """Configure BOARD-mode output pins for motor B and create its 1 kHz PWM driver."""
    global pwm_A, pwm_B  # pwm_A is declared but unused while motor A wiring is commented out
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)
    #GPIO.setup(Motor_A_EN, GPIO.OUT)
    GPIO.setup(Motor_B_EN, GPIO.OUT)
    #GPIO.setup(Motor_A_Pin1, GPIO.OUT)
    #GPIO.setup(Motor_A_Pin2, GPIO.OUT)
    GPIO.setup(Motor_B_Pin1, GPIO.OUT)
    GPIO.setup(Motor_B_Pin2, GPIO.OUT)
    #pwm_A = GPIO.PWM(Motor_A_EN, 1000)
    pwm_B = GPIO.PWM(Motor_B_EN, 1000)
def motorStop():#Motor stops
    """De-energise both direction pins and the enable pin for motor B."""
    #GPIO.output(Motor_A_Pin1, GPIO.LOW)
    #GPIO.output(Motor_A_Pin2, GPIO.LOW)
    GPIO.output(Motor_B_Pin1, GPIO.LOW)
    GPIO.output(Motor_B_Pin2, GPIO.LOW)
    #GPIO.output(Motor_A_EN, GPIO.LOW)
    GPIO.output(Motor_B_EN, GPIO.LOW)
def motorStart(status, direction, speed):#Motor 2 positive and negative rotation
    """Drive motor B.

    :param status: 0 stops the motor; any other value runs it.
    :param direction: Dir_forward or Dir_backward; any other value leaves pins untouched.
    :param speed: PWM duty cycle in percent (0-100).
    """
    global pwm_B
    if status == 0: # stop
        motorStop()
    else:
        if direction == Dir_forward:
            GPIO.output(Motor_B_Pin1, GPIO.HIGH)
            GPIO.output(Motor_B_Pin2, GPIO.LOW)
            # NOTE(review): forward starts at 100% duty while backward starts at
            # 0%, before both are overwritten by ChangeDutyCycle(speed) —
            # presumably unintentional asymmetry; confirm before unifying.
            pwm_B.start(100)
            pwm_B.ChangeDutyCycle(speed)
        elif direction == Dir_backward:
            GPIO.output(Motor_B_Pin1, GPIO.LOW)
            GPIO.output(Motor_B_Pin2, GPIO.HIGH)
            pwm_B.start(0)
            pwm_B.ChangeDutyCycle(speed)
def destroy():
    """Stop the motor, then release all GPIO resources."""
    motorStop()
    GPIO.cleanup() # Release resource
# Script entry: no foreground work yet; clean up GPIO state on Ctrl-C.
try:
    pass
except KeyboardInterrupt:
    destroy()
|
# Read two integers, echo each one, then print a formatted sum.
numero_uno = int(input("ingresa el primer numero "))
numero_dos = int(input("ingresa el segundo numero "))
for valor in (numero_uno, numero_dos):
    print(valor)
total = numero_uno + numero_dos
print("el total de la suma de : %s + %s es = a %s" % (numero_uno, numero_dos, total))
9,614 | 704047cb7eb05db9fa5f7ae61763ddbc8942ff60 | from rest_framework import serializers
from notes import models
class CategorySerializer(serializers.ModelSerializer):
    """Serialize a Category; ``id`` is rendered via str() rather than as a raw integer."""
    id = serializers.StringRelatedField()

    class Meta:
        model = models.Category
        fields = (
            'id',
            'name',
            'color',
        )
# nested category inside each Insight object
class InsightSerializer(serializers.ModelSerializer):
    """Serialize an Insight with its Category embedded as a nested object."""
    id = serializers.StringRelatedField()
    # Nested read of the related category using CategorySerializer's fields.
    category = CategorySerializer()

    class Meta:
        model = models.Insight
        fields = (
            'id',
            'caption',
            'category',
            'source_url',
            'created_at',
        )
|
9,615 | a93818440410bde004f0203f18112fa1b666959c | # coding: utf-8
"""
Negotiation API
The <b>Negotiations API</b> gives sellers the ability to proactively send discount offers to buyers who have shown an \"interest\" in their listings. <br><br>By sending buyers discount offers on listings where they have shown an interest, sellers can increase the velocity of their sales. <br><br>There are various ways for a buyer to show <i>interest </i> in a listing. For example, if a buyer adds the listing to their <b>Watch</b> list, or if they add the listing to their shopping cart and later abandon the cart, they are deemed to have shown an interest in the listing. <br><br>In the offers that sellers send, they can discount their listings by either a percentage off the listing price, or they can set a new discounted price that is lower than the original listing price. <br><br>For details about how seller offers work, see <a href=\"/api-docs/sell/static/marketing/offers-to-buyers.html\" title=\"Selling Integration Guide\">Sending offers to buyers</a>. # noqa: E501
OpenAPI spec version: v1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...sell_negotiation.api_client import ApiClient
class OfferApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def find_eligible_items(self, x_ebay_c_marketplace_id, **kwargs):  # noqa: E501
        """find_eligible_items  # noqa: E501

        This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.find_eligible_items(x_ebay_c_marketplace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
        :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10
        :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0
        :return: PagedEligibleItemCollection
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper: always request data only, then delegate to the
        # *_with_http_info variant that performs the actual HTTP call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, **kwargs)  # noqa: E501
        else:
            (data) = self.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, **kwargs)  # noqa: E501
            return data

    def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id, **kwargs):  # noqa: E501
        """find_eligible_items  # noqa: E501

        This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
        :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10
        :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0
        :return: PagedEligibleItemCollection
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument not in the generated parameter list.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method find_eligible_items" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'x_ebay_c_marketplace_id' is set
        if ('x_ebay_c_marketplace_id' not in params or
                params['x_ebay_c_marketplace_id'] is None):
            raise ValueError("Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        if 'offset' in params:
            query_params.append(('offset', params['offset']))  # noqa: E501

        header_params = {}
        if 'x_ebay_c_marketplace_id' in params:
            header_params['X-EBAY-C-MARKETPLACE-ID'] = params['x_ebay_c_marketplace_id']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/find_eligible_items', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PagedEligibleItemCollection',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs):  # noqa: E501
        """send_offer_to_interested_buyers  # noqa: E501

        This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
        :param CreateOffersRequest body: Send offer to eligible items request.
        :return: SendOfferToInterestedBuyersCollectionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper: always request data only, then delegate to the
        # *_with_http_info variant that performs the actual HTTP call.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, **kwargs)  # noqa: E501
        else:
            (data) = self.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, **kwargs)  # noqa: E501
            return data

    def send_offer_to_interested_buyers_with_http_info(self, x_ebay_c_marketplace_id, **kwargs):  # noqa: E501
        """send_offer_to_interested_buyers  # noqa: E501

        This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
        :param CreateOffersRequest body: Send offer to eligible items request.
        :return: SendOfferToInterestedBuyersCollectionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['x_ebay_c_marketplace_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument not in the generated parameter list.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method send_offer_to_interested_buyers" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'x_ebay_c_marketplace_id' is set
        if ('x_ebay_c_marketplace_id' not in params or
                params['x_ebay_c_marketplace_id'] is None):
            raise ValueError("Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}
        if 'x_ebay_c_marketplace_id' in params:
            header_params['X-EBAY-C-MARKETPLACE-ID'] = params['x_ebay_c_marketplace_id']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_auth']  # noqa: E501

        return self.api_client.call_api(
            '/send_offer_to_interested_buyers', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SendOfferToInterestedBuyersCollectionResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
|
9,616 | 06b07045fcfafd174bb78ff5c3a36bed11e36e54 | import random
import string
import datetime
from app import db
from dateutil.parser import parse as date_parse
from flask_security import UserMixin, RoleMixin
# Association table for the many-to-many Users <-> Role link (used by Flask-Security).
roles_users = db.Table('roles_users',
        db.Column('user_id', db.Integer(), db.ForeignKey('users.id')),
        db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
    """Flask-Security role; attached to users through the roles_users table."""
    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.String(80), unique=True)
    description = db.Column(db.String(255))
class StudyUsers(db.Model):
    """Join row assigning a user a role ('labeller' or 'owner') within a study."""
    __tablename__ = 'study_users'
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), index=True)
    study_id = db.Column(db.Integer, db.ForeignKey('studies.id'), index=True)
    role = db.Column(db.Enum('labeller', 'owner', name='user_role'))

    def __init__(self, user_id, study_id, role):
        """Store the user/study/role association.

        FIX: the original assigned ``self.study_id`` twice (redundant duplicate
        statement); the duplicate has been removed. Behaviour is unchanged.
        """
        self.user_id = user_id
        self.study_id = study_id
        self.role = role
class Users(db.Model, UserMixin):
    """Application user; integrates with Flask-Security via UserMixin."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.Unicode)
    password = db.Column(db.String(255))
    active = db.Column(db.Boolean())
    confirmed_at = db.Column(db.DateTime())
    # Per-study memberships (role rows) for this user.
    study_roles = db.relationship('StudyUsers',
                backref=db.backref('users'), lazy='dynamic')
    labels = db.relationship('UserLabels',
                backref='users', lazy='dynamic')
    roles = db.relationship('Role', secondary=roles_users,
                            backref=db.backref('users', lazy='dynamic'))

    def studies(self):
        """Return (Studies, StudyUsers) pairs for every study this user belongs to."""
        return db.session.query(Studies, StudyUsers).\
            join(StudyUsers).filter_by(user_id=self.id)

    def study_labels(self, study_id):
        """Count datasets in *study_id* that this user has finished labelling."""
        return db.session.query(LabelledDatasets).\
            filter_by(user_id=self.id).\
            join(Datasets).filter_by(study_id=study_id).count()
class StudyUploads(db.Model):
    """Raw uploaded file (name + bytes) attached to a study, with processing state."""
    __tablename__ = 'study_uploads'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    filename = db.Column(db.Unicode)
    data = db.Column(db.LargeBinary)  # raw file contents
    study_id = db.Column(db.Integer, db.ForeignKey('studies.id'), index=True)
    state = db.Column(db.String(64), default='submitted', nullable=True)
    error_message = db.Column(db.Text, nullable=True)  # populated when processing fails

    def __init__(self, filename, data, study_id):
        self.filename = filename
        self.data = data
        self.study_id = study_id
class Studies(db.Model):
    """A labelling study: owns datasets, raw uploads, user memberships and jobs."""
    __tablename__ = 'studies'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    title = db.Column(db.Unicode)
    y_min = db.Column(db.Float, default=0)    # value-axis lower bound (see update_range)
    y_max = db.Column(db.Float, default=200)  # value-axis upper bound (see update_range)
    token = db.Column(db.String, nullable=True)  # 8-char token from generate_token()
    datasets = db.relationship('Datasets', lazy="dynamic", cascade="all, delete-orphan", backref="study")
    uploads = db.relationship('StudyUploads', lazy="dynamic", cascade="all, delete-orphan", backref="study")
    users = db.relationship('StudyUsers',
                backref=db.backref('studies'), lazy='dynamic')

    def __init__(self, title):
        self.title = title

    def most_recent_successful_job(self):
        """Return the newest SZJob of this study whose state is 'success', or None."""
        return SZJob.query.filter(SZJob.study == self, SZJob.state == 'success').order_by(SZJob.created_at.desc()).first()

    def add_user(self, user, role):
        """Attach *user* with *role*, unless they are already a member in any role."""
        study_user = StudyUsers(user.id, self.id, role)
        for existing_user in self.users:
            if existing_user.user_id == user.id:
                return
        self.users.append(study_user)
        db.session.commit()

    def get_roles(self, user):
        """Return the list of roles *user* holds in this study."""
        roles = [study_user.role for study_user in self.users.filter_by(user_id=user.id).all()]
        return roles

    def is_labeller(self, user):
        # Owners implicitly count as labellers.
        return ("labeller" in self.get_roles(user)) or ("owner" in self.get_roles(user))

    def is_owner(self, user):
        return "owner" in self.get_roles(user)

    def delete(self):
        """Delete the study along with its datasets and uploads."""
        for dataset in self.datasets:
            dataset.delete()
        self.uploads.delete()
        db.session.delete(self)
        db.session.commit()

    def labellers(self):
        """Query for users holding the 'labeller' role in this study."""
        return Users.query.join(StudyUsers).\
            filter(StudyUsers.role == "labeller").\
            filter(StudyUsers.study_id == self.id)

    def update_range(self):
        """Recompute y_min/y_max from all of the study's data point values and persist."""
        maxmin = db.session.query(db.func.max(DataPoints.value), db.func.min(DataPoints.value)).\
            join(Datasets).filter(Datasets.study_id == self.id).first()
        self.y_max = maxmin[0]
        self.y_min = maxmin[1]
        db.session.commit()

    def has_jobs(self):
        """True when the study has at least one non-archived SZJob."""
        resp = SZJob.query.filter(SZJob.study == self, ~SZJob.archived)
        return resp.count() > 0

    def has_archived_jobs(self):
        """True when the study has at least one archived SZJob."""
        resp = SZJob.query.filter(SZJob.study == self, SZJob.archived)
        return resp.count() > 0

    def generate_token(self):
        """Create, store and return an 8-character uppercase/digit token.

        NOTE(review): uses the ``random`` module, which is not cryptographically
        secure; if this token gates access, a CSPRNG would be preferable —
        confirm the threat model.
        """
        self.token = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
        return self.token

    def user_labels_as_csv(self):
        """Export every user label in this study as CSV text (header + one row per label)."""
        resp = db.session.execute("""SELECT
            user_labels.datapoint_id,
            user_labels.dataset_id,
            user_labels.user_id,
            label,
            users.email,
            datapoints.timestamp,
            datasets.title
            FROM user_labels
            JOIN datapoints ON user_labels.datapoint_id = datapoints.id
            JOIN users ON user_labels.user_id = users.id
            JOIN datasets ON user_labels.dataset_id = datasets.id
            WHERE study_id = :study_id""", {'study_id': self.id})
        import StringIO  # Python 2 module; this codebase targets Python 2
        import csv
        output = StringIO.StringIO()
        writer = csv.writer(output)
        writer.writerow(['datapoint_id', 'dataset_id', 'user_id', 'label', 'email', 'timestamp', 'title'])
        for row in resp:
            writer.writerow(row)
        return output.getvalue()
class Datasets(db.Model):
    """A single time series within a study, with its notes, points and labels."""
    __tablename__ = 'datasets'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    title = db.Column(db.Unicode)
    study_id = db.Column(db.Integer, db.ForeignKey('studies.id'), index=True)
    notes = db.relationship('Notes', cascade="all, delete-orphan", backref="dataset", lazy="dynamic")
    user_labels = db.relationship('UserLabels', cascade="all, delete-orphan", backref="dataset", lazy="dynamic")
    data_points = db.relationship('DataPoints', cascade="all, delete-orphan", backref="dataset", lazy="dynamic")
    labelled = db.relationship('LabelledDatasets', cascade="all, delete-orphan", backref="dataset", lazy="dynamic")

    def __init__(self, title, study_id, notes=None, data_points=None):
        """Create a dataset.

        FIX: the original used mutable default arguments (``notes=[]``,
        ``data_points=[]``), a shared-state hazard; None sentinels preserve the
        same observable behaviour safely and keep the call signature compatible.
        """
        self.title = title
        self.study_id = study_id
        self.notes = notes if notes is not None else []
        self.data_points = data_points if data_points is not None else []

    def next(self):
        """Return the adjacent dataset with an older created_at in the same study, or None."""
        return Datasets.query.\
            filter(Datasets.study_id == self.study_id).\
            filter(Datasets.created_at < self.created_at).\
            order_by(Datasets.created_at.desc()).\
            first()

    def prev(self):
        """Return the adjacent dataset with a newer created_at in the same study, or None."""
        return Datasets.query.\
            filter(Datasets.study_id == self.study_id).\
            filter(Datasets.created_at > self.created_at).\
            order_by(Datasets.created_at).\
            first()

    def items(self):
        """Return all datasets of the study, newest first."""
        return Datasets.query.\
            filter(Datasets.study_id == self.study_id).\
            order_by(Datasets.created_at.desc()).\
            all()

    def labels_for_user(self, user):
        """(Datasets, DataPoints, UserLabels) rows for *user*'s labels, ordered by time."""
        return db.session.query(Datasets, DataPoints, UserLabels).\
            filter_by(id=self.id).\
            join(UserLabels).filter_by(user_id=user.id).\
            join(DataPoints).\
            order_by(DataPoints.timestamp)

    def delete(self):
        """Remove the dataset and all of its dependent rows, then commit."""
        self.user_labels.delete()
        self.notes.delete()
        self.data_points.delete()
        self.labelled.delete()
        db.session.delete(self)
        db.session.commit()

    def user_has_labelled(self, user):
        """True once *user* has a LabelledDatasets marker for this dataset."""
        return self.labelled.filter_by(user_id=user.id).count() > 0
class LabelledDatasets(db.Model):
    """Marker row recording that *user_id* has completed labelling *dataset_id*."""
    __tablename__ = 'labelled_datasets'
    id = db.Column(db.Integer, primary_key=True)
    dataset_id = db.Column(db.Integer, db.ForeignKey('datasets.id'), index=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), index=True)

    def __init__(self, dataset_id, user_id):
        self.dataset_id = dataset_id
        self.user_id = user_id
class Notes(db.Model):
    """Free-text note attached to a dataset."""
    __tablename__ = 'notes'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    text = db.Column(db.Unicode)
    dataset_id = db.Column(db.Integer, db.ForeignKey('datasets.id'), index=True)

    def __init__(self, text):
        self.text = text

    @classmethod
    def dict_from_parsed(cls, text, dataset_id):
        """Build a bulk-insert dict for a parsed note row (sets created_at to now)."""
        return dict(
            created_at=datetime.datetime.now(),
            text=text,
            dataset_id=dataset_id
        )
class DataPoints(db.Model):
    """A single timestamped measurement within a dataset."""
    __tablename__ = 'datapoints'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    timestamp = db.Column(db.DateTime, index=True)
    unit = db.Column(db.String(16))
    value = db.Column(db.Float)
    training = db.Column(db.Boolean)  # decided by the training_selector callable below
    dataset_id = db.Column(db.Integer, db.ForeignKey('datasets.id'), index=True)
    user_labels = db.relationship('UserLabels', backref="datapoint", lazy="dynamic", passive_deletes=True)
    result_data_points = db.relationship('ResultDataPoints', cascade="all, delete-orphan", backref="dataset", lazy="dynamic")

    def __init__(self, timestamp, unit, value):
        self.timestamp = timestamp
        self.unit = unit
        self.value = value

    @classmethod
    def dict_from_parsed(cls, parsed_point, dataset_id, training_selector):
        """Build a bulk-insert dict from a parsed point.

        :param parsed_point: sequence of [timestamp string, unit, value string]
        :param training_selector: callable(timestamp) -> bool flagging training points
        """
        timestamp = date_parse(parsed_point[0])
        return dict(
            created_at=datetime.datetime.now(),
            timestamp=timestamp,
            unit=parsed_point[1],
            value=float(parsed_point[2]),
            dataset_id=dataset_id,
            training=training_selector(timestamp)
        )
class ResultDataPoints(db.Model):
    """Per-timestamp prediction value produced by an SZJob for a data point."""
    __tablename__ = 'resultdatapoints'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    timestamp = db.Column(db.DateTime, index=True)
    value = db.Column(db.Float)
    prediction = db.Column(db.Float)
    job_id = db.Column(db.Integer, db.ForeignKey('sz_job.id'))
    datapoint_id = db.Column(db.Integer, db.ForeignKey('datapoints.id'), index=True)
    dataset_id = db.Column(db.Integer, db.ForeignKey('datasets.id'), index=True)

    def __init__(self):
        pass

    @classmethod
    def dict_from_parsed(cls, parsed_point, dataset_id, training_selector):
        # NOTE(review): this looks copy-pasted from DataPoints.dict_from_parsed —
        # it emits 'unit' and 'training' keys that are not columns of this table
        # and never fills 'prediction'/'job_id'/'datapoint_id'. Possibly dead or
        # broken code; confirm before relying on it.
        timestamp = date_parse(parsed_point[0])
        return dict(
            created_at=datetime.datetime.now(),
            timestamp=timestamp,
            unit=parsed_point[1],
            value=float(parsed_point[2]),
            dataset_id=dataset_id,
            training=training_selector(timestamp)
        )
class UserLabels(db.Model):
    """Per-user boolean label attached to a single data point."""
    __tablename__ = 'user_labels'
    id = db.Column(db.Integer, primary_key=True)
    datapoint_id = db.Column(db.Integer, db.ForeignKey('datapoints.id'), index=True)
    dataset_id = db.Column(db.Integer, db.ForeignKey('datasets.id'), index=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), index=True)
    label = db.Column(db.Boolean)

    def __init__(self, datapoint_id, dataset_id, user_id, label=False):
        """Store one label row.

        FIX: the original ignored the ``label`` argument and always stored
        False; the parameter value is now honoured. The default (False) keeps
        existing call sites behaving exactly as before.
        """
        self.datapoint_id = datapoint_id
        self.dataset_id = dataset_id
        self.user_id = user_id
        self.label = label

    @classmethod
    def dicts_from_datapoints(cls, data_points, dataset_id, user_id):
        """Build bulk-insert dicts (all labels start False) for the given points."""
        dicts = [dict(datapoint_id=data_point.id,
                      dataset_id=dataset_id,
                      user_id=user_id,
                      label=False) for data_point in data_points]
        return dicts
class SZJob(db.Model):
    """Background job attached to a study, tracking state, messages and CSV output."""
    id = db.Column(db.Integer, primary_key=True)
    study_id = db.Column(db.Integer, db.ForeignKey('studies.id'), index=True)
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    state = db.Column(db.String(64), default='submitted')  # 'success' is checked by Studies.most_recent_successful_job
    message = db.Column(db.Unicode, nullable=True)
    csv_blob = db.Column(db.Unicode, nullable=True)
    csv_binary_blob = db.Column(db.LargeBinary, nullable=True)
    archived = db.Column(db.Boolean, nullable=True, default=False)
    study = db.relationship('Studies', backref='szjobs')
|
def find_max(a, b):
    """Return the larger of *a* and *b* (returns *b* on ties)."""
    return a if a > b else b
def find_max_three(a, b, c):
    """Return the largest of three values via pairwise comparisons."""
    largest_of_first_two = find_max(a, b)
    return find_max(largest_of_first_two, c)
9,618 | 912928cea0f96e601eecfcb6dba695ef26a3c6e2 | import itertools
import numpy as np
import pandas as pd
from scipy.sparse import coo_matrix
def merge_and_split(inputs, labels):
    """Inner-join *inputs* and *labels* on 'utterance' and return (feat, label).

    Both frames must be indexed by 'utterance' and carry a 'feat' / 'label'
    column respectively; only utterances present in both survive the join.
    """
    joined = (inputs.reset_index()
                    .merge(labels.reset_index(), on='utterance', how='inner')
                    .set_index('utterance'))
    return joined.feat, joined.label
def list_to_sparse(inputs):
    """Convert list of lists into scipy coo matrix.

    Row i holds the values of inputs[i]; shorter rows are implicitly
    zero-padded up to the longest inner list.
    """
    data, rows, cols = [], [], []
    for r, sequence in enumerate(inputs):
        for c, entry in enumerate(sequence):
            data.append(entry)
            rows.append(r)
            cols.append(c)
    width = np.max([len(x) for x in inputs])
    return coo_matrix((data, (rows, cols)), shape=(len(inputs), width))
class BatchGenerator(object):
    """Shuffling mini-batch iterator over (inputs, labels) pairs.

    NOTE(review): ``inputs``/``labels`` must support numpy fancy indexing
    (e.g. object ndarrays), since shuffling indexes them with a permutation
    array — confirm against the caller.
    """

    def __init__(self, data, batch_size=1):
        # data: (inputs, labels) tuple
        self.inputs, self.labels = data
        self.batch_size = batch_size
        self.data_length = len(self.inputs)
        # Per-example time length, taken from the first axis of each input.
        self.sequence_length = np.array([x.shape[0] for x in self.inputs])

    def next_batch(self):
        """Yield (padded_inputs, sparse_labels, sequence_lengths) once per epoch, reshuffling first."""
        self._suffle()
        start = 0
        end = 0
        batch_size, data_length = self.batch_size, self.data_length
        while end != data_length:
            end += batch_size
            end = data_length if end >= data_length else end  # clamp last batch
            yield self._get(start, end)
            start = end

    def _suffle(self):
        # NOTE: name is a typo for "_shuffle"; kept as-is to avoid changing the
        # class's (private) surface.
        permutation = np.random.permutation(self.data_length)
        self.inputs = self.inputs[permutation]
        self.labels = self.labels[permutation]
        self.sequence_length = self.sequence_length[permutation]

    def _get(self, start, end):
        """Zero-pad inputs in [start, end) to the batch max length; labels become a coo matrix."""
        sequence_length = self.sequence_length[start:end]
        batch_sequence_length = np.max(sequence_length)
        inputs = np.array([np.pad(x, pad_width=((0, batch_sequence_length - len(x)),
                                                (0, 0)), mode='constant') for x in self.inputs[start:end]])
        labels = list_to_sparse(self.labels[start:end])
        return inputs, labels, sequence_length
|
9,619 | 37748e3dd17f2bdf05bb28b4dfded12de97e37e4 | # Python program to count number of digits in a number.
# print len(str(input('Enter No.: ')))
# NOTE(review): Python 2 code -- ``print i`` is a statement and ``input``
# evaluates the typed text to an int; under Python 3 this does not parse.
num = input("Enter no.: ")
i = 1  # every number has at least one digit (including 0)
# Strip one digit per pass; integer division makes num/10 falsy below 10.
while num / 10:
    num = num / 10
    i += 1
    if num < 10:  # redundant -- the while condition already terminates here
        break
print i
|
9,620 | ce1ef1ce538b8753af9e4b3e8e88f4cde9a2d860 | # - *- coding: utf- 8 - *-
import RPi.GPIO as io
import time
import math
io.setmode(io.BOARD)
hz = 50  # PWM / control-loop frequency (Hz)
# Bug fix: this file is Python 2 (it uses ``xrange`` below), where ``1/hz``
# is integer division and evaluated to 0 -- making every
# ``time.sleep(duty/100*dt)`` a no-op. Force float division.
dt = 1.0 / hz  # control-loop period (s)
kr = 48  # gear ratio used by the count<->radian conversions
enc_res = 0.01636246  # encoder resolution (rad per edge), used for velocity
num_samples = 100  # moving-average window for velocity estimation
special_words = ['BackSpace', 'Tab', 'Enter', 'Cap', 'Shift2', 'Ctrl1',
'WIN1', 'Alt1', 'Alt2', 'WIN2', 'MClick', 'Ctrl2', 'Shift1', '\\']
L1=0.115 # m
L2=0.064 # m
a1=0.018 # m
# mm units converted at output of keypose
off = 56 # mm
a0=50+off # mm
zz=24 # mm
zs=float (19) #mm
yy=18 #mm
k=float (0.048)
keydic={'Ctrl1':[0,-(a0+20),12],
'WIN1':[0,-(a0+20),12+zz],
'Alt1':[0,-(a0+20),12+2*zz],
' ':[0,-(a0+20),130],
'Alt2':[0,-(a0+20),130+12+2*zz],
'WIN2':[0,-(a0+20),142+3*zz],
'MClick':[0,-(a0+20),142+4*zz],
'Ctrl2':[0,-(a0+20),142+5*zz],
'Shift1':[float (k*yy),-(a0+20+yy),22],
'z':[float (k*yy),-(a0+20+yy),53],
'Z':[float (k*yy),-(a0+20+yy),53],
'x':[float (k*yy),-(a0+20+yy),53+zs],
'X':[float (k*yy),-(a0+20+yy),53+zs],
'c':[float (k*yy),-(a0+20+yy),53+2*zs],
'C':[float (k*yy),-(a0+20+yy),53+2*zs],
'v':[float (k*yy),-(a0+20+yy),53+3*zs],
'V':[float (k*yy),-(a0+20+yy),53+3*zs],
'b':[float (k*yy),-(a0+20+yy),53+4*zs],
'B':[float (k*yy),-(a0+20+yy),53+4*zs],
'n':[float (k*yy),-(a0+20+yy),53+5*zs],
'N':[float (k*yy),-(a0+20+yy),53+5*zs],
'm':[float (k*yy),-(a0+20+yy),53+6*zs],
'M':[float (k*yy),-(a0+20+yy),53+6*zs],
',':[float (k*yy),-(a0+20+yy),53+7*zs],
'.':[float (k*yy),-(a0+20+yy),53+8*zs],
'/':[float (k*yy),-(a0+20+yy),53+9*zs],
'Shift2':[float (k*yy),-(a0+20+yy),22+10*zs+43],
'Cap':[float (2*k*yy),-(a0+20+2*yy),17],
'a':[float (2*k*yy),-(a0+20+2*yy),43],
'A':[float (2*k*yy),-(a0+20+2*yy),43],
's':[float (2*k*yy),-(a0+20+2*yy),43+zs],
'S':[float (2*k*yy),-(a0+20+2*yy),43+zs],
'd':[float (2*k*yy),-(a0+20+2*yy),43+2*zs],
'D':[float (2*k*yy),-(a0+20+2*yy),43+2*zs],
'f':[float (2*k*yy),-(a0+20+2*yy),43+3*zs],
'F':[float (2*k*yy),-(a0+20+2*yy),43+3*zs],
'g':[float (2*k*yy),-(a0+20+2*yy),43+4*zs],
'G':[float (2*k*yy),-(a0+20+2*yy),43+4*zs],
'h':[float (2*k*yy),-(a0+20+2*yy),43+5*zs],
'H':[float (2*k*yy),-(a0+20+2*yy),43+5*zs],
'j':[float (2*k*yy),-(a0+20+2*yy),43+6*zs],
'J':[float (2*k*yy),-(a0+20+2*yy),43+6*zs],
'k':[float (2*k*yy),-(a0+20+2*yy),43+7*zs],
'K':[float (2*k*yy),-(a0+20+2*yy),43+7*zs],
'l':[float (2*k*yy),-(a0+20+2*yy),43+8*zs],
'L':[float (2*k*yy),-(a0+20+2*yy),43+8*zs],
';':[float (2*k*yy),-(a0+20+2*yy),43+9*zs],
'\\':[float (2*k*yy),-(a0+20+2*yy),43+10*zs],
'Enter':[float (2*k*yy),-(a0+20+2*yy),33+20+11*zs],
'Tab':[float (3*k*yy),-(a0+20+3*yy),15],
'q':[float (3*k*yy),-(a0+20+3*yy),39],
'Q':[float (3*k*yy),-(a0+20+3*yy),39],
'w':[float (3*k*yy),-(a0+20+3*yy),zs+39],
'W':[float (3*k*yy),-(a0+20+3*yy),zs+39],
'e':[float (3*k*yy),-(a0+20+3*yy),2*zs+39],
'E':[float (3*k*yy),-(a0+20+3*yy),2*zs+39],
'r':[float (3*k*yy),-(a0+20+3*yy),3*zs+39],
'R':[float (3*k*yy),-(a0+20+3*yy),3*zs+39],
't':[float (3*k*yy),-(a0+20+3*yy),4*zs+39],
'T':[float (3*k*yy),-(a0+20+3*yy),4*zs+39],
'y':[float (3*k*yy),-(a0+20+3*yy),5*zs+39],
'Y':[float (3*k*yy),-(a0+20+3*yy),5*zs+39],
'u':[float (3*k*yy),-(a0+20+3*yy),6*zs+39],
'U':[float (3*k*yy),-(a0+20+3*yy),6*zs+39],
'i':[float (3*k*yy),-(a0+20+3*yy),7*zs+39],
'I':[float (3*k*yy),-(a0+20+3*yy),7*zs+39],
'o':[float (3*k*yy),-(a0+20+3*yy),8*zs+39],
'O':[float (3*k*yy),-(a0+20+3*yy),8*zs+39],
'p':[float (3*k*yy),-(a0+20+3*yy),9*zs+39],
'P':[float (3*k*yy),-(a0+20+3*yy),9*zs+39],
'[':[float (3*k*yy),-(a0+20+3*yy),10*zs+39],
']':[float (3*k*yy),-(a0+20+3*yy),11*zs+39],
'\\':[float (3*k*yy),-(a0+20+3*yy),12*zs+30+14],
'`':[float (4*k*yy),-(a0+20+4*yy),float (zs/2)],
'1':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+zs)],
'2':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+2*zs)],
'3':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+3*zs)],
'4':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+4*zs)],
'5':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+5*zs)],
'6':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+6*zs)],
'7':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+7*zs)],
'8':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+8*zs)],
'9':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+9*zs)],
'0':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+10*zs)],
'-':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+11*zs)],
'=':[float (4*k*yy),-(a0+20+4*yy),float (zs/2+12*zs)],
'BackSpace':[float (4*k*yy),-(a0+20+4*yy),13*zs+19]
}
# set parameters of robot (SI UNITS)
L1,L2=0.115,0.064
len_link1=0.07
len_link2=0.04#distances of centers of mass from joint axes
m_link1=0.005
m_link2=0.003
m_motor=0.06
k=0.048
R=3.6
V=5
r_pulley=0.0181102/2 #unit meters
K_p,K_d=0.25,0.125
# motor 1
m1_in1_pin = 12
m1_in2_pin = 16
m1_en_pin = 18
chan_list = [m1_en_pin, m1_in1_pin, m1_in2_pin]
io.setup(chan_list, io.OUT)
p1 = io.PWM(m1_in1_pin, hz)
p2 = io.PWM(m1_in2_pin, hz)
# motor 2
m2_in1_pin = 22
m2_in2_pin = 32
m2_en_pin = 36
chan_list = [m2_en_pin, m2_in1_pin, m2_in2_pin]
io.setup(chan_list, io.OUT)
p3 = io.PWM(m2_in1_pin, hz)
p4 = io.PWM(m2_in2_pin, hz)
# motor 3
m3_in1_pin = 38
m3_in2_pin = 40
m3_en_pin = 37
chan_list = [m3_en_pin, m3_in1_pin, m3_in2_pin]
io.setup(chan_list, io.OUT)
p5 = io.PWM(m3_in1_pin, hz)
p6 = io.PWM(m3_in2_pin, hz)
# sensor 1
en1_pin = 35
io.setup(en1_pin, io.IN, pull_up_down=io.PUD_UP)
# sensor 2
en2_pin = 33
io.setup(en2_pin, io.IN, pull_up_down=io.PUD_UP)
# encoder 1
encoder1_sensors = [en1_pin, en2_pin]
A1_old = 0
encoder1_count = 0
A1_t1 = time.time()
vel1 = 0
vel1_vec = []
# sensor 3
en3_pin = 31
io.setup(en3_pin, io.IN, pull_up_down=io.PUD_UP)
# sensor 4
en4_pin = 29
io.setup(en4_pin, io.IN, pull_up_down=io.PUD_UP)
# encoder 2
encoder2_sensors = [en3_pin, en4_pin]
A2_old = 0
encoder2_count = 0
A2_t1 = time.time()
vel2 = 0
vel2_vec = []
# sensor 5
en5_pin = 15
io.setup(en5_pin, io.IN, pull_up_down=io.PUD_UP)
# sensor 6
en6_pin = 13
io.setup(en6_pin, io.IN, pull_up_down=io.PUD_UP)
# encoder 3
encoder3_sensors = [en5_pin, en6_pin]
A3_old = 0
encoder3_count = 0
A3_t1 = time.time()
vel3 = 0
vel3_vec = []
def clockwise(duty, pwm1, pwm2, en_pin):
    # Drive a motor "clockwise" at *duty* percent: enable the H-bridge, start
    # pwm1 at duty, wait one duty-scaled slice, then start pwm2 at the
    # complementary duty (100 - duty).
    io.output(en_pin, io.HIGH)
    pwm1.start(duty)
    time.sleep(duty/100*dt)
    pwm2.start(100-duty)
def counter_clockwise(duty, pwm1, pwm2, en_pin):
    # Mirror of clockwise(): pwm2 leads at *duty*, pwm1 gets the complement,
    # reversing the motor direction.
    io.output(en_pin, io.HIGH)
    pwm2.start(duty)
    time.sleep(duty/100*dt)
    pwm1.start(100-duty)
def countstorad(count):
    """Convert raw encoder counts to a joint-space angle in radians."""
    # 8 edges per encoder cycle, geared down by kr.
    return 2 * math.pi * count / 8 / kr
def radtocount(rad):
    """Convert a joint-space angle in radians to raw encoder counts."""
    # Exact inverse of countstorad().
    return rad * kr * 8 / (2 * math.pi)
def initializeEncoders():
    """Initialise the encoder counters to the robot's home pose.

    Bug fix: encoder counters are stored in raw *counts*, so joint 3's
    -pi/2 home angle must go through radtocount(). The old code stored the
    raw radian value, so countstorad(encoder3_count) reported a tiny angle
    instead of -pi/2, inconsistent with resetEncoders()/correctEncoders().
    """
    global encoder2_count, encoder3_count
    encoder2_count = 0
    encoder3_count = radtocount(-math.pi/2)
def resetEncoders():
    # Zero encoders 1-2 and park encoder 3 at the -pi/2 home count.
    global encoder1_count, encoder2_count, encoder3_count
    encoder1_count = 0
    encoder2_count = 0
    encoder3_count = radtocount(-math.pi/2)
def encoder1Callback(channel):
    # Quadrature-decoder ISR for encoder 1: fires on both edges of channel A.
    # Direction comes from comparing A against B (A != B increments here);
    # velocity is a moving average of the last num_samples edge timings.
    global A1_old, encoder1_count, A1_t1, vel1, vel1_vec
    A1_t2 = time.time()
    if io.input(channel):
        A = 1
    else:
        A = 0
    if io.input(encoder1_sensors[1]):
        B = 1
    else:
        B = 0
    if A != A1_old:  # react only to genuine edges on A
        if A != B:
            encoder1_count += 1
            vel1_vec.insert(0,enc_res/(A1_t2 - A1_t1))
        else:
            encoder1_count -= 1
            vel1_vec.insert(0,-enc_res/(A1_t2 - A1_t1))
        if len(vel1_vec) > num_samples:
            vel1_vec.pop()  # cap the moving-average window
        vel1 = sum(vel1_vec)/len(vel1_vec)
    A1_old = A
    A1_t1 = A1_t2
# Register the ISR on both rising and falling edges of channel A.
io.add_event_detect(en1_pin, io.BOTH, callback=encoder1Callback)
def encoder2Callback(channel):
    # Quadrature-decoder ISR for encoder 2. Same edge/direction logic as
    # encoder1Callback, but with the sign inverted (A != B decrements) --
    # presumably due to motor/encoder mounting; TODO confirm on hardware.
    global A2_old, encoder2_count, A2_t1, vel2, vel2_vec
    A2_t2 = time.time()
    if io.input(channel):
        A = 1
    else:
        A = 0
    if io.input(encoder2_sensors[1]):
        B = 1
    else:
        B = 0
    if A != A2_old:
        if A != B:
            encoder2_count -= 1
            vel2_vec.insert(0,-enc_res/(A2_t2 - A2_t1))
        else:
            encoder2_count += 1
            vel2_vec.insert(0,enc_res/(A2_t2 - A2_t1))
        if len(vel2_vec) > num_samples:
            vel2_vec.pop()
        vel2 = sum(vel2_vec)/len(vel2_vec)
    A2_old = A
    A2_t1 = A2_t2
# Register the ISR on both edges of encoder 2's channel A.
io.add_event_detect(en3_pin, io.BOTH, callback=encoder2Callback)
def encoder3Callback(channel):
    # Quadrature-decoder ISR for encoder 3; identical structure to
    # encoder2Callback (inverted sign relative to encoder 1).
    global A3_old, encoder3_count, A3_t1, vel3, vel3_vec
    A3_t2 = time.time()
    if io.input(channel):
        A = 1
    else:
        A = 0
    if io.input(encoder3_sensors[1]):
        B = 1
    else:
        B = 0
    if A != A3_old:
        if A != B:
            encoder3_count -= 1
            vel3_vec.insert(0,-enc_res/(A3_t2 - A3_t1))
        else:
            encoder3_count += 1
            vel3_vec.insert(0,enc_res/(A3_t2 - A3_t1))
        if len(vel3_vec) > num_samples:
            vel3_vec.pop()
        vel3 = sum(vel3_vec)/len(vel3_vec)
    A3_old = A
    A3_t1 = A3_t2
# Register the ISR on both edges of encoder 3's channel A.
io.add_event_detect(en5_pin, io.BOTH, callback=encoder3Callback)
def invskinem(pose=(0, -.1, 0)):
    """Inverse kinematics: Cartesian pose [x, y, z] (m) -> [d1, th2, th3].

    d1 is the prismatic travel (pulley rotation); th2/th3 are the revolute
    joint angles on the s3 <= 0 branch. Prints a warning and returns None
    when the pose is outside the arm's reach.

    Fixes: mutable list default replaced by a tuple (read-only use, so
    behavior is unchanged); bare ``except:`` narrowed to the ValueError that
    math.sqrt raises; local ``k`` renamed so it no longer shadows the
    module-level motor constant ``k``.
    """
    d1 = pose[2]/(r_pulley)
    c3 = float(((pose[0]-a1)**2+pose[1]**2-L1**2-L2**2)/(2*L1*L2))
    try:
        s3 = -math.sqrt(1-c3**2)
    except ValueError:  # |c3| > 1: target is outside the workspace
        print('Whoops!')
        return
    th3 = math.atan2(s3,c3)
    kd = ((pose[0]-a1)**2+L1**2+pose[1]**2-L2**2)/(2*L1)
    # NOTE(review): this sqrt can also raise ValueError for some poses and is
    # uncaught, exactly as in the original code.
    th2 = math.atan2(math.sqrt((pose[0]-a1)**2+pose[1]**2-kd**2),kd)+math.atan2(pose[1],pose[0]-a1)
    return [d1, th2, th3]
def keypose(read):
    """Look up the Cartesian position of key *read* and convert mm -> m."""
    coords = keydic.get(read)
    if coords is None:
        print('Whoops! No keys found!')
        return
    return [float(c) / 1000 for c in coords]
def control1(pos_d):
    """Joint-space move: bang-bang positioning of motor 1, then PD control
    with gravity compensation on motors 2 and 3 until the joint errors fall
    inside tolerance.

    pos_d -- desired [d1, th2, th3] joint positions.
    Logs angle/velocity samples to the 'data' file; Ctrl-C stops all motors.

    Fixes: the original ``try:`` had no ``except``/``finally`` at all (a
    SyntaxError for the whole file) -- a KeyboardInterrupt handler mirroring
    control2() is added; the log file is now closed instead of leaked.
    """
    try:
        tolerance = 0.005
        pos_error1 = 100
        f = open('data', 'a')
        f.write('New Data Theta 1 \n')
        print("Controlling motor 1")
        # --- motor 1: full-duty bang-bang positioning --------------------
        while abs(pos_error1) >= tolerance:
            pos_error1 = pos_d[0] - countstorad(encoder1_count)
            duty_cycle_1 = 100
            if pos_error1 > 0:
                clockwise(duty_cycle_1, p1, p2, m1_en_pin)
            elif pos_error1 < 0:
                clockwise(100 - duty_cycle_1, p1, p2, m1_en_pin)
            row = str(countstorad(encoder1_count)) + '\t' + str(vel1) + '\n'
            f.write(row)
        p1.stop()
        p2.stop()
        time.sleep(2)
        # --- motors 2 and 3: PD control with gravity compensation --------
        print("Controlling Motors 2 and 3")
        f.write('New Data Theta 2, Theta 3 \n')
        position_error = [100, 100]
        while max(abs(position_error[0]), abs(position_error[1])) > tolerance:
            pos_current = [countstorad(encoder2_count), countstorad(encoder3_count)]
            angular_velocity = [vel2, vel3]
            row = str(countstorad(encoder2_count)) + '\t' + str(vel2) + '\t' + str(countstorad(encoder3_count)) + '\t' + str(vel3) + '\n'
            f.write(row)
            # gravity torque estimate g(q) for the two links
            g_q = [(m_link1 * len_link1 + m_motor * L1 + m_link2 * L1) * math.cos(pos_current[0]) +
                   m_link2 * len_link2 * math.cos(pos_current[0] + pos_current[1]),
                   m_link2 * len_link2 * math.cos(pos_current[0] + pos_current[1])]
            position_error = [pos_d[1] - pos_current[0], pos_d[2] - pos_current[1]]
            # u = PD control with gravity compensation, torque-saturated
            u = [g_q[0] + K_p * position_error[0] - K_d * angular_velocity[0],
                 g_q[1] + K_p * position_error[1] - K_d * angular_velocity[1]]
            for i in range(2):
                if u[i] >= 0.08:
                    u[i] = 0.08
                elif u[i] <= -0.08:
                    u[i] = -0.08
            # duty from desired voltage via the DC-motor model V = R*u/k + k*w
            V_d = [R * u[0] / k + k * angular_velocity[0], R * u[1] / k + k * angular_velocity[1]]
            duty = [V_d[0] / V * 100, V_d[1] / V * 100]
            # motor 2 duty clamp/deadband
            if duty[0] > 0:
                if duty[0] >= 100:
                    duty[0] = 100
                elif duty[0] <= 70:
                    duty[0] = 50
                clockwise(duty[0], p3, p4, m2_en_pin)
            else:
                if duty[0] <= -100:
                    duty[0] = 0
                elif duty[0] > -100 and duty[0] <= -70:
                    duty[0] = 100 + duty[0]
                elif duty[0] > -70:
                    duty[0] = 50
                clockwise(duty[0], p3, p4, m2_en_pin)
            # motor 3 duty clamp/deadband
            if duty[1] > 0:
                if duty[1] >= 100:
                    duty[1] = 100
                elif duty[1] <= 70:
                    duty[1] = 50
                clockwise(duty[1], p5, p6, m3_en_pin)
            else:
                if duty[1] <= -100:
                    duty[1] = 0
                elif duty[1] > -100 and duty[1] <= -70:
                    duty[1] = 100 + duty[1]
                elif duty[1] > -70:
                    duty[1] = 50
                clockwise(duty[1], p5, p6, m3_en_pin)
        f.close()  # was leaked before
    except KeyboardInterrupt:
        # Emergency stop, mirroring control2()'s handler.
        p1.stop()
        p2.stop()
        p3.stop()
        p4.stop()
        p5.stop()
        p6.stop()
        io.cleanup()
def control2(pos_d):
    """Joint-space move used for homing: PD control with gravity compensation
    on motors 2 and 3 first, then bang-bang positioning of motor 1.

    pos_d -- desired [d1, th2, th3] joint positions.
    Logs samples to 'data'; Ctrl-C stops all motors and cleans up the GPIO.

    Fixes: ``f.open('data','a')`` referenced a name that did not exist (now
    ``f = open(...)``); the motor-1 log line was missing a ``+`` between
    ``'\\t'`` and ``str(vel1)`` (a SyntaxError) and its trailing newline;
    the log file is now closed instead of leaked.
    """
    try:
        tolerance = 0.005
        pos_error1 = 100
        # --- motors 2 and 3: PD control with gravity compensation --------
        print("Controlling Motors 2 and 3")
        position_error = [100, 100]
        f = open('data', 'a')
        f.write('New Data Theta 2 Theta 3 \n')
        while max(abs(position_error[0]), abs(position_error[1])) > tolerance:
            pos_current = [countstorad(encoder2_count), countstorad(encoder3_count)]
            angular_velocity = [vel2, vel3]
            row = str(countstorad(encoder2_count)) + '\t' + str(vel2) + '\t' + str(countstorad(encoder3_count)) + '\t' + str(vel3) + '\n'
            f.write(row)
            # gravity torque estimate g(q) for the two links
            g_q = [(m_link1 * len_link1 + m_motor * L1 + m_link2 * L1) * math.cos(pos_current[0]) +
                   m_link2 * len_link2 * math.cos(pos_current[0] + pos_current[1]),
                   m_link2 * len_link2 * math.cos(pos_current[0] + pos_current[1])]
            position_error = [pos_d[1] - pos_current[0], pos_d[2] - pos_current[1]]
            # u = PD control with gravity compensation, torque-saturated
            u = [g_q[0] + K_p * position_error[0] - K_d * angular_velocity[0],
                 g_q[1] + K_p * position_error[1] - K_d * angular_velocity[1]]
            for i in range(2):
                if u[i] >= 0.08:
                    u[i] = 0.08
                elif u[i] <= -0.08:
                    u[i] = -0.08
            # duty from desired voltage via the DC-motor model V = R*u/k + k*w
            V_d = [R * u[0] / k + k * angular_velocity[0], R * u[1] / k + k * angular_velocity[1]]
            duty = [V_d[0] / V * 100, V_d[1] / V * 100]
            # motor 2 duty clamp/deadband
            if duty[0] > 0:
                if duty[0] >= 100:
                    duty[0] = 100
                elif duty[0] <= 70:
                    duty[0] = 50
                clockwise(duty[0], p3, p4, m2_en_pin)
            else:
                if duty[0] <= -100:
                    duty[0] = 0
                elif duty[0] > -100 and duty[0] <= -70:
                    duty[0] = 100 + duty[0]
                elif duty[0] > -70:
                    duty[0] = 50
                clockwise(duty[0], p3, p4, m2_en_pin)
            # motor 3 duty clamp/deadband
            if duty[1] > 0:
                if duty[1] >= 100:
                    duty[1] = 100
                elif duty[1] <= 70:
                    duty[1] = 50
                clockwise(duty[1], p5, p6, m3_en_pin)
            else:
                if duty[1] <= -100:
                    duty[1] = 0
                elif duty[1] > -100 and duty[1] <= -70:
                    duty[1] = 100 + duty[1]
                elif duty[1] > -70:
                    duty[1] = 50
                clockwise(duty[1], p5, p6, m3_en_pin)
        # --- motor 1: full-duty bang-bang positioning --------------------
        print("Controlling motor 1")
        while abs(pos_error1) >= tolerance:
            row = str(countstorad(encoder1_count)) + '\t' + str(vel1) + '\n'
            f.write(row)
            pos_error1 = pos_d[0] - countstorad(encoder1_count)
            duty_cycle_1 = 100
            if pos_error1 > 0:
                clockwise(duty_cycle_1, p1, p2, m1_en_pin)
            elif pos_error1 < 0:
                clockwise(100 - duty_cycle_1, p1, p2, m1_en_pin)
        f.close()  # was leaked before
        p1.stop()
        p2.stop()
        p3.stop()
        p4.stop()
        p5.stop()
        p6.stop()
    except KeyboardInterrupt:
        # Emergency stop on Ctrl-C.
        p1.stop()
        p2.stop()
        p3.stop()
        p4.stop()
        p5.stop()
        p6.stop()
        io.cleanup()
def correctEncoders(desired):
    """Overwrite the encoder counters with the counts for *desired* angles."""
    global encoder1_count, encoder2_count, encoder3_count
    encoder1_count = radtocount(desired[0])
    encoder2_count = radtocount(desired[1])
    encoder3_count = radtocount(desired[2])
def taskcontrol(command_list):
    # Top-level task sequencer: for each key name in command_list, return to
    # a home pose, then move to the key's pose via inverse kinematics.
    initializeEncoders()
    n = len(command_list)
    for i in xrange(0,n):  # NOTE(review): xrange => this file is Python 2
        # current joint-space position from the encoder counters
        current_pos = [countstorad(encoder1_count), countstorad(encoder2_count), countstorad(encoder3_count)]
        # nearest home position (theta 1 is arbitrary)
        nearest_home = [current_pos[0], 0, -math.pi/2]
        print(current_pos)
        # NOTE(review): exact float comparison -- a tolerance check is
        # probably intended; as written this branch is almost always taken.
        if (current_pos[1] != nearest_home[1]) or (current_pos[2] != nearest_home[2]):
            print("Going to nearest home")
            control2(nearest_home)
            correctEncoders(nearest_home)
            print("I'm Home!")
            time.sleep(1)
        # Cartesian pose of the requested key (m) ...
        cart_pos_d = keypose(command_list[i])
        # ... converted to joint space via inverse kinematics
        joint_pos_d = invskinem(cart_pos_d)
        print("Move to "+command_list[i])
        control1(joint_pos_d)
        correctEncoders(joint_pos_d)
        print("Motion Complete!")
        time.sleep(1)
    # end for loop: return to the global home position
    abs_home = [0, 0, -math.pi/2]
    print("Going to Absolute Home Position")
    control2(abs_home)
    correctEncoders(abs_home)
# This will run when executing the python file, causing the robot to type 'hello'
command_list = ['h','e','l','l','o']
taskcontrol(command_list) |
9,621 | 1ef40d4162ca1b1bd6a5a5010485c78eb9d8d736 | # coding=utf-8
import datetime
from django.http import JsonResponse
from django.shortcuts import render, redirect
from models import *
from hashlib import sha1
from user_decorators import user_login
from df_goods.models import GoodsInfo
# Create your views here.
def register(request):
    """Render the account-registration page."""
    ctx = {'title': '注册', 'top': '0'}
    return render(request, 'df_user/register.html', ctx)
def login(request):
    """Render the login page."""
    ctx = {'title': '登录', 'top': '0'}
    return render(request, 'df_user/login.html', ctx)
def register_handle(request):
    """Process the registration form: validate, hash the password, save.

    Redirects back to the registration page when the two passwords differ or
    the username is already taken, otherwise on to the login page.
    """
    params = request.POST  # renamed from ``dict`` to stop shadowing the builtin
    print(params)
    uname = params.get('user_name')
    upwd = params.get('pwd')
    upwd2 = params.get('cpwd')
    uemail = params.get('email')
    if upwd != upwd2:
        return redirect('/user/register/')
    # NOTE(review): unsalted SHA-1 is a weak password hash; Django's built-in
    # auth hashers would be safer, but switching would invalidate stored hashes.
    s1 = sha1()
    s1.update(upwd)  # assumes Python 2 str; py3 would need upwd.encode()
    upwd_sha1 = s1.hexdigest()
    user = UserInfo()
    user.uname = uname
    user.upwd = upwd_sha1
    user.uemail = uemail
    # Reject duplicate usernames.
    if UserInfo.objects.filter(uname=uname).count() == 0:
        user.save()
    else:
        return redirect('/user/register/')
    return redirect('/user/login/')
def register_valid(request):
    """AJAX duplicate-username check: returns how many users already use it."""
    taken = UserInfo.objects.filter(uname=request.GET.get('uname')).count()
    return JsonResponse({'valid': taken})
def login_handle(request):
    """Process the login form: verify the SHA-1 password hash, set up the
    session, and optionally remember the username in a 7-day cookie."""
    dict=request.POST  # NOTE(review): shadows the builtin ``dict``
    uname=dict.get('username')
    upwd=dict.get('pwd')
    uname_jz=dict.get('name_jz','0')  # "remember me" checkbox flag
    s1=sha1()
    s1.update(upwd)  # assumes Python 2 str; py3 would need upwd.encode()
    upwd_sha1=s1.hexdigest()
    context={'title': '登录','uname':uname,'upwd':upwd,'top':'0'}
    users=UserInfo.objects.filter(uname=uname)
    if len(users)==0:
        # unknown username
        context['name_error']='1'
        return render(request, 'df_user/login.html',context)
    else:
        if users[0].upwd == upwd_sha1:  # login succeeded
            # record the currently logged-in user in the session
            request.session['uid'] = users[0].id
            request.session['uname'] = uname
            # redirect back to wherever the user came from
            path = request.session.get('url_path', '/')
            response = redirect(path)
            if uname_jz == '1':
                # remember the username for 7 days
                response.set_cookie('uname',uname, expires=datetime.datetime.now()+datetime.timedelta(days=7))
            else:
                # forget any previously remembered username
                response.set_cookie('uname','',max_age=-1)
            return response
        else:
            # wrong password
            context['pwd_error']='1'
            return render(request,'df_user/login.html',context)
# @user_login
# def info(request):
# if request.session.has_key('uid'):
# return render(request, 'df_user/info.html')
# else:
# return redirect('/user/login/')
@user_login
def info(request):
    """Render the user-center page, including recently browsed goods."""
    user = UserInfo.objects.get(pk=request.session['uid'])
    # Recently browsed goods ids live in a comma-separated cookie.
    recent_ids = request.COOKIES.get('goods_ids', '').split(',')
    goods_list = [GoodsInfo.objects.get(id=gid) for gid in recent_ids if gid]
    ctx = {'title': '用户中心', 'user': user, 'goods_list': goods_list}
    return render(request, 'df_user/info.html', ctx)
@user_login
def order(request):
    """Render the user's order-list page (login required)."""
    return render(request, 'df_user/order.html')
@user_login
def site(request):
    """Show and (on POST) update the user's shipping address."""
    user = UserInfo.objects.get(pk=request.session['uid'])
    if request.method == 'POST':
        form = request.POST
        user.ushou = form.get('ushou')
        user.uaddress = form.get('uaddress')
        user.uphone = form.get('uphone')
        user.save()
    return render(request, 'df_user/site.html', {'title': '收货地址', 'user': user})
def logout(request):
    """Drop the whole session and send the visitor back to the login page."""
    request.session.flush()
    return redirect('/user/login/')
def islogin(request):
    """AJAX endpoint: report whether the visitor has a logged-in session."""
    # ``in`` replaces the deprecated dict.has_key() (removed in Python 3);
    # behavior is identical on Python 2.
    result = 1 if 'uid' in request.session else 0
    return JsonResponse({'islogin': result})
9,622 | 3f3d7cdf7732b2a1568cd97574e1443225667327 | from urllib.request import urlopen
from json import loads
# Fetch the current NBP (Polish National Bank) table-A exchange rates.
with urlopen('http://api.nbp.pl/api/exchangerates/tables/A/') as site:
    data = loads(site.read().decode('utf-8'))
rates = data[0]['rates']
# Expected input like "100 USD": an amount, a single space, a currency code.
exchange = input('Jaką wartość chcesz wymienić na złotówki? ')
value, code = exchange.split(' ')
value = float(value)
# NOTE(review): an unknown currency code leaves ``rate`` empty and the
# ``rate[0]`` below raises IndexError -- consider validating first.
rate = list(filter(lambda x: x['code'] == code, rates))
print(f'Otrzymujesz {value * rate[0]["mid"]} PLN')
9,623 | fd904c70b350c650362c55ccb3b915371f24e267 | import logging
import os
import callbacks
import commands
import dice
import echo
import inline
import keyboards
import mybot
import myenigma
import poll
import rocketgram
import send
import unknown
# avoid to remove "unused" imports by optimizers
def fix_imports():
    """Reference the handler modules so "unused import" cleanups keep them."""
    for module in (callbacks, commands, echo, keyboards, myenigma,
                   inline, send, dice, unknown, poll):
        _ = module
logger = logging.getLogger('minibots.engine')
def main():
    """Configure logging, build the bot and run it in the selected mode.

    The MODE env var selects the executor: 'updates' (long polling),
    'webhook', or 'heroku' (auto-detected via the DYNO env var).
    """
    mode = os.environ.get('MODE')
    if mode is None and 'DYNO' in os.environ:
        mode = 'heroku'
    if mode not in ('updates', 'webhook', 'heroku'):
        raise TypeError('MODE must be `updates` or `webhook` or `heroku`!')
    # Bug fix: logging.basicConfig() does nothing once the root logger is
    # configured, so the second call never applied level=ERROR -- configure
    # format and level in a single call instead.
    logging.basicConfig(format='%(asctime)s - %(levelname)-5s - %(name)-25s: %(message)s',
                        level=logging.ERROR)
    logging.getLogger('engine').setLevel(logging.INFO)
    logging.getLogger('mybot').setLevel(logging.DEBUG)
    logging.getLogger('rocketgram').setLevel(logging.DEBUG)
    logging.getLogger('rocketgram.raw.in').setLevel(logging.INFO)
    logging.getLogger('rocketgram.raw.out').setLevel(logging.INFO)
    # Bug fix: the adjacent literals 'bot''s' silently concatenated to "bots".
    logger.info("Starting bot's template in %s...", mode)
    bot = mybot.get_bot(os.environ['TOKEN'].strip())
    if mode == 'updates':
        rocketgram.UpdatesExecutor.run(bot, drop_updates=bool(int(os.environ.get('DROP_UPDATES', 0))))
    else:
        port = int(os.environ['PORT']) if mode == 'heroku' else int(os.environ.get('WEBHOOK_PORT', 8080))
        rocketgram.AioHttpExecutor.run(bot,
                                       os.environ['WEBHOOK_URL'].strip(),
                                       os.environ.get('WEBHOOK_PATH', '/').strip(),
                                       host='0.0.0.0', port=port,
                                       drop_updates=bool(int(os.environ.get('DROP_UPDATES', 0))),
                                       webhook_remove=not mode == 'heroku')
    logger.info('Bye!')
if __name__ == '__main__':
main()
|
9,624 | 7cfca56907f0bca7fd62e506414641f942527d1a | import os
TEMP_DIR = os.path.expanduser('~/Documents/MFA')
def make_safe(value):
    """Render *value* as a string, lowering booleans to 'true'/'false'."""
    if isinstance(value, bool):
        return 'true' if value else 'false'
    return str(value)
class MonophoneConfig(object):
    '''
    Configuration class for monophone training

    Scale options defaults to::

        ['--transition-scale=1.0', '--acoustic-scale=0.1', '--self-loop-scale=0.1']

    If ``align_often`` is True in the keyword arguments, ``realign_iters`` will be::

        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20, 23, 26, 29, 32, 35, 38]

    Otherwise, ``realign_iters`` will be::

        [1, 5, 10, 15, 20, 25, 30, 35, 38]

    (The previous docstring had these two lists swapped relative to the code.)

    Attributes
    ----------
    num_iters : int
        Number of training iterations to perform, defaults to 40
    scale_opts : list
        Options for specifying scaling in alignment
    beam : int
        Default beam width for alignment, defaults = 10
    retry_beam : int
        Beam width to fall back on if no alignment is produced, defaults to 40
    max_iter_inc : int
        Last iteration on which to increase #Gauss; derived as ``num_iters - 10``
    max_gauss_count : int
        Total number of gaussians, defaults to 1000
    boost_silence : float
        Factor by which to boost silence likelihoods in alignment, defaults to 1.0
    realign_iters : list
        List of iterations to perform alignment
    stage : int
        Not used
    power : float
        Exponent for number of gaussians according to occurrence counts, defaults to 0.25
    do_fmllr : bool
        Specifies whether to do speaker adaptation, defaults to False
    do_lda_mllt : bool
        Specifies whether to do LDA + MLLT transformation, defaults to False
        (the previous docstring incorrectly said True)

    Note
    ----
    ``inc_gauss_count`` reads ``self.initial_gauss_count``, which is never set
    here -- it must be supplied via ``**kwargs`` (the subclasses do so);
    otherwise accessing that property raises AttributeError.
    '''

    def __init__(self, **kwargs):
        self.num_iters = 40

        self.scale_opts = ['--transition-scale=1.0',
                           '--acoustic-scale=0.1',
                           '--self-loop-scale=0.1']
        self.beam = 10
        self.retry_beam = 40
        self.max_gauss_count = 1000
        self.boost_silence = 1.0
        if kwargs.get('align_often', False):
            self.realign_iters = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14,
                                  16, 18, 20, 23, 26, 29, 32, 35, 38]
        else:
            self.realign_iters = [1, 5, 10, 15, 20, 25, 30, 35, 38]
        self.stage = -4
        self.power = 0.25

        self.do_fmllr = False
        self.do_lda_mllt = False

        # Arbitrary keyword overrides; subclasses funnel their defaults here.
        for k, v in kwargs.items():
            setattr(self, k, v)

    @property
    def max_iter_inc(self):
        # Stop growing the Gaussian count 10 iterations before the end.
        return self.num_iters - 10

    @property
    def inc_gauss_count(self):
        # Per-iteration Gaussian increment; requires ``initial_gauss_count``.
        return int((self.max_gauss_count - self.initial_gauss_count) / self.max_iter_inc)
class TriphoneConfig(MonophoneConfig):
'''
Configuration class for triphone training
Scale options defaults to::
['--transition-scale=1.0', '--acoustic-scale=0.1', '--self-loop-scale=0.1']
If ``align_often`` is True in the keyword arguments, ``realign_iters`` will be::
[1, 5, 10, 15, 20, 25, 30, 35, 38]
Otherwise, ``realign_iters`` will be::
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20, 23, 26, 29, 32, 35, 38]
Attributes
----------
num_iters : int
Number of training iterations to perform, defaults to 35
scale_opts : list
Options for specifying scaling in alignment
beam : int
Default beam width for alignment, defaults = 10
retry_beam : int
Beam width to fall back on if no alignment is produced, defaults to 40
max_iter_inc : int
Last iter to increase #Gauss on, defaults to 30
totgauss : int
Total number of gaussians, defaults to 1000
boost_silence : float
Factor by which to boost silence likelihoods in alignment, defaults to 1.0
realign_iters : list
List of iterations to perform alignment
stage : int
Not used
power : float
Exponent for number of gaussians according to occurrence counts, defaults to 0.25
do_fmllr : bool
Specifies whether to do speaker adaptation, defaults to False
do_lda_mllt : bool
Spacifies whether to do LDA + MLLT transformation, default to False
num_states : int
Number of states in the decision tree, defaults to 3100
num_gauss : int
Number of gaussians in the decision tree, defaults to 50000
cluster_threshold : int
For build-tree control final bottom-up clustering of leaves, defaults to 100
'''
def __init__(self, **kwargs):
defaults = {'num_iters': 35,
'initial_gauss_count': 3100,
'max_gauss_count': 50000,
'cluster_threshold': 100,
'do_lda_mllt': False}
defaults.update(kwargs)
super(TriphoneConfig, self).__init__(**defaults)
class TriphoneFmllrConfig(TriphoneConfig):
    '''
    Configuration class for speaker-adapted triphone training

    Scale options defaults to::

        ['--transition-scale=1.0', '--acoustic-scale=0.1', '--self-loop-scale=0.1']

    If ``align_often`` is True (the default here), ``realign_iters`` will be::

        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20, 23, 26, 29, 32, 35, 38]

    Otherwise, ``realign_iters`` will be::

        [1, 5, 10, 15, 20, 25, 30, 35, 38]

    ``fmllr_iters`` defaults to::

        [2, 4, 6, 12]

    Attributes
    ----------
    num_iters : int
        Number of training iterations to perform, defaults to 35
    scale_opts : list
        Options for specifying scaling in alignment
    beam : int
        Default beam width for alignment, defaults = 10
    retry_beam : int
        Beam width to fall back on if no alignment is produced, defaults to 40
    boost_silence : float
        Factor by which to boost silence likelihoods in alignment, defaults to 1.0
    realign_iters : list
        List of iterations to perform alignment
    stage : int
        Not used
    power : float
        Exponent for number of gaussians according to occurrence counts, defaults to 0.25
    do_fmllr : bool
        Specifies whether to do speaker adaptation, defaults to True
    do_lda_mllt : bool
        Specifies whether to do LDA + MLLT transformation, defaults to False
    cluster_threshold : int
        For build-tree control final bottom-up clustering of leaves, defaults to 100
    fmllr_update_type : str
        Type of fMLLR estimation, defaults to ``'full'``
    fmllr_iters : list
        List of iterations to perform fMLLR estimation
    fmllr_power : float
        Defaults to 0.2
    silence_weight : float
        Weight on silence in fMLLR estimation
    '''

    def __init__(self, align_often=True, **kwargs):
        defaults = {'do_fmllr': True,
                    'do_lda_mllt': False,
                    'fmllr_update_type': 'full',
                    'fmllr_iters': [2, 4, 6, 12],
                    'fmllr_power': 0.2,
                    'silence_weight': 0.0}
        defaults.update(kwargs)
        # Bug fix: ``align_often`` was accepted but never forwarded to the
        # base class, so the frequent-realignment schedule could never be
        # enabled. Forward it (an explicit kwarg still wins via update above).
        defaults.setdefault('align_often', align_often)
        super(TriphoneFmllrConfig, self).__init__(**defaults)
# For nnets
class LdaMlltConfig(object):
'''
Configuration class for LDA + MLLT training
Scale options defaults to::
['--transition-scale=1.0', '--acoustic-scale=0.1', '--self-loop-scale=0.1']
Attributes
----------
num_iters : int
Number of training iterations to perform
do_fmllr : bool
Specifies whether to do speaker adaptation, defaults to False
do_lda_mllt : bool
Spacifies whether to do LDA + MLLT transformation, default to True
scale_opts : list
Options for specifying scaling in alignment
num_gauss : int
Number of gaussians in the decision tree, defaults to 50000
beam : int
Default beam width for alignment, defaults = 10
retry_beam : int
Beam width to fall back on if no alignment is produced, defaults to 40
cluster_threshold : int
For build-tree control final bottom-up clustering of leaves, defaults to 100
boost_silence : float
Factor by which to boost silence likelihoods in alignment, defaults to 1.0
realign_iters : list
List of iterations to perform alignment
stage : int
Not used
power : float
Exponent for number of gaussians according to occurrence counts, defaults to 0.25
randprune : float
Approximately the ratio by which we will speed up the LDA and MLLT calculations via randomized pruning
'''
def __init__(self, **kwargs):
self.num_iters = 13
self.do_fmllr = False
self.do_lda_mllt = True
self.scale_opts = ['--transition-scale=1.0',
'--acoustic-scale=0.1',
'--self-loop-scale=0.1']
self.num_gauss = 5000
self.beam = 10
self.retry_beam = 40
self.initial_gauss_count = 5000
self.cluster_threshold = -1
self.max_gauss_count = 10000
self.boost_silence = 1.0
self.realign_iters = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
self.stage = -5
self.power = 0.25
self.dim = 40
self.careful = False
self.randprune = 4.0
self.splice_opts = ['--left-context=3', '--right-context=3']
self.cluster_thresh = -1
self.norm_vars = False
for k, v in kwargs.items():
setattr(self, k, v)
@property
def max_iter_inc(self):
return self.num_iters
@property
def inc_gauss_count(self):
return int((self.max_gauss_count - self.initial_gauss_count) / self.max_iter_inc)
class DiagUbmConfig(object):
    '''
    Configuration class for diagonal UBM training

    Attributes
    ----------
    num_iters : int
        Number of training iterations to perform
    num_gselect : int
        Number of Gaussian-selection indices to use while training the model
    num_gauss : int
        Number of Gaussians after clustering down.
    '''

    def __init__(self, **kwargs):
        self.num_iters = 4
        self.num_gselect = 30
        self.num_frames = 400000
        self.num_gauss = 256
        self.num_iters_init = 20
        self.initial_gauss_proportion = 0.5
        self.subsample = 2
        self.cleanup = True
        self.min_gaussian_weight = 0.0001
        self.remove_low_count_gaussians = True
        self.num_threads = 32
        self.splice_opts = ['--left-context=3', '--right-context=3']
        # Bug fix: **kwargs was accepted but silently ignored, unlike every
        # other config class in this module -- apply overrides the same way.
        for k, v in kwargs.items():
            setattr(self, k, v)
class iVectorExtractorConfig(object):
    '''
    Configuration class for i-vector extractor training

    Attributes
    ----------
    ivector_dim : int
        Dimension of the extracted i-vector
    ivector_period : int
        Number of frames between i-vector extractions
    num_iters : int
        Number of training iterations to perform
    num_gselect : int
        Gaussian-selection using diagonal model: number of Gaussians to select
    posterior_scale : float
        Scale on the acoustic posteriors, intended to account for inter-frame correlations
    min_post : float
        Minimum posterior to use (posteriors below this are pruned out)
    subsample : int
        Speeds up training; training on every x'th feature
    max_count : int
        The use of this option (e.g. --max-count 100) can make iVectors more consistent for different lengths of utterance, by scaling up the prior term when the data-count exceeds this value. The data-count is after posterior-scaling, so assuming the posterior-scale is 0.1, --max-count 100 starts having effect after 1000 frames, or 10 seconds of data.
    '''

    def __init__(self, **kwargs):
        self.ivector_dim = 100
        self.ivector_period = 10
        self.num_iters = 10
        self.num_gselect = 5
        self.posterior_scale = 0.1
        self.min_post = 0.025
        self.subsample = 2
        self.max_count = 0
        self.num_threads = 4
        self.num_processes = 4
        self.splice_opts = ['--left-context=3', '--right-context=3']
        self.compress = False
        # Bug fix: **kwargs was accepted but silently ignored -- apply
        # overrides like the sibling config classes do.
        for k, v in kwargs.items():
            setattr(self, k, v)
class NnetBasicConfig(object):
    '''
    Configuration class for neural network training

    Attributes
    ----------
    num_epochs : int
        Number of epochs of training; number of iterations is worked out from this
    iters_per_epoch : int
        Number of iterations per epoch
    realign_times : int
        How many times to realign during training; this will equally space them over the iterations
    beam : int
        Default beam width for alignment
    retry_beam : int
        Beam width to fall back on if no alignment is produced
    initial_learning_rate : float
        The initial learning rate at the beginning of training
    final_learning_rate : float
        The final learning rate by the end of training
    pnorm_input_dim : int
        The input dimension of the pnorm component
    pnorm_output_dim : int
        The output dimension of the pnorm component
    p : int
        Pnorm parameter
    hidden_layer_dim : int
        Dimension of a hidden layer
    samples_per_iter : int
        Number of samples seen per job per each iteration; used when getting examples
    shuffle_buffer_size : int
        Controls randomization of the samples on each iteration; 0 or a large
        value gives complete randomization at the cost of memory and disk I/O.
    add_layers_period : int
        Number of iterations between addition of a new layer
    num_hidden_layers : int
        Number of hidden layers
    randprune : float
        Speeds up LDA
    alpha : float
        Relates to preconditioning
    mix_up : int
        Number of components to mix up to
    prior_subset_size : int
        Number of samples per job for computing priors
    update_period : int
        How often the preconditioning subspace is updated
    num_samples_history : int
        Relates to online preconditioning
    preconditioning_rank_in : int
        Relates to online preconditioning
    preconditioning_rank_out : int
        Relates to online preconditioning
    '''
    def __init__(self, **kwargs):
        self.num_epochs = 4
        self.num_epochs_extra = 5
        self.num_iters_final = 20
        self.iters_per_epoch = 2
        self.realign_times = 0
        self.beam = 10
        # NOTE(review): 15000000 looks unusually large for a retry beam
        # (typical Kaldi value is 15); left unchanged pending confirmation.
        self.retry_beam = 15000000
        self.initial_learning_rate = 0.32
        self.final_learning_rate = 0.032
        self.bias_stddev = 0.5
        self.pnorm_input_dim = 3000
        self.pnorm_output_dim = 300
        self.p = 2
        self.shrink_interval = 5
        self.shrink = True
        self.num_frames_shrink = 2000
        self.final_learning_rate_factor = 0.5
        self.hidden_layer_dim = 50
        self.samples_per_iter = 200000
        self.shuffle_buffer_size = 5000
        self.add_layers_period = 2
        self.num_hidden_layers = 3
        self.modify_learning_rates = False
        self.last_layer_factor = 0.1
        self.first_layer_factor = 1.0
        self.splice_width = 3
        self.randprune = 4.0
        self.alpha = 4.0
        self.max_change = 10.0
        self.mix_up = 12000  # From run_nnet2.sh
        self.prior_subset_size = 10000
        self.boost_silence = 0.5
        self.update_period = 4
        self.num_samples_history = 2000
        self.max_change_per_sample = 0.075
        self.precondition_rank_in = 20
        self.precondition_rank_out = 80
        # Bug fix: kwargs were accepted but silently ignored; apply overrides
        # for consistency with the sibling configuration classes.
        for k, v in kwargs.items():
            setattr(self, k, v)
class MfccConfig(object):
    '''
    Configuration class for MFCC generation

    The ``config_dict`` currently stores one key ``'use-energy'`` which
    defaults to False

    Parameters
    ----------
    output_directory : str
        Path to directory to save configuration files for Kaldi
    kwargs : dict, optional
        If specified, updates ``config_dict`` with this dictionary

    Attributes
    ----------
    config_dict : dict
        Dictionary of configuration parameters
    '''

    def __init__(self, output_directory, job=None, kwargs=None):
        self.job = job
        self.config_dict = {'use-energy': False, 'frame-shift': 10}
        if kwargs is not None:
            self.config_dict.update(kwargs)
        self.output_directory = output_directory
        self.write()

    def update(self, kwargs):
        '''
        Update configuration dictionary with new dictionary

        Parameters
        ----------
        kwargs : dict
            Dictionary of new parameter values
        '''
        self.config_dict.update(kwargs)
        self.write()

    @property
    def config_directory(self):
        # Directory for Kaldi config files; created on first access.
        directory = os.path.join(self.output_directory, 'config')
        os.makedirs(directory, exist_ok=True)
        return directory

    @property
    def path(self):
        # Per-job config file when a job number is set, shared file otherwise.
        filename = 'mfcc.conf' if self.job is None else 'mfcc.{}.conf'.format(self.job)
        return os.path.join(self.config_directory, filename)

    def write(self):
        '''
        Write configuration dictionary to a file for use in Kaldi binaries
        '''
        lines = ['--{}={}\n'.format(k, make_safe(v)) for k, v in self.config_dict.items()]
        with open(self.path, 'w', encoding='utf8') as f:
            f.writelines(lines)
|
9,625 | e8092faed22607f9c8f18a79709022037ff647bf | from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List
from sqlalchemy.sql.functions import current_date, current_user
from db.session import get_db
from db.models.jobs import Job
from schemas.jobs import JobCreate, ShowJob
from db.repository.jobs import create_new_job, delete_job_by_id, retrieve_job, list_jobs, update_job_by_id
from apis.version1.route_login import get_current_user_from_token
from db.models.users import User
router = APIRouter()
@router.post("/create-job", response_model=ShowJob)
def create_job(job: JobCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):
    """Create a new job owned by the authenticated user and return it."""
    return create_new_job(job=job, db=db, owner_id=current_user.id)
@router.put("/update/{id}")
def update_job(id: int, job: JobCreate, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):
    """Update job `id`; 404 when the repository reports no matching row."""
    updated = update_job_by_id(id, job, db, current_user.id)
    if updated == 0:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Job with id {id} does not exist",
        )
    return {"detail": "Successfully updated"}
@router.get("/get/{id}", response_model=ShowJob)
def retrieve_job_by_id(id: int, db: Session = Depends(get_db)):
    """Fetch a single job by id; 404 when it does not exist."""
    job = retrieve_job(id=id, db=db)
    if not job:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Job with id {id} does not exist",
        )
    return job
@router.get("/all", response_model=List[ShowJob])
def retrieve_all_jobs(db: Session = Depends(get_db)):
    """Return every job in the database."""
    return list_jobs(db=db)
@router.delete("/delete/{id}")
def delete_job(id: int, db: Session = Depends(get_db), current_user: User = Depends(get_current_user_from_token)):
    """Delete job `id` on behalf of the authenticated owner.

    Raises 404 when the job does not exist (or the repository otherwise
    reports no deletion by returning 0).
    """
    owner_id = current_user.id
    message = delete_job_by_id(id, db, owner_id=owner_id)
    if message == 0:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Job with id {id} does not exist"
        )
    # Key fixed from "details" to "detail" for consistency with update_job
    # and FastAPI's conventional error-payload shape.
    return {"detail": "Successfully deleted"}
|
9,626 | cda01bc7b0ebcfaf010bb87e7d9be34fd310d7a7 | import math
import torch
from torch import nn
from d2l import torch as d2l
def masked_softmax(X, valid_lens):
    """Softmax over the last axis of a 3-D tensor, masking padded positions.

    `valid_lens` is either None (plain softmax), a 1-D tensor of one valid
    length per batch row, or a 2-D tensor of one valid length per query.
    """
    if valid_lens is None:
        return nn.functional.softmax(X, dim=-1)
    shape = X.shape
    if valid_lens.dim() == 1:
        lens = torch.repeat_interleave(valid_lens, shape[1])
    else:
        lens = valid_lens.reshape(-1)
    # Masked entries get a very large negative value so softmax sends them to ~0.
    masked = d2l.sequence_mask(X.reshape(-1, shape[-1]), lens, value=-1e6)
    return nn.functional.softmax(masked.reshape(shape), dim=-1)
"""测试softmax掩码"""
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))
class AdditiveAttention(nn.Module):
    """Additive (Bahdanau-style) attention.

    Projects queries and keys into a shared `num_hiddens` space, sums them
    with broadcasting, and scores each (query, key) pair through tanh + W_v.
    """
    def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):
        super(AdditiveAttention, self).__init__(**kwargs)
        self.W_k = nn.Linear(key_size, num_hiddens, bias=False)
        self.W_q = nn.Linear(query_size, num_hiddens, bias=False)
        self.W_v = nn.Linear(num_hiddens, 1, bias=False)
        self.dropout = nn.Dropout(dropout)

    def forward(self, queries, keys, values, valid_lens):
        """Return the attention-weighted sum of `values`.

        Also stores the softmaxed weights in `self.attention_weights`.
        """
        queries, keys = self.W_q(queries), self.W_k(keys)
        # After unsqueezing:
        #   queries: (batch_size, num_queries, 1, num_hiddens)
        #   keys:    (batch_size, 1, num_kv_pairs, num_hiddens)
        # so the sum broadcasts over every (query, key) pair.
        features = queries.unsqueeze(2) + keys.unsqueeze(1)
        features = torch.tanh(features)
        # W_v has a single output, so drop the trailing dimension;
        # scores: (batch_size, num_queries, num_kv_pairs).
        # (Removed a leftover debug print of `scores` here.)
        scores = self.W_v(features).squeeze(-1)
        self.attention_weights = masked_softmax(scores, valid_lens)
        # values: (batch_size, num_kv_pairs, value_dim)
        return torch.bmm(self.dropout(self.attention_weights), values)
"""加性注意力函数测试"""
queries, keys = torch.normal(0, 1, (2, 1, 20)), torch.ones((2, 10, 2))
# values的小批量,两个值矩阵是相同的
values = torch.arange(40, dtype=torch.float32).reshape(1, 10, 4).repeat(2, 1, 1)
valid_lens = torch.tensor([2, 6])
attention = AdditiveAttention(key_size=2, query_size=20, num_hiddens=8, dropout=0.1)
attention.eval()
print(attention(queries, keys, values, valid_lens))
class DotProductAttention(nn.Module):
    """Scaled dot-product attention.

    Scores are query·key dot products scaled by 1/sqrt(d), where d is the
    shared feature dimension of queries and keys.
    """

    def __init__(self, dropout, **kwargs):
        super(DotProductAttention, self).__init__(**kwargs)
        self.dropout = nn.Dropout(dropout)

    def forward(self, queries, keys, values, valid_lens=None):
        """Return the attention-weighted sum of `values`.

        Shapes: queries (B, Q, d), keys (B, K, d), values (B, K, v),
        valid_lens (B,) or (B, Q). Weights are stored in
        `self.attention_weights`.
        """
        scale = math.sqrt(queries.shape[-1])
        # Transpose keys so bmm contracts over the feature dimension d.
        scores = torch.bmm(queries, keys.transpose(1, 2)) / scale
        self.attention_weights = masked_softmax(scores, valid_lens)
        return torch.bmm(self.dropout(self.attention_weights), values)
"""缩放点积注意力函数测试"""
queries = torch.normal(0, 1, (2, 1, 2))
attention = DotProductAttention(dropout=0.5)
attention.eval()
print(attention(queries, keys, values, valid_lens))
|
9,627 | 264da5a2ab7d5c311d8a59b06c81ea2156cefd76 | from flask import Flask, request, render_template
from utils import get_result
app = Flask(__name__)
@app.route('/')
def index():
    # Landing page with the sentence-input form.
    return render_template('index.html')
@app.route("/result", methods=["POST"])
def result():
form_data = request.form
sentence = form_data['sentence']
output = get_result(sentence)
return render_template('result.html', result=output)
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
|
9,628 | 51bc2668a9f9f4425166f9e6da72b7a1c37baa01 | """Tasks for managing Debug Information Files from Apple App Store Connect.
Users can instruct Sentry to download dSYM from App Store Connect and put them into Sentry's
debug files. These tasks enable this functionality.
"""
import logging
import pathlib
import tempfile
from typing import List, Mapping, Tuple
import requests
import sentry_sdk
from django.utils import timezone
from sentry.lang.native import appconnect
from sentry.models import (
AppConnectBuild,
LatestAppConnectBuildsCheck,
Project,
ProjectOption,
debugfile,
)
from sentry.tasks.base import instrumented_task
from sentry.utils import json, metrics, sdk
from sentry.utils.appleconnect import appstore_connect as appstoreconnect_api
logger = logging.getLogger(__name__)
# Sadly this decorator makes this entire function untyped for now as it does not itself have
# typing annotations. So we do all the work outside of the decorated task function to work
# around this.
# Since all these args must be pickled we keep them to built-in types as well.
@instrumented_task(name="sentry.tasks.app_store_connect.dsym_download", queue="appstoreconnect", ignore_result=True)  # type: ignore
def dsym_download(project_id: int, config_id: str) -> None:
    # Celery entry point; see inner_dsym_download for the typed implementation.
    inner_dsym_download(project_id=project_id, config_id=config_id)
def inner_dsym_download(project_id: int, config_id: str) -> None:
    """Downloads the dSYMs from App Store Connect and stores them in the Project's debug files.

    :param project_id: primary key of the :class:`Project` to ingest dSYMs for.
    :param config_id: id of the App Store Connect symbol-source config on that project.
    """
    with sdk.configure_scope() as scope:
        scope.set_tag("project", project_id)
        scope.set_tag("config_id", config_id)
    project = Project.objects.get(pk=project_id)
    config = appconnect.AppStoreConnectConfig.from_project_config(project, config_id)
    client = appconnect.AppConnectClient.from_config(config)
    listed_builds = client.list_builds()
    # process_builds persists any new builds and returns only the ones whose
    # dSYMs have not been fetched yet.
    builds = process_builds(project=project, config=config, to_process=listed_builds)
    if not builds:
        return
    for i, (build, build_state) in enumerate(builds):
        with sdk.configure_scope() as scope:
            scope.set_context("dsym_downloads", {"total": len(builds), "completed": i})
        with tempfile.NamedTemporaryFile() as dsyms_zip:
            try:
                client.download_dsyms(build, pathlib.Path(dsyms_zip.name))
            # For no dSYMs, let the build be marked as fetched so they're not
            # repeatedly re-checked every time this task is run.
            except appconnect.NoDsymsError:
                logger.debug("No dSYMs for build %s", build)
            # Moves on to the next build so we don't check off fetched. This url will
            # eventuallyTM be populated, so revisit it at a later time.
            except appconnect.PendingDsymsError:
                logger.debug("dSYM url currently unavailable for build %s", build)
                continue
            # early-return in unauthorized and forbidden to avoid trying all the other builds
            # as well, since an expired token will error for all of them.
            # the error is also swallowed unreported because this is an expected and actionable
            # error.
            except appstoreconnect_api.UnauthorizedError:
                sentry_sdk.capture_message(
                    "Not authorized to download dSYM using current App Store Connect credentials",
                    level="info",
                )
                return
            except appstoreconnect_api.ForbiddenError:
                sentry_sdk.capture_message(
                    "Forbidden from downloading dSYM using current App Store Connect credentials",
                    level="info",
                )
                return
            # Don't let malformed URLs abort all pending downloads in case it's an isolated instance
            except ValueError as e:
                sdk.capture_exception(e)
                continue
            # Assume request errors are a server side issue and do not abort all the
            # pending downloads.
            except appstoreconnect_api.RequestError as e:
                sdk.capture_exception(e)
                continue
            except requests.RequestException as e:
                sdk.capture_exception(e)
                continue
            else:
                create_difs_from_dsyms_zip(dsyms_zip.name, project)
                logger.debug("Uploaded dSYMs for build %s", build)
                metrics.incr("tasks.app_store_connect.builds_ingested", sample_rate=1)
        # Reached on success and on NoDsymsError: mark the build so subsequent
        # runs do not re-process it.
        build_state.fetched = True
        build_state.save()
def create_difs_from_dsyms_zip(dsyms_zip: str, project: Project) -> None:
    """Extract a dSYM zip and register each contained DIF as a project debug file."""
    span = sentry_sdk.start_span(op="dsym-difs", description="Extract difs dSYM zip")
    with span, open(dsyms_zip, "rb") as fp:
        difs = debugfile.create_files_from_dif_zip(fp, project, accept_unknown=True)
        for dif in difs:
            logger.debug("Created %r for project %s", dif, project.id)
def get_or_create_persisted_build(
    project: Project, config: appconnect.AppStoreConnectConfig, build: appconnect.BuildInfo
) -> AppConnectBuild:
    """Fetches the sentry-internal :class:`AppConnectBuild`.

    The build corresponds to the :class:`appconnect.BuildInfo` as returned by the
    AppStore Connect API. If no build exists yet, a new "pending" build is created.
    """
    try:
        # Look the build up by its natural key within the project.
        build_state = AppConnectBuild.objects.get(
            project=project,
            app_id=build.app_id,
            platform=build.platform,
            bundle_short_version=build.version,
            bundle_version=build.build_number,
        )
    except AppConnectBuild.DoesNotExist:
        # First time we see this build: persist it as pending (fetched=False)
        # so process_builds() will schedule its dSYMs for download.
        build_state = AppConnectBuild(
            project=project,
            app_id=build.app_id,
            bundle_id=config.bundleId,
            platform=build.platform,
            bundle_short_version=build.version,
            bundle_version=build.build_number,
            uploaded_to_appstore=build.uploaded_date,
            first_seen=timezone.now(),
            fetched=False,
        )
        build_state.save()
    return build_state
def process_builds(
    project: Project,
    config: appconnect.AppStoreConnectConfig,
    to_process: List[appconnect.BuildInfo],
) -> List[Tuple[appconnect.BuildInfo, AppConnectBuild]]:
    """Returns a list of builds whose dSYMs need to be updated or fetched.

    This will create a new "pending" :class:`AppConnectBuild` for any :class:`appconnect.BuildInfo`
    that cannot be found in the DB. These pending :class:`AppConnectBuild`s are immediately saved
    upon creation.
    """
    pending_builds = []
    with sentry_sdk.start_span(
        op="appconnect-update-builds", description="Update AppStoreConnect builds in database"
    ):
        for build in to_process:
            build_state = get_or_create_persisted_build(project, config, build)
            # Only builds not yet fetched need dSYM downloads.
            if not build_state.fetched:
                pending_builds.append((build, build_state))
        # Record when this source was last polled, regardless of the outcome.
        LatestAppConnectBuildsCheck.objects.create_or_update(
            project=project, source_id=config.id, values={"last_checked": timezone.now()}
        )
    return pending_builds
# Untyped decorator would stop type-checking of entire function, split into an inner
# function instead which can be type checked.
@instrumented_task(  # type: ignore
    name="sentry.tasks.app_store_connect.refresh_all_builds",
    queue="appstoreconnect",
    ignore_result=True,
)
def refresh_all_builds() -> None:
    # Celery entry point; see inner_refresh_all_builds for the typed implementation.
    inner_refresh_all_builds()
def inner_refresh_all_builds() -> None:
    """Refreshes all AppStoreConnect builds for all projects.

    This iterates over all the projects configured in Sentry and for any which has an
    AppStoreConnect symbol source configured will poll the AppStoreConnect API to check if
    there are new builds.
    """
    # We have no way to query for AppStore Connect symbol sources directly, but
    # getting all of the project options that have custom symbol sources
    # configured is a reasonable compromise, as the number of those should be
    # low enough to traverse every hour.
    # Another alternative would be to get a list of projects that have had a
    # previous successful import, as indicated by existing `AppConnectBuild`
    # objects. But that would miss projects that have a valid AppStore Connect
    # setup, but have not yet published any kind of build to AppStore.
    options = ProjectOption.objects.filter(key=appconnect.SYMBOL_SOURCES_PROP_NAME)
    # Number of dsym_download tasks scheduled; reported as a gauge below.
    count = 0
    for option in options:
        with sdk.push_scope() as scope:
            scope.set_tag("project", option.project_id)
            try:
                if not option.value:
                    # An empty string set as option value, the UI does this when deleting
                    # all sources. This is not valid JSON.
                    continue
                # We are parsing JSON thus all types are Any, so give the type-checker some
                # extra help. We are maybe slightly lying about the type, but the
                # attributes we do access are all string values.
                all_sources: List[Mapping[str, str]] = json.loads(option.value)
                for source in all_sources:
                    try:
                        source_id = source["id"]
                        source_type = source["type"]
                    except KeyError:
                        logger.exception("Malformed symbol source")
                        continue
                    if source_type == appconnect.SYMBOL_SOURCE_TYPE_NAME:
                        dsym_download.apply_async(
                            kwargs={
                                "project_id": option.project_id,
                                "config_id": source_id,
                            }
                        )
                        count += 1
            except Exception:
                # Broad catch is deliberate: one broken project must not abort the sweep.
                logger.exception("Failed to refresh AppStoreConnect builds")
    metrics.gauge("tasks.app_store_connect.refreshed", count, sample_rate=1)
|
9,629 | 19cf34e7c38045a183c75703ec56c17f96ee2ac4 | from ROOT import *
import os
import sys
from optparse import Option, OptionValueError, OptionParser
Box = ["RawKLMnodeID","rawKLMlaneFlag","rawKLMtExtraRPC","rawKLMqExtraRPC","rawKLMtExtraScint","rawKLMqExtraScint","rawKLMsizeMultihit","rawKLM00channelMultiplicity","rawKLM00channelMultiplicityFine","rawKLM10channelMultiplicity","rawKLM10channelMultiplicityFine","rawKLM20channelMultiplicity","rawKLM20channelMultiplicityFine","rawKLM30channelMultiplicity","rawKLM30channelMultiplicityFine","rawKLM01channelMultiplicity","rawKLM01channelMultiplicityFine","rawKLM11channelMultiplicity","rawKLM11channelMultiplicityFine","rawKLM21channelMultiplicity","rawKLM21channelMultiplicityFine","rawKLM31channelMultiplicity","rawKLM31channelMultiplicityFine","rawKLM02channelMultiplicity","rawKLM02channelMultiplicityFine","rawKLM12channelMultiplicity","rawKLM12channelMultiplicityFine","rawKLM22channelMultiplicity","rawKLM22channelMultiplicityFine","rawKLM32channelMultiplicity","rawKLM32channelMultiplicityFine","rawKLM03channelMultiplicity","rawKLM03channelMultiplicityFine","rawKLM13channelMultiplicity","rawKLM13channelMultiplicityFine","rawKLM23channelMultiplicity","rawKLM23channelMultiplicityFine","rawKLM33channelMultiplicity","rawKLM33channelMultiplicityFine"]
#Colz = ["",]
def recurPlot(tree):
    # Render every histogram stored in `tree` (a ROOT TFile/TDirectory) to PNG.
    # NOTE(review): relies on module-level globals `path` (list of directory
    # parts), `c` (the shared TCanvas) and `Box` (names drawn with "box").
    pathname=os.path.join(*path)
    if not os.path.exists(pathname):
        os.mkdir(pathname)
    for key in tree.GetListOfKeys():
        thisObject=tree.Get(key.GetName())
        if isinstance(thisObject,TH2):
            # Selected 2D histograms use the "box" option, the rest a colour map.
            if key.GetName() in Box:
                thisObject.Draw("box")
            else:
                thisObject.Draw("colz")
        elif isinstance(thisObject, TH1):
            thisObject.Draw()
        # Save whatever is currently on the canvas, one PNG per key.
        c.SaveAs(os.path.join(pathname,key.GetName()+".png"))
def makehtml(pngdir, indexfile):
    """Write a simple HTML index embedding every PNG found in `pngdir`.

    Fixes: the output file is now closed/flushed via a context manager (the
    original leaked the handle, risking a truncated index), and the listing is
    sorted so the page is deterministic across runs.

    Parameters
    ----------
    pngdir : str
        Directory scanned (non-recursively) for ``*.png`` files.
    indexfile : str
        Path of the HTML file to write; image src attributes are relative,
        so `indexfile` should live inside `pngdir`.
    """
    with open(indexfile, 'w') as html:
        for name in sorted(os.listdir(pngdir)):
            if name.endswith('.png'):
                html.write("<img src='" + name + "' width='25%'></img>")
#=========================================================================
#
# Main routine
#
#=========================================================================
# Command-line options: experiment and run number select the input ROOT file.
parser = OptionParser()
parser.add_option('-e', '--experiment', dest='eNumber',
                  default='7',
                  help='Experiment number [default=7]')
# NOTE(review): the actual default is '1505' but the help text says
# [default=0604] — confirm which is intended.
parser.add_option('-r', '--run', dest='rNumber',
                  default='1505',
                  help='Run number [default=0604]')
(options, args) = parser.parse_args()
# Zero-padded identifiers used to build the input/output paths.
exp = '{0:04d}'.format(int(options.eNumber))
run = '{0:05d}'.format(int(options.rNumber))
runhit = '{0:04d}'.format(int(options.rNumber))
inputName = '/ghi/fs01/belle2/bdata/group/detector/BKLM/Run_Analysis/e0007/bklmroots/bklmHists-e{0}r{1}.root'.format(exp, run)
#inputhitName = 'bklmHitmap_run{0}.root'.format(runhit)
outputpng = '/ghi/fs01/belle2/bdata/group/detector/BKLM/Run_Analysis/e0007/bklmroots/png-e{0}r{1}'.format(exp, run)
#htmlindex = 'png-e{0}r{1}/index.html'.format(exp, run)
# Batch mode: no GUI windows; stat box shows entries only.
gROOT.SetBatch()
gStyle.SetOptStat(10)
infile1 = TFile(inputName)
#infile2 = TFile(inputhitName)
# Globals consumed by recurPlot(): shared canvas and output directory parts.
c=TCanvas()
path=[outputpng]
recurPlot(infile1)
#recurPlot(infile2)
#makehtml(outputpng,htmlindex)
|
9,630 | 87e9c1d264523d02b287dedb44472fc08b488908 | from __future__ import division, print_function, absolute_import
"""
The dataset is stored in a CSV file, so we can use the TFLearn load_csv() function to
load the data from the CSV file into a python list.
We specify the 'target_column' argument to indicate that our labels (survived or not)
are located in the first column (id: 0). The function will return a tuple: (data, labels).
"""
import numpy as np
import tflearn
#DownLoad the Titanic dataset
from tflearn.datasets import titanic
titanic.download_dataset('titanic_dataset.csv')
#loadCSVfile,indicate that the first column represent labels
from tflearn.data_utils import load_csv
# labels come back one-hot encoded (n_classes=2) because categorical_labels=True
data, labels = load_csv('titanic_dataset.csv',target_column=0,
                        categorical_labels=True,n_classes=2)
'''
Preprocessing Data
Data are given 'as is' and need some preprocessing to be ready for use in our deep neural network classifier.
First, we will discard the fields that are not likely to help in our analysis.
For example, we make the assumption that the 'name' field will not be very useful in our task,
since a passenger's name and his or her chance of surviving are probably not correlated.
With such thinking, we can go ahead and discard the 'name' and 'ticket' fields.
Then, we need to convert all our data to numerical values,
because a neural network model can only perform operations over numbers.
However, our dataset contains some non-numerical values, such as 'name' and 'sex'. Because 'name' is discarded,
we just need to handle the 'sex' field. In this simple case, we will just assign '0' to males and '1' to females.
example:
survived pclass name sex age sibsp parch ticket fare
1 1 Aubart, Mme. Leontine Pauline female 24 0 0 PC 17477 69.3000
'''
# Here is the preprocessing function:
#Preprocessing function
def preprocess(passengers,columns_to_delete):
#Sort by descending is and delete column
for column_to_delete in sorted(columns_to_delete,reverse = True):
[passenger.pop(column_to_delete) for passenger in passengers]
# print(type(passengers[0]))
for i in range(len(passengers)):
# Converting 'sex' field to float (id is 1 after removing labels column)
passengers[i][1] = 1. if passengers[i][1] == 'female' else 0.
print(np.array(passengers,dtype=np.float32))
return np.array(passengers,dtype=np.float32)
# Ignore 'name' and 'ticket' columns (id 1 & 6 of data array)
to_ignore = [1,6]
#Preprocess data
data = preprocess(data,to_ignore)
'''
Build a Deep Neural Network
We are building a 3-layer neural network using TFLearn. First, we need to specify the shape of our input data.
In our case, each sample has a total of 6 features, and we will process samples per batch to save memory.
So our data input shape is [None, 6] ('None' stands for an unknown dimension, so we can change the total
number of samples that are processed in a batch).
'''
# Build neural network: 6 input features -> two 32-unit hidden layers -> 2-way softmax
net = tflearn.input_data(shape=[None,6])
net = tflearn.fully_connected(net,32)
net = tflearn.fully_connected(net,32)
net = tflearn.fully_connected(net,2,activation='softmax')
net =tflearn.regression(net)
'''
Training
TFLearn provides a model wrapper ('DNN') that automatically performs neural network classifier tasks,
such as training, prediction, save/restore, and more. We will run it for 10 epochs
(i.e., the network will see all data 10 times) with a batch size of 16.
'''
#Define model
model = tflearn.DNN(net)
# Start training (apply gradient descent algorithm)
model.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)
'''
Try the Model
It's time to try out our model.
For fun, let's take Titanic movie protagonists
(DiCaprio and Winslet) and calculate their chance of surviving (class 1).
'''
# Let's create some data for DiCaprio and Winslet
dicaprio = [3, 'Jack Dawson', 'male', 19, 0, 0, 'N/A', 5.0000]
winslet = [1, 'Rose DeWitt Bukater', 'female', 17, 1, 2, 'N/A', 100.0000]
# Preprocess data
dicaprio, winslet = preprocess([dicaprio, winslet], to_ignore)
# Predict surviving chances (class 1 results)
pred = model.predict([dicaprio, winslet])
print("DiCaprio Surviving Rate:", pred[0][1])
print("Winslet Surviving Rate:", pred[1][1])
9,631 | 9a1b268386b4652bf50af0365892ef7338329727 | #header
import matplotlib.pyplot as pmf
import random
p = 0.5 # Probablility of success for original system
n = 18 # Number of trials
Y = [] # Contains binomial RVs
b = [0] * (n+1) # List of n + 1 zeroes
N = 100 # Number of experiments performed
for j in range(N):
# Bernoulli random variable
for i in range(n):
r = random.uniform(0,1)
if r < p:
x = 1
else:
x = 0
Y.append(x)
outcome = sum(Y) # Number of successes from 0 to n
b[outcome] = b[outcome] + 1 # Record of successes for bar plot
Y.clear()
for i in range(n+1):
b[i] = b[i]/N # Probabilities
p = 0
cv = int(input('Enter a choice for the CV.'))
for i in range(cv, 19):
p = p + b[i]
print('For a critical value of', cv, 'the probability of rejecting the old system in favor of a new system that is no better than is', p,'.')
#cv = 13, 1/20 or the 5% rule |
9,632 | 2ab303a2f36cdd64e2119856312dd5e38ee728d6 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import uuid
import mock
import mox
import six
from heat.common import exception
from heat.common import template_format
from heat.engine import resource
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
from ..resources import cloud_loadbalancer as lb # noqa
# The following fakes are for pyrax
cert = """\n-----BEGIN CERTIFICATE-----
MIIFBjCCAu4CCQDWdcR5LY/+/jANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB
VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMB4XDTE0MTAxNjE3MDYxNVoXDTE1MTAxNjE3MDYxNVowRTELMAkG
A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
IFdpZGdpdHMgUHR5IEx0ZDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
AMm5NcP0tMKHblT6Ud1k8TxZ9/8uOHwUNPbvFsvSyCupj0J0vGCTjbuC2I5T/CXR
tnLEIt/EarlNAqcjbDCWtSyEKs3zDmmkreoIDEa8pyAQ2ycsCXGMxDN97F3/wlLZ
agUNM0FwGHLZWBg62bM6l+bpTUcX0PqSyv/aVMhJ8EPDX0Dx1RYsVwUzIe/HWC7x
vCmtDApAp1Fwq7AwlRaKU17sGwPWJ8+I8PyouBdqNuslHm7LQ0XvBA5DfkQA6feB
ZeJIyOtctM9WFWQI5fKOsyt5P306B3Zztw9VZLAmZ8qHex+R1WY1zXxDAwKEQz/X
8bRqMA/VU8OxJcK0AmY/1v/TFmAlRh2XBCIc+5UGtCcftWvZJAsKur8Hg5pPluGv
ptyqSgSsSKtOVWkyTANP1LyOkpBA8Kmkeo2CKXu1SCFypY5Q6E+Fy8Y8RaHJPvzR
NHcm1tkBvHOKyRso6FjvxuJEyIC9EyUK010nwQm7Qui11VgCSHBoaKVvkIbFfQdK
aCes0oQO5dqY0+fC/IFDhrxlvSd2Wk7KjuNjNu9kVN9Ama2pRTxhYKaN+GsHfoL7
ra6G9HjbUVULAdjCko3zOKEUzFLLf1VZYk7hDhyv9kovk0b8sr5WowxW7+9Wy0NK
WL5f2QgVCcoHw9bGhyuYQCdBfztNmKOWe9pGj6bQAx4pAgMBAAEwDQYJKoZIhvcN
AQEFBQADggIBALFSj3G2TEL/UWtNcPeY2fbxSGBrboFx3ur8+zTkdZzvfC8H9/UK
w0aRH0rK4+lKYDqF6A9bUHP17DaJm1lF9In38VVMOuur0ehUIn1S2U3OvlDLN68S
p5D4wGKMcUfUQ6pzhSKJCMvGX561TKHCc5fZhPruy75Xq2DcwJENE189foKLFvJs
ca4sIARqP6v1vfARcfH5leSsdIq8hy6VfL0BRATXfNHZh4SNbyDJYYTxrEUPHYXW
pzW6TziZXYNMG2ZRdHF/mDJuFzw2EklOrPC9MySCZv2i9swnqyuwNYh/SAMhodTv
ZDGy4nbjWNe5BflTMBceh45VpyTcnQulFhZQFwP79fK10BoDrOc1mEefhIqT+fPI
LJepLOf7CSXtYBcWbmMCLHNh+PrlCiA1QMTyd/AC1vvoiyCbs3M419XbXcBSDEh8
tACplmhf6z1vDkElWiDr8y0kujJ/Gie24iLTun6oHG+f+o6bbQ9w196T0olLcGx0
oAYL0Olqli6cWHhraVAzZ5t5PH4X9TiESuQ+PMjqGImCIUscXY4objdnB5dfPHoz
eF5whPl36/GK8HUixCibkCyqEOBBuNqhOz7nVLM0eg5L+TE5coizEBagxVCovYSj
fQ9zkIgaC5oeH6L0C1FFG1vRNSWokheBk14ztVoJCJyFr6p0/6pD7SeR
-----END CERTIFICATE-----\n"""
private_key = """\n-----BEGIN PRIVATE KEY-----
MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQDJuTXD9LTCh25U
+lHdZPE8Wff/Ljh8FDT27xbL0sgrqY9CdLxgk427gtiOU/wl0bZyxCLfxGq5TQKn
I2wwlrUshCrN8w5ppK3qCAxGvKcgENsnLAlxjMQzfexd/8JS2WoFDTNBcBhy2VgY
OtmzOpfm6U1HF9D6ksr/2lTISfBDw19A8dUWLFcFMyHvx1gu8bwprQwKQKdRcKuw
MJUWilNe7BsD1ifPiPD8qLgXajbrJR5uy0NF7wQOQ35EAOn3gWXiSMjrXLTPVhVk
COXyjrMreT99Ogd2c7cPVWSwJmfKh3sfkdVmNc18QwMChEM/1/G0ajAP1VPDsSXC
tAJmP9b/0xZgJUYdlwQiHPuVBrQnH7Vr2SQLCrq/B4OaT5bhr6bcqkoErEirTlVp
MkwDT9S8jpKQQPCppHqNgil7tUghcqWOUOhPhcvGPEWhyT780TR3JtbZAbxziskb
KOhY78biRMiAvRMlCtNdJ8EJu0LotdVYAkhwaGilb5CGxX0HSmgnrNKEDuXamNPn
wvyBQ4a8Zb0ndlpOyo7jYzbvZFTfQJmtqUU8YWCmjfhrB36C+62uhvR421FVCwHY
wpKN8zihFMxSy39VWWJO4Q4cr/ZKL5NG/LK+VqMMVu/vVstDSli+X9kIFQnKB8PW
xocrmEAnQX87TZijlnvaRo+m0AMeKQIDAQABAoICAA8DuBrDxgiMqAuvLhS6hLIn
SCw4NoAVyPNwTFQTdk65qi4aHkNZ+DyyuoetfKEcAOZ97tKU/hSYxM/H9S+QqB+O
HtmBc9stJLy8qJ1DQXVDi+xYfMN05M2oW8WLWd1szVVe7Ce8vjUeNE5pYvbSL6hC
STw3a5ibAH0WtSTLTBTfH+HnniKuXjPG4InGXqvv1j+L38+LjGilaEIO+6nX1ejE
ziX09LWfzcAglsM3ZqsN8jvw6Sr1ZWniYC2Tm9aOTRUQsdPC7LpZ//GYL/Vj5bYg
qjcZ8KBCcKe1hW8PDL6oYuOwqR+YdZkAK+MuEQtZeWYiWT10dW2la9gYKe2OZuQ1
7q3zZ6zLP+XP+0N7DRMTTuk2gurBVX7VldzIzvjmW8X+8Q5QO+EAqKr2yordK3S1
uYcKmyL4Nd6rSFjRo0zSqHMNOyKt3b1r3m/eR2W623rT5uTjgNYpiwCNxnxmcjpK
Sq7JzZKz9NLbEKQWsP9gQ3G6pp3XfLtoOHEDkSKMmQxd8mzK6Ja/9iC+JGqRTJN+
STe1vL9L2DC7GnjOH1h2TwLoLtQWSGebf/GBxju0e5pAL0UYWBNjAwcpOoRU9J5J
y9E7sNbbXTmK2rg3B/5VKGQckBWfurg7CjAmHGgz9xxceJQLKvT1O5zHZc+v4TVB
XDZjtz8L2k3wFLDynDY5AoIBAQDm2fFgx4vk+gRFXPoLNN34Jw2fT+xuwD/H7K0e
0Cas0NfyNil/Kbp+rhMHuVXTt86BIY+z8GO4wwn+YdDgihBwobAh2G9T/P6wNm+Q
NcIeRioml8V/CP7lOQONQJ6sLTRYnNLfB96uMFe+13DO/PjFybee5VflfBUrJK1M
DqRLwm9wEIf5p0CWYI/ZJaDNN71B09BB/jdT/e7Ro1hXHlq3W4tKqRDPfuUqwy3H
ocYQ1SUk3oFdSiYFd6PijNkfTnrtyToa0xUL9uGL+De1LfgV+uvqkOduQqnpm/5+
XQC1qbTUjq+4WEsuPjYf2E0WAVFGzwzWcdb0LnMIUJHwPvpLAoIBAQDfsvCZlcFM
nGBk1zUnV3+21CPK+5+X3zLHr/4otQHlGMFL6ZiQManvKMX6a/cT3rG+LvECcXGD
jSsTu7JIt9l8VTpbPaS76htTmQYaAZERitBx1C8zDMuI2O4bjFLUGUX73RyTZdRm
G68IX+7Q7SL8zr/fHjcnk+3yj0L1soAVPC7lY3se7vQ/SCre97E+noP5yOhrpnRt
dij7NYy79xcvUZfc/z0//Ia4JSCcIvv2HO7JZIPzUCVO4sjbUOGsgR9pwwQkwYeP
b5P0MVaPgFnOgo/rz6Uqe+LpeY83SUwc2q8W8bskzTLZEnwSV5bxCY+gIn9KCZSG
8QxuftgIiQDbAoIBAQDQ2oTC5kXulzOd/YxK7z2S8OImLAzf9ha+LaZCplcXKqr0
e4P3hC0xxxN4fXjk3vp5YX+9b9MIqYw1FRIA02gkPmQ3erTd65oQmm88rSY+dYRU
/iKz19OkVnycIsZrR0qAkQFGvrv8I8h+5DMvUTdQ2jrCCwQGnsgYDEqs8OI7mGFx
pcMfXu3UHvCFqMFeaPtUvuk/i1tLJgYWrA2UY+X21V+j4GlREKEMmyCj5/xl5jCA
tr2bRSY49BDVOlCFPl+BGfjzo9z6whU0qRDdXgWA/U7LHOYEn1NSAsuwTzwBHtR3
KdBYm6kI4Ufeb7buHasGwPQAX2X17MAt2ZbvIEsZAoIBAQC4g5dzh5PGhmH4K48b
YU/l1TukzUIJekAfd+ozV4I1nuKppAeEQILD0yTh9zX4vMJtdbiz5DDWapWylCpt
UsBgjsgwxDriCSr7HIhs4QfwqUhf67325MHpoc1dCbS0YBhatDpC1kaI5qLMTJzm
1gL69epLtleWHK2zWjnIAbEmUtr3uMOwczciD3vVKAeZ+BQx72bOjKESPNl2w+fO
jvQfwrR5xEqYQco5j95DC5Q6oAjSM0enZV8wn10/kYpjyKnJieMcEkmnpUgrrpqQ
iTUKYqUlw8OftEopfGwGFT5junmbek57/4nGhTmzw22sac9/LZVC034ghClV5uh4
udDrAoIBAQCJHfBPJmJMT/WtSATTceVDgZiyezWNgH2yLJMqDP6sEuImnLAg2L9M
Yc6LqMcHLj7CyXfy2AEAuYTZwXFSRmVKl6Ycad7sS/hIL1ykvDveRU9VNImexDBq
AJR4GKr6jbRZnBztnRYZTsGA+TcrFc6SwdSPXgz7JQT9uw+JkhLi59m141XBdeRc
NQ/LFgOaxjvRUID81izQaYEyADId7asy+2QVazMDafuALJ23WSUMSXajCXaC6/7N
53RWrOAb+kFRgjuHM8pQkpgnY/Ds0MZxpakFw3Y7PAEL99xyYdR+rE3JOMjPlgr0
LpTt0Xs1OFZxaNpolW5Qis4os7UmmIRV
-----END PRIVATE KEY-----\n"""
class FakeException(Exception):
    """Stand-in exception type for tests; adds no behavior of its own."""
    pass
class FakeClient(object):
    """Minimal stand-in for a pyrax API client.

    Only exposes the user-agent attribute, under both spellings seen in
    the real client.  # NOTE(review): exact consumer of each spelling is
    # outside this file -- confirm before removing either.
    """
    user_agent = "Fake"
    USER_AGENT = "Fake"
class FakeManager(object):
    """Stub of a pyrax resource manager; every operation is a no-op."""
    # Shared fake client so manager.api.user_agent resolves.
    api = FakeClient()
    def list(self):
        pass
    def get(self, item):
        pass
    def delete(self, item):
        pass
    def create(self, *args, **kwargs):
        pass
    def find(self, *args, **kwargs):
        pass
    def action(self, item, action_type, body=None):
        pass
class FakeLoadBalancerManager(object):
    """Stub load-balancer manager; all operations are no-ops."""
    def __init__(self, api=None, *args, **kwargs):
        pass
    def set_content_caching(self, *args, **kwargs):
        pass
class FakeNode(object):
    """In-memory stand-in for a pyrax load-balancer node.

    Mirrors the pyrax constructor contract: both an address and a port
    are mandatory; everything else is optional metadata.  Equality
    compares the full attribute dictionaries so tests can assert on
    whole node lists.
    """
    def __init__(self, address=None, port=None, condition=None, weight=None,
                 status=None, parent=None, type=None, id=None):
        # Reproduce pyrax's constructor check (and its exact message).
        if not address or not port:
            raise TypeError("You must include an address and "
                            "a port when creating a node.")
        self.address = address
        self.port = port
        self.condition = condition
        self.weight = weight
        self.status = status
        self.parent = parent
        self.type = type
        self.id = id

    def __eq__(self, other):
        """Nodes are equal when every instance attribute matches."""
        return vars(self) == vars(other)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)

    def update(self):
        """Stub: intentionally does nothing."""

    def delete(self):
        """Stub: intentionally does nothing."""
class FakeVirtualIP(object):
    """In-memory stand-in for a pyrax virtual-IP record.

    Equality compares the full attribute dictionaries so tests can
    assert on whole VIP lists.
    """
    def __init__(self, address=None, port=None, condition=None,
                 ipVersion=None, type=None, id=None):
        self.address = address
        self.port = port
        self.condition = condition
        self.ipVersion = ipVersion
        self.type = type
        self.id = id
        # Mirror the version under the snake_case attribute name as well.
        self.ip_version = ipVersion

    def __eq__(self, other):
        """VIPs are equal when every instance attribute matches."""
        return vars(self) == vars(other)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class FakeLoadBalancerClient(object):
    """Stub pyrax cloud-loadbalancers client exposing the fake node/VIP types."""
    def __init__(self, *args, **kwargs):
        # The real client exposes Node/VirtualIP factory classes; the tests
        # use these to build expected create() payloads.
        self.Node = FakeNode
        self.VirtualIP = FakeVirtualIP
        pass
    def get(self, *args, **kwargs):
        pass
    def create(self, *args, **kwargs):
        pass
class FakeLoadBalancer(object):
    """In-memory stand-in for a pyrax CloudLoadBalancer instance.

    Only the attributes and method names exercised by the resource under
    test are provided; the API-calling methods are inert stubs that
    individual tests replace with mox expectations as needed.
    """
    def __init__(self, name=None, info=None, *args, **kwargs):
        # NOTE(review): the computed name/info locals are never stored;
        # self.name is explicitly left as None below.
        name = name or uuid.uuid4()
        info = info or {"fake": "fake"}
        self.id = uuid.uuid4()
        self.manager = FakeLoadBalancerManager()
        self.Node = FakeNode
        self.VirtualIP = FakeVirtualIP
        self.nodes = []
        self.algorithm = "ROUND_ROBIN"
        self.session_persistence = "HTTP_COOKIE"
        self.connection_logging = False
        self.timeout = None
        self.httpsRedirect = False
        self.protocol = None
        self.port = None
        self.name = None
        self.halfClosed = None
        self.content_caching = False
    def get(self, *args, **kwargs):
        pass
    def add_nodes(self, *args, **kwargs):
        pass
    def add_ssl_termination(self, *args, **kwargs):
        pass
    def set_error_page(self, *args, **kwargs):
        pass
    def clear_error_page(self, *args, **kwargs):
        pass
    def add_access_list(self, *args, **kwargs):
        pass
    def update(self, *args, **kwargs):
        pass
    def add_health_monitor(self, *args, **kwargs):
        pass
    def delete_health_monitor(self, *args, **kwargs):
        pass
    def delete_ssl_termination(self, *args, **kwargs):
        pass
    def set_metadata(self, *args, **kwargs):
        pass
    def delete_metadata(self, *args, **kwargs):
        pass
    def add_connection_throttle(self, *args, **kwargs):
        pass
    def delete_connection_throttle(self, *args, **kwargs):
        pass
    def delete(self, *args, **kwargs):
        pass
    # The two getters below return empty dicts (not None) because callers
    # iterate/compare their results.
    def get_health_monitor(self, *args, **kwargs):
        return {}
    def get_metadata(self, *args, **kwargs):
        return {}
    def get_error_page(self, *args, **kwargs):
        pass
    def get_connection_throttle(self, *args, **kwargs):
        pass
    def get_ssl_termination(self, *args, **kwargs):
        pass
    def get_access_list(self, *args, **kwargs):
        pass
class LoadBalancerWithFakeClient(lb.CloudLoadBalancer):
    """CloudLoadBalancer resource wired to the fake pyrax client."""
    def cloud_lb(self):
        # Override the real client factory with the in-memory fake.
        return FakeLoadBalancerClient()
def override_resource():
    """Return a resource-type mapping that substitutes the fake-backed class."""
    return {
        'Rackspace::Cloud::LoadBalancer': LoadBalancerWithFakeClient
    }
class LoadBalancerTest(common.HeatTestCase):
    def setUp(self):
        """Build the shared fixtures and register the fake-backed resource.

        ``lb_template`` is a minimal Heat template with a single
        Rackspace::Cloud::LoadBalancer resource; ``expected_body`` is the
        matching keyword payload the tests expect ``clb.create()`` to
        receive for that template.
        """
        super(LoadBalancerTest, self).setUp()
        self.lb_template = {
            "AWSTemplateFormatVersion": "2010-09-09",
            "Description": "fawef",
            "Resources": {
                self._get_lb_resource_name(): {
                    "Type": "Rackspace::Cloud::LoadBalancer",
                    "Properties": {
                        "name": "test-clb",
                        "nodes": [{"addresses": ["166.78.103.141"],
                                   "port": 80,
                                   "condition": "ENABLED"}],
                        "protocol": "HTTP",
                        "port": 80,
                        "virtualIps": [
                            {"type": "PUBLIC", "ipVersion": "IPV6"}],
                        "algorithm": 'LEAST_CONNECTIONS',
                        "connectionThrottle": {'maxConnectionRate': 1000},
                        'timeout': 110,
                        'contentCaching': 'DISABLED'
                    }
                }
            }
        }
        self.lb_name = 'test-clb'
        self.expected_body = {
            "nodes": [FakeNode(address=u"166.78.103.141", port=80,
                               condition=u"ENABLED", type=u"PRIMARY",
                               weight=1)],
            "protocol": u'HTTP',
            "port": 80,
            "virtual_ips": [FakeVirtualIP(type=u"PUBLIC", ipVersion=u"IPV6")],
            "algorithm": u'LEAST_CONNECTIONS',
            "connectionThrottle": {'maxConnectionRate': 1000,
                                   'maxConnections': None,
                                   'rateInterval': None,
                                   'minConnections': None},
            "connectionLogging": None,
            "halfClosed": None,
            "healthMonitor": None,
            "metadata": None,
            "sessionPersistence": None,
            "timeout": 110,
            "httpsRedirect": False
        }
        # Route resource-type lookups for this plugin to the fake-backed
        # implementation for the duration of the tests.
        lb.resource_mapping = override_resource
        resource._register_class("Rackspace::Cloud::LoadBalancer",
                                 LoadBalancerWithFakeClient)
def _get_lb_resource_name(self):
return "lb-" + str(uuid.uuid4())
    def __getattribute__(self, name):
        """Hand out deep copies of the shared fixtures.

        Tests freely mutate ``expected_body`` and ``lb_template``;
        copying on every access keeps each test isolated from the
        others' mutations.
        """
        if name == 'expected_body' or name == 'lb_template':
            return copy.deepcopy(super(LoadBalancerTest, self)
                                 .__getattribute__(name))
        return super(LoadBalancerTest, self).__getattribute__(name)
    def _mock_create(self, tmpl, stack, resource_name, lb_name, lb_body):
        """Build the resource and stub clb.create/get around a fake LB.

        Returns ``(resource, FakeLoadBalancer)``; the fake is pre-set to
        ACTIVE with a resource_id so creation completes immediately.
        Mox expectations recorded here are order-sensitive.
        """
        resource_defns = tmpl.resource_definitions(stack)
        rsrc = LoadBalancerWithFakeClient(resource_name,
                                          resource_defns[resource_name],
                                          stack)
        fake_lb = FakeLoadBalancer(name=lb_name)
        fake_lb.status = 'ACTIVE'
        fake_lb.resource_id = 1234
        self.m.StubOutWithMock(rsrc.clb, 'create')
        rsrc.clb.create(lb_name, **lb_body).AndReturn(fake_lb)
        self.m.StubOutWithMock(rsrc.clb, 'get')
        # Every subsequent status poll returns the same fake.
        rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
            fake_lb)
        return (rsrc, fake_lb)
def _get_first_resource_name(self, templ):
return next(k for k in templ['Resources'])
    def _mock_loadbalancer(self, lb_template, expected_name, expected_body):
        """Parse *lb_template* into a stack and mock its LB resource.

        Convenience wrapper around :meth:`_mock_create`; returns
        ``(resource, FakeLoadBalancer)``.
        """
        t = template_format.parse(json.dumps(lb_template))
        self.stack = utils.parse_stack(t, stack_name=utils.random_name())
        rsrc, fake_lb = self._mock_create(self.stack.t, self.stack,
                                          self.
                                          _get_first_resource_name(
                                              lb_template),
                                          expected_name,
                                          expected_body)
        return (rsrc, fake_lb)
def _set_template(self, templ, **kwargs):
for k, v in six.iteritems(kwargs):
templ['Resources'][self._get_first_resource_name(templ)][
'Properties'][k] = v
return templ
def _set_expected(self, expected, **kwargs):
for k, v in six.iteritems(kwargs):
expected[k] = v
return expected
    def test_process_node(self):
        """_process_nodes flattens multi-address nodes into one per address."""
        nodes = [{'addresses': ['1234'], 'port': 80, 'enabled': True},
                 {'addresses': ['4567', '8901', '8903'], 'port': 80,
                  'enabled': True},
                 {'addresses': [], 'port': 80, 'enabled': True}]
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        # A node with no addresses contributes nothing to the output.
        expected_nodes = [{'address': '1234', 'port': 80, 'enabled': True},
                          {'address': '4567', 'port': 80, 'enabled': True},
                          {'address': '8901', 'port': 80, 'enabled': True},
                          {'address': '8903', 'port': 80, 'enabled': True}]
        self.assertEqual(expected_nodes, list(rsrc._process_nodes(nodes)))
    def test_nodeless(self):
        """It's possible to create a LoadBalancer resource with no nodes."""
        template = self._set_template(self.lb_template,
                                      nodes=[])
        # An empty nodes list must be passed through to clb.create unchanged.
        expected_body = copy.deepcopy(self.expected_body)
        expected_body['nodes'] = []
        rsrc, fake_lb = self._mock_loadbalancer(
            template, self.lb_name, expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_alter_properties(self):
        """Template properties are translated into the pyrax API shapes.

        sessionPersistence/connectionLogging/metadata each have a
        different wire format from their template form.
        """
        # test alter properties functions
        template = self._set_template(self.lb_template,
                                      sessionPersistence='HTTP_COOKIE',
                                      connectionLogging=True,
                                      metadata={'yolo': 'heeyyy_gurl'})
        expected = self._set_expected(self.expected_body,
                                      sessionPersistence={
                                          'persistenceType': 'HTTP_COOKIE'},
                                      connectionLogging={'enabled': True},
                                      metadata=[
                                          {'key': 'yolo',
                                           'value': 'heeyyy_gurl'}])
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_validate_vip(self):
        """Virtual-IP validation: id is exclusive with type/version."""
        snippet = {
            "nodes": [],
            "protocol": 'HTTP',
            "port": 80,
            "halfClosed": None,
            "algorithm": u'LEAST_CONNECTIONS',
            "virtualIps": [{"id": "1234"}]
        }
        stack = mock.Mock()
        stack.db_resource_get.return_value = None
        stack.has_cache_data.return_value = False
        # happy path
        resdef = rsrc_defn.ResourceDefinition("testvip",
                                              lb.CloudLoadBalancer,
                                              properties=snippet)
        rsrc = lb.CloudLoadBalancer("testvip", resdef, stack)
        self.assertIsNone(rsrc.validate())
        # make sure the vip id prop is exclusive
        snippet["virtualIps"][0]["type"] = "PUBLIC"
        exc = self.assertRaises(exception.StackValidationFailed,
                                rsrc.validate)
        self.assertIn("Cannot specify type or version", str(exc))
        # make sure you have to specify type and version if no id
        snippet["virtualIps"] = [{}]
        exc = self.assertRaises(exception.StackValidationFailed,
                                rsrc.validate)
        self.assertIn("Must specify VIP type and version", str(exc))
    def test_validate_half_closed(self):
        """halfClosed is only valid with TCP or TCP_CLIENT_FIRST protocols."""
        # test failure (invalid protocol)
        template = self._set_template(self.lb_template, halfClosed=True)
        expected = self._set_expected(self.expected_body, halfClosed=True)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        exc = self.assertRaises(exception.StackValidationFailed,
                                rsrc.validate)
        self.assertIn('The halfClosed property is only available for the TCP'
                      ' or TCP_CLIENT_FIRST protocols', str(exc))
        # test TCP protocol
        template = self._set_template(template, protocol='TCP')
        expected = self._set_expected(expected, protocol='TCP')
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.assertIsNone(rsrc.validate())
        # test TCP_CLIENT_FIRST protocol
        template = self._set_template(template,
                                      protocol='TCP_CLIENT_FIRST')
        expected = self._set_expected(expected,
                                      protocol='TCP_CLIENT_FIRST')
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.assertIsNone(rsrc.validate())
    def test_validate_health_monitor(self):
        """Health-monitor schema: HTTP-only fields are rejected for CONNECT."""
        # test connect success
        health_monitor = {
            'type': 'CONNECT',
            'attemptsBeforeDeactivation': 1,
            'delay': 1,
            'timeout': 1
        }
        template = self._set_template(self.lb_template,
                                      healthMonitor=health_monitor)
        expected = self._set_expected(self.expected_body,
                                      healthMonitor=health_monitor)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.assertIsNone(rsrc.validate())
        # test connect failure
        # bodyRegex is only valid for type 'HTTP(S)'
        health_monitor['bodyRegex'] = 'dfawefawe'
        template = self._set_template(template,
                                      healthMonitor=health_monitor)
        expected = self._set_expected(expected,
                                      healthMonitor=health_monitor)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        exc = self.assertRaises(exception.StackValidationFailed,
                                rsrc.validate)
        self.assertIn('Unknown Property bodyRegex', str(exc))
        # test http fields
        health_monitor['type'] = 'HTTP'
        health_monitor['bodyRegex'] = 'bodyRegex'
        health_monitor['statusRegex'] = 'statusRegex'
        health_monitor['hostHeader'] = 'hostHeader'
        health_monitor['path'] = 'path'
        template = self._set_template(template,
                                      healthMonitor=health_monitor)
        expected = self._set_expected(expected,
                                      healthMonitor=health_monitor)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.assertIsNone(rsrc.validate())
    def test_validate_ssl_termination(self):
        """SSL termination requires a certificate to be supplied."""
        ssl_termination = {
            'privatekey': 'ewfawe',
            'intermediateCertificate': 'fwaefawe',
            'secureTrafficOnly': True
        }
        # test ssl termination enabled without required fields failure
        template = self._set_template(self.lb_template,
                                      sslTermination=ssl_termination)
        expected = self._set_expected(self.expected_body,
                                      sslTermination=ssl_termination)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        exc = self.assertRaises(exception.StackValidationFailed, rsrc.validate)
        self.assertIn("Property certificate not assigned", six.text_type(exc))
        # Adding the certificate makes validation pass.
        ssl_termination['certificate'] = 'dfaewfwef'
        template = self._set_template(template,
                                      sslTermination=ssl_termination)
        expected = self._set_expected(expected,
                                      sslTermination=ssl_termination)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.assertIsNone(rsrc.validate())
    def test_ssl_termination_unstripped_certificates(self):
        """Certificates with surrounding whitespace still round-trip.

        The API returns stripped certificates; the resource must still
        consider creation complete.
        """
        ssl_termination_template = {
            'securePort': 443,
            'privatekey': 'afwefawe',
            'certificate': '    \nfawefwea\n     ',
            'intermediateCertificate': "\n\nintermediate_certificate\n",
            'secureTrafficOnly': False
        }
        ssl_termination_api = copy.deepcopy(ssl_termination_template)
        template = self._set_template(self.lb_template,
                                      sslTermination=ssl_termination_template)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
        # First poll: not yet configured; second poll: stripped values.
        fake_lb.get_ssl_termination().AndReturn({})
        fake_lb.get_ssl_termination().AndReturn({
            'securePort': 443,
            'certificate': 'fawefwea',
            'intermediateCertificate': "intermediate_certificate",
            'secureTrafficOnly': False,
            'enabled': True,
        })
        self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
        fake_lb.add_ssl_termination(**ssl_termination_api)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_ssl_termination_intermediateCertificate_None(self):
        """A None intermediateCertificate is sent to the API as ''."""
        ssl_termination_template = {
            'securePort': 443,
            'privatekey': 'afwefawe',
            'certificate': '    \nfawefwea\n     ',
            'intermediateCertificate': None,
            'secureTrafficOnly': False
        }
        template = self._set_template(self.lb_template,
                                      sslTermination=ssl_termination_template)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
        fake_lb.get_ssl_termination().AndReturn({})
        # The API response omits the intermediate certificate entirely.
        fake_lb.get_ssl_termination().AndReturn({
            'securePort': 443,
            'certificate': 'fawefwea',
            'secureTrafficOnly': False,
            'enabled': True,
        })
        self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
        add_ssl_termination_args = {
            'securePort': 443,
            'privatekey': 'afwefawe',
            'certificate': '    \nfawefwea\n     ',
            'intermediateCertificate': '',
            'secureTrafficOnly': False
        }
        fake_lb.add_ssl_termination(**add_ssl_termination_args)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_post_creation_access_list(self):
        """An accessList property is applied after the LB is created."""
        access_list = [{"address": '192.168.1.1/0',
                        'type': 'ALLOW'},
                       {'address': '172.165.3.43',
                        'type': 'DENY'}]
        # The API echoes the entries back with server-assigned ids.
        api_access_list = [{"address": '192.168.1.1/0', 'id': 1234,
                            'type': 'ALLOW'},
                           {'address': '172.165.3.43', 'id': 3422,
                            'type': 'DENY'}]
        template = self._set_template(self.lb_template,
                                      accessList=access_list)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.StubOutWithMock(fake_lb, 'get_access_list')
        fake_lb.get_access_list().AndReturn([])
        fake_lb.get_access_list().AndReturn(api_access_list)
        self.m.StubOutWithMock(fake_lb, 'add_access_list')
        fake_lb.add_access_list(access_list)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_ref_id(self):
        """The Reference ID of the resource is the resource ID."""
        template = self._set_template(self.lb_template)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
        self.assertEqual(rsrc.resource_id, rsrc.FnGetRefId())
    def test_post_creation_error_page(self):
        """An errorPage property is pushed to the LB after creation."""
        error_page = "REALLY BIG ERROR"
        template = self._set_template(self.lb_template,
                                      errorPage=error_page)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.StubOutWithMock(fake_lb, 'get_error_page')
        # First poll shows the default (empty) page; second shows ours.
        fake_lb.get_error_page().AndReturn({u'errorpage': {u'content': u''}})
        fake_lb.get_error_page().AndReturn(
            {u'errorpage': {u'content': error_page}})
        self.m.StubOutWithMock(fake_lb, 'set_error_page')
        fake_lb.set_error_page(error_page)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_post_creation_ssl_termination(self):
        """An sslTermination property is applied after the LB is created."""
        ssl_termination_template = {
            'securePort': 443,
            'privatekey': 'afwefawe',
            'certificate': 'fawefwea',
            'intermediateCertificate': "intermediate_certificate",
            'secureTrafficOnly': False
        }
        ssl_termination_api = copy.deepcopy(ssl_termination_template)
        template = self._set_template(self.lb_template,
                                      sslTermination=ssl_termination_template)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
        fake_lb.get_ssl_termination().AndReturn({})
        fake_lb.get_ssl_termination().AndReturn({
            'securePort': 443,
            'certificate': 'fawefwea',
            'intermediateCertificate': "intermediate_certificate",
            'secureTrafficOnly': False,
            'enabled': True,
        })
        self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
        fake_lb.add_ssl_termination(**ssl_termination_api)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_post_creation_content_caching(self):
        """A contentCaching of ENABLED is applied without error."""
        template = self._set_template(self.lb_template,
                                      contentCaching='ENABLED')
        rsrc = self._mock_loadbalancer(template, self.lb_name,
                                       self.expected_body)[0]
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_check(self):
        """check() maps LB status to COMPLETE/FAILED and surfaces errors."""
        stack = mock.Mock()
        stack.db_resource_get.return_value = None
        stack.has_cache_data.return_value = False
        resdef = mock.Mock(spec=rsrc_defn.ResourceDefinition)
        loadbalancer = lb.CloudLoadBalancer("test", resdef, stack)
        loadbalancer._add_event = mock.Mock()
        mock_cloud_lb = mock.Mock()
        mock_get = mock.Mock(return_value=mock_cloud_lb)
        loadbalancer.clb.get = mock_get
        # ACTIVE -> CHECK COMPLETE
        mock_cloud_lb.status = 'ACTIVE'
        scheduler.TaskRunner(loadbalancer.check)()
        self.assertEqual('CHECK', loadbalancer.action)
        self.assertEqual('COMPLETE', loadbalancer.status)
        # Unexpected status -> CHECK FAILED, status in the error message
        mock_cloud_lb.status = 'FOOBAR'
        exc = self.assertRaises(exception.ResourceFailure,
                                scheduler.TaskRunner(loadbalancer.check))
        self.assertEqual('CHECK', loadbalancer.action)
        self.assertEqual('FAILED', loadbalancer.status)
        self.assertIn('FOOBAR', str(exc))
        # Missing LB -> CHECK FAILED with the NotFound message
        mock_get.side_effect = lb.NotFound('boom')
        exc = self.assertRaises(exception.ResourceFailure,
                                scheduler.TaskRunner(loadbalancer.check))
        self.assertEqual('CHECK', loadbalancer.action)
        self.assertEqual('FAILED', loadbalancer.status)
        self.assertIn('boom', str(exc))
    def test_update_add_node_by_address(self):
        """Adding a node address in an update calls add_nodes once."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        fake_lb.nodes = self.expected_body['nodes']
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        expected_ip = '172.168.1.4'
        update_template['Properties']['nodes'] = [
            {"addresses": ["166.78.103.141"],
             "port": 80,
             "condition": "ENABLED",
             "type": "PRIMARY",
             "weight": 1},
            {"addresses": [expected_ip],
             "port": 80,
             "condition": "ENABLED",
             "type": "PRIMARY",
             "weight": 1}]
        # Re-stub get() for the update phase: first the pre-update LB,
        # then one already containing the new node.
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.nodes = [
            FakeNode(address=u"172.168.1.4", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"166.78.103.141", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
        ]
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        self.m.StubOutWithMock(fake_lb, 'add_nodes')
        fake_lb.add_nodes([
            fake_lb.Node(address=expected_ip,
                         port=80,
                         condition='ENABLED',
                         type="PRIMARY", weight=1)])
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_resolve_attr_noid(self):
        """Attribute resolution returns None before the LB has an id."""
        stack = mock.Mock()
        stack.db_resource_get.return_value = None
        stack.has_cache_data.return_value = False
        resdef = mock.Mock(spec=rsrc_defn.ResourceDefinition)
        lbres = lb.CloudLoadBalancer("test", resdef, stack)
        self.assertIsNone(lbres._resolve_attribute("PublicIp"))
    def test_resolve_attr_virtualips(self):
        """The virtualIps attribute reflects the LB's virtual-IP records."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        fake_lb.virtual_ips = [FakeVirtualIP(address='1.2.3.4',
                                             type='PUBLIC',
                                             ipVersion="IPv6",
                                             id='test-id')]
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        # Note: camelCase ipVersion is exposed as snake_case ip_version.
        expected = [{
            'ip_version': 'IPv6',
            'type': 'PUBLIC',
            'id': 'test-id',
            'address': '1.2.3.4'}]
        self.m.ReplayAll()
        self.assertEqual(expected, rsrc._resolve_attribute("virtualIps"))
        self.m.VerifyAll()
    def test_update_nodes_immutable(self):
        """Node add/delete/modify each wait out a PENDING_UPDATE status.

        Each mutation (add 4.4.4.4, delete 3.3.3.3, disable 2.2.2.2) is
        preceded by a get() returning PENDING_UPDATE, forcing the
        resource to poll until the LB is ACTIVE again.
        """
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        current_nodes = [
            FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"3.3.3.3", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1)
        ]
        fake_lb.nodes = current_nodes
        fake_lb.tracker = "fake_lb"
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        expected_ip = '4.4.4.4'
        update_template['Properties']['nodes'] = [
            {"addresses": ["1.1.1.1"], "port": 80, "condition": "ENABLED",
             "type": "PRIMARY", "weight": 1},
            {"addresses": ["2.2.2.2"], "port": 80, "condition": "DISABLED",
             "type": "PRIMARY", "weight": 1},
            {"addresses": [expected_ip], "port": 80, "condition": "ENABLED",
             "type": "PRIMARY", "weight": 1}
        ]
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.status = "PENDING_UPDATE"
        fake_lb1.tracker = "fake_lb1"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)  # ACTIVE
        # Add node `expected_ip`
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)  # PENDING_UPDATE
        fake_lb2 = copy.deepcopy(fake_lb1)
        fake_lb2.status = "ACTIVE"
        fake_lb2.nodes = [
            FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"3.3.3.3", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"4.4.4.4", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
        ]
        fake_lb2.tracker = "fake_lb2"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)  # ACTIVE
        # Delete node 3.3.3.3
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)  # PENDING_UPDATE
        fake_lb3 = copy.deepcopy(fake_lb2)
        fake_lb3.status = "ACTIVE"
        fake_lb3.nodes = [
            FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"4.4.4.4", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1)
        ]
        fake_lb3.tracker = "fake_lb3"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb3)  # ACTIVE
        # Update node 2.2.2.2
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)  # PENDING_UPDATE
        fake_lb4 = copy.deepcopy(fake_lb3)
        fake_lb4.status = "ACTIVE"
        fake_lb4.nodes = [
            FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"2.2.2.2", port=80, condition=u"DISABLED",
                     type="PRIMARY", weight=1),
            FakeNode(address=u"4.4.4.4", port=80, condition=u"ENABLED",
                     type="PRIMARY", weight=1)
        ]
        fake_lb4.tracker = "fake_lb4"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb4)  # ACTIVE
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update_pending_update_status(self):
        """An update keeps polling while the LB reports PENDING_UPDATE."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['name'] = "updated_name"
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.name = "updated_name"
        fake_lb1.status = "PENDING_UPDATE"  # lb is immutable
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        fake_lb2 = copy.deepcopy(fake_lb)
        fake_lb2.name = "updated_name"
        fake_lb2.status = "ACTIVE"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update_immutable_exception(self):
        """An 'immutable' API error during update is retried until it works."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['name'] = "updated_name"
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)  # initial iteration
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)  # immutable
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.name = "updated_name"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)  # after update
        self.m.StubOutWithMock(fake_lb, 'update')
        # First update attempt raises the PENDING_UPDATE/immutable error;
        # the second attempt succeeds.
        msg = ("Load Balancer '%s' has a status of 'PENDING_UPDATE' and "
               "is considered immutable." % rsrc.resource_id)
        fake_lb.update(name="updated_name").AndRaise(Exception(msg))
        fake_lb.update(name="updated_name").AndReturn(None)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_create_immutable_exception(self):
        """An 'immutable' API error during creation is retried."""
        access_list = [{"address": '192.168.1.1/0',
                        'type': 'ALLOW'},
                       {'address': '172.165.3.43',
                        'type': 'DENY'}]
        template = self._set_template(self.lb_template,
                                      accessList=access_list)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.StubOutWithMock(fake_lb, 'get_access_list')
        fake_lb.get_access_list().AndReturn({})
        fake_lb.get_access_list().AndReturn({})
        fake_lb.get_access_list().AndReturn(access_list)
        self.m.StubOutWithMock(fake_lb, 'add_access_list')
        # First add attempt raises the PENDING_UPDATE/immutable error;
        # the retry succeeds.
        msg = ("Load Balancer '%s' has a status of 'PENDING_UPDATE' and "
               "is considered immutable." % rsrc.resource_id)
        fake_lb.add_access_list(access_list).AndRaise(Exception(msg))
        fake_lb.add_access_list(access_list)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.m.VerifyAll()
    def test_update_lb_name(self):
        """Changing the name property issues update(name=...)."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['name'] = "updated_name"
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.name = "updated_name"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        self.m.StubOutWithMock(fake_lb, 'update')
        fake_lb.update(name="updated_name")
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update_lb_multiple(self):
        """Multiple changed properties are combined into one update() call."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['name'] = "updated_name"
        update_template['Properties']['algorithm'] = "RANDOM"
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        # Intermediate poll: only the name has changed yet.
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.name = "updated_name"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        fake_lb2 = copy.deepcopy(fake_lb)
        fake_lb2.algorithm = "RANDOM"
        fake_lb2.name = "updated_name"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
        self.m.StubOutWithMock(fake_lb, 'update')
        fake_lb.update(name="updated_name", algorithm="RANDOM")
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update_lb_algorithm(self):
        """Changing the algorithm property issues update(algorithm=...)."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['algorithm'] = "RANDOM"
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.algorithm = "ROUND_ROBIN"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        # Here the update() expectation is recorded on fake_lb1 (the LB
        # returned by the first get), unlike the sibling tests.
        self.m.StubOutWithMock(fake_lb1, 'update')
        fake_lb1.update(algorithm="RANDOM")
        fake_lb2 = copy.deepcopy(fake_lb)
        fake_lb2.algorithm = "RANDOM"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update_lb_protocol(self):
        """Changing the protocol property issues update(protocol=...)."""
        rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
                                                self.lb_name,
                                                self.expected_body)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['protocol'] = "IMAPS"
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.protocol = "IMAPS"
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        self.m.StubOutWithMock(fake_lb, 'update')
        fake_lb.update(protocol="IMAPS")
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_update_lb_redirect(self):
        """Enabling httpsRedirect on an HTTPS LB issues update()."""
        template = self._set_template(
            self.lb_template, protocol="HTTPS")
        expected = self._set_expected(
            self.expected_body, protocol="HTTPS")
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        update_template = copy.deepcopy(rsrc.t)
        update_template['Properties']['httpsRedirect'] = True
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.httpsRedirect = True
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        self.m.StubOutWithMock(fake_lb, 'update')
        fake_lb.update(httpsRedirect=True)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.update, update_template)()
        self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_lb_redirect_https(self):
        """httpsRedirect=True is accepted at creation for an HTTPS LB."""
        template = self._set_template(
            self.lb_template, protocol="HTTPS", httpsRedirect=True)
        expected = self._set_expected(
            self.expected_body, protocol="HTTPS", httpsRedirect=True)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_lb_redirect_HTTP_with_SSL_term(self):
        """httpsRedirect on an HTTP LB with SSL termination is deferred.

        The LB is created with httpsRedirect=False in the body; the
        redirect takes effect only after SSL termination is configured.
        """
        ssl_termination_template = {
            'privatekey': private_key,
            'intermediateCertificate': 'fwaefawe',
            'secureTrafficOnly': True,
            'securePort': 443,
            'certificate': cert
        }
        ssl_termination_api = copy.deepcopy(ssl_termination_template)
        ssl_termination_api['enabled'] = True
        # The API response never includes the private key.
        del ssl_termination_api['privatekey']
        template = self._set_template(
            self.lb_template, sslTermination=ssl_termination_template,
            protocol="HTTP", httpsRedirect=True)
        expected = self._set_expected(
            self.expected_body, protocol="HTTP", httpsRedirect=False)
        rsrc, fake_lb = self._mock_loadbalancer(template,
                                                self.lb_name,
                                                expected)
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc.clb, 'create')
        rsrc.clb.create(self.lb_name, **expected).AndReturn(fake_lb)
        self.m.StubOutWithMock(rsrc.clb, 'get')
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
        fake_lb1 = copy.deepcopy(fake_lb)
        fake_lb1.httpsRedirect = True
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
        self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
        fake_lb.get_ssl_termination().AndReturn({})
        fake_lb.get_ssl_termination().AndReturn(ssl_termination_api)
        self.m.StubOutWithMock(fake_lb1, 'get_ssl_termination')
        fake_lb1.get_ssl_termination().AndReturn(ssl_termination_api)
        fake_lb1.get_ssl_termination().AndReturn(ssl_termination_api)
        fake_lb1.get_ssl_termination().AndReturn(ssl_termination_api)
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
def test_update_lb_half_closed(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['halfClosed'] = True
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.halfClosed = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(halfClosed=True)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
# Updating the 'port' property calls lb.update(port=...) and the update
# completes once a subsequent GET reflects the new port.
def test_update_lb_port(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['port'] = 1234
# Re-stub: first GET returns the pre-update LB, second GET returns a
# copy whose port already shows the new value (update converged).
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.port = 1234
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(port=1234)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_timeout(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['timeout'] = 120
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.timeout = 120
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(timeout=120)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_health_monitor_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['healthMonitor'] = {
'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"}
self.m.StubOutWithMock(fake_lb, 'get_health_monitor')
fake_lb.get_health_monitor().AndReturn({})
fake_lb.get_health_monitor().AndReturn(
{'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"})
self.m.StubOutWithMock(fake_lb, 'add_health_monitor')
fake_lb.add_health_monitor(
attemptsBeforeDeactivation=4, bodyRegex='.* testing .*', delay=10,
hostHeader='example.com', path='/',
statusRegex='^[234][0-9][0-9]$', timeout=10, type='HTTP')
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_health_monitor_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
hm = {'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"}
template['Resources'][lb_name]['Properties']['healthMonitor'] = hm
expected_body = copy.deepcopy(self.expected_body)
expected_body['healthMonitor'] = hm
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['healthMonitor']
self.m.StubOutWithMock(fake_lb, 'get_health_monitor')
fake_lb.get_health_monitor().AndReturn(
{'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"})
fake_lb.get_health_monitor().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'delete_health_monitor')
fake_lb.delete_health_monitor()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
# Adding sessionPersistence on update sets the attribute directly on the
# (fake) load balancer; no extra API stubs are required.
def test_update_session_persistence_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['sessionPersistence'] = 'SOURCE_IP'
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
# The handler writes the persistence type straight onto the LB object.
self.assertEqual('SOURCE_IP', fake_lb.session_persistence)
self.m.VerifyAll()
# Removing sessionPersistence from the template clears the attribute
# (set to the empty string) on the load balancer.
def test_update_session_persistence_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'sessionPersistence'] = "SOURCE_IP"
# The create() body carries the API-shaped dict form of the property.
expected_body = copy.deepcopy(self.expected_body)
expected_body['sessionPersistence'] = {'persistenceType': "SOURCE_IP"}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['sessionPersistence']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
# Deletion is modeled as resetting the attribute to ''.
self.assertEqual('', fake_lb.session_persistence)
self.m.VerifyAll()
def test_update_ssl_termination_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['sslTermination'] = {
'securePort': 443, 'privatekey': private_key, 'certificate': cert,
'secureTrafficOnly': False, 'intermediateCertificate': ''}
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443, 'certificate': cert,
'secureTrafficOnly': False, 'enabled': True})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(
securePort=443, privatekey=private_key, certificate=cert,
secureTrafficOnly=False, intermediateCertificate='')
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_ssl_termination_delete(self):
template = copy.deepcopy(self.lb_template)
ssl_termination_template = {
'securePort': 443, 'privatekey': private_key, 'certificate': cert,
'intermediateCertificate': '', 'secureTrafficOnly': False}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties']['sslTermination'] = (
ssl_termination_template)
# The SSL termination config is done post-creation, so no need
# to modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(**ssl_termination_api)
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443, 'certificate': cert,
'secureTrafficOnly': False, 'enabled': True})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['sslTermination']
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
fake_lb)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443, 'certificate': cert,
'secureTrafficOnly': False})
self.m.StubOutWithMock(fake_lb, 'delete_ssl_termination')
fake_lb.delete_ssl_termination()
fake_lb.get_ssl_termination().AndReturn({})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_metadata_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['metadata'] = {'a': 1, 'b': 2}
self.m.StubOutWithMock(fake_lb, 'get_metadata')
fake_lb.get_metadata().AndReturn({})
fake_lb.get_metadata().AndReturn({'a': 1, 'b': 2})
self.m.StubOutWithMock(fake_lb, 'set_metadata')
fake_lb.set_metadata({'a': 1, 'b': 2})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_metadata_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties']['metadata'] = {
'a': 1, 'b': 2}
expected_body = copy.deepcopy(self.expected_body)
expected_body['metadata'] = mox.SameElementsAs(
[{'key': 'a', 'value': 1},
{'key': 'b', 'value': 2}])
rsrc, fake_lb = self._mock_loadbalancer(
template, self.lb_name, expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['metadata']
self.m.StubOutWithMock(fake_lb, 'get_metadata')
fake_lb.get_metadata().AndReturn({'a': 1, 'b': 2})
fake_lb.get_metadata().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'delete_metadata')
fake_lb.delete_metadata()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_errorpage_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
error_page = (
'<html><head><title>Service Unavailable</title></head><body><h2>'
'Service Unavailable</h2>The service is unavailable</body></html>')
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['errorPage'] = error_page
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn(
{'errorpage': {'content': 'foo'}})
fake_lb.get_error_page().AndReturn(
{'errorpage': {'content': error_page}})
self.m.StubOutWithMock(fake_lb, 'set_error_page')
fake_lb.set_error_page(error_page)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_errorpage_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
error_page = (
'<html><head><title>Service Unavailable</title></head><body><h2>'
'Service Unavailable</h2>The service is unavailable</body></html>')
template['Resources'][lb_name]['Properties']['errorPage'] = error_page
# The error page config is done post-creation, so no need to
# modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'set_error_page')
fake_lb.set_error_page(error_page)
fake_lb.get_error_page().AndReturn({'errorpage':
{'content': error_page}})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['errorPage']
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
fake_lb)
self.m.StubOutWithMock(fake_lb, 'clear_error_page')
fake_lb.clear_error_page()
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn(
{'errorpage': {'content': error_page}})
fake_lb.get_error_page().AndReturn({'errorpage': {'content': ""}})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
# Enabling connectionLogging on update toggles the boolean attribute on
# the load balancer object.
def test_update_connection_logging_enable(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['connectionLogging'] = True
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertTrue(fake_lb.connection_logging)
self.m.VerifyAll()
def test_update_connection_logging_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'connectionLogging'] = True
expected_body = copy.deepcopy(self.expected_body)
expected_body['connectionLogging'] = {'enabled': True}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.connection_logging = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.connection_logging = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['connectionLogging']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertFalse(fake_lb.connection_logging)
self.m.VerifyAll()
def test_update_connection_logging_disable(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'connectionLogging'] = True
expected_body = copy.deepcopy(self.expected_body)
expected_body['connectionLogging'] = {'enabled': True}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['connectionLogging'] = False
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertFalse(fake_lb.connection_logging)
self.m.VerifyAll()
def test_update_connection_throttle_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['connectionThrottle'] = {
'maxConnections': 1000}
self.m.StubOutWithMock(fake_lb, 'add_connection_throttle')
self.m.StubOutWithMock(fake_lb, 'get_connection_throttle')
fake_lb.get_connection_throttle().AndReturn(
{'maxConnectionRate': None, 'minConnections': None,
'rateInterval': None, 'maxConnections': 100})
fake_lb.add_connection_throttle(
maxConnections=1000, maxConnectionRate=None, minConnections=None,
rateInterval=None)
fake_lb.get_connection_throttle().AndReturn(
{'maxConnectionRate': None, 'minConnections': None,
'rateInterval': None, 'maxConnections': 1000})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_connection_throttle_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'connectionThrottle'] = {'maxConnections': 1000}
expected_body = copy.deepcopy(self.expected_body)
expected_body['connectionThrottle'] = {
'maxConnections': 1000, 'maxConnectionRate': None,
'rateInterval': None, 'minConnections': None}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['connectionThrottle']
self.m.StubOutWithMock(fake_lb, 'get_connection_throttle')
fake_lb.get_connection_throttle().AndReturn({
'maxConnections': 1000, 'maxConnectionRate': None,
'rateInterval': None, 'minConnections': None})
self.m.StubOutWithMock(fake_lb, 'delete_connection_throttle')
fake_lb.delete_connection_throttle()
fake_lb.get_connection_throttle().AndReturn({})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_content_caching_enable(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['contentCaching'] = 'ENABLED'
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.content_caching = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.content_caching = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_content_caching_deleted(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'contentCaching'] = 'ENABLED'
# Enabling the content cache is done post-creation, so no need
# to modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['contentCaching']
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.content_caching = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.content_caching = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_content_caching_disable(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'contentCaching'] = 'ENABLED'
# Enabling the content cache is done post-creation, so no need
# to modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['contentCaching'] = 'DISABLED'
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.content_caching = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.content_caching = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
# Happy-path delete: one GET returns the LB, the next raises NotFound,
# which the resource treats as "deletion finished".
def test_delete(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
# NotFound on the follow-up poll signals the LB is gone.
rsrc.clb.get(mox.IgnoreArg()).AndRaise(lb.NotFound('foo'))
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete_immutable(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndRaise(lb.NotFound('foo'))
self.m.StubOutWithMock(fake_lb, 'delete')
fake_lb.delete().AndRaise(Exception('immutable'))
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete_non_immutable_exc(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
self.m.StubOutWithMock(fake_lb, 'delete')
fake_lb.delete().AndRaise(FakeException())
self.m.ReplayAll()
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.delete))
self.assertIn('FakeException', six.text_type(exc))
self.m.VerifyAll()
# Delete polls through the LB status lifecycle: ACTIVE ->
# PENDING_DELETE -> DELETED, completing once DELETED is observed.
def test_delete_states(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
# Three snapshots of the LB, one per poll, each in a later state.
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb3 = copy.deepcopy(fake_lb)
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1.status = 'ACTIVE'
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2.status = 'PENDING_DELETE'
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
fake_lb3.status = 'DELETED'
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb3)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_redir(self):
mock_stack = mock.Mock()
mock_stack.db_resource_get.return_value = None
mock_stack.has_cache_data.return_value = False
props = {'httpsRedirect': True,
'protocol': 'HTTPS',
'port': 443,
'nodes': [],
'virtualIps': [{'id': '1234'}]}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
self.assertIsNone(mock_lb.validate())
props['protocol'] = 'HTTP'
props['sslTermination'] = {
'secureTrafficOnly': True,
'securePort': 443,
'privatekey': "bobloblaw",
'certificate': 'mycert'
}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb_2",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test_2", mock_resdef, mock_stack)
self.assertIsNone(mock_lb.validate())
# httpsRedirect=True with a non-HTTP(S) protocol (TCP) must fail
# validation with a StackValidationFailed explaining the restriction.
def test_invalid_redir_proto(self):
mock_stack = mock.Mock()
mock_stack.db_resource_get.return_value = None
mock_stack.has_cache_data.return_value = False
props = {'httpsRedirect': True,
'protocol': 'TCP',
'port': 1234,
'nodes': [],
'virtualIps': [{'id': '1234'}]}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
def test_invalid_redir_ssl(self):
mock_stack = mock.Mock()
mock_stack.db_resource_get.return_value = None
mock_stack.has_cache_data.return_value = False
props = {'httpsRedirect': True,
'protocol': 'HTTP',
'port': 1234,
'nodes': [],
'virtualIps': [{'id': '1234'}]}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
props['sslTermination'] = {
'secureTrafficOnly': False,
'securePort': 443,
'privatekey': "bobloblaw",
'certificate': 'mycert'
}
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
props['sslTermination'] = {
'secureTrafficOnly': True,
'securePort': 1234,
'privatekey': "bobloblaw",
'certificate': 'mycert'
}
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
def test_update_nodes_condition_draining(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.nodes = self.expected_body['nodes']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
expected_ip = '172.168.1.4'
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "DRAINING",
"type": "PRIMARY",
"weight": 1},
{"addresses": [expected_ip],
"port": 80,
"condition": "DRAINING",
"type": "PRIMARY",
"weight": 1}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'add_nodes')
fake_lb1.add_nodes([
fake_lb1.Node(address=expected_ip,
port=80,
condition='DRAINING',
type="PRIMARY", weight=1)])
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.nodes = [
FakeNode(address=u"166.78.103.141", port=80,
condition=u"DRAINING", type="PRIMARY", weight=1),
FakeNode(address=u"172.168.1.4", port=80,
condition=u"DRAINING", type="PRIMARY", weight=1),
]
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_nodes_add_same_address_different_port(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.nodes = self.expected_body['nodes']
fake_lb.tracker = "fake_lb"
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1},
{"addresses": ["166.78.103.141"],
"port": 81,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'add_nodes')
fake_lb1.add_nodes([
fake_lb1.Node(address="166.78.103.141",
port=81,
condition='ENABLED',
type="PRIMARY", weight=1)])
fake_lb1.tracker = "fake_lb1"
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.nodes = [
FakeNode(address=u"166.78.103.141", port=80,
condition=u"ENABLED", type="PRIMARY", weight=1),
FakeNode(address=u"166.78.103.141", port=81,
condition=u"ENABLED", type="PRIMARY", weight=1),
]
fake_lb2.tracker = "fake_lb2"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_nodes_defaults(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
tmpl_node = template['Resources'][lb_name]['Properties']['nodes'][0]
tmpl_node['type'] = "PRIMARY"
tmpl_node['condition'] = "ENABLED"
tmpl_node['weight'] = 1
expected_body = copy.deepcopy(self.expected_body)
expected_body['nodes'] = [FakeNode(address=u"166.78.103.141", port=80,
condition=u"ENABLED",
type="PRIMARY", weight=1)]
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
fake_lb.nodes = self.expected_body['nodes']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'add_nodes')
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
|
9,633 | b220cacc2530ca62b5599a9c1894e979bcfd5109 | #!/usr/bin/env python3
#
# Display all tags in the specified file for neoview.
# Author: Andrew Pyatkov <mrbiggfoot@gmail.com>
# License: MIT
#
"""
Display all tags in the specified file for neoview.
Output: {file_name}\t{tag_address}\t{displayable_tag_info}
"""
import argparse
import subprocess

# Colors for the output; see:
# https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit
COLOR_TAGTYPE = '\033[1;35m'
COLOR_TAGNAME = ''
COLOR_COMMENT = '\033[0;32m'
COLOR_BAR = '\033[0;37m'
COLOR_RESET = '\033[m'


def displayable_info(tagname, comment, max_tag_len):
    """Return a colorized, column-aligned display string for one tag.

    *comment* is the ctags trailer found after ';"': the one-letter tag
    kind, optionally followed by a tab and extra context (e.g. 'class:Foo').
    *max_tag_len* is the width used to left-pad the tag name column.
    """
    cs = comment.split("\t", 1)
    return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').\
        format(
            COLOR_TAGNAME, tagname, COLOR_RESET,
            COLOR_BAR, COLOR_TAGTYPE, cs[0], COLOR_BAR, COLOR_RESET,
            COLOR_COMMENT, cs[1] if len(cs) == 2 else "", COLOR_RESET)


def main():
    """Run ctags on the given file and print one display line per tag.

    Output format: {file_name}\t{tag_address}\t{displayable_tag_info}
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="File name to display tags from")
    args = parser.parse_args()

    # Pass an argv list with shell=False so filenames containing spaces or
    # shell metacharacters are handled safely (the original
    # 'ctags ... %s' % filename interpolation broke on such names and was
    # a shell-injection hazard).
    cmd = ['ctags', '-f', '-', '--excmd=number', args.file]
    result = subprocess.check_output(cmd)
    out = result.decode("utf-8", errors="ignore").rstrip().split("\n")

    # Lists of [file_name, tag_address, tag_name, comment].
    # 'file_name' is relative to the current directory.
    # 'tag_address' is a line number (--excmd=number).
    tags = []
    max_tag_len = 0
    for line in out:
        if not line:
            # A file with no tags yields '' -> [''] from split(); skip it
            # instead of crashing on the t[1] index below.
            continue
        # t[0] - tag name, t[1] - file name, t[2] - tag address and comment
        t = line.split("\t", 2)
        max_tag_len = max(max_tag_len, len(t[0]))
        # info[0] - tag address, info[1] - comment
        info = t[2].split(';"')
        tags.append([t[1], info[0], t[0], info[1].strip()])

    for t in tags:
        print('%s\t%s\t%s' %
              (t[0], t[1], displayable_info(t[2], t[3], max_tag_len)))


if __name__ == "__main__":
    main()
|
9,634 | d13c6d71bb871496b0c6ad2451a2f561484e7c68 | # -*- coding: utf-8 -*-
import scrapy
# Scrapy spider skeleton for heiyan.com (the shape produced by
# `scrapy genspider`); parse() is still an unimplemented stub.
class Heiyan2Spider(scrapy.Spider):
# Unique spider name, used by `scrapy crawl heiyan2`.
name = 'heiyan2'
# Requests to domains outside this list are dropped by the offsite middleware.
allowed_domains = ['heiyan.com']
start_urls = ['http://heiyan.com/']
# Default callback for responses to start_urls; no extraction implemented yet.
def parse(self, response):
pass
|
9,635 | 8fa78824a38a3b0c1f51aceacab671f987ea2705 | from .Buzzer import BuzzerController
from .Card import CardScanner
from .RFID import RFIDController
from .Servo import ServoController
__all__ = ["BuzzerController", "CardScanner", "RFIDController", "ServoController"]
|
9,636 | e01eced7c43aae354047fbf29028c601d1daae50 | import unittest
import gym
import torch
from all.environments import DuplicateEnvironment, GymEnvironment
def make_vec_env(num_envs=3, env_id='CartPole-v0'):
    """Build a list of independent GymEnvironment instances.

    Args:
        num_envs: Number of duplicate environments to create.
        env_id: Gym environment id to instantiate. Defaults to
            'CartPole-v0', preserving the previously hard-coded value
            so existing callers are unaffected.

    Returns:
        list of GymEnvironment, length ``num_envs``.
    """
    return [GymEnvironment(env_id) for _ in range(num_envs)]
# Tests for DuplicateEnvironment, a vectorized wrapper that steps several
# copies of the same Gym environment in lockstep and returns batched state.
class DuplicateEnvironmentTest(unittest.TestCase):
# The wrapper reports the name of the wrapped single environment.
def test_env_name(self):
env = DuplicateEnvironment(make_vec_env())
self.assertEqual(env.name, 'CartPole-v0')
# num_envs matches the number of wrapped envs and the batch dimension
# of the reset state.
def test_num_envs(self):
num_envs = 5
env = DuplicateEnvironment(make_vec_env(num_envs))
self.assertEqual(env.num_envs, num_envs)
self.assertEqual((num_envs,), env.reset().shape)
# reset() yields batched observations with zero reward/done and mask=1
# for every sub-environment (CartPole observations are 4-dimensional).
def test_reset(self):
num_envs = 5
env = DuplicateEnvironment(make_vec_env(num_envs))
state = env.reset()
self.assertEqual(state.observation.shape, (num_envs, 4))
self.assertTrue((state.reward == torch.zeros(num_envs, )).all())
self.assertTrue((state.done == torch.zeros(num_envs, )).all())
self.assertTrue((state.mask == torch.ones(num_envs, )).all())
# One batched step with action 1 in every env: reward 1, not done.
def test_step(self):
num_envs = 5
env = DuplicateEnvironment(make_vec_env(num_envs))
env.reset()
state = env.step(torch.ones(num_envs, dtype=torch.int32))
self.assertEqual(state.observation.shape, (num_envs, 4))
self.assertTrue((state.reward == torch.ones(num_envs, )).all())
self.assertTrue((state.done == torch.zeros(num_envs, )).all())
self.assertTrue((state.mask == torch.ones(num_envs, )).all())
# Step (seeded for determinism) until env 0 terminates; the terminal
# per-env state must have done=True and mask=0. The 100-step cap bounds
# the loop since constant action 1 destabilizes CartPole well before that.
def test_step_until_done(self):
num_envs = 3
env = DuplicateEnvironment(make_vec_env(num_envs))
env.seed(5)
env.reset()
for _ in range(100):
state = env.step(torch.ones(num_envs, dtype=torch.int32))
if state.done[0]:
break
self.assertEqual(state[0].observation.shape, (4,))
self.assertEqual(state[0].reward, 1.)
self.assertTrue(state[0].done)
self.assertEqual(state[0].mask, 0)
|
9,637 | b75ebcd278ae92274bbbe8d1ce5cb3bb7fa14a2c | from nltk.tokenize import sent_tokenize
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
import networkx as nx
def summarize(text):
    """Extractive TextRank-style summary: keep the top 40% of sentences.

    Sentences are scored by PageRank over a cosine-similarity graph built
    from their TF-IDF vectors, then joined in score order (not document
    order) into the returned summary string.
    """
    sentences_token = sent_tokenize(text)
    # Feature extraction: bag-of-words counts, then L2-normalised TF-IDF
    vectorizer = CountVectorizer(min_df=1,decode_error='replace')
    sent_bow = vectorizer.fit_transform(sentences_token)
    transformer = TfidfTransformer(norm='l2', smooth_idf=True, use_idf=True)
    sent_tfidf = transformer.fit_transform(sent_bow)
    # Rows are unit-length, so the dot product is the cosine similarity
    similarity_graph = sent_tfidf * sent_tfidf.T
    # NOTE(review): from_scipy_sparse_matrix was removed in networkx 3.0
    # (renamed from_scipy_sparse_array) — confirm the pinned networkx version.
    nx_graph = nx.from_scipy_sparse_matrix(similarity_graph)
    scores = nx.pagerank(nx_graph)
    # Rank sentences by PageRank score, best first
    text_rank_graph = sorted(((scores[i],s) for i,s in enumerate(sentences_token)), reverse=True)
    # Keep the top 40%; int() truncates, so very short inputs may yield 0 sentences
    number_of_sents = int(0.4*len(text_rank_graph))
    del text_rank_graph[number_of_sents:]
    summary = ' '.join(word for _,word in text_rank_graph)
    return summary
|
9,638 | 18f355041a9982de56ad2eb51b665dd39a156f0a | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.db.models import Q
from django.contrib.auth import get_user_model
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.views import APIView
from rest_framework.generics import (CreateAPIView, UpdateAPIView)
from rest_framework.permissions import (AllowAny,IsAuthenticated,IsAdminUser,IsAuthenticatedOrReadOnly)
from rest_framework.authtoken.models import Token
from userprofile.serializers.create_user import UserCreateSerializer
from userprofile.serializers.update_user import UserUpdateSerializer
from userprofile.mailtrap import send_mail
from pwreset.models import PasswordReset
import hashlib
import os
User = get_user_model()
class UserCreateAPIView(CreateAPIView):
serializer_class = UserCreateSerializer
def post(self, request, *args, **kwargs):
validated_data = request.data.get('user')
serializer = UserCreateSerializer(data=validated_data)
if serializer.is_valid(raise_exception=True):
serializer.save()
return Response(serializer.data, status=HTTP_200_OK)
else:
return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
class UserUpdateAPIView(UpdateAPIView):
serializer_class = UserUpdateSerializer
permission_classes = [IsAuthenticated]
def post(self, request, format=None):
data = request
queryset = User.objects.get()
class UserTokenVerifyAPIView(APIView):
permission_classes = [AllowAny]
def post(self, request, *args, **kwargs):
if ('token' in request.data) and request.data['token']:
token_key = request.data['token']
conf_token = Token.objects.filter(key=token_key)
if conf_token:
confirmed_user = conf_token.first().user.userprofile
if not confirmed_user.is_authenticated:
confirmed_user.is_authenticated = True
confirmed_user.save()
return Response({'data': 'Success'}, status=HTTP_200_OK)
return Response({'error': 'User not found'}, status=HTTP_400_BAD_REQUEST)
class UserResetPasswordAPIView(APIView):
def post(self, request, *args, **kwargs):
validated_data = request.data
if ('data' not in request.data) or ('email' not in request.data['data']):
return Response({}, status=HTTP_400_BAD_REQUEST)
email=validated_data['data']['email']
token = hashlib.sha256(bytes((email + os.environ['SALT']), 'utf-8')).hexdigest()
self.send_pwreset(email, token)
pwr_token = PasswordReset.objects.get_or_create(email=email, token=token)
return Response({}, status=HTTP_200_OK)
def send_pwreset(self, email, token):
subject = "Password reset instructions"
body = """Follow these steps to reset your password. {0} \n If you did not request for your password to be reset, please ignore this email.""".format("http://127.0.0.1:3000/reset-password/{}".format(token))
from_email = 'from@email.com'
to_email = email
send_mail(subject, body, from_email, to_email)
class UserNewPasswordAPIView(APIView):
def post(self, request, *args, **kwargs):
validated_data = request.data
if not self.validate_request(validated_data):
return Response({}, status=HTTP_400_BAD_REQUEST)
email = validated_data['data']['email']
newpw = validated_data['data']['password']
token = validated_data['data']['token']
pwreset_token = PasswordReset.objects.filter(token=token, consumed=False, email=email)
if pwreset_token:
user = User.objects.get(email=email)
user.set_password(newpw)
user.save()
pwreset = pwreset_token[0]
pwreset.consumed=True
pwreset.save()
return Response({}, status=HTTP_200_OK)
return Response({}, status=HTTP_400_BAD_REQUEST)
def validate_request(self, data):
if ('data' not in data) or ('email' not in data['data']) or ('passwordconf' not \
in data['data']) or ('password' not in data['data']) or ('token' not in \
data['data']) or (data['data']['password'] != data['data']['passwordconf']) or \
(not data['data']['token']) or (not data['data']['email']):
return False
else:
return True
|
9,639 | 10d3ee459a296c26429659a202833a9570cf9454 | #Create a 3x3 identity matrix
import numpy as np

# NOTE(review): despite the snippet's header comment ("3x3 identity matrix"),
# this builds an 8x8 checkerboard of 0s and 1s — an identity matrix would be
# np.eye(3).
vector = np.zeros((8,8))
vector[1::2,::2]=1  # odd-indexed rows, even-indexed columns -> 1
vector[::2,1::2]=1  # even-indexed rows, odd-indexed columns -> 1
print(vector)
'''
Output
[[0. 1. 0. 1. 0. 1. 0. 1.]
 [1. 0. 1. 0. 1. 0. 1. 0.]
 [0. 1. 0. 1. 0. 1. 0. 1.]
 [1. 0. 1. 0. 1. 0. 1. 0.]
 [0. 1. 0. 1. 0. 1. 0. 1.]
 [1. 0. 1. 0. 1. 0. 1. 0.]
 [0. 1. 0. 1. 0. 1. 0. 1.]
 [1. 0. 1. 0. 1. 0. 1. 0.]]
'''
9,640 | e4b0dc2e3d9310bbe462e746e21080d309dfed84 | import sys
from PyQt4 import QtGui,QtCore
class Button(QtGui.QPushButton):
    """Push button that accepts plain-text drag-and-drop and pops up the dropped text."""
    def __init__(self,*__args):
        super().__init__(*__args)
        self.setAcceptDrops(True) # opt in to receiving drop events
    def dragEnterEvent(self, e):
        "Accept only drags whose payload advertises the 'text/plain' MIME type."
        # The MIME types carried by a drag can be inspected with
        # e.mimeData().formats().
        if e.mimeData().hasFormat('text/plain'):
            e.accept()
        else:
            '不符合数据类型不触发'
            e.ignore()
    def dropEvent(self, e):
        "After dragEnterEvent accepted the drag, show the dropped text in a message box."
        #self.setText(e.mimeData().text()) # (disabled) alternative: use the dropped text as the button label
        QtGui.QMessageBox.question(self,"提示","你拖入的数据是:%s"%e.mimeData().text())
class UI (QtGui.QWidget):
def __init__(self):
super().__init__()
self.setWindowTitle("简单的获取拖放数据")
self.label = QtGui.QLabel("拖动编辑框内的数据移动到按钮上,触发拖动事件")
self.edit = QtGui.QLineEdit('初始文本',self)
self.edit.setDragEnabled(True)
self.button = Button("等待接受",self)
vLayout = QtGui.QVBoxLayout()
vLayout.addWidget(self.label)
bLayout = QtGui.QHBoxLayout()
bLayout.addWidget(self.edit)
bLayout.addWidget(self.button)
vLayout.addLayout(bLayout)
self.setLayout(vLayout)
if __name__ == "__main__":
app =QtGui.QApplication(sys.argv)
w = UI()
w.show()
sys.exit(app.exec_()) |
9,641 | 440c116327ee587b5a305953772523011ece5dda | """
This type stub file was generated by pyright.
"""
import editobj3.introsp as introsp
import editobj3.editor as editor
from owlready2 import *
from editobj3.observe import *
from typing import Any, Optional
__all__ = ["EditedInstances", "OntologyInstanceEditor"]
class EditedInstances(object):
def __init__(self, ontology, Class):
self.ontology = ...
self.namespace = ...
self.Class = ...
self.name = ...
def get_instances(self):
...
instances = ...
def remove_instance(self, instance):
...
def __str__(self):
...
def details(self):
...
def addable_values(self):
...
descr = introsp.description(EditedInstances)
class TabPaneRepartitor(editor.PaneRepartitor):
def __init__(self, instance_editor, tab_edited_class):
self.instance_editor = ...
self.tab_edited_class = ...
def is_displayed_in_other_tab(self, attribute, o_Class):
...
def _compute(self, o, attribute, field_class: Optional[Any] = ...):
...
def is_displayed_in_hierarchy_pane(self, attribute, o, field_class: Optional[Any] = ...):
...
def is_displayed_in_attribute_pane(self, attribute, o, field_class: Optional[Any] = ...):
...
class OntologyInstanceEditor(editor.EditorTabbedDialog):
_Qt_MODULE = ...
_HTML_MODULE = ...
def __init__(self, gui: Optional[Any] = ..., master: Optional[Any] = ..., direction=..., on_validate: Optional[Any] = ..., edit_child_in_self=..., undo_stack: Optional[Any] = ..., on_close: Optional[Any] = ..., menubar: bool = ...):
self.ontology = ...
self.edited_classes = ...
self.last_undoables = ...
self.edited_instancess = ...
def on_dialog_closed(self, *args):
...
def set_ontology(self, ontology, edited_classes: Optional[Any] = ...):
self.ontology = ...
self.edited_classes = ...
self.last_undoables = ...
self.edited_instancess = ...
def add_tab_for_class(self, Class):
...
def on_save(self, *args):
self.last_undoables = ...
def on_save_as(self, *args):
...
|
9,642 | 95584dfdb232be7f507dc9d29ed2f1d95fa2b653 | from random import randrange
import random
"""
both user and computer funcs:
"""
def check_ok(boat, taken_positions):
    """Validate a candidate boat placement on the 10x10 board.

    Sorts `boat` in place, then rejects it when any square is already in
    `taken_positions`, lies off the board (outside 0..99), wraps across a
    row edge, or leaves a gap (squares must be consecutive horizontally
    or vertically).

    Returns the sorted boat when valid, otherwise [-1].
    """
    boat.sort()
    last_index = len(boat) - 1
    for idx, square in enumerate(boat):
        if square in taken_positions or square > 99 or square < 0:
            return [-1]
        # a square in column 9 followed by one in column 0 means the boat
        # wrapped onto the next row
        if square % 10 == 9 and idx < last_index and boat[idx + 1] % 10 == 0:
            return [-1]
        # each square must extend the previous one by +1 (east) or +10 (south)
        if idx > 0 and square != boat[idx - 1] + 1 and square != boat[idx - 1] + 10:
            return [-1]
    return boat
def check_shot(shot, ships, hit, miss, comp, sinked_boats):
    """Apply one shot to the fleet and classify the outcome.

    Removes `shot` from any ship containing it, then records the shot in
    `hit` (ship damaged), `comp` (ship fully sunk, sinked_boats += 1) or
    `miss`. cond encodes the outcome: 0 = miss, 1 = hit, 2 = sunk.

    Returns (ships, hit, miss, comp, cond, sinked_boats).
    """
    outcome = 0  # assume a miss until the shot is found in some ship
    for ship in ships:
        if shot not in ship:
            continue
        ship.remove(shot)
        if ship:
            hit.append(shot)
            outcome = 1
        else:
            comp.append(shot)
            outcome = 2
            sinked_boats += 1
    if outcome == 0:
        miss.append(shot)
    return ships, hit, miss, comp, outcome, sinked_boats
def create_playground(hit, miss, comp):
# input: hit, miss, comp
# this func creates the playground with the status of each block
# print the playground
print(" battleship")
print(" 0 1 2 3 4 5 6 7 8 9")
block = 0 #this variable keep track of the spot of the block
for i in range(10):
#create each row
row = ""
for j in range(10):
#create each spot on the specific row
character = "_ "
if block in miss:
character = "x "
elif block in hit:
character = "o "
elif block in comp:
character = "Q "
row += character
block += 1 #the block var increments 1 after each character is add to row
print(i, " ", row)
print("")
def check_empty(ships):
    """Return True when every ship in `ships` is empty (i.e. fully sunk)."""
    for remaining in ships:
        if remaining:
            return False
    return True
"""
user - 2 funcs:
"""
def create_ships_u(taken_positions, num_boats):
    """Let the user place one boat per entry of `num_boats` (each entry is a length).

    Delegates each placement to get_ship, which re-prompts until valid.
    Returns (ships, taken_positions): the 2D list of all boats' squares and
    the updated list of occupied squares.
    """
    fleet = []
    for boat_len in num_boats:
        placed, taken_positions = get_ship(boat_len, taken_positions)
        fleet.append(placed)
    return fleet, taken_positions
def create_playground_u(taken_positions):
print(" battleships ")
print(" 0 1 2 3 4 5 6 7 8 9")
place = 0
for x in range(10):
row = ""
for y in range(10):
ch = " _ "
if place in taken_positions:
ch = " o "
row = row + ch
place = place + 1
print(x," ",row)
def get_ship(len_of_boat, taken_positions):
    """Prompt the user for each square of one boat and validate the result.

    len_of_boat     -- number of squares to collect
    taken_positions -- squares already occupied; extended in place once the
                       boat is accepted

    Re-prompts on non-integer input, and restarts the whole boat whenever
    check_ok rejects it (off-board, overlapping, or non-contiguous).
    Returns (ship, taken_positions).
    """
    while True:
        ship = []
        print("enter your ship of length", len_of_boat)
        for i in range(len_of_boat):
            while True:
                try:
                    boat_num = input("please enter a number: ")
                    ship.append(int(boat_num))
                except ValueError: # non-integer input: ask for this square again
                    print("wrong type of input")
                    continue
                else: # valid integer: move on to the next square
                    break
        ship = check_ok(ship, taken_positions)
        if -1 not in ship: # valid boat: reserve its squares and stop
            taken_positions += ship
            break
        else:
            print("invalid number - please enter again")
    return ship, taken_positions
def get_shot_user(guesses):
    """Prompt the user until they enter a valid, unused shot.

    guesses -- combined list of previous hits, misses and sinks.
    Returns the shot as an int in [0, 99].

    Fixes vs. the original: an out-of-range entry used to trigger a second
    inner prompt whose (possibly valid) answer was then discarded by the
    outer loop, and the bare `except` also swallowed KeyboardInterrupt.
    """
    while True:
        try:
            shot = int(input("Enter your shot: "))
        except ValueError:
            print("incorrect - please enter integer only")
            continue
        if shot < 0 or shot > 99:
            print("invalid shot - must be between 0 and 99")
        elif shot in guesses:
            print("already guessed - please enter again")
        else:
            return shot
"""
computer - 1 funcs:
"""
def create_ships_c(taken_positions, num_boats):
    """Randomly place the computer's fleet.

    taken_positions -- squares already occupied; extended in place
    num_boats       -- one entry per boat, giving its length

    For each boat, random (start, direction) pairs are tried until
    create_boat returns a placement that check_ok accepts.
    Returns (ships, taken_positions).
    """
    ships = []  # 2D list: one list of squares per boat
    for len_of_boat in num_boats:
        boat_position = [-1]  # sentinel: retry until a valid placement is found
        while -1 in boat_position:
            # randrange(100): any square 0-99 can start a boat
            # (was randrange(99), which silently excluded square 99)
            boat_start = randrange(100)
            # randrange(1, 5): all four directions {1: up, 2: right, 3: down, 4: left}
            # (was randrange(1, 4), whose half-open bound never produced 4/"left")
            boat_direction = randrange(1, 5)
            boat_position = create_boat(len_of_boat, boat_start, boat_direction, taken_positions)
        ships.append(boat_position)
        taken_positions += boat_position
    return ships, taken_positions
def create_boat(len_of_boat, boat_start, boat_direction, taken_positions):
    """Lay out one boat from a starting square and direction.

    Directions: 1 = up (-10 per step), 2 = right (+1), 3 = down (+10),
    4 = left (-1). The candidate is validated through check_ok, so the
    return value is either the boat's squares or [-1]; an unrecognised
    direction yields [].
    """
    deltas = {1: -10, 2: 1, 3: 10, 4: -1}
    if boat_direction not in deltas:
        return []
    step = deltas[boat_direction]
    candidate = [boat_start + i * step for i in range(len_of_boat)]
    return check_ok(candidate, taken_positions)
def get_shot_comp(guesses, tactics):
    """Pick the computer's next shot.

    guesses -- every square the computer has already fired at (appended to)
    tactics -- prioritised candidate squares from calculate_tactics; when
               empty the computer fires at random

    Returns (shot, guesses).

    Fixes vs. the original: randrange(99) could never produce square 99
    (so a boat occupying 99 could make the random phase loop forever), and
    a try/except wrapper that could not raise has been removed.
    NOTE(review): if tactics[0] is already in guesses this still loops
    forever; calculate_tactics filters candidates against guesses, so that
    state should be unreachable — confirm.
    """
    while True:
        if tactics:
            shot = tactics[0]
        else:
            shot = randrange(100)  # was randrange(99): square 99 was unreachable
        if shot not in guesses:
            guesses.append(shot)
            return shot, guesses
def calculate_tactics(shot, tactics, guesses, hit):
    """Rebuild the computer's list of candidate follow-up shots.

    Called after `shot` scored a hit (cond == 1).

    shot    -- the square just hit
    tactics -- current candidate list ([] means this was the first hit)
    guesses -- every square already fired at
    hit     -- squares that hit (but did not yet sink) a boat

    Returns a shuffled list of on-board, not-yet-guessed candidates.
    """
    temp = []
    if len(tactics) < 1:
        # first hit on a boat: it must extend into one of the four
        # orthogonal neighbours
        temp = [shot - 1, shot + 1, shot - 10, shot + 10]
    else:
        # at least two hits: the boat's axis is known, so continue past the
        # new hit and also queue the first untried square behind the run
        if shot - 1 in hit: # previous hit one square to the left -> horizontal run
            temp = [shot + 1]
            for num in [2, 3, 4, 5, 6, 7, 8]:
                if shot - num not in hit:
                    temp.append(shot - num)
                    break
        elif shot + 1 in hit: # previous hit one square to the right
            temp = [shot - 1]
            for num in [2, 3, 4, 5, 6, 7, 8]:
                if shot + num not in hit:
                    temp.append(shot + num)
                    break
        elif shot - 10 in hit: # previous hit one row above -> vertical run
            temp = [shot + 10]
            for num in [20, 30, 40, 50, 60, 70, 80]:
                if shot - num not in hit:
                    temp.append(shot - num)
                    break
        elif shot + 10 in hit: # previous hit one row below. Ex: first shot 50, next 40
            temp = [shot - 10]
            for num in [20, 30, 40, 50, 60, 70, 80]:
                if shot + num not in hit:
                    temp.append(shot + num)
                    break
    candidate = [] # candidates that are on the board and untried
    # NOTE(review): the filter only checks 0..99 and previous guesses; the
    # +/-1 neighbours are never checked for row wrap-around (e.g. 20 -> 19),
    # so a horizontal hunt can jump to the adjacent row.
    for i in range(len(temp)):
        if temp[i] not in guesses and temp[i] < 100 and temp[i] > -1:
            candidate.append(temp[i])
    random.shuffle(candidate) # randomise the order candidates are tried in
    return candidate
"""
main program:
"""
num_boats = [5, 4, 3, 3, 2, 2]  # fleet composition: each entry is one boat's length

# before game
# computer - 1 (the computer's fleet; the USER shoots at these)
hit1 = []
miss1 = []
comp1 = []
guesses1 = []
cond1 = 0
tactics1 = []  # unused for player 1: the human aims manually
taken_positions1 = []
sinked_boats1 = 0  # BUGFIX: was []; check_shot does `sinked_boats += 1`, which
                   # raises TypeError on a list the first time a boat sinks
# user - 2 (the human's fleet; the COMPUTER shoots at these)
hit2 = []
miss2 = []
comp2 = []
guesses2 = []
cond2 = 0
tactics2 = []  # candidate follow-up shots while the computer hunts a hit boat
taken_positions2 = []
sinked_boats2 = 0  # BUGFIX: was [] (same TypeError as above)
# computer creates ships for player 1
ships1, taken_positions1 = create_ships_c(taken_positions1, num_boats)
# user creates boat for player 2 - show board
ships2, taken_positions2 = create_ships_u(taken_positions2, num_boats)
create_playground_u(taken_positions2)
# loop: user and computer take turns shooting until one fleet is sunk
turns = 0
while True:
    turns += 1
    # USER SHOOTS: the "1" data tracks the computer's fleet being shot at
    guesses1 = hit1 + miss1 + comp1
    shot1 = get_shot_user(guesses1)
    ships1, hit1, miss1, comp1, cond1, sinked_boats1 = check_shot(shot1, ships1, hit1, miss1, comp1, sinked_boats1)
    create_playground(hit1, miss1, comp1)
    # check if all of the computer ships are empty:
    if check_empty(ships1):
        print("end of game - winner in", turns)
        break
    # COMPUTER SHOOTS:
    guesses2 = hit2 + miss2 + comp2
    shot2, guesses2 = get_shot_comp(guesses2, tactics2)
    ships2, hit2, miss2, comp2, cond2, sinked_boats2 = check_shot(shot2, ships2, hit2, miss2, comp2, sinked_boats2)
    create_playground(hit2, miss2, comp2)
    if cond2 == 1:
        # scored a hit: build/refresh the list of neighbouring candidates
        tactics2 = calculate_tactics(shot2, tactics2, guesses2, hit2)
    elif cond2 == 2:
        # sank a boat: abandon the hunt and return to random shots
        tactics2 = []
    elif len(tactics2) > 0:
        # missed while hunting: discard the candidate just tried
        tactics2.pop(0)
    # (if none of the above matched there was no active hunt; tactics2 stays [])
    # check if all of the user ships are empty:
    if check_empty(ships2):
        print("end of game - computer wins in", turns)
        break
# after both the user and computer shoot, start a new loop:
9,643 | 486362463dc07bdafea85de39a4a6d58cb8c8f26 | import json
from test.test_basic import BaseCase
class TestUserRegister(BaseCase):
"""
TestClass to test the register function.
"""
def test_successful_register(self):
# Given
payload = json.dumps({
"username": "userjw",
"password": "1q2w3e4r"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"message": "User created successfully."}, response.json)
self.assertEqual(201, response.status_code)
def test_signup_with_non_existing_field(self):
# Given
payload = json.dumps({
"username": "userjw",
"password": "1q2w3e4r",
"email": "foo@bar.de"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"message": "User created successfully."}, response.json)
self.assertEqual(201, response.status_code)
def test_signup_without_username(self):
# Given
payload = json.dumps({
"password": "1q2w3e4r"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"username": "This field cannot be blank!"}, response.json['message'])
self.assertEqual(400, response.status_code)
def test_signup_without_password(self):
# Given
payload = json.dumps({
"username": "userjw"
})
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"password": "This field cannot be blank!"}, response.json['message'])
self.assertEqual(400, response.status_code)
def test_creating_already_existing_user(self):
# Given
payload = json.dumps({
"username": "userjw",
"password": "1q2w3e4r",
})
# Preconditions
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# When
response = self.app.post('/register', headers={"Content-Type": "application/json"}, data=payload)
# Then
self.assertEqual({"message": "A user '{}' already exists!".format(json.loads(payload)['username'])}, response.json)
self.assertEqual(400, response.status_code)
|
9,644 | ddab4d014c000dd96bad932adac75e4eec065483 | import csv
from itertools import chain, combinations
import generation
import time
pi = []
wi = []
di = []
n = input("How many tasks do you want to schedule ? \n")
k=tuple(range(1,n+1))
#la fonction qui supprime un element dans l'ensemble des taches, elle facilite comment retrouver les sous taches de J
def supp(c,chaine):
res=[]
if chaine[0]==c:
res=chaine[1:]
for j in range(len(chaine)):
if c==chaine[j]:
res=chaine[0:j]+chaine[j+1:]
return res
# Cette boucle pour generer les donnees des taches en respectant les contraintes du poids
# chose qui existe deja dans la fonction generer
for i in range(n):
pi.append(generation.generer(n)[0][i])
wi.append(generation.generer(n)[1][i])
di.append(generation.generer(n)[2][i])
#enregistrer les donnees generes dans un fichier csv sous le nom LesDonnesDyna
with open("LesDonnesDyna.csv", "w") as new_file:
csv_writer = csv.writer(new_file, delimiter='\t')
csv_writer.writerow(['i', 'Pi', 'Wi', 'Di'])
for i in range(1, n + 1):
csv_writer.writerow([i, pi[i - 1], wi[i - 1], di[i - 1]])
#pour generer les sequenses possibles
# i c'est le nombre de chaque sequence
debut = time.time()
def all_subsets(ss,i):
return chain(*map(lambda x: combinations(ss, x), range(i,i+1)))
#Combinaison est une liste qui stoque les differentes sous listes (sous taches)
combinaison=[]
for subset in all_subsets(k,2):
combinaison.append(subset)
#pour calculer la condition initiale qui est l'execution d'une seule tache
def f1(i):
return wi[i[0]-1]*max(0,(pi[i[0]-1]-di[i[0]-1]))
#pour calculer les valeurs optimales de 2 combinaisons
def f2(i):
ci=0
for j in range(len(i)):
ci+=pi[int(i[j])-1]
resultP={}
resultP[str(i[0])+" => "+str(i[1])]=f1(supp(i[1],i))+wi[int(i[1])-1]*max(0,(ci-di[int(i[1])-1]))
resultP[str(i[1])+" => "+str(i[0])]=f1(supp(i[0],i))+wi[int(i[0])-1]*max(0,(ci-di[int(i[0])-1]))
#min=min_dic(resultP)
mino=min(resultP.values())
n=resultP.values().index(mino)
v=resultP.keys()[n]
#chemin.append(v)
return mino,v
etapeII={}
for i in range(len(combinaison)):
etapeII[combinaison[i]]=f2(combinaison[i])
for i in range(3,len(k)+1):
combinaison1=[]
for subset in all_subsets(k,i):
combinaison1.append(subset)
for t in range(len(combinaison1)):
resultP={}
for h in range(len(combinaison1[t])):
ci=0
for j in range(len(combinaison1[t])):
ci+=pi[combinaison1[t][j] - 1]
r=supp(combinaison1[t][h], combinaison1[t])
resultP[etapeII[r][1] +' => ' + str(combinaison1[t][h])]= etapeII[r][0] + wi[combinaison1[t][h] - 1] * max(0, (ci - (di[combinaison1[t][h] - 1])))
mino=min(resultP.values())
n=resultP.values().index(mino)
v=resultP.keys()[n]
etapeII[combinaison1[t]]=(mino, v)
fin = time.time()
with open('resultat.txt','w') as resultat:
resultat.write("-la valeur optimal est :"+str(etapeII[k][0])+"\n")
resultat.write("-La séquence optimale est : \n")
resultat.write("-"*len(str(etapeII[k][1])) +"\n")
resultat.write('|'+str(etapeII[k][1])+'|\n')
resultat.write("-"*len(str(etapeII[k][1])) +"\n")
resultat.write("Le temps d'execution est : "+str(fin - debut)+" secondes.") |
9,645 | 106cca8af164fa4ae946f77b40c76e03accf171c | from tkinter import*
me=Tk()
me.geometry("354x460")
me.title("CALCULATOR")
melabel = Label(me,text="CALCULATE HERE",bg='PINK',font=("ARIAL",25))
melabel.pack(side=TOP)
me.config(background='BROWN')
displayStr=StringVar()
op=""
def but(a):
    # Digit/operator button callback: append the pressed key to the pending
    # expression string and mirror it in the display.
    global op
    op=op+str(a)
    displayStr.set(op)
def eq():
    # "=" callback: evaluate the accumulated expression, show the result,
    # and reset the pending expression.
    # NOTE(review): eval() executes arbitrary Python; tolerable only because
    # input comes solely from the calculator's own buttons. A malformed
    # expression (e.g. trailing operator) or division by zero raises inside
    # the Tk callback.
    global op
    result=str(eval(op))
    displayStr.set(result)
    op=""
def clrbut():
    # "CE" callback: clear both the display and the pending expression.
    # (Previously only the display was blanked, so stale input still
    # evaluated on the next "=".)
    global op
    op = ""
    displayStr.set('')
inputText=Entry(me,font=("Courier New",12,'bold'),textvar=displayStr,width=25,bd=5,bg='WHITE')
inputText.pack()
but1=Button(me,padx=14,pady=14,bd=4,bg='RED',command=lambda:but(1),text="1",font=("Courier New",16,'bold'))
but1.place(x=10,y=100)
but2=Button(me,padx=14,pady=14,bd=4,bg='RED',command=lambda:but(2),text="2",font=("Courier New",16,'bold'))
but2.place(x=10,y=170)
but3=Button(me,padx=14,pady=14,bd=4,bg='RED',command=lambda:but(3),text="3",font=("Courier New",16,'bold'))
but3.place(x=10,y=240)
but4=Button(me,padx=14,pady=14,bd=4,bg='YELLOW',command=lambda:but(4),text="4",font=("Courier New",16,'bold'))
but4.place(x=75,y=100)
but5=Button(me,padx=14,pady=14,bd=4,bg='YELLOW',command=lambda:but(5),text="5",font=("Courier New",16,'bold'))
but5.place(x=75,y=170)
but6=Button(me,padx=14,pady=14,bd=4,bg='YELLOW',command=lambda:but(6),text="6",font=("Courier New",16,'bold'))
but6.place(x=75,y=240)
but7=Button(me,padx=14,pady=14,bd=4,bg='YELLOW',command=lambda:but(7),text="7",font=("Courier New",16,'bold'))
but7.place(x=140,y=100)
but8=Button(me,padx=14,pady=14,bd=4,bg='YELLOW',command=lambda:but(8),text="8",font=("Courier New",16,'bold'))
but8.place(x=140,y=170)
but9=Button(me,padx=14,pady=14,bd=4,bg='YELLOW',command=lambda:but(9),text="9",font=("Courier New",16,'bold'))
but9.place(x=140,y=240)
but0=Button(me,padx=14,pady=14,bd=4,bg='RED',command=lambda:but(0),text="0",font=("Courier New",16,'bold'))
but0.place(x=10,y=310)
butdot=Button(me,padx=47,pady=14,bd=4,bg='YELLOW',command=lambda:but("."),text=".",font=("Courier New",16,'bold'))
butdot.place(x=75,y=310)
butpl=Button(me,padx=14,pady=14,bd=4,bg='ORANGE',text="+",command=lambda:but("+"),font=("Courier New",16,'bold'))
butpl.place(x=205,y=100)
butsub=Button(me,padx=14,pady=14,bd=4,bg='ORANGE',text="-",command=lambda:but("-"),font=("Courier New",16,'bold'))
butsub.place(x=205,y=170)
butml=Button(me,padx=14,pady=14,bd=4,bg='ORANGE',text="*",command=lambda:but("*"),font=("Courier New",16,'bold'))
butml.place(x=205,y=240)
butdiv=Button(me,padx=14,pady=14,bd=4,bg='ORANGE',text="/",command=lambda:but("/"),font=("Courier New",16,'bold'))
butdiv.place(x=205,y=310)
butclear=Button(me,padx=14,pady=119,bd=4,bg='GREEN',text="CE",command=clrbut,font=("Courier New",16,'bold'))
butclear.place(x=270,y=100)
butequal=Button(me,padx=151,pady=14,bd=4,bg='GREEN',command=eq,text="=",font=("Courier New",16,'bold'))
butequal.place(x=10,y=380)
me.mainloop() |
9,646 | 483a5e95a7bfca2cc6b1e7e81740620468fb5623 | print(" whats your name boi ?")
# Look the entered name up in a reply table; unknown names get the default.
name = input()
replies = {
    "arrya": "u are a boi",
    "jon": "basterd",
    "ned": "you are dead man",
    "rob": "the king in the north",
}
print(replies.get(name, "carry on"))
|
9,647 | 93bfca1e756951faacd29871ad19afad374e25d6 | from p5 import *
capture = None
def setup():
global capture
createCanvas(390, 240)
capture = createCapture(VIDEO)
capture.size(320, 240)
def draw():
background(255)
image(capture, 0, 0, 320, 240)
run()
|
9,648 | a22d38f7e8122d6339d1beab3bf08fa41c36d61d | import math
def solve():
    """Read the two legs of a right triangle from stdin (one int per line)
    and return the length of the hypotenuse as a float."""
    legs = [int(input()) for _ in range(2)]
    return math.sqrt(legs[0] * legs[0] + legs[1] * legs[1])
print(solve()) |
9,649 | 6d0a945c9eaf6564a327928880df1f0aeed2e5d0 | import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/**
 * Reads a binary-tree description from stdin and prints one child of a
 * queried node. Input: first line "n root", then n lines "id left right"
 * (0 = no child), then one line with the queried id t. Output: t's left
 * child if present, else its right child, else 0.
 */
public class Main {

    // Stub: never implemented and never called.
    public static void findSubNode(Node root) {
    }

    public static void main(String args[]) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        String[] strings = br.readLine().split(" ");
        int n = Integer.parseInt(strings[0]);
        int root = Integer.parseInt(strings[1]);
        Node head1 = new Node(root); // built but unused: tree construction is commented out below
        // NOTE(review): assumes node ids are <= 100; larger ids throw
        // ArrayIndexOutOfBoundsException.
        int[][] arr1 = new int[100 + 1][2];
        for (int i = 0; i < n; i++) {
            strings = br.readLine().split(" ");
            arr1[Integer.parseInt(strings[0])][0] = Integer.parseInt(strings[1]);
            arr1[Integer.parseInt(strings[0])][1] = Integer.parseInt(strings[2]);
        }
        int t = Integer.parseInt(br.readLine());
        // Left child wins when both exist; 0 means t is a leaf.
        if (arr1[t][0] == 0 && arr1[t][1] == 0){
            System.out.println(0);
        } else if(arr1[t][0] != 0){
            System.out.println(arr1[t][0] );
        }else {
            System.out.println(arr1[t][1] );
        }
        // createTree(head1, arr1);
    }

    // Recursively attaches the children recorded in arr to the subtree rooted at head.
    public static void createTree(Node head, int[][] arr) {
        if (head == null) {
            return;
        }
        if (arr[head.value][0] != 0) {
            head.left = new Node(arr[head.value][0]);
            createTree(head.left, arr);
        }
        if (arr[head.value][1] != 0) {
            head.right = new Node(arr[head.value][1]);
            createTree(head.right, arr);
        }
    }
}
|
9,650 | 24a4b9246a9b15334bebc45c532a25bd81266918 | import collections
# Definition for a binary tree node.
class TreeNode:
    """Plain binary-tree node: a value plus optional left/right children."""
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    def isSymmetric(self, root: TreeNode) -> bool:
        """Return True if the tree is a mirror image of itself around its root."""
        def mirror(a, b):
            # Two subtrees are mirrors when both are empty, or their roots
            # match and each one's left mirrors the other's right.
            if a is None and b is None:
                return True
            if a is None or b is None or a.val != b.val:
                return False
            return mirror(a.left, b.right) and mirror(a.right, b.left)

        return root is None or mirror(root.left, root.right)
9,651 | 991b894c4c0fb9cb90aef0542227e001a3a3bb0d | #!/usr/bin/env python
"""
Usage:
generate-doc <layer-definition>
generate-doc --help
generate-doc --version
Options:
--help Show this screen.
--version Show version.
"""
from docopt import docopt
import openmaptiles
from openmaptiles.tileset import Layer
from openmaptiles.docs import collect_documentation
if __name__ == '__main__':
args = docopt(__doc__, version=openmaptiles.__version__)
layer = Layer.parse(args['<layer-definition>'])
markdown = collect_documentation(layer)
print(markdown)
|
9,652 | 9a0e24fbe9f51dc914d891e90196c2ff4e65f04a | #embaralhar sorteio
import random
# Read four student names, then shuffle them to pick a presentation order.
prompts = ('Primeiro aluno: ', 'Primeiro segundo: ', 'Primeiro terceiro: ', 'Primeiro quarto: ')
lista = [input(p) for p in prompts]
random.shuffle(lista)
print('A ordem de apresentacao será')
print(lista)
9,653 | 59c33383365d10c108253f7b5a210d40718913a2 | # pick three names
# Greet the user by name if it appears on the known-names list.
names = ["Mia", "Francis", "Eva"]
print("Please enter your name:")
user_name = input()
greeting = "Hi there, {}!".format(user_name) if user_name in names else "Who goes there?"
print(greeting)
9,654 | 4add80894036e0395a6e6eb13e8a2db0d963de8c | sum_value = 0
for _ in range(5):
sum_value += int(input())
print(sum_value) |
9,655 | 8ad9efbbb2d9e2a5f73ebbb999da3ed93e4c1974 | import time
import random
from BlockchainNetwork.MVB import *
from threading import Thread
coloredlogs.install()
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log = logging.getLogger(__name__)
class MVBTest:
    """Drive a set of MVB network nodes through a suite of blockchain tests.

    Construction generates 15 deterministic key pairs, loads the genesis
    block from JSON, spins up ``initialNodeCnt`` nodes and starts one
    mining thread per node.  The ``*Test`` methods then inject valid and
    invalid transactions (read from JSON fixture files) into the network.
    """

    def __init__(self, initialNodeCnt):
        self.mvb = MVB()
        self.signingKeysList = []   # deterministic SigningKey objects
        self.pubKeysList = []       # matching VerifyKey objects
        self.pubKeysByteList = []   # hex-encoded public keys
        self.__initialSigningKeys()
        self.__initialPubKeys()
        self.mvb.generateGenesisBlockFromJson()
        self.mvb.initialNodes(initialNodeCnt)
        # One mining loop per node; threads stop on their own after ~15s.
        for i, node in enumerate(self.mvb.networkNodes):
            nodeThread = Thread(target=self.threadMining, args=(node, 1))
            nodeThread.start()

    def multipleValidTxTest(self):
        """Feed several valid transactions into the network."""
        log.info("--------------------Multiple valid Tx tests now started-------------------")
        self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/MultipleValidTestTx.json')
        self.mvb.broadcastTxPools()

    def doubleSpendTest(self):
        """Spend the same genesis output twice; mining the second Tx must fail verification."""
        log.info("--------------------Double spend test now started-------------------")
        log.info("A pair of valid and invalid transactions is added into GlobalTx Pool")
        self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/DoubleSpendTestTx.json')
        self.mvb.broadcastTxPools()

    def inputOutputSumTest(self):
        """Inject a Tx whose input and output sums disagree."""
        log.info("--------------------Input output sum test now started-------------------")
        log.info("A pair of valid and invalid Transactions is added into GlobalTx Pool")
        self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/InputOutputSumTestTx.json')
        self.mvb.broadcastTxPools()

    def sigVerifyTest(self):
        """Inject a Tx with an invalid signature."""
        log.info("--------------------Signature verify test now started-------------------")
        log.info("A pair of valid and invalid Transactions is added into GlobalTx Pool")
        self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/SigVerifyTestTx.json')
        self.mvb.broadcastTxPools()

    def numberHashTest(self):
        """Inject a Tx whose number/hash pair is inconsistent."""
        log.info("--------------------Number hash test now started-------------------")
        log.info("A pair of valid and invalid Transactions is added into GlobalTx Pool")
        self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/NumberHashTestTx.json')
        self.mvb.broadcastTxPools()

    def txInputsExistTest(self):
        """Inject a Tx referencing inputs that do not exist."""
        log.info("--------------------Transaction inputs exist test now started-------------------")
        log.info("A pair of valid and invalid Transactions is added into GlobalTx Pool")
        self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/TxInputsExistTestTx.json')
        self.mvb.broadcastTxPools()

    def prevHashMatchTest(self):
        """Have node 2 broadcast a block with an invalid previous-hash."""
        log.info("--------------------Prev Hash test now started-------------------")
        log.info("Node 2 broadcast a Block with invalid prev-hash to the other nodes")
        txList = self.readTxFromFile('./TxFiles/PrevHashMatchTestTx.json')
        self.mvb.networkNodes[1].mineInvalidBlock(txList[0], isInvalidPrevHash=True)

    def blockPOWTest(self):
        """Have node 1 broadcast a block with an invalid proof of work."""
        log.info("--------------------Block POW test now started-------------------")
        log.info("Node 1 broadcast a Block with invalid POW to the other nodes")
        txList = self.readTxFromFile('./TxFiles/BlockPOWTestTx.json')
        self.mvb.networkNodes[0].mineInvalidBlock(txList[0], isInvalidPOW=True)

    def threadMining(self, node: Node, i):
        """Mining loop for one node: drain its tx pool for ~15s, then persist.

        The unused parameter *i* is kept for call compatibility.
        """
        nowTime = time.time()
        while True:
            sleep(random.uniform(0.05, 0.1))
            node.receiveBroadcastBlock()
            # Bug fix: the original iterated node.globalTxPool while calling
            # remove() on it, which skips every other transaction.  Iterate a
            # snapshot instead; the membership check tolerates concurrent
            # removals by other threads.
            for tx in list(node.globalTxPool):
                node.mineBlock(tx)
                if tx in node.globalTxPool:
                    node.globalTxPool.remove(tx)
            if time.time() - nowTime > 15:
                break
        node.saveToFile()

    def createTxJsonFile(self, FILENAME: str, txList: List[Transaction]):
        """Serialize *txList* to *FILENAME* as {'txList': [...]} JSON."""
        txListJsonObj = {'txList': []}
        for tx in txList:
            txListJsonObj['txList'].append(tx.getJsonObj())
        with open(FILENAME, 'w', encoding='utf-8') as f:
            f.write(json.dumps(txListJsonObj, indent=4))

    def readTxFromFile(self, FILENAME: str) -> List[Transaction]:
        """Load a list of Transaction objects from a JSON fixture file."""
        with open(FILENAME, 'r', encoding='utf-8') as f:
            txListJsonObj = json.load(f)
        return [Transaction(jsonObj=txObj) for txObj in txListJsonObj['txList']]

    def __initialSigningKeys(self) -> None:
        """Generate 15 deterministic signing keys from fixed 32-byte seeds."""
        seedStr = '0' * 31
        seedNum = ['1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']
        for num in seedNum:
            seed = (seedStr + num).encode('utf-8')
            self.signingKeysList.append(SigningKey(seed))
        log.info("15 signing keys have been generated successfully")

    def __initialPubKeys(self):
        """Derive the verify keys (and their hex encodings) from the signing keys."""
        for signingKey in self.signingKeysList:
            verifyKey = signingKey.verify_key
            verifyKeyByte = verifyKey.encode(encoder=HexEncoder)
            self.pubKeysList.append(verifyKey)
            self.pubKeysByteList.append(verifyKeyByte)
        log.info(str(len(self.pubKeysList)) + " public keys have been generated successfully")
|
9,656 | cc97f70b9d41357f020ea9c59d8b149392a336cc | from django.apps import AppConfig
class WebApiAppConfig(AppConfig):
    """Django application configuration for the WebApiApp package."""
    # Dotted module path of the application this config belongs to.
    name = 'WebApiApp'
|
9,657 | 95e7e025660e71cbdf6a6a0812964fc26d4beec0 | import sqlite3
import argparse
import json
import index_db
from collections import defaultdict
def query_doc(cursor, lang, title):
    """Look up one indexed document by (lang, title).

    Returns a dict describing the document, or None when no row matches.
    The 'url' column (result index 3) is deliberately left out of the dict.
    """
    cursor.execute(index_db.select_lang_title, (lang, title))
    row = cursor.fetchone()
    if not row:
        return None
    fields = (('lang', 0), ('doc_id', 1), ('doc_path', 2),
              ('title', 4), ('begin', 5), ('end', 6))
    return {name: row[idx] for name, idx in fields}
def locate_single_topic_texts(lang_title_dict, cursor):
    """Resolve every (lang -> title) pair of one topic to its document record.

    Pairs that are not found in the index are dropped; the surviving
    records are returned sorted by language code.
    """
    docs = []
    for lang, title in lang_title_dict.items():
        doc = query_doc(cursor, lang, title)
        if doc:
            docs.append(doc)
    docs.sort(key=lambda d: d['lang'])
    return docs
def locate_interlanguage_texts(file_path, db_path):
    """Load topic groups from *file_path* and locate each group's texts in the index DB."""
    with open(file_path, 'rt') as f:
        topic_groups = json.load(f)
    with sqlite3.connect(db_path) as conn:
        cursor = conn.cursor()
        return [locate_single_topic_texts(group, cursor) for group in topic_groups]
if __name__ == '__main__':
    # Command-line front end: resolve topic groups to file locations.
    cli = argparse.ArgumentParser(
        description='Locate same topic texts over multiple languages.')
    cli.add_argument('--db', dest='db_path', default=index_db.default_path,
                     help='a sqlite database file generated by index.py')
    cli.add_argument('--input', dest='input_path',
                     default='interlanguage_topics.json',
                     help='a json file containing sets of topics over '
                          'multiple languages')
    cli.add_argument('--output', dest='output_path',
                     default='interlanguage_location.json',
                     help='a json file locating same topic texts over '
                          'multiple languages')
    args = cli.parse_args()
    located = locate_interlanguage_texts(args.input_path, args.db_path)
    with open(args.output_path, 'wt') as out:
        json.dump(located, out)
|
9,658 | 1f69bcd204c9be26756d964f4deb61296e40ff10 | # Import this.
|
9,659 | 61571ba9f647f430879b9fa5db884ec4c93c334f | import numpy as np
import urllib2
from io import StringIO
def demo_polyfit0():
x, y = np.loadtxt('stock.txt', unpack=True)
print '-'.join(map(str, np.polyfit(x, y, 1)))
def demo_polyfit1():
d = urllib2.urlopen("http://www.qlcoder.com/download/145622513871043.txt").read().decode("utf-8")
print d
arr = np.genfromtxt(StringIO(d), delimiter=" ")
z1 = np.polyfit(arr[:, 0], arr[:, 1], 5)
print z1
if __name__ == '__main__':
    # Run both demos: the local-file fit first, then the downloaded dataset.
    demo_polyfit0()
    demo_polyfit1()
|
9,660 | af609f1558276bab96477d3a2c61d813b9dd3d82 | """
Program file: DataParser.py.
This program parses and returns a dataset for a plotting program
"""
from sys import exit
from csv import Sniffer, DictReader
class DataParser:
    """Parse a CSV file of data-center measurements for plotting.

    Rows whose 'DC' column matches one of the requested data centers are
    split into accepted records (strictly positive numeric 'Value') and
    ignored records (non-numeric or non-positive 'Value').

    Attributes:
        accepted_records -- list of [dc_name, time, value] entries
        ignored_records  -- raw rows rejected because of an invalid value
    """

    def __init__(self, csvfile, data_centers):
        """Read *csvfile* and collect records for the given *data_centers*."""
        self.accepted_records = []
        self.ignored_records = []
        with open(csvfile, 'r') as handle:
            reader = self.create_reader(handle)
            # Rewind so the reader starts again at the header row.
            handle.seek(0)
            self.create_dataset(reader, data_centers)

    def valid_value(self, number):
        """Return True when *number* parses as a strictly positive float."""
        try:
            return float(number) > 0
        except ValueError:
            # Letters, symbols and empty strings are all rejected here.
            return False

    def create_reader(self, csvfile):
        """Sniff the CSV dialect, require a header row, return a DictReader."""
        dialect = Sniffer().sniff(csvfile.read(1024))
        csvfile.seek(0)
        # The header row supplies the field names used for later parsing.
        if not Sniffer().has_header(csvfile.read(1024)):
            print('Imported csv file lacks header row')
            exit()
        csvfile.seek(0)
        return DictReader(csvfile, dialect=dialect)

    def create_dataset(self, reader=None, data_centers=None):
        """Split the rows of *reader* into accepted and ignored records."""
        for row in reader:
            if row.get('DC') not in data_centers:
                # Row belongs to a data center we are not graphing.
                continue
            value = row.get('Value')
            if self.valid_value(value):
                self.accepted_records.append(
                    [row.get('DC'), float(row.get('Time')), float(value)])
            else:
                # Keep rejected rows around for later analysis.
                self.ignored_records.append(row)

    def get_dataset(self):
        """Return the accepted records."""
        return self.accepted_records
|
9,661 | a1b85d140c45f082ceac54ad8aa9aa5c3659d5cf |
import ftplib
def ftp_download():
    """Download 'ftp_upload.jpg' from the server into the local project dir.

    Relies on the module-level FTP connection ``f`` being logged in.
    """
    file_remote = 'ftp_upload.jpg'
    file_local = '/home/pi/Desktop/Camera-Rasp-Arduino-sensors/test_download.jpg'
    bufsize = 1024
    # Bug fix: use a context manager so the file handle is closed even when
    # retrbinary raises (the original leaked the handle on error).
    with open(file_local, 'wb') as fp:
        f.retrbinary('RETR ' + file_remote, fp.write, bufsize)
def ftp_upload():
    """Upload the local 'test_upload.jpg' to the server.

    Relies on the module-level FTP connection ``f`` being logged in.
    """
    file_remote = 'test_upload.jpg'
    file_local = '/home/pi/Desktop/Camera-Rasp-Arduino-sensors/test_upload.jpg'
    bufsize = 1024
    # Bug fix: use a context manager so the file handle is closed even when
    # storbinary raises (the original leaked the handle on error).
    with open(file_local, 'rb') as fp:
        f.storbinary('STOR ' + file_remote, fp, bufsize)
if __name__ == '__main__':
    # SECURITY: credentials are hard-coded in source control -- move them to
    # environment variables or a config file kept out of the repository.
    host = 'www.aiforu.com'
    username = 'admin_camera'
    password = 'QcZ8M9aDga'
    # Module-level connection used by ftp_download()/ftp_upload() above.
    f = ftplib.FTP(host)
    f.login(username, password)
    pwd_path = f.pwd()
    print("FTP当前路径:", pwd_path)
    ftp_download()
    ftp_upload()
    f.quit()
|
9,662 | c62ffcaa9095d772e51be086be349d200346bc22 | """ Utility functions and classes for SRP
Context : SRP
Module : Statsistics
Version : 1.0.0
Author : Stefano Covino
Date : 04/04/2013
E-mail : stefano.covino@brera.inaf.it
URL: : http://www.merate.mi.astro.it/utenti/covino
Usage : to be imported
Remarks : inputs are a 1D vectors to be cross-correlated. Optionally you can
give a vector of x-axis units. It returns the cross-correlation
value.
History : (04/04/2013) First version.
"""
import numpy
def XCorr_1D (data, refdata, xdata=None):
    """Cross-correlate two 1-D vectors and return the offset of the peak.

    Parameters:
        data, refdata -- 1-D numpy arrays to be cross-correlated.
        xdata -- optional 1-D array of x-axis units; when given, the lag is
                 converted to those units.
    Returns the offset (in x units) of the maximum of the correlation, or
    None when any input is not 1-D.
    """
    if data.ndim != 1 or refdata.ndim != 1:
        return None
    ycorr = numpy.correlate(data, refdata, mode="full")
    xcorr = numpy.arange(ycorr.size)
    # Lag 0 sits at index data.size-1 of the 'full' correlation output.
    lags = xcorr - (data.size - 1)
    # Bug fix: 'xdata == None' compares element-wise on ndarrays and raises
    # ValueError for arrays longer than one element; identity test intended.
    if xdata is None:
        distPerLag = 1.
    elif xdata.ndim == 1:
        distPerLag = (xdata[-1] - xdata[0]) / float(xdata.size)
    else:
        return None
    offsets = -lags * distPerLag
    mx = ycorr.argmax()
    return offsets[mx]
|
9,663 | a824bd7577134227f5c136f2a4382c056f1175be | import mxnet as mx
import numpy as np
import cv2
import random
class Even_iterator(mx.io.DataIter):
    '''
    Data iterator that shuffles samples while keeping same-label pairs as
    neighbors, for verification and triplet losses.

    Python 2 / mxnet module.  NOTE(review): the default ``aug_params=dict()``
    is a mutable default argument; it is copied immediately so no state is
    shared, but a ``None`` sentinel would be safer.
    '''
    def __init__(self, lst_name, batch_size, aug_params=dict(), shuffle=False):
        super(Even_iterator, self).__init__()
        self.batch_size = batch_size
        self.aug_params = aug_params.copy()
        self.shuffle = shuffle
        self.data, self.labels = Even_iterator.load_data(lst_name)
        print "load data over"
        self.data_num = self.labels.shape[0]
        self.label_num = 1 if len(self.labels.shape) == 1 else self.labels.shape[1]
        print self.data_num, self.label_num
        self.reset()
    @staticmethod
    def load_data(lst_name):
        """Load images and labels from a tab-separated .lst file.

        Each line is: index, label(s)..., image-path.  All images are
        assumed to share the size of the first image -- TODO confirm.
        """
        img_lst = [x.strip().split('\t')
                   for x in file(lst_name).read().splitlines()]
        im = cv2.imread(img_lst[0][-1])
        h, w = im.shape[:2]
        n, m = len(img_lst), len(img_lst[0]) - 2
        data = np.zeros((n, h, w, 3), dtype=np.uint8)
        labels = np.zeros((n, m), dtype=np.int32) if m > 1 else np.zeros((n, ), dtype=np.int32)
        for i in range(len(img_lst)):
            im = cv2.imread(img_lst[i][-1])
            data[i] = im
            labels[i] = img_lst[i][1:-1] if m > 1 else img_lst[i][1]
        return data, labels
    @staticmethod
    def even_shuffle(labels):
        '''
        Shuffle the sample order while keeping consecutive pairs of equal
        labels together: sort by (label, random tiebreak), then shuffle the
        pairs as units.
        '''
        s = [(x, int(random.random() * 1e5), i) for i, x in enumerate(labels)]
        s = sorted(s, key=lambda x: (x[0], x[1]))
        lst = [x[2] for x in s]
        idx = range(0, len(lst), 2)
        random.shuffle(idx)
        ret = []
        for i in idx:
            ret.append(lst[i])
            ret.append(lst[i + 1])
        return ret
    @staticmethod
    def model_shuffle(labels):
        '''
        Shuffle samples so that images with the same model label stay
        grouped together (group order is randomized).
        '''
        models_idx = range(int(np.max(labels)) + 1)
        random.shuffle(models_idx)
        s = [(models_idx[x], int(random.random() * 1e5), i) for i, x in enumerate(labels)]
        s = sorted(s, key=lambda x: (x[0], x[1]))
        lst = [x[2] for x in s]
        return lst
    def reset(self):
        # Restart the epoch; reshuffle data/labels in lockstep when enabled.
        self.current = 0
        if self.shuffle:
            idx = Even_iterator.even_shuffle(self.labels)
            # idx = Even_iterator.model_shuffle(self.labels)
            self.data = self.data[idx]
            self.labels = self.labels[idx]
    @property
    def provide_data(self):
        # Declares the (batch, channels, height, width) layout to mxnet.
        shape = self.aug_params['input_shape']
        return [('data', (self.batch_size, shape[0], shape[1], shape[2]))]
    @property
    def provide_label(self):
        return [('softmax_label', (self.batch_size, self.label_num))]
    @staticmethod
    def augment(im, aug_params):
        '''
        Augment one image: resize the short side, (optionally random) crop,
        BGR->RGB channel swap, and optional horizontal mirror.
        Returns the image transposed to (channel, height, width).
        '''
        crop_h, crop_w = aug_params['input_shape'][1:]
        ori_h, ori_w = im.shape[:2]
        resize = aug_params['resize']
        if ori_h < ori_w:
            h, w = resize, int(float(resize) / ori_h * ori_w)
        else:
            h, w = int(float(resize) / ori_w * ori_h), resize
        if h != ori_h:
            im = cv2.resize(im, (w, h), interpolation=cv2.INTER_LINEAR)
        x, y = (w - crop_w) / 2, (h - crop_h) / 2
        if aug_params['rand_crop']:
            x = random.randint(0, w - crop_w)
            y = random.randint(0, h - crop_h)
        im = im[y:y + crop_h, x:x + crop_w, :]
        im = np.transpose(im, (2, 0, 1))
        # cv2 loads BGR; swap to RGB channel order.
        newim = np.zeros_like(im)
        newim[0] = im[2]
        newim[1] = im[1]
        newim[2] = im[0]
        if aug_params['rand_mirror'] and random.randint(0, 1) == 1:
            newim = newim[:, :, ::-1]
        return newim
    def next(self):
        # Produce the next DataBatch or end the epoch.
        if self.current + self.batch_size > self.data_num:
            raise StopIteration
        shape = self.aug_params['input_shape']
        x = np.zeros((self.batch_size, shape[0], shape[1], shape[2]))
        y = np.zeros((self.batch_size, self.label_num) if self.label_num > 1
                     else (self.batch_size, ))
        index = []
        for i in range(self.current, self.current + self.batch_size):
            im = self.data[i]
            # NOTE(review): astype() returns a copy that is discarded here,
            # so this line has no effect -- confirm whether a float conversion
            # was intended before the augmentation step.
            im.astype(np.float32)
            im = Even_iterator.augment(im, self.aug_params)
            x[i - self.current] = im
            y[i - self.current] = self.labels[i]
            index.append(i)
        # Mean subtraction happens on the whole batch at once.
        x -= self.aug_params['mean']
        x = mx.nd.array(x)
        label = mx.nd.array(y)
        batch = mx.io.DataBatch(data=[x], label=[label], pad=0, index=index)
        self.current += self.batch_size
        return batch
|
9,664 | 1178ad09638a4822461f7394e6cabb2db9516053 | #! /usr/bin/python
import Nodo,CLS,copy,sys
from excepcion import *
sys.setrecursionlimit(100000)
# Match
def match(nodo1,nodo2):
    """Pattern-match two AST nodes; True/False, or a list of pairs for lists.

    Non-node leaves compare by equality; wrapper node types are unwrapped;
    VARIABLE matches anything; CONSTANTE/ENTERO/BOOLEANO compare their
    underlying values; LISTA delegates to comparar_listas (which returns the
    matched pairs or False).
    """
    # End of recursion: both sides are plain values, compare directly.
    if (not isinstance(nodo1,Nodo.Nodo)) and (not isinstance(nodo2,Nodo.Nodo)):
        return nodo1 == nodo2
    # Descend through non-terminal / wrapper nodes on either side.
    if nodo1.type == 'no_terminal' or nodo1.type == '' or nodo1.type == 'PATRON' or nodo1.type == 'sub' or nodo1.type == 'LISTAPATRON' or nodo1.type == 'lp': return match(nodo1.izquierdo,nodo2)
    if nodo2.type == 'no_terminal' or nodo2.type == '' or nodo2.type == 'PATRON' or nodo2.type == 'sub' or nodo2.type == 'LISTAPATRON' or nodo2.type == 'lp': return match(nodo2.izquierdo,nodo1)
    # Variables match everything.
    if nodo1.type == 'VARIABLE' or nodo2.type == 'VARIABLE':
        return True
    # Constants: compare the wrapped literal values.
    if nodo1.type == 'CONSTANTE' and nodo2.type == 'CONSTANTE':
        return match(nodo1.izquierdo.izquierdo,nodo2.izquierdo.izquierdo)
    # Integers.
    if nodo1.type == 'ENTERO' and nodo2.type == 'ENTERO':
        return match(nodo1.izquierdo,nodo2.izquierdo)
    # Booleans.
    if nodo1.type == 'BOOLEANO' and nodo2.type == 'BOOLEANO':
        return match(nodo1.izquierdo,nodo2.izquierdo)
    # Empty lists always match each other.
    if nodo1.type == 'CONSTLV' and nodo2.type == 'CONSTLV':
        return True
    # Lists: element-wise comparison; returns matched pairs or False.
    if nodo1.type == 'LISTA' and nodo2.type == 'LISTA':
        return comparar_listas(nodo1,nodo2,[])
    return False
# Comparar Listas
# Compare two LISTA nodes element-wise.
def comparar_listas(lista1,lista2,tuplas):
    """Walk two LISTA spines in parallel, accumulating matched head pairs.

    Returns *tuplas* (mutated in place, which is why the recursive call's
    return value can be ignored) when the heads match, else False.
    Python 2 module: the print statement is debug output left in place.
    """
    print 'incomp-tuplas: ',tuplas
    if match(lista1.izquierdo,lista2.izquierdo):
        tuplas.append((lista1.izquierdo,lista2.izquierdo))
        d1 = lista1.derecho
        d2 = lista2.derecho
        if d1.type == 'LISTA':
            if d2.type == 'LISTA':
                # Recurse on the tails; they append into the same 'tuplas'.
                comparar_listas(lista1.derecho,lista2.derecho,tuplas)
            else:
                if match(d1,d2): tuplas.append((d1,d2))
        elif d2.type == 'LISTA':
            if match(d1,d2): tuplas.append((d1,d2))
        else:
            if match(d1,d2): tuplas.append((d1,d2))
        return tuplas
    else: return False
# Replace
def replace(diccionario, clave, valor):
    """Overwrite *clave* in *diccionario* with *valor* and return the dict."""
    diccionario.update({clave: valor})
    return diccionario
# Extend
def extend(diccionario, clave, valor):
    """Bind *clave* to *valor* in *diccionario* (in place) and return the dict.

    Identical to replace(); kept as a separate name because the evaluator
    uses it with environment-extension semantics.
    """
    diccionario.update({clave: valor})
    return diccionario
# Lookup
def lookup(clave,diccionario):
    """Return the binding of *clave* in the environment *diccionario*.

    NOTE(review): when the variable is undeclared this raises, catches its
    own ParametrosError, prints the message and implicitly returns None --
    callers never see the error.  Python 2 except/print syntax.
    """
    try:
        if clave in diccionario:
            return diccionario[clave]
        else: raise ParametrosError('Variable '+str(clave)+' no declarada')
    except ParametrosError, messag:
        messag = messag.messg
        print 'Error : ' + messag
# Eval
# NOTE(review): this three-argument eval is shadowed by the two-argument
# eval defined later in this module, so it is effectively dead code; its
# recursive calls resolve to the later definition.
def eval(nodo,env,orientacion):
    """Evaluate one side of *nodo*: 'izquierda' picks the left child."""
    if orientacion == 'izquierda': return eval(nodo.izquierdo,env)
    return eval(nodo.derecho,env)
# Valor
def valor(nodo):
    """Reduce an AST node to its underlying Python value.

    BOOLEANO nodes become True/False, LISTA nodes become a '::'-joined
    string of their element values, and other node types are unwrapped
    until a plain (non-node) value is reached.
    """
    while isinstance(nodo, Nodo.Nodo):
        if nodo.type == 'BOOLEANO':
            return nodo.izquierdo == 'TRUE'
        if nodo.type == 'LISTA':
            return str(valor(nodo.izquierdo)) + '::' + str(valor(nodo.derecho))
        nodo = nodo.izquierdo
    return nodo
# Cantidad de patrones de una Funcion
def cantidad_patrones(nodo):
while (nodo.type != 'lp'):
nodo = nodo.izquierdo
global suma
suma = 0
tam_listapatron(nodo)
return suma
# # Tamano de una lista de patrones
# def tam_listapatron(nodo):
# global suma
# i = nodo.izquierdo
# d = nodo.derecho
# if nodo.type == 'PATRON':
# suma += 1
# return
# else:
# if isinstance(i,Nodo.Nodo):
# tam_listapatron(i)
# if isinstance(d,Nodo.Nodo):
# tam_listapatron(d)
# return suma
# Apply
def apply(cls,nodo):
for c in cls.clausura:
#print 'C[0]\n =',valor(nodo)
comparar = match(c[0],nodo)
print 'comp', comparar
if comparar:
if isinstance(comparar,list):
#print 'Matcheo de listas', comparar[0],comparar[1]
#print 'APPLY\n @',cls,'\n @',c[1],'\n @',copy.deepcopy(cls.env)
nuevo_env = copy.deepcopy(cls.env)
for n in comparar:
extend(nuevo_env,valor(n[0]),n[1])
print 'NE ',nuevo_env
#print '#',nuevo_env
return eval(c[1],extend(nuevo_env,str(valor(c[0])),nodo))
#print ' @@ ',eval(c[1],extend(cls.env,str(valor(c[0])),nodo))
#return eval(c[1],extend(cls.env,str(valor(c[0])),valor(nodo)))
#print 'retorno',valor(c[1])
else : return eval(c[1],extend(copy.deepcopy(cls.env),str(valor(c[0])),nodo))
raise ParametrosError('Error de matching')
#APPLY VIEJO
# global num_clausura
# #if isinstance(nodo1,Nodo.Nodo) and isinstance(nodo2,Nodo.Nodo): print 'APPLY',nodo1.type,nodo2.type
# #print 'APPLY\n -',nodo1,'\n -',nodo2
# #if nodo2 is None and nodo1 is None: return
# if 'clausura' in env:
# #print 'here'#, valor(env['clausura'][0][0]),valor(env['clausura'][1][0])
# #print 'APPLY',nodo1,nodo2
# #print env
# #i=555
# for c in env['clausura']:
# print '+C0\n +',nodo2,'\n +',c[0],'\n +',c[1],'\n +',env['clausura'][0][1]
# if match(nodo2,c[0]):
# print 'Macheo \n *',c[1],'\n *',extend(env,str(valor(c[0])),valor(nodo2))
# print valor(eval(c[1],extend(env,str(valor(c[0])),valor(nodo2))))
# return #eval(c[1],extend(env,str(valor(c[0])),valor(nodo2)))
# # else: return False
# # i+=111
# #print 'ERROR',c[0],nodo2
# #n = c[0]
# #print n.type, nodo2.type
# #while isinstance(n,Nodo.Nodo):
# # print n.type
# # n = n.izquierdo
# raise 'AA'
# else:
# #print 'aqui \n ',nodo1,'\n ',nodo2,'\n ' ,env
# #print '1zzz'
# #print 'ZZ', eval(nodo1,env)
# #print '2zzz'
# return apply(eval(nodo1,env),eval(nodo2,env),env)
# #return apply(eval(nodo2,eval(nodo2,env)),env
# Obtener clausura de una funcion
def clausura(nodo,env,temp):
    """Build a CLS closure from a function node.

    Collects every 'lfe' (pattern, expression) clause of the subtree into
    *temp* and wraps it with the defining environment *env*.
    """
    if isinstance(nodo,Nodo.Nodo):
        if nodo.type == 'lfe':
            temp.append((nodo.izquierdo,nodo.derecho))
        clausura(nodo.izquierdo,env,temp)
        clausura(nodo.derecho,env,temp)
    return CLS.CLS(env,temp)
# Obtener patrones de una lista de patrones
def patrones(nodo,listap):
if isinstance(nodo,Nodo.Nodo):
#print nodo
if nodo.type == 'PATRON':
#print 'p',nodo
listap.append(nodo)
if isinstance(nodo.izquierdo,Nodo.Nodo):
patrones(nodo.izquierdo,listap)
if isinstance(nodo.derecho,Nodo.Nodo):
patrones(nodo.derecho,listap)
return listap
# Obtener cuerpo (listas de patrones y expresiones)
# de una funcion
def cuerpo(nodo, body):
    """Accumulate a function body as (pattern-list, expression) pairs.

    Every 'lfe' node contributes (patrones(left, []), right) to *body*;
    returns the (mutated) accumulator list.
    """
    if isinstance(nodo, Nodo.Nodo):
        if nodo.type == 'lfe':
            body.append((patrones(nodo.izquierdo, []), nodo.derecho))
        for hijo in (nodo.izquierdo, nodo.derecho):
            cuerpo(hijo, body)
    return body
# Factorizar funcion
def factorizar(body):
conjunto = []
particion = {}
clave = {}
exp = {}
p = 0
q = 1
for b in body:
print b[0][0]
#exp.append((b[0][0],b[1]))
conjunto.append(b[0][0])
p = 0
q = 1
print 'len' ,len(conjunto)
while p < len(conjunto):
while q < len(conjunto):
if match(conjunto[p],conjunto[q]) and match (conjunto[q],conjunto[p]):
print 'conjunto',conjunto[p],conjunto[q],p,q
if p in clave:
if clave[p] in particion:
particion[clave[p]].append(conjunto[q])
else:
particion[clave[p]] = [conjunto[q]]
if clave[p] in exp:
exp[clave[p]].append(body[p][1])
exp[clave[p]].append(body[q][1])
else:
exp[clave[p]] = [body[p][1]]
exp[clave[p]].append(body[q][1])
clave[q] = p
clave[p] = q
elif q in clave:
if clave[q] in particion:
particion[clave[q]].append(conjunto[p])
else:
particion[clave[q]] = [conjunto[p]]
if clave[q] in exp:
exp[clave[q]].append(body[p][1])
exp[clave[q]].append(body[q][1])
else:
exp[clave[q]] = [body[p][1]]
exp[clave[q]].append(body[q][1])
clave[p] = q
clave[q] = p
else:
particion[q] = [conjunto[q]]
exp[q]
clave[q] = p
clave[p] = p
else:
if p not in clave:
clave[p] = p
particion[p] = [conjunto[p]]
if q not in clave:
clave[q] = q
particion[q] = [conjunto[q]]
q += 1
p +=1
print particion , exp #particion[0][0] ,particion[2][0]#particion[3][0
# Eval
def es_entero(x, y):
    """Return True when both *x* and *y* are ints (bools explicitly excluded)."""
    return (isinstance(x, int) and not isinstance(x, bool)
            and isinstance(y, int) and not isinstance(y, bool))
def es_booleano(x, y):
    """Return True when both *x* and *y* are bools."""
    return isinstance(x, bool) and isinstance(y, bool)
#definicion de excepcion: Error de tipo
def eval(nodo,env):
    """Evaluate AST node *nodo* in environment *env* (the interpreter core).

    Dispatches on nodo.type: wrapper nodes are unwrapped, arithmetic and
    comparison nodes evaluate both children and rebuild a CONSTANTE node,
    FUN builds a closure, IF branches, LET extends the environment, and
    APLICAR applies a closure.  Python 2 module: the except clause and
    print statements use py2 syntax.

    NOTE(review): 'MAS', 'PRODUCTO' and 'COCIENTE' skip the es_entero type
    check (it is disabled), unlike the other arithmetic operators; 'arg'
    evaluates both children for effect and falls through; 'NO'/'OR'/'AND'
    raise plain strings rather than ParametrosError, so the handler below
    does not catch them.  Behavior kept as-is.
    """
    try:
        # Plain values evaluate to themselves.
        if not isinstance(nodo,Nodo.Nodo): return nodo
        if nodo.type == 'arg':
            # Evaluate both argument children for their effects.
            eval(nodo.izquierdo,env)
            eval(nodo.derecho,env)
        if nodo.type == 'arg2': return eval(nodo.izquierdo,env)
        if nodo.type == 'lp':return nodo
        elif nodo.type == 'FUN':
            # Single-pattern clauses become a closure; otherwise factor first.
            cuerpo_fun = cuerpo(nodo,[])
            if len(cuerpo_fun[0][0]) != 1:
                fun_factorizada = factorizar(nodo)
            else:
                return clausura(nodo,env,[])
        elif nodo.type == 'IF':
            if valor(eval(nodo.izquierdo.izquierdo ,env)) == True:
                return eval(nodo.izquierdo.derecho,env)
            else:
                return eval(nodo.derecho,env)
        elif nodo.type == 'LISTAPATRON' or nodo.type == 'LISTA': return nodo
        # Wrapper nodes: unwrap and continue.
        elif nodo.type == 'no_terminal': return eval(nodo.izquierdo,env)
        elif nodo.type == 'sub': return eval(nodo.izquierdo,env)
        elif nodo.type == '': return eval(nodo.izquierdo,env)
        elif nodo.type == 'CONSTANTE' or nodo.type == 'ENTERO':
            return nodo
        elif nodo.type == 'CONSTLV':
            # Empty-list constant evaluates to itself.
            return nodo
        # Arithmetic operators.
        elif nodo.type == 'MAS' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            resultado = i + d
            return Nodo.Nodo('CONSTANTE',Nodo.Nodo('ENTERO',resultado))
        elif nodo.type == 'MENOS' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d):
                resultado = i - d
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('ENTERO',resultado))
            else: raise ParametrosError('Error de tipo en los parametros de la resta')
        elif nodo.type == 'NEGATIVO' :
            i = valor(eval(nodo.izquierdo,env))
            if es_entero(i,1):
                resultado = -i
                return Nodo.Nodo('NEGATIVO',resultado)
            else: raise ParametrosError('Error de tipo en el parametro de negativo')
        elif nodo.type == 'PRODUCTO' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            resultado = i * d
            return Nodo.Nodo('CONSTANTE',Nodo.Nodo('ENTERO',resultado))
        elif nodo.type == 'COCIENTE' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if (d == 0):
                raise ParametrosError('Error: Division por cero')
            resultado = i / d
            return Nodo.Nodo('CONSTANTE',Nodo.Nodo('ENTERO',resultado))
        # Comparison operators build BOOLEANO constants.
        elif nodo.type == 'MENOR' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d):
                resultado = (i < d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',resultado))
            else: raise ParametrosError('Error de tipo en los parametros de: <')
        elif nodo.type == 'MENOROIGUAL' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d):
                resultado = (i <= d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',resultado))
            else: raise ParametrosError('Error de tipo en los parametros de: =<')
        elif nodo.type == 'MAYOR' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d):
                resultado = (i > d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',resultado))
            else: raise ParametrosError('Error de tipo en los parametros de: >')
        elif nodo.type == 'MAYOROIGUAL' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d):
                resultado = (i >= d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',resultado))
            else: raise ParametrosError('Error de tipo en los parametros de: >=')
        elif nodo.type == 'IGUAL' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d) or es_booleano(i,d):
                resultado = (i == d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',str(resultado).upper()))
            else: raise ParametrosError('Error de tipo en los parametros de: =')
        elif nodo.type == 'DISTINTO' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_entero(i,d) or es_booleano(i,d):
                resultado = (i != d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',str(resultado).upper()))
            else: raise ParametrosError('Error de tipo en los parametros de: <>')
        # Boolean operators.
        elif nodo.type == 'NO' :
            i = valor(eval(nodo.izquierdo,env))
            if es_booleano(bool(i),True):
                resultado = not(i)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',str(resultado).upper()))
            else: raise 'ERROR: de tipo en la negacion'
        elif nodo.type == 'OR' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_booleano(i,d):
                resultado = (i or d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',str(resultado).upper()))
            else: raise 'ERROR: de tipo en los parametros del OR'
        elif nodo.type == 'AND' :
            i = valor(eval(nodo.izquierdo,env))
            d = valor(eval(nodo.derecho,env))
            if es_booleano(i,d):
                resultado = (i and d)
                return Nodo.Nodo('CONSTANTE',Nodo.Nodo('BOOLEANO',str(resultado).upper()))
            else: raise 'ERROR: de tipo en los parametros del AND'
        elif nodo.type == 'VARIABLE':
            # Resolve the variable name in the environment.
            return lookup(str(valor(nodo.izquierdo)),env)
        elif nodo.type == 'PATRON': return nodo
        elif nodo.type == 'LET':
            # let p = e1 in e2: bind p provisionally, evaluate e1, then e2.
            p = nodo.izquierdo.izquierdo
            e1 = nodo.izquierdo.derecho
            e2 = nodo.derecho
            env1 = extend(env,p,'fake')
            v1 = eval(e1,env1)
            return eval(e2,replace(env1,str(valor(p)),v1))
        elif nodo.type == 'lfe':
            # Clauses are collected by clausura(); nothing to do here.
            return
        elif nodo.type == 'APLICAR':
            return apply(eval(nodo.izquierdo,env),eval(nodo.derecho,env))
    except ParametrosError, messag:
        messag = messag.messg
        print 'ERROR : ' + messag
|
9,665 | 179b07870d656fb24b73d8b0a1f76ffed08aa5c2 | import time
import DHT22
import pigpio
import Sensor
class MagicBoxDHT22(object):
    """Wrapper around a DHT22 temperature/humidity sensor read via pigpio.

    Attributes:
        tempF    -- last temperature reading in Fahrenheit (0 until read)
        humidity -- last relative-humidity reading (0 until read)
    """
    def DHT22(self):
        """Trigger a sensor reading and update tempF / humidity."""
        self.s.trigger()
        # Give the sensor time to complete the measurement.
        time.sleep(0.2)
        # Celsius -> Fahrenheit, then a hand-tuned calibration offset.
        self.tempF=round(self.s.temperature()*1.8+32,2) -3.7 #+adjustment
        self.humidity=round(self.s.humidity())
    def __init__(self):
        # pigpio daemon connection and DHT22 driver bound to GPIO pin 4.
        self.pi=pigpio.pi()
        self.s=DHT22.sensor(self.pi, 4)
        self.tempF=0
        self.humidity=0
|
9,666 | 78a96020abfd393438c2fce1dfd5fd159a23ca5a | #!/usr/bin/env python
################################################################################
#
# HDREEnable.py
#
# Version: 1.000
#
# Author: Gwynne Reddick
#
# Description:
#
#
# Usage:
#
# Last Update 16:49 08/12/10
#
################################################################################
# part of a hack for later on so we can identify if a second HDRE assembly has been applied
camnames = {'HDRECam (2)':' (2)',
'HDRECam(2)':'(2)',
'HDRECam 2':' 2',
'HDRECam_2':'_2',
'HDRECam2':'2'}
HDREEnvs = ['HDRERefl', 'HDREBackplate', 'HDREEnv']
def itemexists(name):
    """Attempt to select *name*; report whether an item by that name exists."""
    lx.eval('select.item {%s} set' % name)
    current_selection = lx.evalN('item.name ?')
    return name in current_selection
def lockcamera():
    """Group the HDRE camera and lock its channels so it cannot be moved."""
    # Only build the lock group once.
    if not itemexists('HDRECam_Grp'):
        lx.eval('select.drop item')
        lx.eval('group.create')
        lx.eval('item.name HDRECam_Grp')
        lx.eval('select.subItem HDRECam set camera')
        lx.eval('!!group.edit add item')
        lx.eval('select.item HDRECam_Grp set')
        lx.eval('item.channel lock on item:HDRECam_Grp')
def lockanimcamera():
    """Lock only the position channels of the animatable HDRE camera."""
    if not itemexists('HDRECamAnimate_Grp'):
        lx.eval('select.drop item')
        lx.eval('group.create')
        lx.eval('item.name HDRECamAnimate_Grp')
        # Lock pos.X/Y/Z only, so the camera stays otherwise animatable.
        xfrmitem = lx.eval('query sceneservice item.xfrmPos ? HDRECamAnimate')
        lx.eval('select.channel {%s:pos.X} set' % xfrmitem)
        lx.eval('select.channel {%s:pos.Y} add' % xfrmitem)
        lx.eval('select.channel {%s:pos.Z} add' % xfrmitem)
        lx.eval('!!group.edit add chan')
        lx.eval('item.channel lock on item:HDRECamAnimate_Grp')
def hastag(item):
    """Return True when *item* carries the 'HDRE' tag (None otherwise).

    Fix: the original returned the undefined name ``true`` (a NameError at
    runtime); Python's boolean literal is ``True``.
    """
    lx.eval('select.drop item')
    lx.eval('select.item {%s} set' % item)
    if lx.eval('item.tag HDRE ?') == 'set':
        return True
def clearold():
try:
numenvs = lx.eval('query sceneservice environment.N ? all')
envs = []
oldclips = []
for x in xrange(numenvs):
envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))
# need a hack here to work round what appears to be a bug. We need to collect a
# list of clips to delete after deleting the env items. For some reason we have
# to collect the list in one loop, then delete the env items in a second loop
# otherwise querying the env refl image returns None. I think this is because the
# env image layer is originally an instance
for env in envs:
lx.eval('select.item %s set' % env)
if lx.eval('item.tag string HDRE ?') == 'set':
layer, process = lx.eval('query sceneservice mask.children ? {%s}' % env)
lx.eval('select.item {%s} set' % layer)
oldclips.append(lx.eval('texture.setIMap ?'))
# now delete the env items
for env in envs:
lx.eval('select.item %s set' % env)
if lx.eval('item.tag string HDRE ?') == 'set':
lx.eval('!!item.delete')
numgrplocs = lx.eval('query sceneservice groupLocator.N ? all')
grplocs = []
for x in xrange(numgrplocs):
grplocs.append(lx.eval('query sceneservice groupLocator.ID ? %s' % x))
for loc in grplocs:
lx.eval('select.item %s set' % loc)
if lx.eval('item.tag string HDRE ?') == 'set':
lx.eval('!!item.delete')
break
# clear old ground and water material groups
lx.eval('select.itemPattern HDREGroup')
id = lx.eval1('query sceneservice selection ? mask')
parent = lx.eval('query sceneservice mask.parent ? %s' % id)
lx.eval('select.item %s set' % parent)
lx.eval('texture.delete')
# clear old clips
for clip in oldclips:
lx.eval('select.drop item')
lx.eval('select.item {%s} set' % clip)
lx.eval('clip.delete')
except:
lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
def renamenew(incr):
try:
lx.eval('item.name HDRECam item:{HDRECam%s}' % incr)
lx.eval('item.name HDRECamAnimate item:{HDRECamAnimate%s}' % incr)
lx.eval('item.name HDRESun item:{HDRESun%s}' % incr)
lx.eval('item.name HDRERefl item:{HDRERefl%s}' % incr)
lx.eval('item.name HDREBackplate item:{HDREBackplate%s}' % incr)
lx.eval('item.name HDREEnv item:{HDREEnv%s}' % incr)
lx.eval('item.name {HDREActivate} item:{HDREActivate%s}' % incr)
lx.eval('item.name {HDREWater} item:{HDREWater%s}' % incr)
lx.eval('item.name {HDREShadowGround} item:{HDREShadowGround%s}' % incr)
lx.eval('item.name {HDREControls} item:{HDREControls%s}' % incr)
lx.eval('item.name {BackdropBrowser} item:{BackdropBrowser%s}' % incr)
lx.eval('item.name {Texture Group} item:{Texture Group%s}' % incr)
lx.eval('item.name {HDREGroup} item:{HDREGroup%s}' % incr)
# rename the parent group
root = lx.eval('query sceneservice item.parent ? HDRECam')
rootname = lx.eval('query sceneservice item.name ? %s' % root)
newname = rootname.split(incr)[0]
lx.eval('item.name {%s} item:{%s}' % (newname, rootname))
except:
lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
def tagitems():
    """Tag the HDRE environment items and the assembly root with 'HDRE'.

    Relies on the module-level ``rootID`` set earlier by the main script body.
    """
    try:
        lx.eval('select.drop item')
        for item in HDREEnvs:
            lx.eval('select.item {%s} set' % item)
            lx.eval('item.tag string {HDRE} {set}')
        lx.eval('select.item {%s} set' % rootID)
        lx.eval('item.tag string {HDRE} {set}')
    except:
        # Python 2-era catch-all: log instead of crashing the host app.
        lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
def setframesize():
try:
backplate = None
# find the backplate
envchildren = lx.eval('query sceneservice item.children ? HDREBackplate')
for child in envchildren:
if lx.eval('query sceneservice item.type ? {%s}' % child) == 'imageMap':
lx.eval('select.item %s set' % child)
backplate = lx.eval('texture.setIMap ?')
break
if backplate:
clip_width = None
clip_height = None
# set render frame size and film back aspect aspect
clips = lx.evalN('query layerservice clips ? all')
for clip in clips:
if lx.eval('query layerservice clip.name ? {%s}' % clip) == backplate:
info = lx.eval('query layerservice clip.info ? {%s}' % clip).split()
clip_width = float(info[1].split(':')[1])
clip_height = float(info[2].split(':')[1])
if clip_width != None and clip_height != None:
if clip_width > clip_height:
frame_width = 1024
frame_height = int((clip_height/clip_width) * 1024)
else:
frame_height = 1024
frame_width = int((clip_width/clip_height) * 1024)
lx.eval('render.res 0 %s' % frame_width)
lx.eval('render.res 1 %s' % frame_height)
except:
lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
try:
# close previously open backdrop browser if there is one
if lx.eval('query scriptsysservice userValue.isDefined ? HDRE_Card'):
cookie = lx.eval('user.value HDRE_Card ?')
lx.eval('layout.createOrClose {%s} open:0' % cookie)
selectedItem = lx.eval1('query sceneservice selection ? locator')
rootID = lx.eval('query sceneservice item.parent ? %s' % selectedItem)
# check to see if an HDRE environment already exists and clear it out if it does.
# this is a bit of a hack, we have to test to see if one of our known items exists
# with an incremented name. If it does we delete all HDRE items with names that
# are not incremented and then rename all the ones that are - YUK!!!!
numcams = lx.eval('query sceneservice camera.N ? all')
for x in xrange(numcams):
camname = lx.eval('query sceneservice camera.name ? %s' % x)
if camname in camnames.keys():
incr = camnames[camname]
clearold()
renamenew(incr)
break
if itemexists('HDRECam'):
# set animate camera focal length
if itemexists('HDRECamAnimate'):
flength = round(lx.eval('item.channel focalLen ? item:HDRECam'), 3) * 1000
if flength >= 101 and flength <= 200:
flength = flength + 100
elif flength >= 51 and flength <= 100:
flength = flength + 50
elif flength >= 18 and flength <= 50:
flength = flength + 10
lx.eval('item.channel focalLen [%s mm] item:HDRECamAnimate' % flength)
lx.eval('render.camera HDRECamAnimate')
lockanimcamera()
lx.eval('render.camera HDRECam')
# group and lock the camera
lockcamera()
renID = lx.eval('query sceneservice polyRender.ID ? 0')
lx.eval('item.channel globEnable true item:%s' % renID)
lx.eval('item.channel dispRate 3 item:%s' % renID)
lx.eval('item.channel dispRatio 8 item:%s' % renID)
# set the scene gamma
numouts = lx.eval('query sceneservice renderOutput.N ? all')
for x in xrange(numouts):
id = lx.eval('query sceneservice renderOutput.ID ? %s' % x)
lx.eval('select.item %s set' % id)
if lx.eval('shader.setEffect ?') == 'shade.color':
lx.eval('item.channel gamma 2.2 item:%s' % id)
num_envs = lx.eval('query sceneservice environment.N ? all')
environments = []
for x in xrange(num_envs):
environments.append(lx.eval('query sceneservice environment.name ? %s' % x))
for env in environments:
if env not in HDREEnvs:
lx.eval('item.channel visCam false item:{%s}' % env)
lx.eval('item.channel visInd false item:{%s}' % env)
lx.eval('item.channel visRefl false item:{%s}' % env)
lx.eval('item.channel visRefr false item:{%s}' % env)
numlights = lx.eval('query sceneservice light.N ? all')
for x in xrange(numlights):
if lx.eval('query sceneservice light.name ? %s' % x) != 'HDRESun':
id = lx.eval('query sceneservice light.ID ? %s' % x)
lx.eval('layer.setVisibility {%s} 0' % id)
if itemexists('HDREActivate'):
lx.eval('layer.setVisibility {HDREActivate} 0')
controlsID = lx.eval('query sceneservice item.ID ? HDREControls')
if controlsID:
lx.eval('layer.setVisibility {%s} 1' % controlsID)
# set render frame size
setframesize()
tagitems()
except:
lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
|
9,667 | bb58b4384eaeec45be1af865012c618af05f5a0a | import os
from flask import Flask, request, jsonify, Response, abort
from sesamutils import sesam_logger, VariablesConfig
from sesamutils.flask import serve
required_env_vars = ["SUBDOMAIN"]
optional_env_vars = ["DEBUG", "LOG_LEVEL", ("API_ROOT","zendesk.com/api/v2/tickets/")] # Default values can be given to optional environment variables by the use of tuples
app = Flask(__name__)
logger = sesam_logger('DemoMicroservice', app=app,timestamp=True)
orders = [
{
'id': 1,
'Username': u'Unjudosely',
'Orders': u'Thinkpad',
'TotalSum': 8000
},
{
'id': 2,
'Username': u'Wimen1979',
'Orders': u'MacbookPro',
'TotalSum': 12000
},
{ 'id': 3,
'Username': u'Gotin1984',
'Orders': u'Chormebook',
'TotalSum': 10000
}
]
@app.route('/api/orders')
def get_orders():
    """Return every entry of the in-memory ``orders`` list as JSON."""
    return jsonify({'orders': orders})
@app.route('/api/orders/update/<int:orderID>', methods=['GET','PUT','POST','DELETE'])
def update_ticket(orderID):
    """Return the order with 1-based id *orderID*; only PUT is accepted.

    Fixes over the original:
    - ``logger.error`` sat *after* ``abort(405)`` and was unreachable; the
      HTTPException raised by ``abort`` was then swallowed by the broad
      ``except Exception``, so the intended 405 became a 500 (view returned
      None). The method check now logs first and lets the 405 propagate.
    - An unknown orderID raised IndexError (-> 500); it now yields 404.
    """
    if request.method != 'PUT':
        logger.error(f"Unsupported method for order update: {request.method}")
        abort(405)
    try:
        return jsonify(orders[orderID - 1])
    except IndexError:
        logger.error(f"No order with id {orderID}")
        abort(404)
    except ConnectionError as e:
        logger.error(f"ConnectionError issue while fetching tickets{e}")
    except Exception as e:
        logger.error(f"Issue while fetching tickets from Zendesk {e}")
@app.route('/api/generic/<path:txt>', methods=['GET','PUT','POST','DELETE'])
def get_generic(txt):
    """Echo a posted JSON list back with a greeting added to each entity.

    Any request that is not POST-with-JSON gets a plain-text 500 response.
    """
    method = request.method
    if method == "POST" and request.is_json:
        returnList = []
        enteties = request.get_json()
        logger.info(type(enteties))
        for item in enteties:
            # Annotate every incoming entity before echoing it back.
            item['Hello'] = "Hello, this is a test."
            logger.info(type(item))
            returnList.append(item)
        return jsonify(returnList) , 200, {"Content-Type": "application/json"}
    else:
        logger.info(f'Http method is {method}')
        return "Only JSON on POST is supported.", 500, {"Content-Type": "text/plain"}
@app.route('/api/show/config')
def get_config():
    """Expose the service configuration as JSON.

    NOTE(review): ``config`` is only assigned inside the __main__ guard, so
    this endpoint raises NameError when the app is run by a WSGI server.
    """
    return jsonify({'config': config})
if __name__ == "__main__":
    # Build config from the environment; SUBDOMAIN is required.
    config = VariablesConfig(required_env_vars, optional_env_vars=optional_env_vars)
    # logger.info(str(config))
    # if not config.validate():
    # os.sys.exit(1)
    # NOTE(review): validation is commented out, so missing required
    # environment variables are silently ignored -- confirm intent.
    serve(app)
9,668 | 08848e51d5564bad927607be3fa3c86f2c1212c5 | def favorite_book(name):
print(f"One of my favorite books is {name}...")
favorite_book("Alice in Wonderland")
|
9,669 | 8411acf6b27425357d212f5e220314daa019e023 | import numpy as np
import tensorflow as tf
def mfcc(data):
    """Compute MFCC features for *data*. Not implemented yet (stub)."""
    pass

def cut_frames(data):
    """Split *data* into frames. Not implemented yet (stub)."""
    pass
9,670 | 24f6328d578b6145bf86d7b5378a081463936df3 | from __future__ import print_function
class StackQueue(object):
    """FIFO queue implemented with two LIFO stacks.

    ``stack1`` is the inbox (newest item on top); ``stack2`` is the outbox
    (oldest item on top). Each element is moved between stacks at most
    once, so enqueue/dequeue/peek are amortized O(1).

    Improvement over the original: the stack-transfer logic that was
    duplicated in ``dequeue`` and ``peek`` is factored into ``_shift``.
    """

    def __init__(self):
        self.stack1 = []  # inbox
        self.stack2 = []  # outbox, reversed order

    def enqueue(self, data):
        """Append *data* to the back of the queue."""
        self.stack1.append(data)

    def _shift(self):
        """Refill the outbox from the inbox when the outbox is empty."""
        if not self.stack2:
            while self.stack1:
                self.stack2.append(self.stack1.pop())

    def dequeue(self):
        """Remove and return the front item; IndexError when empty."""
        self._shift()
        return self.stack2.pop()

    def peek(self):
        """Return (without removing) the front item; IndexError when empty."""
        self._shift()
        return self.stack2[-1]
def _test():
    # Placeholder for unit tests; intentionally empty.
    pass
def _print():
    """Smoke test: exercise StackQueue and print intermediate results."""
    q = StackQueue()
    q.enqueue(1)
    q.enqueue(2)
    q.enqueue(3)
    d1 = q.dequeue()
    print(d1)        # expected front element: 1
    print(q.peek())  # expected new front: 2
if __name__ == '__main__':
_test()
_print()
|
9,671 | 08712e050bd90408ed9d22bba9f62fafacd64d99 | from django.shortcuts import render
from django.contrib import messages
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
# Create your views here.
# def login(request):
# return render(request, 'login.html')
# def validar_login(request):
# usuario= request.POST['username']
# password= request.POST['password']
# usuarios = Login.objects.filter(usuario = usuario, password = password)
# print(usuarios)
# if usuarios is None:
# print('entroo')
# print(usuario)
# messages.add_message(request, messages.INFO, 'EL USUARIO NO ESTA REGISTRADO')
# else:
# return render(request, 'menu.html')
@login_required
def menu(request):
    """Render the main menu (login required)."""
    return render(request, 'menu.html')
# def Logout_view(request):
# logout(request)
# return render(request, 'login.html')
@login_required
def Lojanisima(request):
    """Render the 'Lojanisima' route page."""
    return render(request, 'lojanisima/ruta_lojanisima.html')
@login_required
def Identiarte(request):
    """Render the 'Identiarte' route page."""
    return render(request, 'identiarte/ruta_identiarte.html')
@login_required
def Raices(request):
    """Render the 'Raices' route page."""
    return render(request, 'raice/ruta_raices.html')
@login_required
def Lojatur(request):
    """Render the 'Lojatur' page."""
    return render(request, 'lojatur/lojatur.html')
9,672 | 4a711642af753ba2c82ce3351b052a4973e17e7d | import os
import RPi.GPIO as GPIO
import time
import neopixel
import board
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(20, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) #Setup button pins
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(26, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(19, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
enable_pin = 24 #Setup stepper motor pins
A1Pin = 23
A2Pin = 22
B1Pin = 27
B2Pin = 17
GPIO.setup(enable_pin, GPIO.OUT)
GPIO.setup(A1Pin, GPIO.OUT)
GPIO.setup(A2Pin, GPIO.OUT)
GPIO.setup(B1Pin, GPIO.OUT)
GPIO.setup(B2Pin, GPIO.OUT)
GPIO.output(enable_pin, 1)
pixel_pin = board.D21 #Setup Neopixels
num_pixels = 60
ORDER = neopixel.GRB
CLEAR = (0,0,0)
pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.03, auto_write=False,
pixel_order=ORDER)
pixel = neopixel.NeoPixel(pixel_pin, num_pixels, brightness = 0.1, pixel_order = ORDER)
def setStep(w1,w2,w3,w4): #Send instructions to the stepper motor
    """Drive the stepper's two coils with one half-step pattern (0/1 each)."""
    GPIO.output(A1Pin, w1)
    GPIO.output(A2Pin, w2)
    GPIO.output(B1Pin, w3)
    GPIO.output(B2Pin, w4)
def wheel(pos):
    """Map a position 0-255 onto the r -> g -> b -> r colour wheel.

    Out-of-range positions yield black. A fourth zero channel is appended
    for pixel orders other than RGB/GRB. (Adapted from the Adafruit
    NeoPixel examples.)
    """
    if 0 <= pos <= 255:
        if pos < 85:
            r, g, b = pos * 3, 255 - pos * 3, 0
        elif pos < 170:
            pos -= 85
            r, g, b = 255 - pos * 3, 0, pos * 3
        else:
            pos -= 170
            r, g, b = 0, pos * 3, 255 - pos * 3
        r, g, b = int(r), int(g), int(b)
    else:
        r = g = b = 0
    if ORDER == neopixel.RGB or ORDER == neopixel.GRB:
        return (r, g, b)
    return (r, g, b, 0)
def rainbow_cycle(wait): #Function to make the wheel transition through the entire colour spectrum, taken from Adafruit
    """Animate one rainbow sweep across the strip, pausing *wait* s/frame."""
    for j in range(255):
        for i in range(num_pixels):
            # Spread the 0-255 wheel across the strip, offset by frame j.
            pixel_index = (i * 256 // num_pixels) + j
            pixels[i] = wheel(pixel_index & 255)
        pixels.show()
        time.sleep(wait)
stepList = [(1,0,0,0),(1,1,0,0),(0,1,0,0),(0,1,1,0),(0,0,1,0),(0,0,1,1),(0,0,0,1),(1,0,0,1)] #List of positions for stepper motor
count = 0
def backwards(list, count):  # noqa: A002 -- original parameter name kept for callers
    """Drive the motor one half-step in reverse and return the next index
    into the 8-entry half-step pattern table *list*."""
    setStep(*list[count])
    count += 1
    if count >= 8:
        count = 0
    return count
for i in range(60): #Loading circle, shows Gizmo is ready to use
pixel[i] = (200,100,0)
time.sleep(0.02)
while True:
for j in range(255): #NeoPixels transistion through rainbow colours
for i in range(num_pixels):
pixel_index = (i * 256 // num_pixels) + j
pixels[i] = wheel(pixel_index & 255)
pixels.show()
time.sleep(0.005)
if GPIO.input(20) == GPIO.HIGH: # Button 1 turns the pointer back to the start position
count = backwards(stepList, count)
print ("Pin 20")
if GPIO.input(13) == GPIO.HIGH: # The other buttons select the songs
print ("Here comes the sun")
os.system("python3 song2.py")
if GPIO.input(19) == GPIO.HIGH:
print ("Button - September")
os.system("python3 song4.py")
if GPIO.input(26) == GPIO.HIGH:
print ("Button (26) 4 - Wonderwall")
os.system("python3 song1.py")
if GPIO.input(16) == GPIO.HIGH:
print ("Button (16) 6 - Shape of You")
os.system("python3 song5.py")
|
9,673 | c967a63d03f9f836d97ae917dba2a7bfb7a54a0e | rule run_all:
shell:
'''
echo 'Hello World!'
'''
|
9,674 | 291cd789ac3ab7b794be8feafe0f608ad0c081d7 | from PyQt5.QtWidgets import QWidget, QHBoxLayout, QLabel, QComboBox
class ChoiceTargetNumbers(QWidget):
    """Widget for choosing two target numbers (two combo boxes with 1-3)."""
    def __init__(self, parent=None) -> None:
        QWidget.__init__(self, parent)
        # Required child widgets (label text is user-facing Russian UI copy).
        label = QLabel(text="Выберите номера целей:")
        self.first_target_number_combo_box = QComboBox()
        self.first_target_number_combo_box.addItems(["1", "2", "3"])
        self.second_target_number_combo_box = QComboBox()
        self.second_target_number_combo_box.addItems(["1", "2", "3"])
        # Main layout container
        layout = QHBoxLayout(self)
        layout.addWidget(label)
        layout.addWidget(self.first_target_number_combo_box)
        layout.addWidget(self.second_target_number_combo_box)
9,675 | 24595979199199ecc6bc6f3a26e0db418def8b78 | def __handle_import():
import sys
import os
cur_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
lib_path = os.path.join(cur_path, '../../build/lib/')
sys.path.append(lib_path)
proto_path = os.path.join(cur_path, '../../build/protobuf_python/')
sys.path.append(proto_path)
__handle_import() |
9,676 | 4e77c7ac784ec235e9925004069131d16717e89a | from __future__ import absolute_import, division, print_function
from .core import Bag, Item, from_sequence, from_filenames
from ..context import set_options
|
9,677 | d5691403812cd3742f8e8b74d4ca613eca784ffd | class Rectangle():
    def __init__(self,length,breadth):
        # Record the rectangle's side lengths.
        self.length=length
        self.breadth=breadth
def area(self):
return(self.length*self.breadth)
def perimeter(self):
return(2*(self.length+self.breadth))
# Demo: build two rectangles, print their measures, and compare areas.
r1=Rectangle(4,5)
r2=Rectangle(5,7)
a1=r1.area()
a2=r2.area()
p1=r1.perimeter()
p2=r2.perimeter()
print("the area of rectangle 1 :",a1)
print("the perimeter of rectangle 1:",p1)
print("the area of rectangle 2:",a2)
print("the perimeter of rectangle 2:",p2)
if(a1>a2):
    print("rectangle 1 is bigger")
else:
    # NOTE(review): equal areas also land here and report rectangle 2.
    print("rectangle 2 is bigger")
9,678 | 1328d62769ee2a0309021ff40fdbf78a2c5570c9 | def decorate():
print('hi')
# NOTE(review): decorate() accepts no arguments, so applying it as a
# decorator calls decorate(decorated) and raises TypeError while this
# module is being imported. Either decorate needs a (func) parameter and
# must return a callable, or the @decorate line should be removed.
@decorate
def decorated():
    print('decorated')
decorate()  # never reached: the decoration above raises first
9,679 | 565e994576a57f8bbdcb201f2439bd7e595fa53e | import pdb
from django.db.models import Count
from django.shortcuts import render_to_response, redirect
from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.template import RequestContext
from models import *
from forms import *
from django.http import HttpResponse
def list(request):  # NOTE(review): shadows builtin `list`; name kept for URLconf compatibility
    """List all techniques, most-photographed first."""
    techniques = Technique.objects.annotate(num_images = Count('images')).order_by('-num_images')
    return render_to_response('technique/list.html', {'techniques': techniques}, RequestContext(request))
def view(request, pk):
    """Show one technique plus related ones (start/end links and children)."""
    t = Technique.objects.get(pk=pk)
    # filter(lambda x: x, ...) drops falsy entries; t.start / t.end may be unset.
    related = filter(lambda x: x, [t2 for t2 in t.starting_at.all()] + [t2 for t2 in t.ending_at.all()] + [t2 for t2 in t.children.all()] + [ t.start, t.end ])
    return render_to_response('technique/view.html', {'t': t, 'related': related}, RequestContext(request))
@login_required
def create(request, pk=None):
if pk:
t = Technique.objects.get(pk=pk)
else:
t = Technique(created_by=request.user)
if request.method == 'POST':
f = TechniqueForm(request.POST, instance=t)
image_formset = TechniqueImageFormset(request.POST, request.FILES, instance = t)
if f.is_valid():
t = f.save(commit=False)
image_formset = TechniqueImageFormset(request.POST, request.FILES, instance = t)
if image_formset.is_valid():
t.save()
for i in image_formset.save(commit=False):
i.created_by = request.user
i.technique = t
i.save()
return redirect(reverse('technique.views.view', args=(t.pk,)))
else:
f = TechniqueForm(instance=t)
image_formset = TechniqueImageFormset(instance = t)
return render_to_response('technique/create.html', {'f': f, 'image_formset': image_formset}, RequestContext(request))
|
9,680 | 1a7a2c2cfb2aa94401defd7a7a500f7dd2e7e0aa | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import sh
import reqwire.helpers.cli
log_methods = (
'echo',
'error',
'fatal',
'info',
'warn',
'warning',
)
def test_emojize_win32(mocker):
mocker.patch('sys.platform', 'win32')
assert reqwire.helpers.cli.emojize(
':thumbs_up_sign: foo').encode('utf-8') == b'foo'
def test_emojize_linux(mocker):
mocker.patch('sys.platform', 'linux')
mocker.patch('io.open', mocker.mock_open(
read_data='Linux version 4.4.0-31-generic (gcc version 5.3.1)'))
assert reqwire.helpers.cli.emojize(
':thumbs_up_sign:').encode('utf-8') == b'\xf0\x9f\x91\x8d'
def test_emojize_linux_ioerror(mocker):
mocker.patch('sys.platform', 'linux')
mocker.patch('io.open', side_effect=IOError)
assert reqwire.helpers.cli.emojize(
':thumbs_up_sign:').encode('utf-8') == b'\xf0\x9f\x91\x8d'
def test_emojize_wsl(mocker):
mocker.patch('sys.platform', 'linux')
mocker.patch('io.open', mocker.mock_open(
read_data='Linux version 3.4.0-Microsoft (Microsoft@Microsoft.com)'))
assert reqwire.helpers.cli.emojize(
':thumbs_up_sign: foo').encode('utf-8') == b'foo'
def test_console_writer_quiet(mocker):
click_echo = mocker.patch('click.echo')
console = reqwire.helpers.cli.ConsoleWriter(verbose=False)
for method in log_methods:
getattr(console, method)('test')
click_echo.assert_not_called()
def test_console_writer_verbose(mocker):
mocker.patch('sys.platform', 'linux')
mocker.patch('io.open', mocker.mock_open(
read_data='Linux version 4.4.0-31-generic (gcc version 5.3.1)'))
click_echo = mocker.patch('click.echo')
console = reqwire.helpers.cli.ConsoleWriter(verbose=True)
for method in log_methods:
getattr(console, method)('test')
fmt = console.format_strings.get(method, '{msg}')
message = reqwire.helpers.cli.emojize(fmt.format(msg='test'))
click_echo.assert_called_once_with(message)
click_echo.reset_mock()
def test_build_with_pip_compile_options(cli_runner, mocker):
from reqwire.cli import main
pip_compile = mocker.patch.object(sh, 'pip_compile')
result = cli_runner.invoke(main, ['build', '-t', 'main', '--',
'--no-header'])
assert result.exit_code == 0, result.output
assert pip_compile.call_args[0][2] == '--no-header'
def test_main_remove(cli_runner):
from reqwire.cli import main
result = cli_runner.invoke(main, ['remove', 'Flask'])
assert result.exit_code == 0, result.output
|
9,681 | 57f8584a8d058e5f9d4e0b7b75c7ec8dbbfef24a | # -*- coding:utf-8 -*-
# class TreeLinkNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution:
    """Find the in-order successor of a node in a binary tree whose nodes
    also carry a ``next`` pointer to their parent (None at the root).
    """
    def GetNext(self, pNode):
        if pNode is None:
            return None
        # Case 1: a right subtree exists -> the successor is its leftmost
        # descendant.
        if pNode.right:
            successor = pNode.right
            while successor.left:
                successor = successor.left
            return successor
        # Case 2: no right subtree -> climb parents until we arrive from a
        # left child; that parent is the successor.
        node = pNode
        while node.next:
            if node.next.left == node:
                return node.next
            node = node.next
        # Climbed off the root from its right spine: pNode was the last
        # node in the in-order traversal.
        return None
9,682 | 829b8cd0b648d39c07c20fd1c401bf717ed5b9c4 | from django.db.backends.base.base import BaseDatabaseWrapper as BaseDatabaseWrapper
from typing import Any, Optional
def wrap_oracle_errors() -> None: ...
class _UninitializedOperatorsDescriptor:
def __get__(self, instance: Any, cls: Optional[Any] = ...): ...
class DatabaseWrapper(BaseDatabaseWrapper):
vendor: str = ...
display_name: str = ...
data_types: Any = ...
data_type_check_constraints: Any = ...
operators: Any = ...
pattern_esc: str = ...
Database: Any = ...
SchemaEditorClass: Any = ...
client_class: Any = ...
creation_class: Any = ...
features_class: Any = ...
introspection_class: Any = ...
ops_class: Any = ...
validation_class: Any = ...
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def get_connection_params(self): ...
def get_new_connection(self, conn_params: Any): ...
pattern_ops: Any = ...
def init_connection_state(self) -> None: ...
def create_cursor(self, name: Optional[Any] = ...): ...
def check_constraints(self, table_names: Optional[Any] = ...) -> None: ...
def is_usable(self): ...
def cx_oracle_version(self): ...
def oracle_version(self): ...
class OracleParam:
force_bytes: Any = ...
input_size: Any = ...
def __init__(self, param: Any, cursor: Any, strings_only: bool = ...) -> None: ...
class VariableWrapper:
var: Any = ...
def __init__(self, var: Any) -> None: ...
def bind_parameter(self, cursor: Any): ...
def __getattr__(self, key: Any): ...
def __setattr__(self, key: Any, value: Any) -> None: ...
class FormatStylePlaceholderCursor:
charset: str = ...
cursor: Any = ...
def __init__(self, connection: Any) -> None: ...
def execute(self, query: Any, params: Optional[Any] = ...): ...
def executemany(self, query: Any, params: Optional[Any] = ...): ...
def close(self) -> None: ...
def var(self, *args: Any): ...
def arrayvar(self, *args: Any): ...
def __getattr__(self, attr: Any): ...
def __iter__(self) -> Any: ...
|
9,683 | 421e7ed0cc5a8c8acc9b98fae4ee6cc784d9b068 | sair = True
# Repeatedly read an integer and print its factorial until the user quits.
while sair:
    num = int(input("informe um numero inteiro:"))
    if num <16:
        fatorial = 1
        x = num
        # Multiply num * (num-1) * ... * 1, echoing each factor.
        while x>=1:
            print(x,".")
            fatorial = fatorial*x
            x -= 1
        print("Valor total do Fatorial do %d = %d "%(num,fatorial))
    else:
        # NOTE(review): inputs >= 16 are rejected as invalid -- presumably an
        # arbitrary size cutoff; confirm the intended limit.
        print("Número inválido!!")
    # `sair` doubles as the loop flag and the raw answer string: any answer
    # other than "s"/"S" keeps looping because a non-empty string is truthy.
    sair = input("deseja sair? s ou n:")
    if sair.upper() == "S":
        sair = False
print("Programa Encerrado")
9,684 | 32eff306444966fab47815fcbae4aefb6769d29b | from lilaclib import *
def pre_build():
    """Rewrite PKGBUILD's _tag and pkgver from the new upstream tag."""
    # _G.newver looks like 'amd-drm-fixes-<version>'; strip the prefix.
    newver = _G.newver.removeprefix('amd-drm-fixes-')
    for line in edit_file('PKGBUILD'):
        if line.startswith('_tag'):
            line = "_tag='amd-drm-fixes-" + newver + "'"
        print(line)
    # pkgver may not contain hyphens; replace them with dots.
    newver2 = newver.replace("-",".")
    update_pkgver_and_pkgrel(newver2)
def post_build():
    """Commit the updated PKGBUILD and publish the package to the AUR."""
    git_add_files('PKGBUILD')
    git_commit()
    update_aur_repo()
#if __name__ == '__main__':
# single_main()
|
9,685 | 5529813e10e4a30a60c28242be9d1a8822fb58af | """
It's annoying that we have to do it here but for something like Ant, we're not going to be able to
specify it easily inside of the rbf_hyper_parameters file. Because, for something like Ant, we have
2 COM dimensions, and Bipedal we have 1.
So, we're going to do something similar to shaping_functions.
The way it'll work is, to make it modular, we'll take in a single string that we then separate out to
get the scaling. I like that. So, it'll be something like, for Ant:
{
uniform: func_get_ones()
special_loc_scaling: func_special_loc(com, rest_state, actions)
}
There's an argument for making these things know about the environment already. Only because we need
the state and action dimensions. So maybe you pass the environment into the constructor?
It's sort of annoying -- do we do the automatic scaling or not? I'd say leave the option, for something like Ant,
it's unavoidable to use it, even though it does make the problem non-stationary.
And it figures out the rest from there.
So, in the end this will just return an array.
"""
import numpy as np
def uniform_scaling(*args, **kwargs):
    """Identity scaling: every feature dimension is weighted 1.0.

    Accepts and ignores any arguments so it is call-compatible with the
    other scaling functions in this module.
    """
    return 1.0
def action_scaling(env, action_scaler):
    """Build a per-dimension scaling vector: 1.0 for every state dimension
    and ``action_scaler`` for every action dimension.

    Only the state/action *ratio* matters downstream, so state dimensions
    are left at unit scale.
    """
    try:
        state_dim = len(env.observation_space.low)
    except AttributeError:
        print("Using dm_control so need to get state_dim differently")
        state_dim = len(env.observation_space['observations'].low)
    action_dim = len(env.action_space.low)
    scale = float(action_scaler)
    state_part = np.ones((state_dim,), dtype=np.float32)
    action_part = np.full((action_dim,), scale, dtype=np.float32)
    return np.concatenate([state_part, action_part], axis=0)
def per_dim_scaling(env, *args):
    """Return a float32 scaling vector with one explicit value per
    state/action dimension; *args* must supply exactly that many values."""
    try:
        state_dim = len(env.observation_space.low)
    except AttributeError:
        print("Using dm_control so need to get state_dim differently")
        state_dim = len(env.observation_space['observations'].low)
    action_dim = len(env.action_space.low)
    expected = state_dim + action_dim
    assert len(args) == expected
    return np.array(args, dtype=np.float32)
def ant_maze_scaling(env, com_scaling, other_feature_scaling, action_scaling):
    """Scaling for Ant mazes: one factor for the centre-of-mass features,
    one for the remaining state features, one for the actions.

    NOTE(review): assumes the COM occupies the first 2 state features --
    confirm against the environment's observation layout.
    """
    state_dim = len(env.observation_space.low)
    action_dim = len(env.action_space.low)
    num_com = 2
    parts = [
        np.full((num_com,), float(com_scaling), dtype=np.float32),
        np.full((state_dim - num_com,), float(other_feature_scaling), dtype=np.float32),
        np.full((action_dim,), float(action_scaling), dtype=np.float32),
    ]
    return np.concatenate(parts, axis=0)
# assert
print("Just a note that you should PROBABLY be normalizing one way or another for this one.")
"""
This has an interesting interface -- scaling_string is a string where the arguments are double-underscore-separated.
That lets us pass stuff in through a CLI interface a bit easier.
"""
_SCALING_FUNCTIONS = {
'action_scaling': action_scaling,
'per_dim_scaling': per_dim_scaling,
'ant_maze_scaling': ant_maze_scaling,
}
def get_scaling_array(env, scaling_function_string):
    """Parse 'method__arg1__arg2...' and dispatch to _SCALING_FUNCTIONS.

    E.g. 'action_scaling__2.0' calls action_scaling(env, '2.0'); the args
    are passed through as strings and converted inside each function.
    """
    scaling_string_parsed = scaling_function_string.split("__")
    scaling_method, scaling_args = scaling_string_parsed[0], scaling_string_parsed[1:]
    scaling_array = _SCALING_FUNCTIONS[scaling_method](env, *scaling_args)
    return scaling_array
# class ScalingFunctions:
# """
# This has an interesting interface -- scaling_string is a string where the arguments are double-underscore-separated.
# That lets us pass stuff in through a CLI interface a bit easier.
# """
# SCALING_FUNCTIONS = {
# 'state_action_scaling': state_action_scaling,
# 'per_dim_scaling': per_dim_scaling
# }
# def __init__(self, env, scaling_string):
# scaling_string_parsed = scaling_string.split("__")
# scaling_method, scaling_args = scaling_string_parsed[0], scaling_string_parsed[1:]
# scaling_array = self.SCALING_FUNCTIONS[scaling_method](env, *scaling_args)
# # return scaling_array
|
9,686 | 0b7d1564ecbd78086d59629a2058716f41b4b8c8 | import warnings
warnings.filterwarnings('ignore', category=FutureWarning)
from cv2 import cv2
from tqdm import tqdm
import os
import pickle
import numpy as np
import csv
import sys
from collections import defaultdict
from dataset_utils import *
sys.path.append("../training")
from dataset_tools import enclosing_square, add_margin, DataGenerator
EXT_ROOT = os.path.dirname(os.path.abspath(__file__))
rafdb_labels = {
"age_group": {
"0-3": 0,
"4-19": 1,
"20-39": 2,
"40-69": 3,
"70+":4
},
"race": {
"Caucasian": 0,
"African-American": 1,
"Asian": 2
}
}
# converted labels
rafDBmeta = defaultdict(dict)
# multitask labels
rafDBpartition = dict() # dict({id:partition or None}) # for partitioning purpose
rafDBdata = None # dict({image_path: ... }) # for ensembling purpose
# ORDER: Gender, Age, Ethnicity, Emotion
def _load_traits(input_meta, include_gender=False, include_age_group=False, include_race=False):
global rafDBdata
if rafDBdata is None:
rafDBdata = dict()
i, errors = 0, defaultdict(set)
for image_path, image_meta in input_meta.items():
identity = image_meta["identity"]
roi = None # aligned image, roi is the image size
rafDBdata[image_path] = {
"roi" : roi,
"identity" : identity,
"gender" : get_gender_label(image_meta["gender"]) if include_gender else MASK_VALUE,
"age_group" : get_age_group_label(image_meta["age_group"]) if include_age_group else MASK_VALUE,
"ethnicity": get_ethnicity_label(image_meta["race"]) if include_race else MASK_VALUE,
"emotion": get_emotion_label(image_meta["emotion"]),
"sample_num" : i
}
i += 1
print("Metadata:", len(rafDBdata))
if errors:
print("Gender errors", errors["gender"])
print("Age errors", errors["age"])
print("Ethnicity errors", errors["ethnicity"])
# Labelling
def get_gender_label(gender):
    """Map a raw gender string ('male'/'female') to its numeric label.

    Any other value is masked out with MASK_VALUE.
    """
    if gender in ("male", "female"):
        return LABELS["gender"][gender]
    return MASK_VALUE
def get_age_group_label(age_group_text):
    """Map a RAF-DB age-group string (e.g. "20-39") to its numeric label."""
    return rafdb_labels["age_group"][age_group_text]
def get_ethnicity_label(ethnicity_text):
    """Map a RAF-DB race string (e.g. "Asian") to its numeric label."""
    return rafdb_labels["race"][ethnicity_text]
def get_emotion_label(emotion):
    """Map an emotion name to its numeric label via the shared LABELS table."""
    return LABELS["emotion"][emotion]
# Load from csv
def _load_meta_from_csv(csv_meta, output_dict):
    """Read the multitask CSV and fill ``output_dict`` keyed by image path.

    Expected row layout: path, gender, age_group, race, emotion.
    The identity is the second underscore-separated token of the path.
    """
    for row in readcsv(csv_meta):
        key = row[0]
        entry = output_dict[key]
        entry["gender"] = row[1]
        entry["age_group"] = row[2]
        entry["race"] = row[3]
        entry["emotion"] = row[4]
        entry["identity"] = key.split("_")[1]
def get_partition(identity_label):
    """Return the train/val partition for an identity, assigning one if new.

    New identities are bucketed round-robin: roughly 2 of every 10 go to
    validation, the rest to training (a 20/80 split stratified by identity).
    A face counter per identity is kept alongside the partition.
    """
    global rafDBpartition
    entry = rafDBpartition.get(identity_label)
    if entry is not None:
        faces, partition = entry
        rafDBpartition[identity_label] = (faces + 1, partition)
    else:
        # split 20/80 stratified by identity
        bucket = (len(rafDBpartition) - 1) % 10
        partition = PARTITION_VAL if bucket in (0, 1) else PARTITION_TRAIN
        rafDBpartition[identity_label] = (1, partition)
    return partition
def _load_dataset(imagesdir, partition_label, debug_max_num_samples=None):
    """Build the list of sample dicts for one partition.

    Walks the module-level ``rafDBdata`` index, loads each image from
    ``imagesdir`` (using the "_aligned" file name when ALIGNED is set),
    skips unreadable or blank images, and returns a list of
    ``{'img', 'label', 'roi', 'part'}`` dicts.
    """
    data = list()
    discarded_items = defaultdict(list)
    for image_path, image_meta in tqdm(rafDBdata.items()):
        path = os.path.join(imagesdir, image_path)
        if ALIGNED:
            # Aligned crops are stored as "<name>_aligned.<ext>".
            path = os.path.splitext(path)
            path = path[0] + "_aligned" + path[1]
        identity = image_meta["identity"]
        image = cv2.imread(path)
        if image is None:
            print("WARNING! Unable to read {}".format(image_path))
            print(" - At {}".format(path))
            discarded_items["unavailable_image"].append(identity)
            continue
        if np.max(image) == np.min(image):
            # A constant image carries no information; treat it as corrupt.
            print("Blank image {}".format(image_path))
            discarded_items["blank_image"].append(identity)
            continue
        # Test samples keep the test partition; train/val are split by identity.
        sample_partition = PARTITION_TEST if partition_label == PARTITION_TEST else get_partition(identity)
        gender = rafDBdata[image_path]["gender"]
        age = rafDBdata[image_path]["age_group"]
        ethnicity = rafDBdata[image_path]["ethnicity"]
        emotion = rafDBdata[image_path]["emotion"]
        labels = (gender, age, ethnicity, emotion)
        # No stored roi for aligned images -> the full image is the roi.
        roi = (0, 0, image.shape[1], image.shape[0]) if image_meta["roi"] is None else image_meta["roi"]
        sample = {
            'img': path,
            'label': labels,
            'roi': roi,
            'part': sample_partition
        }
        data.append(sample)
        if debug_max_num_samples is not None and len(data) >= debug_max_num_samples:
            print("Stopped loading. Debug max samples: ", debug_max_num_samples)
            break
    print("Data loaded. {} samples".format(len(data)))
    print("Discarded for unavailable image: ", len(discarded_items["unavailable_image"]))
    print("Discarded for blank image: ", len(discarded_items["blank_image"]))
    return data
ALIGNED = True  # use the "_aligned" crops instead of the original images
class RAFDBMulti:
    """Multitask RAF-DB dataset wrapper (gender, age group, race, emotion).

    Loads samples for one partition, caching the sample list as a pickle
    under ``cache/`` so subsequent runs skip the image scan.
    """
    def __init__(self,
                 partition='train',
                 imagesdir='data/RAF-DB/basic/Image/{aligned}',
                 csvmeta='data/RAF-DB/basic/multitask/{part}.multitask_rafdb.csv',
                 target_shape=(112, 112, 3),
                 augment=True,
                 custom_augmentation=None,
                 preprocessing='full_normalization',
                 debug_max_num_samples=None,
                 include_gender=False,
                 include_age_group=False,
                 include_race=False,
                 **kwargs):
        partition_label = partition_select(partition)
        self.target_shape = target_shape
        self.custom_augmentation = custom_augmentation
        self.augment = augment
        self.gen = None  # lazily-created DataGenerator
        self.preprocessing = preprocessing
        print('Loading %s data...' % partition)
        num_samples = "_" + str(debug_max_num_samples) if debug_max_num_samples is not None else ''
        # Cache file name encodes which auxiliary tasks are included.
        cache_task = "{}{}{}_emotion".format(
            "_withgender" if include_gender else "",
            "_withagegroup" if include_age_group else "",
            "_withrace" if include_race else ""
        )
        cache_file_name = 'rafdb{task}_{partition}{num_samples}.cache'.format(task=cache_task, partition=partition, num_samples=num_samples)
        cache_file_name = os.path.join("cache", cache_file_name)
        cache_file_name = os.path.join(EXT_ROOT, cache_file_name)
        print("cache file name %s" % cache_file_name)
        try:
            # Fast path: reuse the pickled sample list.
            with open(cache_file_name, 'rb') as f:
                self.data = pickle.load(f)[:debug_max_num_samples]
                print("Data loaded. %d samples, from cache" % (len(self.data)))
        except FileNotFoundError:
            # Slow path: scan images and metadata, then cache the result.
            print("Loading %s data from scratch" % partition)
            load_partition = "train" if partition_label == PARTITION_TRAIN or partition_label == PARTITION_VAL else "test"
            imagesdir = os.path.join(EXT_ROOT, imagesdir.format(aligned="aligned" if ALIGNED else "original"))
            csvmeta = os.path.join(EXT_ROOT, csvmeta.format(part=load_partition))
            _load_meta_from_csv(csvmeta, rafDBmeta)
            _load_traits(rafDBmeta, include_gender, include_age_group, include_race)
            print("Loading {} dataset".format(partition))
            loaded_data = _load_dataset(imagesdir, partition_label, debug_max_num_samples)
            print_verbose_partition(dataset_partition=rafDBpartition, verbosed_partition=partition_label)
            if partition.startswith('test'):
                self.data = loaded_data
            else:
                # Train and val are loaded together; keep only the requested split.
                self.data = [x for x in loaded_data if x['part'] == partition_label]
            with open(cache_file_name, 'wb') as f:
                print("Pickle dumping")
                pickle.dump(self.data, f)
    def get_data(self):
        """Return the raw list of sample dicts."""
        return self.data
    def get_num_samples(self):
        """Return the number of samples in this partition."""
        return len(self.data)
    def get_generator(self, batch_size=64, fullinfo=False, doublelabel=False):
        """Return (creating on first call) the batched DataGenerator."""
        if self.gen is None:
            self.gen = DataGenerator(data=self.data,
                                     target_shape=self.target_shape,
                                     with_augmentation=self.augment,
                                     custom_augmentation=self.custom_augmentation,
                                     batch_size=batch_size,
                                     num_classes=self.get_num_classes(),
                                     preprocessing=self.preprocessing,
                                     fullinfo=fullinfo,
                                     doublelabel=doublelabel)
        return self.gen
    def get_num_classes(self):
        """Return the per-task class counts (module-level CLASSES)."""
        return CLASSES
def test_multi(dataset="test", debug_samples=None):
    """Visual smoke test: iterate a partition and show each labelled face.

    Press any key to advance, 'q' to quit. Interactive only (cv2 windows).
    """
    if dataset.startswith("train") or dataset.startswith("val"):
        print(dataset, debug_samples if debug_samples is not None else '')
        dt = RAFDBMulti(dataset,
                        target_shape=(112, 112, 3),
                        preprocessing='vggface2',
                        debug_max_num_samples=debug_samples)
        gen = dt.get_generator()
    else:
        dv = RAFDBMulti('test',
                        target_shape=(112, 112, 3),
                        preprocessing='vggface2',
                        debug_max_num_samples=debug_samples)
        gen = dv.get_generator()
    i = 0
    for batch in tqdm(gen):
        for im, gender, age, ethnicity, emotion in zip(batch[0], batch[1][0], batch[1][1], batch[1][2], batch[1][3]):
            facemax = np.max(im)
            facemin = np.min(im)
            print("Sample:", i)
            print("Labels:", gender, age, ethnicity, emotion)
            print("Gender:", verbose_gender(gender),
                  "- Age:", verbose_age(age),
                  "- Ethnicity:", verbose_ethnicity(ethnicity),
                  "- Emotion:", verbose_emotion(emotion))
            # Undo preprocessing scaling so the image is displayable.
            im = (255 * ((im - facemin) / (facemax - facemin))).astype(np.uint8)
            cv2.putText(im, "{} {} {} {}".format(verbose_gender(gender), verbose_age(age), verbose_ethnicity(ethnicity), verbose_emotion(emotion)),
                        (0, im.shape[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255))
            cv2.imshow("{} {} {} {}".format(verbose_gender(gender), verbose_age(age), verbose_ethnicity(ethnicity), verbose_emotion(emotion)), im)
            i += 1
            if cv2.waitKey(0) & 0xFF == ord('q'):
                cv2.destroyAllWindows()
                return
if '__main__' == __name__:
    # Visual smoke test over all three partitions.
    test_multi("train")
    test_multi("val")
    test_multi("test")
|
9,687 | 3d59b8d6a34935ff332028443276f161430a981c | class A():
    # Demo class: defines m() so method resolution order can be observed.
    def m(self):
        print("Class A")
class B():
    # Same method name as A; B comes first in C's bases, so C picks this one.
    def m(self):
        print("Class B")
class C(B, A):
    # This print runs once, at class-definition time (not on instantiation).
    # C defines no m(), so instances resolve m via the MRO: C -> B -> A -> object.
    print("class C")
obj1 = C()
obj1.m()  # resolves to B.m, the first match along C's MRO
print(C.mro()) # Method Resolution Order based on convention of "OBJECT" super class
|
9,688 | fdea48b6012b67327aea90e40eacbea5a1930d07 | print("This program calculates whether the year is a leap year or not")
# Read the year as text so non-numeric input can be rejected up front.
year_text = input("Please enter the Year: ")
if not year_text.isdecimal():
    print("Invalid input")
else:
    year = int(year_text)
    # Gregorian rule: divisible by 4 but not by 100, unless divisible by 400.
    is_leap = (year % 400 == 0) or (year % 4 == 0 and year % 100 != 0)
    if is_leap:
        print("{0} is a leap year".format(year))
    else:
        print("{0} is not a leap year".format(year))
9,689 | f24516d8977b10b1ccece2f8eaec6e08ce0c2e16 | from functools import partial
import utils.functions as fn
import random as rd
import numpy as np
import time
class NeuralNetwork:
    """Minimal fully-connected network with one hidden layer.

    Weights are initialised uniformly in [-1, 1] (via fn.translate) and
    trained with plain backpropagation using the sigmoid activation from
    the project's ``utils.functions`` module.
    """

    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Map the uniform [0, 1) samples onto [-1, 1].
        rescale = partial(fn.translate, start1=0, stop1=1, start2=-1, stop2=1)

        def init_weights(rows, cols):
            return fn.apply(np.random.rand(rows, cols), rescale)

        self.weights = {
            'i-h': init_weights(self.hidden_size, self.input_size),
            'h-o': init_weights(self.output_size, self.hidden_size),
            'h-b': init_weights(self.hidden_size, 1),
            'o-b': init_weights(self.output_size, 1),
        }

    def calculate_layer_values(self, inputs: np.ndarray, layer_key: str, bias_key: str):
        """Sigmoid(W @ inputs + bias) for the given weight/bias keys."""
        weighted = np.matmul(self.weights[layer_key], inputs)
        biased = np.add(weighted, self.weights[bias_key])
        return fn.apply(biased, fn.sigmoid)

    def calculate_layer_gradient_delta(self, layer: np.ndarray, next_layer: np.ndarray, layer_errors: np.ndarray, learning_rate: float):
        """Return (bias gradient, weight delta) for one layer's update."""
        derivative = fn.apply(layer, fn.dsigmoid)
        layer_gradient = learning_rate * np.multiply(derivative, layer_errors)
        layer_delta = np.matmul(layer_gradient, np.transpose(next_layer))
        return layer_gradient, layer_delta

    def feed_forward(self, inputs: list) -> np.ndarray:
        """Run one forward pass; ``inputs`` is a flat list of features."""
        column = np.array([inputs], dtype=np.double).T
        hidden = self.calculate_layer_values(column, 'i-h', 'h-b')
        return self.calculate_layer_values(hidden, 'h-o', 'o-b')

    def train(self, inputs: list, labels: list, learning_rate: float):
        """One stochastic backprop step on a single (inputs, labels) pair."""
        x = np.asarray([inputs], dtype=np.double).T
        targets = np.asarray([labels], dtype=np.double).T
        hidden = self.calculate_layer_values(x, 'i-h', 'h-b')
        output = self.calculate_layer_values(hidden, 'h-o', 'o-b')
        output_errors = np.subtract(targets, output)
        out_gradient, out_delta = self.calculate_layer_gradient_delta(
            output, hidden, output_errors, learning_rate)
        self.weights['h-o'] = np.add(self.weights['h-o'], out_delta)
        self.weights['o-b'] = np.add(self.weights['o-b'], out_gradient)
        # NOTE: hidden errors are propagated through the *updated* h-o weights,
        # matching the original update order exactly.
        hidden_errors = np.matmul(self.weights['h-o'].T, output_errors)
        hid_gradient, hid_delta = self.calculate_layer_gradient_delta(
            hidden, x, hidden_errors, learning_rate)
        self.weights['i-h'] = np.add(self.weights['i-h'], hid_delta)
        self.weights['h-b'] = np.add(self.weights['h-b'], hid_gradient)
if __name__ == "__main__":
    # XOR training demo: 2 inputs, 2 hidden units, 1 output.
    nn = NeuralNetwork(2, 2, 1)
    data_set = [
        ([0, 0], [0]),
        ([0, 1], [1]),
        ([1, 0], [1]),
        ([1, 1], [0]),
    ]
    start = time.perf_counter()
    # 10k epochs of stochastic updates, reshuffling the four cases each epoch.
    for _ in range(10000):
        rd.shuffle(data_set)
        for in_data, t_data in data_set:
            nn.train(in_data, t_data, 0.1)
    end = time.perf_counter()
    print("Took ->>", end-start, "s")
    print('-'*100)
    # Outputs should approach 1, 1, 0, 0 respectively after training.
    print(nn.feed_forward([0, 1]))
    print(nn.feed_forward([1, 0]))
    print(nn.feed_forward([0, 0]))
    print(nn.feed_forward([1, 1]))
    print('-'*100)
9,690 | a903f9c5cae1c2eb2f40dc8ba29f0625a3d34224 | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import xlrd
from enum import Enum
from sklearn import linear_model
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
import statsmodels.formula.api as smf
import statsmodels.api as sm
import statsmodels.formula.api as smf
def forward_selected(data, response):
    """Linear model designed by forward selection.

    Parameters
    ----------
    data : pandas DataFrame with all possible predictors and response
    response : string, name of response column in data

    Returns
    -------
    model : an "optimal" fitted statsmodels linear model with an intercept,
        selected by forward selection and evaluated by adjusted R-squared.
    """
    candidates = set(data.columns)
    candidates.remove(response)
    chosen = []
    current_score = best_new_score = 0.0
    # Greedily add the predictor that most improves adjusted R^2; stop when
    # no candidate improves the score (or none remain).
    while candidates and current_score == best_new_score:
        scored = sorted(
            (smf.ols("{} ~ {} + 1".format(response, ' + '.join(chosen + [c])),
                     data).fit().rsquared_adj, c)
            for c in candidates
        )
        best_new_score, best_candidate = scored[-1]
        if current_score < best_new_score:
            candidates.remove(best_candidate)
            chosen.append(best_candidate)
            current_score = best_new_score
    formula = "{} ~ {} + 1".format(response, ' + '.join(chosen))
    model = smf.ols(formula, data).fit()
    print(chosen)
    return model
def backwardElimination(x, y, sl):
    """Backward elimination of OLS predictors above a p-value threshold.

    Repeatedly fits ``sm.OLS(y, x)`` and removes the column whose coefficient
    has the largest p-value while that p-value exceeds the significance
    level ``sl``.

    Parameters
    ----------
    x : 2-D numpy array of predictors (columns are candidate features).
    y : 1-D numpy array, the response.
    sl : float, significance level (e.g. 0.05).

    Returns
    -------
    numpy array: ``x`` restricted to the retained columns.

    Note: this restores the function the script below calls at
    ``model = backwardElimination(SecX, SecY, 0.05)``; the earlier commented
    draft was broken (``x = (x, j, 1)`` built a tuple instead of deleting a
    column). Column removal is done with ``np.delete(x, j, 1)``.
    """
    numVars = len(x[0])
    for i in range(0, numVars):
        regressor_OLS = sm.OLS(y, x).fit()
        maxVar = max(regressor_OLS.pvalues).astype(float)
        if maxVar > sl:
            for j in range(0, numVars - i):
                if regressor_OLS.pvalues[j].astype(float) == maxVar:
                    # Drop column j (axis=1) and refit on the next iteration.
                    x = np.delete(x, j, 1)
                    break
    regressor_OLS.summary()
    return x
# Per-conference college receiver stats; absolute Windows paths are machine-specific.
dfBIG=pd.read_csv("C:\\Users\\family\\Desktop\\Big12and10.csv")
dfSEC=pd.read_csv("C:\\Users\\family\\Desktop\\SEC.csv")#- For SEC data
dfPAC=pd.read_csv("C:\\Users\\family\\Desktop\\AtlanticCoast.csv")#- For Atlantic Coast and Pac12
df_Predict=pd.read_csv("C:\\Users\\family\\Desktop\\PredictV2.csv")
#plt.scatter(dfBIG['DP'],dfBIG['YDS/GAME'])
# Predictor matrices per conference (draft position, catch rate, YAC, combine
# numbers, usage) plus the rookie yards-per-game column for some frames.
SecX=dfSEC[['DP','CATCH_P','YAC','YAC_COMP','FORTYYD','REC','TD','YDS_TA','BroadJump','TARGETS','ROOKIE_YDS_GAME']]# Works for SEC
BigX=dfBIG[['DP','CATCH_P','YAC','YAC_COMP','FORTYYD','REC','TD','YDS_TA','BroadJump','TARGETS','ROOKIE_YDS_GAME']] #Works for AtlanticCoast/Pac12 and Big 10/12
#PacX=dfPAC[['DP','CATCH_P','YAC','YAC_COMP','FORTYYD','REC','TD','YDS_TA','BroadJump','TARGETS','ROOKIE_YDS_GAME']] #Works for AtlanticCoast/Pac12 and Big 10/12
PacX=dfPAC[['DP','CATCH_P','YAC','YAC_COMP','FORTYYD','REC','TD','YDS_TA','BroadJump','TARGETS']] #Works for AtlanticCoast/Pac12 and Big 10/12
#PacX=dfPAC[['DP','CATCH_%','40YD','REC','TD','YDS/TA','TARGETS']] #Works for AtlanticCoast/Pac12 and Big 10/12
#PredictSecX=df_Predict[['DP','CATCH_%','YAC','YAC/COMP','40YD','REC','TARGETS','TD','YDS/TA','Broad Jump']]
# Candidate response variables per conference.
PacY=dfPAC['AVG_YDS_SEASON']
SecY=dfSEC['AVG_YDS_SEASON']
BigY=dfBIG['AVG_YDS_SEASON']
PacZ=dfPAC['YDS_GAME']
BigZ=dfBIG['YDS_GAME']
SecZ=dfSEC['YDS_GAME']
PacJ=dfPAC['MAX_YDS_SEASON']
SecJ=dfSEC['MAX_YDS_SEASON']
BigJ=dfBIG['MAX_YDS_SEASON']
PacK=dfPAC['ROOKIE_YDS_GAME']
SecK=dfSEC['ROOKIE_YDS_GAME']
BigK=dfBIG['ROOKIE_YDS_GAME']
# PacK=dfPAC['ROOKIE_YDS']
# SecK=dfSEC['ROOKIE_YDS']
# BigK=dfBIG['ROOKIE_YDS']
# model=forward_selected(SecX,'ROOKIE_YDS')
# print(model)
# regrPac = linear_model.LinearRegression()
# regrSec=linear_model.LinearRegression()
# regrBig=linear_model.LinearRegression()
# regPAC=regrPac.fit(PacX, PacK)
# regSEC=regrSec.fit(SecX, SecK)
# SecX=sm.add_constant(SecX)
# regSEC=sm.OLS(SecK,SecX)
# regBIG=sm.OLS(BigK,BigX)
regPAC=sm.OLS(PacK,PacX)
# resultsSEC=regSEC.fit()
resultsPAC=regPAC.fit()
SecX=SecX.to_numpy()
SecY=SecY.to_numpy()
# NOTE(review): ensure backwardElimination is defined (see the commented
# draft above) before running — otherwise this call raises NameError.
model=backwardElimination(SecX,SecY,0.05)
print(model)
# resultsBIG=regBIG.fit()
#model=forward_selected(PacX,'ROOKIE_YDS_GAME')
# for i in df_Predict.index:
#     print(df_Predict['Conference'][i])
#     if df_Predict['Conference'][i]=='Southeastern':
#         print(df_Predict['Player'][i])
#         pred=regrSec.predict([[df_Predict['DP'][i],df_Predict['CATCH_P'][i],df_Predict['YAC'][i],df_Predict['YAC_COMP'][i],df_Predict['40YD'][i],df_Predict['REC'][i],df_Predict['TD'][i],df_Predict['YDS/TA'][i],df_Predict['Broad Jump'][i]]])
#         if pred<0:
#             pred=0
#         print('Predicted AVG_YDS/SEASON: \n', pred)
#     if df_Predict['Conference'][i]=='Big':
#         print(df_Predict['Player'][i])
#         print('Predicted AVG_YDS/SEASON: \n', regrBig.predict([[df_Predict['DP'][i],df_Predict['CATCH_P'][i],df_Predict['YAC'][i],df_Predict['YAC_COMP'][i],df_Predict['40YD'][i],df_Predict['REC'][i],df_Predict['TD'][i],df_Predict['YDS/TA'][i],df_Predict['Broad Jump'][i]]]))
#     if df_Predict['Conference'][i]=='Pac-12':
#         print(df_Predict['Player'][i])
#         pred=regrPac.predict([[df_Predict['DP'][i],df_Predict['CATCH_P'][i],df_Predict['YAC'][i],df_Predict['YAC_COMP'][i],df_Predict['40YD'][i],df_Predict['REC'][i],df_Predict['TD'][i],df_Predict['YDS/TA'][i],df_Predict['Broad Jump'][i]]])
#         if pred<0:
#             pred=0
#         print('Predicted AVG_YDS/SEASON: \n', pred)
# print (resultsSEC.rsquared_adj)
# print(resultsSEC.summary())
#print (resultsPAC.rsquared_adj)
# print (resultsBIG.rsquared_adj)
# print(model.summary())
#print(model.rsquared_adj)
# print('AVG_YDS/GAME\n')
#print('Intercept: \n', regrSec.intercept_)
#print('Coefficients: \n', regrSec.coef_)
#print("R^2: \n",regSEC.score(pcaSecX,SecK))
#print("R^2: \n",regSEC.score(SecX,SecK))
# regPAC=regrPac.fit(PacX, PacZ)
# regBIG=regrBig.fit(BigX,BigZ)
# regSEC=regrSec.fit(SecX,SecY)
# print('YDS/GAME\n')
# print('Intercept: \n', regrPac.intercept_)
# print('Coefficients: \n', regrPac.coef_)
# print("R^2: \n",regPAC.score(PacX,PacZ) )
# regPAC=regrPac.fit(PacX,PacJ)
9,691 | a2fe62b6bbb6b753ef6aec6f44758b8aceeeafe6 | from __future__ import division
from collections import deque
import os
import warnings
import numpy as np
import keras.backend as K
import keras.layers as layers
import keras.optimizers as optimizers
from rl.core import Agent
from rl.util import *
def mean_q(y_true, y_pred):
    """Keras metric: batch mean of each sample's maximum predicted Q-value."""
    max_q = K.max(y_pred, axis=-1)
    return K.mean(max_q)
# Deep DPG as described by Lillicrap et al. (2015)
# http://arxiv.org/pdf/1509.02971v2.pdf
# http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.646.4324&rep=rep1&type=pdf
class UBDDPGAgent(Agent):
    """Multi-player (upper-bound) variant of Deep DPG (Lillicrap et al., 2015).

    The actor emits ``nb_players`` action heads and the critic scores each of
    them; at act time the head with the highest Q-value is executed. Target
    networks are updated either softly (``target_model_update`` < 1) or with
    periodic hard copies.
    """
    def __init__(self, nb_actions, actor, critic, nb_players, critic_action_inputs, memory,
                 gamma=.99, batch_size=32, nb_steps_warmup_critic=1000, nb_steps_warmup_actor=1000,
                 train_interval=1, memory_interval=1, delta_range=None, delta_clip=np.inf,
                 random_process=None, custom_model_objects={}, target_model_update=.001, **kwargs):
        # NOTE(review): custom_model_objects={} is a mutable default argument,
        # and the ValueError message below is a tuple, so .format on it would
        # raise AttributeError if that branch ever fires — confirm/fix upstream.
        assert len(critic_action_inputs) == nb_players
        if hasattr(actor.output, '__len__') and len(actor.output) != nb_players:
            raise ValueError((
                'Actor "{}" does not have the right number of ',
                'outputs. DDPG expects an actor that has {} outputs.'
            ).format(actor, nb_players))
        # if hasattr(critic.output, '__len__') and len(critic.output) > 1:
        #     raise ValueError('Critic "{}" has more than one output. DDPG expects a critic that has a single output.'.format(critic))
        for critic_action_input in critic_action_inputs:
            if critic_action_input not in critic.input:
                raise ValueError('Critic "{}" does not have designated action input "{}".'.format(critic, critic_action_input))
        if not hasattr(critic.input, '__len__') or len(critic.input) < 2:
            raise ValueError('Critic "{}" does not have enough inputs. The critic must have at least two inputs, one for the action and one for the observation.'.format(critic))
        super(UBDDPGAgent, self).__init__(**kwargs)
        # Soft vs hard target model updates.
        if target_model_update < 0:
            raise ValueError('`target_model_update` must be >= 0.')
        elif target_model_update >= 1:
            # Hard update every `target_model_update` steps.
            target_model_update = int(target_model_update)
        else:
            # Soft update with `(1 - target_model_update) * old + target_model_update * new`.
            target_model_update = float(target_model_update)
        if delta_range is not None:
            warnings.warn('`delta_range` is deprecated. Please use `delta_clip` instead, which takes a single scalar. For now we\'re falling back to `delta_range[1] = {}`'.format(delta_range[1]))
            delta_clip = delta_range[1]
        # Parameters.
        self.nb_actions = nb_actions
        self.nb_steps_warmup_actor = nb_steps_warmup_actor
        self.nb_steps_warmup_critic = nb_steps_warmup_critic
        self.random_process = random_process
        self.delta_clip = delta_clip
        self.gamma = gamma
        self.target_model_update = target_model_update
        self.batch_size = batch_size
        self.train_interval = train_interval
        self.memory_interval = memory_interval
        self.custom_model_objects = custom_model_objects
        # Related objects.
        self.actor = actor
        self.critic = critic
        self.nb_players = nb_players
        self.critic_action_inputs = critic_action_inputs
        # Positions of the action tensors inside the critic's input list,
        # used later to splice actions into state batches.
        self.critic_action_input_idxes = [
            self.critic.input.index(critic_action_input)
            for critic_action_input in critic_action_inputs
        ]
        self.memory = memory
        # State.
        self.compiled = False
        self.reset_states()
    @property
    def uses_learning_phase(self):
        # True if either network behaves differently at train vs test time.
        return self.actor.uses_learning_phase or self.critic.uses_learning_phase
    def compile(self, optimizer, metrics=[]):
        """Build target networks, compile the critic, and assemble the
        combined actor-training function (policy-gradient updates)."""
        metrics += [mean_q]
        if type(optimizer) in (list, tuple):
            if len(optimizer) != 2:
                raise ValueError('More than two optimizers provided. Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.')
            actor_optimizer, critic_optimizer = optimizer
        else:
            actor_optimizer = optimizer
            critic_optimizer = clone_optimizer(optimizer)
        if type(actor_optimizer) is str:
            actor_optimizer = optimizers.get(actor_optimizer)
        if type(critic_optimizer) is str:
            critic_optimizer = optimizers.get(critic_optimizer)
        assert actor_optimizer != critic_optimizer
        if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(metrics[1], '__len__'):
            actor_metrics, critic_metrics = metrics
        else:
            actor_metrics = critic_metrics = metrics
        def clipped_error(y_true, y_pred):
            # Huber loss clipped at delta_clip, averaged over the last axis.
            y_true = K.squeeze(y_true, axis=-1)
            y_pred = K.squeeze(y_pred, axis=-1)
            loss = K.mean(
                # K.random_uniform(shape=(self.batch_size, self.nb_players), minval=0., maxval=1.) *
                huber_loss(y_true, y_pred, self.delta_clip),
                axis=-1)
            # y_true = K.print_tensor(y_true, message='y_true: ')
            # y_pred = K.print_tensor(y_pred, message='y_pred: ')
            # loss = K.print_tensor(loss, message='loss: ')
            return loss
        # Compile target networks. We only use them in feed-forward mode, hence we can pass any
        # optimizer and loss since we never use it anyway.
        self.target_actor = clone_model(self.actor, self.custom_model_objects)
        self.target_actor.compile(optimizer='sgd', loss='mse')
        self.target_critic = clone_model(self.critic, self.custom_model_objects)
        self.target_critic.compile(optimizer='sgd', loss='mse')
        # We also compile the actor. We never optimize the actor using Keras but instead compute
        # the policy gradient ourselves. However, we need the actor in feed-forward mode, hence
        # we also compile it with any optimzer and
        self.actor.compile(optimizer='sgd', loss='mse')
        # Compile the critic.
        if self.target_model_update < 1.:
            # We use the `AdditionalUpdatesOptimizer` to efficiently soft-update the target model.
            critic_updates = get_soft_target_model_updates(self.target_critic, self.critic, self.target_model_update)
            critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer, critic_updates)
        self.critic.compile(
            optimizer=critic_optimizer,
            loss=[clipped_error]*self.nb_players,
            metrics=critic_metrics)
        # Combine actor and critic so that we can get the policy gradient.
        # Assuming critic's state inputs are the same as actor's.
        critic_inputs = []
        actor_inputs = []
        for i in self.critic.input:
            if i in self.critic_action_inputs:
                critic_inputs.append([])
            else:
                critic_inputs.append(i)
                actor_inputs.append(i)
        actor_outputs = self.actor(actor_inputs)
        if not isinstance(actor_outputs, (list,)):
            actor_outputs = [actor_outputs]
        assert len(actor_outputs) == self.nb_players
        # Feed each actor head into its matching critic action slot.
        for input_idx, actor_output in zip(self.critic_action_input_idxes, actor_outputs):
            critic_inputs[input_idx] = actor_output
        # critic_outputs = layers.Maximum()(self.critic(critic_inputs))
        critic_outputs = self.critic(critic_inputs)
        if not isinstance(critic_outputs, (list,)):
            critic_outputs = [critic_outputs]
        assert len(critic_outputs) == self.nb_players
        # Policy-gradient loss per head: maximize Q => minimize -mean(Q).
        actor_losses = [None]* self.nb_players
        for input_idx, critic_output in zip(self.critic_action_input_idxes, critic_outputs):
            actor_losses[input_idx] = -K.mean(critic_output)
        updates = actor_optimizer.get_updates(
            params=self.actor.trainable_weights,
            loss=actor_losses)
        if self.target_model_update < 1.:
            # Include soft target model updates.
            updates += get_soft_target_model_updates(self.target_actor, self.actor, self.target_model_update)
        updates += self.actor.updates  # include other updates of the actor, e.g. for BN
        # Finally, combine it all into a callable function.
        if K.backend() == 'tensorflow':
            self.actor_train_fn = K.function(actor_inputs + [K.learning_phase()],
                                             actor_outputs, updates=updates)
        else:
            if self.uses_learning_phase:
                actor_inputs += [K.learning_phase()]
            self.actor_train_fn = K.function(actor_inputs, actor_outputs, updates=updates)
        self.actor_optimizer = actor_optimizer
        self.compiled = True
    def load_weights(self, filepath):
        """Load actor/critic weights from '<name>_actor.<ext>'/'<name>_critic.<ext>'."""
        filename, extension = os.path.splitext(filepath)
        actor_filepath = filename + '_actor' + extension
        critic_filepath = filename + '_critic' + extension
        self.actor.load_weights(actor_filepath)
        self.critic.load_weights(critic_filepath)
        self.update_target_models_hard()
    def save_weights(self, filepath, overwrite=False):
        """Save actor/critic weights to '<name>_actor.<ext>'/'<name>_critic.<ext>'."""
        filename, extension = os.path.splitext(filepath)
        actor_filepath = filename + '_actor' + extension
        critic_filepath = filename + '_critic' + extension
        self.actor.save_weights(actor_filepath, overwrite=overwrite)
        self.critic.save_weights(critic_filepath, overwrite=overwrite)
    def update_target_models_hard(self):
        """Copy online weights into the target networks (hard update)."""
        self.target_critic.set_weights(self.critic.get_weights())
        self.target_actor.set_weights(self.actor.get_weights())
    # TODO: implement pickle
    def reset_states(self):
        """Reset exploration noise, episode bookkeeping, and RNN states."""
        if self.random_process is not None:
            self.random_process.reset_states()
        self.recent_action = None
        self.recent_observation = None
        if self.compiled:
            self.actor.reset_states()
            self.critic.reset_states()
            self.target_actor.reset_states()
            self.target_critic.reset_states()
    def process_state_batch(self, batch):
        """Convert a batch of states to ndarray, applying the processor if set."""
        batch = np.array(batch)
        if self.processor is None:
            return batch
        return self.processor.process_state_batch(batch)
    def select_action(self, state):
        """Pick the actor head with the highest critic Q-value, plus noise."""
        batch = self.process_state_batch([state])
        # actions = [action.flatten() for action in self.actor.predict_on_batch(batch)]
        actions = self.actor.predict_on_batch(batch)
        if self.nb_players == 1:
            actions =[actions]
        # actions = [a.flatten() for a in actions]
        assert len(actions) == self.nb_players
        # assert actions[0].shape == (self.nb_actions,)
        assert actions[0].shape == (1, self.nb_actions)
        # print('actions: {}'.format(actions))
        if len(self.critic.inputs) > (self.nb_players+1): # state is a list
            state_batch_with_action = batch[:]
        else:
            state_batch_with_action = [batch]
        for action_idx, input_idx in enumerate(self.critic_action_input_idxes):
            state_batch_with_action.insert(input_idx, actions[action_idx])
        q_values = [
            qv.flatten()
            for qv in self.critic.predict_on_batch(state_batch_with_action)
        ]
        assert q_values[0].shape == (1, )
        assert len(q_values) == self.nb_players
        # print('q_values: {}'.format(q_values))
        # "Upper bound": execute the head the critic scores highest.
        action_best = actions[np.argmax(q_values)].flatten()
        # assert action_best.shape == (self.nb_actions, )
        assert action_best.shape == (self.nb_actions, )
        # print('action_best: {}'.format(action_best))
        # print(type(action_best[0]))
        # Apply noise, if a random process is set.
        if self.training and self.random_process is not None:
            noise = self.random_process.sample()
            assert noise.shape == action_best.shape
            action_best += noise
        return action_best
    def forward(self, observation):
        """Agent API: choose an action for the current observation."""
        # Select an action.
        state = self.memory.get_recent_state(observation)
        action = self.select_action(state)  # TODO: move this into policy
        # Book-keeping.
        self.recent_observation = observation
        self.recent_action = action
        return action
    @property
    def layers(self):
        return self.actor.layers[:] + self.critic.layers[:]
    @property
    def metrics_names(self):
        names = self.critic.metrics_names[:]
        if self.processor is not None:
            names += self.processor.metrics_names[:]
        return names
    def backward(self, reward, terminal=False):
        """Agent API: store the transition and run one training step."""
        # Store most recent experience in memory.
        if self.step % self.memory_interval == 0:
            self.memory.append(self.recent_observation, self.recent_action, reward, terminal,
                               training=self.training)
        metrics = [np.nan for _ in self.metrics_names]
        if not self.training:
            # We're done here. No need to update the experience memory since we only use the working
            # memory to obtain the state over the most recent observations.
            return metrics
        # Train the network on a single stochastic batch.
        can_train_either = self.step > self.nb_steps_warmup_critic or self.step > self.nb_steps_warmup_actor
        if can_train_either and self.step % self.train_interval == 0:
            experiences = self.memory.sample(self.batch_size)
            assert len(experiences) == self.batch_size
            # Start by extracting the necessary parameters (we use a vectorized implementation).
            state0_batch = []
            reward_batch = []
            action_batch = []
            terminal1_batch = []
            state1_batch = []
            for e in experiences:
                state0_batch.append(e.state0)
                state1_batch.append(e.state1)
                reward_batch.append(e.reward)
                action_batch.append(e.action)
                terminal1_batch.append(0. if e.terminal1 else 1.)
            # Prepare and validate parameters.
            state0_batch = self.process_state_batch(state0_batch)
            state1_batch = self.process_state_batch(state1_batch)
            terminal1_batch = np.array(terminal1_batch)
            reward_batch = np.array(reward_batch)
            action_batch = np.array(action_batch)
            assert reward_batch.shape == (self.batch_size,)
            assert terminal1_batch.shape == reward_batch.shape
            assert action_batch.shape == (self.batch_size, self.nb_actions)
            # Update critic, if warm up is over.
            if self.step > self.nb_steps_warmup_critic:
                target_actions = self.target_actor.predict_on_batch(state1_batch)
                if not isinstance(target_actions, (list,)):
                    target_actions = [target_actions]
                assert len(target_actions) == self.nb_players
                assert target_actions[0].shape == (self.batch_size, self.nb_actions)
                if len(self.critic.inputs) > (self.nb_players+1): # state is a list
                # if len(self.critic.inputs) >= 3:
                    state1_batch_with_action = state1_batch[:]
                else:
                    state1_batch_with_action = [state1_batch]
                # state1_batch_with_action.insert(self.critic_action_input_idx, target_actions)
                for action_idx, input_idx in enumerate(self.critic_action_input_idxes):
                    state1_batch_with_action.insert(input_idx, target_actions[action_idx])
                target_q_values = self.target_critic.predict_on_batch(state1_batch_with_action)
                if not isinstance(target_q_values, (list,)):
                    target_q_values = [target_q_values]
                target_q_values = [ tqv.flatten() for tqv in target_q_values]
                assert target_q_values[0].shape == reward_batch.shape
                assert len(target_q_values) == self.nb_players
                # Compute r_t + gamma * Q(s_t+1, mu(s_t+1)) and update the target ys accordingly,
                # but only for the affected output units (as given by action_batch).
                # terminal1_batch zeroes the bootstrap term on episode ends.
                discounted_reward_batch = [
                    self.gamma * terminal1_batch * tqv
                    for tqv in target_q_values
                ]
                assert discounted_reward_batch[0].shape == reward_batch.shape
                targets = [reward_batch + drb for drb in discounted_reward_batch]  # .reshape(self.batch_size, 1)
                assert targets[0].shape == reward_batch.shape
                assert len(targets) == self.nb_players
                # Perform a single batch update on the critic network.
                # if len(self.critic.inputs) >= 3:
                if len(self.critic.inputs) > (self.nb_players+1): # state is a list
                    state0_batch_with_action = state0_batch[:]
                else:
                    state0_batch_with_action = [state0_batch]
                for input_idx in self.critic_action_input_idxes:
                    state0_batch_with_action.insert(input_idx, action_batch)
                # state0_batch_with_action.insert(self.critic_action_input_idx, action_batch)
                metrics = self.critic.train_on_batch(
                    state0_batch_with_action,
                    targets)
                if self.processor is not None:
                    metrics += self.processor.metrics
                # q_values = self.critic.predict_on_batch(state0_batch_with_action)
                # if not isinstance(q_values, (list,)):
                #     q_values = [q_values]
                # q_values = [ qv.flatten() for qv in q_values]
                # print('gamma: {}'.format(self.gamma))
                # print('terminal1_batch: {}'.format(terminal1_batch))
                # print('target_q_values: {}'.format(target_q_values))
                # print('discounted_reward_batch: {}'.format(discounted_reward_batch))
                # print('reward_batch: {}'.format(reward_batch))
                # print('targets: {}'.format(targets))
                # print('current q values: {}'.format(q_values))
            # Update actor, if warm up is over.
            if self.step > self.nb_steps_warmup_actor:
                # TODO: implement metrics for actor
                if len(self.actor.inputs) >= 2:
                    inputs = state0_batch[:]
                else:
                    inputs = [state0_batch]
                if self.uses_learning_phase:
                    inputs += [self.training]
                action_values = self.actor_train_fn(inputs)
                assert len(action_values) == self.nb_players
                assert action_values[0].shape == (self.batch_size, self.nb_actions)
        if self.target_model_update >= 1 and self.step % self.target_model_update == 0:
            self.update_target_models_hard()
        return metrics
|
9,692 | f5c4057babc873099ae2a4d8c1aca960ab9fa30a | import numpy as np
from numpy import random
from sklearn.preprocessing import StandardScaler
from sklearn.cross_validation import train_test_split
from numpy.random import shuffle
import matplotlib.pyplot as plt
import numpy.linalg as la
import sklearn.preprocessing as proc
import csv
def get_accuracy(a, b, X_test, y_test):
    """Return the fraction of test rows whose SVM prediction sign matches the label.

    Parameters
    ----------
    a : array-like
        Weight vector; shape (6,) or (1, 6) are both accepted.
    b : float
        Bias term.
    X_test : sequence
        Feature rows, each with 6 numeric entries.
    y_test : sequence
        Labels in {-1, +1}, same length as X_test.

    Returns
    -------
    float
        Accuracy in [0, 1].  Returns 0.0 for an empty test set instead of
        raising ZeroDivisionError (bug fix).
    """
    size = len(y_test)
    if size == 0:
        return 0.0
    # Flatten the weights once so (6,) and (1, 6) inputs behave identically,
    # instead of reshaping every sample inside the loop.
    w = np.asarray(a).reshape(-1)
    count = 0
    for x, real in zip(X_test, y_test):
        prediction = float(np.dot(x, w)) + b
        # Only a strictly matching sign counts as correct; prediction == 0 is
        # counted as wrong, matching the original logic.
        if (prediction > 0 and real == 1) or (prediction < 0 and real == -1):
            count += 1
    return count / size
# ---- Load the adult-income training file and keep the six numeric columns ----
data = []
with open("train.txt") as file:
    data = [line.split() for line in file]
X = []
y = []
for line in data:
    # Numeric columns (each token carries a trailing comma that is stripped).
    numerical = [int(line[0][:-1]), int(line[2][:-1]), int(line[4][:-1]), \
        int(line[10][:-1]), int(line[11][:-1]), int(line[12][:-1])]
    X.append(numerical)
    if line[14] == '<=50K':
        y.append(-1)
    else:
        y.append(1)

# Standardise features (zero mean / unit variance per column), then apply the
# original script's extra global centering step.
scaler = StandardScaler()
X = scaler.fit_transform(X)
X = X - np.mean(X)

# Sweep of regularisation strengths; per-lambda histories of validation
# accuracy and of the (a, b) parameters sampled every 30 SGD steps.
lambdas = [0.001, 0.01, 0.1, 1]
dict_accuracy = {lamb: [] for lamb in lambdas}
dict_a = {lamb: [] for lamb in lambdas}
dict_b = {lamb: [] for lamb in lambdas}
a = 0
b = 0
for lamb in lambdas:
    a = np.zeros(6)
    b = 0
    for epoch in range(50):
        # 90% train / 10% held-out split, redrawn each epoch.
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.1)
        if epoch == 49:
            result = get_accuracy(a, b, X_test, y_test)
            print(str(lamb) + ' : ' + str(result))
        # BUG FIX: the original called shuffle(X_train) alone, permuting the
        # feature rows without permuting y_train and destroying the row/label
        # correspondence.  Permute both with one shared index permutation.
        y_train = np.asarray(y_train)
        perm = random.permutation(len(X_train))
        X_train = X_train[perm]
        y_train = y_train[perm]
        validation_train = X_train[0:50]
        validation_test = y_train[0:50]
        # BUG FIX: start at 50 -- the original [51:] silently dropped row 50.
        train_data = X_train[50:]
        train_test = y_train[50:]
        m = 1
        n = 50
        # Decaying learning rate m / (0.01 * epoch + n).
        step_size = m / (0.01 * epoch + n)
        for step in range(500):
            if step % 30 == 0:
                # Periodic snapshot of validation accuracy and parameters.
                accuracy = get_accuracy(a, b, validation_train, validation_test)
                dict_accuracy[lamb].append(accuracy)
                dict_a[lamb].append(a)
                dict_b[lamb].append(b)
            # Pegasos-style SGD: one uniformly random training example per step.
            curr = random.randint(0, len(train_data))
            curr_train = np.array(train_data[curr])
            curr_train = curr_train.reshape(1, 6)
            curr_val = (curr_train.dot(a.T) + b) * train_test[curr]
            if curr_val >= 1:
                # Margin satisfied: only the L2 regularisation gradient applies.
                a = a - np.dot(a, lamb) * step_size
            else:
                # Hinge-loss gradient for a margin-violating point.
                a = a - step_size * (np.dot(a, lamb) - np.dot(train_data[curr], train_test[curr]))
                b = b - (step_size * (-train_test[curr]))
# NOTE(review): the block below is plotting code that was disabled by wrapping
# it in a module-level triple-quoted string (a no-op expression statement).
# It is kept verbatim; re-enable it by removing the surrounding quotes.
'''
x_val = [i for i in range(1, 851)]
# dict_accuracy
fig = plt.figure()
ax1 = fig.add_subplot(411)
ax2 = fig.add_subplot(412)
ax3 = fig.add_subplot(413)
ax4 = fig.add_subplot(414)
fig.tight_layout()
y1 = dict_accuracy[0.001]
y2 = dict_accuracy[0.01]
y3 = dict_accuracy[0.1]
y4 = dict_accuracy[1]
ax1.plot(x_val, y1, color='m')
ax2.plot(x_val, y2, color='g')
ax3.plot(x_val, y3, color='r')
ax4.plot(x_val, y4, color='b')
ax1.set_xlabel('lambda = 0.001')
ax2.set_xlabel('lambda = 0.01')
ax3.set_xlabel('lambda = 0.1')
ax4.set_xlabel('lambda = 1')
plt.show()
#########################################
a_norm = {}
for lamb in lambdas:
    a_norm[lamb] = []
for lamb in dict_a:
    curr_list = dict_a[lamb]
    for curr in curr_list:
        norm = la.norm(curr, 2)
        a_norm[lamb].append(norm)
plt.plot(x_val, a_norm[0.001], label = 'lambda is 0.001', color = 'b')
plt.plot(x_val, a_norm[0.01], label = 'lambda is 0.01', color = 'r')
plt.plot(x_val, a_norm[0.1], label = 'lambda is 0.01', color = 'g')
plt.plot(x_val, a_norm[1], label = 'lambda is 1', color = 'm')
plt.legend()
plt.show()
'''
# ---- Train the final model with the best lambda and score the test file ----
lamb = 0.001
a = random.dirichlet(np.ones(6)*1000, size = 1)
b = 0

# Re-load and re-standardise the training data.
data = []
with open("train.txt") as file:
    data = [line.split() for line in file]
X = []
y = []
for line in data:
    numerical = [int(line[0][:-1]), int(line[2][:-1]), int(line[4][:-1]), \
        int(line[10][:-1]), int(line[11][:-1]), int(line[12][:-1])]
    X.append(numerical)
    if line[14] == '<=50K':
        y.append(-1)
    else:
        y.append(1)
scaler = StandardScaler()
X = scaler.fit_transform(X)
# Remember the global shift so the unlabeled test file gets the exact same
# preprocessing as the training data.
train_shift = np.mean(X)
X = X - train_shift

for epoch in range(30):
    if epoch == 29:
        # Evaluated on the split drawn in the PREVIOUS epoch (original order).
        result = get_accuracy(a, b, X_test, y_test)
        print(str(lamb) + ' : ' + str(result))
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.1)
    # BUG FIX: permute features and labels together; shuffle(X_train) alone
    # broke the row/label correspondence.
    y_train = np.asarray(y_train)
    perm = random.permutation(len(X_train))
    X_train = X_train[perm]
    y_train = y_train[perm]
    validation_train = X_train[0:50]
    validation_test = y_train[0:50]
    # BUG FIX: [50:] instead of [51:] so row 50 is not silently dropped.
    train_data = X_train[50:]
    train_test = y_train[50:]
    m = 1
    n = 50
    step_size = m / (0.01 * epoch + n)
    for step in range(300):
        # One SGD step on a uniformly random training example.
        curr = random.randint(0, len(train_data))
        curr_train = np.array(train_data[curr])
        curr_train = curr_train.reshape(1, 6)
        curr_val = (curr_train.dot(a.T) + b) * train_test[curr]
        if curr_val >= 1:
            a = a - np.dot(a, lamb) * step_size
        else:
            a = a - step_size * (np.dot(a, lamb) - np.dot(train_data[curr], train_test[curr]))
            b = b - (step_size * (-train_test[curr]))

# ---- Predict on the unlabeled test file ----
data = []
with open("test.txt") as file:
    data = [line.split() for line in file]
X = []
for line in data:
    numerical = [int(line[0][:-1]), int(line[2][:-1]), int(line[4][:-1]), \
        int(line[10][:-1]), int(line[11][:-1]), int(line[12][:-1])]
    X.append(numerical)
# BUG FIX: the model was trained on standardised, centred features, but the
# original scored the test rows on RAW values.  Apply the fitted scaler and
# the same global shift before predicting.
X = scaler.transform(X) - train_shift
prediction = []
for k in X:
    numerical = np.array(k)
    estimate = numerical.dot(a.T) + b
    if estimate < 0:
        prediction.append('<=50K')
    else:
        prediction.append('>50K')
# Kaggle-style output: quoted example index plus predicted label.
index_final = []
for i in range(len(prediction)):
    index_final.append(["'" + str(i) + "'", prediction[i]])
with open('output.csv', 'w') as csvfile:
    writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
    writer.writerow(['Example', 'Label'])
    writer.writerows(index_final)
|
9,693 | ee16b91ce1c12ce78d23ff655304aebc39cb1639 | from superwires import games, color
import random
# Global score shown by the ScText sprite.  NOTE(review): nothing in the
# current code ever increments it -- all SCORE updates below are commented out.
SCORE = 0

# Initialise the superwires/pygame window before any sprites are created.
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Pan(games.Sprite):
    """A frying-pan sprite that tracks the mouse horizontally."""

    def update(self):
        """Follow the mouse's x coordinate, then resolve pizza collisions."""
        # Vertical position stays fixed; only horizontal tracking is active.
        self.x = games.mouse.x
        self.check_collide()

    def check_collide(self):
        """Notify every overlapping pizza that it has hit the pan."""
        for caught in self.overlapping_sprites:
            caught.handle_collide()
class Pizza(games.Sprite):
    """A pizza that bounces off the left, right and top edges of the screen."""

    def update(self):
        """Reverse velocity components when an edge is reached."""
        global SCORE  # declared for the (currently disabled) scoring logic
        past_right = self.right > games.screen.width
        past_left = self.left < 0
        if past_right or past_left:
            self.dx = -self.dx
        # Only the top edge bounces vertically; the bottom is left open so a
        # missed pizza can fall off-screen.
        if self.top < 0:
            self.dy = -self.dy

    def handle_collide(self):
        """Bounce vertically when the pan catches the pizza."""
        self.dy = -self.dy
class ScText(games.Text):
    """Text sprite that mirrors the global SCORE every frame."""

    def update(self):
        """Refresh the displayed value from the global score counter."""
        self.value = SCORE
def main():
    """Build the pizzeria scene, wire up all sprites, and run the game loop."""
    # Load artwork.
    bg_img = games.load_image("images/pizzeria.jpg", transparent = True)
    pizza_img = games.load_image("images/pizza.png")
    pan_img = games.load_image("images/mousepoint.png")

    games.screen.background = bg_img

    # Four pizzas starting at the screen centre, each with a random velocity.
    pizzas = [
        Pizza(image = pizza_img,
              x = games.screen.width/2, y = games.screen.height/2,
              dx = random.randint(-10, 10), dy = random.randint(-10, 10))
        for _ in range(4)
    ]

    pan = Pan(image = pan_img, x = games.mouse.x, y = games.mouse.y)

    score = ScText(value = SCORE, size = 60,
                   is_collideable = False,
                   color = color.black,
                   x = 550,
                   y = 30)

    # Draw everything: pizzas first, then the score, then the pan on top.
    for sprite in pizzas:
        games.screen.add(sprite)
    games.screen.add(score)
    games.screen.add(pan)

    # Hide the system cursor but do not grab/lock the mouse to the window.
    games.mouse.is_visible = False
    games.screen.event_grab = False

    games.screen.mainloop()

    # NOTE(review): this line runs only after mainloop() returns (window
    # closed); it re-adds the score sprite exactly as the original script did.
    games.screen.add(score)
# Entry point: start the game.
main()
##angle - Facing in degrees
##
##x - x-coordinate
##
##y - y-coordinate
##
##dx - x velocity
##
##dy - y velocity
##
##left - x-coordinate of left sprite edge
##
##right - x-coordinate of right sprite edge
##
##top - y-coordinate of top sprite edge
##
##bottom - y-coordinate of bottom sprite edge
##
##image - image object of sprite
##
##overlapping_sprites - List of other objects that overlap sprite
##
##is_collideable - Whether or not the sprite is collideable. True means sprite will register in collisions. False means sprite will not show up in collisions.
##Methods
##
##update() - Updates sprite. Automatically called every mainloop() cycle.
##
##destroy() - Removes sprite from the screen
|
9,694 | d28f5f95b375a1e075fdfcbc0350c90cf96f0212 | #python的运算符实例
# Examples of Python operators.
# '+' addition operator
# Adds two objects (two numbers, or two strings -- concatenating the strings)
a=7+8
print(a)
b="GOOD"+"Job"
print(b)
# '-' subtraction operator
# Negates a number, or subtracts one number from another
c=-7
print(c)
print(19-1)
# '*' multiplication operator
# Multiplies numbers; with a string, repeats it the given number of times
d=4*7
print(d)
e="hello"*7
print(e)
# '/' division operator
# Divides two numbers (in Python 3 this is true division and yields a float)
f=7/2
print(f)
# '**' exponentiation operator
g=2**3
print(g)
# '<' less-than operator; returns a boolean value
h=3<7
print(h)
|
9,695 | a6713a4edece14a88bd9c8ddd483ff8e16acdbcc | from unittest import TestCase
from attendance import Member
__author__ = 'colin'
class TestMember(TestCase):
    """Unit tests for attendance.Member."""

    def test_here(self):
        """here() flips the attended flag from False to True."""
        member = Member("John", "Doe")
        # A freshly created member has not attended yet.
        self.assertFalse(member.attended)
        member.here()
        self.assertTrue(member.attended)
9,696 | 237f1f72ac3ef381f115a88025518f387825ff79 | # Jarvis interface class definition
import kernel.service
class interface(kernel.service.service):
    """Base class for Jarvis user-facing interfaces, built on the service base."""

    def __init__(self, name):
        # Identifier used to register and look up this interface.
        self.name = name
|
9,697 | cf2973b94f1113013fe9baa946202ec75488f7d2 | #Exercise 2 - Write a Python class which has two methods get_String and print_String. get_String accept a string
#from the user and print_String print the string in upper case
#string will be an input to a get_string method and whatever you put in will print when you make the print screen method
class IOString():
    """Holds one string; reads it from stdin and prints it upper-cased."""

    def __init__(self):
        # Empty until get_String() populates it from user input.
        self.str1 = ""

    def get_String(self):
        """Read a line of text from the user into self.str1."""
        self.str1 = input()

    def print_String(self):
        """Print the stored string converted to upper case."""
        print(self.str1.upper())
# Demo: read a string from the user and echo it back in upper case.
str1 = IOString()
str1.get_String()
str1.print_String()
9,698 | 5b8d1bd026e97bb7508a500048f940abf0253471 | # def test_categories:
# ["5S", "5H", "5D", "4S", "4H", "4D", "3D", "3S"]
import unittest
from poker import Hand, makeCard, Rank, count_ranks, RankCount, max_straight
class TestHand(unittest.TestCase):
    """Unit tests for poker.Hand categorisation, comparison and straights.

    NOTE(review): several tests pass the result of map() straight into
    max_straight().  On Python 3, map() returns a one-shot iterator, so if
    max_straight() iterates its argument more than once the later passes see
    an exhausted sequence.  The commented-out `print heaped_multiples` below
    is Python 2 syntax, so this module likely targets Python 2 -- confirm the
    target version before running under Python 3.
    """

    # def test_heap_multiples(self):
    #     heaped_multiples = Hand.heap_multiples({"J":4, "2":3})
    #     print heaped_multiples
    #     self.assertEqual(heaped_multiples, [(4, "J"), (3,"2")], "failure in heap_multiples")

    def test_max_straight(self):
        # Plain 5-card straight: expect it back sorted high-to-low.
        cards = map(makeCard, ["10S", "6S", "9S", "8S", "7S"])
        straight = max_straight(cards)
        self.assertEqual(straight, sorted(map(makeCard, ["10S", "6S", "9S", "8S", "7S"]), reverse=True))
        # A duplicate rank (8C) must not break straight detection.
        cards = map(makeCard, ["10S", "6S", "9S", "8S", "8C", "7S"])
        straight = max_straight(cards)
        self.assertEqual(straight, sorted(map(makeCard, ["10S", "6S", "9S", "8S", "7S"]), reverse=True))
        # Six consecutive ranks (5..10): the HIGHEST 5-card straight wins.
        cards = map(makeCard, ["10S", "6S", "9S", "8S", "5C", "7S"])
        straight = max_straight(cards)
        self.assertEqual(straight, sorted(map(makeCard, ["10S", "6S", "9S", "8S", "7S"]), reverse=True))

    def test_categories(self):
        # One representative 5-card hand per category, best to worst.
        my_hand = Hand(["KH", "QH", "JH", "AH", "10H"])
        self.assertEqual(my_hand.category, Hand.Categories.straight_flush)
        my_hand = Hand(["10S", "6S", "9S", "8S", "7S"])
        self.assertEqual(my_hand.category, Hand.Categories.straight_flush)
        my_hand = Hand(["JH", "JC", "9H", "JS", "JD"])
        self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)
        my_hand = Hand(["JH", "JC", "JS", "9D", "9H"])
        self.assertEqual(my_hand.category, Hand.Categories.full_house)
        my_hand = Hand(["10S", "9S", "8S", "5S", "6S"])
        self.assertEqual(my_hand.category, Hand.Categories.flush)
        my_hand = Hand(["10H", "6S", "9D", "8S", "7S"])
        self.assertEqual(my_hand.category, Hand.Categories.straight)
        my_hand = Hand(["JH", "JC", "9H", "JS", "8D"])
        self.assertEqual(my_hand.category, Hand.Categories.three_of_a_kind)
        my_hand = Hand(["JH", "JC", "QS", "9D", "9H"])
        self.assertEqual(my_hand.category, Hand.Categories.two_pair)
        my_hand = Hand(["JH", "JC", "QS", "5D", "9H"])
        self.assertEqual(my_hand.category, Hand.Categories.pair)
        my_hand = Hand(["JH", "3C", "4S", "5C", "9H"])
        self.assertEqual(my_hand.category, Hand.Categories.high_card)

    def test_category_options(self):
        # With more than 5 cards the BEST category must be chosen.
        my_hand = Hand(["10H", "6S", "9D", "8S", "7S", "7D", "7H"])
        self.assertEqual(my_hand.category, Hand.Categories.straight)
        my_hand = Hand(["10H", "6S", "9D", "8S", "7S", "7D", "7H", "7C"])
        self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)
        my_hand = Hand(["10H", "6S", "9D", "8S", "7S", "7D", "7H", "8C"])
        self.assertEqual(my_hand.category, Hand.Categories.full_house)
        my_hand = Hand(["10S", "9S", "8S", "5S", "6S", "10H", "6D", "9D", "8C", "7C"])
        self.assertEqual(my_hand.category, Hand.Categories.flush)
        my_hand = Hand(["KH", "QH", "JH", "AH", "10H", "10S", "6S", "9S", "8S", "7S"])
        self.assertEqual(my_hand.category, Hand.Categories.straight_flush)
        # It gets the royal flush
        my_hand = Hand(["5S", "5H", "5D", "4S", "4H", "4D", "3D", "3S"])
        self.assertEqual(my_hand.category, Hand.Categories.full_house)
        # It gets the fours
        my_hand = Hand(["5S", "5H", "5D", "5C", "4S", "4H", "3C", "3D", "3S"])
        self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)
        # get the 4 kicker

    def test_cmp(self):
        # Hand ordering: a pair beats a high card; a flush beats a straight.
        pair_to_high_card = Hand(["JH", "JC", "QS", "5D", "9H"]) < Hand(["JH", "3C", "4S", "5C", "9H"])
        self.assertEqual(pair_to_high_card, False)
        straight_to_flush = Hand(["10H", "6S", "9D", "8S", "7S"]) < Hand(["10S", "9S", "8S", "5S", "6S"])
        self.assertEqual(straight_to_flush, True)

    def test_deck_validation(self):
        """
        Test with some hands that are impossible to form with a 52-card deck

        Five-of-a-kind
        Something that is both a flush and has a pair (flush wins)
        Something that is both a flush and four-of-a-kind (four-of-a-kind wins)
        """
        # TODO: not yet implemented.
        pass
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
9,699 | 86f365612e9f15e7658160ecab1d3d9970ca364e | # Autor : Kevin Oswaldo Palacios Jimenez
# Fecha de creacion: 16/09/19
# Se genera un bucle con for
# al no tener argumento print no genera ningun cambio
# mas que continuar a la siguiente linea
for i in range (1,11):
encabezado="Tabla del {}"
print(encabezado.format(i))
print()
# Usaremos un for dentro de otro generando un bucle mas
for j in range(1,11):
# en donde i tendremos la base
# con j tendriamos el elemento
salida="{} x {} = {}"
print(salida.format(i,j,i*j))
else:
# con el bucle teniendo su proceso iterativo
# se saltaran las linea pero ejecutando el codigo
print() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.