prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
from pycp2k.inputsection import InputSection
from ._each174 import _each174


class _diis_info3(InputSection):
    """Auto-generated CP2K input section for the DIIS_INFO print key.

    Maps Python attribute names to the corresponding CP2K input keywords;
    the actual serialization is handled by the ``InputSection`` base class.
    """

    def __init__(self):
        InputSection.__init__(self)
        # Section argument (the print-level, e.g. MEDIUM) and plain keywords.
        self.Section_parameters = None
        self.Add_last = None
        self.Common_iteration_levels = None
        self.Filename = None
        self.Log_print_key = None
        # Nested EACH subsection controlling print frequency.
        self.EACH = _each174()
        # Metadata consumed by InputSection: CP2K section name,
        # attribute -> keyword mapping, subsections, and section arguments.
        self._name = "DIIS_INFO"
        self._keywords = {
            'Log_print_key': 'LOG_PRINT_KEY',
            'Filename': 'FILENAME',
            'Add_last': 'ADD_LAST',
            'Common_iteration_levels': 'COMMON_ITERATION_LEVELS',
        }
        self._subsections = {'EACH': 'EACH'}
        self._attributes = ['Section_parameters']
imitations # under the License. import uuid from keystoneclient import exceptions from keystoneclient import fixture from keystoneclient.tests.unit.v2_0 import utils from keystoneclient.v2_0 import client from keystoneclient.v2_0 import tenants from keystoneclient.v2_0 import users class TenantTests(utils.TestCase): def setUp(self): super(TenantTests, self).setUp() self.INVIS_ID = uuid.uuid4().hex self.DEMO_ID = uuid.uuid4().hex self.ADMIN_ID = uuid.uuid4().hex self.EXTRAS_ID = uuid.uuid4().hex self.TEST_TENANTS = { "tenants": {
"values": [ { "enabled": True, "description": "A description cha
nge!", "name": "invisible_to_admin", "id": self.INVIS_ID, }, { "enabled": True, "description": "None", "name": "demo", "id": self.DEMO_ID, }, { "enabled": True, "description": "None", "name": "admin", "id": self.ADMIN_ID, }, { "extravalue01": "metadata01", "enabled": True, "description": "For testing extras", "name": "test_extras", "id": self.EXTRAS_ID, } ], "links": [], }, } def test_create(self): req_body = { "tenant": { "name": "tenantX", "description": "Like tenant 9, but better.", "enabled": True, "extravalue01": "metadata01", }, } id_ = uuid.uuid4().hex resp_body = { "tenant": { "name": "tenantX", "enabled": True, "id": id_, "description": "Like tenant 9, but better.", "extravalue01": "metadata01", } } self.stub_url('POST', ['tenants'], json=resp_body) tenant = self.client.tenants.create( req_body['tenant']['name'], req_body['tenant']['description'], req_body['tenant']['enabled'], extravalue01=req_body['tenant']['extravalue01'], name="don't overwrite priors") self.assertIsInstance(tenant, tenants.Tenant) self.assertEqual(tenant.id, id_) self.assertEqual(tenant.name, "tenantX") self.assertEqual(tenant.description, "Like tenant 9, but better.") self.assertEqual(tenant.extravalue01, "metadata01") self.assertRequestBodyIs(json=req_body) def test_duplicate_create(self): req_body = { "tenant": { "name": "tenantX", "description": "The duplicate tenant.", "enabled": True }, } resp_body = { "error": { "message": "Conflict occurred attempting to store project.", "code": 409, "title": "Conflict", } } self.stub_url('POST', ['tenants'], status_code=409, json=resp_body) def create_duplicate_tenant(): self.client.tenants.create(req_body['tenant']['name'], req_body['tenant']['description'], req_body['tenant']['enabled']) self.assertRaises(exceptions.Conflict, create_duplicate_tenant) def test_delete(self): self.stub_url('DELETE', ['tenants', self.ADMIN_ID], status_code=204) self.client.tenants.delete(self.ADMIN_ID) def test_get(self): resp = {'tenant': 
self.TEST_TENANTS['tenants']['values'][2]} self.stub_url('GET', ['tenants', self.ADMIN_ID], json=resp) t = self.client.tenants.get(self.ADMIN_ID) self.assertIsInstance(t, tenants.Tenant) self.assertEqual(t.id, self.ADMIN_ID) self.assertEqual(t.name, 'admin') def test_list(self): self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS) tenant_list = self.client.tenants.list() [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list] def test_list_limit(self): self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS) tenant_list = self.client.tenants.list(limit=1) self.assertQueryStringIs('limit=1') [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list] def test_list_marker(self): self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS) tenant_list = self.client.tenants.list(marker=1) self.assertQueryStringIs('marker=1') [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list] def test_list_limit_marker(self): self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS) tenant_list = self.client.tenants.list(limit=1, marker=1) self.assertQueryStringIs('marker=1&limit=1') [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list] def test_update(self): req_body = { "tenant": { "id": self.EXTRAS_ID, "name": "tenantX", "description": "I changed you!", "enabled": False, "extravalue01": "metadataChanged", # "extraname": "dontoverwrite!", }, } resp_body = { "tenant": { "name": "tenantX", "enabled": False, "id": self.EXTRAS_ID, "description": "I changed you!", "extravalue01": "metadataChanged", }, } self.stub_url('POST', ['tenants', self.EXTRAS_ID], json=resp_body) tenant = self.client.tenants.update( req_body['tenant']['id'], req_body['tenant']['name'], req_body['tenant']['description'], req_body['tenant']['enabled'], extravalue01=req_body['tenant']['extravalue01'], name="don't overwrite priors") self.assertIsInstance(tenant, tenants.Tenant) self.assertRequestBodyIs(json=req_body) self.assertEqual(tenant.id, self.EXTRAS_ID) 
self.assertEqual(tenant.name, "tenantX") self.assertEqual(tenant.description, "I changed you!") self.assertFalse(tenant.enabled) self.assertEqual(tenant.extravalue01, "metadataChanged") def test_update_empty_description(self): req_body = { "tenant": { "id": self.EXTRAS_ID, "name": "tenantX", "description": "", "enabled": False, }, } resp_body = { "tenant": { "name": "tenantX", "enabled": False, "id": self.EXTRAS_ID, "description": "", }, } self.stub_url('POST', ['tenants', self.EXTRAS_ID], json=resp_body) tenant = self.client.tenants.update(req_body['tenant']['id'], req_body['tenant']['name'], req_body['tenant']['description'], req_body['tenant']['enabled']) self.assertIsInstance(tenant, tenants.Tenant) self.assertRequestBodyIs(json=req_body) self.assertEqual(tenant.id, self.EXTRAS_ID) self.assertEqual(tenant.name, "tenantX") self.assertEqual(tenant.description, "") self.assertFalse(tenant.enabled) def test_add_user(self): self.stub_url('PUT', ['tenants', self.EXTRAS_ID, 'users', 'foo', 'roles', 'OS-KSADM', 'barrr'], status_code=204) self.client.tenants.add_user(self.EXTRAS_ID, 'foo', 'barrr') def test_remove_use
= 0.0000001 # Vdd rise setup time to nMCLR/Vpp rise delayP14 = 0.00000001 # Data out Valid from SCK rise delayP15 = 0.000002 # PGM rise setup time to nMCLR/Vpp rise userIDLocationSize = 8 userIDLocationAddr = 0x200000 deviceIDAddr = 0x3FFFFE configWordAddr = 0x300000 deviceIDLength = 2 voltageVDD = 5 voltageVPP = 12 def __init__(self, chipPackage, chipPinVCC, chipPinsVPP, chipPinGND, signature, flashPageSize, flashPages, eepromPageSize, eepromPages, fuseBytes ): Chip.__init__(self, chipPackage=chipPackage, chipPinVCC=chipPinVCC, chipPinsVPP=chipPinsVPP, chipPinGND=chipPinGND) self.signature = signature self.flashPageSize = flashPageSize # Flash page size, in words self.flashPages = flashPages # Nr of flash pages self.eepromPageSize = eepromPageSize # EEPROM page size, in bytes self.eepromPages = eepromPages # Nr of EEPROM pages self.fuseBytes = fuseBytes # Nr of fuse bytes self.isInPmMode = False self.BufferedBytes = 0 self.Image = b"" def getIHexInterpreter(self): inter = IHexInterpreter() inter.progmemRanges = [ AddressRange(0, self.flashPageSize) ] inter.fuseRanges = [ AddressRange(self.configWordAddr, self.configWordAddr + self.fuseBytes) ] inter.uilRanges = [ AddressRange(self.userIDLocationAddr, self.userIDLocationAddr + self.userIDLocationSize) ] return inter def enterPM(self, force=False): if self.isInPmMode and not force: return "Enter HV programming mode. 
Vdd first entry mode" self.applyVCC(False) self.applyVPP(False) self.applyGND(False) self.setPins(0, 0) self.top.cmdSetVCCVoltage(self.voltageVDD) self.top.cmdSetVPPVoltage(self.voltageVPP) self.applyGND(True) self.applyVCC(True) self.top.hostDelay(10 * self.delayP13) self.applyVPP(True) self.top.hostDelay(102 * self.delayP12) self.setTopProgrammerDelays() self.isInPmMode = True def readUserIdLocation(self): return self.readSequentialBlock(self.userIDLocationAddr, self.userIDLocationSize, "Reading User ID Locations") def readFuse(self): return self.readSequentialBlock(self.configWordAddr, self.fuseBytes, "Reading Config Words") def readSignature(self): return self.readSequentialBlock(self.deviceIDAddr, self.deviceIDLength, "Reading Signature") def readProgmem(self): nrBytes = self.flashPages * self.flashPageSize return self.readSequentialBlock(0, nrBytes, "Reading flash") def readSequentialBlock(self, startAddr, nBytes, infoText): self.enterPM() self.progressMeterInit(infoText, nBytes) self.BufferedBytes = 0 self.Image = b"" self.executeCode(self.getCodeAddrToTBLPTR(startAddr)) for byteAddr in range(0, nBytes): self.send4bitReadInstruction(self.CMD_TRI) self.progressMeter(byteAddr) self.progressMeterFinish() self.flushBufferToImage() return self.Image def writeSequentialBlock(self, startAddr, image, size, infoText): if len(image) > size: self.throwError("Invalid flash image size %d (expected <=%d)" % \ (len(image), self.userIDLocationSize)) self.enterPM() self.executeCode((0x8EA6, 0x9CA6)) self.progressMeterInit(infoText, len(image) // 8) for blockAddr in range(0, len(image), self.writeBufferSize): #print("addr:{:x}".format(startAddr+blockAddr)) self.executeCode(self.getCodeAddrToTBLPTR(startAddr+blockAddr)) #for code in self.getCodeAddrToTBLPTR(startAddr+blockAddr): # print("({:x}, ".format(code)) print(")\n") self.writeNbytes(image[blockAddr:], self.writeBufferSize) #self.executeCode((0x0, 0x0)) self.progressMeter(blockAddr) self.progressMeterFinish() def 
readEEPROM(self): nrBytes = self.eepromPages * self.eepromPageSize self.enterPM() self.progressMeterInit("Reading EEPROM", nrBytes) self.BufferedBytes = 0 self.Image = b"" self.executeCode((0x9EA6, 0x9CA6)) for byteAddr in range(0, nrBytes): # print("set addr to {:x}\n".format(byteAddr)) self.setEEPROMAddr(byteAddr) self.executeCode((0x80A6, 0x50A8, 0x6EF5)) self.send4bitReadInstruction(self.CMD_SHIFT_OUT_TABLAT) self.progressMeter(byteAddr) self.progressMeterFinish() self.flushBufferToImage() return self.Image def writeEEPROM(self, image): nrBytes = self.eepromPages * self.eepromPageSize if len(image) > nrBytes: self.throwError("Invalid flash image size {:d} (expected <={:d})".format(len(image), nrBytes)) self.enterPM() self.progressMeterInit("Writing eeprom", len(image)) self.executeCode((0x9EA6, 0x9CA6)) for addr in range(0, len(image)): self.progressMeter(addr) #print("writing {:x} value to addr {:x}\n".format(byte2int(image[addr]), addr)) self.setEEPROMAddr(addr) self.executeCode((0x0E00 | (byte2int(image[addr]) & 0xFF), 0x6EA8)) self.executeCode((0x84A6, 0x0E55, 0x6EA7, 0x0EAA, 0x6EA7)) self.executeCode((0x82A6, 0x0, 0x0)) self.top.hostDelay(self.delayP11 + self.delayP10) self.executeCode((0x94A6,)) self.progressMeterFinish() def writeNbytes(self, image, N): if N % 2: self.throwError("N should be even, not %d" % N) isEmpty = True #N = (pN, len(image))[len(image) < pN] for idx in range(0, N): if idx == len(image): image += b'\xFF' elif byte2int(image[idx]) != 0xFF: isEmpty = False if(not isEmpty): for wordAddr in range(0, N-2, 2): self.send4bitWriteInstruction(self.CMD_TWII, byte2int(image[wordAddr]) | (byte2int(image[wordAddr + 1]) << 8)) self.send4bitWriteInstruction(self.CMD_TW_START_PROG, byte2int(image[N-2]) | (byte2int(image[N-1]) << 8)) self.top.cmdFPGAWrite(0x12, 0x81) self.top.hostDelay(self.delayP9) self.setPins(0) self.top.cmdDelay(self.delayP10) for i in range(0,4): self.sendCommand(1) def writeUserIdLocation(self, image): 
self.writeSequentialBlock(self.userIDLocationAddr, image, self.userIDLocationSize, "Writing User ID Locations") def checkSignature(self): signature = self.readSignature() if signature != self.signature: msg = "Unexpected device signature. " + \ "Want %02X%02X%02X, but got %02X%02X%02X" % \ (byte2int(self.signature[0]), byte2int(self.signature[1]), byte2int(self.signature[2]), byte2int(signature[0]), byte2int(signature[1]), byte2int(signature[2])) if self.top.getForceLevel() >= 1: self.printWarning(msg) else: self.throwError(msg) def writeProgmem(self, image): nrBytes = self.flashPages * self.flashPageSize if len(image) > nrBytes: self.throwError("Invalid flash image size %d (expected <=%d)" % \ (len(image), nrBytes)) self.writeSequentialBlock(0, image, nrBytes, "Writing flash") def writeFuse(self, ima
ge): self.enterPM() if len(image) > self.fuseBytes: self.throwError("Invalid Fus
es image size %d (expected less than %d)" % \ (len(image), self.fuseBytes)) self.executeCode((0x8EA6, 0x8CA6, 0xEF00, 0xF800)) for fuseAddr in range(0,len(image)): self.executeCode(self.getCodeAddrToTBLPTR(self.configWordAddr+fuseAddr)) if(fuseAddr & 0x01): byte = byte2int(image[fuseAddr]) << 8 else: byte = byte2int(image[fuseAddr]) self.send4bitWriteInstruction(self.CMD_TW_START_PROG, byte) self.top.cmdFPGAWrite(0x12, 0x81) #self.setPins(1) self.top.hostDelay(self.delayP9) self.setPins(0) self.top.cmdDelay(self.delayP10) for i in range(0,4): self.sendCommand(1) #self.executeCode((0x2AF6,)) self.writeSequentialBlock(self.configWordAddr, image, self.fuseBytes, "Writing fuses") self.progressMeterInit("Writing fuses", 0) def exitPM(self): "Exit programming mode. Vdd last exit mode" self.top.flushCommands() self.setPins(0, 0) self.applyVPP(False) self.applyVCC(False) self.applyGND(False) self.isInPmMode = False # ready for 18F below def send4bitReadInstruction(self, pInstruction): def incBbAndCheckFillImage(): self.BufferedBytes += 1 if self.BufferedBytes == self.top.getBufferRegSize(): self.flushBufferToImage() # self.sendCommand(1,0,1,pInstruction) self.sendCommand(1, 0, 1, pInstruction) # self.busyWait() self.readSDOBufferHigh() incBbAndCheckFillImage() def send4bitWriteInstruction(self, pInstruction, pDataPayload): # self.busyWait()
# Packaging script for the Adafruit_GPIO library.
# Bootstrap setuptools first so the build works even where it is missing.
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
import sys

# Define required packages.
requires = []
# Assume spidev is required on non-windows & non-mac platforms (i.e. linux).
# NOTE(review): this treats every non-win32/non-darwin platform as Linux —
# presumably fine for the Pi/BeagleBone targets; confirm for other platforms.
if sys.platform != 'win32' and sys.platform != 'darwin':
    requires.append('spidev')

setup(name = 'Adafruit_GPIO',
      version = '0.8.0',
      author = 'Tony DiCola',
      author_email = 'tdicola@adafruit.com',
      description = 'Library to provide a cross-platform GPIO interface on the Raspberry Pi and Beaglebone Black using the RPi.GPIO and Adafruit_BBIO libraries.',
      license = 'MIT',
      url = 'https://github.com/adafruit/Adafruit_Python_GPIO/',
      install_requires = requires,
      packages = find_packages())
cla
ss LSA(object): def __init__(
self,input_path,output_path): super(LSA,self).__init__() self.input_path = input_path self.output_path = output_path self.hpfx = 'k, bins: ['
rt time class SettingsEntry(db.Model): valid_users_entry = db.ListProperty(str,indexed=False,default=None) secret_hash_entry = db.StringProperty() class TemperatureEntry(db.Model): date = db.DateTimeProperty(auto_now_add=True) room = db.StringProperty() temperature = db.FloatProperty() target = db.FloatProperty() furnacestate = db.IntegerProperty() mode = db.StringProperty() outside = db.FloatProperty() other = db.FloatProperty() class DailyTemperatureEntry(db.Model): date = db.IntegerProperty() temp_entry = db.TextProperty() target_entry = db.TextProperty() furnace_entry = db.TextProperty() room_entry = db.TextProperty() mode_entry = db.TextProperty() outside_entry = db.TextProperty() class TargetEntry(db.Model): date = db.IntegerProperty() target_temperature_entry = db.IntegerProperty() target_start_minutes_entry = db.IntegerProperty() target_held_minutes_entry = db.IntegerProperty() target_executed = db.BooleanProperty() default_temperature_entry = db.IntegerProperty() default_temperature_mode_entry = db.TextProperty() class MainHandler(webapp.RequestHandler): def get(self): self.redirect('http://www.google.com/') class Temperature(webapp.RequestHandler): def post(self): secret_hash = db.GqlQuery( "SELECT * FROM SettingsEntry LIMIT 1")[0].secret_hash_entry temp = str(float(cgi.escape(self.request.get('t')))) target = str(float(cgi.escape(self.request.get('g')))) furnace = str(cgi.escape(self.request.get('f'))) room = str(cgi.escape(self.request.get('r'))) home = str(cgi.escape(self.request.get('h'))) outside = str(float(cgi.escape(self.request.get('o')))) mode = str(cgi.escape(self.request.get('m'))) strS = str(cgi.escape(self.request.get('s'))) # secret added since I don't want just anyone to pollute my furnace data if hashlib.sha512(strS).hexdigest() == secret_hash: rightNow = int(time.time()) dayAgo = rightNow-86400 recent_record = DailyTemperatureEntry.gql( "WHERE date > :1 ORDER BY date DESC", dayAgo) rightNow = str(rightNow) if recent_record.count()!=0: # 
update entry dayObj = recent_record[0] dayObj.temp_entry = dayObj.temp_entry + \ '['+rightNow+','+temp+'],' dayObj.target_entry = dayObj.target_entry + \ '['+rightNow+','+target+'],' dayObj.furnace_entry = dayObj.furnace_entry + \ '['+rightNow+','+furnace+'],' dayObj.room_entry = dayObj.room_entry + \ '['+rightNow+','+room+'],' dayObj.mode_entry = dayObj.mode_entry + \ '['+rightNow+','+mode+'],' dayObj.outside_entry = dayObj.outside_entry + \ '['+rightNow+','+outside+'],' dayObj.put() else: # create entry newEntry = DailyTemperatureEntry( date = int(time.time()), temp_entry = '['+rightNow+','+temp+'],', target_entry = '['+rightNow+','+target+'],', furnace_entr
y = '['+rightNow+','+furnace+'],', room_entry = '['+rightNow+','+room+'],', mode_entry = '['+rightNow+','+mode+'],', outside_entry = '['+rightNow+','+outside+']
,' ) newEntry.put() self.response.headers.add_header("X-Raspberry-Pi-Data", temp +','+ \ target +','+ furnace + \ ','+ room +','+ mode + \ ','+ outside) the_target = db.GqlQuery( "SELECT * FROM TargetEntry ORDER BY date DESC LIMIT 1") template_values = { 'target' : the_target } path = os.path.join(os.path.dirname(__file__), 'target.html') self.response.write(template.render(path, template_values)) else: self.error(500) class Submit(webapp.RequestHandler): def post(self): user = users.get_current_user() valid_users = db.GqlQuery( "SELECT * FROM SettingsEntry LIMIT 1")[0].valid_users_entry if user and user.nickname() in valid_users and \ self.request.get('target_temperature'): self.response.write('<html><head><meta http-equiv="refresh" ' + \ 'content="5; url=https://furnaceathome.appspot.com/t"></head><body>') target_temperature = \ int(cgi.escape(self.request.get('target_temperature'))) target_start_minutes = \ int(cgi.escape(self.request.get('target_start_minutes'))) target_held_minutes = \ int(cgi.escape(self.request.get('target_held_minutes'))) errors = 0 if 0 <= target_temperature <= 22: self.response.write( \ 'will set target to %s &deg;C</br>' % target_temperature) else: self.response.write( \ 'invalid temperature: %s</br></body></html>' % target_temperature) errors+=1 if errors == 0 and 0 <= target_start_minutes <= 120: self.response.write( \ 'to be reached in %s minutes</br>' % target_start_minutes) else: self.response.write( \ 'invalid time span: %s</br></body></html>' % target_start_minutes) errors+=1 if errors == 0 and 5 <= target_held_minutes <= 120: self.response.write('for %s minutes' % target_held_minutes) else: self.response.write( \ 'invalid duration: %s</br></body></html>' % target_held_minutes) errors+=1 if errors == 0: self.response.write('</body></html>') recent_record = TargetEntry.gql("WHERE date > 0 ORDER BY date DESC") if recent_record.count()!=0: #update entry targetObj = recent_record[0] targetObj.date = 
timegm(datetime.datetime.now().utctimetuple()) targetObj.target_temperature_entry = target_temperature targetObj.target_start_minutes_entry = target_start_minutes targetObj.target_held_minutes_entry = target_held_minutes targetObj.target_executed = False targetObj.put() else: #create entry newEntry = TargetEntry( date = int(time.time()), target_temperature_entry = target_temperature, target_start_minutes_entry = target_start_minutes, target_held_minutes_entry = target_held_minutes, target_executed = False ) newEntry.put() self.response.headers.add_header("X-Raspberry-Pi-Data", target_temperature + ',' + target_start_minutes + \ ',' + target_held_minutes) elif user and user.nickname() in valid_users and \ self.request.get('default_temp'): default_temperature=int(cgi.escape(self.request.get('default_temp'))) default_temperature_mode = \ str(cgi.escape(self.request.get('default_temp_mode'))) recent_record = TargetEntry.gql("WHERE date > 0 ORDER BY date DESC") if recent_record.count()!=0: #update entry targetObj = recent_record[0] targetObj.default_temperature_entry = default_temperature targetObj.default_temperature_mode_entry = default_temperature_mode targetObj.put() else: #create entry newEntry = TargetEntry( default_temperature_entry = default_temperature, defau
from __future__ import print_function, absolute_import
import weakref


class PDroneCreator(object):
    """Subwindow that lists drone types in a tree for drag-and-drop.

    Selecting an entry places the dotted drone-type name on the clipboard's
    dragboard under the ``"drone"`` key.
    """

    def __init__(self, mainwindow, clipboard, title="drones"):
        """Create the subwindow and the tree widget hosted inside it."""
        self._mainwindow = mainwindow
        self._clipboard = clipboard
        self._subwin = mainwindow.newSubWindow(title)
        # Imported lazily to avoid a circular import at module load time.
        from . import PTree
        self._tree = PTree(self._subwin.wrapwidget(), self._select_drone)
        self._subwin.setWidget(self._tree.widget())

    def _select_drone(self, dronetype):
        """Tree callback: publish the selected drone type on the dragboard."""
        # The tree hands us the name as a tuple of path components.
        dotted = ".".join(dronetype)
        self._clipboard.set_dragboard_value("drone", dotted)

    def append(self, dronename):
        """Add a dotted drone name (e.g. ``"pkg.Drone"``) to the tree."""
        self._tree.append(tuple(dronename.split(".")))

    def remove(self, dronename):
        """Remove a previously appended dotted drone name from the tree."""
        self._tree.remove(tuple(dronename.split(".")))
# -*- coding:UTF-8 -*-
""" pyBox Modbus"""
# !/usr/bin/python
# Python:      3.5.2
# Platform:    Windows/ARMv7
# Author:      Heyn
# Program:     Modbus RTU & TCP
# History:     2017/02/14 V1.0.0 [Heyn]
#              2017/03/08 V1.0.1 [Heyn] Send return string.
#              2017/04/07 V1.0.2 [Heyn] Redesign PBoxModbus class functions.
#              2017/04/10 V1.0.3 [Heyn] Bug fixe import imx6_ixora_led as led
# Windows(X86) Platform: You should have modbus.dll and pymodbus.pyd
# Linux or ARM Platform: You should have modbus.so and pymodbus.cpython-35m-arm-linux-gnueabihf.so
import sys
import pymodbus

# The LED helper only exists on the ARM (i.MX6 Ixora) board; on other
# platforms every led.ioctl call below is guarded by a platform check.
if sys.platform == 'linux':
    import imx6_ixora_led as led


class PBoxModbus:
    """Pbox Modbus Class.

    Thin wrapper around the native ``pymodbus`` extension that adds
    board-LED status signalling on Linux (LED4: GREEN = last read OK,
    RED = connection/read failure).
    """

    def __init__(self):
        super(PBoxModbus, self).__init__()
        # True once newtcp()/newrtu() succeeded; read methods bail out early
        # while it is False.
        self.isopened = False
        self.platform = sys.platform

    def __del__(self):
        # Best-effort cleanup: release the native connection and turn both
        # LED colours off on the board.
        # NOTE(review): free_tcp() is called even when the connection was
        # opened with newrtu() or never opened — presumably pymodbus
        # tolerates that; confirm against the extension's source.
        self.isopened = False
        pymodbus.free_tcp()
        if self.platform == 'linux':
            led.ioctl(led.IXORA_LED4, led.GREEN, led.LOW)
            led.ioctl(led.IXORA_LED4, led.RED, led.LOW)

    def newtcp(self, addr='127.0.0.1', port=502):
        """New TCP for Modbus.

        Returns the (boolean) open state; lights the RED LED on failure.
        """
        print('[Modbus TCP] IP=%s:%d'%(addr, port))
        try:
            self.isopened = pymodbus.new_tcp(addr, port)
        except BaseException as err:
            self.isopened = False
            print(err)
        if (self.platform == 'linux') and (self.isopened is False):
            led.ioctl(led.IXORA_LED4, led.RED, led.HIGH)
        return self.isopened

    def newrtu(self, dev='/dev/ttymxc1'):
        """New RTU for Modbus.

        Returns the (boolean) open state; lights the RED LED on failure.
        """
        print('[Modbus RTU] Port=%s'%(dev))
        try:
            self.isopened = pymodbus.new_rtu(dev)
        except BaseException as err:
            self.isopened = False
            print(err)
        if (self.platform == 'linux') and (self.isopened is False):
            led.ioctl(led.IXORA_LED4, led.RED, led.HIGH)
        return self.isopened

    def settimeout(self, sec=0, msc=500):
        """Set the response timeout.

        sec: seconds part.
        msc: sub-second part (passed as the second argument to
             pymodbus.set_timeout — presumably microseconds per the
             commented signature below; TODO confirm units).
        """
        # set_timeout(seconds, microseconds = us)
        pymodbus.set_timeout(sec, msc)    # default timeout=500ms

    def setslave(self, addr=1):
        """Set modbus slave address.

        Returns None when no connection is open, otherwise the boolean
        result of pymodbus.set_slave (False on error).
        """
        if self.isopened is False:
            return None
        ret = False
        try:
            ret = pymodbus.set_slave(addr)
        except BaseException as err:
            print(err)
        return ret

    def readstring(self, readlist, size=1):
        """
        Read String from Device.
        readlist = [function code, address, data type]

        Returns the registers decoded as an ASCII string (one character per
        register value, trailing NULs stripped), or None when the device is
        not open or the read fails.
        """
        if self.isopened is False:
            return None
        try:
            ret = pymodbus.read_registers(readlist[0:3], size)
        except BaseException as err:
            # Read failed: GREEN off, RED on.
            if self.platform == 'linux':
                led.ioctl(led.IXORA_LED4, led.GREEN, led.LOW)
                led.ioctl(led.IXORA_LED4, led.RED, led.HIGH)
            print(err)
            return None
        else:
            # Read OK: RED off, GREEN on.
            if self.platform == 'linux':
                led.ioctl(led.IXORA_LED4, led.RED, led.LOW)
                led.ioctl(led.IXORA_LED4, led.GREEN, led.HIGH)
            else:
                pass
        # And each hexadecimal number into ASCII code
        return ''.join((lambda v: [chr(i) for i in v])(ret)).strip('\x00')

    def readregs(self, readlist, size=1):
        """
        Read Data from Device.
        readlist = [function code, address, data type]

        Returns the raw register list, or None when the device is not open
        or the read fails. Same LED signalling as readstring().
        """
        if self.isopened is False:
            return None
        try:
            retlist = pymodbus.read_registers(readlist[0:3], size)
        except BaseException as err:
            if self.platform == 'linux':
                led.ioctl(led.IXORA_LED4, led.GREEN, led.LOW)
                led.ioctl(led.IXORA_LED4, led.RED, led.HIGH)
            print(err)
            return None
        else:
            if self.platform == 'linux':
                led.ioctl(led.IXORA_LED4, led.RED, led.LOW)
                led.ioctl(led.IXORA_LED4, led.GREEN, led.HIGH)
            else:
                pass
        return retlist

# Usage example (kept from the original author):
# if __name__ == '__main__':
#     MODBUS = PBoxModbus()
#     print(MODBUS.newtcp())
#     print(MODBUS.readregs([3, 1, 'U16']))
"""XKNX vers
ion.""" __version__ = "0.1
9.2.dev"
'CapsStyle', 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu', 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem', 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError', 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject', 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter', 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher', 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference', 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType', 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter', 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent', 'IBitma
pDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput' 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable', 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int', 'InteractiveObj
ect', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent', 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation', 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection', 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent', 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent', 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping', 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy', 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample', 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError', 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject', 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel', 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite', 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState', 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet', 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField', 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign', 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform', 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest', 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError', 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket', 'XMLUI'), suffix=r'\b'), Name.Builtin), (words(( 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN', 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion', 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent', 'unescape'), suffix=r'\b'), Name.Function), (r'[$a-zA-Z_]\w*', 
Name.Other), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), ] } class ActionScript3Lexer(RegexLexer): """ For ActionScript 3 source code. .. versionadded:: 0.11 """ name = 'ActionScript 3' aliases = ['as3', 'actionscript3'] filenames = ['*.as'] mimetypes = ['application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3'] identifier = r'[$a-zA-Z_]\w*' typeidentifier = identifier + '(?:\.<\w+>)?' flags = re.DOTALL | re.MULTILINE tokens = { 'root': [ (r'\s+', Text), (r'(function\s+)(' + identifier + r')(\s*)(\()', bygroups(Keyword.Declaration, Name.Function, Text, Operator), 'funcparams'), (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' + typeidentifier + r')', bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text, Keyword.Type)), (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)', bygroups(Keyword, Text, Name.Namespace, Text)), (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()', bygroups(Keyword, Text, Keyword.Type, Text, Operator)), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex), (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)), (r'(case|default|for|each|in|while|do|break|return|continue|if|else|' r'throw|try|catch|with|new|typeof|arguments|instanceof|this|' r'switch|import|include|as|is)\b', Keyword), (r'(class|public|final|internal|native|override|private|protected|' r'static|import|extends|implements|interface|intrinsic|return|super|' r'dynamic|function|const|get|namespace|package|set)\b', Keyword.Declaration), (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b', Keyword.Constant), (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|' r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|' r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|' 
r'unescape)\b', Name.Function), (identifier, Name), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator), ], 'funcparams': [ (r'\s+', Text), (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' + typeidentifier + r'|\*)(\s*)', bygroups(Text, Punctuation, Name, Text, Operator, Text, Keyword.Type, Text), 'defval'), (r'\)', Operator, 'type') ], 'type': [ (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)', bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'), (r'\s+', Text, '#pop:2'), default('#pop:2') ], 'defval': [ (r'(=)(\s*)([^(),]+)(\s*)(,?)', bygroups(Operator, Text, using(this), Text, Operator), '#pop'), (r',', Operator, '#pop'), default('#pop') ] } def analyse_text(text): if re.match(r'\w+\s*:\s*\w', text): return 0.3 return 0 class MxmlLexer(RegexLexer): """ For MXML markup. Nested AS3 in <script> tags is highlighted by the appropriate lexer. .. versionadded:: 1.1 """ flags = re.MULTILINE | re.DOTALL name = 'MXML' aliases = ['mxml'] filenames = ['*.mxml'] mimetimes = ['text/xml', 'application/xml'] tokens = { 'root': [ ('[^<&]+', Text), (r'&\S*?;', Name.Entity), (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)', bygroups(String, using(ActionScript3Lexer), String)), ('<!--', Comment, 'comment'), (r'<\?.*?\?>', Comment.Preproc), ('<![^>]*>', Comment.Preproc), (r'<\s*
from basetest import * from zfs_autobackup.LogStub import LogStub from zfs_autobackup.ExecuteNode import ExecuteError class TestZfsNode(unittest2.TestCase): def setUp(self): prepare_zpools() # return super().setUp() def test_consistent_snapshot(self): logger = LogStub() description = "[Source]" node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description) with self.subTest("first snapshot"): node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test",exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=200000), "test-20101111000001", 100000) r = shelltest("zfs list -H -o name -r -t all " + TEST_POOLS) self.assertEqual(r, """ test_source1 test_source1/fs1 test_source1/fs1@test-20101111000001 test_source1/fs1/sub test_source1/fs1/sub@test-20101111000001 test_source2 test_source2/fs2 test_source2/fs2/sub test_source2/fs2/sub@test-20101111000001 test_source2/fs3 test_source2/fs3/sub test_target1 """) with self.subTest("second snapshot, no changes, no snapshot"): node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test",exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=200000), "test-20101111000002", 1) r = shelltest("zfs list -H -o name -r -t all " + TEST_POOLS) self.assertEqual(r, """ test_source1 test_source1/fs1 test_source1/fs1@test-20101111000001 test_source1/fs1/sub test_source1/fs1/sub@test-20101111000001 test_source2 test_source2/fs2 test_source2/fs2/sub test_source2/fs2/sub@test-20101111000001 test_source2/fs
3 test_source2/fs3/sub test_target1 """) with self.subTest("second snapshot, no changes, empty snapshot"): node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=200000), "test-20101111000002", 0)
r = shelltest("zfs list -H -o name -r -t all " + TEST_POOLS) self.assertEqual(r, """ test_source1 test_source1/fs1 test_source1/fs1@test-20101111000001 test_source1/fs1@test-20101111000002 test_source1/fs1/sub test_source1/fs1/sub@test-20101111000001 test_source1/fs1/sub@test-20101111000002 test_source2 test_source2/fs2 test_source2/fs2/sub test_source2/fs2/sub@test-20101111000001 test_source2/fs2/sub@test-20101111000002 test_source2/fs3 test_source2/fs3/sub test_target1 """) def test_consistent_snapshot_prepostcmds(self): logger = LogStub() description = "[Source]" node = ZfsNode(snapshot_time_format="test", hold_name="test", logger=logger, description=description, debug_output=True) with self.subTest("Test if all cmds are executed correctly (no failures)"): with OutputIO() as buf: with redirect_stdout(buf): node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=1), "test-1", 0, pre_snapshot_cmds=["echo pre1", "echo pre2"], post_snapshot_cmds=["echo post1 >&2", "echo post2 >&2"] ) self.assertIn("STDOUT > pre1", buf.getvalue()) self.assertIn("STDOUT > pre2", buf.getvalue()) self.assertIn("STDOUT > post1", buf.getvalue()) self.assertIn("STDOUT > post2", buf.getvalue()) with self.subTest("Failure in the middle, only pre1 and both post1 and post2 should be executed, no snapshot should be attempted"): with OutputIO() as buf: with redirect_stdout(buf): with self.assertRaises(ExecuteError): node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=1), "test-1", 0, pre_snapshot_cmds=["echo pre1", "false", "echo pre2"], post_snapshot_cmds=["echo post1", "false", "echo post2"] ) print(buf.getvalue()) self.assertIn("STDOUT > pre1", buf.getvalue()) self.assertNotIn("STDOUT > pre2", buf.getvalue()) self.assertIn("STDOUT > post1", buf.getvalue()) self.assertIn("STDOUT > 
post2", buf.getvalue()) with self.subTest("Snapshot fails"): with OutputIO() as buf: with redirect_stdout(buf): with self.assertRaises(ExecuteError): #same snapshot name as before so it fails node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=1), "test-1", 0, pre_snapshot_cmds=["echo pre1", "echo pre2"], post_snapshot_cmds=["echo post1", "echo post2"] ) print(buf.getvalue()) self.assertIn("STDOUT > pre1", buf.getvalue()) self.assertIn("STDOUT > pre2", buf.getvalue()) self.assertIn("STDOUT > post1", buf.getvalue()) self.assertIn("STDOUT > post2", buf.getvalue()) def test_getselected(self): # should be excluded by property shelltest("zfs create test_source1/fs1/subexcluded") shelltest("zfs set autobackup:test=false test_source1/fs1/subexcluded") # should be excluded by being unchanged shelltest("zfs create test_source1/fs1/unchanged") shelltest("zfs snapshot test_source1/fs1/unchanged@somesnapshot") logger = LogStub() description = "[Source]" node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description) s = pformat(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=True, min_change=1)) print(s) # basics self.assertEqual(s, """[(local): test_source1/fs1, (local): test_source1/fs1/sub, (local): test_source2/fs2/sub]""") def test_validcommand(self): logger = LogStub() description = "[Source]" node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description) with self.subTest("test invalid option"): self.assertFalse(node.valid_command(["zfs", "send", "--invalid-option", "nonexisting"])) with self.subTest("test valid option"): self.assertTrue(node.valid_command(["zfs", "send", "-v", "nonexisting"])) def test_supportedsendoptions(self): logger = LogStub() description = 
"[Source]" node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description) # -D propably always supported self.assertGreater(len(node.supported_send_options), 0) def test_supportedrecvoptions(self): logger = LogStub() description = "[Source]" # NOTE: this could hang via ssh if we dont close filehandles properly. (which was a previous bug) node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description, ssh_to='localhost') self.assertIsInstance(node.supported_recv_options, list) if __name__ == '__main__': unittest.main()
import time

# Project Euler style search for optimum "special sum sets"; the full
# search takes about 60 s when executed as a script.


def countBinOnes(x):
    """Return the number of set bits in the non-negative integer ``x``."""
    # bin() + count is the idiomatic stdlib popcount for pre-3.10 Python.
    return bin(x).count('1')


def isSpecialSumSet(A):
    """Return True when the list ``A`` extends to a "special sum set".

    A set is special when, for any two disjoint non-empty subsets B and C:
      i)  sum(B) != sum(C), and
      ii) the subset with more elements has the larger sum.

    NOTE: this check is *incremental* - it only compares subset pairs where
    exactly one side contains the last element of ``A`` (masks ``j`` below
    always have the top bit set, masks ``i`` never do).  It assumes
    ``A[:-1]`` has already been verified special, which is how
    ``findSpecialOptimum`` uses it level by level.
    """
    N = (1 << len(A)) - 1
    # subset[i] holds the sum of the elements selected by bitmask i.
    subset = N * [None]
    for i in range(1, N):
        subset[i] = 0
        for j in range(len(A)):
            if (i >> j) & 1 == 1:
                subset[i] += A[j]
    # NOTE: combining the sum-table loop with the checking loop below was
    # found to be slower; cache locality outweighs earlier detection.
    for i in range(1, 1 << (len(A) - 1)):
        # just verify pairs that involve the last element (see docstring)
        for j in range(1 << (len(A) - 1), N):
            if i & j == 0:  # disjoint subsets only
                if subset[i] == subset[j]:
                    return False  # rule i fails
                if subset[i] > subset[j]:
                    if countBinOnes(i) < countBinOnes(j):
                        return False  # rule ii fails
                elif countBinOnes(i) > countBinOnes(j):
                    return False  # rule ii fails (subset[i] < subset[j] here)
    return True


# for loop is too ugly, recursion is beautiful
def findSpecialOptimum(a, pos):
    """Depth-first search for special sum sets, printing every complete hit.

    ``a`` is mutated in place; entries past ``pos`` are rewritten as the
    search descends.  The module-level ``upbound`` list caps the values
    tried at the first two positions.
    """
    if pos > 1:
        # a[0] + a[1] must exceed every single later element (rule ii),
        # which bounds the scan at this depth.
        while a[0] + a[1] > a[pos]:
            if isSpecialSumSet(a[:pos + 1]):
                if pos == len(a) - 1:
                    # Found a complete candidate set - report it.
                    print(a, sum(a))
                    return
                a[pos + 1] = a[pos] + 1
                findSpecialOptimum(a, pos + 1)
            a[pos] += 1
    else:
        while a[pos] <= upbound[pos]:  # the upbounding
            a[pos + 1] = a[pos] + 1
            findSpecialOptimum(a, pos + 1)
            a[pos] += 1
    return


# Caps for the first two search positions (see findSpecialOptimum).
upbound = [20, 36]

if __name__ == "__main__":
    # Guarded so importing this module no longer triggers the ~60 s search.
    start = time.time()
    Set = [11] * 7
    findSpecialOptimum(Set, 0)
    print('Time cost: %lf s.' % (time.time() - start))
from django.db.models import CharField, Value as V
from django.db.models.functions import Coalesce, Length, Upper
from django.test import TestCase
from django.test.utils import register_lookup

from .models import Author


class UpperBilateral(Upper):
    # bilateral=True makes the transform apply UPPER() to *both* sides of
    # a comparison, not only to the column.
    bilateral = True


class FunctionTests(TestCase):
    """Tests for database function expressions (Coalesce, Length, Upper)."""

    def test_nested_function_ordering(self):
        """Ordering by a function-of-a-function expression works both ways."""
        Author.objects.create(name='John Smith')
        Author.objects.create(name='Rhonda Simpson', alias='ronny')

        # Shorter of (alias or name) first: 'ronny' (5) < 'John Smith' (10).
        authors = Author.objects.order_by(Length(Coalesce('alias', 'name')))
        self.assertQuerysetEqual(
            authors, [
                'Rhonda Simpson',
                'John Smith',
            ],
            lambda a: a.name
        )

        # Same expression, descending.
        authors = Author.objects.order_by(Length(Coalesce('alias', 'name')).desc())
        self.assertQuerysetEqual(
            authors, [
                'John Smith',
                'Rhonda Simpson',
            ],
            lambda a: a.name
        )

    def test_func_transform_bilateral(self):
        """A bilateral transform upper-cases the lookup value as well."""
        with register_lookup(CharField, UpperBilateral):
            Author.objects.create(name='John Smith', alias='smithj')
            Author.objects.create(name='Rhonda')
            # 'john smith' is upper-cased on both sides, so it matches.
            authors = Author.objects.filter(name__upper__exact='john smith')
            self.assertQuerysetEqual(
                authors.order_by('name'), [
                    'John Smith',
                ],
                lambda a: a.name
            )

    def test_func_transform_bilateral_multivalue(self):
        """The bilateral transform applies to each member of an __in list."""
        with register_lookup(CharField, UpperBilateral):
            Author.objects.create(name='John Smith', alias='smithj')
            Author.objects.create(name='Rhonda')
            authors = Author.objects.filter(name__upper__in=['john smith', 'rhonda'])
            self.assertQuerysetEqual(
                authors.order_by('name'), [
                    'John Smith',
                    'Rhonda',
                ],
                lambda a: a.name
            )

    def test_function_as_filter(self):
        """A function expression may appear on the value side of a filter."""
        Author.objects.create(name='John Smith', alias='SMITHJ')
        Author.objects.create(name='Rhonda')
        self.assertQuerysetEqual(
            Author.objects.filter(alias=Upper(V('smithj'))),
            ['John Smith'], lambda x: x.name
        )
        self.assertQuerysetEqual(
            Author.objects.exclude(alias=Upper(V('smithj'))),
            ['Rhonda'], lambda x: x.name
        )
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES

from swgpy.object import *


def create(kernel):
    """Build the shared Tatooine 'desert demons camp' (medium) POI Building.

    The ``kernel`` argument is accepted but not used by this template.
    Returns the configured ``Building`` instance.
    """
    result = Building()

    result.template = "object/building/poi/shared_tatooine_desert_demons_camp_medium.iff"
    result.attribute_template_id = -1
    result.stfName("poi_n","base_poi_building")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
import unittest

from webtest import TestApp

import test_helper


class ErrorsControllerTests(unittest.TestCase):
    """Smoke tests: the custom error pages must themselves render cleanly."""

    def setUp(self):
        # One freshly wrapped WSGI app per test keeps the cases independent
        # (previously each test built its own app inline).
        self.app = TestApp(test_helper.get_app())

    def test_error_404(self):
        """GET /error/404 renders the 404 page with a 200 status."""
        # assertEqual (instead of a bare assert) survives `python -O` and
        # reports the actual status on failure.
        self.assertEqual(self.app.get('/error/404').status, '200 OK')

    def test_error_500(self):
        """GET /error/500 renders the 500 page with a 200 status."""
        self.assertEqual(self.app.get('/error/500').status, '200 OK')
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version and build information for plaso."""

__version__ = '1.1.1'

# Development (snapshot) builds append the snapshot date to the version.
VERSION_DEV = True
VERSION_DATE = '20140606'


def GetVersion():
  """Returns a version information for plaso."""
  if VERSION_DEV:
    # e.g. '1.1.1_20140606' for a development snapshot.
    return u'{}_{}'.format(__version__, VERSION_DATE)
  return __version__
import datetime
import logging
from unittest.mock import patch

from django.test import TestCase
from django.test.utils import override_settings

from konfera import models
from payments import utils
from payments.models import ProcessedTransaction


def make_payment(new_data):
    """Return a bank-transaction dict with defaults, overridden by new_data."""
    data = {
        'date': datetime.date(2015, 10, 5),
        'variable_symbol': '1234',
        'transaction_id': '1234',
        'amount': 0.0,
        'currency': 'EUR',
        'comment': '',
        'executor': '',
    }
    data.update(new_data)
    return data


# Suppress expected warning output from the code under test.
logging.disable(logging.WARNING)


class TestGetLastPayements(TestCase):

    @patch('django.utils.timezone.now', return_value=datetime.datetime(2016, 9, 29))
    @patch('fiobank.FioBank.period', return_value=[])
    @override_settings(FIO_BANK_TOKEN='fio_token')
    def test__get_last_payments(self, FioBankMockPeriod, timezone_mock):
        # The bank API must be queried for the 3 days up to mocked "now".
        data = utils._get_last_payments()
        self.assertEqual(data, [])
        FioBankMockPeriod.assert_called_with('2016-09-26', '2016-09-29')
        timezone_mock.assert_called_once_with()


class TestGetNotProcessedPayments(TestCase):

    def test_no_processed_payment_is_available(self):
        # Nothing recorded as processed: every payment passes through.
        payments = [
            make_payment({'transaction_id': '1'}),
            make_payment({'transaction_id': '2'}),
        ]
        self.assertEqual(
            list(utils._get_not_processed_payments(payments)),
            payments
        )

    def test_processed_payments_filtered(self):
        # Payment '2' is already processed and must be filtered out.
        payments = [
            make_payment({'transaction_id': '1'}),
            make_payment({'transaction_id': '2'}),
            make_payment({'transaction_id': '3'}),
        ]
        ProcessedTransaction.objects.create(transaction_id='2', amount=0)
        self.assertEqual(
            list(utils._get_not_processed_payments(payments)),
            [
                make_payment({'transaction_id': '1'}),
                make_payment({'transaction_id': '3'}),
            ]
        )


class TestGetPaymentsForOrder(TestCase):

    def setUp(self):
        self.order = models.Order.objects.create(price=200, discount=0)

    def test_no_payments(self):
        payments = []
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            []
        )

    def test_payments_for_different_orders(self):
        # Variable symbols matching no order: nothing is returned.
        payments = [
            make_payment({'variable_symbol': str(self.order.pk + 7)}),
            make_payment({'variable_symbol': str(self.order.pk + 13)}),
        ]
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            []
        )

    def test_payment_found_for_order(self):
        # Payments are matched to an order via its variable symbol.
        payments = [
            make_payment({'variable_symbol': self.order.variable_symbol}),
            make_payment({'variable_symbol': str(self.order.pk + 13)}),
        ]
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            [make_payment({'variable_symbol': self.order.variable_symbol})]
        )

    def test_multiple_payments_found_for_order(self):
        # Several payments may share the order's variable symbol.
        payments = [
            make_payment({'variable_symbol': self.order.variable_symbol}),
            make_payment({'variable_symbol': str(self.order.pk + 13)}),
            make_payment({'variable_symbol': self.order.variable_symbol}),
        ]
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            [
                make_payment({'variable_symbol': self.order.variable_symbol}),
                make_payment({'variable_symbol': self.order.variable_symbol}),
            ]
        )


class TestProcessPayment(TestCase):

    def test_attendee_paid_less(self):
        # 80 < 90 due (price - discount): the order becomes PARTLY_PAID.
        order = models.Order.objects.create(price=100, discount=10)
        payment = make_payment({'amount': 80, 'transaction_id': '7'})
        utils._process_payment(order, payment)
        self.assertEqual(order.amount_paid, 80)
        self.assertEqual(order.status, models.order.PARTLY_PAID)

    def test_attendee_paid_enough(self):
        # 5 already paid + 85 now covers the 90 due: the order becomes PAID.
        order = models.Order.objects.create(price=100, discount=10, amount_paid=5,
                                            status=models.order.PARTLY_PAID)
        payment = make_payment({'amount': 85, 'transaction_id': '7'})
        utils._process_payment(order, payment)
        self.assertEqual(order.amount_paid, 90)
        self.assertEqual(order.status, models.order.PAID)

    def test_payment_marked_as_processed(self):
        # Processing records the transaction so it is never applied twice.
        order = models.Order.objects.create(price=100, discount=10)
        payment = make_payment({'amount': 80, 'transaction_id': '7'})
        self.assertEqual(ProcessedTransaction.objects.count(), 0)
        utils._process_payment(order, payment)
        self.assertEqual(ProcessedTransaction.objects.count(), 1)
        self.assertEqual(ProcessedTransaction.objects.all()[0].transaction_id, '7')


class TestCheckPaymentsStatus(TestCase):

    def setUp(self):
        self.order1 = models.Order.objects.create(price=200, discount=0)
        self.order2 = models.Order.objects.create(price=200, discount=7)

    @patch('payments.utils._get_last_payments', return_value=[])
    def test_no_payments_available(self, mock_api_call):
        """ FioBank doesn't have any payments - no order status should be changed """
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(mock_api_call.call_count, 1)
        self.assertEqual(order1.status, models.order.AWAITING)
        self.assertEqual(order2.status, models.order.AWAITING)

    @patch('payments.utils._get_last_payments')
    def test_one_order_is_paid(self, mock_api_call):
        """ FioBank has a payment for order1 only - just order1's status changes """
        mock_api_call.return_value = [
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 200, 'transaction_id': '7'}),
        ]
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(mock_api_call.call_count, 1)
        self.assertEqual(order1.status, models.order.PAID)
        self.assertEqual(order2.status, models.order.AWAITING)

    @patch('payments.utils._get_last_payments')
    def test_all_orders_are_paid(self, mock_api_call):
        mock_api_call.return_value = [
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 200, 'transaction_id': '7'}),
            make_payment({'variable_symbol': self.order2.variable_symbol, 'amount': 200, 'transaction_id': '8'}),
        ]
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(mock_api_call.call_count, 1)
        self.assertEqual(order1.status, models.order.PAID)
        self.assertEqual(order2.status, models.order.PAID)

    @patch('payments.utils._get_last_payments')
    def test_order_is_paid_in_multiple_payments(self, mock_api_call):
        # order1: 150 + 50 fully covers 200; order2: 30 of 193 -> partly paid.
        mock_api_call.return_value = [
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 150, 'transaction_id': '7'}),
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 50, 'transaction_id': '79'}),
            make_payment({'variable_symbol': self.order2.variable_symbol, 'amount': 30, 'transaction_id': '80'}),
        ]
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(order1.status, models.order.PAID)
        self.assertEqual(order2.status, models.order.PARTLY_PAID)
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo module manifest: bridges event_crm and event_sale so leads created
# from event registrations carry the linked sale-order information.
{
    'name': 'Event CRM Sale',
    'version': '1.0',
    'category': 'Marketing/Events',
    'website': 'https://www.odoo.com/page/events',
    'description': "Add information of sale order linked to the registration for the creation of the lead.",
    'depends': ['event_crm', 'event_sale'],
    'data': [
        'views/event_lead_rule_views.xml',
    ],
    'installable': True,
    # auto_install: activated automatically once both dependencies are installed.
    'auto_install': True,
}
"eol": "2222-07-01" }, { "id": 178028, "sla": "security_fixes", "eol": "2222-07-01" } ], "type": "rpm", "active": True, "critical_path": False } ]) pdc.add_endpoint('component-branch-slas', 'GET', [ { "id": 178020, "sla": "bug_fixes", "branch": { "id": 89151, "name": "f26", "global_component": "iwhd", "type": "rpm", "critical_path": False, "active": True }, "eol": "2222-07-01" }, { "id": 178028, "sla": "security_fixes", "branch": { "id": 89151, "name": "f26", "global_component": "iwhd", "type": "rpm", "critical_path": False, "active": True }, "eol": "2222-07-01" } ]) pdc.add_endpoint('component-branch-slas/178020', 'PATCH', 'ok') pdc.add_endpoint('component-branch-slas/178028', 'PATCH', 'ok') idx = '2017-b1adac6d-64e9-406f-a1f4-4d3e57105649' msg = pdcupdater.utils.get_fedmsg(idx) self.handler.handle(pdc, msg) expected_keys = [ 'component-branches', 'component-branch-slas/178020', 'component-branch-slas/178028' ] self.assertEquals(pdc.calls.keys(), expected_keys) @mock_pdc def test_can_process_retire_msg_already_retired(self, pdc): pdc.add_endpoint('component-branches', 'GET', [ { "id": 155867, "global_component": "obexftp", "name": "f26", "slas": [ { "id": 310591, "sla": "bug_fixes", "eol": "2017-06-28" }, { "id": 310602, "sla": "security_fixes", "eol": "2017-06-28" } ], "type": "rpm", "active": False, "critical_path": False } ]) idx = '2017-3f490f4d-7612-4881-80cb-e1a941d6d700' msg = pdcupdater.utils.get_fedmsg(idx) self.handler.handle(pdc, msg) expected_keys = [ 'component-branches' ] self.assertEquals(pdc.calls.keys(), expected_keys) @mock_pdc def test_audit(self, pdc): pdc.add_endpoint('component-branches', 'GET', [ { "id": 155867, "global_component": "obexftp", "name": "f26", "slas": [ { "id": 310591, "sla": "bug_fixes", "eol": "2017-06-28" }, { "id": 310602, "sla": "security_fixes", "eol": "2017-06-28" } ], "type": "rpm", "active": False, "critical_path": False }, { "id": 323149, "global_component": "python",
"name": "f26", "slas": [ {
"id": 646309, "sla": "security_fixes", "eol": "2222-07-01" }, { "id": 646303, "sla": "bug_fixes", "eol": "2222-07-01" } ], "type": "module", "active": True, "critical_path": False } ]) with mock.patch('requests.Session') as mock_requests_session: mock_rv_found = mock.Mock() mock_rv_found.status_code = 200 mock_rv_not_found = mock.Mock() mock_rv_not_found.status_code = 404 mock_session_rv = mock.Mock() mock_session_rv.head.side_effect = [mock_rv_found, mock_rv_not_found] mock_requests_session.return_value = mock_session_rv present, absent = self.handler.audit(pdc) self.assertEquals(present, set()) self.assertEquals(absent, set()) @mock_pdc def test_audit_retired_in_pdc_not_git(self, pdc): pdc.add_endpoint('component-branches', 'GET', [ { "id": 155867, "global_component": "obexftp", "name": "f26", "slas": [ { "id": 310591, "sla": "bug_fixes", "eol": "2017-06-28" }, { "id": 310602, "sla": "security_fixes", "eol": "2017-06-28" } ], "type": "rpm", "active": False, "critical_path": False }, { "id": 323149, "global_component": "python", "name": "f26", "slas": [ { "id": 646309, "sla": "security_fixes", "eol": "2222-07-01" }, { "id": 646303, "sla": "bug_fixes", "eol": "2222-07-01" } ], "type": "module", "active": True, "critical_path": False } ]) with mock.patch('requests.Session') as mock_requests_session: mock_rv_not_found = mock.Mock() mock_rv_not_found.status_code = 404 mock_session_rv = mock.Mock() mock_session_rv.head.return_value = mock_rv_not_found mock_requests_session.return_value = mock_session_rv present, absent = self.handler.audit(pdc) self.assertEquals(present, {'rpm/obexftp#f26'}) self.assertEquals(absent, set()) @mock_pdc def test_audit_retired_in_git_not_pdc(self, pdc): pdc.add_endpoint('component-branches', 'GET', [ { "id": 155867, "global_component": "obexftp", "name": "f26", "slas": [ { "id": 310591, "sla": "bug_fixes", "eol": "2222-06-28" }, { "id": 310602, "sla": "security_fixes", "eol": "2222-06-28" } ], "type": "rpm", "active": True, 
"critical_path": False }, { "id": 323149, "global_component": "python", "name": "f26", "slas": [ { "id": 646309, "sla": "security_fixes", "eol": "2222-07-01" }, { "id": 646303, "sla": "bug_fixes", "eol": "2222-07-01" } ], "type": "module", "active": True, "critical_path": False } ]) with mock.patch('requests.Session') as mock_requests_session: mock_rv_not_found = mock.Mock()
dictionary = {"GEEKS", "FOR", "QUIZ", "GO"} N, M = 3, 3 board = [['G','I','Z'], ['U','E','K'], ['Q','S','E']] class Graph: class Vertex: def __int__(self, v): self.val = v self.adj = [] def findWords(board=board): def search(node, word, visited): if node not in visited: visited.a
ppend(node) word.append(node.val) for adjNode in node.adj: search(node, word, visited) if word not in dictionary: word.pop() result = [] g = creategraph(board) for u in g.vertices(): visited = [] visited.append(u) word = "" for adj in u.adj: search(adj, word, visited)
if word in dictionary: result.append(word) return result if __name__=="__main__": print(findWords())
# -*- encoding:utf-8 -*-
from flask import Flask

app = Flask(__name__)


@app.route('/config')
def hello_world():
    """Respond to GET /config with a fixed greeting string."""
    return 'Hello World!'


if __name__ == '__main__':
    # 0.0.0.0 exposes the dev server on all interfaces, port 8080.
    app.run(host="0.0.0.0", port=8080)
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup APP = ['aggregate
.py'] DATA_FILES = [] OPTIONS = {'argv_emulatio
n': True} setup( app=APP, data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
class NamespaceAlreadyRegistered(Exception):
    """Raised on an attempt to register a namespace that already exists."""


class NoParentFound(Exception):
    """Raised when a parent lookup comes up empty."""
if subgroup == 'END_GROUP': break elif line[1] == '(': while line: line=lines.next() line = line.replace('"','').strip() subvalue+=line if line[-1:]==';': subvalue=eval(subvalue.strip(';')) break else:subvalue=subvalue.strip(';') subdata[subgroup]=subvalue hdrdata[group]=subdata else: hdrdata[group]=value.strip(');') line=lines.next() return hdrdata # reading the metadata in a dictionary imddata = read_metadata (metadata_file) def acquireMetadata(band): ref_MRF = float(imddata['RADIOMETRIC_RESCALING']['REFLECTANCE_MULT_BAND_' + str(band)]) ref_AMF = float(imddata['RADIOMETRIC_RESCALING']['REFLECTANCE_ADD_BAND_' + str(band)]) metadatalist = [0, 0, ref_MRF, ref_AMF] return metadatalist def acquireThrmalMetadata(band): radi_MRF = float(imddata['RADIOMETRIC_RESCALING']['RADIANCE_MULT_BAND_' + str(band)]) radi_AMF = float(imddata['RADIOMETRIC_RESCALING']['RADIANCE_ADD_BAND_' + str(band)]) K1 = float(imddata['TIRS_THERMAL_CONSTANTS']['K1_CONSTANT_BAND_' + str(band)]) K2 = float(imddata['TIRS_THERMAL_CONSTANTS']['K2_CONSTANT_BAND_' + str(band)]) acquireThrmalMetadata = [radi_MRF, radi_AMF, K1, K2] return acquireThrmalMetadata SunElevation = float(imddata['IMAGE_ATTRIBUTES']['SUN_ELEVATION']) img_date = imddata['PRODUCT_METADATA']['DATE_ACQUIRED'] img_time = imddata['PRODUCT_METADATA']['SCENE_CENTER_TIME'] solar_zenith_angle = float(90.00) - SunElevation solar_zenith_angle_radians = math.radians(solar_zenith_angle) SunElevation_radians = math.radians(SunElevation) print 'Acquisition date : ' + img_date print 'Acquisition time : ' + img_time print 'SunElevation :' + str(SunElevation) # creating the product name and output dir year = img_date.split('-', 1)[0] month = img_date.split('-', 1)[1] month = month.split('-', 1)[0] day = img_date.split('-', 1)[1] day = day.split('-', 1)[1] product_date = day + '.' + month + '.' 
+ year[2:4] product = 'binned_' + product_date output_dir = os.path.join (data_dir, product) print output_dir if not os.path.exists(output_dir): os.makedirs(output_dir) ######## raster processing functions --------------------- #---------------------------------------------------------- # function to read the image bands def return_band(image_file_name, band_number): image = image_file_name dataset = gdal.Open(image,GA_ReadOnly) if dataset is None: print "Could not open " + dataset sys.exit(1) geoTransform = dataset.GetGeoTransform() proj = dataset.GetProjection() rasterband = dataset.GetRasterBand(band_number) type(rasterband) ncol = dataset.RasterXSize nrow = dataset.RasterYSize band = rasterband.ReadAsArray(0,0,ncol,nrow) band = band.astype(numpy.uint16) return band,geoTransform,proj,ncol,
nrow dataset = None band = None # will return '/media/Arc/eo_archive_proc/VHR_SAT_IMAGE/SPOT6/20140704_SPOT/binned_SPOT6_20140704/B0.binned_SPOT6_20140704.tif' # the function input defined in the beginining: out_put_dir, product just we have to change the product name..... def product_output_name(out_put_dir,product,Product_name): product_dir = os.path.join(out_put_dir,product) product_output_name = Product_name+'.'+product+'.tif' product_path_file = os.path.join(product_dir,pr
oduct_output_name) return product_path_file def output_file(output_name,output_array,geoTransform,proj,ncol,nrow): format = "GTiff" driver = gdal.GetDriverByName( format ) outDataset = driver.Create(output_name,ncol,nrow,1,GDT_Float32) outBand = outDataset.GetRasterBand(1) outBand.WriteArray(output_array,0,0) outBand.FlushCache() outBand.SetNoDataValue(fillval) outDataset.SetGeoTransform(geoTransform ) outDataset.SetProjection(proj) def normalize(band1,band2): var1 = numpy.subtract(band1,band2) var2 = numpy.add(band1,band2) numpy.seterr(all='ignore') ndvi = numpy.divide(var1,var2) return ndvi # reading DN bands, extracting metadata and calculating radiance and reflactance and writing it to the folder # i.e, band_name = B1 def calculate_reflectance(band_name, solar_zenith_angle_radians, DN, output_dir): img_name = 'Band_' + band_name band_metadata = acquireMetadata (band_name[1:]) ref_MRF = float(band_metadata[2]) ref_AMF = float(band_metadata[3]) print 'calculating ' + band_name + ' reflactance...' reflectance = (ref_MRF * DN + ref_AMF) / (math.cos(solar_zenith_angle_radians)) reflectance_name = product_output_name(data_dir,product,band_name) print 'Masking with Quality flag...' band_BQA,geoTransform,proj,ncol,nrow = return_band(Band_BQA,1) for i in BQF: qc = numpy.where(band_BQA==i,1,0) numpy.putmask(reflectance, qc, fillval) output_file(reflectance_name,reflectance,geoTransform,proj,ncol,nrow) reflactance = None band_BQA = None # calculating the ndvi def calculate_ndvi(solar_zenith_angle_radians, red, nir, output_dir): print 'reading RED band....' band_metadata_B4 = acquireMetadata(4) ref_MRF_B4 = float(band_metadata_B4[2]) ref_AMF_B4 = float(band_metadata_B4[3]) print 'calculating reflactance...' reflectance_B4 = (ref_MRF_B4 * Band_B4 + ref_AMF_B4) / (math.cos(solar_zenith_angle_radians)) print 'reading NIR band....' 
band_metadata_B5 = acquireMetadata(5) ref_MRF_B5 = float(band_metadata_B5[2]) ref_AMF_B5 = float(band_metadata_B5[3]) print 'calculating reflactance...' reflectance_B5 = (ref_MRF_B5 * Band_B5 + ref_AMF_B5) / (math.cos(solar_zenith_angle_radians)) ndvi_name = product_output_name(data_dir,product,'ndvi') print "Calculating ndvi...." ndvi = normalize(reflectance_B5, reflectance_B4) min_ndvi_mask = numpy.where(ndvi < min_ndvi, 1, 0) max_ndvi_mask = numpy.where(ndvi > max_ndvi, 1, 0) numpy.putmask(ndvi, min_ndvi_mask, min_ndvi) numpy.putmask(ndvi, max_ndvi_mask, max_ndvi) #print 'Masking with Quality flag...' band_BQA,geoTransform,proj,ncol,nrow = return_band(Band_BQA,1) for i in BQF: qc = numpy.where(band_BQA==i,1,0) numpy.putmask(ndvi, qc, fillval) output_file(ndvi_name,ndvi,geoTransform,proj,ncol,nrow) reflectance_B4 = None reflectance_B5 = None mdvi = None band_BQA = None # Conversion to At-Satellite Brightness Temperature (K) def calculate_brightness_temperature(band_name, solar_zenith_angle_radians, DN, output_dir): img_name = 'Band_' + band_name print 'reading....' + img_name band_metadata = acquireThrmalMetadata (band_name[1:]) radi_MRF = float(band_metadata[0]) radi_AMF = float(band_metadata[1]) K1 = float(band_metadata[2]) K2 = float(band_metadata[3]) print 'calculating Radiance...' radiance = (DN * radi_MRF) + radi_AMF print 'calculating Satellite Brightness Temperature...' TB = K2 / (numpy.log((K1 / radiance) +1)) print 'Masking with Quality flag...' band_BQA,geoTransform,proj,ncol,nrow = return_band(Band_BQA,1) for i in BQF: qc = numpy.where(band_BQA==i,1,0) numpy.putmask(TB, qc, fillval) print 'writing output...' 
reflectance_name = product_output_name(data_dir,product,band_name) output_file(reflectance_name,TB,geoTransform,proj,ncol,nrow) radiance = None if __name__ == "__main__": Band_B1,geoTransform,proj,ncol,nrow = return_band(Band_B1,1) calculate_reflectance('B1', solar_zenith_angle_radians, Band_B1, output_dir) Band_B1 = None Band_B2,geoTransform,proj,ncol,nrow = return_band(Band_B2,1) calculate_reflectance('B2', solar_zenith_angle_radians, Band_B2, output_dir) Band_B2 = None Band_B3,geoTransform,proj,ncol,nrow = return_band(Band_B3,1) calculate_reflectance('B3', solar_zenith_angle_radians, Band_B3, output_dir) Band_B3 = None Band_B4,geoTransform,proj,ncol,nrow = return_band(Band_B4,1) calculate_re
"""Discover the local unittest suite, run it under cProfile, and print stats."""
import cProfile
import pstats
import unittest

if __name__ == '__main__':
    # Collect every test module at or below the current directory.
    suite = unittest.TestLoader().discover('.')

    def runtests():
        # Bump verbosity to 2 to see each individual test as it runs.
        runner = unittest.TextTestRunner(verbosity=1, buffer=True)
        runner.run(suite)

    # cProfile.run() exec's the command string in this module's namespace,
    # so ``runtests`` must remain a module-level name with exactly this name.
    # (Per the cProfile docs, ``sort`` has no effect once ``filename`` is
    # supplied; it is kept only to preserve the original call.)
    cProfile.run(
        'runtests()',
        filename='test_cprofile_results.log',
        sort='cumtime')

    # Load the dump back and show the 100 most expensive calls by cumulative time.
    stats = pstats.Stats('test_cprofile_results.log')
    stats.strip_dirs().sort_stats('cumulative').print_stats(100)
'''
Analysis plugin for supporting WorkspaceEmulators during analysis pass.
Finds and connects Switch Cases, most specifically from Microsoft.
'''
import envi
import envi.archs.i386 as e_i386

import vivisect
import vivisect.analysis.generic.codeblocks as vagc


def analyzeJmp(amod, emu, op, starteip):
    '''
    Top level logic

    Emulation-time hook: if the jmp at ``starteip`` matches the MS
    switch-case pattern, materialize the cases into the workspace.
    '''
    test, ctx = testSwitch(emu.vw, op, starteip, emu)
    if test:
        # NOTE(review): ``output`` (the case tracker from makeSwitch) is
        # assigned but never used here.
        output = makeSwitch(emu.vw, starteip, ctx['offarraybase'], ctx['indiroffbase'])


def testSwitch(vw, op, vajmp, emu=None):
    '''
    identifies and enumerates microsoft's switch-case methods.

    Returns (False, None) on any mismatch, or
    (True, {'indiroffbase': ..., 'offarraybase': ...}) when the
    instruction sequence before the indirect jmp matches the pattern:
        movzx reg, byte [indiroffbase + idx]   (optional, double-deref form)
        mov   reg, [offarraybase + reg*4]
        add   reg, imagebase
        jmp   reg
    '''
    if not (op.iflags & envi.IF_BRANCH):
        # vw.verbprint( "indirect branch is not correct type")
        return False,None

    # Walk backwards one location at a time from the jmp.
    backone = vw.getLocation(vajmp-1)
    if backone == None:
        #vw.verbprint( "previous instruction isn't defined")
        return False,None
    backtwo = vw.getLocation(backone[0]-1)
    if backtwo == None:
        #vw.verbprint( "two previous instruction isn't defined")
        return False,None

    filename = vw.getMemoryMap(vajmp)[3]
    imagebase = vw.getFileMeta(filename, 'imagebase')

    # Previous instruction must rebase the table entry: add reg, imagebase.
    op1 = vw.parseOpcode(backone[0])
    if op1.mnem != 'add':
        #vw.verbprint( "previous instruction isn't an 'add'")
        return False,None

    baseoper = op1.opers[1]
    if not isinstance(baseoper, e_i386.i386RegOper):
        #vw.verbprint( "baseoper is not an i386RegOper: %s" % repr(baseoper))
        return False,None

    # this is a weak analysis failure, but a powerful confirmation.
    # (Only possible when an emulator is supplied: check the added register
    # actually holds the imagebase at this point in execution.)
    if emu != None:
        regbase = op1.getOperValue(1, emu)
        if regbase != imagebase:
            vw.verbprint( "reg != imagebase")
            return False,None

    # now check the instruction before that
    # (must load a dword from the offset array: mov reg, [disp + idx*4])
    op2 = vw.parseOpcode(backtwo[0])
    if op2.mnem != 'mov':
        vw.verbprint( "2nd previous instruction isn't an 'mov'")
        return False,None

    arrayoper = op2.opers[1]
    if not (isinstance(arrayoper, e_i386.i386SibOper) and arrayoper.scale == 4):
        vw.verbprint( "arrayoper is not an i386SibOper of size 4: %s" % repr(baseoper))
        return False,None

    # The register loaded from the table must be the one rebased by the add.
    ao_reg = arrayoper.reg & e_i386.RMETA_NMASK
    if ao_reg != baseoper.reg:
        vw.verbprint( "arrayoper.reg != baseoper.reg: %s != %s" % (ao_reg, baseoper.reg))
        return False,None

    offarraybase = arrayoper.disp

    #initial check of the array.  should point to the next va.  we'll scrape it up later
    # NOTE(review): this check only prints on mismatch -- it does not reject.
    offarrayfirst = vw.readMemValue(offarraybase+imagebase, 4)
    if offarrayfirst+imagebase != vajmp+2:
        vw.verbprint( "first ref is not the va after the jmp: %x != %x" % (offarrayfirst+imagebase, vajmp+2))

    indiroffbase = None
    # now check for the byte array before that
    backthree = vw.getLocation(backtwo[0]-1)
    # this one is optional.  first two are not.
    if backthree != None:
        op = vw.parseOpcode(backthree[0])
        if op.mnem == 'movzx' and isinstance(op.opers[1], e_i386.i386SibOper) and \
                op.opers[1].scale == 1:
            vw.verbprint( "this is a double deref (hitting a byte array offset into the offset-array)")
            indiroffbase = op.opers[1].disp

    return True, {'indiroffbase':indiroffbase,
            'offarraybase':offarraybase,
            }


def makeSwitch(vw, vajmp, offarraybase, indiroffbase=None):
    '''
    Makes the changes to the Workspace for the given jmp location.
    Handles naming for all cases because naming wants to indicate larger context.
    (future)If indiroffbase is not None, the indirection "database" is analyzed for naming

    Returns the tracker list of (index, case_va) pairs that were made.
    '''
    filename = vw.getMemoryMap(vajmp)[3]
    imagebase = vw.getFileMeta(filename, 'imagebase')

    # we have identified this is a switch case
    vw.verbprint( "FOUND MS SWITCH CASE SPRAY at 0x%x" % vajmp)

    # roll through the offset array until imagebase+offset is not a valid pointer, points to non-op locations or splits instructions
    count = 0
    tracker = []
    ptr = offarraybase
    while True:
        off = vw.readMemValue(ptr+imagebase, 4)
        ova = imagebase + off
        tgtva = makeSwitchCase(vw, vajmp, ova)
        if not tgtva:
            break
        tracker.append((count, tgtva))
        count += 1
        ptr += 4

    # FIXME: this doesn't take into account two-level derefs (indiroffbase)
    # Group case indices by shared target va so one name can carry them all.
    naming = {}
    for idx,va in tracker:
        lst = naming.get(va)
        if lst == None:
            lst = []
            naming[va] = lst
        lst.append("%xh" % idx)

    #TODO: analyze indiroffbase to determine case information
    for va, opts in naming.items():
        options = "_".join(opts)
        name = "switch_case_%s_%.8x" % (options, va)
        vw.makeName(va, name)

    #TODO: analyze which paths handle which cases, name accordingly
    #TODO: determine good hint for symbolik constraints
    funcva = vw.getFunction(vajmp)
    vw.makeName(vajmp, "jmp_switch_%.8x" % vajmp)
    # Re-run codeblock analysis so the new case blocks join the function.
    vagc.analyzeFunction(vw, funcva)
    return tracker


def makeSwitchCase(vw, vaSwitch, vaCase):
    '''
    Handle minutia of each case, specifically, checking for validity and
    making Xref and making code (if necessary)

    Returns the case va on success, or False when vaCase cannot be a case
    target (invalid pointer, mid-instruction, or a non-opcode location).
    '''
    if not vw.isValidPointer(vaCase):
        return False

    loc = vw.getLocation(vaCase)
    if loc != None:
        # An existing location must start exactly here and be an opcode.
        if loc[0] != vaCase:
            return False
        if loc[vivisect.L_LTYPE] != vivisect.LOC_OP:
            return False
    else:
        vw.makeCode(vaCase)

    #if we reach here, we're going to assume the location is valid.
    vw.verbprint( "0x%x MS Switch Case Spray: emu.getBranchNode( emu.curpath , 0x%x )" % (vaSwitch, vaCase))
    vw.addXref(vaSwitch, vaCase, vivisect.REF_CODE)
    return vaCase


# Script-mode entry: when run inside a vivisect shell that exposes ``vw``.
if globals().get('vw'):
    verbose = vw.verbose
    vw.verbose = True
    vw.vprint("Starting...")
    # NOTE(review): ``findSwitchCase`` is not defined anywhere in this
    # module -- executing this script path raises NameError.  It likely
    # should iterate functions and drive testSwitch/makeSwitch; confirm
    # against the intended entry point before fixing.
    findSwitchCase(vw)
    vw.vprint("Done")
    vw.verbose = verbose
w long ago the date represents. Ported from PrettyDate by John Resig """ if not iso_datetime: return '' import math if isinstance(iso_datetime, basestring): iso_datetime = datetime.datetime.strptime(iso_datetime, DATETIME_FORMAT) now_dt = datetime.datetime.strptime(now(), DATETIME_FORMAT) dt_diff = now_dt - iso_datetime # available only in python 2.7+ # dt_diff_seconds = dt_diff.total_seconds() dt_diff_seconds = dt_diff.days * 86400.0 + dt_diff.seconds dt_diff_days = math.floor(dt_diff_seconds / 86400.0) # differnt cases if dt_diff_seconds < 60.0: return 'just now' elif dt_diff_seconds < 120.0: return '1 minute ago' elif dt_diff_seconds < 3600.0: return '%s minutes ago' % cint(math.floor(dt_diff_seconds / 60.0)) elif dt_diff_seconds < 7200.0: return '1 hour ago' elif dt_diff_seconds < 86400.0: return '%s hours ago' % cint(math.floor(dt_diff_seconds / 3600.0)) elif dt_diff_days == 1.0: return 'Yesterday' elif dt_diff_days < 7.0: return '%s days ago' % cint(dt_diff_days) elif dt_diff_days < 31.0: return '%s week(s) ago' % cint(math.ceil(dt_diff_days / 7.0)) elif dt_diff_days < 365.0: return '%s months ago' % cint(math.ceil(dt_diff_days / 30.0)) else: return 'more than %s year(s) ago' % cint(math.floor(dt_diff_days / 365.0)) def comma_or(some_list): return comma_sep(some_list, frappe._("{0} or {1}")) def comma_and(some_list): return comma_sep(some_list, frappe._("{0} and {1}")) def comma_sep(some_list, pattern): if isinstance(some_list, (list, tuple)): # list(some_list) is done to preserve the existing list some_list = [unicode(s) for s in list(some_list)] if not some_list: return "" elif len(some_list) == 1: return some_list[0] else: some_list = ["'%s'" % s for s in some_list] return pattern.format(", ".join(frappe._(s) for s in some_list[:-1]), some_list[-1]) else: return some_list def new_line_sep(some_list): if isinstance(some_list, (list, tuple)): # list(some_list) is done to preserve the existing list some_list = [unicode(s) for s in list(some_list)] if not 
some_list: return "" elif len(some_list) == 1: return some_list[0] else: some_list = ["%s" % s for s in some_list] return format("\n ".join(some_list)) else: return some_list def filter_strip_join(some_list, sep): """given a list, filter None values, strip spaces and join""" return (cstr(sep)).join((cstr(a).strip() for a in filter(None, some_list))) def get_url(uri=None, full_address=False): """get app url from request""" host_name = frappe.local.conf.host_name or frappe.local.conf.hostname if uri and (uri.startswith("http://") or uri.startswith("https://")): return uri if not host_name: if hasattr(frappe.local, "request") and frappe.local.request and frappe.local.request.host: protocol = 'https' == frappe.get_request_header('X-Forwarded-Proto', "") and 'https://' or 'http://' host_name = protocol + frappe.local.request.host elif frappe.local.site: host_name = "http://{}".format(frappe.local.site) else: host_name = frappe.db.get_value("Website Settings", "Website Settings", "subdomain") if host_name and "http" not in host_name: host_name = "http://" + host_name if not host_name: host_name = "http://localhost" if not uri and full_address: uri = frappe.get_request_header("REQUEST_URI", "") url = urllib.basejoin(host_name, uri) if uri else host_name return url def get_host_name(): return get_url().rsplit("//", 1)[-1] def get_link_to_form(doctype, name, label=None): if not label: label = name return """<a href="{0}">{1}</a>""".format(get_url_to_form(doctype, name), label) def get_url_to_form(doctype, name): return get_url(uri = "desk#Form/{0}/{1}".format(quoted(doctype), quoted(name))) def get_url_to_list(doctype): return get_url(uri = "desk#List/{0}".format(quoted(doctype))) operator_map = { # startswith "^": lambda (a, b): (a or "").startswith(b), # in or not in a list "in": lambda (a, b): operator.contains(b, a), "not in": lambda (a, b): not operator.contains(b, a), # comparison operators "=": lambda (a, b): operator.eq(a, b), "!=": lambda (a, b): operator.ne(a, b), 
">": lambda (a, b): operator.gt(a, b), "<": lambda (a, b): operator.lt(a, b), ">=": lambda (a, b): operator.ge(a, b), "<=": lambda (a, b): operator.le(a, b), "not None": lambda (a, b): a and True or False, "None": lambda (a, b): (not a) and True or False } def evaluate_filters(doc, filters): '''Returns true if doc matches filters''' if isinstance(filters, dict): for key, value in filters.iteritems(): f = get_filter(None, {key:value}) if not compare(doc.get(f.fieldname), f.operator, f.value): return False elif isinstance(filters, (list, tuple)): for d in filters: f = get_filter(None, d) if not compare(doc.get(f.fieldname), f.operator, f.value): return False return True def compare(val1, condition, val2): ret = False if condition in operator_map: ret = operator_map[condition]((val1, val2)) return ret def get_filter(doctype, f): """Returns a _dict like { "doctype": "fieldname": "operator": "value": } """ from frappe.model import default_fields, optional_fields if isinstance(f, dict): key, value = f.items()[0] f = make_filter_tuple(doctype, key, value) if not isinstance(f, (list, tuple)): frappe.throw("Filter must be a tuple or list (in a list)") if len(f) == 3: f = (doctype, f[0], f[1], f[2]) elif len(f) != 4: frappe.throw("Filter must have 4 values (doctype, fieldname, operator, value): {0}".format(str(f))) f = frappe._dict(doctype=f[0], fieldname=f[1], operator=f[2], value=f[3]) if not f.operator: # if operator is missing f.operator = "=" valid_operators = ("=", "!=", ">", "<", ">=", "<=", "like", "not like", "in", "not in") if f.operator not in valid_operators: frappe.throw("Operator must be one of {0}".format(", ".join(valid_operators))) if f.doctype and (f.fieldname not in default_fields + optional_fields): # verify fieldname belongs to the doctype meta = frappe.get_meta(f.doctype) if not meta.has_field(f.fieldname): # try and match the doctype name from child tables for df in meta.get_table_fields(): if frappe.get_meta(df.options).has_field(f.fieldname): 
f.doctype = df.options break return f def make_filter_tuple(doctype, key, value): '''return a filter tuple like [doctype, key, operator, value]''' if isinstance(value, (list, tuple)): return [doctype, key, value[0], value[1]] else: return [doctype, key, "=", value] def scrub_urls(html): html = expand_relative_urls(html) # encoding should be responsibility of the composer # html = quote_urls(html) return html d
ef expand_relative_urls(html): # expand relative urls url = get_url() if url.endswith("/"): url = url[:-1] def _expand_relative_urls(match): to_expand = list(match.groups()) if not to_expand[2].startswith("/"): to_expand[2] = "/" + to_expand[2] to_expand.insert(2, url) if 'url' in to_expand[0] and to_expand[1].startswit
h('(') and to_expand[-1].endswith(')'): # background-image: url('/assets/...') - workaround for wkhtmltopdf print-media-type to_expand.append(' !important') return "".join(to_expand) html = re.sub('(href|src){1}([\s]*=[\s]*[\'"]?)((?!http)[^\'" >]+)([\'"]?)', _expand_relative_urls, html) # background-image: url('/assets/...') html = re.sub('(:[\s]?url)(\([\'"]?)([^\)]*)([\'"]?\))', _expand_relative_urls, html) return html def quoted(url): return cstr(urllib.quote(encode(url), safe=b"~@#$&()*!+=:;,.?/'")) def quote_urls(html): def _quote_url(match): groups = list(match.groups()) groups[2] = quoted(groups[2]) return "".join(groups) return re.sub('(href|src){1}([\s]*=[\s]*[\'"]?)((?:http)[^\'">]+)([\'"]?)', _quote_url, html) def unique(seq): """use this instead of list(set()) to preserve order of the original list. Thanks to Stackoverflow: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order""" seen = set() seen_add = seen.add return [ x for x in seq if not (x in seen or seen_add(x)) ] def strip(v
import pytest
import os


def test_launch (launch):
    """Smoke-test the example app: both REST endpoints answer with a result/info payload."""
    # Path to the example app run by the project-provided ``launch`` fixture.
    serve = '../../atila/example/serve.py'
    if not os.path.isfile (serve):
        # Example app not present in this checkout; nothing to test.
        return
    with launch (serve) as engine:
        # i == 0 hits /apis/rest-api, i == 1 hits /apis/rest-api2.
        for i in range (2):
            resp = engine.axios.get ('/apis/rest-api{}'.format (i == 1 and 2 or ''))
            assert resp.status_code == 200
            assert 'result' in resp.data
            assert 'info' in resp.data ['result']
        # NOTE(review): a second, identical pass follows -- presumably
        # exercising the already-warm server.  The source formatting was
        # ambiguous about whether it sits inside the ``with`` block; it is
        # placed inside here since it uses ``engine``.  Confirm both the
        # placement and that the duplication is intentional.
        for i in range (2):
            resp = engine.axios.get ('/apis/rest-api{}'.format (i == 1 and 2 or ''))
            assert resp.status_code == 200
            assert 'result' in resp.data
            assert 'info' in resp.data ['result']
# This file is part of Moksha.
# Copyright (C) 2008-2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from moksha.config.app_cfg import base_config

#Use base_config to setup the environment loader function
# ``load_environment`` is the module-level callable produced by the shared
# app config; presumably the WSGI app factory imports and invokes it at
# startup -- TODO confirm against the caller.
load_environment = base_config.make_load_environment()
# -*- coding: utf-8 -*-
"""Crawl company information from crunchbase.com and store it locally."""
import logging
import threading

from midas.compat import HTTPError
from midas.compat import Queue

from midas.crunchbase_company import CompanyList

import midas.scripts


class FetchCrunchbaseCompanies(midas.scripts.MDCommand):
    """
    Crawl the companies information from crunchbase.com and save it
    locally.
    """

    def add_argument(self):
        # Hook called by the MDCommand base class to extend its parser.
        self.parser.add_argument('-p', '--num_threads',
                                 default=1, type=int,
                                 help='How many threads should crawl in parallel')
        self.parser.add_argument('location',
                                 action=midas.scripts.CheckDirectoryAction,
                                 help='The location to save the crawled data')

    def run(self):
        """Refresh the company list and fan the pending updates out to workers.

        Returns 0 once every queued company has been processed.
        """
        # ``quiet`` is presumably added by the MDCommand base parser; it is
        # not declared in add_argument above -- TODO confirm.
        if self.args.quiet:
            log_level = logging.CRITICAL
        else:
            log_level = logging.INFO
        logging.basicConfig(level=log_level)
        cl = CompanyList(self.args.location)
        logging.info('Updating CompanyList')
        cl.update()
        q = Queue()
        # Daemon workers die with the main thread, so q.join() is the only
        # thing keeping the process alive until the queue drains.
        for _ in range(self.args.num_threads):
            t = Fetcher(q)
            t.daemon = True
            t.start()
        # Only queue companies that are not yet stored locally.
        for company in cl.list_not_local():
            q.put(company)
        q.join()
        return 0


class Fetcher(threading.Thread):
    """Worker thread: pull company objects off the queue and update each."""

    def __init__(self, queue):
        super(Fetcher, self).__init__()
        self.q = queue
        self.inst = None  # company currently being processed

    def run(self):
        while True:
            self.inst = self.q.get()
            logging.info('{0}: Updating'.format(self.inst))
            self.make_update(0)
            self.q.task_done()

    def make_update(self, tries=0):
        """Update ``self.inst``; retry up to two extra times on HTTP 503/504.

        404 is logged and dropped; any other error is logged with traceback.
        """
        try:
            self.inst.update()
        except HTTPError as e:
            if e.code == 404:
                logging.critical('{0}: Got 404'.format(self.inst))
            elif tries < 2 and (e.code == 503 or e.code == 504):
                # Fix: the message previously hard-coded "504" even when the
                # server answered 503 -- report the actual status code.
                logging.critical(
                    '{0}: Got {1} ({2} attempt[s])'.format(self.inst,
                                                           e.code,
                                                           tries + 1)
                    )
                self.make_update(tries + 1)
            else:
                logging.exception(e)
        except Exception as e:
            logging.critical(
                '{0}: An exception occured'.format(self.inst))
            logging.exception(e)
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES

from swgpy.object import *

def create(kernel):
	# Template factory for the weaponsmith-trainer creature (autogenerated).
	result = Creature()

	result.template = "object/mobile/shared_dressed_weaponsmith_trainer_02.iff"
	result.attribute_template_id = 9
	result.stfName("npc_name","human_base_male")

	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####

	return result
10, "points_possible": 10, "contributing_assessments": 123, "staff_id": "staff 1", } _, submission = self._create_workflow_with_status( "user 1", "test/1/1", "peer-problem", "peer", steps=["peer"] ) workflow_api.update_from_assessments( submission["uuid"], { "peer": { "must_grade": 5, "must_be_graded_by": 3 } }, override_submitter_requirements=True ) @patch('openassessment.workflow.models.AssessmentWorkflow.objects.get') @ddt.file_data('data/assessments.json') @raises(workflow_api.AssessmentWorkflowInternalError) def test_unexpected_exception_wrapped(self, data, mock_create): mock_create.side_effect = Exception("Kaboom!") submission = sub_api.create_submission(ITEM_1, ANSWER_2) workflow_api.update_from_assessments(submission["uuid"], data["steps"]) @ddt.file_data('data/assessments.json') def test_get_assessment_workflow_expected_errors(self, data): with self.assertRaises(workflow_api.AssessmentWorkflowNotFoundError): workflow_api.get_workflow_for_submission("0000000000000", data["requirements"]) with self.assertRaises(workflow_api.AssessmentWorkflowRequestError): workflow_api.get_workflow_for_submission(123, data["requirements"]) @patch('submissions.models.Submission.objects.get') @ddt.file_data('data/assessments.json') @raises(workflow_api.AssessmentWorkflowInternalError) def test_unexpected_workflow_get_errors_wrapped(self, data, mock_get): mock_get.side_effect = Exception("Kaboom!") submission = sub_api.create_submission(ITEM_1, "We talk TV!") workflow = workflow_api.create_workflow(submission["uuid"], data["steps"]) workflow_api.get_workflow_for_submission(workflow["uuid"], {}) def test_preexisting_workflow(self): """ Verifies that even if a workflow does not go through start_workflow, it won't blow up. update_from_assessments() will go through _get_steps(), and add a staff step to the workflow even if it was created without one initially. 
""" submission = sub_api.create_submission({ "student_id": "test student", "course_id": "test course", "item_id": "test item", "item_type": "openassessment", }, "test answer") # Create the model object directly, bypassing start_workflow() workflow = AssessmentWorkflow.objects.create( submission_uuid=submission["uuid"], status=AssessmentWorkflow.STATUS.waiting, course_id="test course", item_id="test item" ) # This call will throw exceptions if the workflow is in an invalid state workflow_api.update_from_assessments(submission["uuid"], {}) def test_get_status_counts(self): # Initially, the counts should all be zero counts = workflow_api.get_status_counts( "test/1/1", "peer-problem", ["ai", "training", "peer", "self"] ) self.assertEqual(counts, [ {"status": "training", "count": 0}, {"status": "peer", "count": 0}, {"status": "self", "count": 0}, {"status": "waiting", "count": 0}, {"status": "done", "count": 0}, {"status": "cancelled", "count": 0}, ]) self.assertFalse("ai" in [count['status'] for count in counts]) # Create assessments with each status # We're going to cheat a little bit by using the model objects # directly, since the API does not provide access to the status directly. 
self._create_workflow_with_status("user 1", "test/1/1", "peer-problem", "training") self._create_workflow_with_status("user 1", "test/1/1", "peer-problem", "peer") self._create_workflow_with_status("user 2", "test/1/1", "peer-problem", "self") self._create_workflow_with_status("user 3", "test/1/1", "peer-problem", "self") self._create_workflow_with_status("user 4", "test/1/1", "peer-problem", "waiting") self._create_workflow_with_status("user 5", "test/1/1", "peer-problem", "waiting") self._create_workflow_with_status("user 6", "test/1/1", "peer-problem", "waiting") self._create_workflow_with_status("user 7", "test/1/1", "peer-problem", "done") self._create_workflow_with_status("user 8", "test/1/1", "peer-problem", "done") self._create_workflow_with_status("user 9", "test/1/1", "peer-problem", "done") self._create_workflow_with_status("user 10", "test/1/1", "peer-problem", "done") self._create_workflow_with_status("user 11", "test/1/1", "peer-problem", "cancelled") # Now the counts should be updated counts = workflow_api.get_status_counts( "test/1/1", "peer-problem", ["ai", "training", "peer", "self"] ) self.assertEqual(counts, [ {"status": "training", "count": 1}, {"status": "peer", "count": 1}, {"status": "self", "count": 2}, {"status": "waiting", "count": 3}, {"status": "done", "count": 4}, {"status": "cancelled", "count": 1}, ]) self.assertFalse("ai" in [count['status'] for count in counts]) # Create a workflow in a different course, same user and item # Counts should be the same self._create_workflow_with_status("user 1", "other_course", "peer-problem", "peer") updated_counts = workflow_api.get_status_counts( "test/1/1", "peer-problem", ["ai", "training", "peer", "self"] ) self.assertEqual(counts, updated_counts) # Create a workflow in the same course, different item # Counts should be the same self._create_workflow_with_status("user 1", "test/1/1", "other problem", "peer") updated_counts = workflow_api.get_status_counts( "test/1/1", "peer-problem", ["ai", 
"training", "peer", "self"] ) self.assertEqual(counts, updated_counts) @override_settings(ORA2_ASSESSMENTS={'self': 'not.a.module'}) def test_unable_to_load_api(self): submission = sub_api.create_submission({ "student_id": "test student", "course_id": "test course", "item_id": "test item", "item_type": "openassessment", }, "test answer") with self.assertRaises(AssessmentWorkflowInternalError): workflow_api.create_workflow(submission['uuid'], ['self']) def test_cancel_the_assessment_workflow(self): # Create the submission and assessment workflow. submission = sub_api.create_submission(ITEM_1, ANSWER_1) workflow = workflow_api.create_workflow(submission["uuid"], ["peer"]) requirements = { "peer": { "must_grade": 1, "must_be_graded_by": 1 } } # Check the workflow is not cancelled. self.assertFalse(workflow_api.is_workflow_cancelled(submission["uuid"])) # Check the status is not cancelled. self.assertNotEqual(workflow.get('status'), 'cancelled') # Check the points_earned are not 0 self.assertNotEqual(workflow['score'], 0) # Cancel the workflow for submission.
workflow_api.cancel_workflow( submission_uuid=submission["uuid"],
comments="Inappropriate language", cancelled_by_id=ITEM_2['student_id'], assessment_requirements=requirements ) # Check workflow is cancelled. self.assertTrue(workflow_api.is_workflow_cancelled(submission["uuid"])) # Status for workflow should be cancelled. workflow = AssessmentWorkflow.get_by_submission_uuid(submission["uuid"]) self.assertEqual(workflow.status, 'cancelled') # Score points_
patch_
size = 1
from django.conf.urls import url, include
from django.contrib.auth.decorators import login_required

# Wildcard import supplies every view callable referenced below.
from .views import *

# Evaluation (exam) URL routes.  Order matters for Django URL resolution;
# do not reorder.
urlpatterns = [
    # Listing
    # NOTE(review): this pattern is missing a trailing ``$`` anchor, so it
    # also matches any path that merely starts with "evaluacion-lista/".
    # Confirm before tightening -- callers may rely on it.
    url(r'^evaluacion-lista/', login_required(evaluacion_list), name='listar_evaluacion'),
    # Step-by-step evaluation wizard
    url(r'^generar/step1/$', login_required(evaluacion_step1), name='evaluacion_step1'),
    url(r'^generar/step1/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step1_back), name='evaluacion_step1_back'),
    url(r'^generar/step2/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step2), name='evaluacion_step2'),
    url(r'^generar/step3/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step3), name='evaluacion_step3'),
    url(r'^generar/step4/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step4), name='evaluacion_step4'),
    # Automatic (quick) evaluation
    url(r'^automatica/step1/$', login_required(evaluacion_rapida_step1), name='evaluacion_rapida_step1'),
    url(r'^automatica/step2/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_rapida_step2), name='evaluacion_rapida_step2'),
    # Evaluation detail
    url(r'^detalle/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_detail), name='evaluacion_detail'),
    url(r'^descartar/(?P<evaluacion_id>\d+)/$', login_required(descartar_evaluacion), name='evaluacion_descartar'),
    # Student grading
    url(r'^estudiantes-notas/(?P<evaluacion_id>\d+)/$', login_required(estudiante_calificacion), name='estudiante_calificacion'),
    # Excel
    url(r'^calificacion/(?P<evaluacion_id>\d+)/$', login_required(calificaciones), name='calificacion'),
    url(r'^agregar/calificaciones/(?P<evaluacion_id>\d+)/$', login_required(get_calificaciones), name='calificacionExcel'),
    # PDF
    url(r'^evaluacion-pdf/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_pdf), name='evaluacionPDF'),
    url(r'^solucion-pdf/(?P<evaluacion_id>\d+)/$', login_required(solucion_pdf), name='solucionPDF'),
    # AJAX
    url(r'^ContenidoFiltroAjax/$', login_required(ContenidoFiltroAjax.as_view()), name='ContenidoFiltroAjax'),
    url(r'^PreguntaObjAjax/$', login_required(PreguntaObjAjax.as_view()), name='PreguntaObjAjax'),
    url(r'^filtro/palabras/$', login_required(busqueda), name='busqueda_palabra'),
    url(r'^PreguntaBusquedaAjax/$', login_required(PreguntaBusquedaAjax.as_view()), name='PreguntaBusquedaAjax'),
]
## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial shows how to express the concept of ranges when working with the TDataFrame.
## \macro_code
##
## \date March 2017
## \author Danilo Piparo

import ROOT

# C++ helper that writes a 100-entry tree with branches b1 (int) and
# b2 (float); JIT-compiled below via gInterpreter so it is callable
# from Python as ROOT.fill_tree.
fill_tree_code = '''
void fill_tree(const char *filename, const char *treeName)
{
   TFile f(filename, "RECREATE");
   TTree t(treeName, treeName);
   int b1;
   float b2;
   t.Branch("b1", &b1);
   t.Branch("b2", &b2);
   for (int i = 0; i < 100; ++i) {
      b1 = i;
      b2 = i * i;
      t.Fill();
   }
   t.Write();
   f.Close();
   return;
}
'''

# We prepare an input tree to run on
fileName = "tdf006_ranges_py.root"
treeName = "myTree"
ROOT.gInterpreter.Declare(fill_tree_code)
ROOT.fill_tree(fileName, treeName)

# We read the tree from the file and create a TDataFrame.
TDF = ROOT.ROOT.Experimental.TDataFrame
d = TDF(treeName, fileName)

# ## Usage of ranges
# Now we'll count some entries using ranges
c_all = d.Count()

# This is how you can express a range of the first 30 entries
d_0_30 = d.Range(0, 30)
c_0_30 = d_0_30.Count()

# This is how you pick all entries from 15 onwards
# (an end value of 0 means "until the end of the dataset")
d_15_end = d.Range(15, 0)
c_15_end = d_15_end.Count()

# We can use a stride too, in this case we pick an event every 3
d_15_end_3 = d.Range(15, 0, 3)
c_15_end_3 = d_15_end_3.Count()

# The Range is a 1st class citizen in the TDataFrame graph:
# not only actions (like Count) but also filters and new columns can be added to it.
d_0_50 = d.Range(0, 50)
c_0_50_odd_b1 = d_0_50.Filter("1 == b1 % 2").Count()

# An important thing to notice is that the counts of a filter are relative to the
# number of entries a filter "sees". Therefore, if a Range depends on a filter,
# the Range will act on the entries passing the filter only.
c_0_3_after_even_b1 = d.Filter("0 == b1 % 2").Range(0, 3).Count()

# Ok, time to wrap up: let's print all counts!
# (GetValue() triggers the single event loop that serves every count above.)
print("Usage of ranges:")
print(" - All entries:", c_all.GetValue())
print(" - Entries from 0 to 30:", c_0_30.GetValue())
print(" - Entries from 15 onwards:", c_15_end.GetValue())
print(" - Entries from 15 onwards in steps of 3:", c_15_end_3.GetValue())
print(" - Entries from 0 to 50, odd only:", c_0_50_odd_b1.GetValue())
print(" - First three entries of all even entries:", c_0_3_after_even_b1.GetValue())
from __future__ import absolute_import

from django.db import models
from django.test import TestCase

from .models import Author, Book

# Output collected by the signal handlers below; flushed by
# SignalsRegressTests.get_signal_output() before each measured call.
signal_output = []


def pre_save_test(signal, sender, instance, **kwargs):
    """Record that pre_save fired (and whether it was a raw save)."""
    signal_output.append('pre_save signal, %s' % instance)
    if kwargs.get('raw'):
        signal_output.append('Is raw')


def post_save_test(signal, sender, instance, **kwargs):
    """Record that post_save fired and whether the row was created or updated."""
    signal_output.append('post_save signal, %s' % instance)
    if 'created' in kwargs:
        if kwargs['created']:
            signal_output.append('Is created')
        else:
            signal_output.append('Is updated')
    if kwargs.get('raw'):
        signal_output.append('Is raw')


def pre_delete_test(signal, sender, instance, **kwargs):
    """Record that pre_delete fired and whether the pk is still set."""
    # NOTE(review): the message says 'pre_save' although this is the
    # pre_delete handler; kept byte-identical because expected outputs in
    # other tests may depend on the exact string -- confirm before renaming.
    signal_output.append('pre_save signal, %s' % instance)
    # `is not None` (identity) instead of `!= None`: correct idiom,
    # identical True/False result here.
    signal_output.append('instance.id is not None: %s' % (instance.id is not None))


def post_delete_test(signal, sender, instance, **kwargs):
    """Record that post_delete fired; the (stale) pk is still set afterwards."""
    signal_output.append('post_delete signal, %s' % instance)
    signal_output.append('instance.id is not None: %s' % (instance.id is not None))


class SignalsRegressTests(TestCase):
    """
    Testing signals before/after saving and deleting.
    """

    def get_signal_output(self, fn, *args, **kwargs):
        """Call fn(*args, **kwargs) and return only the signal output it produced."""
        # Flush any existing signal output
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        return signal_output

    def setUp(self):
        # Save up the number of connected signals so that we can check at the end
        # that all the signals we register get properly unregistered (#9989)
        self.pre_signals = (len(models.signals.pre_save.receivers),
                            len(models.signals.post_save.receivers),
                            len(models.signals.pre_delete.receivers),
                            len(models.signals.post_delete.receivers))

        models.signals.pre_save.connect(pre_save_test)
        models.signals.post_save.connect(post_save_test)
        models.signals.pre_delete.connect(pre_delete_test)
        models.signals.post_delete.connect(post_delete_test)

    def tearDown(self):
        # Disconnect in reverse order of connection.
        models.signals.post_delete.disconnect(post_delete_test)
        models.signals.pre_delete.disconnect(pre_delete_test)
        models.signals.post_save.disconnect(post_save_test)
        models.signals.pre_save.disconnect(pre_save_test)

        # Check that all our signals got disconnected properly.
        post_signals = (len(models.signals.pre_save.receivers),
                        len(models.signals.post_save.receivers),
                        len(models.signals.pre_delete.receivers),
                        len(models.signals.post_delete.receivers))
        self.assertEqual(self.pre_signals, post_signals)

    def test_model_signals(self):
        """
        Model saves should throw some signals.
        """
        a1 = Author(name='Neal Stephenson')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Neal Stephenson",
            "post_save signal, Neal Stephenson",
            "Is created"
        ])

        b1 = Book(name='Snow Crash')
        self.assertEqual(self.get_signal_output(b1.save), [
            "pre_save signal, Snow Crash",
            "post_save signal, Snow Crash",
            "Is created"
        ])

    def test_m2m_signals(self):
        """ Assigning and removing to/from m2m shouldn't generate an m2m signal """
        b1 = Book(name='Snow Crash')
        self.get_signal_output(b1.save)
        a1 = Author(name='Neal Stephenson')
        self.get_signal_output(a1.save)
        self.assertEqual(self.get_signal_output(setattr, b1, 'authors', [a1]), [])
        self.assertEqual(self.get_signal_output(setattr, b1, 'authors', []), [])
d a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """These are tests for the bodhi.server.consumers.automatic_updates module.""" from copy import deepcopy from unittest import mock import logging from fedora_messaging.api import Message from fedora_messaging.testing import mock_sends import pytest from bodhi.server.config import config from bodhi.server.consumers.automatic_updates import AutomaticUpdateHandler from bodhi.server.models import ( Build, Release, TestGatingStatus, Update, UpdateRequest, UpdateStatus, UpdateType, User ) from bodhi.tests.server import base @mock.patch('bodhi.server.consumers.automatic_updates.work_on_bugs_task', mock.Mock()) class TestAutomaticUpdateHandler(base.BasePyTestCase): """Test the automatic update handler.""" def setup_method(self, method): """Set up environment for each test.""" super().setup_method(method) self.release = self.db.query(Release).filter_by(name='F17').first() if self.release: self.release.create_automatic_updates = True self.db.flush() else: self.release = self.create_release('17', create_automatic_updates=True) body = { 'build_id': 442562, 'name': 'colord', 'tag_id': 214, 'instance': 's390', 'tag': 'f17-updates-candidate', 'user': 'sharkcz', 'version': '1.3.4', 'owner': 'sharkcz', 'release': '1.fc26', } self.sample_message = Message(topic='', body=body) self.sample_nvr = f"{body['name']}-{body['version']}-{body['release']}" self.db_factory = base.TransactionalSessionMaker(self.Session) self.handler = AutomaticUpdateHandler(self.db_factory) # Test the main code paths. def test_consume(self, caplog): """Assert that messages about tagged builds create an update.""" caplog.set_level(logging.DEBUG) # process the message self.handler(self.sample_message) # check if the update exists... 
update = self.db.query(Update).filter( Update.builds.any(Build.nvr == self.sample_nvr) ).first() # ...and some of its properties assert update is not None assert update.type == UpdateType.unspecified assert update.status == UpdateStatus.pending assert update.autokarma == False assert update.test_gating_status is None assert update.builds[0].release == self.release expected_username = base.buildsys.DevBuildsys._build_data['owner_name'] assert update.user and update.user.name == expected_username assert not any(r.levelno >= logging.WARNING for r in caplog.records) @pytest.mark.parametrize('changelog', (True, None, "")) @mock.patch('bodhi.server.models.RpmBuild.get_changelog') def test_changelog(self, mock_generate_changelog, changelog): """Assert that update notes contain the changelog if it exists.""" if changelog: # fill the changelog here rather than in the decorator changelog = ('* Sat Aug 3 2013 Fedora Releng <rel-eng@lists.fedoraproject.org> - 2\n' '- Added a free money feature.\n* Tue Jun 11 2013 Randy <bowlofeggs@fpo>' ' - 2.0.1-2\n- Make users ☺\n') mock_generate_changelog.return_value = changelog # process the message self.handler(self.sample_message) # check if the update exists... update = self.db.query(Update).filter( Update.builds.any(Build.nvr == self.sample_nvr) ).first() if changelog: assert update.notes == f"""Automatic update for colord-1.3.4-1.fc26. ##### **Changelog** ``` {changelog} ```""" else: # no changelog assert update.notes == "Automatic update for colord-1.3.4-1.fc26." @mock.patch('bodhi.server.models.RpmBuild.get_changelog') def test_bug_added(self, mock_generate_changelog): """Assert that a bug is added to the update if proper string is in changelog.""" changelog = ('* Sat Aug 3 2013 Fedora Releng <rel-eng@lists.fedoraproject.org> - 2\n' '- Added a free money feature.\n- Fix rhbz#112233.') mock_generate_changelog.return_value = changelog # process the message self.handler(self.sample_message) # check if the update exists... 
update = self.db.query(Update).filter( Update.builds.any(Build.nvr == self.sample_nvr) ).first() ass
ert update.notes == f"""Automatic update for colord-1.3.4-1.fc26. ##### **Changelog** ``` {changelog} ```""" assert len(update.bugs) > 0 assert update.bugs[0].bug_id == 112233 @mock.patch.dict(config, [('bz_exclude_rels', ['F17'])]) @mock.patch('bodhi.server.models.RpmBuild.get_changelog') def test_bug_not_added_excluded_release(self, mock_genera
te_changelog): """Assert that a bug is not added for excluded release.""" changelog = ('* Sat Aug 3 2013 Fedora Releng <rel-eng@lists.fedoraproject.org> - 2\n' '- Added a free money feature.\n- Fix rhbz#112233.') mock_generate_changelog.return_value = changelog # process the message self.handler(self.sample_message) # check if the update exists... update = self.db.query(Update).filter( Update.builds.any(Build.nvr == self.sample_nvr) ).first() assert update.notes == f"""Automatic update for colord-1.3.4-1.fc26. ##### **Changelog** ``` {changelog} ```""" assert len(update.bugs) == 0 @mock.patch('bodhi.server.models.RpmBuild.get_changelog') def test_changelog_handled_exception(self, mock_generate_changelog): """Assert that update creation is succesful if get_changelog() raises ValueError.""" mock_generate_changelog.side_effect = ValueError('Handled exception') # process the message self.handler(self.sample_message) # check if the update exists... update = self.db.query(Update).filter( Update.builds.any(Build.nvr == self.sample_nvr) ).first() assert update.notes == "Automatic update for colord-1.3.4-1.fc26." @mock.patch('bodhi.server.models.RpmBuild.get_changelog') def test_changelog_unhandled_exception(self, mock_generate_changelog): """Assert that update creation is not succesful if get_changelog() raises Exception.""" mock_generate_changelog.side_effect = Exception('Unhandled exception') with pytest.raises(Exception) as exc: self.handler(self.sample_message) assert str(exc.value) == 'Unhandled exception' def test_consume_with_orphan_build(self, caplog): """ Assert existing builds without an update can be handled. Such builds can exist e.g. if they're used in a buildroot override. """ caplog.set_level(logging.DEBUG) # Run the handler to create the build & update, then remove the update. 
self.handler(self.sample_message) build = self.db.query(Build).filter_by(nvr=self.sample_nvr).one() update = build.update build.update = None # satisfy foreign key constraint self.db.delete(update) # Now test with the same message again which should encounter the # build already existing in the database. self.handler(self.sample_message) # check if the update exists... update = self.db.query(Update).filter( Update.builds.any(Build.nvr == self.sample_nvr) ).first() # ...and some of its properties assert update is not None assert update.type == UpdateType.unspecified assert update.status == UpdateStatus.pending assert update.test_gating_status is None expected_username = base.buildsys.DevBuildsys._build_data['owner_name'] assert update.user and update.user.name ==
#!/usr/bin/env python # -*- coding: utf-8 -*- # # || ____ _ __ # +------+ / __ )(_) /_______________ _____ ___ # | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \ # +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/ # || || /_____/_/\__/\___/_/ \__,_/ /___/\___/ # # Copyright (C) 2014 Bitcraze AB # # Crazyflie Nano Quadcopter Client # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
""" """ __author__ = 'Bitcraze AB' __all__ = ['InputMux'] import os import glob import logging from cflib.utils.callbacks import Caller logger = logging.getLogger(__name__) MAX_THRUST = 65000 class InputMux(object): def __init__(self, input): self._devs = [] self.name = "N/A" self.input = input self._prev_values = {} # Roll/pitch limitation self.max_rp_angle = 0 # Thrust limitations self.thrust_slew_enabled = True self.thrust_slew_limit = 0 self.thrust_slew_rate = 0 self.max_thrust = 0 self.max_yaw_rate = 0 self.springy_throttle = True self.trim_roll = 0 self.trim_pitch = 0 self.has_pressure_sensor = False # TODO: Fix writing these values #self._max_rp_angle = 40 #self._springy_throttle = True #self._thrust_slew_enabled = True #self._thrust_slew_limit = 30 #self._thrust_slew_rate = 30 #self._min_thrust = 20000 #self._max_thrust = 50000 #self._max_yaw_rate = 400 #self._trim_roll = 0.0 #self._trim_pitch = 0.0 # Stateful things self._old_thrust = 0 self._old_raw_thrust = 0 self._old_alt_hold = False # TODO: Should these really be placed here? 
#self.input_updated = Caller() #self.rp_trim_updated = Caller() #self.emergency_stop_updated = Caller() #self.device_discovery = Caller() #self.device_error = Caller() #self.althold_updated = Caller() #self.alt1_updated = Caller() #self.alt2_updated = Caller() def get_supported_dev_count(self): return 1 def add_device(self, dev, parameters): logger.info("Adding device and opening it") dev.open() self._devs.append(dev) def remove_device(self, dev): self._devs.remove(dev) dev.close() def close(self): """Close down the MUX and close all it's devices""" for d in self._devs: d.close() self._devs = [] def _cap_rp(self, rp): ret = rp * self.max_rp_angle if ret > self.max_rp_angle: ret = self.max_rp_angle elif ret < -1 * self.max_rp_angle: ret = -1 * self.max_rp_angle return ret def _scale_rp(self, roll, pitch): return [self._cap_rp(roll), self._cap_rp(pitch)] def _scale_and_deadband_yaw(self, yaw): return InputMux.deadband(yaw, 0.2) * self.max_yaw_rate def _limit_thrust(self, thrust, althold, emergency_stop): # Thust limiting (slew, minimum and emergency stop) if self.springy_throttle: if althold and self.has_pressure_sensor: thrust = int(round(InputMux.deadband(thrust, 0.2)*32767 + 32767)) #Convert to uint16 else: if thrust < 0.05 or emergency_stop: thrust = 0 else: thrust = self.min_thrust + thrust * (self.max_thrust - self.min_thrust) if (self.thrust_slew_enabled == True and self.thrust_slew_limit > thrust and not emergency_stop): if self._old_thrust > self.thrust_slew_limit: self._old_thrust = self.thrust_slew_limit if thrust < (self._old_thrust - (self.thrust_slew_rate / 100)): thrust = self._old_thrust - self.thrust_slew_rate / 100 if thrust < 0 or thrust < self.min_thrust: thrust = 0 e
lse: thrust = thrust / 2 + 0.5 if althold and self.has_
pressure_sensor: #thrust = int(round(JoystickReader.deadband(thrust,0.2)*32767 + 32767)) #Convert to uint16 thrust = 32767 else: if thrust < -0.90 or emergency_stop: thrust = 0 else: thrust = self.min_thrust + thrust * (self.max_thrust - self.min_thrust) if (self.thrust_slew_enabled == True and self.thrust_slew_limit > thrust and not emergency_stop): if self._old_thrust > self.thrust_slew_limit: self._old_thrust = self.thrust_slew_limit if thrust < (self._old_thrust - (self.thrust_slew_rate / 100)): thrust = self._old_thrust - self.thrust_slew_rate / 100 if thrust < -1 or thrust < self.min_thrust: thrust = 0 self._old_thrust = thrust self._old_raw_thrust = thrust return thrust def set_alt_hold_available(self, available): """Set if altitude hold is available or not (depending on HW)""" self.input._has_pressure_sensor = available def enable_alt_hold(self, althold): """Enable or disable altitude hold""" self._old_alt_hold = althold def _check_toggle(self, key, data): if not key in self._prev_values: self._prev_values[key] = data elif self._prev_values[key] != data: self._prev_values[key] = data return True return False def _update_alt_hold(self, value): if self._check_toggle("althold", value): self.input.althold_updated.call(str(value)) def _update_em_stop(self, value): if self._check_toggle("estop", value): self.input.emergency_stop_updated.call(value) def _update_alt1(self, value): if self._check_toggle("alt1", value): self.input.alt1_updated.call(value) def _update_alt2(self, value): if self._check_toggle("alt2", value): self.input.alt2_updated.call(value) def _trim_rp(self, roll, pitch): return [roll + self.trim_roll, pitch + self.trim_pitch] @staticmethod def p2t(percentage): """Convert a percentage to raw thrust""" return int(MAX_THRUST * (percentage / 100.0)) @staticmethod def deadband(value, threshold): if abs(value) < threshold: value = 0 elif value > 0: value -= threshold elif value < 0: value += threshold return value/(1-threshold) def read(self): return 
None
import types
import functools
import unittest

from .agent import Config, Agent

# XXX bring into compliance with python 2.7 unittest api


class AssertRaisesContextManager(object):
    """Context manager asserting that the wrapped block raises `expected`.

    Mirrors unittest.TestCase.assertRaises used as a context manager; the
    raised exception instance is stored on `self.exception`.
    """

    def __init__(self, expected):
        self.expected = expected

    def __enter__(self):
        return self

    def __exit__(self, exc_type, value, traceback):
        if exc_type is None:
            raise AssertionError('%s expected but not raised' % str(self.expected))
        # Accept subclasses of the expected exception, matching the
        # unittest.assertRaises contract referenced above.
        if not issubclass(exc_type, self.expected):
            # Bug fix: the message used to interpolate
            # `self.expected.__class__` (which is always `type`) instead of
            # the expected exception class itself.
            raise AssertionError('%s expected, not `%s`' %
                                 (self.expected, str(value)))
        self.exception = value
        # silence exception
        return True


class WebTestCase(unittest.TestCase):
    """TestCase with an HTTP agent per test plus request/assertion helpers."""

    def __init__(self, *args, **kwargs):
        super(WebTestCase, self).__init__(*args, **kwargs)
        # XXX does not inherit
        self.config = getattr(self.__class__, '_config', None) or Config()

    def setUp(self):
        super(WebTestCase, self).setUp()
        self._agent = self._create_agent()

    def _create_agent(self):
        """Instantiate the configured agent class with this case's config."""
        kwargs = {}
        kwargs['config'] = self.config
        agent_class = self.config.agent_class or Agent
        return agent_class(**kwargs)

    def agent(self):
        """Return a fresh agent, independent of the per-test one."""
        agent = self._create_agent()
        return agent

    @property
    def response(self):
        """The response of the last request made by the per-test agent."""
        return self._agent.response

    def request(self, method, url, *args, **kwargs):
        """Perform an HTTP request via the per-test agent.

        When the class is decorated with @no_session a fresh agent is used
        for every request so no cookies/session state carries over.
        """
        if hasattr(self, '_no_session') and self._no_session:
            self._agent = self._create_agent()
        return self._agent.request(method, url, *args, **kwargs)

    def get(self, url, *args, **kwargs):
        return self.request('get', url, *args, **kwargs)

    def post(self, url, *args, **kwargs):
        return self.request('post', url, *args, **kwargs)

    def follow_redirect(self):
        return self._agent.follow_redirect()

    def submit_form(self, form, elements=None):
        return self._agent.submit_form(form, elements)

    # XXX move to utu
    # XXX accept kwargs
    def assert_raises(self, expected, *args):
        """assertRaises that also works as a context manager when no callable
        is given (pre-2.7 unittest compatibility)."""
        if args:
            return self.assertRaises(expected, *args)
        else:
            return AssertRaisesContextManager(expected)

    def assert_status(self, code):
        self._agent.assert_status(code)

    def assert_redirected_to_uri(self, target):
        self._agent.assert_redirected_to_uri(target)

    def assert_redirected_to_url(self, target):
        self._agent.assert_redirected_to_url(target)

    def assert_response_cookie(self, name, **kwargs):
        self._agent.assert_response_cookie(name, **kwargs)

    def assert_not_response_cookie(self, name):
        self._agent.assert_not_response_cookie(name)

    def assert_cookie_jar_cookie(self, name, **kwargs):
        self._agent.assert_cookie_jar_cookie(name, **kwargs)

    def assert_not_cookie_jar_cookie(self, name):
        self._agent.assert_not_cookie_jar_cookie(name)

    @property
    def cookies(self):
        return self._agent.response.cookies

    @property
    def raw_headers(self):
        return self._agent.raw_headers

    @property
    def headers(self):
        return self._agent.headers

    @property
    def current_url(self):
        '''Contains the full URL for the last request made.

        None if no requests have been made.
        '''
        return self._agent.current_url


def no_session(cls):
    '''Class decorator requesting that session management should not
    be performed.
    '''
    cls._no_session = True
    return cls


def config(**kwargs):
    '''Function and class decorator for setting configuration on test cases.'''
    def decorator(cls_or_fn):
        if isinstance(cls_or_fn, types.FunctionType):
            fn = cls_or_fn

            @functools.wraps(fn)
            def decorated(self):
                # Apply the overrides for the duration of the test only.
                saved = {}
                for key in kwargs:
                    saved[key] = getattr(self.config, key)
                    setattr(self.config, key, kwargs[key])
                try:
                    fn(self)
                finally:
                    for key in kwargs:
                        setattr(self.config, key, saved[key])
            return decorated
        else:
            cls = cls_or_fn
            config = getattr(cls, '_config', None) or Config()
            for name in kwargs:
                setattr(config, name, kwargs[name])
            cls._config = config
            return cls
    return decorator
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# ObservationTools documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 30 14:32:48 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.intersphinx',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.viewcode',
              'sphinx.ext.githubpages']

# Add any paths that contain templates here, relative to this directory.
# NOTE(review): dot-prefixed directory ('.templates' rather than the usual
# '_templates') -- kept as generated; confirm the directory actually exists.
templates_path = ['.templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'ObservationTools'
copyright = '2017, IA'
author = 'IA'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# NOTE(review): dot-prefixed ('.static' rather than '_static') -- see
# templates_path above.
html_static_path = ['.static']


# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'ObservationToolsdoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'ObservationTools.tex', 'ObservationTools Documentation',
     'IA', 'manual'),
]


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'observationtools', 'ObservationTools Documentation',
     [author], 1)
]


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'ObservationTools', 'ObservationTools Documentation',
     author, 'ObservationTools', 'One line description of project.',
     'Miscellaneous'),
]


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

# Build the `capture` extension module by cythonizing capture.pyx.
# NOTE(review): distutils is deprecated (removed in Python 3.12) and modern
# Cython projects use setuptools + Cython.Build.cythonize -- confirm the
# supported Python versions before migrating.
setup(
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("capture", ["capture.pyx"])]
)
# -*- coding:utf-8 -*-

"""
# Author: Pegasus Wang (pegasuswang@qq.com, http://ningning.today)
# Created Time : Fri Feb 20 21:38:57 2015
# File Name: wechatService.py
# Description:

# :copyright: (c) 2015 by Pegasus Wang.
# :license: MIT, see LICENSE for more details.
"""

import json
import time
import urllib
import urllib2

from wechatUtil import MessageUtil
from wechatReply import TextReply


class RobotService(object):
    """Auto reply robot service"""

    # NOTE(review): hard-coded third-party API key; should be moved to
    # configuration / environment rather than committed in source.
    KEY = 'd92d20bc1d8bb3cff585bf746603b2a9'
    url = 'http://www.tuling123.com/openapi/api'

    @staticmethod
    def auto_reply(req_info):
        """Ask the Tuling robot API for a reply to ``req_info``.

        :param unicode req_info: the user's message text
        :return: the robot's reply with ``<br>`` converted to newlines
        """
        query = {'key': RobotService.KEY, 'info': req_info.encode('utf-8')}
        data = urllib.urlencode(query)
        req = urllib2.Request(RobotService.url, data)
        f = urllib2.urlopen(req).read()
        return json.loads(f).get('text').replace('<br>', '\n')
        #return json.loads(f).get('text')


class WechatService(object):
    """process request"""

    @staticmethod
    def processRequest(request):
        """process different message types.

        :param request: post request message
        :return: the reply message serialized to XML
        """
        requestMap = MessageUtil.parseXml(request)
        fromUserName = requestMap.get(u'FromUserName')
        toUserName = requestMap.get(u'ToUserName')
        createTime = requestMap.get(u'CreateTime')
        msgType = requestMap.get(u'MsgType')
        msgId = requestMap.get(u'MsgId')

        textReply = TextReply()
        textReply.setToUserName(fromUserName)
        textReply.setFromUserName(toUserName)
        textReply.setCreateTime(time.time())
        textReply.setMsgType(MessageUtil.RESP_MESSAGE_TYPE_TEXT)

        # Bug fix: respContent used to stay unbound (NameError) for message
        # types without a branch and for the event sub-types that only
        # `pass`; default to an empty reply instead.
        respContent = u''
        if msgType == MessageUtil.REQ_MESSAGE_TYPE_TEXT:
            content = requestMap.get('Content').decode('utf-8')  # note: decode first
            #respContent = u'您发送的是文本消息:' + content
            respContent = RobotService.auto_reply(content)
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_IMAGE:
            respContent = u'您发送的是图片消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VOICE:
            respContent = u'您发送的是语音消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VIDEO:
            respContent = u'您发送的是视频消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LOCATION:
            respContent = u'您发送的是地理位置消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LINK:
            respContent = u'您发送的是链接消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_EVENT:
            eventType = requestMap.get(u'Event')
            if eventType == MessageUtil.EVENT_TYPE_SUBSCRIBE:
                respContent = u'^_^谢谢您的关注,本公众号由王宁宁开发(python2.7+django1.4),如果你有兴趣继续开发,' \
                              u'可以联系我,就当打发时间了.'
            elif eventType == MessageUtil.EVENT_TYPE_UNSUBSCRIBE:
                pass
            elif eventType == MessageUtil.EVENT_TYPE_SCAN:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_LOCATION:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_CLICK:
                # TODO
                pass

        textReply.setContent(respContent)
        respXml = MessageUtil.class2xml(textReply)
        return respXml

        """
        if msgType == 'text':
            content = requestMap.get('Content')
            # TODO
        elif msgType == 'image':
            picUrl = requestMap.get('PicUrl')
            # TODO
        elif msgType == 'voice':
            mediaId = requestMap.get('MediaId')
            format = requestMap.get('Format')
            # TODO
        elif msgType == 'video':
            mediaId = requestMap.get('MediaId')
            thumbMediaId = requestMap.get('ThumbMediaId')
            # TODO
        elif msgType == 'location':
            lat = requestMap.get('Location_X')
            lng = requestMap.get('Location_Y')
            label = requestMap.get('Label')
            scale = requestMap.get('Scale')
            # TODO
        elif msgType == 'link':
            title = requestMap.get('Title')
            description = requestMap.get('Description')
            url = requestMap.get('Url')
        """
from .app import App as _App


class UserApp(_App):
    '''An object based on the relationship between a user and an app.

    A subclass of :class:`steamfront.app.App`. This will not contain any of
    the attributes for :class:`steamfront.app.App` until :meth:`unlazify`
    has been called.

    Should not be called manually - will be automatically generated with a
    :class:`steamfront.user.User` instance.

    :param dict appdata: The app data that came from the API through the user.
    :param steamfront.user.User user: The user to whom the app belongs.
    :param bool lazy: When ``False``, resolve all app attributes immediately.
    :ivar player_id: A `str` containing the player's ID.
    :ivar play_time: An `int` containing how many hours the user has in the app.
    :ivar player: The :class:`steamfront.user.User` to whom the app belongs.
    :ivar lazy: A `bool` representing whether or not the object has all of its
        aspects from :class:`steamfront.app.App`.
    '''

    def __init__(self, appdata: dict, user, lazy=True):
        self.appid = str(appdata['appid'])
        self.play_time = appdata['playtime_forever']
        self.player_id = user.id64
        self.player = user
        # Idiom fix: test truthiness instead of `lazy == False`.
        if not lazy:
            super().__init__(self.appid)
        self.lazy = lazy

    def unlazify(self):
        '''Resolve all :class:`steamfront.app.App` attributes of this app.'''
        self.lazy = False
        super().__init__(self.appid)
Veyssier # # This file is part of weboob. # # weboob is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # weboob is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with weboob. If not, see <http://www.gnu.org/licenses/>. from urlparse import urlsplit, parse_qsl, urlparse from datetime import datetime, timedelta from weboob.deprecated.browser import Browser, BrowserIncorrectPassword from weboob.capabilities.bank import Transfer, TransferError from .pages import LoginPage, LoginErrorPage, AccountsPage, UserSpacePage, EmptyPage, \ OperationsPage, CardPage, ComingPage, NoOperationsPage, InfoPage, \ TransfertPage, Change
PasswordPage, VerifCodePage __all__ = ['CICBrowser'] # Browser class CICBrowser(Browser): PROTOCOL = 'https' DOMAIN = 'www.cic.fr' CERTHASH = '9f41522275058310a6fb348504daeadd16ae852a686a91383b10ad045da76d29' ENCODING = 'iso-8859-1' USER_AGENT = Browser.USER_AGENTS['wget'] PAGES = {'https://www.cic.fr/.*/fr/banques/particuliers/index.html': LoginPage, 'https://www.cic.fr/.*/fr/identification/default.cgi': LoginErrorPage, 'https://w
ww.cic.fr/.*/fr/banque/situation_financiere.cgi': AccountsPage, 'https://www.cic.fr/.*/fr/banque/situation_financiere.html': AccountsPage, 'https://www.cic.fr/.*/fr/banque/espace_personnel.aspx': UserSpacePage, 'https://www.cic.fr/.*/fr/banque/mouvements.cgi.*': OperationsPage, 'https://www.cic.fr/.*/fr/banque/mouvements.html.*': OperationsPage, 'https://www.cic.fr/.*/fr/banque/mvts_instance.cgi.*': ComingPage, 'https://www.cic.fr/.*/fr/banque/nr/nr_devbooster.aspx.*': OperationsPage, 'https://www.cic.fr/.*/fr/banque/operations_carte\.cgi.*': CardPage, 'https://www.cic.fr/.*/fr/banque/CR/arrivee\.asp.*': NoOperationsPage, 'https://www.cic.fr/.*/fr/banque/BAD.*': InfoPage, 'https://www.cic.fr/.*/fr/banque/.*Vir.*': TransfertPage, 'https://www.cic.fr/.*/fr/validation/change_password.cgi': ChangePasswordPage, 'https://www.cic.fr/.*/fr/validation/verif_code.cgi.*': VerifCodePage, 'https://www.cic.fr/.*/fr/': EmptyPage, 'https://www.cic.fr/.*/fr/banques/index.html': EmptyPage, 'https://www.cic.fr/.*/fr/banque/paci_beware_of_phishing.html.*': EmptyPage, 'https://www.cic.fr/.*/fr/validation/(?!change_password|verif_code).*': EmptyPage, } currentSubBank = None def is_logged(self): return not self.is_on_page(LoginPage) and not self.is_on_page(LoginErrorPage) def home(self): return self.location('https://www.cic.fr/sb/fr/banques/particuliers/index.html') def login(self): assert isinstance(self.username, basestring) assert isinstance(self.password, basestring) if not self.is_on_page(LoginPage): self.location('https://www.cic.fr/', no_login=True) self.page.login(self.username, self.password) if not self.is_logged() or self.is_on_page(LoginErrorPage): raise BrowserIncorrectPassword() self.getCurrentSubBank() def get_accounts_list(self): if not self.is_on_page(AccountsPage): self.location('https://www.cic.fr/%s/fr/banque/situation_financiere.cgi' % self.currentSubBank) return self.page.get_list() def get_account(self, id): assert isinstance(id, basestring) l = 
self.get_accounts_list() for a in l: if a.id == id: return a return None def getCurrentSubBank(self): # the account list and history urls depend on the sub bank of the user url = urlparse(self.geturl()) self.currentSubBank = url.path.lstrip('/').split('/')[0] def list_operations(self, page_url): if page_url.startswith('/') or page_url.startswith('https'): self.location(page_url) else: self.location('https://%s/%s/fr/banque/%s' % (self.DOMAIN, self.currentSubBank, page_url)) go_next = True while go_next: if not self.is_on_page(OperationsPage): return for op in self.page.get_history(): yield op go_next = self.page.go_next() def get_history(self, account): transactions = [] last_debit = None for tr in self.list_operations(account._link_id): # to prevent redundancy with card transactions, we do not # store 'RELEVE CARTE' transaction. if tr.raw != 'RELEVE CARTE': transactions.append(tr) elif last_debit is None: last_debit = (tr.date - timedelta(days=10)).month coming_link = self.page.get_coming_link() if self.is_on_page(OperationsPage) else None if coming_link is not None: for tr in self.list_operations(coming_link): transactions.append(tr) month = 0 for card_link in account._card_links: v = urlsplit(card_link) args = dict(parse_qsl(v.query)) # useful with 12 -> 1 if int(args['mois']) < month: month = month + 1 else: month = int(args['mois']) for tr in self.list_operations(card_link): if month > last_debit: tr._is_coming = True transactions.append(tr) transactions.sort(key=lambda tr: tr.rdate, reverse=True) return transactions def transfer(self, account, to, amount, reason=None): # access the transfer page transfert_url = 'WI_VPLV_VirUniSaiCpt.asp?RAZ=ALL&Cat=6&PERM=N&CHX=A' self.location('https://%s/%s/fr/banque/%s' % (self.DOMAIN, self.currentSubBank, transfert_url)) # fill the form self.select_form(name='FormVirUniSaiCpt') self['IDB'] = [account[-1]] self['ICR'] = [to[-1]] self['MTTVIR'] = '%s' % str(amount).replace('.', ',') if reason is not None: self['LIBDBT'] = 
reason self['LIBCRT'] = reason self.submit() # look for known errors content = unicode(self.response().get_data(), self.ENCODING) insufficient_amount_message = u'Montant insuffisant.' maximum_allowed_balance_message = u'Solde maximum autorisé dépassé.' if content.find(insufficient_amount_message) != -1: raise TransferError('The amount you tried to transfer is too low.') if content.find(maximum_allowed_balance_message) != -1: raise TransferError('The maximum allowed balance for the target account has been / would be reached.') # look for the known "all right" message ready_for_transfer_message = u'Confirmez un virement entre vos comptes' if not content.find(ready_for_transfer_message): raise TransferError('The expected message "%s" was not found.' % ready_for_transfer_message) # submit the confirmation form self.select_form(name='FormVirUniCnf') submit_date = datetime.now() self.submit() # look for the known "everything went well" message content = unicode(self.response().get_data(), self.ENCODING) transfer_ok_message = u'Votre virement a été exécuté ce jour' if not content.find(transfer_ok_message): raise TransferError('The expected message "%s" was not found.' % transfer_ok_message) # We now have to return a Transfer object transfer = Transfer(submit_date.strftime('%Y%m%d%H%M%S')) transfer.amount =
# -*- coding: utf-8 -*-
"""
    Clustering and the EM algorithm
    ~~~~~~~~~~~~~~~~

    Clustering

    :copyright: (c) 2016 by the huaxz1986.
    :license: lgpl-3.0, see LICENSE for more details.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_blobs
# from .agglomerative_clustering import test_AgglomerativeClustering,test_AgglomerativeClustering_nclusters,test_AgglomerativeClustering_linkage
# from .dbscan import test_DBSCAN,test_DBSCAN_epsilon,test_DBSCAN_min_samples
from chapters.Cluster_EM.gmm import test_GMM,test_GMM_cov_type,test_GMM_n_components
# from .kmeans import test_Kmeans,test_Kmeans_n_init,test_Kmeans_nclusters

def create_data(centers,num=100,std=0.7):
    '''
    Generate a data set for clustering.

    :param centers: array made up of the cluster centres. When the centres
        are two-dimensional, every generated sample is two-dimensional too.
    :param num: number of samples.
    :param std: standard deviation of the samples inside each cluster.
    :return: a tuple: the first element is the sample set, the second element
        is the ground-truth cluster label of each sample.
    '''
    X, labels_true = make_blobs(n_samples=num, centers=centers, cluster_std=std)
    return X,labels_true

def plot_data(*data):
    '''
    Plot the data set used for clustering.

    :param data: variadic argument; a tuple whose first element is the sample
        set and whose second element is the ground-truth cluster labels.
    :return: None
    '''
    X,labels_true=data
    labels=np.unique(labels_true)
    fig=plt.figure()
    ax=fig.add_subplot(1,1,1)
    colors='rgbyckm'  # each cluster's samples get a different colour
    for i,label in enumerate(labels):
        position=labels_true==label
        ax.scatter(X[position,0],X[position,1],label="cluster %d"%label,
        color=colors[i%len(colors)])

    ax.legend(loc="best",framealpha=0.5)
    ax.set_xlabel("X[0]")
    ax.set_ylabel("Y[1]")
    ax.set_title("data")
    plt.show()

if __name__=='__main__':
    centers=[[1,1],[2,2],[1,2],[10,20]]  # centres used to generate the clusters
    X,labels_true=create_data(centers,1000,0.5)  # generate the data set for clustering
    # plot_data(X,labels_true)  # plot the data set for clustering
    # test_Kmeans(X,labels_true)  # call the test_Kmeans function
    # test_Kmeans_nclusters(X,labels_true)  # call the test_Kmeans_nclusters function
    # test_Kmeans_n_init(X,labels_true)  # call the test_Kmeans_n_init function
    # test_DBSCAN(X,labels_true)  # call the test_DBSCAN function
    # test_DBSCAN_epsilon(X,labels_true)  # call the test_DBSCAN_epsilon function
    # test_DBSCAN_min_samples(X,labels_true)  # call the test_DBSCAN_min_samples function
    # test_AgglomerativeClustering(X,labels_true)  # call the test_AgglomerativeClustering function
    # test_AgglomerativeClustering_nclusters(X,labels_true)  # call the test_AgglomerativeClustering_nclusters function
    # test_AgglomerativeClustering_linkage(X,labels_true)  # call the test_AgglomerativeClustering_linkage function
    # test_GMM(X,labels_true)  # call the test_GMM function
    # test_GMM_n_components(X,labels_true)  # call the test_GMM_n_components function
    test_GMM_cov_type(X,labels_true)  # call the test_GMM_cov_type function
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utilities for dashboard module.  Separated here to break include loops."""

__author__ = 'Mike Gainer (mgainer@google.com)'

import os

import appengine_config
from controllers import sites
from models import vfs

RESOURCES_PATH = '/modules/dashboard/resources'

RESOURCES_DIR = os.path.join(appengine_config.BUNDLE_ROOT,
                             RESOURCES_PATH.lstrip('/'))


def build_assets_url(tab_name):
    """Returns the dashboard URL that shows the given assets tab."""
    return '/dashboard?action=assets&tab=%s' % tab_name


def list_files(handler, subfolder, merge_local_files=False, all_paths=None):
    """Makes a list of files in a subfolder.

    Args:
        handler: webapp request handler.
        subfolder: string. Relative path of the subfolder to list.
        merge_local_files: boolean. If True, the returned list will contain
            files found on either the datastore filesystem or the read-only
            local filesystem. If a file is found on both, its datastore
            filesystem version will trump its local filesystem version.
        all_paths: list. A list of all file paths in the underlying file
            system.

    Returns:
        List of relative, normalized file path strings.
    """
    home = sites.abspath(handler.app_context.get_home_folder(), '/')
    # Hoisted out of the loop below: this prefix is invariant across paths,
    # so compute it once instead of once per path.
    subfolder_prefix = sites.abspath(
        handler.app_context.get_home_folder(), subfolder)

    if all_paths is not None:
        # Restrict the externally supplied path list to the subfolder.
        paths = set(
            path for path in all_paths if path.startswith(subfolder_prefix))
    else:
        paths = set(handler.app_context.fs.list(subfolder_prefix))

    if merge_local_files:
        # Union in read-only local files; set semantics make datastore
        # entries trump identical local paths.
        local_fs = vfs.LocalReadOnlyFileSystem(logical_home_folder='/')
        paths = paths.union(set(
            os.path.join(appengine_config.BUNDLE_ROOT, path)
            for path in local_fs.list(subfolder[1:])))

    result = []
    for abs_filename in paths:
        filename = os.path.relpath(abs_filename, home)
        result.append(vfs.AbstractFileSystem.normpath(filename))
    return sorted(result)
# -*- coding: utf-8 -*-


def social_blblbl(entity, argument):
    """Stub social hook.

    Reports success unconditionally, regardless of *entity* or *argument*.
    """
    accepted = True
    return accepted
# - End of function -
from __future__ import unicode_literals

import importlib
import os
import sys

from django.apps import apps
from django.utils import datetime_safe, six
from django.utils.six.moves import input

from .loader import MIGRATIONS_MODULE_NAME


class MigrationQuestioner(object):
    """
    Gives the autodetector responses to questions it might have.
    This base class has a built-in noninteractive mode, but the
    interactive subclass is what the command-line arguments will use.
    """

    def __init__(self, defaults=None, specified_apps=None, dry_run=None):
        # defaults: canned answers used by the noninteractive base class.
        self.defaults = defaults or {}
        # specified_apps: app labels explicitly named on the command line.
        self.specified_apps = specified_apps or set()
        self.dry_run = dry_run

    def ask_initial(self, app_label):
        "Should we create an initial migration for the app?"
        # If it was specified on the command line, definitely true
        if app_label in self.specified_apps:
            return True
        # Otherwise, we look to see if it has a migrations module
        # without any Python files in it, apart from __init__.py.
        # Apps from the new app template will have these; the python
        # file check will ensure we skip South ones.
        try:
            app_config = apps.get_app_config(app_label)
        except LookupError:
            # It's a fake app.
            return self.defaults.get("ask_initial", False)
        migrations_import_path = "%s.%s" % (app_config.name, MIGRATIONS_MODULE_NAME)
        try:
            migrations_module = importlib.import_module(migrations_import_path)
        except ImportError:
            # No migrations package at all: fall back to the default answer.
            return self.defaults.get("ask_initial", False)
        else:
            if hasattr(migrations_module, "__file__"):
                filenames = os.listdir(os.path.dirname(migrations_module.__file__))
            elif hasattr(migrations_module, "__path__"):
                # Namespace package split over several directories cannot be
                # treated as a plain migrations module; decline.
                if len(migrations_module.__path__) > 1:
                    return False
                filenames = os.listdir(list(migrations_module.__path__)[0])
            # Initial only if the package holds no .py files besides
            # __init__.py (i.e. no existing migrations, and not a South app).
            return not any(x.endswith(".py") for x in filenames if x != "__init__.py")

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        # None means quit
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        return self.defaults.get("ask_rename", False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        return self.defaults.get("ask_rename_model", False)

    def ask_merge(self, app_label):
        "Do you really want to merge these migrations?"
        return self.defaults.get("ask_merge", False)


class InteractiveMigrationQuestioner(MigrationQuestioner):
    """Questioner that prompts the user on the console for each decision."""

    def _boolean_input(self, question, default=None):
        # Keep prompting until the answer starts with 'y' or 'n'; an empty
        # first answer falls back to the default, when one is given.
        result = input("%s " % question)
        if not result and default is not None:
            return default
        while len(result) < 1 or result[0].lower() not in "yn":
            result = input("Please answer yes or no: ")
        return result[0].lower() == "y"

    def _choice_input(self, question, choices):
        # Present a numbered menu and return the chosen 1-based index.
        print(question)
        for i, choice in enumerate(choices):
            print(" %s) %s" % (i + 1, choice))
        result = input("Select an option: ")
        while True:
            try:
                value = int(result)
                if 0 < value <= len(choices):
                    return value
            except ValueError:
                pass
            result = input("Please select a valid option: ")

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to add a non-nullable field '%s' to %s without a default;\n" % (field_name, model_name) +
                "we can't do that (the database needs something to populate existing rows).\n" +
                "Please select a fix:",
                [
                    "Provide a one-off default now (will be set on all existing rows)",
                    "Quit, and let me add a default in models.py",
                ]
            )
            if choice == 2:
                sys.exit(3)
            else:
                print("Please enter the default value now, as valid Python")
                print("The datetime module is available, so you can do e.g. datetime.date.today()")
                while True:
                    if six.PY3:
                        # Six does not correctly abstract over the fact that
                        # py3 input returns a unicode string, while py2 raw_input
                        # returns a bytestring.
                        code = input(">>> ")
                    else:
                        code = input(">>> ").decode(sys.stdin.encoding)
                    if not code:
                        print("Please enter some code, or 'exit' (with no quotes) to exit.")
                    elif code == "exit":
                        sys.exit(1)
                    else:
                        try:
                            # Evaluate the user's expression with only the
                            # (safe) datetime replacement module in scope.
                            return eval(code, {}, {"datetime": datetime_safe})
                        except (SyntaxError, NameError) as e:
                            print("Invalid input: %s" % e)
        # Dry runs never prompt; None signals "no default provided".
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)? [y/N]" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        return self._boolean_input("Did you rename the %s.%s model to %s? [y/N]" % (old_model_state.app_label, old_model_state.name, new_model_state.name), False)

    def ask_merge(self, app_label):
        # Merging conflicting migration leaves is destructive-ish; default No.
        return self._boolean_input(
            "\nMerging will only work if the operations printed above do not conflict\n" +
            "with each other (working on different fields or models)\n" +
            "Do you want to merge these migration branches? [y/N]",
            False,
        )
# Module-level constant; the quoted text is an opaque sample default value
# (presumably consumed by tests elsewhere in the project -- TODO confirm).
TEST_DEFAULT = 'def value'
import json
from typing import TYPE_CHECKING, Optional

from boxsdk.util.text_enum import TextEnum
from boxsdk.exception import BoxAPIException
from .base_object import BaseObject

if TYPE_CHECKING:
    from boxsdk.object.user import User
    from boxsdk.object.terms_of_service_user_status import TermsOfServiceUserStatus


class TermsOfServiceType(TextEnum):
    """An enum of possible terms of service types"""
    MANAGED = 'managed'
    EXTERNAL = 'external'


class TermsOfServiceStatus(TextEnum):
    """An enum of possible terms of service status"""
    ENABLED = 'enabled'
    DISABLED = 'disabled'


class TermsOfService(BaseObject):
    """Represents a Box terms of service."""

    _item_type = 'terms_of_service'

    def get_user_status(self, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Get the terms of service user status.

        :param user:
            This is the user to get the status of the terms of service for. This defaults to current user.
        :returns:
            A :class:`TermsOfServiceUserStatus` object
        """
        url = self._session.get_url('terms_of_service_user_statuses')
        additional_params = {
            'tos_id': self.object_id,
        }
        if user is not None:
            additional_params['user_id'] = user.object_id
        box_response = self._session.get(url, params=additional_params)
        response_object = box_response.json()
        # The endpoint returns a collection; filtered by tos_id (and
        # optionally user_id), the first entry is the matching status.
        response = response_object['entries'][0]
        return self.translator.translate(
            session=self._session,
            response_object=response,
        )

    def accept(self, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Accept a terms of service.

        :param user:
            The :class:`User` to assign the terms of service to.
        :returns:
            A newly created :class:`TermsOfServiceUserStatus` object
        """
        return self.set_user_status(is_accepted=True, user=user)

    def reject(self, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Reject a terms of service.

        :param user:
            The :class:`User` to assign the terms of service to.
        :returns:
            A newly created :class:`TermsOfServiceUserStatus` object
        """
        return self.set_user_status(is_accepted=False, user=user)

    def set_user_status(self, is_accepted: bool, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Create or update a terms of service user status.

        :param is_accepted:
            Indicates whether a user has accepted or rejected a terms of service.
        :param user:
            The :class:`User` to assign the terms of service to.
        :returns:
            A newly created or updated :class:`TermsOfServiceUserStatus` object
        :raises BoxAPIException:
            For any API failure other than a 409 conflict (a conflict is
            handled by updating the pre-existing status instead).
        """
        url = self._session.get_url('terms_of_service_user_statuses')
        body = {
            'tos': {
                'type': self.object_type,
                'id': self.object_id,
            },
            'is_accepted': is_accepted,
        }
        if user is not None:
            body['user'] = {
                'type': user.object_type,
                'id': user.object_id,
            }
        try:
            box_response = self._session.post(url, data=json.dumps(body))
            response = box_response.json()
            return self.translator.translate(
                session=self._session,
                response_object=response,
            )
        except BoxAPIException as err:
            if err.status != 409:
                # Bug fix: previously every BoxAPIException was swallowed
                # here and None was silently returned for non-409 errors.
                raise
            # 409: a status already exists for this user -- update it.
            user_status = self.get_user_status(user)
            return user_status.update_info(data={'is_accepted': is_accepted})
""" WSGI config for myproject project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "mypro
ject.settings") from django.core.wsgi import get_wsgi_application # flake8: noqa application = get_wsgi_application()
from django.db import models
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _, ugettext

# Fallback author email used when a user opts out of exposing theirs.
STANDARD_EMAIL = "anonymous@readthedocs.org"


class UserProfile (models.Model):
    """Additional information about a User."""

    # One profile per user: a unique FK rather than a OneToOneField.
    user = models.ForeignKey(User, verbose_name=_('User'), unique=True,
                             related_name='profile')
    # NOTE(review): no default declared for this BooleanField -- confirm an
    # explicit value is always supplied when profiles are created via forms.
    whitelisted = models.BooleanField(_('Whitelisted'))
    homepage = models.CharField(_('Homepage'), max_length=100, blank=True)
    # When False, get_contribution_details() substitutes STANDARD_EMAIL.
    allow_email = models.BooleanField(_('Allow email'),
                                      help_text=_('Show your email on VCS contributions.'),
                                      default=True)

    def __unicode__(self):
        # Python 2 string representation, e.g. "eric's profile".
        return ugettext("%(username)s's profile") % {'username': self.user.username}

    def get_absolute_url(self):
        # models.permalink (deprecated in later Django) resolves this
        # (view name, args, kwargs) triple through the URLconf.
        return ('profiles_profile_detail', (), {'username': self.user.username})
    get_absolute_url = models.permalink(get_absolute_url)

    def get_contribution_details(self):
        """
        Gets the line to put into commits to attribute the author.

        Returns a tuple (name, email)
        """
        # Prefer the user's real name; fall back to the username.
        if self.user.first_name and self.user.last_name:
            name = '%s %s' % (self.user.first_name, self.user.last_name)
        else:
            name = self.user.username
        if self.allow_email:
            email = self.user.email
        else:
            email = STANDARD_EMAIL
        return (name, email)


@receiver(post_save, sender=User)
def create_profile(sender, **kwargs):
    # Auto-create an empty profile whenever a brand-new User row is saved.
    if kwargs['created'] is True:
        try:
            UserProfile.objects.create(user_id=kwargs['instance'].id)
        except DatabaseError:
            # Best-effort: swallow DB errors (e.g. the profile table may not
            # exist yet during initial syncdb).
            pass
# -*- coding: utf-8 -*-
import pytest

from .utils import last_activity


@pytest.mark.usefixtures('versioning_manager', 'table_creator')
class TestActivityCreationWithColumnExclusion(object):
    """Audit activities for a table whose 'age' column is excluded."""

    @pytest.fixture
    def audit_trigger_creator(self, session, user_class):
        # Install the audit trigger with 'age' in the excluded-columns set
        # (second argument of audit_table).
        session.execute(
            '''SELECT audit_table('{0}', '{{"age"}}')'''.format(
                user_class.__tablename__
            )
        )

    @pytest.fixture
    def user(self, session, user_class, audit_trigger_creator):
        # Flush (not commit) so the INSERT fires the trigger inside the
        # test transaction.
        user = user_class(name='John', age=15)
        session.add(user)
        session.flush()
        return user

    def test_insert(self, user, connection):
        activity = last_activity(connection)
        # 'age' was written (15) but never appears: it is excluded.
        assert activity['old_data'] == {}
        assert activity['changed_data'] == {
            'id': user.id,
            'name': 'John'
        }
        assert activity['table_name'] == 'user'
        assert activity['native_transaction_id'] > 0
        assert activity['verb'] == 'insert'

    def test_update(self, user, session):
        user.name = 'Luke'
        user.age = 18
        session.flush()
        activity = last_activity(session)
        # Only the non-excluded change ('name') is recorded.
        assert activity['changed_data'] == {'name': 'Luke'}
        assert activity['old_data'] == {
            'id': user.id,
            'name': 'John',
        }
        assert activity['table_name'] == 'user'
        assert activity['native_transaction_id'] > 0
        assert activity['verb'] == 'update'

    def test_delete(self, user, session):
        session.delete(user)
        session.flush()
        activity = last_activity(session)
        assert activity['changed_data'] == {}
        # Deletions record the row's final state, minus excluded columns.
        assert activity['old_data'] == {
            'id': user.id,
            'name': 'John',
        }
        assert activity['table_name'] == 'user'
        assert activity['native_transaction_id'] > 0
        assert activity['verb'] == 'delete'
ttp-v10-spec-00.txt> H. Frystyk Nielsen # Expires September 8, 1995 March 8, 1995 # # URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt # # and # # Network Working Group R. Fielding # Request for Comments: 2616 et al # Obsoletes: 2068 June 1999 # Category: Standards Track # # URL: http://www.faqs.org/rfcs/rfc2616.html # Log files # --------- # # Here's a quote from the NCSA httpd docs about log file format. # # | The logfile format is as follows. Each line consists of: # | # | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" dd
d bbbb # | # | host: Either the DNS name or the IP number of the remote client # | rfc931: Any information returned by identd for this person, # | - otherwise. # | authuser: If user sent a userid for authentication, the user name, # | - otherwise. # | DD: Day # | Mon: Month (calendar name) # | YYYY: Year # | hh: hour (24-hour format, the machine's timezone) # | mm: minutes # | ss: seconds # | reques
t: The first line of the HTTP request as sent by the client. # | ddd: the status code returned by the server, - if not available. # | bbbb: the total number of bytes sent, # | *not including the HTTP/1.0 header*, - if not available # | # | You can determine the name of the file accessed through request. # # (Actually, the latter is only true if you know the server configuration # at the time the request was made!) __version__ = "0.6" __all__ = [ "HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler", ] import copy import datetime import email.utils import html import http.client import io import mimetypes import os import posixpath import select import shutil import socket # For gethostbyaddr() import socketserver import sys import time import urllib.parse import contextlib from functools import partial from http import HTTPStatus # Default error message template DEFAULT_ERROR_MESSAGE = """\ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head> <meta http-equiv="Content-Type" content="text/html;charset=utf-8"> <title>Error response</title> </head> <body> <h1>Error response</h1> <p>Error code: %(code)d</p> <p>Message: %(message)s.</p> <p>Error code explanation: %(code)s - %(explain)s.</p> </body> </html> """ DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" class HTTPServer(socketserver.TCPServer): allow_reuse_address = 1 # Seems to make sense in testing environment def server_bind(self): """Override server_bind to store the server name.""" socketserver.TCPServer.server_bind(self) host, port = self.server_address[:2] self.server_name = socket.getfqdn(host) self.server_port = port class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): daemon_threads = True class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): """HTTP request handler base class. 
The following explanation of HTTP serves to guide you through the code as well as to expose any misunderstandings I may have about HTTP (so you don't need to read the code to figure out I'm wrong :-). HTTP (HyperText Transfer Protocol) is an extensible protocol on top of a reliable stream transport (e.g. TCP/IP). The protocol recognizes three parts to a request: 1. One line identifying the request type and path 2. An optional set of RFC-822-style headers 3. An optional data part The headers and data are separated by a blank line. The first line of the request has the form <command> <path> <version> where <command> is a (case-sensitive) keyword such as GET or POST, <path> is a string containing path information for the request, and <version> should be the string "HTTP/1.0" or "HTTP/1.1". <path> is encoded using the URL encoding scheme (using %xx to signify the ASCII character with hex code xx). The specification specifies that lines are separated by CRLF but for compatibility with the widest range of clients recommends servers also handle LF. Similarly, whitespace in the request line is treated sensibly (allowing multiple spaces between components and allowing trailing whitespace). Similarly, for output, lines ought to be separated by CRLF pairs but most clients grok LF characters just fine. If the first line of the request has the form <command> <path> (i.e. <version> is left out) then this is assumed to be an HTTP 0.9 request; this form has no optional headers and data part and the reply consists of just the data. The reply form of the HTTP 1.x protocol again has three parts: 1. One line giving the response code 2. An optional set of RFC-822-style headers 3. The data Again, the headers and data are separated by a blank line. 
The response code line has the form <version> <responsecode> <responsestring> where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"), <responsecode> is a 3-digit response code indicating success or failure of the request, and <responsestring> is an optional human-readable string explaining what the response code means. This server parses the request and the headers, and then calls a function specific to the request type (<command>). Specifically, a request SPAM will be handled by a method do_SPAM(). If no such method exists the server sends an error response to the client. If it exists, it is called with no arguments: do_SPAM() Note that the request name is case sensitive (i.e. SPAM and spam are different requests). The various request details are stored in instance variables: - client_address is the client IP address in the form (host, port); - command, path and version are the broken-down request line; - headers is an instance of email.message.Message (or a derived class) containing the header information; - rfile is a file object open for reading positioned at the start of the optional input data part; - wfile is a file object open for writing. IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! The first thing to be written must be the response line. Then follow 0 or more header lines, then a blank line, and then the actual data (if any). The meaning of the header lines depends on the command executed by the server; in most cases, when data is returned, there should be at least one header line of the form Content-type: <type>/<subtype> where <type> and <subtype> should be registered MIME types, e.g. "text/html" or "text/plain". """ # The Python system version, truncated to its first component. sys_version = "Python/" + sys.version.split()[0] # The server software version. You may want to override this. # The format is multiple whitespace-separated strings, # where each string is of the form name[/version]. 
server_version = "BaseHTTP/" + __version__ error_message_format = DEFAULT_ERROR_MESSAGE error_content_type = DEFAULT_ERROR_CONTENT_TYPE # The default request version. This only affects responses up until # the point where the request line is parsed, so it mainly decides what # the client gets back when sending a malformed request line. # Most web servers default to HTTP 0.9, i.e. don't send a status line. default_request_version = "HTTP/0.9" def parse_request(self): """Parse a request (internal). The request should be stored in self.raw_requestline; the results are in self.command, self.path, self.request_version and self.headers. Return True for success, False for failure; on failure, any relevant
('WRITE', AllUsers()),
        ('FULL_CONTROL', User(owner.name)),
    ],
    'authenticated-read': [
        ('READ', AuthenticatedUsers()),
        ('FULL_CONTROL', User(owner.name)),
    ],
    'bucket-owner-read': [
        ('READ', User(bucket_owner.name)),
        ('FULL_CONTROL', User(owner.name)),
    ],
    'bucket-owner-full-control': [
        ('FULL_CONTROL', User(owner.name)),
        ('FULL_CONTROL', User(bucket_owner.name)),
    ],
    'log-delivery-write': [
        ('WRITE', LogDelivery()),
        ('READ_ACP', LogDelivery()),
        ('FULL_CONTROL', User(owner.name)),
    ],
}


class AuthenticatedUsers(Group):
    """
    This group represents all AWS accounts.  Access permission to this group
    allows any AWS account to access the resource.  However, all requests must
    be signed (authenticated).
    """
    uri = 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'

    def __contains__(self, key):
        # Swift3 handles only signed requests.
        return True


class AllUsers(Group):
    """
    Access permission to this group allows anyone to access the resource.  The
    requests can be signed (authenticated) or unsigned (anonymous).  Unsigned
    requests omit the Authentication header in the request.

    Note: Swift3 regards unsigned requests as Swift API accesses, and bypasses
    them to Swift.  As a result, AllUsers behaves completely same as
    AuthenticatedUsers.
    """
    uri = 'http://acs.amazonaws.com/groups/global/AllUsers'

    def __contains__(self, key):
        return True


class LogDelivery(Group):
    """
    WRITE and READ_ACP permissions on a bucket enables this group to write
    server access logs to the bucket.
    """
    uri = 'http://acs.amazonaws.com/groups/s3/LogDelivery'

    def __contains__(self, key):
        # Membership is granted only to the configured log-delivery user;
        # keys of the form 'tenant:user' are split and matched on the user.
        if ':' in key:
            tenant, user = key.split(':', 1)
        else:
            user = key
        return user == LOG_DELIVERY_USER


class Grant(object):
    """
    Grant Class which includes both Grantee and Permission
    """
    def __init__(self, grantee, permission):
        """
        :param grantee: a grantee class or its subclass
        :param permission: string
        """
        if permission.upper() not in PERMISSIONS:
            raise S3NotImplemented()
        if not isinstance(grantee, Grantee):
            # NOTE(review): bare `raise` with no active exception produces a
            # RuntimeError('No active exception to re-raise') at runtime.
            # Presumably a specific S3 error was intended here -- TODO confirm
            # the intended exception type and fix.
            raise
        self.grantee = grantee
        self.permission = permission

    @classmethod
    def from_elem(cls, elem):
        """
        Convert an ElementTree to an ACL instance
        """
        grantee = Grantee.from_elem(elem.find('./Grantee'))
        permission = elem.find('./Permission').text
        return cls(grantee, permission)

    def elem(self):
        """
        Create an etree element.
        """
        elem = Element('Grant')
        elem.append(self.grantee.elem())
        SubElement(elem, 'Permission').text = self.permission
        return elem

    def allow(self, grantee, permission):
        # True when this grant covers exactly the requested permission and
        # the requester is covered by the grantee (via __contains__).
        return permission == self.permission and grantee in self.grantee


class ACL(object):
    """
    S3 ACL class.

    Refs (S3 API - acl-overview:
          http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html):

    The sample ACL includes an Owner element identifying the owner via the
    AWS account's canonical user ID. The Grant element identifies the grantee
    (either an AWS account or a predefined group), and the permission granted.
    This default ACL has one Grant element for the owner. You grant permissions
    by adding Grant elements, each grant identifying the grantee and the
    permission.
    """
    metadata_name = 'acl'
    root_tag = 'AccessControlPolicy'
    max_xml_length = 200 * 1024

    def __init__(self, owner, grants=[]):
        """
        :param owner: Owner Class for ACL instance
        """
        # NOTE(review): mutable default argument -- every ACL constructed
        # without explicit grants shares the same list object; this is only
        # safe if callers never mutate self.grants in place. Consider
        # `grants=None` with a fresh list inside the body.
        self.owner = owner
        self.grants = grants

    @classmethod
    def from_elem(cls, elem):
        """
        Convert an ElementTree to an ACL instance
        """
        id = elem.find('./Owner/ID').text
        try:
            name = elem.find('./Owner/DisplayName').text
        except AttributeError:
            # DisplayName element is absent; fall back to the canonical ID.
            name = id

        grants = [Grant.from_elem(e)
                  for e in elem.findall('./AccessControlList/Grant')]
        return cls(Owner(id, name), grants)

    def elem(self):
        """
        Decode the value to an ACL instance.
        """
        elem = Element(self.root_tag)

        owner = SubElement(elem, 'Owner')
        SubElement(owner, 'ID').text = self.owner.id
        SubElement(owner, 'DisplayName').text = self.owner.name

        SubElement(elem, 'AccessControlList').extend(
            g.elem() for g in self.grants
        )

        return elem

    def check_owner(self, user_id):
        """
        Check that the user is an owner.
        """
        if not CONF.s3_acl:
            # Ignore Swift3 ACL.
            return

        if not self.owner.id:
            if CONF.allow_no_owner:
                # No owner means public.
                return
            raise AccessDenied()

        if user_id != self.owner.id:
            raise AccessDenied()

    def check_permission(self, user_id, permission):
        """
        Check that the user has a permission.
        """
        if not CONF.s3_acl:
            # Ignore Swift3 ACL.
            return

        try:
            # owners have full control permission
            self.check_owner(user_id)
            return
        except AccessDenied:
            pass

        if permission in PERMISSIONS:
            for g in self.grants:
                if g.allow(user_id, 'FULL_CONTROL') or \
                        g.allow(user_id, permission):
                    return

        raise AccessDenied()

    @classmethod
    def from_headers(cls, headers, bucket_owner, object_owner=None,
                     as_private=True):
        """
        Convert HTTP headers to an ACL instance.
        """
        grants = []
        try:
            # Explicit x-amz-grant-* headers first.
            for key, value in headers.items():
                if key.lower().startswith('x-amz-grant-'):
                    permission = key[len('x-amz-grant-'):]
                    permission = permission.upper().replace('-', '_')
                    if permission not in PERMISSIONS:
                        continue
                    for grantee in value.split(','):
                        grants.append(
                            Grant(Grantee.from_header(grantee), permission))

            if 'x-amz-acl' in headers:
                try:
                    acl = headers['x-amz-acl']
                    # Canned ACLs and explicit grants are mutually exclusive.
                    if len(grants) > 0:
                        err_msg = 'Specifying both Canned ACLs and Header ' \
                            'Grants is not allowed'
                        raise InvalidRequest(err_msg)
                    grantees = canned_acl_grantees(
                        bucket_owner, object_owner)[acl]
                    for permission, grantee in grantees:
                        grants.append(Grant(grantee, permission))
                except KeyError:
                    # expects canned_acl_grantees()[] raises KeyError
                    raise InvalidArgument('x-amz-acl', headers['x-amz-acl'])
        except (KeyError, ValueError):
            # TODO: think about we really catch this except sequence
            raise InvalidRequest()

        if len(grants) == 0:
            # No ACL headers
            if as_private:
                return ACLPrivate(bucket_owner, object_owner)
            else:
                return None

        return cls(object_owner or bucket_owner, grants)


class CannedACL(object):
    """
    A dict-like object that returns canned ACL.
    """
    def __getitem__(self, key):
        # Builds the ACL lazily from the canned-grantee table; the rest of
        # this definition continues beyond this excerpt.
        def acl(key, bucket_owner, object_owner=None):
            grants = []
            grantees = canned_acl_grantees(bucket_owner, object_owner)[key]
            for permission, grantee in grantees:
                grants.append(Grant(grantee, permission))
            return ACL(object_owner or bucket_owner, grants)
import unittest fr
om scrapers.journalscrapers import ElsevierScraper class TestElsevierScraper(unittest.TestCase): def setUp(self): self.instance = ElsevierS
craper("../data/elsevier/2016-uncleaned.csv") def test_strip_chars(self): for row in self.instance.get_entries(): print row
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Marcus Müller.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

import remote_agent
import task_frontend
import helpers
import benchmarking_task
from gnuradio import gr, gr_unittest
try:
    import mtb_swig as mtb
except ImportError:
    # Optional SWIG module; the tests below do not require it directly.
    pass
import gc
import json
import numpy
import os
import tempfile
import time
try:
    import cStringIO as StringIO
except ImportError:
    import StringIO
from PyQt4 import QtGui
from PyQt4 import QtCore


class MyApplicationClass(QtGui.QApplication):
    """QApplication subclass that announces when the event loop starts."""

    # Emitted just before the Qt event loop is entered.
    started = QtCore.pyqtSignal()

    def exec_(self):
        # Fire the signal first so listeners can schedule work, then hand
        # control to the regular Qt event loop.
        self.started.emit()
        return QtGui.QApplication.exec_()


class qa_task_frontend (gr_unittest.TestCase):
    """Tests for the task_frontend GUI's JSON task-file loading."""

    def setUp(self):
        # Reference task description mirroring the on-disk JSON format
        # expected by TaskFrontend.
        self.taskstring = ""
        self.task = []
        self.range_spec = (0, 1, 100)
        self.ref_task_grc = {
            "class_name": "class",
            "module_name": "module",
            "instruction": "run_grc",
            "attributes": {
                "value": {
                    "param_type": "LIN_RANGE",
                    "value": list(self.range_spec),
                    "value_type": "float64"
                },
                "length": {
                    "param_type": "LIST",
                    "value": [10, 20, 30],
                    "value_type": "int64"
                },
            },
            "sinks": [
                "blocks_vector_sink_x_0"
            ]
        }
        # Embed the companion GRC flowgraph XML (located next to this file)
        # into the task description.
        self.xml_file = open(os.path.join(os.path.dirname(__file__),
                             "extraction_test_topblock.grc"), "r")
        self.ref_task_grc["grcxml"] = self.xml_file.read()
        self.xml_file.close()
        # delete=False so the file survives close() and can be handed to the
        # frontend by name; tearDown removes it explicitly.
        self.jsonfile = tempfile.NamedTemporaryFile(suffix=".json",
                                                    delete=False)
        self.jsonfilename = self.jsonfile.name
        json.dump(self.ref_task_grc, self.jsonfile)
        self.jsonfile.close()
        # A QApplication must exist before any widget is created.
        self.qapp = MyApplicationClass([])

    def tearDown(self):
        # Remove the temporary JSON task file created in setUp.
        os.unlink(self.jsonfilename)

    def test_001_load_json_file(self):
        # Loading the reference task file into the frontend must not raise.
        self.my_ui = task_frontend.TaskFrontend()
        self.my_ui._load_json_file_direct(self.jsonfilename)

if __name__ == '__main__':
    gr_unittest.run(qa_task_frontend)#, "qa_task_frontend.xml")
# ITERATING DICTIONARY d = {'x
':1, 'y':2, 'z':3} for key in d: print
key, 'corresponds to', d[key]
#!/usr/bin/python
#
# \file 0_setup.py
# \brief Setup rbank
# \date 2009-03-10-22-43-GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Setup rbank
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010  Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")

# Start a fresh log for this pipeline step.
if os.path.isfile("log.log"):
	os.remove("log.log")
log = open("log.log", "w")

# NOTE(review): these wildcard imports come from the "../../configuration"
# directory appended to sys.path above; they supply printLog, mkPath and the
# *Directory path constants used below.
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *

printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Setup rbank")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")

# Setup source directories
printLog(log, ">>> Setup source directories <<<")
for dir in RBankCmbSourceDirectories:
	mkPath(log, DatabaseDirectory + "/" + dir)
mkPath(log, LeveldesignWorldDirectory)

# Setup export directories
printLog(log, ">>> Setup export directories <<<")
mkPath(log, ExportBuildDirectory + "/" + RBankCmbExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + RBankCmbTagExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + SmallbankExportDirectory)

# Setup build directories
printLog(log, ">>> Setup build directories <<<")
mkPath(log, ExportBuildDirectory + "/" + ZoneWeldBuildDirectory)
for dir in IgLookupDirectories:
	mkPath(log, ExportBuildDirectory + "/" + dir)
for dir in ShapeLookupDirectories:
	mkPath(log, ExportBuildDirectory + "/" + dir)
mkPath(log, ExportBuildDirectory + "/" + RbankBboxBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgLandBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgOtherBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankTessellationBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankSmoothBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRawBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankPreprocBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRetrieversBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankOutputBuildDirectory)

# Setup client directories
printLog(log, ">>> Setup client directories <<<")
mkPath(log, InstallDirectory + "/" + PacsInstallDirectory)

log.close()

# end of file
from jupyter_workflow.data import get_fremont_data
import pandas as pd


def test_fremont_data():
    """Sanity-check the Fremont bridge bicycle-count loader."""
    df = get_fremont_data()
    # Column order is part of the contract: West, East, then derived Total.
    assert all(df.columns == ['West', 'East', 'Total'])
    # Timestamps must have been parsed into a proper DatetimeIndex.
    assert isinstance(df.index, pd.DatetimeIndex)
# -*- coding: utf-8 -*-
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db import migrations


def clear_message_sent_by_message_type_values(apps, schema_editor):
    # type: (StateApps, DatabaseSchemaEditor) -> None
    """Delete all analytics rows recorded under the
    'messages_sent:message_type:day' property, across every count table
    and the FillState bookkeeping table.
    """
    # Renamed from `property` to avoid shadowing the builtin; the five
    # identical delete statements are collapsed into one loop over the
    # historical models.
    property_name = 'messages_sent:message_type:day'
    for model_name in ('UserCount', 'StreamCount', 'RealmCount',
                       'InstallationCount', 'FillState'):
        model = apps.get_model('analytics', model_name)
        model.objects.filter(property=property_name).delete()


class Migration(migrations.Migration):

    dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]

    operations = [
        migrations.RunPython(clear_message_sent_by_message_type_values),
    ]
class Config:
    """String keys (and a few defaults) for streaming job configuration."""

    # Job / worker identification keys.
    STREAMING_JOB_NAME = "streaming.job.name"
    STREAMING_OP_NAME = "streaming.op_name"
    TASK_JOB_ID = "streaming.task_job_id"
    STREAMING_WORKER_NAME = "streaming.worker_name"

    # channel
    CHANNEL_TYPE = "channel_type"
    MEMORY_CHANNEL = "memory_channel"
    NATIVE_CHANNEL = "native_channel"
    CHANNEL_SIZE = "channel_size"
    # Default channel capacity (10^8 entries).
    CHANNEL_SIZE_DEFAULT = 10**8
    IS_RECREATE = "streaming.is_recreate"
    # return from StreamingReader.getBundle if only empty message read in this
    # interval.
    TIMER_INTERVAL_MS = "timer_interval_ms"
    STREAMING_RING_BUFFER_CAPACITY = "streaming.ring_buffer_capacity"
    # write an empty message if there is no data to be written in this
    # interval.
    STREAMING_EMPTY_MESSAGE_INTERVAL = "streaming.empty_message_interval"

    # operator type
    OPERATOR_TYPE = "operator_type"
    # NOTE(review): truncated tail of an earlier method -- left untouched.
    filetype])
        self['outputtypecombo'].set_active(0)

    @classmethod
    def filedialog(cls, filetypes, foroutput=False, folder=False):
        """Sets up and returns a file chooser dialog for the caller to run."""
        # Pick title and chooser action from the requested mode.
        if folder:
            title = 'Choose directory...'
            action = gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER
        elif foroutput:
            title = 'Save as...'
            action = gtk.FILE_CHOOSER_ACTION_SAVE
        else:
            title = 'Open...'
            action = gtk.FILE_CHOOSER_ACTION_OPEN
        dialog = gtk.FileChooserDialog(title, None, action,
                                       (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                        gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        dialog.set_default_response(gtk.RESPONSE_OK)
        # One filter per file type, matching both its MIME types and its
        # glob patterns (upper- and lower-cased so either case matches).
        for filetype in filetypes:
            filefilter = gtk.FileFilter()
            filefilter.set_name(filetype)
            for mimetype in filetypes[filetype]['mimes']:
                filefilter.add_mime_type(mimetype)
            for pattern in filetypes[filetype]['patterns']:
                filefilter.add_pattern(pattern.upper())
                filefilter.add_pattern(pattern.lower())
            dialog.add_filter(filefilter)
        return dialog

    def tabledialog(self, tablenames):
        """Give a list of tables within a file to choose which to load."""
        dialog = self['tabledialog']
        tabletree = self['tabletree']
        tabletree.clear()
        # A plain list means a flat set of tables with no type information.
        if type(tablenames) == list:
            for tablename in tablenames:
                tabletree.append(None, [tablename, None])
        # used for gdb only, currently
        elif type(tablenames) == dict:
            # add features in datasets
            if 'datasets' in tablenames:
                datasets = tablenames['datasets'].keys()
                datasets.sort()
                for dataset in datasets:
                    parentiter = tabletree.append(None, [dataset, 'DataSet'])
                    features = tablenames['datasets'][dataset]
                    features.sort()
                    for feature in features:
                        tabletree.append(parentiter, [feature, 'Feature'])
            # add features in root
            if 'features' in tablenames:
                rootfeatures = tablenames['features']
                rootfeatures.sort()
                for feature in rootfeatures:
                    tabletree.append(None, [feature, 'Feature'])
            # add tables in root
            if 'tables' in tablenames:
                roottables = tablenames['tables']
                roottables.sort()
                for table in roottables:
                    tabletree.append(None, [table, 'Table'])
        return dialog

    @classmethod
    def messagedialog(cls, message, style='msg'):
        """Creates a simple dialog to display the provided message."""
        # 'yesno' builds a question dialog; anything else a plain OK box.
        if style == 'yesno':
            dialog = gtk.MessageDialog(type=gtk.MESSAGE_QUESTION,
                                       buttons=gtk.BUTTONS_YES_NO)
        else:
            dialog = gtk.MessageDialog(buttons=gtk.BUTTONS_OK)
        dialog.set_markup(message)
        dialog.set_default_response(gtk.RESPONSE_OK)
        # Run modally and return the user's response code.
        response = dialog.run()
        dialog.destroy()
        return response

    # This is used for the output field config and sample views.
    def replacecolumns(self, storename, viewname, newcolnames):
        """Replaces the columns in the output list/view with new columns."""
        # make a new liststore to use
        celltypelist = []
        for i in range(len(newcolnames)):
            celltypelist.append(gobject.TYPE_STRING)
        # __getitem__ checks newobjects so access will shift to the new store
        self.newobjects[storename] = gtk.ListStore(*celltypelist)
        # update the listview
        view = self[viewname]
        view.set_model(self[storename])
        # remove the old columns
        for col in view.get_columns():
            view.remove_column(col)
        # add the new columns
        for i in range(len(newcolnames)):
            # treeviews need double underscores to display single underscores
            colname = re.sub(r'_', '__', newcolnames[i])
            if colname.lower() in ('type', 'affinity'):
                # Type/affinity columns get an editable combo backed by the
                # shared field-type list.
                fieldtypelist = self['fieldtypelist']
                newcell = gtk.CellRendererCombo()
                newcell.set_property('editable', True)
                newcell.set_property('has-entry', False)
                newcell.set_property('model', fieldtypelist)
                newcell.set_property('text-column', 0)
                newcell.connect('changed',
                                self.handlerfunctions.updatefieldtype,
                                fieldtypelist, self[storename], i)
                # NOTE(review): this branch binds text=1 while the generic
                # branch binds text=i -- looks intentional for a fixed-layout
                # store, but verify against the callers.
                newcolumn = gtk.TreeViewColumn(colname, newcell, text=1)
            else:
                newcell = gtk.CellRendererText()
                newcell.set_property('editable', True)
                newcell.connect('edited',
                                self.handlerfunctions.updatefieldattribute,
                                self[storename], i)
                newcolumn = gtk.TreeViewColumn(colname, newcell, text=i)
            view.append_column(newcolumn)

    def initconfiginputwindow(self, fieldnames, fieldvalues, fieldtypes):
        # Builds the "Define input" dialog: one column of sample values plus
        # a type combo per field.
        dialog = gtk.Dialog('Define input', self['mainwindow'],
                            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                             gtk.STOCK_OK, gtk.RESPONSE_OK))
        dialog.set_modal(True)
        dialog.set_default_size(500, 400)
        contentarea = dialog.get_content_area()
        dialogvbox = gtk.VBox()
        contentarea.pack_start(dialogvbox, True, True, 0)
        # label at the top
        dialoglabel = gtk.Label('Define field types')
        dialogvbox.pack_start(dialoglabel, False, False, 0)
        # scrolled window for everything else
        dialogscrolledwindow = gtk.ScrolledWindow()
        dialogvbox.pack_start(dialogscrolledwindow, True, True)
        # viewport to hold everything in the scrolled window
        dialogviewport = gtk.Viewport()
        dialogscrolledwindow.add(dialogviewport)
        # hbox to hold the stuff for each field
        scrollhbox = gtk.HBox()
        dialogviewport.add(scrollhbox)
        inputtypelist = gtk.ListStore(gobject.TYPE_STRING)
        for fieldtype in fieldtypes:
            inputtypelist.append([fieldtype])
        # store references to the comboboxes in the GUI object so their
        # values can be retrieved from gui_files
        self.typecomboboxes = []
        # for each field
        for i in range(len(fieldnames)):
            # create a box to hold everything
            inputfieldvbox = gtk.VBox()
            # store the sample values in a list
            inputfieldvaluelist = gtk.ListStore(gobject.TYPE_STRING)
            for record in fieldvalues:
                inputfieldvaluelist.append([record[i]])
            # create a view to hold the field name and a sample of values
            inputfieldview = gtk.TreeView(inputfieldvaluelist)
            # add a cell in a column to the listview to display the values
            inputfieldcell = gtk.CellRendererText()
            inputfieldcolumn = gtk.TreeViewColumn(fieldnames[i],
                                                  inputfieldcell, text=0)
            inputfieldview.append_column(inputfieldcolumn)
            # add a combobox for selecting field type
            inputfieldtypecombo = gtk.ComboBox(inputtypelist)
            inputtypecell = gtk.CellRendererText()
            inputfieldtypecombo.pack_start(inputtypecell, expand=True)
            inputfieldtypecombo.add_attribute(cell=inputtypecell,
                                              attribute='text', column=0)
            self.typecomboboxes.append(inputfieldtypecombo)
            # pack the two main objects
            inputfieldvbox.pack_start(inputfieldview, expand=True)
            inputfiel
#!/usr/bin/env python

import logging  # http://docs.python.org/2/howto/logging.html#logging-basic-tutorial
import pysftp  # https://code.google.com/p/pysftp/
from configobj import ConfigObj  # http://www.voidspace.org.uk/python/configobj.html
import os
import sys
import time
import curses
import Image
import ImageTk
import Tkinter

from gallery import Gallery

logging.basicConfig(level=logging.WARNING)


class PiMotionGallery():
    """Fetches motion-capture images from a remote Pi over SFTP and shows
    them in a Tkinter gallery.

    NOTE(review): connection parameters are kept BOTH as self.tmp_* instance
    attributes (set here, never updated) and as module-level tmp_* globals
    (written by askParameters and read everywhere else). The instance copies
    appear to be dead state -- verify before relying on them.
    """

    def __init__(self):
        print "\n--- PiMotionGallery v0.1 ---\n"
        self.config = None
        self.current = 0
        # Placeholder demo images/captions for the gallery widgets.
        self.image_list = ['1.jpg', '2.jpg', '5.jpg']
        self.text_list = ['apple', 'bird', 'cat']
        self.root = Tkinter.Tk()
        self.label = Tkinter.Label(self.root, compound=Tkinter.TOP)
        self.tmp_host = ''
        self.tmp_port = ''
        self.tmp_user = ''
        self.tmp_pass = ''
        self.tmp_base = ''
        self.tmp_loca = ''
        # Use the saved configuration if one exists; otherwise interactively
        # build, verify and persist a new one.
        if(self.loadExistingConfig()):
            print "Existing configuration successfully loaded."
            #fetchImages()
        else:
            self.askParameters()
            self.loadFromRemote()
            self.reviewConfig()
            self.saveConfig()

    def gallery2(self):
        # Hands control to the external Gallery widget's main loop.
        gallery = Gallery(self)
        gallery.master.title('PiMotionGallery v0.1')
        #gallery.master.maxsize(1024, 750)
        #gallery.master.minsize(1024, 750)
        gallery.mainloop()

    # Try to load existing configuration file.
    def loadExistingConfig(self):
        logging.info("loadExistingConfig()")
        # ConfigObj returns an empty mapping when pmg.conf is missing.
        self.config = ConfigObj('pmg.conf')
        return self.config != {}

    def fetchImages(self):
        print "Connecting to remote server to fetch new images ..."
        srv = pysftp.Connection(host=self.config['host'],
                                username=self.config['username'],
                                password=self.config['password'],
                                port=int(self.config['port']))
        #base_list = srv.execute('ls -al ' + config['motion_base'])
        for item in srv.listdir(path=self.config['motion_base']):
            # lstatout=str(srv.lstat(i)).split()[0]
            # if 'd' in lstatout:
            #     print i, 'is a directory'
            # Verify it is a directory
            # NOTE(review): directories are detected purely by their name
            # being 8 characters long (motion's date-stamped folders) -- the
            # lstat-based check above was abandoned. Confirm this heuristic.
            if len(item) == 8:
                self.fetchImagesFromDir(srv, item)
        srv.close()

    def fetchImagesFromDir(self, srv, directory):
        remote = self.config['motion_base'] + directory
        local = self.config['motion_local'] + directory
        print "\nChecking " + directory + " directory ..."
        if not os.path.exists(local):
            os.makedirs(local)
        dir_list = srv.listdir(path=remote)
        i = 0
        total = len(dir_list)
        # All files are copied to the local directory
        # (files containing 'm' -- presumably movie files -- are skipped,
        # as are files already present locally).
        for item in dir_list:
            if (not 'm' in item) and (not os.path.exists(local + '/' + item)):
                srv.get(remotepath=remote+'/'+item, localpath=local+'/'+item)
            i += 1
            current = int(i * 100 / total)
            # Redraw the textual progress bar every 5%.
            if(current % 5 == 0):
                sys.stdout.write("\r[%-20s] %d%% - this can take a while, grab a coffee!"
                                 % ('=' * (current / 5), current))
                sys.stdout.flush()
        # Remote directory is deleted
        sys.stdout.write("\n\nDeleting remote directory " + directory + " ...")
        srv.execute('rm -rf ' + remote)
        sys.stdout.write(" [OK]\n")
        sys.stdout.flush()

    def askParameters(self):
        # Interactively collects connection parameters into module globals
        # (NOT the self.tmp_* attributes set in __init__).
        logging.info("askParameters()")
        global tmp_host
        global tmp_port
        global tmp_user
        global tmp_pass
        global tmp_base
        global tmp_loca
        tmp_host = raw_input('host [] : ') or ''
        tmp_port = raw_input('port [22] : ') or 22
        tmp_user = raw_input('username [pi] : ') or 'pi'
        tmp_pass = raw_input('password [raspberry] : ') or 'raspberry'
        tmp_base = raw_input('motion base [/home/pi/motion/] : ') or '/home/pi/motion/'
        tmp_loca = raw_input('local directory [] : ')
        print "\n\nconfig parameters set to:\n\thost: " + tmp_host + "\n\tport: " + str(tmp_port) + "\n\tusername: " + tmp_user + "\n\tpassword: " + tmp_pass + "\n\n"
        if(self.representsInt(tmp_port)):
            tmp_port = int(tmp_port)

    def loadFromRemote(self):
        logging.info("loadFromRemote()")
        print tmp_host
        print tmp_user
        print tmp_base
        print tmp_port
        if(self.checkConnection()):
            print "Successfully connected to the remote host."
        else:
            # Let the user retry with new parameters until they opt to keep
            # the failing ones.
            keep = ''
            while (keep != 'y') and (keep != 'n'):
                keep = raw_input("\nDo you want to keep your current connection parameters? [y/N] : ").lower() or 'n'
                if(keep == 'no'):
                    keep = 'n'
                elif(keep == 'yes'):
                    keep = 'y'
            if(keep == 'n'):
                self.askParameters()
                self.loadFromRemote()
        #srv.get('thefile.txt')
        #srv.put('anotherfile.txt')

    def checkConnection(self):
        # Returns True only when an SFTP connection with the tmp_* globals
        # succeeds AND the local target directory exists.
        success = True
        try:
            srv = pysftp.Connection(host=tmp_host, username=tmp_user,
                                    password=tmp_pass, port=tmp_port)
##            test = srv.execute('ls -al ' + tmp_base)
##            print test
            srv.close()
        except:
            # NOTE(review): bare except -- also swallows KeyboardInterrupt
            # and NameError (e.g. if tmp_host was never set).
            logging.warning("Could not connect to remote host.")
            print "[WARNING] Could not connect to remote host, please check your connection parameters."
            success = False
        if(os.path.isdir(tmp_loca)):
            return success
        else:
            logging.warning("Local directory does not exist.")
            print "[WARNING] Local directory does not exist."
            return False

    def reviewConfig(self):
        # Placeholder: intended to let the user review settings before save.
        logging.info("reviewConfig()")
        pass

    def saveConfig(self):
        # Persists the tmp_* globals to pmg.conf and reloads self.config.
        logging.info("saveConfig()")
        config = ConfigObj('pmg.conf')
        config['host'] = tmp_host
        config['port'] = tmp_port
        config['username'] = tmp_user
        config['password'] = tmp_pass
        config['motion_base'] = tmp_base
        config['motion_local'] = tmp_loca
        config.write()
        self.loadExistingConfig()
        print "Configuration parameters successfully saved."

    # NOTE(review): the next three functions sit in the class body but take
    # no `self` and reference module-level names (label, root, image_list,
    # tkMessageBox -- the latter is never imported). They look like leftovers
    # from a pre-class script version; calling them as methods will fail.
    def playVideo(filename):
        os.system("open " + filename)

    def gallery():
        label.pack()
        frame = Tkinter.Frame(root)
        frame.pack()
        Tkinter.Button(frame, text='Previous picture', command=lambda: move(-1)).pack(side=Tkinter.LEFT)
        Tkinter.Button(frame, text='Next picture', command=lambda: move(+1)).pack(side=Tkinter.LEFT)
        Tkinter.Button(frame, text='Quit', command=root.quit).pack(side=Tkinter.LEFT)
        move(0)
        root.mainloop()

    def move(delta):
        global current, image_list
        if not (0 <= current + delta < len(image_list)):
            tkMessageBox.showinfo('End', 'No more image.')
            return
        current += delta
        image = Image.open(image_list[current])
        photo = ImageTk.PhotoImage(image)
        label['text'] = text_list[current]
        label['image'] = photo
        label.photo = photo

    def representsInt(self, s):
        # True when s parses as an int (used to sanitise the port input).
        try:
            int(s)
            return True
        except ValueError:
            return False


def main():
    pmg = PiMotionGallery()
    pmg.gallery2()

if __name__ == "__main__":
    main()
import praw
import urllib.request
import json
import requests
import requests.auth
import os.path
import re
from imgurpython import ImgurClient
from bs4 import BeautifulSoup

# Matches direct i.imgur.com gif links.
# FIX: raw string (the old "\/" escapes were invalid-escape noise) and the
# dot before "gif" is now escaped so e.g. ".../abcXgif" no longer matches.
imgur_gif_regex = re.compile(r"https?://i\.imgur\.com/[a-z0-9]+\.gif")


def gyazo_link_parser(link):
    """
    Parses Gyazo links into their raw (.png or .gif) form (i.gyazo)
    """
    # opens the gyazo link
    response = urllib.request.urlopen(link)
    # reads the reponse
    html = response.read()
    # parses the html using beautifulsoup, and gives me the image link
    parsed = BeautifulSoup(html)
    return parsed.img['src']

    # old method of handling gyazo links
    #title = parsed.title.string
    #print(str(title))
    #return "http://i.gyazo.com/" + title.replace("Gyazo - ", "")


def imgur_uploader(link, imgur_client):
    """
    Uploads passed image to imgur, and then outputs the link from the
    JSON/dict provided. I"m calling it JSON.

    Returns the imgur URL (with a trailing 'v' for gifs so reddit embeds
    the gifv player), or False when the upload fails.
    """
    # tries to upload the image to imgur
    try:
        uploaded_image = imgur_client.upload_from_url(url=link, config=None,
                                                      anon=True)
    except Exception:
        # best-effort: report and signal failure to the caller
        print("Error when uploading the image to imgur.")
        return False
    else:
        # otherwise, yay, we return a link
        print("Successful convert of", link, "to an imgur link",
              uploaded_image["link"])
        if len(imgur_gif_regex.findall(uploaded_image["link"])) != 0:
            return uploaded_image["link"] + "v"
        return uploaded_image["link"]


def comment_prep(content):
    """
    Prepares the comment so we can have sme basic context.
    """
    # same comment structure, so we'll just do it in a function
    text = "Imgur link: " + content
    text += "\n\n\n------\n"
    text += "This action was performed by a bot. Message +/u/arrivance for further details."
    return text


def comment_poster(comment, content):
    """Replies to a reddit comment, logging (not raising) any failure."""
    try:
        comment.reply(content)
    except praw.errors.RateLimitExceeded as e:
        print("Rate limit exceeded:", e)
    except praw.errors.APIException as e:
        print("API Exception:", e)
    except Exception:
        # best-effort: never let a failed reply kill the bot loop
        print("Other unknown fault.")
    else:
        print("Successfully commented on comment ID", comment.id)


def file_checker(filename):
    """Return True when filename exists and is a regular file."""
    # FIX: `if x == True: return True else: return False` collapsed to the
    # boolean it already was.
    return os.path.isfile(filename)


def file_maker(filename, structure):
    """Create/overwrite filename with `structure` serialized as JSON."""
    with open(filename, "w") as data_file:
        # BUG FIX: json.dump was passed `filename` (the string) instead of
        # the open file object, which raised AttributeError and never wrote
        # the file.
        json.dump(structure, data_file)
    return True


def reddit_oauth_token(login_details, user_agent):
    """Fetch a reddit OAuth2 access token via the password grant."""
    client_auth = requests.auth.HTTPBasicAuth(
        login_details["reddit_client_id"],
        login_details["reddit_client_secret"])
    post_data = {"grant_type": "password",
                 "username": login_details["reddit_user"],
                 "password": login_details["reddit_pass"]}
    headers = {"User-Agent": user_agent}
    print("Attempting to get the access_token from reddit...")
    response = requests.post("https://www.reddit.com/api/v1/access_token",
                             auth=client_auth, data=post_data, headers=headers)
    access_token = response.json()["access_token"]
    print("access_token succesfully gotten:", access_token)
    return access_token
from L500analysis.derived_fields.derived_fields import *
from L500analysis.derived_fields.derived_field_functions import *
from L500analysis.derived_fields.collections.peak_height.derived_field_functions \
    import *
from L500analysis.derived_fields.derived_field_tools.non_thermal_temperature \
    import calculate_Ttot
from L500analysis.derived_fields.derived_field_tools.self_similar_normalizations \
    import calculate_T_normalization
from L500analysis.plotting.profiles.tools.radial_normalizations import *
from L500analysis.plotting.profiles.tools.make_profile import make_profile
from L500analysis.utils.constants import K2keV


def _normalized_temperature_profile(data, *args, **kwargs):
    """Gather per-halo mass-weighted temperature, halo mass and r/R_delta
    scaled radii for later normalization."""
    T_mw = data.profiles['T_mw']
    Mvir = data.halo_properties[kwargs['M_delta_key']]
    Rmid = data.profiles['r_mid']
    Rvir = data.halo_properties[kwargs['R_delta_key']]
    Rscaled = {hid: Rmid[hid]/Rvir[hid] for hid in data.halo_ids}

    return dict({'aexp': data.aexp, 'Mvir': Mvir, 'T_mw': T_mw,
                 'halo_ids': data.halo_ids, 'Rscaled': Rscaled}, **kwargs)


def _normalized_total_temperature_profile(data, *args, **kwargs):
    """Like _normalized_temperature_profile, but also gathers the gas
    velocity moments needed to add the kinetic (non-thermal) term."""
    T_mw = data.profiles['T_mw']
    Mvir = data.halo_properties[kwargs['M_delta_key']]
    sigr = data.profiles['vel_gas_rad_std']
    sigt = data.profiles['vel_gas_tan_std']
    vr = data.profiles['vel_gas_rad_avg']
    vt = data.profiles['vel_gas_tan_avg']
    Rmid = data.profiles['r_mid']
    Rvir = data.halo_properties[kwargs['R_delta_key']]
    Rscaled = {hid: Rmid[hid]/Rvir[hid] for hid in data.halo_ids}

    return dict({'sigr': sigr, 'sigt': sigt, 'vr': vr, 'vt': vt,
                 'aexp': data.aexp, 'Mvir': Mvir, 'T_mw': T_mw,
                 'Rscaled': Rscaled, 'halo_ids': data.halo_ids}, **kwargs)


def calculate_normalized_temperature_profile(input_data):
    """Return {halo_id: profile of T_mw/T_delta vs r/R_delta}."""
    d = input_data
    normalized_T = {}
    for hid in d['halo_ids']:
        Tdelta = calculate_T_normalization(Mvir=d['Mvir'][hid],
                                           delta=d['delta'],
                                           aexp=d['aexp'])
        # `units` converts T_mw (K) into the keV units of Tdelta.
        normalized_T[hid] = d['T_mw'][hid]*d['units']/Tdelta
        normalized_T[hid] = make_profile(x=d['Rscaled'][hid],
                                         y=normalized_T[hid])
    return normalized_T


def calculate_normalized_total_temperature_profile(input_data):
    """Return {halo_id: profile of Ttot/T_delta vs r/R_delta}, where Ttot
    includes the kinetic contribution from gas motions."""
    d = input_data
    T_tot_normalized = {}
    for hid in d['halo_ids']:
        Tdelta = calculate_T_normalization(Mvir=d['Mvir'][hid],
                                           delta=d['delta'],
                                           aexp=d['aexp'])
        Ttot = calculate_Ttot(sigr=d['sigr'][hid], vr=d['vr'][hid],
                              sigt=d['sigt'][hid], vt=d['vt'][hid],
                              Tmw=d['T_mw'][hid])
        T_tot_normalized[hid] = Ttot/Tdelta
        T_tot_normalized[hid] = make_profile(x=d['Rscaled'][hid],
                                             y=T_tot_normalized[hid])
    return T_tot_normalized


def calculate_total_temperature_profile(input_data):
    """Return {halo_id: profile of (un-normalized) Ttot vs r/R_delta}."""
    d = input_data
    T_tot = {}
    for hid in d['halo_ids']:
        Ttot = calculate_Ttot(sigr=d['sigr'][hid], vr=d['vr'][hid],
                              sigt=d['sigt'][hid], vt=d['vt'][hid],
                              Tmw=d['T_mw'][hid])
        # BUG FIX: the original assigned into an undefined name
        # `T_tot_normalized` (NameError on the first halo) and returned the
        # never-populated T_tot dict.
        T_tot[hid] = make_profile(x=d['Rscaled'][hid], y=Ttot)
    return T_tot


add_derived_field('T_mw/T500c', function=_normalized_temperature_profile,
                  combine_function=calculate_normalized_temperature_profile,
                  M_delta_key='M_total_500c', R_delta_key='r500c',
                  delta='500c', units=K2keV)
add_derived_field('T_mw/T200m', function=_normalized_temperature_profile,
                  combine_function=calculate_normalized_temperature_profile,
                  M_delta_key='M_total_200m', R_delta_key='r200m',
                  delta='200m', units=K2keV)
add_derived_field('Ttot/T500c', function=_normalized_total_temperature_profile,
                  combine_function=calculate_normalized_total_temperature_profile,
                  M_delta_key='M_total_500c', R_delta_key='r500c',
                  delta='500c')
add_derived_field('Ttot/T200m', function=_normalized_total_temperature_profile,
                  combine_function=calculate_normalized_total_temperature_profile,
                  M_delta_key='M_total_200m', R_delta_key='r200m',
                  delta='200m')
add_derived_field('Ttot_500c', function=_normalized_total_temperature_profile,
                  combine_function=calculate_total_temperature_profile,
                  M_delta_key='M_total_500c', R_delta_key='r500c',
                  delta='500c')
add_derived_field('Ttot_200m', function=_normalized_total_temperature_profile,
                  combine_function=calculate_total_temperature_profile,
                  M_delta_key='M_total_200m', R_delta_key='r200m',
                  delta='200m')
import os.path
import gwt
from ...weights import W
from warnings import warn

__author__ = "Myunghwa Hwang <mhwang4@gmail.com>"
__all__ = ["DatIO"]


class DatIO(gwt.GwtIO):
    """
    Opens, reads, and writes file objects in DAT format.

    Spatial weights objects in DAT format are used in
    Dr. LeSage's MatLab Econ library.
    This DAT format is a simple text file with DAT or dat extension.
    Without header line, it includes three data columns
    for origin id, destination id, and weight values as follows:

    [Line 1]    2    1    0.25
    [Line 2]    5    1    0.50
    ...

    Origin/destination IDs in this file format are simply record numbers
    starting with 1. IDs are not necessarily integers.
    Data values for all columns should be numeric.

    """

    FORMATS = ['dat']
    MODES = ['r', 'w']

    def _read(self):
        """Reads .dat file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open .dat file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('wmat.dat'),'r').read()

        Get the number of observations from the header

        >>> w.n
        49

        Get the mean number of neighbors

        >>> w.mean_neighbors
        4.7346938775510203

        Get neighbor distances for a single observation

        >>> w[1]
        {2.0: 0.3333, 5.0: 0.3333, 6.0: 0.3333}

        """
        # A DAT file holds a single weights object; once it has been read,
        # behave like an exhausted iterator on subsequent read() calls.
        if self.pos > 0:
            raise StopIteration

        # IDs are record numbers but "not necessarily integers" (see class
        # docstring), so parse them as floats.
        id_type = float
        weights, neighbors = self._readlines(id_type)

        self.pos += 1
        return W(neighbors, weights)

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        a DAT file
        write a weights object to the opened DAT file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('wmat.dat'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.dat')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created dat file

        >>> wnew = pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)
        """
        # Ensure the underlying file handle is still open before writing.
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            # Serialization of the (i, j, weight) triplets is handled by the
            # GwtIO base class.
            self._writelines(obj)
        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))
#encoding:utf-8
__authors__ = ['"Wei Keke" <keke.wei@cs2c.com.cn>']
__version__ = "V0.1"

'''
# ChangeLog:
#---------------------------------------------------------------------------------
# Version        Date            Desc                    Author
#---------------------------------------------------------------------------------
# V0.1           2014/10/09      Initial version         Wei Keke
#---------------------------------------------------------------------------------
'''

from TestData.Template import ITC07_SetUp as ModuleData
from TestAPIs.VirtualMachineAPIs import VirtualMachineAPIs

'''---------------------------------------------------------------------------------------------------
@note: PreData
---------------------------------------------------------------------------------------------------'''
# Resolve the id of the template's source VM from its configured name.
vm_id = VirtualMachineAPIs().getVmIdByName(ModuleData.vm_name)
vm_name = ModuleData.vm_name
disk_name = ModuleData.disk_name
disk_info = ModuleData.disk_info

# Name of the template created by this test case.
temp_name = 'Template-ITC070202'
# XML request body used to create the template from the VM above.
temp_info = '''
<template>
    <name>%s</name>
    <vm id="%s"/>
</template>
''' % (temp_name, vm_id)

'''---------------------------------------------------------------------------------------------------
@note: ExpectedData
---------------------------------------------------------------------------------------------------'''
# HTTP status code expected from the create-template API call.
expected_status_code = 200
# coding: utf-8

"""
    Onshape REST API

    The Onshape REST API consumed by all clients. # noqa: E501

    The version of the OpenAPI document: 1.113
    Contact: api-support@onshape.zendesk.com
    Generated by: https://openapi-generator.tech
"""

from __future__ import absolute_import
import re  # noqa: F401
import sys  # noqa: F401

import six  # noqa: F401

from onshape_client.oas.model_utils import (  # noqa: F401
    ModelComposed,
    ModelNormal,
    ModelSimple,
    date,
    datetime,
    file_type,
    int,
    none_type,
    str,
    validate_get_composed_info,
)

try:
    from onshape_client.oas.models import path_item
except ImportError:
    path_item = sys.modules["onshape_client.oas.models.path_item"]


class Callback(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    allowed_values = {}

    validations = {}

    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "empty": (bool,),  # noqa: E501
            "extensions": (
                {str: (bool, date, datetime, dict, float, int, list, str,)},
            ),  # noqa: E501
            "getref": (str,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        return None

    # Maps Python attribute names to the JSON keys used on the wire; note the
    # "get$ref" spelling required by the OpenAPI document.
    attribute_map = {
        "empty": "empty",  # noqa: E501
        "extensions": "extensions",  # noqa: E501
        "getref": "get$ref",  # noqa: E501
    }

    @staticmethod
    def _composed_schemas():
        return None

    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """callback.Callback - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            empty (bool): [optional]  # noqa: E501
            extensions ({str: (bool, date, datetime, dict, float, int, list, str,)}): [optional]  # noqa: E501
            getref (str): [optional]  # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        # Any model properties supplied by the caller are routed through
        # setattr so ModelNormal's validation/type-checking applies.
        for var_name, var_value in six.iteritems(kwargs):
            setattr(self, var_name, var_value)
#
# This is minimal MicroPython variant of run-tests script, which uses
# .exp files as generated by run-tests --write-exp. It is useful to run
# testsuite on systems which have neither CPython3 nor unix shell.
# This script is intended to be run by the same interpreter executable
# which is to be tested, so should use minimal language functionality.
#
import sys
import uos as os

# Test suite directories to scan for *.py testcases.
# BUG FIX: the original listed " misc" with a leading space, so the misc
# suite directory could never be found and the `assert r == 0` below fired.
tests = [
    "basics", "micropython", "float", "import", "io",
    "misc", "unicode", "extmod", "unix",
]

if sys.platform == 'win32':
    MICROPYTHON = "micropython.exe"
else:
    MICROPYTHON = "micropython"


def should_skip(test):
    """Return True for tests that need emitters unavailable in this setup."""
    if test.startswith("native"):
        return True
    if test.startswith("viper"):
        return True
    # Explicit False (the original fell through and returned None).
    return False


test_count = 0
passed_count = 0
skip_count = 0

for suite in tests:
    #print("Running in: %s" % suite)
    if sys.platform == 'win32':
        # dir /b prints only contained filenames, one on a line
        # http://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/dir.mspx
        r = os.system("dir /b %s/*.py >tests.lst" % suite)
    else:
        r = os.system("ls %s/*.py | xargs -n1 basename >tests.lst" % suite)
    assert r == 0

    with open("tests.lst") as f:
        testcases = f.readlines()
        testcases = [l[:-1] for l in testcases]
    assert testcases, "No tests found in dir '%s', which is implausible" % suite
    #print(testcases)

    for t in testcases:
        if t == "native_check.py":
            continue
        qtest = "%s/%s" % (suite, t)

        if should_skip(t):
            print("skip " + qtest)
            skip_count += 1
            continue

        # Load the expected output, if a .exp file exists for this test.
        exp = None
        try:
            f = open(qtest + ".exp")
            exp = f.read()
            f.close()
        except OSError:
            pass  # no .exp file -> test is silently skipped below

        if exp is not None:
            #print("run " + qtest)
            r = os.system(MICROPYTHON + " %s >.tst.out" % qtest)
            if r == 0:
                f = open(".tst.out")
                out = f.read()
                f.close()
            else:
                out = "CRASH"

            if out == "SKIP\n":
                print("skip " + qtest)
                skip_count += 1
            else:
                if out == exp:
                    print("pass " + qtest)
                    passed_count += 1
                else:
                    print("FAIL " + qtest)
                test_count += 1
        else:
            skip_count += 1

print("%s tests performed" % test_count)
print("%s tests passed" % passed_count)
if test_count != passed_count:
    print("%s tests failed" % (test_count - passed_count))
if skip_count:
    print("%s tests skipped" % skip_count)
from django.contrib import admin
from Weather.models import *
from Weather.util import updateForecast


def update_forecast(modeladmin, request, queryset):
    """Admin action: re-fetch each selected Forecast from the NWS."""
    for forecast in queryset:
        updateForecast(forecast)
# Label shown in the admin actions drop-down.
update_forecast.short_description = "Force forecast update from NWS"


class forecastAdmin(admin.ModelAdmin):
    # Expose the forced-update action on the Forecast change list.
    actions = [update_forecast]


class WMSRadarOverlayAdmin(admin.ModelAdmin):
    pass


admin.site.register(Forecast, forecastAdmin)
admin.site.register(WMSRadarOverlay, WMSRadarOverlayAdmin)
""" Settings for reputation changes that apply to user in response to various actions by the same users or others """ from askbot.conf.settings_wrapper import settings from askbot.conf.super_groups import REP_AND_BADGES from askbot.deps.livesettings import ConfigurationGroup, IntegerValue from django.utils.translation import ugettext_lazy as _ BADGES = ConfigurationGroup( 'BADGES', _('Badge settings'), ordering=2, super_group = REP_AND_BADGES ) settings.register( IntegerValue( BADGES, 'DISCIPLINED_BADGE_MIN_UPVOTES', default=3, description=_('Disciplined: minimum upvotes for deleted post') ) ) settings.register( IntegerValue( BADGES, 'PEER_PRESSURE_BADGE_MIN_DOWNVOTES', default=3, description=_('Peer Pressure: minimum downvotes for deleted post') ) ) settings.register( IntegerValue( BADGES, 'TEACHER_BADGE_MIN_UPVOTES', default=20, description=_('Teacher: minimum upvotes for the answer') ) ) settings.register( IntegerValue( BADGES,
'NICE_ANSWER_BADGE_MIN_UPVOTES', default=5, description=_('Nice Answer: minimum up
votes for the answer') ) ) settings.register( IntegerValue( BADGES, 'GOOD_ANSWER_BADGE_MIN_UPVOTES', default=10, description=_('Good Answer: minimum upvotes for the answer') ) ) settings.register( IntegerValue( BADGES, 'GREAT_ANSWER_BADGE_MIN_UPVOTES', default=15, description=_('Great Answer: minimum upvotes for the answer') ) ) settings.register( IntegerValue( BADGES, 'NICE_QUESTION_BADGE_MIN_UPVOTES', default=5, description=_('Nice Question: minimum upvotes for the question') ) ) settings.register( IntegerValue( BADGES, 'GOOD_QUESTION_BADGE_MIN_UPVOTES', default=10, description=_('Good Question: minimum upvotes for the question') ) ) settings.register( IntegerValue( BADGES, 'GREAT_QUESTION_BADGE_MIN_UPVOTES', default=15, description=_('Great Question: minimum upvotes for the question') ) ) settings.register( IntegerValue( BADGES, 'POPULAR_QUESTION_BADGE_MIN_VIEWS', default=500, description=_('Popular Question: minimum views') ) ) settings.register( IntegerValue( BADGES, 'NOTABLE_QUESTION_BADGE_MIN_VIEWS', default=150, description=_('Notable Question: minimum views') ) ) settings.register( IntegerValue( BADGES, 'FAMOUS_QUESTION_BADGE_MIN_VIEWS', default=1000, description=_('Famous Question: minimum views') ) ) settings.register( IntegerValue( BADGES, 'SELF_LEARNER_BADGE_MIN_UPVOTES', default=10, description=_('Self-Learner: minimum answer upvotes') ) ) settings.register( IntegerValue( BADGES, 'CIVIC_DUTY_BADGE_MIN_VOTES', default=100, description=_('Civic Duty: minimum votes') ) ) settings.register( IntegerValue( BADGES, 'ENLIGHTENED_BADGE_MIN_UPVOTES', default=15, description=_('Enlightened Duty: minimum upvotes') ) ) settings.register( IntegerValue( BADGES, 'GURU_BADGE_MIN_UPVOTES', default=30, description=_('Guru: minimum upvotes') ) ) settings.register( IntegerValue( BADGES, 'NECROMANCER_BADGE_MIN_UPVOTES', default=3, description=_('Necromancer: minimum upvotes') ) ) settings.register( IntegerValue( BADGES, 'NECROMANCER_BADGE_MIN_DELAY', default=30, 
description=_('Necromancer: minimum delay in days') ) ) settings.register( IntegerValue( BADGES, 'ASSOCIATE_EDITOR_BADGE_MIN_EDITS', default=20, description=_('Associate Editor: minimum number of edits') ) ) settings.register( IntegerValue( BADGES, 'FAVORITE_QUESTION_BADGE_MIN_STARS', default=5, description=_('Favorite Question: minimum stars') ) ) settings.register( IntegerValue( BADGES, 'STELLAR_QUESTION_BADGE_MIN_STARS', default=10, description=_('Stellar Question: minimum stars') ) ) settings.register( IntegerValue( BADGES, 'COMMENTATOR_BADGE_MIN_COMMENTS', default=10, description=_('Commentator: minimum comments') ) ) settings.register( IntegerValue( BADGES, 'TAXONOMIST_BADGE_MIN_USE_COUNT', default = 5, description = _('Taxonomist: minimum tag use count') ) ) settings.register( IntegerValue( BADGES, 'ENTHUSIAST_BADGE_MIN_DAYS', default = 5, description = _('Enthusiast: minimum days') ) )
from python_kemptech_api import *

# Specify the LoadMaster connection credentials here:
loadmaster_ip = ""
username = ""
password = ""

vs_ip_1 = ""
vs_ip_2 = ""
rs_ip_1 = ""
rs_ip_2 = ""
vs_port = ""
rs_port = ""


class RealServerPool(object):
    """A reusable bundle of real servers plus health-check settings that can
    be captured from one virtual service and applied to another.

    Construct either from a list of "ip:port" strings / RealServer objects
    (``rs_list``) or by snapshotting an existing virtual service (``vs``).
    """

    # Virtual-service health-check attributes copied by apply().
    healthcheck_parameters = [
        "checktype",
        "checkport",
        "checkurl",
        "checkheaders",
        "checkuse1_1",
        "checkuseget",
        "checkpostdata",
        "checkpattern",
        "checkcodes",
        "matchlen",
        "enhancedhealthchecks",
        "rsminimum"
    ]

    # Per-real-server attributes copied by apply().
    rs_parameters = [
        "enable",
        "forward",
        "weight",
        "limit",
        "critical",
        "follow"
    ]

    def __init__(self, rs_list=None, vs=None):
        if rs_list is not None:
            # Build from explicit servers; health-check settings left unset.
            self.rs = []
            for rs in rs_list:
                if isinstance(rs, RealServer):
                    self.rs.append(rs)
                else:
                    ip, port = rs.split(":")
                    # Placeholder LoadMaster access info; the real values are
                    # supplied when the pool is applied to a virtual service.
                    mock_lm = {"endpoint": "", "ip_address": "", "vs": ""}
                    self.rs.append(RealServer(mock_lm, ip, port))
            for attr in self.healthcheck_parameters:
                setattr(self, attr, None)
        elif vs is not None:
            # Snapshot the servers and health-check settings of an existing VS.
            self.rs = vs.servers.values()
            for attr in self.healthcheck_parameters:
                setattr(self, attr, getattr(vs, attr))

    def apply(self, vs):
        """Replace *vs*'s real servers and health-check settings with this
        pool's, deleting any servers currently attached to *vs*."""
        # Plain loops instead of the original side-effect list comprehensions.
        for existing in list(vs.servers.values()):
            existing.delete()
        for rs in self.rs:
            new_rs = vs.create_real_server(rs.rs, rs.rsport)
            new_rs.save()
            # Apply other settings
            for attr in self.rs_parameters:
                print("attr: {}".format(attr))
                value = getattr(rs, attr, None)
                if value is not None:
                    print("set attr: {}={}".format(attr, value))
                    setattr(new_rs, attr, value)
            new_rs.update()
        for attr in self.healthcheck_parameters:
            print("attr: {}".format(attr))
            value = getattr(self, attr, None)
            if value is not None:
                print("set attr: {}={}".format(attr, value))
                setattr(vs, attr, value)
        vs.update()


# Create the LoadMaster object
lm = LoadMaster(loadmaster_ip, username, password)

# Delete all the existing VSs
for existing_vs in list(lm.vs.values()):
    existing_vs.delete()

# Create a new VS
vs = lm.create_virtual_service(vs_ip_1, vs_port, "tcp")
vs.save()

# Configure some healthcheck options
vs.checktype = 'HTTPS'
vs.checkport = "8443"
vs.update()

# Add and save the first real server
rs1 = vs.create_real_server(rs_ip_1, rs_port)
rs1.save()
# Configure the weighting
rs1.weight = 200
rs1.update()

# Add and save the second real server
rs2 = vs.create_real_server(rs_ip_2, rs_port)
rs2.save()
# Disable the server
rs2.enable = 'N'
rs2.update()

# This will create a pool based on the VS and healthcheck settings of the VS
pool1 = RealServerPool(vs=vs)

# Create the second VS
vs2 = lm.create_virtual_service(vs_ip_2, vs_port, "tcp")
vs2.save()

# Apply the pool to the new VS. The RS and healthcheck settings will be applied
pool1.apply(vs2)

# Alternately, you can use a list of IP and ports to create a pool
rs_list = ["172.22.100.6:88",
           "172.22.100.7:88",
           "172.22.100.8:88",
           "172.22.100.9:88"]
pool2 = RealServerPool(rs_list)

# You can also apply healthcheck settings directly to a pool
pool2.checktype = "ICMP"

# Apply the pool to both VSs
pool2.apply(vs)
pool2.apply(vs2)
#
# Copyright (C) 2019  Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vendula Poncova <vponcova@redhat.com>
#
import unittest

from blivet.devices import DiskDevice
from blivet.formats import get_format
from blivet.size import Size

from pyanaconda.modules.common.constants.objects import DISK_SELECTION
from pyanaconda.modules.common.errors.storage import UnavailableStorageError
from pyanaconda.modules.common.structures.validation import ValidationReport
from pyanaconda.modules.storage.disk_selection import DiskSelectionModule
from pyanaconda.modules.storage.disk_selection.selection_interface import DiskSelectionInterface
from pyanaconda.storage.initialization import create_storage
from tests.nosetests.pyanaconda_tests import check_dbus_property


class DiskSelectionInterfaceTestCase(unittest.TestCase):
    """Test DBus interface of the disk selection module."""

    def setUp(self):
        """Set up the module."""
        self.disk_selection_module = DiskSelectionModule()
        self.disk_selection_interface = DiskSelectionInterface(self.disk_selection_module)

    def _test_dbus_property(self, *args, **kwargs):
        # Delegate to the shared helper, binding this module's DBus object.
        check_dbus_property(
            self,
            DISK_SELECTION,
            self.disk_selection_interface,
            *args, **kwargs
        )

    def selected_disks_property_test(self):
        """Test the selected disks property."""
        self._test_dbus_property(
            "SelectedDisks",
            ["sda", "sdb"]
        )

    def validate_selected_disks_test(self):
        """Test ValidateSelectedDisks."""
        storage = create_storage()
        self.disk_selection_module.on_storage_changed(storage)

        # Build a chain of dependent disks: dev3 depends on dev2, which
        # depends on dev1, so partial selections must be rejected.
        dev1 = DiskDevice(
            "dev1",
            exists=False,
            size=Size("15 GiB"),
            fmt=get_format("disklabel")
        )
        dev2 = DiskDevice(
            "dev2",
            exists=False,
            parents=[dev1],
            size=Size("6 GiB"),
            fmt=get_format("disklabel")
        )
        dev3 = DiskDevice(
            "dev3",
            exists=False,
            parents=[dev2],
            size=Size("6 GiB"),
            fmt=get_format("disklabel")
        )
        storage.devicetree._add_device(dev1)
        storage.devicetree._add_device(dev2)
        storage.devicetree._add_device(dev3)

        # An empty selection is always valid.
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks([])
        )

        self.assertEqual(report.is_valid(), True)

        # Selecting only dev1 leaves its dependents unselected -> invalid.
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks(["dev1"])
        )

        self.assertEqual(report.is_valid(), False)
        self.assertEqual(report.error_messages, [
            "You selected disk dev1, which contains devices that also use "
            "unselected disks dev2, dev3. You must select or de-select "
            "these disks as a set."
        ])
        self.assertEqual(report.warning_messages, [])

        # dev1 + dev2 still misses dev3 -> one error per selected disk.
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks(["dev1", "dev2"])
        )

        self.assertEqual(report.is_valid(), False)
        self.assertEqual(report.error_messages, [
            "You selected disk dev1, which contains devices that also "
            "use unselected disk dev3. You must select or de-select "
            "these disks as a set.",
            "You selected disk dev2, which contains devices that also "
            "use unselected disk dev3. You must select or de-select "
            "these disks as a set."
        ])
        self.assertEqual(report.warning_messages, [])

        # The complete set is valid.
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks(["dev1", "dev2", "dev3"])
        )

        self.assertEqual(report.is_valid(), True)

    def exclusive_disks_property_test(self):
        """Test the exclusive disks property."""
        self._test_dbus_property(
            "ExclusiveDisks",
            ["sda", "sdb"]
        )

    def ignored_disks_property_test(self):
        """Test the ignored disks property."""
        self._test_dbus_property(
            "IgnoredDisks",
            ["sda", "sdb"]
        )

    def protected_disks_property_test(self):
        """Test the protected disks property."""
        self._test_dbus_property(
            "ProtectedDevices",
            ["sda", "sdb"]
        )

    def disk_images_property_test(self):
        """Test the protected disks property."""
        self._test_dbus_property(
            "DiskImages",
            {
                "image_1": "/path/1",
                "image_2": "/path/2"
            }
        )

    def get_usable_disks_test(self):
        """Test the GetUsableDisks method."""
        # Without storage the call must fail with UnavailableStorageError.
        with self.assertRaises(UnavailableStorageError):
            self.disk_selection_interface.GetUsableDisks()

        self.disk_selection_module.on_storage_changed(create_storage())
        self.assertEqual(self.disk_selection_interface.GetUsableDisks(), [])
import json
import os


class DictToAttr(object):
    """Expose dict members as attributes, so configuration entries can be
    accessed as c.attr1.attr2.attr3 instead of c[attr1][attr2][attr3]."""

    def __init__(self, dictionary):
        # Copy so later mutation of the caller's dict cannot affect us.
        self.dict = dict(dictionary)

    def __getattr__(self, attr):
        if attr in self.dict:
            token = self.dict[attr]
            if isinstance(token, dict):
                # Nested sections are wrapped so chained access keeps working.
                return DictToAttr(token)
            return token
        # NOTE(review): __getattr__ conventionally raises AttributeError;
        # raising ValueError breaks hasattr()/getattr() defaults. Kept as-is
        # for backward compatibility with existing callers.
        raise ValueError("'%s' does not exists in configuration" % (attr))


class Configuration(object):
    """Load configuration.json (located next to this module) and expose its
    entries as attributes; '#' comment lines in the file are stripped first."""

    def __init__(self):
        # Read configuration file
        path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
        configFile = os.path.join(path, "configuration.json")

        if not os.path.exists(configFile):
            raise IOError("Configuration file %s does not exist!" % (configFile))

        try:
            with open(configFile) as f:
                # Strip all comment lines to make the output
                # a json-compliant string ("#" anywhere on a line drops it).
                allLines = f.readlines()
                clean = [line for line in allLines if "#" not in line]
                JSON = " ".join(clean).replace("\n", " ")
                self.config = json.loads(JSON)
        except IOError:
            print("Configuration file %s exists " % configFile +
                  "but cannot be read. Traceback follows")
            raise

    def __getattr__(self, attr):
        if attr in self.config:
            token = self.config[attr]
            if isinstance(token, dict):
                return DictToAttr(token)
            return token
# LICENSE: GPLv2, see attached License
# Author: Joerg Jungermann


def get_mac():
    """Return the WLAN MAC address as a dash-separated hex string."""
    from network import WLAN
    from ubinascii import hexlify
    raw = WLAN().config('mac')
    return hexlify(raw, '-').decode()


def sleep(sec, deepsleep=False):
    """Pause for *sec* seconds; optionally enter RTC-timed deep sleep."""
    import time
    if not deepsleep:
        # Plain blocking sleep.
        # print('sleep:', sec, 's')
        time.sleep(sec)
        return
    import machine
    # Arm the RTC alarm so the chip wakes itself after *sec* seconds.
    rtc = machine.RTC()
    rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
    rtc.alarm(rtc.ALARM0, sec * 1000)
    print('iotesp.sleep():', sec, 's / DEEPSLEEP')
    machine.deepsleep()
    # Give the hardware a moment to actually power down.
    time.sleep_us(100)

# vim: sw=4 ts=4 ft=python et foldmethod=indent
###############################################################################
# Name: nsistags.py                                                           #
# Purpose: Generate Tags for Nullsoft Installer Scripts                       #
# Author: Cody Precord <cprecord@editra.org>                                  #
# Copyright: (c) 2008 Cody Precord <staff@editra.org>                         #
# License: wxWindows License                                                  #
###############################################################################

"""
FILE: nsistags.py
AUTHOR: Cody Precord
LANGUAGE: Python
SUMMARY:
  Generate a DocStruct object that captures the structure of a NSIS Script.
  It currently supports generating tags for Sections, Functions, and Macro
  defs.

"""

__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: nsistags.py 52675 2008-03-22 03:34:38Z CJP $"
__revision__ = "$Revision: 52675 $"

#--------------------------------------------------------------------------#
# Dependancies
import taglib
import parselib

#--------------------------------------------------------------------------#

def GenerateTags(buff):
    """Create a DocStruct object that represents a NSIS Script
    @param buff: a file like buffer object (StringIO)
    @todo: generate tags for lua tables?

    """
    rtags = taglib.DocStruct()

    # Set Descriptions of Document Element Types
    rtags.SetElementDescription('variable', "Defines")
    rtags.SetElementDescription('section', "Section Definitions")
    rtags.SetElementDescription('macro', "Macro Definitions")
    rtags.SetElementDescription('function', "Function Definitions")
    rtags.SetElementPriority('variable', 4)
    rtags.SetElementPriority('section', 3)
    rtags.SetElementPriority('function', 2)
    rtags.SetElementPriority('macro', 1)

    # Parse the lines for code objects
    for lnum, line in enumerate(buff):
        line = line.strip()
        llen = len(line)

        # Skip comment and empty lines
        if line.startswith(u"#") or line.startswith(u";") or not line:
            continue

        # Look for functions and sections
        if parselib.IsToken(line, 0, u'Function'):
            parts = line.split()
            if len(parts) > 1:
                rtags.AddFunction(taglib.Function(parts[1], lnum))
        elif parselib.IsToken(line, 0, u'Section'):
            parts = line.split()
            if len(parts) > 1 and parts[1][0] not in ['"', "'", "`"]:
                # Unquoted section name: take it directly.
                rtags.AddElement('section', taglib.Section(parts[1], lnum))
            else:
                # Quoted section name: presumably takes the token following
                # the part that ends with the closing quote character —
                # TODO(review) confirm against sample NSIS scripts.
                for idx, part in enumerate(parts[1:]):
                    if parts[idx][-1] in ['"', "'", "`"]:
                        rtags.AddElement('section',
                                         taglib.Section(part, lnum))
                        break
        elif parselib.IsToken(line, 0, u'!macro'):
            parts = line.split()
            if len(parts) > 1:
                rtags.AddElement('macro', taglib.Macro(parts[1], lnum))
        elif parselib.IsToken(line, 0, u'!define'):
            parts = line.split()
            # Only tag defines whose name starts with a letter.
            if len(parts) > 1 and parts[1][0].isalpha():
                rtags.AddVariable(taglib.Variable(parts[1], lnum))
        else:
            continue

    return rtags

#-----------------------------------------------------------------------------#
# Test
if __name__ == '__main__':
    import sys
    import StringIO
    fhandle = open(sys.argv[1])
    txt = fhandle.read()
    fhandle.close()
    tags = GenerateTags(StringIO.StringIO(txt))
    print "\n\nElements:"
    for element in tags.GetElements():
        print "\n%s:" % element.keys()[0]
        for val in element.values()[0]:
            print "%s [%d]" % (val.GetName(), val.GetLine())
    print "END"
ses.append(self.pkg_a.arch.name) elif self.pkg_b: css_classes.append(self.pkg_b.arch.name) return ' '.join(css_classes) def __key(self): return (self.pkgname, hash(self.repo), hash(self.pkg_a), hash(self.pkg_b)) def __eq__(self, other): return self.__key() == other.__key() def __hash__(self): return hash(self.__key()) def multilib_differences(): # Query for checking multilib out of date-ness if database_vendor(Package) == 'sqlite': pkgname_sql = """ CASE WHEN ml.pkgname LIKE %s THEN SUBSTR(ml.pkgname, 7) WHEN ml.pkgname LIKE %s THEN SUBSTR(ml.pkgname, 1, LENGTH(ml.pkgname) - 9) ELSE ml.pkgname END """ else: pkgname_sql = """ CASE WHEN ml.pkgname LIKE %s THEN SUBSTRING(ml.pkgname, 7) WHEN ml.pkgname LIKE %s THEN SUBSTRING(ml.pkgname FROM 1 FOR CHAR_LENGTH(ml.pkgname) - 9) ELSE ml.pkgname END """ sql = """ SELECT ml.id, reg.id FROM packages ml JOIN packages reg ON ( reg.pkgname = (""" + pkgname_sql + """) AND reg.pkgver != ml.pkgver ) JOIN repos r ON reg.repo_id = r.id WHERE ml.repo_id = %s AND r.testing = %s AND r.staging = %s AND reg.arch_id = %s ORDER BY ml.last_update """ multilib = Repo.objects.get(name__iexact='multilib') x86_64 = Arch.objects.get(name='x86_64') params = ['lib32-%', '%-multilib', multilib.id, False, False, x86_64.id] cursor = connection.cursor() cursor.execute(sql, params) results = cursor.fetchall() # fetch all of the necessary packages to_fetch = set(chain.from_iterable(results)) pkgs = Package.objects.normal().in_bulk(to_fetch) return [(pkgs[ml], pkgs[reg]) for ml, reg in results] def get_wrong_permissions(): sql = """ SELECT DISTINCT id FROM ( SELECT pr.id, p.repo_id, pr.user_id FROM packages p JOIN packages_packagerelation pr ON p.pkgbase = pr.pkgbase WHERE pr.type = %s ) mp LEFT JOIN ( SELECT user_id, repo_id FROM user_profiles_allowed_repos ar INNER JOIN user_profiles up ON ar.userprofile_id = up.id ) ur ON mp.user_id = ur.user_id AND mp.repo_id = ur.repo_id WHERE ur.user_id IS NULL; """ cursor = connection.cursor() cursor.execute(sql, 
[PackageRelation.MAINTAINER]) to_fetch = [row[0] for row in cursor.fetchall()] relations = PackageRelation.objects.select_related( 'user', 'user__userprofile').filter( id__in=to_fetch) return relations def attach_maintainers(packages): '''Given a queryset or something resembling it of package objects, find all the maintainers and attach them to the packages to prevent N+1 query cascading.''' if isinstance(packages, QuerySet): pkgbases = packages.values('pkgbase') else: packages = list(packages) pkgbases = {p.pkgbase for p in packages if p is not None} rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, pkgbase__in=pkgbases).values_list( 'pkgbase', 'user_id').order_by().distinct() # get all the
user objects we will need user_ids = {rel[1] for rel in rels} users = User.objects.in_bulk(user_ids) # now build a pkgbase -> [maintainers...] map maintainers = defaultdict(list) for rel in rels: maintainers[rel[0]].append(users[rel[1]]) annotated = [] # and finally, attach the maintainer lists on the original packages for packa
ge in packages: if package is None: continue package.maintainers = maintainers[package.pkgbase] annotated.append(package) return annotated def approved_by_signoffs(signoffs, spec): if signoffs: good_signoffs = sum(1 for s in signoffs if not s.revoked) return good_signoffs >= spec.required return False class PackageSignoffGroup(object): '''Encompasses all packages in testing with the same pkgbase.''' def __init__(self, packages): if len(packages) == 0: raise Exception self.packages = packages self.user = None self.target_repo = None self.signoffs = set() self.default_spec = True first = packages[0] self.pkgbase = first.pkgbase self.arch = first.arch self.repo = first.repo self.version = '' self.last_update = first.last_update self.packager = first.packager self.maintainers = first.maintainers self.specification = fake_signoff_spec(first.arch) version = first.full_version if all(version == pkg.full_version for pkg in packages): self.version = version @property def package(self): '''Try and return a relevant single package object representing this group. 
Start by seeing if there is only one package, then look for the matching package by name, finally falling back to a standin package object.''' if len(self.packages) == 1: return self.packages[0] same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase] if same_pkgs: return same_pkgs[0] return PackageStandin(self.packages[0]) def find_signoffs(self, all_signoffs): '''Look through a list of Signoff objects for ones matching this particular group and store them on the object.''' for s in all_signoffs: if s.pkgbase != self.pkgbase: continue if self.version and not s.full_version == self.version: continue if s.arch_id == self.arch.id and s.repo_id == self.repo.id: self.signoffs.add(s) def find_specification(self, specifications): for spec in specifications: if spec.pkgbase != self.pkgbase: continue if self.version and not spec.full_version == self.version: continue if spec.arch_id == self.arch.id and spec.repo_id == self.repo.id: self.specification = spec self.default_spec = False return def approved(self): return approved_by_signoffs(self.signoffs, self.specification) @property def completed(self): return sum(1 for s in self.signoffs if not s.revoked) @property def required(self): return self.specification.required def user_signed_off(self, user=None): '''Did a given user signoff on this package? 
user can be passed as an argument, or attached to the group object itself so this can be called from a template.''' if user is None: user = self.user return user in (s.user for s in self.signoffs if not s.revoked) def __unicode__(self): return f'{self.pkgbase}-{self.version} (self.arch): {len(self.signoffs)}' def signoffs_id_query(model, repos): sql = """ SELECT DISTINCT s.id FROM %s s JOIN packages p ON ( s.pkgbase = p.pkgbase AND s.pkgver = p.pkgver AND s.pkgrel = p.pkgrel AND s.epoch = p.epoch AND s.arch_id = p.arch_id AND s.repo_id = p.repo_id ) WHERE p.repo_id IN (%s) AND s.repo_id IN (%s) """ cursor = connection.cursor() # query pre-process- fill in table name and placeholders for IN repo_sql = ','.join(['%s' for _ in repos]) sql = sql % (model._meta.db_table, repo_sql, repo_sql) repo_ids = [r.pk for r in repos] # repo_ids are needed twice, so double the array cursor.execute(sql, repo_ids * 2) results = cursor.fetchall() return [row[0] for row in results] def get_current_signoffs(repos): '''Returns a list of signoff objects for the given repos.''' to_fetch = signoffs_id_query(Signoff, repos) return Signoff.objects.select_related('user').in_bulk(to_fetch).value
from sys import version_info
from functools import reduce
from operator import mul

from flask_babel import gettext

if version_info[0] == 3:
    # Keep the Python 2 name available so the result dict below renders the
    # same way on both major versions.
    unicode = str

# Query keywords this answerer reacts to (must be the first word of the query).
keywords = ('min', 'max', 'avg', 'sum', 'prod')


# required answerer function
# can return a list of results (any result type) for a given query
def answer(query):
    """Compute a statistics function over the numeric arguments of *query*.

    The query text is expected to look like ``"<keyword> <num> <num> ..."``.
    Returns a one-element list ``[{'answer': <str>}]`` on success, or ``[]``
    when the query is not meant for this answerer (unknown keyword, too few
    parts, or non-numeric arguments).
    """
    parts = query.query.split()

    if len(parts) < 2:
        return []

    try:
        args = list(map(float, parts[1:]))
    except ValueError:
        # A non-numeric argument means this is not a statistics query;
        # only catch the conversion error, not everything.
        return []

    func = parts[0]
    answer = None

    # BUG FIX: str.split() yields str objects, so the original comparisons
    # against bytes literals (b'min', b'max', ...) were always False on
    # Python 3 and the answerer could never produce a result.
    if func == 'min':
        answer = min(args)
    elif func == 'max':
        answer = max(args)
    elif func == 'avg':
        answer = sum(args) / len(args)
    elif func == 'sum':
        answer = sum(args)
    elif func == 'prod':
        answer = reduce(mul, args, 1)

    if answer is None:
        return []

    return [{'answer': unicode(answer)}]


# required answerer function
# returns information about the answerer
def self_info():
    """Describe this answerer (name, description, examples) for the UI."""
    return {'name': gettext('Statistics functions'),
            'description': gettext('Compute {functions} of the arguments').format(
                functions='/'.join(keywords)),
            'examples': ['avg 123 548 2.04 24.2']}
# Rekall Memory Forensics # Copyright (C) 2007,2008 Volatile Systems # Copyright (C) 2010,2011,2012 Michael Hale Ligh <michael.ligh@mnin.org> # Copyright 2013 Google Inc. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or (at # your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # """The following is a description of windows stations from MSDN: http://msdn.microsoft.com/en-us/library/windows/desktop/ms687096(v=vs.85).aspx A window station contains a clipboard, an atom table, and one or more desktop objects. Each window station object is a securable object. When a window station is created, it is associated with the calling process and assigned to the current session. The interactive window station is the only window station that can display a user interface or receive user input. It is assigned to the logon session of the interactive user, and contains the keyboard, mouse, and display device. It is always named "WinSta0". All other window stations are noninteractive, which means they cannot display a user interface or receive user input. Ref: http://volatility-labs.blogspot.de/2012/09/movp-13-desktops-heaps-and-ransomware.html NOTE: Windows 8 does not have a global atom table any more. 
http://mista.nu/research/smashing_the_atom.pdf
"""

from rekall import plugin
from rekall.plugins.windows import common
from rekall.plugins.windows.gui import win32k_core


class WindowsStations(win32k_core.Win32kPluginMixin,
                      common.WindowsCommandPlugin):
    """Displays all the windows stations by following lists."""

    __name = "windows_stations"

    # One row per window station found in any session.
    table_header = [
        dict(name="WindowStation", style="address"),
        dict(name="Name", width=20),
        dict(name="SesId", width=5),
        dict(name="AtomTable", style="address"),
        dict(name="Interactive", width=11),
        dict(name="Desktops")
    ]

    def stations_in_session(self, session):
        """Yield every tagWINDOWSTATION linked into *session*'s station list.

        Reads the list head from the win32k.sys constant grpWinStaList
        (resolved against this session's address space) and walks the
        rpwinstaNext singly linked list.
        """
        # Get the start of the Window station list from
        # win32k.sys. These are all the Windows stations that exist in
        # this Windows session.
        station_list = self.win32k_profile.get_constant_object(
            "grpWinStaList",
            target="Pointer",
            target_args=dict(
                target="tagWINDOWSTATION"
            ),
            vm=session.obj_vm,
        )

        for station in station_list.walk_list("rpwinstaNext"):
            yield station

    def stations(self):
        """A generator of tagWINDOWSTATION objects."""
        # Each windows session has a unique set of windows stations.
        for session in self.session.plugins.sessions().session_spaces():
            for station in self.stations_in_session(session):
                yield station

    def collect(self):
        """Emit one table row per window station, with its desktop names."""
        for window_station in self.stations():
            desktops = [desk.Name for desk in window_station.desktops()]
            yield (window_station,
                   window_station.Name,
                   window_station.dwSessionId,
                   window_station.pGlobalAtomTable,
                   window_station.Interactive,
                   desktops)


class WinDesktops(plugin.VerbosityMixIn, WindowsStations):
    """Print information on each desktop."""

    __name = "desktops"

    # One row per GUI thread attached to each desktop.
    table_header = [
        dict(name="tagDESKTOP", style="address"),
        dict(name="Name", width=20),
        dict(name="Sid", width=3),
        dict(name="Hooks", width=5),
        dict(name="tagWND", style="address"),
        dict(name="Winds", width=5),
        dict(name="Thrd", width=5),
        dict(name="_EPROCESS"),
    ]

    def collect(self):
        """For every desktop in every station, emit a divider describing the
        desktop and its heap, then one row per attached thread."""
        for window_station in self.stations():
            for desktop in window_station.desktops():
                # NOTE(review): `divider` is built by concatenating two
                # (format-string, args...) tuples into one flat tuple;
                # presumably the renderer consumes it as such, but the second
                # format's {0}-{3} indices do not line up with the flat tuple
                # -- confirm against rekall's renderer divider handling.
                divider = ("Desktop: {0:addr}, Name: {1}\\{2}\n",
                           desktop, window_station.Name, desktop.Name)

                divider += ("Heap: {0:addr}, Size: {1:addr}, Base: {2:addr}, "
                            "Limit: {3:addr}\n",
                            desktop.pheapDesktop.v(),
                            (desktop.DeskInfo.pvDesktopLimit.v() -
                             desktop.DeskInfo.pvDesktopBase.v()),
                            desktop.DeskInfo.pvDesktopBase,
                            desktop.DeskInfo.pvDesktopLimit,
                            )
                yield dict(divider=divider)

                # Count windows once per desktop; reused for every thread row.
                window_count = len(list(desktop.windows(
                    desktop.DeskInfo.spwnd)))

                for thrd in desktop.threads():
                    yield dict(
                        tagDESKTOP=desktop,
                        Name=desktop.Name,
                        Sid=desktop.dwSessionId,
                        Hooks=desktop.DeskInfo.fsHooks,
                        tagWND=desktop.DeskInfo.spwnd.deref(),
                        Winds=window_count,
                        Thrd=thrd.pEThread.Cid.UniqueThread,
                        _EPROCESS=thrd.ppi.Process.deref())
# Authors: Eric Larson <larson.eric.d@gmail.com>
#          Sheraz Khan <sheraz@khansheraz.com>
#          Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)

import numpy as np

from ..filter import next_fast_len
from ..source_estimate import _BaseSourceEstimate
from ..utils import verbose, _check_combine, _check_option


@verbose
def envelope_correlation(data, combine='mean', orthogonalize="pairwise",
                         log=False, absolute=True, verbose=None):
    """Compute the envelope correlation.

    Parameters
    ----------
    data : array-like, shape=(n_epochs, n_signals, n_times) | generator
        The data from which to compute connectivity.
        The array-like object can also be a list/generator of array,
        each with shape (n_signals, n_times), or a :class:`~mne.SourceEstimate`
        object (and ``stc.data`` will be used). If it's float data,
        the Hilbert transform will be applied; if it's complex data,
        it's assumed the Hilbert has already been applied.
    combine : 'mean' | callable | None
        How to combine correlation estimates across epochs.
        Default is 'mean'. Can be None to return without combining.
        If callable, it must accept one positional input.
        For example::

            combine = lambda data: np.median(data, axis=0)

    orthogonalize : 'pairwise' | False
        Whether to orthogonalize with the pairwise method or not.
        Defaults to 'pairwise'. Note that when False,
        the correlation matrix will not be returned with
        absolute values.

        .. versionadded:: 0.19
    log : bool
        If True (default False), square and take the log before orthonalizing
        envelopes or computing correlations.

        .. versionadded:: 0.22
    absolute : bool
        If True (default), then take the absolute value of correlation
        coefficients before making each epoch's correlation matrix
        symmetric (and thus before combining matrices across epochs).
        Only used when ``orthogonalize=True``.

        .. versionadded:: 0.22
    %(verbose)s

    Returns
    -------
    corr : ndarray, shape ([n_epochs, ]n_nodes, n_nodes)
        The pairwise orthogonal envelope correlations.
        This matrix is symmetric. If combine is None, the array
        with have three dimensions, the first of which is ``n_epochs``.

    Notes
    -----
    This function computes the power envelope correlation between
    orthogonalized signals [1]_ [2]_.

    .. versionchanged:: 0.22
       Computations fixed for ``orthogonalize=True`` and diagonal entries are
       set explicitly to zero.

    References
    ----------
    .. [1] Hipp JF, Hawellek DJ, Corbetta M, Siegel M, Engel AK (2012)
           Large-scale cortical correlation structure of spontaneous
           oscillatory activity. Nature Neuroscience 15:884–890
    .. [2] Khan S et al. (2018). Maturation trajectories of cortical
           resting-state networks depend on the mediating frequency band.
           Neuroimage 174:57–68
    """
    _check_option('orthogonalize', orthogonalize, (False, 'pairwise'))
    from scipy.signal import hilbert
    n_nodes = None
    if combine is not None:
        fun = _check_combine(combine, valid=('mean',))
    else:  # None
        # No combining: stack the per-epoch matrices into one array.
        fun = np.array

    corrs = list()
    # Note: This is embarassingly parallel, but the overhead of sending
    # the data to different workers is roughly the same as the gain of
    # using multiple CPUs. And we require too much GIL for prefer='threading'
    # to help.
    for ei, epoch_data in enumerate(data):
        if isinstance(epoch_data, _BaseSourceEstimate):
            epoch_data = epoch_data.data
        if epoch_data.ndim != 2:
            raise ValueError('Each entry in data must be 2D, got shape %s'
                             % (epoch_data.shape,))
        n_nodes, n_times = epoch_data.shape
        # All epochs must have the same number of signals (n_times may vary).
        if ei > 0 and n_nodes != corrs[0].shape[0]:
            raise ValueError('n_nodes mismatch between data[0] and data[%d], '
                             'got %s and %s'
                             % (ei, n_nodes, corrs[0].shape[0]))
        # Get the complex envelope (allowing complex inputs allows people
        # to do raw.apply_hilbert if they want)
        if epoch_data.dtype in (np.float32, np.float64):
            n_fft = next_fast_len(n_times)
            epoch_data = hilbert(epoch_data, N=n_fft, axis=-1)[..., :n_times]

        if epoch_data.dtype not in (np.complex64, np.complex128):
            raise ValueError('data.dtype must be float or complex, got %s'
                             % (epoch_data.dtype,))
        # Envelope magnitude and the unit-magnitude conjugate used for the
        # pairwise orthogonalization below.
        data_mag = np.abs(epoch_data)
        data_conj_scaled = epoch_data.conj()
        data_conj_scaled /= data_mag
        if log:
            # Work with log-power envelopes instead of amplitude envelopes.
            data_mag *= data_mag
            np.log(data_mag, out=data_mag)

        # subtract means
        data_mag_nomean = data_mag - np.mean(data_mag, axis=-1, keepdims=True)

        # compute variances using linalg.norm (square, sum, sqrt) since mean=0
        data_mag_std = np.linalg.norm(data_mag_nomean, axis=-1)
        # Avoid division by zero for constant (zero-variance) signals.
        data_mag_std[data_mag_std == 0] = 1
        corr = np.empty((n_nodes, n_nodes))
        # One row of the correlation matrix per seed signal.
        for li, label_data in enumerate(epoch_data):
            if orthogonalize is False:  # the new code
                label_data_orth = data_mag[li]
                label_data_orth_std = data_mag_std[li]
            else:
                # Envelope of the seed orthogonalized w.r.t. every signal.
                label_data_orth = (label_data * data_conj_scaled).imag
                np.abs(label_data_orth, out=label_data_orth)
                # protect against invalid value -- this will be zero
                # after (log and) mean subtraction
                label_data_orth[li] = 1.
                if log:
                    label_data_orth *= label_data_orth
                    np.log(label_data_orth, out=label_data_orth)
                label_data_orth -= np.mean(label_data_orth, axis=-1,
                                           keepdims=True)
                label_data_orth_std = np.linalg.norm(label_data_orth, axis=-1)
                label_data_orth_std[label_data_orth_std == 0] = 1

            # correlation is dot product divided by variances
            corr[li] = np.sum(label_data_orth * data_mag_nomean, axis=1)
            corr[li] /= data_mag_std
            corr[li] /= label_data_orth_std
        if orthogonalize is not False:
            # Make it symmetric (it isn't at this point)
            if absolute:
                corr = np.abs(corr)
            corr = (corr.T + corr) / 2.

        corrs.append(corr)
        del corr

    corr = fun(corrs)
    return corr
from pattern import Pattern
import itertools
import random
import colorsys
import time


class EqPattern(Pattern):
    """VU-meter style pattern.

    Each tentacle displays one EQ channel as a bar of "meter"-colored pixels
    over a "background" color.  On (rate-limited) beats, either the meter or
    the background color is hue-shifted by a random amount.
    """

    def __init__(self, meter_color=(255, 100, 50), background_color=(0, 50, 255)):
        (self.meter_r, self.meter_g, self.meter_b) = meter_color[:3]
        (self.bg_r, self.bg_g, self.bg_b) = background_color[:3]
        # TODO: decide whether the meter/background RGB components should be
        # registered as tunable params as well.
        self.register_param("max_hue_shift", 0, 0.5, 0.2)
        self.register_param("beat_channel", 0, 6, 2)
        self.register_param("max_bpm", 0, 200, 100)
        self.register_param("prob_shift", 0, 1, 100)
        # Earliest time at which the next beat-triggered hue shift may fire.
        self.next_shift = time.time()

    def meter_color(self):
        """Current meter color as an (r, g, b) tuple."""
        return (self.meter_r, self.meter_g, self.meter_b)

    def background_color(self):
        """Current background color as an (r, g, b) tuple."""
        return (self.bg_r, self.bg_g, self.bg_b)

    # TODO: put this into utils or something
    def hue_shift(self, color, hue_shift):
        """Return *color* rotated around the HSV hue wheel by *hue_shift*."""
        r, g, b = [channel / 255.0 for channel in color[:3]]
        h, s, v = colorsys.rgb_to_hsv(r, g, b)
        h = h + hue_shift % 1
        shifted = colorsys.hsv_to_rgb(h, s, v)
        return tuple(int(channel * 255) for channel in shifted)

    def next_frame(self, octopus, data):
        """Render one frame onto *octopus* from the audio analysis *data*."""
        channel = int(round(self.beat_channel))
        now = time.time()
        if data.beats[channel] and now > self.next_shift:
            # Rate-limit color shifts to max_bpm.
            self.next_shift = now + 60.0 / self.max_bpm
            shift = self.max_hue_shift * (2 * random.random() - 1)
            # Coin flip: shift either the meter or the background color.
            if int(round(random.random())):
                self.meter_r, self.meter_g, self.meter_b = self.hue_shift(
                    self.meter_color(), shift)
            else:
                self.bg_r, self.bg_g, self.bg_b = self.hue_shift(
                    self.background_color(), shift)

        fg = self.meter_color()
        bg = self.background_color()
        levels = itertools.cycle(data.eq)
        for tentacle in octopus.tentacles:
            level = next(levels)
            for led_strip in tentacle.led_strips:
                total = len(led_strip.pixels)
                lit = int(total * float(level))
                led_strip.put_pixels([fg] * lit + [bg] * (total - lit))
def get_avatar(backend, strategy, details, response, user=None, *args, **kwargs):
    """Social-auth pipeline step: store the provider's avatar URL on the user.

    Parameters mirror the python-social-auth pipeline signature; only
    ``backend``, ``response`` and ``user`` are used here.  For the facebook
    backend the Graph API picture endpoint is derived from the provider's
    user id in ``response``; other backends are ignored.

    Side effects: sets ``user.avatar`` and calls ``user.save()`` when a URL
    was derived and a user object is present.
    """
    url = None
    if backend.name == 'facebook':
        url = "http://graph.facebook.com/%s/picture?type=small" % response['id']
    # BUG FIX: ``user`` defaults to None and the pipeline may invoke this step
    # before a user exists; guard against dereferencing None.
    if url and user is not None:
        user.avatar = url
        user.save()
from couchdbkit import ResourceNotFound
from toggle.models import Toggle
from toggle.shortcuts import update_toggle_cache, parse_toggle


def move_toggles(from_toggle_id, to_toggle_id):
    """
    Moves all enabled items from one toggle to another.
    """
    try:
        from_toggle = Toggle.get(from_toggle_id)
    except ResourceNotFound:
        # if no source found this is a noop
        return
    try:
        to_toggle = Toggle.get(to_toggle_id)
    except ResourceNotFound:
        # Destination doesn't exist yet: start from an empty toggle document.
        to_toggle = Toggle(slug=to_toggle_id, enabled_users=[])

    for item in from_toggle.enabled_users:
        if item not in to_toggle.enabled_users:
            to_toggle.enabled_users.append(item)
            # NOTE(review): the cache is refreshed only for newly-moved
            # items; items already present in the destination are presumed
            # to be cached already -- confirm this is intended.
            namespace, item = parse_toggle(item)
            update_toggle_cache(to_toggle_id, item, True, namespace=namespace)

    # Persist the merged destination, then remove the source document.
    to_toggle.save()
    from_toggle.delete()
y not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import mock import os import shutil import tempfile import unittest import zipfile from coffea.java.java_scanner import JavaScanner class Archive(object): def __init__(self, name='archive'): self.name = name self.files = [] def __enter__(self): self._tmpdir = tempfile.mkdtemp() self.root_path = os.path.join(self._tmpdir, self.name) os.makedirs(self.root_path) return self def __exit__(self, type, value, traceback): shutil.rmtree(self._tmpdir) def __repr__(self): return 'Sample Archive:\n%s' % '\n'.join(self.files) def mkdir(self, root, name): path = os.path.join(root, name) os.makedirs(path) return path def mkfile(self, root, name): path = os.path.join(root, name) open(path, 'a').close() self.files.append(path) def mkzip(self, root, name, entries): path = os.path.join(root, name) zf = zipfile.ZipFile(path, "w") with tempfile.NamedTemporaryFile() as f: for e in entries: zf.write(f.name, e) zf.close() self.files.append(path) return path def compress(self): path = os.path.join(self._tmpdir, 'compressed-'+self.name) zf = zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED) for f in self.files: zf.write(f, f.replace(self.root_path, '')) zf.close() return path class SampleJar(Archive): def __init__(self): Archive.__init__(self, 'sample-lib.jar') def __enter__(self): Archive.__enter__(self) self.com_path = self.mkdir(self.root_path, 'com') self.com_example_path = self.mkdir(self.com_path, 'example') self.mkfile(self.com_example_path, 'Component.class') self.mkfile(self.com_example_path, 'ComponentImpl.class') return 
self def __repr__(self): return 'Sample Jar:\n%s\n%s\n%s' % ('*'*20, '\n'.join(self.files), '*'*20) class SampleWar(Archive): def __init__(self): Archive.__init__(self, 'sample-webapp.war') def __enter__(self): Archive.__enter__(self) self.webinf_path = self.mkdir(self.root_path, 'WEB-INF') self.classes_path = self.mkdir(self.webinf_path, 'classes') self.lib_path = self.mkdir(self.webinf_path, 'lib') self.css_path = self.mkdir(self.root_path, 'css') self.img_path = self.mkdir(self.root_path, 'img') self.mkfile(self.webinf_path, 'web.xml') self.mkfile(self.webinf_path, 'applicationContext.xml') self.mkfile(self.classes_path, 'Controller.class') self.mkfile(self.classes_path, 'Model.class') self.mkfile(self.classes_path, 'View.class') self.mkfile(self.root_path, 'index.jsp') self.mkfile(self.css_path, 'main.css') self.mkfile(self.img_path, 'logo.png') self.mkzip(self.lib_path, 'service.jar', ['com/example/ServiceImpl.class', 'com/example/ServiceImplHelper.class']) self.mkzip(self.lib_path, 'service-api.jar', ['com/example/Service.class']) return self def __repr__(self): return 'Sample War:\n%s\n%s\n%s' % ('*'*20, '\n'.join(self.files), '*'*20) class SampleEar(Archive): def __init__(self): Archive.__init__(self, 'sample-app.ear') def __enter__(self): Archive.__enter__(self) self.metainf_path = self.mkdir(self.root_path, 'META-INF') self.lib_path = self.mkdir(self.root_path, 'lib') self.mkfile(self.metainf_path, 'application.xml') self.mkfile(self.metainf_path, 'MANIFEST.MF') self.mkzip(self.lib_path, 'commons.jar', ['com/example/CommonClass.class', 'com/example/CommonClassFactory.class',
'com/example/CommonClassHelper.cl
ass']) self.mkzip(self.root_path, 'business-component.jar', ['com/example/Service.class', 'com/example/ServiceBean.class']) #TODO: Create sample JARs in /WEB-INF/lib self.mkzip(self.root_path, 'sample-webapp.war', ['/WEB-INF/web.xml', 'com/example/CommonClass.class', 'com/example/CommonClassHelper.class']) return self def __repr__(self): return 'Sample Ear:\n%s\n%s\n%s' % ('*'*20, '\n'.join(self.files), '*'*20) class TestJavaScanner(unittest.TestCase): def setUp(self): self.scanner = JavaScanner(callback=mock.MagicMock()) self.assertTrue(os.path.isdir(self.scanner._work_dir)) def tearDown(self): self.scanner.dispose() self.assertFalse(os.path.isdir(self.scanner._work_dir)) def test_supported_files(self): scanner = self.scanner def supported(name): with tempfile.NamedTemporaryFile(suffix=name) as f: return scanner.supported_file(f.name) self.assertTrue(supported('Test.class')) self.assertTrue(supported('test-1.0.jar')) self.assertTrue(supported('test-1.0.war')) self.assertTrue(supported('test-1.0.ear')) self.assertFalse(supported('build.properties')) self.assertFalse(supported('pom.xml')) self.assertFalse(supported('build.gradle')) self.assertFalse(supported('README')) self.assertFalse(supported('Test.java')) def test_scan_file(self): scanner = self.scanner with tempfile.NamedTemporaryFile(suffix = '.xml') as not_supported_file: scanner.callback.reset_mock() self.assertEquals(scanner.scan(not_supported_file.name), 0) self.assertEquals(scanner.callback.call_count, 0) with tempfile.NamedTemporaryFile(suffix = '.class') as class_file: self.assertEquals(scanner.scan(class_file.name), 1) self.assertEquals(scanner.callback.call_count, 1) scanner.callback.assert_any_call(class_file.name) with SampleJar() as exploded_jar: jar = exploded_jar.compress() scanner.callback.reset_mock() self.assertEquals(scanner.scan(jar), 2) self.assertEquals(scanner.callback.call_count, 2) with SampleWar() as exploded_war: war = exploded_war.compress() scanner.callback.reset_mock() 
self.assertEquals(scanner.scan(war), 6) self.assertEquals(scanner.callback.call_count, 6) with SampleEar() as exploded_ear: ear = exploded_ear.compress() scanner.callback.reset_mock() self.assertEquals(scanner.scan(ear), 7) self.assertEquals(scanner.callback.call_count, 7) def test_scan_directory(self): scanner = self.scanner with SampleJar() as exploded_jar: scanner.callback.reset_mock() self.assertEquals(scanner.scan(exploded_jar.root_path), 2) self.assertEquals(scanner.callback.call_count, 2) with SampleWar() as exploded_war: scanner.callback.reset_mock() self.assertEquals(scanner.scan(exploded_war.root_path), 6) self.assertEquals(scanner.cal
dtype) y_np = np.random.uniform(-1, 1, [2, 3]).astype(self.dtype) z_np = np.random.uniform(-1, 1, [6, 9]).astype(self.dtype) x = paddle.to_tensor(x_np) y = paddle.to_tensor(y_np) z = paddle.to_tensor(z_np) a = paddle.to_tensor([[1, 1], [2, 2], [3, 3]]) b = paddle.to_tensor([[1, 1], [2, 2], [3, 3]]) # 1. Unary operation for Tensor self.assertEqual(x.dim(), 2) self.assertEqual(x.ndimension(), 2) self.assertEqual(x.ndim, 2) self.assertEqual(x.size, 6) self.assertEqual(x.numel(), 6) self.assertTrue(np.array_equal(x.exp().numpy(), paddle.exp(x).numpy())) self.assertTrue( np.array_equal(x.tanh().numpy(), paddle.tanh(x).numpy())) self.assertTrue( np.array_equal(x.atan().numpy(), paddle.atan(x).numpy())) self.assertTrue(np.array_equal(x.abs().numpy(), paddle.abs(x).numpy())) m = x.abs() self.assertTrue( np.array_equal(m.sqrt().numpy(), paddle.sqrt(m).numpy())) self.assertTrue( np.array_equal(m.rsqrt().numpy(), paddle.rsqrt(m).numpy())) self.assertTrue( np.array_equal(x.ceil().numpy(), paddle.ceil(x).numpy())) self.assertTrue( np.array_equal(x.floor().numpy(), paddle.floor(x).numpy())) self.assertTrue(np.array_equal(x.cos().numpy(), paddle.cos(x).numpy())) self.assertTrue( np.array_equal(x.acos().numpy(), paddle.acos(x).numpy())) self.assertTrue( np.array_equal(x.asin().numpy(), paddle.asin(x).numpy())) self.assertTrue(np.array_equal(x.sin().numpy(), paddle.sin(x).numpy())) self.assertTrue( np.array_equal(x.sinh().numpy(), paddle.sinh(x).numpy())) self.assertTrue( np.array_equal(x.cosh().numpy(), paddle.cosh(x).numpy())) self.assertTrue( np.array_equal(x.round().numpy(), paddle.round(x).numpy())) self.assert
True( np.array_equal(x.reciprocal().numpy(), paddle.reciprocal(x).numpy( ))) self.assertTrue( np.array_equal(x.square().numpy(), paddle.square(x).numpy())) self.assertTrue( np.array_equal(x.rank().numpy(), paddle.rank(x).numpy())) self.assertTrue( np.array_equal(x[0].t().numpy(), paddle.t(x[0]).numpy())) self.assertTrue( np.array_equal(x.asinh().numpy(), paddle.asinh(x).numpy()))
### acosh(x) = nan, need to change input t_np = np.random.uniform(1, 2, [2, 3]).astype(self.dtype) t = paddle.to_tensor(t_np) self.assertTrue( np.array_equal(t.acosh().numpy(), paddle.acosh(t).numpy())) self.assertTrue( np.array_equal(x.atanh().numpy(), paddle.atanh(x).numpy())) d = paddle.to_tensor([[1.2285208, 1.3491015, 1.4899898], [1.30058, 1.0688717, 1.4928783], [1.0958099, 1.3724753, 1.8926544]]) d = d.matmul(d.t()) # ROCM not support cholesky if not fluid.core.is_compiled_with_rocm(): self.assertTrue( np.array_equal(d.cholesky().numpy(), paddle.cholesky(d).numpy( ))) self.assertTrue( np.array_equal(x.is_empty().numpy(), paddle.is_empty(x).numpy())) self.assertTrue( np.array_equal(x.isfinite().numpy(), paddle.isfinite(x).numpy())) self.assertTrue( np.array_equal( x.cast('int32').numpy(), paddle.cast(x, 'int32').numpy())) self.assertTrue( np.array_equal( x.expand([3, 2, 3]).numpy(), paddle.expand(x, [3, 2, 3]).numpy())) self.assertTrue( np.array_equal( x.tile([2, 2]).numpy(), paddle.tile(x, [2, 2]).numpy())) self.assertTrue( np.array_equal(x.flatten().numpy(), paddle.flatten(x).numpy())) index = paddle.to_tensor([0, 1]) self.assertTrue( np.array_equal( x.gather(index).numpy(), paddle.gather(x, index).numpy())) index = paddle.to_tensor([[0, 1], [1, 2]]) self.assertTrue( np.array_equal( x.gather_nd(index).numpy(), paddle.gather_nd(x, index).numpy())) self.assertTrue( np.array_equal( x.reverse([0, 1]).numpy(), paddle.reverse(x, [0, 1]).numpy())) self.assertTrue( np.array_equal( a.reshape([3, 2]).numpy(), paddle.reshape(a, [3, 2]).numpy())) self.assertTrue( np.array_equal( x.slice([0, 1], [0, 0], [1, 2]).numpy(), paddle.slice(x, [0, 1], [0, 0], [1, 2]).numpy())) self.assertTrue( np.array_equal( x.split(2)[0].numpy(), paddle.split(x, 2)[0].numpy())) m = paddle.to_tensor( np.random.uniform(-1, 1, [1, 6, 1, 1]).astype(self.dtype)) self.assertTrue( np.array_equal( m.squeeze([]).numpy(), paddle.squeeze(m, []).numpy())) self.assertTrue( np.array_equal( m.squeeze([1, 
2]).numpy(), paddle.squeeze(m, [1, 2]).numpy())) m = paddle.to_tensor([2, 3, 3, 1, 5, 3], 'float32') self.assertTrue( np.array_equal(m.unique()[0].numpy(), paddle.unique(m)[0].numpy())) self.assertTrue( np.array_equal( m.unique(return_counts=True)[1], paddle.unique( m, return_counts=True)[1])) self.assertTrue(np.array_equal(x.flip([0]), paddle.flip(x, [0]))) self.assertTrue(np.array_equal(x.unbind(0), paddle.unbind(x, 0))) self.assertTrue(np.array_equal(x.roll(1), paddle.roll(x, 1))) self.assertTrue(np.array_equal(x.cumsum(1), paddle.cumsum(x, 1))) m = paddle.to_tensor(1) self.assertTrue(np.array_equal(m.increment(), paddle.increment(m))) m = x.abs() self.assertTrue(np.array_equal(m.log(), paddle.log(m))) self.assertTrue(np.array_equal(x.pow(2), paddle.pow(x, 2))) self.assertTrue(np.array_equal(x.reciprocal(), paddle.reciprocal(x))) # 2. Binary operation self.assertTrue( np.array_equal(x.divide(y).numpy(), paddle.divide(x, y).numpy())) self.assertTrue( np.array_equal( x.matmul(y, True, False).numpy(), paddle.matmul(x, y, True, False).numpy())) self.assertTrue( np.array_equal( x.norm( p='fro', axis=[0, 1]).numpy(), paddle.norm( x, p='fro', axis=[0, 1]).numpy())) self.assertTrue( np.array_equal(x.dist(y).numpy(), paddle.dist(x, y).numpy())) self.assertTrue( np.array_equal(x.cross(y).numpy(), paddle.cross(x, y).numpy())) m = x.expand([2, 2, 3]) n = y.expand([2, 2, 3]).transpose([0, 2, 1]) self.assertTrue( np.array_equal(m.bmm(n).numpy(), paddle.bmm(m, n).numpy())) self.assertTrue( np.array_equal( x.histogram(5, -1, 1).numpy(), paddle.histogram(x, 5, -1, 1).numpy())) self.assertTrue( np.array_equal(x.equal(y).numpy(), paddle.equal(x, y).numpy())) self.assertTrue( np.array_equal( x.greater_equal(y).numpy(), paddle.greater_equal(x, y).numpy())) self.assertTrue( np.array_equal( x.greater_than(y).numpy(), paddle.greater_than(x, y).numpy())) self.assertTrue( np.array_equal( x.less_equal(y).numpy(), paddle.less_equal(x, y).numpy())) self.assertTrue( np.array_equal( 
x.less_than(y).numpy(), paddle.less_than(x, y).numpy())) self.assertTrue( np.array_equal( x.not_equal(y).numpy(), paddle.not_equal(x, y).numpy())) self.assertTrue( np.array_equal( x.equal_all(y).numpy(), paddle.equal_all(x, y).numpy())) self.assertTrue(
) resp = self.client.getKeyRange('objects,', 'objects/') keys = resp.wait() self.assertEqual(len(keys), 1) head_key = keys[0] # policy.hash.t.s.ext.nonce-frag nonce = head_key.rsplit('.', 1)[1] nonce_parts = nonce.split('-') # nonce is now a uuid with a frag_index on the end self.assertEqual(6, len(nonce_parts)) # and it has the value of the ec-frag-index self.assertEqual(int(nonce_parts[-1]), 7) def test_put_and_get(self): df = self.mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy) req_timestamp = time.time() with df.create() as writer: writer.write('awesome') writer.put({'X-Timestamp': req_timestamp}) with df.open() as reader: metadata = reader.get_metadata() body = ''.join(reader) self.assertEquals(body, 'awesome') expected = { 'X-Timestamp': req_timestamp, 'X-Kinetic-Chunk-Count': 1, } for k, v in expected.items(): self.assertEqual(metadata[k], v, 'expected %r for metadatakey %r got %r' % ( v, k, metadata[k])) def test_submit_write_all_sync_options(self): for sync_option in ('flush', 'writeback', 'writethrough', 'default'): conf = {'synchronization': sync_option} mgr = server.DiskFileManager(conf, self.logger) df = mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy) options = {} def capture_args(*args, **kwargs): options.update(kwargs) df.conn.put = capture_args with df.create(): key = self.buildKey('submit_%s' % sync_option) df._submit_write(key, 'blob', final=False) # flush option does writeback unless final if sync_option == 'flush': self.assertEqual(options['synchronization'], server.SYNC_OPTION_MAP['writeback']) else: self.assertEqual(options['synchronization'], df.synchronization) # final write always matches sync option key = self.buildKey('submit_final_%s' % sync_option) df._submit_write(key, 'blob', final=True) self.assertEqual(options['synchronization'], df.synchronization) def test_put_all_sync_options(self): expected_body = 'a' * 100 conf = { 'disk_chunk_size': 10, } for sync_option in ('flush', 
'writeback', 'writethrough', 'default'): conf['synchronization'] = sync_option mgr = server.DiskFileManager(conf, self.logger) mgr.unlink_wait = True df = mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy) req_timestamp = time.time() with df.create() as writer: writer.write(expected_body) writer.put({'X-Timestamp': req_timestamp}) with df.open() as reader: metadata = reader.get_metadata() body = ''.join(reader) self.assertEquals(body, expected_body) expected = { 'X-Timestamp': req_timestamp, 'X-Kinetic-Chunk-Count': 10, } for k, v in expected.items(): self.assertEqual(metadata[k], v, 'expected %r for metadatakey %r got %r' % ( v, k, metadata[k])) def test_get_not_found(self): df = self.mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy) try: df.open() except server.diskfile.DiskFileNotExist: pass else: self.fail('Did not raise deleted!') finally: df.close() def test_multi_chunk_put_and_get(self): df = self.mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy,
disk_chunk_size=10) req_timestamp = time.time() with df.create() as writer: chunk = '\x00' * 10 for i in range(3): writer.write(chunk) writer.put({'X-Timestamp': req_timestamp}) with df.open() as reader: metadata = reader.get_metadata() body = ''.join(reader) self.assertEquals(body, '\x00' * 30) expected = { 'X-Timestamp': req
_timestamp, 'X-Kinetic-Chunk-Count': 3, } for k, v in expected.items(): self.assertEqual(metadata[k], v) def test_multi_chunk_put_and_get_with_buffer_offset(self): disk_chunk_size = 10 write_chunk_size = 6 write_chunk_count = 7 object_size = write_chunk_size * write_chunk_count # int(math.ceil(1.0 * object_size / disk_chunk_size)) q, r = divmod(object_size, disk_chunk_size) disk_chunk_count = q if not r else q + 1 df = self.mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy, disk_chunk_size=disk_chunk_size) req_timestamp = time.time() with df.create() as writer: chunk = '\x00' * write_chunk_size for i in range(write_chunk_count): writer.write(chunk) writer.put({'X-Timestamp': req_timestamp}) with df.open() as reader: metadata = reader.get_metadata() body = ''.join(reader) self.assertEquals(len(body), object_size) self.assertEquals(body, '\x00' * object_size) expected = { 'X-Timestamp': req_timestamp, 'X-Kinetic-Chunk-Count': disk_chunk_count, } for k, v in expected.items(): self.assertEqual(metadata[k], v) def test_write_and_delete(self): df = self.mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy, disk_chunk_size=10) req_timestamp = time.time() with df.create() as writer: chunk = '\x00' * 10 for i in range(3): writer.write(chunk) writer.put({'X-Timestamp': req_timestamp}) req_timestamp += 1 df.delete(req_timestamp) try: df.open() except server.diskfile.DiskFileDeleted as e: self.assertEqual(e.timestamp, req_timestamp) else: self.fail('Did not raise deleted!') finally: df.close() # check object keys storage_policy = server.diskfile.get_data_dir(int(self.policy)) start_key = '%s.%s' % (storage_policy, df.hashpath) end_key = '%s.%s/' % (storage_policy, df.hashpath) keys = self.client.getKeyRange(start_key, end_key).wait() self.assertEqual(1, len(keys)) # the tombstone! 
for key in keys: expected = start_key + '.%s.ts' % Timestamp(req_timestamp).internal self.assert_(key.startswith(expected)) # check chunk keys start_key = 'chunks.%s' % df.hashpath end_key = 'chunks.%s/' % df.hashpath keys = self.client.getKeyRange(start_key, end_key).wait() self.assertEqual(0, len(keys)) def test_overwrite(self): num_chunks = 3 disk_chunk_size = 10 disk_chunk_count = num_chunks df = self.mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'), self.policy, disk_chunk_size=10) req_timestamp = time.time() with df.create() as writer: ch
from PythonQt import QtGui, QtCore
from uic.uiparser import UIParser
from uic.Loader.qobjectcreator import LoaderCreatorPolicy


class DynamicUILoader(UIParser):
    """UI parser that builds widget trees at runtime through PythonQt."""

    def __init__(self):
        # Drive the generic UIParser with the PythonQt module objects and
        # the loader's QObject-creation policy.
        UIParser.__init__(self, QtCore, QtGui, LoaderCreatorPolicy())

    def createToplevelWidget(self, classname, widgetname):
        """Return the widget to use as the top level of the loaded UI.

        If loadUi() was handed a pre-built instance, validate that it is of
        (a subclass of) *classname* and reuse it; otherwise create a fresh
        QObject of that class.
        """
        if self.toplevelInst is None:
            # No caller-supplied instance: build one from scratch.
            return self.factory.createQObject(classname, widgetname, ())
        expected_type = self.factory.findQObjectType(classname)
        if not isinstance(self.toplevelInst, expected_type):
            raise TypeError(("Wrong base class of toplevel widget",
                             (type(self.toplevelInst), classname)))
        return self.toplevelInst

    def loadUi(self, filename, toplevelInst = None):
        """Parse the .ui file *filename* and return the widget tree.

        *toplevelInst*, when given, is reused as the top-level widget
        instead of a newly created one.
        """
        self.toplevelInst = toplevelInst
        return self.parse(filename)
#!/usr/bin/env python
#
# Bzip2
#
# Bzip2 packages and versions
#
# Author P G Jones - 2014-08-15 <p.g.jones@qmul.ac.uk> : New file.
####################################################################################################
import nusoft.package.conditional as conditional_package
import os

class Bzip2(conditional_package.ConditionalPackage):
    """ The Bzip2 installation package.

    :param _tar_name: name of the tar file to download/install
    :param _version: version of Bzip2 to install (supplied as a class
        attribute on the concrete subclasses created in `versions` below).
    """
    def __init__(self, system, repository):
        """ Initialise this bzip2 installation package.

        :param system: class that manages system commands
        :type system: :class:`nusoft.system.System` instance
        :param repository: local name of the repository the package is from
        """
        super(Bzip2, self).__init__(self._version, system, repository,
                                    libraries=["bz2"],
                                    headers=["bzlib.h"])
        self._tar_name = self._version + ".tar.gz"
    def get_dependencies(self):
        """ Return a list of dependency names

        :returns: list of dependency package names
        :rtype: list
        """
        return []
    def _download(self):
        """ Download the bzip2 tar file."""
        # NOTE(review): haxx.se is curl's domain, not bzip2's -- this URL
        # looks wrong (bzip2 was historically hosted at bzip.org); confirm
        # before relying on it.
        self._system.download("http://bzip2.haxx.se/download/" + self._tar_name)
    def _install(self):
        """ Untar the tar file to the install path and build/install libbz2."""
        self._system.untar(self._tar_name, self.get_install_path(), 1)
        # Build the shared library first, then install headers and libraries
        # under this package's own install prefix.
        self._system.make(args=["-f", "Makefile-libbz2_so"], cwd=self.get_install_path())
        self._system.make(args=["install", "PREFIX=" + self.get_install_path()],
                          cwd=self.get_install_path())
    def _update(self):
        """ Nothing to do here..."""
        pass
    def _remove(self):
        """ Remove the install directory."""
        self._system.remove(self.get_install_path())
    def _is_installed(self):
        """ Check if bzip2 is installed by looking for the bzlib.h header and
        the libbz2 library under the install directory.

        :return: True if installed
        """
        return self._system.exists(os.path.join(self.get_install_path(), "include/bzlib.h")) \
            and self._system.is_library(os.path.join(self.get_install_path(), "lib/libbz2"))

# The versions of bzip2 that can be installed
versions = [type('bzip2-1.0.6', (Bzip2, object), {"_version" : "bzip2-1.0.6"})]
#!/usr/bin/python
# Download new video posts from a list of tumblr RSS feeds, recording what has
# already been fetched in a local sqlite database so reruns only grab new posts.
import feedparser
import wget
import sqlite3
import time

RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss']
# Spread one full pass over an hour, regardless of how many feeds there are.
sleep=3600/len(RssUrlList)

def mkdir(path):
    """Create directory *path* (and parents) if it does not already exist."""
    import os
    path=path.strip()
    path=path.rstrip("\\")
    if not os.path.exists(path):
        os.makedirs(path)

conn = sqlite3.connect('tumblr.db')

def DownloadVideo(rss_url):
    """Fetch *rss_url*, download any new video posts and record them in the DB.

    A table is kept per feed; iteration stops at the newest post already seen.
    """
    feeds = feedparser.parse(rss_url)
    # Derive a per-feed table name from the blog hostname; '-' is stripped
    # because it is not valid in an unquoted SQL identifier.  The table name
    # cannot be bound as a parameter, so it is interpolated after sanitising.
    table = rss_url[7:-15].replace('-', '')
    try:
        conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)''' % table)
        # Values are bound with '?' placeholders instead of string formatting.
        conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES (?,'new','0')" % table,
                     (rss_url,))
    except sqlite3.Error:
        # Table (and its seed row) already exist from a previous run.
        pass
    mkdir(rss_url[7:-4])
    for post in feeds.entries:
        thisposttime = float(time.mktime(
            time.strptime(post.published[:-6], "%a, %d %b %Y %H:%M:%S")))
        # Stop as soon as we reach the newest post recorded on a previous pass.
        if conn.execute("SELECT MAX(DATE) FROM %s" % table).next()[0] == thisposttime:
            break
        if post.description.find("video_file") == -1:
            continue
        # Locate the tumblr video id inside the embedded <source src=...> tag.
        sourceadd = post.description.find("source src=")
        tumblradd = post.description[sourceadd:].find("tumblr_")
        typeadd = post.description[sourceadd:][tumblradd:].find("type=\"video")
        video_id = post.description[sourceadd:][tumblradd:][:typeadd-2]
        if video_id.find("/") != -1:
            video_id = video_id[:video_id.find("/")]
        try:
            # EAFP: .next() raises StopIteration when the id is unknown, which
            # is exactly the "new video" case handled below.
            list(conn.execute("SELECT * FROM %s WHERE ADDRESS == ?" % table,
                              (video_id,)).next())
        except StopIteration:
            print(post.title + ": " + post.link + post.published + "\n")
            wget.download("http://vt.tumblr.com/" + video_id + ".mp4", rss_url[7:-4])
            print("\n")
            # Reuse thisposttime instead of re-parsing the published date.
            conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES (?,?,?)" % table,
                         (rss_url, video_id, thisposttime))
    conn.commit()

while True:
    for rss_url in RssUrlList:
        print("Downloading " + rss_url)
        DownloadVideo(rss_url)
        print("Sleep " + str(sleep) + " seconds")
        time.sleep(sleep)
import requests
import json
import sqlite3
import time

# Read the JCDecaux API key for the velib service from a local file.
with open('jcdecaux.key', 'r') as key_file:
    api_key = key_file.readline().rstrip('\n')

startime = time.time()

url = 'https://api.jcdecaux.com/vls/v1/stations?contract=Paris&apiKey=' + api_key
response = requests.get(url)
print(response.status_code)
# Fail loudly on an HTTP error instead of crashing later on malformed data.
response.raise_for_status()
data = response.json()

conn = sqlite3.connect('velib.db')
cursor = conn.cursor()

# One snapshot timestamp shared by every row inserted during this run.
request_date = int(time.time())
for station in data:
    number = int(station['number'])
    status = station['status']
    bike_stands = int(station['bike_stands'])
    available_bike_stands = int(station['available_bike_stands'])
    available_bikes = int(station['available_bikes'])
    last_update = int(station['last_update'])
    cursor.execute("""
    INSERT INTO statistics(number,request_date,status,bike_stands,available_bike_stands,available_bikes,last_update)
    VALUES(?, ?, ?, ?, ?, ?, ?)""",
                   (number, request_date, status, bike_stands,
                    available_bike_stands, available_bikes, last_update))
# Commit once for the whole batch rather than per row.
conn.commit()

endtime = time.time()
print(int(endtime - startime))
conn.close()
import pytest

from proposals.models import AdditionalSpeaker, TalkProposal, TutorialProposal


@pytest.fixture
def talk_proposal(user):
    """A talk proposal (id 42) submitted by the ``user`` fixture."""
    return TalkProposal.objects.create(
        id=42,
        submitter=user,
        title='Beyond the Style Guides<br>',
    )


@pytest.fixture
def cancelled_talk_proposal(talk_proposal):
    """The talk proposal, flagged as cancelled and saved."""
    talk_proposal.cancelled = True
    talk_proposal.save()
    return talk_proposal


@pytest.fixture
def tutorial_proposal(user):
    """A tutorial proposal (id 42) submitted by the ``user`` fixture."""
    return TutorialProposal.objects.create(
        id=42,
        submitter=user,
        title='Beyond the Style Guides<br>',
    )


@pytest.fixture
def cancelled_tutorial_proposal(tutorial_proposal):
    """The tutorial proposal, flagged as cancelled and saved."""
    tutorial_proposal.cancelled = True
    tutorial_proposal.save()
    return tutorial_proposal


@pytest.fixture(params=['talk', 'tutorial'])
def proposal_type(request):
    """Parametrize dependent fixtures over both proposal kinds."""
    return request.param


@pytest.fixture
def proposal(proposal_type, talk_proposal, tutorial_proposal):
    """Whichever proposal matches the current ``proposal_type`` param."""
    by_type = {
        'talk': talk_proposal,
        'tutorial': tutorial_proposal,
    }
    return by_type[proposal_type]


@pytest.fixture
def additional_speaker(another_user, proposal):
    """An additional speaker (id 81) attached to the parametrized proposal."""
    return AdditionalSpeaker.objects.create(
        id=81,
        user=another_user,
        proposal=proposal,
    )
#!/usr/bin/env python """Execute the tests for the samcat program. The golden test outputs are generated by the script generate_outputs.sh. You have to give the root paths to the source and the binaries as arguments to the program. These are the paths to the directory that contains the 'projects' directory. Usage: run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH """ import logging import os.path import sys # Automagically add util/py_lib to PYTHONPATH environment variable. path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'util', 'py_lib')) sys.path.insert(0, path) import seqan.app_tests as app_tests def main(source_base, binary_base): """Main entry point of the script.""" print 'Executing test for samcat' print '=========================' print p
h = app_tests.TestPathHelper( source_base, binary_base, 'apps/samcat/tests') # tests dir # ============================================================ # A
uto-detect the binary path. # ============================================================ path_to_program = app_tests.autolocateBinary( binary_base, 'apps/samcat', 'samcat') # ============================================================ # Built TestConf list. # ============================================================ # Build list with TestConf objects, analoguely to how the output # was generated in generate_outputs.sh. conf_list = [] # ============================================================ # Run on DNA (Adenoviruses). # ============================================================ conf = app_tests.TestConf( program=path_to_program, args=[ph.inFile('ex1_a1.sam'), ph.inFile('ex1_a2.sam'), ph.inFile('ex1_a3.sam'), '-o', ph.outFile('ex1_merged.sam')], to_diff=[(ph.inFile('ex1_merged.sam'), ph.outFile('ex1_merged.sam'))]) conf_list.append(conf) conf = app_tests.TestConf( program=path_to_program, args=[ph.inFile('ex1_a1.sam'), ph.inFile('ex1_a2.sam'), ph.inFile('ex1_a3.sam'), '-o', ph.outFile('ex1_merged.bam')], to_diff=[(ph.inFile('ex1_merged.bam'), ph.outFile('ex1_merged.bam'), "gunzip")]) conf_list.append(conf) # Execute the tests. failures = 0 for conf in conf_list: res = app_tests.runTest(conf) # Output to the user. print ' '.join(conf.commandLineArgs()) if res: print 'OK' else: failures += 1 print 'FAILED' # Cleanup. ph.deleteTempDir() print '==============================' print ' total tests: %d' % len(conf_list) print ' failed tests: %d' % failures print 'successful tests: %d' % (len(conf_list) - failures) print '==============================' # Compute and return return code. return failures != 0 if __name__ == '__main__': sys.exit(app_tests.main(main))
eturn local_filename def _s3_parse_manifest(self, context, metadata, manifest): manifest = etree.fromstring(manifest) image_format = 'ami' image_type = 'machine' try: kernel_id = manifest.find('machine_configuration/kernel_id').text if kernel_id == 'true': image_format = 'aki' image_type = 'kernel' kernel_id = None except Exception: kernel_id = None try: ramdisk_id = manifest.find('machine_configuration/ramdisk_id').text if ramdisk_id == 'true': image_format = 'ari' image_type = 'ramdisk' ramdisk_id = None except Exception: ramdisk_id = None try: arch = manifest.find('machine_configuration/architecture').text except Exception: arch = 'x86_64' # NOTE(yamahata): # EC2 ec2-budlne-image --block-device-mapping accepts # <virtual name>=<device name> where # virtual name = {ami, root, swap, ephemeral<N>} # where N is no negative integer # device name = the device name seen by guest kernel. # They are converted into # block_device_mapping/mapping/{virtual, device} # # Do NOT confuse this with ec2-register's block device mapping # argument. mappings = [] try: block_device_mapping = manifest.findall('machine_configuration/' 'block_device_mapping/' 'mapping') for bdm in block_device_mapping: mappings.append({'virtual': bdm.find('virtual').text, 'device': bdm.find('device').text}) except Exception: mappings = [] properties = metadata['properties'] properties['architecture'] = arch def _translate_dependent_image_id(image_key, image_id): image_uuid = ec2utils.ec2_id_to_glance_id(context, image_id) properties[image_key] = image_uuid if kernel_id: _translate_dependent_image_id('kernel_id', kernel_id) if ramdisk_id: _translate_dependent_image_id('ramdisk_id', ramdisk_id) if mappings: properties['mappings'] = mappings metadata.update({'disk_format': image_format, 'container_format': image_format, 'status': 'queued', 'is_public': False, 'properties': properties}) metadata['properties']['image_state'] = 'pending' #TODO(bcwaldon): right now, this removes user-defined ids. 
# We need to re-enable this. image_id = metadata.pop('id', None) image = self.service.create(context, metadata) # extract the new uuid and generate an int id to present back to user image_uuid = image['id'] image['id'] = ec2utils.glance_id_to_id(context, image_uuid) # return image_uuid so the caller can still make use of image_service return manifest, image, image_uuid def _s3_create(self, context, metadata): """Gets a manifest from s3 and makes an image.""" image_path = tempfile.mkdtemp(dir=CONF.image_decryption_dir) image_location = metadata['properties']['image_location'] bucket_name = image_location.split('/')[0] manifest_path = image_location[len(bucket_name) + 1:] bucket = self._conn(context).get_bucket(bucket_name) key = bucket.get_key(manifest_path) m
anifest = key.get_contents_as_string() manifest, image, image_uuid = self._s3_parse_manifest(context, metadata, manifest) def delayed_create(): """This handles the fetching and decrypting of the part files.""" context.update_store() log_vars = {'image_location': image_location,
'image_path': image_path} def _update_image_state(context, image_uuid, image_state): metadata = {'properties': {'image_state': image_state}} self.service.update(context, image_uuid, metadata, purge_props=False) def _update_image_data(context, image_uuid, image_data): metadata = {} self.service.update(context, image_uuid, metadata, image_data, purge_props=False) _update_image_state(context, image_uuid, 'downloading') try: parts = [] elements = manifest.find('image').getiterator('filename') for fn_element in elements: part = self._download_file(bucket, fn_element.text, image_path) parts.append(part) # NOTE(vish): this may be suboptimal, should we use cat? enc_filename = os.path.join(image_path, 'image.encrypted') with open(enc_filename, 'w') as combined: for filename in parts: with open(filename) as part: shutil.copyfileobj(part, combined) except Exception: LOG.exception(_("Failed to download %(image_location)s " "to %(image_path)s"), log_vars) _update_image_state(context, image_uuid, 'failed_download') return _update_image_state(context, image_uuid, 'decrypting') try: hex_key = manifest.find('image/ec2_encrypted_key').text encrypted_key = binascii.a2b_hex(hex_key) hex_iv = manifest.find('image/ec2_encrypted_iv').text encrypted_iv = binascii.a2b_hex(hex_iv) dec_filename = os.path.join(image_path, 'image.tar.gz') self._decrypt_image(context, enc_filename, encrypted_key, encrypted_iv, dec_filename) except Exception: LOG.exception(_("Failed to decrypt %(image_location)s " "to %(image_path)s"), log_vars) _update_image_state(context, image_uuid, 'failed_decrypt') return _update_image_state(context, image_uuid, 'untarring') try: unz_filename = self._untarzip_image(image_path, dec_filename) except Exception: LOG.exception(_("Failed to untar %(image_location)s " "to %(image_path)s"), log_vars) _update_image_state(context, image_uuid, 'failed_untar') return _update_image_state(context, image_uuid, 'uploading') try: with open(unz_filename) as image_file: 
_update_image_data(context, image_uuid, image_file) except Exception: LOG.exception(_("Failed to upload %(image_location)s " "to %(image_path)s"), log_vars) _update_image_state(context, image_uuid, 'failed_upload') return metadata = {'status': 'active', 'properties': {'image_state': 'available'}} self.service.update(context, image_uuid, metadata, purge_props=False) shutil.rmtree(image_path) eventlet.spawn_n(delayed_create) return image def _decrypt_image(self, context, encrypted_filename, encrypted_key, encrypted_iv, decrypted_filename): elevated = context.elevated() try: key = self.cert_rpcapi.decrypt_text(elevated, project_id=context.project_id, text=base64.b64encode(encry
fc\x80\x15\x44\x4a\ \x28\xb2\x02\x69\x4f\x00\x22\x15\x14\x59\x81\xe8\x6f\xce\x25\x04\ \x91\x02\x8a\xbc\x40\xc6\xe6\x93\x82\xa0\x46\x91\x15\xc8\xc5\xdb\ \x0b\x9c\x40\x50\xa2\x20\x03\xf9\x6d\x6b\x1f\x3c\xbe\x70\x5a\x33\ \x36\xbb\x02\xa6\x5b\x33\x9c\xd3\x3b\x74\x07\x6e\xe8\xd3\x83\x72\ \x6b\xa0\x14\x3c\xbe\x30\xfc\xb6\x85\xe8\x1f\xa8\x50\xd4\xe6\x4e\ \x18\x6c\xb3\x4f\x53\xca\xc4\xf4\x03\x18\xeb\xfb\x30\x2d\x28\x76\ \xf3\xf1\xa4\x6b\xc8\x2a\x90\xdf\xff\xb7\x9f\x32\x88\xd4\x50\xb2\ \x0a\x64\xff\xcf\x17\xbc\x40\xa4\x84\x82\x41\x24\x86\x92\x55\x20\ \x00\x20\x08\x24\xdd\x28\xc3\x57\x4e\xc6\xdc\x7f\xd6\x81\x38\xe7\ \x16\x33\x02\xc5\xdc\x59\x08\xaa\xe6\x56\x38\xd7\x76\x81\x71\xff\ \x59\x07\xe2\xf1\xfe\x2a\x18\xe4\xa0\x51\xfc\x8f\x4d\x50\xfb\x75\ \x3b\xd5\x7c\x3a\x8a\x20\x90\x6c\x4f\x75\x93\x1e\xcc\x9d\x85\xa2\ \xa2\xf8\x1f\x9b\x40\xa3\xef\x89\xb9\x96\xaa\xb9\x8d\xf9\x18\x06\ \x39\x78\x94\x68\x0c\xd3\xf5\xbb\xd0\xfd\xc3\x2d\xf6\x6b\x73\x06\ \x91\xe1\x9f\x5e\x55\x35\xb6\x9d\xbf\xde\x55\xb0\x22\x04\x85\x0d\ \xc3\xbb\xb2\x0b\xde\x95\x5d\x06\x4a\xd5\xf9\xce\xf5\xc8\x8b\x9e\ \xe3\x9f\x5e\x95\x6b\x59\x8d\x8a\x9c\x51\xc3\x6b\x53\x7c\x50\x12\ \x61\xb0\xa1\xd4\xab\x5a\xf6\x50\xaf\x37\x23\x8a\x0f\x0a\x17\x0c\ \x8c\x22\xa0\x52\x41\x49\x05\x03\xa3\x08\x28\x2e\x28\x7c\x30\x30\ \x8a\x80\x4a\x84\x22\x04\x83\x8c\xae\x7f\x18\xa3\xa4\x5a\x6c\x28\ \x62\x60\xd8\x1f\xfa\xa0\x4e\x7f\x85\x7a\x0e\x43\xd9\x29\x70\x10\ \xc4\x14\xea\xf5\x66\x44\xd1\x51\xcc\x9d\x85\x8c
\x4f\xe0\x62\x60\ \x98\xce\xa8\xc1\x4e\x10\x60\x27\x08\x70\x10\xc4\x8f\xa8\xd7\x9b\ \x11\x65\x35\x2a\x72\xbe\xd3\xbf\xfb\x48\xd5\xdc\x2a\x2a\xc6\x60\ \xb3\x0e\xc2\x5a\x2d\x3c\x2c\x2a\xa2\x50\xee\x11\xc4\x11\xd4\xeb\ \xcd\x88\xea\x6e\x2f\x3b\xf4\x65\x8b\x76\x8f\x6c\xe6\xd7\xa6\x6b\ \xa2\x60\x84\xb5\x5a\xd8\x69\x68\x80\xfb\xf9\xf9\xe4\x2e\x09\x59\ \x15\x8a\x1c\xd4\xeb\xcd\x88\xe2\x8b\x92\x08\x83\xcc\x5a\x75
\xf5\ \xdf\xbb\x44\xa9\x2c\x47\xbd\xd6\x8c\xa9\xee\xf6\xb2\x43\xaa\xe6\ \xd6\x0d\xae\x28\x5c\x30\xc8\xd0\x76\x09\x3e\x4b\x52\xa9\x72\xad\ \x31\xa7\xa2\xb1\x7b\x2a\x19\x4a\x2a\x18\xf4\xb3\xc4\x41\x10\x21\ \xd4\x6b\xcc\xb8\x2a\xd7\x1a\x73\xaa\xce\x77\x3e\x8a\x87\x92\x2a\ \x46\x58\xab\x85\x85\xd2\x52\x6a\x6c\xa1\x5e\x5f\x46\x56\xf7\xb1\ \xb2\x43\xf5\xaa\x96\x98\x33\x85\x0f\x46\xd4\x0e\xc1\x9f\x49\xf8\ \x16\x1b\x0a\x1f\x0c\xfa\x19\x62\x27\x08\x23\xea\x75\x65\x74\x45\ \xa3\xf0\xc1\xa0\xbf\xcb\xb2\x29\x95\x2a\xd4\x6b\xca\xf8\x8a\x46\ \xb9\xfc\x79\x0d\x67\x8c\x9d\x86\x06\x98\x39\x7a\x94\x3a\xd0\x6d\ \xb9\xb9\xaf\xa0\x5e\x4f\x56\x14\x89\x62\x28\x3b\x05\x76\x82\x80\ \x99\xa3\x47\x39\x61\x78\x8b\x8b\xf1\x67\x90\x83\x2a\xab\x42\x91\ \xe3\x20\x88\x29\xb2\xc1\xae\xbc\x3c\x78\x56\x51\xc1\x8a\x11\x54\ \xab\x61\xba\xa0\x80\xc2\xb0\x13\x84\x15\xf5\xfd\x67\x65\x59\x15\ \x8a\x1c\x3b\x41\x18\x69\x8d\x86\xfb\xf9\xf9\xf0\xb0\xa8\x08\x16\ \x4a\x4b\xc1\x5b\x5c\x0c\xd3\x05\x05\xe0\xca\xcb\x63\x60\xd8\x15\ \x8a\x97\x51\xdf\x7b\x56\x97\x5d\xa9\x2c\x71\x10\x44\x88\x0e\x13\ \x1d\x07\x41\x84\xee\x29\x95\xe5\xf8\xf7\x57\x69\x2a\xab\x42\x91\ \x73\x4f\xa9\x2c\xb7\x13\x84\x91\x1c\x65\x0e\x82\x98\x72\x10\xc4\ \x8f\x36\xa5\x52\x15\x6f\x57\xfc\x1f\xed\x45\x69\x64\xa7\xc0\x7c\ \xeb\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\xea\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\ \x00\x00\x00\xb1\x49\x44\x41\x54\x78\x9c\xed\xd4\xc1\x09\x80\x30\ \x14\x44\x41\x2b\xb5\x0c\xab\xb1\x12\xc1\x5e\xec\x44\x6f\x22\xa2\ \x17\x31\x92\x35\xb3\xf0\xee\x3f\x73\x48\xd7\x99\x99\xd9\xbe\x7e\ \x1c\x56\xdd\x07\xf0\x2b\xc0\x69\x99\x75\x08\x20\x40\x80\xd1\x01\ \x04\x08\x30\x3a\x80\x00\x01\x46\x07\x10\x20\xc0\x47\x07\xd7\x76\ \x0f\x40\x80\x00\x01\x02\x04\x08\x10\x20\x40\x80\x00\x01\x02\x04\ \x08\x10\x20\x40\x80\x00\x01\x02\x04\x08\xb0\x6d\xc0\xf3\x83\x6b\ \x0f\x20\x40\x80\x00\xdf\x04\x2c\xf5\x69\xd7\x76\x0f\x40\x80\x00\ 
\x01\x02\x04\x08\x10\x20\x40\x80\x00\x01\x02\x04\x08\x10\x20\x40\ \x80\x00\x01\x02\x04\x08\x10\xe0\x9f\x03\x08\x10\x60\x74\x00\x01\ \x02\x8c\x0e\x20\x40\x80\xd1\x01\x04\x18\x06\xa8\xeb\x00\x96\x06\ \x34\x33\x6b\x68\x1b\xca\x72\x68\xc1\x35\xba\x39\x8a\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x04\x0e\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\ \x00\x00\x03\xd5\x49\x44\x41\x54\x78\x9c\xed\x9b\xbd\x4a\x23\x51\ \x14\x80\x7d\x84\x3c\xc2\x3e\x82\x8f\x10\x62\x9b\x99\x8c\x16\x82\ \x45\x40\x17\x2c\x84\x14\x03\x2b\x38\x73\x8d\x32\x6c\x90\x90\x5d\ \x44\xb6\x58\x48\x11\xd0\x42\x58\x8b\xe0\x14\x93\x3b\x12\x58\x88\ \x45\x30\x65\x8a\x40\x0a\x0b\xc5\x22\x85\x10\xd0\x27\xf0\x6c\x61\ \xce\x38\x64\xfd\xc9\xe4\xce\xe4\x3a\x93\xf3\xc1\x05\xcb\xb9\x1f\ \xe7\xf7\x12\x17\x16\x08\x82\x20\x08\x82\x20\xfe\xe3\x2a\x93\xd1\ \xaf\x32\x19\x5d\xf6\x77\xc4\x92\xab\x4c\x46\xef\x2c\x2d\x41\x67\ \x69\x09\x48\x62\x40\xfc\xf2\x48\x62\x40\x5e\x93\x47\x12\x27\xe4\ \x3d\x79\x24\xf1\x03\x26\x91\x47\x12\xdf\x20\x88\x3c\x92\x38\xc6\ \x34\xf2\x48\xe2\x08\x11\x79\x73\x2f\x31\x0c\x79\x73\x2b\x31\x4c\ \x79\x73\x27\x31\x0a\x79\x73\x23\x31\x4a\x79\x89\x97\x38\x0b\x79\ \x89\x97\x38\x09\x1f\xc9\x91\xfd\x7d\x9f\x1e\x12\x28\x08\x09\x14\ \x84\x04\x0a\x42\x02\x05\x21\x81\x82\x90\x40\x41\x48\xa0\x20\x24\ \x50\x10\x12\x28\x08\x09\x14\x84\x04\x0a\x42\x02\x05\x21\x81\x82\ \x90\x40\x41\x48\xa0\x20\x24\x50\x10\x12\x38\x25\x9a\x65\xa7\xb2\ \x86\x93\x2e\x7d\x2d\x41\xa1\x50\x85\x4d\xfd\x04\x56\x77\xce\x61\ \x75\xe7\x1c\xf2\xdf\xfe\xc0\xf6\xd6\x2f\xd8\xdd\xfc\x01\x0a\xe3\ \xeb\xb2\xbf\xf5\x53\x91\x35\x9c\xb4\x62\xba\xad\x1c\x73\x1f\x55\ \xc6\x61\xd2\x93\x63\xdc\x9e\x6b\x99\x59\xc3\x49\xe7\x18\xb7\x51\ \xc8\xf2\x5e\xf3\x29\x5f\x69\x43\xa1\xda\x83\xe2\xd9\x1d\x58\xf5\ \x01\x94\x9d\x21\x94\x9d\x21\x58\xf5\x01\x18\xa7\x37\xa0\xd7\xfa\ \x90\xaf\xb4\xc7\x65\x76\x35\xe3\xe2\x8b\xec\xfb\xcc\x14\x95\x39\ 
\x16\x46\xdc\xf2\x5e\xf3\x49\xaf\xf5\xa1\xec\x0c\xe1\xe7\xc5\xe3\ \xc4\x47\xaf\xf5\xc7\x44\x3a\x96\xec\x7b\x45\x8e\x66\xd9\xa9\x1c\ \xe3\xc7\x28\xae\x50\xed\x05\x16\xf7\x9e\x48\xc5\x74\x5b\xb2\xef\ \x18\x19\x23\x79\x36\xca\x33\x4e\x6f\x84\xc4\xf9\x8f\x55\x1f\xc0\ \xca\x7e\xd3\x4b\x69\xd9\x77\x8d\x04\x94\xb7\x76\x70\x09\x25\xfb\ \x3e\x34\x79\x78\xca\xce\x10\xd6\x0e\x2e\x93\x19\x89\x8a\xd9\xd0\ \x31\xf2\xac\xfa\x20\x74\x79\x7e\x89\x18\x89\x39\xc6\x8f\x65\xdf\ \x3b\x14\xb2\x86\x93\x8e\x22\x6d\xdf\x4b\x67\xac\x89\x1a\xe3\x8b\ \xb2\xef\x2f\x8c\x62\xba\x2d\x95\x71\x28\x54\x7b\x91\xcb\xc3\x53\ \xa8\xf6\x92\x51\x0f\x15\xd3\xd5\x30\xfa\x44\xbb\x6d\xd0\x54\x5e\ \xfd\xfe\x17\xeb\xa1\x26\xdb\xc3\xd4\x60\xe3\x98\x65\xf4\xe1\xd9\ \x3e\xb9\xf6\x36\x16\xd9\x1e\xa6\xe2\x79\x6c\x79\x1e\x96\xa3\xe8\ \xba\x93\x44\xa1\x57\x0b\xe3\xb8\xa9\x60\xfa\xe6\x2b\xed\x99\xcb\ \xc3\xb3\x71\xd8\x79\x4e\xe3\x38\xee\xcc\xb8\x71\xc8\x48\xdf\x44\ \xa4\x31\xd6\xbf\x59\x8c\x2e\x6f\x9d\xe2\xd9\x5d\x7c\x07\x6b\x95\ \xf1\x5b\x95\x71\x88\x72\x70\x0e\x50\x07\x1f\x64\xfb\x08\x0c\x0a\ \x94\xd1\x40\x92\x22\xf0\x41\x65\x5c\xf8\xb5\x45\xf4\x60\x27\x96\ \xed\x23\x30\x2a\xe3\x5d\xd9\x29\x5c\xb2\xef\x51\xe0\xad\x6c\x1f\ \x81\xc1\x15\xae\x78\x76\x27\x4d\xa0\x6f\x2f\x8e\xdf\x4a\xa7\xee\ \xba\x47\x2a\xe3\xa0\xd7\xfa\xd2\x04\xfa\xc6\x98\xf8\xbd\xcc\x28\ \x8c\xaf\xab\x8c\xc3\xc6\x61\x47\x9a\x40\x6f\x90\x36\x1b\xf1\xfb\ \xc7\x1b\xcd\xb2\x53\x58\xc0\x65\x34\x12\xff\xdb\x60\x2c\x57\xb9\ \x85\x85\x97\x61\x7a\xfb\xe4\x5a\x5a\xfa\xc6\xb2\xfe\x21\xb8\x0f\ \xaf\xec\x37\x67\x1e\x7d\xde\x73\x56\x1c\xf7\x60\x3f\x38\xce\xc8\ \x78\x50\x8d\xe5\x0a\x37\x8e\xc6\xf8\x22\xd6\xc2\x59\xcc\x84\x56\ \x7d\x00\xcb\x7b\xcd\x27\x95\x71\xc8\x1a\x4e\x5a\xf6\xfd\x43\x01\ \x5f\x66\x56\xf6\x9b\x91\xae\x76\xfe\xd4\x55\x77\xdd\x23\xd9\xf7\ \x0e\x15\x1c\xac\xd7\x0e\x2e\x23\xe9\xca\x65\x67\xe8\x8d\x2d\x2a\ \xe3\x5d\xcd\xb2\x53\xb2\xef\x1c\x2a\xa3\xb1\xa6\x8b\x91\x18\x66\ \x3a\x5b\xf5\xc1\x4b\xe4\x31\x7e\x9b\x38\x79\x88\x66\xd9\x29\x8c\ 
\x44\x95\x71\xd8\xfa\xdd\x15\x8e\xba\x42\xb5\xe7\xd5\x3c\xc5\x74\ \x5b\xb1\x9d\xf9\x82\x80\x6b\x1e\x46\x63\xd0\x39\xb1\xec\x0c\xc1\ \x38\xbd\xf1\x47\x1d\xa8\xbb\xee\x51\x62\x23\xef\x35\x46\xdd\xb9\ \xeb\xff\x85\xd5\xc6\x61\x07\xf4\x5a\x1f\x8a\x67\x77\x5e\xb3\x29\ \x3b\x43\x28\xd9\xf7\xde\x4f\xdc\xf2\x95\xb6\x17\x71\x18\x75\x89\ \xe9\xb6\xd3\xa0\x30\xbe\xee\x4f\x
#
# Copyright 2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import os
from multiprocessing.managers import BaseManager, RemoteError
import logging
import threading

from vdsm import constants, utils

# Lazily created singleton proxy and the lock guarding its creation.
_g_singletonSupervdsmInstance = None
_g_singletonSupervdsmInstance_lock = threading.Lock()

ADDRESS = os.path.join(constants.P_VDSM_RUN, "svdsm.sock")


class _SuperVdsmManager(BaseManager):
    """Multiprocessing manager that talks to the supervdsm unix socket."""
    pass


class ProxyCaller(object):
    """Callable forwarding one named method to the supervdsm instance."""

    def __init__(self, supervdsmProxy, funcName):
        self._funcName = funcName
        self._supervdsmProxy = supervdsmProxy

    def __call__(self, *args, **kwargs):
        proxy = self._supervdsmProxy
        try:
            return getattr(proxy._svdsm, self._funcName)(*args, **kwargs)
        except RemoteError:
            # The channel broke mid-call: reconnect so future callers get a
            # fresh proxy, and surface the failure of this particular call.
            proxy._connect()
            raise RuntimeError(
                "Broken communication with supervdsm. "
                "Failed call to %s" % self._funcName)


class SuperVdsmProxy(object):
    """
    A wrapper around all the supervdsm init stuff
    """
    _log = logging.getLogger("SuperVdsmProxy")

    def __init__(self):
        self.proxyLock = threading.Lock()
        self._manager = None
        self._svdsm = None
        self._connect()

    def open(self, *args, **kwargs):
        return self._manager.open(*args, **kwargs)

    def _connect(self):
        self._manager = _SuperVdsmManager(address=ADDRESS, authkey='')
        self._manager.register('instance')
        self._manager.register('open')
        self._log.debug("Trying to connect to Super Vdsm")
        try:
            utils.retry(self._manager.connect, Exception, timeout=60, tries=3)
        except Exception as ex:
            msg = "Connect to supervdsm service failed: %s" % ex
            utils.panic(msg)
        self._svdsm = self._manager.instance()

    def __getattr__(self, name):
        # Any unknown attribute becomes a forwarder for a remote call.
        return ProxyCaller(self, name)


def getProxy():
    """Return the process-wide SuperVdsmProxy, creating it on first use."""
    global _g_singletonSupervdsmInstance
    if _g_singletonSupervdsmInstance is None:
        # Double-checked locking: re-test under the lock before creating.
        with _g_singletonSupervdsmInstance_lock:
            if _g_singletonSupervdsmInstance is None:
                _g_singletonSupervdsmInstance = SuperVdsmProxy()
    return _g_singletonSupervdsmInstance
#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of kryptomime, a Python module for email kryptography. # Copyright © 2013,2014 Thomas Tanner <tanner@gmx.net> # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the included LICENSE file for details. #______________________________________________________________________________ from __future__ import absolute_import from __future__ import print_function import setuptools, sys from setuptools.command.test import test as TestCommand __author__ = "Thomas Tanner" __contact__ = 'tanner@gmx.net' __url__ = 'https://github.com/ttanner/kryptomime' class PyTest(TestCommand): user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = None def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): #import here, cause outside the eggs aren't loaded import pytest, sys args = [self.pytest_args] if self.pytest_args else [] errno = pytest.main(['--cov-config','.coveragerc','--cov','kryptomime']+args) sys.exit(errno) subproc = ["subprocess32"] if sys.version_info[0] == 2 else [] setuptools.setup( name = "kryptomime", description="Python support for E-Mail kryptography", long_description=open('README.rst').read(), license="GPLv3+", version='0.5.0', author=__author__, author_email=__contact__, maintainer=__author__, maintainer_email=__contact__, url=__url__, package_dir={'kryptomime': 'kryptomime'}, packages=['kryptomime'], package_data={'': ['README.rst', 'COPYING.txt', 
'requirements.txt']}, tests_require=['pytest-cov','pytest-pythonpath'], cmdclass = {'test': PyTest}, install_requires=['gnupg>=1.3.2','six>=1.8']+subproc, extra
s_require={'docs': ["Sphinx>=1.1", "repoze.sphinx"]}, platforms="Linux, BSD, OSX, Windows", download_url="https://github.com/ttanner/kryptomime/archive/master.zip", classifiers=[ "Deve
lopment Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Security :: Cryptography", "Topic :: Utilities",] )
soup = BeautifulSoup(r.text, 'html.parser') self.data_set = soup.find_all('div', class_='customjumbotron') else: print '网络连接错误' r.raise_for_status() else: print 'KM对应URL不存在或错误' sys.exit() def km_victim_detail(self): # 返回受害者的信息 info = map(lambda x: x.strip(), filter( lambda x: len(x.strip()) > 0, self.data_set[0].text.split('\n'))) info.pop(info.index('相关击坠')) killmail_info = { 'victim_character': info.pop(0)[2:], 'corp': info.pop(0)[2:], 'alliance': info.pop(0)[2:], 'ship': info.pop(0)[2:], 'time': info.pop(0)[2:], 'location': info.pop(0)[2:], 'damage_taken': info.pop(0)[4:], 'value': info.pop(0)[4:] } return killmail_info def km_lastblow_detail(self): # 返回最后一击的个人信息 info = map(lambda x: x.strip(), filter( lambda x: len(x.strip()) > 0, self.data_set[1].text.split('\n'))) killmail_info = { 'last_blow_character': info.pop(0), 'corp': info.pop(0), 'alliance': info.pop(0) } # 对应武器以及船只信息 info = map(lambda x: x.get('title'), filter( lambda x: x.get('title'), self.data_set[1].find_all('img'))) killmail_info.update({'ship': info.pop(0)}) killmail_info.update({'weapon': info.pop(0)}) return killmail_info def km_comp_detail(self): # KM的伤害来源组成,按照联盟以及船只分类 info_set = self.data_set[2].find_all('td') info = map(lambda x: x.strip(), filter( lambda x: len(x.strip()) > 0, info_set[0].text.split('\n'))) killmail_info = {'com_alliance': info} info = map(lambda x: x.strip(), filter( lambda x: len(x.strip()) > 0, info_set[1].text.split('\n'))) killmail_info.update({'com_ship': info}) return killmail_info def km_involve_detail(self): # 伤害来源个人信息 info = [] killmail_info = [] info = map(lambda x: x.strip(), filter( lambda x: len(x.strip()) > 0, self.data_set[3].text.split('\n'))) info.pop(info.index('最后一击!')) while info: killmail_info.append({ 'character': info.pop(0), 'corp': info.pop(0), 'alliance': info.pop(0), 'damage': info.pop(0) }) # 对应武器以及船只信息 info = map(lambda x: x.get('title'), filter( lambda x: x.get('title'), self.data_set[3].find_all('img'))) for n in xrange(len(killmail_info)): 
killmail_info[n].update({ 'ship': info.pop(0), 'weapon': info.pop(0) }) return killmail_info def km_fit_detail(self): # 高中低槽装配信息以及损失价值信息 data_set = soup.find(id='kl-detail-shipdetails') info = [] killmail = {} for text in data_set.text.split('\n'): if len(text.strip()) != 0: if "装配" in text.strip() or "无人机挂舱" in text.strip() \ or "货舱" in text.strip() or "其他" in text.strip() or "损失价值" in text.strip(): info.append([]) if text.strip() != "价值": info[-1].append(text.strip()) for n in info[:-1]: slot_name = n.pop(0) killmail[slot_name] = [] while n: killmail[slot_name].append({ 'item': n.pop(0), 'num': n.pop(0), 'value': n.pop(0) }) killmail['values'] = [] while info[-1]: killmail['values'].append({ 'type': info[-1].pop(0), 'value': info[-1].pop(0) }) return killmail def km_fit_info(self): # 详细参数 data_set = soup.find('div', id='attr_Panel').find( 'div', class_='row col-md-12') data_set = filter( lambda x: len(x.strip()) != 0, data_set.text.split('\n')) # 工程部门, 6行 # 工程部门, 电容, 回充速率, CPU, 能量栅格, 改装值 for thing in data_set[0:6]: print thing # 攻击部门, 4行 # 攻击部门, 炮DPS, 导弹DPS, 立体炸弹DPS for thing in data_set[6:10]: print thing # 防御部门, 20行 # 防御部门, 全伤有效/极限防御/常规防御,有效HP[字样],最小有效/有效[混合],最大有效 # 5行护盾防御: HP, 电磁, 热能, 动能,爆炸 # 5行装甲防御: HP, 电磁, 热能, 动能,爆炸 # 5行结构防御: HP, 电磁, 热能, 动能,爆炸 for thing in data_set[10:30]: print thing # 维修部门, 9行 # 维修部门, 护盾修理2, 装甲修理2, 结构修理2, 护盾被动回充2 for thing in data_set[30:39]: print thing # 导航部门, 6行 # 导航部门, 最大速度, 敏捷度, 起跳时间, 跃迁速度, 免疫跃迁干扰强度 for thing in data_set[39:45]: print thing # 索敌部门, 5行 # 索敌部门, 最大锁定个数/雷达强度, 锁定距离, 锁定分辨率, 信号半径 for thing in data_set[45:50]: print thing class KillMail_Search(): # seems broken def __init__(self): self.url = 'http://kb.ceve-market.org/ajax_search/' global client global soup client = requests.session() r = client.get(self.url) if r.status_code == requests.codes.ok: print '网络连接正常' soup = BeautifulSoup(r.text, 'html.parser') self.data_set = soup.find_all('div', class_='customjumbotron') else: print '网络连接错误' r.raise_for_status() def search(self, 
type, name): URL = self.url client.get(URL) csrftoken_cookies = client.cookies['csrftoken'] end_time = time.strftime('%Y-%m-%d %H:%M') start_time = datetime.datetime.now() + datetime.timedelta(days=-1) search_data = dict( searchtype=type, name=name, type='lost', shiptype='shiptype', systemtype='sys', ship='', system='',
starttime='', endtime='', prev='', next='', csrfmiddlewaretoken=csrftoken_cookies, ) r = client.post(URL, data=search_data, headers=dict(Referer=URL)) soup = BeautifulSoup(r.text, 'html.parser') a = soup.find('table',id='kbtable').find('tbody').find_all('tr') info = [] url_info = []
killmail = [] for km in a[:-1]: for n in km.find_all('td'): if n.find_all(href=re.compile("/pilot/0")) != []: info.append('未知角色') if n.find_all(style='color:grey') != []: null_alli_tag = 1 else: null_alli_tag = 0 if n.find_all(href=re.compile("/corp/None")) != []: null_corp_tag = 1 else: null_corp_tag = 0 for text in n.text.split('\n'): if len(text) != 0: if text == '无联盟' and null_alli_tag == 1: info.append('无联盟(NULL)') if null_corp_tag == 1: info.append('无军团(NULL)') elif text == '未知': pass else: info.append(text) url_info.append(km['onclick'].split('\'')[-2]) while info: killmail.append({ 'victim_ship': info.pop(0), 'victim_shiptype': info.pop(0), 'victim': info.pop(0), 'victim_alli': info.pop(0), 'victim_corp': info.pop(0), 'final_blow': info.pop(0), 'alliance': info.pop(0), 'corp': info.pop(0), 'location': info.pop(0), 'time': info.pop(0), 'url': url_info.pop(0) }) return killmail class KillMail_LeaderBoard(): def __init__(self): self.url = 'http://kb.ceve-market.org/rank/' global client client = requests.session() r =
#!/usr/bin/true
#
# download.py - part of autospec
# Copyright (C) 2018 Intel Corporation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
from io import BytesIO

import pycurl

from util import print_fatal


def do_curl(url, dest=None, post=None, is_fatal=False):
    """
    Perform a curl operation for `url`.

    If `post` is set, a POST is performed for `url` with fields taken from
    the specified value. Otherwise a GET is performed for `url`. If `dest`
    is set, the curl response (if successful) is written to the specified
    path and the path is returned. Otherwise a successful response is
    returned as a BytesIO object.

    If `is_fatal` is `True` (`False` is the default), a GET failure, POST
    failure, or a failure to write to the path specified for `dest`
    results in the program exiting with an error. Otherwise, `None` is
    returned for any of those error conditions.
    """
    handle = pycurl.Curl()
    handle.setopt(handle.URL, url)
    if post:
        # Supplying POSTFIELDS implicitly switches the request to POST.
        handle.setopt(handle.POSTFIELDS, post)
    handle.setopt(handle.FOLLOWLOCATION, True)
    handle.setopt(handle.FAILONERROR, True)
    handle.setopt(handle.CONNECTTIMEOUT, 10)
    handle.setopt(handle.TIMEOUT, 600)
    # Abort transfers that stall below 1 byte/s for 10 seconds.
    handle.setopt(handle.LOW_SPEED_LIMIT, 1)
    handle.setopt(handle.LOW_SPEED_TIME, 10)
    body = BytesIO()
    handle.setopt(handle.WRITEDATA, body)
    try:
        handle.perform()
    except pycurl.error as e:
        if is_fatal:
            print_fatal("Unable to fetch {}: {}".format(url, e))
            sys.exit(1)
        return None
    finally:
        handle.close()

    if not dest:
        return body

    # Persist the response; on failure, clean up any partial file.
    try:
        with open(dest, 'wb') as outfile:
            outfile.write(body.getvalue())
    except IOError as e:
        if os.path.exists(dest):
            os.unlink(dest)
        if is_fatal:
            print_fatal("Unable to write to {}: {}".format(dest, e))
            sys.exit(1)
        return None
    return dest
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-

from autopilot.matchers import Eventually
from testtools.matchers import Equals

from bmicalc import tests


class MainViewTestCase(tests.BaseTestCase):
    """Tests for the mainview"""

    def setUp(self):
        super(MainViewTestCase, self).setUp()

    def test_click_button(self):
        """Clicking the main button should eventually update the label."""
        # Locate the button in the main view and click it.
        target = self.app.main_view.get_button()
        self.app.pointing_device.click_object(target)

        # The label text is expected to change in response to the click.
        result_label = self.app.main_view.get_label()
        self.assertThat(result_label.text, Eventually(Equals('..world!')))