prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
# -*- coding: utf-8 -*-
# About.py — helpers that report image/hardware/software version strings
# for an Enigma2 set-top-box "About" screen.
# NOTE(review): relies on globals `_` (gettext) and `ngettext` being
# installed by the enclosing application — not imported here; confirm.
import sys, os, time
from Tools.HardwareInfo import HardwareInfo


def getVersionString():
    # Alias kept for callers that expect the generic name.
    return getImageVersionString()

def getImageVersionString():
    # Derive the image build date from the package database's mtime.
    try:
        if os.path.isfile('/var/lib/opkg/status'):
            st = os.stat('/var/lib/opkg/status')
        else:
            # Fall back to the legacy ipkg location.
            st = os.stat('/usr/lib/ipkg/status')
        tm = time.localtime(st.st_mtime)
        # Reject obviously-bogus clock values (box without RTC battery).
        if tm.tm_year >= 2011:
            return time.strftime("%Y-%m-%d %H:%M:%S", tm)
    except:
        pass
    return _("unavailable")

def getFlashDateString():
    # Date the root filesystem was flashed, taken from /boot's ctime.
    try:
        return time.strftime(_("%Y-%m-%d %H:%M"), time.localtime(os.stat("/boot").st_ctime))
    except:
        return _("unknown")

def getEnigmaVersionString():
    import enigma
    enigma_version = enigma.getEnigmaVersionString()
    # Strip the literal "-(no branch)" suffix (12 characters) when present.
    if '-(no branch)' in enigma_version:
        enigma_version = enigma_version[:-12]
    return enigma_version

def getGStreamerVersionString():
    import enigma
    return enigma.getGStreamerVersionString()

def getKernelVersionString():
    # Third whitespace-separated field of /proc/version is the kernel
    # release; keep only the part before the first '-'.
    try:
        return open("/proc/version","r").read().split(' ', 4)[2].split('-',2)[0]
    except:
        return _("unknown")

def getHardwareTypeString():
    return HardwareInfo().get_device_string()

def getImageTypeString():
    # Second-to-last line of /etc/issue, minus the trailing 6 characters
    # (presumably the "\\n \\l" login-prompt escapes — TODO confirm).
    try:
        return open("/etc/issue").readlines()[-2].capitalize().strip()[:-6]
    except:
        return _("undefined")

def getCPUInfoString():
    # Summarise /proc/cpuinfo as e.g. "BCM7356 1300 MHz (2 cores) 47°C".
    try:
        cpu_count = 0
        cpu_speed = 0
        for line in open("/proc/cpuinfo").readlines():
            line = [x.strip() for x in line.strip().split(":")]
            if line[0] in ("system type", "model name"):
                # First word of the model field, e.g. chip name.
                processor = line[1].split()[0]
            elif line[0] == "cpu MHz":
                cpu_speed = "%1.0f" % float(line[1])
            elif line[0] == "processor":
                cpu_count += 1
        if not cpu_speed:
            # Some SoCs omit "cpu MHz"; fall back to cpufreq (kHz -> MHz).
            try:
                cpu_speed = int(open("/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq").read()) / 1000
            except:
                cpu_speed = "-"
        if os.path.isfile('/proc/stb/fp/temp_sensor_avs'):
            # Front-processor temperature sensor, if the box has one.
            temperature = open("/proc/stb/fp/temp_sensor_avs").readline().replace('\n','')
            return "%s %s MHz (%s) %s°C" % (processor, cpu_speed, ngettext("%d core", "%d cores", cpu_count) % cpu_count, temperature)
        return "%s %s MHz (%s)" % (processor, cpu_speed, ngettext("%d core", "%d cores", cpu_count) % cpu_count)
    except:
        return _("undefined")

def getDriverInstalledDate():
    # Extract YYYYMMDD from the Version: line of the dvb-modules control
    # file and reformat as YYYY-MM-DD.
    try:
        from glob import glob
        driver = [x.split("-")[-2:-1][0][-8:] for x in open(glob("/var/lib/opkg/info/*-dvb-modules-*.control")[0], "r") if x.startswith("Version:")][0]
        return "%s-%s-%s" % (driver[:4], driver[4:6], driver[6:])
    except:
        return _("unknown")

def getPythonVersionString():
    # Python 2 only: the `commands` module was removed in Python 3.
    try:
        import commands
        status, output = commands.getstatusoutput("python -V")
        return output.split(' ')[1]
    except:
        return _("unknown")

def getDriverVuInstalledDate():
    # Vu+-specific driver date, read from the installed package name.
    try:
        driver = os.popen("opkg list-installed | grep vuplus-dvb-").read().strip()
        driver = driver.split("-")
        #return driver[:4] + "-" + driver[4:6] + "-" + driver[6:]
        # NOTE(review): returns "unknown" untranslated here, unlike the
        # _("unknown") used elsewhere in this module — confirm intent.
        return driver[5]
    except:
        return "unknown"

# For modules that do "from About import about"
about = sys.modules[__name__]
row in channelRows: print row centered = False weightXYFilledOut = False if len(channelRows) > 0: volume = '' compositeId = '' channels = [] for row in channelRows: compositeId = row[config.colCompositesCompositeId] fileId = row[config.colCompositesFileId] volume = row[config.colCompositesVolume] filter = row[config.colCompositesFilter] weight = float(row[config.colCompositesWeight]) \ if len(row)>config.colCompositesWeight else 1.0 x = int(row[config.colCompositesX]) if len(row)>config.colCompositesX else 0 y = int(row[config.colCompositesY]) if len(row)>config.colCompositesY else 0 if len(row)>config.colCompositesWeight: weightXYFilledOut = True # if don't have an inpaint or centered file, use the adjusted file channelfilepath = lib.getFilepath('inpaint', volume, fileId) if os.path.isfile(channelfilepath): centered = True else: channelfilepath = lib.getFilepath('center', volume, fileId, filter) if os.path.isfile(channelfilepath): centered = True else: channelfilepath = lib.getFilepath('adjust', volume, fileId, filter) if os.path.isfile(channelfilepath): channel = [fileId,filter,channelfilepath,weight,x,y] channels.append(channel) if len(channels)>0: outfilepath = lib.getFilepath('composite', volume, compositeId) if centered: optionAlign = False # don't try to align images if already centered if weightXYFilledOut: optionAlign = False # don't align if already have values # combine the channel images im, channels = libimg.combineChannels(channels, optionAlign) libimg.imwrite(outfilepath, im) # if -align: update channels x,y etc if optionAlign: # make sure all the rows have all their columns for row in channelRows: while len(row)<=config.colCompositesY: row.append('') # find each row in channelRows and update weights and x,y translation for row in channels: for row2 in channelRows: if row2[config.colCompositesFileId]==row[config.colChannelFileId]: row2[config.colCompositesWeight]=row[config.colChannelWeight] row2[config.colCompositesX]=row[config.colChannelX] 
row2[config.colCompositesY]=row[config.colChannelY] # print [ch[:-1] for ch in channels if ch] # return channels # caller needs to know if x,y values were changed xyChanged = not centered return xyChanged def writeUpdates(csvNew, channelRows): "" for row in channelRows: # row = [compositeId, fileId, volume, filter, weight, x, y] csvNew.writerow(row) # print row def vgComposite(filterVolume=None, filterCompositeId=None, filterTargetPath=None, optionOverwrite=False, optionAlign=False, directCall=True): """ Build composite images by combining channel images. Walks over records in composites.csv, merges channel images, writes to composites folder. eg composites.csv: compositeId,centerId,volume,filter,weight,x,y C1537728,C1537728,5103,Blue C1537728,C1537730,5103,Orange,0.8 C1537728,C1537732,5103,Green,1,10,3 => step05_composites/VGISS_5103/C1537728_composite.jpg Note: weight,x,y are optional - default to 1,0,0 """ if filterCompositeId: filterCompositeId = filterCompositeId.upper() # always capital C # note: targetPathParts = [system, craft, target, camera] targetPathParts = lib.parseTargetPath(filterTargetPath) # build volume for previous step if filterVolume: filterVolume = str(filterVolume) outputSubfolder = lib.getSubfolder('composite', filterVolume) # quit if volume folder exists if os.path.isdir(outputSubfolder) and optionOverwrite==False: if directCall: print "Folder exists: " + outputSubfolder return # build the previous step, if not already there vgCenter.vgCenter(filterVolume, '', optionOverwrite=False, directCall=False)
# vgInpaint.vgInpaint(filterVolum
e, '', optionOverwrite=False, directCall=False) # make folder lib.mkdir(outputSubfolder) # read small dbs into memory compositingInfo = lib.readCsv(config.dbCompositing) # when to turn centering on/off retargetingInfo = lib.readCsv(config.dbRetargeting) # remapping listed targets # open files.csv so can join to it csvFiles, fFiles = lib.openCsvReader(config.dbFiles) # open compositesNew.csv for writing if optionAlign: lib.rm(config.dbCompositesNew) csvNew, fNew = lib.openCsvWriter(config.dbCompositesNew) # iterate over composites.csv records csvComposites, fComposites = lib.openCsvReader(config.dbComposites) startId = '' startVol = '' channelRows = [] nfile = 0 for row in csvComposites: # get composite info compositeId = row[config.colCompositesCompositeId] fileId = row[config.colCompositesFileId] volume = row[config.colCompositesVolume] # join on files.csv to get more image properties # (note: since compositeId repeats, we might have already advanced to the next record, # in which case rowFiles will be None. But the target properties will remain the same.) 
rowFiles = lib.getJoinRow(csvFiles, config.colFilesFileId, compositeId) if rowFiles: # get file info filter = rowFiles[config.colFilesFilter] system = rowFiles[config.colFilesSystem] craft = rowFiles[config.colFilesCraft] target = rowFiles[config.colFilesTarget] camera = rowFiles[config.colFilesCamera] # relabel target field if necessary - see db/targets.csv for more info target = lib.retarget(retargetingInfo, compositeId, target) # filter on volume, composite id and targetpath volumeOk = (volume==filterVolume if filterVolume else True) compositeOk = (compositeId==filterCompositeId if filterCompositeId else True) targetPathOk = (lib.targetMatches(targetPathParts, system, craft, target, camera) \ if filterTargetPath else True) doComposite = (volumeOk and compositeOk and targetPathOk) if doComposite: # gather image filenames into channelRows so can merge them if compositeId == startId: channelRows.append(row) else: # we're seeing a new compositeId, so process all the gathered channels printStatus(channelRows,startVol,nfile,startId) processChannels(channelRows, optionAlign) # processChannels(channelRows, optionAlign, csvNew) # xyChanged = processChannels(channelRows, optionAlign) # if optionAlign and xyChanged: # writeUpdates(csvNew, channelRows) startId = compositeId startVol = volume channelRows = [row] nfile += 1 # process the last leftover group # print channelRows printStatus(channelRows,startVol,nfile,startId) processChannels(channelRows, optionAlign) # processChannels(channelRows, optionAlign, csvNew) # xyChanged = processChannels(channelRows,optionAlign) # if optionAlign and xyChanged: # writeUpdates(csvNew, channelRows) print if optionAlign: fNew.close() fFiles.close() fComposites.close() if __name__ == '__main__': os.chdir('..') # vgComposite(5117) # vgComposite(8207) # vgComposite(None,'c1617245') # ariel - works # vgComposite(None,'c2684338',None,optionOverwrite=True) # autom
# Tests for mitogen's local-context creation: stream naming, and which
# Python interpreter the child uses (inherited vs. explicit python_path).
import os

import sys

import unittest2

import mitogen
import mitogen.ssh
import mitogen.utils

import testlib
import plain_old_module


def get_sys_executable():
    # Executed inside the child context; reports its interpreter path.
    return sys.executable


def get_os_environ():
    # Executed inside the child context; returns a plain picklable dict.
    return dict(os.environ)


class LocalTest(testlib.RouterMixin, unittest2.TestCase):
    stream_class = mitogen.ssh.Stream

    def test_stream_name(self):
        # A local context is named after the child's PID.
        context = self.router.local()
        pid = context.call(os.getpid)
        self.assertEquals('local.%d' % (pid,), context.name)


class PythonPathTest(testlib.RouterMixin, unittest2.TestCase):
    stream_class = mitogen.ssh.Stream

    def test_inherited(self):
        # By default the child runs the same interpreter as the parent.
        context = self.router.local()
        self.assertEquals(sys.executable,
                          context.call(get_sys_executable))

    def test_string(self):
        # A string python_path is run as-is; env_wrapper.sh execs $PYTHON
        # and marks the environment so we can prove it was used.
        os.environ['PYTHON'] = sys.executable
        context = self.router.local(
            python_path=testlib.data_path('env_wrapper.sh'),
        )
        self.assertEquals(sys.executable,
                          context.call(get_sys_executable))
        env = context.call(get_os_environ)
        self.assertEquals('1', env['EXECUTED_VIA_ENV_WRAPPER'])

    def test_list(self):
        # A list python_path becomes the literal argv prefix of the child.
        context = self.router.local(
            python_path=[
                testlib.data_path('env_wrapper.sh'),
                "magic_first_arg",
                sys.executable
            ]
        )
        self.assertEquals(sys.executable,
                          context.call(get_sys_executable))
        env = context.call(get_os_environ)
        self.assertEquals('magic_first_arg', env['ENV_WRAPPER_FIRST_ARG'])
        self.assertEquals('1', env['EXECUTED_VIA_ENV_WRAPPER'])


if __name__ == '__main__':
    unittest2.main()
#
# Copyright 2020 University of Washington
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# Authors: Hao Yin and Sebastien Deronne
#
import numpy as np
import math


def bianchi_ax(data_rate, ack_rate, k, difs):
    """Compute Bianchi-model saturation throughput for 802.11ax.

    data_rate: PHY data rate in bit/s
    ack_rate:  PHY rate used for the ACK in bit/s
    k:         A-MPDU aggregation size (<=1 disables aggregation)
    difs:      1 -> DIFS-based collision timing, else EIFS-like timing
    Returns a numpy array of throughput values (Mbit/s) for 5..50
    stations in steps of 5.
    """
    # Parameters for 11ax
    nA = np.linspace(5, 50, 10)  # station counts evaluated
    CWmin = 15
    CWmax = 1023
    L_DATA = 1500 * 8  # data size in bits
    L_ACK = 14 * 8  # ACK size in bits
    #B = 1/(CWmin+1)
    B = 0  # NOTE(review): with B=0, EP == L_DATA and the 1/(1-B) terms are no-ops
    EP = L_DATA/(1-B)
    T_GI = 800e-9  # guard interval in seconds
    T_SYMBOL_ACK = 4e-6  # symbol duration in seconds (for ACK)
    T_SYMBOL_DATA = 12.8e-6 + T_GI  # symbol duration in seconds (for DATA)
    T_PHY_ACK = 20e-6  # PHY preamble & header duration in seconds (for ACK)
    T_PHY_DATA = 44e-6  # PHY preamble & header duration in seconds (for DATA)
    L_SERVICE = 16  # service field length in bits
    L_TAIL = 6  # tail length in bits
    L_MAC = (30) * 8  # MAC header size in bits
    L_APP_HDR = 8 * 8  # bits added by the upper layer(s)
    T_SIFS = 16e-6
    T_DIFS = 34e-6
    T_SLOT = 9e-6
    delta = 1e-7
    Aggregation_Type = 'A_MPDU'  # A_MPDU or A_MSDU (HYBRID not fully supported)
    K_MSDU = 1
    K_MPDU = k
    L_MPDU_HEADER = 4
    L_MSDU_HEADER = 14 * 8
    if (k <= 1):
        Aggregation_Type = 'NONE'

    # --- DATA frame duration ---
    N_DBPS = data_rate * T_SYMBOL_DATA  # number of data bits per OFDM symbol
    if (Aggregation_Type == 'NONE'):
        N_SYMBOLS = math.ceil((L_SERVICE + (L_MAC + L_DATA + L_APP_HDR) + L_TAIL)/N_DBPS)
        T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
        K_MPDU = 1
        K_MSDU = 1
    if (Aggregation_Type == 'A_MSDU'):
        N_SYMBOLS = math.ceil((L_SERVICE + K_MPDU*(L_MAC + L_MPDU_HEADER + K_MSDU*(L_MSDU_HEADER + L_DATA + L_APP_HDR)) + L_TAIL)/N_DBPS)
        T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
    if (Aggregation_Type == 'A_MPDU'):
        N_SYMBOLS = math.ceil((L_SERVICE + K_MPDU*(L_MAC + L_MPDU_HEADER + L_DATA + L_APP_HDR) + L_TAIL)/N_DBPS)
        T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)

    # --- ACK frame duration ---
    N_DBPS = ack_rate * T_SYMBOL_ACK  # number of data bits per OFDM symbol
    N_SYMBOLS = math.ceil((L_SERVICE + L_ACK + L_TAIL)/N_DBPS)
    T_ACK = T_PHY_ACK + (T_SYMBOL_ACK * N_SYMBOLS)

    # Slot durations: T_s = successful transmission, T_C = collision.
    T_s = T_DATA + T_SIFS + T_ACK + T_DIFS
    if difs == 1:  # DIFS
        T_C = T_DATA + T_DIFS
    else:
        # EIFS-like accounting: collisions also absorb SIFS+ACK time.
        T_s = T_DATA + T_SIFS + T_ACK + T_DIFS + delta
        T_C = T_DATA + T_DIFS + T_SIFS + T_ACK + delta
    T_S = T_s/(1-B) + T_SLOT

    S_bianchi = np.zeros(len(nA))
    for j in range(len(nA)):
        n = nA[j]*1
        W = CWmin + 1
        m = math.log2((CWmax + 1)/(CWmin + 1))  # number of backoff stages
        # Solve the Bianchi fixed point numerically: scan tau over a grid
        # and pick the value closest to the self-consistent solution.
        tau1 = np.linspace(0, 0.1, 100000)
        p = 1 - np.power((1 - tau1),(n - 1))  # conditional collision probability
        ps = p*0
        for i in range(int(m)):
            ps = ps + np.power(2*p, i)
        taup = 2./(1 + W + p*W*ps)
        b = np.argmin(np.abs(tau1 - taup))
        tau = taup[b]
        Ptr = 1 - math.pow((1 - tau), int(n))     # prob. of >=1 transmission in a slot
        Ps = n*tau*math.pow((1 - tau), int(n-1))/Ptr  # prob. a transmission succeeds
        # Normalized throughput in Mbit/s.
        S_bianchi[j] = K_MSDU*K_MPDU*Ps*Ptr*EP/((1-Ptr)*T_SLOT+Ptr*Ps*T_S+Ptr*(1-Ps)*T_C)/1e6

    bianchi_result = S_bianchi
    return bianchi_result


def str_result(bianchi_result, mcs, bw):
    """Format one MCS/bandwidth result as a C++ table entry and print it."""
    str_bianchi = ' {' + '\"HeMcs{:d}'.format(mcs) + '_{:d}MHz\"'.format(bw) + ', {\n'
    for i in range (len(bianchi_result)):
        str_tmp = ' {' + '{:d}, {:.4f}'.format(5*(i+1), bianchi_result[i]) +'},\n'
        str_bianchi = str_bianchi + str_tmp
    str_bianchi = str_bianchi + " }},\n"
    print(str_bianchi)
    return str_bianchi


# Settings for different MCS and mode
data_rates_20MHz = [8.603e6, 17.206e6, 25.8e6, 34.4e6, 51.5e6, 68.8e6, 77.4e6, 86e6, 103.2e6, 114.7e6, 129e6, 143.4e6]
ack_rates_20MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_40MHz = [17.2e6, 34.4e6, 51.5e6, 68.8e6, 103.2e6, 137.6e6, 154.9e6, 172.1e6, 206.5e6, 229.4e6, 258.1e6, 286.8e6]
ack_rates_40MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_80MHz = [36e6, 72.1e6, 108.1e6, 144.1e6, 216.2e6, 288.2e6, 324.3e6, 360.3e6, 432.4e6, 480.4e6, 540.4e6, 600.5e6]
ack_rates_80MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_160MHz = [72.1e6, 144.1e6, 216.2e6, 288.2e6, 432.4e6, 576.5e6, 648.5e6, 720.6e6, 864.7e6, 960.8e6, 1080.9e6, 1201e6]
ack_rates_160MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]

# Generate results with frame aggregation disabled
k = 1

# DIFS-based collision timing.
difs = 1
fo = open("bianchi_11ax_difs.txt", "w")
for i in range(len(data_rates_20MHz)):
    bianchi_result = bianchi_ax(data_rates_20MHz[i], ack_rates_20MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 20)
    fo.write(str_s)
for i in range(len(data_rates_40MHz)):
    bianchi_result = bianchi_ax(data_rates_40MHz[i], ack_rates_40MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 40)
    fo.write(str_s)
for i in range(len(data_rates_80MHz)):
    bianchi_result = bianchi_ax(data_rates_80MHz[i], ack_rates_80MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 80)
    fo.write(str_s)
for i in range(len(data_rates_160MHz)):
    bianchi_result = bianchi_ax(data_rates_160MHz[i], ack_rates_160MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 160)
    fo.write(str_s)
fo.close()

# EIFS-based collision timing.
difs = 0
fo = open("bianchi_11ax_eifs.txt", "w")
for i in range(len(data_rates_20MHz)):
    bianchi_result = bianchi_ax(data_rates_20MHz[i], ack_rates_20MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 20)
    fo.write(str_s)
for i in range(len(data_rates_40MHz)):
    bianchi_result = bianchi_ax(data_rates_40MHz[i], ack_rates_40MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 40)
    fo.write(str_s)
for i in range(len(data_rates_80MHz)):
    bianchi_result = bianchi_ax(data_rates_80MHz[i], ack_rates_80MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 80)
    fo.write(str_s)
for i in range(len(data_rates_160MHz)):
    bianchi_result = bianchi_ax(data_rates_160MHz[i], ack_rates_160MHz[i], k, difs)
    str_s = str_result(bianchi_result, i, 160)
    fo.write(str_s)
fo.close()
# Root URL configuration: everything is delegated to the ebets app,
# plus the Django admin.
# NOTE(review): patterns() is pre-Django-1.10 API (removed in 1.10);
# this module targets an older Django release.
from django.conf.urls import patterns, include, url

from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    url(r'^', include('ebets.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
# vpnr 48 run_trial(hori2, duration=4.000, speed=300) run_trial(rws[2], duration=8.0) run_trial(rbs[12], duration=8.0) run_trial(rws[6], duration=8.0) run_trial(rbs[22], duration=8.0) run_trial(cm200, duration=8.0, speed=150) run_trial(cm200, duration=8.0, speed=800) run_trial(rbs[6], duration=8.0) run_trial(msm0, duration=4.000, speed=400) run_trial(rbs[9], duration=8.0) run_trial(mem2, duration=3.000, speed=600) run_trial(mem0, duration=7.000, speed=200) run_trial(rws[16], duration=8.0) run_trial(rws[18], duration=8.0) run_trial(rbs[1], duration=8.0) run_trial(rbs[10], duration=8.0) run_trial(rws[15], duration=8.0) run_trial(rws[21], duration=8.0) run_trial(rbs[0], duration=8.0) run_trial(rws[1], duration=8.0) run_trial(mem2, duration=5.000, speed=300) show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',)) run_trial(rws[12], duration=8.0) run_trial(cm400, duration=8.0, speed=400) run_trial(rbs[4], duration=8.0) run_trial(rbs[19], duration=8.0) run_trial(mem0, duration=4.000, speed=400) run_trial(rbs[8], duration=8.0) run_trial(rbs[11], duration=8.0) run_trial(rws[13], duration=8.0) run_trial(rws[8], duration=8.0) run_trial(cm400, duration=8.0, speed=200) run_trial(mem1, duration=5.000, speed=300) run_trial(cm400, duration=8.0, speed=300) run_trial(hori1, duration=6.000, speed=200) run_trial(rbs[15], duration=8.0) run_trial(hori0, duration=3.000, speed=400) run_trial(msm0, duration=7.000, speed=200) run_trial(rws[0], duration=8.0) run_trial(mem0, duration=2.500, speed=800) run_trial(rws[17], duration=8.0) run_trial(cm100, duration=8.0, speed=200) run_trial(mem0, duration=3.000, speed=600) show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',)) run_trial(msm1, duration=4.000, speed=400) run_trial(rbs[18], duration=8.0) run_trial(mem1, duration=4.000, speed=400) run_trial(msm2, duration=4.000, speed=400) run_trial(mem1, duration=7.000, speed=200) run_trial(msm2, duration=3.000, speed=600) run_trial(mem1, 
duration=2.500, speed=800) run_trial(hori0, duration=2.000, speed=600) run_trial(mem1, duration=9.000, speed=150) run_trial(rbs[23], duration=8.0) run_trial(cm100, duration=8.0, speed=150) run_trial(cm200, duration=8.0, speed=200) run_trial(rws[5], duration=8.0) run_trial(hori2, duration=2.000, speed=600) run_trial(msm1, duration=2.500, speed=800) run_trial(rws[9], duration=8.0) run_trial(cm100, duration=8.0, speed=400) run_trial(rbs[2], duration=8.0) run_trial(rbs[14], duration=8.0) run_trial(cm200, duration=8.0, speed=400) run_trial(rbs[5], duration=8.0) show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',)) run_trial(hori1, duration=8.000, speed=150) run_trial(rws[10], duration=8.0) run_trial(rws[19], duration=8.0) run_trial(rws[20], duration=8.0) run_trial(rbs[21], duration=8.0) run_trial(hori0, duration=6.000, speed=200) run_trial(msm0, duration=3.000, speed=600) run_trial(rbs[13], duration=8.0) run_trial(cm200, duration=8.0, speed=300) run_trial(msm1, duration=3.000, speed=600) run_trial(cm400, duration=8.0, speed=600) run_trial(rbs[7], duration=8.0) run_trial(rws[7], duration=8.0) run_trial(rbs[3], duration=8.0) run_trial(hori0, duration=8.000, speed=150) run_trial(mem2, duration=9.000, speed=150) run_trial(rws[4], duration=8.0) run_trial(hori2, duration=1.500, speed=800
) run_trial(cm400, duration=8.0, speed=150) run_trial(hori0, duration=4.000, speed=300) run_trial(cm400, duration=8.0, speed=800) show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',)) run_trial(msm1, duration=5.000, speed=300) run_trial(msm2, du
ration=5.000, speed=300) run_trial(msm0, duration=2.500, speed=800) run_trial(mem2, duration=4.000, speed=400) run_trial(cm200, duration=8.0, speed=600) run_trial(hori1, duration=1.500, speed=800) run_trial(msm0, duration=9.000, speed=150) run_trial(hori0, duration=1.500, speed=800) run_trial(mem2, duration=2.500, speed=800) run_trial(rbs[24], duration=8.0) run_trial(msm2, duration=9.000, speed=150) run_trial(hori1, duration=4.000, speed=300) run_trial(rbs[16], duration=8.0) run_trial(rbs[17], duration=8.0) run_trial(msm2, duration=2.500, speed=800) run_trial(mem1, duration=3.000, speed=600) run_trial(msm1, duration=9.000, speed=150) run_trial(rws[11], duration=8.0) run_trial(hori2, duration=8.000, speed=150) run_trial(hori1, duration=2.000, speed=600) run_trial(msm2, duration=7.000, speed=200) show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',)) run_trial(mem0, duration=5.000, speed=300) run_trial(hori2, duration=6.000, speed=200) run_trial(msm0, duration=5.000, speed=300) run_trial(rws[22], duration=8.0) run_trial(cm100, duration=8.0, speed=300) run_trial(mem0, duration=9.000, speed=150) run_trial(rws[23], duration=8.0) run_trial(rws[14], duration=8.0) run_trial(rws[24], duration=8.0) run_trial(msm1, duration=7.000, speed=200) run_trial(rws[3], duration=8.0) run_trial(cm100, duration=8.0, speed=800) run_trial(hori2, duration=3.000, speed=400) run_trial(rbs[20], duration=8.0) run_trial(hori1, duration=3.000, speed=400) run_trial(mem2, duration=7.000, speed=200) run_trial(cm100, duration=8.0, speed=600) run_movie(movie1audio, 'Jetzt folgt ein Video mit Ton.\n\nWeiter mit Leertaste') run_movie(movie2noaudio, 'Jetzt folgt ein Video OHNE Ton.\n\nWeiter mit Leertaste')
# Tests for user_service.get_user_feed: a user's feed must contain posts
# from themselves and from the users they follow, newest first.
from django.test import TestCase

from api.helpers import user_service
from api.factories import UserFactory, PostFactory


class UserServiceTest(TestCase):
    # Number of posts created per user in setUp.
    POSTS_PER_USER = 10

    def setUp(self):
        # follower follows main_user; test_user is unrelated noise.
        self.main_user = UserFactory()
        self.follower = UserFactory()
        self.test_user = UserFactory()
        self.main_user.followers.add(self.follower)
        self.follower.following.add(self.main_user)
        for i in range(0, self.POSTS_PER_USER):
            PostFactory(creator=self.main_user)
            PostFactory(creator=self.test_user)
            PostFactory(creator=self.follower)

    def test_user_feed_returns_posts_from_correct_users(self):
        # Feed = follower's own posts + followed user's posts; never test_user's.
        posts = user_service.get_user_feed(self.follower.id, 0, 20)
        self.assertEqual(len(posts), self.POSTS_PER_USER * 2)
        for post in posts:
            self.assertIn(post.creator_id, [self.main_user.id, self.follower.id])

    def test_user_feed_returns_posts_ordered_correctly(self):
        # Posts must be strictly newest-first.
        posts = user_service.get_user_feed(self.follower.id, 0, 20)
        for i in range(0, len(posts) - 1):
            self.assertGreater(posts[i].created_at, posts[i + 1].created_at)

    def test_user_feed_returns_correct_pages(self):
        # TODO(review): placeholder — pagination behavior is untested.
        pass
Joris Jensen <jjensen@techfak.uni-bielefeld.de> # # License: BSD 3 clause from __future__ import division import numpy as np from scipy.optimize import minimize from sklearn.utils import validation from .rslvq import RslvqModel class LmrslvqModel(RslvqModel): """Localized Matrix Robust Soft Learning Vector Quantization Parameters ---------- prototypes_per_class : int or list of int, optional (default=1) Number of prototypes per class. Use list to specify different numbers per class. initial_prototypes : array-like, shape = [n_prototypes, n_features + 1], optional Prototypes to start with. If not given initialization near the class means. Class label must be placed as last entry of each prototype. initial_matrices : list of array-like, optional Matrices to start with. If not given random initialization regularization : float or array-like, shape = [n_classes/n_prototypes], optional (default=0.0) Values between 0 and 1. Regularization is done by the log determinant of the relevance matrix. Without regularization relevances may degenerate to zero. dim : int, optional Maximum rank or projection dimensions classwise : boolean, optional If true, each class has one relevance matrix. If false, each prototype has one relevance matrix. sigma : float, optional (default=0.5) Variance for the distribution. max_iter : int, optional (default=2500) The maximum number of iterations. gtol : float, optional (default=1e-5) Gradient norm must be less than gtol before successful termination of bfgs. display : boolean, optional (default=False) Print information about the bfgs steps. random_state : int, RandomState instance or None, optional If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. 
Attributes ---------- w_ : array-like, shape = [n_prototypes, n_features] Prototype vector, where n_prototypes in the number of prototypes and n_features is the number of features c_w_ : array-like, shape = [n_prototypes] Prototype classes classes_ : array-like, shape = [n_classes] Array containing labels. omegas_ : list of array-like Relevance Matrices dim_ : list of int Maximum rank of projection regularization_ : array-like, shape = [n_classes/n_prototypes] Values between 0 and 1 See also -------- RslvqModel, MrslvqModel """ def __init__(self, prototypes_per_class=1, initial_prototypes=None, initial_matrices=None, regularization=0.0, dim=None, classwise=False, sigma=1, max_iter=2500, gtol=1e-5, display=False, random_state=None): super(LmrslvqModel, self).__init__(sigma=sigma, random_state=random_state, prototypes_per_class=prototypes_per_class, initial_prototypes=initial_prototypes, gtol=gtol, display=display, max_iter=max_iter) self.regularization = regularization self.initial_matrices = initial_matrices self.classwise = classwise self.initialdim = dim def _optgrad(self, variables, training_data, label_equals_prototype, random_state, lr_relevances=0, lr_prototypes=1): n_data, n_dim = training_data.shape nb_prototypes = self.c_w_.size variables = variables.reshape(variables.size // n_dim, n_dim) prototypes = variables[:nb_prototypes] # dim to indices indices = [] for i in range(len(self.dim_)): indices.append(sum(self.dim_[:i + 1])) omegas = np.split(variables[nb_prototypes:], indices[:-1]) # .conj().T g = np.zeros(variables.shape) if lr_relevances > 0: gw = [] for i in range(len(omegas)): gw.append(np.zeros(omegas[i].shape)) c = 1 / self.sigma for i in range(n_data): xi = training_data[i] c_xi = label_equals_prototype[i] for j in range(prototypes.shape[0]): if len(omegas) == nb_prototypes: omega_index = j else: omega_index = np.where(self.classes_ == self.c_w_[j])[0][0] oo = omegas[omega_index].T.dot(omegas[omega_index]) d = (xi - prototypes[j])[np.newaxis].T p 
= self._p(j, xi, prototypes=prototypes, omega=omegas[omega_index]) if self.c_w_[j] == c_xi: pj = self._p(j, xi, prototypes=prototypes, y=c_xi, omega=omegas[omega_index]) if lr_prototypes > 0: if self.c_w_[j] == c_xi: g[j] += (c * (pj - p) * oo.dot(d)).ravel() else: g[j] -= (c * p * oo.dot(d)).ravel() if lr_relevances > 0: if self.c_w_[j] == c_xi: gw -= (pj - p) / self.sigma * ( omegas[omega_index].dot(d).dot(d.T)) else: gw += p / self.sigma * (omegas[omega_index].dot(d).dot(d.T)) if lr_relevances > 0: if sum(self.regularization_) > 0: regmatrices = np.zeros([sum(self.dim_), n_dim]) for i in range(len(omegas)): regmatrices[sum(self.dim_[:i + 1]) - self.dim_[i]:sum( self.dim_[:i + 1])] = \ self.regularization_[i] * np.linalg.pinv(omegas[i]) g[nb_prototypes:] = 2 / n_data * lr_relevances * \ np.concatenate(gw) - regmatrices else: g[nb_prototypes:] = 2 / n_data * lr_relevances * \ np.concatenate(gw) if lr_prototypes > 0: g[:nb_prototypes] = 1 / n_data * \ lr_prototypes * g[:nb_prototypes] g *= -(1 + 0.0001 * random_state.rand(*g.shape) - 0.5) return g.ravel() def _optfun(self, variables, training_data, label_equals_prototype): n_data, n_dim = training_data.shape nb_prototypes = self.c_w_.size variables = variables.reshape(variables.size // n_dim, n_dim) prototypes = variables[:nb_prototypes] indices = [] for i in range(len(self.dim_)): indices.append(sum(self.dim_[:i + 1])) omegas = np.split(variables[nb_prototypes:], indices[:-1]) out = 0 for i in range(n_data): xi = training_data[i] y = label_equals_prototype[i] if len(omegas) == nb_prototypes: fs = [self._costf(xi, prototypes[j], omega=omegas[j]) for j in range(nb_prototypes)] else: fs = [self._costf(xi, prototypes[j], omega=omegas[np.where(self.classes_ == self.c_w_[j])[0][0]]) for j in range(nb_prototypes)] fs_max = max(fs) s1 = sum([np.math.exp(fs[i] - fs_max) for i in range(len(fs)) if self.c_w_[i] == y]) s2 = sum([np.math.exp(f - fs_max) for f in fs]) s1 += 0.0000001 s2 += 0.0000001 ou
t += np.math.log(s1 / s2) return -out def _optimize(self, x, y, random_state): nb_prototypes, nb_features = self.w_.shape nb_classes = len(self.classes_) if not isinstance(self.classwise, bool): raise ValueError("classwise must be a boolean") if self.initialdim is None: if
self.classwise: self.dim_ = nb_features * np.ones(nb_classes, dtype=np.int) else:
""" Windows Process Control winprocess.run launches a child process and returns the exit code. Optionally, it can: redirect stdin, stdout & stderr to files run the command as another user limit the process's running time control the process window (location, size, window state, desktop) Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32 extensions. This code is free for any purpose, with no warranty of any kind. -- John B. Dell'Aquila <jbd@alum.mit.edu> """ import win32api, win32process, win32security import win32event, win32con, msvcrt, win32gui def logonUser(loginString): """ Login as specified user and return handle. loginString: 'Domain\nUser\nPassword'; for local login use . or empty string as domain e.g. '.\nadministrator\nsecret_password' """ domain, user, passwd = loginString.split('\n') return win32security.LogonUser( user, domain, passwd, win32con.LOGON32_LOGON_INTERACTIVE, win32con.LOGON32_PROVIDER_DEFAULT ) class Process: """ A Windows process. """ def __init__(self, cmd, login=None, hStdin=None, hStdout=None, hStderr=None, show=1, xy=None, xySize=None, desktop=None): """ Create a Windows process. cmd: command to run login: run as user 'Domain\nUser\nPassword' hStdin, hStdout, hStderr: handles for process I/O; default is caller's stdin, stdout & stderr show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...) xy: window offset (x, y) of upper left corner in pixels xySize: window size (width, height) in pixels desktop: lpDesktop - name of desktop e.g. 'winsta0\\default' None = inherit current desktop '' = create new desktop if necessary User calling login requires additional privileges: Act as part of the operating system [not needed on Windows XP] Increase quotas Replace a process level token Login string must EITHER be an administrator's account (ordinary user can't access current desktop - see Microsoft Q165194) OR use desktop='' to run another desktop invisibly (may be very slow to startup & finalize). 
""" si = win32process.STARTUPINFO() si.dwFlags = (win32con.STARTF_USESTDHANDLES ^ win32con.STARTF_USESHOWWINDOW) if hStdin is None: si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE) else: si.hStdInput = hStdin if hStdout is None: si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE) else: si.hStdOutput = hStdout if hStderr is None: si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE) else: si.hStdError = hStderr si.wShowWindow = show if xy is not None: si.dwX, si.dwY = xy si.dwFlags ^= win32con.STARTF_USEPOSITION if xySize is not None: si.dwXSize, si.dwYSize = xySize si.dwFlags ^= win32con.STARTF_USESIZE if desktop is not None: si.lpDesktop = desktop procArgs = (None, # appName cmd, # commandLine None, # processAttributes None, # threadAttributes 1, # bInheritHandles win32process.CREATE_NEW_CONSOLE, # dwCreationFlags None, # newEnvironment None, # currentDirectory si) # startupinfo if login is not None: hUser = logonUser(login) win32security.ImpersonateLoggedOnUser(hUser) procHandles = win32process.CreateProcessAsUser(hUser, *procArgs) win32security.RevertToSelf() else: procHandles = win32process.CreateProcess(*procArgs) self.hProcess, self.hThread, self.PId, self.TId = procH
andles def wait(self, mSec=None): """ Wait for process to finish or for specified number of milliseconds to elapse. """ if mSec is None: mSec = win32event.INFINITE return win32event.WaitFo
rSingleObject(self.hProcess, mSec) def kill(self, gracePeriod=5000): """ Kill process. Try for an orderly shutdown via WM_CLOSE. If still running after gracePeriod (5 sec. default), terminate. """ win32gui.EnumWindows(self.__close__, 0) if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0: win32process.TerminateProcess(self.hProcess, 0) win32api.Sleep(100) # wait for resources to be released def __close__(self, hwnd, dummy): """ EnumWindows callback - sends WM_CLOSE to any window owned by this process. """ TId, PId = win32process.GetWindowThreadProcessId(hwnd) if PId == self.PId: win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0) def exitCode(self): """ Return process exit code. """ return win32process.GetExitCodeProcess(self.hProcess) def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw): """ Run cmd as a child process and return exit code. mSec: terminate cmd after specified number of milliseconds stdin, stdout, stderr: file objects for child I/O (use hStdin etc. to attach handles instead of files); default is caller's stdin, stdout & stderr; kw: see Process.__init__ for more keyword options """ if stdin is not None: kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno()) if stdout is not None: kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno()) if stderr is not None: kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno()) child = Process(cmd, **kw) if child.wait(mSec) != win32event.WAIT_OBJECT_0: child.kill() raise WindowsError, 'process timeout exceeded' return child.exitCode() if __name__ == '__main__': # Pipe commands to a shell and display the output in notepad print 'Testing winprocess.py...' 
import tempfile timeoutSeconds = 15 cmdString = """\ REM Test of winprocess.py piping commands to a shell.\r REM This window will close in %d seconds.\r vol\r net user\r _this_is_a_test_of_stderr_\r """ % timeoutSeconds cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile() cmd.write(cmdString) cmd.seek(0) print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd, stdout=out, stderr=out) cmd.close() print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name, show=win32con.SW_MAXIMIZE, mSec=timeoutSeconds*1000) out.close()
from __future__ import print_function

from twython import Twython

import util


class TwitterBot(util.SocialMediaBot):
    """Social media bot for posting status updates to Twitter.

    BUG FIX: the original docstring said "Tumblr"; this bot talks to the
    Twitter API via Twython.
    """

    NAME = "twitter"

    def __init__(self, **kwargs):
        """Build the Twython client from the base class's OAuth config."""
        super(TwitterBot, self).__init__(**kwargs)
        # Twython takes the OAuth credentials positionally
        # (app key/secret, token/secret) — assumes self.oauth_config is
        # ordered accordingly; TODO confirm against util.SocialMediaBot.
        self.client = Twython(*self.oauth_config)

    def post_update(self):
        """Generate a message and post it as a Twitter status update."""
        # 140 characters: the classic tweet length limit.
        text = self.generate_text(limit_characters=140)
        self.client.update_status(status=text)


if __name__ == "__main__":
    twitterbot = TwitterBot()
    twitterbot.post_update()
:type cm: :class:`ClassManager` """ def __init__(self, buff, cm) : self.CM = cm self.field_idx_diff = readuleb128( buff ) self.access_flags = readuleb128( buff ) self.field_idx = 0 self.name = None self.proto = None self.class_name = None self.init_value = None self.access_flags_string = None def reload(self) : name = self.CM.get_field( self.field_idx ) self.class_name = name[0] self.name = name[2] self.proto = ''.join(i for i in name[1]) def set_init_value(self, value) : """
Setup the init value object of the field :param value: the init value :type value: :class:`EncodedValue` """ self.init_value = value def get_init_value(self) : """ Return the init value object of the field :rtype: :class:`EncodedValue` """ return self.init_value def adjust_idx(self, val) : self.field_idx = self.field_idx_dif
f + val def get_field_idx_diff(self) : """ Return the index into the field_ids list for the identity of this field (includes the name and descriptor), represented as a difference from the index of previous element in the list :rtype: int """ return self.field_idx_diff def get_field_idx(self) : """ Return the real index of the method :rtype: int """ return self.field_idx def get_access_flags(self) : """ Return the access flags of the field :rtype: int """ return self.access_flags def get_class_name(self) : """ Return the class name of the field :rtype: string """ return self.class_name def get_descriptor(self) : """ Return the descriptor of the field :rtype: string """ return self.proto def get_name(self) : """ Return the name of the field :rtype: string """ return self.name def get_access_flags_string(self) : """ Return the access flags string of the field :rtype: string """ if self.access_flags_string == None : self.access_flags_string = get_access_flags_string( self.get_access_flags() ) if self.access_flags_string == "" : self.access_flags_string = "0x%x" % self.get_access_flags() return self.access_flags_string def set_name(self, value) : self.CM.set_hook_field_name( self, value ) self.reload() def get_obj(self) : return [] def get_raw(self) : return writeuleb128( self.field_idx_diff ) + writeuleb128( self.access_flags ) def get_size(self) : return len(self.get_raw()) def show(self) : """ Display the information about the field """ colors = bytecode.disable_print_colors() self.pretty_show() bytecode.enable_print_colors(colors) def pretty_show(self) : """ Display the information (with a pretty print) about the field """ bytecode._PrintSubBanner("Field Information") bytecode._PrintDefault("%s->%s %s [access_flags=%s]\n" % ( self.get_class_name(), self.get_name(), self.get_descriptor(), self.get_access_flags_string() )) init_value = self.get_init_value() if init_value != None : bytecode._PrintDefault( "\tinit value: %s\n" % str( init_value.get_value() ) ) 
self.show_dref() def show_dref(self) : """ Display where this field is read or written """ try : bytecode._PrintSubBanner("DREF") bytecode._PrintDRef("R", self.DREFr.items) bytecode._PrintDRef("W", self.DREFw.items) bytecode._PrintSubBanner() except AttributeError: pass class EncodedMethod : """ This class can parse an encoded_method of a dex file :param buff: a string which represents a Buff object of the encoded_method :type buff: Buff object :param cm: a ClassManager object :type cm: :class:`ClassManager` """ def __init__(self, buff, cm) : self.CM = cm self.method_idx_diff = readuleb128( buff ) #: method index diff in the corresponding section self.access_flags = readuleb128( buff ) #: access flags of the method self.code_off = readuleb128( buff ) #: offset of the code section self.method_idx = 0 self.name = None self.proto = None self.class_name = None self.code = None self.access_flags_string = None self.notes = [] def adjust_idx(self, val) : self.method_idx = self.method_idx_diff + val def get_method_idx(self) : """ Return the real index of the method :rtype: int """ return self.method_idx def get_method_idx_diff(self) : """ Return index into the method_ids list for the identity of this method (includes the name and descriptor), represented as a difference from the index of previous element in the lis :rtype: int """ return self.method_idx_diff def get_access_flags(self) : """ Return the access flags of the method :rtype: int """ return self.access_flags def get_code_off(self) : """ Return the offset from the start of the file to the code structure for this method, or 0 if this method is either abstract or native :rtype: int """ return self.code_off def get_access_flags_string(self) : """ Return the access flags string of the method :rtype: string """ if self.access_flags_string == None : self.access_flags_string = get_access_flags_string( self.get_access_flags() ) if self.access_flags_string == "" : self.access_flags_string = "0x%x" % self.get_access_flags() 
return self.access_flags_string def reload(self) : v = self.CM.get_method( self.method_idx ) self.class_name = v[0] self.name = v[1] self.proto = ''.join(i for i in v[2]) self.code = self.CM.get_code( self.code_off ) def get_locals(self): ret = self.proto.split(')') params = ret[0][1:].split() return self.code.get_registers_size() - len(params) - 1 def each_params_by_register(self, nb, proto): bytecode._PrintSubBanner("Params") ret = proto.split(')') params = ret[0][1:].split() if params: bytecode._PrintDefault("- local registers: v%d...v%d\n" % (0, nb - len(params) - 1)) j = 0 for i in xrange(nb - len(params), nb): bytecode._PrintDefault("- v%d:%s\n" % (i, get_type(params[j]))) j += 1 else : bytecode._PrintDefault("local registers: v%d...v%d\n" % (0, nb-1)) bytecode._PrintDefault("- return:%s\n" % get_type(ret[1])) bytecode._PrintSubBanner() def show_info(self) : """ Display the basic information about the method """ bytecode._PrintSubBanner("Method Information") bytecode._PrintDefault("%s->%s%s [access_flags=%s]\n" % ( self.get_class_name(), self.get_name(), self.get_descriptor(), self.get_access_flags_string() )) def show(self) : """ Display the information about the method """ colors = bytecode.disable_print_colors() self.pretty_show() bytecode.enable_print_colors(colors) def pretty_show(self) : """ Display the information (with a pretty print) about the method """ self.show_info() self.show_notes() if self.code != None : self.each_params_by_register(
from gateway import Gateway, ge
t_gateway from integration import Integration, get_inte
gration from utils.credit_card import CreditCard
# -*- coding: utf-8 -*-
from datetime import datetime

from django.db import models
from django.core.urlresolvers import reverse

from ..core.models import TimeStampedModel


class TipoDiagnosticos(TimeStampedModel):
    """Catalog entry for a diagnosis type."""

    nombre = models.CharField(max_length=150, blank=False, null=False,
                              verbose_name=u'Diagnóstico')

    def get_absolute_url(self):
        # All diagnosis types link back to the list view.
        return reverse('diagnosticos:list')

    def __unicode__(self):
        return self.nombre


class Diagnosticos(TimeStampedModel):
    """A diagnosis recorded at a given date and time."""

    tipo_diagnostico = models.ForeignKey(TipoDiagnosticos, blank=True,
                                         null=True,
                                         verbose_name=u'Diagnóstico')
    # BUG FIX: the original used default=datetime.now() — the call is
    # evaluated once at import time, freezing the default at server start.
    # Passing the callable defers evaluation to each object's creation.
    fecha = models.DateField(blank=False, null=False,
                             help_text=u'Formato: dd/mm/yyyy',
                             default=datetime.now)
    hora = models.TimeField(blank=False, null=False,
                            help_text=u'Formato: hh:mm',
                            default=datetime.now)
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-13 11:16
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Auto-generated schema migration for the restaurant app.

    - Sets explicit verbose names on the MenuImage model.
    - Makes MenuImage.restaurant nullable, keeping CASCADE deletes and the
      ``menu_image`` reverse accessor.
    """

    dependencies = [
        # Must run after the migration that dropped menuimage.menu_name.
        ('restaurant', '0014_remove_menuimage_menu_name'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='menuimage',
            options={'verbose_name': 'MenuImage', 'verbose_name_plural': 'MenuImages'},
        ),
        migrations.AlterField(
            model_name='menuimage',
            name='restaurant',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='menu_image', to='restaurant.Restaurant'),
        ),
    ]
#!/usr/bin/env python
# Fetch and print the SSH keys registered for one PlanetLab account.
# ``Auth`` is expected to provide ``plc_api`` (an API proxy) and ``auth``
# (the credentials structure it needs) — star-import, so names come from there.
from Auth import *

# GetKeys(auth, filter, return_fields): look up the keys belonging to
# person_id 249241, returning only the key_id and key columns.
keyId = plc_api.GetKeys(auth, {'person_id': 249241}, ['key_id', 'key'])

# Print each key's value and numeric id (Python 2 print statements).
for key in keyId:
    print "A new key:"
    print "Key value ->", key['key']
    print "Key id ->",key['key_id']
### # Copyright (c) 2012, Valentin Lorentz # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### """ Add a description of the plugin (to be presented to the user inside the wizard) here. This should describe *what* the plugin does. """ import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # X
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.authors.unknown

# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}

# This is a url where the most recent plugin package can be downloaded.
__url__ = ''  # 'http://supybot.com/Members/yourname/TwitterStream/download'

# Pull in the plugin's config and implementation modules.
from . import config
from . import plugin
from imp import reload
reload(plugin)  # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!

# Only load the test module when the bot is running its test suite.
if world.testing:
    from . import test

# Names supybot's plugin loader looks for.
Class = plugin.Class
configure = config.configure


# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
# -*- coding: utf-8 -*-

"""
Read Tweetworks API users from XML responses.

Nicolas Ward
@ultranurd
ultranurd@yahoo.com
http://www.ultranurd.net/code/tweetworks/
2009.06.19
"""

"""
This file is part of the Tweetworks Python API.

Copyright © 2009 Nicolas Ward

Tweetworks Python API is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Tweetworks Python API is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with the Tweetworks Python API. If not, see
http://www.gnu.org/licenses/

The term "Tweetworks" is Copyright © 2009 Tweetworks, LLC and is used under
license. See http://www.tweetworks.com/pages/terms

The use of this software requires a unique Tweetworks API key. You must be a
registered Tweetworks user, and have received an API key after requesting one
via http://www.tweetworks.com/pages/contact.

The term "Twitter" is Copyright © 2009 Twitter, Inc.
"""

# System includes
import lxml.etree
from lxml.builder import E

class User:
    """
    Represents the data fields of a single Tweetworks user.
    """

    def __init__(self, xml = None):
        """
        Reads user fields from the XML, or create an empty user.

        id - int - Tweetworks numeric user ID
        username - string - Tweetworks/Twitter username
        avatar_url - string - Twitter avatar URL
        name - string - User's "real" name
        twitter_id - int - Twitter numeric user ID
        """

        # Initialize an empty user if no XML was provided
        if xml == None:
            self.id = None
            self.username = ""
            self.avatar_url = ""
            # BUG FIX: ``name`` was never initialized on this path, so
            # str()/xml() on an empty User raised AttributeError.
            self.name = ""
            self.twitter_id = None
            return

        # User ID
        self.id = int(xml.xpath("/user/id/text()")[0])

        # User's Twitter username
        self.username = unicode(xml.xpath("/user/username/text()")[0])

        # User avatar URL (loaded from Amazon S3, obtained from Twitter)
        self.avatar_url = unicode(xml.xpath("/user/avatar_url/text()")[0])

        # User's "real" name
        self.name = unicode(xml.xpath("/user/name/text()")[0])

        # Twitter ID of the user; this should always be present but isn't always
        twitter_id = xml.xpath("/user/twitter_id/text()")
        if len(twitter_id) == 1:
            self.twitter_id = int(twitter_id[0])
        else:
            self.twitter_id = None

    def __str__(self):
        """
        Returns this User as an XML string.
        """

        # Get the XML tree and stringify
        return lxml.etree.tostring(self.xml())

    def __repr__(self):
        """
        Returns an eval-ready string for this User's constructor.
        """

        # BUG FIX: lxml.etree has no "parsestring"; the parser entry point
        # that accepts a string is fromstring().
        return "tweetworks.User(lxml.etree.fromstring(%s))" % repr(str(self))

    def xml(self):
        """
        Generates an XML element tree for this User.
        """

        # Construct the XML tree representing this User; twitter_id is
        # serialized as "" when absent (index-by-boolean selects the value).
        xml = E("user",
                E("id", str(self.id)),
                E("username", self.username),
                E("avatar_url", self.avatar_url),
                E("name", self.name),
                E("twitter_id",
                  ("", str(self.twitter_id))[self.twitter_id != None]),
                )

        # Return the XML tree (NOT a string)
        return xml
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# Packaging script for the azure-mgmt-storagepool SDK package.

import re
import os.path
from io import open
from setuptools import find_packages, setup

# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-storagepool"
PACKAGE_PPRINT_NAME = "Storage Pool Management"

# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')

# Version extraction inspired from 'requests'.
# Prefer version.py, fall back to _version.py (newer SDK layout).
with open(os.path.join(package_folder_path, 'version.py')
          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Cannot find version information')

# README + CHANGELOG become the PyPI long description.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
    changelog = f.read()

setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + '\n\n' + changelog,
    long_description_content_type='text/markdown',
    license='MIT License',
    author='Microsoft Corporation',
    author_email='azpysdkhelp@microsoft.com',
    url='https://github.com/Azure/azure-sdk-for-python',
    keywords="azure, azure sdk",  # update with search keywords relevant to the azure service / product
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'License :: OSI Approved :: MIT License',
    ],
    zip_safe=False,
    packages=find_packages(exclude=[
        'tests',
        # Exclude packages that will be covered by PEP420 or nspkg
        'azure',
        'azure.mgmt',
    ]),
    install_requires=[
        'msrest>=0.6.21',
        'azure-common~=1.1',
        'azure-mgmt-core>=1.3.0,<2.0.0',
    ],
    python_requires=">=3.6"
)
'''
Created on 5/02/2010

@author: henry@henryjenkins.name
'''
import datetime


class user(object):
    """Accumulates per-date upload/download traffic counters for one host.

    Both ``dataUp`` and ``dataDown`` use the layout::

        {date: {'on':  {'data': <int>, 'pkts': <int>},
                'off': {'data': <int>, 'pkts': <int>}}}

    In the getters, ``type`` selects the counter ('data' or 'pkts') and
    ``peak`` selects the period: 'on', 'off', or anything else (such as the
    default 'other') meaning both periods combined.
    """

    # Class-level placeholders; the real containers are created per-instance
    # in __init__ so they are not shared between users.
    dataUp = None
    dataDown = None
    macAddress = ""
    name = ""

    def __init__(self, mac="", name=""):
        '''
        Constructor
        '''
        self.name = name
        self.dataUp = {}
        self.dataDown = {}
        self.macAddress = mac

    def getData(self, type, date=None, peak='other'):
        '''
        Retrieve up+down data for a set date, or the total data used by this
        user when ``date`` is None.

        Return int, data used by this user
        '''
        if date is None:
            return self.__getTotalData(type, peak)
        # BUG FIX: the original wrote "date += self.getUpData(...)", which
        # discarded the upload count (only the download figure was returned)
        # and raised TypeError whenever ``date`` was a real datetime.date.
        data = self.getDownData(type=type, date=date, peak=peak)
        data += self.getUpData(type=type, date=date, peak=peak)
        return data

    def __getTotalData(self, type, peak='other'):
        # Grand total across all dates, upload plus download.
        totalData = self.__getTotalUpData(type, peak)
        totalData = totalData + self.__getTotalDownData(peak=peak, type=type)
        return totalData

    def getUpData(self, type, date=None, peak='other'):
        '''Uploaded counter for ``date`` (all dates when None); 0 if unknown.'''
        data = 0
        if date is None:
            data = self.__getTotalUpData(type=type, peak=peak)
        elif date in self.dataUp:
            # BUG FIX: the original tested ``type`` (which holds 'data' or
            # 'pkts') against 'on'/'off'; the period selector is ``peak``,
            # exactly as __getTotalUpData already does.
            if peak == 'on' or peak == 'off':
                data = self.dataUp[date][peak][type]
            else:
                data = self.dataUp[date]['on'][type] + self.dataUp[date]['off'][type]
        return data

    def __getTotalUpData(self, type, peak='other'):
        dataTotal = 0
        for date, data in self.dataUp.items():
            if peak == 'on' or peak == 'off':
                dataTotal += data[peak][type]
            else:
                dataTotal += data['on'][type]
                dataTotal += data['off'][type]
        return dataTotal

    def getDownData(self, type, date=None, peak='other'):
        '''Downloaded counter for ``date`` (all dates when None); 0 if unknown.'''
        data = 0
        if date is None:
            data = self.__getTotalDownData(type=type, peak=peak)
        elif date in self.dataDown:
            # BUG FIX: same peak/type mix-up as getUpData (see above).
            if peak == 'on' or peak == 'off':
                data = self.dataDown[date][peak][type]
            else:
                data = self.dataDown[date]['on'][type] + self.dataDown[date]['off'][type]
        return data

    def __getTotalDownData(self, type, peak='other'):
        dataTotal = 0
        for date, data in self.dataDown.items():
            if peak == 'on' or peak == 'off':
                dataTotal += data[peak][type]
            else:
                dataTotal += data['on'][type]
                dataTotal += data['off'][type]
        return dataTotal

    def addData(self, date=None, data=0, pkts=0, peak='on', direction='up'):
        '''Dispatch to addUpData/addDownData based on ``direction``.'''
        if direction == 'up':
            self.addUpData(date, data, pkts, peak)
        elif direction == 'down':
            self.addDownData(date, data, pkts, peak)

    def addUpData(self, date=None, data=0, pkts=0, peak='on'):
        '''Add uploaded bytes/packets for ``date`` (today when None).'''
        date = self.__checkDate(date)
        if date not in self.dataUp:  # first sample for this date
            self.dataUp[date] = {
                'on': {'data': 0, 'pkts': 0},
                'off': {'data': 0, 'pkts': 0}
            }
        self.dataUp[date][peak]['data'] += int(data)
        self.dataUp[date][peak]['pkts'] += int(pkts)

    def addDownData(self, date=None, data=0, pkts=0, peak='on'):
        '''Add downloaded bytes/packets for ``date`` (today when None).'''
        date = self.__checkDate(date)
        if date not in self.dataDown:  # first sample for this date
            self.dataDown[date] = {
                'on': {'data': 0, 'pkts': 0},
                'off': {'data': 0, 'pkts': 0}
            }
        self.dataDown[date][peak]['data'] += int(data)
        self.dataDown[date][peak]['pkts'] += int(pkts)

    def __checkDate(self, localDate=None):
        '''Helper method: default to today's date when none is supplied.'''
        if localDate is None:
            localDate = datetime.date.today()
        return localDate

    def setMac(self, mac=None):
        self.macAddress = mac

    def setName(self, name=None):
        self.name = name
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2011 Zadara Storage Inc.
# Copyright (c) 2011 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Ken Pepple
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""Built-in volume type properties."""

from oslo.config import cfg

from cinder import context
from cinder import db
from cinder import exception
from cinder.openstack.common.db import exception as db_exc
from cinder.openstack.common import log as logging

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


def create(context, name, extra_specs=None):
    """Creates volume types.

    :param context: request context
    :param name: unique name for the new volume type
    :param extra_specs: optional dict of extra specifications
    :raises VolumeTypeCreateFailed: on any DB error
    """
    # BUG FIX: ``extra_specs`` previously defaulted to a shared mutable {};
    # use a None sentinel so calls cannot alias (and mutate) one dict.
    if extra_specs is None:
        extra_specs = {}
    try:
        type_ref = db.volume_type_create(context,
                                         dict(name=name,
                                              extra_specs=extra_specs))
    except db_exc.DBError as e:
        LOG.exception(_('DB error: %s') % e)
        raise exception.VolumeTypeCreateFailed(name=name,
                                               extra_specs=extra_specs)
    return type_ref


def destroy(context, id):
    """Marks volume types as deleted."""
    if id is None:
        msg = _("id cannot be None")
        raise exception.InvalidVolumeType(reason=msg)
    else:
        db.volume_type_destroy(context, id)


def get_all_types(context, inactive=0, search_opts=None):
    """Get all non-deleted volume_types.

    Pass true as argument if you want deleted volume types returned also.
    ``search_opts`` may carry an 'extra_specs' dict; only types whose extra
    specs contain every listed key/value pair are returned.
    """
    # BUG FIX: ``search_opts`` previously defaulted to a shared mutable {}.
    if search_opts is None:
        search_opts = {}
    vol_types = db.volume_type_get_all(context, inactive)

    if search_opts:
        LOG.debug(_("Searching by: %s") % str(search_opts))

        def _check_extra_specs_match(vol_type, searchdict):
            # Every requested key must be present with an equal value.
            for k, v in searchdict.iteritems():
                if (k not in vol_type['extra_specs'].keys()
                        or vol_type['extra_specs'][k] != v):
                    return False
            return True

        # search_option to filter_name mapping.
        filter_mapping = {'extra_specs': _check_extra_specs_match}

        result = {}
        for type_name, type_args in vol_types.iteritems():
            # go over all filters in the list
            for opt, values in search_opts.iteritems():
                try:
                    filter_func = filter_mapping[opt]
                except KeyError:
                    # no such filter - ignore it, go to next filter
                    continue
                else:
                    if filter_func(type_args, values):
                        result[type_name] = type_args
                        break
        vol_types = result
    return vol_types


def get_volume_type(ctxt, id):
    """Retrieves single volume type by id."""
    if id is None:
        msg = _("id cannot be None")
        raise exception.InvalidVolumeType(reason=msg)

    if ctxt is None:
        ctxt = context.get_admin_context()

    return db.volume_type_get(ctxt, id)


def get_volume_type_by_name(context, name):
    """Retrieves single volume type by name."""
    if name is None:
        msg = _("name cannot be None")
        raise exception.InvalidVolumeType(reason=msg)

    return db.volume_type_get_by_name(context, name)


def get_default_volume_type():
    """Get the default volume type ({} when unset or missing)."""
    name = CONF.default_volume_type
    vol_type = {}

    if name is not None:
        ctxt = context.get_admin_context()
        try:
            vol_type = get_volume_type_by_name(ctxt, name)
        except exception.VolumeTypeNotFoundByName as e:
            # Couldn't find volume type with the name in default_volume_type
            # flag, record this issue and move on
            #TODO(zhiteng) consider add notification to warn admin
            LOG.exception(_('Default volume type is not found, '
                            'please check default_volume_type config: %s'), e)

    return vol_type


def is_key_value_present(volume_type_id, key, value, volume_type=None):
    """Return True if the type's extra specs map ``key`` to ``value``."""
    if volume_type_id is None:
        return False

    if volume_type is None:
        volume_type = get_volume_type(context.get_admin_context(),
                                      volume_type_id)

    if (volume_type.get('extra_specs') is None or
            volume_type['extra_specs'].get(key) != value):
        return False
    else:
        return True


def get_volume_type_extra_specs(volume_type_id, key=False):
    """Return all extra specs, or one spec's value (False when missing)."""
    volume_type = get_volume_type(context.get_admin_context(),
                                  volume_type_id)
    extra_specs = volume_type['extra_specs']
    if key:
        if extra_specs.get(key):
            return extra_specs.get(key)
        else:
            return False
    else:
        return extra_specs


def is_encrypted(context, volume_type_id):
    """Return True if an encryption row exists for this volume type."""
    if volume_type_id is None:
        return False

    encryption = db.volume_type_encryption_get(context, volume_type_id)
    return encryption is not None


def get_volume_type_qos_specs(volume_type_id):
    """Return the QoS specs associated with a volume type."""
    ctxt = context.get_admin_context()
    res = db.volume_type_qos_specs_get(ctxt, volume_type_id)
    return res
import unittest
import pystache
from pystache import Renderer
from examples.nested_context import NestedContext
from examples.complex import Complex
from examples.lambdas import Lambdas
from examples.template_partial import TemplatePartial
from examples.simple import Simple
from pystache.tests.common import EXAMPLES_DIR
from pystache.tests.common import AssertStringMixin


class TestSimple(unittest.TestCase, AssertStringMixin):
    """End-to-end rendering checks: sections, partials, lambdas, blanks."""

    def test_nested_context(self):
        # A {{#foo}} section pushes its value onto the context stack, so
        # names inside resolve against the nested context first.
        renderer = Renderer()
        view = NestedContext(renderer)
        view.template = '{{#foo}}{{thing1}} and {{thing2}} and {{outer_thing}}{{/foo}}{{^foo}}Not foo!{{/foo}}'

        actual = renderer.render(view)
        self.assertString(actual, u"one and foo and two")

    def test_looping_and_negation_context(self):
        # A list-valued section renders once per item; the inverted section
        # {{^item}} must not render when the list is non-empty.
        template = '{{#item}}{{header}}: {{name}} {{/item}}{{^item}} Shouldnt see me{{/item}}'
        context = Complex()

        renderer = Renderer()
        actual = renderer.render(template, context)
        self.assertEqual(actual, "Colors: red Colors: green Colors: blue ")

    def test_empty_context(self):
        # With an empty list, only the inverted section renders.
        template = '{{#empty_list}}Shouldnt see me {{/empty_list}}{{^empty_list}}Should see me{{/empty_list}}'
        self.assertEqual(pystache.Renderer().render(template), "Should see me")

    def test_callables(self):
        # A lambda-valued section receives the section text; its return
        # value replaces the section in the output.
        view = Lambdas()
        view.template = '{{#replace_foo_with_bar}}foo != bar. oh, it does!{{/replace_foo_with_bar}}'

        renderer = Renderer()
        actual = renderer.render(view)
        self.assertString(actual, u'bar != bar. oh, it does!')

    def test_rendering_partial(self):
        renderer = Renderer(search_dirs=EXAMPLES_DIR)
        view = TemplatePartial(renderer=renderer)

        view.template = '{{>inner_partial}}'
        actual = renderer.render(view)
        self.assertString(actual, u'Again, Welcome!')

        # Partials also render correctly inside a looping section.
        view.template = '{{#looping}}{{>inner_partial}} {{/looping}}'
        actual = renderer.render(view)
        self.assertString(actual, u"Again, Welcome! Again, Welcome! Again, Welcome! ")

    def test_non_existent_value_renders_blank(self):
        # Unresolvable names render as the empty string rather than erroring.
        view = Simple()
        template = '{{not_set}} {{blank}}'
        self.assertEqual(pystache.Renderer().render(template), ' ')

    def test_template_partial_extension(self):
        """
        Side note:

        From the spec--

            Partial tags SHOULD be treated as standalone when appropriate.

        In particular, this means that trailing newlines should be removed.
        """
        renderer = Renderer(search_dirs=EXAMPLES_DIR, file_extension='txt')
        view = TemplatePartial(renderer=renderer)

        actual = renderer.render(view)
        self.assertString(actual, u"""Welcome
-------

## Again, Welcome! ##""")
""" Read graphs in LEDA format. See http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html """ # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2009 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. __all__ = ['read_leda', 'parse_leda'] import networkx from networkx.exception import NetworkXException, NetworkXError from networkx.utils import _get_fh, is_string_like def read_leda(path): """Read graph in GraphML format from path. Returns an XGraph or XDiGraph.""" fh=_get_fh(path,mode='r') G=parse_leda(fh) return G def parse_leda(lines): """Parse LEDA.GRAPH format from string or iterable. Returns an Graph or DiGraph.""" if is_string_like(lines): lines=iter(lines.split('\n')) lines = iter([line.rstrip('\n') for line in lines \ if not (line.startswith('#') or line.startswith('\n') or line=='')]) for i in range(3): lines.next() # Graph du = int(lines.next()) # -1 directed, -2 undirected if du==-1: G = networkx.DiGraph() else: G = networkx.Graph() # Nodes n =int(lines.next()) # number of vertices node={} for i in range(1,n+1): # LEDA counts from 1 to n symbol=lines.next().rstr
ip().strip('|{}| ') if symbol=="": symbol=str(i) # use int if no label - could be trouble node[i]=symbol G.add_nodes_from([s for i,s in node.items()]) # Edges m = int(lines.next()) # number of edges for i in range(m): try: s,t,reversal,label=lines.next().split() except: raise NetworkXError,\ 'Too few fields in LEDA.GRAPH edge %d' % (i+1) # BEWARE: no handling of reversal edges G.ad
d_edge(node[int(s)],node[int(t)],label=label[2:-2]) return G
from model.contact import Contact
from model.group import Group
from fixture.orm import ORMFixture
import random


def test_del_contact_from_group(app):
    """Removing a random contact from a random group must update the DB list.

    Preconditions are created on the fly: at least one group exists, and the
    chosen group has at least one member.
    """
    # Direct DB access for verification, independent of the UI under test.
    orm = ORMFixture(host="127.0.0.1", name="addressbook", user="root", password="")
    # check for existing any group
    if len(orm.get_group_list()) == 0:
        app.group.create(Group(name="test"))
    group = random.choice(orm.get_group_list())  # choose random group from list
    # Ensure the chosen group has at least one member: create a contact if
    # none exist outside the group, then attach one.
    if len(orm.get_contacts_in_group(Group(id=group.id))) == 0:
        if len(orm.get_contacts_not_in_group(Group(id=group.id))) == 0:
            app.contact.create(Contact(firstname="Ivan"))
        contact_not_in_group = random.choice(orm.get_contacts_not_in_group(Group(id=group.id)))
        app.contact.add_contact_to_group_by_id(contact_not_in_group.id, group.id)
    old_contacts_in_group = orm.get_contacts_in_group(Group(id=group.id))
    contact_in_group = random.choice(old_contacts_in_group)  # choose random contact from list
    app.contact.delete_contact_from_group_by_id(contact_in_group.id, group.id)
    new_contacts_in_group = orm.get_contacts_in_group(Group(id=group.id))
    # The list after deletion must equal the old list minus the removed contact.
    old_contacts_in_group.remove(contact_in_group)
    assert sorted(old_contacts_in_group, key=Contact.id_or_max) == sorted(new_contacts_in_group, key=Contact.id_or_max)
-------------------------------------------- # jsonrpc - jsonrpc interface for XBMC-compatible remotes # ----------------------------------------------------------------------- # $Id$ # # JSONRPC and XBMC eventserver to be used for XBMC-compatible # remotes. Only tested with Yatse so far. If something is not working, # do not blame the remote, blame this plugin. # # Not all API calls are implemented yet. # # ----------------------------------------------------------------------- # Freevo - A Home Theater PC framework # Copyright (C) 2014 Dirk Meyer, et al. # # First Edition: Dirk Meyer <https://github.com/Dischi> # Maintainer: Dirk Meyer <https://github.com/Dischi> # # Please see the file AUTHORS for a complete list of authors. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MER- # CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # ----------------------------------------------------------------------- */ # python imports import os import logging import socket import urllib # kaa imports import kaa import kaa.beacon # freevo imports from ... 
import core as freevo # get logging object log = logging.getLogger('freevo') # generic functions import utils import eventserver # jsonrpc callbacks import videolibrary as VideoLibrary import player as Player import playlist as Playlist class PluginInterface( freevo.Plugin ): """ JSONRPC and XBMC eventserver to be used for XBMC-compatible remotes """ @kaa.coroutine() def plugin_activate(self, level): """ Activate the plugin """ super(PluginInterface, self).plugin_activate(level) self.httpserver = freevo.get_plugin('httpserver') if not self.httpserver: raise RuntimeError('httpserver plugin not running') self.httpserver.server.add_json_handler('/jsonrpc', self.jsonrpc) self.httpserver.server.add_handler('/image/', self.provide_image) self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self._sock.bind(('', freevo.config.plugin.jsonrpc.eventserver)) udp = kaa.Socket() udp.wrap(self._sock, kaa.IO_READ | kaa.IO_WRITE) udp.signals['read'].connect(eventserver.handle) utils.imagedir = (yield kaa.beacon.get_db_info())['directory'] utils.cachedir = os.path.join(os.environ['HOME'], '.thumbnails') self.api = {} for module in ('VideoLibrary', 'Player', 'Playlist'): for name in dir(eval(module)): method = getattr(eval(module), name) if callable(method) and not name.startswith('_'): self.api[module + '.' 
+ name] = method @kaa.coroutine() def provide_image(self, path, **attributes): """ HTTP callback for images """ filename = '' path = urllib.unquote(path) if path.startswith('beacon'): filename = os.path.join(utils.imagedir, path[7:]) if path.startswith('cache'): filename = os.path.join(utils.cachedir, path[6:]) if path.startswith('thumbnail'): item = yield kaa.beacon.query(id=int(path.split('/')[2]), type=path.split('/')[1]) if len(item) != 1: log.error('beacon returned wrong results') yield None thumbnail = item[0].get('thumbnail') if thumbnail.needs_update or 1: yield kaa.inprogress(thumbnail.create(priority=kaa.beacon.Thumbnail.PRIORITY_HIGH)) filename = thumbnail.large if filename: if os.path.isfile(filename):
yield open(filename).read(), None, None log.error('no file: %s' % filename) yield None else: yield None def Application_GetProperties(self, properties): """ JsonRPC Callback Application
.GetProperties """ result = {} for prop in properties: if prop == 'version': result[prop] = {"major": 16,"minor": 0,"revision": "a5f3a99", "tag": "stable"} elif prop == 'volume': result[prop] = 100 elif prop == 'muted': result[prop] = eventserver.muted else: raise AttributeError('unsupported property: %s' % prop) return result def Settings_GetSettingValue(self, setting): """ JsonRPC Settings.GetSettingValue (MISSING) """ return {} def XBMC_GetInfoBooleans(self, booleans): """ JsonRPC Callback XBMC.GetInfoBooleans """ result = {} for b in booleans: if b == 'System.Platform.Linux': result[b] = True else: result[b] = False return result def XBMC_GetInfoLabels(self, labels): """ JsonRPC Callback XBMC.GetInfoLabels """ result = {} for l in labels: # FIXME: use correct values for all these labels if l == 'System.BuildVersion': result[l] = "13.1" elif l == 'System.KernelVersion': result[l] = "Linux 3.11.0" elif l == 'MusicPlayer.Codec': result[l] = "" elif l == 'MusicPlayer.SampleRate': result[l] = "" elif l == 'MusicPlayer.BitRate': result[l] = "" else: raise AttributeError('unsupported label: %s' % l) return result def XBMC_Ping(self): """ JsonRPC Ping """ return '' def JSONRPC_Ping(self): """ JsonRPC Ping """ return '' def GUI_ActivateWindow(self, window, parameters=None): """ Switch Menu Type """ window = window.lower() if window == 'pictures': freevo.Event(freevo.MENU_GOTO_MEDIA).post('image', event_source='user') elif window == 'musiclibrary': freevo.Event(freevo.MENU_GOTO_MEDIA).post('audio', event_source='user') elif window == 'videos': if parameters and parameters[0] == 'MovieTitles': freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'movie', event_source='user') if parameters and parameters[0] == 'TvShowTitles': freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'tv', event_source='user') elif window == 'home': freevo.Event(freevo.MENU_GOTO_MAINMENU).post(event_source='user') else: log.error('ActivateWindow: unsupported window: %s' % window) @kaa.coroutine() 
def jsonrpc(self, path, **attributes): """ HTTP callback for /jsonrpc """ if not attributes: # supported XBMC API version yield {"major": 6,"minor": 14,"patch": 3} method = attributes.get('method') params = attributes.get('params') result = None if method.startswith('Input'): callback = eventserver.input(method[6:].lower(), params) yield {'jsonrpc': '2.0', 'result': 'OK', 'id': attributes.get('id')} callback = self.api.get(method, None) or getattr(self, method.replace('.', '_'), None) if callback: # log.info('%s(%s)' % (method, params)) if params is None: result = callback() else: result = callback(**params) if isinstance(result, kaa.InProgress): result = yield result else: raise AttributeError('unsupported
#!/usr/bin/env python
#
# Copyright (C) 2011 Andy Aschwanden
#
# This file is part of PISM.
#
# PISM is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# PISM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License
# along with PISM; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

"""Create a PISM-readable NetCDF bootstrap file from the Storglaciaren
bed/surface DEMs (gzipped ASCII grids in the current directory)."""

import sys
import time

import numpy as np
from pyproj import Proj

from sys import stderr

write = stderr.write

# Try different netCDF modules; netCDF4 is preferred, netCDF3 is the fallback.
try:
    from netCDF4 import Dataset as CDF
except ImportError:
    from netCDF3 import Dataset as CDF

from optparse import OptionParser

__author__ = "Andy Aschwanden"

# Create PISM-readable input file from Storglaciaren DEM

parser = OptionParser()
parser.usage = "usage: %prog [options]"
parser.description = "Preprocess Storglaciaren files."

(options, args) = parser.parse_args()

write('------------------------------\n')
write('PISM-Storglaciaren example\n')
write('------------------------------\n')

# Data directory: all input files are expected next to this script.
data_dir = './'

# Bed and Surface DEMs for Storglaciaren.
XFile = data_dir + 'X.txt.gz'
YFile = data_dir + 'Y.txt.gz'
zBaseFile = data_dir + 'zBase.txt.gz'
zSurfFile = data_dir + 'zSurf.txt.gz'

# Load coordinate information. Note: Swedish grid (RT90) uses inverse notation
# X -> northing, Y -> easting
try:
    write('Reading northing coordinate infos from %s: ' % XFile)
    X = np.loadtxt(XFile)
    write('Done.\n')
    write('Reading easting coordinate infos from %s: ' % YFile)
    Y = np.loadtxt(YFile)
    write('Done.\n')
except IOError:
    # BUG FIX: the original used "% XFile % YFile" (chained %), which raises
    # TypeError instead of printing the intended message.
    write('ERROR: File %s or %s could not be found.\n' % (XFile, YFile))
    sys.exit(2)

# Load bed DEM.
try:
    write('Reading DEM from %s: ' % zBaseFile)
    zBase = np.loadtxt(zBaseFile)
    write('Done.\n')
except IOError:
    write('ERROR: File %s could not be found.\n' % zBaseFile)
    sys.exit(2)

# Load surface DEM.
try:
    write('Reading DEM from %s: ' % zSurfFile)
    zSurf = np.loadtxt(zSurfFile)
    write('Done.\n')
except IOError:
    write('ERROR: File %s could not be found.\n' % zSurfFile)
    sys.exit(2)

# Grid size. DEM has 10 m spacing.
# M is the number of rows (northing), N the number of columns (easting).
N = zBase.shape[1]
M = zBase.shape[0]
e0 = Y.min()
n0 = X.min()
de = 10  # m
dn = 10  # m

e1 = e0 + (N - 1) * de
n1 = n0 + (M - 1) * dn

easting = np.linspace(e0, e1, N)
northing = np.linspace(n0, n1, M)

# Convert to lat/lon.
# From http://lists.maptools.org/pipermail/proj/2008-December/004165.html:
#
# However, a simpler method, now recommended by the Swedish Land Survey
# instead of a 7-parameter shift, is to start from the WGS84 datum, and than
# tweak the projection parameters a little: just use a Transverse Mercator
# with
#   central meridian: 15" 48' 22.624306" E
#   scale factor:     1.00000561024
#   false easting:    1500064.274 m
#   false northing:   -667.711 m
# ( http://www.lantmateriet.se/templates/LMV_Page.aspx?id=5197&lang=EN )
projRT90 = "+proj=tmerc +datum=WGS84 +lon_0=-15.806284 +x_0=1500064.274 +y_0=-667.711 +k=1.00000561024 +units=m"

ee, nn = np.meshgrid(easting, northing)
projection = Proj(projRT90)
longitude, latitude = projection(ee, nn, inverse=True)

# BUG FIX: the original reported "rows = N, columns = M", but M = shape[0]
# (rows) and N = shape[1] (columns); the values were swapped.
write("Coordinates of the lower-left grid corner:\n"
      " easting  = %.0f\n"
      " northing = %.0f\n"
      "Grid size:\n"
      " rows    = %d\n"
      " columns = %d\n" % (e0, n0, M, N))

# Fill value for missing data in the output file.
fill_value = -9999
bed_valid_min = -5000.0
thk_valid_min = 0.0

bed = np.flipud(zBase)
dem = np.flipud(zSurf)           # ignored by bootstrapping
thk = np.flipud(zSurf - zBase)   # used for bootstrapping

# Replace NaNs with zeros.
thk = np.nan_to_num(thk)

# There are some negative thickness values.
# Quick and dirty: set to zero.
# Some inconsistencies in the original data still need to be sorted out
# (filtering).
thk[thk < 0] = 0

# Output filename.
ncfile = 'pism_storglaciaren_3d.nc'

nc = CDF(ncfile, "w", format='NETCDF3_CLASSIC')  # for netCDF4 module

# Create dimensions x and y.
nc.createDimension("x", size=easting.shape[0])
nc.createDimension("y", size=northing.shape[0])

x = nc.createVariable("x", 'f4', dimensions=("x",))
x.units = "m"
x.long_name = "easting"
x.standard_name = "projection_x_coordinate"

y = nc.createVariable("y", 'f4', dimensions=("y",))
y.units = "m"
y.long_name = "northing"
y.standard_name = "projection_y_coordinate"

x[:] = easting
y[:] = northing


def def_var(nc, name, units, fillvalue):
    """Create a 2D (y, x) float variable with the given units/fill value."""
    var = nc.createVariable(name, 'f', dimensions=("y", "x"), fill_value=fillvalue)
    var.units = units
    return var


lon_var = def_var(nc, "lon", "degrees_east", None)
lon_var.standard_name = "longitude"
lon_var[:] = longitude

lat_var = def_var(nc, "lat", "degrees_north", None)
lat_var.standard_name = "latitude"
lat_var[:] = latitude

bed_var = def_var(nc, "topg", "m", fill_value)
bed_var.valid_min = bed_valid_min
bed_var.standard_name = "bedrock_altitude"
bed_var.coordinates = "lat lon"
bed_var[:] = bed

thk_var = def_var(nc, "thk", "m", fill_value)
thk_var.valid_min = thk_valid_min
thk_var.standard_name = "land_ice_thickness"
thk_var.coordinates = "lat lon"
thk_var[:] = thk

dem_var = def_var(nc, "usurf_from_dem", "m", fill_value)
dem_var.standard_name = "surface_altitude"
dem_var.coordinates = "lat lon"
dem_var[:] = dem

# Generate (somewhat) reasonable acab (surface mass balance), linear in
# easting between acab_up and acab_down.
acab_max = 2.5    # m/a
acab_min = -3.0   # m/a
acab_up = easting.min() + 200    # m; location of upstream end of linear acab
acab_down = easting.max() - 600  # m; location of downstream end of linear acab
acab = np.ones_like(dem)
acab[:] = acab_max - (acab_max - acab_min) * (easting - acab_up) / (acab_down - acab_up)
acab[thk < 1] = acab_min

acab_var = def_var(nc, "climatic_mass_balance", "m year-1", fill_value)
acab_var.standard_name = "land_ice_surface_specific_mass_balance"
acab_var[:] = acab

# Set boundary conditions for Scandinavian-type polythermal glacier
# ------------------------------------------------------------------------------
#
# (A) Surface temperature for temperature equation bc
T0 = 273.15   # K
Tma = -6.0    # degC, mean annual air temperature at Tarfala
zcts = 1300   # m a.s.l.; altitude where CTS is at the surface, projected to topg
slope = 100   # m; range around which surface temp transition happens

# old abrupt jump:
# artm = np.zeros((M,N),float) + T0
# artm[bed<zcts] = T0 + Tma # Scandinavian-type polythermal glacier

# smoothed version; FIXME: can't we at least have it depend on initial DEM?
# additional lapse rate?
artm = T0 + Tma * (zcts + slope - bed) / (2.0 * slope)
artm[bed < zcts - slope] = T0 + Tma
artm[bed > zcts + slope] = T0

artm_var = def_var(nc, "ice_surface_temp", "K", fill_value)
artm_var[:] = artm

# Set global attributes.
nc.Conventions = "CF-1.4"
historysep = ' '
historystr = time.asctime() + ': ' + historysep.join(sys.argv) + '\n'
setattr(nc, 'history', historystr)
nc.projection = projRT90
nc.close()
write('Done writing NetCDF file %s!\n' % ncfile)
#!/usr/bin/python

"""List the classes (and optionally their attributes) found in a UML model
file created with Gaphor."""

import optparse
import sys

from gaphor import UML
from gaphor.application import Session

# Command line usage string for optparse.
usage = "usage: %prog [options] file.gaphor"


def main():
    """Parse the command line, load the model, and print its classes."""
    opt_parser = optparse.OptionParser(usage=usage)
    opt_parser.add_option(
        "-a",
        "--attributes",
        dest="attrs",
        action="store_true",
        help="Print class attributes",
    )
    options, positional = opt_parser.parse_args()

    # Exactly one model file is required.
    if len(positional) != 1:
        opt_parser.print_help()
        sys.exit(1)

    model_path = positional[0]

    # Spin up a Gaphor session and pull out the services we need.
    session = Session()
    factory = session.get_service("element_factory")
    session.get_service("file_manager").load(model_path)

    # Walk every UML class known to the element factory.
    for uml_class in factory.select(UML.Class):
        print(f"Found class {uml_class.name}")
        if options.attrs:
            for attribute in uml_class.ownedAttribute:
                print(f"  Attribute: {attribute.name}")


if __name__ == "__main__":
    main()
from .rest import RestClient


class Rules(object):
    """Rules endpoint implementation.

    Args:
        domain (str): Your Auth0 domain, e.g: 'username.auth0.com'

        token (str): Management API v2 Token

        telemetry (bool, optional): Enable or disable Telemetry
            (defaults to True)

        timeout (float or tuple, optional): Change the requests
            connect and read timeout. Pass a tuple to specify
            both values separately or a float to set both to it.
            (defaults to 5.0 for both)

        rest_options (RestClientOptions): Pass an instance of
            RestClientOptions to configure additional RestClient
            options, such as rate-limit retries.
            (defaults to None)
    """

    def __init__(self, domain, token, telemetry=True, timeout=5.0, protocol="https", rest_options=None):
        self.domain = domain
        self.protocol = protocol
        self.client = RestClient(jwt=token, telemetry=telemetry, timeout=timeout, options=rest_options)

    def _url(self, id=None):
        """Build the rules endpoint URL, optionally for a single rule id."""
        url = '{}://{}/api/v2/rules'.format(self.protocol, self.domain)
        if id is not None:
            return '{}/{}'.format(url, id)
        return url

    def all(self, stage='login_success', enabled=True, fields=None, include_fields=True,
            page=None, per_page=None, include_totals=False):
        """Retrieves a list of all rules.

        Args:
            stage (str, optional):  Retrieves rules that match the execution stage.
                Defaults to login_success.

            enabled (bool, optional): If provided, retrieves rules that match
                the value, otherwise all rules are retrieved.

            fields (list, optional): A list of fields to include or exclude
                (depending on include_fields) from the result. Leave empty to
                retrieve all fields.

            include_fields (bool, optional): True if the fields specified are
                to be included in the result, False otherwise. Defaults to True.

            page (int, optional): The result's page number (zero based). When not set,
                the default value is up to the server.

            per_page (int, optional): The amount of entries per page. When not set,
                the default value is up to the server.

            include_totals (bool, optional): True if the query summary is
                to be included in the result, False otherwise. Defaults to False.

        See: https://auth0.com/docs/api/management/v2#!/Rules/get_rules
        """
        params = {
            'stage': stage,
            # Idiom fix: conditional expression instead of the error-prone
            # "cond and a or b" pattern.
            'fields': ','.join(fields) if fields else None,
            'include_fields': str(include_fields).lower(),
            'page': page,
            'per_page': per_page,
            'include_totals': str(include_totals).lower(),
        }

        # Since the default is True, this is here to disable the filter.
        if enabled is not None:
            params['enabled'] = str(enabled).lower()

        return self.client.get(self._url(), params=params)

    def create(self, body):
        """Creates a new rule.

        Args:
            body (dict): Attributes for the newly created rule.

        See: https://auth0.com/docs/api/v2#!/Rules/post_rules
        """
        return self.client.post(self._url(), data=body)

    def get(self, id, fields=None, include_fields=True):
        """Retrieves a rule by its ID.

        Args:
            id (str): The id of the rule to retrieve.

            fields (list, optional): A list of fields to include or exclude
                (depending on include_fields) from the result. Leave empty to
                retrieve all fields.

            include_fields (bool, optional): True if the fields specified are
                to be included in the result, False otherwise. Defaults to True.

        See: https://auth0.com/docs/api/management/v2#!/Rules/get_rules_by_id
        """
        params = {
            'fields': ','.join(fields) if fields else None,
            'include_fields': str(include_fields).lower(),
        }
        return self.client.get(self._url(id), params=params)

    def delete(self, id):
        """Delete a rule.

        Args:
            id (str): The id of the rule to delete.

        See: https://auth0.com/docs/api/management/v2#!/Rules/delete_rules_by_id
        """
        return self.client.delete(self._url(id))

    def update(self, id, body):
        """Update an existing rule.

        Args:
            id (str): The id of the rule to modify.

            body (dict): Attributes to modify.

        See: https://auth0.com/docs/api/v2#!/Rules/patch_rules_by_id
        """
        return self.client.patch(self._url(id), data=body)
import unittest
import os

from katello.tests.core.action_test_utils import CLIOptionTestCase, CLIActionTestCase
from katello.tests.core.organization import organization_data
from katello.tests.core.template import template_data

import katello.client.core.template
from katello.client.core.template import Delete
from katello.client.api.utils import ApiDataError


class RequiredCLIOptionsTests(CLIOptionTestCase):
    """Checks option validation for 'template delete'.

    #requires: organization, name
    #optional: environment (defaults to Library)
    """
    # Action under test; the option rules below are validated against it.
    action = Delete()

    # Option combinations that must be rejected (each is missing a
    # required option: org or name).
    disallowed_options = [
        ('--environment=dev', '--name=template_1'),
        ('--environment=dev', '--org=ACME'),
    ]

    # Option combinations that must be accepted.
    allowed_options = [
        ('--org=ACME', '--name=template_1'),
        ('--org=ACME', '--environment=dev', '--name=template_1'),
    ]


class TemplateInfoTest(CLIActionTestCase):
    """Behavior tests for the 'template delete' action."""

    # Fixture data pulled from the shared test-data modules.
    ORG = organization_data.ORGS[0]
    ENV = organization_data.ENVS[0]
    TPL = template_data.TEMPLATES[0]

    # CLI options fed to the action by mock_options() in setUp.
    OPTIONS = {
        'org': ORG['name'],
        'environment': ENV['name'],
        'name': TPL['name'],
    }

    def setUp(self):
        # Wire up the action, module, printer and option mocks, then stub out
        # the template lookup and the delete API call so no server is needed.
        self.set_action(Delete())
        self.set_module(katello.client.core.template)
        self.mock_printer()

        self.mock_options(self.OPTIONS)

        self.mock(self.module, 'get_template', self.TPL)
        self.mock(self.action.api, 'delete')

    def test_it_finds_the_template(self):
        # The action must look the template up by org, environment and name.
        self.run_action()
        self.module.get_template.assert_called_once_with(self.ORG['name'], self.ENV['name'], self.TPL['name'])

    def test_it_returns_error_when_template_not_found(self):
        # A failed lookup must map to the EX_DATAERR exit status.
        self.mock(self.module, 'get_template').side_effect = ApiDataError
        self.run_action(os.EX_DATAERR)

    def test_it_returns_success_when_template_found(self):
        self.run_action(os.EX_OK)

    def test_it_calls_delete_api(self):
        # Deletion must go through the API using the template's id.
        self.run_action()
        self.action.api.delete.assert_called_once_with(self.TPL['id'])
from datetime import datetime

from flask import Blueprint, jsonify, request

from app.dao.fact_notification_status_dao import (
    get_total_notifications_for_date_range,
)
from app.dao.fact_processing_time_dao import (
    get_processing_time_percentage_for_date_range,
)
from app.dao.services_dao import get_live_services_with_organisation
from app.errors import register_errors
from app.performance_dashboard.performance_dashboard_schema import (
    performance_dashboard_request,
)
from app.schema_validation import validate

performance_dashboard_blueprint = Blueprint('performance_dashboard', __name__,
                                            url_prefix='/performance-dashboard')
register_errors(performance_dashboard_blueprint)


@performance_dashboard_blueprint.route('')
def get_performance_dashboard():
    """Assemble the performance dashboard statistics as a JSON response."""
    # All statistics are as of last night; this matches the existing
    # performance platform and avoids the need to query notifications.
    if request.args:
        # Is it ok to reuse this? - should probably create a new one
        validate(request.args, performance_dashboard_request)

    # If start and end date are not set, we are expecting today's stats.
    today = str(datetime.utcnow().date())
    date_fmt = '%Y-%m-%d'
    start_date = datetime.strptime(request.args.get('start_date', today), date_fmt).date()
    end_date = datetime.strptime(request.args.get('end_date', today), date_fmt).date()

    all_time_rows = get_total_notifications_for_date_range(start_date=None, end_date=None)
    total_notifications, emails, sms, letters = transform_results_into_totals(all_time_rows)

    range_rows = get_total_notifications_for_date_range(start_date=start_date, end_date=end_date)
    processing_rows = get_processing_time_percentage_for_date_range(start_date=start_date, end_date=end_date)
    live_services = get_live_services_with_organisation()

    return jsonify({
        "total_notifications": total_notifications,
        "email_notifications": emails,
        "sms_notifications": sms,
        "letter_notifications": letters,
        "notifications_by_type": transform_into_notification_by_type_json(range_rows),
        "processing_time": transform_processing_time_results_to_json(processing_rows),
        "live_service_count": len(live_services),
        "services_using_notify": transform_services_to_json(live_services),
    })


def transform_results_into_totals(total_notifications_results):
    """Sum per-row email/sms/letter counts into per-channel and grand totals."""
    emails = sum(row.emails for row in total_notifications_results)
    sms = sum(row.sms for row in total_notifications_results)
    letters = sum(row.letters for row in total_notifications_results)
    return emails + sms + letters, emails, sms, letters


def transform_into_notification_by_type_json(total_notifications):
    """Serialize per-date notification counts to a list of JSON-able dicts."""
    return [
        {"date": row.bst_date, "emails": row.emails, "sms": row.sms, "letters": row.letters}
        for row in total_notifications
    ]


def transform_processing_time_results_to_json(processing_time_results):
    """Serialize per-date processing-time percentages."""
    return [
        {"date": row.date, "percentage_under_10_seconds": row.percentage}
        for row in processing_time_results
    ]


def transform_services_to_json(services_results):
    """Serialize live services together with their organisations."""
    return [
        {"service_id": row.service_id, "service_name": row.service_name,
         "organisation_id": row.organisation_id, "organisation_name": row.organisation_name}
        for row in services_results
    ]
#    This file is part of Korman.
#
#    Korman is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    Korman is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with Korman.  If not, see <http://www.gnu.org/licenses/>.

import bpy
from bpy.props import *
from PyHSPlasma import *

from .base import PlasmaModifierProperties
from ..prop_world import game_versions
from ...exporter import ExportError
from ... import idprops


class PlasmaVersionedNodeTree(idprops.IDPropMixin, bpy.types.PropertyGroup):
    """One entry in an Advanced Logic modifier: a node tree plus the set of
    Plasma game versions it should be exported for."""
    name = StringProperty(name="Name")
    version = EnumProperty(name="Version",
                           description="Plasma versions this node tree exports under",
                           items=game_versions,
                           options={"ENUM_FLAG"},
                           # Default: all known game versions selected.
                           default=set(list(zip(*game_versions))[0]))
    node_tree = PointerProperty(name="Node Tree",
                                description="Node Tree to export",
                                type=bpy.types.NodeTree)
    node_name = StringProperty(name="Node Ref",
                               description="Attach a reference to this node")

    @classmethod
    def _idprop_mapping(cls):
        # Maps the pointer property to the legacy string ID-property name.
        return {"node_tree": "node_tree_name"}

    def _idprop_sources(self):
        # Legacy string names are resolved against Blender's node groups.
        return {"node_tree_name": bpy.data.node_groups}


class PlasmaAdvancedLogic(PlasmaModifierProperties):
    """Modifier that exports arbitrary Plasma logic node trees, filtered by
    game version."""
    pl_id = "advanced_logic"

    bl_category = "Logic"
    bl_label = "Advanced"
    bl_description = "Plasma Logic Nodes"
    bl_icon = "NODETREE"

    logic_groups = CollectionProperty(type=PlasmaVersionedNodeTree)
    active_group_index = IntProperty(options={"HIDDEN"})

    def export(self, exporter, bo, so):
        version = exporter.mgr.getVer()
        for i in self.logic_groups:
            # i.version holds enum-item *names*; resolve them to the version
            # constants via module globals (brought in by the star imports).
            our_versions = [globals()[j] for j in i.version]
            if version in our_versions:
                if i.node_tree is None:
                    raise ExportError("'{}': Advanced Logic is missing a node tree for '{}'".format(bo.name, i.version))

                # If node_name is defined, then we're only adding a reference. We will make sure that
                # the entire node tree is exported once before the post_export step, however.
                if i.node_name:
                    exporter.want_node_trees[i.node_tree.name] = (bo, so)
                    node = i.node_tree.nodes.get(i.node_name, None)
                    if node is None:
                        raise ExportError("Node '{}' does not exist in '{}'".format(i.node_name, i.node_tree.name))
                    # We are going to assume get_key will do the adding correctly. Single modifiers
                    # should fetch the appropriate SceneObject before doing anything, so this will
                    # be a no-op in that case. Multi modifiers should accept any SceneObject, however
                    node.get_key(exporter, so)
                else:
                    exporter.node_trees_exported.add(i.node_tree.name)
                    i.node_tree.export(exporter, bo, so)

    def harvest_actors(self):
        # Union of actors referenced by all attached node trees.
        actors = set()
        for i in self.logic_groups:
            actors.update(i.node_tree.harvest_actors())
        return actors


class PlasmaSpawnPoint(PlasmaModifierProperties):
    """Marks an object as an avatar link-in (spawn) point."""
    pl_id = "spawnpoint"

    bl_category = "Logic"
    bl_label = "Spawn Point"
    bl_description = "Point at which avatars link into the Age"

    def export(self, exporter, bo, so):
        # Not much to this modifier... It's basically a flag that tells the engine, "hey, this is a
        # place the avatar can show up." Nice to have a simple one to get started with.
        spawn = exporter.mgr.add_object(pl=plSpawnModifier, so=so, name=self.key_name)

    @property
    def requires_actor(self):
        return True


class PlasmaMaintainersMarker(PlasmaModifierProperties):
    """Designates the D'ni coordinate origin of the Age, with a calibration
    state that controls what the in-game KI reports."""
    pl_id = "maintainersmarker"

    bl_category = "Logic"
    bl_label = "Maintainer's Marker"
    bl_description = "Designates an object as the D'ni coordinate origin point of the Age."
    bl_icon = "OUTLINER_DATA_EMPTY"

    calibration = EnumProperty(name="Calibration",
                               description="State of repair for the Marker",
                               items=[
                                   ("kBroken", "Broken",
                                    "A marker which reports scrambled coordinates to the KI."),
                                   ("kRepaired", "Repaired",
                                    "A marker which reports blank coordinates to the KI."),
                                   ("kCalibrated", "Calibrated",
                                    "A marker which reports accurate coordinates to the KI.")
                               ])

    def export(self, exporter, bo, so):
        maintmark = exporter.mgr.add_object(pl=plMaintainersMarkerModifier, so=so, name=self.key_name)
        # Map the enum string to the matching plMaintainersMarkerModifier constant.
        maintmark.calibration = getattr(plMaintainersMarkerModifier, self.calibration)

    @property
    def requires_actor(self):
        return True
__author__ = 'ganeshchand'

import re


def regex_search(pattern_string, string_source):
    """Report (via print) whether pattern_string matches string_source."""
    outcome = "matched" if re.search(pattern_string, string_source) else "did not match"
    print(f"{pattern_string} {outcome} {string_source}")


# Matching a pattern in one string.
mystring_anchors = 'aaaaa!@#$!@#$aaaaaadefg'
# $ needs escaping when it should not carry its special meaning: it is a
# reserved anchor character marking the end of the string.  A pattern like
# aab$ matches a string that ends with aab, with absolutely nothing beyond it.
pattern_withoutanchors = r'@#\$!'
regex_search(pattern_withoutanchors, mystring_anchors)

pattern_withanchors = r'defg$'
regex_search(pattern_withanchors, mystring_anchors)

# Patterns to be matched:
#   defg$ : string must end with defg
#   ^d    : must begin with d
#   ^a    : must begin with a
#   ^a*!  : must begin with any number of a's followed by !
patterns = ["defg$", "^d", "^a", "^a*!"]
for patterntobematched in patterns:
    regex_search(patterntobematched, mystring_anchors)

# matching a pattern in an array of string
#!/usr/bin/env python
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) 2012-2015, Michigan State University.
# Copyright (C) 2015, The Regents of the University of California.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
#   copyright notice, this list of conditions and the following
#   disclaimer in the documentation and/or other materials provided
#   with the distribution.
#
# * Neither the name of the Michigan State University nor the names
#   of its contributors may be used to endorse or promote products
#   derived from this software without specific prior written
#   permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Contact: khmer-project@idyll.org
"""
Use a set of query reads to sweep out overlapping reads from another file.

% python scripts/sweep-reads2.py <query reads> <search reads>

Results end up in <search reads>.sweep2.

Use '-h' for parameter help.
"""

import sys
import khmer
import os.path
import screed
from khmer import khmer_args
from khmer.khmer_args import (build_nodegraph_args, DEFAULT_MAX_TABLESIZE)
# BUG FIX: write_record_pair was called below but never imported, causing a
# NameError on the first paired read.
from khmer.utils import broken_paired_reader, write_record, write_record_pair


def main():
    """Sweep reads overlapping the query set into <search reads>.sweep2."""
    parser = build_nodegraph_args()
    parser.add_argument('-o', '--outfile',
                        help='output file; default is "infile".sweep2')
    parser.add_argument('-q', '--quiet')
    parser.add_argument('input_filename')
    parser.add_argument('read_filename')

    args = parser.parse_args()

    inp = args.input_filename
    readsfile = args.read_filename

    outfile = os.path.basename(readsfile) + '.sweep2'
    if args.outfile:
        outfile = args.outfile

    # create a countgraph data structure
    ht = khmer_args.create_countgraph(args)

    # load contigs, connect into N partitions
    print('loading input reads from', inp)
    ht.consume_seqfile(inp)

    print('starting sweep.')

    # m counts the reads (or read pairs) swept into the output.
    m = 0
    instream = screed.open(readsfile)

    # Use a context manager so the output file is closed even on error
    # (the original leaked the file handle).
    with open(outfile, 'w') as outfp:
        for n, is_pair, read1, read2 in broken_paired_reader(instream):
            if n % 10000 == 0:
                print('...', n, m)

            if is_pair:
                # Keep the pair if either mate overlaps the query set.
                count1 = ht.get_median_count(read1.sequence)[0]
                count2 = ht.get_median_count(read2.sequence)[0]
                if count1 or count2:
                    m += 1
                    write_record_pair(read1, read2, outfp)
            else:
                count = ht.get_median_count(read1.sequence)[0]
                if count:
                    m += 1
                    write_record(read1, outfp)


if __name__ == '__main__':
    main()

# vim: set filetype=python tabstop=4 softtabstop=4 shiftwidth=4 expandtab:
# vim: set textwidth=79:
#!/usr/bin/env python
# encoding: utf-8
# -------------------------------------------------------------------------------
# version:     ??
# author:      fernando
# license:     MIT License
# contact:     iw518@163.com
# purpose:     views
# date:        2016-12-14
# copyright:   copyright 2016 Xu, Aiwu
# -------------------------------------------------------------------------------
from flask import redirect, url_for, render_template

from app.model.models import Team
from . import hr
from .forms import RegisterForm


@hr.route('/team_manage', methods=['POST', 'GET'])
def team_manage():
    """Render the team-management form; on valid POST, create a Team link
    between the selected job and user, then redirect to the employee list."""
    form = RegisterForm()
    if form.validate_on_submit():
        # NOTE(review): the Team instance is constructed but never explicitly
        # added to a DB session or committed here — presumably the model's
        # constructor persists it as a side effect; verify against
        # app.model.models.Team, otherwise this record is silently dropped.
        Team(job_id=form.job_selections.data, user_id=form.user_selections.data)
        return redirect(url_for('order.employee'))
    # GET request or failed validation: re-render the form.
    return render_template('hr/team_manage.html', form=form)
#!/usr/bin/env python
# encoding: utf-8

'A simple client for accessing api.ly.g0v.tw.'

import json
import unittest

try:
    import urllib.request as request
    import urllib.parse as urlparse
except ImportError:  # Python 2 fallback
    import urllib2 as request
    import urllib as urlparse


def assert_args(func, *args):
    """Decorator: assert that the first real argument (after ``self``) is
    non-empty before delegating to *func*."""
    def inner(*args):
        required_arg = args[1]  # args[0] is ``self`` on the decorated methods
        assert(len(required_arg) > 0)
        return func(*args)
    return inner


class LY_G0V_Client:
    """Minimal REST client for the ly.g0v.tw Legislative Yuan API."""

    BASE_URL = 'http://api-beta.ly.g0v.tw/v0/'
    # BASE_URL = 'http://api.ly.g0v.tw/v0/'

    def _fetch_data(self, url_path):
        """GET ``BASE_URL + url_path`` and return the decoded JSON body.

        Any failure is reported to stdout and re-raised.
        """
        URL = LY_G0V_Client.BASE_URL + url_path
        try:
            f = request.urlopen(URL)
            r = f.read()
            r = r.decode('utf-8')
            return json.loads(r)
        except Exception as e:
            print("Failed to call " + URL)
            raise e

    def fetch_all_bills(self):
        'Fetch all bills.'
        return self._fetch_data('collections/bills')

    def fetch_all_motions(self):
        'Fetch all motions.'
        return self._fetch_data('collections/motions')

    def fetch_all_sittings(self):
        'Fetch all sittings.'
        return self._fetch_data('collections/sittings')

    @assert_args
    def fetch_bill(self, bill_id):
        'Fetch metadata of a specific bill.'
        return self._fetch_data('collections/bills/' + str(bill_id))

    @assert_args
    def fetch_bill_data(self, bill_id):
        'Fetch data of a specific bill.'
        assert(len(bill_id) > 0)
        return self._fetch_data('collections/bills/' + str(bill_id) + '/data')

    @assert_args
    def fetch_motions_related_with_bill(self, bill_id):
        'Fetch motions related with a specific bill.'
        query = json.dumps({'bill_ref': bill_id})
        query = urlparse.quote(query)
        return self._fetch_data('collections/motions/?q=' + query)

    @assert_args
    def fetch_sitting(self, sitting_id):
        'Fetch metadata of a specific sitting.'
        # BUG FIX: this method previously referenced the undefined name
        # ``bill_id`` (NameError at call time) and queried the *bills*
        # collection; it now uses its ``sitting_id`` argument against the
        # sittings collection.
        return self._fetch_data('collections/sittings/' + str(sitting_id))


class TestClient(unittest.TestCase):

    def setUp(self):
        import time
        time.sleep(1)  # throttle: be polite to the public API between tests
        self.client = LY_G0V_Client()

    def _test_bill(self, bill):
        self.assertTrue(isinstance(bill, dict), str(type(bill)))
        keys = ('proposed_by', 'doc', 'abstract', 'sponsors',
                'summary', 'bill_ref', 'motions', 'cosponsors',
                'bill_id')
        for key in keys:
            self.assertTrue(key in bill)
        if isinstance(bill['doc'], dict):
            self.assertTrue('pdf' in bill['doc'])
            self.assertTrue('doc' in bill['doc'])

    def _test_bills(self, bills):
        for key in ('entries', 'paging'):
            self.assertTrue(key in bills)
        for key in ('l', 'sk', 'count'):
            self.assertTrue(key in bills['paging'])
        for bill in bills['entries']:
            self._test_bill(bill)

    def _test_motion(self, motion):
        self.assertTrue(isinstance(motion, dict), str(type(motion)))
        keys = ('result', 'resolution', 'motion_class', 'bill_id',
                'agenda_item', 'bill_ref', 'tts_id', 'subitem',
                'status', 'sitting_id', 'item', 'summary',
                'tts_seq', 'proposed_by', 'doc')
        for key in keys:
            self.assertTrue(key in motion, key)
        if isinstance(motion['doc'], dict):
            self.assertTrue('pdf' in motion['doc'])
            self.assertTrue('doc' in motion['doc'])

    def _test_motions(self, motions):
        self.assertTrue(isinstance(motions, dict), str(type(motions)))
        for key in ('entries', 'paging'):
            self.assertTrue(key in motions)
        for key in ('l', 'sk', 'count'):
            self.assertTrue(key in motions['paging'])
        for motion in motions['entries']:
            self._test_motion(motion)

    def _test_data(self, data):
        for key in ('related', 'content'):
            self.assertTrue(key in data)
        self.assertTrue(isinstance(data['related'], list))
        self.assertTrue(isinstance(data['content'], list))
        for item in data['content']:
            content_keys = ('name', 'type', 'content', 'header')
            for content_key in content_keys:
                self.assertTrue(content_key in item)
            self.assertTrue(len(item['name']) > 0)
            self.assertTrue(isinstance(item['name'], str) or
                            isinstance(item['name'], unicode))
            self.assertTrue(len(item['type']) > 0)
            self.assertTrue(isinstance(item['type'], str) or
                            isinstance(item['type'], unicode))
            self.assertTrue(len(item['content']) > 0)
            self.assertTrue(isinstance(item['content'], list))
            for content in item['content']:
                self.assertTrue(isinstance(content, list))
                for line in content:
                    self.assertTrue(isinstance(line, str))
            self.assertTrue(len(item['header']) > 0)
            self.assertTrue(isinstance(item['header'], list))
            for header in item['header']:
                self.assertTrue(isinstance(header, str) or
                                isinstance(header, unicode))

    def _test_sitting(self, sitting):
        self.assertTrue(isinstance(sitting, dict), str(type(sitting)))
        keys = ('dates', 'ad', 'videos', 'extra', 'motions',
                'sitting', 'summary', 'session', 'committee',
                'id', 'name')
        for key in keys:
            self.assertTrue(key in sitting, key)

    def _test_sittings(self, sittings):
        self.assertTrue(isinstance(sittings, dict), str(type(sittings)))
        for key in ('entries', 'paging'):
            self.assertTrue(key in sittings)
        for key in ('l', 'sk', 'count'):
            self.assertTrue(key in sittings['paging'])
        for sitting in sittings['entries']:
            self._test_sitting(sitting)

    def test_all_bills(self):
        bills = self.client.fetch_all_bills()
        self._test_bills(bills)

    def test_all_motions(self):
        motions = self.client.fetch_all_motions()
        self._test_motions(motions)

    def test_all_sittings(self):
        sittings = self.client.fetch_all_sittings()
        self._test_sittings(sittings)

    def test_fetch_bill(self):
        bill = self.client.fetch_bill('1021021071000400')
        self._test_bill(bill)

    def test_fetch_bill_data(self):
        data = self.client.fetch_bill_data('1021021071000400')
        self._test_data(data)

    def test_fetch_motions_related_with_bill(self):
        motions = self.client.fetch_motions_related_with_bill('1021021071000400')
        self._test_motions(motions)


if __name__ == '__main__':
    unittest.main()
import redis


class BetaRedis(redis.StrictRedis):
    # Thin extension adding the GEO* commands, which this redis-py version
    # does not expose natively (hence "Beta") — forwarded as raw commands.

    def georadius(self, name, *values):
        """Run GEORADIUS on key *name*, passing *values* through verbatim."""
        return self.execute_command('GEORADIUS', name, *values)

    def geoadd(self, name, *values):
        """Run GEOADD on key *name*, passing *values* through verbatim."""
        return self.execute_command('GEOADD', name, *values)

    def geopos(self, name, *values):
        """Run GEOPOS on key *name*, passing *values* through verbatim."""
        return self.execute_command('GEOPOS', name, *values)


class RedisHeatMap:
    """Clusters geo points into heat-map buckets using Redis GEO commands."""

    # Key names for the GEO set (cluster centres) and the hash (counts).
    REDIS_KEY = 'heatmap'
    REDIS_KEY_GEO = REDIS_KEY + '_GEO'
    REDIS_KEY_HASH = REDIS_KEY + '_HASH'

    def __init__(self, host='localhost', port=6379, db=0):
        self.r = BetaRedis(host=host, port=port, db=db)
        # NOTE(review): flushes the ENTIRE selected db, not just the heatmap
        # keys — confirm the db is dedicated to this use.
        self.r.flushdb()

    def gen(self, data, distance=200000, min_sum=1):
        """Cluster *data* points and yield heat-map buckets.

        data     -- iterable of dicts with 'lng', 'lat' and 'key' entries
        distance -- clustering radius in metres around an existing centre
        min_sum  -- minimum count for a bucket to be yielded

        Yields dicts: {'key', 'lat', 'lng', 'sum'}.
        """
        for point in data:
            try:
                # Is there already a cluster centre within `distance` metres?
                res = self.r.georadius(self.REDIS_KEY_GEO, point['lng'],
                                       point['lat'], distance, 'm')
                if not res:
                    # No nearby centre: register this point as a new centre
                    # and start its counter at 1.
                    self.r.geoadd(self.REDIS_KEY_GEO, point['lng'],
                                  point['lat'], point['key'])
                    self.r.hset(self.REDIS_KEY_HASH, point['key'], 1)
                else:
                    # Nearby centre found: bump its counter
                    # (hincrby's amount defaults to 1).
                    self.r.hincrby(self.REDIS_KEY_HASH, res[0])
            except redis.exceptions.ResponseError as e:
                # NOTE(review): points with malformed coordinates are
                # silently skipped — confirm this is intended.
                pass
        # Emit every bucket that reached the threshold, resolving each
        # centre's coordinates back from the GEO set.
        for key in self.r.hscan_iter(self.REDIS_KEY_HASH):
            lng, lat = map(lambda x: x.decode(),
                           self.r.geopos(self.REDIS_KEY_GEO,
                                         key[0].decode())[0])
            if int(key[1]) >= min_sum:
                yield {'key': key[0].decode(), 'lat': lat, 'lng': lng,
                       'sum': int(key[1])}
import copy
from corehq.pillows.case import CasePillow
from corehq.pillows.mappings.reportcase_mapping import REPORT_CASE_MAPPING, REPORT_CASE_INDEX
from django.conf import settings
from .base import convert_property_dict


class ReportCasePillow(CasePillow):
    """
    Simple/Common Case properties Indexer
    an extension to CasePillow that provides for indexing of custom case properties
    """
    # Elasticsearch routing: alias, doc type, index and mapping for report cases.
    es_alias = "report_cases"
    es_type = "report_case"
    es_index = REPORT_CASE_INDEX
    default_mapping = REPORT_CASE_MAPPING

    def get_unique_id(self):
        # Unique checkpoint id is derived from the pillow's calc metadata
        # (``calc_meta`` is defined on a base class — not visible here).
        return self.calc_meta()

    def change_transform(self, doc_dict):
        """Prepare a case doc for indexing, or return None to skip it."""
        if self.get_domain(doc_dict) not in getattr(settings, 'ES_CASE_FULL_INDEX_DOMAINS', []):
            # full indexing is only enabled for select domains on an opt-in basis
            return None
        # Deep-copy so the inbound change-feed document is never mutated.
        doc_ret = copy.deepcopy(doc_dict)
        # Coerce dynamic case properties to the mapping's types, leaving the
        # listed root metadata keys untouched.
        convert_property_dict(
            doc_ret,
            self.default_mapping,
            override_root_keys=['_id', 'doc_type', '_rev', '#export_tag'])
        return doc_ret
from django.urls import include, path
from django.contrib import admin

# Root URL configuration: mounts each app's urlconf under its prefix.
# Each include() carries its own namespace so URL names can be reversed
# as 'core:...', 'crm:...' and 'proposal:...'.
urlpatterns = [
    path('', include('orcamentos.core.urls', namespace='core')),
    path('crm/', include('orcamentos.crm.urls', namespace='crm')),
    path('proposal/', include('orcamentos.proposal.urls', namespace='proposal')),
    path('admin/', admin.site.urls),
]
import imghdr
from wsgiref.util import FileWrapper

from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import (
    HttpResponse,
    HttpResponsePermanentRedirect,
    StreamingHttpResponse,
)
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.decorators import classonlymethod
from django.views.generic import View

from wagtail.images import get_image_model
from wagtail.images.exceptions import InvalidFilterSpecError
from wagtail.images.models import SourceImageIOError
from wagtail.images.utils import generate_signature, verify_signature
from wagtail.utils.sendfile import sendfile


def generate_image_url(image, filter_spec, viewname="wagtailimages_serve", key=None):
    """Build a signed URL that serves *image* rendered with *filter_spec*.

    The signature binds the image id and filter spec so the URL cannot be
    tampered with; the original filename is appended purely cosmetically
    (the serve view accepts it as ``filename`` but never uses it).
    """
    signature = generate_signature(image.id, filter_spec, key)
    url = reverse(viewname, args=(signature, image.id, filter_spec))
    url += image.file.name[len("original_images/") :]
    return url


class ServeView(View):
    """Serves (or redirects to) an image rendition identified by a signed URL."""

    model = get_image_model()
    action = "serve"   # either 'serve' (stream bytes) or 'redirect'
    key = None         # optional signing key override

    @classonlymethod
    def as_view(cls, **initkwargs):
        # Fail fast at configuration time on an unknown action, instead of
        # an AttributeError on the first request.
        if "action" in initkwargs:
            if initkwargs["action"] not in ["serve", "redirect"]:
                raise ImproperlyConfigured(
                    "ServeView action must be either 'serve' or 'redirect'"
                )
        return super(ServeView, cls).as_view(**initkwargs)

    def get(self, request, signature, image_id, filter_spec, filename=None):
        """Validate the signature, build the rendition and dispatch to
        ``serve`` or ``redirect``.  ``filename`` is accepted but unused."""
        if not verify_signature(
            signature.encode(), image_id, filter_spec, key=self.key
        ):
            raise PermissionDenied

        image = get_object_or_404(self.model, id=image_id)

        # Get/generate the rendition
        try:
            rendition = image.get_rendition(filter_spec)
        except SourceImageIOError:
            # 410 Gone: the source file backing this image is missing.
            return HttpResponse(
                "Source image file not found", content_type="text/plain", status=410
            )
        except InvalidFilterSpecError:
            return HttpResponse(
                "Invalid filter spec: " + filter_spec,
                content_type="text/plain",
                status=400,
            )

        return getattr(self, self.action)(rendition)

    def serve(self, rendition):
        # Open and serve the file, sniffing the format from the bytes.
        rendition.file.open("rb")
        image_format = imghdr.what(rendition.file)
        return StreamingHttpResponse(
            FileWrapper(rendition.file), content_type="image/" + image_format
        )

    def redirect(self, rendition):
        # Redirect to the file's public location
        return HttpResponsePermanentRedirect(rendition.url)


# Default function-style entry point for URLconfs.
serve = ServeView.as_view()


class SendFileView(ServeView):
    """Variant of ServeView that delegates delivery to django-sendfile."""

    backend = None  # optional sendfile backend override

    def serve(self, rendition):
        return sendfile(self.request, rendition.file.path, backend=self.backend)
d/svgdatashapes # Copyright 2016-8 Stephen C. Grubb stevegrubb@gmail.com MIT License # # This module provides date / time support for svgdatashapes # import svgdatashapes from svgdatashapes import p_dtformat import collections import datetime as d import time import calendar class AppDt_Error(Exception): pass def dateformat( format=None ): # set the format string to be used for parsing datetimes found in the input data # format codes explained here: https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior # Note that when they say zero-padded this refers to output only; parsing can handle eg. 3/4/2015 global p_dtformat if format == None: raise AppDt_Error( "dateformat() expecting 'format' arg" ) p_dtformat = format return True def toint( dateval=None ): # for the given date/time string in whatever format, return the int utime value # toint( "1970-01-01.00:00" ) == 0 if dateval == None: return None try: tt = d.datetime.strptime( dateval, p_dtformat ).timetuple() # parse out the components utime = calendar.timegm( tt ) except: raise AppDt_Error( "toint() got bad datetime value: " + str(dateval) + " (expecting format of " + p_dtformat + ")" ) return utime def make( utime, fmt=None ): # format the given dt value as per fmt... 
if utime == None: return None if fmt == None: fmt = p_dtformat try: # tt = time.gmtime( utime ) outstr = d.datetime.utcfromtimestamp(utime).strftime( fmt ) except: raise AppDt_Error( "nicedt error on utime: " + str(utime) + " and format: " + p_dtformat ) return outstr def datediff( val1, val2, result="days" ): # return integer number of days difference (dt1 - dt2) try: dt1 = d.datetime.strptime( val1, p_dtformat ) except: raise AppDt_Error( "datediff() invalid val1 arg: " + str(val1) ) try: dt2 = d.datetime.strptime( val2, p_dtformat ) except: raise AppDt_Error( "datediff() invalid val2 arg: " + str(val2) ) if result != "seconds": dt1 = dt1.replace( second=0, microsecond=0 ) dt2 = dt2.replace( second=0, microsecond=0 ) if result == "days": dt1 = dt1.replace( hour=0, minute=0 ) dt2 = dt2.replace( hour=0, minute=0 ) div = 86400 elif result == "hours": dt1 = dt1.replace( minute=0 ) dt2 = dt2.replace( minute=0 ) div = 3600 elif result == "minutes": div = 60 elif result == "seconds": div = 1 return int(calendar.timegm( dt1.timetuple() ) - calendar.timegm( dt2.timetuple() ) ) / div def daterange( column=None, datarows=None, nearest=None, inc=None, stubformat=None, inc2=None, stub2format=None, stub2place="append", stub2first=True ): dfindex = svgdatashapes._getdfindex( column, datarows ) if nearest == None: raise AppDt_Error( "findrange() requires a nearest= arg " ) if inc == None: inc = nearest # if inc != nearest: # if nearest == "year" and inc == "month": pass # elif nearest == "month" and inc == "day": pass # elif nearest == "day" and inc == "hour": pass # else: raise AppDt_Error( "findrange() invalid nearest= and inc= combination" ) if stubformat == None: stubformat = p_dtformat # find raw min and max dmin = 999999999999999999999999999; dmax = -999999999999999999999999999; for row in datarows: if dfindex == -1: strval = row[column] # dict rows else: strval = row[dfindex] utime = toint( strval ) if utime < dmin: dmin = utime if utime > dmax: dmax = utime dtmin = 
d.datetime.utcfromtimestamp( dmin ).replace( second=0, microsecond=0 ) # always zero out seconds and ms dtmax = d.datetime.utcfromtimestamp( dmax ).replace( second=0, microsecond=0 ) if nearest[-6:] != "minute": dtmin.replace( minute=0 ); dtmax.replace( minute=0 ) # usually zero out minutes if nearest == "year": dtmin = dtmin.replace( month=1, day=1, hour=0 ) yr = dtmax.year; dtma
x = dtmax.replace( year=yr+1, month=1, day=1, hour=0 ) elif nearest == "3month": newmon = ((dtmin.month / 4) * 3) + 1 dtmin = dtmin.replace( month=newmon, day=1, hour=0 ) newmon = (((dtmax.month / 4)+1) * 3) + 1 yr = dtmax.year if newmon >= 12: newmon = 1; yr += 1; dtmax = dtmax.replace( year=yr, month=newmon, day=1, hour=0 ) elif nearest == "month": dtmin = dtmin.replace( day=1, hour=0 ) mon = dtmax.month; yr = dtmax.year;
if mon == 12: dtmax = dtmax.replace( year=yr+1, month=1, day=1, hour=0 ) else: dtmax = dtmax.replace( month=mon+1, day=1, hour=0 ) elif nearest == "week" or nearest[:8] == "week_day": # week = Monday-based week; or week_dayN where N=1 for Tues; N=6 for Sun, etc wday = time.gmtime( dmin ).tm_wday # struct_time tm_wday convention is that 0 = monday dmin -= (wday*86400) # move timestamp back by necessary no. of days to reach opening week boundary (86400 sec per day) if nearest[:8] == "week_day": dmin -= ((7 - int(nearest[-1:])) * 86400) dtmin = d.datetime.utcfromtimestamp( dmin ).replace( hour=0 ) wday = 7 - time.gmtime( dmax ).tm_wday dmax += (wday*86400) # move timestamp fwd by necessary no. of days to reach the next week boundary if nearest[:8] == "week_day": dmax += ((7 - int(nearest[-1:])) * 86400) dtmax = d.datetime.utcfromtimestamp( dmax ).replace( hour=0 ) elif nearest == "day": dtmin = dtmin.replace( hour=0 ) dmax += 86400 # jump forward one day dtmax = d.datetime.utcfromtimestamp( dmax ).replace( hour=0 ) elif nearest in ["12hour", "6hour", "4hour", "3hour"]: nhr = int(nearest[:-4]) newhr = (dtmin.hour / nhr) * nhr dtmin = dtmin.replace( hour=newhr ) newhr = ((dtmax.hour / nhr)+1) * nhr day = dtmax.day if newhr >= 24: newhr = 0; day += 1 dtmax = dtmax.replace( day=day, hour=newhr ) elif nearest == "hour": dtmin = dtmin.replace( minute=0 ) hr = dtmax.hour if hr == 23: dmax += 3600 # jump forward one hour (there are 3600 sec per hour) dtmax = d.datetime.utcfromtimestamp( dmax ) # no replace necessary else: dtmax = dtmax.replace( hour=hr+1, minute=0 ) elif nearest in [ "30minute", "10minute" ]: nmin = int(nearest[:-6]) newmin = (dtmin.minute / nmin ) * nmin dtmin = dtmin.replace( minute=newmin ) newmin = ((dtmax.minute / nmin)+1) * nmin hr = dtmax.hour if newmin >= 60: newmin = 0; hr += 1 # date rollover not imp. dtmax = dtmax.replace( hour=hr, minute=newmin ) elif nearest == "minute": # dtmin is all set, just compute dtmax... 
newmin = dtmax.minute + 1 hr = dtmax.hour if newmin >= 60: newmin = 0; hr += 1 dtmax = dtmax.replace( hour=hr, minute=newmin ) else: raise AppDt_Error( "findrange got unrecognized nearest= arg: " + str(nearest) ) axmin = calendar.timegm( dtmin.timetuple() ) axmax = calendar.timegm( dtmax.timetuple() ) # at this point, dtmin and dtmax are the axis min and max as datetime type # and axmin and axmax are the axis min and max as int timestamps # now build a list of ready-to-render stubs with int positions... # will eventually add options for month rollover, year rollover, day rollover, etc. stublist = [] iloop = 0 dtcur = dtmin utime = axmin stub = dtcur.strftime( stubformat ) # do the first stub if inc2 != None and stub2first == True: stub2 = dtcur.strftime( stub2format ) if stub2place == "prepend": stub = stub2 + stub elif stub2place == "replace": stub = stub2 else: stub = stub + stub2 stublist.append( [utime, stub] ) while iloop < 500: # sanity backstop yr = dtcur.year mon = dtcur.month day = dtcur.day if inc == "month": if mon == 12: dtcur = dtcur.replace( year=yr+1, month=
"""tictactoe URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.1/topics/http/urls/ Examples: Function views 1. Add a
n import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django
.contrib import admin from django.urls import path, include from django.conf.urls.static import static from tictactoe import settings urlpatterns = [ path('admin/', admin.site.urls), path('tictactoe/', include('tictactoe.game.urls'), name='game'), ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) # serving static files like this should not be done in production
# testing/schema.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import exclusions
from .. import schema, event
from . import config

__all__ = 'Table', 'Column',

# Extra keyword arguments merged into every Table built through this module;
# test suites may populate this mapping.
table_options = {}


def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks.

    Keyword arguments beginning with ``test_`` are stripped off and treated
    as test directives (e.g. ``test_needs_fk``); everything else is passed
    through to ``schema.Table``.
    """
    test_opts = dict([(k, kw.pop(k)) for k in list(kw)
                      if k.startswith('test_')])

    kw.update(table_options)

    if exclusions.against(config._current, 'mysql'):
        # Pick a MySQL storage engine: InnoDB when the test needs FKs or
        # transactions, otherwise the simpler MyISAM.
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'
            else:
                kw['mysql_engine'] = 'MyISAM'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if exclusions.against(config._current, 'firebird'):
        table_name = args[0]
        unpack = (config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.foreign_keys]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                # Self-referential FK on Firebird: default to CASCADE rules.
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    return schema.Table(*args, **kw)


def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks.

    Strips ``test_*`` keyword directives and honours
    ``test_needs_autoincrement`` on primary-key columns.
    """
    test_opts = dict([(k, kw.pop(k)) for k in list(kw)
                      if k.startswith('test_')])

    if not config.requirements.foreign_key_ddl.enabled_for_config(config):
        # Dialect cannot emit FK DDL: silently drop ForeignKey constructs.
        args = [arg for arg in args
                if not isinstance(arg, schema.ForeignKey)]

    col = schema.Column(*args, **kw)
    if test_opts.get('test_needs_autoincrement', False) and \
            kw.get('primary_key', False):

        if col.default is None and col.server_default is None:
            col.autoincrement = True

        # allow any test suite to pick up on this
        col.info['test_needs_autoincrement'] = True

        # hardcoded rule for firebird, oracle; this should
        # be moved out
        if exclusions.against(config._current, 'firebird', 'oracle'):
            def add_seq(c, tbl):
                # Attach an optional Sequence once the column joins a table,
                # since the sequence name needs the table name.
                c._init_items(
                    schema.Sequence(_truncate_name(
                        config.db.dialect, tbl.name + '_' + c.name + '_seq'),
                        optional=True)
                )
            event.listen(col, 'after_parent_attach', add_seq, propagate=True)
    return col


def _truncate_name(dialect, name):
    """Shorten *name* to the dialect's identifier limit, appending a short
    hash suffix to keep truncated names distinct.

    NOTE(review): ``hash()`` on str is randomized per process on Python 3
    (PYTHONHASHSEED), so the suffix is not stable across runs — fine for
    per-process test fixtures, but confirm nothing persists these names.
    """
    if len(name) > dialect.max_identifier_length:
        return name[0:max(dialect.max_identifier_length - 6, 0)] + \
            "_" + hex(hash(name) % 64)[2:]
    else:
        return name
# -*- encoding: utf-8 -*-
import argparse
import sys
import time
import traceback
from hashlib import md5

import mailchimp_marketing as MailchimpMarketing
import requests
from consolemsg import step, error, success
from erppeek import Client
# BUG FIX: ApiClientError was referenced in an except clause below but never
# imported, which turned any Mailchimp API failure into a NameError.
from mailchimp_marketing.api_client import ApiClientError

import configdb

ERP_CLIENT = Client(**configdb.erppeek)
MAILCHIMP_CLIENT = MailchimpMarketing.Client(
    dict(api_key=configdb.MAILCHIMP_APIKEY,
         server=configdb.MAILCHIMP_SERVER_PREFIX)
)


def get_member_category_id():
    """Return the ERP id of the 'member' partner category.

    Resolves the semantic id ``res_partner_category_soci`` of module
    ``som_partner_account`` via ``ir.model.data``; returns None when the
    reference does not exist.
    """
    module = 'som_partner_account'
    semantic_id = 'res_partner_category_soci'
    IrModelData = ERP_CLIENT.model('ir.model.data')
    member_category_relation = IrModelData.get_object_reference(
        module, semantic_id
    )
    if member_category_relation:
        return member_category_relation[-1]


def get_not_members_email_list():
    """Collect e-mail addresses of partners with an 'S%' ref that are NOT
    tagged with the member category (i.e. ex-members)."""
    Soci = ERP_CLIENT.model('somenergia.soci')
    ResPartnerAddress = ERP_CLIENT.model('res.partner.address')

    category_id = get_member_category_id()
    not_members = Soci.search([
        ('category_id', 'not in', [category_id]),
        ('ref', 'like', 'S%')
    ])
    not_members_partner_ids = [
        soci['partner_id'][0]
        for soci in Soci.read(not_members, ['partner_id'])
    ]
    address_list = ResPartnerAddress.search(
        [('partner_id', 'in', not_members_partner_ids)]
    )
    emails_list = [
        address.get('email', 'not found')
        for address in ResPartnerAddress.read(address_list, ['email'])
    ]
    return emails_list


def get_mailchimp_list_id(list_name):
    """Return the Mailchimp id of the audience named *list_name*.

    Raises Exception when no audience with that name exists.
    """
    all_lists = MAILCHIMP_CLIENT.lists.get_all_lists(
        fields=['lists.id,lists.name'],
        count=100
    )['lists']
    for l in all_lists:
        if l['name'] == list_name:
            return l['id']
    raise Exception("List: <{}> not found".format(list_name))


def get_subscriber_hash(email):
    """Return the MD5 hash Mailchimp uses to identify a list member
    (MD5 of the lowercased address)."""
    # BUG FIX: md5() requires bytes on Python 3; the original passed a str,
    # raising TypeError for every address.
    return md5(email.lower().encode('utf-8')).hexdigest()


def archive_members_from_list(list_name, email_list):
    """Archive every address in *email_list* from audience *list_name* with
    one Mailchimp batch request; poll until the batch finishes and return
    the raw (bytes) batch-summary body."""
    list_id = get_mailchimp_list_id(list_name)
    operations = [
        {
            "method": "DELETE",
            "path": "/lists/{list_id}/members/{subscriber_hash}".format(
                list_id=list_id,
                subscriber_hash=get_subscriber_hash(email)
            ),
            "operation_id": email,
        }
        for email in email_list
    ]
    payload = {"operations": operations}
    try:
        response = MAILCHIMP_CLIENT.batches.start(payload)
    except ApiClientError as e:
        # BUG FIX: the exception was previously bound to the name ``error``,
        # shadowing consolemsg.error and then "calling" the exception object.
        msg = "An error occurred an archiving batch request, reason: {}"
        error(msg.format(e.text))
    else:
        batch_id = response['id']
        # Poll until Mailchimp reports the batch as finished.
        while response['status'] != 'finished':
            time.sleep(2)
            response = MAILCHIMP_CLIENT.batches.status(batch_id)
        step("Archived operation finished!!")
        step("Total operations: {}, finished operations: {}, errored operations: {}".format(
            response['total_operations'],
            response['finished_operations'],
            response['errored_operations']
        ))
        result_summary = requests.get(response['response_body_url'])
        result_summary.raise_for_status()
        return result_summary.content


def archieve_members_in_list(list_name):
    """Archive all ex-member addresses from audience *list_name*.

    (Function name kept as-is — historical typo included — for backwards
    compatibility with existing callers.)
    """
    email_list = get_not_members_email_list()
    result = archive_members_from_list(list_name, email_list)
    return result


def main(list_name, output):
    """Run the archiving and store the batch summary in file *output*."""
    result = archieve_members_in_list(list_name.strip())
    # BUG FIX: ``result`` is the raw bytes body from requests' ``.content``;
    # write in binary mode (the original text-mode write raises TypeError
    # on Python 3).
    with open(output, 'wb') as f:
        f.write(result)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Archivieren Sie E-Mails in großen Mengen'
    )
    parser.add_argument(
        '--list',
        dest='list_name',
        required=True,
        help="nom de la llista de mailchimp"
    )
    parser.add_argument(
        '--output',
        dest='output',
        required=True,
        help="Fitxer de sortida amb els resultats"
    )
    args = parser.parse_args()
    try:
        main(args.list_name, args.output)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        error("El proceso no ha finalizado correctamente: {}", str(e))
    else:
        success("Script finalizado")
# -*- coding: utf-8 -*-

"""
***************************************************************************
    EditScriptDialog.py
    ---------------------
    Date                 : December 2012
    Copyright            : (C) 2012 by Alexander Bruy
    Email                : alexander dot bruy at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""
from processing.modeler.ModelerUtils import ModelerUtils

__author__ = 'Alexander Bruy'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Alexander Bruy'

# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

import codecs
import sys
import json

from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qsci import *
from qgis.core import *
from qgis.utils import iface

from processing.gui.ParametersDialog import ParametersDialog
from processing.gui.HelpEditionDialog import HelpEditionDialog
from processing.algs.r.RAlgorithm import RAlgorithm
from processing.algs.r.RUtils import RUtils
from processing.script.ScriptAlgorithm import ScriptAlgorithm
from processing.script.ScriptUtils import ScriptUtils
from processing.ui.ui_DlgScriptEditor import Ui_DlgScriptEditor

import processing.resources_rc


class ScriptEditorDialog(QDialog, Ui_DlgScriptEditor):
    """Editor dialog for Processing script algorithms (Python or R)."""

    # Values for ``algType``: which script language this editor instance hosts.
    SCRIPT_PYTHON = 0
    SCRIPT_R = 1

    # True while the buffer has unsaved modifications (mirrored in the
    # enabled state of the Save button).
    hasChanged = False

    def __init__(self, algType, alg):
        """Build the editor.

        algType -- SCRIPT_PYTHON or SCRIPT_R
        alg     -- an existing algorithm to edit, or None for a new script
        """
        QDialog.__init__(self)
        self.setupUi(self)

        self.setWindowFlags(Qt.WindowMinimizeButtonHint |
                            Qt.WindowMaximizeButtonHint |
                            Qt.WindowCloseButtonHint)

        # Set icons
        self.btnSave.setIcon(
            QgsApplication.getThemeIcon('/mActionFileSave.svg'))
        self.btnSaveAs.setIcon(
            QgsApplication.getThemeIcon('/mActionFileSaveAs.svg'))
        self.btnEditHelp.setIcon(QIcon(':/processing/images/edithelp.png'))
        self.btnRun.setIcon(QIcon(':/processing/images/runalgorithm.png'))
        self.btnCut.setIcon(QgsApplication.getThemeIcon('/mActionEditCut.png'))
        self.btnCopy.setIcon(
            QgsApplication.getThemeIcon('/mActionEditCopy.png'))
        self.btnPaste.setIcon(
            QgsApplication.getThemeIcon('/mActionEditPaste.png'))
        self.btnUndo.setIcon(QgsApplication.getThemeIcon('/mActionUndo.png'))
        self.btnRedo.setIcon(QgsApplication.getThemeIcon('/mActionRedo.png'))

        # Connect signals and slots
        self.btnSave.clicked.connect(self.save)
        self.btnSaveAs.clicked.connect(self.saveAs)
        self.btnEditHelp.clicked.connect(self.editHelp)
        self.btnRun.clicked.connect(self.runAlgorithm)
        self.btnCut.clicked.connect(self.editor.cut)
        self.btnCopy.clicked.connect(self.editor.copy)
        self.btnPaste.clicked.connect(self.editor.paste)
        self.btnUndo.clicked.connect(self.editor.undo)
        self.btnRedo.clicked.connect(self.editor.redo)
        # Any edit marks the buffer dirty.
        self.editor.textChanged.connect(lambda: self.setHasChanged(True))

        self.alg = alg
        self.algType = algType

        if self.alg is not None:
            # Editing an existing algorithm: preload file name and source.
            self.filename = self.alg.descriptionFile
            self.editor.setText(self.alg.script)
        else:
            self.filename = None

        self.update = False   # set True once a save succeeds
        self.help = None      # pending help descriptions for unsaved scripts

        self.setHasChanged(False)

        self.editor.setLexerType(self.algType)

    def editHelp(self):
        """Open the help-edition dialog for the current script."""
        if self.alg is None:
            # Not saved yet: build a transient algorithm from the editor text.
            if self.algType == self.SCRIPT_PYTHON:
                alg = ScriptAlgorithm(None, unicode(self.editor.text()))
            elif self.algType == self.SCRIPT_R:
                alg = RAlgorithm(None, unicode(self.editor.text()))
        else:
            alg = self.alg

        dlg = HelpEditionDialog(alg)
        dlg.exec_()

        # We store the description string in case there were not saved
        # because there was no filename defined yet
        if self.alg is None and dlg.descriptions:
            self.help = dlg.descriptions

    def save(self):
        """Save to the current file (prompting only if none is set yet)."""
        self.saveScript(False)

    def saveAs(self):
        """Always prompt for a file name, then save."""
        self.saveScript(True)

    def saveScript(self, saveAs):
        """Write the editor contents to disk.

        saveAs -- when True, always ask for a (new) file name
        """
        if self.filename is None or saveAs:
            # Choose target folder and filter based on the script language.
            if self.algType == self.SCRIPT_PYTHON:
                scriptDir = ScriptUtils.scriptsFolder()
                filterName = self.tr('Python scripts (*.py)')
            elif self.algType == self.SCRIPT_R:
                scriptDir = RUtils.RScriptsFolder()
                filterName = self.tr('Processing R script (*.rsx)')

            self.filename = unicode(QFileDialog.getSaveFileName(
                self, self.tr('Save script'), scriptDir, filterName))

        if self.filename:
            # Enforce the proper extension for the script language.
            if self.algType == self.SCRIPT_PYTHON \
                    and not self.filename.lower().endswith('.py'):
                self.filename += '.py'
            if self.algType == self.SCRIPT_R \
                    and not self.filename.lower().endswith('.rsx'):
                self.filename += '.rsx'

            text = unicode(self.editor.text())
            if self.alg is not None:
                self.alg.script = text
            try:
                with codecs.open(self.filename, 'w',
                                 encoding='utf-8') as fout:
                    fout.write(text)
            except IOError:
                QMessageBox.warning(
                    self, self.tr('I/O error'),
                    self.tr('Unable to save edits. Reason:\n %s')
                    % unicode(sys.exc_info()[1]))
                return
            self.update = True

            # If help strings were defined before saving the script for
            # the first time, we do it here
            if self.help:
                with open(self.filename + '.help', 'w') as f:
                    json.dump(self.help, f)
                self.help = None
            self.setHasChanged(False)
        else:
            # Dialog cancelled: keep "no file" state.
            self.filename = None

    def setHasChanged(self, hasChanged):
        """Track dirty state and mirror it on the Save button."""
        self.hasChanged = hasChanged
        self.btnSave.setEnabled(hasChanged)

    def runAlgorithm(self):
        """Build an algorithm from the current editor text and execute it
        through its parameters dialog."""
        if self.algType == self.SCRIPT_PYTHON:
            alg = ScriptAlgorithm(None, unicode(self.editor.text()))
            alg.provider = ModelerUtils.providers['script']
        if self.algType == self.SCRIPT_R:
            alg = RAlgorithm(None, unicode(self.editor.text()))
            alg.provider = ModelerUtils.providers['r']

        dlg = alg.getCustomParametersDialog()
        if not dlg:
            dlg = ParametersDialog(alg)

        # Remember the active map tool so the algorithm dialog cannot
        # permanently hijack the canvas.
        canvas = iface.mapCanvas()
        prevMapTool = canvas.mapTool()

        dlg.show()
        dlg.exec_()

        if canvas.mapTool() != prevMapTool:
            try:
                canvas.mapTool().reset()
            except:
                pass
            canvas.setMapTool(prevMapTool)
return Choice(msg + ' (y/n)', 'ny') def SaveToFile(msg, scriptname, tmpname): if not YesNoQuestion('%s Save script to file?' % msg): return scriptname = os.path.join(os.getcwd(), scriptname) sys.stdout.write('Enter filename (default %s):' % scriptname) filename = sys.stdin.readline().strip() if filename == '':
filename = scriptname scriptdata = open(tmpname).read() open(filename, 'w').write(scriptdata) res, scripts = sieve.listscripts() if res != 'OK': return res for name, active in scripts: if name == scriptname: res, scriptdata = sieve.getscript(scriptname) if res != 'OK': return res break else: if not YesNoQuestion('Sc
ript not on server. Create new?'): return 'OK' # else: script will be created when saving scriptdata = '' import tempfile filename = tempfile.mktemp('.siv') open(filename, 'w').write(scriptdata) editor = os.environ.get('EDITOR', 'vi') while 1: res = os.system('%s %s' % (editor, filename)) if res: # error editing if not YesNoQuestion('Editor returned failture. Continue?'): os.remove(filename) return SUPPRESS else: continue # re-edit # else: editing okay while 1: scriptdata = open(filename).read() res = sieve.putscript(scriptname, scriptdata) if res == 'OK': return res # res is NO, BYE print res, sieve.response_text or sieve.response_code if res == 'NO': res = Choice('Upload failed. (E)dit/(R)etry/(A)bort?', 'era') if res == 0: break # finish inner loop, return to 'edit' elif res == 1: # retry upload continue SaveToFile('', scriptname, filename) else: # BYE SaveToFile('Server closed connection.', scriptname, filename) print 'Deleting tempfile.' os.remove(filename) return SUPPRESS raise "Should not come here." if os.name != 'posix': del cmd_edit def cmd_delete(scriptname): """delete <name> - delete script.""" return sieve.deletescript(scriptname) def cmd_activate(scriptname): """activate <name> - set a script as the active script""" return sieve.setactive(scriptname) def cmd_deactivate(): """deactivate - deactivate all scripts""" return sieve.setactive('') def cmd_quit(*args): """quit - quit""" print 'quitting.' 
if sieve: try: # this mysteriously fails at times sieve.logout() except: pass raise SystemExit() # find all commands (using introspection) # NB: edit os only available when running on a posix system __commands = dict([c for c in inspect.getmembers(sys.modules[__name__], inspect.isfunction) if c[0].startswith('cmd_') ]) # command aliases/shortcuts __command_map = { '?': 'help', 'h': 'help', 'q': 'quit', 'l': 'list', 'del': 'delete', } def shell(auth, user=None, passwd=None, realm=None, authmech='', server='', use_tls=0, port=managesieve.SIEVE_PORT): """Main part""" def cmd_loop(): """Command loop: read and execute lines from stdin.""" global sieve while 1: sys.stdout.write('> ') line = sys.stdin.readline() if not line: # EOF/control-d cmd_quit() break line = line.strip() if not line: continue # todo: parse command line correctly line = line.split() cmd = __command_map.get(line[0], line[0]) cmdfunc = __commands.get('cmd_%s' % cmd) if not cmdfunc: print 'Unknown command', repr(cmd) else: if __debug__: result = None try: result = cmdfunc(*line[1:]) except TypeError, e: if str(e).startswith('%s() takes' % cmdfunc.__name__): print 'Wrong number of arguments:' print '\t', cmdfunc.__doc__ continue else: raise assert result != None if result == 'OK': print result elif result == SUPPRESS: # suppress 'OK' for some commands (list, get) pass else: print result, sieve.response_text or sieve.response_code if result == "BYE": # quit when server send BYE cmd_quit() global sieve try: print 'connecting to', server try: if not auth: auth = getpass.getuser() if not user: user = auth if not passwd: passwd = getpass.getpass() except EOFError: # Ctrl-D pressed print # clear line return sieve = managesieve.MANAGESIEVE(server, port=port, use_tls=use_tls) print 'Server capabilities:', for c in sieve.capabilities: print c, print try: if not authmech: # auto-select best method available res = sieve.login(authmech, user, passwd) elif authmech.upper() == 'LOGIN': # LOGIN does not support 
authenticator res = sieve.authenticate(authmech, user, passwd) else: res = sieve.authenticate(authmech, auth, user, passwd) except sieve.error, e: print "Authenticate error: %s" % e cmd_quit() if res != 'OK': print res, sieve.response_text or sieve.response_code cmd_quit() cmd_loop() except KeyboardInterrupt: print cmd_quit() def main(): """Parse options and call interactive shell.""" try: from optparse import OptionParser except ImportError: from optik import OptionParser parser = OptionParser('Usage: %prog [options] server') parser.add_option('--authname', help= "The user to use for authentication " "(defaults to current user).") parser.add_option('--user', dest='username', help = "The authorization name to request; " "by default, derived from the " "authentication credentials.") parser.add_option('--passwd', help = "The password to use.") parser.add_option('--realm', help= "The realm to attempt authentication in.") parser.add_option('--auth-mech', default="", help= "The SASL authentication mechanism to use " "(default: auto select; available: %s)." % ', '.join(managesieve.AUTHMECHS)) parser.add_option('--script', '--script-file', help= "Instead of working interactively, run " "commands from SCRIPT, and exit when done.") parser.add_option('--use-tls', '--tls', action="store_true", help="Switch to TLS if server supports it.") parser.add_option('--port', type="int", default=managesieve.SIEVE_PORT, help="port number to connect to (default: %default)") parser.add_option('-v', '--verbose', action='count', default=0, help='Be verbose. May be given several times to increase verbosity') parser.add_option('-x', '--password-command', dest='password_command', help="Shell command to execute to get the password") config_file = os.environ.get("MANAGESIEVE_CONFIG") if config_file: read_config_defaults(config_file, parser) options, args = p
er knots. def getinttex(self): """ Same as above, but we include the extremal points "once". """ return self.t[(self.k):-(self.k)].copy() def knotstats(self): """ Returns a string describing the knot spacing """ knots = self.getinttex() spacings = knots[1:] - knots[:-1] return " ".join(["%.1f" % (spacing) for spacing in sorted(spacings)]) def setintt(self, intt): """ Give me some internal knots (not even containing the datapoints extrema), and I build the correct total knot vector t for you. I add the extremas, with appropriate multiplicity. @TODO: check consistency of intt with datapoints ! """ # Ok a quick test for consisency : if len(intt) == 0: raise RuntimeError("Your list of internal knots is empty !") if not self.datapoints.jds[0] < intt[0]: raise RuntimeError("Ouch.") if not self.datapoints.jds[-1] > intt[-1]: raise RuntimeError("Ouch.") #assert self.datapoints.jds[0] < intt[0] # should we put <= here ? #assert self.datapoints.jds[-1] > intt[-1] pro = self.datapoints.jds[0] * np.ones(self.k+1) post = self.datapoints.jds[-1] * np.ones(self.k+1) self.t = np.concatenate((pro, intt, post)) def setinttex(self, inttex): """ Including extremal knots """ #pro = self.datapoints.jds[0] * np.ones(self.k) #post = self.datapoints.jds[-1] * np.ones(self.k) pro = inttex[0] * np.ones(self.k) post = inttex[-1] * np.ones(self.k) self.t = np.concatenate((pro, inttex, post)) def getnint(self): """ Returns the number of intervals """ return(len(self.t) - 2* (self.k + 1) + 1) # Similar stuff about coeffs : def getc(self, m=0): """ Returns all active coefficients of the spline, the ones it makes sense to play with. The length of this guy is number of intervals - 2 ! """ return self.c[m:-(self.k + 1 + m)].copy() def setc(self, c, m=0): """ Puts the coeffs from getc back into place. 
""" self.c[m:-(self.k + 1 + m)] = c def getco(self, m=0): """ Same as getc, but reorders the coeffs in a way more suited for nonlinear optimization """ c = self.getc(m=m) mid = int(len(c)/2.0) return np.concatenate([c[mid:], c[:mid][::-1]]) def setco(self, c, m=0): """ The inverse of getco. """ mid = int(len(c)/2.0) self.setc(np.concatenate([c[mid+1:][::-1], c[:mid+1]]), m=m) def setcflat(self, c): """ Give me coeffs like those from getc(m=1), I will set the coeffs so that the spline extremas are flat (i.e. slope = 0). """ self.setc(c, m=1) self.c[0] = self.c[1] self.c[-(self.k + 2)] = self.c[-(self.k + 3)] def setcoflat(self, c): """ idem, but for reordered coeffs. """ mid = int(len(c)/2.0) self.setcflat(np.concatenate([c[mid:][::-1], c[:mid]])) def r2(self, nostab=True, nosquare=False): """ Evaluates the spline, compares it with the data points and returns a weighted sum of residuals r2. If nostab = False, stab points are included This is precisely the same r2 as is used by splrep for the fit, and thus the same value as returned by optc ! This method can set lastr2nostab, so be sure to end any optimization with it. If nostab = True, we don't count the stab points """ if nostab == True : splinemags = self.eval(nostab = True, jds = None) errs = self.datapoints.mags[self.datapoints.mask] - splinemags werrs = errs/self.datapoints.magerrs[self.datapoints.mask] if nosquare: r2 = np.sum(np.fabs(werrs)) else: r2 = np.sum(werrs * werrs) self.lastr2nostab = r2 else : splinemags = self.eval(nostab = False, jds = None) errs = self.datapoints.mags - splinemags werrs = errs/self.datapoints.magerrs if nosquare: r2 = np.sum(np.fabs(werrs)) else: r2 = np.sum(werrs * werrs) self.lastr2stab = r2 return r2 #if red: # return chi2/len(self.datapoints.jds) def tv(self): """ Returns the total variation of the spline. Simple ! http://en.wikipedia.org/wiki/Total_variation """ # Method 1 : linear approximation ptd = 5 # point density in days ... this is enough ! 
a = self.t[0] b = self.t[-1] x = np.linspace(a, b, int((b-a) * ptd)) y = self.eval(jds = x) tv1 = np.sum(np.fabs(y[1:] - y[:-1])) #print "TV1 : %f" % (tv1) return tv1 # Method 2 : integrating the absolute value of the derivative ... hmm, splint does not integrate derivatives .. #si.splev(jds, (self.t, self.c, self.k)) def optc(self): """ Optimize the coeffs, don't touch the knots This is the fast guy, one reason to use splines :-) Returns the chi2 in case you want it (including stabilization points) ! Sets lastr2stab, but not lastr2nostab ! """ out = si.splrep(self.datapoints.jds, self.datapoints.mags, w=1.0/self.datapoints.magerrs, xb=None, xe=None, k=self.k, task=-1, s=None, t=self.getintt(), full_output=1, per=0, quiet=1) # We check if it worked : if not out[2] <= 0: raise RuntimeError("Problem with spline representation, message = %s" % (out[3])) self.c = out[0][1] # save the coeffs #import matplotlib.pyplot as plt #plt.plot(self.datapoints.jds, self.datapoints.magerrs) #plt.show() self.lastr2stab = out[1] return out[1] def optcflat(self, verbose = False): """ Optimizes only the "border coeffs" so to get zero slope at the extrema Run optc() first ... This has to be done with an iterative optimizer """ full = self.getc(m=1) inip = self.getc(m=1)[[0, 1, -2, -1]] # 4 coeffs def setp(p): full[[0, 1, -2, -1]] = p self.setcflat(full) if verbose: print "Starting flat coeff optimization ..." print "Initial pars : ", inip def errorfct(p): setp(p) return self.r2(nostab=False) # To get the same as optc would return ! minout = spopt.fmin_powell(errorfct, inip, full_output=1, disp=verbose) popt = minout[0] if popt.shape == (): popt = np.array([popt]) if verbose: print "Optimal pars : ", popt setp(popt) return self.r2(nostab=False) # We include the stab points, like optc does. # This last line also updates self.lastr2 ... def eval(self, jds = N
one, nostab = True): """ Evaluates the spline at jds, and returns the corresponding mags-like vector. By default,
we exclude the stabilization points ! If jds is not None, we use them instead of our own jds (in this case excludestab makes no sense) """ if jds is None: if nostab: jds = self.datapoints.jds[self.datapoints.mask] else: jds = self.datapoints.jds else: # A minimal check for non-extrapolation condition should go here ! pass fitmags = si.splev(jds, (self.t, self.c, self.k)) # By default ext=0 : we do return extrapolated values return fitmags def display(self, showbounds = True, showdatapoints = True, showerrorbars=True, figsize=(16,8)): """ A display of the spline object, with knots, jds, stab points, etc. For debugging and checks. """ fig = plt.figure(figsize=figsize) if showdatapoints: if showerrorbars: mask = self.datapoints.mask plt.errorbar(self.datapoints.jds[mask], self.datapoints.mags[mask], yerr=self.datapoints.magerrs[mask], linestyle="None", color="blue") if not np.alltrue(mask): mask = mask == False plt.errorbar(self.datapoints.jds[mask], self.datapoints.mags[mask], yerr=self.datapoints.magerrs[mask], linestyle="None", color="gray") else: plt.plot(self.datapoints.jds, self.datapoints.mags, "b,") if (np.any(self.t) != None) : if getattr(self, "showknots", True) == True: for knot in self.t: plt.axvline(knot, color="gray") # We draw the spline : xs = np.linspace(self.datapoints.jds[0], self.datapoints.jds[-1], 1000) ys = self.eval(jds = xs) plt.plot(xs, ys, "b-") if showbounds : if (np.any(self.l) != None) and (np.any(self.u) != None) : for l in self.l: plt.axvline(l, color="blue", dashes=(4, 4)) for u in self.u: plt.axvline(u, color="red", dashes=(5, 5)) axes = plt.gca() axes.set_ylim(axes.get_ylim()[::-1]) plt.show() # Some functions to interact directly with lightcurves : def fit(
# -*- coding: utf-8 -*-

"""
***************************************************************************
    FixedDistanceBuffer.py
    ---------------------
    Date                 : August 2012
    Copyright            : (C) 2012 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'

# This will get replaced with a git SHA1 when you do a git archive

__revision__ = '$Format:%H$'

from PyQt4.QtCore import *
from qgis.core import *
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterNumber
from processing.core.outputs import OutputVector
from processing.tools import dataobjects

import Buffer as buff


class FixedDistanceBuffer(GeoAlgorithm):
    """Buffer every feature of a vector layer by one fixed distance."""

    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    FIELD = 'FIELD'
    DISTANCE = 'DISTANCE'
    SEGMENTS = 'SEGMENTS'
    DISSOLVE = 'DISSOLVE'

    def defineCharacteristics(self):
        """Declare the algorithm's name, group, parameters and output."""
        self.name = 'Fixed distance buffer'
        self.group = 'Vector geometry tools'
        self.addParameter(ParameterVector(self.INPUT, 'Input layer',
                          [ParameterVector.VECTOR_TYPE_ANY]))
        self.addParameter(ParameterNumber(self.DISTANCE, 'Distance',
                          default=10.0))
        self.addParameter(ParameterNumber(self.SEGMENTS, 'Segments', 1,
                          default=5))
        self.addParameter(ParameterBoolean(self.DISSOLVE, 'Dissolve result',
                          False))
        self.addOutput(OutputVector(self.OUTPUT, 'Buffer'))

    def processAlgorithm(self, progress):
        """Buffer the input layer and write polygons to the output writer."""
        source_layer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.INPUT))
        buffer_distance = self.getParameterValue(self.DISTANCE)
        dissolve_result = self.getParameterValue(self.DISSOLVE)
        segment_count = int(self.getParameterValue(self.SEGMENTS))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            source_layer.pendingFields().toList(), QGis.WKBPolygon,
            source_layer.crs())

        # field=None and useField=False: a constant distance is applied to
        # every feature.
        buff.buffering(progress, writer, buffer_distance, None, False,
                       source_layer, dissolve_result, segment_count)
yield self.get_teams_async() @ndb.toplevel def prepTeam
sMatches(self): yield self.get_matches_async(), self.get_teams_async() @property def matchstats(self): if self.details is None: return None else: return self.details
.matchstats @property def rankings(self): if self.details is None: return None else: return self.details.rankings @property def location(self): if self._location is None: split_location = [] if self.city: split_location.append(self.city) if self.state_prov: if self.postalcode: split_location.append(self.state_prov + ' ' + self.postalcode) else: split_location.append(self.state_prov) if self.country: split_location.append(self.country) self._location = ', '.join(split_location) return self._location @property def city_state_country(self): if not self._city_state_country and self.nl: self._city_state_country = self.nl.city_state_country if not self._city_state_country: location_parts = [] if self.city: location_parts.append(self.city) if self.state_prov: location_parts.append(self.state_prov) if self.country: country = self.country if self.country == 'US': country = 'USA' location_parts.append(country) self._city_state_country = ', '.join(location_parts) return self._city_state_country @property def nl(self): return self.normalized_location @property def venue_or_venue_from_address(self): if self.venue: return self.venue else: try: return self.venue_address.split('\r\n')[0] except: return None @property def venue_address_safe(self): """ Construct (not detailed) venue address if detailed venue address doesn't exist """ if not self.venue_address: if not self.venue or not self.location: self._venue_address_safe = None else: self._venue_address_safe = "{}\n{}".format(self.venue.encode('utf-8'), self.location.encode('utf-8')) else: self._venue_address_safe = self.venue_address.replace('\r\n', '\n') return self._venue_address_safe @property def webcast(self): """ Lazy load parsing webcast JSON """ if self._webcast is None: try: self._webcast = json.loads(self.webcast_json) # Sort firstinspires channels to the front, keep the order of the rest self._webcast = sorted(self._webcast, key=lambda w: 0 if (w['type'] == 'twitch' and w['channel'].startswith('firstinspires')) else 
1) except Exception, e: self._webcast = None return self._webcast @property def webcast_status(self): from helpers.webcast_online_helper import WebcastOnlineHelper WebcastOnlineHelper.add_online_status(self.current_webcasts) overall_status = 'offline' for webcast in self.current_webcasts: status = webcast.get('status') if status == 'online': overall_status = 'online' break elif status == 'unknown': overall_status = 'unknown' return overall_status @property def current_webcasts(self): if not self.webcast or not self.within_a_day: return [] # Filter by date current_webcasts = [] for webcast in self.webcast: if 'date' in webcast: webcast_datetime = datetime.datetime.strptime(webcast['date'], "%Y-%m-%d") if self.local_time().date() == webcast_datetime.date(): current_webcasts.append(webcast) else: current_webcasts.append(webcast) return current_webcasts @property def online_webcasts(self): current_webcasts = self.current_webcasts from helpers.webcast_online_helper import WebcastOnlineHelper WebcastOnlineHelper.add_online_status(current_webcasts) return filter(lambda x: x.get('status', '') != 'offline', current_webcasts if current_webcasts else []) @property def has_first_official_webcast(self): return any([('firstinspires' in w['channel']) for w in self.webcast]) if self.webcast else False @property def division_keys_json(self): keys = [key.id() for key in self.divisions] return json.dumps(keys) @property def key_name(self): """ Returns the string of the key_name of the Event object before writing it. """ return str(self.year) + self.event_short @property def facebook_event_url(self): """ Return a string of the Facebook Event URL. 
""" return "http://www.facebook.com/event.php?eid=%s" % self.facebook_eid @property def details_url(self): """ Returns the URL pattern for the link to this Event on TBA """ return "/event/%s" % self.key_name @property def gameday_url(self): """ Returns the URL pattern for the link to watch webcasts in Gameday """ if self.webcast: return "/gameday/{}".format(self.key_name) else: return None @property def hashtag(self): """ Return the hashtag used for the event. """ if self.custom_hashtag: return self.custom_hashtag else: return "frc" + self.event_short # Depreciated, still here to keep GAE clean. webcast_url = ndb.StringProperty(indexed=False) @classmethod def validate_key_name(self, event_key): key_name_regex = re.compile(r'^[1-9]\d{3}[a-z]+[0-9]{0,2}$') match = re.match(key_name_regex, event_key) return True if match else False @property def event_district_str(self): from database.district_query import DistrictQuery if self.district_key is None: return None district = DistrictQuery(self.district_key.id()).fetch() return district.display_name if district else None @property def event_district_abbrev(self): if self.district_key is None: return None else: return self.district_key.id()[4:] @property def event_district_key(self): if self.district_key is None: return None else: return self.district_key.id() @property def event_type_str(self): return EventType.type_names.get(self.event_type_enum) @property def display_name(self): return self.name if self.short_name is None else self.short_name @property def normalized_name(self): if self.event_type_enum == EventType.CMP_FINALS: if self.year >= 2017: return '{} {}'.format(self.city, 'Championship') else: return 'Championship' elif self.short_name and self.event_type_enum != EventType.FOC: if self.event_type_enum == EventType.OFFSEASON: return self.short_name else: return '{} {}'.format(self.short_name, EventType.short_type_names[self.event_type_enum]) else: return self.name @property def first_api_code(self): if 
self.first_code is None: return self.event_short.upper() return self.first_code.upper() @property def is_in_season(self): """ If the Event is of a regular season type. """ return self.event_type_enum in EventType.SEASON_EVENT_TYPES @property def is_offseason(self): """ 'Offseason' events include preseas
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson <endre.karlson@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from urllib.parse import parse_qs
from urllib.parse import urlparse

from designateclient import exceptions


def resolve_by_name(func, name, *args):
    """Resolve a human-readable name (e.g. "foo.com") to its resource ID.

    Uses the REST API's query support, filtering the collection on *name*.
    A value that already looks like a UUID is returned unchanged.

    :param func: list-style API call that accepts a ``criterion`` keyword
    :param name: name (or UUID) to resolve
    :param args: extra positional arguments forwarded to *func*
    :return: the matching resource ID
    :raises exceptions.NotFound: when nothing matches *name*
    :raises exceptions.NoUniqueMatch: when more than one resource matches
    """
    if uuidutils.is_uuid_like(name):
        return name

    # Positional arguments first, keyword last — conventional call order.
    results = func(*args, criterion={"name": "%s" % name})
    length = len(results)

    if length == 1:
        return results[0]["id"]
    elif length == 0:
        raise exceptions.NotFound("Name %s didn't resolve" % name)
    else:
        msg = "Multiple matches found for %s, please use ID instead." % name
        raise exceptions.NoUniqueMatch(msg)


def parse_query_from_url(url):
    """Extract the query parameters from a "next" URL returned by the API
    on collections.

    :param url: URL whose query string should be parsed
    :return: dict mapping each parameter name to its first value
    """
    values = parse_qs(urlparse(url).query)
    return {k: v[0] for k, v in values.items()}


def get_all(function, criterion=None, args=None):
    """Fetch every page of a paginated collection.

    :param function: function to be called to get data
    :param criterion: dict of filters to be applied (never modified)
    :param args: positional arguments to be given to the function
    :return: DesignateList() containing all pages merged together
    """
    # Work on a copy so the pagination markers added below never leak
    # into the caller's dict (the original code mutated it in place).
    criterion = dict(criterion) if criterion else {}
    args = args or []

    data = function(*args, criterion=criterion)
    returned_data = data
    while data.next_page:
        # Fold the server-provided "next" markers into the filter and
        # fetch the following page.
        criterion.update(data.next_link_criterion)
        data = function(*args, criterion=criterion)
        returned_data.extend(data)
    return returned_data
from .. import tool


def test_keygen():
    """Run tool.keygen() against stubbed-out key storage."""

    def fake_keyring_factory():
        # Start from the real implementations, then neutralise their side
        # effects so the test never touches disk or the OS keyring.
        real_wheel_keys, real_keyring = tool.get_keyring()

        class StubWheelKeys(real_wheel_keys):
            def save(self):
                # Skip persisting the key file.
                pass

        class StubKeyringModule:
            backend = real_keyring.backend

            @classmethod
            def get_keyring(cls):
                class InMemoryKeyring:
                    # Single stored password, kept in memory only.
                    pw = None

                    def set_password(self, a, b, c):
                        self.pw = c

                    def get_password(self, a, b):
                        return self.pw

                return InMemoryKeyring()

        return StubWheelKeys, StubKeyringModule

    tool.keygen(get_keyring=fake_keyring_factory)
# -*- test-case-name: foolscap.test.test_crypto -*-

"""SSL/TLS plumbing for foolscap.

Pulls certificate helper classes from Twisted when a new-enough version is
installed, otherwise falls back to a bundled copy of Divmod's sslverify
module. ``available`` flips to True only after every import has succeeded.
"""

available = False # hack to deal with half-broken imports in python <2.4

from OpenSSL import SSL

# we try to use ssl support classes from Twisted, if it is new enough. If
# not, we pull them from a local copy of sslverify. The funny '_ssl' import
# stuff is used to appease pyflakes, which otherwise complains that we're
# redefining an imported name.
from twisted.internet import ssl
if hasattr(ssl, "DistinguishedName"):
    # Twisted-2.5 will contain these names
    _ssl = ssl
    CertificateOptions = ssl.CertificateOptions
else:
    # but it hasn't been released yet (as of 16-Sep-2006). Without them, we
    # cannot use any encrypted Tubs. We fall back to using a private copy of
    # sslverify.py, copied from the Divmod tree.
    import sslverify
    _ssl = sslverify
    from sslverify import OpenSSLCertificateOptions as CertificateOptions

# Re-export the helper classes under stable names, whichever module
# actually supplied them.
DistinguishedName = _ssl.DistinguishedName
KeyPair = _ssl.KeyPair
Certificate = _ssl.Certificate
PrivateCertificate = _ssl.PrivateCertificate

from twisted.internet import error
if hasattr(error, "CertificateError"):
    # Twisted-2.4 contains this, and it is used by twisted.internet.ssl
    CertificateError = error.CertificateError
else:
    class CertificateError(Exception):
        """
        We did not find a certificate where we expected to find one.
        """

from foolscap import base32

peerFromTransport = Certificate.peerFromTransport

class MyOptions(CertificateOptions):
    # CertificateOptions subclass whose OpenSSL context accepts any peer
    # certificate with a valid signature chain (see alwaysValidate below).
    def _makeContext(self):
        ctx = CertificateOptions._makeContext(self)
        def alwaysValidate(conn, cert, errno, depth, preverify_ok):
            # This function is called to validate the certificate received by
            # the other end. OpenSSL calls it multiple times, each time it
            # see something funny, to ask if it should proceed.

            # We do not care about certificate authorities or revocation
            # lists, we just want to know that the certificate has a valid
            # signature and follow the chain back to one which is
            # self-signed. The TubID will be the digest of one of these
            # certificates. We need to protect against forged signatures, but
            # not the usual SSL concerns about invalid CAs or revoked
            # certificates.

            # these constants are from openssl-0.9.7g/crypto/x509/x509_vfy.h
            # and do not appear to be exposed by pyopenssl. Ick. TODO. We
            # could just always return '1' here (ignoring all errors), but I
            # think that would ignore forged signatures too, which would
            # obviously be a security hole.
            things_are_ok = (0,  # X509_V_OK
                             9,  # X509_V_ERR_CERT_NOT_YET_VALID
                             10, # X509_V_ERR_CERT_HAS_EXPIRED
                             18, # X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT
                             19, # X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN
                             )
            if errno in things_are_ok:
                return 1
            # TODO: log the details of the error, because otherwise they get
            # lost in the PyOpenSSL exception that will eventually be raised
            # (possibly OpenSSL.SSL.Error: certificate verify failed)

            # I think that X509_V_ERR_CERT_SIGNATURE_FAILURE is the most
            # obvious sign of hostile attack.
            return 0

        # VERIFY_PEER means we ask the the other end for their certificate.
        # not adding VERIFY_FAIL_IF_NO_PEER_CERT means it's ok if they don't
        # give us one (i.e. if an anonymous client connects to an
        # authenticated server). I don't know what VERIFY_CLIENT_ONCE does.
        ctx.set_verify(SSL.VERIFY_PEER |
                       #SSL.VERIFY_FAIL_IF_NO_PEER_CERT |
                       SSL.VERIFY_CLIENT_ONCE,
                       alwaysValidate)
        return ctx

def digest32(colondigest):
    """Convert a colon-separated hex digest (e.g. "ab:cd:...") to base32.

    NOTE(review): builds the raw digest via chr() on a Python 2 byte
    string — presumably not Python 3 safe; confirm before porting.
    """
    digest = "".join([chr(int(c,16)) for c in colondigest.split(":")])
    digest = base32.encode(digest)
    return digest

available = True
# Copyright 2012 OpenStack LLC.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4

import unittest

from quantumclient.common import exceptions
from quantumclient.quantum import v2_0 as quantumV20


class CLITestArgs(unittest.TestCase):
    """Tests for quantumV20.parse_args_to_dict command-line parsing."""

    def test_empty(self):
        parsed = quantumV20.parse_args_to_dict([])
        self.assertEqual({}, parsed)

    def test_default_bool(self):
        # A bare flag with no value parses as True.
        argv = ['--my_bool', '--arg1', 'value1']
        parsed = quantumV20.parse_args_to_dict(argv)
        self.assertTrue(parsed['my_bool'])

    def test_bool_true(self):
        # type=bool 'true' -> True; dashes in the option name become
        # underscores in the resulting key.
        argv = ['--my-bool', 'type=bool', 'true', '--arg1', 'value1']
        parsed = quantumV20.parse_args_to_dict(argv)
        self.assertTrue(parsed['my_bool'])

    def test_bool_false(self):
        # type=bool 'false' -> False.
        argv = ['--my_bool', 'type=bool', 'false', '--arg1', 'value1']
        parsed = quantumV20.parse_args_to_dict(argv)
        self.assertFalse(parsed['my_bool'])

    def test_nargs(self):
        # Several bare values after one flag are collected together.
        argv = ['--tag', 'x', 'y', '--arg1', 'value1']
        parsed = quantumV20.parse_args_to_dict(argv)
        self.assertTrue('x' in parsed['tag'])
        self.assertTrue('y' in parsed['tag'])

    def test_badarg(self):
        # Extra values after an '--opt=value' form are rejected.
        argv = ['--tag=t', 'x', 'y', '--arg1', 'value1']
        self.assertRaises(exceptions.CommandError,
                          quantumV20.parse_args_to_dict, argv)

    def test_arg(self):
        argv = ['--tag=t', '--arg1', 'value1']
        self.assertEqual('value1',
                         quantumV20.parse_args_to_dict(argv)['arg1'])

    def test_dict_arg(self):
        # type=dict parses 'k1=v1,k2=v2' into a dict.
        argv = ['--tag=t', '--arg1', 'type=dict', 'key1=value1,key2=value2']
        arg1 = quantumV20.parse_args_to_dict(argv)['arg1']
        self.assertEqual('value1', arg1['key1'])
        self.assertEqual('value2', arg1['key2'])

    def test_list_of_dict_arg(self):
        # list=true wraps the parsed dict in a one-element list.
        argv = ['--tag=t', '--arg1', 'type=dict',
                'list=true', 'key1=value1,key2=value2']
        arg1 = quantumV20.parse_args_to_dict(argv)['arg1']
        self.assertEqual('value1', arg1[0]['key1'])
        self.assertEqual('value2', arg1[0]['key2'])
from kvmagent import kvmagent
from zstacklib.utils import jsonobject
from zstacklib.utils import http
from zstacklib.utils import log
from zstacklib.utils.bash import *
from zstacklib.utils import linux
from zstacklib.utils import thread
from jinja2 import Template

import os.path
import re
import time
import traceback

from prometheus_client.core import GaugeMetricFamily,REGISTRY
from prometheus_client import start_http_server

logger = log.get_logger(__name__)


class PrometheusPlugin(kvmagent.KvmAgent):
    """Agent plugin that exposes host metrics to Prometheus.

    (Re)configures a local collectd daemon, launches the collectd exporter
    binary supplied by the management server, and serves the agent's own
    metric collectors on an embedded HTTP endpoint (port 7069).
    """

    # Async URI this plugin registers on the agent's HTTP server.
    COLLECTD_PATH = "/prometheus/collectdexporter/start"

    @kvmagent.replyerror
    @in_bash
    def start_collectd_exporter(self, req):
        """Render collectd.conf, then (re)start collectd and the exporter.

        The command body carries ``binaryPath`` (exporter executable),
        ``interval`` (collectd sampling interval in seconds) and optional
        ``startupArguments`` for the exporter process.
        """
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = kvmagent.AgentResponse()

        # Enumerate NICs, skipping loopback and ZStack-internal devices
        # (vnic*, outer*, br_*).
        eths = bash_o("ls /sys/class/net").split()
        interfaces = []
        for eth in eths:
            eth = eth.strip(' \t\n\r')
            if eth == 'lo': continue
            elif eth.startswith('vnic'): continue
            elif eth.startswith('outer'): continue
            elif eth.startswith('br_'): continue
            elif not eth: continue
            else: interfaces.append(eth)

        # The rendered config lives next to the exporter binary.
        conf_path = os.path.join(os.path.dirname(cmd.binaryPath), 'collectd.conf')

        # Jinja2 template; INTERVAL and INTERFACES are substituted below.
        # The network plugin streams values to localhost:25826.
        conf = '''Interval {{INTERVAL}}

FQDNLookup false

LoadPlugin syslog
LoadPlugin aggregation
LoadPlugin cpu
LoadPlugin disk
LoadPlugin interface
LoadPlugin memory
LoadPlugin network
LoadPlugin virt

<Plugin aggregation>
    <Aggregation>
        #Host "unspecified"
        Plugin "cpu"
        #PluginInstance "unspecified"
        Type "cpu"
        #TypeInstance "unspecified"

        GroupBy "Host"
        GroupBy "TypeInstance"

        CalculateNum false
        CalculateSum false
        CalculateAverage true
        CalculateMinimum false
        CalculateMaximum false
        CalculateStddev false
    </Aggregation>
</Plugin>

<Plugin cpu>
    ReportByCpu true
    ReportByState true
    ValuesPercentage true
</Plugin>

<Plugin disk>
    Disk "/^sd/"
    Disk "/^hd/"
    Disk "/^vd/"
    IgnoreSelected false
</Plugin>

<Plugin "interface">
{% for i in INTERFACES -%}
    Interface "{{i}}"
{% endfor -%}
    IgnoreSelected false
</Plugin>

<Plugin memory>
    ValuesAbsolute true
    ValuesPercentage false
</Plugin>

<Plugin virt>
    Connection "qemu:///system"
    RefreshInterval {{INTERVAL}}
    HostnameFormat name
    PluginInstanceFormat name
</Plugin>

<Plugin network>
    Server "localhost" "25826"
</Plugin>
'''

        tmpt = Template(conf)
        conf = tmpt.render({
            'INTERVAL': cmd.interval,
            'INTERFACES': interfaces,
        })

        # Only rewrite the config (and later restart collectd) when the
        # rendered content actually changed.
        need_restart_collectd = False
        if os.path.exists(conf_path):
            with open(conf_path, 'r') as fd:
                old_conf = fd.read()

            if old_conf != conf:
                with open(conf_path, 'w') as fd:
                    fd.write(conf)
                need_restart_collectd = True
        else:
            with open(conf_path, 'w') as fd:
                fd.write(conf)
            need_restart_collectd = True

        # Start collectd if absent; hard-restart it when the config changed.
        pid = linux.find_process_by_cmdline(['collectd', conf_path])
        if not pid:
            bash_errorout('collectd -C %s' % conf_path)
        else:
            if need_restart_collectd:
                bash_errorout('kill -9 %s' % pid)
                bash_errorout('collectd -C %s' % conf_path)

        # Launch the exporter binary if it is not already running.
        pid = linux.find_process_by_cmdline([cmd.binaryPath])
        if not pid:
            EXPORTER_PATH = cmd.binaryPath
            # NOTE(review): os.path.join drops the dirname when binaryPath
            # is absolute, so this is effectively "<binaryPath>.log" —
            # confirm this is the intended log location.
            LOG_FILE = os.path.join(os.path.dirname(EXPORTER_PATH), cmd.binaryPath + '.log')
            ARGUMENTS = cmd.startupArguments
            if not ARGUMENTS:
                ARGUMENTS = ""
            # The {{...}} placeholders below are expanded by the @in_bash
            # decorator from the local variables above.
            bash_errorout('chmod +x {{EXPORTER_PATH}}')
            bash_errorout("nohup {{EXPORTER_PATH}} {{ARGUMENTS}} >{{LOG_FILE}} 2>&1 < /dev/null &\ndisown")

        return jsonobject.dumps(rsp)

    def install_colletor(self):
        """Register a Prometheus collector that drains every callback in
        kvmagent.metric_collectors. (Name kept as-is for callers.)"""
        class Collector(object):
            def collect(self):
                # Never let a collector error break a scrape: log the
                # traceback and report no samples instead.
                try:
                    ret = []
                    for c in kvmagent.metric_collectors:
                        ret.extend(c())

                    return ret
                except Exception as e:
                    content = traceback.format_exc()
                    err = '%s\n%s\n' % (str(e), content)
                    logger.warn(err)
                    return []

        REGISTRY.register(Collector())

    def start(self):
        """Plugin entry point: register URIs and start the scrape server."""
        http_server = kvmagent.get_http_server()
        http_server.register_async_uri(self.COLLECTD_PATH, self.start_collectd_exporter)
        self.install_colletor()
        # Prometheus scrape endpoint for this agent.
        start_http_server(7069)

    def stop(self):
        """Nothing to tear down."""
        pass
import pytest

from tests.support.asserts import assert_error, assert_success


def perform_actions(session, actions):
    """POST the given action chains to the session's /actions endpoint."""
    url = "/session/{session_id}/actions".format(session_id=session.session_id)
    return session.transport.send("POST", url, {"actions": actions})


def _pause_chain(action_type, pause_action):
    """Build a one-source action chain containing a single pause action."""
    return [{
        "type": action_type,
        "id": "foobar",
        "actions": [pause_action],
    }]


@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_pause_positive_integer(session, action_type):
    # Non-negative integer durations are accepted...
    for valid_duration in (0, 1):
        chain = _pause_chain(action_type, {"type": "pause",
                                           "duration": valid_duration})
        response = perform_actions(session, chain)
        assert_success(response)

    # ...while a negative duration is rejected.
    chain = _pause_chain(action_type, {"type": "pause", "duration": -1})
    response = perform_actions(session, chain)
    assert_error(response, "invalid argument")


@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_pause_invalid_types(session, action_type):
    # Any non-integer duration (including bool) is an invalid argument.
    for invalid_type in (0.0, None, "foo", True, [], {}):
        chain = _pause_chain(action_type, {"type": "pause",
                                           "duration": invalid_type})
        response = perform_actions(session, chain)
        assert_error(response, "invalid argument")


@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_pause_without_duration(session, action_type):
    # "duration" is optional for a pause action.
    response = perform_actions(session,
                               _pause_chain(action_type, {"type": "pause"}))
    assert_success(response)


@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_action_without_id(session, action_type):
    # Every action sequence must carry an "id"; omitting it is an error.
    actions = [{
        "type": action_type,
        "actions": [{"type": "pause", "duration": 1}],
    }]
    response = perform_actions(session, actions)
    assert_error(response, "invalid argument")
# -*- coding: utf-8 -*- from __future__ import unicode_literals import time from geopy.distance import great_circle from s2sphere import Cell, CellId, LatLng from pokemongo_bot import inventory from pokemongo_bot.base_task import BaseTask from pokemongo_bot.item_list import Item from pokemongo_bot.walkers.polyline_walker import PolylineWalker from pokemongo_bot.walkers.step_walker import StepWalker from pokemongo_bot.worker_result import WorkerResult class PokemonHunter(BaseTask): SUPPORTED_TASK_API_VERSION = 1 def __init__(self, bot, config): super(PokemonHunter, self).__init__(bot, config) def initialize(self): self.destination = None self.walker = None self.search_cell_id = None self.search_points = [] self.lost_counter = 0 self.no_log_until = 0 self.config_max_distance = self.config.get("max_distance", 2000) self.config_hunt_all = self.config.get("hunt_all", False) self.config_hunt_vip = self.config.get("hunt_vip", True) self.config_hunt_pokedex = self.config.get("hunt_pokedex", True) def work(self): if not self.enabled: return WorkerResult.SUCCESS if self.get_pokeball_count() <= 0: self.destination = None self.last_cell_id = None return WorkerResult.SUCCESS now = time.time() pokemons = self.get_nearby_pokemons() if self.destination is None: worth_pokemons = self.get_worth_pokemons(pokemons) if len(worth_pokemons) > 0: self.destination = worth_pokemons[0] self.lost_counter = 0 self.logger.info("New destination at %(distance).2f meters: %(name)s", self.destin
ation)
self.no_log_until = now + 60 if self.destination["s2_cell_id"] != self.search_cell_id: self.search_points = self.get_search_points(self.destination["s2_cell_id"]) self.walker = PolylineWalker(self.bot, self.search_points[0][0], self.search_points[0][1]) self.search_cell_id = self.destination["s2_cell_id"] self.search_points = self.search_points[1:] + self.search_points[:1] else: if self.no_log_until < now: self.logger.info("There is no nearby pokemon worth hunting down [%s]", ", ".join(p["name"] for p in pokemons)) self.no_log_until = now + 120 self.last_cell_id = None return WorkerResult.SUCCESS if any(self.destination["encounter_id"] == p["encounter_id"] for p in self.bot.cell["catchable_pokemons"] + self.bot.cell["wild_pokemons"]): self.destination = None elif self.walker.step(): if not any(self.destination["encounter_id"] == p["encounter_id"] for p in pokemons): self.lost_counter += 1 else: self.lost_counter = 0 if self.lost_counter >= 3: self.destination = None else: self.logger.info("Now searching for %(name)s", self.destination) self.walker = StepWalker(self.bot, self.search_points[0][0], self.search_points[0][1]) self.search_points = self.search_points[1:] + self.search_points[:1] elif self.no_log_until < now: distance = great_circle(self.bot.position, (self.walker.dest_lat, self.walker.dest_lng)).meters self.logger.info("Moving to destination at %s meters: %s", round(distance, 2), self.destination["name"]) self.no_log_until = now + 30 return WorkerResult.RUNNING def get_pokeball_count(self): return sum([inventory.items().get(ball.value).count for ball in [Item.ITEM_POKE_BALL, Item.ITEM_GREAT_BALL, Item.ITEM_ULTRA_BALL]]) def get_nearby_pokemons(self): radius = self.config_max_distance pokemons = [p for p in self.bot.cell["nearby_pokemons"] if self.get_distance(self.bot.start_position, p) <= radius] for pokemon in pokemons: pokemon["distance"] = self.get_distance(self.bot.position, p) pokemon["name"] = inventory.pokemons().name_for(pokemon["pokemon_id"]) 
pokemons.sort(key=lambda p: p["distance"]) return pokemons def get_worth_pokemons(self, pokemons): if self.config_hunt_all: worth_pokemons = pokemons else: worth_pokemons = [] if self.config_hunt_vip: worth_pokemons += [p for p in pokemons if p["name"] in self.bot.config.vips] if self.config_hunt_pokedex: worth_pokemons += [p for p in pokemons if (p not in worth_pokemons) and any(not inventory.pokedex().seen(fid) for fid in self.get_family_ids(p))] worth_pokemons.sort(key=lambda p: inventory.candies().get(p["pokemon_id"]).quantity) return worth_pokemons def get_family_ids(self, pokemon): family_id = inventory.pokemons().data_for(pokemon["pokemon_id"]).first_evolution_id ids = [family_id] ids += inventory.pokemons().data_for(family_id).next_evolutions_all[:] return ids def get_distance(self, location, pokemon): return great_circle(location, (pokemon["latitude"], pokemon["longitude"])).meters def get_search_points(self, cell_id): points = [] # For cell level 15 for c in Cell(CellId(cell_id)).subdivide(): for cc in c.subdivide(): latlng = LatLng.from_point(cc.get_center()) point = (latlng.lat().degrees, latlng.lng().degrees) points.append(point) points[0], points[1] = points[1], points[0] points[14], points[15] = points[15], points[14] point = points.pop(2) points.insert(7, point) point = points.pop(13) points.insert(8, point) closest = min(points, key=lambda p: great_circle(self.bot.position, p).meters) index = points.index(closest) return points[index:] + points[:index]
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
    This file is part of XBMC Mega Pack Addon.

    Copyright (C) 2014 Wolverine (xbmcmegapack@gmail.com)

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""


class Languages_Persian():
    '''Menu-context handler for the Persian language.'''

    def open(self, plugin, menu):
        '''Fill *menu* with every Persian x-plugin known to *plugin*.'''
        dictionaries = ["Channels", "Events", "Live", "Movies",
                        "Sports", "TVShows"]
        xplugins = plugin.get_xplugins(dictionaries=dictionaries,
                                       languages=["Persian"])
        menu.add_xplugins(xplugins)
radius of the circular aperture in terms of the FWHM. cube_ref : array_like, 3d, optional Reference library cube. For Reference Star Differential Imaging. svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional Switch for different ways of computing the SVD and selected PCs. scaling : {'temp-mean', 'temp-standard'} or None, optional With None, no scaling is performed on the input data before SVD. With "temp-mean" then temporal px-wise mean subtraction is done and with "temp-standard" temporal mean centering plus scaling to unit variance is done. fmerit : {'sum', 'stddev'}, string optional Chooses the figure of merit to be used. stddev works better for close in companions sitting on top of speckle noise. collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional Sets the way of collapsing the frames for producing a final image. If None then the cube of residuals is used when measuring the function of merit (instead of a single final frame). p_ini : np.array Position (r, theta) of the circular aperture center. options: dict, optional The scipy.optimize.minimize options. verbose : boolean, optional If True, informations are displayed in the shell. Returns ------- out : scipy.optimize.minimize solution object The solution of the minimization algorithm. 
""" if verbose: print('') print('{} minimization is running...'.format(options.get('method','Nelder-Mead'))) if p_ini is None: p_ini = p solu = minimize(chisquare, p, args=(cube, angs, plsc, psf, fwhm, annulus_width, aperture_radius, p_ini, ncomp, cube_ref, svd_mode, scaling, fmerit, collapse), method = options.pop('method','Nelder-Mead'), options=options, **kwargs) if verbose: print(solu) return solu def firstguess(cube, angs, psfn, ncomp, plsc, planets_xy_coord, fwhm=4, annulus_width=3, aperture_radius=4, cube_ref=None, svd_mode='lapack', scaling=None, fmerit='sum', collapse='median', p_ini=None, f_range=None, simplex=True, simplex_options=None, display=False, verbose=True, save=False, figure_options=None): """ Determines a first guess for the position and the flux of a planet. We process the cube without injecting any negative fake companion. This leads to the visual detection of the planet(s). For each of them, one can estimate the (x,y) coordinates in pixel for the position of the star, as well as the planet(s). From the (x,y) coordinates in pixels for the star and planet(s), we can estimate a preliminary guess for the position and flux for each planet by using the method "firstguess_from_coord". The argument "f_range" allows to indicate prior limits for the flux (optional, default: None). This step can be reiterate to refine the preliminary guess for the flux. We can go a step further by using a Simplex Nelder_Mead minimization to estimate the first guess based on the preliminary guess. Parameters ---------- cube: numpy.array The cube of fits images expressed as a numpy.array. angs: numpy.array The parallactic angle fits image expressed as a numpy.array. psfn: numpy.array The centered and normalized (flux in a 1*FWHM aperture must equal 1) PSF 2d-array. ncomp: int The number of principal components. plsc: float The platescale, in arcsec per pixel. planet_xy_coord: array or list The list of (x,y) positions of the planets. 
fwhm : float, optional The FHWM in pixels. annulus_width: int, optional The width in terms of the FWHM of the annulus on which the PCA is done. aperture_radius: int, optional The radiu
s of the circular aperture in terms of the FWHM. cube_ref : array_like, 3d, optional Reference library cube. For Reference Star Differential Imaging. svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional Switch for different ways of computing the SVD and selected PCs. scaling : {'temp-mean', 'temp-standard'} or None, optional With None, no scaling is performed on the input data before SVD. With "temp-mean" then temporal px-wise mean subtractio
n is done and with "temp-standard" temporal mean centering plus scaling to unit variance is done. fmerit : {'sum', 'stddev'}, string optional Chooses the figure of merit to be used. stddev works better for close in companions sitting on top of speckle noise. collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional Sets the way of collapsing the frames for producing a final image. If None then the cube of residuals is used when measuring the function of merit (instead of a single final frame). p_ini: numpy.array Position (r, theta) of the circular aperture center. f_range: numpy.array, optional The range of flux tested values. If None, 20 values between 0 and 5000 are tested. simplex: boolean, optional If True, the Nelder-Mead minimization is performed after the flux grid search. simplex_options: dict, optional The scipy.optimize.minimize options. display: boolean, optional If True, the figure chi2 vs. flux is displayed. verbose: boolean If True, display intermediate info in the shell. save: boolean, optional If True, the figure chi2 vs. flux is saved. figure_options: dict, optional Additional parameters are passed to the matplotlib plot method. Returns ------- out : The radial coordinates and the flux of the companion. WARNING: POLAR ANGLE IS NOT THE CONVENTIONAL NORTH-TO-EAST P.A. 
""" if verbose: start_time = time_ini() if figure_options is None: figure_options = {'color':'gray', 'marker':'.', 'title':r'$\chi^2_{r}$ vs flux'} planets_xy_coord = np.array(planets_xy_coord) n_planet = planets_xy_coord.shape[0] center_xy_coord = np.array(frame_center(cube[0])) if f_range is None: f_range = np.linspace(0,5000,20) if simplex_options is None: simplex_options = {'xtol':1e-1, 'maxiter':500, 'maxfev':1000} r_0 = np.zeros(n_planet) theta_0 = np.zeros_like(r_0) f_0 = np.zeros_like(r_0) for index_planet in range(n_planet): if verbose: print('') print(sep) print(' Planet {} '.format(index_planet)) print(sep) print('') msg2 = 'Planet {}: flux estimation at the position [{},{}], running ...' print(msg2.format(index_planet,planets_xy_coord[index_planet,0], planets_xy_coord[index_planet,1])) res_init = firstguess_from_coord(planets_xy_coord[index_planet], center_xy_coord, cube, angs, plsc, psfn, fwhm, annulus_width, aperture_radius, ncomp, f_range=f_range, cube_ref=cube_ref, svd_mode=svd_mode, scaling=scaling, fmerit=fmerit, collapse=collapse, display=display, verbose=verbose, save=save, **figure_options) r_pre, theta_pre, f_pre = res_init if verbose: msg3 = 'Planet {}: preliminary guess: (r, theta, f)=({:.1f}, {:.1f},
import os.path
import logging
_logger = logging.getLogger(__name__)

from operator import itemgetter

from tornado.web import Application, RequestHandler, StaticFileHandler
from tornado.ioloop import IOLoop

# Minimal in-module configuration.
config = {
    'DEBUG': True,
    'PORT' : 5000
}

HANDLERS = []
ROOT_DIR = os.path.abspath(os.path.join(os.path.split(__file__)[0], os.path.pardir))

# Expose the GfxTablet websocket handler when the node module is installed
# alongside the project; otherwise run without it.
GFXTABLET_DIR = os.path.join(ROOT_DIR, "node_modules", "gfxtablet")
if os.path.exists(GFXTABLET_DIR):
    import sys
    sys.path.insert(0, GFXTABLET_DIR)
    from GfxTablet import GfxTabletHandler
    HANDLERS.append((r'/gfxtablet', GfxTabletHandler))


class MainHandler(RequestHandler):
    """Serve the landing page."""

    def get(self):
        self.render("index.html")


def main():
    """Assemble the handler table and run the Tornado IO loop forever."""
    global HANDLERS
    HANDLERS += [(r'/(.+)', StaticFileHandler, {'path': ROOT_DIR}),
                 (r'/', MainHandler)]
    app = Application(HANDLERS,
                      debug=config.get('DEBUG', False),
                      static_path=ROOT_DIR)
    settings_dump = '\n'.join('%s: %s' % (key, str(value))
                              for key, value in sorted(app.settings.items(),
                                                       key=itemgetter(0)))
    _logger.info("app.settings:\n%s" % settings_dump)
    port = config.get('PORT', 5000)
    app.listen(port)
    _logger.info("""
 listening on port %d
press CTRL-c to terminate the server

-----------
Y A W V R B
 *************************
*********************************
STARTING TORNADO APP!!!!!!!!!!!!!
*********************************
 *************************
Y A W V R B
-----------
""" % port)
    IOLoop.instance().start()


if __name__ == "__main__":
    logging.basicConfig(level=(logging.DEBUG if config.get('DEBUG') else logging.INFO),
                        format="%(asctime)s: %(levelname)s %(name)s %(funcName)s %(lineno)d: %(message)s")
    main()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
# Author: Quincey Sun
# Mail: zeroonegit@gmail.com
# Created Time: 2016-06-21 23:14:26
###############################################################################

## This program asks the user for a name and a password, then checks the
# pair to decide whether the user is allowed in.
# Note that this is a simple and insecure example: real password handling
# should never be implemented this way.

name = input("What is your name? ")
password = input("What is the password? ")

# Compare the (name, password) pair against the known users.
credentials = (name, password)
if credentials == ("Josh", "Friday"):
    print("Welcome Josh")
elif credentials == ("Fred", "Rock"):
    print("Welcome Fred")
else:
    print("I don't know you.")
import os
from unittest import TestCase

import mock

from marvel.iterables import BaseIterable


class FooIterable(BaseIterable):
    """Iterable stub that serves 20 one-element pages, then exhausts."""

    def __init__(self):
        self.total_pages = 20
        super(FooIterable, self).__init__()

    def get_items(self):
        """Return the next single-item page, or signal exhaustion."""
        if self.total_pages == 0:
            raise StopIteration
        self.total_pages -= 1
        return [self.total_pages]


class TestBaseIterable(TestCase):
    def test_limit_pages_not_defined(self):
        # Without TC_LIMIT_PAGES set, iteration drains every page.
        pages_seen = sum(1 for _ in FooIterable())
        assert pages_seen == 20

    @mock.patch.dict(os.environ, {'TC_LIMIT_PAGES': '3'})
    def test_limit_pages_with_3(self):
        # TC_LIMIT_PAGES caps iteration at the configured page count.
        pages_seen = sum(1 for _ in FooIterable())
        assert pages_seen == 3
from mqttsqlite.orm.models import Topic
import json
from mqttsqlite.settings.private_settings import MANAGEMENT_PASSWORD, QUERY_PASSWORD
from .utils import Payload, Utils


class TopicsController (object):
    """Handlers for managing the set of subscribed MQTT topics.

    Each handler parses an incoming MQTT message (JSON payload), validates
    the request against the relevant password, applies the change, and
    returns a JSON response carrying the current topic list.
    """

    @staticmethod
    def _saved_topic_names():
        """Names of all persisted topics (deduplicates the loop that was
        previously copy-pasted into every handler)."""
        return [topic.name for topic in Topic.select()]

    def add_topic(self, msg):
        """Create the topic named in `msg` (idempotent via get_or_create).

        Requires MANAGEMENT_PASSWORD. Returns the JSON response string.
        """
        received_data = json.loads(msg.payload)
        payload = Utils().validate_data(received_data, MANAGEMENT_PASSWORD, ['password', 'client'])
        if payload.result == 'OK':
            # Result tuple of get_or_create is intentionally unused.
            Topic.get_or_create(name=str(received_data['topic']))
            payload.topics = self._saved_topic_names()
        return payload.get_json()

    def remove_topic(self, msg):
        """Delete the topic named in `msg`; 'KO' + error when absent.

        Requires MANAGEMENT_PASSWORD. Returns the JSON response string.
        """
        received_data = json.loads(msg.payload)
        payload = Utils().validate_data(received_data, MANAGEMENT_PASSWORD, ['password', 'client'])
        if payload.result == 'OK':
            topic = Topic.select().where(Topic.name == str(received_data['topic']))
            if topic.count() > 0:
                topic[0].delete_instance()
            else:
                payload.result = 'KO'
                payload.error = 'Topic not found'
            payload.topics = self._saved_topic_names()
        return payload.get_json()

    def list_topics(self, msg):
        """Return the current topic list (read-only; QUERY_PASSWORD)."""
        received_data = json.loads(msg.payload)
        payload = Utils().validate_data(received_data, QUERY_PASSWORD, ['password', 'client'], topic=False)
        if payload.result == 'OK':
            payload.topics = self._saved_topic_names()
        return payload.get_json()

    def get_storaged_topics(self):
        """All persisted Topic rows (name keeps its historical spelling)."""
        return Topic.select()

    def is_topic_subscribed(self, topic):
        """True when a topic with this exact name is persisted."""
        return Topic.select().where(Topic.name == topic).count() > 0
ist()) self.assertEqual([], index.get_shape())
p1 = array_ops.placeholder(dtypes.float32, shape=[None, None]) p2 = array_ops.placeholder(dtypes.float32, shape=[N
one, None]) m, index = control_flow_ops.merge([p1, p2]) self.assertEqual([None, None], m.get_shape().as_list()) self.assertEqual([], index.get_shape()) def testRefSelect(self): index = array_ops.placeholder(dtypes.int32) # All inputs unknown. p1 = array_ops.placeholder(dtypes.float32) p2 = array_ops.placeholder(dtypes.float32) p3 = array_ops.placeholder(dtypes.float32) v1 = variables.Variable(p1, validate_shape=False) v2 = variables.Variable(p2, validate_shape=False) v3 = variables.Variable(p3, validate_shape=False) self.assertIs(None, v1.get_shape().ndims) s = control_flow_ops.ref_select(index, [v1, v2, v3]) self.assertIs(None, s.get_shape().ndims) # All inputs known but different. v1 = variables.Variable([[1, 2]]) v2 = variables.Variable([[2], [1]]) s = control_flow_ops.ref_select(index, [v1, v2]) self.assertIs(None, s.get_shape().ndims) # All inputs known and same. v1 = variables.Variable([[1, 2]]) v2 = variables.Variable([[1, 2]]) s = control_flow_ops.ref_select(index, [v1, v2]) self.assertEqual([1, 2], s.get_shape()) # Possibly the same but not guaranteed. v1 = variables.Variable([[1., 2.]]) p2 = array_ops.placeholder(dtypes.float32, shape=[None, 2]) v2 = variables.Variable(p2, validate_shape=False) s = control_flow_ops.ref_select(index, [v1, v2]) self.assertEqual(None, s.get_shape()) def testRunLoopTensor(self): with self.test_session() as sess: tensor_list = [] def condition(t): return t < constant_op.constant(5) def body(_): tensor_list.append(constant_op.constant(5)) return constant_op.constant(10) result = control_flow_ops.while_loop(condition, body, [constant_op.constant(4)]) self.assertEqual(10, sess.run(result)) # Ensure that we cannot run a tensor that escapes the loop body # accidentally. 
with self.assertRaises(ValueError): sess.run(tensor_list[0]) def testWhilePyFuncBasic(self): def func(x): return np.square(x) with self.test_session(): r = control_flow_ops.while_loop( lambda i, v: i < 4, lambda i, v: [i + 1, script_ops.py_func(func, [v], [dtypes.float32])[0]], [constant_op.constant(0), constant_op.constant(2.0, dtypes.float32)], [tensor_shape.unknown_shape(), tensor_shape.unknown_shape()]) self.assertEqual(r[1].eval(), 65536.0) def testWhileFuncBasic(self): @function.Defun(dtypes.float32) def func(x): return math_ops.square(math_ops.square(x)) with self.test_session(): x = constant_op.constant(2.0, dtypes.float32) r = control_flow_ops.while_loop( lambda i, v: i < 2, lambda i, v: [i + 1, func(v)], [constant_op.constant(0), x], [tensor_shape.unknown_shape(), tensor_shape.unknown_shape()]) self.assertEqual(r[1].eval(), 65536.0) r = gradients_impl.gradients(r, x)[0] self.assertEqual(r.eval(), 524288.0) self.assertEqual( len([op for op in x.graph.get_operations() if op.type == "StackV2"]), 1) class ControlFlowContextCheckTest(test.TestCase): def _getWhileTensor(self): """Creates and returns a tensor from a while context.""" tensor = [] def body(i): if not tensor: tensor.append(constant_op.constant(1)) return i + tensor[0] control_flow_ops.while_loop(lambda i: i < 10, body, [0]) return tensor[0] def _getCondTensor(self): cond_tensor = [] def true_fn(): if not cond_tensor: cond_tensor.append(constant_op.constant(1)) return cond_tensor[0] control_flow_ops.cond( math_ops.less(1, 2), true_fn, lambda: constant_op.constant(0)) return cond_tensor[0] def testInvalidContext(self): # Accessing a while loop tensor outside of control flow is illegal. while_tensor = self._getWhileTensor() with self.assertRaisesRegexp( ValueError, "Cannot use 'while/Const_1' as input to 'Add' because 'while/Const_1' " "is in a while loop. 
See info log for more details."): math_ops.add(1, while_tensor) def testInvalidContextInCond(self): # Accessing a while loop tensor in cond is illegal. while_tensor = self._getWhileTensor() with self.assertRaisesRegexp( ValueError, "Cannot use 'while/Const_1' as input to 'cond/Add' because " "'while/Const_1' is in a while loop. See info log for more details."): # TODO(skyewm): this passes if we return while_tensor directly instead # of using it as input to another op. control_flow_ops.cond( math_ops.less(1, 2), lambda: math_ops.add(1, while_tensor), lambda: constant_op.constant(0)) def testInvalidContextInWhile(self): # Accessing a while loop tensor in a different while loop is illegal. while_tensor = self._getWhileTensor() with self.assertRaisesRegexp( ValueError, "Cannot use 'while_1/Add' as input to 'while/Const_1' because they are " "in different while loops. See info log for more details."): control_flow_ops.while_loop(lambda i: i < 10, lambda x: math_ops.add(1, while_tensor), [0]) with self.assertRaisesRegexp( ValueError, "Cannot use 'while_2/NextIteration' as input to 'while/Const_1' " "because they are in different while loops. See info log for more " "details."): control_flow_ops.while_loop(lambda i: i < 10, lambda i: while_tensor, [0]) def testValidCondContext(self): # Accessing a tensor from a cond context is OK (although dangerous). cond_tensor = self._getCondTensor() math_ops.add(1, cond_tensor) def testValidCondContextBranches(self): # Accessing a tensor from a cond context from the other branch's cond # context is OK (although dangerous). cond_tensor = [] def branch_fn(): if not cond_tensor: cond_tensor.append(constant_op.constant(1)) return cond_tensor[0] control_flow_ops.cond(math_ops.less(1, 2), branch_fn, branch_fn) def testValidWhileContext(self): # Accessing a tensor in a nested while is OK. 
def body(_): c = constant_op.constant(1) return control_flow_ops.while_loop(lambda i: i < 3, lambda i: i + c, [0]) control_flow_ops.while_loop(lambda i: i < 5, body, [0]) def testValidNestedContexts(self): # Accessing a tensor from a cond context in a while context, all inside an # outer while context, is OK. def body(_): cond_tensor = self._getCondTensor() # Create another cond containing the while loop for good measure return control_flow_ops.cond( math_ops.less(1, 2), lambda: control_flow_ops.while_loop(lambda i: i < 3, lambda i: i + cond_tensor, [0]), lambda: constant_op.constant(0)) control_flow_ops.while_loop(lambda i: i < 5, body, [0]) def testInvalidNestedContexts(self): # Accessing a tensor from a while context in a different while context, all # inside a cond context, is illegal. def true_fn(): while_tensor = self._getWhileTensor() return control_flow_ops.while_loop(lambda i: i < 3, lambda i: i + while_tensor, [0]) with self.assertRaisesRegexp( ValueError, "Cannot use 'cond/while_1/add' as input to 'cond/while/Const_1' because" " they are in different while loops. See info log for more details."): control_flow_ops.cond( math_ops.less(1, 2), true_fn, lambda: constant_op.constant(0)) class TupleTest(test.TestCase): def testTensors(self): for v1_first in [True, False]: with self.test_session(): v1 = variables.Variable([1.0]) add1 = math_ops.add( control_flow_ops.with_dependencies([v1.initializer], v1._re
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model


class JobGetOptions(Model):
    """Optional request parameters for the Job *get* operation.

    Bundles the OData query options, server timeout, client-tracing
    headers and HTTP conditional-access (ETag / timestamp) headers that a
    caller may attach to a single ``get`` call.

    :param select: An OData $select clause.
    :type select: str
    :param expand: An OData $expand clause.
    :type expand: str
    :param timeout: The maximum time that the server can spend processing
     the request, in seconds. The default is 30 seconds.
    :type timeout: int
    :param client_request_id: The caller-generated request identity, in the
     form of a GUID with no decoration such as curly braces, e.g.
     9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
    :type client_request_id: str
    :param return_client_request_id: Whether the server should return the
     client-request-id in the response. Default value: False.
    :type return_client_request_id: bool
    :param ocp_date: The time the request was issued. Client libraries
     typically set this to the current system clock time; set it explicitly
     if you are calling the REST API directly.
    :type ocp_date: datetime
    :param if_match: An ETag value associated with the version of the
     resource known to the client. The operation will be performed only if
     the resource's current ETag on the service exactly matches the value
     specified by the client.
    :type if_match: str
    :param if_none_match: An ETag value associated with the version of the
     resource known to the client. The operation will be performed only if
     the resource's current ETag on the service does not match the value
     specified by the client.
    :type if_none_match: str
    :param if_modified_since: A timestamp indicating the last modified time
     of the resource known to the client. The operation will be performed
     only if the resource on the service has been modified since the
     specified time.
    :type if_modified_since: datetime
    :param if_unmodified_since: A timestamp indicating the last modified
     time of the resource known to the client. The operation will be
     performed only if the resource on the service has not been modified
     since the specified time.
    :type if_unmodified_since: datetime
    """

    def __init__(self, select=None, expand=None, timeout=30,
                 client_request_id=None, return_client_request_id=False,
                 ocp_date=None, if_match=None, if_none_match=None,
                 if_modified_since=None, if_unmodified_since=None):
        # OData query options.
        self.select = select
        self.expand = expand
        # Maximum server-side processing time, in seconds.
        self.timeout = timeout
        # Client-tracing headers: caller-supplied GUID, optionally echoed back.
        self.client_request_id = client_request_id
        self.return_client_request_id = return_client_request_id
        self.ocp_date = ocp_date
        # Conditional-access headers (ETag and modification-time based).
        self.if_match = if_match
        self.if_none_match = if_none_match
        self.if_modified_since = if_modified_since
        self.if_unmodified_since = if_unmodified_since
" been successfully aligned.", default=False) parser.add_option("--rootOutgroupDists", dest="rootOutgroupDists", help="root outgroup distance (--rootOutgroupPaths must " + "be given as well)", default=None) parser.add_option("--rootOutgroupPaths", dest="rootOutgroupPaths", type=str, help="root outgroup path (--rootOutgroup must be given " + "as well)", default=None) parser.add_option("--root", dest="root", help="Name of ancestral node (which" " must appear in NEWICK tree in <seqfile>) to use as a " "root for the alignment. Any genomes not below this node " "in the tree may be used as outgroups but will never appear" " in the output. If no root is specifed then the root" " of the tree is used. ", default=None) #Kyoto Tycoon Options ktGroup = OptionGroup(parser, "kyoto_tycoon Options", "Kyoto tycoon provides a client/server framework " "for large in-memory hash tables and is available " "via the --database option.") ktGroup.add_option("--ktPort", dest="ktPort", help="starting port (lower bound of range) of ktservers" " [default: %default]", default=1978) ktGroup.add_option("--ktHost", dest="ktHost", help="The hostname to use for connections to the " "ktserver (this just specifies where nodes will attempt" " to find the server, *not* where the ktserver will be" " run)", default=None) ktGroup.add_option("--ktType", dest="ktType", help="Kyoto Tycoon server type " "(memory, snapshot, or disk)" " [default: %default]", default='memory') # sonlib doesn't allow for spaces in attributes in the db conf # which renders this options useless #ktGroup.add_option("--ktOpts", dest="ktOpts", # help="Command line ktserver options", # default=None) ktGroup.add_option("--ktCreateTuning", dest="ktCreateTuning", help="ktserver options when creating db "\ "(ex #bnum=30m#msiz=50g)", default=None) ktGroup.add_option("--ktOpenTuning", dest="ktOpenTuning", help="ktserver options when opening existing db "\ "(ex #opts=ls#ktopts=p)", default=None) parser.add_option_group(ktGroup) return parser # 
# Try to weed out errors early by checking options and paths
def validateInput(workDir, outputHalFile, options):
    """Validate working directory, output paths and database options.

    Raises RuntimeError with a specific message on the first problem found;
    returns None when everything checks out.  Probes writability by
    actually creating/opening the paths.
    """
    # Check for spaces *before* the generic try/except below, so the
    # specific message is not masked by "Can't write to workDir".
    if workDir.find(' ') >= 0:
        raise RuntimeError("Cactus does not support spaces in pathnames: %s"
                           % workDir)
    try:
        if not os.path.isdir(workDir):
            os.makedirs(workDir)
        if not os.path.isdir(workDir) or not os.access(workDir, os.W_OK):
            raise RuntimeError()
    except Exception:
        raise RuntimeError("Can't write to workDir: %s" % workDir)
    try:
        # Writability probe only; close immediately so the handle is not leaked.
        open(outputHalFile, "w").close()
    except Exception:
        raise RuntimeError("Unable to write to hal: %s" % outputHalFile)
    if options.database != "tokyo_cabinet" and\
       options.database != "kyoto_tycoon":
        raise RuntimeError("Invalid database type: %s" % options.database)
    if options.outputMaf is not None:
        try:
            open(options.outputMaf, "w").close()
        except Exception:
            raise RuntimeError("Unable to write to maf: %s" % options.outputMaf)
    if options.configFile is not None:
        try:
            ConfigWrapper(ET.parse(options.configFile).getroot())
        except Exception:
            raise RuntimeError("Unable to read config: %s" % options.configFile)
    if options.database == 'kyoto_tycoon':
        if options.ktType.lower() not in ('memory', 'snapshot', 'disk'):
            raise RuntimeError("Invalid ktserver type specified: %s. Must be "
                               "memory, snapshot or disk" % options.ktType)


# Convert the jobTree options taken in by the parser back
# out to command line options to pass to progressive cactus
def getJobTreeCommands(jtPath, parser, options):
    """Rebuild a jobTree command-line string from parsed optparse options.

    Only options belonging to groups whose title starts with "jobTree" or
    "Jobtree", and whose value differs from the group default, are emitted.
    Values are double-quoted.
    """
    cmds = "--jobTree %s" % jtPath
    for optGroup in parser.option_groups:
        if optGroup.title.startswith("jobTree") or \
           optGroup.title.startswith("Jobtree"):
            for opt in optGroup.option_list:
                if hasattr(options, opt.dest) and \
                   getattr(options, opt.dest) != optGroup.defaults[opt.dest]:
                    # str(opt) renders the option flag(s), e.g. "--batchSystem"
                    cmds += " %s" % str(opt)
                    if opt.nargs > 0:
                        cmds += " \"%s\"" % getattr(options, opt.dest)
    return cmds


# Go through a text file and add every word inside to an arguments list
# which will be prepended to sys.argv.
# This way both the file and command line are passed to the option
# parser, with the command line getting priority.
def parseOptionsFile(path):
    """Read whitespace-separated arguments from a text file.

    Each non-blank line is tokenized with shlex (so quoting works) and the
    tokens are concatenated into a single list, which is returned.
    Raises RuntimeError if the file does not exist.
    """
    if not os.path.isfile(path):
        raise RuntimeError("Options File not found: %s" % path)
    args = []
    # with-statement so the handle is closed even if shlex raises.
    with open(path, "r") as optFile:
        for l in optFile:
            line = l.rstrip()
            if line:
                args += shlex.split(line)
    # BUG FIX: the original built 'args' but never returned it, so callers
    # always received None.
    return args


# This source file should always be in progressiveCactus/src.  So
# we return the path to progressiveCactus/environment, which needs
# to be sourced before doing anything.
def getEnvFilePath():
    """Return the path to the progressiveCactus 'environment' file.

    Resolved relative to the directory containing the invoked script
    (sys.argv[0]); asserts that the file actually exists.
    """
    path = os.path.dirname(sys.argv[0])
    envFile = os.path.join(path, '..', 'environment')
    assert os.path.isfile(envFile)
    return envFile


# If specified with the risky --autoAbortOnDeadlock option, we call this to
# force an abort if
the jobStatusMonitor thinks it's hopeless. # We delete the jobTreePath to get rid of kyoto tycoons. def abortFunction(jtPath, options): def afClosure(): sys.stderr.write('\nAborting due to deadlock (prevent with' + '--noAutoAbort' + ' option), and running rm -rf %s\n\n' % jtPath) system('rm -rf %s' % jtPath) sys.exit(-1) if options.autoAbortOnDeadlock: return afClosure else: return None # Run cactus progressive on the project that has been created in workDir. # Any jobtree options are passed along. Should probably look at redirecting # stdout/stderr in the future. def runCactus(workDir, jtCommands, jtPath, options): envFile = getEnvFilePath() pjPath = os.path.join(workDir, ProjectWrapper.alignmentDirName, '%s_project.xml' % ProjectWrapper.alignmentDirName) logFile = os.path.join(workDir, 'cactus.log') if options.overwrite: overwriteFlag = '--overwrite' system("rm -f %s" % logFile) else: overwriteFlag = '' logHandle = open(logFile, "a") logHandle.write("\n%s: Beginning Progressive Cactus Alignment\n\n" % str( datetime.datetime.now())) logHandle.close() cmd = '. %s && cactus_progressive.py %s %s %s >> %s 2>&1' % (envFile, jtCommands, pjPath, overwriteFlag, logFile) jtMonitor = JobStatusMonitor(jtPath, pjPath, logFile, deadlockCallbackFn=abortFunction(jtPath, options)) if options.database == "kyoto_tycoon": jtMonitor.daemon = True jtMonitor.start() system(cmd) logHandle = open(logFile, "a") logHandle.write("\n%s: Finished Progressive Cactus Alignment\n" % str( datetime.datetime.now())) logHan
# profiling_late.py
#
# Copyright (C) 2015 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
#

''' Module to enable profiling timepoints. This module is loaded only if
the configuration file exists, see profiling.py for more information '''

import os
import sys
import yaml
import cProfile

from kano.logging import logger
from kano.profiling import CONF_FILE

# load the configuration file
# NOTE(review): yaml.load without an explicit Loader can execute arbitrary
# Python from the file; CONF_FILE is presumably a local, trusted config —
# confirm, or switch to yaml.safe_load.
with open(CONF_FILE, 'r') as inp_conf:
    conf = yaml.load(inp_conf)

# Single module-level profiler instance, shared by all timepoints.
myProfile = cProfile.Profile()
# Config entries are keyed by the invoked script path (sys.argv[0]).
app_name = sys.argv[0]
# Name of the timepoint currently being profiled ("" = none active).
point_current = ""


def has_key(d, k):
    # True only if d is exactly a dict (not a subclass) containing key k.
    return type(d) is dict and k in d


def declare_timepoint(name, isStart):
    """Mark the start (isStart=True) or end (isStart=False) of the named
    timepoint.

    Behavior is driven entirely by the loaded config: the Python profiler
    is toggled for timepoints with a 'python' entry, and arbitrary shell
    commands from 'start_exec'/'end_exec' entries are run via os.system.
    Always emits a debug log record for the timepoint.
    """
    global myProfile
    global point_current
    cmd = None
    pythonProfile = False
    # Check if the app is contained in the profiling conf file
    if has_key(conf, app_name):
        # Check if the timepoint name is contained in the profiling conf file
        if has_key(conf[app_name], name):
            ct = conf[app_name][name]
            # Check if python profiler should be started for this timepoint
            if has_key(ct, 'python'):
                pythonProfile = True
                if isStart:
                    if point_current:
                        # A previous session was never stopped: discard it
                        # and start profiling the new point instead.
                        logger.error('Stop profiling for point "{0}" and do "{1}" instead'.format(point_current, name))
                        myProfile.disable()
                        myProfile.clear()
                    point_current = name
                    myProfile.enable()
                else:
                    if point_current != name:
                        # Mismatched stop: leave the active session running.
                        logger.error('Can\'t stop point "{0}" since a profiling session for "{1}" is being run'.format(name, point_current))
                    else:
                        myProfile.disable()
                        # Check if the statfile location in specified
                        if ct['python']['statfile']:
                            try:
                                myProfile.dump_stats(ct['python']['statfile'])
                            except IOError as e:
                                # errno 2 == ENOENT: the target directory
                                # most likely does not exist.
                                if e.errno == 2:
                                    logger.error('Path to "{}" probably does not exist'.format(ct['python']['statfile']))
                                else:
                                    logger.error('dump_stats IOError: errno:{0}: {1} '.format(e.errno, e.strerror))
                        else:
                            logger.error('No statfile entry in profiling conf file "{}"'.format(CONF_FILE))
                        myProfile.clear()
                        point_current = ""
            else:
                logger.info('Profiling conf file doesnt enable the Python profiler for point {} at app {}'.format(name, app_name))
            # Check if we want to run some other command at this timepoint
            if isStart and has_key(ct, 'start_exec'):
                cmd = ct['start_exec']
                os.system(cmd)
            if not isStart and has_key(ct, 'end_exec'):
                cmd = ct['end_exec']
                os.system(cmd)
        else:
            logger.info('Profiling conf file doesnt include point:{} for app {}'.format(name, app_name))
    else:
        logger.info('Profiling conf file doesnt include app:{}'.format(app_name))
    logger.debug('timepoint '+name, transition=name, isStart=isStart, cmd=cmd, pythonProfile=pythonProfile)
#!/usr/bin/env python # encoding: utf-8 import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name="cubehelix",
vers
ion="0.1.0", author="James Davenport", # author_email="", description="Cubehelix colormaps for matplotlib", long_description=read('README.md'), # license="BSD", py_modules=['cubehelix'], classifiers=[ "Development Status :: 3 - Alpha", "Topic :: Scientific/Engineering :: Visualization", # "License :: OSI Approved :: BSD License", ] )
alizer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def create_or_update( self, resource_group_name: str, workspace_name: str, sql_pool_name: str, data_masking_rule_name: str, parameters: "_models.DataMaskingRule", **kwargs: Any ) -> "_models.DataMaskingRule": """Creates or up
dates a Sql pool data masking rule. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param sql_pool_name: SQL pool name. :type sql_pool_name: str :param data_masking_rule_name: T
he name of the data masking rule. :type data_masking_rule_name: str :param parameters: The required parameters for creating or updating a data masking rule. :type parameters: ~azure.mgmt.synapse.models.DataMaskingRule :keyword callable cls: A custom type or function that will be passed the direct response :return: DataMaskingRule, or the result of cls(response) :rtype: ~azure.mgmt.synapse.models.DataMaskingRule :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DataMaskingRule"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] _json = self._serialize.body(parameters, 'DataMaskingRule') request = build_create_or_update_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, sql_pool_name=sql_pool_name, data_masking_rule_name=data_masking_rule_name, content_type=content_type, json=_json, template_url=self.create_or_update.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('DataMaskingRule', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('DataMaskingRule', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules/{dataMaskingRuleName}'} # type: ignore @distributed_trace_async async def get( self, resource_group_name: str, workspace_name: str, sql_pool_name: str, data_masking_rule_name: str, **kwargs: Any ) -> "_models.DataMaskingRule": """Gets the specific Sql pool data masking rule. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param sql_pool_name: SQL pool name. :type sql_pool_name: str :param data_masking_rule_name: The name of the data masking rule. :type data_masking_rule_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DataMaskingRule, or the result of cls(response) :rtype: ~azure.mgmt.synapse.models.DataMaskingRule :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DataMaskingRule"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) request = build_get_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, sql_pool_name=sql_pool_name, data_masking_rule_name=data_masking_rule_name, template_url=self.get.metadata['url'], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('DataMaskingRule', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules/{dataMaskingRuleName}'} # type: ignore @distributed_trace def list_by_sql_pool( self, resource_group_name: str, workspace_name: str, sql_pool_name: str, **kwargs: Any ) -> AsyncIterable["_models.DataMaskingRuleListResult"]: """Gets a list of Sql pool data masking rules. :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: The name of the workspace. :type workspace_name: str :param sql_pool_name: SQL pool name. :type sql_pool_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DataMaskingRuleListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.synapse.models.DataMaskingRuleListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DataMaskingRuleListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) def prepare_request(next_link=None): if not next_link: request = build_list_by_sql_pool_request( subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, sql_pool_name=sql_pool_name, template_url=self.list_by_sql_pool.metadata['url'], ) request = _convert_request(request) request.url = self._cl
import os
import socket
import sys

# Address of the local agent that serves queued user input.
# NOTE(review): presumably a companion process listens here — confirm.
input_host = '127.0.0.1'
input_port = 65000

# Batch mode is opted into via an environment variable set by the backend.
batch_enabled = int(os.environ.get('_BACKEND_BATCH_MODE', '0'))

if batch_enabled:
    # When batch mode is on, replace the interactive input builtins with
    # versions that fetch the "user input" from the local TCP agent above.

    # Since latest Python 2 has `builtins` and `input`,
    # we cannot detect Python 2 with the existence of them.
    if sys.version_info.major > 2:
        import builtins

        def _input(prompt=''):
            # Mimic builtin input(): echo the prompt, then read one
            # response (up to 1024 bytes) from the agent socket.
            sys.stdout.write(prompt)
            sys.stdout.flush()
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                try:
                    sock.connect((input_host, input_port))
                    userdata = sock.recv(1024)
                except ConnectionRefusedError:
                    # No agent running: return a sentinel string instead
                    # of blocking or raising.
                    userdata = b'<user-input-unavailable>'
            return userdata.decode()

        # Keep a reference to the original builtin input() before patching.
        builtins._input = input  # type: ignore
        builtins.input = _input
    else:
        # __builtins__ is an alias dict for __builtin__ in modules other
        # than __main__.
        # Thus, we have to explicitly import __builtin__ module in Python 2.
        import __builtin__
        builtins = __builtin__

        def _raw_input(prompt=''):
            # Python 2 counterpart of _input above, patching raw_input().
            sys.stdout.write(prompt)
            sys.stdout.flush()
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((input_host, input_port))
                userdata = sock.recv(1024)
            except socket.error:
                userdata = b'<user-input-unavailable>'
            finally:
                sock.close()
            return userdata.decode()

        # Keep a reference to the original raw_input before patching.
        builtins._raw_input = builtins.raw_input  # type: ignore
        builtins.raw_input = _raw_input  # type: ignore
from collections import OrderedDict

from django.contrib import admin

from edc_export.actions import export_as_csv_action
from edc_base.modeladmin.admin import BaseTabularInline

from ..forms import MaternalArvPostForm, MaternalArvPostMedForm, MaternalArvPostAdhForm
from ..models import MaternalVisit, MaternalArvPost, MaternalArvPostMed, MaternalArvPostAdh
from .base_maternal_model_admin import BaseMaternalModelAdmin


class MaternalArvPostModInlineAdmin(BaseTabularInline):
    """Inline editor for MaternalArvPostMed rows on the MaternalArvPost page."""

    model = MaternalArvPostMed
    form = MaternalArvPostMedForm
    extra = 1


class MaternalArvPostModAdmin(BaseMaternalModelAdmin):
    """Admin for ARV modification records (registered for MaternalArvPostMed).

    The CSV export flattens related registered-subject fields via the
    Django double-underscore lookup paths given in extra_fields.
    """

    form = MaternalArvPostMedForm

    list_display = ('maternal_arv_post', 'arv_code', 'dose_status', 'modification_date', 'modification_code')

    radio_fields = {
        "arv_code": admin.VERTICAL,
        "dose_status": admin.VERTICAL,
        "modification_code": admin.VERTICAL,
    }

    actions = [
        export_as_csv_action(
            description="CSV Export of Maternal ARV Post with list",
            fields=[],
            delimiter=',',
            exclude=['created', 'modified', 'user_created', 'user_modified', 'revision', 'id',
                     'hostname_created', 'hostname_modified'],
            extra_fields=OrderedDict(
                {'subject_identifier': 'maternal_arv_post__maternal_visit__appointment__registered_subject__subject_identifier',
                 'gender': 'maternal_arv_post__maternal_visit__appointment__registered_subject__gender',
                 'dob': 'maternal_arv_post__maternal_visit__appointment__registered_subject__dob',
                 'on_arv_since': 'maternal_arv_post__on_arv_since',
                 'on_arv_reason': 'maternal_arv_post__on_arv_reason',
                 'on_arv_reason_other': 'maternal_arv_post__on_arv_reason_other',
                 'arv_status': 'maternal_arv_post__arv_status',
                 }),
        )]

admin.site.register(MaternalArvPostMed, MaternalArvPostModAdmin)


class MaternalArvPostAdmin(BaseMaternalModelAdmin):
    """Admin for the MaternalArvPost model, with inline medication rows."""

    form = MaternalArvPostForm

    fields = (
        "maternal_visit",
        "on_arv_since",
        "on_arv_reason",
        "on_arv_reason_other",
        "arv_status")
    radio_fields = {
        "on_arv_since": admin.VERTICAL,
        "on_arv_reason": admin.VERTICAL,
        "arv_status": admin.VERTICAL}
    inlines = [MaternalArvPostModInlineAdmin, ]

    actions = [
        export_as_csv_action(
            description="CSV Export of Maternal ARV Post",
            fields=[],
            delimiter=',',
            exclude=['created', 'modified', 'user_created', 'user_modified', 'revision', 'id',
                     'hostname_created', 'hostname_modified'],
            extra_fields=OrderedDict(
                {'subject_identifier': 'maternal_visit__appointment__registered_subject__subject_identifier',
                 'gender': 'maternal_visit__appointment__registered_subject__gender',
                 'dob': 'maternal_visit__appointment__registered_subject__dob',
                 }),
        )]

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Restrict the maternal_visit choices to the visit passed in the
        # querystring (the add-form is opened from a specific visit).
        if db_field.name == "maternal_visit":
            if request.GET.get('maternal_visit'):
                kwargs["queryset"] = MaternalVisit.objects.filter(id=request.GET.get('maternal_visit'))
        return super(MaternalArvPostAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)

admin.site.register(MaternalArvPost, MaternalArvPostAdmin)


class MaternalArvPostAdhAdmin(BaseMaternalModelAdmin):
    """Admin for ARV adherence records (MaternalArvPostAdh)."""

    form = MaternalArvPostAdhForm

    fields = (
        "maternal_visit",
        "missed_doses",
        "missed_days",
        "missed_days_discnt",
        "comment")

    actions = [
        export_as_csv_action(
            description="CSV Export of Maternal ARVs Post: Adherence",
            fields=[],
            delimiter=',',
            exclude=['created', 'modified', 'user_created', 'user_modified', 'revision', 'id',
                     'hostname_created', 'hostname_modified'],
            extra_fields=OrderedDict(
                {'subject_identifier': 'maternal_visit__appointment__registered_subject__subject_identifier',
                 'gender': 'maternal_visit__appointment__registered_subject__gender',
                 'dob': 'maternal_visit__appointment__registered_subject__dob',
                 'registered': 'maternal_visit__appointment__registered_subject__registration_datetime'}),
        )]

admin.site.register(MaternalArvPostAdh, MaternalArvPostAdhAdmin)
#!/usr/bin/env python
########################################################################
# File :    dirac-admin-sync-users-from-file
# Author :  Adrian Casajus
########################################################################
"""
Sync users in Configuration with the cfg contents.

Usage:
  dirac-admin-sync-users-from-file [options] ... UserCfg

Arguments:
  UserCfg:  Cfg FileName with Users as sections containing DN, Groups, and other properties as options

Example:
  $ dirac-admin-sync-users-from-file file_users.cfg
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division

from diraccfg import CFG
import DIRAC
from DIRAC.Core.Base import Script
from DIRAC.Core.Utilities.DIRACScript import DIRACScript

__RCSID__ = "$Id$"


@DIRACScript()
def main():
    """Parse the user cfg file and sync its users into the DIRAC Configuration.

    Exits with 0 on success, 1 if the cfg file cannot be parsed, 255 if the
    sync or commit fails.  With -t/--test, changes are not committed.
    """
    Script.registerSwitch("t", "test", "Only test. Don't commit changes")
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getExtraCLICFGFiles()

    if len(args) < 1:
        Script.showHelp()

    from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
    diracAdmin = DiracAdmin()
    exitCode = 0
    testOnly = False
    errorList = []

    for unprocSw in Script.getUnprocessedSwitches():
        if unprocSw[0] in ("t", "test"):
            testOnly = True

    try:
        usersCFG = CFG().loadFromFile(args[0])
    except Exception as e:
        # BUG FIX: list.append takes exactly one argument; the original
        # passed two, which raised TypeError instead of recording the
        # error.  The original also assigned an unused 'errorCode' name
        # instead of 'exitCode', so parse failures exited with status 0.
        errorList.append(("file open", "Can't parse file %s: %s" % (args[0], str(e))))
        exitCode = 1
    else:
        if not diracAdmin.csSyncUsersWithCFG(usersCFG):
            errorList.append(("modify users", "Cannot sync with %s" % args[0]))
            exitCode = 255

    if not exitCode and not testOnly:
        result = diracAdmin.csCommitChanges()
        if not result['OK']:
            errorList.append(("commit", result['Message']))
            exitCode = 255

    for error in errorList:
        print("ERROR %s: %s" % error)

    DIRAC.exit(exitCode)


if __name__ == "__main__":
    main()
# -*- coding: utf-8 -*-
from hangulize import *


class Finnish(Language):
    """For transcribing Finnish."""

    __iso639__ = {1: 'fi', 2: 'fin', 3: 'fin'}
    # Temporary marker characters used internally by the rewrite rules.
    __tmp__ = ',;%'

    # Convention inside the rules: A stands for ä, O stands for ö.
    vowels = 'aAeioOuy'
    # Obstruents, used for the {<ob>} context in coda rules below.
    ob = 'bdfgkpstT'

    notation = Notation([
        # Convention: A = ä, O = ö
        # -- Romanization / consonant normalization --
        ('å', 'o'),
        ('ä', 'A'),
        ('ö', 'O'),
        ('w', 'v'),
        ('xx', 'x'),
        ('x', 'ks'),
        ('z', 's'),
        ('ds', 'T'),
        ('ts', 'T'),
        ('c{e|i|y}', 's'),
        ('c', 'k'),
        ('q', 'k'),
        ('ng', 'N'),
        ('nk', 'Nk'),
        ('mn{@}', 'm,n'),
        ('mn', 'm'),
        ('th', 't'),
        # -- j: glide before vowels, vowel otherwise --
        ('^j{@}', 'J'),
        ('{@}j{@}', 'J'),
        ('{h|s|T}j', '%J'),
        ('j', 'i'),
        # -- Degemination of doubled letters --
        ('aa', 'a'),
        ('bb', 'b'),
        ('dd', 'd'),
        ('ee', 'e'),
        ('AA', 'A'),
        ('ff', 'f'),
        ('gg', 'g'),
        ('hh', 'h'),
        ('ii', 'i'),
        ('jj', 'j'),
        ('kk', 'k'),
        ('ll', 'l'),
        ('{@}mm{@}', 'm,m'),
        ('mm', 'm'),
        ('{@}nn{@}', 'n,n'),
        ('nn', 'n'),
        ('oo', 'o'),
        ('pp', 'p'),
        ('rr', 'r'),
        ('ss', 's'),
        ('tt', 't'),
        ('uu', 'u'),
        ('vv', 'v'),
        ('yy', 'y'),
        ('zz', 'z'),
        # -- Coda consonants before obstruents ("," marks a final) --
        ('{@}b{<ob>}', 'p,'),
        ('{@}g{<ob>}', 'k,'),
        ('{@}k{<ob>}', 'k,'),
        ('{@}p{<ob>}', 'p,'),
        ('{@}t{<ob>}', 't,'),
        # -- l/m/n syllabification (";" marks an onset) --
        ('^l', 'l;'),
        ('^m', 'm;'),
        ('^n', 'n;'),
        ('l$', 'l,'),
        ('m$', 'm,'),
        ('n$', 'n,'),
        ('l{@|m,|n,|N}', 'l;'),
        ('{,}l', 'l;'),
        ('m{@}', 'm;'),
        ('n{@}', 'n;'),
        ('l', 'l,'),
        ('m', 'm,'),
        ('n', 'n,'),
        ('N', 'N,'),
        # -- Clean up marker sequences --
        (',,', ','),
        (',;', None),
        (',l,', 'l,'),
        (',m,', 'm,'),
        (',n,', 'n,'),
        (',N,', 'N,'),
        ('l{m;|n;}', 'l,'),
        (';', None),
        # -- Map consonants to Hangul jamo --
        ('b', Choseong(B)),
        ('d', Choseong(D)),
        ('f', Choseong(P)),
        ('g', Choseong(G)),
        ('h', Choseong(H)),
        ('k,', Jongseong(G)),
        ('k', Choseong(K)),
        ('^l', Choseong(L)),
        ('{,|-}l', Choseong(L)),
        ('-', None),
        ('l,', Jongseong(L)),
        ('l', Jongseong(L), Choseong(L)),
        ('m,', Jongseong(M)),
        ('m', Choseong(M)),
        ('n,', Jongseong(N)),
        ('n', Choseong(N)),
        ('N', Jongseong(NG)),
        ('p,', Jongseong(B)),
        ('p', Choseong(P)),
        ('r', Choseong(L)),
        ('s', Choseong(S)),
        ('t,', Jongseong(S)),
        ('t', Choseong(T)),
        ('T', Choseong(C)),
        ('v', Choseong(B)),
        ('%', Choseong(NG)),
        # -- Map vowels (and J+vowel glide combinations) to Hangul jamo --
        ('Ja', Jungseong(YA)),
        ('JA', Jungseong(YAE)),
        ('Je', Jungseong(YE)),
        ('Ji', Jungseong(I)),
        ('Jo', Jungseong(YO)),
        ('JO', Jungseong(OE)),
        ('Ju', Jungseong(YU)),
        ('Jy', Jungseong(WI)),
        ('a', Jungseong(A)),
        ('A', Jungseong(AE)),
        ('e', Jungseong(E)),
        ('i', Jungseong(I)),
        ('o', Jungseong(O)),
        ('u', Jungseong(U)),
        ('y', Jungseong(WI)),
        ('O', Jungseong(OE)),
    ])

    def normalize(self, string):
        # Fold the accented capitals down to the lowercase forms the
        # notation rules above expect.
        return normalize_roman(string, {
            'Å': 'å', 'Ǻ': 'å', 'ǻ': 'å', 'Ä': 'ä', 'Ö': 'ö'
        })


__lang__ = Finnish
#!/usr/bin/python
# NOTE: this is Python 2 code (print statements, raw_input).
import os.path
import subprocess
import sys
import urllib  # NOTE(review): appears unused — candidate for removal

# Local file caching the student's API key between submissions.
KEY_FILE = "submit.token"


def main(filename):
    """Upload *filename* to the 6.858 handin service using the cached API key.

    Prompts for (and stores) the key on first use, then shells out to curl
    for the multipart upload.
    """
    # Prompt for key if missing
    if not os.path.exists(KEY_FILE):
        print "Please visit http://css.csail.mit.edu/6.858/2014/labs/handin.html"
        print "and enter your API key."
        key = raw_input("Key: ").strip()
        with open(KEY_FILE, "w") as f:
            f.write(key + "\n")
        print "API key written to %s" % KEY_FILE

    # Read the key.
    with open(KEY_FILE) as f:
        key = f.read().strip()

    # Shell out to curl. urllib2 doesn't deal with multipart attachments. Throw
    # away the output; you just get a random HTML page.
    # check_call raises CalledProcessError on a non-2xx response (-f flag).
    with open("/dev/null", "a") as null:
        subprocess.check_call(["curl", "-f",
                               "-F", "file=@%s" % filename,
                               "-F", "key=%s" % key,
                               "http://6858.scripts.mit.edu/submit/handin.py/upload"],
                              stdout=null, stderr=null)
    print "Submitted %s." % filename
    print "Please visit http://css.csail.mit.edu/6.858/2014/labs/handin.html"
    print "to verify the upload."


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print "Usage: %s TARBALL" % sys.argv[0]
        sys.exit(1)
    main(sys.argv[1])
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors # # This file is part of Alignak. # # Alignak is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Alignak is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Alignak. If not, see <http://www.gnu.org/licenses/>. # # # This file incorporates work covered by the following copyright and # permission notice: # # Copyright (C) 2009-2014: # Jean Gabes, naparuba@gmail.com # Sebastien Coavoux, s.coavoux@free.fr # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it wil
l be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#

from alignak_test import *


class TestServiceDescriptionInheritance(AlignakTest):
    """Check that a service defined through inheritance keeps its
    service_description on the host(s) it is applied to."""

    def setUp(self):
        # Load the configuration fixture dedicated to this test case.
        self.setup_with_file('etc/alignak_service_description_inheritance.cfg')

    def test_service_description_inheritance(self):
        """The inherited SSH service must be findable on its single host."""
        self.print_header()
        svc = self.sched.services.find_srv_by_name_and_hostname("MYHOST", "SSH")
        self.assertIsNotNone(svc)

    def test_service_description_inheritance_multihosts(self):
        """The inherited SSH service must exist on each host of a multi-host definition."""
        self.print_header()
        for hname in ["MYHOST2", "MYHOST3"]:
            svc = self.sched.services.find_srv_by_name_and_hostname(hname, "SSH")
            self.assertIsNotNone(svc)


if __name__ == '__main__':
    unittest.main()
"""Misc helper functions for extracting morphological info from CLTK data structures.
"""

from typing import List, Optional, Tuple, Union

from cltk.core.data_types import Word
from cltk.core.exceptions import CLTKException
from cltk.morphology.universal_dependencies_features import (
    NOMINAL_FEATURES,
    VERBAL_FEATURES,
    MorphosyntacticFeature,
)

# Every Universal Dependencies feature a ``Word`` may carry: nominal then verbal.
ALL_POSSIBLE_FEATURES = NOMINAL_FEATURES + VERBAL_FEATURES


def get_pos(word: Optional[Word]) -> Union[str, None]:
    """Take word, return structured info.

    Returns the name of the word's part-of-speech tag, or ``None`` when
    no word is given.
    """
    if not word:
        return None
    return word.pos.name


def get_features(
    word: Optional[Word],
    prepend_to_label: Optional[str] = None,
) -> Tuple[List[str], List[Union[str, int, float, None]]]:
    """Take a word, return a list of feature labels.

    Returns a pair ``(labels, values)`` of equal length: one lower-cased
    label per feature in ``ALL_POSSIBLE_FEATURES`` and the name of the
    word's value for that feature (``None`` when no word was given or the
    word does not carry the feature). When ``prepend_to_label`` is given
    it is prefixed to every label.
    """
    features_present = list()  # type: List[Union[str, None]]
    feature_variables = list()  # type: List[str]
    for possible_feature in ALL_POSSIBLE_FEATURES:
        feature_variables.append(str(possible_feature).lower())
        if not word:
            # No word at all: keep the label, record an empty value.
            features_present.append(None)
            continue
        try:
            # Word.__getattr__ is expected to raise CLTKException for a
            # feature the word does not carry (caught below).
            feat = word.__getattr__(possible_feature)[0]  # type: MorphosyntacticFeature
            features_present.append(str(feat.name))
        except CLTKException:
            features_present.append(None)
    if prepend_to_label:
        feature_variables = [prepend_to_label + name for name in feature_variables]
    return feature_variables, features_present
"""A client for the REST API of imeji instances."""

import logging
from collections import OrderedDict

import requests
from six import string_types

from pyimeji import resource
from pyimeji.config import Config

log = logging.getLogger(__name__)


class ImejiError(Exception):
    """Raised when the imeji server answers with an unexpected HTTP status."""

    def __init__(self, message, error):
        super(ImejiError, self).__init__(message)
        # The server may return either a plain payload or a JSON object with
        # an 'error' member; keep whichever detail is available.
        self.error = error.get('error') if isinstance(error, dict) else error


class GET(object):
    """Handle GET requests.

    This includes requests

    - to retrieve single objects,
    - to fetch lists of object references (which are returned as `OrderedDict`
      mapping object `id` to additional metadata present in the response).
    """

    def __init__(self, api, name):
        """Initialize a handler.

        :param api: An Imeji API instance.
        :param name: Name specifying the kind of object(s) to retrieve. We check whether\
        this name has a plural "s" to determine if a list is to be retrieved.
        """
        self._list = name.endswith('s')
        # Resolve the matching resource class, e.g. 'items'/'item' -> resource.Item.
        self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
        self.api = api
        self.name = name
        self.path = name
        # The HTTP path always uses the plural form.
        if not self._list:
            self.path += 's'

    def __call__(self, id='', **kw):
        """Calling the handler initiates an HTTP request to the imeji server.

        :param id: If a single object is to be retrieved it must be specified by id.
        :return: An OrderedDict mapping id to additional metadata for lists, a \
        :py:class:`pyimeji.resource.Resource` instance for single objects.
        """
        if not self._list and not id:
            raise ValueError('no id given')
        if id:
            id = '/' + id
        res = self.api._req('/%s%s' % (self.path, id), params=kw)
        if not self._list:
            # Single object: wrap the decoded JSON in its resource class.
            return self.rsc(res, self.api)
        return OrderedDict([(d['id'], d) for d in res])


class Imeji(object):
    """The client.

    >>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
    >>> collection_id = list(api.collections().keys())[0]
    >>> collection = api.collection(collection_id)
    >>> collection = api.create('collection', title='the new collection')
    >>> item = collection.add_item(fetchUrl='http://example.org')
    >>> item.delete()
    """

    def __init__(self, cfg=None, service_url=None):
        # Explicit arguments win over values from the configuration file.
        self.cfg = cfg or Config()
        self.service_url = service_url or self.cfg.get('service', 'url')
        user = self.cfg.get('service', 'user', default=None)
        password = self.cfg.get('service', 'password', default=None)
        self.session = requests.Session()
        # Only authenticate when both credentials are configured.
        if user and password:
            self.session.auth = (user, password)

    def _req(self, path, method='get', json=True, assert_status=200, **kw):
        """Make a request to the API of an imeji instance.

        :param path: HTTP path.
        :param method: HTTP method.
        :param json: Flag signalling whether the response should be treated as JSON.
        :param assert_status: Expected HTTP response status of a successful request.
        :param kw: Additional keyword parameters will be handed through to the \
        appropriate function of the requests library.
        :return: The return value of the function of the requests library or a decoded \
        JSON object/array.
        """
        method = getattr(self.session, method.lower())
        res = method(self.service_url + '/rest' + path, **kw)
        # Remember the status before ``res`` may be replaced by decoded JSON.
        status_code = res.status_code
        if json:
            try:
                res = res.json()
            except ValueError:  # pragma: no cover
                log.error(res.text[:1000])
                raise
        if assert_status:
            if status_code != assert_status:
                log.error(
                    'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
                log.error(res.text[:1000] if hasattr(res, 'text') else res)
                raise ImejiError('Unexpected HTTP status code', res)
        return res

    def __getattr__(self, name):
        """Names of resource classes are accepted and resolved as dynamic attribute names.

        This allows convenient retrieval of resources as
        api.<resource-class>(id=<id>), or api.<resource-class>s(q='x').
        """
        return GET(self, name)

    def create(self, rsc, **kw):
        # Accept either a resource instance or the name of a resource class.
        if isinstance(rsc, string_types):
            cls = getattr(resource, rsc.capitalize())
            rsc = cls(kw, self)
        return rsc.save()

    def delete(self, rsc):
        # Delegate deletion to the resource object itself.
        return rsc.delete()

    def update(self, rsc, **kw):
        # Apply the given attribute updates, then persist them.
        for k, v in kw.items():
            setattr(rsc, k, v)
        return rsc.save()
# -*- coding: utf-8 -*-

import pytest
from flask import url_for


def test_config(app):
    """Sanity-check the development configuration of the ``app`` fixture."""
    # Bug fix: assertion messages are shown on FAILURE, so they must state
    # what is wrong — the old messages asserted the opposite of the failure
    # (e.g. printed 'App is in debug mode' exactly when it was not).
    assert app.debug, 'App is not in debug mode'
    assert not app.config.get('MINIFY_HTML'), 'App minifies html'
    assert app.config.get('ASSETS_DEBUG'), 'App does not build assets'
    assert app.config.get('YARR_URL'), 'App doesn\'t have Yarr! URL specified'


def test_routes(client):
    """Smoke-test the basic routes of the app."""
    assert client.get(url_for('index')).status_code == 200
    # A search without a query is expected to redirect instead of rendering.
    assert client.get(url_for('search')).status_code == 302, \
        'Empty query should throw redirect'
#!/usr/bin/env python import unittest from test import test_support import socket import urllib import sys import os import time mimetools = test_support.import_module("mimetools", deprecated=True) def _open_with_retry(func, host, *args, **kwargs): # Connecting to remote hosts is flaky. Make it more robust # by retrying the connection several times. for i in range(3): try: return func(host, *args, **kwargs) except IOError, last_exc: continue except: raise raise last_exc class URLTimeoutTest(unittest.TestCase): TIMEOUT = 10.0 def setUp(self): socket.setdefaulttimeout(self.TIMEOUT) def tearDown(self): socket.setdefaulttimeout(None) def testURLread(self): f = _open_with_retry(urllib.urlopen, "http://www.python.org/") x = f.read() class urlopenNetworkTests(unittest.TestCase): """Tests urllib.urlopen using the network. These tests are not exhaustive. Assuming that testing using files does a good job overall of some of the basic interface features. There are no tests exercising the optional 'data' and 'proxies' arguments. No tests for transparent redirection have been written. setUp is not used for always constructing a connection to http://www.python.org/ since there a few tests that don't use that address and making a connection is expensive enough to warrant minimizing unneeded connections. """ def urlopen(self, *args): return _open_with_retry(urllib.urlopen, *args) def test_basic(self): # Simple test expected to pass. open_url = self.urlopen("http://www.python.org/") for attr in ("read", "readline", "readlines", "fileno", "close", "info", "geturl"): self.assertTrue(hasattr(open_url, attr), "object returned from " "urlopen lacks the %s attribute" % attr) try: self.assertTrue(open_url.read(), "calling 'read' failed") finally: open_url.close() def test_readlines(self): # Test both readline and readlines. 
open_url = self.urlopen("http://www.python.org/") try: self.assertIsInstance(open_url.readline(), basestring, "readline did not return a string") self.assertIsInstance(open_url.readlines(), list, "readlines did not return a list") finally: open_url.close() def test_info(self): # Test 'info'. open_url = self.urlopen("http://www.python.org/") try: info_obj = open_url.info() finally: open_url.close() self.assertIsInstance(info_obj, mimetools.Message, "object returned by 'info' is not an " "instance of mimetools.Message") self.assertEqual(info_obj.getsubtype(), "html") def test_geturl(self): # Make sure same URL as opened is returned by geturl. URL = "http://www.python.org/" open_url = self.urlopen(URL) try: gotten_url = open_url.geturl() finally: open_url.close() self.assertEqual(gotten_url, URL) def test_getcode(self): # test getcode() with the fancy opener to get 404 error codes URL = "http://www.python.org/XXXinvalidXXX" open_url = urllib.FancyURLopener().open(URL) try: code = open_url.getcode() finally: open_url.close() self.assertEqual(code, 404) def test_fileno(self): if (sys.platform in ('win32',) or not hasattr(os, 'fdopen')): # On Windows, socket handles are not file descriptors; this # test can't pass on Windows. return # Make sure fd returned by fileno is valid. open_url = self.urlopen("http://www.python.org/") fd = open_url.fileno() FILE = os.fdopen(fd) try: self.assertTrue(FILE.read(), "reading from file created using fd " "returned by fileno failed") finally: FILE.close() def test_bad_address(self): # Make sure proper exception is raised when connecting to a bogus # address. 
bogus_domain = "sadflkjsasf.i.nvali.d" try: socket.gethostbyname(bogus_domain) except socket.gaierror: pass else: # This happens with some overzealous DNS providers such as OpenDNS self.skipTest("%r should not resolve for test to work" % bogus_domain) self.assertRaises(IOError, # SF patch 809915: In Sep 2003, VeriSign started # highjacking invalid .com and .net addresses to # boost traffic to their own site. This test # started failing then. One hopes the .invalid # domain will be spared to serve its defined # purpose.
# urllib.urlopen, "http://www.sadflkjsasadf.com/")
urllib.urlopen, "http://sadflkjsasf.i.nvali.d/") class urlretrieveNetworkTests(unittest.TestCase): """Tests urllib.urlretrieve using the network.""" def urlretrieve(self, *args): return _open_with_retry(urllib.urlretrieve, *args) def test_basic(self): # Test basic functionality. file_location,info = self.urlretrieve("http://www.python.org/") self.assertTrue(os.path.exists(file_location), "file location returned by" " urlretrieve is not a valid path") FILE = file(file_location) try: self.assertTrue(FILE.read(), "reading from the file location returned" " by urlretrieve failed") finally: FILE.close() os.unlink(file_location) def test_specified_path(self): # Make sure that specifying the location of the file to write to works. file_location,info = self.urlretrieve("http://www.python.org/", test_support.TESTFN) self.assertEqual(file_location, test_support.TESTFN) self.assertTrue(os.path.exists(file_location)) FILE = file(file_location) try: self.assertTrue(FILE.read(), "reading from temporary file failed") finally: FILE.close() os.unlink(file_location) def test_header(self): # Make sure header returned as 2nd value from urlretrieve is good. file_location, header = self.urlretrieve("http://www.python.org/") os.unlink(file_location) self.assertIsInstance(header, mimetools.Message, "header is not an instance of mimetools.Message") def test_data_header(self): logo = "http://www.python.org/community/logos/python-logo-master-v3-TM.png" file_location, fileheaders = self.urlretrieve(logo) os.unlink(file_location) datevalue = fileheaders.getheader('Date') dateformat = '%a, %d %b %Y %H:%M:%S GMT' try: time.strptime(datevalue, dateformat) except ValueError: self.fail('Date value not in %r format', dateformat) def test_main(): test_support.requires('network') with test_support.check_py3k_warnings( ("urllib.urlopen.. has been removed", DeprecationWarning)): test_support.run_unittest(URLTimeoutTest, urlopenNetworkTests, urlretrieveNetworkTests) if __name__ == "__main__": test_main()
"""Packaging metadata for python-auspost-pac."""

from setuptools import find_packages, setup

# Single-source the version from the package itself.
from auspost_pac import __version__ as version

setup(
    name='python-auspost-pac',
    version=version,
    license='BSD',
    author='Sam Kingston',
    author_email='sam@sjkwi.com.au',
    description='Python API for Australia Post\'s Postage Assessment Calculator (pac).',
    url='https://github.com/sjkingo/python-auspost-pac',
    # Runtime dependencies only; no extras or test requirements declared.
    install_requires=[
        'cached_property',
        'frozendict',
        'requests',
    ],
    packages=find_packages(),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python',
    ],
)
# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

class Solution:
    def subtreeWithAllDeepest(self, root: TreeNode) -> TreeNode:
        """Return the smallest subtree containing all of the deepest nodes.

        Single post-order walk: each call reports the answer for its own
        subtree together with that subtree's height. When the two child
        heights differ, the deeper side's answer bubbles up; when they are
        equal, the current node is the lowest common ancestor of both
        deepest sets. O(n) time, O(h) recursion depth.
        """
        def walk(node):
            # Returns (deepest-subtree root, height of this subtree).
            if node is None:
                return None, 0
            left_ans, left_h = walk(node.left)
            right_ans, right_h = walk(node.right)
            if left_h == right_h:
                return node, left_h + 1
            if left_h > right_h:
                return left_ans, left_h + 1
            return right_ans, right_h + 1

        answer, _ = walk(root)
        return answer
# Emitter registry: maps a format name to (emitter function, content type).
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.utils import simplejson
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_unicode

EMITTERS = {}


def get_emitter(format):
    '''Return the ``(emitter, content_type)`` pair registered for ``format``.

    Raises ``ValueError`` when no emitter is registered under that name.
    '''
    try:
        return EMITTERS[format]
    except KeyError:
        raise ValueError('No emitter registered for type %s' % format)


def register_emitter(name=None, content_type='text/plain'):
    '''Decorator to register an emitter.

    Parameters::

     - ``name``: name of emitter ('json', 'xml', ...); defaults to the
       decorated function's name
     - ``content_type``: content type to serve response as
    '''
    def inner(func):
        EMITTERS[name or func.__name__] = (func, content_type)
        # Bug fix: a decorator must return the function, otherwise the
        # decorated module-level names (json, xml) are rebound to None.
        return func
    return inner


@register_emitter(content_type='application/json; charset=utf-8')
def json(request, data):
    '''Serialize ``data`` as JSON, honouring an optional JSONP ``callback``.'''
    cb = request.GET.get('callback')
    data = simplejson.dumps(data, cls=DateTimeAwareJSONEncoder, ensure_ascii=False, indent=4)
    # Wrap the payload in the JSONP callback when one was supplied.
    return cb and ('%s(%s)' % (cb, data)) or data


@register_emitter(content_type='text/xml; charset=utf-8')
def xml(request, data):
    '''Serialize ``data`` as an XML document rooted at <response>.'''
    stream = StringIO()
    xml = SimplerXMLGenerator(stream, 'utf-8')
    xml.startDocument()
    xml.startElement('response', {})
    to_xml(xml, data)
    xml.endElement('response')
    xml.endDocument()
    return stream.getvalue()


def to_xml(xml, data):
    '''Recursively write ``data`` through the ``xml`` generator.

    Lists/tuples become repeated <resource> elements, dicts become one
    element per key, and anything else is emitted as character data.
    '''
    if isinstance(data, (list, tuple)):
        for item in data:
            xml.startElement('resource', {})
            to_xml(xml, item)
            xml.endElement('resource')
    elif isinstance(data, dict):
        for key, value in data.iteritems():
            xml.startElement(key, {})
            to_xml(xml, value)
            xml.endElement(key)
    else:
        xml.characters(smart_unicode(data))
"""Virtual environment relocatable mixin."""

from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

import os
import shutil


class RelocateMixin(object):
    """Mixin which adds the ability to relocate a virtual environment."""

    def relocate(self, destination):
        """Configure the virtual environment for another path.

        Args:
            destination (str): The target path of the virtual environment.

        Note:
            This does not actually move the virtual environment. Is only
            rewrites the metadata required to support a move.
        """
        # Point every activate script at the new prefix.
        for activate in self.bin.activates:
            activate.vpath = destination
        # Rewrite the interpreter shebangs of all installed entry points.
        for binfile in self.bin.files:
            shebang = binfile.shebang
            if shebang:
                shebang = shebang.strip().split(os.linesep)
                if len(shebang) == 1 and (
                        "python" in shebang[0] or "pypy" in shebang[0]
                ):
                    # Plain one-line shebang: replace it wholesale.
                    binfile.shebang = "#!{0}".format(
                        os.path.join(destination, "bin", "python")
                    )
                elif len(shebang) == 3 and (
                        "python" in shebang[1] or "pypy" in shebang[1]
                ):
                    # NOTE(review): the 3-line form is presumably the shell
                    # 'exec' trampoline used for very long paths — only the
                    # middle line carries the interpreter. TODO confirm.
                    shebang[1] = "'''exec' {0} \"$0\" \"$@\"".format(
                        os.path.join(destination, "bin", "python")
                    )
                    binfile.shebang = os.linesep.join(shebang)

        # Even though wheel is the official format, there are still several
        # cases in the wild where eggs are being installed. Eggs come with the
        # possibility of .pth files. Each .pth file contains the path to where
        # a module can be found. To handle them we must recurse the entire
        # venv file tree since they can be either at the root of the
        # site-packages, bundled within an egg directory, or both.
        original_path = self.path
        original_abspath = self.abspath
        # Iterative depth-first traversal of the venv directory tree.
        dirs = [self]
        while dirs:
            current = dirs.pop()
            dirs.extend(current.dirs)
            for file_ in current.files:
                if file_.abspath.endswith(".pth"):
                    content = ""
                    with open(file_.abspath, "r") as source:
                        # .pth files are almost always very small. Because of
                        # this we read the whole file as a convenience.
                        content = source.read()
                    # It's not certain whether the .pth will have a relative
                    # or absolute path so we replace both in order of most to
                    # least specific.
                    content = content.replace(original_abspath, destination)
                    content = content.replace(original_path, destination)
                    with open(file_.abspath, "w") as source:
                        source.write(content)

    def move(self, destination):
        """Reconfigure and move the virtual environment to another path.

        Args:
            destination (str): The target path of the virtual environment.

        Note:
            Unlike `relocate`, this method *will* move the virtual to the
            given path.
        """
        # Rewrite metadata first so the venv is consistent once moved.
        self.relocate(destination)
        shutil.move(self.path, destination)
        self._path = destination
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from pandac.PandaModules import *
from DistributedNPCToonBase import *
from toontown.chat.ChatGlobals import *
from toontown.estate import BankGUI, BankGlobals
from toontown.nametag.NametagGlobals import *
from toontown.toonbase import TTLocalizer


class DistributedNPCBanker(DistributedNPCToonBase):
    """Client-side NPC banker: drives the camera move, speech bubbles and
    the banking GUI in response to server 'movie' updates."""

    def __init__(self, cr):
        DistributedNPCToonBase.__init__(self, cr)
        self.jellybeanJar = None
        self.bankGUI = None

    def disable(self):
        # Tear down listeners, pending tasks and the GUI before disabling.
        self.ignoreAll()
        taskMgr.remove(self.uniqueName('popupBankingGUI'))
        taskMgr.remove(self.uniqueName('lerpCamera'))
        if self.bankGUI:
            self.bankGUI.destroy()
        self.av = None
        base.localAvatar.posCamera(0, 0)
        DistributedNPCToonBase.disable(self)

    def resetClerk(self):
        # Return the NPC to its idle state between interactions.
        self.ignoreAll()
        taskMgr.remove(self.uniqueName('popupBankingGUI'))
        taskMgr.remove(self.uniqueName('lerpCamera'))
        if self.bankGUI:
            self.bankGUI.destroy()
        self.clearMat()
        self.startLookAround()
        self.detectAvatars()

    def handleCollisionSphereEnter(self, collEntry):
        # A toon walked into the banker's trigger sphere.
        self.sendAvatarEnter()
        self.nametag3d.setDepthTest(0)
        base.cr.playGame.getPlace().setState('purchase')
        self.nametag3d.setBin('fixed', 0)

    def sendAvatarEnter(self):
        # Tell the server this avatar started interacting.
        self.sendUpdate('avatarEnter')

    def setMovie(self, mode, avId, timestamp):
        """Server-driven state change for the banking interaction."""
        isLocalToon = avId == base.localAvatar.doId
        timeStamp = globalClockDelta.localElapsedTime(timestamp)
        # 60-second interaction budget, minus network elapsed time.
        self.remain = 60 - timeStamp
        self.resetClerk()
        if mode == BankGlobals.BANK_MOVIE_CLEAR:
            if not avId:
                self.setChatAbsolute('', CFSpeech | CFTimeout)
            if isLocalToon:
                self.freeAvatar()
        elif mode == BankGlobals.BANK_MOVIE_TIMEOUT:
            if isLocalToon:
                self.cleanupBankingGUI()
                self.freeAvatar()
            self.setChatAbsolute(TTLocalizer.STOREOWNER_TOOKTOOLONG, CFSpeech | CFTimeout)
        elif mode == BankGlobals.BANK_MOVIE_DEPOSIT:
            if isLocalToon:
                self.cleanupBankingGUI()
                self.freeAvatar()
            self.setChatAbsolute(TTLocalizer.STOREOWNER_GOODBYE, CFSpeech | CFTimeout)
        elif mode == BankGlobals.BANK_MOVIE_GUI:
            av = base.cr.doId2do.get(avId)
            if av:
                self.setupAvatars(av)
            if isLocalToon:
                # Swing the camera towards the banker, then pop the GUI.
                self.hideNametag2d()
                base.camera.wrtReparentTo(render)
                seq = Sequence((base.camera.posQuatInterval(1, Vec3(-5, 9, self.getHeight() - 0.5), Vec3(-150, -2, 0), other=self, blendType='easeOut', name=self.uniqueName('lerpCamera'))))
                seq.start()
                taskMgr.doMethodLater(2.0, self.popupBankingGUI, self.uniqueName('popupBankingGUI'))
            self.setChatAbsolute(TTLocalizer.STOREOWNER_BANKING, CFSpeech | CFTimeout)

    def __handleBankingDone(self, transactionAmount):
        # Forward the chosen transfer amount to the server.
        self.sendUpdate('transferMoney', [transactionAmount])

    def popupBankingGUI(self, task):
        # Deferred task: show the bank dialog and listen for its result.
        self.accept('bankDone', self.__handleBankingDone)
        self.bankGUI = BankGUI.BankGUI('bankDone')
        return task.done

    def cleanupBankingGUI(self):
        if self.bankGUI:
            self.bankGUI.destroy()
            self.bankGUI = None

    def freeAvatar(self):
        # Give camera and input control back to the local toon.
        base.localAvatar.posCamera(0, 0)
        if base.cr.playGame.getPlace():
            base.cr.playGame.getPlace().setState('walk')
        self.showNametag2d()
from . import common
import hglib


class test_branches(common.basetest):
    """Tests for ``client.branches()``."""

    def test_empty(self):
        # A fresh repository has no branches at all.
        self.assertEquals(self.client.branches(), [])

    def test_basic(self):
        # One commit on the default branch, one on a new branch 'foo'.
        self.append('a', 'a')
        rev0 = self.client.commit('first', addremove=True)
        self.client.branch('foo')
        self.append('a', 'a')
        rev1 = self.client.commit('second')
        branches = self.client.branches()

        expected = []
        # NOTE(review): commit() appears to return a (rev, node) pair; only
        # the rev part is used to look the changeset up again — confirm.
        for r, n in (rev1, rev0):
            r = self.client.log(r)[0]
            expected.append((r.branch, int(r.rev), r.node[:12]))

        self.assertEquals(branches, expected)

    def test_active_closed(self):
        # TODO: not implemented yet.
        pass
from django.conf.urls import patterns, include, url

# URL routes for the 'clientes' app: list, edit and delete views.
# The captured (\d+) group is passed as positional argument to the view.
urlpatterns = [
    url(r'^$', 'clientes.views.clientes', name='clientes'),
    url(r'^edit/(\d+)$', 'clientes.views.clientes_edit', name='editCliente'),
    url(r'^delete/(\d+)$', 'clientes.views.clientes_delete', name='deleteCliente'),
]
from miasm2.core.asmblock import disasmEngine
from miasm2.arch.msp430.arch import mn_msp430


class dis_msp430(disasmEngine):
    """Disassembly engine for MSP430: binds the generic disasmEngine to
    the msp430 machine description."""

    def __init__(self, bs=None, **kwargs):
        # mn_msp430: the msp430 machine description; None: architecture
        # attribute (presumably unused for msp430 — TODO confirm);
        # bs: the binary stream to disassemble.
        super(dis_msp430, self).__init__(mn_msp430, None, bs, **kwargs)
# This file is part of Supysonic.
# Supysonic is a Python implementation of the Subsonic server API.
#
# Copyright (C) 2020 Alban 'spl0k' Féron
#
# Distributed under terms of the GNU AGPLv3 license.

from flask import request

from ..db import RadioStation

from . import get_entity, api_routing
from .exceptions import Forbidden, MissingParameter


@api_routing("/getInternetRadioStations")
def get_radio_stations():
    """List all radio stations, sorted by name. Open to any user."""
    query = RadioStation.select().sort_by(RadioStation.name)
    return request.formatter(
        "internetRadioStations",
        {"internetRadioStation": [p.as_subsonic_station() for p in query]},
    )


@api_routing("/createInternetRadioStation")
def create_radio_station():
    """Create a station from streamUrl/name (required) and homepageUrl. Admin only."""
    if not request.user.admin:
        raise Forbidden()

    stream_url, name, homepage_url = map(
        request.values.get, ("streamUrl", "name", "homepageUrl")
    )
    if stream_url and name:
        RadioStation(stream_url=stream_url, name=name, homepage_url=homepage_url)
    else:
        raise MissingParameter("streamUrl or name")

    return request.formatter.empty


@api_routing("/updateInternetRadioStation")
def update_radio_station():
    """Update an existing station; streamUrl and name are mandatory. Admin only."""
    if not request.user.admin:
        raise Forbidden()

    res = get_entity(RadioStation)
    stream_url, name, homepage_url = map(
        request.values.get, ("streamUrl", "name", "homepageUrl")
    )
    if stream_url and name:
        res.stream_url = stream_url
        res.name = name
        # homepageUrl is optional and only overwritten when provided.
        if homepage_url:
            res.homepage_url = homepage_url
    else:
        raise MissingParameter("streamUrl or name")

    return request.formatter.empty


@api_routing("/deleteInternetRadioStation")
def delete_radio_station():
    """Delete the station identified by the request's id parameter. Admin only."""
    if not request.user.admin:
        raise Forbidden()

    res = get_entity(RadioStation)
    res.delete()

    return request.formatter.empty
# -*- coding: UTF-8 -*-
# Syntax definition automatically generated by hljs2xt.py
# source: sml.js
name = 'SML'
file_patterns = ['*.sml', '*.ml']

# Built-in Standard ML types and type constructors.
built_in = (
    'array bool char exn int list option order real ref string '
    'substring vector unit word'
).split()

# Reserved words of the language.
keyword = (
    'abstype and andalso as case datatype do else end eqtype exception '
    'fn fun functor handle if in include infix infixr let local nonfix '
    'of op open orelse raise rec sharing sig signature struct structure '
    'then type val with withtype where while'
).split()

literal = [
    'true', 'false', 'NONE', 'SOME', 'LESS', 'EQUAL', 'GREATER', 'nil',
]


class comment:
    default_text_color = DELIMITER
    rules = [
        # ignore {'begin': {'pattern': "\\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|like)\\b", 'type': 'RegExp'}},
        ('doctag', [RE(r"(?:TODO|FIXME|NOTE|BUG|XXX):")]),
    ]


operator_escape = ('operator.escape', [RE(r"\\[\s\S]")])


class string:
    default_text_color = DELIMITER
    rules = [operator_escape]


number = [
    RE(r"\b(?:0[xX][a-fA-F0-9_]+[Lln]?|0[oO][0-7_]+[Lln]?|0[bB][01_]+[Lln]?|[0-9][0-9_]*(?:[Lln]|(?:\.[0-9_]*)?(?:[eE][-+]?[0-9_]+)?)?)"),
]

rules = [
    ('built_in', built_in),
    ('keyword', keyword),
    ('literal', literal),
    ('literal', [RE(r"\[(?:\|\|)?\]|\(\)")]),
    ('comment', RE(r"\(\*"), [RE(r"\*\)")], comment),
    ('symbol', [RE(r"'[A-Za-z_](?!')[\w']*")]),
    ('type', [RE(r"`[A-Z][\w']*")]),
    ('type', [RE(r"\b[A-Z][\w']*")]),
    # ignore {'begin': "[a-z_]\\w*'[\\w']*"},
    ('string', RE(r"'"), [RE(r"'")], string),
    ('string', RE(r"\""), [RE(r"\"")], string),
    ('number', number),
    # ignore {'begin': {'pattern': '[-=]>', 'type': 'RegExp'}},
]
return '0.0.0.0' except IOError as e: print(nic_name + 'is unacceptable !') return '0.0.0.0' finally: return '0.0.0.0' if nic_name != '': bind_ip = bind_nic() s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((bind_ip, 61440)) s.settimeout(3) SALT = '' IS_TEST = True # specified fields based on version CONF = "/etc/drcom.conf" UNLIMITED_RETRY = True EXCEPTION = False DEBUG = False #log saves to file LOG_PATH = '/var/log/drcom_client.log' if IS_TEST: DEBUG = True LOG_PATH = 'drcom_client.log' def log(*args, **kwargs): s = ' '.join(args) print s if DEBUG: with open(LOG_PATH,'a') as f: f.write(s + '\n') def challenge(svr,ran): while True: t = struct.pack("<H", int(ran)%(0xFFFF)) s.sendto("\x01\x02"+t+"\x09"+"\x00"*15, (svr, 61440)) try: data, address = s.recvfrom(1024) log('[challenge] recv',data.encode('hex')) except: log('[challenge] timeout, retrying...') continue if address == (svr, 61440): break else: continue log('[DEBUG] challenge:\n' + data.encode('hex')) if data[0] != '\x02': raise ChallengeException log('[challenge] challenge packet sent.') return data[4:8] def md5sum(s): m = md5() m.update(s) return m.digest() def dump(n): s = '%x' % n if len(s) & 1: s = '0' + s return s.decode('hex') # def ror(md5, pwd): # ret = '' # for i in range(len(pwd)): # x = ord(md5[i]) ^ ord(pwd[i]) # ret += chr(((x<<3)&0xFF) + (x>>5)) # return ret def keep_alive_package_builder(number,random,tail,type=1,first=False): data = '\x07'+ chr(number) + '\x28\x00\x0b' + chr(type) if first : data += '\x0f\x27' else: data += KEEP_ALIVE_VERSION data += '\x2f\x12' + '\x00' * 6 data += tail data += '\x00' * 4 #data += struct.pack("!H",0xdc02) if type == 3: foo = ''.join([chr(int(i)) for i in host_ip.split('.')]) # host_ip #CRC # edited on 2014/5/12, filled zeros to checksum # crc = packet_CRC(data+foo) crc = '\x00' * 4 #data += struct.pack("!I",crc) + foo + '\x00' * 8 data += crc + foo + '\x00' * 8 else: #packet type = 1 data 
+= '\x00' * 16 return data # def packet_CRC(s): # ret = 0 # for i in re.findall('..', s): # ret ^= struct.unpack('>h', i)[0] # ret &= 0xFFFF # ret = ret * 0x2c7 # return ret def keep_alive2(*args): #first keep_alive: #number = number (mod 7) #status = 1: first packet user sended # 2: first packet user recieved # 3: 2nd packet user sended # 4: 2nd packet user recieved # Codes for test tail = '' packet = '' svr = server ran = random.randint(0,0xFFFF) ran += random.randint(1,10) # 2014/10/15 add by latyas, maybe svr sends back a file packet svr_num = 0 packet = keep_alive_package_builder(svr_num,dump(ran),'\x00'*4,1,True) while True: log('[keep-alive2] send1',packet.encode('hex')) s.sendto(packet, (svr, 61440)) data, address = s.recvfrom(1024) log('[keep-alive2] recv1',data.encode('hex')) if data.startswith('\x07\x
00\x28\x00') or data.startswith('\x07' + chr(svr_num) + '\x28\x00'): break elif data[0] == '\x07' and data[2] == '\x10': log('[keep-alive2] recv file, resending..') svr_num = svr_num + 1 packet = keep_alive_package_builder(svr_num,dump(ran),'\x00'*4,1, False) else: log(
'[keep-alive2] recv1/unexpected',data.encode('hex')) #log('[keep-alive2] recv1',data.encode('hex')) ran += random.randint(1,10) packet = keep_alive_package_builder(svr_num, dump(ran),'\x00'*4,1,False) log('[keep-alive2] send2',packet.encode('hex')) s.sendto(packet, (svr, 61440)) while True: data, address = s.recvfrom(1024) if data[0] == '\x07': svr_num = svr_num + 1 break else: log('[keep-alive2] recv2/unexpected',data.encode('hex')) log('[keep-alive2] recv2',data.encode('hex')) tail = data[16:20] ran += random.randint(1,10) packet = keep_alive_package_builder(svr_num,dump(ran),tail,3,False) log('[keep-alive2] send3',packet.encode('hex')) s.sendto(packet, (svr, 61440)) while True: data, address = s.recvfrom(1024) if data[0] == '\x07': svr_num = svr_num + 1 break else: log('[keep-alive2] recv3/unexpected',data.encode('hex')) log('[keep-alive2] recv3',data.encode('hex')) tail = data[16:20] log("[keep-alive2] keep-alive2 loop was in daemon.") i = svr_num while True: try: ran += random.randint(1,10) packet = keep_alive_package_builder(i,dump(ran),tail,1,False) #log('DEBUG: keep_alive2,packet 4\n',packet.encode('hex')) log('[keep_alive2] send',str(i),packet.encode('hex')) s.sendto(packet, (svr, 61440)) data, address = s.recvfrom(1024) log('[keep_alive2] recv',data.encode('hex')) tail = data[16:20] #log('DEBUG: keep_alive2,packet 4 return\n',data.encode('hex')) ran += random.randint(1,10) packet = keep_alive_package_builder(i+1,dump(ran),tail,3,False) #log('DEBUG: keep_alive2,packet 5\n',packet.encode('hex')) s.sendto(packet, (svr, 61440)) log('[keep_alive2] send',str(i+1),packet.encode('hex')) data, address = s.recvfrom(1024) log('[keep_alive2] recv',data.encode('hex')) tail = data[16:20] #log('DEBUG: keep_alive2,packet 5 return\n',data.encode('hex')) i = (i+2) % 0xFF time.sleep(20) keep_alive1(*args) except: pass import re def checksum(s): ret = 1234 for i in re.findall('....', s): ret ^= int(i[::-1].encode('hex'), 16) ret = (1968 * ret) & 0xffffffff return 
struct.pack('<I', ret) def mkpkt(salt, usr, pwd, mac): data = '\x03\x01\x00'+chr(len(usr)+20) data += md5sum('\x03\x01'+salt+pwd) data += usr.ljust(36, '\x00') data += CONTROLCHECKSTATUS data += ADAPTERNUM data += dump(int(data[4:10].encode('hex'),16)^mac).rjust(6,'\x00') #mac xor md51 data += md5sum("\x01" + pwd + salt + '\x00'*4) #md52 data += '\x01' # number of ip #data += '\x0a\x1e\x16\x11' #your ip address1, 10.30.22.17 data += ''.join([chr(int(i)) for i in host_ip.split('.')]) #x.x.x.x -> data += '\00'*4 #your ipaddress 2 data += '\00'*4 #your ipaddress 3 data += '\00'*4 #your ipaddress 4 data += md5sum(data + '\x14\x00\x07\x0b')[:8] #md53 data += IPDOG data += '\x00'*4 #delimeter data += host_name.ljust(32, '\x00') data += ''.join([chr(int(i)) for i in PRIMARY_DNS.split('.')]) #primary dns data += ''.join([chr(int(i)) for i in dhcp_server.split('.')]) #DHCP server data += '\x00\x00\x00\x00' #secondary dns:0.0.0.0 data += '\x00' * 8 #delimeter data += '\x94\x00\x00\x00' # unknow data += '\x05\x00\x00\x00' # os major data += '\x01\x00\x00\x00' # os minor data += '\x28\x0a\x00\x00' # OS build data += '\x02\x00\x00\x00' #os unknown data += host_os.ljust(32,'\x00') data += '\x00' * 96 #data += '\x01' + host_os.ljust(128, '\x00') #data += '\x0a\x00\x00'+chr(len(pwd)) # \0x0a represents version of client, algorithm: DRCOM_VER + 100 #data += ror(md5sum('\x03\x01'+salt+pwd), pwd) data += AUTH_VERSION data += '\x02\x0c' data += checksum(data+'\x01\x26\x07\x11\x00\x00'+dump(mac)) data += '\x00\x00' #delimeter data += dump(mac) data += '\x00' # auto logout / default: False data += '\x00' # broadcast mode / default : False data += '\xe9\x13' #unknown, filled numbers randomly =w= log('[mkpkt]',data.enco
# Copyright 2012 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log
import webob.exc

from manila.api import extensions
from manila import db
from manila import exception
from manila import utils

LOG = log.getLogger(__name__)
authorize = extensions.extension_authorizer('share', 'services')


class ServiceController(object):
    """API controller that lists manila services and toggles scheduling."""

    def index(self, req):
        """Return a list of all running services."""
        context = req.environ['manila.context']
        authorize(context)

        # Flatten each DB service row into its API representation.
        services = [
            {
                'id': svc['id'],
                'binary': svc['binary'],
                'host': svc['host'],
                'zone': svc['availability_zone']['name'],
                'status': 'disabled' if svc['disabled'] else 'enabled',
                'state': 'up' if utils.service_is_up(svc) else 'down',
                'updated_at': svc['updated_at'],
            }
            for svc in db.service_get_all(context)
        ]

        # Apply any exact-match query-string filters; once the result set
        # is empty, further filtering cannot change it, so stop early.
        for key in ('host', 'binary', 'zone', 'state', 'status'):
            if key in req.GET:
                wanted = req.GET[key]
                services = [s for s in services if s[key] == wanted]
            if not services:
                break

        return {'services': services}

    def update(self, req, id, body):
        """Enable/Disable scheduling for a service."""
        context = req.environ['manila.context']
        authorize(context)

        # The resource id encodes the action to perform.
        action_to_disabled = {"enable": False, "disable": True}
        if id not in action_to_disabled:
            raise webob.exc.HTTPNotFound("Unknown action")
        disabled = action_to_disabled[id]

        # The request body must identify the service by host and binary.
        try:
            host = body['host']
            binary = body['binary']
        except (TypeError, KeyError):
            raise webob.exc.HTTPBadRequest()

        try:
            svc = db.service_get_by_args(context, host, binary)
            if not svc:
                raise webob.exc.HTTPNotFound('Unknown service')
            db.service_update(context, svc['id'], {'disabled': disabled})
        except exception.ServiceNotFound:
            raise webob.exc.HTTPNotFound("service not found")

        return {'host': host, 'binary': binary, 'disabled': disabled}


class Services(extensions.ExtensionDescriptor):
    """Services support."""

    name = "Services"
    alias = "os-services"
    updated = "2012-10-28T00:00:00-00:00"

    def get_resources(self):
        # A single resource routed at /os-services.
        return [
            extensions.ResourceExtension('os-services', ServiceController()),
        ]
applescript="\'tell application \"Finder\" to quit\'" shellCmd = 'osascript -e '+applescript os.system(shellCmd) demo=False respDeadline = 100 if autopilot: respDeadline = 0.1 timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime()) if os.path.isdir('.'+os.sep+'data'): dataDir='data' else: print('"data" directory does not exist, so savi
ng data in present working directory') dataDir='.' fileName = os.path.join(dataDir, participant+'_spatiotopicMotion_
'+timeAndDateStr) dataFile = open(fileName+'.txt', 'w') # sys.stdout #StringIO.StringIO() saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileName + '.py' os.system(saveCodeCmd) #save a copy of the code as it was when that subject was run logFname = fileName+'.log' ppLogF = logging.LogFile(logFname, filemode='w',#if you set this to 'a' it will append instead of overwriting level=logging.INFO)#errors, data and warnings will be sent to this logfile scrn=1 #1 means second screen widthPix =1024#1024 #monitor width in pixels heightPix =768#768 #monitor height in pixels monitorwidth = 40. #28.5 #monitor width in centimeters viewdist = 50.; #cm pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) / np.pi*180) bgColor = [0,0,0] #"gray background" allowGUI = False waitBlank = False windowAndMouseUnits = 'deg' monitorname = 'mitsubishi' #in psychopy Monitors Center #Holcombe lab monitor mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor mon.setSizePix( (widthPix,heightPix) ) def openMyStimWindow(): #make it a function because have to do it several times, want to be sure is identical each time myWin = visual.Window(monitor=mon,size=(widthPix,heightPix),allowGUI=allowGUI,units=windowAndMouseUnits,color=bgColor,colorSpace='rgb',fullscr=fullscr, screen=scrn,waitBlanking=waitBlank) #Holcombe lab monitor return myWin myWin = openMyStimWindow() myWin.recordFrameIntervals = True #required by RunTimeInfo? 
refreshMsg2 = '' refreshRateWrong = False if not checkRefreshEtc: refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED' else: #checkRefreshEtc try: runInfo = info.RunTimeInfo( # if you specify author and version here, it overrides the automatic detection of __author__ and __version__ in your script #author='<your name goes here, plus whatever you like, e.g., your lab or contact info>', #version="<your experiment version info>", win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=False, ## True means report on everything userProcsDetailed=False ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes #seems to require internet access, probably for process lookup ) #print(runInfo) logging.info(runInfo) print('Finished runInfo- which assesses the refresh and processes of this computer') runInfo_failed = False except: runInfo_failed = True refreshMsg1 = ' runInfo call FAILED so dont know refresh rate' if not runInfo_failed: refreshSDwarningLevel_ms = 3 ##ms if runInfo["windowRefreshTimeSD_ms"] > refreshSDwarningLevel_ms: print("\nThe variability of the refresh rate is high (SD > %.2f ms)." % (refreshSDwarningLevel_ms)) ## and here you could prompt the user with suggestions, possibly based on other info: if runInfo["windowIsFullScr"]: print("Your window is full-screen, which is good for timing.") print('Possible issues: internet / wireless? bluetooth? recent startup (not finished)?') #if len(runInfo['systemUserProcFlagged']): #doesnt work if no internet # print('other programs running? 
(command, process-ID):',info['systemUserProcFlagged']) medianHz = 1000./runInfo['windowRefreshTimeMedian_ms'] refreshMsg1= 'Median frames per second ~='+ str( np.round(medianHz,1) ) refreshRateTolerancePct = 3 pctOff = abs( (medianHz-refreshRate) / refreshRate ) refreshRateWrong = pctOff > (refreshRateTolerancePct/100.) if refreshRateWrong: refreshMsg1 += ' BUT' refreshMsg1 += ' program assumes ' + str(refreshRate) refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!' else: refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) ) myWinRes = myWin.size myWin.allowGUI =True myWin.close() #have to close window to show dialog box myDlg = gui.Dlg(title="Screen check", pos=(200,400)) myDlg.addText(refreshMsg1, color='Black') if refreshRateWrong: myDlg.addText(refreshMsg2, color='Red') if refreshRateWrong: logging.error(refreshMsg1+refreshMsg2) else: logging.info(refreshMsg1+refreshMsg2) if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any(): msgWrongResolution = 'Screen apparently NOT the desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels!!' 
myDlg.addText(msgWrongResolution, color='Red') logging.error(msgWrongResolution) print(msgWrongResolution) myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) # color='DimGrey') color names stopped working along the way, for unknown reason myDlg.show() if myDlg.OK: #unpack information from dialogue box pass else: print('User cancelled from dialog box.') logging.flush() core.quit() if not demo: allowGUI = False myWin = openMyStimWindow() targetDot = visual.ImageStim(myWin,mask='circle',colorSpace='rgb', color = (-.5, .3, -.5), size=ballStdDev,autoLog=autoLogging, contrast=1, opacity = 1.0) foilDot = visual.ImageStim(myWin,mask='circle',colorSpace='rgb', color = (0, 0, 0 ),size=ballStdDev,autoLog=autoLogging, contrast=1, opacity = 1.0) blackDot = visual.ImageStim(myWin,mask='circle',colorSpace='rgb', color = (-1,-1,-1),size=ballStdDev,autoLog=autoLogging, contrast=0.5, opacity = 1.0) mouseLocationMarker = visual.Circle(myWin,units=windowAndMouseUnits,radius=ballStdDev/2.)#,autoLog=autoLogging) mouseLocationMarker.setFillColor((-.5,-.5,-.5), colorSpace='rgb') clickContinueArea = visual.Rect(myWin,units='norm',width=1,height=.6,fillColor=(-.6,-.6,0),autoLog=autoLogging) clickContinueAreaX = 1; clickContinueAreaY = 1 mouseLocationMarker.setFillColor((-.5,-.5,-.5), colorSpace='rgb') beforeFirstTrialText = visual.TextStim(myWin,pos=(0, .8),colorSpace='rgb',color = (-1,-1,-1),alignHoriz='center', alignVert='top', height = 0.05, units='norm',autoLog=autoLogging) respPromptText = visual.TextStim(myWin,pos=(0, -.3),colorSpace='rgb',color = (-1,-1,-1),alignHoriz='center', alignVert='center', height = 0.07, units='norm',autoLog=autoLogging) betweenTrialsText = visual.TextStim(myWin,pos=(0, -.4),colorSpace='rgb',color = (-1,-1,-1),alignHoriz='center', alignVert='center',height=.03,units='norm',autoLog=autoLogging) nextRemindCountText = visual.TextStim(myWin,colorSpace='rgb',color= (1,1,1),alignHoriz='center', 
alignVert='top',height=.03,units='norm',autoLog=autoLogging) nextRemindCountTextX = 0.8; nextRemindCountTextY = .8 locationOfProbe= np.array([[0,1.5]]) # np.array([[-10,1.5],[0,1.5],[10,1.5]]) #left, centre, right #Potential other conditions:[-10,6.5],[0,6.5],[10,6.5],[-10,-3.5],[0,-3.5],[10,-3.5] stimList=[] for locus in locationOfProbe: #location of the probe for the trial probeLocationY = locus[1] for upDown in [False,True]: #switching between probe moving top to bottom; and bottom to top for startLeft in [False,True]: for infoRightSide in [False,True]: #text between trials and continue area on left side or right side tilts
""" Support for RFXtrx switches. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.rfxtrx/ """ import logging import homeassistant.components.rfxtrx as rfxtrx from homeassistant.components.rfxtrx import ( ATTR_FIREEVENT, ATTR_NAME, ATTR_PACKETID, ATTR_STATE, EVENT_BUTTON_PRESSED) from homeassistant.components.switch import SwitchDevice from homeassistant.const import ATTR_ENTITY_ID from homeassistant.util import slugify DEPENDENCIES = ['rfxtrx'] SIGNAL_REPETITIONS = 1 _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_devices_callback, discovery_info=None): """Setup the RFXtrx platform.""" import RFXtrx as rfxtrxmod # Add switch from config file switchs = [] signal_repetitions = config.get('signal_repetitions', SIGNAL_REPETITIONS) for device_id, entity_info in config.get('devices', {}).items(): if device_id in rfxtrx.RFX_DEVICES: continue _LOGGER.info("Add %s rfxtrx.switch", entity_info[ATTR_NAME]) # Check if i must fire event fire_event = entity_info.get(ATTR_FIREEVENT, False) datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event} rfxobject = rfxtrx.get_rfx_object(entity_info[ATTR_PACKETID]) newswitch = RfxtrxSwitch( entity_info[ATTR_NAME], rfxobject, datas, signal_repetitions) rfxtrx.RFX_DEVICES[device_id] = newswitch switchs.append(newswitch) add_devices_callback(switchs) def switch_update(event): """Callback for sensor updates from the RFXtrx gateway.""" if not isinstance(event.device, rfxtrxmod.LightingDevice) or \ event.device.known_to_be_dimmable: return # Add entity if not exist and the automatic_add is True device_id = slugify(event.device.id_string.lower()) if device_id not in rfxtrx.RFX_DEVICES: automatic_add = config.get('automatic_add', False) if not automatic_add: return _LOGGER.info( "Automatic add %s rfxtrx.switch (Class: %s Sub: %s)", device_id, event.device.__class__.__name__, event.device.subtype ) pkt_id = "".join("{0:02x}".format(x) for x in 
event.data) entity_name = "%s : %s" % (device_id, pkt_id) datas = {ATTR_STATE: False, ATTR_FIREEVENT: False} signal_repetitions = config.get('signal_repetitions', SIGNAL_REPETITIONS) new_switch = RfxtrxSwitch(entity_name, event, datas, signal_repetitions) rfxtrx.RFX_DEVICES[device_id] = new_switch add_devices_callback([new_switch]) # Check if entity exists or previously added automatically if device_id in rfxtrx.RFX_DEVICES: _LOGGER.debug( "EntityID: %s switch_update. Command: %s", device_id, event.values['Command'] ) if event.values['Command'] == 'On'\ or event.values['Command'] == 'Off': # Update the rfxtrx device state is_on = event.values['Command'] == 'On' # pylint: disable=protected-access rfxtrx.RFX_DEVICES[device_id]._state = is_on rfxtrx.RFX_DEVICES[device_id].update_ha_state() # Fire event
if rfxtrx.RFX_DEVICES[device_id].should_fire_event: rfxtrx.RFX_DEVICES[device_id].hass.bus.fire( EVENT_BUTTON_PRESSED, { ATTR_ENTITY_ID: rfxtrx.RFX_DEVICES[device_id].entity_id, ATTR_S
TATE: event.values['Command'].lower() } ) # Subscribe to main rfxtrx events if switch_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS: rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(switch_update) class RfxtrxSwitch(SwitchDevice): """Representation of a RFXtrx switch.""" def __init__(self, name, event, datas, signal_repetitions): """Initialize the switch.""" self._name = name self._event = event self._state = datas[ATTR_STATE] self._should_fire_event = datas[ATTR_FIREEVENT] self.signal_repetitions = signal_repetitions @property def should_poll(self): """No polling needed for a RFXtrx switch.""" return False @property def name(self): """Return the name of the device if any.""" return self._name @property def should_fire_event(self): """Return is the device must fire event.""" return self._should_fire_event @property def is_on(self): """Return true if light is on.""" return self._state @property def assumed_state(self): """Return true if unable to access real state of entity.""" return True def turn_on(self, **kwargs): """Turn the device on.""" if not self._event: return for _ in range(self.signal_repetitions): self._event.device.send_on(rfxtrx.RFXOBJECT.transport) self._state = True self.update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" if not self._event: return for _ in range(self.signal_repetitions): self._event.device.send_off(rfxtrx.RFXOBJECT.transport) self._state = False self.update_ha_state()
from __future__ import print_function

from scipy.ndimage import gaussian_filter
import numpy as np
from PIL import Image

# Load the RGB and the grayscale versions of the test image.
rgb = np.asarray(Image.open('../Klimt/Klimt.ppm'))
gray = np.asarray(Image.open('../Klimt/Klimt.pgm'))
print('img:', rgb.shape)

for blur_sigma in [0.5, 2, 5, 7]:
    print('sigma:', blur_sigma)

    # Smooth only along the two spatial axes (sigma 0 on the channel axis)
    # so colors are not blurred into each other -- same trick scikit-image
    # uses:
    # https://github.com/scikit-image/scikit-image/blob/fca9f16da4bd7420245d05fa82ee51bb9677b039/skimage/filters/_gaussian.py#L12-L126
    smoothed_rgb = gaussian_filter(
        rgb, sigma=(blur_sigma, blur_sigma, 0), mode='nearest')
    Image.fromarray(smoothed_rgb).save(
        'Klimt_RGB_Gaussian_blur_sigma={:.1f}.png'.format(blur_sigma))

    # The grayscale image has no channel axis, so a scalar sigma suffices.
    smoothed_gray = gaussian_filter(gray, sigma=blur_sigma, mode='nearest')
    Image.fromarray(smoothed_gray).save(
        'Klimt_gray_Gaussian_blur_sigma={:.1f}.png'.format(blur_sigma))
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import time

import MPR121

print('Adafruit MPR121 Capacitive Touch Sensor Test')

# Create the sensor driver and open communication on the device's default
# I2C bus and default I2C address (0x5A). On a BeagleBone Black this
# defaults to I2C bus 0.
cap = MPR121.MPR121()
if not cap.begin():
    print('Error initializing MPR121. Check your wiring!')
    sys.exit(1)

# Optional configuration examples (disabled):
#   cap.set_thresholds(6, 12)   # custom touch/release thresholds
#   cap.begin(address=0x5B)     # ADDR tied to 3.3V; 0x5C = SDA, 0x5D = SCL
#   cap.begin(busnum=1)         # explicit I2C bus
#
# Per-pin helpers also exist if you only care about a few inputs:
#   cap.is_touched(pin), cap.filtered_data(pin), cap.baseline_data(pin)
# Bit i of cap.touched() is 1 while pin i is being touched, so comparing
# against the previous value detects press/release transitions.

# Poll and print the raw 12-bit touch bitmask forever; Ctrl-C exits.
print('Press Ctrl-C to quit.')
while True:
    current_touched = cap.touched()
    print(current_touched)
    time.sleep(0.1)
# Copyright (C) 2014 Johnny Vestergaard <jkv@unixcluster.dk>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import logging

import gevent

import conpot.core as conpot_core

logger = logging.getLogger(__name__)


class UsageSimulator(object):
    """Simulates power usage for a Kamstrup 382 meter.

    Seeds its state from the databus registers, then publishes getter
    callables back onto the databus so reads always see live values.
    """

    def __init__(self, *args):
        # Both high-res and low-res energy readings are derived on the fly.
        self.energy_in = 0
        self.energy_out = 0
        # One slot per phase (p1, p2, p3).
        self.voltage = [0, 0, 0]
        self.current = [0, 0, 0]
        self.power = [0, 0, 0]
        gevent.spawn(self.initialize)

    def usage_counter(self):
        """Accumulate consumed energy from the instantaneous phase powers."""
        while True:
            # Since this is gevent, this actually sleeps for _at least_
            # 1 second.
            # TODO: measure last entry and figure it out
            gevent.sleep(1)
            self.energy_in += sum(int(p * 0.0036) for p in self.power)

    def initialize(self):
        """Bind simulator state to databus registers, then start counting."""
        # The databus must be initialized before we can probe values.
        databus = conpot_core.get_databus()
        databus.initialized.wait()

        # Accumulated energy-in counter: high-res register plus a derived
        # low-res alias.
        energy_in_register = 'register_13'
        self.energy_in = databus.get_value(energy_in_register)
        databus.set_value(energy_in_register, self._get_energy_in)
        databus.set_value('register_1', self._get_energy_in_lowres)

        # Accumulated energy-out counter, same pattern.
        energy_out_register = 'register_14'
        self.energy_out = databus.get_value(energy_out_register)
        databus.set_value(energy_out_register, self._get_energy_out)
        databus.set_value('register_2', self._get_energy_out_lowres)

        # Per-phase electrical registers: (register, backing list, slot,
        # getter published to the databus).
        phase_bindings = (
            ('register_1054', self.voltage, 0, self._get_voltage_1),
            ('register_1055', self.voltage, 1, self._get_voltage_2),
            ('register_1056', self.voltage, 2, self._get_voltage_3),
            ('register_1076', self.current, 0, self._get_current_1),
            ('register_1077', self.current, 1, self._get_current_2),
            ('register_1078', self.current, 2, self._get_current_3),
            ('register_1080', self.power, 0, self._get_power_1),
            ('register_1081', self.power, 1, self._get_power_2),
            ('register_1082', self.power, 2, self._get_power_3),
        )
        for register, store, slot, getter in phase_bindings:
            store[slot] = databus.get_value(register)
            databus.set_value(register, getter)

        gevent.spawn(self.usage_counter)

    def _get_energy_in(self):
        return self.energy_in

    def _get_energy_out(self):
        return self.energy_out

    def _get_energy_in_lowres(self):
        return self.energy_in / 1000

    def _get_energy_out_lowres(self):
        return self.energy_out / 1000

    def _get_voltage_1(self):
        return self.voltage[0]

    def _get_voltage_2(self):
        return self.voltage[1]

    def _get_voltage_3(self):
        return self.voltage[2]

    def _get_current_1(self):
        return self.current[0]

    def _get_current_2(self):
        return self.current[1]

    def _get_current_3(self):
        return self.current[2]

    def _get_power_1(self):
        return self.power[0]

    def _get_power_2(self):
        return self.power[1]

    def _get_power_3(self):
        return self.power[2]
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
Demonstrates PySide signals and slots by wiring a QSlider's
valueChanged signal directly to a QLCDNumber's display slot.
"""

import sys

from PySide.QtGui import *
from PySide.QtCore import *


class Example(QWidget):
    """Window holding an LCD readout driven by a horizontal slider."""

    def __init__(self):
        super(Example, self).__init__()

        number_display = QLCDNumber()
        slider = QSlider(Qt.Horizontal)

        # Any change of the slider value is shown on the LCD immediately;
        # no explicit handler code is needed.
        slider.valueChanged.connect(number_display.display)

        layout = QVBoxLayout()
        layout.addWidget(number_display)
        layout.addWidget(slider)
        self.setLayout(layout)

        self.setGeometry(300, 300, 250, 150)
        self.setWindowTitle('Signal & slot')


def main():
    app = QApplication(sys.argv)
    window = Example()
    window.show()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
""" Package for managing a ranked ladder, which is a special kind of ongoing League. Package Requirements -------------
------- botbase match user Dependencies ------------ cmd_ladder botbase/ commandtype ladder/ ratingsdb match/ cmd_match matchinfo user/ userlib ladder util/ server ladderadminchannel botbase/ botchannel cmd_seedgen ladder/ cmd_ladder race/ cmd_racestats user/ cmd_user rating ratingsdb database/ dbconnec
t ladder/ rating ratingutil ratingutil ladder/ rating util/ console """
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table


class ABCTest(fixtures.MappedTest):
    """Round-trip a three-level joined-table inheritance hierarchy
    (A -> B -> C) and verify polymorphic loading at each level."""

    @classmethod
    def define_tables(cls, metadata):
        # Kept as module-level globals so the test method below can build
        # polymorphic selectables from the same Table objects.
        global a, b, c
        a = Table(
            "a",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("adata", String(30)),
            # Discriminator column used as polymorphic_on in the mapping.
            Column("type", String(30)),
        )
        b = Table(
            "b",
            metadata,
            Column("id", Integer, ForeignKey("a.id"), primary_key=True),
            Column("bdata", String(30)),
        )
        c = Table(
            "c",
            metadata,
            Column("id", Integer, ForeignKey("b.id"), primary_key=True),
            Column("cdata", String(30)),
        )

    # Parametrized: once with explicit outer-join polymorphic selectables
    # ("union"), once with no with_polymorphic selectable at all ("none").
    @testing.combinations(("union",), ("none",))
    def test_abc_poly_roundtrip(self, fetchtype):
        class A(fixtures.ComparableEntity):
            pass

        class B(A):
            pass

        class C(B):
            pass

        if fetchtype == "union":
            # Selectables spanning the hierarchy for polymorphic loads:
            # abc covers A rows with optional b/c, bc covers B rows with
            # optional c.
            abc = a.outerjoin(b).outerjoin(c)
            bc = a.join(b).outerjoin(c)
        else:
            abc = bc = None

        self.mapper_registry.map_imperatively(
            A,
            a,
            with_polymorphic=("*", abc),
            polymorphic_on=a.c.type,
            polymorphic_identity="a",
        )
        self.mapper_registry.map_imperatively(
            B,
            b,
            with_polymorphic=("*", bc),
            inherits=A,
            polymorphic_identity="b",
        )
        self.mapper_registry.map_imperatively(
            C, c, inherits=B, polymorphic_identity="c"
        )

        # One A, three Bs, three Cs (the last two Cs intentionally share
        # identical data values).
        a1 = A(adata="a1")
        b1 = B(bdata="b1", adata="b1")
        b2 = B(bdata="b2", adata="b2")
        b3 = B(bdata="b3", adata="b3")
        c1 = C(cdata="c1", bdata="c1", adata="c1")
        c2 = C(cdata="c2", bdata="c2", adata="c2")
        c3 = C(cdata="c2", bdata="c2", adata="c2")

        sess = fixture_session()
        for x in (a1, b1, b2, b3, c1, c2, c3):
            sess.add(x)
        sess.flush()
        # Expunge so the queries below hit the database, not the identity map.
        sess.expunge_all()

        # Querying A returns every row, each as its most-derived class.
        eq_(
            [
                A(adata="a1"),
                B(bdata="b1", adata="b1"),
                B(bdata="b2", adata="b2"),
                B(bdata="b3", adata="b3"),
                C(cdata="c1", bdata="c1", adata="c1"),
                C(cdata="c2", bdata="c2", adata="c2"),
                C(cdata="c2", bdata="c2", adata="c2"),
            ],
            sess.query(A).order_by(A.id).all(),
        )
        # Querying B returns B and C rows only.
        eq_(
            [
                B(bdata="b1", adata="b1"),
                B(bdata="b2", adata="b2"),
                B(bdata="b3", adata="b3"),
                C(cdata="c1", bdata="c1", adata="c1"),
                C(cdata="c2", bdata="c2", adata="c2"),
                C(cdata="c2", bdata="c2", adata="c2"),
            ],
            sess.query(B).order_by(A.id).all(),
        )
        # Querying C returns only the leaf-level rows.
        eq_(
            [
                C(cdata="c1", bdata="c1", adata="c1"),
                C(cdata="c2", bdata="c2", adata="c2"),
                C(cdata="c2", bdata="c2", adata="c2"),
            ],
            sess.query(C).order_by(A.id).all(),
        )
:ref:`Operation configuration overrides<msrest:optionsforoperations>`. :rtype: :class:`IntegrationAccountSchemaPaged <azure.mgmt.logic.models.IntegrationAccountSchemaPaged>` """ def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/schemas' path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') if top is not None: query_parameters['$top'] = self._serialize.query("top", top, 'int') if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send( request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp return response # Deserialize response deserialized = 
models.IntegrationAccountSchemaPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.IntegrationAccountSchemaPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized def get( self, resource_group_name, integration_account_name, schema_name, custom_headers=None, raw=False,
**operation_config): """Gets an integration account schema. :param resource_group_name: The resource group name. :type resource_group_name: str :param integration_account_name: The integration account name. :type integration_account_name: str :param schema_name: The integration account schema
name. :type schema_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :rtype: :class:`IntegrationAccountSchema <azure.mgmt.logic.models.IntegrationAccountSchema>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/schemas/{schemaName}' path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'), 'schemaName': self._serialize.url("schema_name", schema_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp 
deserialized = None if response.status_code == 200: deserialized = self._deserialize('IntegrationAccountSchema', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def create_or_update( self, resource_group_name, integration_account_name, schema_name, schema, custom_headers=None, raw=False, **operation_config): """Creates or updates an integration account schema. :param resource_group_name: The resource group name. :type resource_group_name: str :param integration_account_name: The integration account name. :type integration_account_name: str :param schema_name: The integration account schema name. :type schema_name: str :param schema: The integration account schema. :type schema: :class:`IntegrationAccountSchema <azure.mgmt.logic.models.IntegrationAccountSchema>` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:rtype: :class:`IntegrationAccountSchema <azure.mgmt.logic.models.IntegrationAccountSchema>` :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true """ # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/schemas/{schemaName}' path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'), 'schemaName': self._serialize.url("schema_name", schema_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest2

# Shared fixture data: a GCE instance "uptime" metric sampled at three
# timestamps one second apart.
METRIC_TYPE = 'compute.googleapis.com/instance/uptime'
METRIC_LABELS = {'instance_name': 'instance-1'}

RESOURCE_TYPE = 'gce_instance'
RESOURCE_LABELS = {
    'project_id': 'my-project',
    'zone': 'us-east1-a',
    'instance_id': '1234567890123456789',
}

METRIC_KIND = 'DELTA'
VALUE_TYPE = 'DOUBLE'

TS0 = '2016-04-06T22:05:00.042Z'
TS1 = '2016-04-06T22:05:01.042Z'
TS2 = '2016-04-06T22:05:02.042Z'


class TestTimeSeries(unittest2.TestCase):
    """Unit tests for ``gcloud.monitoring.timeseries.TimeSeries``."""

    def _getTargetClass(self):
        # Local import so collection does not require the package.
        from gcloud.monitoring.timeseries import TimeSeries
        return TimeSeries

    def _makeOne(self, *args, **kwargs):
        # Helper: construct an instance of the class under test.
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        from gcloud.monitoring.metric import Metric
        from gcloud.monitoring.resource import Resource
        from gcloud.monitoring.timeseries import Point
        VALUE = 60  # seconds

        METRIC = Metric(type=METRIC_TYPE, labels=METRIC_LABELS)
        RESOURCE = Resource(type=RESOURCE_TYPE, labels=RESOURCE_LABELS)
        POINTS = [
            Point(start_time=TS0, end_time=TS1, value=VALUE),
            Point(start_time=TS1, end_time=TS2, value=VALUE),
        ]
        series = self._makeOne(metric=METRIC, resource=RESOURCE,
                               metric_kind=METRIC_KIND, value_type=VALUE_TYPE,
                               points=POINTS)
        # Constructor stores each argument unchanged.
        self.assertEqual(series.metric, METRIC)
        self.assertEqual(series.resource, RESOURCE)
        self.assertEqual(series.metric_kind, METRIC_KIND)
        self.assertEqual(series.value_type, VALUE_TYPE)
        self.assertEqual(series.points, POINTS)

    def test_from_dict(self):
        # Build a series from the JSON/dict wire representation.
        VALUE = 60  # seconds
        info = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {
                    'interval': {'startTime': TS0, 'endTime': TS1},
                    'value': {'doubleValue': VALUE},
                },
                {
                    'interval': {'startTime': TS1, 'endTime': TS2},
                    'value': {'doubleValue': VALUE},
                },
            ],
        }
        series = self._getTargetClass()._from_dict(info)
        self.assertEqual(series.metric.type, METRIC_TYPE)
        self.assertEqual(series.metric.labels, METRIC_LABELS)
        self.assertEqual(series.resource.type, RESOURCE_TYPE)
        self.assertEqual(series.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series.metric_kind, METRIC_KIND)
        self.assertEqual(series.value_type, VALUE_TYPE)
        self.assertEqual(len(series.points), 2)
        point1, point2 = series.points
        self.assertEqual(point1.start_time, TS0)
        self.assertEqual(point1.end_time, TS1)
        self.assertEqual(point1.value, VALUE)
        self.assertEqual(point2.start_time, TS1)
        self.assertEqual(point2.end_time, TS2)
        self.assertEqual(point2.value, VALUE)

    def test_from_dict_no_points(self):
        # A missing 'points' key yields an empty points list.
        info = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        series = self._getTargetClass()._from_dict(info)
        self.assertEqual(series.metric.type, METRIC_TYPE)
        self.assertEqual(series.metric.labels, METRIC_LABELS)
        self.assertEqual(series.resource.type, RESOURCE_TYPE)
        self.assertEqual(series.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series.metric_kind, METRIC_KIND)
        self.assertEqual(series.value_type, VALUE_TYPE)
        self.assertEqual(series.points, [])

    def test_labels(self):
        info = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        series = self._getTargetClass()._from_dict(info)
        # Expected merge: resource_type marker + resource labels + metric
        # labels, flattened into one dict.
        labels = {'resource_type': RESOURCE_TYPE}
        labels.update(RESOURCE_LABELS)
        labels.update(METRIC_LABELS)
        # The merged dict is computed lazily on first access and cached
        # in _labels; the second access must return the same value.
        self.assertIsNone(series._labels)
        self.assertEqual(series.labels, labels)
        self.assertIsNotNone(series._labels)
        self.assertEqual(series.labels, labels)


class TestPoint(unittest2.TestCase):
    """Unit tests for ``gcloud.monitoring.timeseries.Point``."""

    def _getTargetClass(self):
        from gcloud.monitoring.timeseries import Point
        return Point

    def _makeOne(self, *args, **kwargs):
        # Helper: construct an instance of the class under test.
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        VALUE = 3.14
        point = self._makeOne(start_time=TS0, end_time=TS1, value=VALUE)
        self.assertEqual(point.start_time, TS0)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

    def test_from_dict(self):
        VALUE = 3.14
        info = {
            'interval': {'startTime': TS0, 'endTime': TS1},
            'value': {'doubleValue': VALUE},
        }
        point = self._getTargetClass()._from_dict(info)
        self.assertEqual(point.start_time, TS0)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

    def test_from_dict_defaults(self):
        # 'startTime' is optional in the wire format; it defaults to None.
        VALUE = 3.14
        info = {
            'interval': {'endTime': TS1},
            'value': {'doubleValue': VALUE},
        }
        point = self._getTargetClass()._from_dict(info)
        self.assertIsNone(point.start_time)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

    def test_from_dict_int64(self):
        # int64 values arrive as decimal strings and must be converted
        # back to Python ints without losing precision.
        VALUE = 2 ** 63 - 1
        info = {
            'interval': {'endTime': TS1},
            'value': {'int64Value': str(VALUE)},
        }
        point = self._getTargetClass()._from_dict(info)
        self.assertIsNone(point.start_time)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)
TF_8 = str('iso-8859-1'), str('utf-8') class LimitedStream(object): ''' LimitedStream wraps another stream in order to not allow reading from it past specified amount of bytes. ''' def __init__(self, stream, limit, buf_size=64 * 1024 * 1024): self.stream = stream self.remaining = limit self.buffer = b'' self.buf_size = buf_size def _read_limited(self, size=None): if size is None or size > self.remaining: size = self.remaining if size == 0: return b'' result = self.stream.read(size) self.remaining -= len(result) return result def read(self, size=None): if size is None: result = self.buffer + self._read_limited() self.buffer = b'' elif size < len(self.buffer): result = self.buffer[:size] self.buffer = self.buffer[size:] else: # size >= len(self.buffer) result = self.buffer + self._read_limited(size - len(self.buffer)) self.buffer = b'' return result def readline(self, size=None): while b'\n' not in self.buffer and \ (size is None or len(self.buffer) < size): if size: # since size is not None here, len(self.buffer) < size chunk = self._read_limited(size - len(self.buffer)) else: chunk = self._read_limited() if not chunk: break self.buffer += chunk sio = BytesIO(self.buffer) if size: line = sio.readline(size) else: line = sio.readline() self.buffer = sio.read() return line class WSGIRequest(http.HttpRequest): def __init__(self, environ): script_name = get_script_name(environ) path_info = get_path_info(environ) if not path_info: # Sometimes PATH_INFO exists, but is empty (e.g. accessing # the SCRIPT_NAME URL without a trailing slash). We really need to # operate as if they'd requested '/'. Not amazingly nice to force # the path like this, but should be harmless. 
path_info = '/' self.environ = environ self.path_info = path_info self.path = '%s/%s' % (script_name.rstrip('/'), path_info.lstrip('/')) self.META = environ self.META['PATH_INFO'] = path_info self.META['SCRIPT_NAME'] = script_name self.method = environ['REQUEST_METHOD'].upper() _, content_params = cgi.parse_header(environ.get('CONTENT_TYPE', '')) if 'charset' in content_params: try: codecs.lookup(content_params['charset']) except LookupError: pass else: self.encoding = content_params['charset'] self._post_parse_error = False try: content_length = int(environ.get('CONTENT_LENGTH')) except (ValueError, TypeError): content_length = 0 self._stream = LimitedStream(self.environ['wsgi.input'], content_length) self._read_started = False self.resolver_match = None def _get_scheme(self): return self.environ.get('wsgi.url_scheme') def _get_request(self): warnings.warn('`request.REQUEST` is deprecated, use `request.GET` or ' '`request.POST` instead.', RemovedInDjango19Warning, 2) if not hasattr(self, '_request'): self._request = datastructures.MergeDict(self.POST, self.GET) return self._request @cached_property def GET(self): # The WSGI spec says 'QUERY_STRING' may be absent. raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '') return http.QueryDict(raw_query_string, encoding=self._encoding) def _get_post(self): if not hasattr(self, '_post'): self._load_post_and_files() return self._post def _set_post(self, post): self._post = post @cached_property def COOKIES(self): raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '') return http.parse_cookie(raw_cookie) def _get_files(self): if not hasattr(self, '_files'): self._load_post_and_files() return self._files POST = property(_get_post, _set_post) FILES = property(_get_files) REQUEST = property(_get_request) class WSGIHandler(base.BaseHandler): initLock = Lock() request_class = WSGIRequest def __call__(self, environ, start_response): # Set up middleware if needed. 
We couldn't do this earlier, because # settings weren't available. if self._request_middleware is None: with self.initLock: try: # Check that middleware is still uninitialized. if self._request_middleware is None: self.load_middleware() except: # Unload whatever middleware we got self._request_middleware = None raise set_script_prefix(get_script_name(environ)) signals.request_started.send(sender=self.__class__) try: request = self.request_class(environ) except UnicodeDecodeError: logger.warning('Bad Request (UnicodeDecodeError)', exc_info=sys.exc_info(), extra={ 'status_code': 400, } ) response = http.HttpResponseBadRequest() else: response = self.get_response(request) response._handler_class = self.__class__ status = '%s %s' % (response.status_code, response.reason_phrase) response_headers = [(str(k), str(v)) for k, v in response.items()] for c in response.cookies.values(): response_headers.append((str('Set-Cookie'), str(c.output(header='')))) start_response(force_str(status), response_headers) return response def get_path_info(environ): """ Returns the HTTP request's PATH_INFO as a unicode string. """ path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/') # It'd be better to implement URI-to-IRI decoding, see #19508. return path_info.decode(UTF_8) def get_script_name(environ): """ Returns the equivalent of th
e HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite has been used, returns what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything). """ if settings.FORCE_SCRIPT_NAME is not None: return force_text(settings.FORCE_SCRIPT_NAME) # If Apache's mod_rewrite had a whack at the URL, Apache set either # SCRIPT_URL or R
EDIRECT_URL to the full resource URL before applying any # rewrites. Unfortunately not every Web server (lighttpd!) passes this # information through all the time, so FORCE_SCRIPT_NAME, above, is still # needed. script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') if not script_url: script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '') if script_url: path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '') script_name = script_url[:-len(path_info)] else: script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '') # It'd be better to implement URI-to-IRI decoding, see #19508. return script_name.decode(UTF_8) def get_bytes_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as bytes. key and default should be str objects. Under Python 2 they may also be unicode objects provided they only contain ASCII characters. """ value = environ.get(str(key), str(default)) # Under Python 3, non-ASCII values in the WSGI environ are arbitrarily # decoded with ISO-8859-1. This is wrong for Django websites where UTF-8 # is the default. Re-encode to recover the original bytestring. re
import mysql.connector

from model.group import Group
from model.contact import Contact


class DbFixture:
    """Read-only access layer over the addressbook MySQL database.

    Opens a single autocommit connection at construction time; call
    :meth:`destroy` to release it when the fixture is no longer needed.
    """

    def __init__(self, host, dbname, username, password):
        self.host = host
        self.dbname = dbname
        self.username = username
        self.password = password
        self.connection = mysql.connector.connect(
            host=host, database=dbname, user=username, password=password)
        # Autocommit so queries always see the latest committed state
        # without an explicit transaction per call.
        self.connection.autocommit = True

    def get_group_list(self):
        """Return all groups as ``Group`` models; ids are stringified."""
        # NOTE: renamed local from `list` to avoid shadowing the builtin.
        groups = []
        cursor = self.connection.cursor()
        try:
            cursor.execute(
                "select group_id, group_name, group_header, group_footer"
                " from group_list")
            for (group_id, name, header, footer) in cursor:
                groups.append(
                    Group(id=str(group_id), name=name,
                          header=header, footer=footer))
        finally:
            # Always release the cursor, even if the query fails.
            cursor.close()
        return groups

    def get_contact_list(self):
        """Return all live contacts as ``Contact`` models; ids stringified."""
        contacts = []
        cursor = self.connection.cursor()
        try:
            # A zero 'deprecated' timestamp marks a live (non-deleted) row.
            cursor.execute(
                "select id, firstname, lastname from addressbook"
                " where deprecated = '0000-00-00 00:00:00'")
            for (contact_id, firstname, lastname) in cursor:
                contacts.append(
                    Contact(id=str(contact_id),
                            firstname=firstname, lastname=lastname))
        finally:
            cursor.close()
        return contacts

    def destroy(self):
        """Close the underlying database connection."""
        self.connection.close()