prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
#!/usr/bin/env python
"""Build a synthetic rough-fault model for F3D.

Pipeline: generate a corrugated fault surface as a point cloud, write it
to an .xyz file, convert to STL with meshlabserver, tet-mesh it with
Trelis via a templated journal file, and convert the mesh to an Abaqus
.inp with F3D_msh2inp. Optionally plot the point cloud.
"""
import argparse
import os
import sys
from subprocess import call

import numpy as np
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 - registers the 3d projection
from matplotlib import pyplot as plt

# ---------------------------------------------------------------- CLI ----
ap = argparse.ArgumentParser()
ap.add_argument('-vis')     # 1: plot cropped point cloud
ap.add_argument('-refine')  # 1: refine mesh
ap.add_argument('-clean')   # 1: remove tmp files
args = ap.parse_args()  # parse once (original re-parsed per option)
vis = int(args.vis) if args.vis is not None else 0
refine = int(args.refine) if args.refine is not None else 0
clean = int(args.clean) if args.clean is not None else 0

# ------------------------------------------------- synthetic fault pixels
z = np.linspace(.2, -.8, num=100)
y = np.linspace(-.625, .625, num=120)
grid = np.meshgrid(y, z)
# np.float was removed in NumPy 1.24; the builtin float is equivalent.
x = np.zeros((len(z) * len(y), 1), dtype=float)
dat_vert = np.hstack((x, grid[0].reshape(x.shape), grid[1].reshape(x.shape)))

# Weak roughness: superposed cosines, amplitude tapered with depth.
wl = np.linspace(.12, .18, num=8)
amp = .03125 * np.sqrt(wl)
e = 1.025
r = -.2
dip = 70.
zcnt = -.35
omg = [0.82976173, 0.89624834, 0.03829284, -0.50016345,
       -1.06606012, 1.40505898, -1.24256034, 1.28623393]
# omg = (np.random.rand(wl.shape[0]) - .5) * np.pi

# Along-strike extent (fixed: original read row dat_vert[1, :] instead of
# the y column; L is informational only and unused below).
L = dat_vert[:, 1].max() - dat_vert[:, 1].min()
zmax = z.max()
zmin = z.min()
for i in range(len(wl)):
    phs = dat_vert[:, 1] / wl[i] * np.pi + omg[i]
    dat_vert[:, 0] += (amp[i] * np.cos(phs)
                       * (e * zmax - dat_vert[:, 2]) / (e * zmax - zmin)
                       * np.exp(r * abs(phs) / np.pi))
# Tilt the surface to the requested dip angle around depth zcnt.
dat_vert[:, 0] += (zcnt - dat_vert[:, 2]) * np.tan((90. - dip) / 180. * np.pi)


def flt_patch(dat_vert, slope1, slope2, trunc1, trunc2, hlw, hup):
    """Superpose a cosine-shaped ridge patch on the fault surface.

    The patch is bounded by the two lines z = slope*y + b; points inside
    get an x-offset that is cosine-tapered across strike and linearly
    interpolated between hlw (bottom) and hup (top) with depth.
    Returns the vertex array with outside points first, shifted points last.
    """
    b1 = -slope1 * trunc1 - .7
    b2 = -slope2 * trunc2 - .7
    in_id = np.where(np.logical_and(dat_vert[:, 2] - slope1 * dat_vert[:, 1] < b1,
                                    dat_vert[:, 2] - slope2 * dat_vert[:, 1] < b2))[0]
    out_id = np.setdiff1d(np.array(range(len(dat_vert)), dtype=np.int32), in_id)
    x_shift = dat_vert[in_id, 0]
    # ridge patch
    k = 0
    zup = dat_vert[:, 2].max()
    zlw = dat_vert[:, 2].min()
    for i in in_id:
        r = abs(dat_vert[i, 1] - .5 * (trunc1 + trunc2))      # across-strike distance
        R = .5 * ((dat_vert[i, 2] - b2) / slope2 - (dat_vert[i, 2] - b1) / slope1)
        h = hlw + (dat_vert[i, 2] - zlw) / (zup - zlw) * (hup - hlw)
        x_shift[k] = x_shift[k] + np.cos(r / R * np.pi / 2.) * h
        k += 1
    dat_vert = np.vstack((dat_vert[out_id, :],
                          np.hstack((x_shift.reshape(len(in_id), 1),
                                     dat_vert[in_id, 1].reshape(len(in_id), 1),
                                     dat_vert[in_id, 2].reshape(len(in_id), 1)))))
    return dat_vert


slope1 = 10.
slope2 = -10.
trunc1 = .1
trunc2 = .6
hup = 0.
hlw = .08
# dat_vert = flt_patch(dat_vert, slope1, slope2, trunc1, trunc2, hlw, hup)
print(omg)  # was a Python 2 print statement

# ------------------------------------------- write .xyz, convert to STL
fout = 'F3D_syn.xyz'
with open(fout, 'w+') as f:  # context manager replaces open/close pair
    np.savetxt(f, dat_vert, delimiter=' ', fmt='%.6f ' * 3)

fin = fout
fout = fout.rsplit('.')[0] + '.stl'
mxl = 'xyz2stl.mlx'
call(['meshlabserver', '-i', fin, '-o', fout, '-s', mxl])
if clean == 1:
    os.remove(fin)

# --------------------------------------------------- mesh with Trelis
fin = fout
if refine == 1:
    fout = fout.rsplit('.')[0] + '_dns.exo'
else:
    fout = fout.rsplit('.')[0] + '.exo'
jou = 'F3D_tet.jou'
hf = 0.0025  # fault grid length (0.0025 for ~100 m tet model, 0.003 for ~40 m)
hm = 0.0075  # matrix grid length (0.0075 for ~100 m tet model, 0.010 for ~40 m)
with open(jou, 'r') as txt_jou, open('tmp.jou', 'w+') as txt_jou_tmp:
    for line in txt_jou:
        line = line.strip('\r\n')
        # Rewrite the templated journal lines with this run's filenames/sizes.
        if 'import' in line.lower():
            line = 'import stl "' + fin + '"'
        if 'export' in line.lower():
            line = 'export mesh "' + fout + '" dimension 3 overwrite'
        if 'surface 46 94 95 97 size' in line.lower():
            line = 'surface 46 94 95 97 size %0.6f' % (2 * hf)
        if 'volume all size' in line.lower():
            line = 'volume all size %0.6f' % (2 * hm)
        txt_jou_tmp.write(line + '\n')
        if 'mesh volume all' in line.lower() and refine == 1:
            txt_jou_tmp.write('refine volume all\n')
call(['trelis', '-nojournal', '-nographics', 'tmp.jou'])
if clean == 1:
    os.remove('tmp.jou')

# ------------------------------------------------ preprocessing msh => inp
dt_dyn = 2E-5  # 1E-5 for dns 100 m tet model, 8E-5 for 40 m tet, 8E-4 for ~1 m tet
import F3D_msh2inp
_ = F3D_msh2inp.msh2inp(fout, dt_dyn)

# ------------------------------------------------------------ fault plot
if vis == 1:
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(dat_vert[:, 0], dat_vert[:, 1], dat_vert[:, 2], c='b', marker='.')
    # Cubic bounding box to simulate an equal aspect ratio.
    max_range = np.array([np.max(dat_vert[:, 0]) - np.min(dat_vert[:, 0]),
                          np.max(dat_vert[:, 1]) - np.min(dat_vert[:, 1]),
                          np.max(dat_vert[:, 2]) - np.min(dat_vert[:, 2])]).max()
    Xb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][0].flatten()
    Yb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][1].flatten()
    Zb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][2].flatten()
    for xb, yb, zb in zip(Xb, Yb, Zb):
        ax.plot([xb], [yb], [zb], 'w')
    plt.title('fault [km]')
    plt.grid()
    plt.show()
########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
#    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#    * See the License for the specific language governing permissions and
#    * limitations under the License.

from setuptools import setup

# Pinned runtime dependencies; stdlib backports are appended below only
# when the running interpreter lacks the corresponding module.
install_requires = [
    'cloudify-rest-client==4.4.dev1',
    'pika==0.9.14',
    'networkx==1.9.1',
    'proxy_tools==0.1.0',
    'bottle==0.12.7',
    'jinja2==2.7.2'
]

try:
    import importlib  # noqa
except ImportError:
    install_requires.append('importlib')

try:
    import argparse  # NOQA
except ImportError:
    install_requires.append('argparse==1.2.2')

try:
    from collections import OrderedDict  # noqa
except ImportError:
    install_requires.append('ordereddict==1.1')

setup(
    name='cloudify-plugins-common',
    version='4.4.dev1',
    author='cosmo-admin',
    author_email='cosmo-admin@gigaspaces.com',
    packages=[
        'cloudify',
        'cloudify.compute',
        'cloudify.workflows',
        'cloudify.plugins',
        'cloudify.celery',
        'cloudify.proxy',
        'cloudify.test_utils',
        'cloudify.ctx_wrappers',
    ],
    license='LICENSE',
    description='Contains necessary decorators and utility methods for '
                'writing Cloudify plugins',
    zip_safe=False,
    install_requires=install_requires,
    entry_points={
        'console_scripts': [
            'ctx = cloudify.proxy.client:main',
        ]
    },
    package_data={'cloudify.ctx_wrappers': ['ctx.py']},
    scripts=[
        'ctx_wrappers/ctx-sh'
    ]
)
# Copyright 2009 Jean-Francois Houzard, Olivier Roger
#
# This file is part of pypassport.
#
# pypassport is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# pypassport is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with pyPassport.
# If not, see <http://www.gnu.org/licenses/>.

from pypassport.reader import ReaderException
from pypassport.hexfunctions import *
from pypassport import apdu
from pypassport.logger import Logger


class Iso7816Exception(Exception):
    """Raised when a transmitted APDU reports anything but success.

    Instantiated as (message, sw1, sw2) so the status words remain
    accessible on the exception's args.
    """
    pass


class Iso7816(Logger):
    """Thin ISO 7816-4 command layer on top of a smart-card reader."""

    # SW1 -> message, or SW1 -> {SW2 -> message}: status-word decoding
    # table following ISO 7816-4.
    Errors = {
        0x61: 'SW2 indicates the number of response bytes still available',
        0x62: {0x00: 'No information given',
               0x81: 'Part of returned data may be corrupted',
               0x82: 'End of file/record reached before reading Le bytes',
               0x83: 'Selected file invalidated',
               0x84: 'FCI not formatted according to ISO7816-4 section 5.1.5'},
        0x63: {0x00: 'No information given',
               0x81: 'File filled up by the last write',
               0x82: 'Card Key not supported',
               0x83: 'Reader Key not supported',
               0x84: 'Plain transmission not supported',
               0x85: 'Secured Transmission not supported',
               0x86: 'Volatile memory not available',
               0x87: 'Non Volatile memory not available',
               0x88: 'Key number not valid',
               0x89: 'Key length is not correct',
               # NOTE(review): key 0xC looks like it was meant to cover the
               # 0xC0-0xCF "counter" status words - confirm against ISO 7816-4.
               0xC: 'Counter provided by X (valued from 0 to 15) (exact meaning depending on the command)'},
        0x64: 'State of non-volatile memory unchanged (SW2=00, other values are RFU)',
        0x65: {0x00: 'No information given',
               0x81: 'Memory failure'},
        0x66: 'Reserved for security-related issues (not defined in this part of ISO/IEC 7816)',
        0x67: {0x00: 'Wrong length'},
        0x68: {0x00: 'No information given',
               0x81: 'Logical channel not supported',
               0x82: 'Secure messaging not supported'},
        0x69: {0x00: 'No information given',
               0x81: 'Command incompatible with file structure',
               0x82: 'Security status not satisfied',
               0x83: 'Authentication method blocked',
               0x84: 'Referenced data invalidated',
               0x85: 'Conditions of use not satisfied',
               0x86: 'Command not allowed (no current EF)',
               0x87: 'Expected SM data objects missing',
               0x88: 'SM data objects incorrect'},
        0x6A: {0x00: 'No information given',
               0x80: 'Incorrect parameters in the data field',
               0x81: 'Function not supported',
               0x82: 'File not found',
               0x83: 'Record not found',
               0x84: 'Not enough memory space in the file',
               0x85: 'Lc inconsistent with TLV structure',
               0x86: 'Incorrect parameters P1-P2',
               0x87: 'Lc inconsistent with P1-P2',
               0x88: 'Referenced data not found'},
        0x6B: {0x00: 'Wrong parameter(s) P1-P2'},
        0x6C: 'Wrong length Le: SW2 indicates the exact length',
        0x6D: {0x00: 'Instruction code not supported or invalid'},
        0x6E: {0x00: 'Class not supported'},
        0x6F: {0x00: 'No precise diagnosis'},
        0x90: {0x00: 'Success'}  # No further qualification
    }

    def __init__(self, reader):
        Logger.__init__(self, "ISO7816")
        self._reader = reader
        self._ciphering = False

    def transmit(self, toSend, logMsg):
        """Transmit an APDU and decode the returned status words.

        @param toSend: The command to transmit.
        @type toSend: A commandAPDU object.
        @param logMsg: A log message associated to the transmit.
        @type logMsg: A string.
        @return: The result field of the responseAPDU object.

        The SW1 and SW2 status bytes are checked after each transmit.
        If they don't mean success, the appropriate error string is
        retrieved from the Errors dictionary and an Iso7816Exception
        is raised.

        The Iso7816Exception carries ('error message', sw1, sw2)::

            try:
                x.transmit(commandAPDU(..), "msg")
            except Iso7816Exception as exc:
                print("error: " + exc.args[0])
                print("(sw1, sw2) = " + str((exc.args[1], exc.args[2])))
        """
        try:
            self.log(logMsg)
            self.log(str(toSend))
            if self._ciphering:
                # Secure-messaging wrap before sending.
                toSend = self._ciphering.protect(toSend)
                self.log("[SM] " + str(toSend))
            res = self._reader.transmit(toSend)
            if self._ciphering:
                self.log("[SM] " + str(res))
                res = self._ciphering.unprotect(res)
            msg = Iso7816.Errors[res.sw1][res.sw2]
            self.log(str(res) + " //" + msg)
            if msg == "Success":
                return res.res
            else:
                raise Iso7816Exception(msg, res.sw1, res.sw2)
        except KeyError:
            # Status word not in the table (was Python-2-only
            # "except KeyError, k" syntax; binding dropped as unused).
            raise Iso7816Exception("Unknown error", res.sw1, res.sw2)

    def setCiphering(self, c=False):
        """Install (or clear, with False) a secure-messaging cipher."""
        self._ciphering = c

    def selectFile(self, p1, p2, file="", cla="00", ins="A4"):
        """Issue a SELECT FILE; `file` is a hex-string identifier."""
        # // keeps Lc an int on Python 3 as well (was /).
        lc = hexToHexRep(len(file) // 2)
        toSend = apdu.CommandAPDU(cla, ins, p1, p2, lc, file, "")
        return self.transmit(toSend, "Select File")

    def readBinary(self, offset, nbOfByte):
        """READ BINARY of nbOfByte bytes starting at `offset`."""
        off = "%04x" % int(offset)  # renamed from `os` to avoid builtin-module confusion
        toSend = apdu.CommandAPDU("00", "B0", off[0:2], off[2:4], "", "",
                                  hexToHexRep(nbOfByte))
        return self.transmit(toSend, "Read Binary")

    def updateBinary(self, offset, data, cla="00", ins="D6"):
        """UPDATE BINARY writing binary `data` at `offset`."""
        off = "%04x" % int(offset)
        data = binToHexRep(data)
        lc = hexToHexRep(len(data) // 2)
        toSend = apdu.CommandAPDU(cla, ins, off[0:2], off[2:4], lc, data, "")
        return self.transmit(toSend, "Update Binary")

    def getChallenge(self):
        """GET CHALLENGE: request an 8-byte random from the card."""
        toSend = apdu.CommandAPDU("00", "84", "00", "00", "", "", "08")
        return self.transmit(toSend, "Get Challenge")

    def internalAuthentication(self, rnd_ifd):
        """INTERNAL AUTHENTICATE with the terminal random `rnd_ifd`."""
        data = binToHexRep(rnd_ifd)
        lc = hexToHexRep(len(data) // 2)
        toSend = apdu.CommandAPDU("00", "88", "00", "00", lc, data, "00")
        res = self.transmit(toSend, "Internal Authentication")
        return res
from org.jfree.data.xy import XYSeries, XYSeriesCollection
from org.jfree.chart.plot import PlotOrientation
from org.jfree.chart import ChartFactory
from geoscript.plot.chart import Chart
from org.jfree.chart.renderer.xy import XYSplineRenderer, XYLine3DRenderer


def curve(data, name="", smooth=True, trid=True):
    """Create a curve chart from a list of ``(x, y)`` tuples.

    When *smooth* is ``True`` a spline renderer is installed. When
    *trid* is ``True`` a 3D line renderer is installed afterwards, so
    it takes precedence and *smooth* is effectively ignored.
    """
    series = XYSeries(name)
    for point in data:
        series.add(point[0], point[1])
    dataset = XYSeriesCollection()
    dataset.addSeries(series)
    chart = ChartFactory.createXYLineChart(None, None, None, dataset,
                                           PlotOrientation.VERTICAL,
                                           True, True, False)
    plot = chart.getXYPlot()
    if smooth:
        plot.setRenderer(XYSplineRenderer())
    if trid:
        plot.setRenderer(XYLine3DRenderer())
    return Chart(chart)
nit__(self, statistic, pvalue): self.statistic = statistic self.pvalue = pvalue def __repr__(self): return (f"{self.__class__.__name__}(statistic={self.statistic}, " f"pvalue={self.pvalue})") def _psi1_mod(x): """ psi1 is defined in equation 1.10 in Csorgo, S. and Faraway, J. (1996). This implements a modified version by excluding the term V(x) / 12 (here: _cdf_cvm_inf(x) / 12) to avoid evaluating _cdf_cvm_inf(x) twice in _cdf_cvm. Implementation based on MAPLE code of Julian Faraway and R code of the function pCvM in the package goftest (v1.1.1), permission granted by Adrian Baddeley. Main difference in the implementation: the code here keeps adding ter
ms of the series until the terms are small enough. """ def _ed2(y): z = y**2 / 4 b = kv(1/4, z) + kv(3/4, z) return np.exp(-z) * (y/2)**(3/2) * b / np.sqrt(np.pi) def _ed3(y): z = y**2 / 4 c = np.exp(-z) / np.sqrt(np.pi) return c * (y/2)**(5/2) * (2*kv(1/
4, z) + 3*kv(3/4, z) - kv(5/4, z)) def _Ak(k, x): m = 2*k + 1 sx = 2 * np.sqrt(x) y1 = x**(3/4) y2 = x**(5/4) e1 = m * gamma(k + 1/2) * _ed2((4 * k + 3)/sx) / (9 * y1) e2 = gamma(k + 1/2) * _ed3((4 * k + 1) / sx) / (72 * y2) e3 = 2 * (m + 2) * gamma(k + 3/2) * _ed3((4 * k + 5) / sx) / (12 * y2) e4 = 7 * m * gamma(k + 1/2) * _ed2((4 * k + 1) / sx) / (144 * y1) e5 = 7 * m * gamma(k + 1/2) * _ed2((4 * k + 5) / sx) / (144 * y1) return e1 + e2 + e3 + e4 + e5 x = np.asarray(x) tot = np.zeros_like(x, dtype='float') cond = np.ones_like(x, dtype='bool') k = 0 while np.any(cond): z = -_Ak(k, x[cond]) / (np.pi * gamma(k + 1)) tot[cond] = tot[cond] + z cond[cond] = np.abs(z) >= 1e-7 k += 1 return tot def _cdf_cvm_inf(x): """ Calculate the cdf of the Cramér-von Mises statistic (infinite sample size). See equation 1.2 in Csorgo, S. and Faraway, J. (1996). Implementation based on MAPLE code of Julian Faraway and R code of the function pCvM in the package goftest (v1.1.1), permission granted by Adrian Baddeley. Main difference in the implementation: the code here keeps adding terms of the series until the terms are small enough. The function is not expected to be accurate for large values of x, say x > 4, when the cdf is very close to 1. """ x = np.asarray(x) def term(x, k): # this expression can be found in [2], second line of (1.3) u = np.exp(gammaln(k + 0.5) - gammaln(k+1)) / (np.pi**1.5 * np.sqrt(x)) y = 4*k + 1 q = y**2 / (16*x) b = kv(0.25, q) return u * np.sqrt(y) * np.exp(-q) * b tot = np.zeros_like(x, dtype='float') cond = np.ones_like(x, dtype='bool') k = 0 while np.any(cond): z = term(x[cond], k) tot[cond] = tot[cond] + z cond[cond] = np.abs(z) >= 1e-7 k += 1 return tot def _cdf_cvm(x, n=None): """ Calculate the cdf of the Cramér-von Mises statistic for a finite sample size n. If N is None, use the asymptotic cdf (n=inf). See equation 1.8 in Csorgo, S. and Faraway, J. (1996) for finite samples, 1.2 for the asymptotic cdf. 
The function is not expected to be accurate for large values of x, say x > 2, when the cdf is very close to 1 and it might return values > 1 in that case, e.g. _cdf_cvm(2.0, 12) = 1.0000027556716846. """ x = np.asarray(x) if n is None: y = _cdf_cvm_inf(x) else: # support of the test statistic is [12/n, n/3], see 1.1 in [2] y = np.zeros_like(x, dtype='float') sup = (1./(12*n) < x) & (x < n/3.) # note: _psi1_mod does not include the term _cdf_cvm_inf(x) / 12 # therefore, we need to add it here y[sup] = _cdf_cvm_inf(x[sup]) * (1 + 1./(12*n)) + _psi1_mod(x[sup]) / n y[x >= n/3] = 1 if y.ndim == 0: return y[()] return y def cramervonmises(rvs, cdf, args=()): """Perform the one-sample Cramér-von Mises test for goodness of fit. This performs a test of the goodness of fit of a cumulative distribution function (cdf) :math:`F` compared to the empirical distribution function :math:`F_n` of observed random variates :math:`X_1, ..., X_n` that are assumed to be independent and identically distributed ([1]_). The null hypothesis is that the :math:`X_i` have cumulative distribution :math:`F`. Parameters ---------- rvs : array_like A 1-D array of observed values of the random variables :math:`X_i`. cdf : str or callable The cumulative distribution function :math:`F` to test the observations against. If a string, it should be the name of a distribution in `scipy.stats`. If a callable, that callable is used to calculate the cdf: ``cdf(x, *args) -> float``. args : tuple, optional Distribution parameters. These are assumed to be known; see Notes. Returns ------- res : object with attributes statistic : float Cramér-von Mises statistic. pvalue : float The p-value. See Also -------- kstest, cramervonmises_2samp Notes ----- .. versionadded:: 1.6.0 The p-value relies on the approximation given by equation 1.8 in [2]_. It is important to keep in mind that the p-value is only accurate if one tests a simple hypothesis, i.e. the parameters of the reference distribution are known. 
If the parameters are estimated from the data (composite hypothesis), the computed p-value is not reliable. References ---------- .. [1] Cramér-von Mises criterion, Wikipedia, https://en.wikipedia.org/wiki/Cram%C3%A9r%E2%80%93von_Mises_criterion .. [2] Csorgo, S. and Faraway, J. (1996). The Exact and Asymptotic Distribution of Cramér-von Mises Statistics. Journal of the Royal Statistical Society, pp. 221-234. Examples -------- Suppose we wish to test whether data generated by ``scipy.stats.norm.rvs`` were, in fact, drawn from the standard normal distribution. We choose a significance level of alpha=0.05. >>> from scipy import stats >>> rng = np.random.default_rng() >>> x = stats.norm.rvs(size=500, random_state=rng) >>> res = stats.cramervonmises(x, 'norm') >>> res.statistic, res.pvalue (0.49121480855028343, 0.04189256516661377) The p-value 0.79 exceeds our chosen significance level, so we do not reject the null hypothesis that the observed sample is drawn from the standard normal distribution. Now suppose we wish to check whether the same samples shifted by 2.1 is consistent with being drawn from a normal distribution with a mean of 2. >>> y = x + 2.1 >>> res = stats.cramervonmises(y, 'norm', args=(2,)) >>> res.statistic, res.pvalue (0.07400330012187435, 0.7274595666160468) Here we have used the `args` keyword to specify the mean (``loc``) of the normal distribution to test the data against. This is equivalent to the following, in which we create a frozen normal distribution with mean 2.1, then pass its ``cdf`` method as an argument. >>> frozen_dist = stats.norm(loc=2) >>> res = stats.cramervonmises(y, frozen_dist.cdf) >>> res.statistic, res.pvalue (0.07400330012187435, 0.7274595666160468) In either case, we would reject the null hypothesis that the observed sample is drawn from a normal distribution with a mean of 2 (and default variance of 1) because the p-value 0.04 is less than our chosen significance level. 
""" if isinstance(cdf, str): cdf = getattr(distributions, cdf).cdf vals = np.sort(np.asarray(rvs)) if vals.size <= 1: raise ValueError('The sample must contain at least two observations.') if vals.ndim > 1: raise ValueError('The sample must be one-dimensional.') n = len(vals) cdfvals = cdf(vals, *args) u = (2*np.arange(1, n+1) - 1)/(2*n) w = 1/(1
""" Wikipedia utils. @author: Faegheh Hasibi (faegheh.hasibi@idi.ntnu.no) """ from urllib import quote class WikipediaUtils(object): mongo = None @staticmethod def wiki_title_to_uri(title):
""" Converts wiki page title to wiki_uri based on https://en.wikipedia.org/wiki/Wikipedia:Page_
name#Spaces.2C_underscores_and_character_coding encoding based on http://dbpedia.org/services-resources/uri-encoding """ if title: wiki_uri = "<wikipedia:" + quote(title, ' !$&\'()*+,-./:;=@_~').replace(' ', '_') + ">" return wiki_uri else: return None @staticmethod def wiki_uri_to_dbp_uri(wiki_uri): """Converts Wikipedia uri to DBpedia URI.""" return wiki_uri.replace("<wikipedia:", "<dbpedia:") def main(): # example usage print WikipediaUtils.wiki_title_to_uri("Tango (genre musical)") if __name__ == "__main__": main()
# Licensed under GPL version 3 - see LICENSE.rst
import numpy as np
import transforms3d
from astropy.table import Table
from astropy.coordinates import SkyCoord
import astropy.units as u

from ..analysis import (find_best_detector_position,
                        resolvingpower_per_order)
from ..math.utils import e2h
from ..source import PointSource, FixedPointing
from ..simulator import Sequence
from ..optics import (CATGrating, CircleAperture, PerfectLens,
                      RadialMirrorScatter, FlatDetector)
from ..design import RowlandTorus, GratingArrayStructure


def test_detector_position():
    '''Check that the optimal detector position is found at the
    convergent point.

    Builds n rays whose directions all point from random positions
    toward a single convergent point, and asserts that the optimizer
    recovers that point's x coordinate.
    '''
    n = 1000
    convergent_point = np.array([3., 5., 7.])
    # Random ray origins well away from the convergent point.
    pos = np.random.rand(n, 3) * 100. + 10.
    # Directions all aim at the convergent point.
    dir = - pos + convergent_point[np.newaxis, :]
    # e2h converts Euclidean 3-vectors to homogeneous coordinates
    # (1 for points, 0 for directions).
    photons = Table({'pos': e2h(pos, 1), 'dir': e2h(dir, 0),
                     'energy': np.ones(n), 'polarization': np.ones(n),
                     'probability': np.ones(n)})
    opt = find_best_detector_position(photons)
    assert np.abs(opt.x - 3.) < 0.1


def test_resolvingpower_consistency():
    '''Compare different methods to measure the resolving power.

    This test only ensures consistency, not correctness. However, most
    of the underlying statistical functions are implemented in other
    packages and tested there. This test requires a full pipeline to
    set up the input photons correctly and it thus also serves as an
    integration test.
    '''
    entrance = np.array([12000., 0., 0.])
    aper = CircleAperture(position=entrance, zoom=100)
    lens = PerfectLens(focallength=12000., position=entrance, zoom=100)
    rms = RadialMirrorScatter(inplanescatter=1e-4 * u.rad,
                              perpplanescatter=1e-5 * u.rad,
                              position=entrance, zoom=100)
    uptomirror = Sequence(elements=[aper, lens, rms])

    # CAT grating with blaze angle ensure that positive and negative orders
    # are defined the same way for all gratings in the GAS.
    blazeang = 1.91
    rowland = RowlandTorus(6000., 6000.)
    blazemat = transforms3d.axangles.axangle2mat(np.array([0, 0, 1]),
                                                 np.deg2rad(blazeang))
    gas = GratingArrayStructure(rowland=rowland, d_element=30.,
                                x_range=[1e4, 1.4e4],
                                radius=[50, 100],
                                elem_class=CATGrating,
                                elem_args={'d': 1e-4,
                                           'zoom': [1., 10., 10.],
                                           'orientation': blazemat,
                                           'order_selector': None},
                                )
    star = PointSource(coords=SkyCoord(23., 45., unit="degree"),
                       flux=5. / u.s / u.cm**2)
    pointing = FixedPointing(coords=SkyCoord(23., 45., unit='deg'))
    photons = star.generate_photons(exposuretime=200 * u.s)
    p = pointing(photons)
    p = uptomirror(p)

    # Same photon list, three detector models: no detector (fit),
    # the Rowland torus, and a large flat detector.
    o = np.array([0, -3, -6])
    res1 = resolvingpower_per_order(gas, p.copy(), orders=o, detector=None)
    res2 = resolvingpower_per_order(gas, p.copy(), orders=o, detector=rowland)
    res3 = resolvingpower_per_order(gas, p.copy(), orders=o,
                                    detector=FlatDetector(zoom=1000))
    # FWHM is similar
    assert np.isclose(res1[1][0], res2[1][0], atol=0.1)
    assert np.isclose(res1[1][1], res2[1][1], atol=0.2)  # differs stronger here if fit not good
    assert np.isclose(res2[1][0], 1.8, rtol=0.1, atol=0.1)
    # Resolution of 0th order is essentially 0
    assert np.isclose(res1[0][0], 0, atol=0.5)
    assert np.isclose(res2[0][0], 0, atol=0.5)
    assert np.isclose(res3[0][0], 0, atol=0.5)
    # Resolution of higher orders is consistent and higher
    assert np.isclose(res1[0][1], res2[0][1], rtol=0.1)
    assert np.isclose(res1[0][2], res2[0][2], rtol=0.2)
    assert np.isclose(res1[0][1], res3[0][1], rtol=0.1)
    # Resolution is higher at higher orders (approximately linear for small angles)
    assert np.isclose(res1[0][2], 2 * res1[0][1], rtol=0.2)
    assert np.isclose(res2[0][2], 2 * res2[0][1], rtol=0.2)
    # No test for res3 here, since that does not follow Rowland circle.
'''
Thin wrappers around the stdlib ``calendar`` module plus a small demo.

Created on 2016-09-16

@author: Administrator
'''
import calendar


def getYear(year):
    """Return the formatted text calendar for the whole *year*."""
    return calendar.calendar(year)


def getMonth(year, month):
    """Return the formatted text calendar for *year*-*month*."""
    return calendar.month(year, month)


def getMonthRange(year, month):
    """Return (weekday of the 1st, days in month).

    The weekday is 0-based: 0 is Monday, 6 is Sunday.
    """
    return calendar.monthrange(year, month)


def getMonthYear(year, month):
    """Return the month as a list of week lists (0 pads missing days)."""
    return calendar.monthcalendar(year, month)


def isLeap(year):
    """Return True if *year* is a leap year."""
    return calendar.isleap(year)


print(getYear(2016))
print(getMonth(2016, 10))
print(getMonthYear(2016, 10))
print(getMonthRange(2016, 5))
print(isLeap(2016))
import logging; logger = logging.getLogger("morse." + __name__)
import morse.core.sensor
# NOTE(review): morse.helpers.transformation is used below but only
# morse.core.sensor is imported here - presumably it is made available
# transitively by that import; confirm.


class JidoPostureClass(morse.core.sensor.MorseSensorClass):
    """ Jido posture sensor. Currently working with PTU and KUKA arm """

    def __init__(self, obj, parent=None):
        """ Constructor method.

        Receives the reference to the Blender object.
        The second parameter should be the name of the object's parent.
        Locates the PTU and KUKA-arm children of the robot parent and
        initialises the exported joint/pose fields in local_data.
        """
        logger.info('%s initialization' % obj.name)
        # Call the constructor of the parent class
        super(self.__class__,self).__init__(obj, parent)

        # Object position (maybe delete later)
        self.local_data['x'] = 0.0
        self.local_data['y'] = 0.0
        self.local_data['z'] = 0.0
        self.local_data['yaw'] = 0.0
        self.local_data['pitch'] = 0.0
        self.local_data['roll'] = 0.0

        # joints of kuka-arm (seven segments)
        self.local_data['seg0'] = 0.0
        self.local_data['seg1'] = 0.0
        self.local_data['seg2'] = 0.0
        self.local_data['seg3'] = 0.0
        self.local_data['seg4'] = 0.0
        self.local_data['seg5'] = 0.0
        self.local_data['seg6'] = 0.0

        # joints of PTU-unit (pan/tilt)
        self.local_data['pan'] = 0.0
        self.local_data['tilt'] = 0.0

        logger.info('Component initialized')

        ##################### PTU joints ##################
        # Check if robot parent has a child named "PTUname"
        for child in self.robot_parent.blender_obj.children:
            if str(child) == self.blender_obj['PTUname']:
                self._ptu_obj = child

                # Get the references to the childen object and
                # store a transformation3d structure for their position
                for child in self._ptu_obj.childrenRecursive:
                    if 'PanBase' in child.name:
                        self._pan_base = child
                        self._pan_position_3d = morse.helpers.transformation.Transformation3d(child)
                    elif 'TiltBase' in child.name:
                        self._tilt_base = child
                        self._tilt_position_3d = morse.helpers.transformation.Transformation3d(child)

        # Check the bases were found, or exit with a message
        # (accessing the attributes raises AttributeError when missing).
        try:
            logger.info("Using PTU: '%s'" % self._ptu_obj.name)
            logger.info("Using pan base: '%s'" % self._pan_base.name)
            logger.info("Using tilt base: '%s'" % self._tilt_base.name)
        except AttributeError as detail:
            logger.error("Platine is missing the pan and tilt bases. Module will not work!")

        ###################### KUKA joints ##################
        # Gather information about all segments of the kuka-arm
        self._segments = []
        self.kuka_obj = 0
        # Check if robot parent has a child named "kuka_base"
        for child in self.robot_parent.blender_obj.children:
            if str(child) == self.blender_obj['KUKAname']:
                self.kuka_obj = child
        try:
            logger.info("Using KUKA arm: '%s'" % self.kuka_obj.name)
        except AttributeError as detail:
            logger.error("Kuka arm is missing. Module will not work!")

        # The axis along which the different segments of the kuka armrotate
        # Considering the rotation of the arm as installed in Jido
        # NOTE(review): a '-y' branch exists in default_action but never
        # appears in this list - confirm whether it is intentional.
        self._dofs = ['y', 'z', 'y', 'z', 'y', 'z', 'y']

    def default_action(self):
        """ Get the x, y, z, yaw, pitch and roll of the blender object.

        Also reads the current pan/tilt of the PTU and the seven KUKA
        joint angles, then copies everything into local_data for the
        middleware layer.
        """
        x = self.position_3d.x
        y = self.position_3d.y
        z = self.position_3d.z
        yaw = self.position_3d.yaw
        pitch = self.position_3d.pitch
        roll = self.position_3d.roll

        ############################# PTU joints ##############################
        # Reset movement variables (rx/ry/rz are currently unused below)
        rx, ry, rz = 0.0, 0.0, 0.0

        # Update the postition of the base platforms
        try:
            self._pan_position_3d.update(self._pan_base)
            self._tilt_position_3d.update(self._tilt_base)
        except AttributeError as detail:
            logger.error("Platine is missing the pan and tilt bases. Platine does not work!")
            return

        current_pan = self._pan_position_3d.yaw
        current_tilt = self._tilt_position_3d.pitch

        logger.debug("Platine: pan=%.4f, tilt=%.4f" % (current_pan, current_tilt))

        ############################# KUKA joints ##############################
        armature = self.kuka_obj
        self._angles = []
        i = 0
        for channel in armature.channels:
            self._segments.append(channel)
            # Extract the angles
            segment_angle = channel.joint_rotation
            # Pick the rotation component for this segment's DOF axis.
            if self._dofs[i] == 'y':
                self._angles.append(segment_angle[1])
            elif self._dofs[i] == '-y':
                self._angles.append(-segment_angle[1])
            elif self._dofs[i] == 'z':
                self._angles.append(segment_angle[2])
            i = i + 1

        ############################# Hand data over to middleware ##############################
        self.local_data['x'] = float(x)
        self.local_data['y'] = float(y)
        self.local_data['z'] = float(z)
        self.local_data['yaw'] = float(yaw)
        self.local_data['pitch'] = float(pitch)
        self.local_data['roll'] = float(roll)
        # KUKA arm
        self.local_data['seg0'] = self._angles[0]
        self.local_data['seg1'] = self._angles[1]
        self.local_data['seg2'] = self._angles[2]
        self.local_data['seg3'] = self._angles[3]
        self.local_data['seg4'] = self._angles[4]
        self.local_data['seg5'] = self._angles[5]
        self.local_data['seg6'] = self._angles[6]
        # PTU
        self.local_data['pan'] = float(current_pan)
        self.local_data['tilt'] = float(current_tilt)
# -*- coding: utf8 -*-
'''
Basic utilities (originally documented in Chinese).

Created on 2014-05-14

@author: Exp
'''

# Return the current system time formatted with *format*
# (default "%Y-%m-%d %H:%M:%S").
''' 获取系统时间 '''
def getSysTime(format = "%Y-%m-%d %H:%M:%S"):
    import time
    return time.strftime(format)
# End Fun getSysTime()


# True when running locally; otherwise we are on the SAE platform
# (detected via the "APP_NAME" environment variable SAE sets).
''' 判断是否为本地运行环境,否则为SAE运行环境 '''
def isLocalEnvironment():
    from os import environ
    return not environ.get("APP_NAME", "")
# End Fun isLocalEnvironment()


# "Encrypt" a string (really just base64 encoding).
# NOTE(review): base64.encodestring is Python 2 / removed in Python 3.9;
# this module appears to target Python 2 - confirm before porting.
''' 加密字符串 '''
def encrypt(plaintext):
    import base64
    return base64.encodestring(plaintext)
# End Fun encrypt()


# "Decrypt" a string previously produced by encrypt() (base64 decode).
''' 解密字符串 '''
def decrypt(ciphertext):
    import base64
    return base64.decodestring(ciphertext)
# End Fun decrypt()


# Simple transcoding: convert orgStr (encoding auto-detected via the
# third-party chardet package) to the aimCharset encoding.
''' 简单编码转换,把未知编码的orgStr转码为aimCharset,其中orgStr的源编码由系统自动判断 '''
def simpleTranscoding(orgStr, aimCharset):
    import chardet
    orgCharset = chardet.detect(orgStr)['encoding'] #自动判断编码 (auto-detect encoding)
    return transcoding(orgStr, orgCharset, aimCharset)
# End Fun simpleTranscoding()


# Transcoding: re-encode orgStr from orgCharset to aimCharset
# (decodes to unicode first; Python-2 byte-string semantics).
''' 编码转换,把源编码为orgCharset的orgStr,转码为aimCharset '''
def transcoding(orgStr, orgCharset, aimCharset):
    unicodeStr = orgStr.decode(orgCharset)
    return unicodeStr.encode(aimCharset)
# End Fun transcoding()
# Copyright 2015 Matt Hawkins
#
# Update : July 2016
# added CPU and disk monitoring to script
# johnty.wang@mail.mcgill.ca
#
# additional requirement: psutil
#
#--------------------------------------
# Drives an HD44780-style 16x2 LCD over an I2C backpack (PCF8574),
# alternating between CPU temperature/usage and disk usage.
from subprocess import PIPE, Popen
import smbus
import psutil
import os
import time

# Define some device parameters
I2C_ADDR = 0x27  # I2C device address
LCD_WIDTH = 16   # Maximum characters per line

# Define some device constants
LCD_CHR = 1  # Mode - Sending data
LCD_CMD = 0  # Mode - Sending command

LCD_LINE_1 = 0x80  # LCD RAM address for the 1st line
LCD_LINE_2 = 0xC0  # LCD RAM address for the 2nd line
#LCD_LINE_3 = 0x94  # LCD RAM address for the 3rd line
#LCD_LINE_4 = 0xD4  # LCD RAM address for the 4th line

LCD_BACKLIGHT = 0x08  # On
#LCD_BACKLIGHT = 0x00  # Off

ENABLE = 0b00000100  # Enable bit

# Timing constants
E_PULSE = 0.0005
E_DELAY = 0.0005

#Open I2C interface
#bus = smbus.SMBus(0)  # Rev 1 Pi uses 0
bus = smbus.SMBus(1)  # Rev 2 Pi uses 1


def lcd_init():
    # Initialise display: standard HD44780 4-bit init sequence.
    lcd_byte(0x33,LCD_CMD) # 110011 Initialise
    lcd_byte(0x32,LCD_CMD) # 110010 Initialise
    lcd_byte(0x06,LCD_CMD) # 000110 Cursor move direction
    lcd_byte(0x0C,LCD_CMD) # 001100 Display On,Cursor Off, Blink Off
    lcd_byte(0x28,LCD_CMD) # 101000 Data length, number of lines, font size
    lcd_byte(0x01,LCD_CMD) # 000001 Clear display
    time.sleep(E_DELAY)


def get_cpu_temperature():
    # Parse the temperature out of vcgencmd's "temp=48.3'C" output.
    # NOTE(review): str indexing of `output` assumes a Python 2 str;
    # on Python 3 communicate() returns bytes - confirm target version.
    process = Popen(['vcgencmd', 'measure_temp'], stdout=PIPE)
    output, _error = process.communicate()
    return float(output[output.index('=') + 1:output.rindex("'")])


#Scott tried this, but didn't work
#def memory_usage_resource():
#    import resource
#    rusage_denom = 1024.
#    if sys.platform == 'darwin':
#        # ... it seems that in OSX the output is different units ...
#        rusage_denom = rusage_denom * rusage_denom
#    mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom
#    return mem


def lcd_byte(bits, mode):
    # Send byte to data pins
    # bits = the data
    # mode = 1 for data
    #        0 for command
    # The byte is sent as two 4-bit nibbles, high nibble first.
    bits_high = mode | (bits & 0xF0) | LCD_BACKLIGHT
    bits_low = mode | ((bits<<4) & 0xF0) | LCD_BACKLIGHT

    # High bits
    bus.write_byte(I2C_ADDR, bits_high)
    lcd_toggle_enable(bits_high)

    # Low bits
    bus.write_byte(I2C_ADDR, bits_low)
    lcd_toggle_enable(bits_low)


def lcd_toggle_enable(bits):
    # Toggle enable: pulse the E line so the LCD latches the nibble.
    time.sleep(E_DELAY)
    bus.write_byte(I2C_ADDR, (bits | ENABLE))
    time.sleep(E_PULSE)
    bus.write_byte(I2C_ADDR,(bits & ~ENABLE))
    time.sleep(E_DELAY)


def lcd_string(message,line):
    # Send string to display, padded/truncated to the LCD width.
    message = message.ljust(LCD_WIDTH," ")

    lcd_byte(line, LCD_CMD)

    for i in range(LCD_WIDTH):
        lcd_byte(ord(message[i]),LCD_CHR)


def main():
    # Main program block

    # Initialise display
    lcd_init()

    while True:
        # Screen 1: CPU temperature and load.
        cpu_temp = get_cpu_temperature()
        cpu_usage = psutil.cpu_percent()
        LINE1 = "CPU TMP = " + str(cpu_temp)+" C"
        LINE2 = "CPU USE = " + str(cpu_usage)+" %"
        #print "cpu temp = ", cpu_temp
        #print "cpu usage = ", cpu_usage
        lcd_string(LINE1,LCD_LINE_1)
        lcd_string(LINE2,LCD_LINE_2)
        time.sleep(5)

        # Screen 2: root filesystem usage in MB.
        # NOTE(review): used/1024/1024 is integer MB on Python 2 but a
        # float on Python 3 - confirm target version.
        st = os.statvfs("/")
        free = st.f_bavail * st.f_frsize
        total = st.f_blocks * st.f_frsize
        used = total - free
        LINE1 = "DISK USAGE:"
        LINE2 = " "+ str(used/1024/1024) + "/" + str(total/1024/1024)+" MB"
        lcd_string(LINE1, LCD_LINE_1)
        lcd_string(LINE2, LCD_LINE_2)
        #print "disk use =", LINE2
        time.sleep(5)

        #mem = psutil.virtual_memory()
        #LINE1 = "Mem Use = " + str(mem)+" C"
        #LINE2 = " "+ str(used/1024/1024) + "/" + str(total/1024/1024)+" MB"
        #lcd_string(LINE1, LCD_LINE_1)
        #lcd_string(LINE2, LCD_LINE_2)
        #time.sleep(5)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
    finally:
        LCD_BACKLIGHT = 0x00  #turn off backlight when exiting!
        lcd_byte(0x01, LCD_CMD)
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2019, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Import hook for PyGObject https://wiki.gnome.org/PyGObject
"""

from PyInstaller.utils.hooks import get_gi_typelibs

# Collect the binaries, data files and hidden imports required to bundle the
# GObject-introspection 'xlib' typelib, version 2.0.
binaries, datas, hiddenimports = get_gi_typelibs('xlib', '2.0')
#!/usr/bin/env python #-*- encoding:utf-8 -*- import json from datetime import datetime from bottle import route, mako_template as template, redirect, request, response, get, post from bottle import static_file, view #为了不经过controller直接返回诸如html,css等静态文件引入 from model.documents import * from setting import * DATE_FORMAT = '%Y-%m-%d %H:%M:%S' # 入库格式化时间 @route('/to_add_item') def to_add_item(): return template('views/system/item/add', site_opt = site_opt) @route('/add_item', method = 'POST') def add_item(): DATE_FORMAT = '%Y%m%d%H%M%S' innerName = 'attr_%s' % datetime.now().strftime(DATE_FORMAT) #request.params可以同时获取到GET或者POST方法传入的参数 name = request.params.get('name') address = request.params.get('address') telno = request.params.get('telno') lat = request.params.get('lat') lon = request.params.get('lon') item = Restaurant(name=unicode(name, 'utf8'), address=unicode(address, 'utf8'), telno=telno, lat = lat, lon = lon) item.save() redirect('list_item') @route('/list_item') def list_item(): start = request.params.get('start') or '0' size = request.params.get('size') or '1000' items = Restaurant.objects[int(start):(int(start) + int(size))] data = { 'items': items } return template('views/system/item/list', data = data, site_opt = site_opt) @route('/del_item') def del_item(): id = request.params.get('id') Restaurant.objects(id=id).delete() # cascade delete menus of the restaurant Menu.objects(restaurant=id).delete() redirect('/list_item') @route('/modify_item', method = 'POST') def modify_item(): id = request.params.get('id') name = request.params.get('name') address = request.params.get('address') telno
= request.params.get('telno') lat = request.params.get('lat') lon = request.params.get('lon') print 'modify item=====%s, %s, %s, %s' % (id, name, address, telno) Restaurant.objects(id=id).update(set__name = unicode(name, 'utf8'), set__address = address, set__telno = unicode(telno, 'utf-8'), set__lat = lat, set__lon = lon) redirect('/list_item') @route('/to_modify_item') def to_modify_item(): id = request.params.get('id') item = Restaurant.objects(id = id)[0] data
= { 'item': item } return template('views/system/item/edit', data = data, site_opt = site_opt)
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Horizon table and row/table actions for the project "Networks" panel.

import logging

from django.core.urlresolvers import reverse
from django import template
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy

from horizon import exceptions
from horizon import tables

from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.usage import quotas

LOG = logging.getLogger(__name__)


class CheckNetworkEditable(object):
    """Mixin class to determine the specified network is editable."""

    def allowed(self, request, datum=None):
        # Only administrator is allowed to create and manage shared networks.
        if datum and datum.shared:
            return False
        return True


class DeleteNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
                    tables.DeleteAction):
    """Delete a network after deleting each of its subnets."""

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Network",
            u"Delete Networks",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Deleted Network",
            u"Deleted Networks",
            count
        )

    policy_rules = (("network", "delete_network"),)

    def delete(self, request, network_id):
        # Fall back to the id for the error message if the lookup fails.
        network_name = network_id
        try:
            # Retrieve the network list.
            network = api.neutron.network_get(request, network_id,
                                              expand_subnet=False)
            network_name = network.name
            LOG.debug('Network %(network_id)s has subnets: %(subnets)s',
                      {'network_id': network_id,
                       'subnets': network.subnets})
            # Subnets must be removed before neutron will delete the network.
            for subnet_id in network.subnets:
                api.neutron.subnet_delete(request, subnet_id)
                LOG.debug('Deleted subnet %s', subnet_id)
            api.neutron.network_delete(request, network_id)
            LOG.debug('Deleted network %s successfully', network_id)
        except Exception:
            msg = _('Failed to delete network %s')
            LOG.info(msg, network_id)
            redirect = reverse("horizon:project:networks:index")
            exceptions.handle(request, msg % network_name,
                              redirect=redirect)


class CreateNetwork(tables.LinkAction):
    """Link to the create-network modal; disabled when quota is exhausted."""
    name = "create"
    verbose_name = _("Create Network")
    url = "horizon:project:networks:create"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_network"),)

    def allowed(self, request, datum=None):
        usages = quotas.tenant_quota_usages(request)
        # when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
        # usages["networks"] is empty
        if usages.get('networks', {}).get('available', 1) <= 0:
            if "disabled" not in self.classes:
                self.classes = [c for c in self.classes] + ["disabled"]
                self.verbose_name = _("Create Network (Quota exceeded)")
        else:
            self.verbose_name = _("Create Network")
            self.classes = [c for c in self.classes if c != "disabled"]
        # Always shown; only the enabled/disabled state varies with quota.
        return True


class EditNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
                  tables.LinkAction):
    name = "update"
    verbose_name = _("Edit Network")
    url = "horizon:project:networks:update"
    classes = ("ajax-modal",)
    icon = "pencil"
    policy_rules = (("network", "update_network"),)


class CreateSubnet(policy.PolicyTargetMixin, CheckNetworkEditable,
                   tables.LinkAction):
    """Link to the add-subnet modal; disabled when subnet quota is exhausted."""
    name = "subnet"
    verbose_name = _("Add Subnet")
    url = "horizon:project:networks:addsubnet"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_subnet"),)
    # neutron has used both in their policy files, supporting both
    policy_target_attrs = (("network:tenant_id", "tenant_id"),
                           ("network:project_id", "tenant_id"),)

    def allowed(self, request, datum=None):
        usages = quotas.tenant_quota_usages(request)
        # when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
        # usages["subnets"] is empty
        if usages.get('subnets', {}).get('available', 1) <= 0:
            if 'disabled' not in self.classes:
                self.classes = [c for c in self.classes] + ['disabled']
                self.verbose_name = _('Add Subnet (Quota exceeded)')
        else:
            self.verbose_name = _('Add Subnet')
            self.classes = [c for c in self.classes if c != 'disabled']
        return True


def get_subnets(network):
    # Render the subnet list for one table row via a template fragment.
    template_name = 'project/networks/_network_ips.html'
    context = {"subnets": network.subnets}
    return template.loader.render_to_string(template_name, context)


DISPLAY_CHOICES = (
    ("up", pgettext_lazy("Admin state of a Network", u"UP")),
    ("down", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
STATUS_DISPLAY_CHOICES = (
    ("active", pgettext_lazy("Current status of a Network", u"Active")),
    ("build", pgettext_lazy("Current status of a Network", u"Build")),
    ("down", pgettext_lazy("Current status of a Network", u"Down")),
    ("error", pgettext_lazy("Current status of a Network", u"Error")),
)


class ProjectNetworksFilterAction(tables.FilterAction):
    """Server-side filter for the networks table."""
    name = "filter_project_networks"
    filter_type = "server"
    filter_choices = (('name', _("Name ="), True),
                      ('shared', _("Shared ="), True,
                       _("e.g. Yes / No")),
                      ('router:external', _("External ="), True,
                       _("e.g. Yes / No")),
                      ('status', _("Status ="), True),
                      ('admin_state_up', _("Admin State ="), True,
                       _("e.g. UP / DOWN")))


class NetworksTable(tables.DataTable):
    """The networks table shown on the project Networks panel."""
    name = tables.WrappingColumn("name_or_id",
                                 verbose_name=_("Name"),
                                 link='horizon:project:networks:detail')
    subnets = tables.Column(get_subnets,
                            verbose_name=_("Subnets Associated"),)
    shared = tables.Column("shared", verbose_name=_("Shared"),
                           filters=(filters.yesno, filters.capfirst))
    external = tables.Column("router:external", verbose_name=_("External"),
                             filters=(filters.yesno, filters.capfirst))
    status = tables.Column("status", verbose_name=_("Status"),
                           display_choices=STATUS_DISPLAY_CHOICES)
    admin_state = tables.Column("admin_state",
                                verbose_name=_("Admin State"),
                                display_choices=DISPLAY_CHOICES)

    class Meta(object):
        name = "networks"
        verbose_name = _("Networks")
        table_actions = (CreateNetwork, DeleteNetwork,
                         ProjectNetworksFilterAction)
        row_actions = (EditNetwork, CreateSubnet, DeleteNetwork)
# lint-amnesty, pylint: disable=missing-module-docstring
from unittest.mock import patch

from django.test import TestCase

from common.djangoapps.track.backends.mongodb import MongoBackend


class TestMongoBackend(TestCase):  # lint-amnesty, pylint: disable=missing-class-docstring
    """Verify MongoBackend.send forwards each event to collection.insert."""

    def setUp(self):
        super().setUp()
        # Patch MongoClient so no real database connection is attempted;
        # the backend's `collection` becomes a MagicMock we can inspect.
        self.mongo_patcher = patch('common.djangoapps.track.backends.mongodb.MongoClient')
        self.mongo_patcher.start()
        self.addCleanup(self.mongo_patcher.stop)

        self.backend = MongoBackend()

    def test_mongo_backend(self):
        events = [{'test': 1}, {'test': 2}]

        self.backend.send(events[0])
        self.backend.send(events[1])

        # Check if we inserted events into the database
        calls = self.backend.collection.insert.mock_calls
        assert len(calls) == 2

        # Unpack the arguments and check if the events were used
        # as the first argument to collection.insert
        def first_argument(call):
            _, args, _ = call
            return args[0]

        assert events[0] == first_argument(calls[0])
        assert events[1] == first_argument(calls[1])
# -*- coding: utf-8 -*- from __future__ import unicode_literals, absolute_import import functools import inspect from .common import * # pylint: disable=redefined-builtin from .datastructures import Context from .exceptions import FieldError, DataError from .transforms import import_loop, validation_converter from .undefined import Undefined def validate(cls, instance_or_dict, trusted_data=None, partial=False, strict=False, convert=True, context=None, **kwargs): """ Validate some untrusted data using a model. Trusted data can be passed in the `trusted_data` parameter. :param cls: The model class to use as source for validation. If given an instance, will also run instance-level validators on the data. :param instance_or_dict: A ``dict`` or ``dict``-like structure for incoming data. :param partial: Allow partial data to validate; useful for PATCH requests. Essentially drops the ``required=True`` arguments from field definitions. Default: False :param strict: Complain about unrecognized keys. Default: False :param trusted_data: A ``dict``-like structure that may contain already validated data. :param convert: Controls whether to perform import conversion before validating. Can be turned off to skip an unnecessary conversion step if all values are known to have the right datatypes (e.g., when validating immediately after the
initial import). Default: True :returns: data ``dict`` containing the valid raw_data plus ``trusted_data``.
If errors are found, they are raised as a ValidationError with a list of errors attached. """ context = context or get_validation_context(partial=partial, strict=strict, convert=convert) errors = {} try: data = import_loop(cls, instance_or_dict, trusted_data=trusted_data, context=context, **kwargs) except DataError as exc: errors = exc.messages data = exc.partial_data errors.update(_validate_model(cls, data, context)) if errors: raise DataError(errors, data) return data def _validate_model(cls, data, context): """ Validate data using model level methods. :param cls: The Model class to validate ``data`` against. :param data: A dict with data to validate. Invalid items are removed from it. :returns: Errors of the fields that did not pass validation. """ errors = {} invalid_fields = [] for field_name, field in iteritems(cls._fields): if field_name in cls._validator_functions and field_name in data: value = data[field_name] try: cls._validator_functions[field_name](cls, data, value, context) except FieldError as exc: field = cls._fields[field_name] serialized_field_name = field.serialized_name or field_name errors[serialized_field_name] = exc.messages invalid_fields.append(field_name) for field_name in invalid_fields: data.pop(field_name) return errors def get_validation_context(**options): validation_options = { 'field_converter': validation_converter, 'partial': False, 'strict': False, 'convert': True, 'validate': True, 'new': False, } validation_options.update(options) return Context(**validation_options) def prepare_validator(func, argcount): if isinstance(func, classmethod): func = func.__get__(object).__func__ if len(inspect.getargspec(func).args) < argcount: @functools.wraps(func) def newfunc(*args, **kwargs): if not kwargs or kwargs.pop('context', 0) is 0: args = args[:-1] return func(*args, **kwargs) return newfunc return func __all__ = module_exports(__name__)
# Django settings for kguser project.
# Python 2 settings module: starts from karaage defaults, layers kguser
# specifics on top, then executes site-local override files via execfile().
from os import path

from karaage.conf.defaults import *

TEMPLATE_DIRS += (
    '/usr/share/kguser/templates',
)

ROOT_URLCONF = 'kguser.conf.urls'

SITE_ID = 2

STATIC_ROOT = '/var/lib/karaage-user/static'
STATIC_URL = '/kguser_media/'

LOGIN_URL = 'kgauth_login_select'

ALLOW_REGISTRATIONS = False

SESSION_EXPIRE_AT_BROWSER_CLOSE = True

# django-bootstrap3 rendering configuration (CDN-hosted assets).
BOOTSTRAP3 = {
    'jquery_url': '//code.jquery.com/jquery.min.js',
    'base_url': '//netdna.bootstrapcdn.com/bootstrap/3.1.1/',
    'css_url': '//netdna.bootstrapcdn.com/bootswatch/3.1.1/simplex/bootstrap.min.css',
    'theme_url': None,
    'javascript_url': None,
    'horizontal_label_class': 'col-md-2',
    'horizontal_field_class': 'col-md-4',
    'set_required': True,
}

INSTALLED_APPS = INSTALLED_APPS + ('kgauth', 'kgkeystone', 'kguser', 'bootstrap3', 'django_gravatar',)

import sys
# Under `manage.py test` load bundled test settings; otherwise load the
# machine-local deployment settings (must define e.g. ACCOUNTS_EMAIL, DEBUG).
if 'test' in sys.argv:
    execfile(path.join(path.dirname(__file__), "test_settings.py"))
else:
    execfile("/etc/karaage/user_settings.py")

# ACCOUNTS_EMAIL is expected to be provided by the settings file executed above.
DEFAULT_FROM_EMAIL = ACCOUNTS_EMAIL

if DEBUG:
    # In development, prefer in-tree templates over the installed ones.
    TEMPLATE_DIRS = (
        path.abspath(path.join(path.dirname(__file__), '..', '..', 'templates')),
        path.abspath(path.join(path.dirname(__file__), '..', 'templates')),
    ) + TEMPLATE_DIRS
import mock

from tools.wpt import revlist


def test_calculate_cutoff_date():
    """Cutoff snaps down to the epoch boundary at-or-before `until`."""
    assert revlist.calculate_cutoff_date(3601, 3600, 0) == 3600
    assert revlist.calculate_cutoff_date(3600, 3600, 0) == 3600
    assert revlist.calculate_cutoff_date(3599, 3600, 0) == 0
    assert revlist.calculate_cutoff_date(3600, 3600, 1) == 1
    assert revlist.calculate_cutoff_date(3600, 3600, -1) == 3599


def test_parse_epoch():
    """Epoch strings with h/d/w suffixes parse to seconds."""
    assert revlist.parse_epoch(b"10h") == 36000
    assert revlist.parse_epoch(b"10d") == 864000
    assert revlist.parse_epoch(b"10w") == 6048000


@mock.patch('subprocess.check_output')
def test_get_epoch_revisions(mocked_check_output):
    # NOTE: generator values are consumed with the builtin ``next()``;
    # the previous ``gen.next()`` method only exists on Python 2.

    # check:
    #
    # * Several revisions in the same epoch offset (BC, DEF, HIJ, and LM)
    # * Revision with a timestamp exactly equal to the epoch boundary (H)
    # * Revision in non closed interval (O)
    #
    # mon tue wed thu fri sat sun mon thu wed
    # |   |   |   |   |   |   |   |   |
    # -A---B-C---DEF---G---H--IJ----------K-----L-M----N--O--
    #                                                    ^
    #                                                    until
    # max_count: 5; epoch: 1d
    # Expected result: N,M,K,J,G,F,C,A
    epoch = 86400
    until = 1188000  # Wednesday, 14 January 1970 18:00:00 UTC
    mocked_check_output.return_value = b'''
merge_pr_O O 1166400 _wed_
merge_pr_N N 1080000 _tue_
merge_pr_M M 1015200 _mon_
merge_pr_L L 993600 _mon_
merge_pr_K K 907200 _sun_
merge_pr_J J 734400 _fri_
merge_pr_I I 712800 _fri_
merge_pr_H H 691200 _fri_
merge_pr_G G 648000 _thu_
merge_pr_F F 583200 _wed_
merge_pr_E E 561600 _wed_
merge_pr_D D 540000 _wed_
merge_pr_C C 475200 _tue_
merge_pr_B B 453600 _tue_
merge_pr_A A 388800 _mon_
'''
    tagged_revisons = revlist.get_epoch_revisions(epoch, until, 8)
    assert next(tagged_revisons) == 'A'.replace('A', 'N') if False else next(tagged_revisons) == 'N'
    assert next(tagged_revisons) == 'M'
    assert next(tagged_revisons) == 'K'
    assert next(tagged_revisons) == 'J'
    assert next(tagged_revisons) == 'G'
    assert next(tagged_revisons) == 'F'
    assert next(tagged_revisons) == 'C'
    assert next(tagged_revisons) == 'A'
    assert len(list(tagged_revisons)) == 0  # generator exhausted

    # check: max_count with enough candidate items in the revision list
    #
    # mon tue wed thu fri sat sun mon
    # |   |   |   |   |   |   |
    # ------B-----C-----D----E-----F-----G------H---
    #                                        ^
    #                                        until
    # max_count: 5; epoch: 1d
    # Expected result: G,F,E,D,C
    epoch = 86400
    until = 1015200  # Monday, 12 January 1970 18:00:00 UTC
    mocked_check_output.return_value = b'''
merge_pr_H H 993600 _mon_
merge_pr_G G 907200 _sun_
merge_pr_F F 820800 _sat_
merge_pr_E E 734400 _fri_
merge_pr_D D 648000 _thu_
merge_pr_C C 561600 _wed_
merge_pr_B B 475200 _thu_
'''
    tagged_revisons = revlist.get_epoch_revisions(epoch, until, 5)
    assert next(tagged_revisons) == 'G'
    assert next(tagged_revisons) == 'F'
    assert next(tagged_revisons) == 'E'
    assert next(tagged_revisons) == 'D'
    assert next(tagged_revisons) == 'C'
    assert len(list(tagged_revisons)) == 0  # generator exhausted

    # check: max_count with less returned candidates items than the needed
    #
    # mon tue wed thu fri sat sun mon
    # |   |   |   |   |   |   |
    # -----------------------------F-----G------H---
    #                                        ^
    #                                        until
    # max_count: 5; epoch: 1d
    # Expected result: G,F
    epoch = 86400
    until = 1015200  # Monday, 12 January 1970 18:00:00 UTC
    mocked_check_output.return_value = b'''
merge_pr_H H 993600 _mon_
merge_pr_G G 907200 _sun_
merge_pr_F F 820800 _sat_
'''
    tagged_revisons = revlist.get_epoch_revisions(epoch, until, 5)
    assert next(tagged_revisons) == 'G'
    assert next(tagged_revisons) == 'F'
    assert len(list(tagged_revisons)) == 0  # generator exhausted

    # check: initial until value is on an epoch boundary
    #
    #  sud mon tue wed thu
    #  |   |   |   |
    # -F-G-----------------H
    #                      ^
    #                      until
    # max_count: 3; epoch: 1d
    # Expected result: G,F
    # * H is skipped because because the epoch
    #   interval is defined as an right-open interval
    # * G is included but in the Monday's interval
    # * F is included because it is the unique candidate
    #   included in the Sunday's interval
    epoch = 86400
    until = 1296000  # Thursday, 15 January 1970 0:00:00 UTC
    mocked_check_output.return_value = b'''
merge_pr_H H 1296000 _wed_
merge_pr_G G 950400 _mon_
merge_pr_F F 921600 _sud_
'''
    tagged_revisons = revlist.get_epoch_revisions(epoch, until, 3)
    assert next(tagged_revisons) == 'G'
    assert next(tagged_revisons) == 'F'
    assert len(list(tagged_revisons)) == 0  # generator exhausted

    # check: until aligned with Monday, 5 January 1970 0:00:00 (345600)
    #        not with Thursday, 1 January 1970 0:00:00 (0)
    #
    #  sud mon tue wed thu
    #  |   |   |   |
    # -F-G--------------H---
    #                      ^
    #                      until
    # max_count: 1; epoch: 1w
    # Expected result: F
    epoch = 604800
    moday = 950400  # Monday, 12 January 1970 00:00:00 UTC
    until = moday + 345600  # 1296000. Thursday, 15 January 1970 0:00:00 UTC
    mocked_check_output.return_value = b'''
merge_pr_H H 1180800 _wed_
merge_pr_G G 950400 _mon_
merge_pr_F F 921600 _sud_
'''
    tagged_revisons = revlist.get_epoch_revisions(epoch, until, 1)
    assert next(tagged_revisons) == 'F'
    assert len(list(tagged_revisons)) == 0  # generator exhausted
        elif cch['sensor_type_index'] == CTF.CTFV_EEG_CH:
            coord_frame = FIFF.FIFFV_COORD_HEAD
            if use_eeg_pos:
                # EEG electrode coordinates may be present but in the
                # CTF head frame
                ch['loc'][:3] = cch['coil']['pos'][0]
                if not _at_origin(ch['loc'][:3]):
                    if t['t_ctf_head_head'] is None:
                        warn('EEG electrode (%s) location omitted because of '
                             'missing HPI information' % ch['ch_name'])
                        ch['loc'].fill(np.nan)
                        coord_frame = FIFF.FIFFV_MNE_COORD_CTF_HEAD
                    else:
                        ch['loc'][:3] = apply_trans(
                            t['t_ctf_head_head'], ch['loc'][:3])
            neeg += 1
            ch.update(logno=neeg, kind=FIFF.FIFFV_EEG_CH,
                      unit=FIFF.FIFF_UNIT_V, coord_frame=coord_frame,
                      coil_type=FIFF.FIFFV_COIL_EEG)
        elif cch['sensor_type_index'] == CTF.CTFV_STIM_CH:
            nstim += 1
            ch.update(logno=nstim, coord_frame=FIFF.FIFFV_COORD_UNKNOWN,
                      kind=FIFF.FIFFV_STIM_CH, unit=FIFF.FIFF_UNIT_V)
        else:
            # Anything unrecognized becomes a miscellaneous channel.
            nmisc += 1
            ch.update(logno=nmisc, coord_frame=FIFF.FIFFV_COORD_UNKNOWN,
                      kind=FIFF.FIFFV_MISC_CH, unit=FIFF.FIFF_UNIT_V)
    return chs


def _comp_sort_keys(c):
    """Sort the compensation data (by coefficient type, then scan number)."""
    return (int(c['coeff_type']), int(c['scanno']))


def _check_comp(comp):
    """Check that conversion to named matrices is possible."""
    # All entries of a given coeff_type must reference the same sensor list;
    # the first entry of each type serves as the reference.
    ref_sens = None
    kind = -1
    for k, c_k in enumerate(comp):
        if c_k['coeff_type'] != kind:
            c_ref = c_k
            ref_sens = c_ref['sensors']
            kind = c_k['coeff_type']
        elif not c_k['sensors'] == ref_sens:
            raise RuntimeError('Cannot use an uneven compensation matrix')


def _conv_comp(comp, first, last, chs):
    """Add a new converted compensation data item.

    Builds a named matrix from entries comp[first..last] (inclusive), all of
    one coefficient type; columns for channels absent from `chs` are dropped.
    """
    ch_names = [c['ch_name'] for c in chs]
    n_col = comp[first]['ncoeff']
    col_names = comp[first]['sensors'][:n_col]
    row_names = [comp[p]['sensor_name'] for p in range(first, last + 1)]
    mask = np.in1d(col_names, ch_names)  # missing channels excluded
    col_names = np.array(col_names)[mask].tolist()
    n_col = len(col_names)
    n_row = len(row_names)
    ccomp = dict(ctfkind=np.array([comp[first]['coeff_type']]),
                 save_calibrated=False)
    _add_kind(ccomp)
    data = np.empty((n_row, n_col))
    for ii, coeffs in enumerate(comp[first:last + 1]):
        # Pick the elements to the matrix
        data[ii, :] = coeffs['coeffs'][mask]
    ccomp['data'] = dict(row_names=row_names, col_names=col_names,
                         data=data, nrow=len(row_names),
                         ncol=len(col_names))
    mk = ('proper_gain', 'qgain')
    _calibrate_comp(ccomp, chs, row_names, col_names, mult_keys=mk,
                    flip=True)
    return ccomp


def _convert_comp_data(res4):
    """Convert the compensation data into named matrices."""
    if res4['ncomp'] == 0:
        return
    # Sort the coefficients in our favorite order
    res4['comp'] = sorted(res4['comp'], key=_comp_sort_keys)
    # Check that all items for a given compensation type have the correct
    # number of channels
    _check_comp(res4['comp'])
    # Create named matrices: one per run of equal coeff_type.
    first = 0
    kind = -1
    comps = list()
    for k in range(len(res4['comp'])):
        if res4['comp'][k]['coeff_type'] != kind:
            if k > 0:
                comps.append(_conv_comp(res4['comp'], first, k - 1,
                                        res4['chs']))
            kind = res4['comp'][k]['coeff_type']
            first = k
    # Close out the final run.
    comps.append(_conv_comp(res4['comp'], first, k, res4['chs']))
    return comps


def _pick_eeg_pos(c):
    """Pick EEG positions (skipping electrodes still at the origin)."""
    eeg = dict(coord_frame=FIFF.FIFFV_COORD_HEAD, assign_to_chs=False,
               labels=list(), ids=list(), rr=list(), kinds=list(), np=0)
    for ch in c['chs']:
        if ch['kind'] == FIFF.FIFFV_EEG_CH and not _at_origin(ch['loc'][:3]):
            eeg['labels'].append(ch['ch_name'])
            eeg['ids'].append(ch['logno'])
            eeg['rr'].append(ch['loc'][:3])
            eeg['kinds'].append(FIFF.FIFFV_POINT_EEG)
            eeg['np'] += 1
    if eeg['np'] == 0:
        return None
    logger.info('Picked positions of %d EEG channels from channel info'
                % eeg['np'])
    return eeg


def _add_eeg_pos(eeg, t, c):
    """Pick the (virtual) EEG position data."""
    if eeg is None:
        return
    if t is None or t['t_ctf_head_head'] is None:
        raise RuntimeError('No coordinate transformation available for EEG '
                           'position data')
    eeg_assigned = 0
    if eeg['assign_to_chs']:
        for k in range(eeg['np']):
            # Look for a channel name match
            for ch in c['chs']:
                if ch['ch_name'].lower() == eeg['labels'][k].lower():
                    # Overwrite the channel location in place (r0 aliases
                    # ch['loc'][:3]).
                    r0 = ch['loc'][:3]
                    r0[:] = eeg['rr'][k]
                    if eeg['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_HEAD:
                        r0[:] = apply_trans(t['t_ctf_head_head'], r0)
                    elif eeg['coord_frame'] != FIFF.FIFFV_COORD_HEAD:
                        raise RuntimeError(
                            'Illegal coordinate frame for EEG electrode '
                            'positions : %s'
                            % _coord_frame_name(eeg['coord_frame']))
                    # Use the logical channel number as an identifier
                    eeg['ids'][k] = ch['logno']
                    eeg['kinds'][k] = FIFF.FIFFV_POINT_EEG
                    eeg_assigned += 1
                    break

    # Add these to the Polhemus data
    fid_count = eeg_count = extra_count = 0
    for k in range(eeg['np']):
        d = dict(r=eeg['rr'][k].copy(), kind=eeg['kinds'][k],
                 ident=eeg['ids'][k], coord_frame=FIFF.FIFFV_COORD_HEAD)
        c['dig'].append(d)
        if eeg['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_HEAD:
            d['r'] = apply_trans(t['t_ctf_head_head'], d['r'])
        elif eeg['coord_frame'] != FIFF.FIFFV_COORD_HEAD:
            raise RuntimeError('Illegal coordinate frame for EEG electrode '
                               'positions: %s'
                               % _coord_frame_name(eeg['coord_frame']))
        if eeg['kinds'][k] == FIFF.FIFFV_POINT_CARDINAL:
            fid_count += 1
        elif eeg['kinds'][k] == FIFF.FIFFV_POINT_EEG:
            eeg_count += 1
        else:
            extra_count += 1
    if eeg_assigned > 0:
        logger.info('    %d EEG electrode locations assigned to channel info.'
                    % eeg_assigned)
    for count, kind in zip((fid_count, eeg_count, extra_count),
                           ('fiducials', 'EEG locations', 'extra points')):
        if count > 0:
            logger.info('    %d %s added to Polhemus data.' % (count, kind))


# Map CTF filter type codes to the corresponding info keys.
_filt_map = {CTF.CTFV_FILTER_LOWPASS: 'lowpass',
             CTF.CTFV_FILTER_HIGHPASS: 'highpass'}


def _compose_meas_info(res4, coils, trans, eeg):
    """Create meas info from CTF data."""
    info = _empty_info(res4['sfreq'])

    # Collect all the necessary data from the structures read
    info['meas_id'] = get_new_file_id()
    info['meas_id']['usecs'] = 0
    info['meas_id']['secs'] = _convert_time(res4['data_date'],
                                            res4['data_time'])
    info['meas_date'] = (info['meas_id']['secs'], info['meas_id']['usecs'])
    info['experimenter'] = res4['nf_operator']
    info['subject_info'] = dict(his_id=res4['nf_subject_id'])
    for filt in res4['filters']:
        if filt['type'] in _filt_map:
            info[_filt_map[filt['type']]] = filt['freq']
    info['dig'], info['hpi_results'] = _pick_isotrak_and_hpi_coils(
        res4, coils, trans)
    if trans is not None:
        if len(info['hpi_results']) > 0:
            info['hpi_results'][0]['coord_trans'] = trans['t_ctf_head_head']
        if trans['t_dev_head'] is not None:
            info['dev_head_t'] = trans['t_dev_hea
#!/usr/bin/env python
# coding: utf-8
# Tests for youtube-dl's HTTP/HTTPS/proxy handling, using a local
# compat_http_server instance per test case.

from __future__ import unicode_literals

# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from youtube_dl import YoutubeDL
from youtube_dl.compat import compat_http_server, compat_urllib_request
import ssl
import threading

TEST_DIR = os.path.dirname(os.path.abspath(__file__))


def http_server_port(httpd):
    # Return the ephemeral port the test server actually bound to.
    if os.name == 'java' and isinstance(httpd.socket, ssl.SSLSocket):
        # In Jython SSLSocket is not a subclass of socket.socket
        sock = httpd.socket.sock
    else:
        sock = httpd.socket
    return sock.getsockname()[1]


class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
    """Serves a tiny HTML page, a fake mp4, and a non-ASCII 302 redirect."""

    def log_message(self, format, *args):
        # Silence per-request logging during tests.
        pass

    def do_GET(self):
        if self.path == '/video.html':
            self.send_response(200)
            self.send_header('Content-Type', 'text/html; charset=utf-8')
            self.end_headers()
            self.wfile.write(b'<html><video src="/vid.mp4" /></html>')
        elif self.path == '/vid.mp4':
            self.send_response(200)
            self.send_header('Content-Type', 'video/mp4')
            self.end_headers()
            self.wfile.write(b'\x00\x00\x00\x00\x20\x66\x74[video]')
        elif self.path == '/302':
            if sys.version_info[0] == 3:
                # XXX: Python 3 http server does not allow non-ASCII header values
                self.send_response(404)
                self.end_headers()
                return

            new_url = 'http://localhost:%d/中文.html' % http_server_port(self.server)
            self.send_response(302)
            self.send_header(b'Location', new_url.encode('utf-8'))
            self.end_headers()
        elif self.path == '/%E4%B8%AD%E6%96%87.html':
            # Percent-encoded form of /中文.html used by the redirect above.
            self.send_response(200)
            self.send_header('Content-Type', 'text/html; charset=utf-8')
            self.end_headers()
            self.wfile.write(b'<html><video src="/vid.mp4" /></html>')
        else:
            assert False


class FakeLogger(object):
    """No-op logger to keep test output quiet."""

    def debug(self, msg):
        pass

    def warning(self, msg):
        pass

    def error(self, msg):
        pass


class TestHTTP(unittest.TestCase):
    def setUp(self):
        self.httpd = compat_http_server.HTTPServer(
            ('localhost', 0), HTTPTestRequestHandler)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def test_unicode_path_redirection(self):
        # XXX: Python 3 http server does not allow non-ASCII header values
        if sys.version_info[0] == 3:
            return

        ydl = YoutubeDL({'logger': FakeLogger()})
        r = ydl.extract_info('http://localhost:%d/302' % self.port)
        self.assertEqual(r['url'], 'http://localhost:%d/vid.mp4' % self.port)


class TestHTTPS(unittest.TestCase):
    def setUp(self):
        certfn = os.path.join(TEST_DIR, 'testcert.pem')
        self.httpd = compat_http_server.HTTPServer(
            ('localhost', 0), HTTPTestRequestHandler)
        # NOTE(review): ssl.wrap_socket is deprecated (removed in Python
        # 3.12) — newer code should use SSLContext.wrap_socket; confirm the
        # supported interpreter range before changing.
        self.httpd.socket = ssl.wrap_socket(
            self.httpd.socket, certfile=certfn, server_side=True)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def test_nocheckcertificate(self):
        if sys.version_info >= (2, 7, 9):  # No certificate checking anyways
            ydl = YoutubeDL({'logger': FakeLogger()})
            self.assertRaises(
                Exception,
                ydl.extract_info, 'https://localhost:%d/video.html' % self.port)

        ydl = YoutubeDL({'logger': FakeLogger(), 'nocheckcertificate': True})
        r = ydl.extract_info('https://localhost:%d/video.html' % self.port)
        self.assertEqual(r['url'], 'https://localhost:%d/vid.mp4' % self.port)


def _build_proxy_handler(name):
    # Factory: each proxy instance echoes its name plus the requested path,
    # so tests can tell which proxy served a request.
    class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
        proxy_name = name

        def log_message(self, format, *args):
            pass

        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-Type', 'text/plain; charset=utf-8')
            self.end_headers()
            self.wfile.write('{self.proxy_name}: {self.path}'.format(self=self).encode('utf-8'))
    return HTTPTestRequestHandler


class TestProxy(unittest.TestCase):
    def setUp(self):
        self.proxy = compat_http_server.HTTPServer(
            ('localhost', 0), _build_proxy_handler('normal'))
        self.port = http_server_port(self.proxy)
        self.proxy_thread = threading.Thread(target=self.proxy.serve_forever)
        self.proxy_thread.daemon = True
        self.proxy_thread.start()

        self.geo_proxy = compat_http_server.HTTPServer(
            ('localhost', 0), _build_proxy_handler('geo'))
        self.geo_port = http_server_port(self.geo_proxy)
        self.geo_proxy_thread = threading.Thread(target=self.geo_proxy.serve_forever)
        self.geo_proxy_thread.daemon = True
        self.geo_proxy_thread.start()

    def test_proxy(self):
        geo_proxy = 'localhost:{0}'.format(self.geo_port)
        ydl = YoutubeDL({
            'proxy': 'localhost:{0}'.format(self.port),
            'geo_verification_proxy': geo_proxy,
        })
        url = 'http://foo.com/bar'
        response = ydl.urlopen(url).read().decode('utf-8')
        self.assertEqual(response, 'normal: {0}'.format(url))

        # Per-request override header routes through the geo proxy instead.
        req = compat_urllib_request.Request(url)
        req.add_header('Ytdl-request-proxy', geo_proxy)
        response = ydl.urlopen(req).read().decode('utf-8')
        self.assertEqual(response, 'geo: {0}'.format(url))

    def test_proxy_with_idn(self):
        ydl = YoutubeDL({
            'proxy': 'localhost:{0}'.format(self.port),
        })
        url = 'http://中文.tw/'
        response = ydl.urlopen(url).read().decode('utf-8')
        # b'xn--fiq228c' is '中文'.encode('idna')
        self.assertEqual(response, 'normal: http://xn--fiq228c.tw/')


if __name__ == '__main__':
    unittest.main()
scr = getTunerDescription(nim) return "Europe" in descr and "DVB-T" in descr def channel2frequency(channel, nim): descr = getTunerDescription(nim) if "Europe" in descr and "DVB-T" in descr: if 5 <= channel <= 12: return (177500 + 7000*(channel- 5))*1000 elif 21 <= channel <= 69: return (474000 + 8000*(channel-21))*1000 return 474000000 def ConvertToHumanReadable(tp, tunertype = None): ret = { } if tunertype is None: tunertype = tp.get("tuner_type", "None") if tunertype == "DVB-S": ret["tuner_type"] = _("Satellite") ret["inversion"] = { eDVBFrontendParametersSatellite.Inversion_Unknown : _("Auto"), eDVBFrontendParametersSatellite.Inversion_On : _("On"), eDVBFrontendParametersSatellite.Inversion_Off : _("Off")}.get(tp.get("inversion")) ret["fec_inner"] = { eDVBFrontendParametersSatellite.FEC_None : _("None"), eDVBFrontendParametersSatellite.FEC_Auto : _("Auto"), eDVBFrontendParametersSatellite.FEC_1_2 : "1/2", eDVBFrontendParametersSatellite.FEC_2_3 : "2/3", eDVBFrontendParametersSatellite.FEC_3_4 : "3/4", eDVBFrontendParametersSatellite.FEC_5_6 : "5/6", eDVBFrontendParametersSatellite.FEC_6_7 : "6/7", eDVBFrontendParametersSatellite.FEC_7_8 : "7/8", eDVBFrontendParametersSatellite.FEC_3_5 : "3/5", eDVBFrontendParametersSatellite.FEC_4_5 : "4/5", eDVBFrontendParametersSatellite.FEC_8_9 : "8/9", eDVBFrontendParametersSatellite.FEC_9_10 : "9/10"}.get(tp.get("fec_inner")) ret["modulation"] = { eDVBFrontendParametersSatellite.Modulation_Auto : _("Auto"), eDVBFrontendParametersSatellite.Modulation_QPSK : "QPSK", eDVBFrontendParametersSatellite.Modulation_QAM16 : "QAM16", eDVBFrontendParametersSatellite.Modulation_8PSK : "8PSK", eDVBFrontendParametersSatellite.Modulation_16APSK : "16APSK", eDVBFrontendParametersSatellite.Modulation_32APSK : "32APSK"}.get(tp.get("modulation")) ret["orbital_position"] = nimmanager.getSatName(int(tp.get("orbital_position"))) ret["orb_pos"] = orbpos(int(tp.get("orbital_position"))) ret["polarization"] = { 
eDVBFrontendParametersSatellite.Polarisation_Horizontal : _("Horizontal"), eDVBFrontendParametersSatellite.Polarisation_Vertical : _("Vertical"), eDVBFrontendParametersSatellite.Polarisation_CircularLeft : _("Circular left"), eDVBFrontendParametersSatellite.Polarisation_CircularRight : _("Circular right")}.get(tp.get("polarization")) ret["polarization_abbreviation"] = { eDVBFrontendParametersSatellite.Polarisation_Horizontal : "H", eDVBFrontendParametersSatellite.Polarisation_Vertical : "V", eDVBFrontendParametersSatellite.Polarisation_CircularLeft : "L", eDVBFrontendParametersSatellite.Polarisation_CircularRight : "R"}.get(tp.get("polarization")) ret["system"] = { eDVBFrontendParametersSatellite.System_DVB_S : "DVB-S", eDVBFrontendParametersSatellite.System_DVB_S2 : "DVB-S2"}.get(tp.get("system")) if ret["system"] == "DVB-S2": ret["rolloff"] = { eDVBFrontendParametersSatellite.RollOff_alpha_0_35 : "0.35", eDVBFrontendParametersSatellite.RollOff_alpha_0_25 : "0.25", eDVBFrontendParametersSatellite.RollOff_alpha_0_20 : "0.20", eDVBFrontendParametersSatellite.RollOff_auto : _("Auto")}.get(tp.get("rolloff")) ret["pilot"] = { eDVBFrontendParametersSatellite.Pilot_Unknown : _("Auto"), eDVBFrontendParametersSatellite.Pilot_On : _("On"), eDVBFrontendParametersSatellite.Pilot_Off : _("Off")}.get(tp.get("pilot")) ret["pls_mode"] = { eDVBFrontendParametersSatellite.PLS_Root : _("Root"), eDVBFrontendParametersSatellite.PLS_Gold : _("Gold"), eDVBFrontendParametersSatellite.PLS_Combo : _("Combo"), eDVBFrontendParametersSatellite.PLS_Unknown : _("Unknown")}.get(tp.get("pls_mode")) else: ret["pls_mode"] = None ret["is_id"] = None ret["pls_code"] = None elif tunertype == "DVB-C": ret["tuner_type"] = _("Cable") ret["modulation"] = { eDVBFrontendParametersCable.Modulation_Auto: _("Auto"), eDVBFrontendParametersCable.Modulation_QAM16 : "QAM16", eDVBFrontendParametersCable.Modulation_QAM32 : "QAM32", eDVBFrontendParametersCable.Modulation_QAM64 : "QAM64", 
eDVBFrontendParametersCable.Modulation_QAM128 : "QAM128", eDVBFrontendParametersCable.Modulation_QAM256 : "QAM256"}.get(tp.get("modulation")) ret["inversion"] = { eDVBFrontendParametersCable.Inversion_Unknown : _("Auto"), eDVBFrontendParametersCable.Inversion_On : _("On"), eDVBFrontendParametersCable.Inversion_Off : _("Off")}.get(tp.get("inversion")) ret["fec_inner"] = { eDVBFrontendParametersCable.FEC_None : _("None"), eDVBFrontendParametersCable.FEC_Auto : _("Auto"), eDVBFrontendParametersCable.FEC_1_2 : "1/2", eDVBFrontendParametersCable.FEC_2_3 : "2/3", eDVBFrontendParametersCable.FEC_3_4 : "3/4", eDVBFrontendParametersCable.FEC_5_6 : "5/6", eDVBFrontendParametersCable.FEC_7_8 : "7/8", eDVBFrontendParametersCable.FEC_8_9 : "8/9", eDVBFrontendParametersCable.FEC_3_5 : "3/5", eDVBFrontendParametersCable.FEC_4_5 : "4/5", eDVBFrontendParametersCable.FEC_9_10 : "9/10"}.get(tp.get("fec_inner")) ret["system"] = { eDVBFrontendParametersCable.System_DVB_C_ANNEX_A : "DVB-C", eDVBFrontendParametersCable.System_DVB_C_ANNEX_C : "DVB-C ANNEX C"}.get(tp.get("system")) elif tunertype == "DVB-T": ret["tuner_type"] = _("Terrestrial") ret["bandwidth"] = { 0 : _("Auto"), 10000000 : "10 MHz", 8000000 : "8 MHz", 7000000 : "7 MHz", 6000000 : "6 MHz", 5000000 : "5 MHz", 1712000 : "1.712 MHz"}.get(tp.get("bandwidth")) ret["code_rate_lp"] = { eDVBFrontendParametersTerrestrial.FEC_Auto : _("Auto"), eDVBFrontendParametersTerrestrial.FEC_1_2 : "1/2", eDVBFrontendParametersTerrestrial.FEC_2_3 : "2/3", eDVBFrontendParametersTerrestrial.FEC_3_4 : "3/4", eDVBFrontendParametersTerrestrial.FEC_5_6 : "5/6", eDVBFrontendParametersTerrestrial.FEC_6_7 : "6/7", eDVBFrontendParametersTerrestrial.FEC_7_8 : "7/8", eDVBFrontendParametersTerrestrial.FEC_8_9 : "8/9"}.get(tp.get("code_rate_lp")) ret["code_rate_hp"] = { eDVBFrontendParametersTerrestrial.FEC_Auto : _("Auto"), eDVBFrontendParametersTerrestrial.FEC_1_2 : "1/2", eDVBFrontendParametersTerrestrial.FEC_2_3 : "2/3", 
eDVBFrontendParametersTerrestrial.FEC_3_4 : "3/4", eDVBFrontendParametersTerrestrial.FEC_5_6 : "5/6", eDVBFrontend
ParametersTerrestrial.FEC_6_7 : "6/7", eDVBFrontendParametersTerrestrial.FEC_7_8 : "7/8", eDVBFrontendParametersTerrestrial.FEC_8_9 : "8/9"}.get(tp.get("code_rate_hp")) ret["constellation"] =
{ eDVBFrontendParametersTerrestrial.Modulation_Auto : _("Auto"), eDVBFrontendParametersTerrestrial.Modulation_QPSK : "QPSK", eDVBFrontendParametersTerrestrial.Modulation_QAM16 : "QAM16", eDVBFrontendParametersTerrestrial.Modulation_QAM64 : "QAM64", eDVBFrontendParametersTerrestrial.Modulation_QAM256 : "QAM256"}.get(tp.get("constellation")) ret["transmission_mode"] = { eDVBFrontendParametersTerrestrial.TransmissionMode_Auto : _("Auto"), eDVBFrontendParametersTerrestrial.TransmissionMode_1k : "1k", eDVBFrontendParametersTerrestrial.TransmissionMode_2k : "2k", eDVBFrontendParametersTerrestrial.TransmissionMode_4k : "4k", eDVBFrontendParametersTerrestrial.TransmissionMode_8k : "8k", eDVBFrontendParametersTerrestrial.TransmissionMode_16k : "16k", eDVBFrontendParametersTerrestrial.TransmissionMode_32k : "32k"}.get(tp.get("transmission_mode")) ret["guard_interval"] = { eDVBFrontendParametersTerrestrial.GuardInterval_Auto : _("Auto"), eDVBFrontendParametersTerrestrial.GuardInterval_19_256 : "19/256", eDVBFrontendParametersTerrestrial.GuardInterval_19_128 : "19/128", eDVBFrontendParametersTerrestrial.GuardInterval_1_128 : "1/128", eDVBFrontendParametersTerrestrial.GuardInterval_1_32 : "1/32", eDVBFrontendParametersTerrestrial.GuardInterval_1_16 : "1/16", eDVBFrontendParametersTerrestrial.GuardInterval_1_8 : "1/8", eDVBFrontendParametersTerrestrial.GuardInterval_1_4 : "1/4"}.get(tp.get("guard_interval")) ret["hierarchy_information"] = { eDVBFrontendParametersTerrestrial.Hierarchy_Auto : _("Auto"), eDVBFrontendParametersTerrestrial.Hie
'interface': 'test_interface', 'filter': 'test_filter', 'chefs': ['netdumplings.dumplingchefs.ARPChef'], 'poke_interval': 10.0, }, ), ) # We exited the infinite loop by faking the end of the sniffer process. # This means we should have called terminate() on the emitter process. mock_sniffer_process.start.assert_called_once() assert mock_sniffer_process.terminate.call_count == 0 mock_dumpling_emitter_process.start.assert_called_once() mock_dumpling_emitter_process.terminate.assert_called_once() assert result.exit_code == 0 def test_no_valid_chefs(self, mocker): """ Test that no valid chefs results in an error log and an exit code of 1. """ mocker.patch( 'netdumplings.console.sniff.get_valid_chefs', return_value={}, ) logger = logging.getLogger('netdumplings.console.sniff') mock_error = mocker.patch.object(logger, 'error') runner = click.testing.CliRunner() result = runner.invoke( sniff_cli, [ '--kitchen-name', 'test_kitchen', ], ) mock_error.assert_called_once_with( 'test_kitchen: No valid chefs found. Not starting sniffer.' ) assert result.exit_code == 1 class TestSniffChefList: """ Test the chef_list() function. """ def test_chef_list(self, mocker): """ Test requesting a chef list. """ mock_list_chefs = mocker.patch( 'netdumplings.console.sniff.list_chefs' ) runner = click.testing.CliRunner() result = runner.invoke( sniff_cli, [ '--chef-list', '--chef-module', 'testchefs.one', '--chef-module', 'morechefs', ], ) assert result.exit_code == 0 mock_list_chefs.assert_called_once_with(('testchefs.one', 'morechefs')) class TestSniffNetworkSniffer: """ Test the network_sniffer() function. """ def test_network_sniffer(self, mocker): """ Test calling network_sniffer(). We pass in a single valid chef and perform the following checks: - The kitchen gets instantiated. - The chef is instantiated, assigned to the kitchen, and given the dumpling queue. - The kitchen's run() method is called. 
""" # network_sniffer() uses the __import__ builtin to import chefs, so we # need to patch that. builtin_import = builtins.__import__ chef_class_callable = mocker.Mock() def import_side_effect(*args, **kwargs): if args[0] == 'chefmodule': return types.SimpleNamespace(ChefName=chef_class_callable) return builtin_import(*args, **kwargs) mocker.patch.object( builtin
s, '__import__', side_effect=import_side_effect ) mock_dumpling_kitchen = mocker.patch('netdumplings.DumplingKitchen') kitchen_name = 'test_kitchen' interface = 'test_interface' chefs = '' chef_modules = ''
valid_chefs = {'chefmodule': ['ChefName']} sniffer_filter = 'test_filter' chef_poke_interval = 10 dumpling_queue = mocker.Mock() network_sniffer( kitchen_name, interface, chefs, chef_modules, valid_chefs, sniffer_filter, chef_poke_interval, dumpling_queue, ) # Check that the DumplingKitchen was instantiated and run() was called. mock_dumpling_kitchen.assert_called_once_with( name=kitchen_name, interface=interface, sniffer_filter=sniffer_filter, chef_poke_interval=chef_poke_interval, dumpling_queue=dumpling_queue, ) mock_dumpling_kitchen.return_value.run.assert_called_once() chef_class_callable.assert_called_once_with( kitchen=mock_dumpling_kitchen.return_value, ) def test_network_sniffer_with_module_and_file_chefs(self, mocker): """ Test calling network_sniffer() with one valid chef from a module and another valid chef from a file. We just check that both __import__ and importlib.util.spec_from_file_location get called once each. """ # network_sniffer() uses the __import__ builtin to import chefs, so we # need to patch that. 
builtin_import = builtins.__import__ module_chef_callable = mocker.Mock() file_chef_callable = mocker.Mock() def import_side_effect(*args, **kwargs): if args[0] == 'chefmodule': return types.SimpleNamespace( ChefNameFromModule=module_chef_callable ) return builtin_import(*args, **kwargs) mocker.patch.object( builtins, '__import__', side_effect=import_side_effect ) mocker.patch.object( importlib.util, 'module_from_spec', return_value=types.SimpleNamespace( ChefNameFromFile=file_chef_callable ) ) mocker.patch.object(importlib.util, 'spec_from_file_location') kitchen_name = 'test_kitchen' interface = 'test_interface' chefs = '' chef_modules = '' valid_chefs = { 'chefmodule': ['ChefNameFromModule'], 'tests/data/chefs_in_a_file.py': ['ChefNameFromFile'], } sniffer_filter = 'test_filter' chef_poke_interval = 10 dumpling_queue = mocker.Mock() mocker.patch('netdumplings.DumplingKitchen') network_sniffer( kitchen_name, interface, chefs, chef_modules, valid_chefs, sniffer_filter, chef_poke_interval, dumpling_queue, ) # Check that our two mock chefs were instantiated. assert module_chef_callable.call_count == 1 assert file_chef_callable.call_count == 1 class TestSniffListChefs: """ Test the list_chefs() function. """ def test_list_chefs_default(self): """ Test default import of chefs from netdumplings.dumplingchefs. """ runner = click.testing.CliRunner() result = runner.invoke( sniff_cli, [ '--chef-list', ], ) assert result.exit_code == 0 assert result.output == ( '\nnetdumplings.dumplingchefs\n' ' ARPChef\n' ' DNSLookupChef\n' ' PacketCountChef\n\n' ) def test_invalid_module(self, mocker): """ Test attempt to list chefs from one valid and one missing module. 
""" mocker.patch( 'netdumplings.DumplingKitchen.get_chefs_in_modules', return_value={ 'doesnotexist': { 'import_error': 'does not exist', 'chef_classes': [], }, 'valid': { 'import_error': False, 'chef_classes': ['ValidOneChef', 'ValidTwoChef'], } } ) runner = click.testing.CliRunner() result = runner.invoke( sniff_cli, [ '--chef-list', '--chef-module', 'doesnotexist' ], ) assert result.exit_code == 0 assert result.output == ( '\n' 'doesnotexist\n' ' error importing module: does not exist\n' '\n' 'valid\n' ' ValidOneChef\n' ' ValidTwoChef\n' '\n' ) class TestSniffGetValidChefs: """ Test the get_valid_chefs() function. """ def test_chef_retrieval(self, mocker): """ We fake two chef modules containing three chefs total, but one of them has its assignable_to_kitchen set to False. Only the remaining two should be imported. We also requ
rptionChiller chiller_prop: parameters in the characteristic equations and the external flow rates. :param input_conditions: :type input_conditions: dict :return: a dict with operating conditions of the chilled water, cooling water and hot water loops in a absorption chiller. To improve speed, the system of equations was solved using sympy for the output variable ``q_hw_kW`` which is then used to compute the remaining output variables. The following code was used to create the expression to calculate ``q_hw_kW`` with:: # use symbolic computation to derive a formula for q_hw_kW: # first, make sure all the variables are sympy symbols: T_chw_in_C, T_chw_out_C, T_cw_in_C, T_hw_in_C, mcp_cw_kWperK, mcp_hw_kWperK, q_chw_kW = sympy.symbols( "T_chw_in_C, T_chw_out_C, T_cw_in_C, T_hw_in_C, mcp_cw_kWperK, mcp_hw_kWperK, q_chw_kW") T_hw_out_C, T_cw_out_C, q_hw_kW = sympy.symbols('T_hw_out_C, T_cw_out_C, q_hw_kW') a_e, a_g, e_e, e_g, r_e, r_g, s_e, s_g = sympy.symbols("a_e, a_g, e_e, e_g, r_e, r_g, s_e, s_g") ddt_e, ddt_g = sympy.symbols("ddt_e, ddt_g") # the system of equations: eq_e = s_e * ddt_e + r_e - q_chw_kW eq_ddt_e = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_e * (T_cw_in_C + T_cw_out_C) / 2.0 + e_e * (T_chw_in_C + T_chw_out_C) / 2.0 - ddt_e) eq_g = s_g * ddt_g + r_g - q_hw_kW eq_ddt_g = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_g * (T_cw_in_C + T_cw_out_C) / 2.0 + e_g * (T_chw_in_C + T_chw_out_C) / 2.0 - ddt_g) eq_bal_g = (T_hw_in_C - T_hw_out_C) - q_hw_kW / mcp_hw_kWperK # solve the system of equations with sympy eq_sys = [eq_e, eq_g, eq_bal_g, eq_ddt_e, eq_ddt_g] unknown_variables = (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g) a, b = sympy.linear_eq_to_matrix(eq_sys, unknown_variables) T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g = tuple(*sympy.linsolve(eq_sys, unknown_variables)) q_hw_kW.simplify() ..[Kuhn A. & Ziegler F., 2005] Operational results of a 10kW absorption chiller and adaptation of the characteristic equation. 
In: Proceedings of the interantional conference solar air conditioning. Bad Staffelstein, Germany: 2005. """ # external water circuits (e: chilled water, ac: cooling water, d: hot water) T_hw_in_C = input_conditions['T_hw_in_C'] T_cw_in_C = input_conditions['T_ground_K'] - 273.0 # condenser water inlet temperature T_chw_in_C = input_conditions['T_chw_re_K'] - 273.0 # inlet to the evaporator T_chw_out_C = input_conditions['T_chw_sup_K'] - 273.0 # outlet from the evaporator q_chw_kW = input_conditions['q_chw_W'] / 1000 # cooling load ata the evaporator m_cw_kgpers = absorption_chiller.m_cw_kgpers # external flow rate of cooling water at the condenser and absorber m_hw_kgpers = absorption_chiller.m_hw_kgpers # external flow rate of hot water at the generator mcp_cw_kWperK = m_cw_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK / 1000 mcp_hw_kWperK = m_hw_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK / 1000 # chiller_props (these are constants from the Absorption_chiller sheet in systems.xls) s_e = absorption_chiller.s_e r_e = absorption_chiller.r_e s_g = absorption_chiller.s_g r_g = absorption_chiller.r_g a_e = absorption_chiller.a_e e_e = absorption_chiller.e_e a_g = absorption_chiller.a_g e_g = absorption_chiller.e_g # variables to solve # T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g = sympy.symbols('T_hw_out_C T
_cw_out_C q_hw_kW , ddt_e,
ddt_g') # # # systems of equations to solve # eq_e = s_e * ddt_e + r_e - q_chw_kW # eq_ddt_e = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_e * (T_cw_in_C + T_cw_out_C) / 2.0 + e_e * (T_chw_in_C + T_chw_out_C) / 2.0 - ddt_e) # eq_g = s_g * ddt_g + r_g - q_hw_kW # eq_ddt_g = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_g * (T_cw_in_C + T_cw_out_C) / 2.0 + e_g * (T_chw_in_C + T_chw_out_C) / 2.0- ddt_g) # eq_bal_g = (T_hw_in_C - T_hw_out_C) - q_hw_kW / mcp_hw_kWperK # # # solve the system of equations with sympy # eq_sys = [eq_e, eq_g, eq_bal_g, eq_ddt_e, eq_ddt_g] # unknown_variables = (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g) # (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g) = tuple(*sympy.linsolve(eq_sys, unknown_variables)) # a = np.array([ # [0, 0, 0, s_e, 0], # [0, 0, -1, 0, s_g], # [-1, 0, -1 / mcp_hw_kWperK, 0, 0], # [0.5, 0, 0, -1, 0], # [0.5, 0, 0, 0, -1]]) # b = np.array([ # [q_chw_kW - r_e], # [-r_g], # [-T_hw_in_C], # [-0.5 * T_hw_in_C - 0.5 * e_e * (T_chw_in_C + T_chw_out_C)], # [-0.5 * T_hw_in_C - 0.5 * e_g * (T_chw_in_C + T_chw_out_C)]]) # the below equation for q_hw_kW was created with sympy.linsolve using symbols for all the variables. 
q_hw_kW = ((r_g * s_e * (0.5 * a_e * mcp_hw_kWperK + 0.25 * s_g * (a_e - a_g)) + s_g * (0.5 * a_g * mcp_hw_kWperK * (q_chw_kW - r_e) + s_e * (0.5 * mcp_hw_kWperK * (a_e * (0.5 * T_chw_in_C * e_g + 0.5 * T_chw_out_C * e_g + 0.5 * T_cw_in_C * a_g + 1.0 * T_hw_in_C) - a_g * (0.5 * T_chw_in_C * e_e + 0.5 * T_chw_out_C * e_e + 0.5 * T_cw_in_C * a_e + 1.0 * T_hw_in_C)) - 0.25 * r_g * (a_e - a_g)))) / (s_e * (0.5 * a_e * mcp_hw_kWperK + 0.25 * s_g * (a_e - a_g)))) # calculate results q_cw_kW = q_hw_kW + q_chw_kW # Q(condenser) + Q(absorber) T_hw_out_C = T_hw_in_C - q_hw_kW / mcp_hw_kWperK T_cw_out_C = T_cw_in_C + q_cw_kW / mcp_cw_kWperK # TODO: set upper bound of the chiller operation return {'T_hw_out_C': T_hw_out_C, 'T_cw_out_C': T_cw_out_C, 'q_chw_W': q_chw_kW * 1000, 'q_hw_W': q_hw_kW * 1000, 'q_cw_W': q_cw_kW * 1000} def calc_power_demand(q_chw_W, chiller_prop): """ Calculates the power demand of the solution and refrigeration pumps in absorption chillers. Linear equations derived from manufacturer's catalog _[Broad Air Conditioning, 2018]. :param q_chw_W: :param ACH_type: :return: ..[Broad Air Conditioning, 2018] BROAD XII NON-ELECTRIC CHILLER. (2018). 
etrieved from https://www.broadusa.net/en/wp-content/uploads/2018/12/BROAD-XII-US-Catalog2018-12.pdf """ ach_type = chiller_prop['type'].values[0] if ach_type == 'single': w_dot_W = 0.0028 + 2941 else: w_dot_W = 0.0021 * q_chw_W + 2757 # assuming the same for double and triple effect chillers return w_dot_W # Investment costs def calc_Cinv_ACH(Q_nom_W, Absorption_chiller_cost_data, ACH_type): """ Annualized investment costs for the vapor compressor chiller :type Q_nom_W : float :param Q_nom_W: peak cooling demand in [W] :returns InvCa: annualized chiller investment cost in CHF/a :rtype InvCa: float """ Capex_a_ACH_USD = 0 Opex_fixed_ACH_USD = 0 Capex_ACH_USD = 0 if Q_nom_W > 0: Absorption_chiller_cost_data = Absorption_chiller_cost_data[Absorption_chiller_cost_data['type'] == ACH_type] max_chiller_size = max(Absorption_chiller_cost_data['cap_max'].values) Q_nom_W = Absorption_chiller_cost_data['cap_min'].values.min() if Q_nom_W < Absorption_chiller_cost_data[ 'cap_min'].values.min() else Q_nom_W # minimum technology size if Q_nom_W <= max_chiller_size: Absorption_chiller_cost_data = Absorption_chiller_cost_data[ (Absorption_chiller_cost_data['cap_min'] <= Q_nom_W) & ( Absorption_chiller_cost_data[
"""Tests for the waterbutler ``move`` background task.

The fixtures below monkeypatch the celery result backend, the signed-request
callback, analytics logging, and (optionally) ``time.time`` so that the task
runs synchronously and deterministically under pytest.
"""
import sys
import copy
import time
import asyncio
import hashlib
from unittest import mock

import celery
import pytest

from waterbutler import tasks  # noqa
from waterbutler.core import remote_logging
from waterbutler.core import utils as core_utils
from waterbutler.core.path import WaterButlerPath

import tests.utils as test_utils

# Hack to get the module, not the function
move = sys.modules['waterbutler.tasks.move']

# Fixed timestamp returned by the patched time.time() (see mock_time fixture).
FAKE_TIME = 1454684930.0


@pytest.fixture(autouse=True)
def patch_backend(monkeypatch):
    # Disable the celery result backend so tasks run without broker state.
    monkeypatch.setattr(move.core.app, 'backend', None)


@pytest.fixture(autouse=True)
def callback(monkeypatch):
    # Replace the signed HTTP callback with a mock coroutine; tests inspect
    # its call_args_list to verify what the task reported.
    mock_request = test_utils.MockCoroutine(
        return_value=mock.Mock(
            status=200,
            read=test_utils.MockCoroutine(
                return_value=b'meowmeowmeow'
            )
        )
    )
    monkeypatch.setattr(core_utils, 'send_signed_request', mock_request)
    return mock_request


@pytest.fixture
def mock_time(monkeypatch):
    # Freeze time.time() at FAKE_TIME for tests that assert on timestamps.
    mock_time = mock.Mock(return_value=FAKE_TIME)
    monkeypatch.setattr(time, 'time', mock_time)


@pytest.fixture
def src_path():
    return WaterButlerPath('/user/bin/python')


@pytest.fixture
def dest_path():
    return WaterButlerPath('/usr/bin/golang')


@pytest.fixture(scope='function')
def src_provider():
    # Mock source provider whose move() reports (metadata, created=True).
    p = test_utils.MockProvider()
    p.move.return_value = (test_utils.MockFileMetadata(), True)
    p.auth['callback_url'] = 'src_callback'
    return p


@pytest.fixture(scope='function')
def dest_provider():
    # Mock destination provider, distinguished from src by its callback URL.
    p = test_utils.MockProvider()
    p.move.return_value = (test_utils.MockFileMetadata(), True)
    p.auth['callback_url'] = 'dest_callback'
    return p


@pytest.fixture(scope='function')
def providers(monkeypatch, src_provider, dest_provider):
    # Route make_provider('src'/'dest') to the mock providers above.
    def make_provider(name=None, **kwargs):
        if name == 'src':
            return src_provider
        if name == 'dest':
            return dest_provider
        raise ValueError('Unexpected provider')
    monkeypatch.setattr(move.utils, 'make_provider', make_provider)
    return src_provider, dest_provider


@pytest.fixture(autouse=True)
def log_to_keen(monkeypatch):
    # Stub out analytics logging so no network calls are made.
    mock_log_to_keen = test_utils.MockCoroutine()
    monkeypatch.setattr(remote_logging, 'log_to_keen', mock_log_to_keen)
    return mock_log_to_keen


@pytest.fixture
def src_bundle(src_path):
    # Minimal source-side payload as the task expects it.
    return {
        'nid': 'mst3k',
        'path': src_path,
        'provider': {
            'name': 'src',
            'auth': {
                'callback_url': '',
            },
            'settings': {},
            'credentials': {},
        }
    }


@pytest.fixture
def dest_bundle(dest_path):
    # Minimal destination-side payload as the task expects it.
    return {
        'nid': 'fbi4u',
        'path': dest_path,
        'provider': {
            'name': 'dest',
            'auth': {
                'callback_url': '',
            },
            'settings': {},
            'credentials': {},
        }
    }


@pytest.fixture
def bundles(src_bundle, dest_bundle):
    return src_bundle, dest_bundle


class TestMoveTask:

    def test_move_calls_move(self, event_loop, providers, bundles):
        # The task must delegate to the source provider's move().
        src, dest = providers
        src_bundle, dest_bundle = bundles

        move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle))

        assert src.move.called
        src.move.assert_called_once_with(dest, src_bundle['path'], dest_bundle['path'])

    def test_is_task(self):
        # move.move is a celery Task (not a bare coroutine) exposing adelay.
        assert callable(move.move)
        assert isinstance(move.move, celery.Task)
        assert not asyncio.iscoroutine(move.move)
        assert asyncio.iscoroutinefunction(move.move.adelay)

    def test_imputes_exceptions(self, event_loop, providers, bundles, callback):
        # A provider exception must be re-raised AND reported in the
        # callback's 'errors' list.
        src, dest = providers
        src_bundle, dest_bundle = bundles

        src.move.side_effect = Exception('This is a string')

        with pytest.raises(Exception):
            move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle))

        (method, url, data), _ = callback.call_args_list[0]

        assert src.move.called
        src.move.assert_called_once_with(dest, src_bundle['path'], dest_bundle['path'])

        assert method == 'PUT'
        assert data['errors'] == ["Exception('This is a string',)"]
        assert url == 'dest_callback'

    def test_return_values(self, event_loop, providers, bundles, callback, src_path, dest_path, mock_time):
        # The task returns (metadata, created) and reports a full
        # source/destination payload to the destination callback URL.
        src, dest = providers
        src_bundle, dest_bundle = bundles

        metadata = test_utils.MockFileMetadata()
        src.move.return_value = (metadata, False)

        ret1, ret2 = move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle))
        assert (ret1, ret2) == (metadata, False)

        (method, url, data), _ = callback.call_args_list[0]
        assert method == 'PUT'
        assert url == 'dest_callback'
        assert data['action'] == 'move'
        assert data['auth'] == {'callback_url': 'dest_callback'}
        assert data['email'] == False
        assert data['errors'] == []
        # With time.time() frozen, the reported time is FAKE_TIME + 60.
        assert data['time'] == FAKE_TIME + 60
        assert data['source'] == {
            'nid': 'mst3k',
            'resource': 'mst3k',
            'path': '/' + src_path.raw_path,
            'name': src_path.name,
            'materialized': str(src_path),
            'provider': src.NAME,
            'kind': 'file',
            'extra': {},
        }
        assert data['destination'] == {
            'nid': 'fbi4u',
            'resource': 'fbi4u',
            'path': metadata.path,
            'name': metadata.name,
            'materialized': metadata.path,
            'provider': dest.NAME,
            'kind': 'file',
            'contentType': metadata.content_type,
            # etag is the sha256 of "<provider>::<etag>".
            'etag': hashlib.sha256(
                '{}::{}'.format(metadata.provider, metadata.etag)
                .encode('utf-8')
            ).hexdigest(),
            'extra': metadata.extra,
            'modified': metadata.modified,
            'modified_utc': metadata.modified_utc,
            'created_utc': metadata.created_utc,
            'size': metadata.size,
        }

    def test_starttime_override(self, event_loop, providers, bundles, callback, mock_time):
        # NOTE(review): a start_time in the past appears to trigger the email
        # flag while a future start_time does not — confirm against the task's
        # notification-threshold logic.  The reported time is always
        # time.time() + 60 regardless of start_time.
        src, dest = providers
        src_bundle, dest_bundle = bundles

        stamp = FAKE_TIME
        move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle), start_time=stamp-100)
        move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle), start_time=stamp+100)

        (_, _, data), _ = callback.call_args_list[0]
        assert data['email'] is True
        assert data['time'] == 60 + stamp

        (_, _, data), _ = callback.call_args_list[1]
        assert data['email'] is False
        assert data['time'] == 60 + stamp
from __future__ import print_function, absolute_import import re import logging import numpy as np from ...utils import int_else_float_except_string logging.basicConfig() logger = logging.getLogger(__file__) def find_name(string): return re.search('function\s*mpc\s*=\s*(?P<data>.*?)\n', string).groupdict()['data'] def find_attributes(string): pattern = 'mpc\.(?P<attribute>.*?)\s*=\s*' return re.findall(pattern, string, re.DOTALL) def parse_file(attribute, string): match = search_file(attribute, string) if match is not None: match = match.strip("'").strip('"') _list = list() for line in match.splitlines(): line = line.split('%')[0] line = line.replace(';', '') if line.strip(): if attribute == 'bus_name': _list.append([line.strip().strip("'")])
else: _list.append([int_else_float_except_string(s) for s in line.strip().split()]) return _list else: return match def search_file(attribute, string): if attr
ibute in ['gen', 'gencost', 'bus', 'branch']: pattern = r'mpc\.{}\s*=\s*\[[\n]?(?P<data>.*?)[\n]?\];'.format(attribute) elif attribute in ['version', 'baseMVA']: pattern = r'mpc\.{}\s*=\s*(?P<data>.*?);'.format(attribute) elif attribute == 'bus_name': pattern = r'mpc\.{}\s*=\s*\{{[\n]?(?P<data>.*?)[\n]?\}};'.format('bus_name') else: logger.warning('Unable to parse mpc.%s. Please contact the developer.', attribute) return None match = re.search(pattern, string, re.DOTALL) if match is not None: return match.groupdict().get('data', None) else: return match
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from msp430.models import *
from tcp_comm.client import push_config
from msp430.models import MSP430
import json


@csrf_exempt
def register(request):
    """When an MSP430 connects to the TCP server this is called.

    Expects a POST whose body is ``{"json": {"mac": ..., "ip": ...,
    "iface": {"read": [...], "write": [...]}}}``.  Creates/updates the
    MSP430 record and its interfaces, then pushes the config back out.
    """
    if request.method == 'POST':
        try:
            jreq = json.loads(request.body.decode('UTF-8'))['json']
        # Was a bare `except:` — narrowed to the exceptions json.loads,
        # decode and the ['json'] lookup can actually raise, so real bugs
        # (e.g. NameError) are no longer silently swallowed.
        except (KeyError, ValueError):
            # NOTE(review): `mimetype` was replaced by `content_type` in
            # Django >= 1.7 — confirm the Django version in use.
            return HttpResponseBadRequest('Unable to parse post json key', mimetype='application/json')

        # Verify fields exist
        if 'mac' not in jreq or 'ip' not in jreq or 'iface' not in jreq:
            return HttpResponseBadRequest('Does not have required fields', mimetype='application/json')

        # Update MSP430 model; the MAC address is the stable identity,
        # the IP may change between connections.
        default_name = "{0} = {1}".format(jreq['mac'], jreq['ip'])
        msp430_db, created = MSP430.objects.get_or_create(
            mac_address=jreq['mac'],
            defaults={'current_ip': jreq['ip'], 'name': default_name})
        msp430_db.current_ip = jreq['ip']
        msp430_db.online = True
        msp430_db.save()

        def update_iface(model_cls, index_name):
            # Upsert every interface of the given direction ('read'/'write').
            if index_name in jreq['iface']:
                for iface in jreq['iface'][index_name]:
                    iface_db, created = model_cls.objects.get_or_create(
                        msp430=msp430_db,
                        name=iface['name'],
                        defaults={'io_type': iface['io_type']})
                    iface_db.description = iface['desc']
                    iface_db.possible_choices = json.dumps(iface['choices'])
                    iface_db.save()

        # Update referring interface models
        update_iface(MSP430ReadInterface, 'read')
        update_iface(MSP430WriteInterface, 'write')

        # Send configs to the MSP430
        push_config(msp430_db)
    else:
        return HttpResponse('Not a POST', mimetype='application/json')
    return HttpResponse('ok', mimetype='application/json')


@csrf_exempt
def disconnect(request):
    """Mark an MSP430 offline when the TCP server reports a disconnect."""
    if request.method == 'POST':
        try:
            jreq = json.loads(request.body.decode('UTF-8'))['json']
        # Narrowed from a bare `except:` — see register() above.
        except (KeyError, ValueError):
            return HttpResponseBadRequest('Unable to parse post json key', mimetype='application/json')

        # verify fields exist
        if 'mac' not in jreq:
            return HttpResponseBadRequest('Does not have required fields - mac', mimetype='application/json')

        # NOTE(review): raises MSP430.DoesNotExist (-> 500) for an unknown
        # MAC; left as-is to preserve behavior, but a 404 may be preferable.
        msp430 = MSP430.objects.get(mac_address=jreq['mac'])
        msp430.online = False
        msp430.save()
        return HttpResponse('ok', mimetype='application/json')
    # Fix: previously a non-POST request fell off the end and returned None,
    # which makes Django raise; now mirrors register()'s explicit response.
    return HttpResponse('Not a POST', mimetype='application/json')
#!/usr/bin/env python
'''
alltests.py - This module runs the automated tests in all the components.
To run specific test cases, pass one or more names of package/module names
on the command line which contain the test cases to be run.

Usage:
    python AllTests.py
        - Runs all the unittests
    python AllTests.py mypackage.MyFile
        - Runs the tests in 'mypackage/MyFile'

@author: Chip Boling
@copyright: 2015 Boling Consulting Solutions. All rights reserved.
@license: Artistic License 2.0, http://opensource.org/licenses/Artistic-2.0
@contact: support@bcsw.net
@deffield updated: Updated
'''
# NOTE: this is a Python 2 script (print statements below).
import unittest as uTest
#import site
import sys
import logging

# Default set of test modules run when no names are given on the command line.
alltestnames = [
    'mypackage.myTestModule',
]

if __name__ == '__main__':
    # Configure logging
    logging.basicConfig()  # default level is WARN
    print
    print

    # If no arguments are given, all of the test cases are run.
    if len(sys.argv) == 1:
        testnames = alltestnames
        verbosity = 2
        logging.getLogger().setLevel(logging.INFO)
        print 'Loading all Webware Tests...'
    else:
        # Explicit test names were passed on the command line.
        testnames = sys.argv[1:]
        # Turn up verbosity and logging level
        verbosity = 3
        logging.getLogger().setLevel(logging.DEBUG)
        print 'Loading tests %s...' % testnames

    tests = uTest.TestSuite()

    # We could just use defaultTestLoader.loadTestsFromNames(),
    # but it doesn't give a good error message when it cannot load a test.
    # So we load all tests individually and raise appropriate exceptions.
    for test in testnames:
        try:
            tests.addTest(uTest.defaultTestLoader.loadTestsFromName(test))
        except Exception:
            print 'ERROR: Skipping tests from "%s".' % test
            # Distinguish "module not importable" from "module imports but
            # the suite could not be loaded".
            try:
                # just try to import the test after loading failed
                __import__(test)
            except ImportError:
                print 'Could not import the test module.'
            else:
                print 'Could not load the test suite.'
            from traceback import print_exc
            print_exc()

    print
    print 'Running the tests...'
    uTest.TextTestRunner(verbosity=verbosity).run(tests)
from django.contrib.admin import ModelAdmin from django.utils.encoding import smart_text
class OimModelAdmin(ModelAdmin):
    """ModelAdmin base that limits admin-module visibility to OIM staff."""

    def has_module_permission(self, request):
        """Superusers always see the module; other staff must belong to
        the "OIM Staff" group."""
        user = request.user
        if user.is_superuser:
            return True
        return bool(user.is_staff and
                    user.groups.filter(name="OIM Staff").exists())


def smart_truncate(content, length=100, suffix='....(more)'):
    """Small function to truncate a string in a sensible way, sourced from:
    http://stackoverflow.com/questions/250357/smart-truncate-in-python
    """
    text = smart_text(content)
    if len(text) > length:
        # Take one extra character so a word ending exactly at `length`
        # survives, then drop the (possibly partial) final word.
        words = text[:length + 1].split(' ')
        text = ' '.join(words[0:-1]) + suffix
    return text
""" Unit tests to ensure that we can call reset_traits/delete on a property trait (regression tests for Github issue #67). """ from traits import _py2to3 from traits.api import Any, HasTraits, Int, Property, TraitError from traits.testing.unittest_tools import unittest class E(HasTraits): a = Property(Any) b = Property(Int) class TestPropertyDelete(unittest.TestCase): def test_property_delete(self): e = E()
with self.assertRaises(TraitError)
: del e.a with self.assertRaises(TraitError): del e.b def test_property_reset_traits(self): e = E() unresetable = e.reset_traits() _py2to3.assertCountEqual(self, unresetable, ['a', 'b'])
# !/usr/bin/env python
# -*- coding: UTF-8 -*-
from functools import wraps
import bottle
import datetime
import json
import os

from error import DMPException
from profile import ProfileHandler

app = bottle.Bottle()


def json_return(func):
    # Decorator: serialize the view's return value using the app-wide JSON
    # settings, then HTML-ify whitespace so the indented JSON renders
    # readably in a browser.
    @wraps(func)
    def wrapper(*args, **kwargs):
        data = json.dumps(func(*args, **kwargs), **app.config['json'])
        data = data.replace(' ', '&nbsp;').replace('\n', '<br/>')
        return data
    return wrapper


@app.route('/favicon.ico')
def favicon():
    # No favicon is served; answer 404 so browsers stop re-requesting it.
    bottle.response.status = 404


@app.route('/')
@json_return
def root():
    # Landing page: tells the caller how to trigger a profile sync.
    return {
        'status': 200,
        'data': "Welcome, go to '/profile_name' to sync your profile>"
    }


@app.route('/<name>')
@json_return
def sync(name):
    ''' Shows the data in the root folder '''
    try:
        # profile_handler is a module-level global assigned in __main__ below.
        profile_handler.merge_profile(name)
    except KeyError as err:
        # Unknown profile name.
        return {
            'status': 404,
            'error': 'Profile not Found',
        }
    except DMPException as err:
        # Merge failed; dump the attached traceback (if any) server-side
        # and hide details from the client.
        trace = getattr(err, 'trace', 'NO TRACE')
        print trace
        return {
            'status': 500,
            'error': 'Server Error',
        }
    print "%s: user requested merge" % name
    return {
        'status': 200,
        'data': "Profile %s updated" % name,
        'time_server': datetime.datetime.now().strftime('%b %d %Y at %H:%M'),
        'time_utc': datetime.datetime.utcnow().strftime('%b %d %Y at %H:%M UTC'),
    }


##################################################
# Settings & Startup
##################################################

# Defaults; command-line options (parsed in __main__) override these.
app.config.update({
    'debug': False,
    'host': '0.0.0.0',
    'port': 7070,
    'quiet': True,
    'json': {
        'sort_keys': True,
        'indent': 4,
    },
})

from optparse import OptionParser

app_parser = OptionParser(usage="usage: %prog profile_path initial_path [options]")
app_parser.add_option(
    "-p", "--port",
    dest="port",
)
app_parser.add_option(
    "-v", "--debug", "--verbose",
    dest="debug",
    action="store_true",
)
app_parser.add_option(
    "-r", "--root",
    dest="static_root",
    action="store",
)
# NOTE: -q shares dest="debug" with -v; whichever flag appears last wins.
app_parser.add_option(
    "-q", "--quiet",
    dest="debug",
    action="store_false",
)
app_parser.add_option(
    "--host",
    dest="host",
    action="store",
)
# --open is shorthand for --host 0.0.0.0 (listen on all interfaces).
app_parser.add_option(
    "--open",
    dest="host",
    action="store_const",
    const="0.0.0.0",
)


def parse_options():
    ''' Reads any commandline options, returning a final dict of options '''
    (options, args) = app_parser.parse_args()
    if len(args) != 2:
        app_parser.error("Both profile_path and initial_path are required")
    # Remove any unset options, using the defaults defined earlier instead
    options = vars(options)
    options = dict((key, options[key]) for key in options
                   if options[key] is not None)
    options['path'] = os.path.abspath(args[0])
    options['initial_path'] = os.path.abspath(args[1])
    return options


if __name__ == '__main__':
    options = parse_options()
    app.config.update(options)
    # Global consumed by the sync() view above.
    profile_handler = ProfileHandler(options['path'], options['initial_path'],
                                     enable_timer=False)

    # Debug only settings go here
    if app.config["debug"]:
        bottle.debug(True)
        app.config.update({
            'reloader': True,
            'quiet': False,
        })

    print 'starting Server'
    app.run(**app.config)
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes,
                        print_function, unicode_literals, with_statement)

import copy
import unittest

from pants.option.option_value_container import OptionValueContainer
from pants.option.ranked_value import RankedValue


class OptionValueContainerTest(unittest.TestCase):
    """Unit tests for OptionValueContainer's attribute, ranking, indexing,
    iteration and copy semantics."""

    def test_standard_values(self):
        # Plain attribute set/get; unknown attributes raise AttributeError.
        o = OptionValueContainer()
        o.foo = 1
        self.assertEqual(1, o.foo)

        with self.assertRaises(AttributeError):
            o.bar

    def test_value_ranking(self):
        # A new value only takes effect if its rank is >= the current one;
        # assertions below are order-dependent on the prior assignments.
        o = OptionValueContainer()
        o.foo = RankedValue(RankedValue.CONFIG, 11)
        self.assertEqual(11, o.foo)
        self.assertEqual(RankedValue.CONFIG, o.get_rank('foo'))
        # Lower rank (HARDCODED < CONFIG): assignment is a no-op.
        o.foo = RankedValue(RankedValue.HARDCODED, 22)
        self.assertEqual(11, o.foo)
        self.assertEqual(RankedValue.CONFIG, o.get_rank('foo'))
        # Higher rank (ENVIRONMENT > CONFIG): assignment wins.
        o.foo = RankedValue(RankedValue.ENVIRONMENT, 33)
        self.assertEqual(33, o.foo)
        self.assertEqual(RankedValue.ENVIRONMENT, o.get_rank('foo'))
        o.foo = 44  # No explicit rank is assumed to be a FLAG.
        self.assertEqual(44, o.foo)
        self.assertEqual(RankedValue.FLAG, o.get_rank('foo'))

    def test_is_flagged(self):
        # is_flagged() is True only for values set at FLAG rank.
        o = OptionValueContainer()

        o.foo = RankedValue(RankedValue.NONE, 11)
        self.assertFalse(o.is_flagged('foo'))

        o.foo = RankedValue(RankedValue.CONFIG, 11)
        self.assertFalse(o.is_flagged('foo'))

        o.foo = RankedValue(RankedValue.ENVIRONMENT, 11)
        self.assertFalse(o.is_flagged('foo'))

        o.foo = RankedValue(RankedValue.FLAG, 11)
        self.assertTrue(o.is_flagged('foo'))

    def test_indexing(self):
        # o['key'] and o.get('key'[, default]) mirror attribute access.
        o = OptionValueContainer()
        o.foo = 1
        self.assertEqual(1, o['foo'])
        self.assertEqual(1, o.get('foo'))
        self.assertEqual(1, o.get('foo', 2))
        self.assertIsNone(o.get('unknown'))
        self.assertEqual(2, o.get('unknown', 2))

        with self.assertRaises(AttributeError):
            o['bar']

    def test_iterator(self):
        # Iteration yields attribute names (sorted order per assertion).
        o = OptionValueContainer()
        o.a = 3
        o.b = 2
        o.c = 1

        names = list(iter(o))
        self.assertListEqual(['a', 'b', 'c'], names)

    def test_copy(self):
        # copy semantics can get hairy when overriding __setattr__/__getattr__, so we test them.
        o = OptionValueContainer()
        o.foo = 1
        o.bar = {'a': 111}
        p = copy.copy(o)

        # Verify that the result is in fact a copy.
        self.assertEqual(1, p.foo)  # Has original attribute.
        o.baz = 42
        self.assertFalse(hasattr(p, 'baz'))  # Does not have attribute added after the copy.

        # Verify that it's a shallow copy by modifying a referent in o and reading it in p.
        o.bar['b'] = 222
        self.assertEqual({'a': 111, 'b': 222}, p.bar)

    def test_deepcopy(self):
        # copy semantics can get hairy when overriding __setattr__/__getattr__, so we test them.
        o = OptionValueContainer()
        o.foo = 1
        o.bar = {'a': 111}
        p = copy.deepcopy(o)

        # Verify that the result is in fact a copy.
        self.assertEqual(1, p.foo)  # Has original attribute.
        o.baz = 42
        self.assertFalse(hasattr(p, 'baz'))  # Does not have attribute added after the copy.

        # Verify that it's a deep copy by modifying a referent in o and reading it in p.
        o.bar['b'] = 222
        self.assertEqual({'a': 111}, p.bar)
se"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from copy import deepcopy from designate import exceptions from designate import utils from designate.objects import base from designate.objects.validation_error import ValidationError from designate.objects.validation_error import ValidationErrorList LOG = logging.getLogger(__name__) class RecordSet(base.DictObjectMixin, base.PersistentObjectMixin, base.DesignateObject): @property def action(self): # Return action as UPDATE if present. CREATE and DELETE are returned # if they are the only ones. action = 'NONE' actions = {'CREATE': 0, 'DELETE': 0, 'UPDATE': 0, 'NONE': 0} for record in self.records: actions[record.action] += 1 if actions['CREATE'] != 0 and actions['UPDATE'] == 0 and \ actions['DELETE'] == 0 and actions['NONE'] == 0: action = 'CREATE' elif actions['DELETE'] != 0 and actions['UPDATE'] == 0 and \ actions['CREATE'] == 0 and actions['NONE'] == 0: action = 'DELETE' elif actions['UPDATE'] != 0 or actions['CREATE'] != 0 or \ actions['DELETE'] != 0: action = 'UPDATE' return action @property def managed(self): managed = False for record in self.records: if record.managed: return True return managed @property def status(self): # Return the worst status in order of ERROR, PENDING, ACTIVE status = 'ACTIVE' for record in self.records: if (record.status == 'ERROR') or \ (record.status == 'PENDING' and status != 'ERROR') or \ (status != 'PENDING'): status = record.status return status FIELDS = { 'shard': { 'schema': { 'type': 'integer', 'minimum': 0, 'maximum': 4095 } }, 
'tenant_id': { 'schema': { 'type': 'string', }, 'read_only': True }, 'domain_id': { 'schema': { 'type': 'string', 'description': 'Zone identifier', 'format': 'uuid' }, }, 'name': { 'schema': { 'type': 'string', 'description': 'Zone name', 'format': 'domainname', 'maxLength': 255, }, 'immutable': True, 'required': True }, 'type': { 'schema': { 'type': 'string', 'description': 'RecordSet type (TODO: Make types extensible)', 'enum': ['A', 'AAAA', 'CNAME', 'MX', 'SRV', 'TXT', 'SPF', 'NS', 'PTR', 'SSHFP', 'SOA'] }, 'required': True, 'immutable': True }, 'ttl': { 'schema': { 'type': ['integer', 'null'], 'description': 'Default time to live', 'minimum': 0, 'maximum': 2147483647 }, }, 'description': { 'schema': { 'type': ['string', 'null'], 'maxLength': 160 }, }, 'records': { 'relation': True, 'relation_cls': 'RecordList' }, # TODO(graham): implement the polymorphic class relations # 'records': { # 'polymorphic': 'type', # 'relation': True, # 'relation_cls': lambda type_: '%sList' % type_ # }, } def validate(self): errors = ValidationErrorList() # Get the right classes (e.g. A for Recordsets with type: 'A') try: record_list_cls = self.obj_cls_from_name('%sList' % self.type) record_cls = self.obj_cls_from_name(self.type) except KeyError as e: e = ValidationError() e.path = ['recordset', 'type'] e.validator = 'value' e.validator_value = [self.type] e.message = ("'%(type)s' is not a supported Record type" % {'type': self.type}) # Add it to the list for later errors.append(e) raise exceptions.InvalidObject( "Provided object does not match " "schema", errors=errors, object=self) # Get any rules that the record type imposes on the record changes = record_cls.get_recordset_schema_changes() old_fields = {} if changes: LOG.debug("Record %s is overriding the RecordSet schema with: %s" % (record_cls.obj_name(), changes)) old_fields = deepcopy(self.FIELDS) self.FIELDS = utils.deep_dict_merge(self.F
IELDS, changes) error_indexes = [] # Copy these for safekeeping old_records = deepcopy(self.records) # Blank the records for this object with the right list type self.records = record_list_cls() i = 0 for record in old_records:
record_obj = record_cls() try: record_obj._from_string(record.data) # The _from_string() method will throw a ValueError if there is not # enough data blobs except ValueError as e: # Something broke in the _from_string() method # Fake a correct looking ValidationError() object e = ValidationError() e.path = ['records', i] e.validator = 'format' e.validator_value = [self.type] e.message = ("'%(data)s' is not a '%(type)s' Record" % {'data': record.data, 'type': self.type}) # Add it to the list for later errors.append(e) error_indexes.append(i) else: # Seems to have loaded right - add it to be validated by # JSONSchema self.records.append(record_obj) i += 1 try: # Run the actual validate code super(RecordSet, self).validate() except exceptions.InvalidObject as e: # Something is wrong according to JSONSchema - append our errors increment = 0 # This code below is to make sure we have the index for the record # list correct. JSONSchema may be missing some of the objects due # to validation above, so this re - inserts them, and makes sure # the index is right for error in e.errors: if len(error.path) > 1 and isinstance(error.path[1], int): error.path[1] += increment while error.path[1] in error_indexes: increment += 1 error.path[1] += 1 # Add the list from above e.errors.extend(errors) # Raise the exception raise e else: # If JSONSchema passes, but we found parsing errors, # raise an exception if len(errors) > 0: raise exceptions.InvalidObject( "Provided object does not match " "schema", errors=errors, object=self) finally: if old_fields: self.FIELDS = old_fields # Send in the traditional Record objects to central / storage self.records = old_records class RecordSetList(base.ListObjectMixin, base.Desi
import unittest
from logEntry import LogEntry


class TestLogEntry(unittest.TestCase):
    """Parsing tests for LogEntry against several access-log line formats."""

    def test_parse_log_1(self):
        # Combined-format line: status, size, referer, user-agent present.
        line = '188.45.108.168 - - [12/Dec/2015:19:44:09 +0100] "GET /images/stories/raith/almhuette_raith.jpg HTTP/1.1" 200 43300 "http://www.almhuette-raith.at/" "Mozilla/5.0 (Linux; Android 4.4.2; de-at; SAMSUNG GT-I9301I Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/1.5 Chrome/28.0.1500.94 Mobile Safari/537.36" "-"'
        entry = LogEntry(line)
        self.assertEqual(entry.clientIp,'188.45.108.168')
        self.assertEqual(entry.clientId, '-')
        self.assertEqual(entry.userName, '-')
        self.assertEqual(entry.requestLine, 'GET /images/stories/raith/almhuette_raith.jpg HTTP/1.1')
        self.assertEqual(entry.requestUrl, '/images/stories/raith/almhuette_raith.jpg')
        self.assertEqual(entry.urlSection, '/images/')
        self.assertEqual(entry.statusCode, 200)
        self.assertEqual(entry.sizeBytes, 43300)

    def test_parse_log_2(self):
        # Common-format line with a hostname (not an IP) as the client.
        line = 'hmu4.cs.auckland.ac.nz - - [09/Feb/2016:02:50:20 -0500] "GET /docs/GCDOAR/EnergyStar.html HTTP/1.0" 200 6829'
        entry = LogEntry(line)
        self.assertEqual(entry.clientIp, 'hmu4.cs.auckland.ac.nz')
        self.assertEqual(entry.clientId, '-')
        self.assertEqual(entry.userName, '-')
        self.assertEqual(entry.requestLine, 'GET /docs/GCDOAR/EnergyStar.html HTTP/1.0')
        self.assertEqual(entry.requestUrl, '/docs/GCDOAR/EnergyStar.html')
        self.assertEqual(entry.urlSection, '/docs/')
        self.assertEqual(entry.statusCode, 200)
        self.assertEqual(entry.sizeBytes, 6829)

    def test_parse_log_3(self):
        # IPv6 client and a missing size field - sizeBytes must default to 0.
        line = '2607:f0d0:1002:0051:0000:0000:0000:0004 - - [23/Jan/2016:15:41:52 +0100] "POST /administrator/index.php HTTP/1.1" 200 "-" "-" "-" "-"'
        entry = LogEntry(line)
        self.assertEqual(entry.clientIp, '2607:f0d0:1002:0051:0000:0000:0000:0004')
        self.assertEqual(entry.clientId, '-')
        self.assertEqual(entry.userName, '-')
        self.assertEqual(entry.requestLine, 'POST /administrator/index.php HTTP/1.1')
        self.assertEqual(entry.requestUrl, '/administrator/index.php')
        self.assertEqual(entry.urlSection, '/administrator/')
        self.assertEqual(entry.statusCode, 200)
        self.assertEqual(entry.sizeBytes, 0)


if __name__ == '__main__':
    unittest.main()
from HTMLParser import HTMLParser
import inspect
import re
from urllib import quote, quote_plus
from urlparse import urlparse


class SafeHTMLParser(HTMLParser):
    """HTML sanitiser (Python 2): keeps only whitelisted tags, blanks
    unsafe src attributes, and auto-links plain-text URLs / e-mail
    addresses into `self.raw` while the sanitised markup accumulates in
    `self.sanitised`."""

    # from html5lib.sanitiser
    acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
                           'article', 'aside', 'audio', 'b', 'big',
                           'blockquote', 'br', 'button', 'canvas', 'caption',
                           'center', 'cite', 'code', 'col', 'colgroup',
                           'command', 'datagrid', 'datalist', 'dd', 'del',
                           'details', 'dfn', 'dialog', 'dir', 'div', 'dl',
                           'dt', 'em', 'event-source', 'fieldset',
                           'figcaption', 'figure', 'footer', 'font', 'header',
                           'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i',
                           'img', 'ins', 'keygen', 'kbd', 'label', 'legend',
                           'li', 'm', 'map', 'menu', 'meter', 'multicol',
                           'nav', 'nextid', 'ol', 'output', 'optgroup',
                           'option', 'p', 'pre', 'progress', 'q', 's',
                           'samp', 'section', 'select', 'small', 'sound',
                           'source', 'spacer', 'span', 'strike', 'strong',
                           'sub', 'sup', 'table', 'tbody', 'td', 'textarea',
                           'time', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u',
                           'ul', 'var', 'video']
    # Escapes applied to plain text BEFORE auto-linking.
    replaces_pre = [["&", "&amp;"], ["\"", "&quot;"], ["<", "&lt;"],
                    [">", "&gt;"]]
    # Whitespace-to-HTML substitutions applied AFTER auto-linking.
    replaces_post = [["\n", "<br/>"], ["\t", "&nbsp;&nbsp;&nbsp;&nbsp;"],
                     [" ", "&nbsp; "], [" ", "&nbsp; "],
                     ["<br/> ", "<br/>&nbsp;"]]
    # URI schemes allowed in non-img src attributes.
    src_schemes = [ "data" ]
    #uriregex1 = re.compile(r'(?i)\b((?:(https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?]))')
    uriregex1 = re.compile(r'((https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
    uriregex2 = re.compile(r'<a href="([^"]+)&amp;')
    emailregex = re.compile(r'\b([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,})\b')

    @staticmethod
    def replace_pre(text):
        # Escape HTML-significant characters in plain text.
        for a in SafeHTMLParser.replaces_pre:
            text = text.replace(a[0], a[1])
        return text

    @staticmethod
    def replace_post(text):
        # Convert whitespace to HTML equivalents; also protect a single
        # leading space.
        for a in SafeHTMLParser.replaces_post:
            text = text.replace(a[0], a[1])
        if len(text) > 1 and text[0] == " ":
            text = "&nbsp;" + text[1:]
        return text

    def __init__(self, *args, **kwargs):
        HTMLParser.__init__(self, *args, **kwargs)
        self.reset_safe()

    def reset_safe(self):
        # Clear all accumulated output/state (separate from HTMLParser.reset).
        self.elements = set()
        self.raw = u""
        self.sanitised = u""
        self.has_html = False
        self.allow_picture = False
        self.allow_external_src = False

    def add_if_acceptable(self, tag, attrs = None):
        """Append the tag to `sanitised` if whitelisted, blanking unsafe
        src attributes."""
        if tag not in SafeHTMLParser.acceptable_elements:
            return
        self.sanitised += "<"
        # Inspect the caller's name to decide whether this is a closing
        # (</tag>) or self-closing (<tag/>) form - see handle_* below.
        if inspect.stack()[1][3] == "handle_endtag":
            self.sanitised += "/"
        self.sanitised += tag
        if attrs is not None:
            for attr, val in attrs:
                if tag == "img" and attr == "src" and not self.allow_picture:
                    val = ""
                elif attr == "src" and not self.allow_external_src:
                    # Only whitelisted schemes (e.g. data:) may remain.
                    url = urlparse(val)
                    if url.scheme not in SafeHTMLParser.src_schemes:
                        val = ""
                self.sanitised += " " + quote_plus(attr)
                # NOTE(review): attribute values are not escaped/quoted
                # beyond the src checks above - confirm upstream inputs.
                if not (val is None):
                    self.sanitised += "=\"" + val + "\""
        if inspect.stack()[1][3] == "handle_startendtag":
            self.sanitised += "/"
        self.sanitised += ">"

    def handle_starttag(self, tag, attrs):
        if tag in SafeHTMLParser.acceptable_elements:
            self.has_html = True
        self.add_if_acceptable(tag, attrs)

    def handle_endtag(self, tag):
        self.add_if_acceptable(tag)

    def handle_startendtag(self, tag, attrs):
        if tag in SafeHTMLParser.acceptable_elements:
            self.has_html = True
        self.add_if_acceptable(tag, attrs)

    def handle_data(self, data):
        # Text content passes through untouched.
        self.sanitised += data

    def handle_charref(self, name):
        self.sanitised += "&#" + name + ";"

    def handle_entityref(self, name):
        self.sanitised += "&" + name + ";"

    def feed(self, data):
        """Feed a chunk: parse it for `sanitised`, and build the
        escaped/auto-linked plain-text rendering into `raw`."""
        try:
            data = unicode(data, 'utf-8')
        except UnicodeDecodeError:
            data = unicode(data, 'utf-8', errors='replace')
        HTMLParser.feed(self, data)
        tmp = SafeHTMLParser.replace_pre(data)
        # Auto-link URLs, then restore '&' inside generated hrefs, then
        # auto-link e-mail addresses.
        tmp = SafeHTMLParser.uriregex1.sub(
            r'<a href="\1">\1</a>', tmp)
        tmp = SafeHTMLParser.uriregex2.sub(r'<a href="\1&', tmp)
        tmp = SafeHTMLParser.emailregex.sub(r'<a href="mailto:\1">\1</a>', tmp)
        tmp = SafeHTMLParser.replace_post(tmp)
        self.raw += tmp

    def is_html(self, text = None, allow_picture = False):
        """Parse `text` (if given) and report whether any whitelisted HTML
        tag was seen."""
        if text:
            self.reset()
            self.reset_safe()
            self.allow_picture = allow_picture
            self.feed(text)
            self.close()
        return self.has_html
"""Brief description of what this file should test""" import pytest from Calculator import operations def test_addition(): assert operations.add(1, 2) == 3 def test_subtraction(): assert operations.subtract(1 ,2) == -1 def test_multiplication(): assert operations.multiply(2, -1) == -2 def test_divide(): # test for floating point division returns floating point
assert isinstance(operations.divide(3, 2), float) assert operations.divide(8, 4) == 2 # Check that DivisionByZeroError is raised with pytest.raises(Exception): operations.divide(1, 0) @pytest.mark.parametrize("given, expected", [ (0, 0), (-0.76, 0.76), (1, 1), ]) def test_abs_val(given, expected): assert operations.abs_val(give
n) == expected @pytest.mark.parametrize("given, expected", [ (0, 0), (.5, 0), (-0.75, -1), ]) def test_floor(given, expected): assert operations.floor(given) == expected @pytest.mark.parametrize("given, expected", [ (0, 0), (-1.5, -1), (2.3, 3), ]) def test_ceiling(given, expected): assert operations.ceiling(given) == expected @pytest.mark.parametrize("given_a, given_b, expected", [ (0, 1, 0), (3, 3, 27), (-3, 0, 1), (0, 0, 1), ]) def test_power(given_a, given_b, expected): assert operations.power(given_a, given_b) == expected @pytest.mark.parametrize("given, expected", [ (0.45, 0), (-3.6, -3), (3, 3), ]) def test_rounding(given, expected): assert operations.rounding(given) == expected
from flask import Flask, session, redirect, url_for, escape, request, jsonify
from flask.ext.pymongo import PyMongo
from pprint import pformat
import sys
import os
import re

app = Flask("download")
mongo = PyMongo(app)

# Where the open-data files live locally, and the web-served directory
# into which per-download symlinks are created.
local_repo = '/home/rgarcia/opendata'
server_repo = '/home/rgarcia/public_html'


############
# anuncios #
############

@app.route('/register', methods=['POST'])
def anuncio_save():
    # Register a downloader record in Mongo, then publish the requested
    # dataset by symlinking it into the public web root under the new
    # record's ObjectId. Returns that id as JSON.
    downloader = request.get_json()
    try:
        # NOTE(review): assumes the posted JSON always carries
        # 'email_repeat' and 'path' keys - confirm against the client form.
        downloader.pop('email_repeat')
        path = downloader['path']
        oid = mongo.db.downloaders.save(downloader)
        local_path = os.path.join( local_repo, path )
        server_path = os.path.join( server_repo, str(oid) )
        app.logger.debug(local_path)
        app.logger.debug(server_path)
        os.symlink(local_path, server_path)
        return jsonify({ "oid": str(oid)} )
    except:
        # Bare re-raise: defers error handling to Flask; the commented-out
        # alternative returned a JSON error payload instead.
        raise
        # return jsonify({ "status": "error",
        #                  "message": pformat(sys.exc_info()[0]) })


if __name__ == '__main__':
    app.run(debug=True)
def spiral_iterat
or(): x, y = 0, 0 direction = [(1, 0), (0, 1), (-1, 0), (0, -1)] i = 1 print x,y while True: dir_index = (i - 1) % 4 vector = i * d
irection[dir_index][0], i * direction[dir_index][1] x += vector[0] y += vector[1] print x,y i += 1 raw_input() result = spiral_iterator()
import asyncore, socket, logging, time, asynchat, os from hdfs_space_common import get_tree_from_cache, get_child_node, TreeNode FORMAT = '%(asctime)-15s: %(levelname)s %(module)s - %(funcName)s: %(message)s' logging.basicConfig(format=FORMAT, level=logging.WARNING) class ChatHandler(asynchat.async_chat): def __init__(self, sock): asynchat.async_chat.__init__(self, sock = sock) self.ibuffer = [] self.obuffer = '' self.set_terminator("\n") def collect_incoming_data(self, data): self.ibuffer.append(data) logging.info('Received data "%s"' % data) def found_terminator(self): self.handle_request() def handle_request(self): data = self.ibuffer.pop(0) #Data should be like: #metric:path|user|size # OR #db:new_path command = data.split(":")[0] if command == 'metric': metric_args = data.split(":")[1].split('|') hdfs_path = metric_args[0] if len(metric_args) > 0 else "/" user_name = metric_args[1] if len(metric_args) > 1 else "ALL" metric = metric_args[2] if len(metric_args) > 2 else "size" logging.debug('metric_args: %s' % metric_args) logging.debug('hdfs_path: %s' % hdfs_path) logging.debug('user_name: %s' % user_name) logging.debug('metric: %s' % metric) result = 0 if user_name == "ALL" and metric == 'size': logging.warning('Rather using this script try command "hdfs dfs -du /"') elif user_name == "ALL" and metric == 'amount': logging.info('Calculating the metric') result = get_child_node(file_tree, hdfs_path).get_amount_for_all() else: if metric == "size": logging.info('Calculating the metric') result = get_child_node(file_tree, hdfs_path).get_size_by_user(user_name) elif metric == "amount": logging.info('Calculating the metric') result = get_child_node(file_tree, hdfs_path).get_amount_by_user(user_name) else: logging.warning("The metric %s not implemented yet" % metric) logging.info('The result is ready: %s. Pushing it to back' % result) self.push(str(result)) return elif command == 'db': file_path = data.split(":")[1] if
os.path.exists(file_path): global file_tree file_tree = get_tree_from_cache(file_path) os.rename(file_path,MetricServer.db_path) logging.info('File %s remaned to %s' % (file_path, MetricServer.db_path)) self.push('OK')
else: logging.warning('File %s could not be found. Doing nothing' % file_path) self.push('FAIL') else: logging.warning("The command %s not implemented yet") self.push('FAIL') class MetricServer(asyncore.dispatcher): sock_path = '/tmp/hdfs_space.sock' db_path = '/tmp/hdfs_space.data' def __init__(self): asyncore.dispatcher.__init__(self) self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM) self.set_reuse_addr() self.bind(self.sock_path) logging.info('Starting metric-server') self.listen(5) global file_tree try: file_tree = get_tree_from_cache(self.db_path) except KeyError as e: file_tree = TreeNode('') def handle_accept(self): pair = self.accept() if pair is not None: sock, addr = pair logging.info('Incoming connection') handler = ChatHandler(sock) def handle_close(self): self.close() logging.info('The socket is closed') def handle_expt(self): logging.info("OOB detected for %s" % self) if __name__ == '__main__': file_tree = None server = MetricServer() try: asyncore.loop() finally: if os.path.exists(server.sock_path): os.unlink(server.sock_path)
from modeller.optimizers import state_optimizer


class SteepestDescent(state_optimizer):
    """Very simple steepest descent optimizer, in Python"""

    # Add options for our optimizer
    _ok_keys = state_optimizer._ok_keys + ('min_atom_shift', 'min_e_diff',
                                           'step_size', 'max_iterations')

    def __init__(self, step_size=0.0001, min_atom_shift=0.01, min_e_diff=1.0,
                 max_iterations=None, **vars):
        state_optimizer.__init__(self, step_size=step_size,
                                 min_atom_shift=min_atom_shift,
                                 min_e_diff=min_e_diff,
                                 max_iterations=max_iterations, **vars)

    def optimize(self, atmsel, **vars):
        """Minimize the selection's energy by steepest descent until the
        energy-change, atom-shift, or iteration-count criterion is met."""
        # Do normal optimization startup
        state_optimizer.optimize(self, atmsel, **vars)

        # Get all parameters
        alpha = self.get_parameter('step_size')
        minshift = self.get_parameter('min_atom_shift')
        min_ediff = self.get_parameter('min_e_diff')
        maxit = self.get_parameter('max_iterations')

        # Main optimization loop
        state = self.get_state()
        (olde, dstate) = self.energy(state)
        while True:
            # Step along the negative gradient, scaled by alpha.
            for i in range(len(state)):
                state[i] -= alpha * dstate[i]
            (newe, dstate) = self.energy(state)
            if abs(newe - olde) < min_ediff:
                print "Finished at step %d due to energy criterion" % self.step
                break
            elif self.shiftmax < minshift:
                print "Finished at step %d due to shift criterion" % self.step
                break
            elif maxit is not None and self.step >= maxit:
                print "Finished at step %d due to step criterion" % self.step
                break
            # Crude step-size adaptation: grow after an improvement,
            # shrink after an overshoot.
            if newe < olde:
                alpha *= 2
            else:
                alpha /= 2
            olde = newe
            self.next_step()
        self.finish()
# Build configuration for the gevent C extension.
from distutils import sysconfig

# Extension module name.
NAME = 'gevent'

# Compiler flags: add both the generic and the platform-specific Python
# include directories so Python.h and pyconfig.h can be found.
CFLAGS = [
    '-I' + sysconfig.get_python_inc(),
    '-I' + sysconfig.get_python_inc(plat_specific=True)
]
LDFLAGS = []  # no extra linker flags
LIBS = []     # no extra libraries to link against
# C source files (basenames, without extension) to compile.
GCC_LIST = ['gevent', 'hooks']
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core.  If not, see <http://www.gnu.org/licenses/>.

from mycroft.messagebus.message import Message
from mycroft.util.log import getLogger

__author__ = 'jdorleans'

LOGGER = getLogger(__name__)


class EnclosureAPI:
    """
    This API is intended to be used to interface with the hardware
    that is running Mycroft.  It exposes all possible commands which
    can be sent to a Mycroft enclosure implementation.

    Different enclosure implementations may implement this differently
    and/or may ignore certain API calls completely.  For example,
    the eyes_color() API might be ignore on a Mycroft that uses simple
    LEDs which only turn on/off, or not at all on an implementation
    where there is no face at all.

    All methods are fire-and-forget: each one emits a single message on
    the Mycroft message bus (`ws`) and returns nothing.
    """

    def __init__(self, ws):
        # ws: connected Mycroft message-bus client used to emit commands.
        self.ws = ws

    def reset(self):
        """The enclosure should restore itself to a started state.
        Typically this would be represented by the eyes being 'open'
        and the mouth reset to its default (smile or blank).
        """
        self.ws.emit(Message("enclosure.reset"))

    def system_reset(self):
        """The enclosure hardware should reset any CPUs, etc."""
        self.ws.emit(Message("enclosure.system.reset"))

    def system_mute(self):
        """Turn off the system microphone (not listening for wakeword)."""
        self.ws.emit(Message("enclosure.system.mute"))

    def system_unmute(self):
        """Turn the system microphone on (listening for wakeword)."""
        self.ws.emit(Message("enclosure.system.unmute"))

    def system_blink(self, times):
        """The 'eyes' should blink the given number of times.
        Args:
            times (int): number of times to blink
        """
        self.ws.emit(Message("enclosure.system.blink", {'times': times}))

    def eyes_on(self):
        """Illuminate or show the eyes."""
        self.ws.emit(Message("enclosure.eyes.on"))

    def eyes_off(self):
        """Turn off or hide the eyes."""
        self.ws.emit(Message("enclosure.eyes.off"))

    def eyes_blink(self, side):
        """Make the eyes blink
        Args:
            side (str): 'r', 'l', or 'b' for 'right', 'left' or 'both'
        """
        self.ws.emit(Message("enclosure.eyes.blink", {'side': side}))

    def eyes_narrow(self):
        """Make the eyes look narrow, like a squint"""
        self.ws.emit(Message("enclosure.eyes.narrow"))

    def eyes_look(self, side):
        """Make the eyes look to the given side
        Args:
            side (str): 'r' for right
                        'l' for left
                        'u' for up
                        'd' for down
                        'c' for crossed
        """
        self.ws.emit(Message("enclosure.eyes.look", {'side': side}))

    def eyes_color(self, r=255, g=255, b=255):
        """Change the eye color to the given RGB color
        Args:
            r (int): 0-255, red value
            g (int): 0-255, green value
            b (int): 0-255, blue value
        """
        self.ws.emit(Message("enclosure.eyes.color",
                             {'r': r, 'g': g, 'b': b}))

    def eyes_brightness(self, level=30):
        """Set the brightness of the eyes in the display.
        Args:
            level (int): 1-30, bigger numbers being brighter
        """
        self.ws.emit(Message("enclosure.eyes.level", {'level': level}))

    def eyes_reset(self):
        """Restore the eyes to their default (ready) state."""
        self.ws.emit(Message("enclosure.eyes.reset"))

    def eyes_timed_spin(self, length):
        """Make the eyes 'roll' for the given time.
        Args:
            length (int): duration in milliseconds of roll, None = forever
        """
        self.ws.emit(Message("enclosure.eyes.timedspin",
                             {'length': length}))

    def eyes_volume(self, volume):
        """Indicate the volume using the eyes
        Args:
            volume (int): 0 to 11
        """
        self.ws.emit(Message("enclosure.eyes.volume", {'volume': volume}))

    def mouth_reset(self):
        """Restore the mouth display to normal (blank)"""
        self.ws.emit(Message("enclosure.mouth.reset"))

    def mouth_talk(self):
        """Show a generic 'talking' animation for non-synched speech"""
        self.ws.emit(Message("enclosure.mouth.talk"))

    def mouth_think(self):
        """Show a 'thinking' image or animation"""
        self.ws.emit(Message("enclosure.mouth.think"))

    def mouth_listen(self):
        """Show a 'listening' image or animation"""
        self.ws.emit(Message("enclosure.mouth.listen"))

    def mouth_smile(self):
        """Show a 'smile' image or animation"""
        self.ws.emit(Message("enclosure.mouth.smile"))

    def mouth_viseme(self, code):
        """Display a viseme mouth shape for synched speech
        Args:
            code (int):  0 = shape for sounds like 'y' or 'aa'
                         1 = shape for sounds like 'aw'
                         2 = shape for sounds like 'uh' or 'r'
                         3 = shape for sounds like 'th' or 'sh'
                         4 = neutral shape for no sound
                         5 = shape for sounds like 'f' or 'v'
                         6 = shape for sounds like 'oy' or 'ao'
        """
        self.ws.emit(Message("enclosure.mouth.viseme", {'code': code}))

    def mouth_text(self, text=""):
        """Display text (scrolling as needed)
        Args:
            text (str): text string to display
        """
        self.ws.emit(Message("enclosure.mouth.text", {'text': text}))

    def weather_display(self, img_code, temp):
        """Show a weather icon (deprecated)"""
        self.ws.emit(Message("enclosure.weather.display",
                             {'img_code': img_code, 'temp': temp}))

    def activate_mouth_events(self):
        """Enable movement of the mouth with speech"""
        self.ws.emit(Message('enclosure.mouth.events.activate'))

    def deactivate_mouth_events(self):
        """Disable movement of the mouth with speech"""
        self.ws.emit(Message('enclosure.mouth.events.deactivate'))
"""Python interface to GenoLogics LIMS via its REST API.
Usage examples: Get some samples, and sample info. Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. """ from genologics.lims import * from genologics.config import BASEURI, USERNAME, PASSWORD lims = Lims(BASEURI, USERNAME, PASSWORD) lims.check_version() project = Project(lims, id='KRA61') samples = lims.get_samples(projectlimsid=project.id) print(len(samples), 'samples in', project)
for sample in samples: print(sample, sample.name, sample.date_received, sample.artifact) name = 'spruce_a' artifacts = lims.get_artifacts(sample_name=name) print(len(artifacts), 'artifacts for sample', name) for artifact in artifacts: print(artifact, artifact.name, artifact.qc_flag)
"""Tests for AVM Fritz!Box switch component.""" from datetime import timedelta from unittest.mock import Mock from requests.exceptions import HTTPError from homeassistant.components.fritzbox.const import ( ATTR_STATE_DEVICE_LOCKED, ATTR_STATE_LOCKED, DOMAIN as FB_DOMAIN, ) from homeassistant.components.sensor import ( ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, STATE_CLASS_MEASUREMENT, STATE_CLASS_TOTAL_INCREASING, ) from homeassistant.components.switch import DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, CONF_DEVICES, ENERGY_KILO_WATT_HOUR, POWER_WATT, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ON, STATE_UNAVAILABLE, TEMP_CELSIUS, ) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util from . import FritzDeviceSwitchMock, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock): """Test setup of platform.""" device = FritzDeviceSwitchMock() assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, de
vice, fritz ) state = hass.states.get(ENTITY_ID) assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == CONF_FAKE_NAME assert state.attributes[ATTR_STATE_DEVICE_LOCKED] == "fake_locked_device" assert state.attributes[ATTR_STATE_LOCKED] == "fake_locked" assert ATTR_STATE_CLASS not in state.attributes state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_temperature") assert state assert state.state ==
"1.23" assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Temperature" assert state.attributes[ATTR_STATE_DEVICE_LOCKED] == "fake_locked_device" assert state.attributes[ATTR_STATE_LOCKED] == "fake_locked" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS assert state.attributes[ATTR_STATE_CLASS] == STATE_CLASS_MEASUREMENT state = hass.states.get(f"{ENTITY_ID}_humidity") assert state is None state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_power_consumption") assert state assert state.state == "5.678" assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Power Consumption" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == POWER_WATT assert state.attributes[ATTR_STATE_CLASS] == STATE_CLASS_MEASUREMENT state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_total_energy") assert state assert state.state == "1.234" assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Total Energy" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == ENERGY_KILO_WATT_HOUR assert state.attributes[ATTR_STATE_CLASS] == STATE_CLASS_TOTAL_INCREASING async def test_turn_on(hass: HomeAssistant, fritz: Mock): """Test turn device on.""" device = FritzDeviceSwitchMock() assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) assert await hass.services.async_call( DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_on.call_count == 1 async def test_turn_off(hass: HomeAssistant, fritz: Mock): """Test turn device off.""" device = FritzDeviceSwitchMock() assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) assert await hass.services.async_call( DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_off.call_count == 1 async def test_update(hass: HomeAssistant, fritz: Mock): """Test update without error.""" device = FritzDeviceSwitchMock() assert 
await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) assert fritz().update_devices.call_count == 1 assert fritz().login.call_count == 1 next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 async def test_update_error(hass: HomeAssistant, fritz: Mock): """Test update with error.""" device = FritzDeviceSwitchMock() fritz().update_devices.side_effect = HTTPError("Boom") assert not await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) assert fritz().update_devices.call_count == 1 assert fritz().login.call_count == 1 next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 2 async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock): """Test assume device as unavailable.""" device = FritzDeviceSwitchMock() device.voltage = 0 device.energy = 0 device.power = 0 assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) state = hass.states.get(ENTITY_ID) assert state assert state.state == STATE_UNAVAILABLE
} KEY_TO_OPT = dict([(key, (opt, ttype)) for opt, (key, ttype) in OPT_TO_KEY.items()]) PARAMS = { "PATH" : "/home/kevin/mount/first", "TARGET" : "/tmp/final.png", #define the size of the picture "WIDTH" : 2000, #define how many lines do we want "LINES": 2, "LINE_HEIGHT": 200, #minimum width of cropped image. Below that, we black it out #only for POLAROID "CROP_SIZE": 1000, "IMG_FORMAT_SUFFIX": ".png", # False if PATH is a normal directory, True if it is WebAlbums-FS "USE_VFS": False, "FORCE_VFS": False, "FORCE_NO_VFS": False, # True if end-of-line photos are wrapped to the next line "DO_WRAP": False, # True if we want a black background and white frame, plus details "DO_POLAROID": True, "WANT_NO_CAPTION": True, # False if we want to add pictures randomly "PUT_RANDOM": False, "DO_RESIZE": False, ### VFS options ### "NO_SWITCH_TO_MINI": False, ### Directory options ### # False if we pick directory images sequentially, false if we take them randomly "PICK_RANDOM": False, #not implemented yet ## Random wall options ## "SLEEP_TIME": 0, "HELP": False } DEFAULTS = dict([(key, value) for key, value in PARAMS.items()]) DEFAULTS_docstr = dict([(KEY_TO_OPT[key][0], value) for key, value in PARAMS.items()]) usage = """Photo Wall for WebAlbums 3. Usage: photowall.py <path> <target> [options] Arguments: <path> The path where photos are picked up from. [default: %(<path>)s] <target> The path where the target photo is written. Except in POLAROID+RANDOM mode, the image will be blanked out first. [default: %(<target>)s] Options: --polaroid Use polaroid-like images for the wall --width <width> Set final image width. [default: %(--width)d] --nb-lines <nb> Number on lines of the target image. [default: %(--nb-lines)d] --resize Resize images before putting in the wall. [default: %(--resize)s] --line-height <height> Set the height of a single image. [default: %(--line-height)d]
--do-wrap If not POLAROID, finish images on the next line. [default:
%(--do-wrap)s] --help Display this message Polaroid mode options: --crop-size <crop> Minimum size to allow cropping an image. [default: %(--crop-size)s] --no-caption Disable caption. [default: %(--no-caption)s] --put-random Put images randomly instead of linearily. [default: %(--put-random)s] --sleep <time> If --put-random, time (in seconds) to go asleep before adding a new image. [default: %(--sleep)d] Filesystem options: --force-vfs Treat <path> as a VFS filesystem. [default: %(--force-vfs)s] --force-no-vfs Treat <path> as a normal filesystem. [default: %(--force-no-vfs)s] --no-switch-to-mini If VFS, don't switch from the normal image to the miniature. [default: %(--no-switch-to-mini)s] --pick-random If not VFS, pick images randomly in the <path> folder. [default: %(--pick-random)s] """ % DEFAULTS_docstr class UpdateCallback: def newExec(self): pass def newImage(self, row=0, col=0, filename=""): print("%d.%d > %s" % (row, col, filename)) def updLine(self, row, tmpLine): #print("--- %d ---" % row) pass def newFinal(self, name): pass def finished(self, name): print("==========") def stopRequested(self): return False def checkPause(self): pass updateCB = UpdateCallback() if __name__ == "__main__": arguments = docopt(usage, version="3.5-dev") if arguments["--help"]: print(usage) exit() param_args = dict([(OPT_TO_KEY[opt][0], OPT_TO_KEY[opt][1](value)) for opt, value in arguments.items()]) PARAMS = dict(PARAMS, **param_args) ########################################### ########################################### previous = None def get_next_file_vfs(): global previous if previous is not None: try: os.unlink(previous) except OSerror: pass files = os.listdir(PARAMS["PATH"]) for filename in files: if not "By Years" in filename: previous = PARAMS["PATH"]+filename if "gpx" in previous: return get_next_file() to_return = previous try: to_return = os.readlink(to_return) except OSError: pass if not PARAMS["NO_SWITCH_TO_MINI"]: to_return = to_return.replace("/images/", 
"/miniatures/") + ".png" return to_return def get_file_details(filename): try: link = filename try: link = os.readlink(filename) except OSError: pass link = pipes.quote(link) names = link[link.index("/miniatures/" if not PARAMS["NO_SWITCH_TO_MINI"] else "/images"):].split("/")[2:] theme, year, album, fname = names return "%s (%s)" % (album, theme) except Exception as e: #print("Cannot get details from {}: {}".format(filename, e)) fname = get_file_details_dir(filename) fname = fname.rpartition(".")[0] fname = fname.replace("_", "\n") return fname ########################################### class GetFileDir: def __init__(self, randomize): self.idx = 0 self.files = os.listdir(PARAMS["PATH"]) if len(self.files) == 0: raise EnvironmentError("No file available") self.files.sort() if randomize: print("RANDOMIZE") random.shuffle(self.files) def get_next_file(self): to_return = self.files[self.idx] self.idx += 1 self.idx %= len(self.files) return PARAMS["PATH"]+to_return def get_file_details_dir(filename): return filename[filename.rindex("/")+1:] ########################################### ########################################### def do_append(first, second, underneath=False): sign = "-" if underneath else "+" background = "-background black" if PARAMS["DO_POLAROID"] else "" command = "convert -gravity center %s %sappend %s %s %s" % (background, sign, first, second, first) ret = subprocess.call(command, shell=True) if ret != 0: raise Exception("Command failed: ", command) def do_polaroid (image, filename=None, background="black", suffix=None): if suffix is None: suffix = PARAMS["IMG_FORMAT_SUFFIX"] tmp = tempfile.NamedTemporaryFile(delete=False, suffix=suffix) tmp.close() image.save(filename=tmp.name) if not(PARAMS["WANT_NO_CAPTION"]) and filename: details = get_file_details(filename) caption = """-caption "%s" """ % details.replace("'", "\\'") else: caption = "" command = "convert -bordercolor snow -background %(bg)s -gravity center %(caption)s +polaroid %(name)s 
%(name)s" % {"bg" : background, "name":tmp.name, "caption":caption} ret = subprocess.call(command, shell=True) if ret != 0: raise Exception("Command failed: "+ command) img = Image(filename=tmp.name).clone() os.unlink(tmp.name) img.resize(width=image.width, height=image.height) return img def do_blank_image(height, width, filename, color="black"): command = "convert -size %dx%d xc:%s %s" % (width, height, color, filename) ret = subprocess.call(command, shell=True) if ret != 0: raise Exception("Command failed: "+ command) def do_polaroid_and_random_composite(target_filename, target, image, filename): PERCENT_IN = 100 image = do_polaroid(image, filename, background="transparent", suffix=".png") tmp = tempfile.NamedTemporaryFile(delete=False, suffix=PARAMS["IMG_FORMAT_SUFFIX"]) image.save(filename=tmp.name) height = random.randint(0, target.height - image.height) - target.height/2 width = random.randint(0, target.width - image.width) - target.width/2 geometry = ("+" if height >= 0 else "") + str(height) + ("+" if width >= 0 else "") + str(width) command = "composite -geometry %s -compose Over -gravity center %s %s %s" % (geometry, tmp.name, target_filename, target_filename) ret = os.system(command) os.unlink(tmp.name) if ret != 0: raise object("failed") def photowall(name): output_final = None previous_filename = None #for all the rows, for row in range(PARAMS["LINES"]): output_row = None row_width = 0 #concatenate until the image width is reac
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
from . RunnerBase import RunnerBaseClass
from .. Analysers.GPUVerify import GPUVerifyAnalyser
import logging
import os
import psutil
import re
import sys
import yaml

_logger = logging.getLogger(__name__)

class GPUVerifyRunnerException(Exception):
  """Raised for GPUVerify-specific configuration errors."""
  def __init__(self, msg):
    self.msg = msg

class GPUVerifyRunner(RunnerBaseClass):
  """Runner that drives the GPUVerify python front-end on a Boogie program."""

  # Extra seconds added to the requested timeout so GPUVerify's own soft
  # timeout fires before our hard kill.
  softTimeoutDiff = 5

  def __init__(self, boogieProgram, workingDirectory, rc):
    _logger.debug('Initialising {}'.format(boogieProgram))
    super(GPUVerifyRunner, self).__init__(boogieProgram, workingDirectory, rc)

    # Sanity checks
    # TODO
    self.softTimeout = self.maxTimeInSeconds
    if self.maxTimeInSeconds > 0:
      # We use GPUVerify's timeout function and enforce the
      # requested timeout and enforce a hard timeout slightly later
      self.maxTimeInSeconds = self.maxTimeInSeconds + self.softTimeoutDiff

    if not self.toolPath.endswith('.py'):
      raise GPUVerifyRunnerException(
        'toolPath needs to be the GPUVerify python script')

  @property
  def name(self):
    return "gpuverify"

  def _buildResultDict(self):
    results = super(GPUVerifyRunner, self)._buildResultDict()
    # TODO: Remove this. It's now redundant
    results['hit_hard_timeout'] = results['backend_timeout']
    return results

  def GetNewAnalyser(self, resultDict):
    return GPUVerifyAnalyser(resultDict)

  def run(self):
    """Invoke GPUVerify on the program and record whether it timed out."""
    # Run using python interpreter
    cmdLine = [ sys.executable, self.toolPath ]
    cmdLine.append('--timeout={}'.format(self.softTimeout))

    # Note we ignore self.entryPoint
    _logger.info('Ignoring entry point {}'.format(self.entryPoint))

    # GPUVerify needs PATH environment variable set.
    # (Previously written as `if path == None`; use the default form instead.)
    env = {'PATH': os.getenv('PATH', '')}

    cmdLine.extend(self.additionalArgs)

    # Add the boogie source file as last arg
    cmdLine.append(self.programPathArgument)

    backendResult = self.runTool(cmdLine, isDotNet=False, envExtra=env)
    if backendResult.outOfTime:
      _logger.warning('GPUVerify hit hard timeout')

def get():
  return GPUVerifyRunner
from datetime import datetime

from flask_login import UserMixin
from marshmallow import Schema, fields
from werkzeug.security import generate_password_hash, check_password_hash

from app import db


class User(db.Model, UserMixin):
    """Application user storing a hashed password."""
    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), unique=True)
    # NOTE(review): String(50) is shorter than werkzeug's generated hashes
    # (pbkdf2 output is typically 90+ chars); confirm the column is wide
    # enough before deploying.
    password = db.Column(db.String(50))

    def __init__(self, username, password):
        self.username = username
        # set_password() stores the hash on self.pwd_hash; it is then copied
        # into the mapped `password` column below.
        # NOTE(review): the str()->bytes round-trip looks unnecessary --
        # generate_password_hash accepts plain strings; confirm before
        # simplifying.
        self.set_password(password=bytes(str(password), 'utf-8'))
        self.password = self.pwd_hash

    def set_password(self, password):
        # Hash the password; the result is kept on a transient attribute,
        # not written to the column directly.
        self.pwd_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if `password` matches the stored hash."""
        return check_password_hash(self.password, password)

    def __repr__(self):
        return '<User %r>' % self.username


class BucketList(db.Model):
    """A user's bucket list; owns an ordered set of BucketListItem rows."""
    __tablename__ = 'bucketlists'

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey(User.id))
    user = db.relationship('User',
                           backref=db.backref('bucketlists', lazy='dynamic'))
    name = db.Column(db.String(100))
    date_created = db.Column(db.DateTime)
    date_modified = db.Column(db.DateTime)
    created_by = db.Column(db.String(20))

    def __init__(self, name, user_id, created_by):
        self.name = name
        self.user_id = user_id
        self.created_by = created_by
        # Both timestamps start at creation time; date_modified is updated
        # by the application on edits.
        self.date_created = datetime.utcnow()
        self.date_modified = datetime.utcnow()

    def __repr__(self):
        return '<BucketList %r>' % self.name


class BucketlistSchema(Schema):
    """Marshmallow serialization schema for BucketList."""
    id = fields.Int()
    name = fields.Str()
    date_created = fields.DateTime()
    date_modified = fields.DateTime()
    created_by = fields.Str()


class BucketListItem(db.Model):
    """A single entry belonging to a BucketList."""
    __tablename__ = 'items'

    id = db.Column(db.Integer, primary_key=True)
    bucketlist_id = db.Column(db.Integer, db.ForeignKey(BucketList.id))
    bucketlist = db.relationship('BucketList',
                                 backref=db.backref('items', lazy='dynamic'))
    name = db.Column(db.String(100))
    date_created = db.Column(db.DateTime)
    date_modified = db.Column(db.DateTime)
    done = db.Column(db.Boolean, default=False)

    def __init__(self, name, bucketlist_id):
        self.name = name
        self.bucketlist_id = bucketlist_id
        self.date_created = datetime.utcnow()
        self.date_modified = datetime.utcnow()
        self.done = False

    def __repr__(self):
        return '<BucketListItem %r>' % self.name


class BucketlistItemSchema(Schema):
    """Marshmallow serialization schema for BucketListItem."""
    id = fields.Int()
    name = fields.Str()
    date_created = fields.DateTime()
    date_modified = fields.DateTime()
    done = fields.Bool()
from flask import Flask
# FIX: "flask.ext.*" was removed in Flask 1.0; import the package directly.
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
# FIX: configuration must be in place *before* SQLAlchemy(app) runs its
# init_app step; previously the URI was set after the extension was created,
# so it initialized against the extension's default database instead of
# sqlite:///app.db.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.debug = True
db = SQLAlchemy(app)
import os
import sys
import subprocess
import shlex
import sys  # NOTE(review): duplicate of the `import sys` above; harmless
import StringIO
import datetime

# Make the makahiki package importable relative to this script.
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki" + os.sep)

from apps.utils import script_utils


def termination_string():
    """
    Gets the current system time and appends it to a termination notice.
    """
    now = datetime.datetime.now()
    time = now.strftime("%Y-%m-%d %H:%M:%S")
    end_time = "Script exiting at %s\n" % time
    return end_time


# Modified from manage_py_dir() in script_utils.py
def local_manage_py_dir():
    """Returns the directory holding the manage.py file as a string."""
    return os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki")


# Modified from local_reset_db(heroku_app) in script_utils.py
def local_reset_db(logfile):
    """reset db.
    Returns a tuple result_tuple. result_tuple[0] has the logfile.
    result_tuple[1] is True if the reset was aborted, and False if was not.
    """
    local_reset_db_cancel = False
    logfile.write("WARNING: This command will reset the database. " \
                  "All existing data will be deleted. This process is irreversible.\n")
    print "WARNING: This command will reset the database. " \
          "All existing data will be deleted. This process is irreversible.\n"
    # Keep prompting until the user gives an unambiguous answer.
    value = raw_input("Do you wish to continue (Y/n)? ")
    while value != "Y" and value != "n":
        logfile.write("Invalid option %s\n" % value)
        print "Invalid option %s\n" % value
        value = raw_input("Do you wish to continue (Y/n)? ")
    if value == "n":
        logfile.write("Do you wish to continue (Y/n)? %s\n" % value)
        logfile.write("Operation cancelled.")
        print "Operation cancelled.\n"
        local_reset_db_cancel = True
        result_tuple = [logfile, local_reset_db_cancel]
        return result_tuple
    elif value == "Y":
        logfile.write("Do you wish to continue (Y/n)? %s\n" % value)
        print "resetting the db..."
        os.system("cd " + local_manage_py_dir() + "; python scripts/initialize_postgres.py")
        result_tuple = [logfile, local_reset_db_cancel]
        return result_tuple


def run(logfile):
    """
    Initializes the Makahiki database with default options and logs the output to a file.
    This should only be used to initialize local installations.
    """
    now = datetime.datetime.now()
    time = now.strftime("%Y-%m-%d %H:%M:%S")
    start_time = "Makahiki instance initialization script started at %s\n" % time
    logfile.write(start_time)
    print start_time
    try:
        # Retrieve the user's home directory
        USER_HOME = subprocess.check_output(["echo $HOME"], stderr=subprocess.STDOUT, shell=True)
        # Remove newline from expected "/home/<username>\n"
        USER_HOME = USER_HOME[:-1]
        USER_PROJECT_HOME = USER_HOME + os.sep + "makahiki"
        # cd to makahiki directory
        os.chdir(USER_PROJECT_HOME)
        # Capture console output from script_utils functions:
        # (sys.stdout is temporarily redirected to a StringIO buffer around
        # each script_utils call, then restored and flushed to the logfile.)
        normal_stdout = sys.stdout
        output_capturer = StringIO.StringIO()
        sys.stdout = output_capturer
        # Runs the initialization scripts in same order as
        # makahiki/makahiki/scripts/initialize_instance.py
        instance_type = None
        heroku_app = None
        manage_py = script_utils.manage_py_command()
        manage_command = "python " + manage_py
        fixture_path = "makahiki" + os.sep + "fixtures"
        # Install requirements
        script_utils.install_requirements()
        # Switch back to standard I/O
        sys.stdout = normal_stdout
        output = output_capturer.getvalue()
        logfile.write(output)
        print(output)
        # Clear the logfile buffer.
        logfile.flush()
        os.fsync(logfile)
        # Reset the database
        reset_db_result = local_reset_db(logfile)
        # If successful, write the output of local_reset_db to a logfile
        logfile = reset_db_result[0]
        local_reset_db_cancel = reset_db_result[1]
        if local_reset_db_cancel:
            logfile.write("Makahiki instance initialization was cancelled by the user.")
            print "Makahiki instance initialization was cancelled by the user."
            end_time = termination_string()
            logfile.write(end_time)
            print end_time
            return logfile
        else:
            # Resume capturing I/O
            normal_stdout = sys.stdout
            output_capturer = StringIO.StringIO()
            sys.stdout = output_capturer
            # Sync the database
            script_utils.syncdb(manage_command)
            # Switch I/O back, write output to logfile
            sys.stdout = normal_stdout
            output = output_capturer.getvalue()
            logfile.write(output)
            print(output)
            # Clear the logfile buffer.
            logfile.flush()
            os.fsync(logfile)
            # Resume capturing I/O
            normal_stdout = sys.stdout
            output_capturer = StringIO.StringIO()
            sys.stdout = output_capturer
            # Copy static files
            script_utils.copy_static_media(heroku_app)
            # Switch I/O back, write output to logfile
            sys.stdout = normal_stdout
            output = output_capturer.getvalue()
            logfile.write(output)
            print(output)
            # Clear the logfile buffer.
            logfile.flush()
            os.fsync(logfile)
            # Resume capturing I/O
            normal_stdout = sys.stdout
            output_capturer = StringIO.StringIO()
            sys.stdout = output_capturer
            # Load data
            script_utils.load_data(manage_command, instance_type, fixture_path)
            # Switch I/O back, write output to logfile
            sys.stdout = normal_stdout
            output = output_capturer.getvalue()
            logfile.write(output)
            print(output)
            # Clear the logfile buffer.
            logfile.flush()
            os.fsync(logfile)
            # Print a closing message
            closing = "\nMakahiki initialization script has completed.\n"
            logfile.write(closing)
            print closing
            end_time = termination_string()
            logfile.write(end_time)
            print end_time
            return logfile
    except subprocess.CalledProcessError as cpe:
        logfile.write("CalledProcessError: ")
        print "CalledProcessError: "
        logfile.write(cpe.output)
        print cpe.output
        logfile.write("Warning: Makahiki initialization did not complete successfully.")
        print "Warning: Makahiki initialization did not complete successfully."
        end_time = termination_string()
        logfile.write(end_time)
        print end_time
        return logfile
    except OSError as ose:
        logfile.write("OSError: ")
        print "OSError: "
        oserror_output = " errno: %s\n filename: %s\n strerror: %s\n" % (ose.errno, ose.filename, ose.strerror)
        logfile.write(oserror_output)
        print oserror_output
        logfile.write("Warning: Makahiki initialization did not complete successfully.")
        print "Warning: Makahiki initialization did not complete successfully."
        end_time = termination_string()
        logfile.write(end_time)
        print end_time
        return logfile
# QGL2 test programs exercising compile-time-false conditionals.
# NOTE: these bodies are consumed by the QGL2 compiler, so their exact
# statement structure is significant.
from qgl2.qgl2 import qgl2decl, qgl2main, qreg
from qgl2.qgl2 import QRegister
from qgl2.qgl1 import X, Y, Z, Id, Utheta

from itertools import product


@qgl2decl
def cond_helper(q: qreg, cond):
    # Apply X only when `cond` holds; with a constant-false condition the
    # call should compile away to nothing.
    if cond:
        X(q)


@qgl2decl
def t1():
    """ Correct result is [ X(q1) ] """
    q1 = QRegister('q1')
    cond_helper(q1, False)
    X(q1)


@qgl2decl
def t2():
    """ Correct result is [ X(q1) ] """
    q1 = QRegister('q1')
    q2 = QRegister('q2')
    # We're not going to reference q2 anywhere,
    # just to make sure that the compiler doesn't
    # freak out
    X(q1)


@qgl2decl
def t3():
    """ Like t2, but with a function call """
    q1 = QRegister('q1')
    q2 = QRegister('q2')
    cond_helper(q1, True)


@qgl2decl
def t4():
    """ Like t3, but the function call does nothing """
    q1 = QRegister('q1')
    q2 = QRegister('q2')
    cond_helper(q1, False)
    X(q1)  # need to do something


@qgl2decl
def t5():
    """ Like t3, but the function call does nothing """
    q1 = QRegister('q1')
    q2 = QRegister('q2')
    # don't do anything at all
are (dx+1)/2 on the left and (dx-1)/2 on the right xm = (dx + 1) / 2 xp = xm - 1 else: # Even-sized: X extents are X-dx/2 and X+dx/2 xm = xp = dx / 2 if dy & 1: # Odd-sized: Y extents are (dy+1)/2 below and (dy-1)/2 above ym = (dy + 1) / 2 yp = ym - 1 else: # Even-sized: Y extents are Y-dy/2 and Y+dy/2 ym = yp = dy / 2 return (X - xm, Y - ym, X + xp, Y + yp) def getAdjusted(self, minimum): """ Adjust aperture properties to conform to minimum feature dimensions Return new aperture if required, else return False """ dimx = dimy = None # Check for X and Y dimensions less than minimum if self.dimx is not None and self.dimx < minimum: dimx = minimum if self.dimy is not None and self.dimx < minimum: dimy = minimum # Return new aperture if needed if dimx is not None or dimy is not None: if dimx is None: dimx = self.dimx if dimy is None: dimy = self.dimy return Aperture((self.apname, self.pat, self.format), self.code, dimx, dimy) else: return False # no new aperture needs to be created def rotate(self, GAMT, RevGAMT): if self.apname in ('Macro',): # Construct a rotated macro, see if it's in the GAMT, and set self.dimx # to its name if so. If not, add the rotated macro to the GAMT and set # self.dimx to the new name. 
Recall that GAMT maps name to macro # (e.g., GAMT['M9'] = ApertureMacro(...)) while RevGAMT maps hash to # macro name (e.g., RevGAMT[hash] = 'M9') AMR = GAMT[self.dimx].rotated() hash = AMR.hash() try: self.dimx = RevGAMT[hash] except KeyError: AMR = amacro.addToApertureMacroTable(GAMT, AMR) # adds to GAMT and modifies name to global name self.dimx = RevGAMT[hash] = AMR.name elif self.dimy is not None: # Rectangles and Ovals have a dimy setting and need to be rotated t = self.dimx self.dimx = self.dimy self.dimy = t def rotated(self, GAMT, RevGAMT): # deepcopy doesn't work on re patterns for some reason so we copy ourselves manually APR = Aperture((self.apname, self.pat, self.format), self.code, self.dimx, self.dimy) APR.rotate(GAMT, RevGAMT) return APR def dump(self, fid=sys.stdout): fid.write(str(self)) def __str__(self): return "{:s}: {:s}".format(self.code, self.hash()) def hash(self): if self.dimy: return ("{:s} ({:.5f} x {:.5f})".format(self.apname, self.dimx, self.dimy)) else: if self.apname in ('Macro',): return ("{:s} ({:s})".format(self.apname, self.dimx)) else: return ("{:s} ({:.5f})".format(self.apname, self.dimx)) def writeDef(self, fid): if self.dimy: fid.write(self.format.format(self.code, self.dimx, self.dimy)) else: fid.write(self.format.format(self.code, self.dimx)) # Parse the aperture definition in line 's'. macroNames is an aperture macro dictionary # that translates macro names local to this file to global names in the GAMT. 
We make # the translation right away so that the return value from this function is an aperture # definition with a global macro name, e.g., 'ADD10M5' def parseAperture(s, knownMacroNames): for ap in Apertures: match = ap[1].match(s) if match: dimy = None if ap[0] in ('Circle', 'Octagon', 'Macro'): code, dimx = match.groups() else: code, dimx, dimy = match.groups() if ap[0] in ('Macro',): if dimx in knownMacroNames: dimx = knownMacroNames[dimx] # dimx is now GLOBAL, permanent macro name (e.g., 'M2') else: raise RuntimeError("Aperture Macro name \"{:s}\" not defined".format(dimx)) else: try: dimx = float(dimx) if dimy: dimy = float(dimy) except: raise RuntimeError("Illegal floating point aperture size") return Aperture(ap, code, dimx, dimy) return None # This function returns a dictionary where each key is an # aperture code string (e.g., "D11") and the value is the # Aperture object that represents it. For example: # # %ADD12R,0.0630X0.0630*% # # from a Gerber file would result in the dictionary entry: # # "D12": Aperture(ap, 'D10', 0.063, 0.063) # # The input fileList is a list of pathnames which will be read to construct the # aperture table for a job. All the files in the given list will be so # examined, and a global aperture table will be constructed as a dictionary. # Same goes for the global aperture macro table. tool_pat = re.compile(r"^(?:G54)?D\d+\*$") def constructApertureTable(fileList, GAT, GAMT): # First we construct a dictionary where each key is the # string representation of the aperture. Then we go back and assign # numbers. For aperture macros, we construct their final version # (i.e.,
'M1', 'M2', etc.) right away, as they are parsed. Thus, # we translate from 'THX10N' or whatever to 'M
2' right away. GAT.clear() # Clear Global Aperture Table GAMT.clear() # Clear Global Aperture Macro Table RevGAMT = {} # Dictionary keyed by aperture macro hash and returning macro name AT = {} # Aperture Table for this file for fname in fileList: knownMacroNames = {} fid = open(fname, 'rt') for line in fid: # Get rid of CR line = line.replace('\x0D', '') if tool_pat.match(line): break # When tools start, no more apertures are being defined # If this is an aperture macro definition, add its string # representation to the dictionary. It might already exist. # Ignore %AMOC8* from Eagle for now as it uses a macro parameter. if line[:7] == "%AMOC8*": continue # parseApertureMacro() sucks up all macro lines up to terminating '%' AM = amacro.parseApertureMacro(line, fid) if AM: # Has this macro definition already been defined (perhaps by another name # in another layer)? try: # If this macro has already been encountered anywhere in any job, # RevGAMT will map the macro hash to the global macro name. Then, # make the local association knownMacroNames[localMacroName] = globalMacroName. knownMacroNames[AM.name] = RevGAMT[AM.hash()] except KeyError: # No, so define the global macro and do the translation. Note that # addToApertureMacroTable() MODIFIES AM.name to the new M-name. localMacroName = AM.name AM = amacro.addToApertureMacroTable(GAMT, AM) knownMacroNames[localMacroName] = AM.name RevGAMT[AM.hash()] = AM.name else: A = parseAperture(line, knownMacroNames) # If this is an aperture definition, add the string representation # to the dictionary. It might already exist. if A: AT[A.hash()] = A fid.close() # Now, go through and assign sequential codes to all apertures code = 11 #start at 11 since we will be using aperture 10 for the overall outline for val in AT.values(): key = "D{:d}".format(code) GAT[key] = val val.code = key code += 1 def findHighestApertureCode(keys): "Find the highest integer value in a list of aperture codes: ['D10', 'D23', 'D35', ...]" # Mus
from django.db.models.signals import post_save from django.dispatch import receiver from cloths
tream.user_profile.models import UserProfile @receiver(post_save, sender=UserProfile) def create_initial_collection(sender, created, instance, **kwargs): from clothstream.collection.models import Collection if created: Collection.objects.create(owner=instance, title=u'My firs
t collection')
from django.urls import reverse_lazy
from django.views.generic import ListView
from django.views.generic.edit import UpdateView

from .models import Toy


class ToyEditView(UpdateView):
    """Edit form for a single Toy instance."""
    model = Toy
    fields = '__all__'  # expose every model field on the generated form
    template_name_suffix = '_edit'  # render <app>/toy_edit.html instead of the default *_form.html
    success_url = reverse_lazy('toy:list')  # lazy: URLconf may not be loaded at class-definition time


class ToyListView(ListView):
    """List view over all Toy rows."""

    def get_queryset(self):
        # Explicit queryset; equivalent to declaring `model = Toy`.
        return Toy.objects.all()
# BGP NLRI (Network Layer Reachability Information) encoding/decoding for
# plain IPv4 and VPNv4 (RFC 4364-style) prefixes.
# NOTE(review): this is Python 2 code -- it relies on `cmp`, byte-strings as
# `str`, and `'\0'` literals; it is not Python 3 compatible as written.
import array
import struct
import socket

from odict import OrderedDict as OD


class NLRI:
    """Base NLRI: opaque AFI/SAFI-tagged value, encoded verbatim."""

    def __init__(self, afi, safi, val):
        self.afi = afi
        self.safi = safi
        self.val = val

    def encode(self):
        return self.val


class vpnv4(NLRI):
    """VPNv4 NLRI: MPLS label stack + Route Distinguisher + IPv4 prefix."""

    def __init__(self, labels, rd, prefix):
        self.labels = labels    # list of label values, or None (withdraw)
        self.rd = rd            # route distinguisher as 'x:y' string
        self.prefix = prefix    # 'a.b.c.d/len' string

    def __repr__(self):
        if self.labels:
            l = ','.join([str(l) for l in self.labels])
        else:
            l = 'none'
        return '<vpnv4 label %s rd %s prefix %s>' % (l, self.rd, self.prefix)

    def __str__(self):
        return '%s:%s' % (self.rd, self.prefix)

    def __cmp__(self, other):
        # Python 2 ordering hook; non-vpnv4 always compares as "less".
        if isinstance(other, vpnv4):
            return cmp(
                (self.labels, self.rd, self.prefix),
                (other.labels, other.rd, other.prefix),
            )
        return -1

    def encode(self):
        """Pack as: prefix-length byte, labels (3 bytes each), 8-byte RD,
        then only the significant bytes of the IPv4 prefix."""
        plen = 0
        v = ''
        labels = self.labels[:]
        if not labels:
            return '\0'
        # Shift into the 24-bit label field; low bit of the last label is
        # the bottom-of-stack marker.
        labels = [l<<4 for l in labels]
        labels[-1] |= 1
        for l in labels:
            lo = l & 0xff
            hi = (l & 0xffff00) >> 8
            # '>HB' = big-endian: high 16 bits then low 8 bits = 3 bytes.
            v += struct.pack('>HB', hi, lo)
            plen += 24
        # Route distinguisher: 'ip:num' (type 1) or 'asn:num' (type 0).
        l, r = self.rd.split(':')
        if '.' in l:
            ip = socket.inet_aton(l)
            rd = struct.pack('!H4sH', 1, ip, int(r))
        else:
            rd = struct.pack('!HHI', 0, int(l), int(r))
        v += rd
        plen += 64
        ip, masklen = self.prefix.split('/')
        ip = socket.inet_aton(ip)
        masklen = int(masklen)
        plen += masklen
        # Only emit the bytes covered by the mask length.
        if masklen > 24:
            v += ip
        elif masklen > 16:
            v += ip[:3]
        elif masklen > 8:
            v += ip[:2]
        elif masklen > 0:
            v += ip[:1]
        else:
            pass
        return struct.pack('B', plen) + v

    @classmethod
    def from_bytes(cls, plen, val):
        """Inverse of encode(): parse labels, RD and prefix out of `val`."""
        if plen==0:
            # what the hell?
            return cls([], '0:0', '0.0.0.0/0')
        idx = 0
        # plen is the length, in bits, of all the MPLS labels, plus the 8-byte RD, plus the IP prefix
        labels = []
        while True:
            ls, = struct.unpack_from('3s', val, idx)
            idx += 3
            plen -= 24
            if ls=='\x80\x00\x00':
                # special null label for vpnv4 withdraws
                labels = None
                break
            # Left-pad to 4 bytes so '!I' can decode the 24-bit field.
            label, = struct.unpack_from('!I', '\x00'+ls)
            bottom = label & 1   # bottom-of-stack bit terminates the stack
            labels.append(label >> 4)
            if bottom:
                break
        rdtype, rd = struct.unpack_from('!H6s', val, idx)
        if rdtype==1:
            rdip, num = struct.unpack('!4sH', rd)
            rdip = socket.inet_ntoa(rdip)
            rd = '%s:%s' % (rdip, num)
        else:
            num1, num2 = struct.unpack('!HI', rd)
            rd = '%s:%s' % (num1, num2)
        idx += 8
        plen -= 64
        # Remaining bits are the IPv4 prefix.
        ipl = pb(plen)
        ip = val[idx:idx+ipl]
        idx += ipl
        prefix = pip(ip, plen)
        return cls(labels, rd, prefix)


class ipv4(NLRI):
    """Plain IPv4 NLRI: a single 'a.b.c.d/len' prefix."""

    def __init__(self, prefix):
        self.prefix = prefix

    def __cmp__(self, other):
        # Compare by packed address bytes, then mask length.
        if isinstance(other, ipv4):
            aip, alen = self.prefix.split('/')
            alen = int(alen)
            aip = socket.inet_aton(aip)
            bip, blen = other.prefix.split('/')
            blen = int(blen)
            bip = socket.inet_aton(bip)
            return cmp((aip,alen),(bip,blen))
        return -1

    def encode(self):
        """Pack as: prefix-length byte + significant address bytes only."""
        plen = 0
        v = ''
        ip, masklen = self.prefix.split('/')
        ip = socket.inet_aton(ip)
        masklen = int(masklen)
        plen += masklen
        if masklen > 24:
            v += ip
        elif masklen > 16:
            v += ip[:3]
        elif masklen > 8:
            v += ip[:2]
        elif masklen > 0:
            v += ip[:1]
        else:
            pass
        return struct.pack('B', plen) + v

    def __repr__(self):
        return '<ipv4 %s>' % (self.prefix,)

    def __str__(self):
        return self.prefix

    @classmethod
    def from_bytes(cls, plen, val):
        return cls(pip(val, plen))


def pb(masklen):
    """Number of address bytes needed to carry `masklen` prefix bits."""
    if masklen > 24:
        return 4
    elif masklen > 16:
        return 3
    elif masklen > 8:
        return 2
    elif masklen > 0:
        return 1
    return 0


def pip(pi, masklen):
    """Zero-pad truncated address bytes `pi` and render 'a.b.c.d/len'."""
    pi += '\x00\x00\x00\x00'
    return '%s/%s' % (socket.inet_ntoa(pi[:4]), masklen)


def parse(bytes, afi=1, safi=0):
    """Parse a run of length-prefixed NLRI records; AFI 1 / SAFI 128 selects
    the vpnv4 decoder, anything else falls back to plain ipv4."""
    rv = []
    if afi==1 and safi==128:
        klass = vpnv4
    else:
        klass = ipv4
    idx = 0
    while idx < len(bytes):
        plen, = struct.unpack_from('B', bytes, idx)
        idx += 1
        # Round the bit length up to whole bytes.
        nbytes, rest = divmod(plen, 8)
        if rest:
            nbytes += 1
        val = bytes[idx:idx+nbytes]
        idx += nbytes
        rv.append(klass.from_bytes(plen, val))
    return rv
# Scrape acestream:// links out of subreddit event threads (Kodi addon helper).
# NOTE(review): this is Python 2 code (reload(sys)/setdefaultencoding); kept as-is.
import os, sys, re, json
from praw2 import Reddit

reload(sys)

# Inside Kodi, xbmc.log exists; fall back to print elsewhere.
try:
    from xbmc import log
except:
    def log(msg):
        print(msg)

sys.setdefaultencoding("utf-8")

CLIENT_ID = 'J_0zNv7dXM1n3Q'
CLIENT_SECRET = 'sfiPkzKDd8LZl3Ie1WLAvpCICH4'
USER_AGENT = 'sparkle streams 1.0'


class SubRedditEvents(object):
    """Fetches event submissions from a subreddit and extracts acestream
    links (with a best-effort quality label) from their comments."""

    as_regex_str = r'(acestream://[^$\s]+)'

    def __init__(self, username=None, password=None, client=None):
        # `client` lets tests inject a stub instead of a live Reddit session.
        self.client = client or Reddit(client_id=CLIENT_ID,
                                       client_secret=CLIENT_SECRET,
                                       user_agent=USER_AGENT,
                                       username=username,
                                       password=password,
                                       )
        self.as_regex = re.compile(self.as_regex_str, re.IGNORECASE)

    @staticmethod
    def get_as_links(body):
        """
        For each acestream link, return a tuple of acestream link, and
        link quality
        """
        links = []
        for entry in body.split('\n'):
            res = re.findall('(.*)(acestream://[a-z0-9]+)\s*(.*)', entry)
            if res:
                pre, acelink, post = res[0]
                # Whichever side of the link carries more text is taken as
                # the quality/label text.
                if len(pre.strip()) > len(post.strip()):
                    links.append((acelink.strip(), pre.strip()))
                else:
                    links.append((acelink.strip(), post.strip()))
        return links

    @staticmethod
    def priority(entry):
        """
        For cases where we have multiple entries for the same acestream link,
        prioritize based on the quality text to get the best text possible
        """
        if not entry[0]:
            return (entry, 3)
        elif re.search('.*\[.*\].*', entry[0]):
            return (entry, 1)
        else:
            return (entry, 2)

    @staticmethod
    def collapse(entries):
        """
        Collapse our list of acestream entries to pick only one with the
        best quality text
        """
        results = []
        prev = None
        # BUG FIX: `priority` is a staticmethod of this class; the previous
        # bare `priority(entry)` raised NameError at call time. It must be
        # qualified with the class name from inside another staticmethod.
        for entry in sorted(entries,
                            key=lambda entry: SubRedditEvents.priority(entry),
                            reverse=True):
            if prev != entry[0]:
                results.append(entry)
                prev = entry[0]
        return results

    def get_events(self, subreddit, filtering=False):
        """Return submissions of `subreddit` as dicts sorted by score desc."""
        subs = []
        path = '/r/{}'.format(subreddit)
        for submission in self.client.get(path):
            sub_id = submission.id
            score = submission.score
            title = submission.title
            title = title.encode('utf-8')
            subs.append({'submission_id': sub_id,
                         'title': title,
                         'score': score
                         })
        return sorted(subs, key=lambda d: d['score'], reverse=True)

    def get_event_links(self, submission_id):
        """Return (score, quality, acelink) tuples for one submission."""
        submission = self.client.submission(id=submission_id)
        links = []
        scores = {}
        # Add the extracted links and details tuple
        for c in submission.comments.list():
            if hasattr(c, 'body'):
                new_links = self.get_as_links(c.body.encode('utf-8'))
                links.extend(new_links)
                # BUG FIX: only score the links found in THIS comment. The
                # previous code iterated over ALL accumulated links, so every
                # later comment's score bled into earlier links' maxima.
                score = c.score if hasattr(c, 'score') else 0
                for entry in new_links:
                    scores[entry[0]] = max(scores.get(entry[0], 0), score)
        if len(links) > 0:
            return [(s, q, a) for ((a, q), s)
                    in zip(links, map(lambda x: scores[x[0]], links))]
        else:
            return links
# Package version string (unusual 6-component form kept as released).
__version__ = '1.13.0.0.0.1'

if __name__ == "__main__":
    # Direct execution: delegate to the package entry point.
    from main import run
    run()
"""Created on Sat Oct 01 2015 16:14. @author: Nathan Budd """ import numpy as np def mee2coe(MEE, mu=1.): """ Convert modified equinoctial elements to classical orbital elements. Parameters ---------- MEE : ndarray mx6 array of elements ordered as [p f g h k L]. mu : float Standard gravitational parameter. Defaults to canonical units. Returns ------- COE : ndarray mx6 array of elements ordered as [p e i W w f]. """ p = MEE[0:, 0:1] f = MEE[0:, 1:2] g = MEE[0:, 2:3] h = MEE[0:, 3:4] k = MEE[0:, 4:5] L = MEE[0:, 5:6] # inclination i = 2. * np.arctan((h**2 + k**2)**.5) # right ascension of the ascending node W = np.mod(np.arctan2(k, h), 2*np.pi) # eccentricity e = (f**2 + g**2)**.5 # argume
nt of periapsis w_bar = np.mod(np.arctan2(g, f), 2*np.pi) w = np.mod(w_bar - W, 2*np.pi) # true anomaly f =
np.mod(L - w_bar, 2*np.pi) return np.concatenate((p, e, i, W, w, f), 1)
""" report.py Functions to create various reports. project : pf version : 0.0.0 status : development modifydate : createdate : we
bsite : https://github.com/tmthydvnprt/pf author : tmthydvnprt email : tim@tmthydvnprt.com maintain
er : tmthydvnprt license : MIT copyright : Copyright 2016, tmthydvnprt credits : """
# -*- coding: utf-8 -*-
# Tests for zerver's request-variable decorator (REQ/has_request_variables)
# and the zerver.lib.validator combinators.
from django.test import TestCase

from zerver.decorator import \
    REQ, has_request_variables, RequestVariableMissingError, \
    RequestVariableConversionError, JsonableError
from zerver.lib.validator import (
    check_string, check_dict, check_bool, check_int, check_list
)

import ujson


class DecoratorTestCase(TestCase):
    def test_REQ_converter(self):
        """REQ(converter=...) parses the raw value and surfaces errors."""

        def my_converter(data):
            lst = ujson.loads(data)
            if not isinstance(lst, list):
                raise ValueError('not a list')
            if 13 in lst:
                raise JsonableError('13 is an unlucky number!')
            return lst

        @has_request_variables
        def get_total(request, numbers=REQ(converter=my_converter)):
            return sum(numbers)

        # Minimal stand-in for a Django request carrying REQUEST params.
        class Request(object):
            REQUEST = {} # type: Dict[str, str]

        request = Request()

        # Missing variable -> RequestVariableMissingError.
        with self.assertRaises(RequestVariableMissingError):
            get_total(request)

        # Converter raising ValueError -> RequestVariableConversionError.
        request.REQUEST['numbers'] = 'bad_value'
        with self.assertRaises(RequestVariableConversionError) as cm:
            get_total(request)
        self.assertEqual(str(cm.exception), "Bad value for 'numbers': bad_value")

        # Converter raising JsonableError propagates untouched.
        request.REQUEST['numbers'] = ujson.dumps([2, 3, 5, 8, 13, 21])
        with self.assertRaises(JsonableError) as cm:
            get_total(request)
        self.assertEqual(str(cm.exception), "13 is an unlucky number!")

        request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
        result = get_total(request)
        self.assertEqual(result, 21)

    def test_REQ_validator(self):
        """REQ(validator=...) json-decodes then validates the value."""

        @has_request_variables
        def get_total(request, numbers=REQ(validator=check_list(check_int))):
            return sum(numbers)

        class Request(object):
            REQUEST = {} # type: Dict[str, str]

        request = Request()

        with self.assertRaises(RequestVariableMissingError):
            get_total(request)

        # Unlike converters, validators receive already-decoded JSON, so a
        # non-JSON value fails with a generic json error.
        request.REQUEST['numbers'] = 'bad_value'
        with self.assertRaises(JsonableError) as cm:
            get_total(request)
        self.assertEqual(str(cm.exception), 'argument "numbers" is not valid json.')

        request.REQUEST['numbers'] = ujson.dumps([1, 2, "what?", 4, 5, 6])
        with self.assertRaises(JsonableError) as cm:
            get_total(request)
        self.assertEqual(str(cm.exception), 'numbers[2] is not an integer')

        request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
        result = get_total(request)
        self.assertEqual(result, 21)


class ValidatorTestCase(TestCase):
    # Each validator returns None on success or an error string on failure.

    def test_check_string(self):
        x = "hello"
        self.assertEqual(check_string('x', x), None)

        x = 4
        self.assertEqual(check_string('x', x), 'x is not a string')

    def test_check_bool(self):
        x = True
        self.assertEqual(check_bool('x', x), None)

        x = 4
        self.assertEqual(check_bool('x', x), 'x is not a boolean')

    def test_check_int(self):
        x = 5
        self.assertEqual(check_int('x', x), None)

        x = [{}]
        self.assertEqual(check_int('x', x), 'x is not an integer')

    def test_check_list(self):
        x = 999
        error = check_list(check_string)('x', x)
        self.assertEqual(error, 'x is not a list')

        x = ["hello", 5]
        error = check_list(check_string)('x', x)
        self.assertEqual(error, 'x[1] is not a string')

        # Validators compose; error paths include nested indices.
        x = [["yo"], ["hello", "goodbye", 5]]
        error = check_list(check_list(check_string))('x', x)
        self.assertEqual(error, 'x[1][2] is not a string')

        x = ["hello", "goodbye", "hello again"]
        error = check_list(check_string, length=2)('x', x)
        self.assertEqual(error, 'x should have exactly 2 items')

    def test_check_dict(self):
        keys = [
            ('names', check_list(check_string)),
            ('city', check_string),
        ]

        x = {
            'names': ['alice', 'bob'],
            'city': 'Boston',
        }
        error = check_dict(keys)('x', x)
        self.assertEqual(error, None)

        x = 999
        error = check_dict(keys)('x', x)
        self.assertEqual(error, 'x is not a dict')

        x = {}
        error = check_dict(keys)('x', x)
        self.assertEqual(error, 'names key is missing from x')

        x = {
            'names': ['alice', 'bob', {}]
        }
        error = check_dict(keys)('x', x)
        self.assertEqual(error, 'x["names"][2] is not a string')

        x = {
            'names': ['alice', 'bob'],
            'city': 5
        }
        error = check_dict(keys)('x', x)
        self.assertEqual(error, 'x["city"] is not a string')

    def test_encapsulation(self):
        # There might be situations where we want deep
        # validation, but the error message should be customized.
        # This is an example.
        def check_person(val):
            error = check_dict([
                ['name', check_string],
                ['age', check_int],
            ])('_', val)
            if error:
                return 'This is not a valid person'

        person = {'name': 'King Lear', 'age': 42}
        self.assertEqual(check_person(person), None)

        person = 'misconfigured data'
        self.assertEqual(check_person(person), 'This is not a valid person')
#!/usr/bin/env python3
# Unit tests for the findbits module (invert/search/binstring/stringreverse).

import unittest, sys, findbits


class TestFindBits(unittest.TestCase):
    def setUp(self):
        # Capture stdout so search() output can be asserted on.
        self.old_stdout = sys.stdout
        sys.stdout = OutputBuffer()

    def tearDown(self):
        sys.stdout = self.old_stdout

    INVERT_CASES = [
        ('10', '01'),
        ('', ''),
    ]
    def test_invert(self):
        self.commutative_test(findbits.invert, self.INVERT_CASES)

    SEARCH_CASES = [
        ('1111', '10111101', ['Match at bit 2', '0<1111>0']),
        ('00', '10111101', ['Not found']),
    ]
    def test_search(self):
        for target, data, expected_fragments in self.SEARCH_CASES:
            sys.stdout.clear_buffer()
            findbits.search(target, data)
            # search() prints its result; check each expected fragment
            # appears somewhere in the captured output.
            for fragment in expected_fragments:
                self.assertIn(fragment, sys.stdout.content)

    BINSTRING_CASES = [
        (42, '101010'),
        (1, '1'),
        (0, ''),
    ]
    def test_binstring(self):
        self.unary_operation_test(findbits.binstring, self.BINSTRING_CASES)

    REVERSE_CASES = [
        ('abc', 'cba'),
        ('', ''),
    ]
    def test_stringreverse(self):
        self.commutative_test(findbits.stringreverse, self.REVERSE_CASES)

    def commutative_test(self, operation, cases):
        # An involution must map out->in as well as in->out, so run the
        # cases forwards and with each (in, out) pair reversed.
        self.unary_operation_test(operation, cases)
        self.unary_operation_test(operation, map(reversed, cases))

    def unary_operation_test(self, operation, cases):
        for case_in, case_out in cases:
            self.assertEqual(operation(case_in), case_out)


class OutputBuffer(object):
    """Minimal file-like object accumulating writes into .content."""

    def __init__(self):
        self.clear_buffer()

    def clear_buffer(self):
        self.content = ''

    def write(self, data):
        self.content += data


if __name__ == '__main__':
    unittest.main()
#!/usr/bin/env python3
# IARAI: a tiny interactive relational-algebra interpreter over JSON
# relation files, built on cmd.Cmd.

from sys import argv, exit
from cmd import Cmd
from copy import deepcopy
from tabulate import tabulate
import json
import shlex

__author__ = 'OJFord'
__version__ = '1.0dev'


class Interpreter(Cmd):
    """IARAI: A Relational Algebra Interpreter."""

    def __init__(self, relfile):
        super().__init__()
        self.prompt = 'RA> '
        self.intro = '^D to exit. help[ cmd] for more info. Tab completion.'
        self.doc_header = 'Relation may be given as `(jsonfile.relation)`.'
        self.doc_header += 'Alternatively, `$` refers to working relation.'
        # Source relation loaded once; operations mutate a deep copy.
        with open(relfile) as f:
            self.file = json.loads(f.read())
        self.fname = self.file['relation']
        self.working = None
        self.chain = ''  # working command chain

    def write(self):
        # Print the applied-operation chain and the working relation table.
        print( self.chain + ' (' + self.working['relation'] + ')' )
        print( tabulate(self.working['tuples'], headers=self.working['attributes']) )
        print()

    def debugLine(self, line):
        # Run one full command cycle and return (working before, after).
        before = deepcopy(self.working)
        self.postcmd( self.onecmd( self.precmd(line) ), line)
        after = self.working
        return before, after

    @staticmethod
    def chainable(cmd, args):
        # Render a command + its (space-prefixed) args as a chain element.
        return cmd + ('_' + args[1:] if args[1:] else '') + ' '

    def cmdloop(self):
        try:
            return super().cmdloop()
        except KeyboardInterrupt:
            # cancel command without crashing out of interpreter
            self.intro = None
            return self.cmdloop()

    def precmd(self, line):
        # Rewrites a composed line like `p_a s_b=1 (rel)` so that the
        # rightmost operation runs first (via a recursive onecmd call) and
        # the leftmost is returned for the normal Cmd dispatch.
        if not line or line == 'EOF' or line.find('help') == 0:
            return line

        # Everything from '(' (or '$') onwards names the relation argument.
        argsend = line.find('(')
        if argsend == -1:
            argsend = line.find('$')
        rel = line[argsend:]
        cmd = line[0]
        args= shlex.split( line[1:argsend] )

        if len(args) >= 2 or len(args) >= 1 and args[0][0] not in ['_','(','$']:
            # More than one operation on the line: split off this command's
            # own '_'-prefixed argument (if any) from the rest of the chain.
            if args[0][0] == '_':
                rrecurse= ' '.join(args[1:])
                largs = args[0]
            else:
                rrecurse= ' '.join(args)
                largs = ''
            # execute end of line
            self.postcmd( self.onecmd( self.precmd(rrecurse+rel) ), rrecurse+rel )
            # 'restart' to finish up left of line
            return self.precmd(cmd + largs + ' $')
        elif rel == '$':
            if not self.working:
                print('Error: no current working relation, use file first.')
                raise KeyboardInterrupt # hacky af
            else:
                # continue with working relation
                pass
        elif rel == '(' + self.fname + ')':
            # Named source relation: reset the chain and working copy.
            self.chain = ''
            self.working = deepcopy(self.file)
        else:
            print('Error: last argument must be a valid relation.')
            raise KeyboardInterrupt # hacky af

        if args:
            # single string args, just remove leading '_'
            args = ' ' + args[0][1:]
        else:
            args = ''

        self.chain = self.chainable(cmd, args) + self.chain
        return cmd+args

    def default(self, line):
        # undo add command to chain.. unfortunately precmd() executes even on invalid
        cmd, args = line[0], shlex.split(line[1:])
        self.chain = self.chain[ len( self.chainable(cmd, args) ):]
        super().default(line)

    def emptyline(self):
        # overrides super's repeat last line, which would make little sense
        pass

    def do_EOF(self, line):
        """Exits."""
        return True

    def do_p(self, args):
        """
        'p' for pi - project.
        Projects the given attributes of a relation, or all if none specified.
        usage: p [ATTR,...] (REL)
        """
        if args:
            allAtts = self.working['attributes']
            # put in same order
            prjAtts = [ att for att in allAtts if att in args.split(',') ]
            prjAtts+= [ att for att in args.split(',') if att not in prjAtts ]
            # project
            for i,tup in enumerate(self.working['tuples']):
                self.working['tuples'][i] = [ o for j,o in enumerate(tup) if allAtts[j] in prjAtts ]
                # Requested attributes missing from the relation become None.
                self.working['tuples'][i]+= [ None for o in prjAtts if o not in allAtts ]
            self.working['attributes'] = prjAtts
        self.write()

    def do_s(self, args):
        """
        's' for sigma - select.
        Selects from a relation that which satisfies the given proposition.
        usage: s [PROP] (REL)
        """
        if '/\\' in args or '\\/' in args:
            raise Exception('Error: not implemented, use e.g. `s_prop2 s_prop1 $` to AND for now')
        if args:
            # Optional leading negation symbol flips the comparison.
            if args[0] in ['¬', '~', '!']:
                neg = True
                args= args[1:]
            else:
                neg = False
            (att, val) = tuple(args.split('='))
        else:
            att = val = None

        if att:
            tups = self.working['tuples']
            atts = self.working['attributes']
            if neg:
                self.working['tuples'] = [ t for t in tups if t[ atts.index(att) ] != val ]
            else:
                self.working['tuples'] = [ t for t in tups if t[ atts.index(att) ] == val ]

    def do_r(self, args):
        """
        'r' for rho - rename.
        Renames a given attribute of a relation.
        usage: r NEW_NAME/OLD_NAME (REL)
        """
        pairs = [ tuple(p.split('/')) for p in args.split(',') ]
        atts = self.working['attributes']
        for (new, old) in pairs:
            if old in atts:
                self.working['attributes'][ atts.index(old) ] = new


if __name__ == '__main__':
    if len(argv) != 2:
        print('Error: Single argument - JSON relation file - required.')
        print('usage: python iarai.py relation.json')
        exit(1)
    else:
        Interpreter(argv[1]).cmdloop()
import itertools


class RuleCollection(object):
    """Rules grouped into integer levels; levels are applied in ascending
    order, each level expanding the entry list produced by the previous."""

    def __init__(self):
        self.rules = {}

    def add_rule(self, rule, level=0):
        """Register `rule` at the given level (default 0)."""
        self.rules.setdefault(level, []).append(rule)

    def apply(self, transaction, entry):
        """Return `entry` plus everything the rules generate from it."""
        entries = [entry]
        for level in sorted(self.rules):
            level_rules = self.rules[level]
            expanded = []
            for current in entries:
                # Each entry is kept, followed by whatever each rule yields.
                expanded.append(current)
                for rule in level_rules:
                    expanded.extend(rule.apply(transaction, current))
            entries = expanded
        return entries


class Generator(object):
    """Callable wrapper; `+` concatenates two generators' outputs lazily."""

    def __init__(self, generator):
        self.generator = generator

    def __call__(self, *args):
        return self.generator(*args)

    def __add__(self, other):
        @Generator
        def combined(*args):
            return itertools.chain(self(*args), other(*args))
        return combined


# Identity element for `+`: a generator that produces nothing.
Generator.null = Generator(lambda *args: [])


class Rule(object):
    """Pairs a predicate with a generator run on the entry's info."""

    def __init__(self, filter, generator):
        self.filter = filter
        self.generator = generator

    def apply(self, transaction, entry):
        if not self.filter(transaction, entry):
            return []
        return self.generator(entry.info(transaction))


class TransactionRule(Rule):
    """Rule applied to a whole transaction rather than a single entry."""

    def apply(self, transaction):
        if self.filter(transaction, None):
            return self.generator(transaction)
        return transaction
#Author: Maple0
#Github:https://github.com/Maple0
#4th Sep 2016
#Given a collection of intervals, merge all overlapping intervals.
#For example,
#Given [1,3],[2,6],[8,10],[15,18],
#return [1,6],[8,10],[15,18].


class Interval(object):
    """Closed interval [start, end]."""

    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e


class Merge_ResultSet(object):
    """Result of one inner_merge pass. Kept for backward compatibility with
    the legacy fixed-point algorithm below."""

    def __init__(self, is_modified, merged_numbers):
        self.is_modified = is_modified  # whether the pass changed anything
        self.merged_numbers = merged_numbers

class Solution(object):
    # Legacy single-pass merge, preserved for API compatibility.
    # NOTE(review): it mutates the Interval objects in `numbers` in place and
    # may leave overlapping/duplicate spans after one pass -- prefer merge().
    def inner_merge(self,numbers):
        is_modified=False
        length=len(numbers)
        merged_numbers=[numbers[0]]
        for i in range(1,length):
            c_start=numbers[i].start
            c_end=numbers[i].end
            check_status=0
            for merged_num in merged_numbers:
                m_start=merged_num.start
                m_end=merged_num.end
                if c_start >= m_start and c_end <=m_end:
                    check_status=1
                if c_start < m_start and c_end>=m_start and c_end <= m_end:
                    merged_num.start=c_start
                    check_status=2
                elif c_start >= m_start and c_start<=m_end and c_end > m_end:
                    merged_num.end=c_end
                    check_status=2
                elif c_start<= m_start and c_end>=m_end:
                    if merged_num.start!=c_start:
                        merged_num.start=c_start
                        check_status=2
                    if merged_num.end!=c_end:
                        merged_num.end=c_end
                        check_status=2
            if check_status==0:
                merged_numbers.append(numbers[i])
            if check_status==2:
                is_modified=True
        return Merge_ResultSet(is_modified,merged_numbers)

    def merge(self, numbers):
        """Merge overlapping (or touching) intervals.

        Sort by start and sweep once: O(n log n) instead of the previous
        repeated O(n^2) fixed-point passes, which also mutated the caller's
        Interval objects and could return duplicate merged spans. The input
        list and its Interval objects are left untouched; merged spans are
        returned as new Interval instances.
        """
        if len(numbers) < 2:
            return numbers
        ordered = sorted(numbers, key=lambda iv: iv.start)
        merged = [Interval(ordered[0].start, ordered[0].end)]
        for iv in ordered[1:]:
            last = merged[-1]
            if iv.start <= last.end:
                # Overlaps (or touches) the previous span: extend it.
                last.end = max(last.end, iv.end)
            else:
                merged.append(Interval(iv.start, iv.end))
        return merged


num3=[Interval(1,3),Interval(0,6),Interval(7,7),Interval(8,9),Interval(0,10)]
results=Solution().merge(num3)
for x in results:
    print(x.start,x.end)
from collections import defaultdict


class Solution:
    def containsNearbyDuplicate(self, nums, k):
        """
        Return True if nums contains two equal values at indices i, j with
        abs(i - j) <= k.

        :type nums: List[int]
        :type k: int
        :rtype: bool
        """
        # Single pass with a last-seen index per value: O(n) time, O(n) space.
        # The previous version collected every index per value and then
        # sorted each list -- redundant work, since enumerate() already
        # yields indices in increasing order.
        last_seen = {}
        for i, x in enumerate(nums):
            j = last_seen.get(x)
            if j is not None and i - j <= k:
                return True
            last_seen[x] = i
        return False


if __name__ == "__main__":
    sol = Solution()
    x = [-1, -1]
    print(sol.containsNearbyDuplicate(x, 1))
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
.. module:: tomo_recon
   :platform: Unix
   :synopsis: runner for tests using the MPI framework

.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>

"""

import unittest
import tempfile
from savu.test import test_utils as tu
from savu.test.plugin_runner_test import run_protected_plugin_runner


class SimpleTomoTest(unittest.TestCase):
    """End-to-end smoke test of the Savu plugin runner on bundled data."""

    def test_process(self):
        # Minimal options dict for an hdf5-transport, single-process run;
        # output goes to a throwaway temp directory.
        options = {
            "transport": "hdf5",
            "process_names": "CPU0",
            "data_file": tu.get_test_data_path('24737.nxs'),
            "process_file": tu.get_test_data_path('simple_recon_test_process.nxs'),
            "out_path": tempfile.mkdtemp()
            }
        # Passes if the runner completes without raising.
        run_protected_plugin_runner(options)


if __name__ == "__main__":
    unittest.main()
(BLOCK_TAG_START), re.escape(BLOCK_TAG_END)))) name_extract_re = (re.compile('%s\W*block\W+([^\W]+)\W*?%s' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END)))) endblock_re = (re.compile('%s\W*endblock\W*?%s' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END)))) # for string formatting JS_FORMAT = '$(\"%s\" ).on(\"%s\", function() { window.location = \"%s\"});' MALFORMED_CONFIG_FORMAT = 'Mandatory definition of %s missing from config file.\n' class TagError(Exception): pass class OverwriteError(Exception): pass def tokenize(template_string): """ Return a list of tokens from a given template_string. Taken from the Django template code. """ in_tag = False result = [] for bit in tag_re.split(template_string): if bit: result.append((bit, in_tag)) in_tag = not in_tag return result def is_start_tag(token): """ Args: token: the path to the configuration file (string) """ match = name_extract_re.search(token) if match: return True else: return False def get_block_name(token): """ Assumes that token is a start tag. Args: token: the path to the configuration file (string) """ match = name_extract_re.search(token) if match: return match.group(1) else: raise TagError('Failed to extract block name from %s' % token) def is_end_tag(token): """ Args: token: the path to the configuration file (string) """ match = endblock_re.search(token) if match: return True else: return False def generate_page(template, new_page, block_content, clobber=False, flow_info=None): """ Takes in the name of the template, the name of the page to be generated, a dictionary mapping block names to content they should be replaced with, and optional flow information (a map of classes/id's mapping to tuples (event, page to redirect to)). Raises: TagError and Exception. """ if not clobber and os.path.isfile(new_page): raise OverwriteError('%s already exists. 
(use --clobber to overwrite)' % new_page) else: output = open(new_page, 'w') # open the template and tokenize it src = open(template, 'r') tokens = tokenize(src.read()) src.close() tag_depth = 0 repl_tag_depth = 0 # start counting whenever we enter a block that is supposed to be replaced # repl_block is the name of the block to replace, None means we're not in one repl_block = None for token, is_tag in tokens: if not is_tag and not repl_block: output.write(token) elif not is_tag: # but in a block that should be replaced pass elif is_tag and repl_block: # so this could be an unreferenced start tag if is_start_tag(token): if get_block_name(token) in block_content: raise TagError('Cannot replace 2 blocks when one nested inside other, here %s is inside %s.' % (get_block_name(token), repl_block)) else: repl_tag_depth += 1 tag_depth += 1 else: # or an endtag repl_tag_depth -= 1 tag_depth -=1 if repl_tag_depth == 0: # write the replacement text output.write(block_content[repl_block])
repl_block = None else: # is_tag and not repl_block if is_start_tag(token):
tag_depth += 1 if get_block_name(token) in block_content: repl_block = get_block_name(token) repl_tag_depth += 1 else: # endblock tag_depth -= 1 if tag_depth < 0: raise TagError('Found more endtags than start tags.') output.write('<script src=\"https://code.jquery.com/jquery.js\"></script>') output.write('<script>') if flow_info: # TODO (susanctu): this works but SHOULD go before the last html tag for class_or_id in flow_info.keys(): output.write(JS_FORMAT % (class_or_id, flow_info[class_or_id][0], flow_info[class_or_id][1])) output.write('</script>') output.close() def load_config(config_file): """ Opens config_file, which is executed as a python script. (Not exactly safe, but since the user is running this on his/her own computer, we don't bother to do anything more secure.) Checks that the config file defines PAGES and FLOWS. PAGES should be defined as follows: PAGES = {'index1.html': ('index_template.html', {}), 'index2.html': ('index_template.html', {'central': 'replacement text'})} Each key in the PAGES dictionary is a 2-tuple containing the template to generate the page from and dictinary mappng from block names to the text to replace the current block contents with. Blocks should be specified in templates as {% block central %} contents of block, blah, blah, blah {% endblock %} where the block can be given any name without whitespace (here, the block is called 'central') Note that in the above example, index.html is just index_template with all block tags removed but their contents preserved (i.e., if you don't specify a block by name in PAGES but it exists in the template, the page will be generated with just the tags stripped.) 
FLOWS should be defined as follows: FLOWS = {'index1.html': {'.central': ('click', 'index2.html')}, 'index2.html': {'.central': ('click', 'index3.html')}} where each value in FLOWS is a dictionary mapping from classes/ids to 2-tuples of jquery events and the page that we should navigate to when the event happens on an element with that class/id. It is ok for FLOWS to be empty. Args: config_file: the path to the configuration file (string) Returns: Tuple containing PAGES and FLOWS. """ f = open(config_file, 'r') exec(f); try: PAGES except NameError: sys.stderr.write(MALFORMED_CONFIG_FORMAT % 'PAGES') try: FLOWS except NameError: sys.stderr.write(MALFORMED_CONFIG_FORMAT % 'FLOWS') return (PAGES, FLOWS) def main(): # parse arguments parser = argparse.ArgumentParser() parser.add_argument( 'config_file', help='a configfile to tell this tool what to generate') parser.add_argument('--clobber', '-c', action='store_true', help='ok to overwrite files') args = parser.parse_args() # load config PAGES, FLOWS = load_config(args.config_file) # generate each page specified in the config # with appropriate navigation between them for new_page, src_info in PAGES.items(): if len(src_info) != 2: sys.stderr.write( 'Template and fill-in info pair %s contains too many elements.\n' % str(src_info)) exit(1) if type(src_info[1]) is not dict: sys.stderr.write( 'Did not get expected block / replacement pairs: %s.\n' % str(src_info)) exit(1) try: if new_page in FLOWS: generate_page(src_info[0], new_page, src_info[1], args.clobber, FLOWS[new_page]) else: sys.stderr.write( 'WARNING: No FLOWS found for navigation away from %s\n' % new_page) generate_page(src_info[0], new_page, src_info[1], args.clobber) except (TagError, OverwriteError) as e: sys.stderr.write(str(e) + '\n') except Exception as e: sys.stderr.write('Error generating %s, likely due
#!/usr/bin/env python
# Author: Jon Trulson <jtrulson@ics.com>
# Copyright (c) 2017 Intel Corporation.
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# Example: poll an LSM303D accelerometer/magnetometer over I2C and print
# readings until interrupted.

from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_lsm303d as sensorObj

def main():
    # Instantiate an LSM303D instance using default i2c bus and address
    # (the original comment said "BMP250E", which was a copy-paste error).
    sensor = sensorObj.LSM303D()

    ## Exit handlers ##
    # This function stops python from printing a stacktrace when you
    # hit control-C
    def SIGINTHandler(signum, frame):
        raise SystemExit

    # This function lets you run code on exit
    def exitHandler():
        print("Exiting")
        sys.exit(0)

    # Register exit handlers
    atexit.register(exitHandler)
    signal.signal(signal.SIGINT, SIGINTHandler)

    # now output data every 250 milliseconds
    while (1):
        sensor.update()

        data = sensor.getAccelerometer()
        print("Accelerometer x:", data[0], end=' ')
        print(" y:", data[1], end=' ')
        print(" z:", data[2], end=' ')
        print(" g")

        data = sensor.getMagnetometer()
        print("Magnetometer x:", data[0], end=' ')
        print(" y:", data[1], end=' ')
        print(" z:", data[2], end=' ')
        print(" uT")

        print("Temperature: ", sensor.getTemperature())

        print()
        time.sleep(.250)

if __name__ == '__main__':
    main()
from ..base import BaseQuery


class LexiconQuery(BaseQuery):
    """Query over type-level (lexicon) annotations of a corpus that keeps
    the corpus hierarchy metadata in sync with subset and property
    changes made through the query."""

    def __init__(self, corpus, to_find):
        super(LexiconQuery, self).__init__(corpus, to_find)

    def create_subset(self, label):
        """
        Tag the matched types with the subset ``label`` and register the
        label on the corpus hierarchy if it is not already known.

        (BUG FIX: the original docstring -- "Set properties of the
        returned tokens." -- was copied from set_properties and
        described the wrong operation.)
        """
        labels_to_add = []
        if self.to_find.node_type not in self.corpus.hierarchy.subset_types or \
                label not in self.corpus.hierarchy.subset_types[self.to_find.node_type]:
            labels_to_add.append(label)
        super(LexiconQuery, self).create_subset(label)
        if labels_to_add:
            self.corpus.hierarchy.add_type_labels(
                self.corpus, self.to_find.node_type, labels_to_add)
            self.corpus.encode_hierarchy()

    def remove_subset(self, label):
        """Remove the subset ``label`` from all matched types and from
        the hierarchy metadata."""
        super(LexiconQuery, self).remove_subset(label)
        self.corpus.hierarchy.remove_type_labels(
            self.corpus, self.to_find.node_type, [label])
        # NOTE(review): unlike create_subset, this does not call
        # corpus.encode_hierarchy() afterwards -- confirm whether that
        # asymmetry is intentional.

    def set_properties(self, **kwargs):
        """
        Set properties of the returned types.

        A value of None removes the property; any property name not yet
        registered on the hierarchy is added with the value's type.
        """
        props_to_remove = []
        props_to_add = []
        for k, v in kwargs.items():
            if v is None:
                props_to_remove.append(k)
            else:
                if not self.corpus.hierarchy.has_type_property(self.to_find.node_type, k):
                    props_to_add.append((k, type(kwargs[k])))
        super(LexiconQuery, self).set_properties(**kwargs)
        if props_to_add:
            self.corpus.hierarchy.add_type_properties(
                self.corpus, self.to_find.node_type, props_to_add)
        if props_to_remove:
            self.corpus.hierarchy.remove_type_properties(
                self.corpus, self.to_find.node_type, props_to_remove)
/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import os import gzip import tempfile from functools import reduce import rdflib from rdflib.namespace import XSD import logging logger = logging.getLogger() def uri_split(uri): lname = uri.split('/')[-1].split('#')[-1] ns = uri[:len(uri)-len(lname)] return ns, lname class RDFNode(object): def __init__(self, nd): self._valid = True self._node = nd def __eq__(self, other): res = False if isinstance(other, RDFNode): res = self._node == other._node return res def is_valid(self): return self._valid def as_node(self): return self._node class Resource(RDFNode): def __init__(self, uri=None, **args): nd = args.get('node', None) if nd is not None: RDFNode.__init__(self, nd) else: if uri is not None: try: RDFNode.__init__(self, rdflib.term.URIRef(uri)) except Exception: logger.warning('uri="%s"(%s)' % (uri, str(type(uri)))) raise else: RDFNode.__init__(self, rdflib.term.BNode()) # blank node def __eq__(self, other): res = False if isinstance(other, Resource): if isinstance(self._node, rdflib.term.URIRef) \ and isinstance(other._node, rdflib.term.URIRef): res = self.get_uri() == other.get_uri() else: res = self._node == other._node return res def __lt__(self, other): return str(self.g
et_uri()) < str(other.get_uri()) def __gt__(self, other): return str(self.get_uri()) > str(other.get_uri()) def __le__(self, other): self.__eq__(other) or self.__lt__(other) def __ge__(self, other): self.__eq__(other) or self.__gt__(other) def __hash__(self): return str(self.get_uri()).__hash__() def __str__(self): return '<%s>' % self.get_uri() def get_uri(self): return str(str(self.as_node()))
def get_namespane(self): ns, ln = uri_split(self.get_uri()) return ns def get_local_name(self): ns, ln = uri_split(self.get_uri()) return ln class Literal(RDFNode): def __init__(self, literal="", **args): nd = args.get('node', None) if nd is not None: RDFNode.__init__(self, nd) else: RDFNode.__init__(self, rdflib.Literal(literal, **args)) def __eq__(self, other): res = False if isinstance(other, Literal): res = self._node.eq(other._node) return res def __str__(self): return '"%s"' % self.get_content() def get_content(self): return self._node.value def make_literal(x): lit = None if isinstance(x, bool): lit = Literal(literal=str(x).lower(), datatype=XSD.boolean) elif isinstance(x, int): if x >= 0: lit = Literal(literal=str(x), datatype=XSD.nonNegativeInteger) else: lit = Literal(literal=str(x), datatype=XSD.integer) elif isinstance(x, float): lit = Literal(literal=str(x), datatype=XSD.double) # elif isinstance(x, str): # lit = Literal(literal=x.encode('utf-8')) else: lit = Literal(literal=str(x)) return lit class Predicate(Resource): def __init__(self, ns=None, lname=None, **args): self._lname = None self._ns = None uri = None node = args.get('node', None) if ns is None or lname is None: uri = args.get('uri', None) if uri is None: if node is not None: if isinstance(node, rdflib.term.URIRef): uri = str(str(node)) if uri is not None: self._ns, self._lname = uri_split(uri) else: self._ns = ns self._lname = lname uri = ns + lname Resource.__init__(self, uri, **args) def __str__(self): return '<%s>' % self.get_uri() def get_namespace(self): return self._ns def get_local_name(self): return self._lname class Statement(object): def __init__(self, subject=None, predicate=None, object=None, **args): try: stmt = args['statement'] self.subject = stmt.subject self.predicate = stmt.predicate self.object = stmt.object self._stmt = stmt._stmt except KeyError: self.subject = subject self.predicate = predicate self.object = object s = None p = None o = None if isinstance(subject, 
Resource): s = subject.as_node() if isinstance(predicate, Predicate): p = predicate.as_node() if isinstance(object, RDFNode): o = object.as_node() self._stmt = (s, p, o) def __eq__(self, other): res = False if isinstance(other, Statement): res = reduce(lambda x, y: x and y, [self.subject == other.subject, self.predicate == other.predicate, self.object == other.object]) return res class Graph(object): def __init__(self, ns_tbl, large=False): if large: self._model = rdflib.graph.Graph('BerkeleyDB') else: self._model = rdflib.graph.Graph('Memory') self._g_pred_map = {} self._pred_tbl = {} self.l_true = Literal('true') self.l_false = Literal('false') self.namespace_tbl = ns_tbl def set_namespace(self, prefix, uri): self.namespace_tbl[prefix] = uri def contains(self, s, p, o): stmt = self._create_statement(s, p, o) return (stmt in self._model) def find_statements(self, t): return self._model.triples(t) def size(self): return len(self._model) def _add(self, subj, pred, obj): self._model.add((subj.as_node(), pred.as_node(), obj.as_node())) def _create_statement(self, subj, pred, obj): s = None p = None o = None if subj: s = subj.as_node() if pred: p = pred.as_node() if obj: o = obj.as_node() return (s, p, o) def _guess_fmt(self, path): fmt = '' if path.endswith('.nt'): fmt = 'nt' elif path.endswith('.ttl'): fmt = 'turtle' elif path.endswith('.rdf'): fmt = 'xml' if path.endswith('.nt.gz'): fmt = 'nt' elif path.endswith('.ttl.gz'): fmt = 'turtle' elif path.endswith('.rdf.gz'): fmt = 'xml' return fmt def _mktemp(self): (fd, path) = tempfile.mkstemp() os.close(fd) return path def _gzipped(self, path): return path.endswith('.gz') def _gzip(self, from_file, to_file): f_from = open(from_file, 'rb') f_to = gzip.open(to_file, 'wb') f_to.writelines(f_from) f_to.close() f_from.close() def _gunzip(self, from_file, to_file): f_from = gzip.open(from_file, 'rb') f_to = open(to_file, 'wb') f_to.writelines(f_from) f_to.close() f_from.close() def write(self, path, fmt='', base_uri=None): 
if fmt == '': fmt = self._guess_fmt(path) gzipped_path = None if self._gzipped(path): gzipped_path = path tmp = self._mktemp() path = tmp for (prefix, uri) in self.namespace_tbl.items(): self._model.bind(prefix, uri) logger.info('writing to "%s"
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import webnotes


def make_test_records(verbose):
    """Create the standard fixture set of test Account records for the
    two test companies ("_Test Company" / "_Test Company 1").

    :param verbose: accepted for interface compatibility; not used here.
    :return: the objects returned by make_test_objects.
    """
    from webnotes.test_runner import make_test_objects

    # (account_name, parent_account, group_or_ledger)
    accounts = [
        ("_Test Account Bank Account", "Bank Accounts", "Ledger"),
        ("_Test Account Stock Expenses", "Direct Expenses", "Group"),
        ("_Test Account Shipping Charges", "_Test Account Stock Expenses", "Ledger"),
        ("_Test Account Customs Duty", "_Test Account Stock Expenses", "Ledger"),
        ("_Test Account Tax Assets", "Current Assets", "Group"),
        ("_Test Account VAT", "_Test Account Tax Assets", "Ledger"),
        ("_Test Account Service Tax", "_Test Account Tax Assets", "Ledger"),
        ("_Test Account Reserves and Surplus", "Current Liabilities", "Ledger"),
        ("_Test Account Cost for Goods Sold", "Expenses", "Ledger"),
        ("_Test Account Excise Duty", "_Test Account Tax Assets", "Ledger"),
        ("_Test Account Education Cess", "_Test Account Tax Assets", "Ledger"),
        ("_Test Account S&H Education Cess", "_Test Account Tax Assets", "Ledger"),
        ("_Test Account CST", "Direct Expenses", "Ledger"),
        ("_Test Account Discount", "Direct Expenses", "Ledger"),

        # related to Account Inventory Integration
        ("_Test Account Stock In Hand", "Current Assets", "Ledger"),
        ("_Test Account Fixed Assets", "Current Assets", "Ledger"),
    ]

    def account_doc(name, parent, kind, company, abbr):
        # One Account document in the shape make_test_objects expects.
        return [{
            "doctype": "Account",
            "account_name": name,
            "parent_account": parent + " - " + abbr,
            "company": company,
            "group_or_ledger": kind,
        }]

    for company, abbr in [["_Test Company", "_TC"], ["_Test Company 1", "_TC1"]]:
        test_objects = make_test_objects(
            "Account",
            [account_doc(name, parent, kind, company, abbr)
             for name, parent, kind in accounts])

    # NOTE(review): as in the original, only the objects created for the
    # last company iterated are returned -- confirm this is intended.
    return test_objects
port_id=port_id) except neutron_client_exc.Unauthorized: raise exception.Forbidden() except neutron_client_exc.NeutronClientException as exc: msg = (_("Failed to access port %(port_id)s: %(reason)s") % {'port_id': port_id, 'reason': exc}) raise exception.NovaException(message=msg) def _get_instance_nw_info(self, context, instance, networks=None, port_ids=None, admin_client=None, preexisting_port_ids=None, **kwargs): # NOTE(danms): This is an inner method intended to be called # by other code that updates instance nwinfo. It *must* be # called with the refresh_cache-%(instance_uuid) lock held! LOG.debug('_get_instance_nw_info()', instance=instance) # Ensure that we have an up to date copy of the instance info cache. # Otherwise multiple requests could collide and cause cache # corruption. compute_utils.refresh_info_cache_for_instance(context, instance) nw_info = self._build_network_info_model(context, instance, networks, port_ids, admin_client, preexisting_port_ids) return network_model.NetworkInfo.hydrate(nw_info) def _gather_port_ids_and_networks(self, context, instance, networks=None, port_ids=None): """Return an instance's complete list of port_ids and networks.""" if ((networks is None and port_ids is not None) or (port_ids is None and networks is not None)): message = ("This method needs to be called with either " "networks=None and port_ids=None or port_ids and " " networks as not none.") raise exception.NovaException(message=message) ifaces = compute_utils.get_nw_info_for_instance(instance) # This code path is only done when refreshing the network_cache if port_ids is None: port_ids = [iface['id'] for iface in ifaces] net_ids = [iface['network']['id'] for iface in ifaces] if networks is None: networks = self._get_available_networks(context, instance.project_id, net_ids) # an interface was added/removed from instance. else:
# Since networks does not contain the existing networks on the # instance we use their values from the cache and add it. networks = networks + [ {'id': iface['network']['id'],
'name': iface['network']['label'], 'tenant_id': iface['network']['meta']['tenant_id']} for iface in ifaces] # Include existing interfaces so they are not removed from the db. port_ids = [iface['id'] for iface in ifaces] + port_ids return networks, port_ids @base_api.refresh_cache def add_fixed_ip_to_instance(self, context, instance, network_id): """Add a fixed ip to the instance from specified network.""" neutron = get_client(context) search_opts = {'network_id': network_id} data = neutron.list_subnets(**search_opts) ipam_subnets = data.get('subnets', []) if not ipam_subnets: raise exception.NetworkNotFoundForInstance( instance_id=instance.uuid) zone = 'compute:%s' % instance.availability_zone search_opts = {'device_id': instance.uuid, 'device_owner': zone, 'network_id': network_id} data = neutron.list_ports(**search_opts) ports = data['ports'] for p in ports: for subnet in ipam_subnets: fixed_ips = p['fixed_ips'] fixed_ips.append({'subnet_id': subnet['id']}) port_req_body = {'port': {'fixed_ips': fixed_ips}} try: neutron.update_port(p['id'], port_req_body) return self._get_instance_nw_info(context, instance) except Exception as ex: msg = ("Unable to update port %(portid)s on subnet " "%(subnet_id)s with failure: %(exception)s") LOG.debug(msg, {'portid': p['id'], 'subnet_id': subnet['id'], 'exception': ex}) raise exception.NetworkNotFoundForInstance( instance_id=instance.uuid) @base_api.refresh_cache def remove_fixed_ip_from_instance(self, context, instance, address): """Remove a fixed ip from the instance.""" neutron = get_client(context) zone = 'compute:%s' % instance.availability_zone search_opts = {'device_id': instance.uuid, 'device_owner': zone, 'fixed_ips': 'ip_address=%s' % address} data = neutron.list_ports(**search_opts) ports = data['ports'] for p in ports: fixed_ips = p['fixed_ips'] new_fixed_ips = [] for fixed_ip in fixed_ips: if fixed_ip['ip_address'] != address: new_fixed_ips.append(fixed_ip) port_req_body = {'port': {'fixed_ips': new_fixed_ips}} 
try: neutron.update_port(p['id'], port_req_body) except Exception as ex: msg = ("Unable to update port %(portid)s with" " failure: %(exception)s") LOG.debug(msg, {'portid': p['id'], 'exception': ex}) return self._get_instance_nw_info(context, instance) raise exception.FixedIpNotFoundForSpecificInstance( instance_uuid=instance.uuid, ip=address) def _get_port_vnic_info(self, context, neutron, port_id): """Retrieve port vnic info Invoked with a valid port_id. Return vnic type and the attached physical network name. """ phynet_name = None port = self._show_port(context, port_id, neutron_client=neutron, fields=['binding:vnic_type', 'network_id']) vnic_type = port.get('binding:vnic_type', network_model.VNIC_TYPE_NORMAL) if vnic_type != network_model.VNIC_TYPE_NORMAL: net_id = port['network_id'] net = neutron.show_network(net_id, fields='provider:physical_network').get('network') phynet_name = net.get('provider:physical_network') return vnic_type, phynet_name def create_pci_requests_for_sriov_ports(self, context, pci_requests, requested_networks): """Check requested networks for any SR-IOV port request. Create a PCI request object for each SR-IOV port, and add it to the pci_requests object that contains a list of PCI request object. 
""" if not requested_networks: return neutron = get_client(context, admin=True) for request_net in requested_networks: phynet_name = None vnic_type = network_model.VNIC_TYPE_NORMAL if request_net.port_id: vnic_type, phynet_name = self._get_port_vnic_info( context, neutron, request_net.port_id) pci_request_id = None if vnic_type != network_model.VNIC_TYPE_NORMAL: request = objects.InstancePCIRequest( count=1, spec=[{pci_request.PCI_NET_TAG: phynet_name}], request_id=str(uuid.uuid4())) pci_requests.requests.append(request) pci_request_id = request.request_id # Add pci_request_id into the requested network request_net.pci_request_id = pci_request_id def _ports_needed_per_instance(self, context, neutron, requested_networks): ports_needed_per_instance = 0 if
te the population with a list of individuals :param individuals: <List<Individual>> List of individuals to use :return: None """ for ind in individuals: self.append(copy.deepcopy(ind)) def to_json(self): return json.dumps(self, default=lambda o: o.__dict__) def __repr__(self): s = '' for idx, ind in enumerate(self): s += repr(ind) + '\n' return s class SubPopulation(list): def __init__(self): super(SubPopulation, self).__init__() def populate(self, individuals): """ Populate the sub-population with a list of individuals :param individuals: <List<Individual>> List of individuals to use :return: None """ for ind in individuals: self.append(copy.deepcopy(ind)) class Archive(object): def __init__(self): """ Optimization run archive of non-dominated solutions at each generation which is used to predict convergence of the algorithm. :return: None """ # Tracks the archive of non-dominated solutions self._archive = [] # Tracks the size of the non-dominated archive self._idx = 0 # Tracks the consolidation ratio self._consolidation_ratio = [] # Population size self._population_size = None def initialize(self, population): """ Initialize the archive :param population: <Population> Individuals to initialize the population with :return: None """ self._archive.append(nondominated_sort(population, len(population), first_front_only=True)[0]) self._consolidation_ratio.append(0) self._population_size = len(population) def update(self, population): """ Update the archive :param population: <Population> Population to update the archive with :return: None """ nondominated_solutions = nondominated_sort(copy.deepcopy(population), len(population), first_front_only=True)[0] archive_copy = copy.deepcopy(self._archive[self._idx]) archive_copy = archive_copy + nondominated_solutions nondominated_solutions = nondominated_sort(archive_copy, len(archive_copy), first_front_only=True)[0] # Remove copies nondominated_solutions = list(set(nondominated_solutions)) # Update the archive 
self._archive.append(nondominated_solutions) self._idx += 1 self._consolidation_ratio.append(len(self._archive[self._idx])/float(2*self._population_size)) def get_consolidation_ratio(self): """ Return the most recent calculated consolidation ratio :return: <float> Current consolidation ratio value """ return self._consolidation_ratio[self._idx] def get_consolidation_ratio_history(self): """ Return the consolidation ratio history :return: <List<float>> Consolidation ratio history """ return self._consolidation_ratio def get_archive(self): """ Get the saved archive at each update :return: <List<Population>> archive """ return self._archive class Metadata: def __init__(self): self.end_msg = None self.fevals = None self.gen = None def __repr__(self): s = '\n' + self.end_msg + '\n' s += 'fevals: ' + repr(self.fevals) + '\n' s += 'gen: ' + repr(self.gen) + '\n' return s class History(list): def __init__(self): super(History, self).__init__() def add_point(self, individual): """ Add a design point to the history :param individual: <Individual> Individual to add to the history :return: None """ self.append(individual) class Variable(object): def __init__(self, name, lower, upper): """ Data structure that contains decision variable information. :param name: <string> Reference name for the decision variable :param lower: <float> Lower bound of the decision variable :param upper: <float> Upper bound of the decision variable :return: None """ self.name = name self.lower = lower self.upper = upper class Constraint(object): def __init__(self, name): """ Data structure that contains constraint variable information. :param name: <string> Reference name for the constraint variable :return: None """ self.name = name class Objective(object): def __init__(self, name): """ Data structure that contains objective variable information. 
:param name: <string> Reference name for the objective variable :return: None """ self.name = name class AlgorithmException(Exception): def __init__(self, message): """ Exception class that gets raised when an error occurs with the algorithm. :param message: Error message to display :return: None """ Exception.__init__(self, message) # Utility functions for dealing with algorithms containing sub-populations def flatten_population(population_list): """ Combine each of the sub-populations into a single global population :param population_list: <List<Population>> :return: <List<Individual>> """ global_pop = Population() for pop in population_list: global_pop.append(pop[:]) return global_pop # Genetic Operators def mutation(population, n_dim, lower, upper, eta_m, p_mut): """ Performs bounded polynomial mutation on the population. :param population: <Population> Population to perform mutation on :param n_dim: <int> Number of decision variable dimensions :param lower: <list<float>> List of decision variable lower bound values :param upper: <list<float>> List of upper bound decision variable values :param eta_m: <float> Mutation index :param p_mut: <float> Mutation probability :return: None """ for ind in population: mutate(ind, n_dim, lower, upper, eta_m, p_mut) def mutate(individual, n_dim, lower, upper, eta_m, p_mut): """ Performs bounded polynomial mutation on an individual. :param individual: <Individual> Individual to perform mutation on :param n_dim: <int> Number of decision variable dimension :param lower: <list<floa
t>> List of decision variable lower bound values. :param upper: <list<float>> List of decision variable upper bound values. :param eta_m: <float> Mutation index :param p_mut: <float>
Mutation probability :return: None """ for i, xl, xu in zip(xrange(n_dim), lower, upper): if random.random() <= p_mut: x = copy.deepcopy(individual.x[i]) delta_1 = (x - xl) / (xu - xl) delta_2 = (xu - x) / (xu - xl) rand = random.random() mut_pow = 1.0 / (eta_m + 1.0) if rand < 0.5: xy = 1.0 - delta_1 val = 2.0 * rand + (1.0 - 2.0*rand)*(xy**(eta_m + 1)) delta_q = val**mut_pow - 1.0 else: xy = 1.0 - delta_2 val = 2.0 * (1.0 - rand) + 2.0 * (rand - 0.5)*(xy**(eta_m + 1)) delta_q = 1.0 - val**mut_pow x += delta_q * (xu - xl) x = min(max(x, xl), xu) individual.x[i] = x def crossover(population, n_dim, lower, upper, eta_c, p_cross): """ Perform simulated binary crossover on the population. :param population: <Population> Population to perform crossover on. :param n_dim: <int> Number of decision variable dimensions. :param lower: <list<float>> List of decision variable lower bound values. :param upper: <list<float>> List of decision variable upper bound values. :param eta_c: <float> Crossover index. :param p_cross: <float> Crossover probability. :return: <Population> Child population """ child_pop = Population() child_pop.populate(population) for ind1, ind2 in zip(child_pop[::2], child_
import unittest

from mock import patch
from nose.tools import assert_equal

from tests.tools import create_mock_json
from twilio.rest.resources.pricing.messaging_countries import (
    MessagingCountries
)

AUTH = ("AC123", "token")
BASE_URI = "https://pricing.twilio.com/v1"


class MessagingCountriesTest(unittest.TestCase):
    """Unit tests for the MessagingCountries pricing resource."""

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_messaging_countries(self, request):
        # Serve a canned two-country list response.
        response = create_mock_json(
            'tests/resources/pricing/messaging_countries_list.json')
        response.status_code = 200
        request.return_value = response

        resource = MessagingCountries(BASE_URI + "/Messaging", AUTH)
        result = resource.list()

        assert_equal(result[0].iso_country, "AT")
        assert_equal(len(result), 2)
        request.assert_called_with(
            "GET",
            "{0}/Messaging/Countries".format(BASE_URI),
            auth=AUTH,
            use_json_extension=False,
            params={}
        )

    @patch('twilio.rest.resources.base.make_twilio_request')
    def test_messaging_country(self, request):
        # Serve a canned single-country (US) response.
        response = create_mock_json(
            'tests/resources/pricing/messaging_countries_instance.json')
        response.status_code = 200
        request.return_value = response

        resource = MessagingCountries(BASE_URI + "/Messaging", AUTH)
        result = resource.get('US')

        assert_equal(result.iso_country, "US")
        assert_equal(result.price_unit, "usd")

        outbound = result.outbound_sms_prices[0]
        assert_equal(outbound['mcc'], "311")
        assert_equal(outbound['mnc'], "484")
        assert_equal(outbound['carrier'], "Verizon")

        # (number_type, base_price, current_price) in response order.
        expected_outbound = [
            ("mobile", "0.0075", "0.0070"),
            ("local", "0.0075", "0.0070"),
            ("shortcode", "0.01", "0.01"),
            ("toll-free", "0.0075", "0.0075"),
        ]
        for price, (number_type, base, current) in zip(outbound['prices'],
                                                       expected_outbound):
            assert_equal(price['number_type'], number_type)
            assert_equal(price['base_price'], base)
            assert_equal(price['current_price'], current)

        expected_inbound = [
            ("local", "0.0075", "0.0075"),
            ("shortcode", "0.0075", "0.005"),
            ("toll-free", "0.0075", "0.0075"),
        ]
        for price, (number_type, base, current) in zip(
                result.inbound_sms_prices, expected_inbound):
            assert_equal(price['number_type'], number_type)
            assert_equal(price['base_price'], base)
            assert_equal(price['current_price'], current)

        request.assert_called_with(
            "GET",
            "{0}/Messaging/Countries/US".format(BASE_URI),
            auth=AUTH,
            use_json_extension=False,
        )
"""
This plugin gives the power of Selenium to nosetests
by providing a WebDriver object for the tests to use.
"""

from nose.plugins import Plugin
from pyvirtualdisplay import Display
from seleniumbase.core import proxy_helper
from seleniumbase.fixtures import constants


class SeleniumBrowser(Plugin):
    """
    The plugin for Selenium tests. Takes in key arguments and then
    creates a WebDriver object. All arguments are passed to the tests.

    The following command line options are available to the tests:
    self.options.browser -- the browser to use (--browser)
    self.options.server -- the server used by the test (--server)
    self.options.port -- the port used by the test (--port)
    self.options.proxy -- designates the proxy server:port to use. (--proxy)
    self.options.headless -- the option to run headlessly (--headless)
    self.options.demo_mode -- the option to slow down Selenium (--demo_mode)
    self.options.demo_sleep -- Selenium action delay in DemoMode (--demo_sleep)
    self.options.highlights -- # of highlight animations shown (--highlights)
    self.options.message_duration -- Messenger alert time (--message_duration)
    self.options.js_checking_on -- option to check for js errors (--check_js)
    self.options.ad_block -- the option to block some display ads (--ad_block)
    self.options.verify_delay -- delay before MasterQA checks (--verify_delay)
    self.options.timeout_multiplier -- increase defaults (--timeout_multiplier)
    """
    name = 'selenium'  # Usage: --with-selenium

    def options(self, parser, env):
        # Register all SeleniumBase command-line options with nose.
        super(SeleniumBrowser, self).options(parser, env=env)
        parser.add_option(
            '--browser', action='store',
            dest='browser',
            choices=constants.ValidBrowsers.valid_browsers,
            default=constants.Browser.GOOGLE_CHROME,
            help="""Specifies the web browser to use. Default: Chrome.
                    If you want to use Firefox, explicitly indicate that.
                    Example: (--browser=firefox)""")
        parser.add_option(
            '--browser_version', action='store',
            dest='browser_version',
            default="latest",
            help="""The browser version to use. Explicitly select
                    a version number or use "latest".""")
        parser.add_option(
            '--cap_file', action='store',
            dest='cap_file',
            default=None,
            help="""The file that stores browser desired capabilities
                    for BrowserStack or Sauce Labs web drivers.""")
        parser.add_option(
            '--server', action='store', dest='servername',
            default='localhost',
            help="""Designates the Selenium Grid server to use.
                    Default: localhost.""")
        parser.add_option(
            '--port', action='store', dest='port',
            default='4444',
            help="""Designates the Selenium Grid port to use.
                    Default: 4444.""")
        parser.add_option(
            '--proxy', action='store',
            dest='proxy_string',
            default=None,
            help="""Designates the proxy server:port to use.
                    Format: servername:port.  OR
                            username:password@servername:port  OR
                            A dict key from proxy_list.PROXY_LIST
                    Default: None.""")
        parser.add_option(
            '--headless', action="store_true",
            dest='headless',
            default=False,
            help="""Using this makes Webdriver run headlessly,
                    which is required on headless machines.""")
        parser.add_option(
            '--demo_mode', action="store_true",
            dest='demo_mode',
            default=False,
            help="""Using this slows down the automation so that
                    you can see what it's actually doing.""")
        parser.add_option(
            '--demo_sleep', action='store', dest='demo_sleep',
            default=None,
            help="""Setting this overrides the Demo Mode sleep
                    time that happens after browser actions.""")
        parser.add_option(
            '--highlights', action='store',
            dest='highlights', default=None,
            help="""Setting this overrides the default number of
                    highlight animation loops to have per call.""")
        parser.add_option(
            '--message_duration', action="store",
            dest='message_duration', default=None,
            help="""Setting this overrides the default time that
                    messenger notifications remain visible when reaching
                    assert statements during Demo Mode.""")
        parser.add_option(
            '--check_js', action="store_true",
            dest='js_checking_on',
            default=False,
            help="""The option to check for JavaScript errors after
                    every page load.""")
        parser.add_option(
            '--ad_block', action="store_true",
            dest='ad_block_on',
            default=False,
            help="""Using this makes WebDriver block display ads
                    that are defined in ad_block_list.AD_BLOCK_LIST.""")
        parser.add_option(
            '--verify_delay', action='store',
            dest='verify_delay', default=None,
            help="""Setting this overrides the default wait time
                    before each MasterQA verification pop-up.""")
        # BUG FIX: "overide" -> "override" in the help text below.
        parser.add_option(
            '--timeout_multiplier', action='store',
            dest='timeout_multiplier',
            default=None,
            help="""Setting this overrides the default timeout
                    by the multiplier when waiting for page elements.
                    Unused when tests override the default value.""")

    def configure(self, options, conf):
        # Store parsed options; enable unconditionally so test classes
        # that inherit BaseCase get the attributes in beforeTest().
        super(SeleniumBrowser, self).configure(options, conf)
        self.enabled = True  # Used if test class inherits BaseCase
        self.options = options
        self.headless_active = False  # Default setting
        proxy_helper.remove_proxy_zip_if_present()

    def beforeTest(self, test):
        # Copy every command-line option onto the test instance.
        test.test.browser = self.options.browser
        test.test.cap_file = self.options.cap_file
        test.test.headless = self.options.headless
        test.test.servername = self.options.servername
        test.test.port = self.options.port
        test.test.proxy_string = self.options.proxy_string
        test.test.demo_mode = self.options.demo_mode
        test.test.demo_sleep = self.options.demo_sleep
        test.test.highlights = self.options.highlights
        test.test.message_duration = self.options.message_duration
        test.test.js_checking_on = self.options.js_checking_on
        test.test.ad_block_on = self.options.ad_block_on
        test.test.verify_delay = self.options.verify_delay  # MasterQA
        test.test.timeout_multiplier = self.options.timeout_multiplier
        test.test.use_grid = False
        if test.test.servername != "localhost":
            # Use Selenium Grid (Use --server=127.0.0.1 for localhost Grid)
            test.test.use_grid = True
        if self.options.headless:
            self.display = Display(visible=0, size=(1920, 1200))
            self.display.start()
            self.headless_active = True
        # The driver will be received later
        self.driver = None
        test.test.driver = self.driver

    def finalize(self, result):
        """ This runs after all tests have completed with nosetests. """
        proxy_helper.remove_proxy_zip_if_present()

    def afterTest(self, test):
        try:
            # If the browser window is still open, close it now.
            self.driver.quit()
        except AttributeError:
            pass
        except Exception:
            pass
        if self.options.headless:
            if self.headless_active:
                self.display.stop()
def logging_config(level):
    """Build a ``logging.config.dictConfig``-style configuration.

    A single console (stream) handler filtered at ``level`` is attached
    to the root logger; the root logger itself stays at DEBUG, so the
    handler level is the effective filter.

    :param level: logging level (name or number) for the console handler
    :return: configuration dict suitable for dictConfig()
    """
    console_handler = {
        'class': 'logging.StreamHandler',
        'level': level,
        'formatter': 'simple',
    }
    return {
        'version': 1,
        'propagate': True,
        'disable_existing_loggers': False,
        'formatters': {
            'simple': {
                'format': '%(asctime)s [%(levelname)s] %(message)s'
            }
        },
        'handlers': {'console': console_handler},
        # Per-logger overrides (e.g. 'asyncio') were commented out in
        # the original and are intentionally not included here.
        'root': {
            'level': 'DEBUG',
            'handlers': ['console'],
        },
    }
# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html

import json
import codecs
import scrapy
from collections import OrderedDict
from scrapy.pipelines.images import ImagesPipeline
from scrapy.exceptions import DropItem
from KuaikanSpider.items import *


class KuaikanspiderPipeline(object):
    """Default no-op pipeline: passes every item through unchanged."""

    def process_item(self, item, spider):
        return item


class JsonPipeline(object):
    """Write each scraped image record as one JSON object per line to
    newimg.json."""

    def __init__(self):
        # 'wb' with an explicit encoding mirrors the original; codecs.open
        # handles the text encoding itself.
        self.file = codecs.open('newimg.json', 'wb', encoding='utf-8')

    def process_item(self, item, spider):
        line = ''
        picindex = 1
        for image_title, image_url, image_path in zip(item['image_titles'],
                                                      item['image_urls'],
                                                      item['image_paths']):
            new_item = ImgSingleItem()
            # BUG FIX: the original assigned item['image_character'] here and
            # then immediately overwrote it with picindex, so the first
            # assignment was dead code and has been removed.
            # NOTE(review): the picindex write probably belongs in a separate
            # index field -- confirm against ImgSingleItem's declared fields.
            new_item['image_character'] = picindex
            new_item['image_title'] = image_title
            new_item['image_url'] = image_url
            new_item['image_path'] = image_path
            line += json.dumps(OrderedDict(new_item),
                               ensure_ascii=False, sort_keys=False) + '\n'
            picindex = picindex + 1
        self.file.write(line)
        return item


class ImgDownloadPipeline(ImagesPipeline):
    """Download every URL in item['image_urls'] and record the stored
    file paths back onto the item."""

    def get_media_requests(self, item, info):
        if item['image_urls'] is not None:
            for image_url in item['image_urls']:
                yield scrapy.Request(image_url)

    def item_completed(self, results, item, info):
        # Keep only the successfully downloaded paths; drop the item if
        # nothing was fetched.
        image_paths = [x['path'] for ok, x in results if ok]
        if not image_paths:
            raise DropItem('Item contains no images')
        item['image_paths'] = image_paths
        return item
import json

from django.test.client import Client, RequestFactory
from django.test.utils import override_settings

from courseware.models import XModuleContentField
from courseware.tests.factories import ContentFactory
from courseware.tests.tests import TEST_DATA_MONGO_MODULESTORE
import instructor.hint_manager as view
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory


@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
class HintManagerTest(ModuleStoreTestCase):
    # Integration tests for the instructor "crowdsourced hinter" management
    # view.  Python 2 style (bare `print` statements) matches the rest of
    # the codebase.  Hint state is stored as JSON in XModuleContentField
    # rows keyed by (field_name, definition_id).

    def setUp(self):
        """
        Makes a course, which will be the same for all tests.
        Set up mako middleware, which is necessary for template rendering to
        happen.
        """
        self.course = CourseFactory.create(org='Me', number='19.002',
                                           display_name='test_course')
        self.url = '/courses/Me/19.002/test_course/hint_manager'
        # Staff user, so the management view is accessible by default.
        self.user = UserFactory.create(username='robot', email='robot@edx.org',
                                       password='test', is_staff=True)
        self.c = Client()
        self.c.login(username='robot', password='test')
        self.problem_id = 'i4x://Me/19.002/crowdsource_hinter/crowdsource_hinter_001'
        self.course_id = 'Me/19.002/test_course'
        # Seed approved hints: answer -> {hint_pk: [text, votes]}.
        ContentFactory.create(field_name='hints',
                              definition_id=self.problem_id,
                              value=json.dumps({'1.0': {'1': ['Hint 1', 2],
                                                        '3': ['Hint 3', 12]},
                                                '2.0': {'4': ['Hint 4', 3]}
                                                }))
        # Seed the moderation queue with one pending hint.
        ContentFactory.create(field_name='mod_queue',
                              definition_id=self.problem_id,
                              value=json.dumps({'2.0': {'2': ['Hint 2', 1]}}))
        # Next primary key to hand out for new hints.
        ContentFactory.create(field_name='hint_pk',
                              definition_id=self.problem_id,
                              value=5)
        # Mock out location_to_problem_name, which ordinarily accesses the modulestore.
        # (I can't figure out how to get fake structures into the modulestore.)
        view.location_to_problem_name = lambda course_id, loc: "Test problem"

    def test_student_block(self):
        """
        Makes sure that students cannot see the hint management view.
        """
        c = Client()
        UserFactory.create(username='student', email='student@edx.org',
                           password='test')
        c.login(username='student', password='test')
        out = c.get(self.url)
        print out
        self.assertTrue('Sorry, but students are not allowed to access the hint manager!' in out.content)

    def test_staff_access(self):
        """
        Makes sure that staff can access the hint management view.
        """
        out = self.c.get('/courses/Me/19.002/test_course/hint_manager')
        print out
        self.assertTrue('Hints Awaiting Moderation' in out.content)

    def test_invalid_field_access(self):
        """
        Makes sure that field names other than 'mod_queue' and 'hints' are
        rejected.
        """
        out = self.c.post(self.url, {'op': 'delete hints',
                                     'field': 'all your private data'})
        print out
        self.assertTrue('an invalid field was accessed' in out.content)

    def test_switchfields(self):
        """
        Checks that the op: 'switch fields' POST request works.
        """
        out = self.c.post(self.url, {'op': 'switch fields',
                                     'field': 'mod_queue'})
        print out
        self.assertTrue('Hint 2' in out.content)

    def test_gethints(self):
        """
        Checks that gethints returns the right data.
        """
        request = RequestFactory()
        post = request.post(self.url, {'field': 'mod_queue'})
        out = view.get_hints(post, self.course_id, 'mod_queue')
        print out
        # 'other_field' names the field NOT being viewed.
        self.assertTrue(out['other_field'] == 'hints')
        expected = {self.problem_id: [(u'2.0', {u'2': [u'Hint 2', 1]})]}
        self.assertTrue(out['all_hints'] == expected)

    def test_gethints_other(self):
        """
        Same as above, with hints instead of mod_queue
        """
        request = RequestFactory()
        post = request.post(self.url, {'field': 'hints'})
        out = view.get_hints(post, self.course_id, 'hints')
        print out
        self.assertTrue(out['other_field'] == 'mod_queue')
        expected = {self.problem_id: [('1.0', {'1': ['Hint 1', 2],
                                               '3': ['Hint 3', 12]}),
                                      ('2.0', {'4': ['Hint 4', 3]})
                                      ]}
        self.assertTrue(out['all_hints'] == expected)

    def test_deletehints(self):
        """
        Checks that delete_hints deletes the right stuff.
        """
        request = RequestFactory()
        # Numeric keys (1, 2, ...) carry [problem_id, answer, hint_pk] triples.
        post = request.post(self.url, {'field': 'hints',
                                       'op': 'delete hints',
                                       1: [self.problem_id, '1.0', '1']})
        view.delete_hints(post, self.course_id, 'hints')
        problem_hints = XModuleContentField.objects.get(
            field_name='hints', definition_id=self.problem_id).value
        self.assertTrue('1' not in json.loads(problem_hints)['1.0'])

    def test_changevotes(self):
        """
        Checks that vote changing works.
        """
        request = RequestFactory()
        post = request.post(self.url, {'field': 'hints',
                                       'op': 'change votes',
                                       1: [self.problem_id, '1.0', '1', 5]})
        view.change_votes(post, self.course_id, 'hints')
        problem_hints = XModuleContentField.objects.get(
            field_name='hints', definition_id=self.problem_id).value
        # hints[answer][hint_pk (string)] = [hint text, vote count]
        print json.loads(problem_hints)['1.0']['1']
        self.assertTrue(json.loads(problem_hints)['1.0']['1'][1] == 5)

    def test_addhint(self):
        """
        Check that instructors can add new hints.
        """
        request = RequestFactory()
        post = request.post(self.url, {'field': 'mod_queue',
                                       'op': 'add hint',
                                       'problem': self.problem_id,
                                       'answer': '3.14',
                                       'hint': 'This is a new hint.'})
        view.add_hint(post, self.course_id, 'mod_queue')
        problem_hints = XModuleContentField.objects.get(
            field_name='mod_queue', definition_id=self.problem_id).value
        self.assertTrue('3.14' in json.loads(problem_hints))

    def test_approve(self):
        """
        Check that instructors can approve hints.  (Move them
        from the mod_queue to the hints.)
        """
        request = RequestFactory()
        post = request.post(self.url, {'field': 'mod_queue',
                                       'op': 'approve',
                                       1: [self.problem_id, '2.0', '2']})
        view.approve(post, self.course_id, 'mod_queue')
        # The hint must be gone from the queue...
        problem_hints = XModuleContentField.objects.get(
            field_name='mod_queue', definition_id=self.problem_id).value
        self.assertTrue('2.0' not in json.loads(problem_hints)
                        or len(json.loads(problem_hints)['2.0']) == 0)
        # ...and present (with its votes) among the approved hints.
        problem_hints = XModuleContentField.objects.get(
            field_name='hints', definition_id=self.problem_id).value
        self.assertTrue(json.loads(problem_hints)['2.0']['2'] == ['Hint 2', 1])
        self.assertTrue(len(json.loads(problem_hints)['2.0']) == 2)
import simplejson
from lxml import etree

from ..exceptions import except_orm
from ..models import (
    MetaModel, BaseModel,
    Model, TransientModel, AbstractModel,
    MAGIC_COLUMNS, LOG_ACCESS_COLUMNS,
)
# NOTE: `eval` below is deliberately the sandboxed safe_eval, not the builtin.
from openerp.tools.safe_eval import safe_eval as eval

# extra definitions for backward compatibility
browse_record_list = BaseModel


class browse_record(object):
    """ Pseudo-class for testing record instances """
    # Python 2 metaclass hook: isinstance(x, browse_record) matches any
    # BaseModel recordset holding at most one record.
    class __metaclass__(type):
        def __instancecheck__(self, inst):
            return isinstance(inst, BaseModel) and len(inst) <= 1


class browse_null(object):
    """ Pseudo-class for testing null instances """
    # Matches an empty recordset (falsy BaseModel).
    class __metaclass__(type):
        def __instancecheck__(self, inst):
            return isinstance(inst, BaseModel) and not inst


def transfer_field_to_modifiers(field, modifiers):
    """Fold a field descriptor's invisible/readonly/required attributes and
    its per-state overrides ('states') into the ``modifiers`` dict, in place.

    When some states override the default, the modifier becomes a domain on
    the record's 'state' field instead of a plain boolean.
    """
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    # 'states' maps state name -> list of (attr, value) overrides.
    for state, modifs in (field.get("states",{})).items():
        for modif in modifs:
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)
    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            # Attribute flips in the listed states: encode as a domain.
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value


# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Merge a view node's 'attrs', 'states' and boolean attributes into
    ``modifiers``, in place.  Node attributes take precedence over what
    transfer_field_to_modifiers already put there."""
    if node.get('attrs'):
        # 'attrs' is a python-literal dict of attr -> domain.
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v


def simplify_modifiers(modifiers):
    """Drop falsy invisible/readonly/required entries (False is the
    implicit default, so it need not be serialized)."""
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            del modifiers[a]


def transfer_modifiers_to_node(modifiers, node):
    """Serialize non-empty ``modifiers`` as JSON into the node's
    'modifiers' attribute (consumed by the web client)."""
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))


def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is hidden)
                              with ``tree_invisible``.
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)


def test_modifiers(what, expected):
    """Assert that a view-node string or a field descriptor dict produces
    exactly the ``expected`` JSON modifiers."""
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)


# To use this test:
# import openerp
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Smoke tests for the modifier-transfer helpers above."""
    test_modifiers('<field name="a"/>', '{}')
    test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
    test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
    test_modifiers('<field name="a" required="1"/>', '{"required": true}')
    test_modifiers('<field name="a" invisible="0"/>', '{}')
    test_modifiers('<field name="a" readonly="0"/>', '{}')
    test_modifiers('<field name="a" required="0"/>', '{}')
    test_modifiers('<field name="a" invisible="1" required="1"/>',
                   '{"invisible": true, "required": true}')  # TODO order is not guaranteed
    test_modifiers('<field name="a" invisible="1" required="0"/>',
                   '{"invisible": true}')
    test_modifiers('<field name="a" invisible="0" required="1"/>',
                   '{"required": true}')
    test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""",
                   '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
import uuid
from datetime import datetime

import pytest

from tests.util.crispin import crispin_client

# Fixture row ids in the test database.
ACCOUNT_ID = 1
NAMESPACE_ID = 1
THREAD_ID = 2

# These tests use a real Gmail test account and idempotently put the account
# back to the state it started in when the test is done.


@pytest.fixture(scope='function')
def message(db, config):
    """Build (to, subject, body) for a draft addressed to the test account
    itself.  The subject embeds a fresh UUID so each test run's draft can be
    found (and cleaned up) unambiguously."""
    from inbox.models.backends.imap import ImapAccount
    account = db.session.query(ImapAccount).get(ACCOUNT_ID)

    to = [{'name': u'"\u2605The red-haired mermaid\u2605"',
           'email': account.email_address}]
    subject = 'Draft test: ' + str(uuid.uuid4().hex)
    body = '<html><body><h2>Sea, birds, yoga and sand.</h2></body></html>'

    return (to, subject, body)


def test_remote_save_draft(db, config, message):
    """ Tests the save_draft function, which saves the draft to the remote. """
    from inbox.actions.backends.gmail import remote_save_draft
    from inbox.sendmail.base import _parse_recipients
    from inbox.sendmail.message import create_email, Recipients
    from inbox.models import Account

    account = db.session.query(Account).get(ACCOUNT_ID)
    to, subject, body = message
    to_addr = _parse_recipients(to)
    recipients = Recipients(to_addr, [], [])
    email = create_email(account.sender_name, account.email_address, None,
                         recipients, subject, body, None)
    date = datetime.utcnow()

    remote_save_draft(account, account.drafts_folder.name,
                      email.to_string(), db.session, date)

    # Verify via IMAP that the draft landed in the Drafts folder with the
    # \Draft flag, then delete it to restore the account's initial state.
    with crispin_client(account.id, account.provider) as c:
        criteria = ['NOT DELETED', 'SUBJECT "{0}"'.format(subject)]

        c.conn.select_folder(account.drafts_folder.name, readonly=False)

        draft_uids = c.conn.search(criteria)
        assert draft_uids, 'Message missing from Drafts folder'

        flags = c.conn.get_flags(draft_uids)
        for uid in draft_uids:
            f = flags.get(uid)
            assert f and '\\Draft' in f, "Message missing '\\Draft' flag"

        c.conn.delete_messages(draft_uids)
        c.conn.expunge()


def test_remote_delete_draft(db, config, message):
    """ Tests the delete_draft function, which deletes the draft from the
    remote. """
    from inbox.actions.backends.gmail import (remote_save_draft,
                                              remote_delete_draft)
    from inbox.sendmail.base import _parse_recipients
    from inbox.sendmail.message import create_email, Recipients
    from inbox.models import Account

    account = db.session.query(Account).get(ACCOUNT_ID)
    to, subject, body = message
    to_addr = _parse_recipients(to)
    recipients = Recipients(to_addr, [], [])
    email = create_email(account.sender_name, account.email_address, None,
                         recipients, subject, body, None)
    date = datetime.utcnow()

    # Save on remote
    remote_save_draft(account, account.drafts_folder.name,
                      email.to_string(), db.session, date)

    # Drafts are matched by the X-INBOX-ID header rather than the subject.
    inbox_uid = email.headers['X-INBOX-ID']

    with crispin_client(account.id, account.provider) as c:
        criteria = ['DRAFT', 'NOT DELETED',
                    'HEADER X-INBOX-ID {0}'.format(inbox_uid)]

        c.conn.select_folder(account.drafts_folder.name, readonly=False)
        uids = c.conn.search(criteria)
        assert uids, 'Message missing from Drafts folder'

        # Delete on remote
        remote_delete_draft(account, account.drafts_folder.name, inbox_uid,
                            db.session)

        c.conn.select_folder(account.drafts_folder.name, readonly=False)
        uids = c.conn.search(criteria)
        assert not uids, 'Message still in Drafts folder'
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from keystone.common import sql
from keystone.common.sql import migration
from keystone import credential
from keystone import exception


class CredentialModel(sql.ModelBase, sql.DictBase):
    """SQLAlchemy model for the 'credential' table.

    'attributes' lists the public fields exposed by to_dict()/from_dict();
    unknown keys land in the JSON 'extra' column.
    """
    __tablename__ = 'credential'
    attributes = ['id', 'user_id', 'project_id', 'blob', 'type']
    id = sql.Column(sql.String(64), primary_key=True)
    user_id = sql.Column(sql.String(64), nullable=False)
    # project_id is optional: credentials may be user-scoped only.
    project_id = sql.Column(sql.String(64))
    blob = sql.Column(sql.JsonBlob(), nullable=False)
    type = sql.Column(sql.String(255), nullable=False)
    extra = sql.Column(sql.JsonBlob())


class Credential(sql.Base, credential.Driver):
    """SQL backend for the credential driver interface."""

    # Internal interface to manage the database
    def db_sync(self, version=None):
        migration.db_sync(version=version)

    # credential crud

    @sql.handle_conflicts(type='credential')
    def create_credential(self, credential_id, credential):
        # handle_conflicts translates duplicate-key errors into Conflict.
        session = self.get_session()
        with session.begin():
            ref = CredentialModel.from_dict(credential)
            session.add(ref)
            session.flush()
        return ref.to_dict()

    def list_credentials(self):
        session = self.get_session()
        refs = session.query(CredentialModel).all()
        return [ref.to_dict() for ref in refs]

    def _get_credential(self, session, credential_id):
        """Fetch a model row or raise CredentialNotFound."""
        ref = session.query(CredentialModel).get(credential_id)
        if ref is None:
            raise exception.CredentialNotFound(credential_id=credential_id)
        return ref

    def get_credential(self, credential_id):
        session = self.get_session()
        return self._get_credential(session, credential_id).to_dict()

    @sql.handle_conflicts(type='credential')
    def update_credential(self, credential_id, credential):
        session = self.get_session()
        with session.begin():
            ref = self._get_credential(session, credential_id)
            # Merge the update onto the existing dict, then copy the merged
            # attributes back onto the managed row (id stays unchanged).
            old_dict = ref.to_dict()
            for k in credential:
                old_dict[k] = credential[k]
            new_credential = CredentialModel.from_dict(old_dict)
            for attr in CredentialModel.attributes:
                if attr != 'id':
                    setattr(ref, attr, getattr(new_credential, attr))
            ref.extra = new_credential.extra
            session.flush()
        return ref.to_dict()

    def delete_credential(self, credential_id):
        session = self.get_session()

        with session.begin():
            ref = self._get_credential(session, credential_id)
            session.delete(ref)
            session.flush()
#!/usr/bin/env python
"""Pack pickled example lists into a single Fuel-compatible HDF5 file."""
import h5py
import numpy
import argparse
import cPickle
from fuel.datasets.hdf5 import H5PYDataset


def pack(f, name, dataset_pathes):
    """Concatenate pickled datasets into one variable-length int32 table.

    :param f: open ``h5py.File`` to write into.
    :param name: name of the HDF5 dataset to create (e.g. "sources").
    :param dataset_pathes: paths of pickle files, each holding a list of
        int sequences (one per example).
    :returns: numpy array with the length of each input dataset, in order
        (used by the caller to compute split offsets).
    """
    datasets = []
    for path in dataset_pathes:
        # Open in binary mode and close the handle deterministically; the
        # original `cPickle.load(open(path))` leaked the file object and
        # relied on text mode working for pickle data.
        with open(path, 'rb') as src:
            datasets.append(cPickle.load(src))
    data = sum(datasets, [])
    # Rows have differing lengths, so store them as variable-length int32.
    dtype = h5py.special_dtype(vlen=numpy.dtype('int32'))
    table = f.create_dataset(name, (len(data),), dtype=dtype)
    for i, example in enumerate(data):
        table[i] = example
    return numpy.array([len(d) for d in datasets])


if __name__ == '__main__':
    parser = argparse.ArgumentParser("Pack data to HDF5")
    parser.add_argument('-s', dest='sources', nargs='*',
                        help="Source datasets")
    parser.add_argument('-t', dest='targets', nargs='*',
                        help="Target datasets")
    parser.add_argument('-n', dest='names', nargs='*',
                        help="Dataset names")
    parser.add_argument('-i', dest='add_ids', action='store_true',
                        default=False, help="Add integer IDs")
    parser.add_argument('dest', help="Destination")
    args = parser.parse_args()
    # Every split needs a matching source file, target file and name.
    assert len(args.sources) == len(args.targets)
    assert len(args.sources) == len(args.names)
    with h5py.File(args.dest, mode='w') as f:
        lengths = pack(f, "sources", args.sources)
        # Source/target files must pair up example-for-example.
        assert numpy.all(lengths == pack(f, "targets", args.targets))
        # Cumulative offsets delimit each named split inside the flat tables.
        offsets = [0] + list(lengths.cumsum())
        total_len = offsets[-1]
        if args.add_ids:
            id_table = f.create_dataset(
                'ids', data=numpy.arange(total_len, dtype='int32'))
            split_dict = {
                args.names[i]: {'sources': (offsets[i], offsets[i + 1]),
                                'targets': (offsets[i], offsets[i + 1]),
                                'ids': (offsets[i], offsets[i + 1])}
                for i in range(len(args.names))}
        else:
            split_dict = {
                args.names[i]: {'sources': (offsets[i], offsets[i + 1]),
                                'targets': (offsets[i], offsets[i + 1])}
                for i in range(len(args.names))}
        f.attrs['split'] = H5PYDataset.create_split_array(split_dict)
import os

import pkg_resources
import yaml

from fget.utils import fgetprint
from fget.resource.root import Root


class CachedSettings(object):
    """Lazily-built cache of Jenkins job names, persisted as a flat text
    file (``fget.jobs``) in ``cache_dir``.

    File format: a line starting with ``http://`` opens a new URL section;
    every following line is a job name belonging to that URL.
    """

    def __init__(self, cache_dir):
        self.cache_dir = cache_dir

    def init(self):
        """Populate ``self.cached_settings`` ({url: [job_name, ...]}).

        On first run, reads the bundled ``fget.yaml`` settings, queries
        every configured Jenkins server for its jobs, and writes the cache
        file.  On later runs, just parses the cache file.
        """
        settings_filename = 'fget.yaml'
        cached_filename = 'fget.jobs'

        cached_settings_file = os.path.join(self.cache_dir, cached_filename)

        self.cached_settings = {}

        if not os.path.isfile(cached_settings_file):
            fgetprint('Initiating. Please wait...')
            settings_file = \
                pkg_resources.resource_filename('fget', settings_filename)
            with open(settings_file) as f:
                # safe_load: the settings file only holds plain scalars and
                # lists, and yaml.load without an explicit Loader is
                # deprecated and can execute arbitrary constructors.
                settings = yaml.safe_load(f)

            for url in settings.get('JENKINS_URLS', []):
                url = url.strip('/')
                fgetprint('Retrieving jobs from {0}'.format(url))
                root_resource = Root(url)
                for job in root_resource.get_jobs():
                    if url not in self.cached_settings:
                        self.cached_settings[url] = []
                    self.cached_settings[url].append(str(job['name']))

            # Persist: URL line followed by its job names, one per line.
            with open(cached_settings_file, 'w') as f:
                for key in self.cached_settings.keys():
                    f.write(key + '\n')
                    for value in self.cached_settings[key]:
                        f.write(value + '\n')
            fgetprint('Initiating. Finished.')
        else:
            with open(cached_settings_file) as f:
                # NOTE(review): assumes the first line of a well-formed cache
                # file is always an http:// URL; a job line before any URL
                # would raise NameError on `url`.
                for line in f:
                    if line.startswith('http://'):
                        url = line.strip()
                        self.cached_settings[url] = []
                        continue
                    self.cached_settings[url].append(line.strip())

    def get_settings(self):
        """Return the {url: [job_name, ...]} mapping built by init()."""
        return self.cached_settings
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time

from metrics import smoothness
from telemetry.page import page_measurement


class StatsCollector(object):
  def __init__(self, timeline):
    """ Utility class for collecting rendering stats from timeline model.

    timeline -- The timeline model
    """
    self.timeline = timeline
    self.total_best_rasterize_time = 0
    self.total_best_record_time = 0
    self.total_pixels_rasterized = 0
    self.total_pixels_recorded = 0
    self.trigger_event = self.FindTriggerEvent()
    self.renderer_process = self.trigger_event.start_thread.parent

  def FindTriggerEvent(self):
    """Return the single top-level 'measureNextFrame' console event that
    marks when the benchmark requested a frame.  Raises LookupError if the
    trace does not contain exactly one."""
    events = [s for s in self.timeline.GetAllEventsOfName('measureNextFrame')
              if s.parent_slice == None]
    if len(events) != 1:
      raise LookupError('no measureNextFrame event found')
    return events[0]

  def FindFrameNumber(self, trigger_time):
    """Return the source_frame_number of the first LayerTreeHost::UpdateLayers
    slice that starts after trigger_time (the first frame produced after the
    benchmark trigger)."""
    start_event = None
    for event in self.renderer_process.IterAllSlicesOfName(
        "LayerTreeHost::UpdateLayers"):
      if event.start > trigger_time:
        if start_event == None:
          start_event = event
        elif event.start < start_event.start:
          start_event = event
    if start_event is None:
      # Original message misspelled the slice name ('LayterTreeHost').
      raise LookupError(
          'no LayerTreeHost::UpdateLayers after measureNextFrame found')
    return start_event.args["source_frame_number"]

  def GatherRasterizeStats(self, frame_number):
    """Accumulate, for the given frame, the best (minimum) rasterize time
    per RasterLoop and the total number of pixels rasterized.  The raster
    task is repeated (see --raster-record-repeat), so the minimum duration
    excludes cache effects and descheduling."""
    for event in self.renderer_process.IterAllSlicesOfName(
        "RasterWorkerPoolTaskImpl::RunRasterOnThread"):
      if event.args["data"]["source_frame_number"] == frame_number:
        for raster_loop_event in event.GetAllSubSlicesOfName("RasterLoop"):
          best_rasterize_time = float('inf')
          for raster_event in raster_loop_event.GetAllSubSlicesOfName(
              "Picture::Raster"):
            if "num_pixels_rasterized" in raster_event.args:
              best_rasterize_time = min(best_rasterize_time,
                                        raster_event.duration)
              self.total_pixels_rasterized += \
                  raster_event.args["num_pixels_rasterized"]
          if best_rasterize_time == float('inf'):
            # No qualifying Picture::Raster slice in this loop.
            best_rasterize_time = 0
          self.total_best_rasterize_time += best_rasterize_time

  def GatherRecordStats(self, frame_number):
    """Accumulate, for the given frame, the best (minimum) record time per
    RecordLoop and the total number of pixels recorded."""
    for event in self.renderer_process.IterAllSlicesOfName(
        "PictureLayer::Update"):
      if event.args["source_frame_number"] == frame_number:
        for record_loop_event in event.GetAllSubSlicesOfName("RecordLoop"):
          best_record_time = float('inf')
          for record_event in record_loop_event.GetAllSubSlicesOfName(
              "Picture::Record"):
            best_record_time = min(best_record_time, record_event.duration)
            self.total_pixels_recorded += (
                record_event.args["data"]["width"] *
                record_event.args["data"]["height"])
          if best_record_time == float('inf'):
            best_record_time = 0
          self.total_best_record_time += best_record_time

  def GatherRenderingStats(self):
    """Locate the triggered frame and collect its rasterize/record stats."""
    trigger_time = self.trigger_event.start
    frame_number = self.FindFrameNumber(trigger_time)
    self.GatherRasterizeStats(frame_number)
    self.GatherRecordStats(frame_number)


def DivideIfPossibleOrZero(numerator, denominator):
  """Return numerator/denominator, or 0 when the denominator is 0."""
  if denominator == 0:
    return 0
  return numerator / denominator


class RasterizeAndRecord(page_measurement.PageMeasurement):
  """Measures best-case rasterize/record times and pixel counts for one
  frame, using impl-side painting and repeated raster tasks."""

  def __init__(self):
    super(RasterizeAndRecord, self).__init__('', True)
    self._metrics = None

  def AddCommandLineOptions(self, parser):
    parser.add_option('--report-all-results', dest='report_all_results',
                      action='store_true',
                      help='Reports all data collected')
    parser.add_option('--raster-record-repeat', dest='raster_record_repeat',
                      default=20,
                      help='Repetitions in raster and record loops.' +
                      'Higher values reduce variance, but can cause' +
                      'instability (timeouts, event buffer overflows, etc.).')
    parser.add_option('--start-wait-time', dest='start_wait_time',
                      default=2,
                      help='Wait time before the benchmark is started ' +
                      '(must be long enough to load all content)')
    parser.add_option('--stop-wait-time', dest='stop_wait_time',
                      default=5,
                      help='Wait time before measurement is taken ' +
                      '(must be long enough to render one frame)')

  def CustomizeBrowserOptions(self, options):
    smoothness.SmoothnessMetrics.CustomizeBrowserOptions(options)
    # Run each raster task N times. This allows us to report the time for the
    # best run, effectively excluding cache effects and time when the thread is
    # de-scheduled.
    options.AppendExtraBrowserArgs([
        '--slow-down-raster-scale-factor=%d' % options.raster_record_repeat,
        # Enable impl-side-painting. Current version of benchmark only works for
        # this mode.
        '--enable-impl-side-painting',
        '--force-compositing-mode',
        '--enable-threaded-compositing'
    ])

  def MeasurePage(self, page, tab, results):
    self._metrics = smoothness.SmoothnessMetrics(tab)

    # Rasterize only what's visible.
    tab.ExecuteJavaScript(
        'chrome.gpuBenchmarking.setRasterizeOnlyVisibleContent();')

    # Wait until the page has loaded and come to a somewhat steady state.
    # Needs to be adjusted for every device (~2 seconds for workstation).
    time.sleep(float(self.options.start_wait_time))

    # Render one frame before we start gathering a trace. On some pages, the
    # first frame requested has more variance in the number of pixels
    # rasterized.
    tab.ExecuteJavaScript("""
        window.__rafFired = false;
        window.webkitRequestAnimationFrame(function() {
          chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();
          window.__rafFired  = true;
        });
    """)

    tab.browser.StartTracing('webkit.console,benchmark', 60)
    self._metrics.Start()

    # console.time/timeEnd bracket the frame so StatsCollector can find the
    # 'measureNextFrame' event in the trace.
    tab.ExecuteJavaScript("""
        console.time("measureNextFrame");
        window.__rafFired = false;
        window.webkitRequestAnimationFrame(function() {
          chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();
          window.__rafFired  = true;
        });
    """)
    # Wait until the frame was drawn.
    # Needs to be adjusted for every device and for different
    # raster_record_repeat counts.
    # TODO(ernstm): replace by call-back.
    time.sleep(float(self.options.stop_wait_time))
    tab.ExecuteJavaScript('console.timeEnd("measureNextFrame")')

    self._metrics.Stop()
    tab.browser.StopTracing()
    timeline = tab.browser.StopTracing().AsTimelineModel()
    collector = StatsCollector(timeline)
    collector.GatherRenderingStats()

    rendering_stats = self._metrics.end_values

    results.Add('best_rasterize_time', 'seconds',
                collector.total_best_rasterize_time / 1.e3,
                data_type='unimportant')
    results.Add('best_record_time', 'seconds',
                collector.total_best_record_time / 1.e3,
                data_type='unimportant')
    results.Add('total_pixels_rasterized', 'pixels',
                collector.total_pixels_rasterized,
                data_type='unimportant')
    results.Add('total_pixels_recorded', 'pixels',
                collector.total_pixels_recorded,
                data_type='unimportant')

    if self.options.report_all_results:
      for k, v in rendering_stats.iteritems():
        results.Add(k, '', v)
        self.context = {}
        self.groupby = []
        self.cr=''

    def _parse_node(self, root_node):
        # Recursively collect visible field names from a view's XML tree,
        # skipping fields that are already used as group-by columns.
        # NOTE: `eval` here is presumably the sandboxed safe_eval imported
        # at module level -- confirm against the file header (not visible
        # in this fragment).
        result = []
        for node in root_node:
            field_name = node.get('name')
            if not eval(str(node.attrib.get('invisible',False)),{'context':self.context}):
                if node.tag == 'field':
                    if field_name in self.groupby:
                        continue
                    result.append(field_name)
                else:
                    result.extend(self._parse_node(node))
        return result

    def _parse_string(self, view):
        # Parse the view architecture; some views arrive as unicode that
        # must be encoded to utf-8 first, others as raw bytes.
        try:
            dom = etree.XML(view.encode('utf-8'))
        except Exception:
            dom = etree.XML(view)
        return self._parse_node(dom)

    def create(self, cr, uid, ids, datas, context=None):
        # Build the "print screen" report data for a tree view: either a
        # flat read() of the records, or a recursive read_group() expansion
        # when the context carries a group_by.
        if not context:
            context={}
        self.cr=cr
        self.context = context
        self.groupby = context.get('group_by',[])
        self.groupby_no_leaf = context.get('group_by_no_leaf',False)
        registry = openerp.registry(cr.dbname)
        model = registry[datas['model']]
        # Prefer the translated ir.model name over the raw _description.
        model_id = registry['ir.model'].search(cr, uid, [('model','=',model._name)])
        model_desc = model._description
        if model_id:
            model_desc = registry['ir.model'].browse(cr, uid, model_id[0], context).name
        self.title = model_desc
        datas['ids'] = ids
        result = model.fields_view_get(cr, uid, view_type='tree', context=context)
        fields_order = self.groupby + self._parse_string(result['arch'])
        if self.groupby:
            rows = []
            def get_groupby_data(groupby = [], domain = []):
                # Depth-first expansion: each group row is emitted, then its
                # children (either sub-groups or leaf records) directly after.
                records = model.read_group(cr, uid, domain, fields_order, groupby , 0, None, context)
                for rec in records:
                    rec['__group'] = True
                    rec['__no_leaf'] = self.groupby_no_leaf
                    rec['__grouped_by'] = groupby[0] if (isinstance(groupby, list) and groupby) else groupby
                    for f in fields_order:
                        if f not in rec:
                            rec.update({f:False})
                        elif isinstance(rec[f], tuple):
                            # many2one values come back as (id, name).
                            rec[f] = rec[f][1]
                    rows.append(rec)
                    inner_groupby = (rec.get('__context', {})).get('group_by',[])
                    inner_domain = rec.get('__domain', [])
                    if inner_groupby:
                        get_groupby_data(inner_groupby, inner_domain)
                    else:
                        if self.groupby_no_leaf:
                            continue
                        child_ids = model.search(cr, uid, inner_domain)
                        res = model.read(cr, uid, child_ids, result['fields'].keys(), context)
                        # Preserve the caller-supplied ordering of ids.
                        res.sort(lambda x,y: cmp(ids.index(x['id']), ids.index(y['id'])))
                        rows.extend(res)
            dom = [('id','in',ids)]
            if self.groupby_no_leaf and len(ids) and not ids[0]:
                dom = datas.get('_domain',[])
            get_groupby_data(self.groupby, dom)
        else:
            rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context)
            ids2 = map(itemgetter('id'), rows) # getting the ids from read result
            if datas['ids'] != ids2:
                # sorted ids were not taken into consideration for print screen
                rows_new = []
                for id in datas['ids']:
                    rows_new += [elem for elem in rows if elem['id'] == id]
                rows = rows_new
        res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
        return 
self.obj.get(), 'pdf' def _create_table(self, uid, ids, fields, fields_order, results, context, title=''): pageSize=[297.0, 210.0] new_doc = etree.Element("report") config = etree.SubElement(new_doc, 'config') def _append_node(name, text): n = etree.SubElement(config, name) n.text = text #_append_node('date', time.strftime('%d/%m/%Y')) _append_node('date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y')))) _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize)) _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,)) _append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,)) _append_node('report-header', title) registry = openerp.registry(self.cr.dbname) _append_node('company', registry['res.users'].browse(self.cr,uid,uid).company_id.name) rpt_obj = registry['res.users'] rml_obj=report_sxw.rml_parse(self.cr, uid, rpt_obj._name,context) _append_node('header-date', str(rml_obj.formatLang(time.strftime("%Y-%m-%d"),date=True))+' ' + str(time.strftime("%H:%M"))) l = [] t = 0 strmax = (pageSize[0]-40) * 2.8346 temp = [] tsum = [] for i in range(0, len(fields_order)): temp.append(0) tsum.append(0) ince = -1 for f in fields_order: s = 0 ince += 1 if fields[f]['type'] in ('date','time','datetime','float','integer'): s = 60 strmax -= s if fields[f]['type'] in ('float','integer'): temp[ince] = 1 else: t += fields[f].get('size', 80) / 28 + 1 l.append(s) for pos in range(len(l)): if not l[pos]: s = fields[fields_order[pos]].get('size', 80) / 28 + 1 l[pos] = strmax * s / t _append_node('tableSize', ','.join(map(str,l)) ) header = etree.SubElement(new_doc, 'header') for f in fields_order: field = etree.SubElement(header, 'field') field.text = tools.ustr(fields[f]['string'] or '') lines = etree.SubElement(new_doc, 'lines') for line in results: node_line = etree.SubElement(lines, 'row') count = -1 for f in fields_order: float_flag = 0 count += 1 if fields[f]['type']=='many2one' and line[f]: if not line.get('__group'): line[f] = line[f][1] if 
fields[f]['type']=='selection' and line[f]: for key, value in fields[f]['selection']: if key == line[f]: line[f] = value break if fields[f]['type'] in ('one2many','many2many') and line[f]: line[f] = '( '+tools.ustr(len(line[f])) + ' )' if fields[f]['type'] == 'float' and line[f]: precision=(('digits' in fields[f]) and fields[f]['digits'][1]) or 2 prec ='%.' + str(precision) +'f' line[f]=prec%(line[f]) float_flag = 1 if fields[f]['type'] == 'date' and line[f]: new_d1 = line[f] if not line.get('__group'): format = str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y')) d1 = datetime.strptime(line[f],'%Y-%m-%d') new_d1 = d1.strftime(format) line[f] = new_d1 if fields[f]['type'] == 'time' and line[f]: new_d1 = line[f] if not line.get('__group'): format = str(locale.nl_langinfo(locale.T_FMT)) d1 = datetime.strptime(line[f], '%H:%M:%S') new_d1 = d1.strftime(format) line[f] = new_d1 if fields[f]['type'] == 'datetime' and line[f]: new_d1 = line[f] if not line.get('__group'): format = str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))+' '+str(locale.nl_langinfo(locale.T_FMT)) d1 = datetime.strptime(line[f], '%Y-%m-%d %H:%M:%S')
MPLATE.format(style=style) @property def __release_url(self): """Get the release URL.""" return self.URL_TEMPLATE.format(version=self.fa_version) @property def __zip_file(self): """Get a file object of the FA zip file.""" if self.zip_path: # If using a local file, just open it: self.__print('Opening local zipf
ile: %s' % self.zip_path) return open(self.zip_path, 'rb') # Otherwise, download it and make a file object in-memory: url = self.__release_url self.__print('Downloading from URL: %s' % url) respo
nse = urlopen(url) return io.BytesIO(response.read()) @property def __zipped_files_data(self): """Get a dict of all files of interest from the FA release zipfile.""" files = {} with zipfile.ZipFile(self.__zip_file) as thezip: for zipinfo in thezip.infolist(): if zipinfo.filename.endswith('metadata/icons.json'): with thezip.open(zipinfo) as compressed_file: files['icons.json'] = compressed_file.read() elif zipinfo.filename.endswith('.ttf'): # For the record, the paths usually look like this: # webfonts/fa-brands-400.ttf # webfonts/fa-regular-400.ttf # webfonts/fa-solid-900.ttf name = os.path.basename(zipinfo.filename) tokens = name.split('-') style = tokens[1] if style in self.FA_STYLES: with thezip.open(zipinfo) as compressed_file: files[style] = compressed_file.read() # Safety checks: assert all(style in files for style in self.FA_STYLES), \ 'Not all FA styles found! Update code is broken.' assert 'icons.json' in files, 'icons.json not found! Update code is broken.' return files def run(self): """Run command.""" files = self.__zipped_files_data hashes = {} icons = {} # Read icons.json (from the webfont zip download) data = json.loads(files['icons.json']) # Group icons by style, since not all icons exist for all styles: for icon, info in data.items(): for style in info['styles']: icons.setdefault(str(style), {}) icons[str(style)][icon] = str(info['unicode']) # For every FA "style": for style, details in icons.items(): # Dump a .json charmap file: charmapPath = self.__get_charmap_path(style) self.__print('Dumping updated "%s" charmap: %s' % (style, charmapPath)) with open(charmapPath, 'w+') as f: json.dump(details, f, indent=4, sort_keys=True) # Dump a .ttf font file: font_path = self.__get_ttf_path(style) data = files[style] self.__print('Dumping updated "%s" font: %s' % (style, font_path)) with open(font_path, 'wb+') as f: f.write(data) # Fix to prevent repeated font names: if style in ('regular', 'solid'): new_name = str("Font Awesome 5 Free %s") % style.title() 
self.__print('Renaming font to "%s" in: %s' % (new_name, font_path)) if ttLib is not None: rename_font(font_path, new_name) else: sys.exit( "This special command requires the module 'fonttools': " "https://github.com/fonttools/fonttools/") # Reread the data since we just edited the font file: with open(font_path, 'rb') as f: data = f.read() files[style] = data # Store hashes for later: hashes[style] = hashlib.md5(data).hexdigest() # Now it's time to patch "iconic_font.py": iconic_path = ICONIC_FONT_PY_PATH self.__print('Patching new MD5 hashes in: %s' % iconic_path) with open(iconic_path, 'r') as iconic_file: contents = iconic_file.read() # We read it in full, then use regex substitution: for style, md5 in hashes.items(): self.__print('New "%s" hash is: %s' % (style, md5)) regex = r"('fontawesome5-%s-webfont.ttf':\s+)'(\w+)'" % style subst = r"\g<1>'" + md5 + "'" contents = re.sub(regex, subst, contents, 1) # and finally overwrite with the modified file: self.__print('Dumping updated file: %s' % iconic_path) with open(iconic_path, 'w') as iconic_file: iconic_file.write(contents) self.__print( '\nFinished!\n' 'Please check the git diff to make sure everything went okay.\n' 'You should also edit README.md and ' 'qtawesome/docs/source/usage.rst to reflect the changes.') class UpdateCodiconCommand(distutils.cmd.Command): """A custom command to make updating Microsoft's Codicons easy!""" description = 'Try to update the Codicon font data in the project.' 
user_options = [] CHARMAP_PATH = os.path.join(HERE, 'qtawesome', 'fonts', 'codicon-charmap.json') TTF_PATH = os.path.join(HERE, 'qtawesome', 'fonts', 'codicon.ttf') DOWNLOAD_URL_TTF = 'https://raw.githubusercontent.com/microsoft/vscode-codicons/master/dist/codicon.ttf' DOWNLOAD_URL_CSV = 'https://raw.githubusercontent.com/microsoft/vscode-codicons/master/dist/codicon.csv' # At the time of writing this comment, vscode-codicons repo does not use git tags, but you can get the version from package.json: DOWNLOAD_URL_JSON = 'https://raw.githubusercontent.com/microsoft/vscode-codicons/master/package.json' def initialize_options(self): """Required by distutils.""" def finalize_options(self): """Required by distutils.""" def __print(self, msg): """Shortcut for printing with the distutils logger.""" self.announce(msg, level=distutils.log.INFO) def run(self): """Run command.""" # Download .csv to a temporary path: package_json = urlopen(self.DOWNLOAD_URL_JSON) package_info = json.load(package_json) package_version = package_info['version'] self.__print('Will download codicons version: %s' % package_version) # Download .csv to a temporary path: with tempfile.NamedTemporaryFile(mode='wb+', suffix='.csv', prefix='codicon', delete=False) as tempCSV: self.__print('Downloading: %s' % self.DOWNLOAD_URL_CSV) response = urlopen(self.DOWNLOAD_URL_CSV) shutil.copyfileobj(response, tempCSV) # Interpret the codicon.csv file: charmap = {} with open(tempCSV.name, 'r', encoding='utf-8') as tempCSV: reader = csv.DictReader(tempCSV) for row in reader: code = "0x" + row['unicode'].lower() charmap[row['short_name']] = code self.__print('Identified %s icons in the CSV.' 
% len(charmap)) # Remove temp file: os.remove(tempCSV.name) # Dump a .json charmap file the way we like it: self.__print('Dumping updated charmap: %s' % self.CHARMAP_PATH) with open(self.CHARMAP_PATH, 'w+') as f: json.dump(charmap, f, indent=4, sort_keys=True) # Dump a .ttf font file: with open(self.TTF_PATH, 'wb+') as ttfFile: self.__print('Downloading %s --> %s' % (self.DOWNLOAD_URL_TTF, self.TTF_PATH)) response = urlopen(self.DOWNLOAD_URL_TTF) data = response.read() ttfFile.write(data) md5 = hashlib.md5(data).hexdigest() self.__print('New hash is: %s' % md5) # Now it's time to patch "iconic_font.py": self.__print('Patching new MD5 hashes in: %s' % ICONIC_FONT_PY_PATH) with open(ICONIC_FONT_PY_PATH, 'r') as iconic_file: contents = iconic_file.read() regex = r"('codicon.ttf':\s+)'(\w+)'"
import requests
import math
import re

from bs4 import BeautifulSoup

# NCES College Navigator endpoints; the query string bakes in the search
# filters for the result set being scraped.
ROOT_URL = "http://nces.ed.gov/collegenavigator"
INDEX_URL = ROOT_URL + "?s=all&l=93&ct=1&ic=1&an=5&ax=50"
PAGINATION_DIV_ID = "ctl00_cphCollegeNavBody_ucResultsMain_divMsg"

# Compiled once at module level; is_college_link() is called for every
# anchor of every result page, so don't recompile per call.
_COLLEGE_LINK_RE = re.compile("id=")


def get_num_pages(pagination):
    """Return the total number of result pages.

    :param pagination: Pagination string (i.e.: "1-20 of 100 Results")
    """
    words = pagination.split()
    per_page = int(words[0][2:])  # "1-20" -> 20 results per page
    total = int(words[2])
    # BUG FIX: use integer floor division.  The original used "/", which on
    # Python 3 (this script uses print() calls) yields a float and makes
    # range(1, pages + 1) in get_colleges() raise TypeError.
    pages = total // per_page
    # Add one for the final partial page if the division isn't even
    if total % per_page != 0:
        pages = pages + 1
    return pages


def is_college_link(href):
    """Return whether an anchor is a college detail-page link.

    :param href: hyperlink string (may be None for anchors without href)
    """
    return href and _COLLEGE_LINK_RE.search(href)


def get_colleges():
    """Scrape every result page and return a list of college dicts.

    Each dict carries 'name' and 'url' (relative detail-page link).
    """
    response = requests.get(INDEX_URL)
    soup = BeautifulSoup(response.text, "html.parser")

    # Get the total number of pages in the result
    pagination = soup.find("div", attrs={"id": PAGINATION_DIV_ID})
    pages = get_num_pages(pagination.get_text())

    # Store colleges in list of dictionaries
    colleges = []

    # Iterate over all of the pages
    for i in range(1, pages + 1):
        print("Parsing colleges page: " + str(i))
        response = requests.get(INDEX_URL + "&pg=" + str(i))
        soup = BeautifulSoup(response.text, "html.parser")

        # There is only one "resultsTable" in the HTML that
        # contains the list of college links and information
        table = soup.find("table", attrs={"class": "resultsTable"})
        for link in table.findChildren(href=is_college_link):
            # Build a fresh dict per link; the original mutated one shared
            # dict and relied on .copy() to avoid aliasing.
            colleges.append({'name': link.get_text(),
                             'url': link.get('href')})

    return colleges


def get_college_tuition_data(college):
    """Retrieve tuition data and add it to *college* in place.

    Adds 'In-state' and 'Out-of-state' keys; "-" when no data is published.

    :param college: college dictionary container
    """
    response = requests.get(ROOT_URL + college['url'])
    soup = BeautifulSoup(response.text, "html.parser")
    expenses = soup.find("div", attrs={"id": "expenses"})
    table = expenses.find("tbody")
    try:
        # Get In-state Tuition Change
        # NOTE(review): cols[5] assumes the wanted figure is the sixth cell
        # of the expenses row -- confirm against the live page layout.
        row = table.find(string="In-state").parent.parent
        cols = row.find_all("td")
        college['In-state'] = cols[5].get_text()

        # Get Out-of-state Tuition Change
        row = table.find(string="Out-of-state").parent.parent
        cols = row.find_all("td")
        college['Out-of-state'] = cols[5].get_text()
    except (AttributeError, TypeError):
        # Missing table/row: the page has no published tuition figures.
        print('\033[93m' + college['name'] + " has no tuition data, skipping!!" + '\033[0m')
        college['In-state'] = "-"
        college['Out-of-state'] = "-"


# Get initial list of colleges and links
print("Getting initial list of colleges")
colleges = get_colleges()

# Get additional tuition data for each college
for college in colleges:
    print(college['name'] + ": Retrieving tuition data")
    get_college_tuition_data(college)

for college in colleges:
    print(repr(str(college['name'])) + ": In-state = " + college['In-state'] +
          ", Out-of-state = " + college['Out-of-state'])
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""Record module signals."""

from blinker import Namespace

# All record-related signals live in a dedicated blinker Namespace so they
# cannot collide with identically-named signals from other packages.
_signals = Namespace()

record_viewed = _signals.signal('record-viewed')
"""
This signal is sent when a detailed view of record is displayed.

Parameters:
    recid - id of record
    id_user - id of user or 0 for guest
    request - flask request object

Example subscriber:

.. code-block:: python

    def subscriber(sender, recid=0, id_user=0, request=None):
        ...
"""

before_record_insert = _signals.signal('before-record-insert')
"""Signal sent before a record is inserted.

Example subscriber

.. code-block:: python

    def listener(sender, *args, **kwargs):
        sender['key'] = sum(args)

    from invenio_records.signals import before_record_insert
    before_record_insert.connect(
        listener
    )
"""

after_record_insert = _signals.signal('after-record-insert')
"""Signal sent after a record is inserted.

.. note::

   No modifications are allowed on record object.
"""

before_record_update = _signals.signal('before-record-update')
"""Signal sent before a record is updated."""

after_record_update = _signals.signal('after-record-update')
"""Signal sent after a record is updated."""

before_record_index = _signals.signal('before-record-index')
"""Signal sent before a record is indexed.

Example subscriber

.. code-block:: python

    def listener(sender, **kwargs):
        info = fetch_some_info_for_recid(sender)
        kwargs['json']['more_info'] = info

    from invenio_records.signals import before_record_index
    before_record_index.connect(
        listener
    )
"""

after_record_index = _signals.signal('after-record-index')
"""Signal sent after a record is indexed."""
ssertEqual(bucket.get_subresource('logging'), logging_str) def test_copy_key(self): """Test copying a key from one bucket to another.""" # create two new, empty buckets bucket1 = self._MakeBucket() bucket2 = self._MakeBucket() bucket_name_1 = bucket1.name bucket_name_2 = bucket2.name # verify buckets got created bucket1 = self._GetConnection().get_bucket(bucket_name_1) bucket2 = self._GetConnection().get_bucket(bucket_name_2) # create a key in bucket1 and give
it some content key_name = 'foobar' k1 = bucket1.new_key(key_name) self.assertIsInstance(k1, bucket1.key_class) k1.name = key_name s = 'This is a test.' k1.set_contents_from_string(s) # copy the new key from bucket1 to bucket2 k1.copy(bucket_name_2, key_name) # now copy the contents from bucket2 to a local file k2
= bucket2.lookup(key_name) self.assertIsInstance(k2, bucket2.key_class) tmpdir = self._MakeTempDir() fpath = os.path.join(tmpdir, 'foobar') fp = open(fpath, 'wb') k2.get_contents_to_file(fp) fp.close() fp = open(fpath) # check to make sure content read is identical to original self.assertEqual(s, fp.read()) fp.close() # delete keys bucket1.delete_key(k1) bucket2.delete_key(k2) def test_default_object_acls(self): """Test default object acls.""" # create a new bucket bucket = self._MakeBucket() # get default acl and make sure it's project-private acl = bucket.get_def_acl() self.assertIsNotNone(re.search(PROJECT_PRIVATE_RE, acl.to_xml())) # set default acl to a canned acl and verify it gets set bucket.set_def_acl('public-read') acl = bucket.get_def_acl() # save public-read acl for later test public_read_acl = acl self.assertEqual(acl.to_xml(), ('<AccessControlList><Entries><Entry>' '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' '</Entry></Entries></AccessControlList>')) # back to private acl bucket.set_def_acl('private') acl = bucket.get_def_acl() self.assertEqual(acl.to_xml(), '<AccessControlList></AccessControlList>') # set default acl to an xml acl and verify it gets set bucket.set_def_acl(public_read_acl) acl = bucket.get_def_acl() self.assertEqual(acl.to_xml(), ('<AccessControlList><Entries><Entry>' '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' '</Entry></Entries></AccessControlList>')) # back to private acl bucket.set_def_acl('private') acl = bucket.get_def_acl() self.assertEqual(acl.to_xml(), '<AccessControlList></AccessControlList>') def test_default_object_acls_storage_uri(self): """Test default object acls using storage_uri.""" # create a new bucket bucket = self._MakeBucket() bucket_name = bucket.name uri = storage_uri('gs://' + bucket_name) # get default acl and make sure it's project-private acl = uri.get_def_acl() self.assertIsNotNone( re.search(PROJECT_PRIVATE_RE, acl.to_xml()), 'PROJECT_PRIVATE_RE not found in ACL 
XML:\n' + acl.to_xml()) # set default acl to a canned acl and verify it gets set uri.set_def_acl('public-read') acl = uri.get_def_acl() # save public-read acl for later test public_read_acl = acl self.assertEqual(acl.to_xml(), ('<AccessControlList><Entries><Entry>' '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' '</Entry></Entries></AccessControlList>')) # back to private acl uri.set_def_acl('private') acl = uri.get_def_acl() self.assertEqual(acl.to_xml(), '<AccessControlList></AccessControlList>') # set default acl to an xml acl and verify it gets set uri.set_def_acl(public_read_acl) acl = uri.get_def_acl() self.assertEqual(acl.to_xml(), ('<AccessControlList><Entries><Entry>' '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' '</Entry></Entries></AccessControlList>')) # back to private acl uri.set_def_acl('private') acl = uri.get_def_acl() self.assertEqual(acl.to_xml(), '<AccessControlList></AccessControlList>') def test_cors_xml_bucket(self): """Test setting and getting of CORS XML documents on Bucket.""" # create a new bucket bucket = self._MakeBucket() bucket_name = bucket.name # now call get_bucket to see if it's really there bucket = self._GetConnection().get_bucket(bucket_name) # get new bucket cors and make sure it's empty cors = re.sub(r'\s', '', bucket.get_cors().to_xml()) self.assertEqual(cors, CORS_EMPTY) # set cors document on new bucket bucket.set_cors(CORS_DOC) cors = re.sub(r'\s', '', bucket.get_cors().to_xml()) self.assertEqual(cors, CORS_DOC) def test_cors_xml_storage_uri(self): """Test setting and getting of CORS XML documents with storage_uri.""" # create a new bucket bucket = self._MakeBucket() bucket_name = bucket.name uri = storage_uri('gs://' + bucket_name) # get new bucket cors and make sure it's empty cors = re.sub(r'\s', '', uri.get_cors().to_xml()) self.assertEqual(cors, CORS_EMPTY) # set cors document on new bucket cors_obj = Cors() h = handler.XmlHandler(cors_obj, None) xml.sax.parseString(CORS_DOC, h) 
uri.set_cors(cors_obj) cors = re.sub(r'\s', '', uri.get_cors().to_xml()) self.assertEqual(cors, CORS_DOC) def test_lifecycle_config_bucket(self): """Test setting and getting of lifecycle config on Bucket.""" # create a new bucket bucket = self._MakeBucket() bucket_name = bucket.name # now call get_bucket to see if it's really there bucket = self._GetConnection().get_bucket(bucket_name) # get lifecycle config and make sure it's empty xml = bucket.get_lifecycle_config().to_xml() self.assertEqual(xml, LIFECYCLE_EMPTY) # set lifecycle config lifecycle_config = LifecycleConfig() lifecycle_config.add_rule( 'Delete', None, LIFECYCLE_CONDITIONS_FOR_DELETE_RULE) lifecycle_config.add_rule( 'SetStorageClass', 'NEARLINE', LIFECYCLE_CONDITIONS_FOR_SET_STORAGE_CLASS_RULE) bucket.configure_lifecycle(lifecycle_config) xml = bucket.get_lifecycle_config().to_xml() self.assertEqual(xml, LIFECYCLE_DOC) def test_lifecycle_config_storage_uri(self): """Test setting and getting of lifecycle config with storage_uri.""" # create a new bucket bucket = self._MakeBucket() bucket_name = bucket.name uri = storage_uri('gs://' + bucket_name) # get lifecycle config and make sure it's empty xml = uri.get_lifecycle_config().to_xml() self.assertEqual(xml, LIFECYCLE_EMPTY) # set lifecycle config lifecycle_config = LifecycleConfig() lifecycle_config.add_rule( 'Delete', None, LIFECYCLE_CONDITIONS_FOR_DELETE_RULE) lifecycle_config.add_rule( 'SetStorageClass', 'NEARLINE', LIFECYCLE_CONDITIONS_FOR_SET_STORAGE_CLASS_RULE) uri.configure_lifecycle(lifecycle_config) xml = uri.get_lifecycle_config().to_xml() self.assertEqual(xml, LIFECYCLE_DOC) def test_billing_config_bucket(self): """Test setting and getting of billing config on Bucket.""" # create a new bucket bucket = self._MakeBucket() bucket_name = bucket.name # get billing config and make sure it's empty billing = bucket.get_billing_config() self.assertEqual(bill
iostream> #include <string> #include <fstream> #include <streambuf> #include <nlopt.h> #include <sstream> #include <cmath> #include <boost/compute/core.hpp> #include <boost/compute/algorithm/transform.hpp> #include <boost/compute/algorithm/reduce.hpp> #include <boost/compute/container/vector.hpp> #include <boost/compute/functional/math.hpp> namespace compute = boost::compute; """ def generateGPUJoinSampleCode(i,query,estimator,stats,cu_factor): ts, dv = stats qtype = [] remap = [] #Generate Kernels with open("./%s_kernels.cl" % i,'w') as cf: generatePreamble(cf) cols = Utils.generateInvariantColumns(query) for j,indices in enumerate(cols): qtype.extend([query.tables[j].columns[index].type for index in indices ]) remap.extend([(j,index) for index in indices ]) rangeEstimateFunction(cf) pointEstimateFunction(cf) generateEstimateKernel(cf,"estimate",qtype) with open("./%s_GPUJS.cpp" % i,'w') as cf: generateCIncludes(cf) generateGPUJoinSampleParameterArray(cf,query,estimator,qtype) Utils.generateGPUVectorConverterFunction(cf) Utils.generateUintFileReaderFunction(cf) generateGPUJoinSampleEstimateFunction(cf,query,estimator,qtype) generateGPUJoinSampleTestWrapper(cf,query,estimator,qtype) print >>cf, """ int main( int argc, const char* argv[] ){ parameters p; compute::device device = compute::system::default_device(); p.ctx = compute::context(device); p.queue=compute::command_queue(p.ctx, device); """ print >>cf, """ std::ifstream t("./%s_kernels.cl"); t.exceptions ( std::ifstream::failbit | std::ifstream::badbit ); std::string str((std::istreambuf_iterator<char>(t)), std::istreambuf_iterator<char>()); """ % i #Read table sizes and read columns into memory and transfer to device the GPU print >>cf, " std::stringstream iteration_stream;" print >>cf, " p.iteration = (unsigned int) atoi(argv[2]);" print >>cf, " iteration_stream << \"./iteration\" << std::setw(2) << std::setfill('0') << argv[2];" print >>cf, " p.ss = atoi(argv[1]);" print >> cf, " p.local = 64;" print >> cf, 
" p.global = std::min((size_t) p.ctx.get_device().compute_units()*%s, ((p.ss-1)/p.local+1)*p.local);" % cu_factor print >>cf, " p.ts = %s;" % (ts) for cid,kernel in enumerate(qtype): print >>cf, " std::stringstream s_c%s_stream ;" % (cid) print >>cf, " s_c%s_stream << iteration_stream.str() << \"/jsample_\" << atoi(argv[1]) << \"_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid) print >>cf, " std::string s_c%s_string = s_c%s_stream.str();" % (cid,cid) print >>cf, " unsigned int* s_c%s = readUArrayFromFile(s_c%s_string.c_str());" % (cid,cid) print >>cf, " p.s_c%s = toGPUVector(s_c%s, p.ss, p.ctx, p.queue);" % (cid,cid) print >>cf print >>cf, """ compute::program pr = compute::program::create_with_source(str,p.ctx); try{ std::ostringstream oss; pr.build(oss.str()); } catch(const std::exception& ex){ std::cout << pr.build_log() << std::endl; } """ print >>cf, " p.out = compute::vector<unsigned long>(p.global, p.ctx);" print >>cf, " p.estk = pr.create_kernel(\"estimate\");" print >>cf, " std::string test_cardinality_string = iteration_stream.str() + \"/test_join_true.dump\";" print >>cf, " p.test_cardinality = readUArrayFromFile(test_cardinality_string.c_str());" for cid,ty in enumerate(qtype): if ty == "range": print >>cf, " std::string test_l_c%s_string = iteration_stream.str() + \"/test_join_l_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid) print >>cf, " p.test_l_c%s= readUArrayFromFile(test_l_c%s_string.c_str());" % (cid,cid) print >>cf, " std::string test_u_c%s_string = iteration_stream.str() + \"/test_join_u_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid) print >>cf, " p.test_u_c%s = readUArrayFromFile(test_u_c%s_string.c_str());" % (cid,cid) elif ty == "point": print >>cf, " std::string test_p_c%s_string = iteration_stream.str() + \"/test_join_p_%s_%s.dump\";" % 
(cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid) print >>cf, " p.test_p_c%s = readUArrayFromFile(test_p_c%s_string.c_str());" % (cid,cid) else: raise Exception("I don't know this ctype.") print >>cf print >>cf, " join_test(&p);" print >>cf, "}" def generateGPUJoinSampleParameterArray(f,query,estimator,qtypes): print >>f, """ typedef struct{ compute::command_queue queue; compute::context ctx; """ print >>f, " unsigned int iteration;" print >>f, " size_t ss;" print >>f, " size_t global;" print >>f, " size_t local;" print >>f, " unsigned int ts;" print >>f, " compute::kernel estk;" for cid,kernel in enumerate(qtypes): print >>f, " compute::vector<unsigned int> s_c%s;" % (cid) for cid,kernel in enumerate(qtypes): if kernel == "range": print >>f, " unsigned int* test_l_c%s;" % (cid) print >>f, " unsigned int* test_u_c%s;" % (cid) else: print >>f, " unsigned int* test_p_c%s;" % (cid) print >>f, " compute::vector<unsigned long> out;" print >>f, " unsigned int* test_cardinality;" print >>f, """ } parameters; """ def generateGPUJoinSampleEstimateFunction(f, query, estimator, qtypes): print >> f, "double join_estimate_instance(parameters* p" for cid, qtype in enumerate(qtypes): # Start with computing the invariant contributions if qtype == "range": print >> f, " , unsigned int u_c%s, unsigned int l_c%s" % (cid, cid) else: print >> f, " , unsigned int p_c%s" % (cid) print >> f, "){" print >> f, " p->estk.set_args(", for cid, qtype in enumerate(qtypes): if qtype == "range": print >> f, "p->s_c%s, u_c%s, l_c%s, " %
(cid, cid, cid), else:
print >> f, "p->s_c%s, p_c%s, " % (cid, cid), print >> f, " p->out, (unsigned int) p->ss", print >> f, ");" print >> f, " boost::compute::event ev = p->queue.enqueue_nd_range_kernel(p->estk,1,NULL,&(p->global), &(p->local));" # print >>f, " ev.wait();" print >> f, " unsigned long est = 0;" print >> f, " boost::compute::reduce(p->out.begin(), p->out.begin()+std::min(p->global, p->ss), &est, p->queue);" print >> f, " p->queue.finish();" print >> f, " return est * ((double) p->ts)/p->ss;" # At this point, we need a print >> f, "}" def generateGPUJoinSampleTestWrapper(f,query,estimator,qtypes): print >>f, "double join_test(parameters* p){" print >>f, " double objective = 0.0;" print >>f, " double est = 0.0;" print >>f, " int first = 1;" print >>f, " for(unsigned int i = 0; i < %s; i++){" % estimator.test print >> f, " auto begin = std::chrono::high_resolution_clock::now();" print >>f, " if(first ", for cid, qtype in enumerate(qtypes): if qtype == "range": print >>f, "|| p->test_l_c%s[i] != p->test_l_c%s[i-1] " % (cid,cid), print >>f, "|| p->test_u_c%s[i] != p->test_u_c%s[i-1] " % (cid,cid), else: print >>f, "|| p->test_p_c%s[i] != p->test_p_c%s[i-1] " % (cid,cid), print >>f, "){" if hasattr(estimator, 'look_behind'): if estimator.look_behind: print >> f, " first = 0;" else: print >>f, " first = 0;"
""" Cop
yright (c) 2015 Michael Bright and Bamboo HR LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CO
NDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. test_history_date_created Revision ID: ce9be6e8354c Revises: bf363c3a9ef0 Create Date: 2018-04-30 18:44:54.258839 """ # revision identifiers, used by Alembic. import datetime from sqlalchemy import func revision = 'ce9be6e8354c' down_revision = 'bf363c3a9ef0' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### if 'sqlite' == op.get_context().dialect.name: op.add_column('qa_test_histories', sa.Column('date_created', sa.DateTime(), default=datetime.datetime.utcnow())) else: op.add_column('qa_test_histories', sa.Column('date_created', sa.DateTime(), nullable=False, server_default=func.now(), default=datetime.datetime.utcnow())) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('qa_test_histories', 'date_created') ### end Alembic commands ###
import time


class RecordAccumulator(object):
    """Accumulate records into a pluggable buffer.

    The buffer is flushed by the caller when :meth:`is_ready` reports True,
    which happens either when the underlying buffer says it is full or when
    ``config['buffer_time_limit']`` seconds have elapsed since the first
    record was accepted.
    """

    def __init__(self, buffer_class, config):
        # config must contain 'buffer_time_limit' (seconds).
        self.config = config
        self.buffer_time_limit = config['buffer_time_limit']
        self._buffer_class = buffer_class
        self._reset_buffer()

    def _reset_buffer(self):
        # Fresh empty buffer; a None start time doubles as the
        # "no records accumulated yet" marker used by has_records().
        self._buffer = self._buffer_class(config=self.config)
        self._buffer_started_at = None

    def try_append(self, record):
        """Attempt to accumulate a record. Return False if buffer is full."""
        success = self._buffer.try_append(record)
        # BUG FIX: stamp the start time only for the FIRST accepted record.
        # The original re-stamped it on every append, so a steady trickle of
        # records kept pushing the time-limit deadline forward and a
        # time-based flush could be delayed indefinitely.
        if success and self._buffer_started_at is None:
            self._buffer_started_at = time.time()
        return success

    def is_ready(self):
        """Check whether the buffer is ready."""
        if self._buffer_started_at is None:
            # Nothing accumulated yet.
            return False
        if self._buffer.is_ready():
            # The buffer itself reports it is full.
            return True
        # Otherwise, ready once the time limit since the first record passed.
        elapsed = time.time() - self._buffer_started_at
        return elapsed >= self.buffer_time_limit

    def has_records(self):
        """Check whether the buffer has records."""
        return self._buffer_started_at is not None

    def flush(self):
        """Close the buffer and return it (None if it holds no records)."""
        if self._buffer_started_at is None:
            return
        buf = self._buffer.flush()
        self._reset_buffer()
        return buf
class RcModel:
    """Fallback remote-control model.

    Provides the default paths to the remote-control artwork and button
    positions used when no box-specific model is available.
    """

    # Registry of known RC models; empty in this default implementation.
    RcModels = {}

    def rcIsDefault(self):
        """Report that this is the fallback (default) model."""
        return True

    def getRcFile(self, ext=''):
        """Resolve *ext* to a file path; the default model echoes it back."""
        return ext

    def getRcFolder(self, GetDefault=True):
        """Return the folder holding the RC data files."""
        return 'enigma2/data/'

    def getRcImg(self):
        """Return the path of the remote-control image."""
        return self.getRcFile('enigma2/data/rc.png')

    def getRcPositions(self):
        """Return the path of the button-positions XML file."""
        return self.getRcFile('enigma2/data/rcpositions.xml')

    def getRcLocation(self):
        """Return the base location of the RC data."""
        return self.getRcFile('enigma2/data/')


# Shared module-level singleton used by importers.
rc_model = RcModel()
#!/usr/bin/env python
#
# test_codecmaps_cn.py
#   Codec mapping tests for PRC encodings
#
# $CJKCodecs: test_codecmaps_cn.py,v 1.3 2004/06/19 06:09:55 perky Exp $

from test import test_support
from test import test_multibytecodec_support
import unittest

# Each class below only declares configuration; the actual round-trip
# checks live in TestBase_Mapping.  mapfileurl presumably points at the
# reference mapping table the harness downloads/caches -- confirm against
# test_multibytecodec_support.
class TestGB2312Map(test_multibytecodec_support.TestBase_Mapping,
                    unittest.TestCase):
    encoding = 'gb2312'
    mapfilename = 'EUC-CN.TXT'
    mapfileurl = 'http://people.freebsd.org/~perky/i18n/EUC-CN.TXT'

class TestGBKMap(test_multibytecodec_support.TestBase_Mapping,
                 unittest.TestCase):
    encoding = 'gbk'
    mapfilename = 'CP936.TXT'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/VENDORS/' \
                 'MICSFT/WINDOWS/CP936.TXT'

def test_main():
    """Collect both mapping test classes and run them via test_support."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(TestGB2312Map))
    suite.addTest(unittest.makeSuite(TestGBKMap))
    test_support.run_suite(suite)

# Registers the classes as "skip expected" when their map files are absent.
test_multibytecodec_support.register_skip_expected(TestGB2312Map,
                                                   TestGBKMap)
if __name__ == "__main__":
    test_main()
import time
import json
from datetime import datetime, timedelta
from world import world, res_filename
from bigml.api import HTTP_CREATED
from bigml.api import HTTP_OK
from bigml.api import HTTP_ACCEPTED
from bigml.api import FINISHED
from bigml.api import FAULTY
from bigml.api import get_status

import read_dataset_steps as read


#@step(r'I create a dataset$')
def i_create_a_dataset(step):
    """Create a dataset from the current source and track it in ``world``."""
    resource = world.api.create_dataset(world.source['resource'])
    world.status = resource['code']
    assert world.status == HTTP_CREATED
    world.location = resource['location']
    world.dataset = resource['object']
    # Remember the id so the suite teardown can delete the dataset later.
    world.datasets.append(resource['resource'])


#@step(r'I download the dataset file to "(.*)"$')
def i_export_a_dataset(step, local_file):
    """Download the current dataset into a local file."""
    world.api.download_dataset(world.dataset['resource'],
                               filename=res_filename(local_file))


#@step(r'file "(.*)" is like file "(.*)"$')
def files_equal(step, local_file, data):
    """Assert that two local files have identical contents."""
    contents_local_file = open(res_filename(local_file)).read()
    contents_data = open(res_filename(data)).read()
    assert contents_local_file == contents_data


#@step(r'I create a dataset with "(.*)"')
def i_create_a_dataset_with(step, data="{}"):
    """Create a dataset from the current source with extra creation args.

    ``data`` is a JSON string of dataset creation arguments.
    """
    resource = world.api.create_dataset(world.source['resource'],
                                        json.loads(data))
    world.status = resource['code']
    assert world.status == HTTP_CREATED
    world.location = resource['location']
    world.dataset = resource['object']
    world.datasets.append(resource['resource'])


#@step(r'I wait until the dataset status code is either (\d) or (\d) less than (\d+)')
def wait_until_dataset_status_code_is(step, code1, code2, secs):
    """Poll the dataset until its status is ``code1`` or ``code2``.

    Fails if more than ``secs`` seconds elapse.  Note the final assert
    requires the reached status to be ``code1``, so a ``code2`` (faulty)
    outcome fails the step on purpose.
    """
    start = datetime.utcnow()
    read.i_get_the_dataset(step, world.dataset['resource'])
    status = get_status(world.dataset)
    while (status['code'] != int(code1) and
           status['code'] != int(code2)):
        time.sleep(3)
        # Give up once the allowed waiting time is exceeded.
        assert datetime.utcnow() - start < timedelta(seconds=int(secs))
        read.i_get_the_dataset(step, world.dataset['resource'])
        status = get_status(world.dataset)
    assert status['code'] == int(code1)


#@step(r'I wait until the dataset is ready less than (\d+)')
def the_dataset_is_finished_in_less_than(step, secs):
    """Wait until the dataset reaches FINISHED (FAULTY fails) within ``secs``."""
    wait_until_dataset_status_code_is(step, FINISHED, FAULTY, secs)


#@step(r'I make the dataset public')
def make_the_dataset_public(step):
    """Turn off the dataset's ``private`` flag via the API."""
    resource = world.api.update_dataset(world.dataset['resource'],
                                        {'private': False})
    world.status = resource['code']
    assert world.status == HTTP_ACCEPTED
    world.location = resource['location']
    world.dataset = resource['object']


#@step(r'I get the dataset status using the dataset\'s public url')
def build_local_dataset_from_public_url(step):
    """Re-fetch the dataset through its public URL."""
    world.dataset = world.api.get_dataset("public/%s" %
                                          world.dataset['resource'])


#@step(r'the dataset\'s status is FINISHED')
def dataset_status_finished(step):
    """Assert that the current dataset has finished building."""
    assert get_status(world.dataset)['code'] == FINISHED


#@step(r'I create a dataset extracting a (.*) sample$')
def i_create_a_split_dataset(step, rate):
    """Create a sampled dataset from the current one, keeping the origin."""
    world.origin_dataset = world.dataset
    resource = world.api.create_dataset(world.dataset['resource'],
                                        {'sample_rate': float(rate)})
    world.status = resource['code']
    assert world.status == HTTP_CREATED
    world.location = resource['location']
    world.dataset = resource['object']
    world.datasets.append(resource['resource'])


#@step(r'I compare the datasets\' instances$')
def i_compare_datasets_instances(step):
    """Store the (sample, origin) row counts for a later comparison step."""
    world.datasets_instances = (world.dataset['rows'],
                                world.origin_dataset['rows'])


#@step(r'the proportion of instances between datasets is (.*)$')
def proportion_datasets_instances(step, rate):
    """Assert the sample row count equals origin rows scaled by ``rate``."""
    if (int(world.datasets_instances[1] * float(rate)) ==
            world.datasets_instances[0]):
        assert True
    else:
        assert False, (
            "Instances in split: %s, expected %s" % (
                world.datasets_instances[0],
                int(world.datasets_instances[1] * float(rate))))


#@step(r'I create a dataset associated to centroid "(.*)"')
def i_create_a_dataset_from_cluster(step, centroid_id):
    """Create a dataset holding the instances of one cluster centroid."""
    resource = world.api.create_dataset(
        world.cluster['resource'],
        args={'centroid': centroid_id})
    world.status = resource['code']
    assert world.status == HTTP_CREATED
    world.location = resource['location']
    world.dataset = resource['object']
    world.datasets.append(resource['resource'])


#@step(r'I create a dataset from the cluster and the centroid$')
def i_create_a_dataset_from_cluster_centroid(step):
    """Create a dataset for the centroid currently stored in ``world``."""
    i_create_a_dataset_from_cluster(step, world.centroid['centroid_id'])


#@step(r'the dataset is associated to the centroid "(.*)" of the cluster')
def is_associated_to_centroid_id(step, centroid_id):
    """Assert the cluster maps ``centroid_id`` to the current dataset."""
    cluster = world.api.get_cluster(world.cluster['resource'])
    world.status = cluster['code']
    assert world.status == HTTP_OK
    assert "dataset/%s" % (
        cluster['object']['cluster_datasets'][
            centroid_id]) == world.dataset['resource']


#@step(r'I check that the dataset is created for the cluster and the centroid$')
def i_check_dataset_from_cluster_centroid(step):
    """Check the dataset association for the centroid stored in ``world``."""
    is_associated_to_centroid_id(step, world.centroid['centroid_id'])
"""Command-line launcher for the Chaco-in-PySide demo application."""
from ChacoInPySideUi import *
import sys

if __name__ == '__main__':
    # Announce startup, then hand the argument vector to the UI entry point.
    # (Parenthesized print yields identical output under Python 2 and 3.)
    print("Starting chaco_in_pyside app")
    ChacoInPySideUi_main(sys.argv)
""" Renders css/js files to use config data in config.yml Peter Zujko """ from django.core.management.base import BaseCommand from django.conf import settings from django.template.loader import render_to_string from os import listdir from os.path import isfile, join import os import json import base64 class Command(BaseCommand): petitions_dir = os.path.join(settings.BASE_DIR, "petitions/static") profile_dir = os.path.join(settings.BASE_DIR, "profile/static") def handle(self, *args, **options): CONFIG = settings.CONFIG social = [] # Set icons to base64 for icon in CONFIG['social']['social_links']: data = icon file_loc = settings.BASE_DIR+icon['imgURL'] ext = file_loc.split('.')[1] with open(file_loc, 'rb') as file: data_str = "" if ext == 'svg': data_str = "data:image/svg+xml;utf8;base64," elif ext == 'png': data_str = "data:image/png;base64," data['imgURL'] = data_str + \ base64.b64encode(file.read()).decode("utf-8") social.append(data) petition_file_names = [f for f in listdir( self.petitions_dir) if isfile(join(self.petitions_dir, f))] profile_file_names = [f for f in listdir( self.profile_dir) if isfile(join(self.profile_dir, f))] colors = settings.CONFIG["ui"]["colors"] data_object = { 'name': CONFIG['name'], 'colors': colors, 'header_title': CONFIG['text']['header_title'], 'images': CONFIG['ui']['slideshow_images'], 'social': social, 'default_title': CONFIG['petitions']['default_title'], 'default_body': CONFIG['petitions']['default_body'], 'org': CONFIG['organization'] } # Grab all file names in petitions/static for file in petition_file_names: path = self.petitions_dir + "/" + file template = render_to_string(path, data_object) static_dir = "" # Check file extension ext = file.split(".")[1] if ext == "css": static_dir = os.path.join( settings.BASE_DIR, 'static/css/'+file) elif ext == "js": static_dir = os.path.join(settings.BASE_
DIR, 'static/js/'+file) with open(static_dir, 'w+') as f: f.write(template) for file in profile_file_names: path = self.profile_dir + "/" + file template = render_to_string(path, data_object) static_dir = "" # Check
file extension ext = file.split(".")[1] if ext == "css": static_dir = os.path.join( settings.BASE_DIR, 'static/css/'+file) elif ext == "js": static_dir = os.path.join(settings.BASE_DIR, 'static/js/'+file) with open(static_dir, 'w+') as f: f.write(template) print("Rendered the following " + str(petition_file_names) + str(profile_file_names))
import io
import string
import sys

import pytest
from flexmock import flexmock

from borgmatic.config import validate as module


def test_schema_filename_returns_plausable_path():
    """The packaged schema path should end with the expected file name."""
    schema_path = module.schema_filename()

    assert schema_path.endswith('/schema.yaml')


def mock_config_and_schema(config_yaml, schema_yaml=None):
    '''
    Set up mocks for the given config YAML string and the schema YAML string, or the
    default schema if no schema is provided. The idea is that the code under test
    consumes these mocks when parsing the configuration.
    '''
    config_stream = io.StringIO(config_yaml)
    if schema_yaml is None:
        schema_stream = open(module.schema_filename())
    else:
        schema_stream = io.StringIO(schema_yaml)
    # Patch builtins.open so parse_configuration() reads these in-memory
    # streams instead of touching the filesystem.
    builtins = flexmock(sys.modules['builtins'])
    builtins.should_receive('open').with_args('config.yaml').and_return(config_stream)
    builtins.should_receive('open').with_args('schema.yaml').and_return(schema_stream)


def test_parse_configuration_transforms_file_into_mapping():
    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home
                - /etc

            repositories:
                - hostname.borg

        retention:
            keep_minutely: 60
            keep_hourly: 24
            keep_daily: 7

        consistency:
            checks:
                - repository
                - archives
        '''
    )

    result = module.parse_configuration('config.yaml', 'schema.yaml')

    assert result == {
        'location': {'source_directories': ['/home', '/etc'], 'repositories': ['hostname.borg']},
        'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60},
        'consistency': {'checks': ['repository', 'archives']},
    }


def test_parse_configuration_passes_through_quoted_punctuation():
    # Escape backslash and double-quote so the YAML string literal stays valid.
    escaped_punctuation = string.punctuation.replace('\\', r'\\').replace('"', r'\"')

    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home

            repositories:
                - "{}.borg"
        '''.format(
            escaped_punctuation
        )
    )

    result = module.parse_configuration('config.yaml', 'schema.yaml')

    assert result == {
        'location': {
            'source_directories': ['/home'],
            'repositories': ['{}.borg'.format(string.punctuation)],
        }
    }


def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home

            repositories:
                - hostname.borg
        ''',
        '''
        map:
            location:
                required: true
                map:
                    source_directories:
                        required: true
                        seq:
                            - type: scalar
                    repositories:
                        required: true
                        seq:
                            - type: scalar
        ''',
    )

    module.parse_configuration('config.yaml', 'schema.yaml')


def test_parse_configuration_inlines_include():
    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home

            repositories:
                - hostname.borg

        retention:
            !include include.yaml
        '''
    )
    # The include file is opened separately; mock that read as well.
    builtins = flexmock(sys.modules['builtins'])
    builtins.should_receive('open').with_args('include.yaml').and_return(
        '''
        keep_daily: 7
        keep_hourly: 24
        '''
    )

    result = module.parse_configuration('config.yaml', 'schema.yaml')

    assert result == {
        'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
        'retention': {'keep_daily': 7, 'keep_hourly': 24},
    }


def test_parse_configuration_merges_include():
    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home

            repositories:
                - hostname.borg

        retention:
            keep_daily: 1
            <<: !include include.yaml
        '''
    )
    builtins = flexmock(sys.modules['builtins'])
    builtins.should_receive('open').with_args('include.yaml').and_return(
        '''
        keep_daily: 7
        keep_hourly: 24
        '''
    )

    result = module.parse_configuration('config.yaml', 'schema.yaml')

    # keep_daily from the config wins over the merged include value.
    assert result == {
        'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
        'retention': {'keep_daily': 1, 'keep_hourly': 24},
    }


def test_parse_configuration_raises_for_missing_config_file():
    with pytest.raises(FileNotFoundError):
        module.parse_configuration('config.yaml', 'schema.yaml')


def test_parse_configuration_raises_for_missing_schema_file():
    mock_config_and_schema('')
    # Override the schema mock so opening the schema fails.
    builtins = flexmock(sys.modules['builtins'])
    builtins.should_receive('open').with_args('schema.yaml').and_raise(FileNotFoundError)

    with pytest.raises(FileNotFoundError):
        module.parse_configuration('config.yaml', 'schema.yaml')


def test_parse_configuration_raises_for_syntax_error():
    mock_config_and_schema('foo:\nbar')

    with pytest.raises(ValueError):
        module.parse_configuration('config.yaml', 'schema.yaml')


def test_parse_configuration_raises_for_validation_error():
    mock_config_and_schema(
        '''
        location:
            source_directories: yes

            repositories:
                - hostname.borg
        '''
    )

    with pytest.raises(module.Validation_error):
        module.parse_configuration('config.yaml', 'schema.yaml')


def test_parse_configuration_applies_overrides():
    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home

            repositories:
                - hostname.borg

            local_path: borg1
        '''
    )

    result = module.parse_configuration(
        'config.yaml', 'schema.yaml', overrides=['location.local_path=borg2']
    )

    assert result == {
        'location': {
            'source_directories': ['/home'],
            'repositories': ['hostname.borg'],
            'local_path': 'borg2',
        }
    }


def test_parse_configuration_applies_normalization():
    mock_config_and_schema(
        '''
        location:
            source_directories:
                - /home

            repositories:
                - hostname.borg

            exclude_if_present: .nobackup
        '''
    )

    result = module.parse_configuration('config.yaml', 'schema.yaml')

    # Normalization promotes the scalar exclude_if_present to a list.
    assert result == {
        'location': {
            'source_directories': ['/home'],
            'repositories': ['hostname.borg'],
            'exclude_if_present': ['.nobackup'],
        }
    }
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 27 23:40:50 2018

@author: austin

Scraper for Lianjia "chengjiao" (closed-deal) housing listings in the
Pudong district of Shanghai.  Per-deal attributes are collected into
parallel lists and finally dumped to a timestamped CSV via pandas.
"""

import requests
import re
from bs4 import BeautifulSoup, SoupStrainer
#import matplotlib.pyplot as plt
from fake_useragent import UserAgent
import time, random, sys
import pandas  # pandas for the final CSV export

#ua=UserAgent()  # use a random User-Agent header to look like a human
#headers1={'User-Agent': 'ua.random'}  # use a random User-Agent header to look like a human

# Parallel result columns, one entry per scraped deal.
TotalPrice = []       #Total price
InitialPrice = []     # original asking price ("挂牌" price)
UnitPrice = []        #price per meter
HouseArea = []
HouseHeight = []      # floor description
HouseConfig = []      # room layout, e.g. "2室2厅"
HouseCommunit = []    # community/estate name
HouseLocMajor = []    # district
HouseLocMinor = []    # sub-district (board)
HouseBuildYear = []
HouseDealDate = []
HouseDealCycle = []   # days on market before the deal closed
LinkUrl = []

# SoupStrainers restrict parsing to the fragments we actually need.
StrainerPriceInfo = SoupStrainer('a', attrs={'class': 'nostyle'})
StrainerChengJiaoList = SoupStrainer('ul', attrs={'class': 'listContent'})
StrainerTotalPage = SoupStrainer('div', attrs={'class': 'page-box house-lst-page-box'})  # used to read the current max page count

# Sub-district (board) names and their matching URL path fragments;
# the two lists are index-aligned.
PianQuList = ['北蔡', '碧云', '曹路', '川沙', '大团镇', '合庆', '高行', '高东', '花木',
              '航头', '惠南', '金桥', '金杨', '康桥', '陆家嘴', '老港镇', '临港新城',
              '联洋', '泥城镇', '南码头', '三林', '世博', '书院镇', '塘桥', '唐镇',
              '外高桥', '万祥镇', '潍坊', '宣桥', '新场', '御桥', '杨东', '源深',
              '洋泾', '张江', '祝桥', '周浦']
PianQuLink = ['/chengjiao/beicai/', '/chengjiao/biyun/', '/chengjiao/caolu/',
              '/chengjiao/chuansha/', '/chengjiao/datuanzhen/', '/chengjiao/geqing/',
              '/chengjiao/gaohang/', '/chengjiao/gaodong/', '/chengjiao/huamu/',
              '/chengjiao/hangtou/', '/chengjiao/huinan/', '/chengjiao/jinqiao/',
              '/chengjiao/jinyang/', '/chengjiao/kangqiao/', '/chengjiao/lujiazui/',
              '/chengjiao/laogangzhen/', '/chengjiao/lingangxincheng/',
              '/chengjiao/lianyang/', '/chengjiao/nichengzhen/', '/chengjiao/nanmatou/',
              '/chengjiao/sanlin/', '/chengjiao/shibo/', '/chengjiao/shuyuanzhen/',
              '/chengjiao/tangqiao/', '/chengjiao/tangzhen/', '/chengjiao/waigaoqiao/',
              '/chengjiao/wanxiangzhen/', '/chengjiao/weifang/', '/chengjiao/xuanqiao/',
              '/chengjiao/xinchang/', '/chengjiao/yuqiao1/', '/chengjiao/yangdong/',
              '/chengjiao/yuanshen/', '/chengjiao/yangjing/', '/chengjiao/zhangjiang/',
              '/chengjiao/zhuqiao/', '/chengjiao/zhoupu/']
#PianQuList=[]
#PianQuList.index('唐镇')  #24
#PianQuLink[PianQuList.index('唐镇')]  #'/chengjiao/tangzhen/'

MaxGetPage = 100          # hard cap on pages per board (site limit)
TotalPage = MaxGetPage    # refined per board from the page-data attribute
HouseLocMajorString = '浦东'


def SaveList():
    """Dump all collected columns into a timestamped CSV file."""
    df = pandas.DataFrame({'总价': TotalPrice, '单价': UnitPrice, '房型': HouseConfig, '成交日期': HouseDealDate,
                           '成交周期': HouseDealCycle, '面积': HouseArea, '小区': HouseCommunit, '楼层': HouseHeight,
                           '区': HouseLocMajor, '板块': HouseLocMinor, '初始报价': InitialPrice, '楼龄': HouseBuildYear,
                           '网址': LinkUrl})
    datetimestr = time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))
    df.to_csv(datetimestr + '-' + HouseLocMajorString + '-LianJia.csv')


begin = time.time()
for PianQuGet in PianQuList:
    i = 1
    RetryTimes = 0
    PianQuNum = PianQuList.index(PianQuGet)
    while i <= TotalPage:  # site serves at most 100 pages
        #http://sh.lianjia.com/chengjiao/tangzhen/pg1/
        domain = 'http://sh.lianjia.com' + PianQuLink[PianQuNum] + 'pg' + str(i)
        headers1 = {'User-Agent': UserAgent().random,
                    'Accept-Language': 'zh-CN,zh;q=0.8'}  # random User-Agent to look like a human
        sleeptime = random.randint(10, 20) / 10
        time.sleep(sleeptime)
        res = requests.get(domain, headers=headers1)  # fetch the assembled page URL
        #<ul class="listContent">
        PageNumHtml = BeautifulSoup(res.text, 'html.parser', parse_only=StrainerTotalPage)
        if len(PageNumHtml) == 0:  # got an empty page (likely rate-limited or blocked)
            # PageNumDict = []
            # if len(PageNumDict) == 0:  #25
            if RetryTimes > 10:
                sys.exit("Error to get Page: " + domain)
            else:
                # Back off a little longer on each retry before re-requesting.
                RetryTimes += 1
                sleeptime = random.randint(10, 20) / 10 + RetryTimes
                time.sleep(sleeptime)
                print('Retry after delay ' + str(sleeptime) + ' s :' + domain)
                continue
        RetryTimes = 0
        # NOTE(review): eval() on remote page content is unsafe; the attribute
        # looks like a dict literal — consider json.loads. Left unchanged here.
        PageNumDict = eval(PageNumHtml.div['page-data'])  # e.g. {'totalPage': 25, 'curPage': 1}
        TotalPage = int(PageNumDict['totalPage'])
        if TotalPage > MaxGetPage:
            TotalPage = MaxGetPage
        # Report scraping progress.
        print('已经抓取' + PianQuGet + ' 第' + str(i) + '/' + str(TotalPage) + '页 '
              '耗时: %.1f 分' % ((time.time() - begin) / 60))
        i += 1
        ChengJiaoListHtml = BeautifulSoup(res.text, 'html.parser', parse_only=StrainerChengJiaoList)
        for ListItem in ChengJiaoListHtml.find_all('li'):
            #<div class="title"><a href="https://sh.lianjia.com/chengjiao/107100614568.html" target="_blank">创新佳苑 1室1厅 61.67平米</a></div>
#            try:
            # Skip entries without a concrete deal date.
            if ListItem.div.contents[1].find(class_='dealDate').string == '近30天内成交':
                continue
            else:
                HouseString = []
                HouseString1 = []
                HouseString2 = []
                HouseString3 = []
                LinkUrl.append(ListItem.div.contents[0].a['href'])  # https://sh.lianjia.com/chengjiao/107100614568.html
                HouseString = ListItem.div.contents[0].string.split()  #['金唐公寓', '2室2厅', '89.06平米']
                HouseArea.append(HouseString[2])
                HouseConfig.append(HouseString[1])
                HouseCommunit.append(HouseString[0])
                HouseString1 = ListItem.div.contents[1].div.text.split('|')  #'['南 ', ' 精装\xa0', ' 无电梯']
                HouseDealDate.append(ListItem.div.contents[1].find(class_='dealDate').string)  #'2018.10.24' or '近30天内成交'
                TotalPrice.append(float(ListItem.div.contents[1].find(class_='number').string))  #386
                HouseString2 = ListItem.div.contents[2].contents[0].text.split()  #'中楼层(共6层) 2006年建板楼'
                HouseHeight.append(HouseString2[0])
                HouseBuildYear.append(HouseString2[1])
                UnitPrice.append(int(ListItem.div.find(class_='unitPrice').span.string))  #unitPrice 43342
                #HouseString3 = ListItem.div.find(class_='dealCycleTxt').contents
                HouseLocMinor.append(PianQuList[PianQuNum])
                HouseLocMajor.append(HouseLocMajorString)
                # Original asking price, e.g. '挂牌391万'; blank when absent.
                HouseString = ListItem.div.find(text=re.compile('挂牌'))
                if (HouseString == None):
                    InitialPrice.append(' ')
                else:
                    InitialPrice.append(int(re.findall(r'\d+', HouseString)[0]))
                # Days on market, e.g. '成交周期119天' -> 119; blank when absent.
                HouseString = ListItem.div.find(text=re.compile('成交周期'))
                if (HouseString == None):
                    HouseDealCycle.append(' ')
                else:
                    HouseDealCycle.append(int(re.findall(r'\d+', HouseString)[0]))
#            except:
#                info=sys.exc_info()
#                print(info[0],":",info[1])

SaveList()

#df=pandas.DataFrame({'总价':TotalPrice,'单价':UnitPrice,'房型':HouseConfig,'成交日期':HouseDealDate,
#                     '成交周期':HouseDealCycle,'面积':HouseArea,'小区':HouseCommunit,'楼层':HouseHeight,
#                     '区':HouseLocMajor,'板块':HouseLocMinor,'初始报价':InitialPrice,'楼龄':HouseBuildYear,
#                     '网址':LinkUrl})
#
#datetimestr=time.strftime('%Y-%m-%d-%H-%M-%S',time.localtime(time.time()))
#df.to_csv(datetimestr+'-'+HouseLocMajorString+'-LianJia.csv')
#!/usr/bin/env python

"""
@package ion.agents.data.test.test_moas_dosta
@file ion/agents/data/test_moas_dosta
@author Bill French
@brief End to end testing for moas dosta
"""

__author__ = 'Bill French'

import gevent

from pyon.public import log
from nose.plugins.attrib import attr

from ion.agents.data.test.dataset_test import DatasetAgentTestCase
from ion.services.dm.test.dm_test_case import breakpoint
import unittest

###############################################################################
# Global constants.
###############################################################################


@attr('INT', group='sa')
class HypmDOSTATest(DatasetAgentTestCase):
    """
    Verify dataset agent can harvest data files, parse the data, publish,
    ingest and retrieve stored data.
    """
    def setUp(self):
        # Configure which instrument device and stream this dataset agent
        # test exercises before the base class performs its setup.
        self.test_config.initialize(
            instrument_device_name = 'DOSTA-01',
            preload_scenario= 'GENG,DOSTA',
            stream_name= 'ggldr_dosta_delayed',

            # Uncomment this line to load driver from a local repository
            #mi_repo = '/Users/wfrench/Workspace/code/wfrench/marine-integrations'
        )

        super(HypmDOSTATest, self).setUp()

    def test_parse(self):
        """
        Verify file import and connection ids
        """
        self.assert_initialize()

        # Stage two source files; expected granule count and reference
        # results come from the moas_dosta fixtures.
        self.create_sample_data("moas_dosta/file_1.mrg", "unit_363_2013_245_6_6.mrg")
        self.create_sample_data("moas_dosta/file_2.mrg", "unit_363_2013_245_10_6.mrg")

        granules = self.get_samples(self.test_config.stream_name, 4)
        self.assert_data_values(granules, 'moas_dosta/merged.result.yml')

    def test_large_file(self):
        """
        Verify a large file import with no buffering
        """
        self.assert_initialize()

        self.create_sample_data("moas_dosta/unit_363_2013_199_0_0.mrg", "unit_363_2013_199_0_0.mrg")
        # Give the agent time to pick the file up before checking the queue.
        gevent.sleep(10)
        self.assert_sample_queue_size(self.test_config.stream_name, 1)

        self.create_sample_data("moas_dosta/unit_363_2013_199_1_0.mrg", "unit_363_2013_199_1_0.mrg")
        gevent.sleep(10)
        self.assert_sample_queue_size(self.test_config.stream_name, 2)

        # Large file: drain the expected samples, then the queue must be empty.
        self.create_sample_data("moas_dosta/unit_363_2013_245_6_6.mrg", "unit_363_2013_245_6_6.mrg")
        self.get_samples(self.test_config.stream_name, 171, 180)
        self.assert_sample_queue_size(self.test_config.stream_name, 0)

    def test_capabilities(self):
        self.assert_agent_capabilities()
# -*- coding: utf-8 -*-

# Copyright(C) 2010-2014 Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.

from weboob.tools.browser2.page import HTMLPage, method, ListElement, ItemElement
from weboob.tools.browser2.filters import Env, CleanText, Regexp, Field, DateTime, Map, Attr
from weboob.capabilities.gauge import Gauge, GaugeMeasure, GaugeSensor
from weboob.capabilities.base import NotAvailable, NotLoaded

import re

__all__ = ['ListPage', 'HistoryPage']


class ListPage(HTMLPage):
    """Page listing river gauges; each anchor carries its data in JS handlers."""

    @method
    class get_rivers_list(ListElement):
        item_xpath = ".//a[@onmouseout='pegelaus()']"

        class item(ItemElement):
            klass = Gauge

            # Maps forecast arrow image names to a trend label.
            forecasts = {'pf_gerade.png': u'stable',
                         'pf_unten.png': u'Go down',
                         'pf_oben.png': u'Go up',
                         }

            # Maps alarm icon file names to an alarm-level label.
            # NOTE(review): "as4.gig" looks like a typo for "as4.gif" —
            # confirm against the site's actual asset names.
            alarmlevel = {"as1.gif": u"Alarmstufe 1", "as2.gif": u"Alarmstufe 2",
                          "as3.gif": u"Alarmstufe 3", "as4.gig": u"Alarmstufe 4",
                          "qua_grau.gif": u"No alarm function",
                          "p_gruen.gif": u"", "qua_weiss.gif": u"no data",
                          "as0.gif": u"", "MNW.gif": u""}

            obj_id = CleanText(Env('id'))
            obj_name = CleanText(Env('name'), "'")
            obj_city = Regexp(Field('name'), '^([^\s]+).*')
            obj_object = Env('object')

            def parse(self, el):
                """Unpack the JS 'pegelein(...)' argument list into env values."""
                div = el.getparent()
                # Alarm icon name: everything after the last path separator.
                img = Regexp(Attr('.//img', 'src'), "(.*?)/(.*)", "\\2")(div)

                # The onmouseover attribute holds a comma-separated argument
                # list; strip the call syntax and split on the quote-comma
                # boundaries to recover the individual fields.
                data = unicode(el.attrib['onmouseover']) \
                    .strip('pegelein(').strip(')').replace(",'", ",").split("',")
                self.env['id'] = data[7].strip()
                self.env['name'] = data[0]
                self.env['object'] = data[1]
                self.env['datetime'] = data[2]
                self.env['levelvalue'] = data[3]
                self.env['flowvalue'] = data[4]
                self.env['forecast'] = data[5]
                self.env['alarm'] = img

            def add_sensor(self, sensors, name, unit, value, forecast, alarm, date):
                """Build one GaugeSensor with its latest measure and append it."""
                sensor = GaugeSensor("%s-%s" % (self.obj.id, name.lower()))
                sensor.name = name
                sensor.unit = unit
                sensor.forecast = forecast
                lastvalue = GaugeMeasure()
                lastvalue.alarm = alarm
                try:
                    lastvalue.level = float(value)
                except ValueError:
                    # Non-numeric reading (e.g. missing data placeholder).
                    lastvalue.level = NotAvailable
                lastvalue.date = date
                sensor.lastvalue = lastvalue
                sensor.history = NotLoaded
                sensor.gaugeid = self.obj.id
                sensors.append(sensor)

            def obj_sensors(self):
                """Create the Level and Flow sensors sharing date/forecast/alarm."""
                sensors = []
                # Date arrives as "dd.mm.yyyy hh:mm"; reorder to ISO-like form.
                lastdate = DateTime(Regexp(Env('datetime'),
                                           r'(\d+)\.(\d+)\.(\d+) (\d+):(\d+)',
                                           r'\3-\2-\1 \4:\5',
                                           default=NotAvailable))(self)
                forecast = Map(Env('forecast'), self.forecasts,
                               default=NotAvailable)(self)
                alarm = Map(Env('alarm'), self.alarmlevel, default=u'')(self)
                self.add_sensor(sensors, u"Level", u"cm",
                                self.env['levelvalue'], forecast, alarm, lastdate)
                self.add_sensor(sensors, u"Flow", u"m3/s",
                                self.env['flowvalue'], forecast, alarm, lastdate)
                return sensors


class HistoryPage(HTMLPage):
    """Page with a gauge's historical measurements laid out in a table."""

    @method
    class iter_history(ListElement):
        item_xpath = '//table[@width="215"]/tr'

        class item(ItemElement):
            klass = GaugeMeasure

            # Measurement rows start with a "dd.mm.yyyy hh:mm" timestamp.
            verif = re.compile("\d\d.\d\d.\d+ \d\d:\d\d")

            def condition(self):
                # Keep only rows whose first cell is a timestamp.
                return self.verif.match(self.el[0].text_content())

            obj_date = DateTime(Regexp(CleanText('.'),
                                       r'(\d+)\.(\d+)\.(\d+) (\d+):(\d+)',
                                       r'\3-\2-\1 \4:\5'))

            # Column order in the history table: date, Level, Flow.
            sensor_types = [u'Level', u'Flow']

            def obj_level(self):
                # Pick the column matching the sensor being iterated.
                index = self.sensor_types.index(self.env['sensor'].name) + 1
                try:
                    return float(self.el[index].text_content())
                except ValueError:
                    return NotAvailable

            # TODO: history.alarm
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Alter Article.category to a 20-char CharField allowing blank values."""

    dependencies = [
        ('geonewsapi', '0016_auto_20151201_1517'),
    ]

    operations = [
        migrations.AlterField(
            model_name='article',
            name='category',
            field=models.CharField(max_length=20, blank=True),
        ),
    ]