repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
stevenjoelbrey/PMFutures
Python/average6HourlyData.py
1
14668
#!/usr/bin/env python2 ############################################################################### # ------------------------- Description --------------------------------------- ############################################################################### # This script will be used to generate daily met fields from hourly nc files. # The 6-houly data to average live in /barnes-scratch/sbrey/era_interim_nc_6_hourly # Follows ---------------------------------------- # - get_ERA_Interim_data.py # Precedes # - merge_yearly_nc.py # TODO: Handle fg10 (wind gust) daily value creation. ############################################################################### # ---------------------- Set analysis variables-------------------------------- ############################################################################### import sys import cesm_nc_manager as cnm print 'Number of arguments:', len(sys.argv), 'arguments.' print 'Argument List:', str(sys.argv) if len(sys.argv) != 1: print 'Using arguments passed via command line.' hourlyVAR = str(sys.argv[1]) # e.g. 'z' startYear = int(sys.argv[2]) endYear = int(sys.argv[3]) else: # Development environment. Set variables by manually here. 
hourlyVAR = 'fg10' startYear = 1992 endYear = 2016 drive = cnm.getDrive() dataDir = drive + "era_interim_nc_6_hourly" outputDir = drive + "era_interim_nc_daily" print '----------------------------------------------------------------------' print 'Working on variable: ' + hourlyVAR + ' for years: ' + str(startYear) +\ '-'+ str(endYear) print '----------------------------------------------------------------------' # Import the required modules import os import numpy as np from mpl_toolkits.basemap import Basemap, cm from netCDF4 import Dataset import matplotlib.pyplot as plt import numpy.ma as ma from datetime import date from datetime import timedelta import matplotlib.ticker as tkr import datetime import time as timer import os.path # Start the timer on this code timeStart = timer.time() # Loop over the selected years if startYear == 'all': years = ['all'] else: years = np.arange(startYear, endYear+1) ii = 0 # for counting total iterations for year in years: year = str(year) print '---------------------------------------------------------------------' print 'Working on : ' + year print '---------------------------------------------------------------------' # Load 6-hourly data HourlyFile = os.path.join(dataDir, hourlyVAR + "_" + year + ".nc") print HourlyFile nc = Dataset(HourlyFile, 'r') VAR = nc.variables[hourlyVAR] time = nc.variables['time'] time_hour = np.array(time[:], dtype=int) lon = nc.variables['longitude'] lat = nc.variables['latitude'] # Some variables live on (t, level, lat, lon) grids, others (t, lat, lon) # Find out which one using dimension keys # e.g. 
[u'longitude', u'latitude', u'time'] for 'sp' # [u'longitude', u'latitude', u'level', u'time'] for 'z' dims = nc.dimensions.keys() if len(dims) == 4: level = nc.variables['level'] ####################################################################### # Handle date from hourly time dimensions ####################################################################### if time.units == 'hours since 1900-01-01 00:00:0.0': # For time datetime array t0 = datetime.datetime(year=1900, month=1, day=1,\ hour=0, minute=0, second=0) # For simply getting dates date0 = date(year=1900, month=1, day=1) else: raise ValueError('Unknown time origin! Code will not work.') # Create arrays to store datetime objects t = [] dates = [] yearList = [] monthList = [] hourList = [] for i in range(len(time_hour)): dt = timedelta(hours=time_hour[i]) date_new = date0 + dt t_new = t0 + dt year_new = t_new.year t.append(t_new) dates.append(date_new) yearList.append(year_new) monthList.append(t_new.month) hourList.append(t_new.hour) t = np.array(t) dates = np.array(dates) dateYears = np.array(yearList) dateMonths = np.array(monthList) dateHours = np.array(hourList) # NOTE: Accumulation parameters (total precip (tp) and e) represent # NOTE: accumulated values from intitialization time. For these data those # NOTE: times are 00:00:00 and 12:00:00. I downloaded the data in 12 hour steps. # NOTE: So for these parameters, each time in the data represents a total for the # NOTE: previous 12 hours. This is why time series start at 12:00:00 for # NOTE: these fields and 00:00:00 for analysis fields. # NOTE: For maximum in time period quantity, e.g. wind gust (fg10), the time step # NOTE: is three hourly and starts at 03:00:00. The units of wind gusts are # NOTE: "10 meter wind gusts since previous post processing". 
# Get all values numpy array into workspace print '---------------------------------------------------' print 'Loading the large variable array into the workspace' print '---------------------------------------------------' VAR_array = VAR[:] print 'Working on the large loop averaging 6-hourly values for each day' if (hourlyVAR != 'tp') & (hourlyVAR != 'e') & (hourlyVAR != 'fg10'): # these are the analysis variables that always require averages for a # given calendar date. print '---------------------------------------------------------------------' print 'Working with an analysis parameter whos first hour is 0. ' print '---------------------------------------------------------------------' if dateHours[0] != 0: raise ValueError('The first hour of analysis field was not 0Z.') # Create structure to save daily data and do the averaging unique_dates = np.unique(dates) nDays = len(unique_dates) # might have to do a - 1 here now. Or search for feb 29th and set length based on that. nLon = len(lon) nLat = len(lat) # Create array to store daily averaged data, based on dimensions if len(dims) == 4: nLevel= len(level) dailyVAR = np.zeros((nDays, nLevel, nLat, nLon)) else: dailyVAR = np.zeros((nDays, nLat, nLon)) for i in range(nDays): # find unique day to work on indexMask = np.where(dates == unique_dates[i])[0] if len(dims) == 4: VAR_array_subset = VAR_array[indexMask, :, :, :] day_time_mean = np.mean(VAR_array_subset, 0) dailyVAR[i, :, : , :] = day_time_mean else: # Non-precip variables of this size need an average. These are analysis variables VAR_array_subset = VAR_array[indexMask, :, :] day_time_mean = np.mean(VAR_array_subset, 0) dailyVAR[i, :, : ] = day_time_mean elif (hourlyVAR == 'fg10'): print "Handling precip. Going to follow ecmwf guidelines for getting daily max value. " # Create structure to save daily data and do the max value finding unique_dates = np.unique(dates) nDays = len(unique_dates) - 1 # last day (3 hour chunk) goes into next year. 
Discard that data nLon = len(lon) nLat = len(lat) dailyVAR = np.zeros((nDays, nLat, nLon)) for i in range(nDays): indexMask = np.where(dates == unique_dates[i])[0] VAR_array_subset = VAR_array[indexMask, :, :] # find 0:6 index of maximum value in each lat lon coordinate position array dailyMaxValArray = np.amax(VAR_array_subset, axis=0) # TODO: ensure that this is the max value for each coord! dailyVAR[i,:,:] = dailyMaxValArray elif (dateHours[0] == 12) & (dateHours[-1]) == 0 & (dateYears[-1] > int(year)): print '---------------------------------------------------------------------' print 'Working with an accumulation parameter with start time hour == 12. ' print '---------------------------------------------------------------------' # These strange time conditions are all true when we are working with # tp and e accumulation forecast fields. # Precip units of m per 12 hour window. Requires a sum NOT an average. # Need matching dates noon and next dates midnight to get a days total. # e.g. total precip for Jan 1 is sum of tp at 01-01-year 12:00:00 AND # 01-02-year 00:00:00. # the last date in the time array will be the next year, since midnight or # 0Z. # In order for the code to work for these variables the same as the # analysis fields, we are going to subtract 12 hours from each time # element. t = t - timedelta(hours=12) nTime = len(t) if nTime % 2 != 0: raise ValueError("There is something wrong. Somehow there is a date without two 23 hour chuncks. ") nDays = len(t)/2 nLon = len(lon) nLat = len(lat) # To make a mask of unique dates, we need to make timetime.datetime objects # a more simple datetime.date object. dates = [] for i in range(nDays*2): dates.append(t[i].date()) dates = np.array(dates) unique_dates = np.unique(dates) # Now that these strange time contrains have been met, we know we can # sum the values of every other. Create an array to store daily data in. 
dailyVAR = np.zeros((nDays, nLat, nLon)) for j in range(nDays): # The hours that match our date. indexMask = np.where(dates == unique_dates[j])[0] if (dateHours[indexMask[0]] == 12) & (dateHours[indexMask[1]] == 0): # Subset the dataframe to include the two twelve hour slices we # want. timeSlice = VAR[indexMask, :, :] # This statement makes sure we are really getting a time slice with # a dimension of 2, e.g. 2 12 hour segments. if timeSlice.shape[0] == 2: dailySum = np.sum(timeSlice, axis=0) dailyVAR[j,:,:] = dailySum else: raise ValueError("The time size was not two deep in time dim.") # if the sum of the dailyVAR array for this date is still zero, # no data was assigned. if np.sum(dailyVAR[j, :,:]) == 0: raise ValueError("No data was assigned to day index j = " + str(j)) meansCompleteTime = timer.time() dt = (meansCompleteTime - timeStart) / 60. print '---------------------------------------------------------------------' print 'It took ' + str(dt) + ' minutes to create daily averages.' print '---------------------------------------------------------------------' # Check to see if the total amount of precip was conserved. if hourlyVAR == 'tp': originalSum = np.sum(VAR, axis=0) dailySum = np.sum(dailyVAR, axis=0) dtp = np.abs(originalSum - dailySum) # ideally dtp is all zero. With float rounding issues it could be slightly # different. This matrix needs to be examined. 
maxDiff = np.max(dtp) print '------------------------------------------------------------------' print 'Maximum annual difference in rainfall is: ' + str(maxDiff) print '------------------------------------------------------------------' if maxDiff > 1e-10: raise ValueError("Total rainfall depth in meters not conserved within tolerance") #print 'The min value of dailyVAR is: ' + str(np.min(dailyVAR)) #print 'The max value of dailyVAR is: ' + str(np.max(dailyVAR)) ############################################################################### # Write the new netcdf data with the exact same formatting as the # data read here. # Create the Save name and make sure that this file does not exist ############################################################################### outputFile = os.path.join(outputDir, hourlyVAR + "_" + year + ".nc") # See if the file already exists # os.path.isfile(outPutSavename): print '----------------------------------------------------------------------' print 'outputFile used:' print outputFile print '----------------------------------------------------------------------' ############################################################################### # Write the daily averaged netCDF data ############################################################################### ncFile = Dataset(outputFile, 'w', format='NETCDF4') ncFile.description = 'Daily average of 6 hourly data' ncFile.location = 'Global' ncFile.createDimension('time', nDays ) ncFile.createDimension('latitude', nLat ) ncFile.createDimension('longitude', nLon ) # Create variables on the dimension they live on if len(dims) == 4: ncFile.createDimension('level', nLevel ) dailyVAR_ = ncFile.createVariable(hourlyVAR,'f4', ('time','level','latitude','longitude')) # While here create the level dimesion level_ = ncFile.createVariable('level', 'i4', ('level',)) level_.units = level.units else: dailyVAR_ = ncFile.createVariable(hourlyVAR, 'f4',('time','latitude','longitude')) # Assign the 
same units as the loaded file to the main variable dailyVAR_.units = VAR.units # Create time variable time_ = ncFile.createVariable('time', 'i4', ('time',)) time_.units = time.units time_.calendar = time.calendar # create lat variable latitude_ = ncFile.createVariable('latitude', 'f4', ('latitude',)) latitude_.units = lat.units # create longitude variable longitude_ = ncFile.createVariable('longitude', 'f4', ('longitude',)) longitude_.units = lon.units # Write the actual data to these dimensions dailyVAR_[:] = dailyVAR latitude_[:] = lat[:] longitude_[:] = lon[:] if len(dims) == 4: level_[:] = level[:] # NOTE: In general, every 4th element, starting at 0th, since there # NOTE: are 4 analysis snapshots space by 6 hours for any given date. # NOTE: However, tp (total precip) only has two chunks of 12 hourly data per # NOTE: day so this needs to be handled seperately. Because tp and e fields # NOTE: time were adjusted by minus 12 hours, all daily mean or sum fields # NOTE: have a time stamp of the 0Z for the date of the data. # NOTE: fg divides days into 3 hour analysis periods and they start at 03:00:00Z # NOTE: for a given date. I save the largest wind gust of those 8 3 hour analysis # NOTE: periods. So I need to subtract 3 from time for fg in order to get date # NOTE: for a day to be saved as a consistent 00:00:00Z time for a given date. if (hourlyVAR == 'tp') | (hourlyVAR == 'e'): time = time[:] - 12 elif (hourlyVAR == 'fg10'): time = time[:] - 3 tstep = len(time) / nDays time_[:] = time[0::tstep] # The difference in each time_[:] element in hours must be 24 or # this is not working. if np.unique(np.diff(time_[:])) != 24: ValueError('Difference in hours between days not all 24! Broken.') # The data is written, close the ncFile and move on to the next year! ncFile.close() dt = (timer.time() - timeStart) / 60. print '----------------------------------------------------------------------' print 'It took ' + str(dt) + ' minutes to run entire script.' 
print '----------------------------------------------------------------------'
mit
scottmcclary1/sp17-i524
project/S17-IR-P013/code/weather_data_analysis/run/wda_mapper.py
19
2000
#!/usr/bin/env python import sys import logging import iu.i524.S17IRP013.hadoop.hbase_to_hdfs as h2h DEFAULT_STATION_ID = 'DST:IND000DEF' logging.basicConfig(format = '%(asctime)s %(message)s',\ datefmt = '%m/%d/%Y %I:%M:%S %p',\ filename = 'wda_app.log',\ level=logging.DEBUG) def get_default_result(): result = dict() result['TMAX'] = [DEFAULT_STATION_ID,0] result['PRCP'] = [DEFAULT_STATION_ID,0] result['TAVG'] = [DEFAULT_STATION_ID,0] result['TMIN'] = [DEFAULT_STATION_ID,100] return result def compare_props(prop,result): logging.info(prop) if prop['parameter'] == 'TMAX': if float(prop['value']) > float(result['TMAX'][1]) or result['TMAX'][1] == 0: result['TMAX'][0] = prop['station_id'] result['TMAX'][1] = prop['value'] elif prop['parameter'] == 'TAVG': if float(prop['value']) > float(result['TAVG'][1]) or result['TAVG'][1] == 0: result['TAVG'][0] = prop['station_id'] result['TAVG'][1] = prop['value'] elif prop['parameter'] == 'PRCP': if float(prop['value']) > float(result['PRCP'][1]) or result['PRCP'][1] == 0: result['PRCP'][0] = prop['station_id'] result['PRCP'][1] = prop['value'] elif prop['parameter'] == 'TMIN': if float(prop['value']) < float(result['TMIN'][1]) or result['TMIN'][1] == 0: result['TMIN'][0] = prop['station_id'] result['TMIN'][1] = prop['value'] return result # input comes from STDIN (standard input) index = 0 for year_month in sys.stdin: year_month = year_month.strip() data_list = h2h.find_by_id(row_key=year_month) tmax = 70 tmin=-70 tavg=0 prcp=0 result = get_default_result() ## Run analysis for prop in data_list: result = compare_props(prop=prop,result=result) #print '%s\t%s' % (index, str(result)) print str(result)
apache-2.0
tizianasellitto/servo
tests/wpt/css-tests/tools/html5lib/html5lib/ihatexml.py
1727
16581
from __future__ import absolute_import, division, unicode_literals import re import warnings from .constants import DataLossWarning baseChar = """ [#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | [#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | [#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | [#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | [#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | [#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | [#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | [#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | [#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | [#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | [#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | [#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | [#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | [#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | [#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | [#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | [#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | [#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | [#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | [#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | [#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | [#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | [#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | [#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | [#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | [#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | [#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | 
[#x0CE0-#x0CE1] | [#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | [#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | [#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | #x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | #x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | #x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | [#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | [#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | #x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | [#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | [#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | [#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | [#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | [#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | #x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | [#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | [#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | [#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | [#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" combiningCharacter = """ [#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | [#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | [#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | [#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | #x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | [#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | [#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | #x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | [#x0A4B-#x0A4D] | [#x0A70-#x0A71] | 
[#x0A81-#x0A83] | #x0ABC | [#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | #x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | [#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | [#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | [#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | [#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | [#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | #x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | [#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | #x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | [#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | [#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | #x3099 | #x309A""" digit = """ [#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | [#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | [#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | [#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" extender = """ #x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | #[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" letter = " | ".join([baseChar, ideographic]) # Without the name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, extender]) nameFirst = " | ".join([letter, "_"]) reChar = re.compile(r"#x([\d|A-F]{4,4})") reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]") def charStringToList(chars): charRanges = [item.strip() for item in chars.split(" | ")] rv = [] for item in charRanges: foundMatch = False for regexp in (reChar, reCharRange): match = regexp.match(item) if match is not None: rv.append([hexToInt(item) for item in match.groups()]) if len(rv[-1]) == 1: rv[-1] = rv[-1] * 2 foundMatch = True break if not foundMatch: assert len(item) == 1 rv.append([ord(item)] * 2) rv = 
normaliseCharList(rv) return rv def normaliseCharList(charList): charList = sorted(charList) for item in charList: assert item[1] >= item[0] rv = [] i = 0 while i < len(charList): j = 1 rv.append(charList[i]) while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: rv[-1][1] = charList[i + j][1] j += 1 i += j return rv # We don't really support characters above the BMP :( max_unicode = int("FFFF", 16) def missingRanges(charList): rv = [] if charList[0] != 0: rv.append([0, charList[0][0] - 1]) for i, item in enumerate(charList[:-1]): rv.append([item[1] + 1, charList[i + 1][0] - 1]) if charList[-1][1] != max_unicode: rv.append([charList[-1][1] + 1, max_unicode]) return rv def listToRegexpStr(charList): rv = [] for item in charList: if item[0] == item[1]: rv.append(escapeRegexp(chr(item[0]))) else: rv.append(escapeRegexp(chr(item[0])) + "-" + escapeRegexp(chr(item[1]))) return "[%s]" % "".join(rv) def hexToInt(hex_str): return int(hex_str, 16) def escapeRegexp(string): specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}", "[", "]", "|", "(", ")", "-") for char in specialCharacters: string = string.replace(char, "\\" + char) return string # output from the above nonXmlNameBMPRegexp = 
re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48
\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0
d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # Simpler things nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\-\'()+,./:=?;!*#@$_%]") class InfosetFilter(object): replacementRegexp = re.compile(r"U[\dA-F]{5,5}") def __init__(self, replaceChars=None, dropXmlnsLocalName=False, dropXmlnsAttrNs=False, preventDoubleDashComments=False, preventDashAtCommentEnd=False, replaceFormFeedCharacters=True, preventSingleQuotePubid=False): self.dropXmlnsLocalName = dropXmlnsLocalName self.dropXmlnsAttrNs = dropXmlnsAttrNs self.preventDoubleDashComments = preventDoubleDashComments self.preventDashAtCommentEnd = preventDashAtCommentEnd self.replaceFormFeedCharacters = replaceFormFeedCharacters self.preventSingleQuotePubid = preventSingleQuotePubid self.replaceCache = {} def coerceAttribute(self, name, namespace=None): if self.dropXmlnsLocalName and name.startswith("xmlns:"): warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) return None elif (self.dropXmlnsAttrNs and namespace == "http://www.w3.org/2000/xmlns/"): warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) return None else: 
return self.toXmlName(name) def coerceElement(self, name, namespace=None): return self.toXmlName(name) def coerceComment(self, data): if self.preventDoubleDashComments: while "--" in data: warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) data = data.replace("--", "- -") return data def coerceCharacters(self, data): if self.replaceFormFeedCharacters: for i in range(data.count("\x0C")): warnings.warn("Text cannot contain U+000C", DataLossWarning) data = data.replace("\x0C", " ") # Other non-xml characters return data def coercePubid(self, data): dataOutput = data for char in nonPubidCharRegexp.findall(data): warnings.warn("Coercing non-XML pubid", DataLossWarning) replacement = self.getReplacementCharacter(char) dataOutput = dataOutput.replace(char, replacement) if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: warnings.warn("Pubid cannot contain single quote", DataLossWarning) dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) return dataOutput def toXmlName(self, name): nameFirst = name[0] nameRest = name[1:] m = nonXmlNameFirstBMPRegexp.match(nameFirst) if m: warnings.warn("Coercing non-XML name", DataLossWarning) nameFirstOutput = self.getReplacementCharacter(nameFirst) else: nameFirstOutput = nameFirst nameRestOutput = nameRest replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) for char in replaceChars: warnings.warn("Coercing non-XML name", DataLossWarning) replacement = self.getReplacementCharacter(char) nameRestOutput = nameRestOutput.replace(char, replacement) return nameFirstOutput + nameRestOutput def getReplacementCharacter(self, char): if char in self.replaceCache: replacement = self.replaceCache[char] else: replacement = self.escapeChar(char) return replacement def fromXmlName(self, name): for item in set(self.replacementRegexp.findall(name)): name = name.replace(item, self.unescapeChar(item)) return name def escapeChar(self, char): replacement = "U%05X" % ord(char) 
self.replaceCache[char] = replacement return replacement def unescapeChar(self, charcode): return chr(int(charcode[1:], 16))
mpl-2.0
pepeportela/edx-platform
common/djangoapps/third_party_auth/middleware.py
8
1044
"""Middleware classes for third_party_auth.""" from social_django.middleware import SocialAuthExceptionMiddleware from . import pipeline class ExceptionMiddleware(SocialAuthExceptionMiddleware): """Custom middleware that handles conditional redirection.""" def get_redirect_uri(self, request, exception): # Fall back to django settings's SOCIAL_AUTH_LOGIN_ERROR_URL. redirect_uri = super(ExceptionMiddleware, self).get_redirect_uri(request, exception) # Safe because it's already been validated by # pipeline.parse_query_params. If that pipeline step ever moves later # in the pipeline stack, we'd need to validate this value because it # would be an injection point for attacker data. auth_entry = request.session.get(pipeline.AUTH_ENTRY_KEY) # Check if we have an auth entry key we can use instead if auth_entry and auth_entry in pipeline.AUTH_DISPATCH_URLS: redirect_uri = pipeline.AUTH_DISPATCH_URLS[auth_entry] return redirect_uri
agpl-3.0
dmargala/qusp
examples/analysis_prep.py
1
5041
#!/usr/bin/env python import argparse import numpy as np import numpy.ma as ma import h5py import qusp import matplotlib as mpl mpl.use('Agg') import matplotlib.pyplot as plt # def sum_chunk(x, chunk_size, axis=-1): # shape = x.shape # if axis < 0: # axis += x.ndim # shape = shape[:axis] + (-1, chunk_size) + shape[axis+1:] # x = x.reshape(shape) # return x.sum(axis=axis+1) def combine_pixels(loglam, flux, ivar, num_combine, trim_front=True): """ Combines neighboring pixels of inner most axis using an ivar weighted average """ shape = flux.shape num_pixels = flux.shape[-1] assert len(loglam) == num_pixels ndim = flux.ndim new_shape = shape[:ndim-1] + (-1, num_combine) num_leftover = num_pixels % num_combine s = slice(num_leftover,None) if trim_front else slice(0,-num_leftover) flux = flux[...,s].reshape(new_shape) ivar = ivar[...,s].reshape(new_shape) loglam = loglam[s].reshape(-1, num_combine) flux, ivar = ma.average(flux, weights=ivar, axis=ndim, returned=True) loglam = ma.average(loglam, axis=1) return loglam, flux, ivar def main(): # parse command-line arguments parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter) ## targets to fit parser.add_argument("-i", "--input", type=str, default=None, help="target list") parser.add_argument("-o", "--output", type=str, default=None, help="output file name") parser.add_argument("--num-combine", type=int, default=3, help="number of pixels to combine") parser.add_argument("--wave-min", type=float, default=3600, help="minimum observed wavelength") args = parser.parse_args() # import data skim = h5py.File(args.input, 'r') skim_norm = skim['norm'][:][:,np.newaxis] assert not np.any(skim_norm <= 0) skim_flux = np.ma.MaskedArray(skim['flux'][:], mask=skim['mask'][:])/skim_norm skim_ivar = np.ma.MaskedArray(skim['ivar'][:], mask=skim['mask'][:])*skim_norm*skim_norm skim_loglam = skim['loglam'][:] skim_wave = np.power(10.0, skim_loglam) good_waves = skim_wave > args.wave_min print 'Combining 
input pixels...' loglam, flux, ivar = combine_pixels(skim_loglam[good_waves], skim_flux[:,good_waves], skim_ivar[:,good_waves], args.num_combine) wave = np.power(10.0, loglam) outfile = h5py.File(args.output+'.hdf5', 'w') # save pixel flux, ivar, and mask outfile.create_dataset('flux', data=flux.data, compression="gzip") outfile.create_dataset('ivar', data=ivar.data, compression="gzip") outfile.create_dataset('mask', data=ivar.mask, compression="gzip") # save uniform wavelength grid outfile.create_dataset('loglam', data=loglam, compression="gzip") # save redshifts from input target list outfile.copy(skim['z'], 'z') # save additional quantities outfile.copy(skim['norm'], 'norm') # save meta data outfile.copy(skim['meta'], 'meta') # copy attrs for attr_key in skim.attrs: outfile.attrs[attr_key] = skim.attrs[attr_key] outfile.attrs['coeff0'] = loglam[0] outfile.attrs['coeff1'] = args.num_combine*1e-4 outfile.attrs['max_fid_index'] = len(loglam) outfile.attrs['wave_min'] = args.wave_min outfile.close() # verify combined pixels print 'Computing mean and variance of input pixels...' skim_flux_mean = np.ma.average(skim_flux, axis=0, weights=skim_ivar) skim_flux_var = np.ma.average((skim_flux-skim_flux_mean)**2, axis=0, weights=skim_ivar) print 'Computing mean and variance of combined pixels...' flux_mean = np.ma.average(flux, axis=0, weights=ivar) flux_var = np.ma.average((flux-flux_mean)**2, axis=0, weights=ivar) print 'Making comparison plots...' plt.figure(figsize=(12,9)) plt.plot(skim_wave, skim_flux_mean, label='Pipeline pixels') plt.plot(wave, flux_mean, label='Analysis pixels') plt.ylim(0.5, 1.5) plt.ylabel(r'Normalized Flux Mean (arb. 
units)') plt.xlabel(r'Observed Wavelength ($\AA$)') plt.legend() plt.grid() plt.savefig(args.output+'-flux-mean.png', dpi=100, bbox_inches='tight') plt.close() plt.figure(figsize=(12,9)) plt.plot(skim_wave, skim_flux_var, label='Pipeline pixels') plt.plot(wave, flux_var, label='Analysis pixels') plt.ylim(0, 0.45) plt.ylabel('Normalized Flux Variance (arb. units)') plt.xlabel(r'Observed Wavelength ($\AA$)') plt.legend() plt.grid() plt.savefig(args.output+'-flux-var.png', dpi=100, bbox_inches='tight') plt.close() plt.figure(figsize=(12,9)) plt.plot(skim_wave, np.sum(skim_ivar, axis=0), label='Pipeline pixels') plt.plot(wave, np.sum(ivar, axis=0), label='Analysis pixels') plt.ylabel('Inv. Var. Total (arb. units)') plt.xlabel(r'Observed Wavelength ($\AA$)') plt.legend() plt.grid() plt.savefig(args.output+'-ivar-total.png', dpi=100, bbox_inches='tight') plt.close() if __name__ == '__main__': main()
mit
scottferg/web-console
django/templatetags/cache.py
12
2469
from django.template import Library, Node, TemplateSyntaxError, Variable, VariableDoesNotExist
from django.template import resolve_variable
from django.core.cache import cache
from django.utils.encoding import force_unicode
from django.utils.http import urlquote
from django.utils.hashcompat import md5_constructor

register = Library()

class CacheNode(Node):
    """Template node that renders its children once per unique cache key
    and serves the cached fragment until the timeout expires."""

    def __init__(self, nodelist, expire_time_var, fragment_name, vary_on):
        # nodelist: parsed template nodes between {% cache %} and {% endcache %}
        # expire_time_var: template variable (or literal) resolved at render time
        # fragment_name: second tag argument, namespaces the cache key
        # vary_on: remaining tag arguments; each distinct resolved combination
        #          produces a separate cache entry
        self.nodelist = nodelist
        self.expire_time_var = Variable(expire_time_var)
        self.fragment_name = fragment_name
        self.vary_on = vary_on

    def render(self, context):
        # Resolve the timeout lazily so it may come from the context.
        try:
            expire_time = self.expire_time_var.resolve(context)
        except VariableDoesNotExist:
            raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
        try:
            expire_time = int(expire_time)
        except (ValueError, TypeError):
            raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % expire_time)
        # Build a unicode key for this fragment and all vary-on's.
        # The vary-on values are urlquoted then hashed so arbitrary values
        # produce a memcached-safe key.
        args = md5_constructor(u':'.join([urlquote(resolve_variable(var, context)) for var in self.vary_on]))
        cache_key = 'template.cache.%s.%s' % (self.fragment_name, args.hexdigest())
        value = cache.get(cache_key)
        if value is None:
            # Cache miss: render the fragment and store it.
            value = self.nodelist.render(context)
            cache.set(cache_key, value, expire_time)
        return value

def do_cache(parser, token):
    """
    This will cache the contents of a template fragment for a given amount
    of time.

    Usage::

        {% load cache %}
        {% cache [expire_time] [fragment_name] %}
            .. some expensive processing ..
        {% endcache %}

    This tag also supports varying by a list of arguments::

        {% load cache %}
        {% cache [expire_time] [fragment_name] [var1] [var2] .. %}
            .. some expensive processing ..
        {% endcache %}

    Each unique set of arguments will result in a unique cache entry.
    """
    # Consume everything up to {% endcache %}; the end tag itself is discarded.
    nodelist = parser.parse(('endcache',))
    parser.delete_first_token()
    tokens = token.contents.split()
    if len(tokens) < 3:
        raise TemplateSyntaxError(u"'%r' tag requires at least 2 arguments." % tokens[0])
    return CacheNode(nodelist, tokens[1], tokens[2], tokens[3:])
register.tag('cache', do_cache)
bsd-3-clause
chdecultot/erpnext
erpnext/hr/doctype/payroll_period/payroll_period.py
6
2514
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import date_diff, getdate, formatdate, cint
from frappe.model.document import Document
from erpnext.hr.utils import get_holidays_for_employee

class PayrollPeriod(Document):
	"""Payroll Period doctype: a named date range per company. Periods of
	the same company must not overlap."""

	def validate(self):
		self.validate_dates()
		self.validate_overlap()

	def validate_dates(self):
		# End date must be on or after the start date.
		if getdate(self.start_date) > getdate(self.end_date):
			frappe.throw(_("End date can not be less than start date"))

	def validate_overlap(self):
		"""Throw if another period of the same company overlaps this one.

		Overlap = either endpoint of the other period falls inside this
		range, or the other period fully encloses it.
		"""
		query = """
			select name
			from `tab{0}`
			where name != %(name)s
			and company = %(company)s
			and (start_date between %(start_date)s and %(end_date)s \
				or end_date between %(start_date)s and %(end_date)s \
				or (start_date < %(start_date)s and end_date > %(end_date)s))
			"""
		if not self.name:
			# hack! if name is null, it could cause problems with !=
			self.name = "New "+self.doctype

		overlap_doc = frappe.db.sql(query.format(self.doctype),{
				"start_date": self.start_date,
				"end_date": self.end_date,
				"name": self.name,
				"company": self.company
			}, as_dict = 1)

		if overlap_doc:
			# Build a message with a link to the conflicting period.
			msg = _("A {0} exists between {1} and {2} (").format(self.doctype,
				formatdate(self.start_date), formatdate(self.end_date)) \
				+ """ <b><a href="#Form/{0}/{1}">{1}</a></b>""".format(self.doctype, overlap_doc[0].name) \
				+ _(") for {0}").format(self.company)
			frappe.throw(msg)

def get_payroll_period_days(start_date, end_date, employee):
	"""Return (period_name, working_days, actual_no_of_days) for the payroll
	period of the employee's company that fully contains [start_date,
	end_date], or (False, False, False) if none does.

	Working days exclude the employee's holidays unless HR Settings'
	"include_holidays_in_total_working_days" is set.
	"""
	company = frappe.db.get_value("Employee", employee, "company")
	payroll_period = frappe.db.sql("""
		select name, start_date, end_date
		from `tabPayroll Period`
		where company=%(company)s
		and (
			(%(start_date)s between start_date and end_date)
			and (%(end_date)s between start_date and end_date)
		)""", {
			'company': company,
			'start_date': start_date,
			'end_date': end_date
		})

	if len(payroll_period) > 0:
		# Inclusive day count of the period itself.
		actual_no_of_days = date_diff(getdate(payroll_period[0][2]), getdate(payroll_period[0][1])) + 1
		working_days = actual_no_of_days
		if not cint(frappe.db.get_value("HR Settings", None, "include_holidays_in_total_working_days")):
			holidays = get_holidays_for_employee(employee, getdate(payroll_period[0][1]), getdate(payroll_period[0][2]))
			working_days -= len(holidays)
		return payroll_period[0][0], working_days, actual_no_of_days
	return False, False, False
gpl-3.0
KyoungRan/Django_React_ex
Django_React_Workshop-mbrochh/django/myvenv/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/gb2312prober.py
2994
1681
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import GB2312DistributionAnalysis from .mbcssm import GB2312SMModel class GB2312Prober(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(GB2312SMModel) self._mDistributionAnalyzer = GB2312DistributionAnalysis() self.reset() def get_charset_name(self): return "GB2312"
mit
julian-seward1/servo
tests/wpt/web-platform-tests/tools/manifest/utils.py
115
1374
import platform
import os

from six import BytesIO


def rel_path_to_url(rel_path, url_base="/"):
    """Turn a relative OS path into a URL underneath *url_base*."""
    assert not os.path.isabs(rel_path)
    prefix = url_base
    # Normalize the base so it both starts and ends with a slash.
    if prefix[0] != "/":
        prefix = "/" + prefix
    if prefix[-1] != "/":
        prefix = prefix + "/"
    return prefix + rel_path.replace(os.sep, "/")


def from_os_path(path):
    """Convert an OS-separated path to the normalised (/-separated) form."""
    assert os.path.sep == "/" or platform.system() == "Windows"
    converted = path.replace(os.path.sep, "/")
    if "\\" in converted:
        raise ValueError("path contains \\ when separator is %s" % os.path.sep)
    return converted


def to_os_path(path):
    """Convert a normalised (/-separated) path to the OS-separated form."""
    assert os.path.sep == "/" or platform.system() == "Windows"
    if "\\" in path:
        raise ValueError("normalised path contains \\")
    return path.replace("/", os.path.sep)


class ContextManagerBytesIO(BytesIO):
    """BytesIO usable as a context manager (closes itself on exit)."""

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        self.close()


class cached_property(object):
    """Descriptor caching the wrapped function's result on the instance.

    The computed value is stored in the instance __dict__ under the
    function's name, and the name is also recorded in the instance's
    __cached_properties__ set.
    """

    def __init__(self, func):
        self.func = func
        self.__doc__ = getattr(func, "__doc__")
        self.name = func.__name__

    def __get__(self, obj, cls=None):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self

        instance_dict = obj.__dict__
        if self.name not in instance_dict:
            # First access: compute, cache, and record the property name.
            instance_dict[self.name] = self.func(obj)
            instance_dict.setdefault("__cached_properties__", set()).add(self.name)
        return instance_dict[self.name]
mpl-2.0
mathjazz/pontoon
pontoon/contributors/urls.py
2
2700
"""URL configuration for contributor profiles, settings, notifications and
related AJAX/API endpoints."""
from django.urls import path, register_converter
from django.urls.converters import StringConverter
from django.views.generic import RedirectView

from . import views


class EmailConverter(StringConverter):
    # Matches an email address in a URL path segment.
    regex = r"[\w.%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}"


class UsernameConverter(StringConverter):
    # Matches a Django username (letters, digits and @/./+/-/_).
    regex = r"[\w.@+-]+"


register_converter(EmailConverter, "email")
register_converter(UsernameConverter, "username")

urlpatterns = [
    # Legacy: Redirect to /contributors/email
    path(
        "contributor/<email:email>/",
        RedirectView.as_view(url="/contributors/%(email)s/", permanent=True),
    ),
    # List contributors
    path(
        "contributors/",
        views.ContributorsView.as_view(),
        name="pontoon.contributors",
    ),
    # Contributor profile by email
    path(
        "contributors/<email:email>/",
        views.contributor_email,
        name="pontoon.contributors.contributor.email",
    ),
    # Contributor timeline
    path(
        "contributors/<username:username>/timeline/",
        views.contributor_timeline,
        name="pontoon.contributors.contributor.timeline",
    ),
    # Contributor profile by username
    path(
        "contributors/<username:username>/",
        views.contributor_username,
        name="pontoon.contributors.contributor.username",
    ),
    # Current user's profile
    path("profile/", views.profile, name="pontoon.contributors.profile"),
    # Current user's settings
    path("settings/", views.settings, name="pontoon.contributors.settings"),
    # Current user's notifications
    path(
        "notifications/",
        views.notifications,
        name="pontoon.contributors.notifications",
    ),
    # Mark current user's notifications as read
    path(
        "notifications/mark-all-as-read/",
        views.mark_all_notifications_as_read,
        name="pontoon.contributors.notifications.mark.all.as.read",
    ),
    # API: Toggle user profile attribute
    path(
        "api/v1/user/<username:username>/",
        views.toggle_user_profile_attribute,
        name="pontoon.contributors.toggle_user_profile_attribute",
    ),
    # AJAX: Save custom homepage
    path(
        "save-custom-homepage/",
        views.save_custom_homepage,
        name="pontoon.contributors.save_custom_homepage",
    ),
    # AJAX: Save preferred source locale
    path(
        "save-preferred-source-locale/",
        views.save_preferred_source_locale,
        name="pontoon.contributors.save_preferred_source_locale",
    ),
    # AJAX: Dismiss Add-On Promotion
    path(
        "dismiss-addon-promotion/",
        views.dismiss_addon_promotion,
        name="pontoon.contributors.dismiss_addon_promotion",
    ),
]
bsd-3-clause
Nikea/VisTrails
examples/vtk_examples/IO/flamingo.py
6
1623
#!/usr/bin/env python # This example demonstrates the use of vtk3DSImporter. # vtk3DSImporter is used to load 3D Studio files. Unlike writers, # importers can load scenes (data as well as lights, cameras, actors # etc.). Importers will either generate an instance of vtkRenderWindow # and/or vtkRenderer or will use the ones you specify. import vtk from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() # Create the importer and read a file importer = vtk.vtk3DSImporter() importer.ComputeNormalsOn() importer.SetFileName(VTK_DATA_ROOT + "/Data/iflamigm.3ds") importer.Read() # Here we let the importer create a renderer and a render window for # us. We could have also create and assigned those ourselves like so: # renWin = vtk.vtkRenderWindow() # importer.SetRenderWindow(renWin) # Assign an interactor. # We have to ask the importer for it's render window. # renWin = importer.GetRenderWindow() ren = importer.GetRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # Set the render window's size renWin.SetSize(300, 300) # Set some properties on the renderer. # We have to ask the importer for it's renderer. # ren = importer.GetRenderer() ren.SetBackground(0.1, 0.2, 0.4) # Position the camera: # change view up to +z camera = ren.GetActiveCamera() camera.SetPosition(0, 1, 0) camera.SetFocalPoint(0, 0, 0) camera.SetViewUp(0, 0, 1) # let the renderer compute good position and focal point ren.ResetCamera() camera.Dolly(1.4) ren.ResetCameraClippingRange() iren.Initialize() renWin.Render() iren.Start()
bsd-3-clause
Omegaphora/external_chromium_org
tools/perf/measurements/image_decoding.py
28
4020
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from metrics import power
from telemetry.core.platform import tracing_category_filter
from telemetry.core.platform import tracing_options
from telemetry.page import page_test
from telemetry.timeline import model
from telemetry.value import scalar


class ImageDecoding(page_test.PageTest):
  """Measures average image decode and load time per page.

  Clears the renderer image cache before navigation, records a trace while
  the page decodes its images, then averages the decode trace events and
  reports them (plus a power metric) as scalar values.
  """

  def __init__(self):
    super(ImageDecoding, self).__init__()
    self._power_metric = None

  def CustomizeBrowserOptions(self, options):
    # gpuBenchmarking JS hooks (used below) require this flag.
    options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
    power.PowerMetric.CustomizeBrowserOptions(options)

  def WillStartBrowser(self, platform):
    self._power_metric = power.PowerMetric(platform)

  def WillNavigateToPage(self, page, tab):
    # Clear the image cache so every decode is measured, then start the
    # power metric and tracing before the page loads.
    tab.ExecuteJavaScript("""
        if (window.chrome &&
            chrome.gpuBenchmarking &&
            chrome.gpuBenchmarking.clearImageCache) {
          chrome.gpuBenchmarking.clearImageCache();
        }
    """)
    self._power_metric.Start(page, tab)

    options = tracing_options.TracingOptions()
    options.enable_chrome_trace = True

    # FIXME: Remove the timeline category when impl-side painting is on
    # everywhere.
    category_filter = tracing_category_filter.TracingCategoryFilter(
        'disabled-by-default-devtools.timeline')

    # FIXME: Remove webkit.console when blink.console lands in chromium and
    # the ref builds are updated. crbug.com/386847
    # FIXME: Remove the devtools.timeline category when impl-side painting is
    # on everywhere.
    categories = [
        'blink',
        'devtools.timeline',
        'webkit.console',
        'blink.console'
    ]
    for c in categories:
      category_filter.AddIncludedCategory(c)
    tab.browser.platform.tracing_controller.Start(options, category_filter)

  def StopBrowserAfterPage(self, browser, page):
    # Restart the browser when the cache-clearing hook is unavailable,
    # otherwise cached decodes would pollute the next page's results.
    return not browser.tabs[0].ExecuteJavaScript("""
        window.chrome &&
            chrome.gpuBenchmarking &&
            chrome.gpuBenchmarking.clearImageCache;
    """)

  def ValidateAndMeasurePage(self, page, tab, results):
    timeline_data = tab.browser.platform.tracing_controller.Stop()
    timeline_model = model.TimelineModel(timeline_data)
    self._power_metric.Stop(page, tab)
    self._power_metric.AddResults(tab, results)

    def _IsDone():
      # NOTE(review): relies on the test page defining `isDone` — confirm
      # against the page set.
      return tab.EvaluateJavaScript('isDone')

    decode_image_events = timeline_model.GetAllEventsOfName(
        'ImageFrameGenerator::decode')
    # FIXME: Remove this when impl-side painting is on everywhere.
    if not decode_image_events:
      decode_image_events = timeline_model.GetAllEventsOfName('Decode Image')

    # If it is a real image page, then store only the last-minIterations
    # decode tasks.
    if (hasattr(page,
                'image_decoding_measurement_limit_results_to_min_iterations')
        and page.image_decoding_measurement_limit_results_to_min_iterations):
      assert _IsDone()
      min_iterations = tab.EvaluateJavaScript('minIterations')
      decode_image_events = decode_image_events[-min_iterations:]

    durations = [d.duration for d in decode_image_events]
    assert durations, 'Failed to find image decode trace events.'

    image_decoding_avg = sum(durations) / len(durations)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'ImageDecoding_avg', 'ms', image_decoding_avg,
        description='Average decode time for images in 4 different '
                    'formats: gif, png, jpg, and webp. The image files are '
                    'located at chrome/test/data/image_decoding.'))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'ImageLoading_avg', 'ms',
        tab.EvaluateJavaScript('averageLoadingTimeMs()')))

  def CleanUpAfterPage(self, page, tab):
    # Tracing may still be running if ValidateAndMeasurePage never ran
    # (e.g. the page failed); stop it so the next page starts clean.
    tracing_controller = tab.browser.platform.tracing_controller
    if tracing_controller.is_tracing_running:
      tracing_controller.Stop()
bsd-3-clause
zwpaper/shadowsocks
shadowsocks/daemon.py
694
5602
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from __future__ import absolute_import, division, print_function, \
    with_statement

import os
import sys
import logging
import signal
import time

from shadowsocks import common, shell

# this module is ported from ShadowVPN daemon.c


def daemon_exec(config):
    """Dispatch the configured daemon command (start/stop/restart).

    Reads 'daemon', 'pid-file' and 'log-file' from config. No-op when the
    config has no 'daemon' key. POSIX only.
    """
    if 'daemon' in config:
        if os.name != 'posix':
            raise Exception('daemon mode is only supported on Unix')
        command = config['daemon']
        if not command:
            command = 'start'
        pid_file = config['pid-file']
        log_file = config['log-file']
        if command == 'start':
            daemon_start(pid_file, log_file)
        elif command == 'stop':
            daemon_stop(pid_file)
            # always exit after daemon_stop
            sys.exit(0)
        elif command == 'restart':
            daemon_stop(pid_file)
            daemon_start(pid_file, log_file)
        else:
            raise Exception('unsupported daemon command %s' % command)


def write_pid_file(pid_file, pid):
    """Write the pid to pid_file while holding an exclusive lock.

    Returns 0 on success, -1 if the file cannot be opened or another
    instance already holds the lock (i.e. the daemon is already running).
    The lock is intentionally kept for the process lifetime.
    """
    import fcntl
    import stat

    try:
        fd = os.open(pid_file, os.O_RDWR | os.O_CREAT,
                     stat.S_IRUSR | stat.S_IWUSR)
    except OSError as e:
        shell.print_exception(e)
        return -1
    # Don't leak the pid-file descriptor into child processes.
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    assert flags != -1
    flags |= fcntl.FD_CLOEXEC
    r = fcntl.fcntl(fd, fcntl.F_SETFD, flags)
    assert r != -1
    # There is no platform independent way to implement fcntl(fd, F_SETLK, &fl)
    # via fcntl.fcntl. So use lockf instead
    try:
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET)
    except IOError:
        # Lock held by a running instance: report its pid if readable.
        r = os.read(fd, 32)
        if r:
            logging.error('already started at pid %s' % common.to_str(r))
        else:
            logging.error('already started')
        os.close(fd)
        return -1
    os.ftruncate(fd, 0)
    os.write(fd, common.to_bytes(str(pid)))
    return 0


def freopen(f, mode, stream):
    """Redirect `stream` (e.g. sys.stdout) to file `f`, like C freopen()."""
    oldf = open(f, mode)
    oldfd = oldf.fileno()
    newfd = stream.fileno()
    os.close(newfd)
    os.dup2(oldfd, newfd)


def daemon_start(pid_file, log_file):
    """Fork into the background, write the pid file and redirect output.

    The parent blocks (up to 5s) until the child signals success (SIGTERM,
    exit 0) or failure (SIGINT, exit 1).
    """

    def handle_exit(signum, _):
        if signum == signal.SIGTERM:
            sys.exit(0)
        sys.exit(1)

    signal.signal(signal.SIGINT, handle_exit)
    signal.signal(signal.SIGTERM, handle_exit)

    # fork only once because we are sure parent will exit
    pid = os.fork()
    assert pid != -1

    if pid > 0:
        # parent waits for its child
        time.sleep(5)
        sys.exit(0)

    # child signals its parent to exit
    ppid = os.getppid()
    pid = os.getpid()
    if write_pid_file(pid_file, pid) != 0:
        os.kill(ppid, signal.SIGINT)
        sys.exit(1)

    # Detach from the controlling terminal/session.
    os.setsid()
    signal.signal(signal.SIGHUP, signal.SIG_IGN)

    print('started')
    os.kill(ppid, signal.SIGTERM)

    sys.stdin.close()
    try:
        freopen(log_file, 'a', sys.stdout)
        freopen(log_file, 'a', sys.stderr)
    except IOError as e:
        shell.print_exception(e)
        sys.exit(1)


def daemon_stop(pid_file):
    """Stop the daemon recorded in pid_file and remove the file.

    Sends SIGTERM, then polls (up to ~10s) until the process is gone.
    Exits 0 when the daemon is provably not running, 1 on failure.
    """
    import errno
    try:
        with open(pid_file) as f:
            buf = f.read()
            pid = common.to_str(buf)
            if not buf:
                logging.error('not running')
    except IOError as e:
        shell.print_exception(e)
        if e.errno == errno.ENOENT:
            # always exit 0 if we are sure daemon is not running
            logging.error('not running')
            return
        sys.exit(1)
    pid = int(pid)
    if pid > 0:
        try:
            os.kill(pid, signal.SIGTERM)
        except OSError as e:
            if e.errno == errno.ESRCH:
                logging.error('not running')
                # always exit 0 if we are sure daemon is not running
                return
            shell.print_exception(e)
            sys.exit(1)
    else:
        logging.error('pid is not positive: %d', pid)

    # sleep for maximum 10s
    for i in range(0, 200):
        try:
            # query for the pid
            os.kill(pid, 0)
        except OSError as e:
            if e.errno == errno.ESRCH:
                # Process has exited.
                break
        time.sleep(0.05)
    else:
        logging.error('timed out when stopping pid %d', pid)
        sys.exit(1)
    print('stopped')
    os.unlink(pid_file)


def set_user(username):
    """Drop privileges to `username` (uid, gid and supplementary groups).

    No-op when username is None or we already run as that uid. Raises
    KeyError if the user does not exist; setuid fails later if not root.
    """
    if username is None:
        return

    import pwd
    import grp
    try:
        pwrec = pwd.getpwnam(username)
    except KeyError:
        logging.error('user not found: %s' % username)
        raise
    user = pwrec[0]
    uid = pwrec[2]
    gid = pwrec[3]

    cur_uid = os.getuid()
    if uid == cur_uid:
        return
    if cur_uid != 0:
        logging.error('can not set user as nonroot user')
        # will raise later

    # inspired by supervisor
    if hasattr(os, 'setgroups'):
        groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]
        groups.insert(0, gid)
        os.setgroups(groups)
    # Order matters: gid must be set while we still have root.
    os.setgid(gid)
    os.setuid(uid)
apache-2.0
achang97/YouTunes
lib/python2.7/site-packages/youtube_dl/extractor/medialaan.py
20
10387
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
    ExtractorError,
    int_or_none,
    parse_duration,
    try_get,
    unified_timestamp,
    urlencode_postdata,
)


class MedialaanIE(InfoExtractor):
    """Extractor for Medialaan sites (vtm.be, q2.be, vtmkzoom.be).

    Handles two page flavours: public "clips" (no login) and full-episode
    "VOD" pages, which require a Medialaan account authenticated through
    the Gigya accounts API.
    """

    _VALID_URL = r'''(?x)
                    https?://
                        (?:www\.|nieuws\.)?
                        (?:
                            (?P<site_id>vtm|q2|vtmkzoom)\.be/
                            (?:
                                video(?:/[^/]+/id/|/?\?.*?\baid=)|
                                (?:[^/]+/)*
                            )
                        )
                        (?P<id>[^/?#&]+)
                    '''
    _NETRC_MACHINE = 'medialaan'
    # Public Gigya site API key used for the login request.
    _APIKEY = '3_HZ0FtkMW_gOyKlqQzW5_0FHRC7Nd5XpXJZcDdXY4pk5eES2ZWmejRW5egwVm4ug-'
    # Fallback mapping from URL site id to the VOD API app_id.
    _SITE_TO_APP_ID = {
        'vtm': 'vtm_watch',
        'q2': 'q2',
        'vtmkzoom': 'vtmkzoom',
    }
    _TESTS = [{
        # vod
        'url': 'http://vtm.be/video/volledige-afleveringen/id/vtm_20170219_VM0678361_vtmwatch',
        'info_dict': {
            'id': 'vtm_20170219_VM0678361_vtmwatch',
            'ext': 'mp4',
            'title': 'Allemaal Chris afl. 6',
            'description': 'md5:4be86427521e7b07e0adb0c9c554ddb2',
            'timestamp': 1487533280,
            'upload_date': '20170219',
            'duration': 2562,
            'series': 'Allemaal Chris',
            'season': 'Allemaal Chris',
            'season_number': 1,
            'season_id': '256936078124527',
            'episode': 'Allemaal Chris afl. 6',
            'episode_number': 6,
            'episode_id': '256936078591527',
        },
        'params': {
            'skip_download': True,
        },
        'skip': 'Requires account credentials',
    }, {
        # clip
        'url': 'http://vtm.be/video?aid=168332',
        'info_dict': {
            'id': '168332',
            'ext': 'mp4',
            'title': '"Veronique liegt!"',
            'description': 'md5:1385e2b743923afe54ba4adc38476155',
            'timestamp': 1489002029,
            'upload_date': '20170308',
            'duration': 96,
        },
    }, {
        # vod
        'url': 'http://vtm.be/video/volledige-afleveringen/id/257107153551000',
        'only_matching': True,
    }, {
        # vod
        'url': 'http://vtm.be/video?aid=163157',
        'only_matching': True,
    }, {
        # vod
        'url': 'http://www.q2.be/video/volledige-afleveringen/id/2be_20170301_VM0684442_q2',
        'only_matching': True,
    }, {
        # clip
        'url': 'http://vtmkzoom.be/k3-dansstudio/een-nieuw-seizoen-van-k3-dansstudio',
        'only_matching': True,
    }, {
        # http/s redirect
        'url': 'https://vtmkzoom.be/video?aid=45724',
        'info_dict': {
            'id': '257136373657000',
            'ext': 'mp4',
            'title': 'K3 Dansstudio Ushuaia afl.6',
        },
        'params': {
            'skip_download': True,
        },
        'skip': 'Requires account credentials',
    }, {
        # nieuws.vtm.be
        'url': 'https://nieuws.vtm.be/stadion/stadion/genk-nog-moeilijk-programma',
        'only_matching': True,
    }]

    def _real_initialize(self):
        # Login is deferred until a VOD page actually needs it.
        self._logged_in = False

    def _login(self):
        """Authenticate against the Gigya accounts API using netrc or
        command-line credentials and store the session tokens."""
        username, password = self._get_login_info()
        if username is None:
            self.raise_login_required()

        auth_data = {
            'APIKey': self._APIKEY,
            'sdk': 'js_6.1',
            'format': 'json',
            'loginID': username,
            'password': password,
        }

        auth_info = self._download_json(
            'https://accounts.eu1.gigya.com/accounts.login', None,
            note='Logging in', errnote='Unable to log in',
            data=urlencode_postdata(auth_data))

        error_message = auth_info.get('errorDetails') or auth_info.get('errorMessage')
        if error_message:
            raise ExtractorError(
                'Unable to login: %s' % error_message, expected=True)

        # Tokens required by the VOD API requests below.
        self._uid = auth_info['UID']
        self._uid_signature = auth_info['UIDSignature']
        self._signature_timestamp = auth_info['signatureTimestamp']

        self._logged_in = True

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id, site_id = mobj.group('id', 'site_id')

        webpage = self._download_webpage(url, video_id)

        # Player configuration is embedded as an escaped JSON string.
        config = self._parse_json(
            self._search_regex(
                r'videoJSConfig\s*=\s*JSON\.parse\(\'({.+?})\'\);',
                webpage, 'config', default='{}'), video_id,
            transform_source=lambda s: s.replace(
                '\\\\', '\\').replace(r'\"', '"').replace(r"\'", "'"))

        # A vod id marks a full episode that needs authentication.
        vod_id = config.get('vodId') or self._search_regex(
            (r'\\"vodId\\"\s*:\s*\\"(.+?)\\"',
             r'<[^>]+id=["\']vod-(\d+)'),
            webpage, 'video_id', default=None)

        # clip, no authentication required
        if not vod_id:
            player = self._parse_json(
                self._search_regex(
                    r'vmmaplayer\(({.+?})\);', webpage, 'vmma player',
                    default=''),
                video_id, transform_source=lambda s: '[%s]' % s, fatal=False)
            if player:
                video = player[-1]
                # Some clips only carry the URL scheme here and redirect
                # elsewhere; re-dispatch through this extractor.
                if video['videoUrl'] in ('http', 'https'):
                    return self.url_result(video['url'], MedialaanIE.ie_key())
                info = {
                    'id': video_id,
                    'url': video['videoUrl'],
                    'title': video['title'],
                    'thumbnail': video.get('imageUrl'),
                    'timestamp': int_or_none(video.get('createdDate')),
                    'duration': int_or_none(video.get('duration')),
                }
            else:
                # No player object: fall back to the HTML5 media markup.
                info = self._parse_html5_media_entries(
                    url, webpage, video_id, m3u8_id='hls')[0]
                info.update({
                    'id': video_id,
                    'title': self._html_search_meta('description', webpage),
                    'duration': parse_duration(self._html_search_meta('duration', webpage)),
                })
        # vod, authentication required
        else:
            if not self._logged_in:
                self._login()

            settings = self._parse_json(
                self._search_regex(
                    r'jQuery\.extend\(Drupal\.settings\s*,\s*({.+?})\);',
                    webpage, 'drupal settings', default='{}'),
                video_id)

            # Look a value up in the Drupal settings, falling back to a raw
            # regex search over the page.
            def get(container, item):
                return try_get(
                    settings, lambda x: x[container][item],
                    compat_str) or self._search_regex(
                    r'"%s"\s*:\s*"([^"]+)' % item, webpage, item,
                    default=None)

            app_id = get('vod', 'app_id') or self._SITE_TO_APP_ID.get(site_id, 'vtm_watch')
            sso = get('vod', 'gigyaDatabase') or 'vtm-sso'

            data = self._download_json(
                'http://vod.medialaan.io/api/1.0/item/%s/video' % vod_id,
                video_id, query={
                    'app_id': app_id,
                    'user_network': sso,
                    'UID': self._uid,
                    'UIDSignature': self._uid_signature,
                    'signatureTimestamp': self._signature_timestamp,
                })

            formats = self._extract_m3u8_formats(
                data['response']['uri'], video_id, entry_protocol='m3u8_native',
                ext='mp4', m3u8_id='hls')

            self._sort_formats(formats)

            info = {
                'id': vod_id,
                'formats': formats,
            }

            # Optional metadata API: enriches the entry with series/season/
            # episode information when an api key is exposed on the page.
            api_key = get('vod', 'apiKey')
            channel = get('medialaanGigya', 'channel')
            if api_key:
                videos = self._download_json(
                    'http://vod.medialaan.io/vod/v2/videos', video_id, fatal=False,
                    query={
                        'channels': channel,
                        'ids': vod_id,
                        'limit': 1,
                        'apikey': api_key,
                    })
                if videos:
                    video = try_get(
                        videos, lambda x: x['response']['videos'][0], dict)
                    if video:
                        # Shadows the outer get() on purpose: looks up the
                        # metadata record instead of the Drupal settings.
                        def get(container, item, expected_type=None):
                            return try_get(
                                video, lambda x: x[container][item],
                                expected_type)

                        def get_string(container, item):
                            return get(container, item, compat_str)

                        info.update({
                            'series': get_string('program', 'title'),
                            'season': get_string('season', 'title'),
                            'season_number': int_or_none(get('season', 'number')),
                            'season_id': get_string('season', 'id'),
                            'episode': get_string('episode', 'title'),
                            'episode_number': int_or_none(get('episode', 'number')),
                            'episode_id': get_string('episode', 'id'),
                            'duration': int_or_none(
                                video.get('duration')) or int_or_none(
                                video.get('durationMillis'), scale=1000),
                            'title': get_string('episode', 'title'),
                            'description': get_string('episode', 'text'),
                            'timestamp': unified_timestamp(get_string(
                                'publication', 'begin')),
                        })

        # Last-resort title/description extraction shared by both branches.
        if not info.get('title'):
            info['title'] = try_get(
                config, lambda x: x['videoConfig']['title'],
                compat_str) or self._html_search_regex(
                r'\\"title\\"\s*:\s*\\"(.+?)\\"', webpage, 'title',
                default=None) or self._og_search_title(webpage)

        if not info.get('description'):
            info['description'] = self._html_search_regex(
                r'<div[^>]+class="field-item\s+even">\s*<p>(.+?)</p>',
                webpage, 'description', default=None)

        return info
mit
phrazzld/feedingtube
feedtube.py
1
5318
# app from app import app from helpers import stripped, name_image_file # flickr_api import flickr_api, json, urllib from flickr_api.api import flickr flickr_key = app.config['FLICKR_API_KEY'] flickr_secret = app.config['FLICKR_API_SECRET'] # authorize access to flickr flickr_api.set_keys(api_key = flickr_key, api_secret = flickr_secret) # amazon s3 import boto3 s3 = boto3.resource('s3') client = boto3.client('s3') # flask_mail from flask_mail import Mail, Message mail = Mail(app) # mgmt pkgs import os # file and dir mgmt import shutil # path disintegration from bs4 import BeautifulSoup # parse xml import requests # fetch web content from PIL import Image # process images from StringIO import StringIO # glue requests and PIL from ratelimit import rate_limited # comply with Flickr's API policy import sys reload(sys) sys.setdefaultencoding('utf-8') def set_up_local_bucket(path): if not os.path.exists(os.path.join(app.config['APP_ROOT'], 'foodstuff')): os.mkdir(os.path.join(app.config['APP_ROOT'], 'foodstuff')) if not os.path.exists(path): os.mkdir(path) @rate_limited(1) def get_image_page(tag, per_page, page): results = flickr.photos.search(tags=tag, per_page=per_page, page=page) soup = BeautifulSoup(results, 'lxml-xml') return soup @rate_limited(1) def get_image_sizes(image_id): sizes = flickr.photos.getSizes(photo_id=image_id) soup = BeautifulSoup(sizes, 'lxml-xml').find_all('size') return soup def fill_up(tag, bucketname, path, amount): silo = get_image_page(tag, 100, 1) total = int(silo.photos['total']) if amount > total or amount <= 0: amount = total total_pages = total / 100 + 1 image_num = 1 for page in xrange(1, total_pages): for image in silo.find_all('photo'): try: image_id = image['id'] sizes = get_image_sizes(image_id) image_source = None image_source = sizes[-1]['source'] # always grab biggest img if image_source: name = name_image_file(image_id, image['title']) r = requests.get(image_source) try: r.raise_for_status() except Exception as exc: 
print("There was a problem: {0}".format(exc)) image_file = open(os.path.join(path, name), 'wb') for chunk in r.iter_content(100000): image_file.write(chunk) image_file.close() s3.Object(bucketname, name).put(Body=open(os.path.join(path, name), 'rb')) os.remove(os.path.join(path, name)) except Exception as exc: print("There was a problem: {0}".format(exc)) image_num += 1 if image_num > amount: return silo = get_image_page(tag, 100, page+1) import zipfile def zipper(email, tag, bucket, path, bucketname): with app.app_context(): zippy = '.'.join([tag, 'zip']) with zipfile.ZipFile(zippy, 'w', allowZip64=True) as z: for key in bucket.objects.all(): ext = key.key.split('.')[1] if ext not in ('jpg', 'jpeg'): key.delete() else: bucket.download_file(key.key, key.key) z.write(key.key) os.remove(key.key) key.delete() s3.Object(bucketname, zippy).put(Body=open(os.path.join(path, zippy), 'rb')) url = client.generate_presigned_url( ClientMethod='get_object', Params={ 'Bucket': bucketname, 'Key': zippy }, ExpiresIn=3600*24*3 # three days ) email_zips(email, url) os.remove(os.path.join(path, zippy)) def email_zips(email, url): with app.app_context(): msg = Message(subject="Tell your neural nets, dinner is served!", sender="no-reply@feedingtube.host", recipients=[email], bcc=['phraznikov+ft@gmail.com']) msg.body = "Use this link to download the images you requested: {0}\n\nNote: this link will only be valid for three days.".format(url) mail.send(msg) # process user request for images def get_food(email, tag, amount): with app.app_context(): if type(amount) is not int: amount = int(amount) clean_tag = ''.join(tag.split()) container = stripped(email + clean_tag) bucketname = 'feedingtube-a-' + stripped(email) + '-' + clean_tag path = os.path.join(app.config['APP_ROOT'], 'foodstuff', container) # create fresh s3 bucket bucket = s3.create_bucket(Bucket=bucketname) # nav to tmp dir to process file downloads set_up_local_bucket(path) # plumb images from flickr into local dir, then to s3 
fill_up(tag, bucketname, path, amount) os.chdir(path) zipper(email, tag, bucket, path, bucketname) os.chdir(app.config['APP_ROOT']) shutil.rmtree(path)
mit
Cybso/gameplay
gameplay/providers/SystemAppProvider.py
1
2124
""" Provides access to the system apps (global .desktop files for Linux, Start Menu on windows). Linux: SystemAppProvider parses and interprets .desktop files from all paths defined by QStandardPaths::ApplicationsLocation (http://doc.qt.io/qt-5/qstandardpaths.html). The processing is done by DesktopEntryProvider (which primary is used for CUSTOM .desktop files). Only the Main Categories defined by https://standards.freedesktop.org/menu-spec/latest/apa.html will be returned. Other categories will be ignored. This list can be overwritten in the configuration file: [SystemAppProvider] categories=Game Video Network FIXME Implement Windows FIXME Implement OSX """ import platform from gameplay.AppProvider import AppProvider, AppItem system = platform.system() if system == 'Linux': from .DesktopEntryProvider import DesktopEntryProvider from PyQt5.QtCore import QDir, QStandardPaths # https://standards.freedesktop.org/menu-spec/latest/apa.html MAIN_CATEGORIES = [ "AudioVideo", "Development", "Education", "Game", "Graphics", "Network", "Office", "Science", "Settings", "System", "Utility" ]; class SystemAppProvider(DesktopEntryProvider): def __init__(self, settings): DesktopEntryProvider.__init__(self, settings, [QDir.toNativeSeparators(x) for x in QStandardPaths.standardLocations(QStandardPaths.ApplicationsLocation)]) def filter_category(self, categories): """ Lists are passed by reference, so this method can be used to remove all Non-Standard-Categories from the list. """ keep_categories = self.settings.getlist('providers/system', 'categories', fallback=MAIN_CATEGORIES) # Work on a copy, because we are going to modify # the list while iterating it. for c in categories[:]: if not c in keep_categories: categories.remove(c) return len(categories) > 0 #elif system == 'Darwin': #elif system == 'Windows': else: class SystemAppProvider(AppProvider): """ Dummy implementation """ def __init__(self, settings): AppProvider.__init__(self, settings) # vim: set fenc=utf-8 ts=4 sw=4 noet :
gpl-3.0
dxj19831029/keras
tests/manual/check_wrappers.py
53
3385
from __future__ import absolute_import from __future__ import print_function from keras.datasets import mnist from keras.models import Sequential from keras.layers.core import Dense, Activation from keras.utils import np_utils from keras.wrappers.scikit_learn import * import numpy as np batch_size = 128 nb_epoch = 1 nb_classes = 10 max_train_samples = 5000 max_test_samples = 1000 np.random.seed(1337) # for reproducibility ############################################ # scikit-learn classification wrapper test # ############################################ print('Beginning scikit-learn classification wrapper test') print('Loading data') (X_train, y_train), (X_test, y_test) = mnist.load_data() X_train = X_train.reshape(60000, 784)[:max_train_samples] X_test = X_test.reshape(10000, 784)[:max_test_samples] X_train = X_train.astype('float32') X_test = X_test.astype('float32') X_train /= 255 X_test /= 255 Y_train = np_utils.to_categorical(y_train, nb_classes)[:max_train_samples] Y_test = np_utils.to_categorical(y_test, nb_classes)[:max_test_samples] print('Defining model') model = Sequential() model.add(Dense(784, 50)) model.add(Activation('relu')) model.add(Dense(50, 10)) model.add(Activation('softmax')) print('Creating wrapper') classifier = KerasClassifier(model, train_batch_size=batch_size, nb_epoch=nb_epoch) print('Fitting model') classifier.fit(X_train, Y_train) print('Testing score function') score = classifier.score(X_train, Y_train) print('Score: ', score) print('Testing predict function') preds = classifier.predict(X_test) print('Preds.shape: ', preds.shape) print('Testing predict proba function') proba = classifier.predict_proba(X_test) print('Proba.shape: ', proba.shape) print('Testing get params') print(classifier.get_params()) print('Testing set params') classifier.set_params(optimizer='sgd', loss='binary_crossentropy') print(classifier.get_params()) print('Testing attributes') print('Classes') print(classifier.classes_) print('Config') 
print(classifier.config_) print('Weights') print(classifier.weights_) print('Compiled model') print(classifier.compiled_model_) ######################################## # scikit-learn regression wrapper test # ######################################## print('Beginning scikit-learn regression wrapper test') print('Generating data') X_train = np.random.random((5000, 100)) X_test = np.random.random((1000, 100)) y_train = np.random.random(5000) y_test = np.random.random(1000) print('Defining model') model = Sequential() model.add(Dense(100, 50)) model.add(Activation('relu')) model.add(Dense(50, 1)) model.add(Activation('linear')) print('Creating wrapper') regressor = KerasRegressor(model, train_batch_size=batch_size, nb_epoch=nb_epoch) print('Fitting model') regressor.fit(X_train, y_train) print('Testing score function') score = regressor.score(X_train, y_train) print('Score: ', score) print('Testing predict function') preds = regressor.predict(X_test) print('Preds.shape: ', preds.shape) print('Testing get params') print(regressor.get_params()) print('Testing set params') regressor.set_params(optimizer='sgd', loss='mean_absolute_error') print(regressor.get_params()) print('Testing attributes') print('Config') print(regressor.config_) print('Weights') print(regressor.weights_) print('Compiled model') print(regressor.compiled_model_) print('Test script complete.')
mit
Sponk/cudalibre
gtest/googlemock/scripts/generator/cpp/gmock_class.py
520
8293
#!/usr/bin/env python # # Copyright 2008 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Generate Google Mock classes from base classes. This program will read in a C++ source file and output the Google Mock classes for the specified classes. If no class is specified, all classes in the source file are emitted. Usage: gmock_class.py header-file.h [ClassName]... Output is sent to stdout. """ __author__ = 'nnorwitz@google.com (Neal Norwitz)' import os import re import sys from cpp import ast from cpp import utils # Preserve compatibility with Python 2.3. try: _dummy = set except NameError: import sets set = sets.Set _VERSION = (1, 0, 1) # The version of this script. # How many spaces to indent. Can set me with the INDENT environment variable. _INDENT = 2 def _GenerateMethods(output_lines, source, class_node): function_type = (ast.FUNCTION_VIRTUAL | ast.FUNCTION_PURE_VIRTUAL | ast.FUNCTION_OVERRIDE) ctor_or_dtor = ast.FUNCTION_CTOR | ast.FUNCTION_DTOR indent = ' ' * _INDENT for node in class_node.body: # We only care about virtual functions. if (isinstance(node, ast.Function) and node.modifiers & function_type and not node.modifiers & ctor_or_dtor): # Pick out all the elements we need from the original function. const = '' if node.modifiers & ast.FUNCTION_CONST: const = 'CONST_' return_type = 'void' if node.return_type: # Add modifiers like 'const'. 
modifiers = '' if node.return_type.modifiers: modifiers = ' '.join(node.return_type.modifiers) + ' ' return_type = modifiers + node.return_type.name template_args = [arg.name for arg in node.return_type.templated_types] if template_args: return_type += '<' + ', '.join(template_args) + '>' if len(template_args) > 1: for line in [ '// The following line won\'t really compile, as the return', '// type has multiple template arguments. To fix it, use a', '// typedef for the return type.']: output_lines.append(indent + line) if node.return_type.pointer: return_type += '*' if node.return_type.reference: return_type += '&' num_parameters = len(node.parameters) if len(node.parameters) == 1: first_param = node.parameters[0] if source[first_param.start:first_param.end].strip() == 'void': # We must treat T(void) as a function with no parameters. num_parameters = 0 tmpl = '' if class_node.templated_types: tmpl = '_T' mock_method_macro = 'MOCK_%sMETHOD%d%s' % (const, num_parameters, tmpl) args = '' if node.parameters: # Due to the parser limitations, it is impossible to keep comments # while stripping the default parameters. When defaults are # present, we choose to strip them and comments (and produce # compilable code). # TODO(nnorwitz@google.com): Investigate whether it is possible to # preserve parameter name when reconstructing parameter text from # the AST. if len([param for param in node.parameters if param.default]) > 0: args = ', '.join(param.type.name for param in node.parameters) else: # Get the full text of the parameters from the start # of the first parameter to the end of the last parameter. start = node.parameters[0].start end = node.parameters[-1].end # Remove // comments. args_strings = re.sub(r'//.*', '', source[start:end]) # Condense multiple spaces and eliminate newlines putting the # parameters together on a single line. 
Ensure there is a # space in an argument which is split by a newline without # intervening whitespace, e.g.: int\nBar args = re.sub(' +', ' ', args_strings.replace('\n', ' ')) # Create the mock method definition. output_lines.extend(['%s%s(%s,' % (indent, mock_method_macro, node.name), '%s%s(%s));' % (indent*3, return_type, args)]) def _GenerateMocks(filename, source, ast_list, desired_class_names): processed_class_names = set() lines = [] for node in ast_list: if (isinstance(node, ast.Class) and node.body and # desired_class_names being None means that all classes are selected. (not desired_class_names or node.name in desired_class_names)): class_name = node.name parent_name = class_name processed_class_names.add(class_name) class_node = node # Add namespace before the class. if class_node.namespace: lines.extend(['namespace %s {' % n for n in class_node.namespace]) # } lines.append('') # Add template args for templated classes. if class_node.templated_types: # TODO(paulchang): The AST doesn't preserve template argument order, # so we have to make up names here. # TODO(paulchang): Handle non-type template arguments (e.g. # template<typename T, int N>). template_arg_count = len(class_node.templated_types.keys()) template_args = ['T%d' % n for n in range(template_arg_count)] template_decls = ['typename ' + arg for arg in template_args] lines.append('template <' + ', '.join(template_decls) + '>') parent_name += '<' + ', '.join(template_args) + '>' # Add the class prolog. lines.append('class Mock%s : public %s {' # } % (class_name, parent_name)) lines.append('%spublic:' % (' ' * (_INDENT // 2))) # Add all the methods. _GenerateMethods(lines, source, class_node) # Close the class. if lines: # If there are no virtual methods, no need for a public label. if len(lines) == 2: del lines[-1] # Only close the class if there really is a class. lines.append('};') lines.append('') # Add an extra newline. # Close the namespace. 
if class_node.namespace: for i in range(len(class_node.namespace)-1, -1, -1): lines.append('} // namespace %s' % class_node.namespace[i]) lines.append('') # Add an extra newline. if desired_class_names: missing_class_name_list = list(desired_class_names - processed_class_names) if missing_class_name_list: missing_class_name_list.sort() sys.stderr.write('Class(es) not found in %s: %s\n' % (filename, ', '.join(missing_class_name_list))) elif not processed_class_names: sys.stderr.write('No class found in %s\n' % filename) return lines def main(argv=sys.argv): if len(argv) < 2: sys.stderr.write('Google Mock Class Generator v%s\n\n' % '.'.join(map(str, _VERSION))) sys.stderr.write(__doc__) return 1 global _INDENT try: _INDENT = int(os.environ['INDENT']) except KeyError: pass except: sys.stderr.write('Unable to use indent of %s\n' % os.environ.get('INDENT')) filename = argv[1] desired_class_names = None # None means all classes in the source file. if len(argv) >= 3: desired_class_names = set(argv[2:]) source = utils.ReadFile(filename) if source is None: return 1 builder = ast.BuilderFromSource(source, filename) try: entire_ast = filter(None, builder.Generate()) except KeyboardInterrupt: return except: # An error message was already printed since we couldn't parse. sys.exit(1) else: lines = _GenerateMocks(filename, source, entire_ast, desired_class_names) sys.stdout.write('\n'.join(lines)) if __name__ == '__main__': main(sys.argv)
lgpl-2.1
RevelSystems/django
tests/template_tests/syntax_tests/test_spaceless.py
521
1766
from django.test import SimpleTestCase from ..utils import setup class SpacelessTagTests(SimpleTestCase): @setup({'spaceless01': "{% spaceless %} <b> <i> text </i> </b> {% endspaceless %}"}) def test_spaceless01(self): output = self.engine.render_to_string('spaceless01') self.assertEqual(output, "<b><i> text </i></b>") @setup({'spaceless02': "{% spaceless %} <b> \n <i> text </i> \n </b> {% endspaceless %}"}) def test_spaceless02(self): output = self.engine.render_to_string('spaceless02') self.assertEqual(output, "<b><i> text </i></b>") @setup({'spaceless03': "{% spaceless %}<b><i>text</i></b>{% endspaceless %}"}) def test_spaceless03(self): output = self.engine.render_to_string('spaceless03') self.assertEqual(output, "<b><i>text</i></b>") @setup({'spaceless04': "{% spaceless %}<b> <i>{{ text }}</i> </b>{% endspaceless %}"}) def test_spaceless04(self): output = self.engine.render_to_string('spaceless04', {'text': 'This & that'}) self.assertEqual(output, "<b><i>This &amp; that</i></b>") @setup({'spaceless05': "{% autoescape off %}{% spaceless %}" "<b> <i>{{ text }}</i> </b>{% endspaceless %}" "{% endautoescape %}"}) def test_spaceless05(self): output = self.engine.render_to_string('spaceless05', {'text': 'This & that'}) self.assertEqual(output, "<b><i>This & that</i></b>") @setup({'spaceless06': "{% spaceless %}<b> <i>{{ text|safe }}</i> </b>{% endspaceless %}"}) def test_spaceless06(self): output = self.engine.render_to_string('spaceless06', {'text': 'This & that'}) self.assertEqual(output, "<b><i>This & that</i></b>")
bsd-3-clause
makinacorpus/Geotrek
geotrek/tourism/migrations/0011_auto_20200406_1411.py
2
2269
# Generated by Django 2.0.13 on 2020-04-06 14:11 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('tourism', '0010_auto_20200228_2152'), ] operations = [ migrations.RunSQL('ALTER SEQUENCE t_b_contenu_touristique_categorie_id_seq RENAME TO tourism_touristiccontentcategory_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_b_contenu_touristique_type_id_seq RENAME TO tourism_touristiccontenttype_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_b_evenement_touristique_type_id_seq RENAME TO tourism_touristicenventtype_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_b_renseignement_id_seq RENAME TO tourism_informationdesk_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_b_systeme_reservation_id_seq RENAME TO tourism_reservationsystem_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_b_type_renseignement_id_seq RENAME TO tourism_informationdesktype_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_contenu_touristique_portal_id_seq RENAME TO tourism_touristiccontent_portal_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_contenu_touristique_source_id_seq RENAME TO tourism_touristiccontent_source_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_contenu_touristique_theme_id_seq RENAME TO tourism_touristiccontent_themes_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_contenu_touristique_type1_id_seq RENAME TO tourism_touristiccontent_type1_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_contenu_touristique_type2_id_seq RENAME TO tourism_touristiccontent_type2_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_evenement_touristique_portal_id_seq RENAME TO tourism_touristicevent_portal_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_evenement_touristique_source_id_seq RENAME TO tourism_touristicevent_source_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_r_evenement_touristique_theme_id_seq RENAME TO tourism_touristicevent_themes_id_seq;'), migrations.RunSQL('ALTER SEQUENCE t_t_contenu_touristique_id_seq RENAME TO tourism_touristiccontent_id_seq;'), 
migrations.RunSQL('ALTER SEQUENCE t_t_evenement_touristique_id_seq RENAME TO tourism_touristicevent_id_seq;'), ]
bsd-2-clause
nekonok/dotfiles
app/alfred/Alfred.alfredpreferences/workflows/user.workflow.125AE956-75D0-4ABD-BA83-AB8EB38B9531/libs/faker/providers/person/en_TH/__init__.py
2
5104
from __future__ import unicode_literals from .. import Provider as PersonProvider class Provider(PersonProvider): formats = ( '{{first_name}} {{last_name}}', '{{first_name}} {{last_name}}', '{{first_name}} {{last_name}}', '{{first_name}} {{last_name}}', '{{first_name}} {{last_name}}', '{{first_name}} {{last_name}}', '{{prefix}} {{first_name}} {{last_name}}') prefixes_male = ( "GEN", "LT GEN", "MAJ GEN", "COL", "LT COL", "MAJ", "CAPT", "LT", "SUB LT", "S M 1", "S M 2", "S M 3", "SGT", "CPL", "PFC", "PVT", "ADM", "V ADM", "R ADM", "CAPT", "CDR", "L CDR", "LT", "LT JG", "SUB LT", "CPO 1", "CPO 2", "CPO 3", "PO 1", "PO 2", "PO 3", "SEA-MAN", "ACM", "AM", "AVM", "GP CAPT", "WG CDR", "SQN LDR", "FLT LT", "FLG OFF", "PLT OFF", "FS 1", "FS 2", "FS 3", "SGT", "CPL", "LAC", "AMN", "POL GEN", "POL LT GEN", "POL MAJ GEN", "POL COL", "POL LT COL", "POL MAJ", "POL CAPT", "POL LT", "POL SUB LT", "POL SEN SGT MAJ", "POL SGT MAJ", "POL SGT", "POL CPL", "POL L/C", "POL CONST", "MR", "REV", "M L", "M R", "SAMANERA", "PHRA", "PHRA ATHIKAN", "CHAO ATHIKAN", "PHRAPALAD", "PHRASAMU", "PHRABAIDIKA", "PHRAKHU PALAD", "PHRAKHU SAMU", "PHRAKHU BAIDIKA", "PHRAMAHA", "PHRAKHU DHAMMADHORN", "PHRAKHU VINAIDHORN") prefixes_female = ( "GEN", "LT GEN", "MAJ GEN", "COL", "LT COL", "MAJ", "CAPT", "LT", "SUB LT", "S M 1", "S M 2", "S M 3", "SGT", "CPL", "PFC", "PVT", "ADM", "V ADM", "R ADM", "CAPT", "CDR", "L CDR", "LT", "LT JG", "SUB LT", "CPO 1", "CPO 2", "CPO 3", "PO 1", "PO 2", "PO 3", "SEA-MAN", "ACM", "AM", "AVM", "GP CAPT", "WG CDR", "SQN LDR", "FLT LT", "FLG OFF", "PLT OFF", "FS 1", "FS 2", "FS 3", "SGT", "CPL", "LAC", "AMN", "POL GEN", "POL LT GEN", "POL MAJ GEN", "POL COL", "POL LT COL", "POL MAJ", "POL CAPT", "POL LT", "POL SUB LT", "POL SEN SGT MAJ", "POL SGT MAJ", "POL SGT", "POL CPL", "POL L/C", "POL CONST", "MRS", "MISS", "REV", "M L") prefixes = prefixes_male + prefixes_female first_names = ( "Pornchanok", "Patchaploy", "Peem", "Kodchaporn", "Pattapon", "Sarunporn", "Jinjuta", 
"Sorawut", "Suvakit", "Prima", "Darin", "Pintusorn", "Kulnun", "Nutcha", "Nutkrita", "Sittikorn", "Wasin", "Apisara", "Nattawun", "Tunradee", "Niracha", "Tunchanok", "Kamolchanok", "Jaruwan", "Pachongruk", "Pakjira", "Pattatomporn", "Suwijuk", "Noppakao", "Ratchanon", "Atit", "Kunaporn", "Arisara", "Todsawun", "Chaiwut", "Puntira", "Supasita", "Patcharaporn", "Phubes", "Pattamon", "Chanya", "Pannawich", "Chawin", "Pada", "Chanikan", "Nutwadee", "Chalisa", "Prames", "Supasit", "Sitiwat", "Teetat", "Yada", "Phenphitcha", "Anon", "Chaifah", "Pawan", "Aunyaporn", "Yanisa", "Pak", "Chayanin", "Chayapat", "Jitrin", "Wassaya", "Pitipat", "Nichakarn", "Parin", "Thanatcha", ) last_names = ( "Prachayaroch", "Prachayaroch", "Kamalanon", "Tianvarich", "Bunlerngsri", "Sukhenai", "Posalee", "Chaisatit", "Sujjaboriboon", "Kamalanon", "Neerachapong", "Pianduangsri", "Pasuk", "Losatapornpipit", "Suraprasert", "Matinawin", "Choeychuen", "Wasunun", "Kumsoontorn", "Sireelert", "Boonpungbaramee", "Sorattanachai", "Benchapatranon", "Intaum", "Pikatsingkorn", "Srisoontorn", "Polpo", "Kongchayasukawut", "Charoensuksopol", "Bunlupong", "Chomsri", "Tungkasethakul", "Chowitunkit", "Todsapornpitakul", "Wimolnot", "Kittakun", "Methavorakul", "Pitanuwat", "Phusilarungrueng", "Turongkinanon", "Kitprapa", "Pothanun", "Youprasert", "Methavorakul", "Vethayasas", "Sooksawang", "Anekvorakul", "Pichpandecha", "Sittisaowapak", "Suraprachit", "Kongsri", "Trikasemmart", "Habpanom", "Wannapaitoonsri", "Vinyuvanichkul", "Pongpanitch", "Permchart", "Chaihirankarn", "Thantananont", "Norramon", "Prayoonhong", "Lertsattayanusak", "Polauaypon", "Prakalpawong", "Titipatrayunyong", "Krittayanukoon", "Siripaiboo")
mit
cstrahan/hs-capnp
test-data/gen.py
1
3080
#!/usr/bin/env python import os, inspect import capnp import tests_capnp test_data_dir = os.path.dirname(os.path.abspath(__file__)) ################################################################################ # Sanity checks for handwritten messages def read(schema, name): p = os.path.join(test_data_dir, name) f = open(p, 'rb') return schema.read(f) struct = read(tests_capnp.OneUInt32, "far_one_uint32") assert struct.field0 == 0x11223344, "far_one_uint32 sanity-check failed" struct = read(tests_capnp.OneUInt32, "double_far_one_uint32") assert struct.field0 == 0x11223344, "double_far_one_uint32 sanity-check failed" ################################################################################ # Create test cases def write(message, name): p = os.path.join(test_data_dir, name) f = open(p, 'w+b') message.write(f) struct = tests_capnp.OneBool.new_message() struct.bool = False write(struct, "one_bool_false") struct = tests_capnp.OneBool.new_message() struct.bool = True write(struct, "one_bool_true") struct = tests_capnp.OneUInt32.new_message() struct.field0 = 0x11223344 write(struct, "one_uint32") struct = tests_capnp.ManyBool.new_message() for n in range(0, 64): setattr(struct, "field"+str(n), True) write(struct, "many_bool") struct = tests_capnp.ManyUInt8.new_message() for n in range(0, 8): setattr(struct, "field"+str(n), n+1) write(struct, "many_uint8") struct = tests_capnp.ThreeStructs.new_message() struct.field0.field0 = 0x11223344 struct.field1.field0 = 0x55667788 struct.field2.field0 = 0x99AABBCC write(struct, "three_structs") struct = tests_capnp.ThreeLists.new_message() struct.field0 = [0] struct.field1 = [1,2] struct.field2 = [3,4,5] write(struct, "three_lists") struct = tests_capnp.ListOfUInt32.new_message() struct.field0 = [0x11223344, 0x55667788, 0x99AABBCC] write(struct, "list_of_uint32") struct = tests_capnp.ListOfUInt32.new_message() write(struct, "uninitialized_list_of_uint32") struct = tests_capnp.ListOfUInt64.new_message() struct.field0 = 
[0x1020304050607080, 0x1121314151617181, 0x1222324252627282] write(struct, "list_of_uint64") struct = tests_capnp.ListOfBool.new_message() struct.field0 = [True, True, False, True] write(struct, "list_of_bool") struct = tests_capnp.ListOfOneUInt32.new_message() l = struct.init("field0", 3) l[0].field0 = 0x11223344 l[1].field0 = 0x55667788 l[2].field0 = 0x99AABBCC write(struct, "list_of_one_uint32") struct = tests_capnp.OneText.new_message() struct.field0 = "This is some text." write(struct, "one_text") struct = tests_capnp.OneData.new_message() struct.field0 = "This is some data." write(struct, "one_data") struct = tests_capnp.ListOfListOfUInt32.new_message() l = struct.init("field0", 3) l[0] = [0] l[1] = [1, 2] l[2] = [3, 4, 5] write(struct, "list_of_list_of_uint32") struct = tests_capnp.Mixed.new_message() struct.field0 = 12345 struct.field1.field0 = 67890 struct.init("field3", 3) struct.field3 = [20304, 30405, 50607] write(struct, "mixed") # TODO # list of list of uint32 # three lists # three texts # three datas
mit
elegans-io/csrec-webapp
setup.py
1
3832
from setuptools import setup, find_packages from codecs import open # To use a consistent encoding from os import path import csrec_webapp here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file long_description = open('README.md').read() def setup_package(): build_requires = ['csrec>=0.4.1', 'tornado>=4.5.2', 'locustio>=0.7.3'] metadata = dict( name='csrec-webapp', # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html version=csrec_webapp.__version__, description="Webapp for the cold start recommender", long_description=long_description, # The project's main homepage. url='https://github.com/elegans-io/csrec-webapp', # Author details author='elegans.io Ltd', author_email='info@elegans.io', # Choose your license license='GPL v2', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 4 - Beta', # Indicate who your project is intended for 'Intended Audience :: Information Technology', 'Topic :: Scientific/Engineering :: Artificial Intelligence', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2.7', ], # What does your project relate to? keywords='recommendations, recommender,recommendation engine', # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). #packages=find_packages(exclude=['contrib', 'docs', 'tests*']), packages=["csrec_webapp"], # List run-time dependencies here. These will be installed by pip when your # project is installed. 
For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/requirements.html # See also https://github.com/scipy/scipy/blob/master/setup.py (malemi) setup_requires=build_requires, install_requires=build_requires, # List additional groups of dependencies here (e.g. development dependencies). # You can install these using the following syntax, for example: # $ pip install -e .[dev,test] # extras_require={ # 'dev': ['check-manifest'], # 'test': ['coverage'], # }, # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. package_data={ 'csrec_webapp': ['*.cl', '*.py'] }, include_package_data=True, scripts=['bin/csrec_webapp.py'], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. # entry_points={ # 'console_scripts': [ # 'sample=sample:main', # ], # }, ) setup(**metadata) if __name__ == '__main__': setup_package()
gpl-2.0
damycra/django-rest-framework
tests/test_testing.py
73
9278
# encoding: utf-8 from __future__ import unicode_literals from io import BytesIO from django.conf.urls import url from django.contrib.auth.models import User from django.shortcuts import redirect from django.test import TestCase from rest_framework.decorators import api_view from rest_framework.response import Response from rest_framework.test import ( APIClient, APIRequestFactory, force_authenticate ) @api_view(['GET', 'POST']) def view(request): return Response({ 'auth': request.META.get('HTTP_AUTHORIZATION', b''), 'user': request.user.username }) @api_view(['GET', 'POST']) def session_view(request): active_session = request.session.get('active_session', False) request.session['active_session'] = True return Response({ 'active_session': active_session }) @api_view(['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS']) def redirect_view(request): return redirect('/view/') urlpatterns = [ url(r'^view/$', view), url(r'^session-view/$', session_view), url(r'^redirect-view/$', redirect_view), ] class TestAPITestClient(TestCase): urls = 'tests.test_testing' def setUp(self): self.client = APIClient() def test_credentials(self): """ Setting `.credentials()` adds the required headers to each request. """ self.client.credentials(HTTP_AUTHORIZATION='example') for _ in range(0, 3): response = self.client.get('/view/') self.assertEqual(response.data['auth'], 'example') def test_force_authenticate(self): """ Setting `.force_authenticate()` forcibly authenticates each request. """ user = User.objects.create_user('example', 'example@example.com') self.client.force_authenticate(user) response = self.client.get('/view/') self.assertEqual(response.data['user'], 'example') def test_force_authenticate_with_sessions(self): """ Setting `.force_authenticate()` forcibly authenticates each request. 
""" user = User.objects.create_user('example', 'example@example.com') self.client.force_authenticate(user) # First request does not yet have an active session response = self.client.get('/session-view/') self.assertEqual(response.data['active_session'], False) # Subsequant requests have an active session response = self.client.get('/session-view/') self.assertEqual(response.data['active_session'], True) # Force authenticating as `None` should also logout the user session. self.client.force_authenticate(None) response = self.client.get('/session-view/') self.assertEqual(response.data['active_session'], False) def test_csrf_exempt_by_default(self): """ By default, the test client is CSRF exempt. """ User.objects.create_user('example', 'example@example.com', 'password') self.client.login(username='example', password='password') response = self.client.post('/view/') self.assertEqual(response.status_code, 200) def test_explicitly_enforce_csrf_checks(self): """ The test client can enforce CSRF checks. 
""" client = APIClient(enforce_csrf_checks=True) User.objects.create_user('example', 'example@example.com', 'password') client.login(username='example', password='password') response = client.post('/view/') expected = {'detail': 'CSRF Failed: CSRF cookie not set.'} self.assertEqual(response.status_code, 403) self.assertEqual(response.data, expected) def test_can_logout(self): """ `logout()` resets stored credentials """ self.client.credentials(HTTP_AUTHORIZATION='example') response = self.client.get('/view/') self.assertEqual(response.data['auth'], 'example') self.client.logout() response = self.client.get('/view/') self.assertEqual(response.data['auth'], b'') def test_logout_resets_force_authenticate(self): """ `logout()` resets any `force_authenticate` """ user = User.objects.create_user('example', 'example@example.com', 'password') self.client.force_authenticate(user) response = self.client.get('/view/') self.assertEqual(response.data['user'], 'example') self.client.logout() response = self.client.get('/view/') self.assertEqual(response.data['user'], '') def test_follow_redirect(self): """ Follow redirect by setting follow argument. 
""" response = self.client.get('/redirect-view/') self.assertEqual(response.status_code, 302) response = self.client.get('/redirect-view/', follow=True) self.assertIsNotNone(response.redirect_chain) self.assertEqual(response.status_code, 200) response = self.client.post('/redirect-view/') self.assertEqual(response.status_code, 302) response = self.client.post('/redirect-view/', follow=True) self.assertIsNotNone(response.redirect_chain) self.assertEqual(response.status_code, 200) response = self.client.put('/redirect-view/') self.assertEqual(response.status_code, 302) response = self.client.put('/redirect-view/', follow=True) self.assertIsNotNone(response.redirect_chain) self.assertEqual(response.status_code, 200) response = self.client.patch('/redirect-view/') self.assertEqual(response.status_code, 302) response = self.client.patch('/redirect-view/', follow=True) self.assertIsNotNone(response.redirect_chain) self.assertEqual(response.status_code, 200) response = self.client.delete('/redirect-view/') self.assertEqual(response.status_code, 302) response = self.client.delete('/redirect-view/', follow=True) self.assertIsNotNone(response.redirect_chain) self.assertEqual(response.status_code, 200) response = self.client.options('/redirect-view/') self.assertEqual(response.status_code, 302) response = self.client.options('/redirect-view/', follow=True) self.assertIsNotNone(response.redirect_chain) self.assertEqual(response.status_code, 200) def test_invalid_multipart_data(self): """ MultiPart encoding cannot support nested data, so raise a helpful error if the user attempts to do so. """ self.assertRaises( AssertionError, self.client.post, path='/view/', data={'valid': 123, 'invalid': {'a': 123}} ) class TestAPIRequestFactory(TestCase): def test_csrf_exempt_by_default(self): """ By default, the test client is CSRF exempt. 
""" user = User.objects.create_user('example', 'example@example.com', 'password') factory = APIRequestFactory() request = factory.post('/view/') request.user = user response = view(request) self.assertEqual(response.status_code, 200) def test_explicitly_enforce_csrf_checks(self): """ The test client can enforce CSRF checks. """ user = User.objects.create_user('example', 'example@example.com', 'password') factory = APIRequestFactory(enforce_csrf_checks=True) request = factory.post('/view/') request.user = user response = view(request) expected = {'detail': 'CSRF Failed: CSRF cookie not set.'} self.assertEqual(response.status_code, 403) self.assertEqual(response.data, expected) def test_invalid_format(self): """ Attempting to use a format that is not configured will raise an assertion error. """ factory = APIRequestFactory() self.assertRaises( AssertionError, factory.post, path='/view/', data={'example': 1}, format='xml' ) def test_force_authenticate(self): """ Setting `force_authenticate()` forcibly authenticates the request. """ user = User.objects.create_user('example', 'example@example.com') factory = APIRequestFactory() request = factory.get('/view') force_authenticate(request, user=user) response = view(request) self.assertEqual(response.data['user'], 'example') def test_upload_file(self): # This is a 1x1 black png simple_png = BytesIO(b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\rIDATx\x9cc````\x00\x00\x00\x05\x00\x01\xa5\xf6E@\x00\x00\x00\x00IEND\xaeB`\x82') simple_png.name = 'test.png' factory = APIRequestFactory() factory.post('/', data={'image': simple_png}) def test_request_factory_url_arguments(self): """ This is a non regression test against #1461 """ factory = APIRequestFactory() request = factory.get('/view/?demo=test') self.assertEqual(dict(request.GET), {'demo': ['test']}) request = factory.get('/view/', {'demo': 'test'}) self.assertEqual(dict(request.GET), {'demo': ['test']})
bsd-2-clause
toshywoshy/ansible
lib/ansible/module_utils/service.py
86
9242
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # Copyright (c) Ansible Inc, 2016 # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import glob
import os
import pickle
import platform
import select
import shlex
import subprocess
import traceback

from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text


def sysv_is_enabled(name, runlevel=None):
    '''
    This function will check if the service name supplied
    is enabled in any of the sysv runlevels

    :arg name: name of the service to test for
    :kw runlevel: runlevel to check (default: None, meaning any runlevel)
    '''
    # An SysV init script is "enabled" when a start symlink (S??<name>)
    # exists in a runlevel directory.  When /etc/rc0.d/ is absent the
    # runlevel directories presumably live under /etc/init.d/rc?.d/
    # (SUSE-style layout) -- that is what the fallback globs cover.
    if runlevel:
        if not os.path.isdir('/etc/rc0.d/'):
            return bool(glob.glob('/etc/init.d/rc%s.d/S??%s' % (runlevel, name)))
        return bool(glob.glob('/etc/rc%s.d/S??%s' % (runlevel, name)))
    else:
        if not os.path.isdir('/etc/rc0.d/'):
            return bool(glob.glob('/etc/init.d/rc?.d/S??%s' % name))
        return bool(glob.glob('/etc/rc?.d/S??%s' % name))


def get_sysv_script(name):
    '''
    This function will return the expected path for an init script
    corresponding to the service name supplied.

    :arg name: name or path of the service to test for
    '''
    # Absolute paths are taken verbatim; bare names map into /etc/init.d/.
    if name.startswith('/'):
        result = name
    else:
        result = '/etc/init.d/%s' % name

    return result


def sysv_exists(name):
    '''
    This function will return True or False depending on
    the existence of an init script corresponding to the service name supplied.

    :arg name: name of the service to test for
    '''
    return os.path.exists(get_sysv_script(name))


def get_ps(module, pattern):
    '''
    Last resort to find a service by trying to match pattern
    to programs in memory

    :arg module: AnsibleModule object, used for run_command/get_bin_path
    :arg pattern: substring to look for in the ps output
    :returns: True if any ps output line contains *pattern*
    '''
    found = False
    if platform.system() == 'SunOS':
        flags = '-ef'
    else:
        flags = 'auxww'
    psbin = module.get_bin_path('ps', True)

    (rc, psout, pserr) = module.run_command([psbin, flags])
    if rc == 0:
        for line in psout.splitlines():
            if pattern in line:
                # FIXME: should add logic to prevent matching 'self', though that should be extremely rare
                found = True
                break
    return found


def fail_if_missing(module, found, service, msg=''):
    '''
    This function will return an error or exit gracefully depending on check mode status
    and if the service is missing or not.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg found: boolean indicating if services was found or not
    :arg service: name of service
    :kw msg: extra info to append to error/success msg when missing
    '''
    if not found:
        # In check mode we assume the service would be created by a full run
        # instead of failing, so dependent tasks can still be previewed.
        if module.check_mode:
            module.exit_json(msg="Service %s not found on %s, assuming it will exist on full run" % (service, msg), changed=True)
        else:
            module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))


def fork_process():
    '''
    This function performs the double fork process to detach from the
    parent process and execute.

    :returns: 0 in the (grand)child process, the child's pid in the parent.
    '''
    pid = os.fork()

    if pid == 0:
        # Set stdin/stdout/stderr to /dev/null
        fd = os.open(os.devnull, os.O_RDWR)

        # clone stdin/out/err
        for num in range(3):
            if fd != num:
                os.dup2(fd, num)

        # close otherwise
        if fd not in range(3):
            os.close(fd)

        # Make us a daemon
        pid = os.fork()

        # end if not in child
        if pid > 0:
            os._exit(0)

        # get new process session and detach
        sid = os.setsid()
        if sid == -1:
            raise Exception("Unable to detach session while daemonizing")

        # avoid possible problems with cwd being removed
        os.chdir("/")

        # Second fork: the session leader exits so the grandchild can never
        # reacquire a controlling terminal.
        pid = os.fork()
        if pid > 0:
            os._exit(0)

    return pid


def daemonize(module, cmd):
    '''
    Execute a command while detaching as a daemon, returns rc, stdout, and stderr.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg cmd: is a list or string representing the command and options to run

    This is complex because daemonization is hard for people.
    What we do is daemonize a part of this module, the daemon runs the command,
    picks up the return code and output, and returns it to the main process.
    '''

    # init some vars
    chunk = 4096  # FIXME: pass in as arg?
    errors = 'surrogate_or_strict'

    # start it!
    try:
        # The pipe carries the pickled result from the daemonized child back
        # to the parent.
        pipe = os.pipe()
        pid = fork_process()
    except OSError:
        module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc())
    except Exception as exc:
        module.fail_json(msg=to_text(exc), exception=traceback.format_exc())

    # we don't do any locking as this should be a unique module/process
    if pid == 0:
        # --- child/daemon side: run the command, report through the pipe ---
        os.close(pipe[0])

        # if command is string deal with py2 vs py3 conversions for shlex
        if not isinstance(cmd, list):
            if PY2:
                cmd = shlex.split(to_bytes(cmd, errors=errors))
            else:
                cmd = shlex.split(to_text(cmd, errors=errors))

        # make sure we always use byte strings
        run_cmd = []
        for c in cmd:
            run_cmd.append(to_bytes(c, errors=errors))

        # execute the command in forked process
        p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
        fds = [p.stdout, p.stderr]

        # loop reading output till its done
        output = {p.stdout: b(""), p.stderr: b("")}
        while fds:
            rfd, wfd, efd = select.select(fds, [], fds, 1)
            if (rfd + wfd + efd) or p.poll():
                for out in fds:
                    if out in rfd:
                        data = os.read(out.fileno(), chunk)
                        if not data:
                            # EOF on this stream; stop selecting on it.
                            fds.remove(out)
                        output[out] += b(data)

        # even after fds close, we might want to wait for pid to die
        p.wait()

        # Return a pickled data of parent
        return_data = pickle.dumps([p.returncode, to_text(output[p.stdout]), to_text(output[p.stderr])], protocol=pickle.HIGHEST_PROTOCOL)
        os.write(pipe[1], to_bytes(return_data, errors=errors))

        # clean up
        os.close(pipe[1])
        os._exit(0)

    elif pid == -1:
        module.fail_json(msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")

    else:
        # --- parent side: collect the pickled result from the pipe ---
        os.close(pipe[1])
        os.waitpid(pid, 0)

        # Grab response data after child finishes
        return_data = b("")
        while True:
            rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
            if pipe[0] in rfd:
                data = os.read(pipe[0], chunk)
                if not data:
                    break
                return_data += b(data)

        # Note: no need to specify encoding on py3 as this module sends the
        # pickle to itself (thus same python interpreter so we aren't mixing
        # py2 and py3)
        return pickle.loads(to_bytes(return_data, errors=errors))


def check_ps(module, pattern):
    '''
    Return True if *pattern* appears in the output of ps, False otherwise.

    :arg module: AnsibleModule object, used for run_command/get_bin_path
    :arg pattern: substring to look for in each ps output line
    '''
    # Set ps flags
    if platform.system() == 'SunOS':
        psflags = '-ef'
    else:
        psflags = 'auxww'

    # Find ps binary
    psbin = module.get_bin_path('ps', True)

    (rc, out, err) = module.run_command('%s %s' % (psbin, psflags))
    # If rc is 0, set running as appropriate
    if rc == 0:
        for line in out.split('\n'):
            if pattern in line:
                return True
    return False
gpl-3.0
funkring/fdoo
addons/stock_account/wizard/__init__.py
351
1105
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import stock_change_standard_price import stock_invoice_onshipping import stock_valuation_history import stock_return_picking
agpl-3.0
jaredmorgs/AsciidocFX
conf/docbook/extensions/xslt.py
65
2137
#!/usr/bin/python -u # $Id: xslt.py 8353 2009-03-17 16:57:50Z mzjn $ import sys import libxml2 import libxslt from docbook import adjustColumnWidths # Check the arguments usage = "Usage: %s xmlfile.xml xslfile.xsl [outputfile] [param1=val [param2=val]...]" % sys.argv[0] xmlfile = None xslfile = None outfile = "-" params = {} try: xmlfile = sys.argv[1] xslfile = sys.argv[2] except IndexError: print usage sys.exit(1) def quote(astring): if astring.find("'") < 0: return "'" + astring + "'" else: return '"' + astring + '"' try: outfile = sys.argv[3] if outfile.find("=") > 0: name, value = outfile.split("=", 2) params[name] = quote(value) outfile = None count = 4 while (sys.argv[count]): try: name, value = sys.argv[count].split("=", 2) if params.has_key(name): print "Warning: '%s' re-specified; replacing value" % name params[name] = quote(value) except ValueError: print "Invalid parameter specification: '" + sys.argv[count] + "'" print usage sys.exit(1) count = count+1 except IndexError: pass # ====================================================================== # Memory debug specific # libxml2.debugMemory(1) # Setup environment libxml2.lineNumbersDefault(1) libxml2.substituteEntitiesDefault(1) libxslt.registerExtModuleFunction("adjustColumnWidths", "http://nwalsh.com/xslt/ext/xsltproc/python/Table", adjustColumnWidths) # Initialize and run styledoc = libxml2.parseFile(xslfile) style = libxslt.parseStylesheetDoc(styledoc) doc = libxml2.parseFile(xmlfile) result = style.applyStylesheet(doc, params) # Save the result if outfile: style.saveResultToFilename(outfile, result, 0) else: print result # Free things up style.freeStylesheet() doc.freeDoc() result.freeDoc() # Memory debug specific #libxslt.cleanup() #if libxml2.debugMemory(1) != 0: # print "Memory leak %d bytes" % (libxml2.debugMemory(1)) # libxml2.dumpMemory()
apache-2.0
dostavro/dotfiles
sublime2/Packages/SublimeCodeIntel/libs/chardet/mbcharsetprober.py
215
3182
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import constants, sys
from constants import eStart, eError, eItsMe
from charsetprober import CharSetProber


class MultiByteCharSetProber(CharSetProber):
    """Abstract base for multi-byte charset probers.

    Subclasses are expected to provide a coding state machine
    (``_mCodingSM``), a character-distribution analyzer
    (``_mDistributionAnalyzer``) and a ``get_charset_name``
    implementation; this class drives both against the input buffer.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        # Subclasses assign concrete analyzer/state-machine instances.
        self._mDistributionAnalyzer = None
        self._mCodingSM = None
        # Last two bytes seen, kept across feed() calls so multi-byte
        # characters split between buffers are still analyzed.
        self._mLastChar = ['\x00', '\x00']

    def reset(self):
        # Reset base-class state plus whichever helpers exist.
        CharSetProber.reset(self)
        if self._mCodingSM:
            self._mCodingSM.reset()
        if self._mDistributionAnalyzer:
            self._mDistributionAnalyzer.reset()
        self._mLastChar = ['\x00', '\x00']

    def get_charset_name(self):
        # Abstract: concrete probers return their charset name.
        pass

    def feed(self, aBuf):
        """Feed a buffer of bytes through the state machine and analyzer.

        Returns the prober state (eDetecting / eFoundIt / eNotMe).
        """
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == eError:
                # Byte sequence is illegal for this encoding.
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i) + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == eStart:
                # A complete character just finished; feed its bytes to the
                # distribution analyzer.  At i == 0 the character started in
                # the previous buffer, so splice in the saved last byte.
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    self._mLastChar[1] = aBuf[0]
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mDistributionAnalyzer.feed(aBuf[i-1:i+1], charLen)

        # Remember the final byte for the next feed() call.
        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            # Shortcut: once enough data gives a confident answer, commit.
            if self._mDistributionAnalyzer.got_enough_data() and \
               (self.get_confidence() > constants.SHORTCUT_THRESHOLD):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        return self._mDistributionAnalyzer.get_confidence()
mit
PinguinoIDE/pinguino-multilanguage
files/ide/widgets/python_highlighter.py
1
1974
#! /usr/bin/python
#-*- coding: utf-8 -*-

from PySide import QtGui, QtCore


########################################################################
class Highlighter(QtGui.QSyntaxHighlighter):
    """Regex-based syntax highlighter for the Pinguino output widget.

    Rules are defined as (pattern, QTextCharFormat) pairs and compiled to
    QRegExp objects once in ``__init__``; ``highlightBlock`` then only
    runs the pre-compiled expressions.
    """

    #----------------------------------------------------------------------
    def __init__(self, parent, extra):
        """Build the rule table.

        :param parent: document/widget passed to QSyntaxHighlighter.
        :param extra: sequence whose first element is a literal command
            prefix to highlight (dots are escaped for the regex).
        """
        super(Highlighter, self).__init__(parent)
        color = QtGui.QColor
        self.highlightingRules = []

        operators = QtGui.QTextCharFormat()
        operators.setFontWeight(QtGui.QFont.Bold)
        self.highlightingRules.append(("[()\[\]{}<>=\-\+\*\\%#!~&^,/]", operators))

        start_command = QtGui.QTextCharFormat()
        start_command.setForeground(color("#729fcf"))
        self.highlightingRules.append((extra[0].replace(".", "\."), start_command))

        #line_command = QtGui.QTextCharFormat()
        #line_command.setForeground(color("#ef292a"))
        #self.highlightingRules.append((extra[1].replace(".", "\."), line_command))

        sdcc_error_01 = QtGui.QTextCharFormat()
        sdcc_error_01.setForeground(color("#ef292a"))
        self.highlightingRules.append(("ERROR: .*", sdcc_error_01))

        #sdcc_error_02 = QtGui.QTextCharFormat()
        #sdcc_error_02.setForeground(color("#ef292a"))
        #self.highlightingRules.append(("\\b[\d]+: .*", sdcc_error_02))

        reserved = QtGui.QTextCharFormat()
        reserved.setForeground(color("#8ae234"))
        self.highlightingRules.append(("\\b(None|False|True|def|class)\\b", reserved))

        # Compile every pattern exactly once: highlightBlock() is invoked for
        # each text block on every document change, and the previous version
        # rebuilt a QRegExp per pattern per call.  highlightingRules now holds
        # (QRegExp, QTextCharFormat) pairs instead of (str, format) pairs.
        self.highlightingRules = [(QtCore.QRegExp(pattern), format_)
                                  for pattern, format_ in self.highlightingRules]

    #----------------------------------------------------------------------
    def highlightBlock(self, text):
        """Apply every matching rule's format to one block of *text*."""
        for expression, format_ in self.highlightingRules:
            index = expression.indexIn(text)
            while index >= 0:
                length = expression.matchedLength()
                self.setFormat(index, length, format_)
                # Resume the search just past the current match.
                index = expression.indexIn(text, index + length)
gpl-2.0
elsonidoq/fito
fito/model/word2vec.py
1
1261
import traceback
import warnings

from fito.model import Model
from fito.model import ModelParameter
from fito.specs.fields import CollectionField, PrimitiveField

# gensim is an optional dependency: keep the module importable without it,
# but surface the failure so the user knows Word2Vec.apply() will not work.
try:
    from gensim.models import Word2Vec as GensimWord2Vec
except ImportError:
    traceback.print_exc()
    warnings.warn('Could not import Word2Vec')


class Word2Vec(Model):
    """fito Model wrapper around gensim's Word2Vec.

    Field numbers are positional indices in the fito spec; the keyword
    names mirror gensim's Word2Vec constructor arguments.  Fields marked
    ``serialize=False`` are excluded from the spec's serialized form.
    """

    # Training corpus (iterable of token lists) -- not serialized.
    sentences = CollectionField(0, serialize=False)
    # Embedding dimensionality.
    size = ModelParameter(1, default=100)
    # Initial learning rate.
    alpha = ModelParameter(2, default=0.025)
    # Context window size.
    window = ModelParameter(3, default=5)
    # Ignore tokens with total frequency below this.
    min_count = ModelParameter(4, default=5)
    max_vocab_size = ModelParameter(5, default=None)
    # Downsampling threshold for frequent words.
    sample = ModelParameter(6, default=0.001)
    seed = ModelParameter(7, default=1)
    workers = PrimitiveField(8, default=3, serialize=False)
    train_iterator = PrimitiveField(
        default=None,
        help='When sentences is not a rewindable iterator, you must specify another copy of it here',
        serialize=False
    )

    def apply(self, runner):
        """Build the vocabulary and train a gensim Word2Vec model.

        :param runner: fito runner (unused here, required by the Model API).
        :returns: the trained gensim model.
        """
        # NOTE(review): presumably to_kwargs() maps the fields above onto
        # gensim constructor keywords -- confirm it excludes/includes
        # 'sentences' as intended, since gensim trains eagerly when given
        # sentences at construction time.
        kwargs = self.to_kwargs()
        model = GensimWord2Vec(**kwargs)
        # When no separate training iterator was supplied, assume
        # ``sentences`` can be iterated twice (vocab pass + train pass).
        if self.train_iterator is None:
            self.train_iterator = self.sentences
        model.build_vocab(self.sentences)
        model.train(self.train_iterator)
        return model
mit
richard-willowit/odoo
addons/account_budget/tests/common.py
34
10777
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo.addons.account.tests.account_test_classes import AccountingTestCase
import time


class TestAccountBudgetCommon(AccountingTestCase):
    """Common fixture for the account_budget tests.

    Creates two budgetary positions (Sales and Purchases) and a set of
    budget lines over next year, attached to the demo analytic accounts
    and to the optimistic/pessimistic demo budgets.
    """

    # XML ids used repeatedly below, aliased for readability.
    CAMP = 'analytic.analytic_partners_camp_to_camp'
    PRODUCT = 'analytic.analytic_our_super_product'
    SEAGATE = 'analytic.analytic_seagate_p2'
    PESSIMISTIC = 'account_budget.crossovered_budget_budgetpessimistic0'
    OPTIMISTIC = 'account_budget.crossovered_budget_budgetoptimistic0'

    def _create_budget_position(self, name, account_domain, account_vals):
        """Return a new account.budget.post named *name*.

        The position is linked to the accounts matching *account_domain*;
        if none exist (demo data variations), a fallback account is
        created from *account_vals*.
        """
        account_ids = self.env['account.account'].search(account_domain).ids
        if not account_ids:
            account_ids = self.env['account.account'].create(account_vals).ids
        return self.env['account.budget.post'].create({
            'name': name,
            'account_ids': [(6, None, account_ids)],
        })

    def _create_budget_line(self, year, budget_post, line):
        """Create one crossovered.budget.lines record.

        :param year: 4-digit year string prepended to all 'MM-DD' dates.
        :param budget_post: the account.budget.post the line belongs to.
        :param line: tuple ``(analytic_ref, date_from, date_to,
            planned_amount, budget_ref, paid_date)`` where dates are
            'MM-DD' strings and paid_date may be None.
        """
        analytic_ref, date_from, date_to, amount, budget_ref, paid_date = line
        vals = {
            'analytic_account_id': self.ref(analytic_ref),
            'general_budget_id': budget_post.id,
            'date_from': '%s-%s' % (year, date_from),
            'date_to': '%s-%s' % (year, date_to),
            'planned_amount': amount,
            'crossovered_budget_id': self.ref(budget_ref),
        }
        if paid_date:
            vals['paid_date'] = '%s-%s' % (year, paid_date)
        self.env['crossovered.budget.lines'].create(vals)

    def setUp(self):
        super(TestAccountBudgetCommon, self).setUp()
        # All fixture dates fall in next year so the budgets never collide
        # with real accounting entries created "today".  Computed once
        # (the original re-evaluated time.localtime() per line).
        year = str(time.localtime(time.time())[0] + 1)

        # Budgetary position for revenue accounts tagged "Operating Activities".
        self.account_budget_post_sales0 = self._create_budget_position(
            'Sales',
            [('user_type_id', '=', self.ref('account.data_account_type_revenue')),
             ('tag_ids.name', 'in', ['Operating Activities'])],
            {
                'name': 'Product Sales - (test)',
                'code': 'X2020',
                'user_type_id': self.ref('account.data_account_type_revenue'),
                'tag_ids': [(6, 0, [self.ref('account.account_tag_operating')])],
            },
        )

        # (analytic, date_from, date_to, planned_amount, budget, paid_date)
        sales_lines = [
            (self.CAMP, '01-01', '01-31', 500.0, self.PESSIMISTIC, None),
            (self.CAMP, '02-07', '02-28', 900.0, self.OPTIMISTIC, None),
            (self.CAMP, '03-01', '03-15', 300.0, self.OPTIMISTIC, None),
            (self.PRODUCT, '03-16', '03-31', 375.0, self.PESSIMISTIC, '12-03'),
            (self.PRODUCT, '05-01', '05-31', 375.0, self.OPTIMISTIC, '12-03'),
            (self.SEAGATE, '07-16', '07-31', 20000.0, self.PESSIMISTIC, None),
            (self.SEAGATE, '02-01', '02-28', 20000.0, self.OPTIMISTIC, None),
            (self.SEAGATE, '09-16', '09-30', 10000.0, self.PESSIMISTIC, None),
            (self.SEAGATE, '10-01', '12-31', 10000.0, self.OPTIMISTIC, None),
        ]
        for line in sales_lines:
            self._create_budget_line(year, self.account_budget_post_sales0, line)

        # Budgetary position for expense accounts tagged "Operating Activities".
        self.account_budget_post_purchase0 = self._create_budget_position(
            'Purchases',
            [('user_type_id.name', '=', 'Expenses'),
             ('tag_ids.name', 'in', ['Operating Activities'])],
            {
                'name': 'Expense - (test)',
                'code': 'X2120',
                'user_type_id': self.ref('account.data_account_type_expenses'),
                'tag_ids': [(6, 0, [self.ref('account.account_tag_operating')])],
            },
        )

        purchase_lines = [
            (self.CAMP, '01-01', '01-31', -500.0, self.PESSIMISTIC, None),
            (self.CAMP, '02-01', '02-28', -250.0, self.OPTIMISTIC, None),
            (self.PRODUCT, '04-01', '04-30', -150.0, self.PESSIMISTIC, None),
            (self.SEAGATE, '06-01', '06-15', -7500.0, self.PESSIMISTIC, None),
            (self.SEAGATE, '06-16', '06-30', -5000.0, self.PESSIMISTIC, None),
            (self.SEAGATE, '07-01', '07-15', -2000.0, self.OPTIMISTIC, None),
            (self.SEAGATE, '08-16', '08-31', -3000.0, self.PESSIMISTIC, None),
            (self.SEAGATE, '09-01', '09-15', -1000.0, self.OPTIMISTIC, None),
        ]
        for line in purchase_lines:
            self._create_budget_line(year, self.account_budget_post_purchase0, line)
gpl-3.0
Bleno/teste_api
Main.py
1
3854
#!/usr/bin/python # -*- encoding: utf-8 -*- from Pessoa import Pessoa import urllib2, urllib, json from urllib2 import URLError, HTTPError from ConfigUrl import urlpath p = Pessoa() url = urlpath() user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)' headers = { 'User-Agent' : user_agent } #data = urllib.urlencode(values) def FormInput(): try: p.nm_nome = str(raw_input("digite o nome da pessoa: ")) p.idade = int(input("digite a idade da pessoa: ")) except: print "Erro ao capturar dados" Inicio() def SendData(method = 'POST'): FormInput() dados = json.dumps(p, default=lambda o: o.__dict__) #Convertendo dicionario objeto para jsons dadosEnviar = {'json_reg': str(dados)} data = urllib.urlencode(dadosEnviar) try: if method == "PUT": global url url +="/%s" % str(p.id_reg) req = urllib2.Request(url, data, headers) req.get_method = lambda: method response = urllib2.urlopen(req) the_page = response.read() print the_page except urllib2.HTTPError as e: print "Ocorreu um erro na requisição http.\n Status: %s" % str(e.code) print e.read() Inicio() Inicio() def GetAllData(root = True): lista_codigos = [] req = urllib2.Request(url) response = urllib2.urlopen(req) the_page = response.read() a = {} a = json.loads(the_page) #convertendo de json para dicionario python print "%-7s-+-%-20s-+-%-7s" % ("-" * 7, "-" * 20, "-" * 7) #imprime linha pra formatacao print "%-7s | %-20s | %-7s" % (u"Código", "Nome", "Idade") print "%-7s-+-%-20s-+-%-7s" % ("-" * 7, "-" * 20, "-" * 7) #imprime linha pra formatacao b = a['results'] c = b #[0]#['json_reg'] for i in c: #for item in i['json_reg'].items(): print "%-7d | %-20s | %-7s" % (i['json_reg']['id_reg'],i['json_reg']['nm_nome'], i['json_reg']['idade']) if root != True: lista_codigos.append(i['json_reg']['id_reg']) #for nm_nome, idade in c.items(): # print "%-20s | %-7s" % (nm_nome, idade) print "%-7s-+-%-20s-+-%-7s" % ("-" * 7, "-" * 20, "-" * 7) #imprime linha pra formatacao if root: Inicio() else: return lista_codigos def 
AlteraDados(): lista = GetAllData(False) #try: codigo = int(raw_input("digite o código do dado a ser alterado: ")) contem_valor = False for i in lista: if i == codigo: contem_valor = True if contem_valor == True: p.id_reg = codigo SendData("PUT") else: print "O código não existe no banco" Inicio() # except: # print "Erro ao capturar dados" def ExcluirRegistro(): try: list_id = GetAllData(False) codigo = int(raw_input("digite o código do registro a ser excluído: ")) global url url +="/%s" % str(codigo) req = urllib2.Request(url) req.get_method = lambda: "DELETE" response = urllib2.urlopen(req) the_page = response.read() print the_page Inicio() except urllib2.HTTPError as e: print "Ocorreu um erro na requisição http.\n Status: %s" % str(e.code) print e.read() def ChooseApp(num): if num == 1: GetAllData() elif num == 2: SendData() elif num == 3: AlteraDados() elif num == 4: ExcluirRegistro() else: print 'Escolha uma opção!!!' Inicio() def Inicio(): escolha = input("[1]- Buscar Dados.\n[2]- Cadastrar Pessoa.\n[3]- Editar Dados.\n[4]- Excluir Registro.\nEscolha uma opção: ") ChooseApp(escolha) if __name__ == "__main__": try: Inicio() except KeyboardInterrupt: print "\nA execução foi interrompitada ^C"
mit
SimonSapin/pycairo
test/ref_count_test.py
8
1807
'''test for reference counting problems. If a Python object is garbage collected while another object is using its data, you will get a segmentation fault. ''' import array import gc import tempfile as tfi import cairo import py.test as test width, height = 256, 256 def draw(ctx, width, height): "example draw code" ctx.scale(width/1.0, height/1.0) pat = cairo.LinearGradient(0.0, 0.0, 0.0, 1.0) pat.add_color_stop_rgba(1, 0, 0, 0, 1) pat.add_color_stop_rgba(0, 1, 1, 1, 1) ctx.rectangle(0,0,1,1) ctx.set_source(pat) ctx.fill() def test_create_for_stream(): def run_test(surface_method, suffix): _, fo = tfi.mkstemp(prefix='pycairo_', suffix=suffix) surface = surface_method(fo, width, height) ctx = cairo.Context(surface) del fo # test that 'fo' is referenced to keep it alive gc.collect() draw(ctx, width, height) ctx.show_page() surface.finish() if cairo.HAS_PDF_SURFACE: run_test(cairo.PDFSurface, '.pdf') if cairo.HAS_PS_SURFACE: run_test(cairo.PSSurface, '.ps') if cairo.HAS_SVG_SURFACE: run_test(cairo.SVGSurface, '.svg') def test_get_data(): surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height) memView = surface.get_data() del surface # test that 'surface' is referenced to keep it alive gc.collect() memView[0] = b'\xFF' data = memView.tobytes() def test_create_for_data(): data = array.array('B', [0] * width * height * 4) surface = cairo.ImageSurface.create_for_data(data, cairo.FORMAT_ARGB32, width, height) ctx = cairo.Context(surface) del data # test that 'data' is referenced to keep it alive gc.collect() draw(ctx, width, height) _, fo = tfi.mkstemp(prefix='pycairo_', suffix='.png') surface.write_to_png(fo)
gpl-3.0
pavel-odintsov/MoonGen
deps/dpdk/app/cmdline_test/cmdline_test.py
40
3778
#!/usr/bin/python # BSD LICENSE # # Copyright(c) 2010-2014 Intel Corporation. All rights reserved. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of Intel Corporation nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # Script that runs cmdline_test app and feeds keystrokes into it. 
import sys, pexpect, string, os, cmdline_test_data # # function to run test # def runTest(child,test): child.send(test["Sequence"]) if test["Result"] == None: return 0 child.expect(test["Result"],1) # # history test is a special case # # This test does the following: # 1) fills the history with garbage up to its full capacity # (just enough to remove last entry) # 2) scrolls back history to the very beginning # 3) checks if the output is as expected, that is, the first # number in the sequence (not the last entry before it) # # This is a self-contained test, it needs only a pexpect child # def runHistoryTest(child): # find out history size child.sendline(cmdline_test_data.CMD_GET_BUFSIZE) child.expect("History buffer size: \\d+", timeout=1) history_size = int(child.after[len(cmdline_test_data.BUFSIZE_TEMPLATE):]) i = 0 # fill the history with numbers while i < history_size / 10: # add 1 to prevent from parsing as octals child.send("1" + str(i).zfill(8) + cmdline_test_data.ENTER) # the app will simply print out the number child.expect(str(i + 100000000), timeout=1) i += 1 # scroll back history child.send(cmdline_test_data.UP * (i + 2) + cmdline_test_data.ENTER) child.expect("100000000", timeout=1) # the path to cmdline_test executable is supplied via command-line. if len(sys.argv) < 2: print "Error: please supply cmdline_test app path" sys.exit(1) test_app_path = sys.argv[1] if not os.path.exists(test_app_path): print "Error: please supply cmdline_test app path" sys.exit(1) child = pexpect.spawn(test_app_path) print "Running command-line tests..." for test in cmdline_test_data.tests: print (test["Name"] + ":").ljust(30), try: runTest(child,test) print "PASS" except: print "FAIL" print child sys.exit(1) # since last test quits the app, run new instance child = pexpect.spawn(test_app_path) print ("History fill test:").ljust(30), try: runHistoryTest(child) print "PASS" except: print "FAIL" print child sys.exit(1) child.close() sys.exit(0)
mit
Learningtribes/edx-platform
common/test/acceptance/pages/lms/annotation_component.py
159
3645
""" Annotation Component Page. """ from bok_choy.page_object import PageObject from selenium.webdriver import ActionChains class AnnotationComponentPage(PageObject): """ View of annotation component page. """ url = None active_problem = 0 def is_browser_on_page(self): return self.q(css='.annotatable-title').present @property def component_name(self): """ Return the current problem name. """ return self.q(css='.annotatable-title').text[0] def click_reply_annotation(self, problem): """ Mouse over on annotation selector and click on "Reply to Annotation". """ annotation_span_selector = '.annotatable-span[data-problem-id="{}"]'.format(problem) self.mouse_hover(self.browser.find_element_by_css_selector(annotation_span_selector)) self.wait_for_element_visibility(annotation_span_selector, "Reply to Annotation link is visible") annotation_reply_selector = '.annotatable-reply[data-problem-id="{}"]'.format(problem) self.q(css=annotation_reply_selector).click() self.active_problem = problem def active_problem_selector(self, sub_selector): """ Return css selector for current active problem with sub_selector. """ return 'div[data-problem-id="{}"] {}'.format( self.q(css='.vert-{}'.format(self.active_problem + 1)).map( lambda el: el.get_attribute('data-id')).results[0], sub_selector, ) def mouse_hover(self, element): """ Mouse over on given element. """ mouse_hover_action = ActionChains(self.browser).move_to_element(element) mouse_hover_action.perform() def check_scroll_to_problem(self): """ Return visibility of active problem's input selector. """ annotation_input_selector = self.active_problem_selector('.annotation-input') return self.q(css=annotation_input_selector).visible def answer_problem(self): """ Submit correct answer for active problem. 
""" self.q(css=self.active_problem_selector('.comment')).fill('Test Response') answer_css = self.active_problem_selector('.tag[data-id="{}"]'.format(self.active_problem)) # Selenium will first move the element into view then click on it. self.q(css=answer_css).click() # Wait for the click to take effect, which is after the class is applied. self.wait_for(lambda: 'selected' in self.q(css=answer_css).attrs('class')[0], description='answer selected') # Click the "Check" button. self.q(css=self.active_problem_selector('.check')).click() # This will trigger a POST to problem_check so wait until the response is returned. self.wait_for_ajax() def check_feedback(self): """ Return visibility of active problem's feedback. """ self.wait_for_element_visibility( self.active_problem_selector('.tag-status.correct'), "Correct is visible" ) return self.q(css=self.active_problem_selector('.tag-status.correct')).visible def click_return_to_annotation(self): """ Click on active problem's "Return to Annotation" link. """ self.q(css=self.active_problem_selector('.annotation-return')).click() def check_scroll_to_annotation(self): """ Return visibility of active annotation component header. """ annotation_header_selector = '.annotation-header' return self.q(css=annotation_header_selector).visible
agpl-3.0
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-3.1/Lib/test/test_logging.py
2
30442
#!/usr/bin/env python # # Copyright 2001-2009 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, # provided that the above copyright notice appear in all copies and that # both that copyright notice and this permission notice appear in # supporting documentation, and that the name of Vinay Sajip # not be used in advertising or publicity pertaining to distribution # of the software without specific, written prior permission. # VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING # ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL # VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR # ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER # IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """Test harness for the logging module. Run all tests. Copyright (C) 2001-2009 Vinay Sajip. All Rights Reserved. 
""" import logging import logging.handlers import logging.config import codecs import copy import pickle import io import gc import os import re import select import socket from socketserver import ThreadingTCPServer, StreamRequestHandler import string import struct import sys import tempfile from test.support import captured_stdout, run_with_locale, run_unittest import textwrap import threading import time import types import unittest import warnings import weakref class BaseTest(unittest.TestCase): """Base class for logging tests.""" log_format = "%(name)s -> %(levelname)s: %(message)s" expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$" message_num = 0 def setUp(self): """Setup the default logging stream to an internal StringIO instance, so that we can examine log output as we want.""" logger_dict = logging.getLogger().manager.loggerDict logging._acquireLock() try: self.saved_handlers = logging._handlers.copy() self.saved_handler_list = logging._handlerList[:] self.saved_loggers = logger_dict.copy() self.saved_level_names = logging._levelNames.copy() finally: logging._releaseLock() self.root_logger = logging.getLogger("") self.original_logging_level = self.root_logger.getEffectiveLevel() self.stream = io.StringIO() self.root_logger.setLevel(logging.DEBUG) self.root_hdlr = logging.StreamHandler(self.stream) self.root_formatter = logging.Formatter(self.log_format) self.root_hdlr.setFormatter(self.root_formatter) self.root_logger.addHandler(self.root_hdlr) def tearDown(self): """Remove our logging stream, and restore the original logging level.""" self.stream.close() self.root_logger.removeHandler(self.root_hdlr) self.root_logger.setLevel(self.original_logging_level) logging._acquireLock() try: logging._levelNames.clear() logging._levelNames.update(self.saved_level_names) logging._handlers.clear() logging._handlers.update(self.saved_handlers) logging._handlerList[:] = self.saved_handler_list loggerDict = logging.getLogger().manager.loggerDict loggerDict.clear() 
loggerDict.update(self.saved_loggers) finally: logging._releaseLock() def assert_log_lines(self, expected_values, stream=None): """Match the collected log lines against the regular expression self.expected_log_pat, and compare the extracted group values to the expected_values list of tuples.""" stream = stream or self.stream pat = re.compile(self.expected_log_pat) try: stream.reset() actual_lines = stream.readlines() except AttributeError: # StringIO.StringIO lacks a reset() method. actual_lines = stream.getvalue().splitlines() self.assertEquals(len(actual_lines), len(expected_values)) for actual, expected in zip(actual_lines, expected_values): match = pat.search(actual) if not match: self.fail("Log line does not match expected pattern:\n" + actual) self.assertEquals(tuple(match.groups()), expected) s = stream.read() if s: self.fail("Remaining output at end of log stream:\n" + s) def next_message(self): """Generate a message consisting solely of an auto-incrementing integer.""" self.message_num += 1 return "%d" % self.message_num class BuiltinLevelsTest(BaseTest): """Test builtin levels and their inheritance.""" def test_flat(self): #Logging levels in a flat logger namespace. m = self.next_message ERR = logging.getLogger("ERR") ERR.setLevel(logging.ERROR) INF = logging.getLogger("INF") INF.setLevel(logging.INFO) DEB = logging.getLogger("DEB") DEB.setLevel(logging.DEBUG) # These should log. ERR.log(logging.CRITICAL, m()) ERR.error(m()) INF.log(logging.CRITICAL, m()) INF.error(m()) INF.warn(m()) INF.info(m()) DEB.log(logging.CRITICAL, m()) DEB.error(m()) DEB.warn (m()) DEB.info (m()) DEB.debug(m()) # These should not log. 
ERR.warn(m()) ERR.info(m()) ERR.debug(m()) INF.debug(m()) self.assert_log_lines([ ('ERR', 'CRITICAL', '1'), ('ERR', 'ERROR', '2'), ('INF', 'CRITICAL', '3'), ('INF', 'ERROR', '4'), ('INF', 'WARNING', '5'), ('INF', 'INFO', '6'), ('DEB', 'CRITICAL', '7'), ('DEB', 'ERROR', '8'), ('DEB', 'WARNING', '9'), ('DEB', 'INFO', '10'), ('DEB', 'DEBUG', '11'), ]) def test_nested_explicit(self): # Logging levels in a nested namespace, all explicitly set. m = self.next_message INF = logging.getLogger("INF") INF.setLevel(logging.INFO) INF_ERR = logging.getLogger("INF.ERR") INF_ERR.setLevel(logging.ERROR) # These should log. INF_ERR.log(logging.CRITICAL, m()) INF_ERR.error(m()) # These should not log. INF_ERR.warn(m()) INF_ERR.info(m()) INF_ERR.debug(m()) self.assert_log_lines([ ('INF.ERR', 'CRITICAL', '1'), ('INF.ERR', 'ERROR', '2'), ]) def test_nested_inherited(self): #Logging levels in a nested namespace, inherited from parent loggers. m = self.next_message INF = logging.getLogger("INF") INF.setLevel(logging.INFO) INF_ERR = logging.getLogger("INF.ERR") INF_ERR.setLevel(logging.ERROR) INF_UNDEF = logging.getLogger("INF.UNDEF") INF_ERR_UNDEF = logging.getLogger("INF.ERR.UNDEF") UNDEF = logging.getLogger("UNDEF") # These should log. INF_UNDEF.log(logging.CRITICAL, m()) INF_UNDEF.error(m()) INF_UNDEF.warn(m()) INF_UNDEF.info(m()) INF_ERR_UNDEF.log(logging.CRITICAL, m()) INF_ERR_UNDEF.error(m()) # These should not log. INF_UNDEF.debug(m()) INF_ERR_UNDEF.warn(m()) INF_ERR_UNDEF.info(m()) INF_ERR_UNDEF.debug(m()) self.assert_log_lines([ ('INF.UNDEF', 'CRITICAL', '1'), ('INF.UNDEF', 'ERROR', '2'), ('INF.UNDEF', 'WARNING', '3'), ('INF.UNDEF', 'INFO', '4'), ('INF.ERR.UNDEF', 'CRITICAL', '5'), ('INF.ERR.UNDEF', 'ERROR', '6'), ]) def test_nested_with_virtual_parent(self): # Logging levels when some parent does not exist yet. 
m = self.next_message INF = logging.getLogger("INF") GRANDCHILD = logging.getLogger("INF.BADPARENT.UNDEF") CHILD = logging.getLogger("INF.BADPARENT") INF.setLevel(logging.INFO) # These should log. GRANDCHILD.log(logging.FATAL, m()) GRANDCHILD.info(m()) CHILD.log(logging.FATAL, m()) CHILD.info(m()) # These should not log. GRANDCHILD.debug(m()) CHILD.debug(m()) self.assert_log_lines([ ('INF.BADPARENT.UNDEF', 'CRITICAL', '1'), ('INF.BADPARENT.UNDEF', 'INFO', '2'), ('INF.BADPARENT', 'CRITICAL', '3'), ('INF.BADPARENT', 'INFO', '4'), ]) class BasicFilterTest(BaseTest): """Test the bundled Filter class.""" def test_filter(self): # Only messages satisfying the specified criteria pass through the # filter. filter_ = logging.Filter("spam.eggs") handler = self.root_logger.handlers[0] try: handler.addFilter(filter_) spam = logging.getLogger("spam") spam_eggs = logging.getLogger("spam.eggs") spam_eggs_fish = logging.getLogger("spam.eggs.fish") spam_bakedbeans = logging.getLogger("spam.bakedbeans") spam.info(self.next_message()) spam_eggs.info(self.next_message()) # Good. spam_eggs_fish.info(self.next_message()) # Good. spam_bakedbeans.info(self.next_message()) self.assert_log_lines([ ('spam.eggs', 'INFO', '2'), ('spam.eggs.fish', 'INFO', '3'), ]) finally: handler.removeFilter(filter_) # # First, we define our levels. There can be as many as you want - the only # limitations are that they should be integers, the lowest should be > 0 and # larger values mean less information being logged. If you need specific # level values which do not fit into these limitations, you can use a # mapping dictionary to convert between your application levels and the # logging system. # SILENT = 120 TACITURN = 119 TERSE = 118 EFFUSIVE = 117 SOCIABLE = 116 VERBOSE = 115 TALKATIVE = 114 GARRULOUS = 113 CHATTERBOX = 112 BORING = 111 LEVEL_RANGE = range(BORING, SILENT + 1) # # Next, we define names for our levels. 
You don't need to do this - in which # case the system will use "Level n" to denote the text for the level. # my_logging_levels = { SILENT : 'Silent', TACITURN : 'Taciturn', TERSE : 'Terse', EFFUSIVE : 'Effusive', SOCIABLE : 'Sociable', VERBOSE : 'Verbose', TALKATIVE : 'Talkative', GARRULOUS : 'Garrulous', CHATTERBOX : 'Chatterbox', BORING : 'Boring', } class GarrulousFilter(logging.Filter): """A filter which blocks garrulous messages.""" def filter(self, record): return record.levelno != GARRULOUS class VerySpecificFilter(logging.Filter): """A filter which blocks sociable and taciturn messages.""" def filter(self, record): return record.levelno not in [SOCIABLE, TACITURN] class CustomLevelsAndFiltersTest(BaseTest): """Test various filtering possibilities with custom logging levels.""" # Skip the logger name group. expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" def setUp(self): BaseTest.setUp(self) for k, v in list(my_logging_levels.items()): logging.addLevelName(k, v) def log_at_all_levels(self, logger): for lvl in LEVEL_RANGE: logger.log(lvl, self.next_message()) def test_logger_filter(self): # Filter at logger level. self.root_logger.setLevel(VERBOSE) # Levels >= 'Verbose' are good. self.log_at_all_levels(self.root_logger) self.assert_log_lines([ ('Verbose', '5'), ('Sociable', '6'), ('Effusive', '7'), ('Terse', '8'), ('Taciturn', '9'), ('Silent', '10'), ]) def test_handler_filter(self): # Filter at handler level. self.root_logger.handlers[0].setLevel(SOCIABLE) try: # Levels >= 'Sociable' are good. self.log_at_all_levels(self.root_logger) self.assert_log_lines([ ('Sociable', '6'), ('Effusive', '7'), ('Terse', '8'), ('Taciturn', '9'), ('Silent', '10'), ]) finally: self.root_logger.handlers[0].setLevel(logging.NOTSET) def test_specific_filters(self): # Set a specific filter object on the handler, and then add another # filter object on the logger itself. 
handler = self.root_logger.handlers[0] specific_filter = None garr = GarrulousFilter() handler.addFilter(garr) try: self.log_at_all_levels(self.root_logger) first_lines = [ # Notice how 'Garrulous' is missing ('Boring', '1'), ('Chatterbox', '2'), ('Talkative', '4'), ('Verbose', '5'), ('Sociable', '6'), ('Effusive', '7'), ('Terse', '8'), ('Taciturn', '9'), ('Silent', '10'), ] self.assert_log_lines(first_lines) specific_filter = VerySpecificFilter() self.root_logger.addFilter(specific_filter) self.log_at_all_levels(self.root_logger) self.assert_log_lines(first_lines + [ # Not only 'Garrulous' is still missing, but also 'Sociable' # and 'Taciturn' ('Boring', '11'), ('Chatterbox', '12'), ('Talkative', '14'), ('Verbose', '15'), ('Effusive', '17'), ('Terse', '18'), ('Silent', '20'), ]) finally: if specific_filter: self.root_logger.removeFilter(specific_filter) handler.removeFilter(garr) class MemoryHandlerTest(BaseTest): """Tests for the MemoryHandler.""" # Do not bother with a logger name group. expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$" def setUp(self): BaseTest.setUp(self) self.mem_hdlr = logging.handlers.MemoryHandler(10, logging.WARNING, self.root_hdlr) self.mem_logger = logging.getLogger('mem') self.mem_logger.propagate = 0 self.mem_logger.addHandler(self.mem_hdlr) def tearDown(self): self.mem_hdlr.close() BaseTest.tearDown(self) def test_flush(self): # The memory handler flushes to its target handler based on specific # criteria (message count and message level). 
self.mem_logger.debug(self.next_message()) self.assert_log_lines([]) self.mem_logger.info(self.next_message()) self.assert_log_lines([]) # This will flush because the level is >= logging.WARNING self.mem_logger.warn(self.next_message()) lines = [ ('DEBUG', '1'), ('INFO', '2'), ('WARNING', '3'), ] self.assert_log_lines(lines) for n in (4, 14): for i in range(9): self.mem_logger.debug(self.next_message()) self.assert_log_lines(lines) # This will flush because it's the 10th message since the last # flush. self.mem_logger.debug(self.next_message()) lines = lines + [('DEBUG', str(i)) for i in range(n, n + 10)] self.assert_log_lines(lines) self.mem_logger.debug(self.next_message()) self.assert_log_lines(lines) class ExceptionFormatter(logging.Formatter): """A special exception formatter.""" def formatException(self, ei): return "Got a [%s]" % ei[0].__name__ class ConfigFileTest(BaseTest): """Reading logging config from a .ini-style config file.""" expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$" # config0 is a standard configuration. config0 = """ [loggers] keys=root [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=WARNING handlers=hand1 [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= """ # config1 adds a little to the standard configuration. 
config1 = """ [loggers] keys=root,parser [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=WARNING handlers= [logger_parser] level=DEBUG handlers=hand1 propagate=1 qualname=compiler.parser [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= """ # config2 has a subtle configuration error that should be reported config2 = config1.replace("sys.stdout", "sys.stbout") # config3 has a less subtle configuration error config3 = config1.replace("formatter=form1", "formatter=misspelled_name") # config4 specifies a custom formatter class to be loaded config4 = """ [loggers] keys=root [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=NOTSET handlers=hand1 [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] class=""" + __name__ + """.ExceptionFormatter format=%(levelname)s:%(name)s:%(message)s datefmt= """ # config5 specifies a custom handler class to be loaded config5 = config1.replace('class=StreamHandler', 'class=logging.StreamHandler') # config6 uses ', ' delimiters in the handlers and formatters sections config6 = """ [loggers] keys=root,parser [handlers] keys=hand1, hand2 [formatters] keys=form1, form2 [logger_root] level=WARNING handlers= [logger_parser] level=DEBUG handlers=hand1 propagate=1 qualname=compiler.parser [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [handler_hand2] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stderr,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= [formatter_form2] format=%(message)s datefmt= """ def apply_config(self, conf): try: fn = tempfile.mktemp(".ini") f = open(fn, "w") f.write(textwrap.dedent(conf)) f.close() logging.config.fileConfig(fn) finally: os.remove(fn) def test_config0_ok(self): # A simple config file which overrides the default settings. 
with captured_stdout() as output: self.apply_config(self.config0) logger = logging.getLogger() # Won't output anything logger.info(self.next_message()) # Outputs a message logger.error(self.next_message()) self.assert_log_lines([ ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config1_ok(self, config=config1): # A config file defining a sub-parser as well. with captured_stdout() as output: self.apply_config(config) logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config2_failure(self): # A simple config file which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config2) def test_config3_failure(self): # A simple config file which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config3) def test_config4_ok(self): # A config file specifying a custom formatter class. with captured_stdout() as output: self.apply_config(self.config4) logger = logging.getLogger() try: raise RuntimeError() except RuntimeError: logging.exception("just testing") sys.stdout.seek(0) self.assertEquals(output.getvalue(), "ERROR:root:just testing\nGot a [RuntimeError]\n") # Original logger output is empty self.assert_log_lines([]) def test_config5_ok(self): self.test_config1_ok(config=self.config5) def test_config6_ok(self): self.test_config1_ok(config=self.config6) class LogRecordStreamHandler(StreamRequestHandler): """Handler for a streaming logging request. It saves the log message in the TCP server's 'log_output' attribute.""" TCP_LOG_END = "!!!END!!!" def handle(self): """Handle multiple requests - each expected to be of 4-byte length, followed by the LogRecord in pickle format. 
Logs the record according to whatever policy is configured locally.""" while True: chunk = self.connection.recv(4) if len(chunk) < 4: break slen = struct.unpack(">L", chunk)[0] chunk = self.connection.recv(slen) while len(chunk) < slen: chunk = chunk + self.connection.recv(slen - len(chunk)) obj = self.unpickle(chunk) record = logging.makeLogRecord(obj) self.handle_log_record(record) def unpickle(self, data): return pickle.loads(data) def handle_log_record(self, record): # If the end-of-messages sentinel is seen, tell the server to # terminate. if self.TCP_LOG_END in record.msg: self.server.abort = 1 return self.server.log_output += record.msg + "\n" class LogRecordSocketReceiver(ThreadingTCPServer): """A simple-minded TCP socket-based logging receiver suitable for test purposes.""" allow_reuse_address = 1 log_output = "" def __init__(self, host='localhost', port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, handler=LogRecordStreamHandler): ThreadingTCPServer.__init__(self, (host, port), handler) self.abort = False self.timeout = 0.1 self.finished = threading.Event() def serve_until_stopped(self): while not self.abort: rd, wr, ex = select.select([self.socket.fileno()], [], [], self.timeout) if rd: self.handle_request() # Notify the main thread that we're about to exit self.finished.set() # close the listen socket self.server_close() class SocketHandlerTest(BaseTest): """Test for SocketHandler objects.""" def setUp(self): """Set up a TCP server to receive log messages, and a SocketHandler pointing to that server's address and port.""" BaseTest.setUp(self) self.tcpserver = LogRecordSocketReceiver(port=0) self.port = self.tcpserver.socket.getsockname()[1] self.threads = [ threading.Thread(target=self.tcpserver.serve_until_stopped)] for thread in self.threads: thread.start() self.sock_hdlr = logging.handlers.SocketHandler('localhost', self.port) self.sock_hdlr.setFormatter(self.root_formatter) self.root_logger.removeHandler(self.root_logger.handlers[0]) 
self.root_logger.addHandler(self.sock_hdlr) def tearDown(self): """Shutdown the TCP server.""" try: self.tcpserver.abort = True del self.tcpserver self.root_logger.removeHandler(self.sock_hdlr) self.sock_hdlr.close() for thread in self.threads: thread.join(2.0) finally: BaseTest.tearDown(self) def get_output(self): """Get the log output as received by the TCP server.""" # Signal the TCP receiver and wait for it to terminate. self.root_logger.critical(LogRecordStreamHandler.TCP_LOG_END) self.tcpserver.finished.wait(2.0) return self.tcpserver.log_output def test_output(self): # The log message sent to the SocketHandler is properly received. logger = logging.getLogger("tcp") logger.error("spam") logger.debug("eggs") self.assertEquals(self.get_output(), "spam\neggs\n") class MemoryTest(BaseTest): """Test memory persistence of logger objects.""" def setUp(self): """Create a dict to remember potentially destroyed objects.""" BaseTest.setUp(self) self._survivors = {} def _watch_for_survival(self, *args): """Watch the given objects for survival, by creating weakrefs to them.""" for obj in args: key = id(obj), repr(obj) self._survivors[key] = weakref.ref(obj) def _assert_survival(self): """Assert that all objects watched for survival have survived.""" # Trigger cycle breaking. gc.collect() dead = [] for (id_, repr_), ref in list(self._survivors.items()): if ref() is None: dead.append(repr_) if dead: self.fail("%d objects should have survived " "but have been destroyed: %s" % (len(dead), ", ".join(dead))) def test_persistent_loggers(self): # Logger objects are persistent and retain their configuration, even # if visible references are destroyed. self.root_logger.setLevel(logging.INFO) foo = logging.getLogger("foo") self._watch_for_survival(foo) foo.setLevel(logging.DEBUG) self.root_logger.debug(self.next_message()) foo.debug(self.next_message()) self.assert_log_lines([ ('foo', 'DEBUG', '2'), ]) del foo # foo has survived. 
self._assert_survival() # foo has retained its settings. bar = logging.getLogger("foo") bar.debug(self.next_message()) self.assert_log_lines([ ('foo', 'DEBUG', '2'), ('foo', 'DEBUG', '3'), ]) class EncodingTest(BaseTest): def test_encoding_plain_file(self): # In Python 2.x, a plain file object is treated as having no encoding. log = logging.getLogger("test") fn = tempfile.mktemp(".log") # the non-ascii data we write to the log. data = "foo\x80" try: handler = logging.FileHandler(fn, encoding="utf8") log.addHandler(handler) try: # write non-ascii data to the log. log.warning(data) finally: log.removeHandler(handler) handler.close() # check we wrote exactly those bytes, ignoring trailing \n etc f = open(fn, encoding="utf8") try: self.failUnlessEqual(f.read().rstrip(), data) finally: f.close() finally: if os.path.isfile(fn): os.remove(fn) def test_encoding_cyrillic_unicode(self): log = logging.getLogger("test") #Get a message in Unicode: Do svidanya in Cyrillic (meaning goodbye) message = '\u0434\u043e \u0441\u0432\u0438\u0434\u0430\u043d\u0438\u044f' #Ensure it's written in a Cyrillic encoding writer_class = codecs.getwriter('cp1251') writer_class.encoding = 'cp1251' stream = io.BytesIO() writer = writer_class(stream, 'strict') handler = logging.StreamHandler(writer) log.addHandler(handler) try: log.warning(message) finally: log.removeHandler(handler) handler.close() # check we wrote exactly those bytes, ignoring trailing \n etc s = stream.getvalue() #Compare against what the data should be when encoded in CP-1251 self.assertEqual(s, b'\xe4\xee \xf1\xe2\xe8\xe4\xe0\xed\xe8\xff\n') class WarningsTest(BaseTest): def test_warnings(self): with warnings.catch_warnings(): logging.captureWarnings(True) try: warnings.filterwarnings("always", category=UserWarning) file = io.StringIO() h = logging.StreamHandler(file) logger = logging.getLogger("py.warnings") logger.addHandler(h) warnings.warn("I'm warning you...") logger.removeHandler(h) s = file.getvalue() h.close() 
self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0) #See if an explicit file uses the original implementation file = io.StringIO() warnings.showwarning("Explicit", UserWarning, "dummy.py", 42, file, "Dummy line") s = file.getvalue() file.close() self.assertEqual(s, "dummy.py:42: UserWarning: Explicit\n Dummy line\n") finally: logging.captureWarnings(False) # Set the locale to the platform-dependent default. I have no idea # why the test does this, but in any case we save the current locale # first and restore it at the end. @run_with_locale('LC_ALL', '') def test_main(): run_unittest(BuiltinLevelsTest, BasicFilterTest, CustomLevelsAndFiltersTest, MemoryHandlerTest, ConfigFileTest, SocketHandlerTest, MemoryTest, EncodingTest, WarningsTest) if __name__ == "__main__": test_main()
mit
kevinmel2000/sl4a
python/build.py
20
6064
#!/usr/bin/python

# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# Build script (Python 2) that cross-compiles CPython for Android, bundles
# third-party pure-Python libraries into the interpreter tree, strips it down,
# and zips the results into python.zip / python_extras.zip / python_scripts.zip.
# Requires arm-eabi-gcc and agcc on PATH; must be run from the python/ dir.

import compileall
import glob
import os
import re
import subprocess
import shutil
import sys
import zipfile


def run(cmd, exit=True, cwd=None):
  """Run *cmd* (whitespace-split) in *cwd*; on failure exit(1) unless exit=False."""
  print cmd
  if subprocess.Popen(cmd.split(), cwd=cwd).wait() != 0:
    if exit:
      print 'Failed!'
      sys.exit(1)
    else:
      print 'Ignoring failure.'


def find(directory, pattern=None, exclude=None):
  """Walk *directory*, returning (matches, misses) by regex *pattern*.

  Basenames listed in *exclude* are skipped entirely; excluded directories
  are pruned from the walk. With pattern=None every path is a match.
  """
  print 'Looking for paths in %r matching %r' % (directory, pattern)
  matches = []
  misses = []
  if exclude is None:
    exclude = []
  directory = os.path.abspath(directory)
  for root, dirs, files in os.walk(directory):
    for basename in dirs + files:
      if basename in exclude:
        if basename in dirs:
          dirs.remove(basename)
        continue
      path = os.path.join(root, basename)
      if pattern is None or re.search(pattern, path):
        matches.append(path)
      else:
        misses.append(path)
  print 'Found %d matches and %d misses' % (len(matches), len(misses))
  return matches, misses


def rm(path):
  """Delete a file or directory tree, ignoring OSError (e.g. already gone)."""
  print 'Deleting %r' % path
  try:
    if os.path.isdir(path):
      shutil.rmtree(path)
    else:
      os.remove(path)
  except OSError:
    pass


def strip(path):
  """Strip symbols from a cross-compiled binary with arm-eabi-strip."""
  run('arm-eabi-strip %s' % path)


def zipup(out_path, in_path, top, exclude=None, prefix=''):
  """Zip every file under *in_path* into *out_path*.

  Archive names are the paths relative to *top*, prepended with *prefix*;
  basenames in *exclude* are skipped (see find()).
  """
  zip_file = zipfile.ZipFile(out_path, 'w', compression=zipfile.ZIP_DEFLATED)
  for path in find(in_path, exclude=exclude)[0]:
    if not os.path.isdir(path):
      arcname = prefix + path[len(top):].lstrip('/')
      print 'Adding %s to %s' % (arcname, out_path)
      zip_file.write(path, arcname)
  zip_file.close()


# Find Android source path and put it in the environment.
# The toolchain lives under <android_src>/prebuilt, so locating arm-eabi-gcc
# on PATH lets us derive ANDROID_SRC.
gcc_path = subprocess.Popen(['which', 'arm-eabi-gcc'],
                            stdout=subprocess.PIPE).communicate()[0]
match = re.match(r'(.*)/prebuilt', gcc_path)
if match is None:
  print 'Could not find arm-eabi-gcc on your path.'
  sys.exit(1)
android_src = match.group(1)
os.environ['ANDROID_SRC'] = android_src
os.environ['SL4A_TRUNK'] = os.path.abspath('..');
print os.environ['SL4A_TRUNK']
agcc_path = subprocess.Popen(['which', 'agcc'],
                             stdout=subprocess.PIPE).communicate()[0]
if agcc_path == '':
  print 'Could not find agcc on your path.'
  sys.exit(1)

pwd = os.getcwd()
os.chdir('src')
assert os.path.exists('Parser/hostpgen'), 'hostpgen not found'
run('make')
#run('make install -k', False)
run('make install')
assert os.path.exists('android'), 'build result not found'

print 'Installing xmppy.'
xmpppy_path = os.path.join(pwd, 'xmpppy', 'xmpp')
compileall.compile_dir(xmpppy_path)
shutil.copytree(xmpppy_path, 'android/python/lib/python2.6/xmpp')

print 'Installing BeautifulSoup.'
beautifulsoup_path = os.path.join(pwd, 'BeautifulSoup')
compileall.compile_dir(beautifulsoup_path)
shutil.copy(os.path.join(beautifulsoup_path, 'BeautifulSoup.pyc'),
            'android/python/lib/python2.6/BeautifulSoup.pyc')

print 'Installing gdata.'
gdata_path = os.path.join(pwd, 'gdata')
run('python setup.py build', cwd=gdata_path)
gdata_build_path = os.path.join(gdata_path, 'build')
# setup.py build creates a single platform-specific lib directory; grab it.
gdata_result_path = os.path.join(gdata_build_path,
                                 os.listdir(gdata_build_path)[0])
compileall.compile_dir(gdata_result_path)
shutil.copytree(os.path.join(gdata_result_path, 'gdata'),
                'android/python/lib/python2.6/gdata')
shutil.copytree(os.path.join(gdata_result_path, 'atom'),
                'android/python/lib/python2.6/atom')

print 'Installing python-twitter.'
twitter_path = os.path.join(pwd, 'python-twitter')
compileall.compile_dir(twitter_path)
shutil.copy(os.path.join(twitter_path, 'twitter.pyc'),
            'android/python/lib/python2.6/twitter.pyc')

print 'Installing simplejson.'
simplejson_path = os.path.join(pwd, 'python-twitter', 'simplejson')
compileall.compile_dir(simplejson_path)
shutil.copytree(simplejson_path, 'android/python/lib/python2.6/simplejson')

print 'Removing unecessary files and directories from installation.'
# Ship only compiled .pyc files and stripped binaries; drop sources,
# C files, tests, headers and the static library to minimize the package.
map(rm, find('android/python/bin', 'python$')[1])
map(rm, find('android', '\.py$')[0])
map(rm, find('android', '\.c$')[0])
map(rm, find('android', 'test')[0])
map(rm, find('android', '\.pyo$')[0])
rm('android/python/share')
rm('android/python/include')
rm('android/python/lib/libpython2.6.a')
map(strip, find('android', '\.so$')[0])
strip('android/python/bin/python')

libs_to_remove = [
    'compiler',
    'config',
    'curses',
    'distutils',
    'hotshot',
    'idlelib',
    'lib2to3',
    'lib-old',
    'lib-tk',
    'multiprocessing',
    'site-packages',
]

for lib in libs_to_remove:
  rm('android/python/lib/python2.6/' + lib)

# Remove any existing zip files.
for p in glob.glob(os.path.join(pwd, '*.zip')):
  rm(p)

print 'Zipping up standard library.'
libs = os.path.join(pwd, 'src/android/python/lib/python2.6')
# Copy in ASE's Android module.
shutil.copy(os.path.join(pwd, 'ase', 'android.py'),
            'android/python/lib/python2.6')
# lib-dynload (compiled extension modules) stays with the interpreter zip,
# not the extras zip.
zipup(os.path.join(pwd, 'python_extras.zip'), libs, libs,
      exclude=['lib-dynload'], prefix='python/')
map(rm, find(libs, exclude=['lib-dynload'])[0])

print 'Zipping up Python interpreter for deployment.'
zipup(os.path.join(pwd, 'python.zip'),
      os.path.join(pwd, 'src', 'android', 'python'),
      os.path.join(pwd, 'src', 'android'))

print 'Zipping up Python scripts.'
zipup(os.path.join(pwd, 'python_scripts.zip'),
      os.path.join(pwd, 'ase', 'scripts'),
      os.path.join(pwd, 'ase', 'scripts'))

print 'Done.'
apache-2.0
dyn888/youtube-dl
youtube_dl/extractor/bleacherreport.py
11
4163
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from .amp import AMPIE
from ..utils import (
    ExtractorError,
    int_or_none,
    parse_iso8601,
)


class BleacherReportIE(InfoExtractor):
    """Extractor for bleacherreport.com article pages.

    Articles embed a single video hosted on one of several backends
    (Bleacher Report CMS, Ooyala, YouTube, Vine); extraction is delegated
    to the matching downstream extractor via a url_transparent result.
    """
    _VALID_URL = r'https?://(?:www\.)?bleacherreport\.com/articles/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://bleacherreport.com/articles/2496438-fsu-stat-projections-is-jalen-ramsey-best-defensive-player-in-college-football',
        'md5': 'a3ffc3dc73afdbc2010f02d98f990f20',
        'info_dict': {
            'id': '2496438',
            'ext': 'mp4',
            'title': 'FSU Stat Projections: Is Jalen Ramsey Best Defensive Player in College Football?',
            'uploader_id': 3992341,
            'description': 'CFB, ACC, Florida State',
            'timestamp': 1434380212,
            'upload_date': '20150615',
            'uploader': 'Team Stream Now ',
        },
        'add_ie': ['Ooyala'],
    }, {
        'url': 'http://bleacherreport.com/articles/2586817-aussie-golfers-get-fright-of-their-lives-after-being-chased-by-angry-kangaroo',
        'md5': 'af5f90dc9c7ba1c19d0a3eac806bbf50',
        'info_dict': {
            'id': '2586817',
            'ext': 'mp4',
            'title': 'Aussie Golfers Get Fright of Their Lives After Being Chased by Angry Kangaroo',
            'timestamp': 1446839961,
            'uploader': 'Sean Fay',
            'description': 'md5:825e94e0f3521df52fa83b2ed198fa20',
            'uploader_id': 6466954,
            'upload_date': '20151011',
        },
        'add_ie': ['Youtube'],
    }]

    def _real_extract(self, url):
        article_id = self._match_id(url)

        article_data = self._download_json('http://api.bleacherreport.com/api/v1/articles/%s' % article_id, article_id)['article']

        thumbnails = []
        primary_photo = article_data.get('primaryPhoto')
        if primary_photo:
            thumbnails = [{
                'url': primary_photo['url'],
                'width': primary_photo.get('width'),
                'height': primary_photo.get('height'),
            }]

        info = {
            '_type': 'url_transparent',
            'id': article_id,
            'title': article_data['title'],
            'uploader': article_data.get('author', {}).get('name'),
            'uploader_id': article_data.get('authorId'),
            'timestamp': parse_iso8601(article_data.get('createdAt')),
            'thumbnails': thumbnails,
            'comment_count': int_or_none(article_data.get('commentsCount')),
            'view_count': int_or_none(article_data.get('hitCount')),
        }

        video = article_data.get('video')
        if video:
            # Dispatch on the hosting backend reported by the API; the
            # resulting URL is resolved by the corresponding extractor.
            video_type = video['type']
            if video_type == 'cms.bleacherreport.com':
                info['url'] = 'http://bleacherreport.com/video_embed?id=%s' % video['id']
            elif video_type == 'ooyala.com':
                info['url'] = 'ooyala:%s' % video['id']
            elif video_type == 'youtube.com':
                info['url'] = video['id']
            elif video_type == 'vine.co':
                info['url'] = 'https://vine.co/v/%s' % video['id']
            else:
                # Fallback for unknown backends; assumes type + id concatenate
                # into something resolvable — TODO confirm against API data.
                info['url'] = video_type + video['id']
            return info
        else:
            raise ExtractorError('no video in the article', expected=True)


class BleacherReportCMSIE(AMPIE):
    """Extractor for Bleacher Report's own CMS video embeds (AMP feed based)."""
    _VALID_URL = r'https?://(?:www\.)?bleacherreport\.com/video_embed\?id=(?P<id>[0-9a-f-]{36})'
    _TESTS = [{
        'url': 'http://bleacherreport.com/video_embed?id=8fd44c2f-3dc5-4821-9118-2c825a98c0e1',
        'md5': '8c2c12e3af7805152675446c905d159b',
        'info_dict': {
            'id': '8fd44c2f-3dc5-4821-9118-2c825a98c0e1',
            'ext': 'flv',
            'title': 'Cena vs. Rollins Would Expose the Heavyweight Division',
            'description': 'md5:984afb4ade2f9c0db35f3267ed88b36e',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # AMPIE provides _extract_feed_info; the akamai.json feed lacks the
        # video id, so it is filled in from the URL match.
        info = self._extract_feed_info('http://cms.bleacherreport.com/media/items/%s/akamai.json' % video_id)
        info['id'] = video_id
        return info
unlicense
rhyolight/nupic.son
app/soc/views/document.py
1
3971
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module containing the template for documents."""

from soc.models import document as document_model
from soc.logic.helper import prefixes
from soc.views import forms
from soc.views import template
from soc.views.helper import lists


class DocumentForm(forms.ModelForm):
  """Django form for creating documents."""

  dashboard_visibility = forms.MultipleChoiceField(
      required=False,
      choices=[(c.identifier, c.verbose_name) for c in
               document_model.Document.DASHBOARD_VISIBILITIES],
      widget=forms.CheckboxSelectMultiple)

  def __init__(self, bound_field_class, **kwargs):
    super(DocumentForm, self).__init__(bound_field_class, **kwargs)
    # Pre-populate the checkbox group from the entity being edited.
    if self.instance:
      self.initial['dashboard_visibility'] = self.instance.dashboard_visibility

  class Meta:
    model = document_model.Document
    # These fields are managed by the view logic (see validateForm below),
    # not by user input.
    exclude = [
        'scope', 'author', 'modified_by', 'prefix', 'home_for',
        'link_id', 'read_access', 'write_access', 'is_featured'
    ]


class Document(template.Template):
  """Template rendering a single document's title and content."""

  def __init__(self, data, entity):
    assert(entity != None)
    self.data = data
    self.entity = entity

  def context(self):
    return {
        'content': self.entity.content,
        'title': self.entity.title,
    }

  def templatePath(self):
    return "soc/_document.html"


def validateForm(data, document_form):
  """Validate and persist a DocumentForm.

  Returns the saved/created document entity, or None (implicitly) when the
  form does not validate. For new documents the scope key is derived from
  the request kwargs; program-level prefixes scope to sponsor/program,
  everything else additionally to the organization.
  """
  if not document_form.is_valid():
    return
  cleaned_data = document_form.cleaned_data
  cleaned_data['modified_by'] = data.ndb_user.key.to_old_key()
  if data.document:
    document = document_form.save()
  else:
    prefix = data.kwargs['prefix']
    cleaned_data['link_id'] = data.kwargs['document']
    cleaned_data['author'] = data.ndb_user.key.to_old_key()
    cleaned_data['prefix'] = prefix
    if prefix in ['gsoc_program', 'gci_program']:
      scope_key_name = '%s/%s' % (
          data.kwargs['sponsor'],
          data.kwargs['program'])
    else:
      scope_key_name = '%s/%s/%s' % (
          data.kwargs['sponsor'],
          data.kwargs['program'],
          data.kwargs['organization'])
    cleaned_data['scope'] = prefixes.getScopeForPrefix(prefix, scope_key_name)
    document = document_form.create(key_name=data.key_name)
  return document


class DocumentList(template.Template):
  """Template for list of documents."""

  def __init__(self, data, edit_name):
    self.data = data

    list_config = lists.ListConfiguration()
    list_config.addSimpleColumn('title', 'Title')
    list_config.addSimpleColumn('link_id', 'Document ID', hidden=True)
    # Clicking a row navigates to that document's edit view.
    list_config.setRowAction(
        lambda e, *args: data.redirect.document(e).urlOf(edit_name))
    list_config.setDefaultPagination(False)
    list_config.setDefaultSort('title')
    self._list_config = list_config

  def context(self):
    description = 'List of documents for %s' % (
        self.data.program.name)

    list_configuration_response = lists.ListConfigurationResponse(
        self.data, self._list_config, 0, description)

    return {
        'lists': [list_configuration_response],
    }

  def getListData(self):
    # Only list index 0 is served by this template; other indices yield None.
    idx = lists.getListIndex(self.data.request)
    if idx == 0:
      q = document_model.Document.all()
      q.filter('scope', self.data.program)

      response_builder = lists.RawQueryContentResponseBuilder(
          self.data.request, self._list_config, q, lists.keyStarter)

      return response_builder.build()
    else:
      return None
apache-2.0
redhat-openstack/django
django/utils/image.py
98
4983
# -*- coding: utf-8 -*-
"""
To provide a shim layer over Pillow/PIL situation until the PIL support is
removed.


Combinations To Account For
===========================

* Pillow:

    * never has ``_imaging`` under any Python
    * has the ``Image.alpha_composite``, which may aid in detection

* PIL

    * CPython 2.x may have _imaging (& work)
    * CPython 2.x may *NOT* have _imaging (broken & needs a error message)
    * CPython 3.x doesn't work
    * PyPy will *NOT* have _imaging (but works?)
    * On some platforms (Homebrew and RHEL6 reported) _imaging isn't available,
      the needed import is from PIL import _imaging (refs #21355)

Restated, that looks like:

* If we're on Python 2.x, it could be either Pillow or PIL:

    * If ``import _imaging`` results in ``ImportError``, either they have a
      working Pillow installation or a broken PIL installation, so we need to
      detect further:

        * To detect, we first ``import Image``.
        * If ``Image`` has a ``alpha_composite`` attribute present, only
          Pillow has this, so we assume it's working.
        * If ``Image`` DOES NOT have a ``alpha_composite``attribute, it must
          be PIL & is a broken (likely C compiler-less) install, which we need
          to warn the user about.

    * If ``import _imaging`` works, it must be PIL & is a working install.

* Python 3.x

    * If ``import Image`` works, it must be Pillow, since PIL isn't Python 3.x
      compatible.

* PyPy

    * If ``import _imaging`` results in ``ImportError``, it could be either
      Pillow or PIL, both of which work without it on PyPy, so we're fine.


Approach
========

* Attempt to import ``Image``

    * ``ImportError`` - nothing is installed, toss an exception
    * Either Pillow or the PIL is installed, so continue detecting

* Attempt to ``hasattr(Image, 'alpha_composite')``

    * If it works, it's Pillow & working
    * If it fails, we've got a PIL install, continue detecting

        * The only option here is that we're on Python 2.x or PyPy, of which
          we only care about if we're on CPython.
        * If we're on CPython, attempt to ``from PIL import _imaging`` and
          ``import _imaging``

            * ``ImportError`` - Bad install, toss an exception

"""
from __future__ import unicode_literals

import warnings

from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _

# Module-level cache populated once by _detect_image_library() at import time.
Image = None
_imaging = None
ImageFile = None


def _detect_image_library():
    """Detect and return (Image, _imaging, ImageFile) from Pillow or PIL.

    Results are cached in module globals, so repeated calls are cheap.
    Raises ImproperlyConfigured when neither library is importable or when
    a PIL install on CPython is missing its compiled ``_imaging`` module.
    Emits a PendingDeprecationWarning for any (working) plain-PIL install.
    """
    global Image
    global _imaging
    global ImageFile

    # Skip re-attempting to import if we've already run detection.
    if Image is not None:
        return Image, _imaging, ImageFile

    # Assume it's not there.
    PIL_imaging = False

    try:
        # Try from the Pillow (or one variant of PIL) install location first.
        from PIL import Image as PILImage
    except ImportError as err:
        try:
            # If that failed, try the alternate import syntax for PIL.
            import Image as PILImage
        except ImportError as err:
            # Neither worked, so it's likely not installed.
            raise ImproperlyConfigured(
                _("Neither Pillow nor PIL could be imported: %s") % err
            )

    # ``Image.alpha_composite`` was added to Pillow in SHA: e414c6 & is not
    # available in any version of the PIL.
    if hasattr(PILImage, 'alpha_composite'):
        PIL_imaging = False
    else:
        # We're dealing with the PIL. Determine if we're on CPython & if
        # ``_imaging`` is available.
        import platform

        # This is the Alex Approved™ way.
        # See http://mail.python.org/pipermail//pypy-dev/2011-November/008739.html
        if platform.python_implementation().lower() == 'cpython':
            # We're on CPython (likely 2.x). Since a C compiler is needed to
            # produce a fully-working PIL & will create a ``_imaging`` module,
            # we'll attempt to import it to verify their kit works.
            try:
                from PIL import _imaging as PIL_imaging
            except ImportError:
                try:
                    import _imaging as PIL_imaging
                except ImportError as err:
                    raise ImproperlyConfigured(
                        _("The '_imaging' module for the PIL could not be "
                          "imported: %s") % err
                    )

    # Try to import ImageFile as well.
    try:
        from PIL import ImageFile as PILImageFile
    except ImportError:
        import ImageFile as PILImageFile

    # Finally, warn about deprecation...
    if PIL_imaging is not False:
        warnings.warn(
            "Support for the PIL will be removed in Django 1.8. Please " +
            "uninstall it & install Pillow instead.",
            PendingDeprecationWarning
        )

    return PILImage, PIL_imaging, PILImageFile


# Run detection eagerly at import time so importers can use image.Image etc.
Image, _imaging, ImageFile = _detect_image_library()
bsd-3-clause
osvalr/odoo
addons/analytic_contract_hr_expense/__openerp__.py
312
1637
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Contracts Management: hr_expense link', 'version': '1.1', 'category': 'Hidden', 'description': """ This module is for modifying account analytic view to show some data related to the hr_expense module. ====================================================================================================== """, 'author': 'OpenERP S.A.', 'website': 'https://www.odoo.com/', 'depends': ['hr_expense','account_analytic_analysis'], 'data': ['analytic_contract_hr_expense_view.xml'], 'demo': [], 'installable': True, 'auto_install': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ashhher3/keras
tests/manual/check_yaml.py
74
3091
from __future__ import absolute_import
from __future__ import print_function
import numpy as np

from keras.utils.test_utils import get_test_data
from keras.preprocessing import sequence
from keras.optimizers import SGD, RMSprop, Adagrad
from keras.utils import np_utils
from keras.models import Sequential, Graph
from keras.layers.core import Dense, Dropout, Activation, Merge
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU
from keras.datasets import imdb
from keras.models import model_from_yaml

'''
    This is essentially the IMDB test. Deserialized models
    should yield the same config as the original one.
'''

# Manual check script (pre-1.0 Keras API): builds Sequential, merged and
# Graph models, round-trips them through YAML (+ HDF5 weights for the Graph)
# and verifies configs/predictions survive serialization.

max_features = 10000
maxlen = 100
batch_size = 32

(X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features,
                                                      test_split=0.2)
X_train = sequence.pad_sequences(X_train, maxlen=maxlen)
X_test = sequence.pad_sequences(X_test, maxlen=maxlen)

model = Sequential()
model.add(Embedding(max_features, 128))
model.add(LSTM(128, 128))
model.add(Dropout(0.5))
model.add(Dense(128, 1, W_regularizer='identity', b_constraint='maxnorm'))
model.add(Activation('sigmoid'))
model.get_config(verbose=1)

#####################################
# save model w/o parameters to yaml #
#####################################

yaml_no_params = model.to_yaml()

no_param_model = model_from_yaml(yaml_no_params)
no_param_model.get_config(verbose=1)

######################################
# save multi-branch sequential model #
######################################

seq = Sequential()
seq.add(Merge([model, model], mode='sum'))
seq.get_config(verbose=1)
merge_yaml = seq.to_yaml()
merge_model = model_from_yaml(merge_yaml)

large_model = Sequential()
large_model.add(Merge([seq, model], mode='concat'))
large_model.get_config(verbose=1)
large_model.to_yaml()

####################
# save graph model #
####################

# NOTE(review): these random arrays are immediately overwritten by
# get_test_data below — presumably leftovers from an earlier version.
X = np.random.random((100, 32))
X2 = np.random.random((100, 32))
y = np.random.random((100, 4))
y2 = np.random.random((100,))

(X_train, y_train), (X_test, y_test) = get_test_data(nb_train=1000,
                                                     nb_test=200,
                                                     input_shape=(32,),
                                                     classification=False,
                                                     output_shape=(4,))

graph = Graph()
graph.add_input(name='input1', ndim=2)
graph.add_node(Dense(32, 16), name='dense1', input='input1')
graph.add_node(Dense(32, 4), name='dense2', input='input1')
graph.add_node(Dense(16, 4), name='dense3', input='dense1')
graph.add_output(name='output1', inputs=['dense2', 'dense3'],
                 merge_mode='sum')
graph.compile('rmsprop', {'output1': 'mse'})
graph.get_config(verbose=1)

history = graph.fit({'input1': X_train, 'output1': y_train}, nb_epoch=10)
original_pred = graph.predict({'input1': X_test})

graph_yaml = graph.to_yaml()
graph.save_weights('temp.h5', overwrite=True)

# Reload architecture from YAML and weights from HDF5, then confirm the
# reloaded graph reproduces the original predictions exactly.
reloaded_graph = model_from_yaml(graph_yaml)
reloaded_graph.load_weights('temp.h5')
reloaded_graph.get_config(verbose=1)

reloaded_graph.compile('rmsprop', {'output1': 'mse'})
new_pred = reloaded_graph.predict({'input1': X_test})

assert(np.sum(new_pred['output1'] - original_pred['output1']) == 0)
mit
giocalitri/django-guardian
guardian/testapp/tests/conf.py
14
3348
from __future__ import unicode_literals

import os

import django
from guardian.compat import unittest
from guardian.utils import abspath
from django.conf import settings
from django.conf import UserSettingsHolder
from django.utils.functional import wraps

# Shared test configuration helpers for the guardian test app.

THIS = abspath(os.path.dirname(__file__))
TEST_TEMPLATES_DIR = abspath(THIS, 'templates')


TEST_SETTINGS = dict(
    TEMPLATE_DIRS=[TEST_TEMPLATES_DIR],
)


def skipUnlessTestApp(obj):
    """Skip the decorated test unless guardian.testapp is installed."""
    app = 'guardian.testapp'
    return unittest.skipUnless(app in settings.INSTALLED_APPS,
                               'app %r must be installed to run this test' % app)(obj)


def skipUnlessSupportsCustomUser(obj):
    """Skip the decorated test on Django < 1.5 (no custom user model)."""
    # XXX: Following fixes problem with Python 2.6 and Django 1.2
    gte15 = django.VERSION >= (1, 5)
    if not gte15:
        return lambda *args, **kwargs: None
    # XXX: End of the workaround
    return unittest.skipUnless(django.VERSION >= (1, 5),
                               'Must have Django 1.5 or greater')(obj)


class TestDataMixin(object):
    """Mixin creating baseline groups and users for permission tests."""

    def setUp(self):
        super(TestDataMixin, self).setUp()
        from django.contrib.auth.models import Group
        try:
            # Django >= 1.5: honor a swapped custom user model.
            from django.contrib.auth import get_user_model
            User = get_user_model()
        except ImportError:
            from django.contrib.auth.models import User
        Group.objects.create(pk=1, name='admins')
        jack_group = Group.objects.create(pk=2, name='jackGroup')
        # guardian's anonymous user must exist for object-permission checks.
        User.objects.get_or_create(pk=settings.ANONYMOUS_USER_ID)
        jack = User.objects.create(pk=1, username='jack', is_active=True,
                                   is_superuser=False, is_staff=False)
        jack.groups.add(jack_group)


class override_settings(object):
    """
    Acts as either a decorator, or a context manager. If it's a decorator it
    takes a function and returns a wrapped function. If it's a contextmanager
    it's used with the ``with`` statement. In either event entering/exiting
    are called before and after, respectively, the function/block is executed.
    """
    def __init__(self, **kwargs):
        self.options = kwargs
        # Remember the settings object in effect at construction time so
        # disable() can restore it exactly.
        self.wrapped = settings._wrapped

    def __enter__(self):
        self.enable()

    def __exit__(self, exc_type, exc_value, traceback):
        self.disable()

    def __call__(self, test_func):
        from django.test import TransactionTestCase
        if isinstance(test_func, type) and issubclass(test_func, TransactionTestCase):
            # Decorating a TestCase class: hook into its setup/teardown so
            # overrides are active for every test method.
            original_pre_setup = test_func._pre_setup
            original_post_teardown = test_func._post_teardown

            def _pre_setup(innerself):
                self.enable()
                original_pre_setup(innerself)

            def _post_teardown(innerself):
                original_post_teardown(innerself)
                self.disable()
            test_func._pre_setup = _pre_setup
            test_func._post_teardown = _post_teardown
            return test_func
        else:
            @wraps(test_func)
            def inner(*args, **kwargs):
                with self:
                    return test_func(*args, **kwargs)
        return inner

    def enable(self):
        # Layer the overrides on top of the current settings holder.
        override = UserSettingsHolder(settings._wrapped)
        for key, new_value in self.options.items():
            setattr(override, key, new_value)
        settings._wrapped = override

    def disable(self):
        settings._wrapped = self.wrapped
bsd-2-clause
henrytao-me/openerp.positionq
openerp/report/render/odt2odt/odt2odt.py
443
2265
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.report.render.rml2pdf import utils
import copy


class odt2odt(object):
    """Render an ODT template etree by evaluating the embedded report
    expressions against *localcontext* and returning a new, fully
    expanded element tree. The input tree is never modified.
    """

    def __init__(self, odt, localcontext):
        self.localcontext = localcontext
        self.etree = odt
        self._node = None

    def render(self):
        """Build and return the rendered copy of ``self.etree``."""
        def process_text(node, new_node):
            # utils._child_get expands report loops, so one source child may
            # yield several rendered children.
            for child in utils._child_get(node, self):
                new_child = copy.deepcopy(child)
                new_node.append(new_child)
                new_child.text = utils._process_text(self, child.text)
                new_child.tail = utils._process_text(self, child.tail)
                if len(child):
                    # Drop the deep-copied grandchildren and re-render them
                    # recursively from the source.  Iterate over a snapshot:
                    # removing elements while iterating the element itself
                    # skips every other sibling, leaving stale copies that
                    # the recursive call would then duplicate.
                    for n in list(new_child):
                        new_child.remove(n)
                    process_text(child, new_child)
        # Start from an empty copy of the root (same snapshot idiom as above).
        self._node = copy.deepcopy(self.etree)
        for n in list(self._node):
            self._node.remove(n)
        process_text(self.etree, self._node)
        return self._node


def parseNode(node, localcontext=None):
    """Convenience wrapper: render *node* with an optional context dict.

    ``localcontext`` defaults to a fresh empty dict per call (the previous
    ``localcontext={}`` default was a shared mutable object).
    """
    r = odt2odt(node, {} if localcontext is None else localcontext)
    return r.render()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
chimkentec/KodiMODo_rep
script.module.requests/lib/requests/packages/chardet/chardetect.py
1786
2504
#!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings

Example::

    % chardetect somefile someotherfile
    somefile: windows-1252 with confidence 0.5
    someotherfile: ascii with confidence 1.0

If no paths are provided, it takes its input from stdin.
"""

from __future__ import absolute_import, print_function, unicode_literals

import argparse
import sys
from io import open

from chardet import __version__
from chardet.universaldetector import UniversalDetector


def description_of(lines, name='stdin'):
    """
    Return a string describing the probable encoding of a file or
    list of strings.

    :param lines: The lines to get the encoding of.
    :type lines: Iterable of bytes
    :param name: Name of file or collection of lines
    :type name: str
    """
    u = UniversalDetector()
    for line in lines:
        u.feed(line)
    # close() finalizes detection and populates u.result.
    u.close()
    result = u.result
    if result['encoding']:
        return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
                                                     result['confidence'])
    else:
        return '{0}: no result'.format(name)


def main(argv=None):
    '''
    Handles command line arguments and gets things started.

    :param argv: List of arguments, as if specified on the command-line.
                 If None, ``sys.argv[1:]`` is used instead.
    :type argv: list of str
    '''
    # Get command line arguments
    parser = argparse.ArgumentParser(
        description="Takes one or more file paths and reports their detected \
                     encodings",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve')
    parser.add_argument('input',
                        help='File whose encoding we would like to determine.',
                        type=argparse.FileType('rb'), nargs='*',
                        default=[sys.stdin])
    parser.add_argument('--version', action='version',
                        version='%(prog)s {0}'.format(__version__))
    args = parser.parse_args(argv)

    for f in args.input:
        if f.isatty():
            # Reading from an interactive terminal: tell the user how to
            # terminate input before we block on it.
            print("You are running chardetect interactively. Press " +
                  "CTRL-D twice at the start of a blank line to signal the " +
                  "end of your input. If you want help, run chardetect " +
                  "--help\n", file=sys.stderr)
        print(description_of(f, f.name))


if __name__ == '__main__':
    main()
gpl-3.0
refindlyllc/python-rets
setup.py
2
1581
import os
import re
from setuptools import setup

# Set external files
try:
    # Prefer a reST conversion of the README when pypandoc is available.
    from pypandoc import convert
    README = convert('README.md', 'rst')
except ImportError:
    README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()

with open(os.path.join(os.path.dirname(__file__), 'requirements.txt')) as f:
    required = f.read().splitlines()

with open(os.path.join(os.path.dirname(__file__), 'test_requirements.txt')) as f:
    test_required = f.read().splitlines()

# Single-source the version from the package's __init__.py.
with open('rets/__init__.py', 'r') as fd:
    version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Cannot find version information')

setup(
    name='rets',
    version=version,
    packages=['rets'],
    install_requires=required,
    tests_require=test_required,
    test_suite='nose.collector',
    include_package_data=True,
    license='MIT License',
    description='RETS Client for Real Estate Data',
    long_description=README,
    # NOTE(review): when pypandoc is installed, README is reST, which
    # conflicts with this markdown content type — verify intended behavior.
    long_description_content_type='text/markdown',
    url='https://github.com/refindlyllc/rets',
    author='REfindly',
    author_email='info@refindly.com',
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
)
mit
Lektorium-LLC/edx-platform
lms/djangoapps/course_api/permissions.py
85
1125
""" Course API Authorization functions """ from student.roles import GlobalStaff def can_view_courses_for_username(requesting_user, target_username): """ Determine whether `requesting_user` has permission to view courses available to the user identified by `target_username`. Arguments: requesting_user (User): The user requesting permission to view another target_username (string): The name of the user `requesting_user` would like to access. Return value: Boolean: `True` if `requesting_user` is authorized to view courses as `target_username`. Otherwise, `False` Raises: TypeError if target_username is empty or None. """ # AnonymousUser has no username, so we test for requesting_user's own # username before prohibiting an empty target_username. if requesting_user.username == target_username: return True elif not target_username: raise TypeError("target_username must be specified") else: staff = GlobalStaff() return staff.has_user(requesting_user)
agpl-3.0
diogommartins/ryu
ryu/lib/packet/llc.py
32
10619
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Logical Link Control (LLC, IEEE 802.2) parser/serializer
http://standards.ieee.org/getieee802/download/802.2-1998.pdf

LLC format::

    +-----------------+--------------+
    | DSAP address    | 8 bits       |
    +-----------------+--------------+
    | SSAP address    | 8 bits       |
    +-----------------+--------------+
    | Control         | 8 or 16 bits |
    +-----------------+--------------+

DSAP address field::

      LSB
    +-----+---+---+---+---+---+---+---+
    | I/G | D | D | D | D | D | D | D |
    +-----+---+---+---+---+---+---+---+
     I/G bit = 0 : Individual DSAP
     I/G bit = 1 : Group DSA
     D : DSAP address

SSAP address field::

      LSB
    +-----+---+---+---+---+---+---+---+
    | C/R | S | S | S | S | S | S | S |
    +-----+---+---+---+---+---+---+---+
     C/R bit = 0 : Command
     C/R bit = 1 : Response
     S : SSAP address

Control field:

Information transfer command/response (I-format PDU)::

      1   2   3   4   5   6   7   8    9   10-16
    +---+---+---+---+---+---+---+---+-----+------+
    | 0 |           N(S)            | P/F | N(R) |
    +---+---+---+---+---+---+---+---+-----+------+

Supervisory commands/responses (S-format PDUs)::

      1   2   3   4   5   6   7   8    9   10-16
    +---+---+---+---+---+---+---+---+-----+------+
    | 1   0 | S   S | 0   0   0   0 | P/F | N(R) |
    +---+---+---+---+---+---+---+---+-----+------+

Unnumbered commands/responses (U-format PDUs)::

      1   2    3   4    5    6   7   8
    +---+---+----+---+-----+---+----+---+
    | 1   1 | M1  M1 | P/F | M2  M2  M2 |
    +---+---+----+---+-----+---+----+---+

    N(S)  : sender send sequence number (Bit 2=lower-order-bit)
    N(R)  : sender receive sequence number (Bit 10=lower-order-bit)
    S     : supervisory function bit
    M1/M2 : modifier function bit
    P/F   : poll bit - command LLC PDUs
            final bit - response LLC PDUs
"""

import struct

from . import bpdu
from . import packet_base
from ryu.lib import stringify

# Service access point assigned to the Bridge Spanning Tree Protocol.
SAP_BPDU = 0x42


class llc(packet_base.PacketBase):
    """LLC (IEEE 802.2) header encoder/decoder class.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.

    =============== ===============================================
    Attribute       Description
    =============== ===============================================
    dsap_addr       Destination service access point address field,
                    including the I/G bit at the least significant
                    bit.
    ssap_addr       Source service access point address field,
                    including the C/R bit at the least significant
                    bit.
    control         Control field (16 bits for formats that include
                    sequence numbering, 8 bits otherwise).  One of
                    ryu.lib.packet.llc.ControlFormatI,
                    ryu.lib.packet.llc.ControlFormatS or
                    ryu.lib.packet.llc.ControlFormatU.
    =============== ===============================================
    """

    # DSAP and SSAP address octets.
    _PACK_STR = '!BB'
    _PACK_LEN = struct.calcsize(_PACK_STR)
    # Registry mapping a control-field TYPE key to its decoder class.
    _CTR_TYPES = {}
    # Skips the two address octets and reads the first control octet.
    _CTR_PACK_STR = '!2xB'

    _MIN_LEN = _PACK_LEN

    @staticmethod
    def register_control_type(register_cls):
        """Class decorator registering a control-format decoder by TYPE."""
        llc._CTR_TYPES[register_cls.TYPE] = register_cls
        return register_cls

    def __init__(self, dsap_addr, ssap_addr, control):
        super(llc, self).__init__()

        # The control object must be one of the registered formats.
        assert getattr(control, 'TYPE', None) in self._CTR_TYPES

        self.dsap_addr = dsap_addr
        self.ssap_addr = ssap_addr
        self.control = control

    @classmethod
    def parser(cls, buf):
        """Decode an LLC header, returning (llc, payload class, rest)."""
        assert len(buf) >= cls._PACK_LEN
        dsap, ssap = struct.unpack_from(cls._PACK_STR, buf)

        # Peek at the first control octet to pick the right sub-decoder.
        (ctrl_bits,) = struct.unpack_from(cls._CTR_PACK_STR, buf)
        ctrl_cls = cls._get_control(ctrl_bits)
        control, information = ctrl_cls.parser(buf[cls._PACK_LEN:])

        return (cls(dsap, ssap, control),
                cls.get_packet_type(dsap),
                information)

    def serialize(self, payload, prev):
        """Encode the header: address octets followed by the control field."""
        header = struct.pack(self._PACK_STR, self.dsap_addr, self.ssap_addr)
        return header + self.control.serialize()

    @classmethod
    def _get_control(cls, buf):
        # Bit 0 clear means I-format; otherwise the low two bits select
        # between S-format (0b01) and U-format (0b11).
        if buf & 0b1 == ControlFormatI.TYPE:
            key = buf & 0b1
        else:
            key = buf & 0b11
        return cls._CTR_TYPES[key]


@llc.register_control_type
class ControlFormatI(stringify.StringifyMixin):
    """LLC sub encoder/decoder class for the control I-format field.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.

    ======================== ===============================
    Attribute                Description
    ======================== ===============================
    send_sequence_number     sender send sequence number
    pf_bit                   poll/final bit
    receive_sequence_number  sender receive sequence number
    ======================== ===============================
    """

    TYPE = 0b0
    _PACK_STR = '!H'
    _PACK_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, send_sequence_number=0, pf_bit=0,
                 receive_sequence_number=0):
        super(ControlFormatI, self).__init__()
        self.send_sequence_number = send_sequence_number
        self.pf_bit = pf_bit
        self.receive_sequence_number = receive_sequence_number

    @classmethod
    def parser(cls, buf):
        """Decode an I-format control word; return (instance, rest of buf)."""
        assert len(buf) >= cls._PACK_LEN
        (raw,) = struct.unpack_from(cls._PACK_STR, buf)
        assert (raw >> 8) & 0b1 == cls.TYPE

        n_s = (raw >> 9) & 0b1111111
        pf = (raw >> 8) & 0b1
        n_r = (raw >> 1) & 0b1111111

        return cls(n_s, pf, n_r), buf[cls._PACK_LEN:]

    def serialize(self):
        """Encode this I-format control field into two octets."""
        word = self.send_sequence_number << 9
        word |= self.TYPE << 8
        word |= self.receive_sequence_number << 1
        word |= self.pf_bit
        return struct.pack(self._PACK_STR, word)


@llc.register_control_type
class ControlFormatS(stringify.StringifyMixin):
    """LLC sub encoder/decoder class for the control S-format field.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.

    ======================== ===============================
    Attribute                Description
    ======================== ===============================
    supervisory_function     supervisory function bit
    pf_bit                   poll/final bit
    receive_sequence_number  sender receive sequence number
    ======================== ===============================
    """

    TYPE = 0b01
    _PACK_STR = '!H'
    _PACK_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, supervisory_function=0, pf_bit=0,
                 receive_sequence_number=0):
        super(ControlFormatS, self).__init__()
        self.supervisory_function = supervisory_function
        self.pf_bit = pf_bit
        self.receive_sequence_number = receive_sequence_number

    @classmethod
    def parser(cls, buf):
        """Decode an S-format control word; return (instance, rest of buf)."""
        assert len(buf) >= cls._PACK_LEN
        (raw,) = struct.unpack_from(cls._PACK_STR, buf)
        assert (raw >> 8) & 0b11 == cls.TYPE

        s_bits = (raw >> 10) & 0b11
        pf = (raw >> 8) & 0b1
        n_r = (raw >> 1) & 0b1111111

        return cls(s_bits, pf, n_r), buf[cls._PACK_LEN:]

    def serialize(self):
        """Encode this S-format control field into two octets."""
        word = self.supervisory_function << 10
        word |= self.TYPE << 8
        word |= self.receive_sequence_number << 1
        word |= self.pf_bit
        return struct.pack(self._PACK_STR, word)


@llc.register_control_type
class ControlFormatU(stringify.StringifyMixin):
    """LLC sub encoder/decoder class for the control U-format field.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte
    order.
    __init__ takes the corresponding args in this order.

    ======================== ===============================
    Attribute                Description
    ======================== ===============================
    modifier_function1       modifier function bit
    pf_bit                   poll/final bit
    modifier_function2       modifier function bit
    ======================== ===============================
    """

    TYPE = 0b11
    _PACK_STR = '!B'
    _PACK_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, modifier_function1=0, pf_bit=0,
                 modifier_function2=0):
        super(ControlFormatU, self).__init__()
        self.modifier_function1 = modifier_function1
        self.pf_bit = pf_bit
        self.modifier_function2 = modifier_function2

    @classmethod
    def parser(cls, buf):
        """Decode a U-format control octet; return (instance, rest of buf)."""
        assert len(buf) >= cls._PACK_LEN
        (raw,) = struct.unpack_from(cls._PACK_STR, buf)
        assert raw & 0b11 == cls.TYPE

        m1 = (raw >> 2) & 0b11
        pf = (raw >> 4) & 0b1
        m2 = (raw >> 5) & 0b111

        return cls(m1, pf, m2), buf[cls._PACK_LEN:]

    def serialize(self):
        """Encode this U-format control field into a single octet."""
        octet = self.modifier_function2 << 5
        octet |= self.pf_bit << 4
        octet |= self.modifier_function1 << 2
        octet |= self.TYPE
        return struct.pack(self._PACK_STR, octet)


llc.register_packet_type(bpdu.bpdu, SAP_BPDU)

llc.set_classes(llc._CTR_TYPES)
apache-2.0
sbt9uc/osf.io
tests/test_conferences.py
20
19864
# -*- coding: utf-8 -*-
"""Tests for the OSF conferences app: helper utilities, the Mailgun
inbound-message wrapper, node provisioning, views and the end-to-end
``meeting_hook`` integration."""

import mock
from nose.tools import *  # noqa (PEP8 asserts)
import hmac
import hashlib
from StringIO import StringIO

import furl
from modularodm import Q
from modularodm.exceptions import ValidationError

from framework.auth.core import Auth

from website import settings
from website.models import User, Node
from website.conferences import views
from website.conferences.model import Conference
from website.conferences import utils, message
from website.util import api_url_for, web_url_for

from tests.base import OsfTestCase, fake
from tests.factories import ModularOdmFactory, FakerAttribute, ProjectFactory, UserFactory

from factory import Sequence, post_generation


def assert_absolute(url):
    """Assert that ``url`` is absolute, i.e. shares the configured domain's host."""
    parsed_domain = furl.furl(settings.DOMAIN)
    parsed_url = furl.furl(url)
    assert_equal(parsed_domain.host, parsed_url.host)


def assert_equal_urls(first, second):
    """Assert two URLs are equal, ignoring any explicit port."""
    parsed_first = furl.furl(first)
    parsed_first.port = None
    parsed_second = furl.furl(second)
    parsed_second.port = None
    assert_equal(parsed_first, parsed_second)


class ConferenceFactory(ModularOdmFactory):
    FACTORY_FOR = Conference

    endpoint = Sequence(lambda n: 'conference{0}'.format(n))
    name = FakerAttribute('catch_phrase')
    active = True

    @post_generation
    def admins(self, create, extracted, **kwargs):
        # Default to a single freshly created admin unless admins were passed in.
        self.admins = extracted or [UserFactory()]


def create_fake_conference_nodes(n, endpoint):
    """Create ``n`` public projects tagged with ``endpoint`` and return them."""
    nodes = []
    for i in range(n):
        node = ProjectFactory(is_public=True)
        node.add_tag(endpoint, Auth(node.creator))
        node.save()
        nodes.append(node)
    return nodes


class TestConferenceUtils(OsfTestCase):

    def test_get_or_create_user_exists(self):
        user = UserFactory()
        fetched, created = utils.get_or_create_user(user.fullname, user.username, True)
        assert_false(created)
        assert_equal(user._id, fetched._id)
        assert_false('is_spam' in fetched.system_tags)

    def test_get_or_create_user_not_exists(self):
        fullname = 'Roger Taylor'
        username = 'roger@queen.com'
        fetched, created = utils.get_or_create_user(fullname, username, False)
        assert_true(created)
        assert_equal(fetched.fullname, fullname)
        assert_equal(fetched.username, username)
        assert_false('is_spam' in fetched.system_tags)

    def test_get_or_create_user_is_spam(self):
        fullname = 'John Deacon'
        username = 'deacon@queen.com'
        fetched, created = utils.get_or_create_user(fullname, username, True)
        assert_true(created)
        assert_equal(fetched.fullname, fullname)
        assert_equal(fetched.username, username)
        assert_true('is_spam' in fetched.system_tags)

    def test_get_or_create_node_exists(self):
        node = ProjectFactory()
        fetched, created = utils.get_or_create_node(node.title, node.creator)
        assert_false(created)
        assert_equal(node._id, fetched._id)

    def test_get_or_create_node_title_not_exists(self):
        title = 'Night at the Opera'
        creator = UserFactory()
        node = ProjectFactory(creator=creator)
        fetched, created = utils.get_or_create_node(title, creator)
        assert_true(created)
        assert_not_equal(node._id, fetched._id)

    def test_get_or_create_node_user_not_exists(self):
        title = 'Night at the Opera'
        creator = UserFactory()
        node = ProjectFactory(title=title)
        fetched, created = utils.get_or_create_node(title, creator)
        assert_true(created)
        assert_not_equal(node._id, fetched._id)


class ContextTestCase(OsfTestCase):
    """Base class supplying a Flask request context with a valid Mailgun
    signature; swaps in a known API key for the duration of the class."""

    MAILGUN_API_KEY = 'mailkimp'

    @classmethod
    def setUpClass(cls):
        super(ContextTestCase, cls).setUpClass()
        # Remember the real key so tearDownClass can restore it.
        settings.MAILGUN_API_KEY, cls._MAILGUN_API_KEY = cls.MAILGUN_API_KEY, settings.MAILGUN_API_KEY

    @classmethod
    def tearDownClass(cls):
        super(ContextTestCase, cls).tearDownClass()
        settings.MAILGUN_API_KEY = cls._MAILGUN_API_KEY

    def make_context(self, method='POST', **kwargs):
        """Build a test request context carrying Mailgun-style form data.

        The signature is computed the way Mailgun does: HMAC-SHA256 of
        timestamp + token with the API key.  Keys whose value is ``None``
        are dropped so tests can suppress individual fields.
        """
        data = {
            'X-Mailgun-Sscore': 0,
            'timestamp': '123',
            'token': 'secret',
            'signature': hmac.new(
                key=settings.MAILGUN_API_KEY,
                msg='{}{}'.format('123', 'secret'),
                digestmod=hashlib.sha256,
            ).hexdigest(),
        }
        data.update(kwargs.pop('data', {}))
        data = {
            key: value
            for key, value in data.iteritems()
            if value is not None
        }
        return self.app.app.test_request_context(method=method, data=data, **kwargs)


class TestProvisionNode(ContextTestCase):

    def setUp(self):
        super(TestProvisionNode, self).setUp()
        self.node = ProjectFactory()
        self.user = self.node.creator
        self.conference = ConferenceFactory()
        self.body = 'dragon on my back'
        self.content = 'dragon attack'
        self.attachment = StringIO(self.content)
        self.recipient = '{0}{1}-poster@osf.io'.format(
            'test-' if settings.DEV_MODE else '',
            self.conference.endpoint,
        )

    def make_context(self, **kwargs):
        data = {
            'attachment-count': '1',
            'attachment-1': (self.attachment, 'attachment-1'),
            'X-Mailgun-Sscore': 0,
            'recipient': self.recipient,
            'stripped-text': self.body,
        }
        data.update(kwargs.pop('data', {}))
        return super(TestProvisionNode, self).make_context(data=data, **kwargs)

    def test_provision(self):
        with self.make_context():
            msg = message.ConferenceMessage()
            utils.provision_node(self.conference, msg, self.node, self.user)
        assert_true(self.node.is_public)
        assert_in(self.conference.admins[0], self.node.contributors)
        assert_in('emailed', self.node.system_tags)
        assert_in(self.conference.endpoint, self.node.system_tags)
        assert_in(self.conference.endpoint, self.node.tags)
        assert_not_in('spam', self.node.system_tags)

    def test_provision_private(self):
        self.conference.public_projects = False
        self.conference.save()
        with self.make_context():
            msg = message.ConferenceMessage()
            utils.provision_node(self.conference, msg, self.node, self.user)
        assert_false(self.node.is_public)
        assert_in(self.conference.admins[0], self.node.contributors)
        assert_in('emailed', self.node.system_tags)
        assert_not_in('spam', self.node.system_tags)

    def test_provision_spam(self):
        # A spam score above the threshold keeps the node private and tags it.
        with self.make_context(data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}):
            msg = message.ConferenceMessage()
            utils.provision_node(self.conference, msg, self.node, self.user)
        assert_false(self.node.is_public)
        assert_in(self.conference.admins[0], self.node.contributors)
        assert_in('emailed', self.node.system_tags)
        assert_in('spam', self.node.system_tags)

    @mock.patch('website.util.waterbutler_url_for')
    @mock.patch('website.conferences.utils.requests.put')
    def test_upload(self, mock_put, mock_get_url):
        mock_get_url.return_value = 'http://queen.com/'
        self.attachment.filename = 'hammer-to-fall'
        self.attachment.content_type = 'application/json'
        utils.upload_attachment(self.user, self.node, self.attachment)
        mock_get_url.assert_called_with(
            'upload',
            'osfstorage',
            '/' + self.attachment.filename,
            self.node,
            user=self.user,
        )
        mock_put.assert_called_with(
            mock_get_url.return_value,
            data=self.content,
        )

    @mock.patch('website.util.waterbutler_url_for')
    @mock.patch('website.conferences.utils.requests.put')
    def test_upload_no_file_name(self, mock_put, mock_get_url):
        # A missing filename falls back to settings.MISSING_FILE_NAME.
        mock_get_url.return_value = 'http://queen.com/'
        self.attachment.filename = ''
        self.attachment.content_type = 'application/json'
        utils.upload_attachment(self.user, self.node, self.attachment)
        mock_get_url.assert_called_with(
            'upload',
            'osfstorage',
            '/' + settings.MISSING_FILE_NAME,
            self.node,
            user=self.user,
        )
        mock_put.assert_called_with(
            mock_get_url.return_value,
            data=self.content,
        )


class TestMessage(ContextTestCase):

    def test_verify_signature_valid(self):
        with self.make_context():
            msg = message.ConferenceMessage()
            msg.verify_signature()

    def test_verify_signature_invalid(self):
        with self.make_context(data={'signature': 'fake'}):
            self.app.app.preprocess_request()
            msg = message.ConferenceMessage()
            with assert_raises(message.ConferenceError):
                msg.verify_signature()

    def test_is_spam_false_missing_headers(self):
        ctx = self.make_context(
            method='POST',
            data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert not msg.is_spam

    def test_is_spam_false_all_headers(self):
        ctx = self.make_context(
            method='POST',
            data={
                'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1,
                'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0],
                'X-Mailgun-Spf': message.SPF_PASS_VALUES[0],
            },
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert not msg.is_spam

    def test_is_spam_true_sscore(self):
        ctx = self.make_context(
            method='POST',
            data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert msg.is_spam

    def test_is_spam_true_dkim(self):
        # Reversed pass value is guaranteed not to match a passing DKIM result.
        ctx = self.make_context(
            method='POST',
            data={'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0][::-1]},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert msg.is_spam

    def test_is_spam_true_spf(self):
        ctx = self.make_context(
            method='POST',
            data={'X-Mailgun-Spf': message.SPF_PASS_VALUES[0][::-1]},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert msg.is_spam

    def test_subject(self):
        ctx = self.make_context(
            method='POST',
            data={'subject': 'RE: Hip Hopera'},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert_equal(msg.subject, 'Hip Hopera')

    def test_recipient(self):
        address = 'test-conference@osf.io'
        ctx = self.make_context(
            method='POST',
            data={'recipient': address},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert_equal(msg.recipient, address)

    def test_text(self):
        text = 'welcome to my nuclear family'
        ctx = self.make_context(
            method='POST',
            data={'stripped-text': text},
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert_equal(msg.text, text)

    def test_sender_name(self):
        names = [
            (' Fred', 'Fred'),
            (u'Me\u4b1f', u'Me\u4b1f'),
            (u'Fred <fred@queen.com>', u'Fred'),
            (u'"Fred" <fred@queen.com>', u'Fred'),
        ]
        for name in names:
            with self.make_context(data={'from': name[0]}):
                msg = message.ConferenceMessage()
                assert_equal(msg.sender_name, name[1])

    def test_route_invalid_pattern(self):
        with self.make_context(data={'recipient': 'spam@osf.io'}):
            self.app.app.preprocess_request()
            msg = message.ConferenceMessage()
            with assert_raises(message.ConferenceError):
                msg.route

    def test_route_invalid_test(self):
        # The wrong test- prefix for the current DEV_MODE must be rejected.
        recipient = '{0}conf-talk@osf.io'.format('' if settings.DEV_MODE else 'test-')
        with self.make_context(data={'recipient': recipient}):
            self.app.app.preprocess_request()
            msg = message.ConferenceMessage()
            with assert_raises(message.ConferenceError):
                msg.route

    def test_route_valid(self):
        recipient = '{0}conf-talk@osf.io'.format('test-' if settings.DEV_MODE else '')
        with self.make_context(data={'recipient': recipient}):
            self.app.app.preprocess_request()
            msg = message.ConferenceMessage()
            assert_equal(msg.conference_name, 'conf')
            assert_equal(msg.conference_category, 'talk')

    def test_attachments_count_zero(self):
        with self.make_context(data={'attachment-count': '0'}):
            msg = message.ConferenceMessage()
            assert_equal(msg.attachments, [])

    def test_attachments_count_one(self):
        content = 'slightly mad'
        sio = StringIO(content)
        ctx = self.make_context(
            method='POST',
            data={
                'attachment-count': 1,
                'attachment-1': (sio, 'attachment-1'),
            },
        )
        with ctx:
            msg = message.ConferenceMessage()
            assert_equal(len(msg.attachments), 1)
            assert_equal(msg.attachments[0].read(), content)


class TestConferenceEmailViews(OsfTestCase):

    def test_redirect_to_meetings_url(self):
        url = '/presentations/'
        res = self.app.get(url)
        assert_equal(res.status_code, 302)
        res = res.follow()
        assert_equal(res.request.path, '/meetings/')

    def test_conference_plain_returns_200(self):
        conference = ConferenceFactory()
        url = web_url_for('conference_results__plain', meeting=conference.endpoint)
        res = self.app.get(url)
        assert_equal(res.status_code, 200)

    def test_conference_data(self):
        conference = ConferenceFactory()

        # Create conference nodes
        n_conference_nodes = 3
        create_fake_conference_nodes(
            n_conference_nodes,
            conference.endpoint,
        )
        # Create a non-conference node
        ProjectFactory()

        url = api_url_for('conference_data', meeting=conference.endpoint)
        res = self.app.get(url)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), n_conference_nodes)

    def test_conference_data_url_upper(self):
        # Meeting lookup should be case-insensitive on the URL side.
        conference = ConferenceFactory()

        # Create conference nodes
        n_conference_nodes = 3
        create_fake_conference_nodes(
            n_conference_nodes,
            conference.endpoint,
        )
        # Create a non-conference node
        ProjectFactory()

        url = api_url_for('conference_data', meeting=conference.endpoint.upper())
        res = self.app.get(url)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), n_conference_nodes)

    def test_conference_data_tag_upper(self):
        # ...and case-insensitive on the tag side as well.
        conference = ConferenceFactory()

        # Create conference nodes
        n_conference_nodes = 3
        create_fake_conference_nodes(
            n_conference_nodes,
            conference.endpoint.upper(),
        )
        # Create a non-conference node
        ProjectFactory()

        url = api_url_for('conference_data', meeting=conference.endpoint)
        res = self.app.get(url)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json), n_conference_nodes)

    def test_conference_results(self):
        conference = ConferenceFactory()

        url = web_url_for('conference_results', meeting=conference.endpoint)
        res = self.app.get(url)
        assert_equal(res.status_code, 200)


class TestConferenceModel(OsfTestCase):

    def test_endpoint_and_name_are_required(self):
        with assert_raises(ValidationError):
            ConferenceFactory(endpoint=None, name=fake.company()).save()
        with assert_raises(ValidationError):
            ConferenceFactory(endpoint='spsp2014', name=None).save()


class TestConferenceIntegration(ContextTestCase):

    @mock.patch('website.conferences.views.send_mail')
    @mock.patch('website.conferences.utils.upload_attachments')
    def test_integration(self, mock_upload, mock_send_mail):
        fullname = 'John Deacon'
        username = 'deacon@queen.com'
        title = 'good songs'
        conference = ConferenceFactory()
        body = 'dragon on my back'
        content = 'dragon attack'
        recipient = '{0}{1}-poster@osf.io'.format(
            'test-' if settings.DEV_MODE else '',
            conference.endpoint,
        )
        self.app.post(
            api_url_for('meeting_hook'),
            {
                # NOTE: 'X-Mailgun-Sscore' was previously listed twice in
                # this dict literal; the duplicate key has been removed.
                'X-Mailgun-Sscore': 0,
                'timestamp': '123',
                'token': 'secret',
                'signature': hmac.new(
                    key=settings.MAILGUN_API_KEY,
                    msg='{}{}'.format('123', 'secret'),
                    digestmod=hashlib.sha256,
                ).hexdigest(),
                'attachment-count': '1',
                'from': '{0} <{1}>'.format(fullname, username),
                'recipient': recipient,
                'subject': title,
                'stripped-text': body,
            },
            upload_files=[
                ('attachment-1', 'attachment-1', content),
            ],
        )
        assert_true(mock_upload.called)
        users = User.find(Q('username', 'eq', username))
        assert_equal(users.count(), 1)
        nodes = Node.find(Q('title', 'eq', title))
        assert_equal(nodes.count(), 1)
        node = nodes[0]
        assert_equal(node.get_wiki_page('home').content, body)
        assert_true(mock_send_mail.called)
        call_args, call_kwargs = mock_send_mail.call_args
        assert_absolute(call_kwargs['conf_view_url'])
        assert_absolute(call_kwargs['set_password_url'])
        assert_absolute(call_kwargs['profile_url'])
        assert_absolute(call_kwargs['file_url'])
        assert_absolute(call_kwargs['node_url'])

    @mock.patch('website.conferences.views.send_mail')
    def test_integration_inactive(self, mock_send_mail):
        conference = ConferenceFactory(active=False)
        fullname = 'John Deacon'
        username = 'deacon@queen.com'
        title = 'good songs'
        body = 'dragon on my back'
        recipient = '{0}{1}-poster@osf.io'.format(
            'test-' if settings.DEV_MODE else '',
            conference.endpoint,
        )
        res = self.app.post(
            api_url_for('meeting_hook'),
            {
                # Duplicate 'X-Mailgun-Sscore' key removed here as well.
                'X-Mailgun-Sscore': 0,
                'timestamp': '123',
                'token': 'secret',
                'signature': hmac.new(
                    key=settings.MAILGUN_API_KEY,
                    msg='{}{}'.format('123', 'secret'),
                    digestmod=hashlib.sha256,
                ).hexdigest(),
                'attachment-count': '1',
                'from': '{0} <{1}>'.format(fullname, username),
                'recipient': recipient,
                'subject': title,
                'stripped-text': body,
            },
            expect_errors=True,
        )
        assert_equal(res.status_code, 406)
        call_args, call_kwargs = mock_send_mail.call_args
        assert_equal(call_args, (username, views.CONFERENCE_INACTIVE))
        assert_equal(call_kwargs['fullname'], fullname)
        assert_equal_urls(
            call_kwargs['presentations_url'],
            web_url_for('conference_view', _absolute=True),
        )
apache-2.0
dissemin/dissemin
backend/tests/conftest.py
2
4317
import os
import zipfile
from urllib.parse import parse_qs
from urllib.parse import urlparse

import pytest
import responses
from django.conf import settings

from papers.models import Researcher
from publishers.models import AliasPublisher
from publishers.models import Journal
from publishers.models import Publisher


@pytest.fixture
def affiliations():
    """Simple list of affiliations used in citeproc."""
    return ['University of Dublin', 'College Calvin']


@pytest.fixture
def container_title():
    """Container title; kept as a fixture for simpler CrossRef test handling."""
    return 'The Infinite Library'


@pytest.fixture
def issn():
    """A (valid) ISSN."""
    return '1234-5675'


@pytest.fixture
def orcids():
    """Simple list of ORCIDs used in citeproc."""
    return ['0000-0001-8187-9704', None]


@pytest.fixture
def title():
    """Title; kept as a fixture for simpler CrossRef test handling."""
    return 'The God of the Labyrinth'


@pytest.fixture
def citeproc(affiliations, container_title, issn, orcids, title):
    """Imaginary, yet complete citeproc example.

    Use this to check different behaviour by adding, deleting or modifying
    content.
    """
    record = {
        'abstract': 'A detective story',
        'author': [
            {
                'given': 'Herbert',
                'family': 'Quain',
                'affiliation': [
                    {'name': affiliations[0]},
                ],
                'ORCID': orcids[0],
            },
            {
                'given': 'Jorge Luis',
                'family': 'Borges',
                'affiliation': [
                    {'name': affiliations[1]},
                ],
                'ORCID': orcids[1],
            },
        ],
        'container-title': container_title,
        'DOI': '10.0123/quain-1933',
        'ISSN': [issn],
        'issue': '1',
        'issued': {
            'date-parts': [[2019, 10, 10]],
        },
        'page': 'p. 327',
        'publisher': 'Editorial Sur',
        'title': title,
        'type': 'book',
        'volume': '1',
    }
    return record


@pytest.fixture
def mock_journal_find(monkeypatch):
    """Mock Journal so that ``find`` returns no object."""
    monkeypatch.setattr(Journal, 'find', lambda issn, title: None)


@pytest.fixture
def mock_publisher_find(monkeypatch):
    """Mock Publisher so that ``find`` returns no object."""
    monkeypatch.setattr(Publisher, 'find', lambda x: None)


@pytest.fixture
def mock_alias_publisher_increment(monkeypatch):
    """Monkeypatch ``AliasPublisher.increment`` to avoid DB access."""
    monkeypatch.setattr(AliasPublisher, 'increment', lambda x, y: True)


@pytest.fixture
def rsps_fetch_day(requests_mocker):
    """Mock the fetching of a day from CrossRef.

    Responses are served from a zipfile of fixtures, selected purely by the
    ``cursor`` query parameter of the mocked request.
    """
    fixture_path = os.path.join(
        settings.BASE_DIR, 'backend', 'tests', 'data', 'crossref.zip'
    )
    archive = zipfile.ZipFile(fixture_path)

    def serve_cursor_page(request):
        # The first request uses the '*' cursor; map it to the 'initial'
        # fixture, otherwise the cursor value names the fixture directly.
        params = parse_qs(urlparse(request.url).query)
        cursor = params['cursor'][0]
        if cursor == '*':
            cursor = 'initial'
        payload = archive.read('{}.json'.format(cursor))
        return (200, {}, payload)

    requests_mocker.add_callback(
        responses.GET,
        'https://api.crossref.org/works',
        callback=serve_cursor_page,
    )
    return requests_mocker


@pytest.fixture
def researcher_lesot(django_user_model):
    """The Researcher Marie-Jeanne Lesot from doi 10.1016/j.ijar.2017.06.011."""
    user = django_user_model.objects.create(
        username='lisotm',
        first_name='Marie-Jeanne',
        last_name='Lesot',
    )
    return Researcher.create_by_name(
        first=user.first_name,
        last=user.last_name,
        user=user,
        orcid='0000-0002-3604-6647',
    )
agpl-3.0
larsbergstrom/servo
components/style/properties/counted_unknown_properties.py
1
3860
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# The following properties are under development, so they are not in this list.
# FIXME: We should handle the developing properties properly by Bug 1577358:
#   "backdrop-filter",
#   "text-decoration-skip-ink",
#   "column-span",
#   "offset-distance",
#   "offset-path",
#   "offset-rotate",
#   "offset"

# Properties whose usage is counted but which Servo does not implement.
# The position of each entry is significant — do not reorder.
COUNTED_UNKNOWN_PROPERTIES = [
    "-webkit-font-smoothing",
    "zoom",
    "-webkit-tap-highlight-color",
    "speak",
    "text-size-adjust",
    "-webkit-font-feature-settings",
    "-webkit-user-drag",
    "size",
    "-webkit-clip-path",
    "orphans",
    "widows",
    "-webkit-user-modify",
    "-webkit-margin-before",
    "-webkit-margin-after",
    "tab-size",
    "-webkit-margin-start",
    "-webkit-column-break-inside",
    "-webkit-padding-start",
    "-webkit-margin-end",
    "-webkit-box-reflect",
    "-webkit-print-color-adjust",
    "-webkit-mask-box-image",
    "-webkit-line-break",
    "-webkit-text-security",
    "alignment-baseline",
    "-webkit-writing-mode",
    "baseline-shift",
    "-webkit-hyphenate-character",
    "page",
    "text-underline-position",
    "-webkit-highlight",
    "background-repeat-x",
    "-webkit-padding-end",
    "background-repeat-y",
    "-webkit-text-emphasis-color",
    "-webkit-margin-top-collapse",
    "-webkit-rtl-ordering",
    "-webkit-padding-before",
    "-webkit-text-decorations-in-effect",
    "-webkit-border-vertical-spacing",
    "-webkit-locale",
    "-webkit-padding-after",
    "-webkit-border-horizontal-spacing",
    "color-rendering",
    "-webkit-column-break-before",
    "-webkit-transform-origin-x",
    "-webkit-transform-origin-y",
    "-webkit-text-emphasis-position",
    "buffered-rendering",
    "-webkit-text-orientation",
    "-webkit-text-combine",
    "-webkit-text-emphasis-style",
    "-webkit-text-emphasis",
    "d",
    "-webkit-mask-box-image-width",
    "-webkit-mask-box-image-source",
    "-webkit-mask-box-image-outset",
    "-webkit-mask-box-image-slice",
    "-webkit-mask-box-image-repeat",
    "-webkit-margin-after-collapse",
    "-webkit-border-before-color",
    "-webkit-border-before-width",
    "-webkit-perspective-origin-x",
    "-webkit-perspective-origin-y",
    "-webkit-margin-before-collapse",
    "-webkit-border-before-style",
    "scroll-snap-stop",
    "-webkit-margin-bottom-collapse",
    "-webkit-ruby-position",
    "-webkit-column-break-after",
    "-webkit-margin-collapse",
    "-webkit-border-before",
    "-webkit-border-end",
    "-webkit-border-after",
    "-webkit-border-start",
    "-webkit-min-logical-width",
    "-webkit-logical-height",
    "-webkit-transform-origin-z",
    "-webkit-font-size-delta",
    "-webkit-logical-width",
    "-webkit-max-logical-width",
    "-webkit-min-logical-height",
    "-webkit-max-logical-height",
    "-webkit-border-end-color",
    "-webkit-border-end-width",
    "-webkit-border-start-color",
    "-webkit-border-start-width",
    "-webkit-border-after-color",
    "-webkit-border-after-width",
    "-webkit-border-end-style",
    "-webkit-border-after-style",
    "-webkit-border-start-style",
    "-webkit-mask-repeat-x",
    "-webkit-mask-repeat-y",
    "user-zoom",
    "min-zoom",
    "-webkit-box-decoration-break",
    "orientation",
    "max-zoom",
    "-webkit-app-region",
    "-webkit-column-rule",
    "-webkit-column-span",
    "-webkit-column-gap",
    "-webkit-shape-outside",
    "-webkit-column-rule-width",
    "-webkit-column-count",
    "-webkit-opacity",
    "-webkit-column-width",
    "-webkit-shape-image-threshold",
    "-webkit-column-rule-style",
    "-webkit-columns",
    "-webkit-column-rule-color",
    "-webkit-shape-margin",
]
mpl-2.0
Dabz/dstat
lib/pymongo/topology_description.py
23
12685
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.

"""Represent the topology of servers."""

from collections import namedtuple

from pymongo import common
from pymongo.server_type import SERVER_TYPE
from pymongo.errors import ConfigurationError
from pymongo.server_description import ServerDescription


# Enum-like record of the five topology types: an instance of the namedtuple
# whose fields are bound to the ints 0-4, accessed as e.g.
# TOPOLOGY_TYPE.Single or TOPOLOGY_TYPE.Sharded.
TOPOLOGY_TYPE = namedtuple('TopologyType', ['Single', 'ReplicaSetNoPrimary',
                                            'ReplicaSetWithPrimary', 'Sharded',
                                            'Unknown'])(*range(5))


class TopologyDescription(object):
    """A snapshot of a deployment's topology.

    Treated as immutable: ``reset`` and ``reset_server`` return new copies,
    and ``updated_topology_description`` (below) never modifies its input.
    """

    def __init__(
            self,
            topology_type,
            server_descriptions,
            replica_set_name,
            max_election_id):
        """Represent a topology of servers.

        :Parameters:
          - `topology_type`: initial type
          - `server_descriptions`: dict of (address, ServerDescription) for
            all seeds
          - `replica_set_name`: replica set name or None
          - `max_election_id`: greatest electionId seen from a primary,
            or None
        """
        self._topology_type = topology_type
        self._replica_set_name = replica_set_name
        self._server_descriptions = server_descriptions
        self._max_election_id = max_election_id

        # Is PyMongo compatible with all servers' wire protocols?
        # Computed once here so check_compatible() can raise lazily.
        self._incompatible_err = None

        for s in self._server_descriptions.values():
            # s.min/max_wire_version is the server's wire protocol.
            # MIN/MAX_SUPPORTED_WIRE_VERSION is what PyMongo supports.
            server_too_new = (
                # Server too new.
                s.min_wire_version is not None
                and s.min_wire_version > common.MAX_SUPPORTED_WIRE_VERSION)

            server_too_old = (
                # Server too old.
                s.max_wire_version is not None
                and s.max_wire_version < common.MIN_SUPPORTED_WIRE_VERSION)

            if server_too_new or server_too_old:
                self._incompatible_err = (
                    "Server at %s:%d "
                    "uses wire protocol versions %d through %d, "
                    "but PyMongo only supports %d through %d"
                    % (s.address[0], s.address[1],
                       s.min_wire_version,
                       s.max_wire_version,
                       common.MIN_SUPPORTED_WIRE_VERSION,
                       common.MAX_SUPPORTED_WIRE_VERSION))

                # One incompatible server is enough; stop scanning.
                break

    def check_compatible(self):
        """Raise ConfigurationError if any server is incompatible.

        A server is incompatible if its wire protocol version range does not
        overlap with PyMongo's.
        """
        if self._incompatible_err:
            raise ConfigurationError(self._incompatible_err)

    def has_server(self, address):
        # True if `address` is a key in the known server descriptions.
        return address in self._server_descriptions

    def reset_server(self, address):
        """A copy of this description, with one server marked Unknown."""
        # A freshly constructed ServerDescription has type Unknown, so
        # feeding it through the updater resets just that one server.
        return updated_topology_description(self, ServerDescription(address))

    def reset(self):
        """A copy of this description, with all servers marked Unknown."""
        # Without any known servers there can be no known primary.
        if self._topology_type == TOPOLOGY_TYPE.ReplicaSetWithPrimary:
            topology_type = TOPOLOGY_TYPE.ReplicaSetNoPrimary
        else:
            topology_type = self._topology_type

        # The default ServerDescription's type is Unknown.
        sds = dict((address, ServerDescription(address))
                   for address in self._server_descriptions)

        return TopologyDescription(
            topology_type,
            sds,
            self._replica_set_name,
            self._max_election_id)

    def server_descriptions(self):
        """Dict of (address, ServerDescription)."""
        # A shallow copy, so callers can mutate the dict without affecting
        # this description.
        return self._server_descriptions.copy()

    @property
    def topology_type(self):
        """The current TOPOLOGY_TYPE value."""
        return self._topology_type

    @property
    def replica_set_name(self):
        """The replica set name."""
        return self._replica_set_name

    @property
    def max_election_id(self):
        """Greatest electionId seen from a primary, or None."""
        return self._max_election_id

    @property
    def known_servers(self):
        """List of Servers of types besides Unknown."""
        return [s for s in self._server_descriptions.values()
                if s.is_server_type_known]


# If topology type is Unknown and we receive an ismaster response, what should
# the new topology type be?
_SERVER_TYPE_TO_TOPOLOGY_TYPE = {
    SERVER_TYPE.Mongos: TOPOLOGY_TYPE.Sharded,
    SERVER_TYPE.RSPrimary: TOPOLOGY_TYPE.ReplicaSetWithPrimary,
    SERVER_TYPE.RSSecondary: TOPOLOGY_TYPE.ReplicaSetNoPrimary,
    SERVER_TYPE.RSArbiter: TOPOLOGY_TYPE.ReplicaSetNoPrimary,
    SERVER_TYPE.RSOther: TOPOLOGY_TYPE.ReplicaSetNoPrimary,
}


def updated_topology_description(topology_description, server_description):
    """Return an updated copy of a TopologyDescription.

    :Parameters:
      - `topology_description`: the current TopologyDescription
      - `server_description`: a new ServerDescription that resulted from
        an ismaster call

    Called after attempting (successfully or not) to call ismaster on the
    server at server_description.address. Does not modify topology_description.
    """
    address = server_description.address

    # These values will be updated, if necessary, to form the new
    # TopologyDescription.
    topology_type = topology_description.topology_type
    set_name = topology_description.replica_set_name
    max_election_id = topology_description.max_election_id
    server_type = server_description.server_type

    # Don't mutate the original dict of server descriptions; copy it.
    sds = topology_description.server_descriptions()

    # Replace this server's description with the new one.
    sds[address] = server_description

    if topology_type == TOPOLOGY_TYPE.Single:
        # Single type never changes.
        return TopologyDescription(
            TOPOLOGY_TYPE.Single, sds, set_name, max_election_id)

    if topology_type == TOPOLOGY_TYPE.Unknown:
        if server_type == SERVER_TYPE.Standalone:
            # A standalone cannot belong to a multi-server topology; drop it.
            sds.pop(address)

        elif server_type not in (SERVER_TYPE.Unknown, SERVER_TYPE.RSGhost):
            # The first definite response decides the topology type.
            # NOTE: no "elif" before the next "if" -- a just-assigned
            # Sharded/ReplicaSet* type falls through to the branches below.
            topology_type = _SERVER_TYPE_TO_TOPOLOGY_TYPE[server_type]

    if topology_type == TOPOLOGY_TYPE.Sharded:
        # Only mongoses (and servers we haven't heard from) belong here.
        if server_type not in (SERVER_TYPE.Mongos, SERVER_TYPE.Unknown):
            sds.pop(address)

    elif topology_type == TOPOLOGY_TYPE.ReplicaSetNoPrimary:
        if server_type in (SERVER_TYPE.Standalone, SERVER_TYPE.Mongos):
            sds.pop(address)

        elif server_type == SERVER_TYPE.RSPrimary:
            topology_type, set_name, max_election_id = _update_rs_from_primary(
                sds, set_name, server_description, max_election_id)

        elif server_type in (
                SERVER_TYPE.RSSecondary,
                SERVER_TYPE.RSArbiter,
                SERVER_TYPE.RSOther):
            topology_type, set_name = _update_rs_no_primary_from_member(
                sds, set_name, server_description)

    elif topology_type == TOPOLOGY_TYPE.ReplicaSetWithPrimary:
        if server_type in (SERVER_TYPE.Standalone, SERVER_TYPE.Mongos):
            # Removing a server may mean we removed the primary.
            sds.pop(address)
            topology_type = _check_has_primary(sds)

        elif server_type == SERVER_TYPE.RSPrimary:
            topology_type, set_name, max_election_id = _update_rs_from_primary(
                sds, set_name, server_description, max_election_id)

        elif server_type in (
                SERVER_TYPE.RSSecondary,
                SERVER_TYPE.RSArbiter,
                SERVER_TYPE.RSOther):
            topology_type = _update_rs_with_primary_from_member(
                sds, set_name, server_description)

        else:
            # Server type is Unknown or RSGhost: did we just lose the primary?
            topology_type = _check_has_primary(sds)

    # Return updated copy.
    return TopologyDescription(topology_type, sds, set_name, max_election_id)


def _update_rs_from_primary(
        sds,
        replica_set_name,
        server_description,
        max_election_id):
    """Update topology description from a primary's ismaster response.

    Pass in a dict of ServerDescriptions, current replica set name, the
    ServerDescription we are processing, and the TopologyDescription's
    max_election_id if any.

    Mutates ``sds`` in place.
    Returns (new topology type, new replica_set_name, new max_election_id).
    """
    if replica_set_name is None:
        replica_set_name = server_description.replica_set_name

    elif replica_set_name != server_description.replica_set_name:
        # We found a primary but it doesn't have the replica_set_name
        # provided by the user.
        sds.pop(server_description.address)
        return _check_has_primary(sds), replica_set_name, max_election_id

    if server_description.election_id is not None:
        if max_election_id and max_election_id > server_description.election_id:
            # Stale primary, set to type Unknown.
            address = server_description.address
            sds[address] = ServerDescription(address)
            return _check_has_primary(sds), replica_set_name, max_election_id

        max_election_id = server_description.election_id

    # We've heard from the primary. Is it the same primary as before?
    for server in sds.values():
        if (server.server_type is SERVER_TYPE.RSPrimary
                and server.address != server_description.address):

            # Reset old primary's type to Unknown.
            sds[server.address] = ServerDescription(server.address)

            # There can be only one prior primary.
            break

    # Discover new hosts from this primary's response.
    for new_address in server_description.all_hosts:
        if new_address not in sds:
            sds[new_address] = ServerDescription(new_address)

    # Remove hosts not in the response.
    for addr in set(sds) - server_description.all_hosts:
        sds.pop(addr)

    # If the host list differs from the seed list, we may not have a primary
    # after all.
    return _check_has_primary(sds), replica_set_name, max_election_id


def _update_rs_with_primary_from_member(
        sds,
        replica_set_name,
        server_description):
    """RS with known primary. Process a response from a non-primary.

    Pass in a dict of ServerDescriptions, current replica set name, and the
    ServerDescription we are processing. Mutates ``sds`` in place.

    Returns new topology type.
    """
    # Unlike _update_rs_no_primary_from_member, the set name is already
    # known here, because a primary has been seen.
    assert replica_set_name is not None

    if replica_set_name != server_description.replica_set_name:
        # Member claims a different set; drop it.
        sds.pop(server_description.address)

    # Had this member been the primary?
    return _check_has_primary(sds)


def _update_rs_no_primary_from_member(
        sds,
        replica_set_name,
        server_description):
    """RS without known primary. Update from a non-primary's response.

    Pass in a dict of ServerDescriptions, current replica set name, and the
    ServerDescription we are processing. Mutates ``sds`` in place.

    Returns (new topology type, new replica_set_name).
    """
    topology_type = TOPOLOGY_TYPE.ReplicaSetNoPrimary
    if replica_set_name is None:
        # First member seen names the set.
        replica_set_name = server_description.replica_set_name

    elif replica_set_name != server_description.replica_set_name:
        sds.pop(server_description.address)
        return topology_type, replica_set_name

    # This isn't the primary's response, so don't remove any servers
    # it doesn't report. Only add new servers.
    for address in server_description.all_hosts:
        if address not in sds:
            sds[address] = ServerDescription(address)

    return topology_type, replica_set_name


def _check_has_primary(sds):
    """Current topology type is ReplicaSetWithPrimary. Is primary still known?

    Pass in a dict of ServerDescriptions.

    Returns new topology type.
    """
    for s in sds.values():
        if s.server_type == SERVER_TYPE.RSPrimary:
            return TOPOLOGY_TYPE.ReplicaSetWithPrimary
    else:
        # for/else: the loop ran to completion without returning, i.e. no
        # RSPrimary was found among the descriptions.
        return TOPOLOGY_TYPE.ReplicaSetNoPrimary
gpl-2.0
Entropy512/kernel_find7_reference
Documentation/target/tcm_mod_builder.py
4981
41422
#!/usr/bin/python # The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD # # Copyright (c) 2010 Rising Tide Systems # Copyright (c) 2010 Linux-iSCSI.org # # Author: nab@kernel.org # import os, sys import subprocess as sub import string import re import optparse tcm_dir = "" fabric_ops = [] fabric_mod_dir = "" fabric_mod_port = "" fabric_mod_init_port = "" def tcm_mod_err(msg): print msg sys.exit(1) def tcm_mod_create_module_subdir(fabric_mod_dir_var): if os.path.isdir(fabric_mod_dir_var) == True: return 1 print "Creating fabric_mod_dir: " + fabric_mod_dir_var ret = os.mkdir(fabric_mod_dir_var) if ret: tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var) return def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n" buf += " u64 nport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n" buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* FC lport target portal group tag for TCM */\n" buf += " u16 lport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n" buf += " struct " + fabric_mod_name + "_lport *lport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_lport {\n" buf += " 
/* SCSI protocol the lport is providing */\n" buf += " u8 lport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n" buf += " u64 lport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Target Lport */\n" buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n" buf += " struct se_wwn lport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "lport" fabric_mod_init_port = "nport" return def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n" buf += " u64 iport_wwpn;\n" buf += " /* ASCII formatted WWPN for Sas Initiator port */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* SAS port target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for SAS Target port */\n" buf += 
" u64 tport_wwpn;\n" buf += " /* ASCII formatted WWPN for SAS Target port */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" fabric_mod_init_port = "iport" return def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* ASCII formatted InitiatorName */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* iSCSI target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* ASCII formatted TargetName for IQN */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" 
fabric_mod_init_port = "iport" return def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name): if proto_ident == "FC": tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "SAS": tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "iSCSI": tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name) else: print "Unsupported proto_ident: " + proto_ident sys.exit(1) return def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#include <linux/module.h>\n" buf += "#include <linux/moduleparam.h>\n" buf += "#include <linux/version.h>\n" buf += "#include <generated/utsrelease.h>\n" buf += "#include <linux/utsname.h>\n" buf += "#include <linux/init.h>\n" buf += "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/configfs.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_fabric.h>\n" buf += "#include <target/target_core_fabric_configfs.h>\n" buf += "#include <target/target_core_configfs.h>\n" buf += "#include <target/configfs_macros.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "/* Local pointer to allocated TCM configfs fabric module */\n" buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n" buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n" buf += " 
struct " + fabric_mod_name + "_nacl *nacl;\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n" buf += " u32 nexus_depth;\n\n" buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += " return ERR_PTR(-EINVAL); */\n" buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n" buf += " if (!se_nacl_new)\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n" buf += " nexus_depth = 1;\n" buf += " /*\n" buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n" buf += " * when converting a NodeACL from demo mode -> explict\n" buf += " */\n" buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n" buf += " name, nexus_depth);\n" buf += " if (IS_ERR(se_nacl)) {\n" buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n" buf += " return se_nacl;\n" buf += " }\n" buf += " /*\n" buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n" buf += " */\n" buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return se_nacl;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n" buf += " kfree(nacl);\n" buf += "}\n\n" buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n" buf += " struct se_wwn *wwn,\n" buf += " struct config_group *group,\n" buf += " const char 
*name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n" buf += " struct " + fabric_mod_name + "_tpg *tpg;\n" buf += " unsigned long tpgt;\n" buf += " int ret;\n\n" buf += " if (strstr(name, \"tpgt_\") != name)\n" buf += " return ERR_PTR(-EINVAL);\n" buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n" buf += " return ERR_PTR(-EINVAL);\n\n" buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n" buf += " if (!tpg) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n" buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n" buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n" buf += " &tpg->se_tpg, (void *)tpg,\n" buf += " TRANSPORT_TPG_TYPE_NORMAL);\n" buf += " if (ret < 0) {\n" buf += " kfree(tpg);\n" buf += " return NULL;\n" buf += " }\n" buf += " return &tpg->se_tpg;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n" buf += " core_tpg_deregister(se_tpg);\n" buf += " kfree(tpg);\n" buf += "}\n\n" buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n\n" buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += 
" return ERR_PTR(-EINVAL); */\n\n" buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n" buf += " if (!" + fabric_mod_port + ") {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n" buf += " kfree(" + fabric_mod_port + ");\n" buf += "}\n\n" buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " char *page)\n" buf += "{\n" buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += "}\n\n" buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n" buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n" buf += " &" + fabric_mod_name + "_wwn_version.attr,\n" buf += " NULL,\n" buf += "};\n\n" buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n" buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n" buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n" buf += " 
.tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n" buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n" buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n" buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n" buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n" buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n" buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n" buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n" buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n" buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n" buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n" buf += " .close_session = " + fabric_mod_name + "_close_session,\n" buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n" buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n" buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n" buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n" buf += " .sess_get_initiator_sid = NULL,\n" buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n" buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n" buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n" buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n" buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n" buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n" 
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n" buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n" buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n" buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n" buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n" buf += " /*\n" buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n" buf += " */\n" buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n" buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n" buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n" buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n" buf += " .fabric_post_link = NULL,\n" buf += " .fabric_pre_unlink = NULL,\n" buf += " .fabric_make_np = NULL,\n" buf += " .fabric_drop_np = NULL,\n" buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n" buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n" buf += "};\n\n" buf += "static int " + fabric_mod_name + "_register_configfs(void)\n" buf += "{\n" buf += " struct target_fabric_configfs *fabric;\n" buf += " int ret;\n\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += " /*\n" buf += " * Register the top level struct config_item_type with TCM core\n" buf += " */\n" buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n" buf += " if (IS_ERR(fabric)) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n" buf += " return PTR_ERR(fabric);\n" buf += " }\n" buf += " /*\n" buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n" buf += " */\n" buf += " fabric->tf_ops = " + fabric_mod_name + 
"_ops;\n" buf += " /*\n" buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n" buf += " */\n" buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n" buf += " /*\n" buf += " * Register the fabric for use within TCM\n" buf += " */\n" buf += " ret = target_fabric_configfs_register(fabric);\n" buf += " if (ret < 0) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n" buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n" buf += " return ret;\n" buf += " }\n" buf += " /*\n" buf += " * Setup our local pointer to *fabric\n" buf += " */\n" buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n" buf += "{\n" buf += " if (!" 
+ fabric_mod_name + "_fabric_configfs)\n" buf += " return;\n\n" buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n" buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += "};\n\n" buf += "static int __init " + fabric_mod_name + "_init(void)\n" buf += "{\n" buf += " int ret;\n\n" buf += " ret = " + fabric_mod_name + "_register_configfs();\n" buf += " if (ret < 0)\n" buf += " return ret;\n\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void __exit " + fabric_mod_name + "_exit(void)\n" buf += "{\n" buf += " " + fabric_mod_name + "_deregister_configfs();\n" buf += "};\n\n" buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n" buf += "MODULE_LICENSE(\"GPL\");\n" buf += "module_init(" + fabric_mod_name + "_init);\n" buf += "module_exit(" + fabric_mod_name + "_exit);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_scan_fabric_ops(tcm_dir): fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h" print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api process_fo = 0; p = open(fabric_ops_api, 'r') line = p.readline() while line: if process_fo == 0 and re.search('struct target_core_fabric_ops {', line): line = p.readline() continue if process_fo == 0: process_fo = 1; line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) continue line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) p.close() return def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" bufi = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) fi = 
fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h" print "Writing file: " + fi pi = open(fi, 'w') if not pi: tcm_mod_err("Unable to open file: " + fi) buf = "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/list.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n" buf += "#include <scsi/scsi.h>\n" buf += "#include <scsi/scsi_host.h>\n" buf += "#include <scsi/scsi_device.h>\n" buf += "#include <scsi/scsi_cmnd.h>\n" buf += "#include <scsi/libfc.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_fabric.h>\n" buf += "#include <target/target_core_configfs.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n" buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n" total_fabric_ops = len(fabric_ops) i = 0 while i < total_fabric_ops: fo = fabric_ops[i] i += 1 # print "fabric_ops: " + fo if re.search('get_fabric_name', fo): buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n" buf += "{\n" buf += " return \"" + fabric_mod_name[4:] + "\";\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n" continue if re.search('get_fabric_proto_ident', fo): buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct 
" + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " u8 proto_id;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" buf += " }\n\n" buf += " return proto_id;\n" buf += "}\n\n" bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n" if re.search('get_wwn', fo): buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n" if re.search('get_tag', fo): buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " return tpg->" + fabric_mod_port + "_tpgt;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n" if re.search('get_default_depth', fo): buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi 
+= "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n" if re.search('get_pr_transport_id\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code,\n" buf += " unsigned char *buf)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *, unsigned char *);\n" if re.search('get_pr_transport_id_len\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + 
fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *);\n" if re.search('parse_pr_out_transport_id\)\(', fo): buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " const char *buf,\n" buf += " u32 *out_tid_len,\n" buf += " char **port_nexus_ptr)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " char *tid = NULL;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " 
default:\n" buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" buf += " }\n\n" buf += " return tid;\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n" bufi += " const char *, u32 *, char **);\n" if re.search('alloc_fabric_acl\)\(', fo): buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n" buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n" buf += " if (!nacl) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n" buf += " return NULL;\n" buf += " }\n\n" buf += " return &nacl->se_node_acl;\n" buf += "}\n\n" bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n" if re.search('release_fabric_acl\)\(', fo): buf += "void " + fabric_mod_name + "_release_fabric_acl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " kfree(nacl);\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n" bufi += " struct se_node_acl *);\n" if re.search('tpg_get_inst_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n" if re.search('\*release_cmd\)\(', fo): buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd 
*se_cmd)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n" if re.search('shutdown_session\)\(', fo): buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n" if re.search('close_session\)\(', fo): buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n" if re.search('stop_session\)\(', fo): buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n" if re.search('fall_back_to_erl0\)\(', fo): buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n" if re.search('sess_logged_in\)\(', fo): buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n" if re.search('sess_get_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n" if re.search('write_pending\)\(', fo): buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n" if re.search('write_pending_status\)\(', fo): buf += "int " + fabric_mod_name + 
"_write_pending_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n" if re.search('set_default_node_attributes\)\(', fo): buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n" if re.search('get_task_tag\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n" if re.search('get_cmd_state\)\(', fo): buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n" if re.search('queue_data_in\)\(', fo): buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n" if re.search('queue_status\)\(', fo): buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n" if re.search('queue_tm_rsp\)\(', fo): buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n" if re.search('get_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n" if re.search('set_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 
sense_length)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n" if re.search('is_state_remove\)\(', fo): buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() ret = pi.write(bufi) if ret: tcm_mod_err("Unable to write fi: " + fi) pi.close() return def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Makefile" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n" buf += " " + fabric_mod_name + "_configfs.o\n" buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Kconfig" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf = "config " + fabric_mod_name.upper() + "\n" buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n" buf += " depends on TARGET_CORE && CONFIGFS_FS\n" buf += " default n\n" buf += " ---help---\n" buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name): buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n" kbuild = tcm_dir + "/drivers/target/Makefile" f = open(kbuild, 'a') f.write(buf) f.close() return def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name): buf = "source \"drivers/target/" + fabric_mod_name.lower() + 
"/Kconfig\"\n" kconfig = tcm_dir + "/drivers/target/Kconfig" f = open(kconfig, 'a') f.write(buf) f.close() return def main(modname, proto_ident): # proto_ident = "FC" # proto_ident = "SAS" # proto_ident = "iSCSI" tcm_dir = os.getcwd(); tcm_dir += "/../../" print "tcm_dir: " + tcm_dir fabric_mod_name = modname fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name print "Set fabric_mod_name: " + fabric_mod_name print "Set fabric_mod_dir: " + fabric_mod_dir print "Using proto_ident: " + proto_ident if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI": print "Unsupported proto_ident: " + proto_ident sys.exit(1) ret = tcm_mod_create_module_subdir(fabric_mod_dir) if ret: print "tcm_mod_create_module_subdir() failed because module already exists!" sys.exit(1) tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_scan_fabric_ops(tcm_dir) tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name) tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kbuild(tcm_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? 
[yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kconfig(tcm_dir, fabric_mod_name) return parser = optparse.OptionParser() parser.add_option('-m', '--modulename', help='Module name', dest='modname', action='store', nargs=1, type='string') parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident', action='store', nargs=1, type='string') (opts, args) = parser.parse_args() mandatories = ['modname', 'protoident'] for m in mandatories: if not opts.__dict__[m]: print "mandatory option is missing\n" parser.print_help() exit(-1) if __name__ == "__main__": main(str(opts.modname), opts.protoident)
gpl-2.0
axinging/chromium-crosswalk
build/android/gyp/create_test_runner_script.py
7
4838
#!/usr/bin/env python
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Creates a script to run an android test using build/android/test_runner.py.

The generated wrapper bakes in a fixed set of test_runner.py arguments and
rewrites any path-valued arguments so they are relative to the wrapper's own
directory (the wrapper re-resolves them to absolute paths at run time, so the
output tree stays relocatable).
"""

import argparse
import os
import sys

from util import build_utils

SCRIPT_TEMPLATE = """\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_test_runner_script.py

import os
import subprocess
import sys

def main():
  script_directory = os.path.dirname(__file__)

  def ResolvePath(path):
    \"\"\"Returns an absolute filepath given a path relative to this script.
    \"\"\"
    return os.path.abspath(os.path.join(script_directory, path))

  test_runner_path = ResolvePath('{test_runner_path}')
  test_runner_args = {test_runner_args}
  test_runner_path_args = {test_runner_path_args}
  for arg, path in test_runner_path_args:
    test_runner_args.extend([arg, ResolvePath(path)])

  test_runner_cmd = [test_runner_path] + test_runner_args + sys.argv[1:]
  return subprocess.call(test_runner_cmd)

if __name__ == '__main__':
  sys.exit(main())
"""


def main(args):
  """Writes the executable wrapper script described by the command line.

  Args:
    args: List of command-line arguments for this generator (typically
        sys.argv[1:]). Arguments not recognized here are passed through
        verbatim to the generated script as test_runner.py arguments.

  Side effects:
    Writes the wrapper to --script-output-path, marks it executable, and
    optionally writes a depfile for the build system.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--script-output-path',
                      help='Output path for executable script.')
  parser.add_argument('--depfile',
                      help='Path to the depfile. This must be specified as '
                           "the action's first output.")
  parser.add_argument('--test-runner-path',
                      help='Path to test_runner.py (optional).')
  # We need to intercept any test runner path arguments and make all
  # of the paths relative to the output script directory.
  group = parser.add_argument_group('Test runner path arguments.')
  group.add_argument('--additional-apk', action='append',
                     dest='additional_apks', default=[])
  group.add_argument('--additional-apk-list')
  group.add_argument('--apk-under-test')
  group.add_argument('--apk-under-test-incremental-install-script')
  group.add_argument('--executable-dist-dir')
  group.add_argument('--isolate-file-path')
  group.add_argument('--output-directory')
  group.add_argument('--test-apk')
  group.add_argument('--test-apk-incremental-install-script')
  group.add_argument('--coverage-dir')
  args, test_runner_args = parser.parse_known_args(
      build_utils.ExpandFileArgs(args))

  def RelativizePathToScript(path):
    """Returns the path relative to the output script directory."""
    return os.path.relpath(path, os.path.dirname(args.script_output_path))

  # Default to the in-tree test_runner.py when no explicit path was given.
  test_runner_path = args.test_runner_path or os.path.join(
      os.path.dirname(__file__), os.path.pardir, 'test_runner.py')
  test_runner_path = RelativizePathToScript(test_runner_path)

  test_runner_path_args = []
  if args.additional_apk_list:
    args.additional_apks.extend(
        build_utils.ParseGypList(args.additional_apk_list))
  if args.additional_apks:
    test_runner_path_args.extend(
        ('--additional-apk', RelativizePathToScript(a))
        for a in args.additional_apks)
  if args.apk_under_test:
    test_runner_path_args.append(
        ('--apk-under-test', RelativizePathToScript(args.apk_under_test)))
  if args.apk_under_test_incremental_install_script:
    test_runner_path_args.append(
        ('--apk-under-test-incremental-install-script',
         RelativizePathToScript(
             args.apk_under_test_incremental_install_script)))
  if args.executable_dist_dir:
    test_runner_path_args.append(
        ('--executable-dist-dir',
         RelativizePathToScript(args.executable_dist_dir)))
  if args.isolate_file_path:
    test_runner_path_args.append(
        ('--isolate-file-path',
         RelativizePathToScript(args.isolate_file_path)))
  if args.output_directory:
    test_runner_path_args.append(
        ('--output-directory',
         RelativizePathToScript(args.output_directory)))
  if args.test_apk:
    test_runner_path_args.append(
        ('--test-apk', RelativizePathToScript(args.test_apk)))
  if args.test_apk_incremental_install_script:
    test_runner_path_args.append(
        ('--test-apk-incremental-install-script',
         RelativizePathToScript(args.test_apk_incremental_install_script)))
  if args.coverage_dir:
    test_runner_path_args.append(
        ('--coverage-dir', RelativizePathToScript(args.coverage_dir)))

  with open(args.script_output_path, 'w') as script:
    script.write(SCRIPT_TEMPLATE.format(
        test_runner_path=str(test_runner_path),
        test_runner_args=str(test_runner_args),
        test_runner_path_args=str(test_runner_path_args)))

  # rwxr-x---. Written as 0o750 (valid on Python 2.6+ and 3.x) instead of the
  # Python-2-only literal 0750.
  os.chmod(args.script_output_path, 0o750)

  if args.depfile:
    build_utils.WriteDepfile(
        args.depfile,
        build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
bsd-3-clause
40223125/w16btest1
static/Brython3.1.3-20150514-095342/Lib/unittest/test/test_assertions.py
738
15398
import datetime import warnings import unittest from itertools import product class Test_Assertions(unittest.TestCase): def test_AlmostEqual(self): self.assertAlmostEqual(1.00000001, 1.0) self.assertNotAlmostEqual(1.0000001, 1.0) self.assertRaises(self.failureException, self.assertAlmostEqual, 1.0000001, 1.0) self.assertRaises(self.failureException, self.assertNotAlmostEqual, 1.00000001, 1.0) self.assertAlmostEqual(1.1, 1.0, places=0) self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, places=1) self.assertAlmostEqual(0, .1+.1j, places=0) self.assertNotAlmostEqual(0, .1+.1j, places=1) self.assertRaises(self.failureException, self.assertAlmostEqual, 0, .1+.1j, places=1) self.assertRaises(self.failureException, self.assertNotAlmostEqual, 0, .1+.1j, places=0) self.assertAlmostEqual(float('inf'), float('inf')) self.assertRaises(self.failureException, self.assertNotAlmostEqual, float('inf'), float('inf')) def test_AmostEqualWithDelta(self): self.assertAlmostEqual(1.1, 1.0, delta=0.5) self.assertAlmostEqual(1.0, 1.1, delta=0.5) self.assertNotAlmostEqual(1.1, 1.0, delta=0.05) self.assertNotAlmostEqual(1.0, 1.1, delta=0.05) self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, delta=0.05) self.assertRaises(self.failureException, self.assertNotAlmostEqual, 1.1, 1.0, delta=0.5) self.assertRaises(TypeError, self.assertAlmostEqual, 1.1, 1.0, places=2, delta=2) self.assertRaises(TypeError, self.assertNotAlmostEqual, 1.1, 1.0, places=2, delta=2) first = datetime.datetime.now() second = first + datetime.timedelta(seconds=10) self.assertAlmostEqual(first, second, delta=datetime.timedelta(seconds=20)) self.assertNotAlmostEqual(first, second, delta=datetime.timedelta(seconds=5)) def test_assertRaises(self): def _raise(e): raise e self.assertRaises(KeyError, _raise, KeyError) self.assertRaises(KeyError, _raise, KeyError("key")) try: self.assertRaises(KeyError, lambda: None) except self.failureException as e: self.assertIn("KeyError not 
raised", str(e)) else: self.fail("assertRaises() didn't fail") try: self.assertRaises(KeyError, _raise, ValueError) except ValueError: pass else: self.fail("assertRaises() didn't let exception pass through") with self.assertRaises(KeyError) as cm: try: raise KeyError except Exception as e: exc = e raise self.assertIs(cm.exception, exc) with self.assertRaises(KeyError): raise KeyError("key") try: with self.assertRaises(KeyError): pass except self.failureException as e: self.assertIn("KeyError not raised", str(e)) else: self.fail("assertRaises() didn't fail") try: with self.assertRaises(KeyError): raise ValueError except ValueError: pass else: self.fail("assertRaises() didn't let exception pass through") def testAssertNotRegex(self): self.assertNotRegex('Ala ma kota', r'r+') try: self.assertNotRegex('Ala ma kota', r'k.t', 'Message') except self.failureException as e: self.assertIn("'kot'", e.args[0]) self.assertIn('Message', e.args[0]) else: self.fail('assertNotRegex should have failed.') class TestLongMessage(unittest.TestCase): """Test that the individual asserts honour longMessage. 
This actually tests all the message behaviour for asserts that use longMessage.""" def setUp(self): class TestableTestFalse(unittest.TestCase): longMessage = False failureException = self.failureException def testTest(self): pass class TestableTestTrue(unittest.TestCase): longMessage = True failureException = self.failureException def testTest(self): pass self.testableTrue = TestableTestTrue('testTest') self.testableFalse = TestableTestFalse('testTest') def testDefault(self): self.assertTrue(unittest.TestCase.longMessage) def test_formatMsg(self): self.assertEqual(self.testableFalse._formatMessage(None, "foo"), "foo") self.assertEqual(self.testableFalse._formatMessage("foo", "bar"), "foo") self.assertEqual(self.testableTrue._formatMessage(None, "foo"), "foo") self.assertEqual(self.testableTrue._formatMessage("foo", "bar"), "bar : foo") # This blows up if _formatMessage uses string concatenation self.testableTrue._formatMessage(object(), 'foo') def test_formatMessage_unicode_error(self): one = ''.join(chr(i) for i in range(255)) # this used to cause a UnicodeDecodeError constructing msg self.testableTrue._formatMessage(one, '\uFFFD') def assertMessages(self, methodName, args, errors): """ Check that methodName(*args) raises the correct error messages. 
errors should be a list of 4 regex that match the error when: 1) longMessage = False and no msg passed; 2) longMessage = False and msg passed; 3) longMessage = True and no msg passed; 4) longMessage = True and msg passed; """ def getMethod(i): useTestableFalse = i < 2 if useTestableFalse: test = self.testableFalse else: test = self.testableTrue return getattr(test, methodName) for i, expected_regex in enumerate(errors): testMethod = getMethod(i) kwargs = {} withMsg = i % 2 if withMsg: kwargs = {"msg": "oops"} with self.assertRaisesRegex(self.failureException, expected_regex=expected_regex): testMethod(*args, **kwargs) def testAssertTrue(self): self.assertMessages('assertTrue', (False,), ["^False is not true$", "^oops$", "^False is not true$", "^False is not true : oops$"]) def testAssertFalse(self): self.assertMessages('assertFalse', (True,), ["^True is not false$", "^oops$", "^True is not false$", "^True is not false : oops$"]) def testNotEqual(self): self.assertMessages('assertNotEqual', (1, 1), ["^1 == 1$", "^oops$", "^1 == 1$", "^1 == 1 : oops$"]) def testAlmostEqual(self): self.assertMessages('assertAlmostEqual', (1, 2), ["^1 != 2 within 7 places$", "^oops$", "^1 != 2 within 7 places$", "^1 != 2 within 7 places : oops$"]) def testNotAlmostEqual(self): self.assertMessages('assertNotAlmostEqual', (1, 1), ["^1 == 1 within 7 places$", "^oops$", "^1 == 1 within 7 places$", "^1 == 1 within 7 places : oops$"]) def test_baseAssertEqual(self): self.assertMessages('_baseAssertEqual', (1, 2), ["^1 != 2$", "^oops$", "^1 != 2$", "^1 != 2 : oops$"]) def testAssertSequenceEqual(self): # Error messages are multiline so not testing on full message # assertTupleEqual and assertListEqual delegate to this method self.assertMessages('assertSequenceEqual', ([], [None]), ["\+ \[None\]$", "^oops$", r"\+ \[None\]$", r"\+ \[None\] : oops$"]) def testAssertSetEqual(self): self.assertMessages('assertSetEqual', (set(), set([None])), ["None$", "^oops$", "None$", "None : oops$"]) def 
testAssertIn(self): self.assertMessages('assertIn', (None, []), ['^None not found in \[\]$', "^oops$", '^None not found in \[\]$', '^None not found in \[\] : oops$']) def testAssertNotIn(self): self.assertMessages('assertNotIn', (None, [None]), ['^None unexpectedly found in \[None\]$', "^oops$", '^None unexpectedly found in \[None\]$', '^None unexpectedly found in \[None\] : oops$']) def testAssertDictEqual(self): self.assertMessages('assertDictEqual', ({}, {'key': 'value'}), [r"\+ \{'key': 'value'\}$", "^oops$", "\+ \{'key': 'value'\}$", "\+ \{'key': 'value'\} : oops$"]) def testAssertDictContainsSubset(self): with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) self.assertMessages('assertDictContainsSubset', ({'key': 'value'}, {}), ["^Missing: 'key'$", "^oops$", "^Missing: 'key'$", "^Missing: 'key' : oops$"]) def testAssertMultiLineEqual(self): self.assertMessages('assertMultiLineEqual', ("", "foo"), [r"\+ foo$", "^oops$", r"\+ foo$", r"\+ foo : oops$"]) def testAssertLess(self): self.assertMessages('assertLess', (2, 1), ["^2 not less than 1$", "^oops$", "^2 not less than 1$", "^2 not less than 1 : oops$"]) def testAssertLessEqual(self): self.assertMessages('assertLessEqual', (2, 1), ["^2 not less than or equal to 1$", "^oops$", "^2 not less than or equal to 1$", "^2 not less than or equal to 1 : oops$"]) def testAssertGreater(self): self.assertMessages('assertGreater', (1, 2), ["^1 not greater than 2$", "^oops$", "^1 not greater than 2$", "^1 not greater than 2 : oops$"]) def testAssertGreaterEqual(self): self.assertMessages('assertGreaterEqual', (1, 2), ["^1 not greater than or equal to 2$", "^oops$", "^1 not greater than or equal to 2$", "^1 not greater than or equal to 2 : oops$"]) def testAssertIsNone(self): self.assertMessages('assertIsNone', ('not None',), ["^'not None' is not None$", "^oops$", "^'not None' is not None$", "^'not None' is not None : oops$"]) def testAssertIsNotNone(self): self.assertMessages('assertIsNotNone', 
(None,), ["^unexpectedly None$", "^oops$", "^unexpectedly None$", "^unexpectedly None : oops$"]) def testAssertIs(self): self.assertMessages('assertIs', (None, 'foo'), ["^None is not 'foo'$", "^oops$", "^None is not 'foo'$", "^None is not 'foo' : oops$"]) def testAssertIsNot(self): self.assertMessages('assertIsNot', (None, None), ["^unexpectedly identical: None$", "^oops$", "^unexpectedly identical: None$", "^unexpectedly identical: None : oops$"]) def assertMessagesCM(self, methodName, args, func, errors): """ Check that the correct error messages are raised while executing: with method(*args): func() *errors* should be a list of 4 regex that match the error when: 1) longMessage = False and no msg passed; 2) longMessage = False and msg passed; 3) longMessage = True and no msg passed; 4) longMessage = True and msg passed; """ p = product((self.testableFalse, self.testableTrue), ({}, {"msg": "oops"})) for (cls, kwargs), err in zip(p, errors): method = getattr(cls, methodName) with self.assertRaisesRegex(cls.failureException, err): with method(*args, **kwargs) as cm: func() def testAssertRaises(self): self.assertMessagesCM('assertRaises', (TypeError,), lambda: None, ['^TypeError not raised$', '^oops$', '^TypeError not raised$', '^TypeError not raised : oops$']) def testAssertRaisesRegex(self): # test error not raised self.assertMessagesCM('assertRaisesRegex', (TypeError, 'unused regex'), lambda: None, ['^TypeError not raised$', '^oops$', '^TypeError not raised$', '^TypeError not raised : oops$']) # test error raised but with wrong message def raise_wrong_message(): raise TypeError('foo') self.assertMessagesCM('assertRaisesRegex', (TypeError, 'regex'), raise_wrong_message, ['^"regex" does not match "foo"$', '^oops$', '^"regex" does not match "foo"$', '^"regex" does not match "foo" : oops$']) def testAssertWarns(self): self.assertMessagesCM('assertWarns', (UserWarning,), lambda: None, ['^UserWarning not triggered$', '^oops$', '^UserWarning not triggered$', 
'^UserWarning not triggered : oops$']) def testAssertWarnsRegex(self): # test error not raised self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'unused regex'), lambda: None, ['^UserWarning not triggered$', '^oops$', '^UserWarning not triggered$', '^UserWarning not triggered : oops$']) # test warning raised but with wrong message def raise_wrong_message(): warnings.warn('foo') self.assertMessagesCM('assertWarnsRegex', (UserWarning, 'regex'), raise_wrong_message, ['^"regex" does not match "foo"$', '^oops$', '^"regex" does not match "foo"$', '^"regex" does not match "foo" : oops$'])
agpl-3.0
projectatomic/commissaire-http
test/__init__.py
1
2095
# Copyright (C) 2016 Red Hat, Inc # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os from unittest import TestCase, mock from commissaire_http.handlers import create_jsonrpc_response def create_environ(path='/', headers={}): """ Shortcut for creating an fake WSGI environ. """ env = { 'PATH_INFO': path, } env.update(headers) return env def get_fixture_file_path(filename): """ Attempts to return the path to a fixture file. :param filename: The name of the file to look for. :type filename: str :returns: Full path to the file :rtype: str :raises: Exception """ for x in ('.', '..'): try: a_path = os.path.sep.join((x, filename)) os.stat(a_path) return os.path.realpath(a_path) except: pass raise Exception( 'Can not find path for config: {0}'.format(filename)) def expected_error(message_id, code): """ Creates an expected error structure with the error information as mock.ANY. :param message_id: The ID of the message. :type message_id: str :param code: The JSONRPC_ERRORS code to use. :type code: int :returns: An error structure for use with tests. :rtpe: dict """ expected = create_jsonrpc_response( message_id, error='error', error_code=code) expected['error'] = mock.ANY return expected class TestCase(TestCase): """ Parent class for all unittests. """ pass
gpl-3.0
tersmitten/ansible
lib/ansible/modules/network/iosxr/iosxr_system.py
11
24654
#!/usr/bin/python # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = """ --- module: iosxr_system version_added: "2.3" author: - "Peter Sprygada (@privateip)" - "Kedar Kekan (@kedarX)" short_description: Manage the system attributes on Cisco IOS XR devices description: - This module provides declarative management of node system attributes on Cisco IOS XR devices. It provides an option to configure host system parameters or remove those parameters from the device active configuration. extends_documentation_fragment: iosxr notes: - Tested against IOS XRv 6.1.2 - name-servers I(state=absent) operation with C(netconf) transport is a success, but with rpc-error. This is due to XR platform issue. Recommended to use I(ignore_errors) option with the task as a workaround. options: hostname: description: - Configure the device hostname parameter. This option takes an ASCII string value. vrf: description: - VRF name for domain services version_added: 2.5 domain_name: description: - Configure the IP domain name on the remote device to the provided value. Value should be in the dotted name form and will be appended to the C(hostname) to create a fully-qualified domain name. domain_search: description: - Provides the list of domain suffixes to append to the hostname for the purpose of doing name resolution. This argument accepts a list of names and will be reconciled with the current active configuration on the running node. lookup_source: description: - The C(lookup_source) argument provides one or more source interfaces to use for performing DNS lookups. The interface provided in C(lookup_source) must be a valid interface configured on the device. 
lookup_enabled: description: - Provides administrative control for enabling or disabling DNS lookups. When this argument is set to True, lookups are performed and when it is set to False, lookups are not performed. type: bool name_servers: description: - The C(name_serves) argument accepts a list of DNS name servers by way of either FQDN or IP address to use to perform name resolution lookups. This argument accepts wither a list of DNS servers See examples. state: description: - State of the configuration values in the device's current active configuration. When set to I(present), the values should be configured in the device active configuration and when set to I(absent) the values should not be in the device active configuration default: present choices: ['present', 'absent'] """ EXAMPLES = """ - name: configure hostname and domain-name (default vrf=default) iosxr_system: hostname: iosxr01 domain_name: test.example.com domain-search: - ansible.com - redhat.com - cisco.com - name: remove configuration iosxr_system: hostname: iosxr01 domain_name: test.example.com domain-search: - ansible.com - redhat.com - cisco.com state: absent - name: configure hostname and domain-name with vrf iosxr_system: hostname: iosxr01 vrf: nondefault domain_name: test.example.com domain-search: - ansible.com - redhat.com - cisco.com - name: configure DNS lookup sources iosxr_system: lookup_source: MgmtEth0/0/CPU0/0 lookup_enabled: True - name: configure name servers iosxr_system: name_servers: - 8.8.8.8 - 8.8.4.4 """ RETURN = """ commands: description: The list of configuration mode commands to send to the device returned: always type: list sample: - hostname iosxr01 - ip domain-name test.example.com xml: description: NetConf rpc xml sent to device with transport C(netconf) returned: always (empty list when no xml rpc to send) type: list version_added: 2.5 sample: - '<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0"> <ip-domain 
xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-ip-domain-cfg"> <vrfs> <vrf> <vrf-name>default</vrf-name> <lists> <list xc:operation="merge"> <order>0</order> <list-name>redhat.com</list-name> </list> </lists> </vrf> </vrfs> </ip-domain> </config>' """ import re import collections from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.iosxr.iosxr import get_config, load_config, etree_findall from ansible.module_utils.network.iosxr.iosxr import is_cliconf, is_netconf, etree_find from ansible.module_utils.network.iosxr.iosxr import iosxr_argument_spec, build_xml def diff_list(want, have): adds = set(want).difference(have) removes = set(have).difference(want) return (adds, removes) class ConfigBase(object): def __init__(self, module): self._module = module self._result = {'changed': False, 'warnings': []} self._want = dict() self._have = dict() def map_params_to_obj(self): self._want.update({ 'hostname': self._module.params['hostname'], 'vrf': self._module.params['vrf'], 'domain_name': self._module.params['domain_name'], 'domain_search': self._module.params['domain_search'], 'lookup_source': self._module.params['lookup_source'], 'lookup_enabled': self._module.params['lookup_enabled'], 'name_servers': self._module.params['name_servers'] }) class CliConfiguration(ConfigBase): def __init__(self, module): super(CliConfiguration, self).__init__(module) def map_obj_to_commands(self): commands = list() state = self._module.params['state'] def needs_update(x): return self._want.get(x) and (self._want.get(x) != self._have.get(x)) if state == 'absent': if self._have['hostname'] != 'ios': commands.append('no hostname') if self._have['domain_name']: commands.append('no domain name') if self._have['lookup_source']: commands.append('no domain lookup source-interface {0!s}'.format(self._have['lookup_source'])) if not self._have['lookup_enabled']: commands.append('no domain lookup disable') for item in self._have['name_servers']: commands.append('no 
domain name-server {0!s}'.format(item)) for item in self._have['domain_search']: commands.append('no domain list {0!s}'.format(item)) elif state == 'present': if needs_update('hostname'): commands.append('hostname {0!s}'.format(self._want['hostname'])) if needs_update('domain_name'): commands.append('domain name {0!s}'.format(self._want['domain_name'])) if needs_update('lookup_source'): commands.append('domain lookup source-interface {0!s}'.format(self._want['lookup_source'])) cmd = None if not self._want['lookup_enabled'] and self._have['lookup_enabled']: cmd = 'domain lookup disable' elif self._want['lookup_enabled'] and not self._have['lookup_enabled']: cmd = 'no domain lookup disable' if cmd is not None: commands.append(cmd) if self._want['name_servers'] is not None: adds, removes = diff_list(self._want['name_servers'], self._have['name_servers']) for item in adds: commands.append('domain name-server {0!s}'.format(item)) for item in removes: commands.append('no domain name-server {0!s}'.format(item)) if self._want['domain_search'] is not None: adds, removes = diff_list(self._want['domain_search'], self._have['domain_search']) for item in adds: commands.append('domain list {0!s}'.format(item)) for item in removes: commands.append('no domain list {0!s}'.format(item)) self._result['commands'] = [] if commands: commit = not self._module.check_mode diff = load_config(self._module, commands, commit=commit) if diff: self._result['diff'] = dict(prepared=diff) self._result['commands'] = commands self._result['changed'] = True def parse_hostname(self, config): match = re.search(r'^hostname (\S+)', config, re.M) if match: return match.group(1) def parse_domain_name(self, config): match = re.search(r'^domain name (\S+)', config, re.M) if match: return match.group(1) def parse_lookup_source(self, config): match = re.search(r'^domain lookup source-interface (\S+)', config, re.M) if match: return match.group(1) def map_config_to_obj(self): config = get_config(self._module) 
self._have.update({ 'hostname': self.parse_hostname(config), 'domain_name': self.parse_domain_name(config), 'domain_search': re.findall(r'^domain list (\S+)', config, re.M), 'lookup_source': self.parse_lookup_source(config), 'lookup_enabled': 'domain lookup disable' not in config, 'name_servers': re.findall(r'^domain name-server (\S+)', config, re.M) }) def run(self): self.map_params_to_obj() self.map_config_to_obj() self.map_obj_to_commands() return self._result class NCConfiguration(ConfigBase): def __init__(self, module): super(NCConfiguration, self).__init__(module) self._system_meta = collections.OrderedDict() self._system_domain_meta = collections.OrderedDict() self._system_server_meta = collections.OrderedDict() self._hostname_meta = collections.OrderedDict() self._lookup_source_meta = collections.OrderedDict() self._lookup_meta = collections.OrderedDict() def map_obj_to_xml_rpc(self): self._system_meta.update([ ('vrfs', {'xpath': 'ip-domain/vrfs', 'tag': True, 'operation': 'edit'}), ('vrf', {'xpath': 'ip-domain/vrfs/vrf', 'tag': True, 'operation': 'edit'}), ('a:vrf', {'xpath': 'ip-domain/vrfs/vrf/vrf-name', 'operation': 'edit'}), ('a:domain_name', {'xpath': 'ip-domain/vrfs/vrf/name', 'operation': 'edit', 'attrib': "operation"}), ]) self._system_domain_meta.update([ ('vrfs', {'xpath': 'ip-domain/vrfs', 'tag': True, 'operation': 'edit'}), ('vrf', {'xpath': 'ip-domain/vrfs/vrf', 'tag': True, 'operation': 'edit'}), ('a:vrf', {'xpath': 'ip-domain/vrfs/vrf/vrf-name', 'operation': 'edit'}), ('lists', {'xpath': 'ip-domain/vrfs/vrf/lists', 'tag': True, 'operation': 'edit'}), ('list', {'xpath': 'ip-domain/vrfs/vrf/lists/list', 'tag': True, 'operation': 'edit', 'attrib': "operation"}), ('a:order', {'xpath': 'ip-domain/vrfs/vrf/lists/list/order', 'operation': 'edit'}), ('a:domain_search', {'xpath': 'ip-domain/vrfs/vrf/lists/list/list-name', 'operation': 'edit'}), ]) self._system_server_meta.update([ ('vrfs', {'xpath': 'ip-domain/vrfs', 'tag': True, 'operation': 
'edit'}), ('vrf', {'xpath': 'ip-domain/vrfs/vrf', 'tag': True, 'operation': 'edit'}), ('a:vrf', {'xpath': 'ip-domain/vrfs/vrf/vrf-name', 'operation': 'edit'}), ('servers', {'xpath': 'ip-domain/vrfs/vrf/servers', 'tag': True, 'operation': 'edit'}), ('server', {'xpath': 'ip-domain/vrfs/vrf/servers/server', 'tag': True, 'operation': 'edit', 'attrib': "operation"}), ('a:order', {'xpath': 'ip-domain/vrfs/vrf/servers/server/order', 'operation': 'edit'}), ('a:name_servers', {'xpath': 'ip-domain/vrfs/vrf/servers/server/server-address', 'operation': 'edit'}), ]) self._hostname_meta.update([ ('a:hostname', {'xpath': 'host-names/host-name', 'operation': 'edit', 'attrib': "operation"}), ]) self._lookup_source_meta.update([ ('vrfs', {'xpath': 'ip-domain/vrfs', 'tag': True, 'operation': 'edit'}), ('vrf', {'xpath': 'ip-domain/vrfs/vrf', 'tag': True, 'operation': 'edit'}), ('a:vrf', {'xpath': 'ip-domain/vrfs/vrf/vrf-name', 'operation': 'edit'}), ('a:lookup_source', {'xpath': 'ip-domain/vrfs/vrf/source-interface', 'operation': 'edit', 'attrib': "operation"}), ]) self._lookup_meta.update([ ('vrfs', {'xpath': 'ip-domain/vrfs', 'tag': True, 'operation': 'edit'}), ('vrf', {'xpath': 'ip-domain/vrfs/vrf', 'tag': True, 'operation': 'edit'}), ('a:vrf', {'xpath': 'ip-domain/vrfs/vrf/vrf-name', 'operation': 'edit'}), ('lookup', {'xpath': 'ip-domain/vrfs/vrf/lookup', 'tag': True, 'operation': 'edit', 'attrib': "operation"}), ]) state = self._module.params['state'] _get_filter = build_xml('ip-domain', opcode="filter") running = get_config(self._module, source='running', config_filter=_get_filter) _get_filter = build_xml('host-names', opcode="filter") hostname_runn = get_config(self._module, source='running', config_filter=_get_filter) hostname_ele = etree_find(hostname_runn, 'host-name') hostname = hostname_ele.text if hostname_ele is not None else None vrf_ele = etree_findall(running, 'vrf') vrf_map = {} for vrf in vrf_ele: name_server_list = list() domain_list = list() vrf_name_ele = 
etree_find(vrf, 'vrf-name') vrf_name = vrf_name_ele.text if vrf_name_ele is not None else None domain_name_ele = etree_find(vrf, 'name') domain_name = domain_name_ele.text if domain_name_ele is not None else None domain_ele = etree_findall(vrf, 'list-name') for domain in domain_ele: domain_list.append(domain.text) server_ele = etree_findall(vrf, 'server-address') for server in server_ele: name_server_list.append(server.text) lookup_source_ele = etree_find(vrf, 'source-interface') lookup_source = lookup_source_ele.text if lookup_source_ele is not None else None lookup_enabled = False if etree_find(vrf, 'lookup') is not None else True vrf_map[vrf_name] = {'domain_name': domain_name, 'domain_search': domain_list, 'name_servers': name_server_list, 'lookup_source': lookup_source, 'lookup_enabled': lookup_enabled} opcode = None hostname_param = {} lookup_param = {} system_param = {} sys_server_params = list() sys_domain_params = list() add_domain_params = list() del_domain_params = list() add_server_params = list() del_server_params = list() lookup_source_params = {} try: sys_node = vrf_map[self._want['vrf']] except KeyError: sys_node = {'domain_name': None, 'domain_search': [], 'name_servers': [], 'lookup_source': None, 'lookup_enabled': True} if state == 'absent': opcode = "delete" def needs_update(x): return self._want[x] is not None and self._want[x] == sys_node[x] if needs_update('domain_name'): system_param = {'vrf': self._want['vrf'], 'domain_name': self._want['domain_name']} if needs_update('hostname'): hostname_param = {'hostname': hostname} if not self._want['lookup_enabled'] and not sys_node['lookup_enabled']: lookup_param['vrf'] = self._want['vrf'] if needs_update('lookup_source'): lookup_source_params['vrf'] = self._want['vrf'] lookup_source_params['lookup_source'] = self._want['lookup_source'] if self._want['domain_search']: domain_param = {} domain_param['domain_name'] = self._want['domain_name'] domain_param['vrf'] = self._want['vrf'] 
domain_param['order'] = '0' for domain in self._want['domain_search']: if domain in sys_node['domain_search']: domain_param['domain_search'] = domain sys_domain_params.append(domain_param.copy()) if self._want['name_servers']: server_param = {} server_param['vrf'] = self._want['vrf'] server_param['order'] = '0' for server in self._want['name_servers']: if server in sys_node['name_servers']: server_param['name_servers'] = server sys_server_params.append(server_param.copy()) elif state == 'present': opcode = "merge" def needs_update(x): return self._want[x] is not None and (sys_node[x] is None or (sys_node[x] is not None and self._want[x] != sys_node[x])) if needs_update('domain_name'): system_param = {'vrf': self._want['vrf'], 'domain_name': self._want['domain_name']} if self._want['hostname'] is not None and self._want['hostname'] != hostname: hostname_param = {'hostname': self._want['hostname']} if not self._want['lookup_enabled'] and sys_node['lookup_enabled']: lookup_param['vrf'] = self._want['vrf'] if needs_update('lookup_source'): lookup_source_params['vrf'] = self._want['vrf'] lookup_source_params['lookup_source'] = self._want['lookup_source'] if self._want['domain_search']: domain_adds, domain_removes = diff_list(self._want['domain_search'], sys_node['domain_search']) domain_param = {} domain_param['domain_name'] = self._want['domain_name'] domain_param['vrf'] = self._want['vrf'] domain_param['order'] = '0' for domain in domain_adds: if domain not in sys_node['domain_search']: domain_param['domain_search'] = domain add_domain_params.append(domain_param.copy()) for domain in domain_removes: if domain in sys_node['domain_search']: domain_param['domain_search'] = domain del_domain_params.append(domain_param.copy()) if self._want['name_servers']: server_adds, server_removes = diff_list(self._want['name_servers'], sys_node['name_servers']) server_param = {} server_param['vrf'] = self._want['vrf'] server_param['order'] = '0' for domain in server_adds: if domain 
not in sys_node['name_servers']: server_param['name_servers'] = domain add_server_params.append(server_param.copy()) for domain in server_removes: if domain in sys_node['name_servers']: server_param['name_servers'] = domain del_server_params.append(server_param.copy()) self._result['xml'] = [] _edit_filter_list = list() if opcode: if hostname_param: _edit_filter_list.append(build_xml('host-names', xmap=self._hostname_meta, params=hostname_param, opcode=opcode)) if system_param: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_meta, params=system_param, opcode=opcode)) if lookup_source_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._lookup_source_meta, params=lookup_source_params, opcode=opcode)) if lookup_param: _edit_filter_list.append(build_xml('ip-domain', xmap=self._lookup_meta, params=lookup_param, opcode=opcode)) if opcode == 'delete': if sys_domain_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_domain_meta, params=sys_domain_params, opcode=opcode)) if sys_server_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_server_meta, params=sys_server_params, opcode=opcode)) if self._want['vrf'] != 'default': self._result['warnings'] = ["name-servers delete operation with non-default vrf is a success, " "but with rpc-error. 
Recommended to use 'ignore_errors' option with the task as a workaround"] elif opcode == 'merge': if add_domain_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_domain_meta, params=add_domain_params, opcode=opcode)) if del_domain_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_domain_meta, params=del_domain_params, opcode="delete")) if add_server_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_server_meta, params=add_server_params, opcode=opcode)) if del_server_params: _edit_filter_list.append(build_xml('ip-domain', xmap=self._system_server_meta, params=del_server_params, opcode="delete")) diff = None if _edit_filter_list: commit = not self._module.check_mode diff = load_config(self._module, _edit_filter_list, commit=commit, running=running, nc_get_filter=_get_filter) if diff: if self._module._diff: self._result['diff'] = dict(prepared=diff) self._result['xml'] = _edit_filter_list self._result['changed'] = True def run(self): self.map_params_to_obj() self.map_obj_to_xml_rpc() return self._result def main(): """ Main entry point for Ansible module execution """ argument_spec = dict( hostname=dict(), vrf=dict(type='str', default='default'), domain_name=dict(), domain_search=dict(type='list'), name_servers=dict(type='list'), lookup_source=dict(), lookup_enabled=dict(type='bool', default=True), state=dict(choices=['present', 'absent'], default='present') ) argument_spec.update(iosxr_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) config_object = None if is_cliconf(module): # Commenting the below cliconf deprecation support call for Ansible 2.9 as it'll be continued to be supported # module.deprecate("cli support for 'iosxr_interface' is deprecated. 
Use transport netconf instead", # version='2.9') config_object = CliConfiguration(module) elif is_netconf(module): config_object = NCConfiguration(module) result = None if config_object: result = config_object.run() module.exit_json(**result) if __name__ == "__main__": main()
gpl-3.0
ky822/nyu_ml_lectures
notebooks/figures/plot_digits_datasets.py
19
2750
# Taken from example in scikit-learn examples # Authors: Fabian Pedregosa <fabian.pedregosa@inria.fr> # Olivier Grisel <olivier.grisel@ensta.org> # Mathieu Blondel <mathieu@mblondel.org> # Gael Varoquaux # License: BSD 3 clause (C) INRIA 2011 import numpy as np import matplotlib.pyplot as plt from matplotlib import offsetbox from sklearn import (manifold, datasets, decomposition, ensemble, lda, random_projection) def digits_plot(): digits = datasets.load_digits(n_class=6) n_digits = 500 X = digits.data[:n_digits] y = digits.target[:n_digits] n_samples, n_features = X.shape n_neighbors = 30 def plot_embedding(X, title=None): x_min, x_max = np.min(X, 0), np.max(X, 0) X = (X - x_min) / (x_max - x_min) plt.figure() ax = plt.subplot(111) for i in range(X.shape[0]): plt.text(X[i, 0], X[i, 1], str(digits.target[i]), color=plt.cm.Set1(y[i] / 10.), fontdict={'weight': 'bold', 'size': 9}) if hasattr(offsetbox, 'AnnotationBbox'): # only print thumbnails with matplotlib > 1.0 shown_images = np.array([[1., 1.]]) # just something big for i in range(X.shape[0]): dist = np.sum((X[i] - shown_images) ** 2, 1) if np.min(dist) < 1e5: # don't show points that are too close # set a high threshold to basically turn this off continue shown_images = np.r_[shown_images, [X[i]]] imagebox = offsetbox.AnnotationBbox( offsetbox.OffsetImage(digits.images[i], cmap=plt.cm.gray_r), X[i]) ax.add_artist(imagebox) plt.xticks([]), plt.yticks([]) if title is not None: plt.title(title) n_img_per_row = 10 img = np.zeros((10 * n_img_per_row, 10 * n_img_per_row)) for i in range(n_img_per_row): ix = 10 * i + 1 for j in range(n_img_per_row): iy = 10 * j + 1 img[ix:ix + 8, iy:iy + 8] = X[i * n_img_per_row + j].reshape((8, 8)) plt.imshow(img, cmap=plt.cm.binary) plt.xticks([]) plt.yticks([]) plt.title('A selection from the 64-dimensional digits dataset') print("Computing PCA projection") pca = decomposition.PCA(n_components=2).fit(X) X_pca = pca.transform(X) plot_embedding(X_pca, "Principal Components 
projection of the digits") plt.figure() plt.matshow(pca.components_[0, :].reshape(8, 8), cmap="gray") plt.axis('off') plt.figure() plt.matshow(pca.components_[1, :].reshape(8, 8), cmap="gray") plt.axis('off') plt.show()
cc0-1.0
mancoast/CPythonPyc_test
cpython/244_test_base64.py
32
8394
import unittest from test import test_support import base64 class LegacyBase64TestCase(unittest.TestCase): def test_encodestring(self): eq = self.assertEqual eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n") eq(base64.encodestring("a"), "YQ==\n") eq(base64.encodestring("ab"), "YWI=\n") eq(base64.encodestring("abc"), "YWJj\n") eq(base64.encodestring(""), "") eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") def test_decodestring(self): eq = self.assertEqual eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org") eq(base64.decodestring("YQ==\n"), "a") eq(base64.decodestring("YWI=\n"), "ab") eq(base64.decodestring("YWJj\n"), "abc") eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.decodestring(''), '') def test_encode(self): eq = self.assertEqual from cStringIO import StringIO infp = StringIO('abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' '0123456789!@#0^&*();:<>,. 
[]{}') outfp = StringIO() base64.encode(infp, outfp) eq(outfp.getvalue(), 'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' 'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' 'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') def test_decode(self): from cStringIO import StringIO infp = StringIO('d3d3LnB5dGhvbi5vcmc=') outfp = StringIO() base64.decode(infp, outfp) self.assertEqual(outfp.getvalue(), 'www.python.org') class BaseXYTestCase(unittest.TestCase): def test_b64encode(self): eq = self.assertEqual # Test default alphabet eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.b64encode('\x00'), 'AA==') eq(base64.b64encode("a"), "YQ==") eq(base64.b64encode("ab"), "YWI=") eq(base64.b64encode("abc"), "YWJj") eq(base64.b64encode(""), "") eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with arbitrary alternative characters eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd') # Test standard alphabet eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.standard_b64encode("a"), "YQ==") eq(base64.standard_b64encode("ab"), "YWI=") eq(base64.standard_b64encode("abc"), "YWJj") eq(base64.standard_b64encode(""), "") eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. 
[]{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd') def test_b64decode(self): eq = self.assertEqual eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.b64decode('AA=='), '\x00') eq(base64.b64decode("YQ=="), "a") eq(base64.b64decode("YWI="), "ab") eq(base64.b64decode("YWJj"), "abc") eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.b64decode(''), '') # Test with arbitrary alternative characters eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d') # Test standard alphabet eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.standard_b64decode("YQ=="), "a") eq(base64.standard_b64decode("YWI="), "ab") eq(base64.standard_b64decode("YWJj"), "abc") eq(base64.standard_b64decode(""), "") eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. 
[]{}") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') def test_b64decode_error(self): self.assertRaises(TypeError, base64.b64decode, 'abc') def test_b32encode(self): eq = self.assertEqual eq(base64.b32encode(''), '') eq(base64.b32encode('\x00'), 'AA======') eq(base64.b32encode('a'), 'ME======') eq(base64.b32encode('ab'), 'MFRA====') eq(base64.b32encode('abc'), 'MFRGG===') eq(base64.b32encode('abcd'), 'MFRGGZA=') eq(base64.b32encode('abcde'), 'MFRGGZDF') def test_b32decode(self): eq = self.assertEqual eq(base64.b32decode(''), '') eq(base64.b32decode('AA======'), '\x00') eq(base64.b32decode('ME======'), 'a') eq(base64.b32decode('MFRA===='), 'ab') eq(base64.b32decode('MFRGG==='), 'abc') eq(base64.b32decode('MFRGGZA='), 'abcd') eq(base64.b32decode('MFRGGZDF'), 'abcde') def test_b32decode_casefold(self): eq = self.assertEqual eq(base64.b32decode('', True), '') eq(base64.b32decode('ME======', True), 'a') eq(base64.b32decode('MFRA====', True), 'ab') eq(base64.b32decode('MFRGG===', True), 'abc') eq(base64.b32decode('MFRGGZA=', True), 'abcd') eq(base64.b32decode('MFRGGZDF', True), 'abcde') # Lower cases eq(base64.b32decode('me======', True), 'a') eq(base64.b32decode('mfra====', True), 'ab') eq(base64.b32decode('mfrgg===', True), 'abc') eq(base64.b32decode('mfrggza=', True), 'abcd') eq(base64.b32decode('mfrggzdf', True), 'abcde') # Expected exceptions self.assertRaises(TypeError, base64.b32decode, 'me======') # Mapping zero and one eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe') eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe') eq(base64.b32decode('M1023456', map01='I'), 'b\x1d\xad\xf3\xbe') def test_b32decode_error(self): self.assertRaises(TypeError, base64.b32decode, 'abc') self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==') def test_b16encode(self): eq = self.assertEqual eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF') eq(base64.b16encode('\x00'), '00') def 
test_b16decode(self): eq = self.assertEqual eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef') eq(base64.b16decode('00'), '\x00') # Lower case is not allowed without a flag self.assertRaises(TypeError, base64.b16decode, '0102abcdef') # Case fold eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef') def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(LegacyBase64TestCase)) suite.addTest(unittest.makeSuite(BaseXYTestCase)) return suite def test_main(): test_support.run_suite(suite()) if __name__ == '__main__': unittest.main(defaultTest='suite')
gpl-3.0
eckucukoglu/arm-linux-gnueabihf
lib/python2.7/test/profilee.py
398
3041
""" Input for test_profile.py and test_cprofile.py. IMPORTANT: This stuff is touchy. If you modify anything above the test class you'll have to regenerate the stats by running the two test files. *ALL* NUMBERS in the expected output are relevant. If you change the formatting of pstats, please don't just regenerate the expected output without checking very carefully that not a single number has changed. """ import sys # In order to have reproducible time, we simulate a timer in the global # variable 'TICKS', which represents simulated time in milliseconds. # (We can't use a helper function increment the timer since it would be # included in the profile and would appear to consume all the time.) TICKS = 42000 def timer(): return TICKS def testfunc(): # 1 call # 1000 ticks total: 270 ticks local, 730 ticks in subfunctions global TICKS TICKS += 99 helper() # 300 helper() # 300 TICKS += 171 factorial(14) # 130 def factorial(n): # 23 calls total # 170 ticks total, 150 ticks local # 3 primitive calls, 130, 20 and 20 ticks total # including 116, 17, 17 ticks local global TICKS if n > 0: TICKS += n return mul(n, factorial(n-1)) else: TICKS += 11 return 1 def mul(a, b): # 20 calls # 1 tick, local global TICKS TICKS += 1 return a * b def helper(): # 2 calls # 300 ticks total: 20 ticks local, 260 ticks in subfunctions global TICKS TICKS += 1 helper1() # 30 TICKS += 2 helper1() # 30 TICKS += 6 helper2() # 50 TICKS += 3 helper2() # 50 TICKS += 2 helper2() # 50 TICKS += 5 helper2_indirect() # 70 TICKS += 1 def helper1(): # 4 calls # 30 ticks total: 29 ticks local, 1 tick in subfunctions global TICKS TICKS += 10 hasattr(C(), "foo") # 1 TICKS += 19 lst = [] lst.append(42) # 0 sys.exc_info() # 0 def helper2_indirect(): helper2() # 50 factorial(3) # 20 def helper2(): # 8 calls # 50 ticks local: 39 ticks local, 11 ticks in subfunctions global TICKS TICKS += 11 hasattr(C(), "bar") # 1 TICKS += 13 subhelper() # 10 TICKS += 15 def subhelper(): # 8 calls # 10 ticks total: 8 ticks local, 2 
ticks in subfunctions global TICKS TICKS += 2 for i in range(2): # 0 try: C().foo # 1 x 2 except AttributeError: TICKS += 3 # 3 x 2 class C: def __getattr__(self, name): # 28 calls # 1 tick, local global TICKS TICKS += 1 raise AttributeError
gpl-2.0
OsoTech/appengine-mapreduce
python/test/mapreduce/output_writers_test.py
1
19109
#!/usr/bin/env python # # Copyright 2011 Google Inc. All Rights Reserved. # Using opensource naming conventions, pylint: disable=g-bad-name from testlib import mox import unittest from google.appengine.api import apiproxy_stub_map from google.appengine.api import files from google.appengine.api.files import testutil as files_testutil from mapreduce import context from mapreduce import errors from mapreduce import model from mapreduce import output_writers from mapreduce import records from testlib import testutil # pylint: disable=g-import-not-at-top # TODO(user): Cleanup imports if/when cloudstorage becomes part of runtime. try: import cloudstorage enable_cloudstorage_tests = True except ImportError: enable_cloudstorage_tests = False FILE_WRITER_NAME = (output_writers.__name__ + "." + output_writers.FileOutputWriter.__name__) class FilePoolTest(unittest.TestCase): """Tests for _FilePool class.""" def setUp(self): self.file_service = files_testutil.TestFileServiceStub() apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap() apiproxy_stub_map.apiproxy.RegisterStub( "file", self.file_service) self.pool = output_writers._FilePool(flush_size_chars=10) def testAppendAndFlush(self): self.pool.append("foo", "a") self.assertEquals("", self.file_service.get_content("foo")) self.pool.append("foo", "b") self.assertEquals("", self.file_service.get_content("foo")) self.pool.flush() self.assertEquals("ab", self.file_service.get_content("foo")) def testAutoFlush(self): self.pool.append("foo", "a"*10) self.pool.append("foo", "b") self.assertEquals("a"*10, self.file_service.get_content("foo")) self.pool.flush() self.assertEquals("a"*10 + "b", self.file_service.get_content("foo")) def testAppendTooMuchData(self): """Test appending too much data.""" self.assertRaises(errors.Error, self.pool.append, "foo", "a"*1024*1024*2) def testAppendLargeData(self): """Test appending large amount of data. See b/6827293. 
""" self.pool.append("foo", "a"*output_writers._FILES_API_FLUSH_SIZE + "a") self.assertEquals("a"*output_writers._FILES_API_FLUSH_SIZE + "a", self.file_service.get_content("foo")) def testAppendMultipleFiles(self): self.pool.append("foo", "a") self.pool.append("bar", "b") self.pool.append("foo", "a") self.pool.append("bar", "b") self.assertEquals("", self.file_service.get_content("foo")) self.assertEquals("", self.file_service.get_content("bar")) self.pool.flush() self.assertEquals("aa", self.file_service.get_content("foo")) self.assertEquals("bb", self.file_service.get_content("bar")) class RecordsPoolTest(unittest.TestCase): """Tests for RecordsPool.""" def setUp(self): self.file_service = files_testutil.TestFileServiceStub() apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap() apiproxy_stub_map.apiproxy.RegisterStub( "file", self.file_service) self.pool = output_writers.RecordsPool("tempfile", flush_size_chars=30) def testAppendAndFlush(self): self.pool.append("a") self.assertEquals("", self.file_service.get_content("tempfile")) self.pool.append("b") self.assertEquals("", self.file_service.get_content("tempfile")) self.pool.flush() self.assertEquals( ["a", "b"], list(records.RecordsReader(files.open("tempfile", "r")))) class FileOutputWriterTest(testutil.HandlerTestBase): def create_mapper_spec(self, output_writer_spec=FILE_WRITER_NAME, params=None): params = params or {} mapper_spec = model.MapperSpec( "FooHandler", "mapreduce.input_readers.DatastoreInputReader", params, 10, output_writer_spec=output_writer_spec) return mapper_spec def create_mapreduce_state(self, params=None): mapreduce_spec = model.MapreduceSpec( "mapreduce0", "mapreduce0", self.create_mapper_spec(params=params).to_json()) mapreduce_state = model.MapreduceState.create_new("mapreduce0") mapreduce_state.mapreduce_spec = mapreduce_spec return mapreduce_state def testValidate_Passes(self): output_writers.FileOutputWriter.validate( self.create_mapper_spec(params={"filesystem": 
"blobstore"})) def testValidate_WriterNotSet(self): self.assertRaises( errors.BadWriterParamsError, output_writers.FileOutputWriter.validate, self.create_mapper_spec(output_writer_spec=None)) def testValidate_ShardingNone(self): output_writers.FileOutputWriter.validate(self.create_mapper_spec( params={"output_sharding": "NONE", "filesystem": "blobstore"})) def testValidate_ShardingInput(self): output_writers.FileOutputWriter.validate(self.create_mapper_spec( params={"output_sharding": "input", "filesystem": "blobstore"})) def testValidate_ShardingIncorrect(self): self.assertRaises( errors.BadWriterParamsError, output_writers.FileOutputWriter.validate, self.create_mapper_spec( params={"output_sharding": "foo", "filesystem": "blobstore"})) def testNoShardingSpecified(self): """Test default output_sharding is one shared output file.""" self.assertEqual( output_writers.FileOutputWriter.OUTPUT_SHARDING_NONE, output_writers.FileOutputWriter._get_output_sharding( mapper_spec=self.create_mapper_spec(params={"filesystem": "gs"}))) def testInitJob_ShardingNone(self): mapreduce_state = self.create_mapreduce_state( params={"output_sharding": "none", "filesystem": "blobstore"}) output_writers.FileOutputWriter.init_job(mapreduce_state) self.assertTrue(mapreduce_state.writer_state) filenames = output_writers.FileOutputWriter._State.from_json( mapreduce_state.writer_state).filenames self.assertEqual(1, len(filenames)) self.assertTrue(filenames[0].startswith("/blobstore/writable:")) def testInitJob_ShardingInput(self): mapreduce_state = self.create_mapreduce_state( params={"output_sharding": "input", "filesystem": "blobstore"}) output_writers.FileOutputWriter.init_job(mapreduce_state) self.assertTrue(mapreduce_state.writer_state) filenames = output_writers.FileOutputWriter._State.from_json( mapreduce_state.writer_state).filenames self.assertEqual(0, len(filenames)) def testInitJob_GoogleStorage(self): mapreduce_state = self.create_mapreduce_state( params={"filesystem": "gs", 
"gs_bucket_name": "foo", "gs_acl": "public"}) m = mox.Mox() m.StubOutWithMock(files.gs, "create") files.gs.create(mox.StrContains('/gs/foo'), mox.IgnoreArg(), acl="public") m.ReplayAll() output_writers.FileOutputWriter.init_job(mapreduce_state) m.UnsetStubs() m.VerifyAll() self.assertTrue(mapreduce_state.writer_state) def testGetFilenamesNoInput(self): """Tests get_filenames when no other writer's methods are called. Emulates the zero input case. Other tests on get_filenames see output_writers_end_to_end_test. """ self.assertEqual( [], output_writers.FileOutputWriter.get_filenames( self.create_mapreduce_state(params={"filesystem": "blobstore"}))) def testValidate_MissingBucketParam(self): self.assertRaises( errors.BadWriterParamsError, output_writers.FileOutputWriter.validate, self.create_mapper_spec( params={"filesystem": "gs", "bucket_name": "foo"})) class GoogleCloudStorageOutputTestBase(testutil.CloudStorageTestBase): """Base class for running output tests with Google Cloud Storage. Subclasses must define WRITER_NAME and may redefine NUM_SHARDS. """ # Defaults NUM_SHARDS = 10 def create_mapper_spec(self, output_params=None): """Create a Mapper specification using the GoogleCloudStorageOutputWriter. The specification generated uses a dummy handler and input reader. The number of shards is 10 (some number greater than 1). Args: output_params: parameters for the output writer. Returns: a model.MapperSpec with default settings and specified output_params. """ return model.MapperSpec( "DummyHandler", "DummyInputReader", {"output_writer": output_params or {}}, self.NUM_SHARDS, output_writer_spec=self.WRITER_NAME) def create_mapreduce_state(self, output_params=None): """Create a model.MapreduceState including MapreduceSpec and MapperSpec. Args: output_params: parameters for the output writer. Returns: a model.MapreduceSpec with default settings and specified output_params. 
""" mapreduce_spec = model.MapreduceSpec( "DummyMapReduceJobName", "DummyMapReduceJobId", self.create_mapper_spec(output_params=output_params).to_json()) mapreduce_state = model.MapreduceState.create_new("DummyMapReduceJobId") mapreduce_state.mapreduce_spec = mapreduce_spec mapreduce_state.put() return mapreduce_state def create_shard_state(self, shard_number): """Create a model.ShardState. Args: shard_number: The index for this shard (zero-indexed). Returns: a model.ShardState for the given shard. """ shard_state = model.ShardState.create_new("DummyMapReduceJobId", shard_number) shard_state.put() return shard_state class GoogleCloudStorageOutputWriterTest(GoogleCloudStorageOutputTestBase): WRITER_CLS = output_writers._GoogleCloudStorageOutputWriter WRITER_NAME = output_writers.__name__ + "." + WRITER_CLS.__name__ def testValidate_PassesBasic(self): self.WRITER_CLS.validate(self.create_mapper_spec( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"})) def testValidate_PassesAllOptions(self): self.WRITER_CLS.validate( self.create_mapper_spec( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test", self.WRITER_CLS.ACL_PARAM: "test-acl", self.WRITER_CLS.NAMING_FORMAT_PARAM: "fname", self.WRITER_CLS.CONTENT_TYPE_PARAM: "mime"})) def testValidate_NoBucket(self): self.assertRaises( errors.BadWriterParamsError, self.WRITER_CLS.validate, self.create_mapper_spec()) def testValidate_BadBucket(self): # Only test a single bad name to ensure that the validator is called. # Full testing of the validation is in cloudstorage component. 
self.assertRaises( errors.BadWriterParamsError, self.WRITER_CLS.validate, self.create_mapper_spec( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "#"})) def testValidate_BadNamingTemplate(self): # Send a naming format that includes an unknown subsitution: $bad self.assertRaises( errors.BadWriterParamsError, self.WRITER_CLS.validate, self.create_mapper_spec( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test", self.WRITER_CLS.NAMING_FORMAT_PARAM: "$bad"})) def testCreateWriters(self): mapreduce_state = self.create_mapreduce_state( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}) for shard_num in range(self.NUM_SHARDS): shard = self.create_shard_state(shard_num) writer = self.WRITER_CLS.create(mapreduce_state, shard) shard.result_status = model.ShardState.RESULT_SUCCESS writer.finalize(None, shard) shard.put() filenames = self.WRITER_CLS.get_filenames(mapreduce_state) # Verify we have the correct number of filenames self.assertEqual(self.NUM_SHARDS, len(filenames)) # Verify each has a unique filename self.assertEqual(self.NUM_SHARDS, len(set(filenames))) def testWriter(self): mapreduce_state = self.create_mapreduce_state( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}) shard_state = self.create_shard_state(0) ctx = context.Context(mapreduce_state.mapreduce_spec, shard_state) context.Context._set(ctx) writer = self.WRITER_CLS.create(mapreduce_state, shard_state) data = "fakedata" writer.write(data) writer.finalize(None, shard_state) filename = self.WRITER_CLS._get_filename(shard_state) self.assertNotEquals(None, filename) self.assertEqual(data, cloudstorage.open(filename).read()) def testCreateWritersWithRetries(self): mapreduce_state = self.create_mapreduce_state( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}) shard_state = self.create_shard_state(0) ctx = context.Context(mapreduce_state.mapreduce_spec, shard_state) context.Context._set(ctx) # Create the writer for the 1st attempt writer = 
self.WRITER_CLS.create(mapreduce_state, shard_state) filename = writer._filename writer.write("badData") # Test re-creating the writer for a retry shard_state.reset_for_retry() writer = self.WRITER_CLS.create(mapreduce_state, shard_state) new_filename = writer._filename good_data = "goodData" writer.write(good_data) writer.finalize(None, shard_state) # Verify the retry has a different filename self.assertNotEqual(filename, new_filename) # Verify the badData is not in the final file self.assertEqual(good_data, cloudstorage.open(new_filename).read()) def testWriterMetadata(self): test_acl = "test-acl" test_content_type = "test-mime" mapreduce_state = self.create_mapreduce_state( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test", self.WRITER_CLS.ACL_PARAM: test_acl, self.WRITER_CLS.CONTENT_TYPE_PARAM: test_content_type}) shard_state = self.create_shard_state(0) ctx = context.Context(mapreduce_state.mapreduce_spec, shard_state) context.Context._set(ctx) writer = self.WRITER_CLS.create(mapreduce_state, shard_state) writer.finalize(None, shard_state) filename = self.WRITER_CLS._get_filename( shard_state) file_stat = cloudstorage.stat(filename) self.assertEqual(test_content_type, file_stat.content_type) # TODO(user) Add support in the stub to retrieve acl metadata def testWriterSerialization(self): mapreduce_state = self.create_mapreduce_state( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}) shard_state = self.create_shard_state(0) ctx = context.Context(mapreduce_state.mapreduce_spec, shard_state) context.Context._set(ctx) writer = self.WRITER_CLS.create(mapreduce_state, shard_state) # data expliclity contains binary data data = "\"fake\"\tdatathatishardtoencode" writer.write(data) # Serialize/deserialize writer after some data written writer = self.WRITER_CLS.from_json(writer.to_json()) writer.write(data) # Serialize/deserialize writer after more data written writer = self.WRITER_CLS.from_json(writer.to_json()) writer.finalize(None, shard_state) # 
Serialize/deserialize writer after finalization writer = self.WRITER_CLS.from_json(writer.to_json()) self.assertRaises(IOError, writer.write, data) filename = self.WRITER_CLS._get_filename(shard_state) self.assertNotEquals(None, filename) self.assertEqual(data + data, cloudstorage.open(filename).read()) def testWriterCounters(self): mapreduce_state = self.create_mapreduce_state( output_params= {self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}) shard_state = self.create_shard_state(0) writer = self.WRITER_CLS.create(mapreduce_state, shard_state) ctx = context.Context(mapreduce_state.mapreduce_spec, shard_state) context.Context._set(ctx) # Write large amount of data to ensure measurable time passes during write. data = "d" * 1024 * 1024 * 10 writer.write(data) self.assertEqual(len(data), shard_state.counters_map.get( output_writers.COUNTER_IO_WRITE_BYTES)) self.assertTrue(shard_state.counters_map.get( output_writers.COUNTER_IO_WRITE_MSEC) > 0) def testGetFilenamesNoInput(self): """Tests get_filenames when no other writer's methods are called. Emulates the zero input case. Other tests on get_filenames see output_writers_end_to_end_test. """ mapreduce_state = self.create_mapreduce_state( output_params={self.WRITER_CLS.BUCKET_NAME_PARAM: "test"}) self.assertEqual([], self.WRITER_CLS.get_filenames(mapreduce_state)) class GoogleCloudStorageRecordOutputWriterTest( GoogleCloudStorageOutputTestBase): BUCKET_NAME = "test" WRITER_CLS = output_writers._GoogleCloudStorageRecordOutputWriter WRITER_NAME = output_writers.__name__ + "." + WRITER_CLS.__name__ def create_mapreduce_state(self, output_params=None): """Create a model.MapreduceState including MapreduceSpec and MapperSpec. Args: output_params: parameters for the output writer. Returns: a model.MapreduceSpec with default settings and specified output_params. 
""" all_params = {self.WRITER_CLS.BUCKET_NAME_PARAM: self.BUCKET_NAME} all_params.update(output_params or {}) return super(GoogleCloudStorageRecordOutputWriterTest, self).create_mapreduce_state(all_params) def setupWriter(self): """Create an Google Cloud Storage LevelDB record output writer. Returns: a model.MapreduceSpec. """ self.mapreduce_state = self.create_mapreduce_state() self.shard_state = self.create_shard_state(0) self.writer = self.WRITER_CLS.create(self.mapreduce_state, self.shard_state) self.ctx = context.Context(self.mapreduce_state.mapreduce_spec, self.shard_state) context.Context._set(self.ctx) def testSmoke(self): data_size = 10 self.setupWriter() # Serialize un-used writer self.writer = self.WRITER_CLS.from_json_str(self.writer.to_json_str()) # Write single record self.writer.write("d" * data_size) self.assertEqual(data_size + records._HEADER_LENGTH, self.shard_state.counters_map.get( output_writers.COUNTER_IO_WRITE_BYTES)) # Serialize self.writer = self.WRITER_CLS.from_json_str(self.writer.to_json_str()) # A full (padded) block should have been flushed self.assertEqual(records._BLOCK_SIZE, self.shard_state.counters_map.get( output_writers.COUNTER_IO_WRITE_BYTES)) # Writer a large record. self.writer.write("d" * records._BLOCK_SIZE) self.assertEqual(records._BLOCK_SIZE + records._BLOCK_SIZE + 2 * records._HEADER_LENGTH, self.shard_state.counters_map.get( output_writers.COUNTER_IO_WRITE_BYTES)) self.writer.finalize(self.ctx, self.shard_state) self.writer = self.WRITER_CLS.from_json_str(self.writer.to_json_str()) if __name__ == "__main__": unittest.main()
apache-2.0
BubuLK/sfepy
sfepy/discrete/common/poly_spaces.py
3
6145
from __future__ import absolute_import import numpy as nm from sfepy.base.base import load_classes, Struct from sfepy import get_paths def transform_basis(transform, bf): """ Transform a basis `bf` using `transform` array of matrices. """ if bf.ndim == 3: nbf = nm.einsum('cij,qdj->cqdi', transform, bf, order='C') elif bf.ndim == 4: if bf.shape[0] == 1: nbf = nm.einsum('cij,qdj->cqdi', transform, bf[0], order='C') else: nbf = nm.einsum('cij,cqdj->cqdi', transform, bf, order='C') # Note: the 2nd derivatives are not supported here. # Workaround for NumPy 1.14.0 - order is ignored(?) nbf = nm.ascontiguousarray(nbf) return nbf class PolySpace(Struct): """Abstract polynomial space class.""" _all = None keys = { (0, 1) : 'simplex', (1, 2) : 'simplex', (2, 3) : 'simplex', (3, 4) : 'simplex', (2, 4) : 'tensor_product', (3, 8) : 'tensor_product', } @staticmethod def any_from_args(name, geometry, order, base='lagrange', force_bubble=False): """ Construct a particular polynomial space classes according to the arguments passed in. """ if name is None: name = PolySpace.suggest_name(geometry, order, base, force_bubble) if PolySpace._all is None: ps_files = get_paths('sfepy/discrete/fem/poly_spaces.py') ps_files += get_paths('sfepy/discrete/dg/poly_spaces.py') PolySpace._all = load_classes(ps_files, [PolySpace], ignore_errors=True, name_attr='name') table = PolySpace._all key = '%s_%s' % (base, PolySpace.keys[(geometry.dim, geometry.n_vertex)]) if (geometry.name == '1_2') and (key not in table): key = '%s_%s' % (base, 'tensor_product') if force_bubble: key += '_bubble' return table[key](name, geometry, order) @staticmethod def suggest_name(geometry, order, base='lagrange', force_bubble=False): """ Suggest the polynomial space name given its constructor parameters. 
""" aux = geometry.get_interpolation_name()[:-1] if force_bubble: return aux + ('%dB' % order) else: return aux + ('%d' % order) def __init__(self, name, geometry, order): self.name = name self.geometry = geometry self.order = order self.bbox = nm.vstack((geometry.coors.min(0), geometry.coors.max(0))) def eval_base(self, coors, diff=0, ori=None, force_axis=False, transform=None, suppress_errors=False, eps=1e-15): """ Evaluate the basis or its first or second derivatives in points given by coordinates. The real work is done in _eval_base() implemented in subclasses. Note that the second derivative code is a work-in-progress and only `coors` and `transform` arguments are used. Parameters ---------- coors : array_like The coordinates of points where the basis is evaluated. See Notes. diff : 0, 1 or 2 If nonzero, return the given derivative. ori : array_like, optional Optional orientation of element facets for per element basis. force_axis : bool If True, force the resulting array shape to have one more axis even when `ori` is None. transform : array_like, optional The basis transform array. suppress_errors : bool If True, do not report points outside the reference domain. eps : float Accuracy for comparing coordinates. Returns ------- base : array The basis (shape (n_coor, 1, n_base)) or its first derivative (shape (n_coor, dim, n_base)) or its second derivative (shape (n_coor, dim, dim, n_base)) evaluated in the given points. An additional axis is pre-pended of length n_cell, if `ori` is given, or of length 1, if `force_axis` is True. Notes ----- If coors.ndim == 3, several point sets are assumed, with equal number of points in each of them. This is the case, for example, of the values of the volume base functions on the element facets. The indexing (of bf_b(g)) is then (ifa,iqp,:,n_ep), so that the facet can be set in C using FMF_SetCell. """ coors = nm.asarray(coors) if not coors.ndim in (2, 3): raise ValueError('coordinates must have 2 or 3 dimensions! 
(%d)' % coors.ndim) if coors.shape[-1] != self.geometry.dim: raise ValueError('PolySpace geometry dimension %d does not agree' ' with quadrature coordinates dimension %d!' % (self.geometry.dim, coors.shape[-1])) if (coors.ndim == 2): base = self._eval_base(coors, diff=diff, ori=ori, suppress_errors=suppress_errors, eps=eps) if (base.ndim == 3) and force_axis: base = base[None, ...] if not base.flags['C_CONTIGUOUS']: base = nm.ascontiguousarray(base) else: # Several point sets. if diff: bdim = self.geometry.dim else: bdim = 1 base = nm.empty((coors.shape[0], coors.shape[1], bdim, self.n_nod), dtype=nm.float64) for ii, _coors in enumerate(coors): base[ii] = self._eval_base(_coors, diff=diff, ori=ori, suppress_errors=suppress_errors, eps=eps) if transform is not None: base = transform_basis(transform, base) return base
bsd-3-clause
EzyInsights/Diamond
src/collectors/postfix/postfix.py
32
3690
# coding=utf-8 """ Collect stats from postfix-stats. postfix-stats is a simple threaded stats aggregator for Postfix. When running as a syslog destination, it can be used to get realtime cumulative stats. #### Dependencies * socket * json (or simplejson) * [postfix-stats](https://github.com/disqus/postfix-stats) """ import socket import sys try: import json except ImportError: import simplejson as json import diamond.collector from diamond.collector import str_to_bool if sys.version_info < (2, 6): from string import maketrans DOTS_TO_UNDERS = maketrans('.', '_') else: DOTS_TO_UNDERS = {ord(u'.'): u'_'} class PostfixCollector(diamond.collector.Collector): def get_default_config_help(self): config_help = super(PostfixCollector, self).get_default_config_help() config_help.update({ 'host': 'Hostname to connect to', 'port': 'Port to connect to', 'include_clients': 'Include client connection stats', }) return config_help def get_default_config(self): """ Returns the default collector settings """ config = super(PostfixCollector, self).get_default_config() config.update({ 'path': 'postfix', 'host': 'localhost', 'port': 7777, 'include_clients': True, }) return config def get_json(self): json_string = '' address = (self.config['host'], int(self.config['port'])) s = None try: try: s = socket.create_connection(address, timeout=1) s.sendall('stats\n') while 1: data = s.recv(4096) if not data: break json_string += data except socket.error: self.log.exception("Error talking to postfix-stats") return '{}' finally: if s: s.close() return json_string or '{}' def get_data(self): json_string = self.get_json() try: data = json.loads(json_string) except (ValueError, TypeError): self.log.exception("Error parsing json from postfix-stats") return None return data def collect(self): data = self.get_data() if not data: return if str_to_bool(self.config['include_clients']) and u'clients' in data: for client, value in data['clients'].iteritems(): # translate dots to underscores in client 
names metric = u'.'.join(['clients', client.translate(DOTS_TO_UNDERS)]) dvalue = self.derivative(metric, value) self.publish(metric, dvalue) for action in (u'in', u'recv', u'send'): if action not in data: continue for sect, stats in data[action].iteritems(): for status, value in stats.iteritems(): metric = '.'.join([action, sect, status.translate(DOTS_TO_UNDERS)]) dvalue = self.derivative(metric, value) self.publish(metric, dvalue) if u'local' in data: for key, value in data[u'local'].iteritems(): metric = '.'.join(['local', key]) dvalue = self.derivative(metric, value) self.publish(metric, dvalue)
mit
BBN-Q/pyqgl2
src/python/attic/check_qbit.py
1
36045
#!/usr/bin/env python3 # Copyright 2015 by Raytheon BBN Technologies Corp. All Rights Reserved. import ast import builtins from ast import NodeVisitor from copy import deepcopy # For testing only if __name__ == '__main__': import os import sys # Find the directory that this executable lives in; # munge the path to look within the parent module # DIRNAME = os.path.normpath( os.path.abspath(os.path.dirname(sys.argv[0]) or '.')) sys.path.append(os.path.normpath(os.path.join(DIRNAME, '..'))) import pyqgl2.importer from pyqgl2.ast_util import NodeError from pyqgl2.ast_util import NodeTransformerWithFname from pyqgl2.ast_util import NodeVisitorWithFname from pyqgl2.check_symtab import CheckSymtab from pyqgl2.check_waveforms import CheckWaveforms from pyqgl2.debugmsg import DebugMsg from pyqgl2.lang import QGL2 class FuncParam(object): def __init__(self, name): self.name = name self.value = None class QuantumFuncParam(FuncParam): pass class ClassicalFuncParam(FuncParam): pass class FindTypes(NodeVisitor): """ Mechanism for finding the type bindings of variables within the context of a function invocation. Because Python functions do not have (enforced) type signatures, the type bindings of two different invocations of the same function may be different. For example, if we have function: def foo(x): y = x return y Then if we call "foo(12)" then the local variable y within foo will have a type of int, but if we call "foo('hello')" then y will have a type of str. For this reason, we do not try to do type inference on functions when they are initially parsed, but instead defer this until we know how they will be called. For the same reason, the type bindings are not stored with the function definitions, but instead must be computed for each call (or stored in some other way). 
Because Python functions may be difficult/impossible to analyze with respect to type (and we might not have access to their source), there are many circumstances in which we cannot infer anything useful about variables types. Fortunately, we only really care about whether variables are quantum, classical, or have an unknown type (which we assume is some sort of classical type). Along the way, we also check whether type declarations are violated. For example, if we had: def bar(x: qbit): pass # do something and we invoked this as "bar(13)", this would contradict the declared type because 13 is not a qbit. We assume that variables do not change types (at least not with respect to classical vs quantum) so the following code would be considered an error: x = Qubit(label="1") # x is a reference to a qbit x = 14 # reassigning x to a classical value--error! We also treat the reassignment of references to quantum values as errors: x = Qubit(label="1") # x is a reference to a qbit x = Qubit(label="2") # reassignment of x--error! There are several ways that variables come into existance: explicit assignment, implicit assignment (keyword arguments), as statement variable (loop variables, exception variables, or with variables). TODO: we do not handle "local" and "global" statements yet. Each variable has one of the following types: 'classical', 'qbit', or 'unknown'. There will be more types in the future. """ def __init__(self, importer): self.importer = importer # dictionaries of all local symbols (parameters or locally-created # variables). 
# self.parameter_names = dict() self.local_names = dict() @staticmethod def find_lscope(importer, func_def, call=None, call_scope=None): worker = FindTypes(importer) val_bindings, type_bindings = worker.process_params( func_def, call=call, call_scope=call_scope) worker.parameter_names = type_bindings worker.visit(func_def) name = func_def.name print('%s PARAM: %s' % (name, str(worker.parameter_names))) print('%s LOCAL: %s' % (name, str(worker.local_names))) return worker def process_params(self, func_def, call=None, call_scope=None): # The formal parameters are an AST object. # The way they are represented is a little awkward; # all parameters (positional and keyword) are in a # positional list (because Python can handle keyword # parameters as positional parameters) and then the # keyword default values are in a separate positional # list.) type_bindings = dict() val_bindings = dict() all_arg_names = list() # First, pretend all the parameters are positional # for arg in func_def.args.args: arg_name = arg.arg arg_type = arg.annotation if arg_type and isinstance(arg_type, ast.Name): arg_type_name = arg_type.id else: arg_type_name = 'unknown' if arg_name in all_arg_names: NodeError.error_msg(arg, 'repeated parameter name \'%s\'' % arg_name) # if arg_type_name not in [QGL2.CLASSICAL, QGL2.QBIT, 'unknown', QGL2.CONTROL, QGL2.PULSE, QGL2.SEQUENCE, QGL2.QBIT_LIST]: if arg_type_name not in [QGL2.CLASSICAL, QGL2.QBIT, 'unknown']: NodeError.warning_msg(arg, ('parameter type \'%s\' is not supported' % arg_type_name)) all_arg_names.append(arg_name) type_bindings[arg_name] = arg_type_name val_bindings[arg_name] = None # Then process any defaults that were provided # default_vals = func_def.args.defaults if default_vals: default_names = all_arg_names[:-len(default_vals)] for ind in range(len(default_vals)): val_bindings[default_names[ind]] = default_vals[ind] # TODO: we need to make sure that the default # values actually match the declared type, if any # # NOTE that the 
default value is an AST, which could be # almost any expression. Many expressions are going to # be a headache for us, so maybe we should disallow # many of them. # Now replace the default values with whatever is in the # actuals, if any actuals are provided. if call: seen_args = set() print('CALL %s' % ast.dump(call)) if call.args: for ind in range(len(call.args)): seen_args.add(all_arg_names[ind]) val_bindings[all_arg_names[ind]] = call.args[ind] # TODO: If there were fewer args than required, then # gripe. TODO: if there were unexpected arguments, gripe for kwarg in call.keywords: name = kwarg.arg if name in seen_args: NodeError(call, 'more than one value for parameter \'%s\'' % name) seen_args.add(name) val_bindings[name] = kwarg.value print('CALL %s' % str(val_bindings)) # TODO: if provided a surrounding scope and a call, then try to # infer types from actual parameters. For example, if one of # the actual parameters is 'x', and we know the type of 'x', then # propogate it. # # Right now we don't try to statically determine values. # # # TODO: this is incomplete if call and call_scope: # Create a dictionary of known types from the given # call_scope. Note that we are only interested in # known types, so omit any "unknown" types # scope_types = dict() for name in call_scope.parameter_names: name_type = call_scope.parameter_names[name] if name_type != 'unknown': scope_types[name] = name_type for name in call_scope.local_names: name_type = call_scope.local_names[name] if name_type != 'unknown': scope_types[name] = name_type # Now look at each actual parameter, and try # to infer what type it has. If it's a number or # string, it's classical. If it's the value of # a variable, look in scope_types to see what we # know about that variable (if anything). If it's # a method call, look at the definition of the # method to see whether it has a declared type. 
# for name in type_bindings: actual_val = val_bindings[name] if isinstance(actual_val, ast.Num): type_bindings[name] = QGL2.CLASSICAL elif isinstance(actual_val, ast.Str): type_bindings[name] = QGL2.CLASSICAL elif isinstance(actual_val, ast.NameConstant): type_bindings[name] = QGL2.CLASSICAL elif isinstance(actual_val, ast.Name): if actual_val.id in scope_types: type_bindings[name] = scope_types[actual_val.id] elif isinstance(actual_val, ast.Call): called_func_name = pyqgl2.importer.collapse_name( actual_val.func) called_func = self.importer.resolve_sym( actual_val.qgl_fname, func_name) if not called_func: NodeError.warning_msg(value, 'function [%s] not found' % called_func_name) continue elif called_func.returns: rtype = called_func_def.returns if isinstance(rtype, ast.Name): rtype_name = rtype.id # if rtype_name not in [QGL2.CLASSICAL, QGL2.QBIT, 'unknown', QGL2.SEQUENCE, QGL2.PULSE, QGL2.CONTROL, QGL2.QBIT_LIST]: if rtype_name not in [QGL2.CLASSICAL, QGL2.QBIT, 'unknown']: NodeError.warning_msg(arg, ('parameter type \'%s\' is not supported' % arg_type_name)) type_bindings[name] = rtype_name return val_bindings, type_bindings def name_is_in_lscope(self, name): """ Return True if the name has been bound to the local scope, False otherwise. The name is assumed to be a string that may represent a lexically-local variable: no indirection permitted. TODO: this is not checked. This only considers the local scope, and not the surrounding scopes (class, module, global). """ return (name in self.parameter_names) or (name in self.local_names) def add_type_binding(self, node, name, name_type): """ Add a binding between a name and a type, in the local context. Gripe and do nothing if there is already a binding for this name in either the parameter or local scope, and it disagrees with the requested binding. 
The node parameter is used only to generate error messages that can be traced back to the original code, since the node contains the file and line number of the code prior to any transformation """ if name in self.parameter_names: old_type = self.parameter_names[name] if old_type != name_type: NodeError.error_msg(node, ('parameter type changed %s -> %s' % (old_type, name_type))) elif name in self.local_names: old_type = self.local_names[name] if old_type != name_type: NodeError.error_msg(node, 'type changed %s -> %s' % (old_type, name_type)) else: NodeError.diag_msg(node, 'add type %s -> %s' % (name, name_type)) self.local_names[name] = name_type def is_qbit_parameter(self, name): if name not in self.parameter_names: return False if self.parameter_names[name] != QGL2.QBIT: return False else: return True def is_qbit_local(self, name): if name not in self.local_names: return False elif self.local_names[name] != QGL2.QBIT: return False return True def is_qbit(self, name): return self.is_qbit_parameter(name) or self.is_qbit_local(name) def visit_body(self, body): """ Visit all the items in a "body", which is a list of statements """ for stmnt in body: self.visit(stmnt) def visit_Assign(self, node): # FIXME: can't handle nested tuples properly # For now we're not even going to try. if not isinstance(node.targets[0], ast.Name): NodeError.warning_msg(node, 'tuple returns not supported yet') self.generic_visit(node) return node target = node.targets[0] print('VA target0: %s' % ast.dump(target)) value = node.value name = target.id if not isinstance(target, ast.Name): # should this be considered an error? # it's not an error in Python, but it's hard for us to handle. 
return node if self.is_qbit_parameter(name): msg = 'reassignment of qbit parameter \'%s\' forbidden' % name NodeError.error_msg(node, msg) return node if self.is_qbit_local(name): msg = 'reassignment of qbit \'%s\' forbidden' % name NodeError.error_msg(node, msg) return node if isinstance(value, ast.Name): if not self.name_is_in_lscope(value.id): NodeError.error_msg(node, 'unknown symbol \'%s\'' % value.id) return node if self.is_qbit_parameter(name): self.warning_msg(node, 'aliasing qbit parameter \'%s\' as \'%s\'' % (value.id, name)) self.add_type_binding(value, name, QGL2.QBIT) target.qgl_is_qbit = True elif self.is_qbit_local(name): self.warning_msg(node, 'aliasing local qbit \'%s\' as \'%s\'' % (value.id, name)) self.add_type_binding(value, name, QGL2.QBIT) target.qgl_is_qbit = True else: self.add_type_binding(value, name, QGL2.CLASSICAL) target.qgl_is_qbit = False elif isinstance(value, ast.Call): func_name = pyqgl2.importer.collapse_name(value.func) func_def = self.importer.resolve_sym(value.qgl_fname, func_name) # FIXME: for debugging only! new_scope = FindTypes.find_lscope( self.importer, func_def, value, self) # FIXME: end debugging # If we can't find the function definition, or it's not declared # to be QGL, then we can't handle it. Return immediately. # if not func_def: NodeError.warning_msg( value, 'function [%s] not found' % func_name) self.add_type_binding(value, name, 'unknown') return node if func_def.returns: rtype = func_def.returns if (isinstance(rtype, ast.Name) and rtype.id == QGL2.QBIT): # Not sure what happens if we get here: we might # have a wandering variable that we know is a qbit, # but we never know which one. # DebugMsg.log('extending local (%s)' % name) self.add_type_binding(value, name, QGL2.QBIT) target.qgl_is_qbit = True if not func_def.qgl_func: # TODO: this seems bogus. 
We should be able to call # out to non-QGL functions # NodeError.error_msg( value, 'function [%s] not declared to be QGL2' % func_name) return node # When we're figuring out whether something is a call to # the Qbit assignment function, we look at the name of the # function as it is defined (i.e, as func_def), not as it # is imported (i.e., as func_name). # # This makes the assumption that ANYTHING named 'Qubit' or 'QubitFactory' # is a Qbit assignment function, which is lame and should # be more carefully parameterized. Things to think about: # looking more deeply at its signature and making certain # that it looks like the 'right' function and not something # someone mistakenly named 'Qubit' in an unrelated context. # if isinstance(value, ast.Call) and (func_def.name == QGL2.QBIT_ALLOC or func_def.name == QGL2.QBIT_ALLOC2): self.add_type_binding(value, name, QGL2.QBIT) return node def visit_For(self, node): """ Discover loop variables. TODO: this is incomplete; we just assume that loop variables are all classical. We don't attempt to infer anything about the iterator. 
""" for subnode in ast.walk(node.target): if isinstance(subnode, ast.Attribute): # This is a fatal error and we don't want to confuse # ourselves by trying to process the ast.Name # nodes beneath # name_text = pyqgl2.importer.collapse_name(subnode) NodeError.fatal_msg(subnode, ('loop var [%s] is not local' % name_text)) elif isinstance(subnode, ast.Name): name = subnode.id # Warn the user if they're doing something that's # likely to provoke an error # if self.name_is_in_lscope(name): NodeError.warning_msg(subnode, ('loop var [%s] hides sym in outer scope' % name)) DebugMsg.log('FOR (%s)' % name) self.add_type_binding(subnode, name, QGL2.CLASSICAL) self.visit_body(node.body) self.visit_body(node.orelse) def visit_ExceptHandler(self, node): name = node.name if self.name_is_in_lscope(name): NodeError.warn_msg(node, ('exception var [%s] hides sym in outer scope' % name)) # assume all exceptions are classical self.add_type_binding(subnode, subnode.id, QGL2.CLASSICAL) pass def visit_With(self, node): """ TODO: this is incomplete; we just assume that with-as variables are all classical. We don't attempt to infer anything about their type. 
(This is likely to be true in most cases, however) """ for item in node.items: if not item.optional_vars: continue for subnode in ast.walk(item.optional_vars): if isinstance(subnode, ast.Attribute): # This is a fatal error and we don't want to confuse # ourselves by trying to process the ast.Name # nodes beneath # name_text = pyqgl2.importer.collapse_name(subnode) NodeError.fatal_msg(subnode, ('with-as var [%s] is not local' % name_text)) elif isinstance(subnode, ast.Name): name = subnode.id DebugMsg.log('GOT WITH (%s)' % name) # Warn the user if they're doing something that's # likely to provoke an error # if self.name_is_in_lscope(name): NodeError.warn_msg(subnode, ('with-as var [%s] hides sym in outer scope' % name)) self.add_type_binding(subnode, subnode.id, QGL2.CLASSICAL) self.visit_body(node.body) class CheckType(NodeTransformerWithFname): def __init__(self, fname, importer=None): super(CheckType, self).__init__() # for each qbit, track where it is created # # the key is the qbit number, and the val is the name # and where it's created # self.qbit_origins = dict() # a list of scope tuples: (name, qbit?, context) # # We begin with the global scope, initially empty # self.scope = list(list()) self.local = list(list()) self.func_defs = dict() self.func_level = 0 self.waveforms = dict() # Reference to the main function, if any # self.qglmain = None self.qgl_call_stack = list() self.importer = importer def _push_scope(self, qbit_scope): self.scope.append(qbit_scope) def _pop_scope(self): self.scope = self.scope[:-1] def _qbit_scope(self): return self.scope[-1] def _extend_scope(self, name): self.scope[-1].append(name) def _push_local(self, qbit_local): self.local.append(qbit_local) def _pop_local(self): self.local = self.local[:-1] def _qbit_local(self): return self.local[-1] def _extend_local(self, name): self.local[-1].append(name) def assign_simple(self, node): target = node.targets[0] value = node.value DebugMsg.log('XX qbit_scope %s %s' % 
(str(self._qbit_scope()), ast.dump(node))) if not isinstance(target, ast.Name): return node if target.id in self._qbit_local(): msg = 'reassignment of qbit \'%s\' forbidden' % target.id self.error_msg(node, msg) return node if (target.id + ':qbit') in self._qbit_scope(): msg = 'reassignment of qbit parameter \'%s\' forbidden' % target.id self.error_msg(node, msg) return node DebugMsg.log('XX qbit_scope %s %s' % (str(self._qbit_scope()), ast.dump(node))) if isinstance(value, ast.Name): # print('CHECKING %s' % str(self._qbit_scope())) if (value.id + ':qbit') in self._qbit_scope(): self.warning_msg(node, 'aliasing qbit parameter \'%s\' as \'%s\'' % (value.id, target.id)) self._extend_local(target.id) elif value.id in self._qbit_local(): self.warning_msg(node, 'aliasing local qbit \'%s\' as \'%s\'' % (value.id, target.id)) self._extend_local(target.id) elif isinstance(value, ast.Call): func_name = pyqgl2.importer.collapse_name(value.func) func_def = self.importer.resolve_sym(value.qgl_fname, func_name) # If we can't find the function definition, check to see # whether it's a builtin. If we can't find it, or it's # not declared to be QGL, then we can't check it. # Return immediately. # # TODO the way we check whether a function is a builtin # is a non-portable hack. # # The warning about the function not being defined "locally" # is annoying because it will occur for any function imported # from a module in the system libraries, because we don't # import these right now. This needs a better approach. # if not func_def: """ # This error is no longer valid; it's not an error # if it's not a builtin # if func_name not in __builtins__: NodeError.error_msg( value, 'function [%s] not defined' % func_name) """ return node if func_def.returns: rtype = func_def.returns if (isinstance(rtype, ast.Name) and rtype.id == QGL2.QBIT): # Not sure what happens if we get here: we might # have a wandering variable that we know is a qbit, # but we never know which one. 
# DebugMsg.log('XX EXTENDING LOCAL (%s)' % target.id) self._extend_local(target.id) target.qgl_is_qbit = True if not func_def.qgl_func: # TODO: this seems bogus. We should be able to call # out to non-QGL functions # NodeError.error_msg( value, 'function [%s] not declared to be QGL2' % func_name) return node DebugMsg.log('NNN lookup [%s] got %s' % (func_name, str(func_def))) DebugMsg.log('NNN FuncDef %s' % ast.dump(func_def)) DebugMsg.log('NNN CALL [%s]' % func_name) # When we're figuring out whether something is a call to # the Qbit assignment function, we look at the name of the # function as it is defined (i.e, as func_def), not as it # is imported (i.e., as func_name). # # This makes the assumption that ANYTHING named 'Qubit' # or 'QubitFactory' # is a Qbit assignment function, which is lame and should # be more carefully parameterized. Things to think about: # looking more deeply at its signature and making certain # that it looks like the 'right' function and not something # someone mistakenly named 'Qubit' in an unrelated context. # if (isinstance(value, ast.Call) and (func_def.name == QGL2.QBIT_ALLOC or func_def.name == QGL2.QBIT_ALLOC2)): self._extend_local(target.id) DebugMsg.log('XX EXTENDED to include %s %s' % (target.id, str(self._qbit_local()))) return node def visit_Assign(self, node): # We only do singleton assignments, not tuples, # and not expressions # # TODO: fix this to handle arbitrary assignments if isinstance(node.targets[0], ast.Name): self.assign_simple(node) self.generic_visit(node) return node def visit_FunctionDef(self, node): # print('>>> %s' % ast.dump(node)) # Initialize the called functions list for this # definition, and then push this context onto # the call stack. The call stack is a stack of # call lists, with the top of the stack being # the current function at the top and bottom # of each function definition. 
# # We do this for all functions, not just QGL functions, # because we might want to be able to analyze non-QGL # functions # self.qgl_call_stack.append(list()) if self.func_level > 0: self.error_msg(node, '%s functions cannot be nested' % QGL2.QDECL) # So far so good: now actually begin to process this node if hasattr(node, 'qgl_args'): decls = node.qgl_args else: decls = list() # diagnostic only self.diag_msg( node, '%s declares qbits %s' % (node.name, str(decls))) self._push_scope(decls) self._push_local(list()) self.func_level += 1 self.generic_visit(node) self.func_level -= 1 # make a copy of this node and its qbit scope node.qgl_call_list = self.qgl_call_stack.pop() # print('DECLS: %s %s' % (node.name, str(decls))) self.func_defs[node.name] = (decls, deepcopy(node)) self._pop_scope() self._pop_local() self.diag_msg(node, 'call list %s: %s' % (node.name, str(', '.join([ pyqgl2.importer.collapse_name(call.func) for call in node.qgl_call_list])))) return node def visit_Call(self, node): # We can only check functions referenced by name, not arbitrary # expressions that return a function # # The way that we test whether something is referenced purely # by name is clunky: we try to collapse reference the AST for # the function reference back to a name, and if that works, # then we think it's a name. 
# if not pyqgl2.importer.collapse_name(node.func): self.error_msg(node, 'function not referenced by name') return node node.qgl_scope = self._qbit_scope()[:] node.qgl_local = self._qbit_local()[:] self.qgl_call_stack[-1].append(node) return node class CompileQGLFunctions(ast.NodeTransformer): LEVEL = 0 def __init__(self, *args, **kwargs): super(CompileQGLFunctions, self).__init__(*args, **kwargs) self.concur_finder = FindConcurBlocks() def visit_FunctionDef(self, node): if self.LEVEL > 0: self.error_msg(node, 'QGL mode functions cannot be nested') self.LEVEL += 1 # check for nested qglfunc functions self.generic_visit(node) self.LEVEL -= 1 # First, find and check all the concur blocks body = node.body for ind in range(len(body)): stmnt = body[ind] body[ind] = self.concur_finder.visit(stmnt) class FindWaveforms(ast.NodeTransformer): def __init__(self, *args, **kwargs): super(FindWaveforms, self).__init__(*args, **kwargs) self.seq = list() self.namespace = None # must be set later def set_namespace(self, namespace): self.namespace = namespace def visit_Call(self, node): # This is just a sketch # find the name of the function being called, # and then resolve it in the context of the local # namespace, and see if it returns a pulse # localname = node.func.id localfilename = node.qgl_fname if self.namespace.returns_pulse(localfilename, localname): DebugMsg.log('GOT PULSE [%s:%s]' % (localfilename, localname)) return node class FindConcurBlocks(ast.NodeTransformer): LEVEL = 0 def __init__(self, *args, **kwargs): super(FindConcurBlocks, self).__init__(*args, **kwargs) self.concur_stmnts = set() self.qbit_sets = dict() def visit_With(self, node): item = node.items[0] DebugMsg.log('WITH %s' % ast.dump(node)) if (not isinstance(item.context_expr, ast.Name) or (item.context_expr.id != QGL2.QCONCUR)): return node if self.LEVEL > 0: # need to fix this so we can squash multiple levels of concurs self.error_msg(node, 'nested concur blocks are not supported') self.LEVEL += 1 body 
= node.body for ind in range(len(body)): stmnt = body[ind] find_ref = FindQbitReferences() find_ref.generic_visit(stmnt) self.qbit_sets[ind] = find_ref.qbit_refs self.visit(stmnt) self.LEVEL -= 1 # check_conflicts will halt the program if it detects an error # qbits_referenced = self.check_conflicts(node) DebugMsg.log('qbits in concur block (line: %d): %s' % ( node.lineno, str(qbits_referenced))) """ # TO BE REPLACED for ind in range(len(body)): stmnt = body[ind] find_waveforms = FindWaveforms() find_waveforms.generic_visit(stmnt) for waveform in find_waveforms.seq: print('concur %d: WAVEFORM: %s' % (stmnt.lineno, waveform)) """ return node def check_conflicts(self, node): all_seen = set() for refs in self.qbit_sets.values(): if not refs.isdisjoint(all_seen): conflict = refs.intersection(all_seen) NodeError.error_msg(node, '%s appear in multiple concurrent statements' % str(', '.join(list(conflict)))) all_seen.update(refs) return all_seen class FindQbitReferences(ast.NodeTransformer): """ Find all the references to qbits in a node Assumes that all qbits are referenced by variables that have been marked as being qbits rather than arbitrary expressions For example, if you do something like qbit1 = Qubit(label="1") # Create a new qbit; qbit1 is marked arr[ind] = qbit1 foo = arr[ind] Then "qbit1" will be detected as a reference to a qbit, but "arr[ind]" or "foo" will not, even though all three expressions evaluate to a reference to the same qbit. 
""" def __init__(self, *args, **kwargs): super(FindQbitReferences, self).__init__(*args, **kwargs) self.qbit_refs = set() def visit_Name(self, node): if node.id in self.qbit_refs: DebugMsg.log('XX GOT qbit already %s' % node.id) node.qgl_is_qbit = True elif hasattr(node, 'qgl_is_qbit') and node.qgl_is_qbit: DebugMsg.log('XX GOT qbit %s' % node.id) self.qbit_refs.add(node.id) else: DebugMsg.log('XX NOT qbit %s' % node.id) return node if __name__ == '__main__': import sys from pyqgl2.importer import NameSpaces def preprocess(fname): importer = NameSpaces(fname) ptree = importer.path2ast[importer.base_fname] type_check = CheckType(fname, importer=importer) nptree = type_check.visit(ptree) for func_def in sorted(type_check.func_defs.keys()): types, node = type_check.func_defs[func_def] call_list = node.qgl_call_list if NodeError.MAX_ERR_LEVEL >= NodeError.NODE_ERROR_ERROR: print('bailing out 1') sys.exit(1) sym_check = CheckSymtab(fname, type_check.func_defs, importer) nptree2 = sym_check.visit(nptree) if NodeError.MAX_ERR_LEVEL >= NodeError.NODE_ERROR_ERROR: print('bailing out 2') sys.exit(1) wav_check = CheckWaveforms(type_check.func_defs, importer) nptree3 = wav_check.visit(nptree2) if NodeError.MAX_ERR_LEVEL >= NodeError.NODE_ERROR_ERROR: print('bailing out 3') sys.exit(1) def new_lscope(fname): importer = NameSpaces(fname) ptree = importer.qglmain print(ast.dump(ptree)) zz_def = importer.resolve_sym(ptree.qgl_fname, 'zz') main_scope = FindTypes.find_lscope(importer, ptree, None) # zz_scope = FindTypes.find_lscope(importer, zz_def, main_scope) new_lscope(sys.argv[1]) preprocess(sys.argv[1])
apache-2.0
rhyolight/nupic.research
projects/sequence_prediction/reberGrammar/reberSequence_CompareTMvsLSTM.py
13
2320
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams

plt.ion()
rcParams.update({'figure.autolayout': True})


def _plotComparison(resultTM, resultLSTM, metric, styleTM, styleLSTM, ylabel):
  """Plot one metric (as a percentage) for TM vs. LSTM on the current axes.

  `metric` is the key into the loaded .npz result archives; values are
  averaged over the second axis (presumably repeated experiment runs --
  TODO confirm against the experiment scripts).
  """
  plt.semilogx(resultTM['trainSeqN'],
               100*np.mean(resultTM[metric], 1), styleTM, label='TM')
  plt.semilogx(resultLSTM['trainSeqN'],
               100*np.mean(resultLSTM[metric], 1), styleLSTM, label='LSTM')
  plt.legend()
  plt.xlabel(' Training Sequence Number')
  plt.ylabel(ylabel)


def plotResult():
  """Compare TM and LSTM performance on the Reber-grammar sequence task.

  Loads the .npz result files produced by the TM and LSTM experiment
  scripts and saves a 2x2 comparison figure (hit rate, miss rate and
  false-positive rate vs. number of training sequences) as a PDF under
  result/.
  """
  resultTM = np.load('result/reberSequenceTM.npz')
  resultLSTM = np.load('result/reberSequenceLSTM.npz')

  plt.figure()
  # NOTE: the original called plt.hold(True) here; hold() was deprecated
  # and then removed from matplotlib, and repeated plot calls on the same
  # axes already accumulate by default, so it is dropped.
  plt.subplot(2,2,1)
  _plotComparison(resultTM, resultLSTM, 'correctRateAll', '-*', '-s',
                  ' Hit Rate (Best Match) (%)')
  plt.subplot(2,2,4)
  _plotComparison(resultTM, resultLSTM, 'missRateAll', '-*', '-*',
                  ' Miss Rate (%)')
  plt.subplot(2,2,3)
  _plotComparison(resultTM, resultLSTM, 'fpRateAll', '-*', '-*',
                  ' False Positive Rate (%)')
  plt.savefig('result/ReberSequence_CompareTM&LSTMperformance.pdf')


if __name__ == "__main__":
  plotResult()
gpl-3.0
proxysh/Safejumper-for-Desktop
buildlinux/env32/lib/python2.7/site-packages/wheel/archive.py
233
2286
""" Archive tools for wheel. """ import os import time import logging import os.path import zipfile log = logging.getLogger("wheel") def archive_wheelfile(base_name, base_dir): '''Archive all files under `base_dir` in a whl file and name it like `base_name`. ''' olddir = os.path.abspath(os.curdir) base_name = os.path.abspath(base_name) try: os.chdir(base_dir) return make_wheelfile_inner(base_name) finally: os.chdir(olddir) def make_wheelfile_inner(base_name, base_dir='.'): """Create a whl file from all the files under 'base_dir'. Places .dist-info at the end of the archive.""" zip_filename = base_name + ".whl" log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) # Some applications need reproducible .whl files, but they can't do this # without forcing the timestamp of the individual ZipInfo objects. See # issue #143. timestamp = os.environ.get('SOURCE_DATE_EPOCH') if timestamp is None: date_time = None else: date_time = time.gmtime(int(timestamp))[0:6] # XXX support bz2, xz when available zip = zipfile.ZipFile(open(zip_filename, "wb+"), "w", compression=zipfile.ZIP_DEFLATED) score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3} deferred = [] def writefile(path, date_time): st = os.stat(path) if date_time is None: mtime = time.gmtime(st.st_mtime) date_time = mtime[0:6] zinfo = zipfile.ZipInfo(path, date_time) zinfo.external_attr = st.st_mode << 16 zinfo.compress_type = zipfile.ZIP_DEFLATED with open(path, 'rb') as fp: zip.writestr(zinfo, fp.read()) log.info("adding '%s'" % path) for dirpath, dirnames, filenames in os.walk(base_dir): for name in filenames: path = os.path.normpath(os.path.join(dirpath, name)) if os.path.isfile(path): if dirpath.endswith('.dist-info'): deferred.append((score.get(name, 0), path)) else: writefile(path, date_time) deferred.sort() for score, path in deferred: writefile(path, date_time) zip.close() return zip_filename
gpl-2.0
Orav/kbengine
kbe/src/lib/python/Tools/pybench/Calls.py
1
9812
from pybench import Test

class PythonFunctionCalls(Test):
    """Benchmark plain Python function calls.

    Each round runs 5 unrolled groups of: one no-arg call, four
    one-arg calls, four three-arg calls and two calls with
    default/keyword arguments (operations = 5*(1+4+4+2)). The call
    counts are the measured quantity -- do not refactor the unrolling.
    """

    version = 2.1
    operations = 5*(1+4+4+2)
    rounds = 60000

    def test(self):

        global f,f1,g,h

        # define functions
        def f():
            pass

        def f1(x):
            pass

        def g(a,b,c):
            return a,b,c

        def h(a,b,c,d=1,e=2,f=3):
            return d,e,f

        # do calls
        for i in range(self.rounds):

            f()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            h(i,i,3,i,i)
            h(i,i,i,2,i,3)

            f()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            h(i,i,3,i,i)
            h(i,i,i,2,i,3)

            f()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            h(i,i,3,i,i)
            h(i,i,i,2,i,3)

            f()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            h(i,i,3,i,i)
            h(i,i,i,2,i,3)

            f()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            g(i,i,i)
            h(i,i,3,i,i)
            h(i,i,i,2,i,3)

    def calibrate(self):
        """Measure only the setup + empty-loop overhead of test()."""

        global f,f1,g,h

        # define functions
        def f():
            pass

        def f1(x):
            pass

        def g(a,b,c):
            return a,b,c

        def h(a,b,c,d=1,e=2,f=3):
            return d,e,f

        # do calls
        for i in range(self.rounds):
            pass

###

class ComplexPythonFunctionCalls(Test):
    """Benchmark calls using keyword arguments and */** unpacking."""

    version = 2.0
    operations = 4*5
    rounds = 100000

    def test(self):

        # define functions
        def f(a,b,c,d=1,e=2,f=3):
            return f

        args = 1,2
        kwargs = dict(c=3,d=4,e=5)

        # do calls
        for i in range(self.rounds):
            f(a=i,b=i,c=i)
            f(f=i,e=i,d=i,c=2,b=i,a=3)
            f(1,b=i,**kwargs)
            f(*args,**kwargs)

            f(a=i,b=i,c=i)
            f(f=i,e=i,d=i,c=2,b=i,a=3)
            f(1,b=i,**kwargs)
            f(*args,**kwargs)

            f(a=i,b=i,c=i)
            f(f=i,e=i,d=i,c=2,b=i,a=3)
            f(1,b=i,**kwargs)
            f(*args,**kwargs)

            f(a=i,b=i,c=i)
            f(f=i,e=i,d=i,c=2,b=i,a=3)
            f(1,b=i,**kwargs)
            f(*args,**kwargs)

            f(a=i,b=i,c=i)
            f(f=i,e=i,d=i,c=2,b=i,a=3)
            f(1,b=i,**kwargs)
            f(*args,**kwargs)

    def calibrate(self):
        """Measure only the setup + empty-loop overhead of test()."""

        # define functions
        def f(a,b,c,d=1,e=2,f=3):
            return f

        args = 1,2
        kwargs = dict(c=3,d=4,e=5)

        # do calls
        for i in range(self.rounds):
            pass

###

class BuiltinFunctionCalls(Test):
    """Benchmark calls of C-implemented builtins bound to locals."""

    version = 2.0
    operations = 5*(2+5+5+5)
    rounds = 60000

    def test(self):

        # localize functions
        f0 = globals
        f1 = hash
        f2 = divmod
        f3 = max

        # do calls
        for i in range(self.rounds):

            f0()
            f0()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)

            f0()
            f0()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)

            f0()
            f0()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)

            f0()
            f0()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)

            f0()
            f0()
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f1(i)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f2(1,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)
            f3(1,3,2)

    def calibrate(self):
        """Measure only the setup + empty-loop overhead of test()."""

        # localize functions
        # NOTE(review): test() binds f0 = globals but calibrate binds
        # f0 = dir -- verify the asymmetry is intentional upstream.
        f0 = dir
        f1 = hash
        f2 = divmod
        f3 = max

        # do calls
        for i in range(self.rounds):
            pass

###

class PythonMethodCalls(Test):
    """Benchmark bound-method calls on a small local class."""

    version = 2.0
    operations = 5*(6 + 5 + 4)
    rounds = 30000

    def test(self):

        class c:

            x = 2
            s = 'string'

            def f(self):
                return self.x

            def j(self,a,b):
                self.y = a
                self.t = b
                return self.y

            def k(self,a,b,c=3):
                self.y = a
                self.s = b
                self.t = c

        o = c()

        for i in range(self.rounds):

            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.j(i,i)
            o.j(i,i)
            o.j(i,2)
            o.j(i,2)
            o.j(2,2)
            o.k(i,i)
            o.k(i,2)
            o.k(i,2,3)
            o.k(i,i,c=4)

            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.j(i,i)
            o.j(i,i)
            o.j(i,2)
            o.j(i,2)
            o.j(2,2)
            o.k(i,i)
            o.k(i,2)
            o.k(i,2,3)
            o.k(i,i,c=4)

            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.j(i,i)
            o.j(i,i)
            o.j(i,2)
            o.j(i,2)
            o.j(2,2)
            o.k(i,i)
            o.k(i,2)
            o.k(i,2,3)
            o.k(i,i,c=4)

            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.j(i,i)
            o.j(i,i)
            o.j(i,2)
            o.j(i,2)
            o.j(2,2)
            o.k(i,i)
            o.k(i,2)
            o.k(i,2,3)
            o.k(i,i,c=4)

            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.f()
            o.j(i,i)
            o.j(i,i)
            o.j(i,2)
            o.j(i,2)
            o.j(2,2)
            o.k(i,i)
            o.k(i,2)
            o.k(i,2,3)
            o.k(i,i,c=4)

    def calibrate(self):
        """Measure only the setup + empty-loop overhead of test()."""

        class c:

            x = 2
            s = 'string'

            def f(self):
                return self.x

            def j(self,a,b):
                self.y = a
                self.t = b

            def k(self,a,b,c=3):
                self.y = a
                self.s = b
                self.t = c

        o = c

        for i in range(self.rounds):
            pass

###

class Recursion(Test):
    """Benchmark recursive calls of a global function (depth 10)."""

    version = 2.0
    operations = 5
    rounds = 100000

    def test(self):

        global f

        def f(x):
            if x > 1:
                return f(x-1)
            return 1

        for i in range(self.rounds):
            f(10)
            f(10)
            f(10)
            f(10)
            f(10)

    def calibrate(self):
        """Measure only the setup + empty-loop overhead of test()."""

        global f

        def f(x):
            if x > 0:
                return f(x-1)
            return 1

        for i in range(self.rounds):
            pass


### Test to make Fredrik happy...

if __name__ == '__main__':
    import timeit
    if 0:
        timeit.TestClass = PythonFunctionCalls
        timeit.main(['-s', 'test = TestClass(); test.rounds = 1000',
                     'test.test()'])
    else:
        setup = """\
global f,f1,g,h

# define functions
def f():
    pass

def f1(x):
    pass

def g(a,b,c):
    return a,b,c

def h(a,b,c,d=1,e=2,f=3):
    return d,e,f

i = 1
"""
        test = """\
f()
f1(i)
f1(i)
f1(i)
f1(i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
h(i,i,3,i,i)
h(i,i,i,2,i,3)

f()
f1(i)
f1(i)
f1(i)
f1(i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
h(i,i,3,i,i)
h(i,i,i,2,i,3)

f()
f1(i)
f1(i)
f1(i)
f1(i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
h(i,i,3,i,i)
h(i,i,i,2,i,3)

f()
f1(i)
f1(i)
f1(i)
f1(i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
h(i,i,3,i,i)
h(i,i,i,2,i,3)

f()
f1(i)
f1(i)
f1(i)
f1(i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
g(i,i,i)
h(i,i,3,i,i)
h(i,i,i,2,i,3)
"""

        timeit.main(['-s',
                     setup,
                     test])
lgpl-3.0
Theer108/invenio
invenio/modules/pidstore/admin.py
13
2254
# -*- coding: utf-8 -*-
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""Flask-Admin views for the persistent identifier (PID) store."""

from __future__ import absolute_import

from invenio.ext.admin.views import ModelView
from invenio.ext.sqlalchemy import db
from invenio.modules.pidstore.models import PersistentIdentifier, PidLog


class PersistentIdentifierAdmin(ModelView):
    """Read-only admin view over persistent identifiers."""

    # PIDs are managed programmatically; the admin UI is inspection-only.
    _can_create = False
    _can_edit = False
    _can_delete = False

    column_list = (
        'pid_type', 'pid_value', 'status', 'created', 'last_modified'
    )
    column_searchable_list = ('pid_value',)
    # Map the single-letter status codes stored in the DB to readable labels.
    column_choices = {
        'status': {
            'N': 'NEW',
            'R': 'REGISTERED',
            'K': 'RESERVED',
            'D': 'INACTIVE',
        }
    }
    page_size = 100

    def __init__(self, model, session, **kwargs):
        """Initialize the Flask-Admin model view."""
        super(PersistentIdentifierAdmin, self).__init__(
            model, session, **kwargs
        )


class PidLogAdmin(ModelView):
    """Read-only admin view over the PID action log."""

    # The log is append-only from application code; admins only inspect it.
    _can_create = False
    _can_edit = False
    _can_delete = False

    column_list = ('id_pid', 'action', 'message')

    def __init__(self, model, session, **kwargs):
        """Initialize the Flask-Admin model view."""
        super(PidLogAdmin, self).__init__(model, session, **kwargs)


def register_admin(app, admin):
    """
    Called on app initialization to register administration interface.

    :param app: the Flask application (unused here)
    :param admin: the Flask-Admin instance to attach the views to
    """
    admin.add_view(PersistentIdentifierAdmin(
        PersistentIdentifier, db.session,
        name='Persistent identifiers', category="Persistent Identifiers")
    )
    admin.add_view(PidLogAdmin(
        PidLog, db.session,
        name='Log', category="Persistent Identifiers")
    )
gpl-2.0
atacai/server-tools
auditlog/migrations/8.0.1.0/pre-migration.py
43
1185
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright (C) 2015 Therp BV (<http://therp.nl>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.addons.auditlog import migrate_from_audittrail def migrate(cr, version): """if we migrate from an older version, it's a migration from audittrail""" migrate_from_audittrail(cr)
agpl-3.0
valkjsaaa/sl4a
python/src/Lib/test/test_tcl.py
75
4645
#!/usr/bin/env python

"""Tests for the Tcl interpreter wrapper exposed via Tkinter.Tcl()."""

import unittest
import os
from test import test_support
from Tkinter import Tcl
from _tkinter import TclError


class TclTest(unittest.TestCase):
    """Exercise eval/call/variable APIs of a fresh Tcl interpreter."""

    def setUp(self):
        # A fresh interpreter per test so variable state cannot leak.
        self.interp = Tcl()

    def testEval(self):
        tcl = self.interp
        tcl.eval('set a 1')
        self.assertEqual(tcl.eval('set a'),'1')

    def testEvalException(self):
        # 'set a' with 'a' unset raises in Tcl.
        tcl = self.interp
        self.assertRaises(TclError,tcl.eval,'set a')

    def testEvalException2(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.eval,'this is wrong')

    def testCall(self):
        tcl = self.interp
        tcl.call('set','a','1')
        self.assertEqual(tcl.call('set','a'),'1')

    def testCallException(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.call,'set','a')

    def testCallException2(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.call,'this','is','wrong')

    def testSetVar(self):
        tcl = self.interp
        tcl.setvar('a','1')
        self.assertEqual(tcl.eval('set a'),'1')

    def testSetVarArray(self):
        tcl = self.interp
        tcl.setvar('a(1)','1')
        self.assertEqual(tcl.eval('set a(1)'),'1')

    def testGetVar(self):
        tcl = self.interp
        tcl.eval('set a 1')
        self.assertEqual(tcl.getvar('a'),'1')

    def testGetVarArray(self):
        tcl = self.interp
        tcl.eval('set a(1) 1')
        self.assertEqual(tcl.getvar('a(1)'),'1')

    def testGetVarException(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.getvar,'a')

    def testGetVarArrayException(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.getvar,'a(1)')

    def testUnsetVar(self):
        tcl = self.interp
        tcl.setvar('a',1)
        self.assertEqual(tcl.eval('info exists a'),'1')
        tcl.unsetvar('a')
        self.assertEqual(tcl.eval('info exists a'),'0')

    def testUnsetVarArray(self):
        # Unsetting one array element must leave the others intact.
        tcl = self.interp
        tcl.setvar('a(1)',1)
        tcl.setvar('a(2)',2)
        self.assertEqual(tcl.eval('info exists a(1)'),'1')
        self.assertEqual(tcl.eval('info exists a(2)'),'1')
        tcl.unsetvar('a(1)')
        self.assertEqual(tcl.eval('info exists a(1)'),'0')
        self.assertEqual(tcl.eval('info exists a(2)'),'1')

    def testUnsetVarException(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.unsetvar,'a')

    def testEvalFile(self):
        tcl = self.interp
        filename = "testEvalFile.tcl"
        fd = open(filename,'w')
        script = """set a 1
        set b 2
        set c [ expr $a + $b ]
        """
        fd.write(script)
        fd.close()
        tcl.evalfile(filename)
        os.remove(filename)
        self.assertEqual(tcl.eval('set a'),'1')
        self.assertEqual(tcl.eval('set b'),'2')
        self.assertEqual(tcl.eval('set c'),'3')

    def testEvalFileException(self):
        tcl = self.interp
        filename = "doesnotexists"
        try:
            os.remove(filename)
        except Exception,e:
            pass
        self.assertRaises(TclError,tcl.evalfile,filename)

    def testPackageRequireException(self):
        tcl = self.interp
        self.assertRaises(TclError,tcl.eval,'package require DNE')

    def testLoadTk(self):
        import os
        if 'DISPLAY' not in os.environ:
            # skipping test of clean upgradeability
            return
        tcl = Tcl()
        self.assertRaises(TclError,tcl.winfo_geometry)
        tcl.loadtk()
        self.assertEqual('1x1+0+0', tcl.winfo_geometry())
        tcl.destroy()

    def testLoadTkFailure(self):
        import os
        old_display = None
        import sys
        if sys.platform.startswith(('win', 'darwin', 'cygwin')):
            return  # no failure possible on windows?
        if 'DISPLAY' in os.environ:
            old_display = os.environ['DISPLAY']
            del os.environ['DISPLAY']
            # on some platforms, deleting environment variables
            # doesn't actually carry through to the process level
            # because they don't support unsetenv
            # If that's the case, abort.
            display = os.popen('echo $DISPLAY').read().strip()
            if display:
                return
        try:
            tcl = Tcl()
            self.assertRaises(TclError, tcl.winfo_geometry)
            self.assertRaises(TclError, tcl.loadtk)
        finally:
            if old_display is not None:
                os.environ['DISPLAY'] = old_display


def test_main():
    test_support.run_unittest(TclTest)

if __name__ == "__main__":
    test_main()
apache-2.0
jazztpt/edx-platform
lms/djangoapps/shoppingcart/tests/test_models.py
40
57108
""" Tests for the Shopping Cart Models """ from decimal import Decimal import datetime import sys import json import copy import smtplib from boto.exception import BotoServerError # this is a super-class of SESError and catches connection errors from mock import patch, MagicMock import pytz import ddt from django.core import mail from django.core.mail.message import EmailMessage from django.conf import settings from django.db import DatabaseError from django.test import TestCase from django.test.utils import override_settings from django.core.urlresolvers import reverse from django.contrib.auth.models import AnonymousUser from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from shoppingcart.models import ( Order, OrderItem, CertificateItem, InvalidCartItem, CourseRegistrationCode, PaidCourseRegistration, CourseRegCodeItem, Donation, OrderItemSubclassPK, Invoice, CourseRegistrationCodeInvoiceItem, InvoiceTransaction, InvoiceHistory, RegistrationCodeRedemption, Coupon, CouponRedemption) from student.tests.factories import UserFactory from student.models import CourseEnrollment from course_modes.models import CourseMode from shoppingcart.exceptions import ( PurchasedCallbackException, CourseDoesNotExistException, ItemAlreadyInCartException, AlreadyEnrolledInCourseException, InvalidStatusToRetire, UnexpectedOrderItemStatus, ) from opaque_keys.edx.locator import CourseLocator @ddt.ddt class OrderTest(ModuleStoreTestCase): def setUp(self): super(OrderTest, self).setUp() self.user = UserFactory.create() course = CourseFactory.create() self.course_key = course.id self.other_course_keys = [] for __ in xrange(1, 5): self.other_course_keys.append(CourseFactory.create().id) self.cost = 40 # Add mock tracker for event testing. 
patcher = patch('shoppingcart.models.analytics') self.mock_tracker = patcher.start() self.addCleanup(patcher.stop) def test_get_cart_for_user(self): # create a cart cart = Order.get_cart_for_user(user=self.user) # add something to it CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') # should return the same cart cart2 = Order.get_cart_for_user(user=self.user) self.assertEquals(cart2.orderitem_set.count(), 1) def test_user_cart_has_items(self): anon = AnonymousUser() self.assertFalse(Order.user_cart_has_items(anon)) self.assertFalse(Order.user_cart_has_items(self.user)) cart = Order.get_cart_for_user(self.user) item = OrderItem(order=cart, user=self.user) item.save() self.assertTrue(Order.user_cart_has_items(self.user)) self.assertFalse(Order.user_cart_has_items(self.user, [CertificateItem])) self.assertFalse(Order.user_cart_has_items(self.user, [PaidCourseRegistration])) def test_user_cart_has_paid_course_registration_items(self): cart = Order.get_cart_for_user(self.user) item = PaidCourseRegistration(order=cart, user=self.user) item.save() self.assertTrue(Order.user_cart_has_items(self.user, [PaidCourseRegistration])) self.assertFalse(Order.user_cart_has_items(self.user, [CertificateItem])) def test_user_cart_has_certificate_items(self): cart = Order.get_cart_for_user(self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') self.assertTrue(Order.user_cart_has_items(self.user, [CertificateItem])) self.assertFalse(Order.user_cart_has_items(self.user, [PaidCourseRegistration])) def test_cart_clear(self): cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') CertificateItem.add_to_order(cart, self.other_course_keys[0], self.cost, 'honor') self.assertEquals(cart.orderitem_set.count(), 2) self.assertTrue(cart.has_items()) cart.clear() self.assertEquals(cart.orderitem_set.count(), 0) self.assertFalse(cart.has_items()) def 
test_add_item_to_cart_currency_match(self): cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='eur') # verify that a new item has been added self.assertEquals(cart.orderitem_set.count(), 1) # verify that the cart's currency was updated self.assertEquals(cart.currency, 'eur') with self.assertRaises(InvalidCartItem): CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='usd') # assert that this item did not get added to the cart self.assertEquals(cart.orderitem_set.count(), 1) def test_total_cost(self): cart = Order.get_cart_for_user(user=self.user) # add items to the order course_costs = [(self.other_course_keys[0], 30), (self.other_course_keys[1], 40), (self.other_course_keys[2], 10), (self.other_course_keys[3], 20)] for course, cost in course_costs: CertificateItem.add_to_order(cart, course, cost, 'honor') self.assertEquals(cart.orderitem_set.count(), len(course_costs)) self.assertEquals(cart.total_cost, sum(cost for _course, cost in course_costs)) def test_start_purchase(self): # Start the purchase, which will mark the cart as "paying" cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='usd') cart.start_purchase() self.assertEqual(cart.status, 'paying') for item in cart.orderitem_set.all(): self.assertEqual(item.status, 'paying') # Starting the purchase should be idempotent cart.start_purchase() self.assertEqual(cart.status, 'paying') for item in cart.orderitem_set.all(): self.assertEqual(item.status, 'paying') # If we retrieve the cart for the user, we should get a different order next_cart = Order.get_cart_for_user(user=self.user) self.assertNotEqual(cart, next_cart) self.assertEqual(next_cart.status, 'cart') # Complete the first purchase cart.purchase() self.assertEqual(cart.status, 'purchased') for item in cart.orderitem_set.all(): self.assertEqual(item.status, 'purchased') # 
Starting the purchase again should be a no-op cart.start_purchase() self.assertEqual(cart.status, 'purchased') for item in cart.orderitem_set.all(): self.assertEqual(item.status, 'purchased') def test_retire_order_cart(self): """Test that an order in cart can successfully be retired""" cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='usd') cart.retire() self.assertEqual(cart.status, 'defunct-cart') self.assertEqual(cart.orderitem_set.get().status, 'defunct-cart') def test_retire_order_paying(self): """Test that an order in "paying" can successfully be retired""" cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='usd') cart.start_purchase() cart.retire() self.assertEqual(cart.status, 'defunct-paying') self.assertEqual(cart.orderitem_set.get().status, 'defunct-paying') @ddt.data( ("cart", "paying", UnexpectedOrderItemStatus), ("purchased", "purchased", InvalidStatusToRetire), ) @ddt.unpack def test_retire_order_error(self, order_status, item_status, exception): """ Test error cases for retiring an order: 1) Order item has a different status than the order 2) The order's status isn't in "cart" or "paying" """ cart = Order.get_cart_for_user(user=self.user) item = CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='usd') cart.status = order_status cart.save() item.status = item_status item.save() with self.assertRaises(exception): cart.retire() @ddt.data('defunct-paying', 'defunct-cart') def test_retire_order_already_retired(self, status): """ Check that orders that have already been retired noop when the method is called on them again. 
""" cart = Order.get_cart_for_user(user=self.user) item = CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor', currency='usd') cart.status = item.status = status cart.save() item.save() cart.retire() self.assertEqual(cart.status, status) self.assertEqual(item.status, status) @override_settings( SEGMENT_IO_LMS_KEY="foobar", FEATURES={ 'SEGMENT_IO_LMS': True, 'STORE_BILLING_INFO': True, } ) def test_purchase(self): # This test is for testing the subclassing functionality of OrderItem, but in # order to do this, we end up testing the specific functionality of # CertificateItem, which is not quite good unit test form. Sorry. cart = Order.get_cart_for_user(user=self.user) self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key)) item = CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') # Course enrollment object should be created but still inactive self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key)) # Analytics client pipes output to stderr when using the default client with patch('sys.stderr', sys.stdout.write): cart.purchase() self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course_key)) # Test email sending self.assertEquals(len(mail.outbox), 1) self.assertEquals('Order Payment Confirmation', mail.outbox[0].subject) self.assertIn(settings.PAYMENT_SUPPORT_EMAIL, mail.outbox[0].body) self.assertIn(unicode(cart.total_cost), mail.outbox[0].body) self.assertIn(item.additional_instruction_text(), mail.outbox[0].body) # Verify Google Analytics event fired for purchase self.mock_tracker.track.assert_called_once_with( # pylint: disable=maybe-no-member self.user.id, 'Completed Order', { 'orderId': 1, 'currency': 'usd', 'total': '40', 'products': [ { 'sku': u'CertificateItem.honor', 'name': unicode(self.course_key), 'category': unicode(self.course_key.org), 'price': '40', 'id': 1, 'quantity': 1 } ] }, context={'Google Analytics': {'clientId': None}} ) def 
test_purchase_item_failure(self): # once again, we're testing against the specific implementation of # CertificateItem cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') with patch('shoppingcart.models.CertificateItem.save', side_effect=DatabaseError): with self.assertRaises(DatabaseError): cart.purchase() # verify that we rolled back the entire transaction self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key)) # verify that e-mail wasn't sent self.assertEquals(len(mail.outbox), 0) def test_purchase_twice(self): cart = Order.get_cart_for_user(self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') # purchase the cart more than once cart.purchase() cart.purchase() self.assertEquals(len(mail.outbox), 1) @patch('shoppingcart.models.log.error') def test_purchase_item_email_smtp_failure(self, error_logger): cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') with patch('shoppingcart.models.EmailMessage.send', side_effect=smtplib.SMTPException): cart.purchase() self.assertTrue(error_logger.called) @patch('shoppingcart.models.log.error') def test_purchase_item_email_boto_failure(self, error_logger): cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') with patch.object(EmailMessage, 'send') as mock_send: mock_send.side_effect = BotoServerError("status", "reason") cart.purchase() self.assertTrue(error_logger.called) def purchase_with_data(self, cart): """ purchase a cart with billing information """ CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') cart.purchase( first='John', last='Smith', street1='11 Cambridge Center', street2='Suite 101', city='Cambridge', state='MA', postalcode='02412', country='US', ccnum='1111', cardtype='001', ) @patch('shoppingcart.models.render_to_string') 
@patch.dict(settings.FEATURES, {'STORE_BILLING_INFO': True}) def test_billing_info_storage_on(self, render): cart = Order.get_cart_for_user(self.user) self.purchase_with_data(cart) self.assertNotEqual(cart.bill_to_first, '') self.assertNotEqual(cart.bill_to_last, '') self.assertNotEqual(cart.bill_to_street1, '') self.assertNotEqual(cart.bill_to_street2, '') self.assertNotEqual(cart.bill_to_postalcode, '') self.assertNotEqual(cart.bill_to_ccnum, '') self.assertNotEqual(cart.bill_to_cardtype, '') self.assertNotEqual(cart.bill_to_city, '') self.assertNotEqual(cart.bill_to_state, '') self.assertNotEqual(cart.bill_to_country, '') ((_, context), _) = render.call_args self.assertTrue(context['has_billing_info']) @patch('shoppingcart.models.render_to_string') @patch.dict(settings.FEATURES, {'STORE_BILLING_INFO': False}) def test_billing_info_storage_off(self, render): cart = Order.get_cart_for_user(self.user) self.purchase_with_data(cart) self.assertNotEqual(cart.bill_to_first, '') self.assertNotEqual(cart.bill_to_last, '') self.assertNotEqual(cart.bill_to_city, '') self.assertNotEqual(cart.bill_to_state, '') self.assertNotEqual(cart.bill_to_country, '') self.assertNotEqual(cart.bill_to_postalcode, '') # things we expect to be missing when the feature is off self.assertEqual(cart.bill_to_street1, '') self.assertEqual(cart.bill_to_street2, '') self.assertEqual(cart.bill_to_ccnum, '') self.assertEqual(cart.bill_to_cardtype, '') ((_, context), _) = render.call_args self.assertFalse(context['has_billing_info']) def test_generate_receipt_instructions_callchain(self): """ This tests the generate_receipt_instructions call chain (ie calling the function on the cart also calls it on items in the cart """ mock_gen_inst = MagicMock(return_value=(OrderItemSubclassPK(OrderItem, 1), set([]))) cart = Order.get_cart_for_user(self.user) item = OrderItem(user=self.user, order=cart) item.save() self.assertTrue(cart.has_items()) with patch.object(OrderItem, 'generate_receipt_instructions', 
mock_gen_inst): cart.generate_receipt_instructions() mock_gen_inst.assert_called_with() def test_confirmation_email_error(self): CourseMode.objects.create( course_id=self.course_key, mode_slug="verified", mode_display_name="Verified", min_price=self.cost ) cart = Order.get_cart_for_user(self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'verified') # Simulate an error when sending the confirmation # email. This should NOT raise an exception. # If it does, then the implicit view-level # transaction could cause a roll-back, effectively # reversing order fulfillment. with patch.object(mail.message.EmailMessage, 'send') as mock_send: mock_send.side_effect = Exception("Kaboom!") cart.purchase() # Verify that the purchase completed successfully self.assertEqual(cart.status, 'purchased') # Verify that the user is enrolled as "verified" mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course_key) self.assertTrue(is_active) self.assertEqual(mode, 'verified') class OrderItemTest(TestCase): def setUp(self): super(OrderItemTest, self).setUp() self.user = UserFactory.create() def test_order_item_purchased_callback(self): """ This tests that calling purchased_callback on the base OrderItem class raises NotImplementedError """ item = OrderItem(user=self.user, order=Order.get_cart_for_user(self.user)) with self.assertRaises(NotImplementedError): item.purchased_callback() def test_order_item_generate_receipt_instructions(self): """ This tests that the generate_receipt_instructions call chain and also that calling it on the base OrderItem class returns an empty list """ cart = Order.get_cart_for_user(self.user) item = OrderItem(user=self.user, order=cart) item.save() self.assertTrue(cart.has_items()) (inst_dict, inst_set) = cart.generate_receipt_instructions() self.assertDictEqual({item.pk_with_subclass: set([])}, inst_dict) self.assertEquals(set([]), inst_set) def test_is_discounted(self): """ This tests the is_discounted property 
of the OrderItem """ cart = Order.get_cart_for_user(self.user) item = OrderItem(user=self.user, order=cart) item.list_price = None item.unit_cost = 100 self.assertFalse(item.is_discounted) item.list_price = 100 item.unit_cost = 100 self.assertFalse(item.is_discounted) item.list_price = 100 item.unit_cost = 90 self.assertTrue(item.is_discounted) def test_get_list_price(self): """ This tests the get_list_price() method of the OrderItem """ cart = Order.get_cart_for_user(self.user) item = OrderItem(user=self.user, order=cart) item.list_price = None item.unit_cost = 100 self.assertEqual(item.get_list_price(), item.unit_cost) item.list_price = 200 item.unit_cost = 100 self.assertEqual(item.get_list_price(), item.list_price) @patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': True}) class PaidCourseRegistrationTest(ModuleStoreTestCase): """ Paid Course Registration Tests. """ def setUp(self): super(PaidCourseRegistrationTest, self).setUp() self.user = UserFactory.create() self.user.set_password('password') self.user.save() self.cost = 40 self.course = CourseFactory.create() self.course_key = self.course.id self.course_mode = CourseMode(course_id=self.course_key, mode_slug="honor", mode_display_name="honor cert", min_price=self.cost) self.course_mode.save() self.percentage_discount = 20.0 self.cart = Order.get_cart_for_user(self.user) def test_get_total_amount_of_purchased_items(self): """ Test to check the total amount of the purchased items. """ PaidCourseRegistration.add_to_order(self.cart, self.course_key) self.cart.purchase() total_amount = PaidCourseRegistration.get_total_amount_of_purchased_item(course_key=self.course_key) self.assertEqual(total_amount, 40.00) def test_get_total_amount_empty(self): """ Test to check the total amount of the purchased items. 
""" total_amount = PaidCourseRegistration.get_total_amount_of_purchased_item(course_key=self.course_key) self.assertEqual(total_amount, 0.00) def test_add_to_order(self): reg1 = PaidCourseRegistration.add_to_order(self.cart, self.course_key) self.assertEqual(reg1.unit_cost, self.cost) self.assertEqual(reg1.line_cost, self.cost) self.assertEqual(reg1.unit_cost, self.course_mode.min_price) self.assertEqual(reg1.mode, "honor") self.assertEqual(reg1.user, self.user) self.assertEqual(reg1.status, "cart") self.assertTrue(PaidCourseRegistration.contained_in_order(self.cart, self.course_key)) self.assertFalse(PaidCourseRegistration.contained_in_order( self.cart, CourseLocator(org="MITx", course="999", run="Robot_Super_Course_abcd")) ) self.assertEqual(self.cart.total_cost, self.cost) def test_order_generated_registration_codes(self): """ Test to check for the order generated registration codes. """ self.cart.order_type = 'business' self.cart.save() item = CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) self.cart.purchase() registration_codes = CourseRegistrationCode.order_generated_registration_codes(self.course_key) self.assertEqual(registration_codes.count(), item.qty) def test_order_generated_totals(self): """ Test to check for the order generated registration codes. 
""" total_amount = CourseRegCodeItem.get_total_amount_of_purchased_item(self.course_key) self.assertEqual(total_amount, 0) self.cart.order_type = 'business' self.cart.save() item = CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) self.cart.purchase() registration_codes = CourseRegistrationCode.order_generated_registration_codes(self.course_key) self.assertEqual(registration_codes.count(), item.qty) total_amount = CourseRegCodeItem.get_total_amount_of_purchased_item(self.course_key) self.assertEqual(total_amount, 80.00) def add_coupon(self, course_key, is_active, code): """ add dummy coupon into models """ Coupon.objects.create( code=code, description='testing code', course_id=course_key, percentage_discount=self.percentage_discount, created_by=self.user, is_active=is_active ) def login_user(self, username): """ login the user to the platform. """ self.client.login(username=username, password="password") def test_get_top_discount_codes_used(self): """ Test to check for the top coupon codes used. """ self.login_user(self.user.username) self.add_coupon(self.course_key, True, 'Ad123asd') self.add_coupon(self.course_key, True, '32213asd') self.purchases_using_coupon_codes() top_discounted_codes = CouponRedemption.get_top_discount_codes_used(self.course_key) self.assertTrue(top_discounted_codes[0]['coupon__code'], 'Ad123asd') self.assertTrue(top_discounted_codes[0]['coupon__used_count'], 1) self.assertTrue(top_discounted_codes[1]['coupon__code'], '32213asd') self.assertTrue(top_discounted_codes[1]['coupon__used_count'], 2) def test_get_total_coupon_code_purchases(self): """ Test to assert the number of coupon code purchases. 
""" self.login_user(self.user.username) self.add_coupon(self.course_key, True, 'Ad123asd') self.add_coupon(self.course_key, True, '32213asd') self.purchases_using_coupon_codes() total_coupon_code_purchases = CouponRedemption.get_total_coupon_code_purchases(self.course_key) self.assertTrue(total_coupon_code_purchases['coupon__count'], 3) def test_get_self_purchased_seat_count(self): """ Test to assert the number of seats purchased using individual purchases. """ PaidCourseRegistration.add_to_order(self.cart, self.course_key) self.cart.purchase() test_student = UserFactory.create() test_student.set_password('password') test_student.save() self.cart = Order.get_cart_for_user(test_student) PaidCourseRegistration.add_to_order(self.cart, self.course_key) self.cart.purchase() total_seats_count = PaidCourseRegistration.get_self_purchased_seat_count(course_key=self.course_key) self.assertTrue(total_seats_count, 2) def purchases_using_coupon_codes(self): """ helper method that uses coupon codes when purchasing courses. 
""" self.cart.order_type = 'business' self.cart.save() CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': 'Ad123asd'}) self.assertEqual(resp.status_code, 200) self.cart.purchase() self.cart.clear() self.cart = Order.get_cart_for_user(self.user) self.cart.order_type = 'business' self.cart.save() CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': 'Ad123asd'}) self.assertEqual(resp.status_code, 200) self.cart.purchase() self.cart.clear() self.cart = Order.get_cart_for_user(self.user) PaidCourseRegistration.add_to_order(self.cart, self.course_key) resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': '32213asd'}) self.assertEqual(resp.status_code, 200) self.cart.purchase() def test_cart_type_business(self): self.cart.order_type = 'business' self.cart.save() item = CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) self.cart.purchase() self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key)) # check that the registration codes are generated against the order registration_codes = CourseRegistrationCode.order_generated_registration_codes(self.course_key) self.assertEqual(registration_codes.count(), item.qty) def test_regcode_redemptions(self): """ Asserts the data model around RegistrationCodeRedemption """ self.cart.order_type = 'business' self.cart.save() CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) self.cart.purchase() reg_code = CourseRegistrationCode.order_generated_registration_codes(self.course_key)[0] enrollment = CourseEnrollment.enroll(self.user, self.course_key) redemption = RegistrationCodeRedemption( registration_code=reg_code, redeemed_by=self.user, course_enrollment=enrollment ) redemption.save() test_redemption = RegistrationCodeRedemption.registration_code_used_for_enrollment(enrollment) self.assertEqual(test_redemption.id, 
redemption.id) # pylint: disable=no-member def test_regcode_multi_redemptions(self): """ Asserts the data model around RegistrationCodeRedemption and what happens when we do multiple redemptions by same user """ self.cart.order_type = 'business' self.cart.save() CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) self.cart.purchase() reg_codes = CourseRegistrationCode.order_generated_registration_codes(self.course_key) self.assertEqual(len(reg_codes), 2) enrollment = CourseEnrollment.enroll(self.user, self.course_key) ids = [] for reg_code in reg_codes: redemption = RegistrationCodeRedemption( registration_code=reg_code, redeemed_by=self.user, course_enrollment=enrollment ) redemption.save() ids.append(redemption.id) # pylint: disable=no-member test_redemption = RegistrationCodeRedemption.registration_code_used_for_enrollment(enrollment) self.assertIn(test_redemption.id, ids) def test_add_with_default_mode(self): """ Tests add_to_cart where the mode specified in the argument is NOT in the database and NOT the default "honor". 
In this case it just adds the user in the CourseMode.DEFAULT_MODE, 0 price """ reg1 = PaidCourseRegistration.add_to_order(self.cart, self.course_key, mode_slug="DNE") self.assertEqual(reg1.unit_cost, 0) self.assertEqual(reg1.line_cost, 0) self.assertEqual(reg1.mode, "honor") self.assertEqual(reg1.user, self.user) self.assertEqual(reg1.status, "cart") self.assertEqual(self.cart.total_cost, 0) self.assertTrue(PaidCourseRegistration.contained_in_order(self.cart, self.course_key)) course_reg_code_item = CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2, mode_slug="DNE") self.assertEqual(course_reg_code_item.unit_cost, 0) self.assertEqual(course_reg_code_item.line_cost, 0) self.assertEqual(course_reg_code_item.mode, "honor") self.assertEqual(course_reg_code_item.user, self.user) self.assertEqual(course_reg_code_item.status, "cart") self.assertEqual(self.cart.total_cost, 0) self.assertTrue(CourseRegCodeItem.contained_in_order(self.cart, self.course_key)) def test_add_course_reg_item_with_no_course_item(self): fake_course_id = CourseLocator(org="edx", course="fake", run="course") with self.assertRaises(CourseDoesNotExistException): CourseRegCodeItem.add_to_order(self.cart, fake_course_id, 2) def test_course_reg_item_already_in_cart(self): CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) with self.assertRaises(ItemAlreadyInCartException): CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) def test_course_reg_item_already_enrolled_in_course(self): CourseEnrollment.enroll(self.user, self.course_key) with self.assertRaises(AlreadyEnrolledInCourseException): CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) def test_purchased_callback(self): reg1 = PaidCourseRegistration.add_to_order(self.cart, self.course_key) self.cart.purchase() self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course_key)) reg1 = PaidCourseRegistration.objects.get(id=reg1.id) # reload from DB to get side-effect self.assertEqual(reg1.status, 
"purchased") self.assertIsNotNone(reg1.course_enrollment) self.assertEqual(reg1.course_enrollment.id, CourseEnrollment.objects.get(user=self.user, course_id=self.course_key).id) def test_generate_receipt_instructions(self): """ Add 2 courses to the order and make sure the instruction_set only contains 1 element (no dups) """ course2 = CourseFactory.create() course_mode2 = CourseMode(course_id=course2.id, mode_slug="honor", mode_display_name="honor cert", min_price=self.cost) course_mode2.save() pr1 = PaidCourseRegistration.add_to_order(self.cart, self.course_key) pr2 = PaidCourseRegistration.add_to_order(self.cart, course2.id) self.cart.purchase() inst_dict, inst_set = self.cart.generate_receipt_instructions() self.assertEqual(2, len(inst_dict)) self.assertEqual(1, len(inst_set)) self.assertIn("dashboard", inst_set.pop()) self.assertIn(pr1.pk_with_subclass, inst_dict) self.assertIn(pr2.pk_with_subclass, inst_dict) def test_purchased_callback_exception(self): reg1 = PaidCourseRegistration.add_to_order(self.cart, self.course_key) reg1.course_id = CourseLocator(org="changed", course="forsome", run="reason") reg1.save() with self.assertRaises(PurchasedCallbackException): reg1.purchased_callback() self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key)) reg1.course_id = CourseLocator(org="abc", course="efg", run="hij") reg1.save() with self.assertRaises(PurchasedCallbackException): reg1.purchased_callback() self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key)) course_reg_code_item = CourseRegCodeItem.add_to_order(self.cart, self.course_key, 2) course_reg_code_item.course_id = CourseLocator(org="changed1", course="forsome1", run="reason1") course_reg_code_item.save() with self.assertRaises(PurchasedCallbackException): course_reg_code_item.purchased_callback() def test_user_cart_has_both_items(self): """ This test exists b/c having both CertificateItem and PaidCourseRegistration in an order used to break 
PaidCourseRegistration.contained_in_order """ cart = Order.get_cart_for_user(self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') PaidCourseRegistration.add_to_order(self.cart, self.course_key) self.assertTrue(PaidCourseRegistration.contained_in_order(cart, self.course_key)) class CertificateItemTest(ModuleStoreTestCase): """ Tests for verifying specific CertificateItem functionality """ def setUp(self): super(CertificateItemTest, self).setUp() self.user = UserFactory.create() self.cost = 40 course = CourseFactory.create() self.course_key = course.id course_mode = CourseMode(course_id=self.course_key, mode_slug="honor", mode_display_name="honor cert", min_price=self.cost) course_mode.save() course_mode = CourseMode(course_id=self.course_key, mode_slug="verified", mode_display_name="verified cert", min_price=self.cost) course_mode.save() patcher = patch('student.models.tracker') self.mock_tracker = patcher.start() self.addCleanup(patcher.stop) analytics_patcher = patch('shoppingcart.models.analytics') self.mock_analytics_tracker = analytics_patcher.start() self.addCleanup(analytics_patcher.stop) def _assert_refund_tracked(self): """ Assert that we fired a refund event. 
""" self.mock_analytics_tracker.track.assert_called_with( # pylint: disable=maybe-no-member self.user.id, 'Refunded Order', { 'orderId': 1, 'currency': 'usd', 'total': '40', 'products': [ { 'sku': u'CertificateItem.verified', 'name': unicode(self.course_key), 'category': unicode(self.course_key.org), 'price': '40', 'id': 1, 'quantity': 1 } ] }, context={'Google Analytics': {'clientId': None}} ) def test_existing_enrollment(self): CourseEnrollment.enroll(self.user, self.course_key) cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'verified') # verify that we are still enrolled self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course_key)) self.mock_tracker.reset_mock() cart.purchase() enrollment = CourseEnrollment.objects.get(user=self.user, course_id=self.course_key) self.assertEquals(enrollment.mode, u'verified') def test_single_item_template(self): cart = Order.get_cart_for_user(user=self.user) cert_item = CertificateItem.add_to_order(cart, self.course_key, self.cost, 'verified') self.assertEquals(cert_item.single_item_receipt_template, 'shoppingcart/receipt.html') cert_item = CertificateItem.add_to_order(cart, self.course_key, self.cost, 'honor') self.assertEquals(cert_item.single_item_receipt_template, 'shoppingcart/receipt.html') @override_settings( SEGMENT_IO_LMS_KEY="foobar", FEATURES={ 'SEGMENT_IO_LMS': True, 'STORE_BILLING_INFO': True, } ) def test_refund_cert_callback_no_expiration(self): # When there is no expiration date on a verified mode, the user can always get a refund # need to prevent analytics errors from appearing in stderr with patch('sys.stderr', sys.stdout.write): CourseEnrollment.enroll(self.user, self.course_key, 'verified') cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'verified') cart.purchase() CourseEnrollment.unenroll(self.user, self.course_key) target_certs = 
CertificateItem.objects.filter(course_id=self.course_key, user_id=self.user, status='refunded', mode='verified') self.assertTrue(target_certs[0]) self.assertTrue(target_certs[0].refund_requested_time) self.assertEquals(target_certs[0].order.status, 'refunded') self._assert_refund_tracked() def test_no_refund_on_cert_callback(self): # If we explicitly skip refunds, the unenroll action should not modify the purchase. CourseEnrollment.enroll(self.user, self.course_key, 'verified') cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'verified') cart.purchase() CourseEnrollment.unenroll(self.user, self.course_key, skip_refund=True) target_certs = CertificateItem.objects.filter( course_id=self.course_key, user_id=self.user, status='purchased', mode='verified' ) self.assertTrue(target_certs[0]) self.assertFalse(target_certs[0].refund_requested_time) self.assertEquals(target_certs[0].order.status, 'purchased') @override_settings( SEGMENT_IO_LMS_KEY="foobar", FEATURES={ 'SEGMENT_IO_LMS': True, 'STORE_BILLING_INFO': True, } ) def test_refund_cert_callback_before_expiration(self): # If the expiration date has not yet passed on a verified mode, the user can be refunded many_days = datetime.timedelta(days=60) course = CourseFactory.create() self.course_key = course.id course_mode = CourseMode(course_id=self.course_key, mode_slug="verified", mode_display_name="verified cert", min_price=self.cost, expiration_datetime=(datetime.datetime.now(pytz.utc) + many_days)) course_mode.save() # need to prevent analytics errors from appearing in stderr with patch('sys.stderr', sys.stdout.write): CourseEnrollment.enroll(self.user, self.course_key, 'verified') cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'verified') cart.purchase() CourseEnrollment.unenroll(self.user, self.course_key) target_certs = CertificateItem.objects.filter(course_id=self.course_key, 
user_id=self.user, status='refunded', mode='verified') self.assertTrue(target_certs[0]) self.assertTrue(target_certs[0].refund_requested_time) self.assertEquals(target_certs[0].order.status, 'refunded') self._assert_refund_tracked() def test_refund_cert_callback_before_expiration_email(self): """ Test that refund emails are being sent correctly. """ course = CourseFactory.create() course_key = course.id many_days = datetime.timedelta(days=60) course_mode = CourseMode(course_id=course_key, mode_slug="verified", mode_display_name="verified cert", min_price=self.cost, expiration_datetime=datetime.datetime.now(pytz.utc) + many_days) course_mode.save() CourseEnrollment.enroll(self.user, course_key, 'verified') cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, course_key, self.cost, 'verified') cart.purchase() mail.outbox = [] with patch('shoppingcart.models.log.error') as mock_error_logger: CourseEnrollment.unenroll(self.user, course_key) self.assertFalse(mock_error_logger.called) self.assertEquals(len(mail.outbox), 1) self.assertEquals('[Refund] User-Requested Refund', mail.outbox[0].subject) self.assertEquals(settings.PAYMENT_SUPPORT_EMAIL, mail.outbox[0].from_email) self.assertIn('has requested a refund on Order', mail.outbox[0].body) @patch('shoppingcart.models.log.error') def test_refund_cert_callback_before_expiration_email_error(self, error_logger): # If there's an error sending an email to billing, we need to log this error many_days = datetime.timedelta(days=60) course = CourseFactory.create() course_key = course.id course_mode = CourseMode(course_id=course_key, mode_slug="verified", mode_display_name="verified cert", min_price=self.cost, expiration_datetime=datetime.datetime.now(pytz.utc) + many_days) course_mode.save() CourseEnrollment.enroll(self.user, course_key, 'verified') cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, course_key, self.cost, 'verified') cart.purchase() with 
patch('shoppingcart.models.send_mail', side_effect=smtplib.SMTPException): CourseEnrollment.unenroll(self.user, course_key) self.assertTrue(error_logger.call_args[0][0].startswith('Failed sending email')) def test_refund_cert_callback_after_expiration(self): # If the expiration date has passed, the user cannot get a refund many_days = datetime.timedelta(days=60) course = CourseFactory.create() course_key = course.id course_mode = CourseMode(course_id=course_key, mode_slug="verified", mode_display_name="verified cert", min_price=self.cost,) course_mode.save() CourseEnrollment.enroll(self.user, course_key, 'verified') cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, course_key, self.cost, 'verified') cart.purchase() course_mode.expiration_datetime = (datetime.datetime.now(pytz.utc) - many_days) course_mode.save() CourseEnrollment.unenroll(self.user, course_key) target_certs = CertificateItem.objects.filter(course_id=course_key, user_id=self.user, status='refunded', mode='verified') self.assertEqual(len(target_certs), 0) def test_refund_cert_no_cert_exists(self): # If there is no paid certificate, the refund callback should return nothing CourseEnrollment.enroll(self.user, self.course_key, 'verified') ret_val = CourseEnrollment.unenroll(self.user, self.course_key) self.assertFalse(ret_val) def test_no_id_prof_confirm_email(self): # Pay for a no-id-professional course course_mode = CourseMode(course_id=self.course_key, mode_slug="no-id-professional", mode_display_name="No Id Professional Cert", min_price=self.cost) course_mode.save() CourseEnrollment.enroll(self.user, self.course_key) cart = Order.get_cart_for_user(user=self.user) CertificateItem.add_to_order(cart, self.course_key, self.cost, 'no-id-professional') # verify that we are still enrolled self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course_key)) self.mock_tracker.reset_mock() cart.purchase() enrollment = CourseEnrollment.objects.get(user=self.user, 
course_id=self.course_key) self.assertEquals(enrollment.mode, u'no-id-professional') # Check that the tax-deduction information appears in the confirmation email self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] self.assertEquals('Order Payment Confirmation', email.subject) self.assertNotIn("If you haven't verified your identity yet, please start the verification process", email.body) class DonationTest(ModuleStoreTestCase): """Tests for the donation order item type. """ COST = Decimal('23.45') def setUp(self): """Create a test user and order. """ super(DonationTest, self).setUp() self.user = UserFactory.create() self.cart = Order.get_cart_for_user(self.user) def test_donate_to_org(self): # No course ID provided, so this is a donation to the entire organization donation = Donation.add_to_order(self.cart, self.COST) self._assert_donation( donation, donation_type="general", unit_cost=self.COST, line_desc="Donation for edX" ) def test_donate_to_course(self): # Create a test course course = CourseFactory.create(display_name="Test Course") # Donate to the course donation = Donation.add_to_order(self.cart, self.COST, course_id=course.id) self._assert_donation( donation, donation_type="course", course_id=course.id, unit_cost=self.COST, line_desc=u"Donation for Test Course" ) def test_confirmation_email(self): # Pay for a donation Donation.add_to_order(self.cart, self.COST) self.cart.start_purchase() self.cart.purchase() # Check that the tax-deduction information appears in the confirmation email self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] self.assertEquals('Order Payment Confirmation', email.subject) self.assertIn("tax purposes", email.body) def test_donate_no_such_course(self): fake_course_id = CourseLocator(org="edx", course="fake", run="course") with self.assertRaises(CourseDoesNotExistException): Donation.add_to_order(self.cart, self.COST, course_id=fake_course_id) def _assert_donation(self, donation, donation_type=None, course_id=None, 
unit_cost=None, line_desc=None): """Verify the donation fields and that the donation can be purchased. """ self.assertEqual(donation.order, self.cart) self.assertEqual(donation.user, self.user) self.assertEqual(donation.donation_type, donation_type) self.assertEqual(donation.course_id, course_id) self.assertEqual(donation.qty, 1) self.assertEqual(donation.unit_cost, unit_cost) self.assertEqual(donation.currency, "usd") self.assertEqual(donation.line_desc, line_desc) # Verify that the donation is in the cart self.assertTrue(self.cart.has_items(item_type=Donation)) self.assertEqual(self.cart.total_cost, unit_cost) # Purchase the item self.cart.start_purchase() self.cart.purchase() # Verify that the donation is marked as purchased donation = Donation.objects.get(pk=donation.id) self.assertEqual(donation.status, "purchased") class InvoiceHistoryTest(TestCase): """Tests for the InvoiceHistory model. """ INVOICE_INFO = { 'is_valid': True, 'internal_reference': 'Test Internal Ref Num', 'customer_reference_number': 'Test Customer Ref Num', } CONTACT_INFO = { 'company_name': 'Test Company', 'company_contact_name': 'Test Company Contact Name', 'company_contact_email': 'test-contact@example.com', 'recipient_name': 'Test Recipient Name', 'recipient_email': 'test-recipient@example.com', 'address_line_1': 'Test Address 1', 'address_line_2': 'Test Address 2', 'address_line_3': 'Test Address 3', 'city': 'Test City', 'state': 'Test State', 'zip': '12345', 'country': 'US', } def setUp(self): super(InvoiceHistoryTest, self).setUp() invoice_data = copy.copy(self.INVOICE_INFO) invoice_data.update(self.CONTACT_INFO) self.course_key = CourseLocator('edX', 'DemoX', 'Demo_Course') self.invoice = Invoice.objects.create(total_amount="123.45", course_id=self.course_key, **invoice_data) self.user = UserFactory.create() def test_get_invoice_total_amount(self): """ test to check the total amount of the invoices for the course. 
""" total_amount = Invoice.get_invoice_total_amount_for_course(self.course_key) self.assertEqual(total_amount, 123.45) def test_get_total_amount_of_paid_invoices(self): """ Test to check the Invoice Transactions amount. """ InvoiceTransaction.objects.create( invoice=self.invoice, amount='123.45', currency='usd', comments='test comments', status='completed', created_by=self.user, last_modified_by=self.user ) total_amount_paid = InvoiceTransaction.get_total_amount_of_paid_course_invoices(self.course_key) self.assertEqual(float(total_amount_paid), 123.45) def test_get_total_amount_of_no_invoices(self): """ Test to check the Invoice Transactions amount. """ total_amount_paid = InvoiceTransaction.get_total_amount_of_paid_course_invoices(self.course_key) self.assertEqual(float(total_amount_paid), 0) def test_invoice_contact_info_history(self): self._assert_history_invoice_info( is_valid=True, internal_ref=self.INVOICE_INFO['internal_reference'], customer_ref=self.INVOICE_INFO['customer_reference_number'] ) self._assert_history_contact_info(**self.CONTACT_INFO) self._assert_history_items([]) self._assert_history_transactions([]) def test_invoice_generated_registration_codes(self): """ test filter out the registration codes that were generated via Invoice. 
""" invoice_item = CourseRegistrationCodeInvoiceItem.objects.create( invoice=self.invoice, qty=5, unit_price='123.45', course_id=self.course_key ) for i in range(5): CourseRegistrationCode.objects.create( code='testcode{counter}'.format(counter=i), course_id=self.course_key, created_by=self.user, invoice=self.invoice, invoice_item=invoice_item, mode_slug='honor' ) registration_codes = CourseRegistrationCode.invoice_generated_registration_codes(self.course_key) self.assertEqual(registration_codes.count(), 5) def test_invoice_history_items(self): # Create an invoice item CourseRegistrationCodeInvoiceItem.objects.create( invoice=self.invoice, qty=1, unit_price='123.45', course_id=self.course_key ) self._assert_history_items([{ 'qty': 1, 'unit_price': '123.45', 'currency': 'usd', 'course_id': unicode(self.course_key) }]) # Create a second invoice item CourseRegistrationCodeInvoiceItem.objects.create( invoice=self.invoice, qty=2, unit_price='456.78', course_id=self.course_key ) self._assert_history_items([ { 'qty': 1, 'unit_price': '123.45', 'currency': 'usd', 'course_id': unicode(self.course_key) }, { 'qty': 2, 'unit_price': '456.78', 'currency': 'usd', 'course_id': unicode(self.course_key) } ]) def test_invoice_history_transactions(self): # Create an invoice transaction first_transaction = InvoiceTransaction.objects.create( invoice=self.invoice, amount='123.45', currency='usd', comments='test comments', status='completed', created_by=self.user, last_modified_by=self.user ) self._assert_history_transactions([{ 'amount': '123.45', 'currency': 'usd', 'comments': 'test comments', 'status': 'completed', 'created_by': self.user.username, 'last_modified_by': self.user.username, }]) # Create a second invoice transaction second_transaction = InvoiceTransaction.objects.create( invoice=self.invoice, amount='456.78', currency='usd', comments='test more comments', status='started', created_by=self.user, last_modified_by=self.user ) self._assert_history_transactions([ { 'amount': 
'123.45', 'currency': 'usd', 'comments': 'test comments', 'status': 'completed', 'created_by': self.user.username, 'last_modified_by': self.user.username, }, { 'amount': '456.78', 'currency': 'usd', 'comments': 'test more comments', 'status': 'started', 'created_by': self.user.username, 'last_modified_by': self.user.username, } ]) # Delete the transactions first_transaction.delete() second_transaction.delete() self._assert_history_transactions([]) def _assert_history_invoice_info(self, is_valid=True, customer_ref=None, internal_ref=None): """Check top-level invoice information in the latest history record. """ latest = self._latest_history() self.assertEqual(latest['is_valid'], is_valid) self.assertEqual(latest['customer_reference'], customer_ref) self.assertEqual(latest['internal_reference'], internal_ref) def _assert_history_contact_info(self, **kwargs): """Check contact info in the latest history record. """ contact_info = self._latest_history()['contact_info'] for key, value in kwargs.iteritems(): self.assertEqual(contact_info[key], value) def _assert_history_items(self, expected_items): """Check line item info in the latest history record. """ items = self._latest_history()['items'] self.assertItemsEqual(items, expected_items) def _assert_history_transactions(self, expected_transactions): """Check transactions (payments/refunds) in the latest history record. """ transactions = self._latest_history()['transactions'] self.assertItemsEqual(transactions, expected_transactions) def _latest_history(self): """Retrieve the snapshot from the latest history record. """ latest = InvoiceHistory.objects.latest() return json.loads(latest.snapshot)
agpl-3.0
MotorolaMobilityLLC/external-chromium_org
native_client_sdk/src/tools/tests/sel_ldr_test.py
104
2080
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unit tests for the NaCl SDK sel_ldr launcher script."""

import os
import sys
import unittest

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.dirname(SCRIPT_DIR)
DATA_DIR = os.path.join(SCRIPT_DIR, 'data')
CHROME_SRC = os.path.dirname(os.path.dirname(os.path.dirname(PARENT_DIR)))
MOCK_DIR = os.path.join(CHROME_SRC, "third_party", "pymock")

# For the mock library
sys.path.append(MOCK_DIR)
sys.path.append(PARENT_DIR)

import sel_ldr
import mock


class TestSelLdr(unittest.TestCase):
  """Tests for sel_ldr.main()."""

  def testRequiresArg(self):
    """main() called with an empty argument list exits via SystemExit."""
    with mock.patch('sys.stderr'):
      self.assertRaises(SystemExit, sel_ldr.main, [])

  def testUsesHelper(self):
    """The bootstrap helper is prepended on Linux and only on Linux."""
    # Patch out the filesystem checks, the ELF sniffing, and the actual
    # subprocess launch so main() can run against a fake nexe.
    with mock.patch('subprocess.call') as mock_call, \
         mock.patch('os.path.exists'), \
         mock.patch('os.path.isfile'), \
         mock.patch('create_nmf.ParseElfHeader') as mock_parse, \
         mock.patch('getos.GetPlatform') as mock_getos:
      mock_parse.return_value = ('x8-64', False)

      # On Linux the loader must be launched through the bootstrap helper.
      mock_getos.return_value = 'linux'
      sel_ldr.main(['foo.nexe'])
      mock_parse.assert_called_once_with('foo.nexe')
      self.assertEqual(mock_call.call_count, 1)
      launched = mock_call.call_args[0][0]
      self.assertTrue('helper_bootstrap' in launched[0])

      # On any other platform the helper must not appear in the command.
      mock_getos.reset_mock()
      mock_parse.reset_mock()
      mock_call.reset_mock()
      mock_getos.return_value = 'win'
      sel_ldr.main(['foo.nexe'])
      mock_parse.assert_called_once_with('foo.nexe')
      self.assertEqual(mock_call.call_count, 1)
      launched = mock_call.call_args[0][0]
      self.assertTrue('helper_bootstrap' not in launched[0])


if __name__ == '__main__':
  unittest.main()
bsd-3-clause
dreamsxin/kbengine
kbe/src/lib/python/Lib/test/test_platform.py
88
12413
"""Tests for the platform module.

Most methods here are smoke tests: they call a platform.* query and only
verify that the call completes without raising; the returned value is
host-dependent and is deliberately not checked.
"""

from unittest import mock
import os
import platform
import subprocess
import sys
import tempfile
import unittest
import warnings

from test import support

class PlatformTest(unittest.TestCase):
    """Exercises the query and parsing helpers of the platform module."""

    def test_architecture(self):
        # Smoke test only: the result depends on the host interpreter.
        res = platform.architecture()

    @support.skip_unless_symlink
    def test_architecture_via_symlink(self): # issue3762
        # architecture() must report the same thing whether the interpreter
        # is invoked through its real path or through a symlink to it.
        # On Windows, the EXE needs to know where pythonXY.dll is at so we have
        # to add the directory to the path.
        if sys.platform == "win32":
            os.environ["Path"] = "{};{}".format(
                os.path.dirname(sys.executable), os.environ["Path"])

        def get(python):
            # Run `python` in a subprocess and capture its architecture report.
            cmd = [python, '-c',
                   'import platform; print(platform.architecture())']
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            return p.communicate()
        real = os.path.realpath(sys.executable)
        link = os.path.abspath(support.TESTFN)
        os.symlink(real, link)
        try:
            self.assertEqual(get(real), get(link))
        finally:
            os.remove(link)

    def test_platform(self):
        # Smoke test every combination of the (aliased, terse) flags.
        for aliased in (False, True):
            for terse in (False, True):
                res = platform.platform(aliased, terse)

    def test_system(self):
        # Smoke test only.
        res = platform.system()

    def test_node(self):
        # Smoke test only.
        res = platform.node()

    def test_release(self):
        # Smoke test only.
        res = platform.release()

    def test_version(self):
        # Smoke test only.
        res = platform.version()

    def test_machine(self):
        # Smoke test only.
        res = platform.machine()

    def test_processor(self):
        # Smoke test only.
        res = platform.processor()

    def setUp(self):
        # Save the interpreter-identity globals that test_sys_version mutates
        # so tearDown can restore them.
        self.save_version = sys.version
        self.save_mercurial = sys._mercurial
        self.save_platform = sys.platform

    def tearDown(self):
        # Restore the globals saved in setUp.
        sys.version = self.save_version
        sys._mercurial = self.save_mercurial
        sys.platform = self.save_platform

    def test_sys_version(self):
        """Feed canned sys.version strings through platform._sys_version and
        the public wrappers, checking each parsed component."""
        # Old test.
        for input, output in (
            ('2.4.3 (#1, Jun 21 2006, 13:54:21) \n[GCC 3.3.4 (pre 3.3.5 20040809)]',
             ('CPython', '2.4.3', '', '', '1', 'Jun 21 2006 13:54:21', 'GCC 3.3.4 (pre 3.3.5 20040809)')),
            ('IronPython 1.0.60816 on .NET 2.0.50727.42',
             ('IronPython', '1.0.60816', '', '', '', '', '.NET 2.0.50727.42')),
            ('IronPython 1.0 (1.0.61005.1977) on .NET 2.0.50727.42',
             ('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
            ):
            # branch and revision are not "parsed", but fetched
            # from sys._mercurial. Ignore them
            (name, version, branch, revision, buildno, builddate, compiler) \
                = platform._sys_version(input)
            self.assertEqual(
                (name, version, '', '', buildno, builddate, compiler), output)

        # Tests for python_implementation(), python_version(), python_branch(),
        # python_revision(), python_build(), and python_compiler().
        # Keys are (sys.version, sys._mercurial or None, sys.platform) triples;
        # values are the expected results of the six public query functions.
        sys_versions = {
            ("2.6.1 (r261:67515, Dec 6 2008, 15:26:00) \n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]",
             ('CPython', 'tags/r261', '67515'),
             self.save_platform)
            :
                ("CPython", "2.6.1", "tags/r261", "67515",
                 ('r261:67515', 'Dec 6 2008 15:26:00'),
                 'GCC 4.0.1 (Apple Computer, Inc. build 5370)'),

            ("IronPython 2.0 (2.0.0.0) on .NET 2.0.50727.3053",
             None,
             "cli")
            :
                ("IronPython", "2.0.0", "", "",
                 ("", ""),
                 ".NET 2.0.50727.3053"),

            ("2.6.1 (IronPython 2.6.1 (2.6.10920.0) on .NET 2.0.50727.1433)",
             None,
             "cli")
            :
                ("IronPython", "2.6.1", "", "",
                 ("", ""),
                 ".NET 2.0.50727.1433"),

            ("2.7.4 (IronPython 2.7.4 (2.7.0.40) on Mono 4.0.30319.1 (32-bit))",
             None,
             "cli")
            :
                ("IronPython", "2.7.4", "", "",
                 ("", ""),
                 "Mono 4.0.30319.1 (32-bit)"),

            ("2.5 (trunk:6107, Mar 26 2009, 13:02:18) \n[Java HotSpot(TM) Client VM (\"Apple Computer, Inc.\")]",
             ('Jython', 'trunk', '6107'),
             "java1.5.0_16")
            :
                ("Jython", "2.5.0", "trunk", "6107",
                 ('trunk:6107', 'Mar 26 2009'),
                 "java1.5.0_16"),

            ("2.5.2 (63378, Mar 26 2009, 18:03:29)\n[PyPy 1.0.0]",
             ('PyPy', 'trunk', '63378'),
             self.save_platform)
            :
                ("PyPy", "2.5.2", "trunk", "63378",
                 ('63378', 'Mar 26 2009'),
                 "")
            }
        for (version_tag, subversion, sys_platform), info in \
                sys_versions.items():
            # Impersonate each interpreter by patching the sys globals
            # (restored by tearDown), then query the public wrappers.
            sys.version = version_tag
            if subversion is None:
                if hasattr(sys, "_mercurial"):
                    del sys._mercurial
            else:
                sys._mercurial = subversion
            if sys_platform is not None:
                sys.platform = sys_platform
            self.assertEqual(platform.python_implementation(), info[0])
            self.assertEqual(platform.python_version(), info[1])
            self.assertEqual(platform.python_branch(), info[2])
            self.assertEqual(platform.python_revision(), info[3])
            self.assertEqual(platform.python_build(), info[4])
            self.assertEqual(platform.python_compiler(), info[5])

    def test_system_alias(self):
        # Smoke test only.
        res = platform.system_alias(
            platform.system(),
            platform.release(),
            platform.version(),
        )

    def test_uname(self):
        """uname() must be non-empty and its named fields must match the
        positional tuple elements."""
        res = platform.uname()
        self.assertTrue(any(res))
        self.assertEqual(res[0], res.system)
        self.assertEqual(res[1], res.node)
        self.assertEqual(res[2], res.release)
        self.assertEqual(res[3], res.version)
        self.assertEqual(res[4], res.machine)
        self.assertEqual(res[5], res.processor)

    @unittest.skipUnless(sys.platform.startswith('win'), "windows only test")
    def test_uname_win32_ARCHITEW6432(self):
        # Issue 7860: make sure we get architecture from the correct variable
        # on 64 bit Windows: if PROCESSOR_ARCHITEW6432 exists we should be
        # using it, per
        # http://blogs.msdn.com/david.wang/archive/2006/03/26/HOWTO-Detect-Process-Bitness.aspx
        try:
            with support.EnvironmentVarGuard() as environ:
                if 'PROCESSOR_ARCHITEW6432' in environ:
                    del environ['PROCESSOR_ARCHITEW6432']
                environ['PROCESSOR_ARCHITECTURE'] = 'foo'
                # Drop the cached result so uname() re-reads the environment.
                platform._uname_cache = None
                system, node, release, version, machine, processor = platform.uname()
                self.assertEqual(machine, 'foo')
                environ['PROCESSOR_ARCHITEW6432'] = 'bar'
                platform._uname_cache = None
                system, node, release, version, machine, processor = platform.uname()
                self.assertEqual(machine, 'bar')
        finally:
            # Never leave a poisoned cache behind for other tests.
            platform._uname_cache = None

    def test_java_ver(self):
        # Only assert real content when actually running on Jython.
        res = platform.java_ver()
        if sys.platform == 'java':
            self.assertTrue(all(res))

    def test_win32_ver(self):
        # Smoke test only.
        res = platform.win32_ver()

    def test_mac_ver(self):
        res = platform.mac_ver()

        if platform.uname().system == 'Darwin':
            # We're on a MacOSX system, check that
            # the right version information is returned
            fd = os.popen('sw_vers', 'r')
            real_ver = None
            for ln in fd:
                if ln.startswith('ProductVersion:'):
                    real_ver = ln.strip().split()[-1]
                    break
            fd.close()
            self.assertFalse(real_ver is None)
            result_list = res[0].split('.')
            expect_list = real_ver.split('.')
            len_diff = len(result_list) - len(expect_list)
            # On Snow Leopard, sw_vers reports 10.6.0 as 10.6
            if len_diff > 0:
                expect_list.extend(['0'] * len_diff)
            self.assertEqual(result_list, expect_list)

            # res[1] claims to contain
            # (version, dev_stage, non_release_version)
            # That information is no longer available
            self.assertEqual(res[1], ('', '', ''))

            if sys.byteorder == 'little':
                self.assertIn(res[2], ('i386', 'x86_64'))
            else:
                self.assertEqual(res[2], 'PowerPC')

    @unittest.skipUnless(sys.platform == 'darwin', "OSX only test")
    def test_mac_ver_with_fork(self):
        # Issue7895: platform.mac_ver() crashes when using fork without exec
        #
        # This test checks that the fix for that issue works.
        #
        pid = os.fork()
        if pid == 0:
            # child
            info = platform.mac_ver()
            os._exit(0)
        else:
            # parent
            cpid, sts = os.waitpid(pid, 0)
            self.assertEqual(cpid, pid)
            self.assertEqual(sts, 0)

    def test_dist(self):
        # Smoke test only.
        res = platform.dist()

    def test_libc_ver(self):
        # NOTE(review): this local import shadows the module-level `os`;
        # harmless but redundant.
        import os
        if os.path.isdir(sys.executable) and \
           os.path.exists(sys.executable+'.exe'):
            # Cygwin horror
            executable = sys.executable + '.exe'
        else:
            executable = sys.executable
        res = platform.libc_ver(executable)

    def test_parse_release_file(self):
        """Pin _parse_release_file() against known distro release strings."""
        for input, output in (
            # Examples of release file contents:
            ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64')),
            ('SUSE LINUX 10.1 (X86-64)', ('SUSE LINUX ', '10.1', 'X86-64')),
            ('SUSE LINUX 10.1 (i586)', ('SUSE LINUX ', '10.1', 'i586')),
            ('Fedora Core release 5 (Bordeaux)', ('Fedora Core', '5', 'Bordeaux')),
            ('Red Hat Linux release 8.0 (Psyche)', ('Red Hat Linux', '8.0', 'Psyche')),
            ('Red Hat Linux release 9 (Shrike)', ('Red Hat Linux', '9', 'Shrike')),
            ('Red Hat Enterprise Linux release 4 (Nahant)', ('Red Hat Enterprise Linux', '4', 'Nahant')),
            ('CentOS release 4', ('CentOS', '4', None)),
            ('Rocks release 4.2.1 (Cydonia)', ('Rocks', '4.2.1', 'Cydonia')),
            ('', ('', '', '')), # If there's nothing there.
            ):
            self.assertEqual(platform._parse_release_file(input), output)

    def test_popen(self):
        """platform.popen() (deprecated) round-trips both read and write."""
        mswindows = (sys.platform == "win32")

        # Read mode: capture a child interpreter's stdout.
        if mswindows:
            command = '"{}" -c "print(\'Hello\')"'.format(sys.executable)
        else:
            command = "'{}' -c 'print(\"Hello\")'".format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command) as stdout:
                hello = stdout.read().strip()
                stdout.close()
                self.assertEqual(hello, "Hello")

        # Write mode: the child exits with len(stdin data) as its status.
        data = 'plop'
        if mswindows:
            command = '"{}" -c "import sys; data=sys.stdin.read(); exit(len(data))"'
        else:
            command = "'{}' -c 'import sys; data=sys.stdin.read(); exit(len(data))'"
        command = command.format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command, 'w') as stdin:
                stdout = stdin.write(data)
                ret = stdin.close()
                self.assertIsNotNone(ret)
                # close() returns the raw wait status on POSIX; shift to get
                # the exit code. On Windows it is already the return code.
                if os.name == 'nt':
                    returncode = ret
                else:
                    returncode = ret >> 8
                self.assertEqual(returncode, len(data))

    def test_linux_distribution_encoding(self):
        # Issue #17429
        # A UTF-8 release file with non-ASCII text must decode correctly.
        with tempfile.TemporaryDirectory() as tempdir:
            filename = os.path.join(tempdir, 'fedora-release')
            with open(filename, 'w', encoding='utf-8') as f:
                f.write('Fedora release 19 (Schr\xf6dinger\u2019s Cat)\n')

            with mock.patch('platform._UNIXCONFDIR', tempdir):
                distname, version, distid = platform.linux_distribution()

            self.assertEqual(distname, 'Fedora')
            self.assertEqual(version, '19')
            self.assertEqual(distid, 'Schr\xf6dinger\u2019s Cat')


def test_main():
    # Entry point used by regrtest.
    support.run_unittest(
        PlatformTest
    )

if __name__ == '__main__':
    test_main()
lgpl-3.0
ProfessionalIT/maxigenios-website
sdk/google_appengine/lib/django-0.96/django/oldforms/__init__.py
32
42862
from django.core import validators from django.core.exceptions import PermissionDenied from django.utils.html import escape from django.conf import settings from django.utils.translation import gettext, ngettext FORM_FIELD_ID_PREFIX = 'id_' class EmptyValue(Exception): "This is raised when empty data is provided" pass class Manipulator(object): # List of permission strings. User must have at least one to manipulate. # None means everybody has permission. required_permission = '' def __init__(self): # List of FormField objects self.fields = [] def __getitem__(self, field_name): "Looks up field by field name; raises KeyError on failure" for field in self.fields: if field.field_name == field_name: return field raise KeyError, "Field %s not found\n%s" % (field_name, repr(self.fields)) def __delitem__(self, field_name): "Deletes the field with the given field name; raises KeyError on failure" for i, field in enumerate(self.fields): if field.field_name == field_name: del self.fields[i] return raise KeyError, "Field %s not found" % field_name def check_permissions(self, user): """Confirms user has required permissions to use this manipulator; raises PermissionDenied on failure.""" if self.required_permission is None: return if user.has_perm(self.required_permission): return raise PermissionDenied def prepare(self, new_data): """ Makes any necessary preparations to new_data, in place, before data has been validated. 
""" for field in self.fields: field.prepare(new_data) def get_validation_errors(self, new_data): "Returns dictionary mapping field_names to error-message lists" errors = {} self.prepare(new_data) for field in self.fields: errors.update(field.get_validation_errors(new_data)) val_name = 'validate_%s' % field.field_name if hasattr(self, val_name): val = getattr(self, val_name) try: field.run_validator(new_data, val) except (validators.ValidationError, validators.CriticalValidationError), e: errors.setdefault(field.field_name, []).extend(e.messages) # if field.is_required and not new_data.get(field.field_name, False): # errors.setdefault(field.field_name, []).append(gettext_lazy('This field is required.')) # continue # try: # validator_list = field.validator_list # if hasattr(self, 'validate_%s' % field.field_name): # validator_list.append(getattr(self, 'validate_%s' % field.field_name)) # for validator in validator_list: # if field.is_required or new_data.get(field.field_name, False) or hasattr(validator, 'always_test'): # try: # if hasattr(field, 'requires_data_list'): # validator(new_data.getlist(field.field_name), new_data) # else: # validator(new_data.get(field.field_name, ''), new_data) # except validators.ValidationError, e: # errors.setdefault(field.field_name, []).extend(e.messages) # # If a CriticalValidationError is raised, ignore any other ValidationErrors # # for this particular field # except validators.CriticalValidationError, e: # errors.setdefault(field.field_name, []).extend(e.messages) return errors def save(self, new_data): "Saves the changes and returns the new object" # changes is a dictionary-like object keyed by field_name raise NotImplementedError def do_html2python(self, new_data): """ Convert the data from HTML data types to Python datatypes, changing the object in place. This happens after validation but before storage. This must happen after validation because html2python functions aren't expected to deal with invalid input. 
""" for field in self.fields: field.convert_post_data(new_data) class FormWrapper(object): """ A wrapper linking a Manipulator to the template system. This allows dictionary-style lookups of formfields. It also handles feeding prepopulated data and validation error messages to the formfield objects. """ def __init__(self, manipulator, data=None, error_dict=None, edit_inline=True): self.manipulator = manipulator if data is None: data = {} if error_dict is None: error_dict = {} self.data = data self.error_dict = error_dict self._inline_collections = None self.edit_inline = edit_inline def __repr__(self): return repr(self.__dict__) def __getitem__(self, key): for field in self.manipulator.fields: if field.field_name == key: data = field.extract_data(self.data) return FormFieldWrapper(field, data, self.error_dict.get(field.field_name, [])) if self.edit_inline: self.fill_inline_collections() for inline_collection in self._inline_collections: # The 'orig_name' comparison is for backwards compatibility # with hand-crafted forms. if inline_collection.name == key or (':' not in key and inline_collection.orig_name == key): return inline_collection raise KeyError, "Could not find Formfield or InlineObjectCollection named %r" % key def fill_inline_collections(self): if not self._inline_collections: ic = [] related_objects = self.manipulator.get_related_objects() for rel_obj in related_objects: data = rel_obj.extract_data(self.data) inline_collection = InlineObjectCollection(self.manipulator, rel_obj, data, self.error_dict) ic.append(inline_collection) self._inline_collections = ic def has_errors(self): return self.error_dict != {} def _get_fields(self): try: return self._fields except AttributeError: self._fields = [self.__getitem__(field.field_name) for field in self.manipulator.fields] return self._fields fields = property(_get_fields) class FormFieldWrapper(object): "A bridge between the template system and an individual form field. Used by FormWrapper." 
def __init__(self, formfield, data, error_list): self.formfield, self.data, self.error_list = formfield, data, error_list self.field_name = self.formfield.field_name # for convenience in templates def __str__(self): "Renders the field" return str(self.formfield.render(self.data)) def __repr__(self): return '<FormFieldWrapper for "%s">' % self.formfield.field_name def field_list(self): """ Like __str__(), but returns a list. Use this when the field's render() method returns a list. """ return self.formfield.render(self.data) def errors(self): return self.error_list def html_error_list(self): if self.errors(): return '<ul class="errorlist"><li>%s</li></ul>' % '</li><li>'.join([escape(e) for e in self.errors()]) else: return '' def get_id(self): return self.formfield.get_id() class FormFieldCollection(FormFieldWrapper): "A utility class that gives the template access to a dict of FormFieldWrappers" def __init__(self, formfield_dict): self.formfield_dict = formfield_dict def __str__(self): return str(self.formfield_dict) def __getitem__(self, template_key): "Look up field by template key; raise KeyError on failure" return self.formfield_dict[template_key] def __repr__(self): return "<FormFieldCollection: %s>" % self.formfield_dict def errors(self): "Returns list of all errors in this collection's formfields" errors = [] for field in self.formfield_dict.values(): if hasattr(field, 'errors'): errors.extend(field.errors()) return errors def has_errors(self): return bool(len(self.errors())) def html_combined_error_list(self): return ''.join([field.html_error_list() for field in self.formfield_dict.values() if hasattr(field, 'errors')]) class InlineObjectCollection(object): "An object that acts like a sparse list of form field collections." 
def __init__(self, parent_manipulator, rel_obj, data, errors): self.parent_manipulator = parent_manipulator self.rel_obj = rel_obj self.data = data self.errors = errors self._collections = None self.name = rel_obj.name # This is the name used prior to fixing #1839. Needs for backwards # compatibility. self.orig_name = rel_obj.opts.module_name def __len__(self): self.fill() return self._collections.__len__() def __getitem__(self, k): self.fill() return self._collections.__getitem__(k) def __setitem__(self, k, v): self.fill() return self._collections.__setitem__(k,v) def __delitem__(self, k): self.fill() return self._collections.__delitem__(k) def __iter__(self): self.fill() return iter(self._collections.values()) def items(self): self.fill() return self._collections.items() def fill(self): if self._collections: return else: var_name = self.rel_obj.opts.object_name.lower() collections = {} orig = None if hasattr(self.parent_manipulator, 'original_object'): orig = self.parent_manipulator.original_object orig_list = self.rel_obj.get_list(orig) for i, instance in enumerate(orig_list): collection = {'original': instance} for f in self.rel_obj.editable_fields(): for field_name in f.get_manipulator_field_names(''): full_field_name = '%s.%d.%s' % (var_name, i, field_name) field = self.parent_manipulator[full_field_name] data = field.extract_data(self.data) errors = self.errors.get(full_field_name, []) collection[field_name] = FormFieldWrapper(field, data, errors) collections[i] = FormFieldCollection(collection) self._collections = collections class FormField(object): """Abstract class representing a form field. Classes that extend FormField should define the following attributes: field_name The field's name for use by programs. validator_list A list of validation tests (callback functions) that the data for this field must pass in order to be added or changed. is_required A Boolean. Is it a required field? 
Subclasses should also implement a render(data) method, which is responsible for rending the form field in XHTML. """ def __str__(self): return self.render('') def __repr__(self): return 'FormField "%s"' % self.field_name def prepare(self, new_data): "Hook for doing something to new_data (in place) before validation." pass def html2python(data): "Hook for converting an HTML datatype (e.g. 'on' for checkboxes) to a Python type" return data html2python = staticmethod(html2python) def render(self, data): raise NotImplementedError def get_member_name(self): if hasattr(self, 'member_name'): return self.member_name else: return self.field_name def extract_data(self, data_dict): if hasattr(self, 'requires_data_list') and hasattr(data_dict, 'getlist'): data = data_dict.getlist(self.get_member_name()) else: data = data_dict.get(self.get_member_name(), None) if data is None: data = '' return data def convert_post_data(self, new_data): name = self.get_member_name() if new_data.has_key(self.field_name): d = new_data.getlist(self.field_name) try: converted_data = [self.__class__.html2python(data) for data in d] except ValueError: converted_data = d new_data.setlist(name, converted_data) else: try: #individual fields deal with None values themselves new_data.setlist(name, [self.__class__.html2python(None)]) except EmptyValue: new_data.setlist(name, []) def run_validator(self, new_data, validator): if self.is_required or new_data.get(self.field_name, False) or hasattr(validator, 'always_test'): if hasattr(self, 'requires_data_list'): validator(new_data.getlist(self.field_name), new_data) else: validator(new_data.get(self.field_name, ''), new_data) def get_validation_errors(self, new_data): errors = {} if self.is_required and not new_data.get(self.field_name, False): errors.setdefault(self.field_name, []).append(gettext('This field is required.')) return errors try: for validator in self.validator_list: try: self.run_validator(new_data, validator) except 
validators.ValidationError, e: errors.setdefault(self.field_name, []).extend(e.messages) # If a CriticalValidationError is raised, ignore any other ValidationErrors # for this particular field except validators.CriticalValidationError, e: errors.setdefault(self.field_name, []).extend(e.messages) return errors def get_id(self): "Returns the HTML 'id' attribute for this form field." return FORM_FIELD_ID_PREFIX + self.field_name #################### # GENERIC WIDGETS # #################### class TextField(FormField): input_type = "text" def __init__(self, field_name, length=30, maxlength=None, is_required=False, validator_list=None, member_name=None): if validator_list is None: validator_list = [] self.field_name = field_name self.length, self.maxlength = length, maxlength self.is_required = is_required self.validator_list = [self.isValidLength, self.hasNoNewlines] + validator_list if member_name != None: self.member_name = member_name def isValidLength(self, data, form): if data and self.maxlength and len(data.decode(settings.DEFAULT_CHARSET)) > self.maxlength: raise validators.ValidationError, ngettext("Ensure your text is less than %s character.", "Ensure your text is less than %s characters.", self.maxlength) % self.maxlength def hasNoNewlines(self, data, form): if data and '\n' in data: raise validators.ValidationError, gettext("Line breaks are not allowed here.") def render(self, data): if data is None: data = '' maxlength = '' if self.maxlength: maxlength = 'maxlength="%s" ' % self.maxlength if isinstance(data, unicode): data = data.encode(settings.DEFAULT_CHARSET) return '<input type="%s" id="%s" class="v%s%s" name="%s" size="%s" value="%s" %s/>' % \ (self.input_type, self.get_id(), self.__class__.__name__, self.is_required and ' required' or '', self.field_name, self.length, escape(data), maxlength) def html2python(data): return data html2python = staticmethod(html2python) class PasswordField(TextField): input_type = "password" class 
LargeTextField(TextField): def __init__(self, field_name, rows=10, cols=40, is_required=False, validator_list=None, maxlength=None): if validator_list is None: validator_list = [] self.field_name = field_name self.rows, self.cols, self.is_required = rows, cols, is_required self.validator_list = validator_list[:] if maxlength: self.validator_list.append(self.isValidLength) self.maxlength = maxlength def render(self, data): if data is None: data = '' if isinstance(data, unicode): data = data.encode(settings.DEFAULT_CHARSET) return '<textarea id="%s" class="v%s%s" name="%s" rows="%s" cols="%s">%s</textarea>' % \ (self.get_id(), self.__class__.__name__, self.is_required and ' required' or '', self.field_name, self.rows, self.cols, escape(data)) class HiddenField(FormField): def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: validator_list = [] self.field_name, self.is_required = field_name, is_required self.validator_list = validator_list[:] def render(self, data): return '<input type="hidden" id="%s" name="%s" value="%s" />' % \ (self.get_id(), self.field_name, escape(data)) class CheckboxField(FormField): def __init__(self, field_name, checked_by_default=False, validator_list=None, is_required=False): if validator_list is None: validator_list = [] self.field_name = field_name self.checked_by_default = checked_by_default self.is_required = is_required self.validator_list = validator_list[:] def render(self, data): checked_html = '' if data or (data is '' and self.checked_by_default): checked_html = ' checked="checked"' return '<input type="checkbox" id="%s" class="v%s" name="%s"%s />' % \ (self.get_id(), self.__class__.__name__, self.field_name, checked_html) def html2python(data): "Convert value from browser ('on' or '') to a Python boolean" if data == 'on': return True return False html2python = staticmethod(html2python) class SelectField(FormField): def __init__(self, field_name, choices=None, size=1, 
is_required=False, validator_list=None, member_name=None): if validator_list is None: validator_list = [] if choices is None: choices = [] self.field_name = field_name # choices is a list of (value, human-readable key) tuples because order matters self.choices, self.size, self.is_required = choices, size, is_required self.validator_list = [self.isValidChoice] + validator_list if member_name != None: self.member_name = member_name def render(self, data): output = ['<select id="%s" class="v%s%s" name="%s" size="%s">' % \ (self.get_id(), self.__class__.__name__, self.is_required and ' required' or '', self.field_name, self.size)] str_data = str(data) # normalize to string for value, display_name in self.choices: selected_html = '' if str(value) == str_data: selected_html = ' selected="selected"' output.append(' <option value="%s"%s>%s</option>' % (escape(value), selected_html, escape(display_name))) output.append(' </select>') return '\n'.join(output) def isValidChoice(self, data, form): str_data = str(data) str_choices = [str(item[0]) for item in self.choices] if str_data not in str_choices: raise validators.ValidationError, gettext("Select a valid choice; '%(data)s' is not in %(choices)s.") % {'data': str_data, 'choices': str_choices} class NullSelectField(SelectField): "This SelectField converts blank fields to None" def html2python(data): if not data: return None return data html2python = staticmethod(html2python) class RadioSelectField(FormField): def __init__(self, field_name, choices=None, ul_class='', is_required=False, validator_list=None, member_name=None): if validator_list is None: validator_list = [] if choices is None: choices = [] self.field_name = field_name # choices is a list of (value, human-readable key) tuples because order matters self.choices, self.is_required = choices, is_required self.validator_list = [self.isValidChoice] + validator_list self.ul_class = ul_class if member_name != None: self.member_name = member_name def render(self, data): 
""" Returns a special object, RadioFieldRenderer, that is iterable *and* has a default str() rendered output. This allows for flexible use in templates. You can just use the default rendering: {{ field_name }} ...which will output the radio buttons in an unordered list. Or, you can manually traverse each radio option for special layout: {% for option in field_name.field_list %} {{ option.field }} {{ option.label }}<br /> {% endfor %} """ class RadioFieldRenderer: def __init__(self, datalist, ul_class): self.datalist, self.ul_class = datalist, ul_class def __str__(self): "Default str() output for this radio field -- a <ul>" output = ['<ul%s>' % (self.ul_class and ' class="%s"' % self.ul_class or '')] output.extend(['<li>%s %s</li>' % (d['field'], d['label']) for d in self.datalist]) output.append('</ul>') return ''.join(output) def __iter__(self): for d in self.datalist: yield d def __len__(self): return len(self.datalist) datalist = [] str_data = str(data) # normalize to string for i, (value, display_name) in enumerate(self.choices): selected_html = '' if str(value) == str_data: selected_html = ' checked="checked"' datalist.append({ 'value': value, 'name': display_name, 'field': '<input type="radio" id="%s" name="%s" value="%s"%s/>' % \ (self.get_id() + '_' + str(i), self.field_name, value, selected_html), 'label': '<label for="%s">%s</label>' % \ (self.get_id() + '_' + str(i), display_name), }) return RadioFieldRenderer(datalist, self.ul_class) def isValidChoice(self, data, form): str_data = str(data) str_choices = [str(item[0]) for item in self.choices] if str_data not in str_choices: raise validators.ValidationError, gettext("Select a valid choice; '%(data)s' is not in %(choices)s.") % {'data':str_data, 'choices':str_choices} class NullBooleanField(SelectField): "This SelectField provides 'Yes', 'No' and 'Unknown', mapping results to True, False or None" def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: 
validator_list = [] SelectField.__init__(self, field_name, choices=[('1', _('Unknown')), ('2', _('Yes')), ('3', _('No'))], is_required=is_required, validator_list=validator_list) def render(self, data): if data is None: data = '1' elif data == True: data = '2' elif data == False: data = '3' return SelectField.render(self, data) def html2python(data): return {None: None, '1': None, '2': True, '3': False}[data] html2python = staticmethod(html2python) class SelectMultipleField(SelectField): requires_data_list = True def render(self, data): output = ['<select id="%s" class="v%s%s" name="%s" size="%s" multiple="multiple">' % \ (self.get_id(), self.__class__.__name__, self.is_required and ' required' or '', self.field_name, self.size)] str_data_list = map(str, data) # normalize to strings for value, choice in self.choices: selected_html = '' if str(value) in str_data_list: selected_html = ' selected="selected"' output.append(' <option value="%s"%s>%s</option>' % (escape(value), selected_html, escape(choice))) output.append(' </select>') return '\n'.join(output) def isValidChoice(self, field_data, all_data): # data is something like ['1', '2', '3'] str_choices = [str(item[0]) for item in self.choices] for val in map(str, field_data): if val not in str_choices: raise validators.ValidationError, gettext("Select a valid choice; '%(data)s' is not in %(choices)s.") % {'data':val, 'choices':str_choices} def html2python(data): if data is None: raise EmptyValue return data html2python = staticmethod(html2python) class CheckboxSelectMultipleField(SelectMultipleField): """ This has an identical interface to SelectMultipleField, except the rendered widget is different. Instead of a <select multiple>, this widget outputs a <ul> of <input type="checkbox">es. Of course, that results in multiple form elements for the same "single" field, so this class's prepare() method flattens the split data elements back into the single list that validators, renderers and save() expect. 
""" requires_data_list = True def __init__(self, field_name, choices=None, ul_class='', validator_list=None): if validator_list is None: validator_list = [] if choices is None: choices = [] self.ul_class = ul_class SelectMultipleField.__init__(self, field_name, choices, size=1, is_required=False, validator_list=validator_list) def prepare(self, new_data): # new_data has "split" this field into several fields, so flatten it # back into a single list. data_list = [] for value, readable_value in self.choices: if new_data.get('%s%s' % (self.field_name, value), '') == 'on': data_list.append(value) new_data.setlist(self.field_name, data_list) def render(self, data): output = ['<ul%s>' % (self.ul_class and ' class="%s"' % self.ul_class or '')] str_data_list = map(str, data) # normalize to strings for value, choice in self.choices: checked_html = '' if str(value) in str_data_list: checked_html = ' checked="checked"' field_name = '%s%s' % (self.field_name, value) output.append('<li><input type="checkbox" id="%s" class="v%s" name="%s"%s value="on" /> <label for="%s">%s</label></li>' % \ (self.get_id() + escape(value), self.__class__.__name__, field_name, checked_html, self.get_id() + escape(value), choice)) output.append('</ul>') return '\n'.join(output) #################### # FILE UPLOADS # #################### class FileUploadField(FormField): def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: validator_list = [] self.field_name, self.is_required = field_name, is_required self.validator_list = [self.isNonEmptyFile] + validator_list def isNonEmptyFile(self, field_data, all_data): try: content = field_data['content'] except TypeError: raise validators.CriticalValidationError, gettext("No file was submitted. 
Check the encoding type on the form.") if not content: raise validators.CriticalValidationError, gettext("The submitted file is empty.") def render(self, data): return '<input type="file" id="%s" class="v%s" name="%s" />' % \ (self.get_id(), self.__class__.__name__, self.field_name) def html2python(data): if data is None: raise EmptyValue return data html2python = staticmethod(html2python) class ImageUploadField(FileUploadField): "A FileUploadField that raises CriticalValidationError if the uploaded file isn't an image." def __init__(self, *args, **kwargs): FileUploadField.__init__(self, *args, **kwargs) self.validator_list.insert(0, self.isValidImage) def isValidImage(self, field_data, all_data): try: validators.isValidImage(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages #################### # INTEGERS/FLOATS # #################### class IntegerField(TextField): def __init__(self, field_name, length=10, maxlength=None, is_required=False, validator_list=None, member_name=None): if validator_list is None: validator_list = [] validator_list = [self.isInteger] + validator_list if member_name is not None: self.member_name = member_name TextField.__init__(self, field_name, length, maxlength, is_required, validator_list) def isInteger(self, field_data, all_data): try: validators.isInteger(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def html2python(data): if data == '' or data is None: return None return int(data) html2python = staticmethod(html2python) class SmallIntegerField(IntegerField): def __init__(self, field_name, length=5, maxlength=5, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isSmallInteger] + validator_list IntegerField.__init__(self, field_name, length, maxlength, is_required, validator_list) def isSmallInteger(self, field_data, all_data): if not -32768 <= 
int(field_data) <= 32767: raise validators.CriticalValidationError, gettext("Enter a whole number between -32,768 and 32,767.") class PositiveIntegerField(IntegerField): def __init__(self, field_name, length=10, maxlength=None, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isPositive] + validator_list IntegerField.__init__(self, field_name, length, maxlength, is_required, validator_list) def isPositive(self, field_data, all_data): if int(field_data) < 0: raise validators.CriticalValidationError, gettext("Enter a positive number.") class PositiveSmallIntegerField(IntegerField): def __init__(self, field_name, length=5, maxlength=None, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isPositiveSmall] + validator_list IntegerField.__init__(self, field_name, length, maxlength, is_required, validator_list) def isPositiveSmall(self, field_data, all_data): if not 0 <= int(field_data) <= 32767: raise validators.CriticalValidationError, gettext("Enter a whole number between 0 and 32,767.") class FloatField(TextField): def __init__(self, field_name, max_digits, decimal_places, is_required=False, validator_list=None): if validator_list is None: validator_list = [] self.max_digits, self.decimal_places = max_digits, decimal_places validator_list = [self.isValidFloat] + validator_list TextField.__init__(self, field_name, max_digits+2, max_digits+2, is_required, validator_list) def isValidFloat(self, field_data, all_data): v = validators.IsValidFloat(self.max_digits, self.decimal_places) try: v(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def html2python(data): if data == '' or data is None: return None return float(data) html2python = staticmethod(html2python) #################### # DATES AND TIMES # #################### class DatetimeField(TextField): """A FormField that automatically 
converts its data to a datetime.datetime object. The data should be in the format YYYY-MM-DD HH:MM:SS.""" def __init__(self, field_name, length=30, maxlength=None, is_required=False, validator_list=None): if validator_list is None: validator_list = [] self.field_name = field_name self.length, self.maxlength = length, maxlength self.is_required = is_required self.validator_list = [validators.isValidANSIDatetime] + validator_list def html2python(data): "Converts the field into a datetime.datetime object" import datetime try: date, time = data.split() y, m, d = date.split('-') timebits = time.split(':') h, mn = timebits[:2] if len(timebits) > 2: s = int(timebits[2]) else: s = 0 return datetime.datetime(int(y), int(m), int(d), int(h), int(mn), s) except ValueError: return None html2python = staticmethod(html2python) class DateField(TextField): """A FormField that automatically converts its data to a datetime.date object. The data should be in the format YYYY-MM-DD.""" def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidDate] + validator_list TextField.__init__(self, field_name, length=10, maxlength=10, is_required=is_required, validator_list=validator_list) def isValidDate(self, field_data, all_data): try: validators.isValidANSIDate(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def html2python(data): "Converts the field into a datetime.date object" import time, datetime try: time_tuple = time.strptime(data, '%Y-%m-%d') return datetime.date(*time_tuple[0:3]) except (ValueError, TypeError): return None html2python = staticmethod(html2python) class TimeField(TextField): """A FormField that automatically converts its data to a datetime.time object. 
The data should be in the format HH:MM:SS or HH:MM:SS.mmmmmm.""" def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidTime] + validator_list TextField.__init__(self, field_name, length=8, maxlength=8, is_required=is_required, validator_list=validator_list) def isValidTime(self, field_data, all_data): try: validators.isValidANSITime(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def html2python(data): "Converts the field into a datetime.time object" import time, datetime try: part_list = data.split('.') try: time_tuple = time.strptime(part_list[0], '%H:%M:%S') except ValueError: # seconds weren't provided time_tuple = time.strptime(part_list[0], '%H:%M') t = datetime.time(*time_tuple[3:6]) if (len(part_list) == 2): t = t.replace(microsecond=int(part_list[1])) return t except (ValueError, TypeError, AttributeError): return None html2python = staticmethod(html2python) #################### # INTERNET-RELATED # #################### class EmailField(TextField): "A convenience FormField for validating e-mail addresses" def __init__(self, field_name, length=50, maxlength=75, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidEmail] + validator_list TextField.__init__(self, field_name, length, maxlength=maxlength, is_required=is_required, validator_list=validator_list) def isValidEmail(self, field_data, all_data): try: validators.isValidEmail(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages class URLField(TextField): "A convenience FormField for validating URLs" def __init__(self, field_name, length=50, maxlength=200, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidURL] + validator_list TextField.__init__(self, 
field_name, length=length, maxlength=maxlength, is_required=is_required, validator_list=validator_list) def isValidURL(self, field_data, all_data): try: validators.isValidURL(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages class IPAddressField(TextField): def __init__(self, field_name, length=15, maxlength=15, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidIPAddress] + validator_list TextField.__init__(self, field_name, length=length, maxlength=maxlength, is_required=is_required, validator_list=validator_list) def isValidIPAddress(self, field_data, all_data): try: validators.isValidIPAddress4(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def html2python(data): return data or None html2python = staticmethod(html2python) #################### # MISCELLANEOUS # #################### class FilePathField(SelectField): "A SelectField whose choices are the files in a given directory." def __init__(self, field_name, path, match=None, recursive=False, is_required=False, validator_list=None): import os from django.db.models import BLANK_CHOICE_DASH if match is not None: import re match_re = re.compile(match) choices = not is_required and BLANK_CHOICE_DASH[:] or [] if recursive: for root, dirs, files in os.walk(path): for f in files: if match is None or match_re.search(f): choices.append((os.path.join(root, f), f)) else: try: for f in os.listdir(path): full_file = os.path.join(path, f) if os.path.isfile(full_file) and (match is None or match_re.search(f)): choices.append((full_file, f)) except OSError: pass SelectField.__init__(self, field_name, choices, 1, is_required, validator_list) class PhoneNumberField(TextField): "A convenience FormField for validating phone numbers (e.g. 
'630-555-1234')" def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidPhone] + validator_list TextField.__init__(self, field_name, length=12, maxlength=12, is_required=is_required, validator_list=validator_list) def isValidPhone(self, field_data, all_data): try: validators.isValidPhone(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages class USStateField(TextField): "A convenience FormField for validating U.S. states (e.g. 'IL')" def __init__(self, field_name, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isValidUSState] + validator_list TextField.__init__(self, field_name, length=2, maxlength=2, is_required=is_required, validator_list=validator_list) def isValidUSState(self, field_data, all_data): try: validators.isValidUSState(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def html2python(data): if data: return data.upper() # Should always be stored in upper case return data html2python = staticmethod(html2python) class CommaSeparatedIntegerField(TextField): "A convenience FormField for validating comma-separated integer fields" def __init__(self, field_name, maxlength=None, is_required=False, validator_list=None): if validator_list is None: validator_list = [] validator_list = [self.isCommaSeparatedIntegerList] + validator_list TextField.__init__(self, field_name, length=20, maxlength=maxlength, is_required=is_required, validator_list=validator_list) def isCommaSeparatedIntegerList(self, field_data, all_data): try: validators.isCommaSeparatedIntegerList(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages def render(self, data): if data is None: data = '' elif isinstance(data, (list, tuple)): data = ','.join(data) 
return super(CommaSeparatedIntegerField, self).render(data) class RawIdAdminField(CommaSeparatedIntegerField): def html2python(data): if data: return data.split(',') else: return [] html2python = staticmethod(html2python) class XMLLargeTextField(LargeTextField): """ A LargeTextField with an XML validator. The schema_path argument is the full path to a Relax NG compact schema to validate against. """ def __init__(self, field_name, schema_path, **kwargs): self.schema_path = schema_path kwargs.setdefault('validator_list', []).insert(0, self.isValidXML) LargeTextField.__init__(self, field_name, **kwargs) def isValidXML(self, field_data, all_data): v = validators.RelaxNGCompact(self.schema_path) try: v(field_data, all_data) except validators.ValidationError, e: raise validators.CriticalValidationError, e.messages
mit
pdellaert/ansible
lib/ansible/modules/cloud/azure/azure_rm_aks.py
14
33985
#!/usr/bin/python # # Copyright (c) 2018 Sertac Ozercan, <seozerca@microsoft.com> # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: azure_rm_aks version_added: "2.6" short_description: Manage a managed Azure Container Service (AKS) instance description: - Create, update and delete a managed Azure Container Service (AKS) instance. options: resource_group: description: - Name of a resource group where the managed Azure Container Services (AKS) exists or will be created. required: true name: description: - Name of the managed Azure Container Services (AKS) instance. required: true state: description: - Assert the state of the AKS. Use C(present) to create or update an AKS and C(absent) to delete it. default: present choices: - absent - present location: description: - Valid azure location. Defaults to location of the resource group. dns_prefix: description: - DNS prefix specified when creating the managed cluster. kubernetes_version: description: - Version of Kubernetes specified when creating the managed cluster. linux_profile: description: - The Linux profile suboptions. suboptions: admin_username: description: - The Admin Username for the cluster. required: true ssh_key: description: - The Public SSH Key used to access the cluster. required: true agent_pool_profiles: description: - The agent pool profile suboptions. suboptions: name: description: - Unique name of the agent pool profile in the context of the subscription and resource group. required: true count: description: - Number of agents (VMs) to host docker containers. - Allowed values must be in the range of C(1) to C(100) (inclusive). required: true vm_size: description: - The VM Size of each of the Agent Pool VM's (e.g. 
C(Standard_F1) / C(Standard_D2v2)). required: true os_disk_size_gb: description: - Size of the OS disk. service_principal: description: - The service principal suboptions. suboptions: client_id: description: - The ID for the Service Principal. required: true client_secret: description: - The secret password associated with the service principal. required: true enable_rbac: description: - Enable RBAC. - Existing non-RBAC enabled AKS clusters cannot currently be updated for RBAC use. type: bool default: no version_added: "2.8" network_profile: description: - Profile of network configuration. suboptions: network_plugin: description: - Network plugin used for building Kubernetes network. - This property cannot been changed. - With C(kubenet), nodes get an IP address from the Azure virtual network subnet. - AKS features such as Virtual Nodes or network policies aren't supported with C(kubenet). - C(azure) enables Azure Container Networking Interface(CNI), every pod gets an IP address from the subnet and can be accessed directly. default: kubenet choices: - azure - kubenet network_policy: description: Network policy used for building Kubernetes network. choices: - azure - calico pod_cidr: description: - A CIDR notation IP range from which to assign pod IPs when I(network_plugin=kubenet) is used. - It should be a large address space that isn't in use elsewhere in your network environment. - This address range must be large enough to accommodate the number of nodes that you expect to scale up to. default: "10.244.0.0/16" service_cidr: description: - A CIDR notation IP range from which to assign service cluster IPs. - It must not overlap with any Subnet IP ranges. - It should be the *.10 address of your service IP address range. default: "10.0.0.0/16" dns_service_ip: description: - An IP address assigned to the Kubernetes DNS service. - It must be within the Kubernetes service address range specified in serviceCidr. 
default: "10.0.0.10" docker_bridge_cidr: description: - A CIDR notation IP range assigned to the Docker bridge network. - It must not overlap with any Subnet IP ranges or the Kubernetes service address range. default: "172.17.0.1/16" version_added: "2.8" aad_profile: description: - Profile of Azure Active Directory configuration. suboptions: client_app_id: description: The client AAD application ID. server_app_id: description: The server AAD application ID. server_app_secret: description: The server AAD application secret. tenant_id: description: - The AAD tenant ID to use for authentication. - If not specified, will use the tenant of the deployment subscription. version_added: "2.8" addon: description: - Profile of managed cluster add-on. - Key can be C(http_application_routing), C(monitoring), C(virtual_node). - Value must be a dict contains a bool variable C(enabled). type: dict suboptions: http_application_routing: description: - The HTTP application routing solution makes it easy to access applications that are deployed to your cluster. type: dict suboptions: enabled: description: - Whether the solution enabled. type: bool monitoring: description: - It gives you performance visibility by collecting memory and processor metrics from controllers, nodes, and containers that are available in Kubernetes through the Metrics API. type: dict suboptions: enabled: description: - Whether the solution enabled. type: bool log_analytics_workspace_resource_id: description: - Where to store the container metrics. virtual_node: description: - With virtual nodes, you have quick provisioning of pods, and only pay per second for their execution time. - You don't need to wait for Kubernetes cluster autoscaler to deploy VM compute nodes to run the additional pods. type: dict suboptions: enabled: description: - Whether the solution enabled. type: bool subnet_resource_id: description: - Subnet associated to the cluster. 
version_added: "2.8" extends_documentation_fragment: - azure - azure_tags author: - Sertac Ozercan (@sozercan) - Yuwei Zhou (@yuwzho) ''' EXAMPLES = ''' - name: Create a managed Azure Container Services (AKS) instance azure_rm_aks: name: myAKS location: eastus resource_group: myResourceGroup dns_prefix: akstest kubernetes_version: 1.14.6 linux_profile: admin_username: azureuser ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA... service_principal: client_id: "cf72ca99-f6b9-4004-b0e0-bee10c521948" client_secret: "mySPNp@ssw0rd!" agent_pool_profiles: - name: default count: 5 vm_size: Standard_D2_v2 tags: Environment: Production - name: Remove a managed Azure Container Services (AKS) instance azure_rm_aks: name: myAKS resource_group: myResourceGroup state: absent ''' RETURN = ''' state: description: Current state of the Azure Container Service (AKS). returned: always type: dict example: agent_pool_profiles: - count: 1 dns_prefix: Null name: default os_disk_size_gb: Null os_type: Linux ports: Null storage_profile: ManagedDisks vm_size: Standard_DS1_v2 vnet_subnet_id: Null changed: false dns_prefix: aks9860bdcd89 id: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourcegroups/myResourceGroup/providers/Microsoft.ContainerService/managedClusters/aks9860bdc" kube_config: "......" kubernetes_version: 1.14.6 linux_profile: admin_username: azureuser ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADA..... 
location: eastus name: aks9860bdc provisioning_state: Succeeded service_principal_profile: client_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx tags: {} type: Microsoft.ContainerService/ManagedClusters ''' from ansible.module_utils.azure_rm_common import AzureRMModuleBase try: from msrestazure.azure_exceptions import CloudError except ImportError: # This is handled in azure_rm_common pass def create_aks_dict(aks): ''' Helper method to deserialize a ContainerService to a dict :param: aks: ContainerService or AzureOperationPoller with the Azure callback object :return: dict with the state on Azure ''' return dict( id=aks.id, name=aks.name, location=aks.location, dns_prefix=aks.dns_prefix, kubernetes_version=aks.kubernetes_version, tags=aks.tags, linux_profile=create_linux_profile_dict(aks.linux_profile), service_principal_profile=create_service_principal_profile_dict( aks.service_principal_profile), provisioning_state=aks.provisioning_state, agent_pool_profiles=create_agent_pool_profiles_dict( aks.agent_pool_profiles), type=aks.type, kube_config=aks.kube_config, enable_rbac=aks.enable_rbac, network_profile=create_network_profiles_dict(aks.network_profile), aad_profile=create_aad_profiles_dict(aks.aad_profile), addon=create_addon_dict(aks.addon_profiles), fqdn=aks.fqdn, node_resource_group=aks.node_resource_group ) def create_network_profiles_dict(network): return dict( network_plugin=network.network_plugin, network_policy=network.network_policy, pod_cidr=network.pod_cidr, service_cidr=network.service_cidr, dns_service_ip=network.dns_service_ip, docker_bridge_cidr=network.docker_bridge_cidr ) if network else dict() def create_aad_profiles_dict(aad): return aad.as_dict() if aad else dict() def create_addon_dict(addon): result = dict() addon = addon or dict() for key in addon.keys(): result[key] = addon[key].config result[key]['enabled'] = addon[key].enabled return result def create_linux_profile_dict(linuxprofile): ''' Helper method to deserialize a 
ContainerServiceLinuxProfile to a dict :param: linuxprofile: ContainerServiceLinuxProfile with the Azure callback object :return: dict with the state on Azure ''' return dict( ssh_key=linuxprofile.ssh.public_keys[0].key_data, admin_username=linuxprofile.admin_username ) def create_service_principal_profile_dict(serviceprincipalprofile): ''' Helper method to deserialize a ContainerServiceServicePrincipalProfile to a dict Note: For security reason, the service principal secret is skipped on purpose. :param: serviceprincipalprofile: ContainerServiceServicePrincipalProfile with the Azure callback object :return: dict with the state on Azure ''' return dict( client_id=serviceprincipalprofile.client_id ) def create_agent_pool_profiles_dict(agentpoolprofiles): ''' Helper method to deserialize a ContainerServiceAgentPoolProfile to a dict :param: agentpoolprofiles: ContainerServiceAgentPoolProfile with the Azure callback object :return: dict with the state on Azure ''' return [dict( count=profile.count, vm_size=profile.vm_size, name=profile.name, os_disk_size_gb=profile.os_disk_size_gb, storage_profile=profile.storage_profile, vnet_subnet_id=profile.vnet_subnet_id, os_type=profile.os_type ) for profile in agentpoolprofiles] if agentpoolprofiles else None def create_addon_profiles_spec(): ''' Helper method to parse the ADDONS dictionary and generate the addon spec ''' spec = dict() for key in ADDONS.keys(): values = ADDONS[key] addon_spec = dict( enabled=dict(type='bool', default=True) ) configs = values.get('config') or {} for item in configs.keys(): addon_spec[item] = dict(type='str', aliases=[configs[item]], required=True) spec[key] = dict(type='dict', options=addon_spec, aliases=[values['name']]) return spec ADDONS = { 'http_application_routing': dict(name='httpApplicationRouting'), 'monitoring': dict(name='omsagent', config={'log_analytics_workspace_resource_id': 'logAnalyticsWorkspaceResourceID'}), 'virtual_node': dict(name='aciConnector', config={'subnet_resource_id': 
'SubnetName'}) } linux_profile_spec = dict( admin_username=dict(type='str', required=True), ssh_key=dict(type='str', required=True) ) service_principal_spec = dict( client_id=dict(type='str', required=True), client_secret=dict(type='str', no_log=True) ) agent_pool_profile_spec = dict( name=dict(type='str', required=True), count=dict(type='int', required=True), vm_size=dict(type='str', required=True), os_disk_size_gb=dict(type='int'), dns_prefix=dict(type='str'), ports=dict(type='list', elements='int'), storage_profiles=dict(type='str', choices=[ 'StorageAccount', 'ManagedDisks']), vnet_subnet_id=dict(type='str'), os_type=dict(type='str', choices=['Linux', 'Windows']) ) network_profile_spec = dict( network_plugin=dict(type='str', choices=['azure', 'kubenet']), network_policy=dict(type='str'), pod_cidr=dict(type='str'), service_cidr=dict(type='str'), dns_service_ip=dict(type='str'), docker_bridge_cidr=dict(type='str') ) aad_profile_spec = dict( client_app_id=dict(type='str'), server_app_id=dict(type='str'), server_app_secret=dict(type='str', no_log=True), tenant_id=dict(type='str') ) class AzureRMManagedCluster(AzureRMModuleBase): """Configuration class for an Azure RM container service (AKS) resource""" def __init__(self): self.module_arg_spec = dict( resource_group=dict( type='str', required=True ), name=dict( type='str', required=True ), state=dict( type='str', default='present', choices=['present', 'absent'] ), location=dict( type='str' ), dns_prefix=dict( type='str' ), kubernetes_version=dict( type='str' ), linux_profile=dict( type='dict', options=linux_profile_spec ), agent_pool_profiles=dict( type='list', elements='dict', options=agent_pool_profile_spec ), service_principal=dict( type='dict', options=service_principal_spec ), enable_rbac=dict( type='bool', default=False ), network_profile=dict( type='dict', options=network_profile_spec ), aad_profile=dict( type='dict', options=aad_profile_spec ), addon=dict( type='dict', options=create_addon_profiles_spec() ) 
) self.resource_group = None self.name = None self.location = None self.dns_prefix = None self.kubernetes_version = None self.tags = None self.state = None self.linux_profile = None self.agent_pool_profiles = None self.service_principal = None self.enable_rbac = False self.network_profile = None self.aad_profile = None self.addon = None required_if = [ ('state', 'present', [ 'dns_prefix', 'linux_profile', 'agent_pool_profiles', 'service_principal']) ] self.results = dict(changed=False) super(AzureRMManagedCluster, self).__init__(derived_arg_spec=self.module_arg_spec, supports_check_mode=True, supports_tags=True, required_if=required_if) def exec_module(self, **kwargs): """Main module execution method""" for key in list(self.module_arg_spec.keys()) + ['tags']: setattr(self, key, kwargs[key]) resource_group = None to_be_updated = False update_tags = False resource_group = self.get_resource_group(self.resource_group) if not self.location: self.location = resource_group.location response = self.get_aks() # Check if the AKS instance already present in the RG if self.state == 'present': # For now Agent Pool cannot be more than 1, just remove this part in the future if it change agentpoolcount = len(self.agent_pool_profiles) if agentpoolcount > 1: self.fail('You cannot specify more than one agent_pool_profiles currently') available_versions = self.get_all_versions() if not response: to_be_updated = True if self.kubernetes_version not in available_versions.keys(): self.fail("Unsupported kubernetes version. 
Expected one of {0} but got {1}".format(available_versions.keys(), self.kubernetes_version)) else: self.results = response self.results['changed'] = False self.log('Results : {0}'.format(response)) update_tags, response['tags'] = self.update_tags(response['tags']) if response['provisioning_state'] == "Succeeded": def is_property_changed(profile, property, ignore_case=False): base = response[profile].get(property) new = getattr(self, profile).get(property) if ignore_case: return base.lower() != new.lower() else: return base != new # Cannot Update the SSH Key for now // Let service to handle it if is_property_changed('linux_profile', 'ssh_key'): self.log(("Linux Profile Diff SSH, Was {0} / Now {1}" .format(response['linux_profile']['ssh_key'], self.linux_profile.get('ssh_key')))) to_be_updated = True # self.module.warn("linux_profile.ssh_key cannot be updated") # self.log("linux_profile response : {0}".format(response['linux_profile'].get('admin_username'))) # self.log("linux_profile self : {0}".format(self.linux_profile[0].get('admin_username'))) # Cannot Update the Username for now // Let service to handle it if is_property_changed('linux_profile', 'admin_username'): self.log(("Linux Profile Diff User, Was {0} / Now {1}" .format(response['linux_profile']['admin_username'], self.linux_profile.get('admin_username')))) to_be_updated = True # self.module.warn("linux_profile.admin_username cannot be updated") # Cannot have more that one agent pool profile for now if len(response['agent_pool_profiles']) != len(self.agent_pool_profiles): self.log("Agent Pool count is diff, need to updated") to_be_updated = True if response['kubernetes_version'] != self.kubernetes_version: upgrade_versions = available_versions.get(response['kubernetes_version']) or available_versions.keys() if upgrade_versions and self.kubernetes_version not in upgrade_versions: self.fail('Cannot upgrade kubernetes version to {0}, supported value are {1}'.format(self.kubernetes_version, upgrade_versions)) 
to_be_updated = True if response['enable_rbac'] != self.enable_rbac: to_be_updated = True if self.network_profile: for key in self.network_profile.keys(): original = response['network_profile'].get(key) or '' if self.network_profile[key] and self.network_profile[key].lower() != original.lower(): to_be_updated = True def compare_addon(origin, patch, config): if not patch: return True if not origin: return False if origin['enabled'] != patch['enabled']: return False config = config or dict() for key in config.keys(): if origin.get(config[key]) != patch.get(key): return False return True if self.addon: for key in ADDONS.keys(): addon_name = ADDONS[key]['name'] if not compare_addon(response['addon'].get(addon_name), self.addon.get(key), ADDONS[key].get('config')): to_be_updated = True for profile_result in response['agent_pool_profiles']: matched = False for profile_self in self.agent_pool_profiles: if profile_result['name'] == profile_self['name']: matched = True os_disk_size_gb = profile_self.get('os_disk_size_gb') or profile_result['os_disk_size_gb'] if profile_result['count'] != profile_self['count'] \ or profile_result['vm_size'] != profile_self['vm_size'] \ or profile_result['os_disk_size_gb'] != os_disk_size_gb \ or profile_result['vnet_subnet_id'] != profile_self.get('vnet_subnet_id', profile_result['vnet_subnet_id']): self.log(("Agent Profile Diff - Origin {0} / Update {1}".format(str(profile_result), str(profile_self)))) to_be_updated = True if not matched: self.log("Agent Pool not found") to_be_updated = True if to_be_updated: self.log("Need to Create / Update the AKS instance") if not self.check_mode: self.results = self.create_update_aks() self.log("Creation / Update done") self.results['changed'] = True elif update_tags: self.log("Need to Update the AKS tags") if not self.check_mode: self.results['tags'] = self.update_aks_tags() self.results['changed'] = True return self.results elif self.state == 'absent' and response: self.log("Need to Delete the AKS 
instance") self.results['changed'] = True if self.check_mode: return self.results self.delete_aks() self.log("AKS instance deleted") return self.results def create_update_aks(self): ''' Creates or updates a managed Azure container service (AKS) with the specified configuration of agents. :return: deserialized AKS instance state dictionary ''' self.log("Creating / Updating the AKS instance {0}".format(self.name)) agentpools = [] if self.agent_pool_profiles: agentpools = [self.create_agent_pool_profile_instance(profile) for profile in self.agent_pool_profiles] service_principal_profile = self.create_service_principal_profile_instance(self.service_principal) parameters = self.managedcluster_models.ManagedCluster( location=self.location, dns_prefix=self.dns_prefix, kubernetes_version=self.kubernetes_version, tags=self.tags, service_principal_profile=service_principal_profile, agent_pool_profiles=agentpools, linux_profile=self.create_linux_profile_instance(self.linux_profile), enable_rbac=self.enable_rbac, network_profile=self.create_network_profile_instance(self.network_profile), aad_profile=self.create_aad_profile_instance(self.aad_profile), addon_profiles=self.create_addon_profile_instance(self.addon) ) # self.log("service_principal_profile : {0}".format(parameters.service_principal_profile)) # self.log("linux_profile : {0}".format(parameters.linux_profile)) # self.log("ssh from yaml : {0}".format(results.get('linux_profile')[0])) # self.log("ssh : {0}".format(parameters.linux_profile.ssh)) # self.log("agent_pool_profiles : {0}".format(parameters.agent_pool_profiles)) try: poller = self.managedcluster_client.managed_clusters.create_or_update(self.resource_group, self.name, parameters) response = self.get_poller_result(poller) response.kube_config = self.get_aks_kubeconfig() return create_aks_dict(response) except CloudError as exc: self.log('Error attempting to create the AKS instance.') self.fail("Error creating the AKS instance: {0}".format(exc.message)) def 
update_aks_tags(self): try: poller = self.managedcluster_client.managed_clusters.update_tags(self.resource_group, self.name, self.tags) response = self.get_poller_result(poller) return response.tags except CloudError as exc: self.fail("Error attempting to update AKS tags: {0}".format(exc.message)) def delete_aks(self): ''' Deletes the specified managed container service (AKS) in the specified subscription and resource group. :return: True ''' self.log("Deleting the AKS instance {0}".format(self.name)) try: poller = self.managedcluster_client.managed_clusters.delete(self.resource_group, self.name) self.get_poller_result(poller) return True except CloudError as e: self.log('Error attempting to delete the AKS instance.') self.fail("Error deleting the AKS instance: {0}".format(e.message)) return False def get_aks(self): ''' Gets the properties of the specified container service. :return: deserialized AKS instance state dictionary ''' self.log("Checking if the AKS instance {0} is present".format(self.name)) try: response = self.managedcluster_client.managed_clusters.get(self.resource_group, self.name) self.log("Response : {0}".format(response)) self.log("AKS instance : {0} found".format(response.name)) response.kube_config = self.get_aks_kubeconfig() return create_aks_dict(response) except CloudError: self.log('Did not find the AKS instance.') return False def get_all_versions(self): try: result = dict() response = self.containerservice_client.container_services.list_orchestrators(self.location, resource_type='managedClusters') orchestrators = response.orchestrators for item in orchestrators: result[item.orchestrator_version] = [x.orchestrator_version for x in item.upgrades] if item.upgrades else [] return result except Exception as exc: self.fail('Error when getting AKS supported kubernetes version list for location {0} - {1}'.format(self.location, exc.message or str(exc))) def get_aks_kubeconfig(self): ''' Gets kubeconfig for the specified AKS instance. 
:return: AKS instance kubeconfig ''' access_profile = self.managedcluster_client.managed_clusters.get_access_profile(resource_group_name=self.resource_group, resource_name=self.name, role_name="clusterUser") return access_profile.kube_config.decode('utf-8') def create_agent_pool_profile_instance(self, agentpoolprofile): ''' Helper method to serialize a dict to a ManagedClusterAgentPoolProfile :param: agentpoolprofile: dict with the parameters to setup the ManagedClusterAgentPoolProfile :return: ManagedClusterAgentPoolProfile ''' return self.managedcluster_models.ManagedClusterAgentPoolProfile(**agentpoolprofile) def create_service_principal_profile_instance(self, spnprofile): ''' Helper method to serialize a dict to a ManagedClusterServicePrincipalProfile :param: spnprofile: dict with the parameters to setup the ManagedClusterServicePrincipalProfile :return: ManagedClusterServicePrincipalProfile ''' return self.managedcluster_models.ManagedClusterServicePrincipalProfile( client_id=spnprofile['client_id'], secret=spnprofile['client_secret'] ) def create_linux_profile_instance(self, linuxprofile): ''' Helper method to serialize a dict to a ContainerServiceLinuxProfile :param: linuxprofile: dict with the parameters to setup the ContainerServiceLinuxProfile :return: ContainerServiceLinuxProfile ''' return self.managedcluster_models.ContainerServiceLinuxProfile( admin_username=linuxprofile['admin_username'], ssh=self.managedcluster_models.ContainerServiceSshConfiguration(public_keys=[ self.managedcluster_models.ContainerServiceSshPublicKey(key_data=str(linuxprofile['ssh_key']))]) ) def create_network_profile_instance(self, network): return self.managedcluster_models.ContainerServiceNetworkProfile(**network) if network else None def create_aad_profile_instance(self, aad): return self.managedcluster_models.ManagedClusterAADProfile(**aad) if aad else None def create_addon_profile_instance(self, addon): result = dict() addon = addon or {} for key in addon.keys(): if not 
ADDONS.get(key): self.fail('Unsupported addon {0}'.format(key)) if addon.get(key): name = ADDONS[key]['name'] config_spec = ADDONS[key].get('config') or dict() config = addon[key] for v in config_spec.keys(): config[config_spec[v]] = config[v] result[name] = self.managedcluster_models.ManagedClusterAddonProfile(config=config, enabled=config['enabled']) return result def main(): """Main execution""" AzureRMManagedCluster() if __name__ == '__main__': main()
gpl-3.0
fentas/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/common.py
139
10217
# Copyright 2012, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """This file must not depend on any module specific to the WebSocket protocol. """ from mod_pywebsocket import http_header_util # Additional log level definitions. LOGLEVEL_FINE = 9 # Constants indicating WebSocket protocol version. 
VERSION_HIXIE75 = -1 VERSION_HYBI00 = 0 VERSION_HYBI01 = 1 VERSION_HYBI02 = 2 VERSION_HYBI03 = 2 VERSION_HYBI04 = 4 VERSION_HYBI05 = 5 VERSION_HYBI06 = 6 VERSION_HYBI07 = 7 VERSION_HYBI08 = 8 VERSION_HYBI09 = 8 VERSION_HYBI10 = 8 VERSION_HYBI11 = 8 VERSION_HYBI12 = 8 VERSION_HYBI13 = 13 VERSION_HYBI14 = 13 VERSION_HYBI15 = 13 VERSION_HYBI16 = 13 VERSION_HYBI17 = 13 # Constants indicating WebSocket protocol latest version. VERSION_HYBI_LATEST = VERSION_HYBI13 # Port numbers DEFAULT_WEB_SOCKET_PORT = 80 DEFAULT_WEB_SOCKET_SECURE_PORT = 443 # Schemes WEB_SOCKET_SCHEME = 'ws' WEB_SOCKET_SECURE_SCHEME = 'wss' # Frame opcodes defined in the spec. OPCODE_CONTINUATION = 0x0 OPCODE_TEXT = 0x1 OPCODE_BINARY = 0x2 OPCODE_CLOSE = 0x8 OPCODE_PING = 0x9 OPCODE_PONG = 0xa # UUIDs used by HyBi 04 and later opening handshake and frame masking. WEBSOCKET_ACCEPT_UUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' # Opening handshake header names and expected values. UPGRADE_HEADER = 'Upgrade' WEBSOCKET_UPGRADE_TYPE = 'websocket' WEBSOCKET_UPGRADE_TYPE_HIXIE75 = 'WebSocket' CONNECTION_HEADER = 'Connection' UPGRADE_CONNECTION_TYPE = 'Upgrade' HOST_HEADER = 'Host' ORIGIN_HEADER = 'Origin' SEC_WEBSOCKET_ORIGIN_HEADER = 'Sec-WebSocket-Origin' SEC_WEBSOCKET_KEY_HEADER = 'Sec-WebSocket-Key' SEC_WEBSOCKET_ACCEPT_HEADER = 'Sec-WebSocket-Accept' SEC_WEBSOCKET_VERSION_HEADER = 'Sec-WebSocket-Version' SEC_WEBSOCKET_PROTOCOL_HEADER = 'Sec-WebSocket-Protocol' SEC_WEBSOCKET_EXTENSIONS_HEADER = 'Sec-WebSocket-Extensions' SEC_WEBSOCKET_DRAFT_HEADER = 'Sec-WebSocket-Draft' SEC_WEBSOCKET_KEY1_HEADER = 'Sec-WebSocket-Key1' SEC_WEBSOCKET_KEY2_HEADER = 'Sec-WebSocket-Key2' SEC_WEBSOCKET_LOCATION_HEADER = 'Sec-WebSocket-Location' # Extensions DEFLATE_FRAME_EXTENSION = 'deflate-frame' PERFRAME_COMPRESSION_EXTENSION = 'perframe-compress' PERMESSAGE_COMPRESSION_EXTENSION = 'permessage-compress' PERMESSAGE_DEFLATE_EXTENSION = 'permessage-deflate' X_WEBKIT_DEFLATE_FRAME_EXTENSION = 'x-webkit-deflate-frame' 
X_WEBKIT_PERMESSAGE_COMPRESSION_EXTENSION = 'x-webkit-permessage-compress' MUX_EXTENSION = 'mux_DO_NOT_USE' # Status codes # Code STATUS_NO_STATUS_RECEIVED, STATUS_ABNORMAL_CLOSURE, and # STATUS_TLS_HANDSHAKE are pseudo codes to indicate specific error cases. # Could not be used for codes in actual closing frames. # Application level errors must use codes in the range # STATUS_USER_REGISTERED_BASE to STATUS_USER_PRIVATE_MAX. The codes in the # range STATUS_USER_REGISTERED_BASE to STATUS_USER_REGISTERED_MAX are managed # by IANA. Usually application must define user protocol level errors in the # range STATUS_USER_PRIVATE_BASE to STATUS_USER_PRIVATE_MAX. STATUS_NORMAL_CLOSURE = 1000 STATUS_GOING_AWAY = 1001 STATUS_PROTOCOL_ERROR = 1002 STATUS_UNSUPPORTED_DATA = 1003 STATUS_NO_STATUS_RECEIVED = 1005 STATUS_ABNORMAL_CLOSURE = 1006 STATUS_INVALID_FRAME_PAYLOAD_DATA = 1007 STATUS_POLICY_VIOLATION = 1008 STATUS_MESSAGE_TOO_BIG = 1009 STATUS_MANDATORY_EXTENSION = 1010 STATUS_INTERNAL_ENDPOINT_ERROR = 1011 STATUS_TLS_HANDSHAKE = 1015 STATUS_USER_REGISTERED_BASE = 3000 STATUS_USER_REGISTERED_MAX = 3999 STATUS_USER_PRIVATE_BASE = 4000 STATUS_USER_PRIVATE_MAX = 4999 # Following definitions are aliases to keep compatibility. Applications must # not use these obsoleted definitions anymore. STATUS_NORMAL = STATUS_NORMAL_CLOSURE STATUS_UNSUPPORTED = STATUS_UNSUPPORTED_DATA STATUS_CODE_NOT_AVAILABLE = STATUS_NO_STATUS_RECEIVED STATUS_ABNORMAL_CLOSE = STATUS_ABNORMAL_CLOSURE STATUS_INVALID_FRAME_PAYLOAD = STATUS_INVALID_FRAME_PAYLOAD_DATA STATUS_MANDATORY_EXT = STATUS_MANDATORY_EXTENSION # HTTP status codes HTTP_STATUS_BAD_REQUEST = 400 HTTP_STATUS_FORBIDDEN = 403 HTTP_STATUS_NOT_FOUND = 404 def is_control_opcode(opcode): return (opcode >> 3) == 1 class ExtensionParameter(object): """Holds information about an extension which is exchanged on extension negotiation in opening handshake. 
""" def __init__(self, name): self._name = name # TODO(tyoshino): Change the data structure to more efficient one such # as dict when the spec changes to say like # - Parameter names must be unique # - The order of parameters is not significant self._parameters = [] def name(self): return self._name def add_parameter(self, name, value): self._parameters.append((name, value)) def get_parameters(self): return self._parameters def get_parameter_names(self): return [name for name, unused_value in self._parameters] def has_parameter(self, name): for param_name, param_value in self._parameters: if param_name == name: return True return False def get_parameter_value(self, name): for param_name, param_value in self._parameters: if param_name == name: return param_value class ExtensionParsingException(Exception): def __init__(self, name): super(ExtensionParsingException, self).__init__(name) def _parse_extension_param(state, definition, allow_quoted_string): param_name = http_header_util.consume_token(state) if param_name is None: raise ExtensionParsingException('No valid parameter name found') http_header_util.consume_lwses(state) if not http_header_util.consume_string(state, '='): definition.add_parameter(param_name, None) return http_header_util.consume_lwses(state) if allow_quoted_string: # TODO(toyoshim): Add code to validate that parsed param_value is token param_value = http_header_util.consume_token_or_quoted_string(state) else: param_value = http_header_util.consume_token(state) if param_value is None: raise ExtensionParsingException( 'No valid parameter value found on the right-hand side of ' 'parameter %r' % param_name) definition.add_parameter(param_name, param_value) def _parse_extension(state, allow_quoted_string): extension_token = http_header_util.consume_token(state) if extension_token is None: return None extension = ExtensionParameter(extension_token) while True: http_header_util.consume_lwses(state) if not http_header_util.consume_string(state, ';'): 
break http_header_util.consume_lwses(state) try: _parse_extension_param(state, extension, allow_quoted_string) except ExtensionParsingException, e: raise ExtensionParsingException( 'Failed to parse parameter for %r (%r)' % (extension_token, e)) return extension def parse_extensions(data, allow_quoted_string=False): """Parses Sec-WebSocket-Extensions header value returns a list of ExtensionParameter objects. Leading LWSes must be trimmed. """ state = http_header_util.ParsingState(data) extension_list = [] while True: extension = _parse_extension(state, allow_quoted_string) if extension is not None: extension_list.append(extension) http_header_util.consume_lwses(state) if http_header_util.peek(state) is None: break if not http_header_util.consume_string(state, ','): raise ExtensionParsingException( 'Failed to parse Sec-WebSocket-Extensions header: ' 'Expected a comma but found %r' % http_header_util.peek(state)) http_header_util.consume_lwses(state) if len(extension_list) == 0: raise ExtensionParsingException( 'No valid extension entry found') return extension_list def format_extension(extension): """Formats an ExtensionParameter object.""" formatted_params = [extension.name()] for param_name, param_value in extension.get_parameters(): if param_value is None: formatted_params.append(param_name) else: quoted_value = http_header_util.quote_if_necessary(param_value) formatted_params.append('%s=%s' % (param_name, quoted_value)) return '; '.join(formatted_params) def format_extensions(extension_list): """Formats a list of ExtensionParameter objects.""" formatted_extension_list = [] for extension in extension_list: formatted_extension_list.append(format_extension(extension)) return ', '.join(formatted_extension_list) # vi:sts=4 sw=4 et
bsd-3-clause
flynx/pli
pli/misc/evil.py
1
1190
#======================================================================= __version__ = '''0.0.00''' __sub_version__ = '''20070725015812''' #----------------------------------------------------------------------- import sys #-----------------------------------------------------------------set--- # An evil equivalent for C expression: 'while x=f()' # # Usage: # # sample # A=range(10) # while set(x=A.pop()): # print x # # NOTE: this was originally posted by: Sebastien Keim at the ASPN Python CookBook # for the original post see: # <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/202234> # the original function was modified by me (on: 20030619170645). # NOTE: this will return the list of values (right side of the = # operator) # def set(**kw): ''' provides the abbility to do assignment in an expression the folowing C and Python expressions are equivelent: if (a=1, b=f()) //.... if set(a=1, b=f())[-1]: #.... ''' a = sys._getframe(1) a.f_locals.update(kw) return kw.values() #======================================================================= # vim:set ts=4 sw=4 nowrap :
bsd-3-clause
aimas/TuniErp-8.0
addons/analytic/__openerp__.py
1
1894
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name' : 'Analytic Accounting', 'version': '1.1', 'author' : 'OpenERP SA', 'website' : 'https://www.tunierp.com/page/accounting', 'category': 'Hidden/Dependency', 'depends' : ['base', 'decimal_precision', 'mail'], 'description': """ Module for defining analytic accounting object. =============================================== In OpenERP, analytic accounts are linked to general accounts but are treated totally independently. So, you can enter various different analytic operations that have no counterpart in the general financial accounts. """, 'data': [ 'security/analytic_security.xml', 'security/ir.model.access.csv', 'analytic_sequence.xml', 'analytic_view.xml', 'analytic_data.xml', ], 'demo': [], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
yelongyu/chihu
venv/lib/python2.7/site-packages/dominate/util.py
25
3902
'''
Utility classes for creating dynamic html documents
'''

__license__ = '''
This file is part of Dominate.

Dominate is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.

Dominate is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with Dominate.  If not, see
<http://www.gnu.org/licenses/>.
'''

import re

from .dom_tag import dom_tag

try:
  # Python 2: the names already exist.
  basestring = basestring
except NameError:
  # Python 3: alias the removed py2 names to their py3 equivalents.
  basestring = str
  unichr = chr


def include(f):
  '''
  Includes the contents of a file on disk. Takes a filename.
  The contents are returned wrapped in raw(), i.e. NOT escaped.
  '''
  fl = open(f, 'r')
  data = fl.read()
  fl.close()
  return raw(data)


def system(cmd, data=None):
  '''
  Pipes the output of a program.
  NOTE(review): the command is run through the shell (shell=True) --
  never pass untrusted input as cmd.
  '''
  import subprocess
  s = subprocess.Popen(cmd, shell=True,
      stdout=subprocess.PIPE,
      stdin=subprocess.PIPE)
  out, err = s.communicate(data)
  return out.decode('utf8')


def escape(data, quote=True):  # adapted from the stdlib cgi module
  '''
  Escapes special characters into their html entities.
  Replace special characters "&", "<" and ">" by HTML-safe sequences.
  If the optional flag quote is true, the quotation mark character (")
  is also translated.

  This is used to escape content that appears in the body of an HTML
  document.
  '''
  data = data.replace("&", "&amp;")  # Must be done first!
  data = data.replace("<", "&lt;")
  data = data.replace(">", "&gt;")
  if quote:
    data = data.replace('"', "&quot;")
  return data


# Minimal named-entity table used by unescape(); maps entity name to the
# Unicode code point it decodes to.
_unescape = {
  'quot': 34,
  'amp': 38,
  'lt': 60,
  'gt': 62,
  'nbsp': 32,  # NOTE: decoded to a plain space, not U+00A0
  # more here
  # http://www.w3.org/TR/html4/sgml/entities.html
  'yuml': 255,
}

# Alias for escape(); text.__init__ below takes a parameter named
# `escape` that shadows the function, so it calls this alias instead.
str_escape = escape


def unescape(data):
  '''
  Unescapes html entities. The opposite of escape().
  Numeric entities (&#NNN;) are decoded by code point; named entities
  are looked up in _unescape, unknown names decode to '?'.
  '''
  cc = re.compile('&(?:(?:#(\d+))|([^;]+));')

  result = []
  m = cc.search(data)
  while m:
    # Text before the entity passes through unchanged.
    result.append(data[0:m.start()])
    d = m.group(1)
    if d:
      # Numeric entity: group(1) holds the decimal code point.
      d = int(d)
      result.append(unichr(d))
    else:
      # Named entity: group(2) holds the name.
      d = _unescape.get(m.group(2), ord('?'))
      result.append(unichr(d))
    data = data[m.end():]
    m = cc.search(data)

  result.append(data)
  return ''.join(result)


# Characters percent-encoded by url_escape.
_reserved = ";/?:@&=+$, "
_replace_map = dict((c, '%%%2X' % ord(c)) for c in _reserved)


def url_escape(data):
  # Percent-encode only the characters listed in _reserved.
  return ''.join(_replace_map.get(c, c) for c in data)


def url_unescape(data):
  # Decode every %XX sequence back to its character.
  return re.sub('%([0-9a-fA-F]{2})',
    lambda m: unichr(int(m.group(1), 16)), data)


class lazy(dom_tag):
  '''
  Delays function execution until rendered.
  '''
  def __new__(_cls, *args, **kwargs):
    '''
    Need to reset this special method or else dom_tag will think it's
    being used as a decorator.

    This means lazy() can't be used as a decorator, but thinking about
    when you might want that just confuses me.
    '''
    return object.__new__(_cls)

  def __init__(self, func, *args, **kwargs):
    super(lazy, self).__init__()
    self.func = func
    self.args = args
    self.kwargs = kwargs

  def _render(self, sb, *a, **kw):
    # Call the deferred function only now, at render time.
    r = self.func(*self.args, **self.kwargs)
    sb.append(str(r))


# TODO rename this to raw?
class text(dom_tag):
  '''
  Just a string. Useful for inside context managers.
  By default the string is HTML-escaped on construction; pass
  escape=False to store it verbatim.
  '''
  is_pretty = False
  is_inline = True

  def __init__(self, _text, escape=True):
    super(text, self).__init__()
    if escape:
      # `escape` here is the boolean parameter, hence the alias.
      self.text = str_escape(_text)
    else:
      self.text = _text

  def _render(self, sb, *a, **kw):
    sb.append(self.text)
    return sb


def raw(s):
  '''
  Inserts a raw string into the DOM. Unsafe: the caller is responsible
  for escaping.
  '''
  return text(s, escape=False)
gpl-3.0
mapnik/mapnik
scons/scons-local-4.1.0/SCons/compat/_scons_dbm.py
4
1644
# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""dbm compatibility stub for Python versions that don't have dbm.

This does *not* provide real dbm functionality.  It is just enough of
a stub that the whichdb.whichdb() implementation in the various 2.X
versions of Python won't blow up even if dbm wasn't compiled in.
"""


class error(Exception):
    """Raised for any attempted use of this stub dbm module."""
    pass


def open(*args, **kw):
    """Always fail: this stub cannot open a database.

    Accepts (and ignores) any arguments so it matches the real
    ``dbm.open(filename[, flag[, mode]])`` signature.

    Raises:
        error: unconditionally.
    """
    # Carry a message so callers that log the exception can tell why the
    # open failed, instead of seeing a bare ``error()``.
    raise error("dbm not available in this build of Python")

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
lgpl-2.1
edx/django-oauth2-provider
tests/settings.py
2
2583
# Django settings for example project.
# Minimal configuration used only to run the django-oauth2-provider
# test suite; not suitable for production (DEBUG on, fixed SECRET_KEY).
import os

DEBUG = True

ADMINS = (
    ('Tester', 'test@example.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '%s/db.sqlite' % os.path.dirname(__file__),  # Or path to database file if using sqlite3.
        'USER': '',      # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',      # Set to empty string for default. Not used with sqlite3.
    }
}

# SHA1 hasher -- presumably chosen to make test-suite password hashing
# fast, not for security; confirm before copying elsewhere.
PASSWORD_HASHERS = ['django.contrib.auth.hashers.SHA1PasswordHasher']

SITE_ID = 1

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Make this unique, and don't share it with anybody.
SECRET_KEY = 'secret'

ROOT_URLCONF = 'tests.urls'

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'provider',
    'provider.oauth2',
    'django_nose',
]

# When we drop support for Django 1.8 we can remove MIDDLEWARE_CLASSES
MIDDLEWARE_CLASSES = MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
]

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'debug': DEBUG,
            'context_processors': (
                'django.contrib.auth.context_processors.auth',
            )
        }
    },
]

TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
mit
erikr/django
tests/template_tests/syntax_tests/test_template_tag.py
521
2594
from django.template import TemplateSyntaxError
from django.test import SimpleTestCase

from ..utils import setup


class TemplateTagTests(SimpleTestCase):
    """Tests for the ``{% templatetag %}`` tag, which renders template
    syntax characters ("{%", "}}", "{#", ...) as literal output.

    Each ``@setup`` dict maps a template name to its source; ``self.engine``
    is presumably installed by the ``setup`` decorator from ``..utils`` --
    see that module.  Tests 05 and 06 cover the error cases (missing or
    unknown argument).
    """

    @setup({'templatetag01': '{% templatetag openblock %}'})
    def test_templatetag01(self):
        output = self.engine.render_to_string('templatetag01')
        self.assertEqual(output, '{%')

    @setup({'templatetag02': '{% templatetag closeblock %}'})
    def test_templatetag02(self):
        output = self.engine.render_to_string('templatetag02')
        self.assertEqual(output, '%}')

    @setup({'templatetag03': '{% templatetag openvariable %}'})
    def test_templatetag03(self):
        output = self.engine.render_to_string('templatetag03')
        self.assertEqual(output, '{{')

    @setup({'templatetag04': '{% templatetag closevariable %}'})
    def test_templatetag04(self):
        output = self.engine.render_to_string('templatetag04')
        self.assertEqual(output, '}}')

    # No argument: a TemplateSyntaxError at template-load time.
    @setup({'templatetag05': '{% templatetag %}'})
    def test_templatetag05(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('templatetag05')

    # Unknown argument: also a TemplateSyntaxError at load time.
    @setup({'templatetag06': '{% templatetag foo %}'})
    def test_templatetag06(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('templatetag06')

    @setup({'templatetag07': '{% templatetag openbrace %}'})
    def test_templatetag07(self):
        output = self.engine.render_to_string('templatetag07')
        self.assertEqual(output, '{')

    @setup({'templatetag08': '{% templatetag closebrace %}'})
    def test_templatetag08(self):
        output = self.engine.render_to_string('templatetag08')
        self.assertEqual(output, '}')

    @setup({'templatetag09': '{% templatetag openbrace %}{% templatetag openbrace %}'})
    def test_templatetag09(self):
        output = self.engine.render_to_string('templatetag09')
        self.assertEqual(output, '{{')

    @setup({'templatetag10': '{% templatetag closebrace %}{% templatetag closebrace %}'})
    def test_templatetag10(self):
        output = self.engine.render_to_string('templatetag10')
        self.assertEqual(output, '}}')

    @setup({'templatetag11': '{% templatetag opencomment %}'})
    def test_templatetag11(self):
        output = self.engine.render_to_string('templatetag11')
        self.assertEqual(output, '{#')

    @setup({'templatetag12': '{% templatetag closecomment %}'})
    def test_templatetag12(self):
        output = self.engine.render_to_string('templatetag12')
        self.assertEqual(output, '#}')
bsd-3-clause
LudovicRousseau/PyKCS11-debug
samples/ec_signature.py
2
1923
#!/usr/bin/env python # Copyright (C) 2015 Roman Pasechnik # Copyright (C) 2018 Ludovic Rousseau # Copyright (C) 2019 Atte Pellikka # # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. from __future__ import print_function from PyKCS11 import * import binascii pkcs11 = PyKCS11Lib() pkcs11.load() # define environment variable PYKCS11LIB=YourPKCS11Lib # get 1st slot slot = pkcs11.getSlotList(tokenPresent=True)[0] session = pkcs11.openSession(slot, CKF_SERIAL_SESSION | CKF_RW_SESSION) session.login("1234") priv_search_tmpl = [(CKA_CLASS, CKO_PRIVATE_KEY), (CKA_KEY_TYPE, CKK_ECDSA)] pub_search_tmpl = [(CKA_CLASS, CKO_PUBLIC_KEY), (CKA_KEY_TYPE, CKK_ECDSA)] # "Hello world" in hex toSign = "48656c6c6f20776f726c640d0a" mechanism = Mechanism(CKM_ECDSA, None) # find first private key and compute signature privKey = session.findObjects(priv_search_tmpl)[0] signature = session.sign(privKey, binascii.unhexlify(toSign), mechanism) print("\nsignature: {}".format(binascii.hexlify(bytearray(signature)))) # find first public key and verify signature pubKey = session.findObjects(pub_search_tmpl)[0] result = session.verify(pubKey, binascii.unhexlify(toSign), signature, mechanism) print("\nVerified:", result) # logout session.logout() session.closeSession()
gpl-2.0
meteorcloudy/tensorflow
tensorflow/contrib/model_pruning/python/learning.py
32
7928
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrapper around tf-slim's training code contrib/slim/python/slim/learning.py
to support training of pruned models

*******************************************************************
* A simple working training script with support for model pruning *
*******************************************************************

  # Load data and create the model:
  images, labels = LoadData(...)
  predictions = MyModel(images)

  # Define the loss:
  slim.losses.log_loss(predictions, labels)
  total_loss = slim.losses.get_total_loss()

  # Define the optimizer:
  optimizer = tf.train.MomentumOptimizer(FLAGS.learning_rate, FLAGS.momentum)

  # Create the train_op
  train_op = slim.learning.create_train_op(total_loss, optimizer)

  # Set up sparsity
  sparsity = pruning.setup_gradual_sparsity(self.global_step)

  # Create mask update op
  mask_update_op = pruning.add_mask_update_ip(sparsity)

  # Run training.
  learning.train(train_op, my_log_dir, mask_update_op)

see contrib/slim/python/slim/learning.py for additional examples
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib import slim as _slim

# Sentinel forwarded verbatim to _slim.learning.train; presumably matches
# tf-slim's own _USE_DEFAULT marker -- confirm against the slim version in use.
_USE_DEFAULT = 0
train_step = _slim.learning.train_step


def train(train_op,
          logdir,
          mask_update_op,
          train_step_fn=train_step,
          train_step_kwargs=_USE_DEFAULT,
          log_every_n_steps=1,
          graph=None,
          master='',
          is_chief=True,
          global_step=None,
          number_of_steps=None,
          init_op=_USE_DEFAULT,
          init_feed_dict=None,
          local_init_op=_USE_DEFAULT,
          init_fn=None,
          ready_op=_USE_DEFAULT,
          summary_op=_USE_DEFAULT,
          save_summaries_secs=600,
          summary_writer=_USE_DEFAULT,
          startup_delay_steps=0,
          saver=None,
          save_interval_secs=600,
          sync_optimizer=None,
          session_config=None,
          trace_every_n_steps=None):
  """Wrapper around tf-slim's train function.

  Runs a training loop using a TensorFlow supervisor.
  When the sync_optimizer is supplied, gradient updates are applied
  synchronously. Otherwise, gradient updates are applied asynchronously.

  Args:
    train_op: A `Tensor` that, when executed, will apply the gradients and
      return the loss value.
    logdir: The directory where training logs are written to. If None, model
      checkpoints and summaries will not be written.
    mask_update_op: Operation that upon execution updates the weight masks and
      thresholds.
    train_step_fn: The function to call in order to execute a single gradient
      step. The function must have take exactly four arguments: the current
      session, the `train_op` `Tensor`, a global step `Tensor` and a
      dictionary.
    train_step_kwargs: A dictionary which is passed to the `train_step_fn`. By
      default, two `Boolean`, scalar ops called "should_stop" and "should_log"
      are provided.
    log_every_n_steps: The frequency, in terms of global steps, that the loss
      and global step and logged.
    graph: The graph to pass to the supervisor. If no graph is supplied the
      default graph is used.
    master: The address of the tensorflow master.
    is_chief: Specifies whether or not the training is being run by the primary
      replica during replica training.
    global_step: The `Tensor` representing the global step. If left as `None`,
      then slim.variables.get_or_create_global_step() is used.
    number_of_steps: The max number of gradient steps to take during training,
      as measured by 'global_step': training will stop if global_step is
      greater than 'number_of_steps'. If the value is left as None, training
      proceeds indefinitely.
    init_op: The initialization operation. If left to its default value, then
      the session is initialized by calling `tf.global_variables_initializer()`.
    init_feed_dict: A feed dictionary to use when executing the `init_op`.
    local_init_op: The local initialization operation. If left to its default
      value, then the session is initialized by calling
      `tf.local_variables_initializer()` and `tf.tables_initializer()`.
    init_fn: An optional callable to be executed after `init_op` is called. The
      callable must accept one argument, the session being initialized.
    ready_op: Operation to check if the model is ready to use. If left to its
      default value, then the session checks for readiness by calling
      `tf.report_uninitialized_variables()`.
    summary_op: The summary operation.
    save_summaries_secs: How often, in seconds, to save summaries.
    summary_writer: `SummaryWriter` to use.  Can be `None` to indicate that no
      summaries should be written. If unset, we create a SummaryWriter.
    startup_delay_steps: The number of steps to wait for before beginning. Note
      that this must be 0 if a sync_optimizer is supplied.
    saver: Saver to save checkpoints. If None, a default one will be created
      and used.
    save_interval_secs: How often, in seconds, to save the model to `logdir`.
    sync_optimizer: an instance of tf.train.SyncReplicasOptimizer, or a list of
      them. If the argument is supplied, gradient updates will be synchronous.
      If left as `None`, gradient updates will be asynchronous.
    session_config: An instance of `tf.ConfigProto` that will be used to
      configure the `Session`. If left as `None`, the default will be used.
    trace_every_n_steps: produce and save a `Timeline` in Chrome trace format
      and add it to the summaries every `trace_every_n_steps`. If None, no trace
      information will be produced or saved.

  Returns:
    the value of the loss function after training.

  Raises:
    ValueError: if `train_op` is empty or if `startup_delay_steps` is
      non-zero when `sync_optimizer` is supplied, if `number_of_steps` is
      negative, or if `trace_every_n_steps` is not `None` and no `logdir` is
      provided.
  """

  def train_step_with_pruning_fn(sess, train_op, global_step,
                                 train_step_kwargs):
    # Run the caller-supplied training step first, then run the mask
    # update so pruning state advances exactly once per gradient step.
    total_loss, should_stop = train_step_fn(sess, train_op, global_step,
                                            train_step_kwargs)
    sess.run(mask_update_op)
    return total_loss, should_stop

  # Delegate everything to tf-slim, substituting only the step function.
  total_loss, _ = _slim.learning.train(
      train_op,
      logdir,
      train_step_fn=train_step_with_pruning_fn,
      train_step_kwargs=train_step_kwargs,
      log_every_n_steps=log_every_n_steps,
      graph=graph,
      master=master,
      is_chief=is_chief,
      global_step=global_step,
      number_of_steps=number_of_steps,
      init_op=init_op,
      init_feed_dict=init_feed_dict,
      local_init_op=local_init_op,
      init_fn=init_fn,
      ready_op=ready_op,
      summary_op=summary_op,
      save_summaries_secs=save_summaries_secs,
      summary_writer=summary_writer,
      startup_delay_steps=startup_delay_steps,
      saver=saver,
      save_interval_secs=save_interval_secs,
      sync_optimizer=sync_optimizer,
      session_config=session_config,
      trace_every_n_steps=trace_every_n_steps)
  return total_loss
apache-2.0
Intel-Corporation/tensorflow
tensorflow/contrib/factorization/python/ops/gmm_test.py
41
8716
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ops.gmm."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.contrib.factorization.python.ops import gmm as gmm_lib
from tensorflow.contrib.learn.python.learn.estimators import kmeans
from tensorflow.contrib.learn.python.learn.estimators import run_config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed as random_seed_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
from tensorflow.python.training import queue_runner


class GMMTest(test.TestCase):
  """Tests GMM fitting/inference on synthetic clustered points.

  All randomness is seeded (numpy, TF graph seed, estimator seeds) so
  the golden values in _compare_with_sklearn stay reproducible.
  """

  def input_fn(self, batch_size=None, points=None):
    """Returns an estimator input_fn yielding (points, None).

    When batch_size equals the number of points the whole set is fed;
    otherwise a seeded random subsample of batch_size rows is drawn.
    """
    batch_size = batch_size or self.batch_size
    points = points if points is not None else self.points
    num_points = points.shape[0]
    def _fn():
      x = constant_op.constant(points)
      if batch_size == num_points:
        return x, None
      indices = random_ops.random_uniform(constant_op.constant([batch_size]),
                                          minval=0, maxval=num_points-1,
                                          dtype=dtypes.int32,
                                          seed=10)
      return array_ops.gather(x, indices), None
    return _fn

  def setUp(self):
    np.random.seed(3)
    random_seed_lib.set_random_seed(2)
    self.num_centers = 2
    self.num_dims = 2
    self.num_points = 4000
    self.batch_size = self.num_points
    self.true_centers = self.make_random_centers(self.num_centers,
                                                 self.num_dims)
    self.points, self.assignments = self.make_random_points(
        self.true_centers, self.num_points)

    # Use initial means from kmeans (just like scikit-learn does).
    clusterer = kmeans.KMeansClustering(num_clusters=self.num_centers)
    clusterer.fit(input_fn=lambda: (constant_op.constant(self.points), None),
                  steps=30)
    self.initial_means = clusterer.clusters()

  @staticmethod
  def make_random_centers(num_centers, num_dims):
    # Centers drawn uniformly in [0, 500), rounded to whole numbers.
    return np.round(
        np.random.rand(num_centers, num_dims).astype(np.float32) * 500)

  @staticmethod
  def make_random_points(centers, num_points):
    # Each point is a center plus rounded Gaussian noise (sigma=20).
    num_centers, num_dims = centers.shape
    assignments = np.random.choice(num_centers, num_points)
    offsets = np.round(
        np.random.randn(num_points, num_dims).astype(np.float32) * 20)
    points = centers[assignments] + offsets
    return (points, assignments)

  def test_weights(self):
    """Tests the shape of the weights."""
    # steps=0: presumably only initializes the estimator -- no updates.
    gmm = gmm_lib.GMM(self.num_centers,
                      initial_clusters=self.initial_means,
                      random_seed=4,
                      config=run_config.RunConfig(tf_random_seed=2))
    gmm.fit(input_fn=self.input_fn(), steps=0)
    weights = gmm.weights()
    self.assertAllEqual(list(weights.shape), [self.num_centers])

  def test_clusters(self):
    """Tests the shape of the clusters."""
    gmm = gmm_lib.GMM(self.num_centers,
                      initial_clusters=self.initial_means,
                      random_seed=4,
                      config=run_config.RunConfig(tf_random_seed=2))
    gmm.fit(input_fn=self.input_fn(), steps=0)
    clusters = gmm.clusters()
    self.assertAllEqual(list(clusters.shape), [self.num_centers,
                                               self.num_dims])

  def test_fit(self):
    # More training steps should improve the score.
    gmm = gmm_lib.GMM(self.num_centers,
                      initial_clusters='random',
                      random_seed=4,
                      config=run_config.RunConfig(tf_random_seed=2))
    gmm.fit(input_fn=self.input_fn(), steps=1)
    score1 = gmm.score(input_fn=self.input_fn(batch_size=self.num_points),
                       steps=1)
    gmm.fit(input_fn=self.input_fn(), steps=10)
    score2 = gmm.score(input_fn=self.input_fn(batch_size=self.num_points),
                       steps=1)
    self.assertLess(score1, score2)

  def test_infer(self):
    gmm = gmm_lib.GMM(self.num_centers,
                      initial_clusters=self.initial_means,
                      random_seed=4,
                      config=run_config.RunConfig(tf_random_seed=2))
    gmm.fit(input_fn=self.input_fn(), steps=60)
    clusters = gmm.clusters()

    # Make a small test set
    num_points = 40
    points, true_assignments = self.make_random_points(clusters, num_points)

    assignments = []
    for item in gmm.predict_assignments(
        input_fn=self.input_fn(points=points, batch_size=num_points)):
      assignments.append(item)
    assignments = np.ravel(assignments)
    self.assertAllEqual(true_assignments, assignments)

  def _compare_with_sklearn(self, cov_type):
    # sklearn version.
    # Golden values below were computed with scikit-learn on the same
    # seeded data; they depend on the exact seeds set in setUp.
    iterations = 40
    np.random.seed(5)
    sklearn_assignments = np.asarray([0, 0, 1, 0, 0, 0, 1, 0, 0, 1])
    sklearn_means = np.asarray([[144.83417719, 254.20130341],
                                [274.38754816, 353.16074346]])
    sklearn_covs = np.asarray([[[395.0081194, -4.50389512],
                                [-4.50389512, 408.27543989]],
                               [[385.17484203, -31.27834935],
                                [-31.27834935, 391.74249925]]])

    # skflow version.
    gmm = gmm_lib.GMM(self.num_centers,
                      initial_clusters=self.initial_means,
                      covariance_type=cov_type,
                      config=run_config.RunConfig(tf_random_seed=2))
    gmm.fit(input_fn=self.input_fn(), steps=iterations)
    points = self.points[:10, :]
    skflow_assignments = []
    for item in gmm.predict_assignments(
        input_fn=self.input_fn(points=points, batch_size=10)):
      skflow_assignments.append(item)
    self.assertAllClose(sklearn_assignments,
                        np.ravel(skflow_assignments).astype(int))
    self.assertAllClose(sklearn_means, gmm.clusters())
    if cov_type == 'full':
      self.assertAllClose(sklearn_covs, gmm.covariances(), rtol=0.01)
    else:
      # Diagonal covariance: compare only the diagonal entries.
      for d in [0, 1]:
        self.assertAllClose(
            np.diag(sklearn_covs[d]), gmm.covariances()[d, :], rtol=0.01)

  def test_compare_full(self):
    self._compare_with_sklearn('full')

  def test_compare_diag(self):
    self._compare_with_sklearn('diag')

  def test_random_input_large(self):
    # sklearn version.
    iterations = 5  # that should be enough to know whether this diverges
    np.random.seed(5)
    num_classes = 20
    x = np.array([[np.random.random() for _ in range(100)]
                  for _ in range(num_classes)], dtype=np.float32)

    # skflow version.
    gmm = gmm_lib.GMM(num_classes,
                      covariance_type='full',
                      config=run_config.RunConfig(tf_random_seed=2))

    def get_input_fn(x):
      def input_fn():
        return constant_op.constant(x.astype(np.float32)), None
      return input_fn

    gmm.fit(input_fn=get_input_fn(x), steps=iterations)
    # Training should not diverge (no NaNs in the learned clusters).
    self.assertFalse(np.isnan(gmm.clusters()).any())


class GMMTestQueues(test.TestCase):
  """Tests GMM training with queue-based input."""

  def input_fn(self):
    def _fn():
      queue = data_flow_ops.FIFOQueue(capacity=10,
                                      dtypes=dtypes.float32,
                                      shapes=[10, 3])
      enqueue_op = queue.enqueue(array_ops.zeros([10, 3],
                                                 dtype=dtypes.float32))
      queue_runner.add_queue_runner(queue_runner.QueueRunner(queue,
                                                             [enqueue_op]))
      return queue.dequeue(), None
    return _fn

  # This test makes sure that there are no deadlocks when using a QueueRunner.
  # Note that since cluster initialization is dependent on inputs, if input
  # is generated using a QueueRunner, one has to make sure that these runners
  # are started before the initialization.
  def test_queues(self):
    gmm = gmm_lib.GMM(2, covariance_type='diag')
    gmm.fit(input_fn=self.input_fn(), steps=1)


if __name__ == '__main__':
  test.main()
apache-2.0
koduj-z-klasa/python101
docs/pyqt/kalkulator/kalkulator05.py
1
3356
#!/usr/bin/python3
# -*- coding: utf-8 -*-

from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QLabel, QGridLayout
from PyQt5.QtWidgets import QLineEdit, QPushButton, QHBoxLayout
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtCore import Qt


class Kalkulator(QWidget):
    """A simple four-function calculator window (PyQt5 tutorial app)."""

    def __init__(self, parent=None):
        super().__init__(parent)
        self.interfejs()

    def interfejs(self):
        """Build the widget layout and wire up the signal handlers."""
        # labels
        etykieta1 = QLabel("Liczba 1:", self)
        etykieta2 = QLabel("Liczba 2:", self)
        etykieta3 = QLabel("Wynik:", self)

        # attach the widgets to a grid layout
        ukladT = QGridLayout()
        ukladT.addWidget(etykieta1, 0, 0)
        ukladT.addWidget(etykieta2, 0, 1)
        ukladT.addWidget(etykieta3, 0, 2)

        # single-line edit fields
        self.liczba1Edt = QLineEdit()
        self.liczba2Edt = QLineEdit()
        self.wynikEdt = QLineEdit()
        # BUGFIX: the original did `self.wynikEdt.readonly = True`, which
        # only creates an unused Python attribute -- QLineEdit exposes
        # read-only mode through the setReadOnly() setter.
        self.wynikEdt.setReadOnly(True)
        self.wynikEdt.setToolTip('Wpisz <b>liczby</b> i wybierz działanie...')
        ukladT.addWidget(self.liczba1Edt, 1, 0)
        ukladT.addWidget(self.liczba2Edt, 1, 1)
        ukladT.addWidget(self.wynikEdt, 1, 2)

        # buttons
        # BUGFIX: the multiply/divide variable names carried each other's
        # captions ("&Mnóż" was on dzielBtn and "D&ziel" on mnozBtn).
        # The visible button order is unchanged: Dodaj, Odejmij, Mnóż, Dziel.
        dodajBtn = QPushButton("&Dodaj", self)
        odejmijBtn = QPushButton("&Odejmij", self)
        mnozBtn = QPushButton("&Mnóż", self)
        dzielBtn = QPushButton("D&ziel", self)
        koniecBtn = QPushButton("&Koniec", self)
        koniecBtn.resize(koniecBtn.sizeHint())

        ukladH = QHBoxLayout()
        ukladH.addWidget(dodajBtn)
        ukladH.addWidget(odejmijBtn)
        ukladH.addWidget(mnozBtn)
        ukladH.addWidget(dzielBtn)

        ukladT.addLayout(ukladH, 2, 0, 1, 3)
        ukladT.addWidget(koniecBtn, 3, 0, 1, 3)

        # install the layout on the window
        self.setLayout(ukladT)

        koniecBtn.clicked.connect(self.koniec)
        dodajBtn.clicked.connect(self.dzialanie)
        odejmijBtn.clicked.connect(self.dzialanie)
        mnozBtn.clicked.connect(self.dzialanie)
        dzielBtn.clicked.connect(self.dzialanie)

        self.setGeometry(20, 20, 300, 100)
        self.setWindowIcon(QIcon('kalkulator.png'))
        self.setWindowTitle("Prosty kalkulator")
        self.show()

    def koniec(self):
        """Close the window (triggers closeEvent for confirmation)."""
        self.close()

    def closeEvent(self, event):
        """Ask for confirmation before the window is closed."""
        odp = QMessageBox.question(
            self, 'Komunikat',
            "Czy na pewno koniec?",
            QMessageBox.Yes | QMessageBox.No, QMessageBox.No)

        if odp == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()

    def keyPressEvent(self, e):
        """Escape key closes the window."""
        if e.key() == Qt.Key_Escape:
            self.close()

    def dzialanie(self):
        """Perform the arithmetic selected by the button that sent the signal.

        Dispatches on the sender button's caption.  Invalid numeric input
        pops a warning; division by zero pops an error and leaves the
        result field unchanged.
        """
        nadawca = self.sender()

        try:
            liczba1 = float(self.liczba1Edt.text())
            liczba2 = float(self.liczba2Edt.text())
            wynik = ""

            # BUGFIX: the original only implemented "&Dodaj" and silently
            # ignored the other three connected buttons (else: pass).
            if nadawca.text() == "&Dodaj":
                wynik = liczba1 + liczba2
            elif nadawca.text() == "&Odejmij":
                wynik = liczba1 - liczba2
            elif nadawca.text() == "&Mnóż":
                wynik = liczba1 * liczba2
            elif nadawca.text() == "D&ziel":
                try:
                    wynik = round(liczba1 / liczba2, 9)
                except ZeroDivisionError:
                    QMessageBox.critical(
                        self, "Błąd", "Nie dziel przez zero!", QMessageBox.Ok)
                    return

            self.wynikEdt.setText(str(wynik))
        except ValueError:
            QMessageBox.warning(self, "Błąd", "Błędne dane", QMessageBox.Ok)


if __name__ == '__main__':
    import sys

    app = QApplication(sys.argv)
    okno = Kalkulator()
    sys.exit(app.exec_())
mit
parallel-fs-utils/fs-drift
fsd_log.py
2
1486
import os import logging # standardize use of logging module in fs-drift def start_log(prefix, verbosity=0): log = logging.getLogger(prefix) if os.getenv('LOGLEVEL_DEBUG') != None or verbosity != 0: log.setLevel(logging.DEBUG) else: log.setLevel(logging.INFO) log_format = prefix + ' %(asctime)s - %(levelname)s - %(message)s' formatter = logging.Formatter(log_format) h = logging.StreamHandler() h.setFormatter(formatter) h.setLevel(logging.INFO) log.addHandler(h) h2 = logging.FileHandler('/var/tmp/fsd.%s.log' % prefix) h2.setFormatter(formatter) log.addHandler(h2) log.info('starting log') return log # assumptions: # - there is only 1 FileHandler associated with logger # - you don't want to change loglevel of StreamHandler def change_loglevel(logger, loglevel): for h in logger.handlers: if isinstance(h, logging.FileHandler): logger.info('changing log level of FileHandler to %s' % loglevel) h.setLevel(loglevel) if __name__ == '__main__': log = start_log('fsd_log_test') log.error('level %s', 'error') log.warn('level %s', 'warn') log.info('level %s', 'info') log.debug('level %s', 'debug') change_loglevel(log, logging.DEBUG) log.debug('level %s', 'debug - should see this one in the log file /var/tmp/fsd.fsd_log_test.log') change_loglevel(log, logging.INFO) log.debug('level %s', 'debug - should NOT see this one there')
apache-2.0
CognitiveScale/ansible-modules-extras
database/vertica/vertica_role.py
148
8202
#!/usr/bin/python # -*- coding: utf-8 -*- # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = """ --- module: vertica_role version_added: '2.0' short_description: Adds or removes Vertica database roles and assigns roles to them. description: - Adds or removes Vertica database role and, optionally, assign other roles. options: name: description: - Name of the role to add or remove. required: true assigned_roles: description: - Comma separated list of roles to assign to the role. aliases: ['assigned_role'] required: false default: null state: description: - Whether to create C(present), drop C(absent) or lock C(locked) a role. required: false choices: ['present', 'absent'] default: present db: description: - Name of the Vertica database. required: false default: null cluster: description: - Name of the Vertica cluster. required: false default: localhost port: description: - Vertica cluster port to connect to. required: false default: 5433 login_user: description: - The username used to authenticate with. required: false default: dbadmin login_password: description: - The password used to authenticate with. required: false default: null notes: - The default authentication assumes that you are either logging in as or sudo'ing to the C(dbadmin) account on the host. - This module uses C(pyodbc), a Python ODBC database adapter. 
You must ensure that C(unixODBC) and C(pyodbc) is installed on the host and properly configured. - Configuring C(unixODBC) for Vertica requires C(Driver = /opt/vertica/lib64/libverticaodbc.so) to be added to the C(Vertica) section of either C(/etc/odbcinst.ini) or C($HOME/.odbcinst.ini) and both C(ErrorMessagesPath = /opt/vertica/lib64) and C(DriverManagerEncoding = UTF-16) to be added to the C(Driver) section of either C(/etc/vertica.ini) or C($HOME/.vertica.ini). requirements: [ 'unixODBC', 'pyodbc' ] author: "Dariusz Owczarek (@dareko)" """ EXAMPLES = """ - name: creating a new vertica role vertica_role: name=role_name db=db_name state=present - name: creating a new vertica role with other role assigned vertica_role: name=role_name assigned_role=other_role_name state=present """ try: import pyodbc except ImportError: pyodbc_found = False else: pyodbc_found = True class NotSupportedError(Exception): pass class CannotDropError(Exception): pass # module specific functions def get_role_facts(cursor, role=''): facts = {} cursor.execute(""" select r.name, r.assigned_roles from roles r where (? = '' or r.name ilike ?) 
""", role, role) while True: rows = cursor.fetchmany(100) if not rows: break for row in rows: role_key = row.name.lower() facts[role_key] = { 'name': row.name, 'assigned_roles': []} if row.assigned_roles: facts[role_key]['assigned_roles'] = row.assigned_roles.replace(' ', '').split(',') return facts def update_roles(role_facts, cursor, role, existing, required): for assigned_role in set(existing) - set(required): cursor.execute("revoke {0} from {1}".format(assigned_role, role)) for assigned_role in set(required) - set(existing): cursor.execute("grant {0} to {1}".format(assigned_role, role)) def check(role_facts, role, assigned_roles): role_key = role.lower() if role_key not in role_facts: return False if assigned_roles and cmp(sorted(assigned_roles), sorted(role_facts[role_key]['assigned_roles'])) != 0: return False return True def present(role_facts, cursor, role, assigned_roles): role_key = role.lower() if role_key not in role_facts: cursor.execute("create role {0}".format(role)) update_roles(role_facts, cursor, role, [], assigned_roles) role_facts.update(get_role_facts(cursor, role)) return True else: changed = False if assigned_roles and cmp(sorted(assigned_roles), sorted(role_facts[role_key]['assigned_roles'])) != 0: update_roles(role_facts, cursor, role, role_facts[role_key]['assigned_roles'], assigned_roles) changed = True if changed: role_facts.update(get_role_facts(cursor, role)) return changed def absent(role_facts, cursor, role, assigned_roles): role_key = role.lower() if role_key in role_facts: update_roles(role_facts, cursor, role, role_facts[role_key]['assigned_roles'], []) cursor.execute("drop role {0} cascade".format(role_facts[role_key]['name'])) del role_facts[role_key] return True else: return False # module logic def main(): module = AnsibleModule( argument_spec=dict( role=dict(required=True, aliases=['name']), assigned_roles=dict(default=None, aliases=['assigned_role']), state=dict(default='present', choices=['absent', 'present']), 
db=dict(default=None), cluster=dict(default='localhost'), port=dict(default='5433'), login_user=dict(default='dbadmin'), login_password=dict(default=None), ), supports_check_mode = True) if not pyodbc_found: module.fail_json(msg="The python pyodbc module is required.") role = module.params['role'] assigned_roles = [] if module.params['assigned_roles']: assigned_roles = module.params['assigned_roles'].split(',') assigned_roles = filter(None, assigned_roles) state = module.params['state'] db = '' if module.params['db']: db = module.params['db'] changed = False try: dsn = ( "Driver=Vertica;" "Server={0};" "Port={1};" "Database={2};" "User={3};" "Password={4};" "ConnectionLoadBalance={5}" ).format(module.params['cluster'], module.params['port'], db, module.params['login_user'], module.params['login_password'], 'true') db_conn = pyodbc.connect(dsn, autocommit=True) cursor = db_conn.cursor() except Exception, e: module.fail_json(msg="Unable to connect to database: {0}.".format(e)) try: role_facts = get_role_facts(cursor) if module.check_mode: changed = not check(role_facts, role, assigned_roles) elif state == 'absent': try: changed = absent(role_facts, cursor, role, assigned_roles) except pyodbc.Error, e: module.fail_json(msg=str(e)) elif state == 'present': try: changed = present(role_facts, cursor, role, assigned_roles) except pyodbc.Error, e: module.fail_json(msg=str(e)) except NotSupportedError, e: module.fail_json(msg=str(e), ansible_facts={'vertica_roles': role_facts}) except CannotDropError, e: module.fail_json(msg=str(e), ansible_facts={'vertica_roles': role_facts}) except SystemExit: # avoid catching this on python 2.4 raise except Exception, e: module.fail_json(msg=e) module.exit_json(changed=changed, role=role, ansible_facts={'vertica_roles': role_facts}) # import ansible utilities from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
shsingh/ansible
lib/ansible/modules/storage/vexata/vexata_volume.py
37
5080
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2019, Sandeep Kasargod (sandeep@vexata.com) # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: vexata_volume version_added: 2.8 short_description: Manage volumes on Vexata VX100 storage arrays description: - Create, deletes or extend volumes on a Vexata VX100 array. author: - Sandeep Kasargod (@vexata) options: name: description: - Volume name. required: true type: str state: description: - Creates/Modifies volume when present or removes when absent. default: present choices: [ present, absent ] type: str size: description: - Volume size in M, G, T units. M=2^20, G=2^30, T=2^40 bytes. type: str extends_documentation_fragment: - vexata.vx100 ''' EXAMPLES = r''' - name: Create new 2 TiB volume named foo vexata_volume: name: foo size: 2T state: present array: vx100_ultra.test.com user: admin password: secret - name: Expand volume named foo to 4 TiB vexata_volume: name: foo size: 4T state: present array: vx100_ultra.test.com user: admin password: secret - name: Delete volume named foo vexata_volume: name: foo state: absent array: vx100_ultra.test.com user: admin password: secret ''' RETURN = r''' ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.vexata import ( argument_spec, get_array, required_together, size_to_MiB) def get_volume(module, array): """Retrieve a named volume if it exists, None if absent.""" name = module.params['name'] try: vols = array.list_volumes() vol = filter(lambda v: v['name'] == name, vols) if len(vol) == 1: return vol[0] else: return None except Exception: module.fail_json(msg='Error while attempting to retrieve volumes.') def validate_size(module, err_msg): size = module.params.get('size', False) 
if not size: module.fail_json(msg=err_msg) size = size_to_MiB(size) if size <= 0: module.fail_json(msg='Invalid volume size, must be <integer>[MGT].') return size def create_volume(module, array): """"Create a new volume.""" changed = False size = validate_size(module, err_msg='Size is required to create volume.') if module.check_mode: module.exit_json(changed=changed) try: vol = array.create_volume( module.params['name'], 'Ansible volume', size) if vol: module.log(msg='Created volume {0}'.format(vol['id'])) changed = True else: module.fail_json(msg='Volume create failed.') except Exception: pass module.exit_json(changed=changed) def update_volume(module, array, volume): """Expand the volume size.""" changed = False size = validate_size(module, err_msg='Size is required to update volume') prev_size = volume['volSize'] if size <= prev_size: module.log(msg='Volume expanded size needs to be larger ' 'than current size.') if module.check_mode: module.exit_json(changed=changed) try: vol = array.grow_volume( volume['name'], volume['description'], volume['id'], size) if vol: changed = True except Exception: pass module.exit_json(changed=changed) def delete_volume(module, array, volume): changed = False vol_name = volume['name'] if module.check_mode: module.exit_json(changed=changed) try: ok = array.delete_volume( volume['id']) if ok: module.log(msg='Volume {0} deleted.'.format(vol_name)) changed = True else: raise Exception except Exception: pass module.exit_json(changed=changed) def main(): arg_spec = argument_spec() arg_spec.update( dict( name=dict(type='str', required=True), state=dict(default='present', choices=['present', 'absent']), size=dict(type='str') ) ) module = AnsibleModule(arg_spec, supports_check_mode=True, required_together=required_together()) state = module.params['state'] array = get_array(module) volume = get_volume(module, array) if state == 'present': if not volume: create_volume(module, array) else: update_volume(module, array, volume) elif state == 
'absent' and volume: delete_volume(module, array, volume) else: module.exit_json(changed=False) if __name__ == '__main__': main()
gpl-3.0
PetrDlouhy/django-registration
registration/backends/default/views.py
17
5717
from django.conf import settings from ... import signals from ...models import RegistrationProfile from ...views import ActivationView as BaseActivationView from ...views import RegistrationView as BaseRegistrationView from ...compat import RequestSite, is_app_installed, get_site_model from ...users import UserModel class RegistrationView(BaseRegistrationView): """ A registration backend which follows a simple workflow: 1. User signs up, inactive account is created. 2. Email is sent to user with activation link. 3. User clicks activation link, account is now active. Using this backend requires that * ``registration`` be listed in the ``INSTALLED_APPS`` setting (since this backend makes use of models defined in this application). * The setting ``ACCOUNT_ACTIVATION_DAYS`` be supplied, specifying (as an integer) the number of days from registration during which a user may activate their account (after that period expires, activation will be disallowed). * The creation of the templates ``registration/activation_email_subject.txt`` and ``registration/activation_email.txt``, which will be used for the activation email. See the notes for this backends ``register`` method for details regarding these templates. When subclassing this view, you can set the ``SEND_ACTIVATION_EMAIL`` class variable to False to skip sending the new user a confirmation email or set ``SEND_ACTIVATION_EMAIL`` to ``False``. Doing so implies that you will have to activate the user manually from the admin site or send an activation by some other method. For example, by listening for the ``user_registered`` signal. Additionally, registration can be temporarily closed by adding the setting ``REGISTRATION_OPEN`` and setting it to ``False``. Omitting this setting, or setting it to ``True``, will be interpreted as meaning that registration is currently open and permitted. Internally, this is accomplished via storing an activation key in an instance of ``registration.models.RegistrationProfile``. 
See that model and its custom manager for full documentation of its fields and supported operations. """ SEND_ACTIVATION_EMAIL = getattr(settings, 'SEND_ACTIVATION_EMAIL', True) success_url = 'registration_complete' def register(self, request, form): """ Given a username, email address and password, register a new user account, which will initially be inactive. Along with the new ``User`` object, a new ``registration.models.RegistrationProfile`` will be created, tied to that ``User``, containing the activation key which will be used for this account. An email will be sent to the supplied email address; this email should contain an activation link. The email will be rendered using two templates. See the documentation for ``RegistrationProfile.send_activation_email()`` for information about these templates and the contexts provided to them. After the ``User`` and ``RegistrationProfile`` are created and the activation email is sent, the signal ``registration.signals.user_registered`` will be sent, with the new ``User`` as the keyword argument ``user`` and the class of this backend as the sender. """ if is_app_installed('django.contrib.sites'): site = get_site_model().objects.get_current() else: site = RequestSite(request) if hasattr(form, 'save'): new_user_instance = form.save() else: new_user_instance = (UserModel().objects .create_user(**form.cleaned_data)) new_user = RegistrationProfile.objects.create_inactive_user( new_user=new_user_instance, site=site, send_email=self.SEND_ACTIVATION_EMAIL, request=request, ) signals.user_registered.send(sender=self.__class__, user=new_user, request=request) return new_user def registration_allowed(self, request): """ Indicate whether account registration is currently permitted, based on the value of the setting ``REGISTRATION_OPEN``. This is determined as follows: * If ``REGISTRATION_OPEN`` is not specified in settings, or is set to ``True``, registration is permitted. 
* If ``REGISTRATION_OPEN`` is both specified and set to ``False``, registration is not permitted. """ return getattr(settings, 'REGISTRATION_OPEN', True) class ActivationView(BaseActivationView): def activate(self, request, activation_key): """ Given an an activation key, look up and activate the user account corresponding to that key (if possible). After successful activation, the signal ``registration.signals.user_activated`` will be sent, with the newly activated ``User`` as the keyword argument ``user`` and the class of this backend as the sender. """ activated_user = (RegistrationProfile.objects .activate_user(activation_key)) if activated_user: signals.user_activated.send(sender=self.__class__, user=activated_user, request=request) return activated_user def get_success_url(self, request, user): return ('registration_activation_complete', (), {})
bsd-3-clause
40223112/2015cd_midterm
static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_discovery.py
785
13838
import os import re import sys import unittest class TestableTestProgram(unittest.TestProgram): module = '__main__' exit = True defaultTest = failfast = catchbreak = buffer = None verbosity = 1 progName = '' testRunner = testLoader = None def __init__(self): pass class TestDiscovery(unittest.TestCase): # Heavily mocked tests so I can avoid hitting the filesystem def test_get_name_from_path(self): loader = unittest.TestLoader() loader._top_level_dir = '/foo' name = loader._get_name_from_path('/foo/bar/baz.py') self.assertEqual(name, 'bar.baz') if not __debug__: # asserts are off return with self.assertRaises(AssertionError): loader._get_name_from_path('/bar/baz.py') def test_find_tests(self): loader = unittest.TestLoader() original_listdir = os.listdir def restore_listdir(): os.listdir = original_listdir original_isfile = os.path.isfile def restore_isfile(): os.path.isfile = original_isfile original_isdir = os.path.isdir def restore_isdir(): os.path.isdir = original_isdir path_lists = [['test1.py', 'test2.py', 'not_a_test.py', 'test_dir', 'test.foo', 'test-not-a-module.py', 'another_dir'], ['test3.py', 'test4.py', ]] os.listdir = lambda path: path_lists.pop(0) self.addCleanup(restore_listdir) def isdir(path): return path.endswith('dir') os.path.isdir = isdir self.addCleanup(restore_isdir) def isfile(path): # another_dir is not a package and so shouldn't be recursed into return not path.endswith('dir') and not 'another_dir' in path os.path.isfile = isfile self.addCleanup(restore_isfile) loader._get_module_from_name = lambda path: path + ' module' loader.loadTestsFromModule = lambda module: module + ' tests' top_level = os.path.abspath('/foo') loader._top_level_dir = top_level suite = list(loader._find_tests(top_level, 'test*.py')) expected = [name + ' module tests' for name in ('test1', 'test2')] expected.extend([('test_dir.%s' % name) + ' module tests' for name in ('test3', 'test4')]) self.assertEqual(suite, expected) def test_find_tests_with_package(self): loader = 
unittest.TestLoader() original_listdir = os.listdir def restore_listdir(): os.listdir = original_listdir original_isfile = os.path.isfile def restore_isfile(): os.path.isfile = original_isfile original_isdir = os.path.isdir def restore_isdir(): os.path.isdir = original_isdir directories = ['a_directory', 'test_directory', 'test_directory2'] path_lists = [directories, [], [], []] os.listdir = lambda path: path_lists.pop(0) self.addCleanup(restore_listdir) os.path.isdir = lambda path: True self.addCleanup(restore_isdir) os.path.isfile = lambda path: os.path.basename(path) not in directories self.addCleanup(restore_isfile) class Module(object): paths = [] load_tests_args = [] def __init__(self, path): self.path = path self.paths.append(path) if os.path.basename(path) == 'test_directory': def load_tests(loader, tests, pattern): self.load_tests_args.append((loader, tests, pattern)) return 'load_tests' self.load_tests = load_tests def __eq__(self, other): return self.path == other.path loader._get_module_from_name = lambda name: Module(name) def loadTestsFromModule(module, use_load_tests): if use_load_tests: raise self.failureException('use_load_tests should be False for packages') return module.path + ' module tests' loader.loadTestsFromModule = loadTestsFromModule loader._top_level_dir = '/foo' # this time no '.py' on the pattern so that it can match # a test package suite = list(loader._find_tests('/foo', 'test*')) # We should have loaded tests from the test_directory package by calling load_tests # and directly from the test_directory2 package self.assertEqual(suite, ['load_tests', 'test_directory2' + ' module tests']) self.assertEqual(Module.paths, ['test_directory', 'test_directory2']) # load_tests should have been called once with loader, tests and pattern self.assertEqual(Module.load_tests_args, [(loader, 'test_directory' + ' module tests', 'test*')]) def test_discover(self): loader = unittest.TestLoader() original_isfile = os.path.isfile original_isdir = 
os.path.isdir def restore_isfile(): os.path.isfile = original_isfile os.path.isfile = lambda path: False self.addCleanup(restore_isfile) orig_sys_path = sys.path[:] def restore_path(): sys.path[:] = orig_sys_path self.addCleanup(restore_path) full_path = os.path.abspath(os.path.normpath('/foo')) with self.assertRaises(ImportError): loader.discover('/foo/bar', top_level_dir='/foo') self.assertEqual(loader._top_level_dir, full_path) self.assertIn(full_path, sys.path) os.path.isfile = lambda path: True os.path.isdir = lambda path: True def restore_isdir(): os.path.isdir = original_isdir self.addCleanup(restore_isdir) _find_tests_args = [] def _find_tests(start_dir, pattern): _find_tests_args.append((start_dir, pattern)) return ['tests'] loader._find_tests = _find_tests loader.suiteClass = str suite = loader.discover('/foo/bar/baz', 'pattern', '/foo/bar') top_level_dir = os.path.abspath('/foo/bar') start_dir = os.path.abspath('/foo/bar/baz') self.assertEqual(suite, "['tests']") self.assertEqual(loader._top_level_dir, top_level_dir) self.assertEqual(_find_tests_args, [(start_dir, 'pattern')]) self.assertIn(top_level_dir, sys.path) def test_discover_with_modules_that_fail_to_import(self): loader = unittest.TestLoader() listdir = os.listdir os.listdir = lambda _: ['test_this_does_not_exist.py'] isfile = os.path.isfile os.path.isfile = lambda _: True orig_sys_path = sys.path[:] def restore(): os.path.isfile = isfile os.listdir = listdir sys.path[:] = orig_sys_path self.addCleanup(restore) suite = loader.discover('.') self.assertIn(os.getcwd(), sys.path) self.assertEqual(suite.countTestCases(), 1) test = list(list(suite)[0])[0] # extract test from suite with self.assertRaises(ImportError): test.test_this_does_not_exist() def test_command_line_handling_parseArgs(self): program = TestableTestProgram() args = [] def do_discovery(argv): args.extend(argv) program._do_discovery = do_discovery program.parseArgs(['something', 'discover']) self.assertEqual(args, []) 
program.parseArgs(['something', 'discover', 'foo', 'bar']) self.assertEqual(args, ['foo', 'bar']) def test_command_line_handling_discover_by_default(self): program = TestableTestProgram() program.module = None self.called = False def do_discovery(argv): self.called = True self.assertEqual(argv, []) program._do_discovery = do_discovery program.parseArgs(['something']) self.assertTrue(self.called) def test_command_line_handling_discover_by_default_with_options(self): program = TestableTestProgram() program.module = None args = ['something', '-v', '-b', '-v', '-c', '-f'] self.called = False def do_discovery(argv): self.called = True self.assertEqual(argv, args[1:]) program._do_discovery = do_discovery program.parseArgs(args) self.assertTrue(self.called) def test_command_line_handling_do_discovery_too_many_arguments(self): class Stop(Exception): pass def usageExit(): raise Stop program = TestableTestProgram() program.usageExit = usageExit with self.assertRaises(Stop): # too many args program._do_discovery(['one', 'two', 'three', 'four']) def test_command_line_handling_do_discovery_calls_loader(self): program = TestableTestProgram() class Loader(object): args = [] def discover(self, start_dir, pattern, top_level_dir): self.args.append((start_dir, pattern, top_level_dir)) return 'tests' program._do_discovery(['-v'], Loader=Loader) self.assertEqual(program.verbosity, 2) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['--verbose'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery([], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['fish'], Loader=Loader) self.assertEqual(program.test, 
'tests') self.assertEqual(Loader.args, [('fish', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['fish', 'eggs'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['fish', 'eggs', 'ham'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', 'ham')]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-s', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'test*.py', None)]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-t', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', 'fish')]) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-p', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'fish', None)]) self.assertFalse(program.failfast) self.assertFalse(program.catchbreak) Loader.args = [] program = TestableTestProgram() program._do_discovery(['-p', 'eggs', '-s', 'fish', '-v', '-f', '-c'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', None)]) self.assertEqual(program.verbosity, 2) self.assertTrue(program.failfast) self.assertTrue(program.catchbreak) def test_detect_module_clash(self): class Module(object): __file__ = 'bar/foo.py' sys.modules['foo'] = Module full_path = os.path.abspath('foo') original_listdir = os.listdir original_isfile = os.path.isfile original_isdir = os.path.isdir def cleanup(): os.listdir = original_listdir os.path.isfile = original_isfile os.path.isdir = original_isdir del sys.modules['foo'] if full_path in sys.path: sys.path.remove(full_path) self.addCleanup(cleanup) def listdir(_): return ['foo.py'] def isfile(_): return True def 
isdir(_): return True os.listdir = listdir os.path.isfile = isfile os.path.isdir = isdir loader = unittest.TestLoader() mod_dir = os.path.abspath('bar') expected_dir = os.path.abspath('foo') msg = re.escape(r"'foo' module incorrectly imported from %r. Expected %r. " "Is this module globally installed?" % (mod_dir, expected_dir)) self.assertRaisesRegex( ImportError, '^%s$' % msg, loader.discover, start_dir='foo', pattern='foo.py' ) self.assertEqual(sys.path[0], full_path) def test_discovery_from_dotted_path(self): loader = unittest.TestLoader() tests = [self] expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__)) self.wasRun = False def _find_tests(start_dir, pattern): self.wasRun = True self.assertEqual(start_dir, expectedPath) return tests loader._find_tests = _find_tests suite = loader.discover('unittest.test') self.assertTrue(self.wasRun) self.assertEqual(suite._tests, tests) if __name__ == '__main__': unittest.main()
gpl-3.0
jolevq/odoopub
addons/marketing_campaign/report/campaign_analysis.py
379
5310
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import tools
from openerp.osv import fields, osv
from openerp.addons.decimal_precision import decimal_precision as dp


class campaign_analysis(osv.osv):
    """Read-only reporting model backed by the ``campaign_analysis`` SQL view.

    ``_auto = False`` means no table is created by the ORM; :meth:`init`
    (re)creates the view instead.
    """
    _name = "campaign.analysis"
    _description = "Campaign Analysis"
    _auto = False
    _rec_name = 'date'

    def _total_cost(self, cr, uid, ids, field_name, arg, context=None):
        """Function field: cost of one analysis row.

        Cost = the activity's variable cost plus the campaign's fixed cost
        spread evenly over every workitem of the campaign.

        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: List of case and section Data's IDs
        @param context: A standard dictionary for contextual values
        @return: dict mapping each id in ``ids`` to its computed cost (float)
        """
        result = {}
        # Hoisted out of the loop: the workitem pool does not change per row.
        workitem_pool = self.pool.get('marketing.campaign.workitem')
        for ca_obj in self.browse(cr, uid, ids, context=context):
            wi_ids = workitem_pool.search(cr, uid,
                    [('segment_id.campaign_id', '=', ca_obj.campaign_id.id)])
            # Guard against ZeroDivisionError: the view normally guarantees at
            # least one workitem per row, but a concurrent deletion could
            # leave the campaign without any.
            wi_count = len(wi_ids) or 1
            total_cost = ca_obj.activity_id.variable_cost + \
                    ((ca_obj.campaign_id.fixed_cost or 1.00) / wi_count)
            result[ca_obj.id] = total_cost
        return result

    _columns = {
        'res_id': fields.integer('Resource', readonly=True),
        'year': fields.char('Year', size=4, readonly=True),
        'month': fields.selection([('01', 'January'), ('02', 'February'),
            ('03', 'March'), ('04', 'April'), ('05', 'May'), ('06', 'June'),
            ('07', 'July'), ('08', 'August'), ('09', 'September'),
            ('10', 'October'), ('11', 'November'), ('12', 'December')],
            'Month', readonly=True),
        'day': fields.char('Day', size=10, readonly=True),
        'date': fields.date('Date', readonly=True, select=True),
        'campaign_id': fields.many2one('marketing.campaign', 'Campaign',
            readonly=True),
        'activity_id': fields.many2one('marketing.campaign.activity',
            'Activity', readonly=True),
        'segment_id': fields.many2one('marketing.campaign.segment', 'Segment',
            readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'country_id': fields.related('partner_id', 'country_id',
            type='many2one', relation='res.country', string='Country'),
        'total_cost': fields.function(_total_cost, string='Cost',
            type="float", digits_compute=dp.get_precision('Account')),
        'revenue': fields.float('Revenue', readonly=True,
            digits_compute=dp.get_precision('Account')),
        'count': fields.integer('# of Actions', readonly=True),
        'state': fields.selection([('todo', 'To Do'), ('exception', 'Exception'),
            ('done', 'Done'), ('cancelled', 'Cancelled')], 'Status',
            readonly=True),
    }

    def init(self, cr):
        """(Re)create the ``campaign_analysis`` SQL view backing this model."""
        tools.drop_view_if_exists(cr, 'campaign_analysis')
        cr.execute("""
            create or replace view campaign_analysis as (
            select
                min(wi.id) as id,
                min(wi.res_id) as res_id,
                to_char(wi.date::date, 'YYYY') as year,
                to_char(wi.date::date, 'MM') as month,
                to_char(wi.date::date, 'YYYY-MM-DD') as day,
                wi.date::date as date,
                s.campaign_id as campaign_id,
                wi.activity_id as activity_id,
                wi.segment_id as segment_id,
                wi.partner_id as partner_id ,
                wi.state as state,
                sum(act.revenue) as revenue,
                count(*) as count
            from
                marketing_campaign_workitem wi
                left join res_partner p on (p.id=wi.partner_id)
                left join marketing_campaign_segment s on (s.id=wi.segment_id)
                left join marketing_campaign_activity act on (act.id= wi.activity_id)
            group by
                s.campaign_id,wi.activity_id,wi.segment_id,wi.partner_id,wi.state,
                wi.date::date
            )
        """)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
calfonso/ansible
lib/ansible/modules/windows/win_iis_webapppool.py
43
7037
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2015, Henrik Wallström <henrik@wallstroms.nu>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# NOTE: this Python file carries only the module documentation; the actual
# implementation of win_iis_webapppool lives in the companion PowerShell file.

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: win_iis_webapppool
version_added: "2.0"
short_description: Configure IIS Web Application Pools
description:
- Creates, removes and configures an IIS Web Application Pool.
options:
  attributes:
    description:
    - This field is a free form dictionary value for the application pool
      attributes.
    - These attributes are based on the naming standard at
      U(https://www.iis.net/configreference/system.applicationhost/applicationpools/add#005),
      see the examples section for more details on how to set this.
    - You can also set the attributes of child elements like cpu and processModel,
      see the examples to see how it is done.
    - While you can use the numeric values for enums it is recommended to use the
      enum name itself, e.g. use SpecificUser instead of 3 for
      processModel.identityType.
    - managedPipelineMode may be either "Integrated" or "Classic".
    - startMode may be either "OnDemand" or "AlwaysRunning".
    - Use C(state) module parameter to modify the state of the app pool.
    - When trying to set 'processModel.password' and you receive a 'Value does
      fall within the expected range' error, you have a corrupted keystore.
      Please follow
      U(http://structuredsight.com/2014/10/26/im-out-of-range-youre-out-of-range/)
      to help fix your host.
  name:
    description:
    - Name of the application pool.
    required: yes
  state:
    choices: [ absent, present, restarted, started, stopped ]
    default: present
    description:
    - The state of the application pool.
    - If C(absent) will ensure the app pool is removed.
    - If C(present) will ensure the app pool is configured and exists.
    - If C(restarted) will ensure the app pool exists and will restart, this is
      never idempotent.
    - If C(started) will ensure the app pool exists and is started.
    - If C(stopped) will ensure the app pool exists and is stopped.
author:
- Henrik Wallström (@henrikwallstrom)
- Jordan Borean (@jborean93)
'''

EXAMPLES = r'''
- name: return information about an existing application pool
  win_iis_webapppool:
    name: DefaultAppPool
    state: present

- name: create a new application pool in 'Started' state
  win_iis_webapppool:
    name: AppPool
    state: started

- name: stop an application pool
  win_iis_webapppool:
    name: AppPool
    state: stopped

# BUG FIX: this example previously used "state: restart", which is not one of
# the documented choices and would fail argument validation.
- name: restart an application pool (non-idempotent)
  win_iis_webapppool:
    name: AppPool
    state: restarted

- name: change application pool attributes using new dict style
  win_iis_webapppool:
    name: AppPool
    attributes:
      managedRuntimeVersion: v4.0
      autoStart: no

- name: creates an application pool, sets attributes and starts it
  win_iis_webapppool:
    name: AnotherAppPool
    state: started
    attributes:
      managedRuntimeVersion: v4.0
      autoStart: no

# In the below example we are setting attributes in child element processModel
# https://www.iis.net/configreference/system.applicationhost/applicationpools/add/processmodel
- name: manage child element and set identity of application pool
  win_iis_webapppool:
    name: IdentitiyAppPool
    state: started
    attributes:
      managedPipelineMode: Classic
      processModel.identityType: SpecificUser
      processModel.userName: '{{ansible_user}}'
      processModel.password: '{{ansible_password}}'
      processModel.loadUserProfile: True

- name: manage a timespan attribute
  win_iis_webapppool:
    name: TimespanAppPool
    state: started
    attributes:
      # Timespan with full string "day:hour:minute:second.millisecond"
      recycling.periodicRestart.time: "00:00:05:00.000000"
      recycling.periodicRestart.schedule: ["00:10:00", "05:30:00"]
      # Shortened timespan "hour:minute:second"
      processModel.pingResponseTime: "00:03:00"
'''

RETURN = r'''
attributes:
  description: Application Pool attributes that were set and processed by this
    module invocation.
  returned: success
  type: dictionary
  sample:
    enable32BitAppOnWin64: "true"
    managedRuntimeVersion: "v4.0"
    managedPipelineMode: "Classic"
info:
  description: Information on current state of the Application Pool. See
    https://www.iis.net/configreference/system.applicationhost/applicationpools/add#005
    for the full list of return attributes based on your IIS version.
  returned: success
  type: complex
  sample:
  contains:
    attributes:
      description: Key value pairs showing the current Application Pool attributes.
      returned: success
      type: dictionary
      sample:
        autoStart: true
        managedRuntimeLoader: "webengine4.dll"
        managedPipelineMode: "Classic"
        name: "DefaultAppPool"
        CLRConfigFile: ""
        passAnonymousToken: true
        applicationPoolSid: "S-1-5-82-1352790163-598702362-1775843902-1923651883-1762956711"
        queueLength: 1000
        managedRuntimeVersion: "v4.0"
        state: "Started"
        enableConfigurationOverride: true
        startMode: "OnDemand"
        enable32BitAppOnWin64: true
    cpu:
      description: Key value pairs showing the current Application Pool cpu attributes.
      returned: success
      type: dictionary
      sample:
        action: "NoAction"
        limit: 0
        resetInterval:
          Days: 0
          Hours: 0
    failure:
      description: Key value pairs showing the current Application Pool failure attributes.
      returned: success
      type: dictionary
      sample:
        autoShutdownExe: ""
        orphanActionExe: ""
        rapidFailProtextionInterval:
          Days: 0
          Hours: 0
    name:
      description: Name of Application Pool that was processed by this module invocation.
      returned: success
      type: string
      sample: "DefaultAppPool"
    processModel:
      description: Key value pairs showing the current Application Pool processModel attributes.
      returned: success
      type: dictionary
      sample:
        identityType: "ApplicationPoolIdentity"
        logonType: "LogonBatch"
        pingInterval:
          Days: 0
          Hours: 0
    recycling:
      description: Key value pairs showing the current Application Pool recycling attributes.
      returned: success
      type: dictionary
      sample:
        disallowOverlappingRotation: false
        disallowRotationOnConfigChange: false
        logEventOnRecycle: "Time,Requests,Schedule,Memory,IsapiUnhealthy,OnDemand,ConfigChange,PrivateMemory"
state:
  description: Current runtime state of the pool as the module completed.
  returned: success
  type: string
  sample: "Started"
'''
gpl-3.0
40223208/2015cdb_g4_0420
static/Brython3.1.1-20150328-091302/Lib/_struct.py
726
13787
#
# This module is a pure Python version of pypy.module.struct.
# It is only imported if the vastly faster pypy.module.struct is not
# compiled in. For now we keep this version for reference and
# because pypy.module.struct is not ootype-backend-friendly yet.
#
# this module 'borrowed' from
# https://bitbucket.org/pypy/pypy/src/18626459a9b2/lib_pypy/_struct.py?at=py3k-listview_str

"""Functions to convert between Python values and C structs.
Python strings are used to hold the data representing the C struct
and also as format strings to describe the layout of data in the C struct.

The optional first format char indicates byte order, size and alignment:
 @: native order, size & alignment (default)
 =: native order, std. size & alignment
 <: little-endian, std. size & alignment
 >: big-endian, std. size & alignment
 !: same as >

The remaining chars indicate types of args and must match exactly;
these can be preceded by a decimal repeat count:
 x: pad byte (no data);
 c:char;
 b:signed byte;
 B:unsigned byte;
 h:short;
 H:unsigned short;
 i:int;
 I:unsigned int;
 l:long;
 L:unsigned long;
 f:float;
 d:double.
Special cases (preceding decimal count indicates length):
 s:string (array of char); p: pascal string (with count byte).
Special case (only available in native format):
 P:an integer type that is wide enough to hold a pointer.
Special case (not in native mode unless 'long long' in platform C):
 q:long long;
 Q:unsigned long long
Whitespace between formats is ignored.

The variable struct.error is an exception raised on errors."""

import math, sys

# TODO: XXX Find a way to get information on native sizes and alignments


class StructError(Exception):
    """Raised on bad format strings or mismatched arguments (alias: error)."""
    pass


error = StructError


def unpack_int(data, index, size, le):
    """Read `size` bytes of `data` at `index` as an unsigned int.

    `le` is the byte order of the data ('little' or 'big'); little-endian
    input is reversed so the accumulation loop always runs most-significant
    byte first.
    """
    byte_vals = [b for b in data[index:index + size]]
    if le == 'little':
        byte_vals.reverse()
    number = 0
    for b in byte_vals:
        number = number << 8 | b
    return int(number)


def unpack_signed_int(data, index, size, le):
    """Like unpack_int, but reinterpret the value as two's-complement."""
    number = unpack_int(data, index, size, le)
    max = 2 ** (size * 8)
    if number > 2 ** (size * 8 - 1) - 1:
        number = int(-1 * (max - number))
    return number


INFINITY = 1e200 * 1e200
NAN = INFINITY / INFINITY


def unpack_char(data, index, size, le):
    """Return the raw `size`-byte slice (used for the 'c' format)."""
    return data[index:index + size]


def pack_int(number, size, le):
    """Encode `number` into `size` bytes with byte order `le`."""
    x = number
    res = []
    for i in range(size):
        res.append(x & 0xff)
        x >>= 8
    if le == 'big':
        res.reverse()
    return bytes(res)


def pack_signed_int(number, size, le):
    """Range-check and pack a signed integer of `size` bytes."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    if number > 2 ** (8 * size - 1) - 1 or number < -1 * 2 ** (8 * size - 1):
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)


def pack_unsigned_int(number, size, le):
    """Range-check and pack an unsigned integer of `size` bytes."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    if number < 0:
        raise TypeError("can't convert negative long to unsigned")
    if number > 2 ** (8 * size) - 1:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)


def pack_char(char, size, le):
    """Pack a single character (already a length-1 bytes object)."""
    return bytes(char)


def isinf(x):
    # Local helper kept for parity with the original module.
    return x != 0.0 and x / 2 == x


def isnan(v):
    # NaN is the only value that does not compare equal to itself.
    return v != v * 1.0 or (v == 1.0 and v == 2.0)


def pack_float(x, size, le):
    """Pack a float into `size` (4 or 8) bytes with byte order `le`.

    BUG FIX: the original always emitted 8 bytes (range(8)), which corrupted
    the 4-byte 'f' format; emit exactly `size` bytes.
    """
    unsigned = float_pack(x, size)
    result = []
    for i in range(size):
        result.append((unsigned >> (i * 8)) & 0xFF)
    if le == "big":
        result.reverse()
    return bytes(result)


def unpack_float(data, index, size, le):
    """Unpack a `size`-byte (4 or 8) float from `data` at `index`.

    BUG FIX: the original always read 8 bytes, which broke the 4-byte 'f'
    format and could run past the end of the buffer.
    """
    binary = [data[i] for i in range(index, index + size)]
    if le == "big":
        binary.reverse()
    unsigned = 0
    for i in range(size):
        unsigned |= binary[i] << (i * 8)
    return float_unpack(unsigned, size, le)


def round_to_nearest(x):
    """Python 3 style round:  round a float x to the nearest int, but
    unlike the builtin Python 2.x round function:

      - return an int, not a float
      - do round-half-to-even, not round-half-away-from-zero.

    We assume that x is finite and nonnegative; except wrong results
    if you use this for negative x.
    """
    int_part = int(x)
    frac_part = x - int_part
    if frac_part > 0.5 or frac_part == 0.5 and int_part & 1 == 1:
        int_part += 1
    return int_part


def float_unpack(Q, size, le):
    """Convert a 32-bit or 64-bit integer created
    by float_pack into a Python float.

    `le` is unused here: byte-order handling is done by the caller
    (unpack_float), which always hands over a little-endian-assembled Q.
    """
    if size == 8:
        MIN_EXP = -1021  # = sys.float_info.min_exp
        MAX_EXP = 1024   # = sys.float_info.max_exp
        MANT_DIG = 53    # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125   # C's FLT_MIN_EXP
        MAX_EXP = 128    # FLT_MAX_EXP
        MANT_DIG = 24    # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")

    if Q >> BITS:
        raise ValueError("input out of range")

    # extract pieces
    sign = Q >> BITS - 1
    exp = (Q & ((1 << BITS - 1) - (1 << MANT_DIG - 1))) >> MANT_DIG - 1
    mant = Q & ((1 << MANT_DIG - 1) - 1)

    if exp == MAX_EXP - MIN_EXP + 2:
        # nan or infinity
        result = float('nan') if mant else float('inf')
    elif exp == 0:
        # subnormal or zero
        result = math.ldexp(float(mant), MIN_EXP - MANT_DIG)
    else:
        # normal
        mant += 1 << MANT_DIG - 1
        result = math.ldexp(float(mant), exp + MIN_EXP - MANT_DIG - 1)
    return -result if sign else result


def float_pack(x, size):
    """Convert a Python float x into a 64-bit unsigned integer
    with the same byte representation."""
    if size == 8:
        MIN_EXP = -1021  # = sys.float_info.min_exp
        MAX_EXP = 1024   # = sys.float_info.max_exp
        MANT_DIG = 53    # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125   # C's FLT_MIN_EXP
        MAX_EXP = 128    # FLT_MAX_EXP
        MANT_DIG = 24    # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")

    sign = math.copysign(1.0, x) < 0.0
    if math.isinf(x):
        mant = 0
        exp = MAX_EXP - MIN_EXP + 2
    elif math.isnan(x):
        mant = 1 << (MANT_DIG - 2)  # other values possible
        exp = MAX_EXP - MIN_EXP + 2
    elif x == 0.0:
        mant = 0
        exp = 0
    else:
        m, e = math.frexp(abs(x))  # abs(x) == m * 2**e
        exp = e - (MIN_EXP - 1)
        if exp > 0:
            # Normal case.
            mant = round_to_nearest(m * (1 << MANT_DIG))
            mant -= 1 << MANT_DIG - 1
        else:
            # Subnormal case.
            if exp + MANT_DIG - 1 >= 0:
                mant = round_to_nearest(m * (1 << exp + MANT_DIG - 1))
            else:
                mant = 0
            exp = 0

        # Special case: rounding produced a MANT_DIG-bit mantissa.
        assert 0 <= mant <= 1 << MANT_DIG - 1
        if mant == 1 << MANT_DIG - 1:
            mant = 0
            exp += 1

        # Raise on overflow (in some circumstances, may want to return
        # infinity instead).
        if exp >= MAX_EXP - MIN_EXP + 2:
            raise OverflowError("float too large to pack in this format")

    # check constraints
    assert 0 <= mant < 1 << MANT_DIG - 1
    assert 0 <= exp <= MAX_EXP - MIN_EXP + 2
    assert 0 <= sign <= 1
    return ((sign << BITS - 1) | (exp << MANT_DIG - 1)) | mant


# Per-format-char descriptors: byte size, alignment, and pack/unpack hooks.
big_endian_format = {
    'x': {'size': 1, 'alignment': 0, 'pack': None, 'unpack': None},
    'b': {'size': 1, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'B': {'size': 1, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'c': {'size': 1, 'alignment': 0, 'pack': pack_char, 'unpack': unpack_char},
    's': {'size': 1, 'alignment': 0, 'pack': None, 'unpack': None},
    'p': {'size': 1, 'alignment': 0, 'pack': None, 'unpack': None},
    'h': {'size': 2, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'H': {'size': 2, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'i': {'size': 4, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'I': {'size': 4, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'l': {'size': 4, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'L': {'size': 4, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'q': {'size': 8, 'alignment': 0, 'pack': pack_signed_int, 'unpack': unpack_signed_int},
    'Q': {'size': 8, 'alignment': 0, 'pack': pack_unsigned_int, 'unpack': unpack_int},
    'f': {'size': 4, 'alignment': 0, 'pack': pack_float, 'unpack': unpack_float},
    'd': {'size': 8, 'alignment': 0, 'pack': pack_float, 'unpack': unpack_float},
}
default = big_endian_format
# Leading format char -> (format table, byte order). No native-size support,
# so '@' and '=' both map to the standard table with native byte order.
formatmode = {
    '<': (default, 'little'),
    '>': (default, 'big'),
    '!': (default, 'big'),
    '=': (default, sys.byteorder),
    '@': (default, sys.byteorder),
}


def getmode(fmt):
    """Return (format table, endianness, index of first type char) for fmt."""
    try:
        formatdef, endianness = formatmode[fmt[0]]
        index = 1
    except (IndexError, KeyError):
        formatdef, endianness = formatmode['@']
        index = 0
    return formatdef, endianness, index


def getNum(fmt, i):
    """Parse an optional decimal repeat count at fmt[i].

    Returns (count or None, index of the following format char).
    """
    num = None
    cur = fmt[i]
    while ('0' <= cur) and (cur <= '9'):
        if num == None:
            num = int(cur)
        else:
            num = 10 * num + int(cur)
        i += 1
        cur = fmt[i]
    return num, i


def calcsize(fmt):
    """calcsize(fmt) -> int
    Return size of C struct described by format string fmt.
    See struct.__doc__ for more on format strings."""
    formatdef, endianness, i = getmode(fmt)
    num = 0
    result = 0
    while i < len(fmt):
        num, i = getNum(fmt, i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        if num != None:
            result += num * format['size']
        else:
            result += format['size']
        num = 0
        i += 1
    return result


def pack(fmt, *args):
    """pack(fmt, v1, v2, ...) -> string
    Return string containing values v1, v2, ... packed according to fmt.
    See struct.__doc__ for more on format strings."""
    formatdef, endianness, i = getmode(fmt)
    args = list(args)
    n_args = len(args)
    result = []
    while i < len(fmt):
        num, i = getNum(fmt, i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        if num == None:
            num = 1
        if cur == 'x':
            result += [b'\0' * num]
        elif cur == 's':
            if isinstance(args[0], bytes):
                padding = num - len(args[0])
                result += [args[0][:num] + b'\0' * padding]
                args.pop(0)
            else:
                raise StructError("arg for string format not a string")
        elif cur == 'p':
            # Pascal string: first byte is the length, capped at num-1 / 255.
            if isinstance(args[0], bytes):
                padding = num - len(args[0]) - 1
                if padding > 0:
                    result += [bytes([len(args[0])]) + args[0][:num - 1] + b'\0' * padding]
                else:
                    if num < 255:
                        result += [bytes([num - 1]) + args[0][:num - 1]]
                    else:
                        result += [bytes([255]) + args[0][:num - 1]]
                args.pop(0)
            else:
                raise StructError("arg for string format not a string")
        else:
            if len(args) < num:
                raise StructError("insufficient arguments to pack")
            for var in args[:num]:
                result += [format['pack'](var, format['size'], endianness)]
            args = args[num:]
        num = None
        i += 1
    if len(args) != 0:
        raise StructError("too many arguments for pack format")
    return b''.join(result)


def unpack(fmt, data):
    """unpack(fmt, string) -> (v1, v2, ...)
    Unpack the string, containing packed C structure data, according
    to fmt.  Requires len(string)==calcsize(fmt).
    See struct.__doc__ for more on format strings."""
    formatdef, endianness, i = getmode(fmt)
    j = 0
    num = 0
    result = []
    length = calcsize(fmt)
    if length != len(data):
        raise StructError("unpack str size does not match format")
    while i < len(fmt):
        num, i = getNum(fmt, i)
        cur = fmt[i]
        i += 1
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        if not num:
            num = 1
        if cur == 'x':
            j += num
        elif cur == 's':
            result.append(data[j:j + num])
            j += num
        elif cur == 'p':
            n = data[j]
            if n >= num:
                n = num - 1
            result.append(data[j + 1:j + n + 1])
            j += num
        else:
            for n in range(num):
                result += [format['unpack'](data, j, format['size'], endianness)]
                j += format['size']
    return tuple(result)


def pack_into(fmt, buf, offset, *args):
    """Pack values into a writable buffer at the given offset.

    BUG FIX: the original used the Python-2-only builtin buffer(), which is a
    NameError on Python 3; memoryview provides the equivalent writable view.
    """
    data = pack(fmt, *args)
    memoryview(buf)[offset:offset + len(data)] = data


def unpack_from(fmt, buf, offset=0):
    """Unpack from a buffer object at the given offset.

    BUG FIX: buffer() -> memoryview() for Python 3 (see pack_into).
    """
    size = calcsize(fmt)
    data = bytes(memoryview(buf)[offset:offset + size])
    if len(data) != size:
        raise error("unpack_from requires a buffer of at least %d bytes"
                    % (size,))
    return unpack(fmt, data)


def _clearcache():
    "Clear the internal cache."
    # No cache in this implementation
gpl-3.0
devilry/devilry-django
devilry/devilry_admin/tests/period/test_manage_tags/test_manage_tags_relatedusers.py
1
19151
# -*- coding: utf-8 -*-
from django import test
from cradmin_legacy import cradmin_testhelpers
from model_bakery import baker

from devilry.apps.core.models import PeriodTag
from devilry.apps.core.models import RelatedExaminer, RelatedStudent
from devilry.devilry_admin.views.period.manage_tags import manage_tags
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql


def _post_selected(testcase, period, periodtag, selected_ids):
    """POST the given ids as ``selected_items`` to the tag view under test."""
    testcase.mock_http302_postrequest(
        cradmin_role=period,
        viewkwargs={'tag_id': periodtag.id},
        requestkwargs={'data': {'selected_items': selected_ids}})


class TestAddRelatedExaminersToTag(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    """Adding related examiners to a period tag through the add view."""
    viewclass = manage_tags.RelatedExaminerAddView

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_add_single_relatedexaminer_to_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        examiner = baker.make('core.RelatedExaminer', period=period)
        self.assertEqual(examiner.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedexaminers.count(), 0)
        _post_selected(self, period, periodtag, [examiner.id])
        examiner = RelatedExaminer.objects.get(id=examiner.id)
        examiner_tag_ids = examiner.periodtag_set.all().values_list('id', flat=True)
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_examiner_ids = periodtag.relatedexaminers.all().values_list('id', flat=True)
        self.assertEqual(examiner.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedexaminers.count(), 1)
        self.assertIn(examiner.id, tag_examiner_ids)
        self.assertIn(periodtag.id, examiner_tag_ids)

    def test_add_multiple_relatedexaminer_to_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        examiners = [baker.make('core.RelatedExaminer', period=period)
                     for _ in range(3)]
        for examiner in examiners:
            self.assertEqual(examiner.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedexaminers.count(), 0)
        _post_selected(self, period, periodtag, [e.id for e in examiners])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_examiner_ids = periodtag.relatedexaminers.all().values_list('id', flat=True)
        self.assertEqual(len(tag_examiner_ids), 3)
        for examiner in examiners:
            self.assertIn(examiner.id, tag_examiner_ids)

    def test_add_only_selected_relatedexaminers_are_added(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        selected = [baker.make('core.RelatedExaminer', period=period)
                    for _ in range(2)]
        unselected = baker.make('core.RelatedExaminer', period=period)
        for examiner in selected + [unselected]:
            self.assertEqual(examiner.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedexaminers.count(), 0)
        _post_selected(self, period, periodtag, [e.id for e in selected])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_examiner_ids = periodtag.relatedexaminers.all().values_list('id', flat=True)
        for examiner in selected:
            self.assertIn(examiner.id, tag_examiner_ids)
        self.assertNotIn(unselected.id, tag_examiner_ids)


class TestRemoveRelatedExaminersFromTag(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    """Removing related examiners from a period tag through the remove view."""
    viewclass = manage_tags.RelatedExaminerRemoveView

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_remove_single_relatedexaminer_from_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        examiner = baker.make('core.RelatedExaminer', period=period)
        periodtag.relatedexaminers.add(examiner)
        self.assertEqual(examiner.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedexaminers.count(), 1)
        _post_selected(self, period, periodtag, [examiner.id])
        examiner = RelatedExaminer.objects.get(id=examiner.id)
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        self.assertEqual(examiner.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedexaminers.count(), 0)

    def test_remove_multiple_relatedexaminers_to_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        examiners = [baker.make('core.RelatedExaminer', period=period)
                     for _ in range(3)]
        for examiner in examiners:
            periodtag.relatedexaminers.add(examiner)
        for examiner in examiners:
            self.assertEqual(examiner.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedexaminers.count(), 3)
        _post_selected(self, period, periodtag, [e.id for e in examiners])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        self.assertEqual(periodtag.relatedexaminers.count(), 0)
        for examiner in examiners:
            refreshed = RelatedExaminer.objects.get(id=examiner.id)
            self.assertEqual(refreshed.periodtag_set.count(), 0)

    def test_remove_only_selected_relatedexaminers_are_removed(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        selected = [baker.make('core.RelatedExaminer', period=period)
                    for _ in range(2)]
        kept = baker.make('core.RelatedExaminer', period=period)
        for examiner in selected + [kept]:
            periodtag.relatedexaminers.add(examiner)
            self.assertEqual(examiner.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedexaminers.count(), 3)
        _post_selected(self, period, periodtag, [e.id for e in selected])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_examiner_ids = periodtag.relatedexaminers.all().values_list('id', flat=True)
        for examiner in selected:
            self.assertNotIn(examiner.id, tag_examiner_ids)
        self.assertIn(kept.id, tag_examiner_ids)


class TestAddRelatedStudentsToTag(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    """Adding related students to a period tag through the add view."""
    viewclass = manage_tags.RelatedStudentAddView

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_add_single_relatedstudent_to_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        student = baker.make('core.RelatedStudent', period=period)
        self.assertEqual(student.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedstudents.count(), 0)
        _post_selected(self, period, periodtag, [student.id])
        student = RelatedStudent.objects.get(id=student.id)
        student_tag_ids = student.periodtag_set.all().values_list('id', flat=True)
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_student_ids = periodtag.relatedstudents.all().values_list('id', flat=True)
        self.assertEqual(student.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedstudents.count(), 1)
        self.assertIn(student.id, tag_student_ids)
        self.assertIn(periodtag.id, student_tag_ids)

    def test_add_multiple_relatedstudents_to_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        students = [baker.make('core.RelatedStudent', period=period)
                    for _ in range(3)]
        for student in students:
            self.assertEqual(student.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedstudents.count(), 0)
        _post_selected(self, period, periodtag, [s.id for s in students])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_student_ids = periodtag.relatedstudents.all().values_list('id', flat=True)
        self.assertEqual(len(tag_student_ids), 3)
        for student in students:
            self.assertIn(student.id, tag_student_ids)

    def test_add_only_selected_relatedstudents_are_added(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        selected = [baker.make('core.RelatedStudent', period=period)
                    for _ in range(2)]
        unselected = baker.make('core.RelatedStudent', period=period)
        for student in selected + [unselected]:
            self.assertEqual(student.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedstudents.count(), 0)
        _post_selected(self, period, periodtag, [s.id for s in selected])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_student_ids = periodtag.relatedstudents.all().values_list('id', flat=True)
        for student in selected:
            self.assertIn(student.id, tag_student_ids)
        self.assertNotIn(unselected.id, tag_student_ids)


class TestRemoveRelatedStudentsFromTag(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    """Removing related students from a period tag through the remove view."""
    viewclass = manage_tags.RelatedStudentRemoveView

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def test_remove_single_relatedstudent_from_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        student = baker.make('core.RelatedStudent', period=period)
        periodtag.relatedstudents.add(student)
        self.assertEqual(student.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedstudents.count(), 1)
        _post_selected(self, period, periodtag, [student.id])
        student = RelatedStudent.objects.get(id=student.id)
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        self.assertEqual(student.periodtag_set.count(), 0)
        self.assertEqual(periodtag.relatedstudents.count(), 0)

    def test_remove_multiple_relatedstudents_from_tag(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        students = [baker.make('core.RelatedStudent', period=period)
                    for _ in range(3)]
        for student in students:
            periodtag.relatedstudents.add(student)
        for student in students:
            self.assertEqual(student.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedstudents.count(), 3)
        _post_selected(self, period, periodtag, [s.id for s in students])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        self.assertEqual(periodtag.relatedstudents.count(), 0)
        for student in students:
            refreshed = RelatedStudent.objects.get(id=student.id)
            self.assertEqual(refreshed.periodtag_set.count(), 0)

    def test_remove_only_selected_relatedexaminers_are_removed(self):
        period = baker.make('core.Period')
        periodtag = baker.make('core.PeriodTag', period=period, tag='a')
        selected = [baker.make('core.RelatedStudent', period=period)
                    for _ in range(2)]
        kept = baker.make('core.RelatedStudent', period=period)
        for student in selected + [kept]:
            periodtag.relatedstudents.add(student)
            self.assertEqual(student.periodtag_set.count(), 1)
        self.assertEqual(periodtag.relatedstudents.count(), 3)
        _post_selected(self, period, periodtag, [s.id for s in selected])
        periodtag = PeriodTag.objects.get(id=periodtag.id)
        tag_student_ids = periodtag.relatedstudents.all().values_list('id', flat=True)
        for student in selected:
            self.assertNotIn(student.id, tag_student_ids)
        self.assertIn(kept.id, tag_student_ids)
bsd-3-clause
sumeetsk/NEXT
apps/ActiveRanking/algs/AR_Random/AR_Random.py
1
3328
"""
AR_Random app implements Active ranking Random
author: Sumeet Katariya, sumeetsk@gmail.com
last updated: 09/24/2016

AR_Random implements random sampling
"""
import numpy as np
import numpy.random
import next.utils as utils


class AR_Random:
    """Random-sampling algorithm for the ActiveRanking app.

    Queries are uniformly random pairs of distinct items.  Answers are
    accumulated in a pairwise win-count matrix ``W`` where ``W[i, j]``
    counts the wins of item ``i`` over item ``j``.
    """
    app_id = 'ActiveRanking'

    def initExp(self, butler, n=None, params=None):
        """Set the keys used later by this algorithm.

        :param butler: project-supplied state accessor (algorithms/participants stores).
        :param n: number of items to rank.
        :param params: unused; accepted for interface compatibility.
        :returns: True on success.
        """
        butler.algorithms.set(key='n', value=n)
        butler.algorithms.set(key='W', value=np.zeros((n, n)))
        # Touch the log file so later appends cannot fail on a missing file.
        open('AR_Random.log', 'a').close()
        return True

    def getQuery(self, butler, participant_uid):
        """Return a uniformly random pair of distinct items as ``[i, j, 0]``.

        Tries up to 10 times to avoid repeating any item from this
        participant's previous query; each collision is appended to
        ``Repeats.log``.  If all attempts collide, the last pair drawn is
        used anyway.
        """
        n = butler.algorithms.get(key='n')
        last_query = butler.participants.get(key='last_query')
        if last_query is None:  # BUG FIX: was ``== None``; identity check is correct for None
            butler.participants.set(key='last_query', value=(-1, -1))
            last_query = butler.participants.get(key='last_query')
        repeat_attempts = 0
        index = alt_index = 0
        while repeat_attempts < 10:
            index = np.random.randint(n)
            alt_index = np.random.randint(n)
            while alt_index == index:
                alt_index = np.random.randint(n)
            if not any(x in (index, alt_index) for x in last_query):
                # No item repeated from the previous query: accept this pair.
                break
            with open('Repeats.log', 'a') as f:
                f.write(str((index, alt_index)) + '\n')
                f.write('Query item repeated\n')
            repeat_attempts += 1
        butler.participants.set(key='last_query', value=(index, alt_index))
        return [index, alt_index, 0]

    def processAnswer(self, butler, left_id=0, right_id=0, winner_id=0, quicksort_data=0):
        """Record one pairwise comparison outcome into the win matrix ``W``.

        Also appends the raw triple to ``AR_Random.log`` and ``Queries.log``.
        :returns: True on success.
        """
        utils.debug_print('In AR_Random: processAnswer')
        utils.debug_print('left_id:' + str(left_id))
        utils.debug_print('right_id:' + str(right_id))
        W = np.array(butler.algorithms.get(key='W'))
        # ``with`` guarantees the handles are closed even if a write fails.
        with open('AR_Random.log', 'a') as f:
            f.write(str([left_id, right_id, winner_id]) + '\n')
        with open('Queries.log', 'a') as f:
            f.write('AR ' + str([left_id, right_id, winner_id]) + '\n')
        if left_id == winner_id:
            W[left_id, right_id] = W[left_id, right_id] + 1
        else:
            W[right_id, left_id] = W[right_id, left_id] + 1
        butler.algorithms.set(key='W', value=W)
        return True

    def getModel(self, butler):
        """Return the current win matrix; the ranking itself is not computed.

        NOTE(review): the second return value is a hard-coded ``range(5)``
        placeholder — presumably a stub ranking; confirm against the app's
        getModel contract before relying on it.
        """
        W = butler.algorithms.get(key='W')
        return W, range(5)
apache-2.0
SpectraLogic/samba
third_party/dnspython/dns/exception.py
101
1318
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""Common DNS Exceptions."""

# A docstring alone makes the class body complete, so the redundant
# ``pass`` statements have been dropped.  Note that ``SyntaxError`` here
# deliberately shadows the builtin inside this module's namespace; callers
# use it as ``dns.exception.SyntaxError``.


class DNSException(Exception):
    """Abstract base class shared by all dnspython exceptions."""


class FormError(DNSException):
    """DNS message is malformed."""


class SyntaxError(DNSException):
    """Text input is malformed."""


class UnexpectedEnd(SyntaxError):
    """Raised if text input ends unexpectedly."""


class TooBig(DNSException):
    """The message is too big."""


class Timeout(DNSException):
    """The operation timed out."""
gpl-3.0
wskplho/sl4a
python/src/Lib/encodings/aliases.py
60
14721
""" Encoding Aliases Support This module is used by the encodings package search function to map encodings names to module names. Note that the search function normalizes the encoding names before doing the lookup, so the mapping will have to map normalized encoding names to module names. Contents: The following aliases dictionary contains mappings of all IANA character set names for which the Python core library provides codecs. In addition to these, a few Python specific codec aliases have also been added. """ aliases = { # Please keep this list sorted alphabetically by value ! # ascii codec '646' : 'ascii', 'ansi_x3.4_1968' : 'ascii', 'ansi_x3_4_1968' : 'ascii', # some email headers use this non-standard name 'ansi_x3.4_1986' : 'ascii', 'cp367' : 'ascii', 'csascii' : 'ascii', 'ibm367' : 'ascii', 'iso646_us' : 'ascii', 'iso_646.irv_1991' : 'ascii', 'iso_ir_6' : 'ascii', 'us' : 'ascii', 'us_ascii' : 'ascii', # base64_codec codec 'base64' : 'base64_codec', 'base_64' : 'base64_codec', # big5 codec 'big5_tw' : 'big5', 'csbig5' : 'big5', # big5hkscs codec 'big5_hkscs' : 'big5hkscs', 'hkscs' : 'big5hkscs', # bz2_codec codec 'bz2' : 'bz2_codec', # cp037 codec '037' : 'cp037', 'csibm037' : 'cp037', 'ebcdic_cp_ca' : 'cp037', 'ebcdic_cp_nl' : 'cp037', 'ebcdic_cp_us' : 'cp037', 'ebcdic_cp_wt' : 'cp037', 'ibm037' : 'cp037', 'ibm039' : 'cp037', # cp1026 codec '1026' : 'cp1026', 'csibm1026' : 'cp1026', 'ibm1026' : 'cp1026', # cp1140 codec '1140' : 'cp1140', 'ibm1140' : 'cp1140', # cp1250 codec '1250' : 'cp1250', 'windows_1250' : 'cp1250', # cp1251 codec '1251' : 'cp1251', 'windows_1251' : 'cp1251', # cp1252 codec '1252' : 'cp1252', 'windows_1252' : 'cp1252', # cp1253 codec '1253' : 'cp1253', 'windows_1253' : 'cp1253', # cp1254 codec '1254' : 'cp1254', 'windows_1254' : 'cp1254', # cp1255 codec '1255' : 'cp1255', 'windows_1255' : 'cp1255', # cp1256 codec '1256' : 'cp1256', 'windows_1256' : 'cp1256', # cp1257 codec '1257' : 'cp1257', 'windows_1257' : 'cp1257', # cp1258 codec 
'1258' : 'cp1258', 'windows_1258' : 'cp1258', # cp424 codec '424' : 'cp424', 'csibm424' : 'cp424', 'ebcdic_cp_he' : 'cp424', 'ibm424' : 'cp424', # cp437 codec '437' : 'cp437', 'cspc8codepage437' : 'cp437', 'ibm437' : 'cp437', # cp500 codec '500' : 'cp500', 'csibm500' : 'cp500', 'ebcdic_cp_be' : 'cp500', 'ebcdic_cp_ch' : 'cp500', 'ibm500' : 'cp500', # cp775 codec '775' : 'cp775', 'cspc775baltic' : 'cp775', 'ibm775' : 'cp775', # cp850 codec '850' : 'cp850', 'cspc850multilingual' : 'cp850', 'ibm850' : 'cp850', # cp852 codec '852' : 'cp852', 'cspcp852' : 'cp852', 'ibm852' : 'cp852', # cp855 codec '855' : 'cp855', 'csibm855' : 'cp855', 'ibm855' : 'cp855', # cp857 codec '857' : 'cp857', 'csibm857' : 'cp857', 'ibm857' : 'cp857', # cp860 codec '860' : 'cp860', 'csibm860' : 'cp860', 'ibm860' : 'cp860', # cp861 codec '861' : 'cp861', 'cp_is' : 'cp861', 'csibm861' : 'cp861', 'ibm861' : 'cp861', # cp862 codec '862' : 'cp862', 'cspc862latinhebrew' : 'cp862', 'ibm862' : 'cp862', # cp863 codec '863' : 'cp863', 'csibm863' : 'cp863', 'ibm863' : 'cp863', # cp864 codec '864' : 'cp864', 'csibm864' : 'cp864', 'ibm864' : 'cp864', # cp865 codec '865' : 'cp865', 'csibm865' : 'cp865', 'ibm865' : 'cp865', # cp866 codec '866' : 'cp866', 'csibm866' : 'cp866', 'ibm866' : 'cp866', # cp869 codec '869' : 'cp869', 'cp_gr' : 'cp869', 'csibm869' : 'cp869', 'ibm869' : 'cp869', # cp932 codec '932' : 'cp932', 'ms932' : 'cp932', 'mskanji' : 'cp932', 'ms_kanji' : 'cp932', # cp949 codec '949' : 'cp949', 'ms949' : 'cp949', 'uhc' : 'cp949', # cp950 codec '950' : 'cp950', 'ms950' : 'cp950', # euc_jis_2004 codec 'jisx0213' : 'euc_jis_2004', 'eucjis2004' : 'euc_jis_2004', 'euc_jis2004' : 'euc_jis_2004', # euc_jisx0213 codec 'eucjisx0213' : 'euc_jisx0213', # euc_jp codec 'eucjp' : 'euc_jp', 'ujis' : 'euc_jp', 'u_jis' : 'euc_jp', # euc_kr codec 'euckr' : 'euc_kr', 'korean' : 'euc_kr', 'ksc5601' : 'euc_kr', 'ks_c_5601' : 'euc_kr', 'ks_c_5601_1987' : 'euc_kr', 'ksx1001' : 'euc_kr', 'ks_x_1001' : 'euc_kr', # 
gb18030 codec 'gb18030_2000' : 'gb18030', # gb2312 codec 'chinese' : 'gb2312', 'csiso58gb231280' : 'gb2312', 'euc_cn' : 'gb2312', 'euccn' : 'gb2312', 'eucgb2312_cn' : 'gb2312', 'gb2312_1980' : 'gb2312', 'gb2312_80' : 'gb2312', 'iso_ir_58' : 'gb2312', # gbk codec '936' : 'gbk', 'cp936' : 'gbk', 'ms936' : 'gbk', # hex_codec codec 'hex' : 'hex_codec', # hp_roman8 codec 'roman8' : 'hp_roman8', 'r8' : 'hp_roman8', 'csHPRoman8' : 'hp_roman8', # hz codec 'hzgb' : 'hz', 'hz_gb' : 'hz', 'hz_gb_2312' : 'hz', # iso2022_jp codec 'csiso2022jp' : 'iso2022_jp', 'iso2022jp' : 'iso2022_jp', 'iso_2022_jp' : 'iso2022_jp', # iso2022_jp_1 codec 'iso2022jp_1' : 'iso2022_jp_1', 'iso_2022_jp_1' : 'iso2022_jp_1', # iso2022_jp_2 codec 'iso2022jp_2' : 'iso2022_jp_2', 'iso_2022_jp_2' : 'iso2022_jp_2', # iso2022_jp_2004 codec 'iso_2022_jp_2004' : 'iso2022_jp_2004', 'iso2022jp_2004' : 'iso2022_jp_2004', # iso2022_jp_3 codec 'iso2022jp_3' : 'iso2022_jp_3', 'iso_2022_jp_3' : 'iso2022_jp_3', # iso2022_jp_ext codec 'iso2022jp_ext' : 'iso2022_jp_ext', 'iso_2022_jp_ext' : 'iso2022_jp_ext', # iso2022_kr codec 'csiso2022kr' : 'iso2022_kr', 'iso2022kr' : 'iso2022_kr', 'iso_2022_kr' : 'iso2022_kr', # iso8859_10 codec 'csisolatin6' : 'iso8859_10', 'iso_8859_10' : 'iso8859_10', 'iso_8859_10_1992' : 'iso8859_10', 'iso_ir_157' : 'iso8859_10', 'l6' : 'iso8859_10', 'latin6' : 'iso8859_10', # iso8859_11 codec 'thai' : 'iso8859_11', 'iso_8859_11' : 'iso8859_11', 'iso_8859_11_2001' : 'iso8859_11', # iso8859_13 codec 'iso_8859_13' : 'iso8859_13', 'l7' : 'iso8859_13', 'latin7' : 'iso8859_13', # iso8859_14 codec 'iso_8859_14' : 'iso8859_14', 'iso_8859_14_1998' : 'iso8859_14', 'iso_celtic' : 'iso8859_14', 'iso_ir_199' : 'iso8859_14', 'l8' : 'iso8859_14', 'latin8' : 'iso8859_14', # iso8859_15 codec 'iso_8859_15' : 'iso8859_15', 'l9' : 'iso8859_15', 'latin9' : 'iso8859_15', # iso8859_16 codec 'iso_8859_16' : 'iso8859_16', 'iso_8859_16_2001' : 'iso8859_16', 'iso_ir_226' : 'iso8859_16', 'l10' : 'iso8859_16', 'latin10' : 
'iso8859_16', # iso8859_2 codec 'csisolatin2' : 'iso8859_2', 'iso_8859_2' : 'iso8859_2', 'iso_8859_2_1987' : 'iso8859_2', 'iso_ir_101' : 'iso8859_2', 'l2' : 'iso8859_2', 'latin2' : 'iso8859_2', # iso8859_3 codec 'csisolatin3' : 'iso8859_3', 'iso_8859_3' : 'iso8859_3', 'iso_8859_3_1988' : 'iso8859_3', 'iso_ir_109' : 'iso8859_3', 'l3' : 'iso8859_3', 'latin3' : 'iso8859_3', # iso8859_4 codec 'csisolatin4' : 'iso8859_4', 'iso_8859_4' : 'iso8859_4', 'iso_8859_4_1988' : 'iso8859_4', 'iso_ir_110' : 'iso8859_4', 'l4' : 'iso8859_4', 'latin4' : 'iso8859_4', # iso8859_5 codec 'csisolatincyrillic' : 'iso8859_5', 'cyrillic' : 'iso8859_5', 'iso_8859_5' : 'iso8859_5', 'iso_8859_5_1988' : 'iso8859_5', 'iso_ir_144' : 'iso8859_5', # iso8859_6 codec 'arabic' : 'iso8859_6', 'asmo_708' : 'iso8859_6', 'csisolatinarabic' : 'iso8859_6', 'ecma_114' : 'iso8859_6', 'iso_8859_6' : 'iso8859_6', 'iso_8859_6_1987' : 'iso8859_6', 'iso_ir_127' : 'iso8859_6', # iso8859_7 codec 'csisolatingreek' : 'iso8859_7', 'ecma_118' : 'iso8859_7', 'elot_928' : 'iso8859_7', 'greek' : 'iso8859_7', 'greek8' : 'iso8859_7', 'iso_8859_7' : 'iso8859_7', 'iso_8859_7_1987' : 'iso8859_7', 'iso_ir_126' : 'iso8859_7', # iso8859_8 codec 'csisolatinhebrew' : 'iso8859_8', 'hebrew' : 'iso8859_8', 'iso_8859_8' : 'iso8859_8', 'iso_8859_8_1988' : 'iso8859_8', 'iso_ir_138' : 'iso8859_8', # iso8859_9 codec 'csisolatin5' : 'iso8859_9', 'iso_8859_9' : 'iso8859_9', 'iso_8859_9_1989' : 'iso8859_9', 'iso_ir_148' : 'iso8859_9', 'l5' : 'iso8859_9', 'latin5' : 'iso8859_9', # johab codec 'cp1361' : 'johab', 'ms1361' : 'johab', # koi8_r codec 'cskoi8r' : 'koi8_r', # latin_1 codec # # Note that the latin_1 codec is implemented internally in C and a # lot faster than the charmap codec iso8859_1 which uses the same # encoding. This is why we discourage the use of the iso8859_1 # codec and alias it to latin_1 instead. 
# '8859' : 'latin_1', 'cp819' : 'latin_1', 'csisolatin1' : 'latin_1', 'ibm819' : 'latin_1', 'iso8859' : 'latin_1', 'iso8859_1' : 'latin_1', 'iso_8859_1' : 'latin_1', 'iso_8859_1_1987' : 'latin_1', 'iso_ir_100' : 'latin_1', 'l1' : 'latin_1', 'latin' : 'latin_1', 'latin1' : 'latin_1', # mac_cyrillic codec 'maccyrillic' : 'mac_cyrillic', # mac_greek codec 'macgreek' : 'mac_greek', # mac_iceland codec 'maciceland' : 'mac_iceland', # mac_latin2 codec 'maccentraleurope' : 'mac_latin2', 'maclatin2' : 'mac_latin2', # mac_roman codec 'macroman' : 'mac_roman', # mac_turkish codec 'macturkish' : 'mac_turkish', # mbcs codec 'dbcs' : 'mbcs', # ptcp154 codec 'csptcp154' : 'ptcp154', 'pt154' : 'ptcp154', 'cp154' : 'ptcp154', 'cyrillic-asian' : 'ptcp154', # quopri_codec codec 'quopri' : 'quopri_codec', 'quoted_printable' : 'quopri_codec', 'quotedprintable' : 'quopri_codec', # rot_13 codec 'rot13' : 'rot_13', # shift_jis codec 'csshiftjis' : 'shift_jis', 'shiftjis' : 'shift_jis', 'sjis' : 'shift_jis', 's_jis' : 'shift_jis', # shift_jis_2004 codec 'shiftjis2004' : 'shift_jis_2004', 'sjis_2004' : 'shift_jis_2004', 's_jis_2004' : 'shift_jis_2004', # shift_jisx0213 codec 'shiftjisx0213' : 'shift_jisx0213', 'sjisx0213' : 'shift_jisx0213', 's_jisx0213' : 'shift_jisx0213', # tactis codec 'tis260' : 'tactis', # tis_620 codec 'tis620' : 'tis_620', 'tis_620_0' : 'tis_620', 'tis_620_2529_0' : 'tis_620', 'tis_620_2529_1' : 'tis_620', 'iso_ir_166' : 'tis_620', # utf_16 codec 'u16' : 'utf_16', 'utf16' : 'utf_16', # utf_16_be codec 'unicodebigunmarked' : 'utf_16_be', 'utf_16be' : 'utf_16_be', # utf_16_le codec 'unicodelittleunmarked' : 'utf_16_le', 'utf_16le' : 'utf_16_le', # utf_32 codec 'u32' : 'utf_32', 'utf32' : 'utf_32', # utf_32_be codec 'utf_32be' : 'utf_32_be', # utf_32_le codec 'utf_32le' : 'utf_32_le', # utf_7 codec 'u7' : 'utf_7', 'utf7' : 'utf_7', 'unicode_1_1_utf_7' : 'utf_7', # utf_8 codec 'u8' : 'utf_8', 'utf' : 'utf_8', 'utf8' : 'utf_8', 'utf8_ucs2' : 'utf_8', 'utf8_ucs4' : 
'utf_8', # uu_codec codec 'uu' : 'uu_codec', # zlib_codec codec 'zip' : 'zlib_codec', 'zlib' : 'zlib_codec', }
apache-2.0
datagovuk/reserve
running_stats.py
1
3330
'''Tool for a script to keep track of changes performed on a large number
of objects.

Example:

  from running_stats import Stats
  stats = Stats()
  for package in packages:
      if package.enabled:
          package.delete()
          stats.add('deleted', package.name)
      else:
          stats.add('not deleted', package.name)
  print(stats)
  > deleted: 30 pollution-uk, flood-regions, river-quality, ...
  > not deleted: 70 spending-bristol, ...
'''
import copy
import datetime


class StatsCount(dict):
    # Maps category -> count.
    _init_value = 0
    # Category values longer than this are truncated in reports.
    report_value_limit = 150

    def __init__(self, *args, **kwargs):
        self._start_time = datetime.datetime.now()
        super(StatsCount, self).__init__(*args, **kwargs)

    def _init_category(self, category):
        """Lazily create *category* with a deep copy of the initial value
        (deep copy so mutable initial values such as ``[]`` are not shared)."""
        # BUG FIX: dict.has_key() was removed in Python 3; ``in`` works in both.
        if category not in self:
            self[category] = copy.deepcopy(self._init_value)

    def increment(self, category):
        """Add 1 to the count for *category*, creating it if needed."""
        self._init_category(category)
        self[category] += 1

    def report_value(self, category):
        '''Returns the value for a category and value to sort categories by.'''
        value = repr(self[category])
        if len(value) > self.report_value_limit:
            value = value[:self.report_value_limit] + '...'
        return (value, self[category])

    def report(self, indent=1, order_by_title=False, show_time_taken=True):
        """Return a multi-line text report of all categories.

        :param indent: number of tab characters prefixed to each line.
        :param order_by_title: sort alphabetically instead of by count (desc).
        :param show_time_taken: append elapsed time since construction.
        """
        lines = []
        indent_str = '\t' * indent
        report_dict = {category: self.report_value(category)
                       for category in self.keys()}
        # BUG FIX: dict.iteritems() was removed in Python 3; items() works in both.
        if order_by_title:
            items = sorted(report_dict.items())
        else:
            items = sorted(report_dict.items(), key=lambda x: -x[1][1])
        for category, value_tuple in items:
            value = value_tuple[0]
            lines.append(indent_str + '%s: %s' % (category, value))
        if not self:
            lines = [indent_str + 'None']
        if show_time_taken:
            time_taken = datetime.datetime.now() - self._start_time
            lines.append(indent_str + 'Time taken (h:m:s): %s' % time_taken)
        return '\n'.join(lines)

    def show_time_taken(self):
        """Print elapsed time since this object was constructed."""
        time_taken = datetime.datetime.now() - self._start_time
        # BUG FIX: ``print`` statement is a syntax error in Python 3; the
        # parenthesized call form works in both Python 2 and 3 for one arg.
        print('Time taken (h:m:s): %s' % time_taken)

    def __repr__(self):
        return self.report()


class Stats(StatsCount):
    # Maps category -> list of values.
    _init_value = []

    def add(self, category, value):
        """Append *value* under *category* and return a loggable message
        (ascii-encoded so it is always safe to write to a log)."""
        self._init_category(category)
        self[category].append(value)
        return ('%s: %s' % (category, value)).encode('ascii', 'ignore')

    def report_value(self, category):
        """Return ``('<count> <values...>', count)`` for *category*."""
        value = self[category]
        number_of_values = len(value)
        value_str = '%i %r' % (number_of_values, value)
        if len(value_str) > self.report_value_limit:
            value_str = value_str[:self.report_value_limit] + '...'
        return (value_str, number_of_values)


# deprecated name - kept for backward compatibility
class StatsList(Stats):
    pass


if __name__ == '__main__':
    package_stats = Stats()
    package_stats.add('Success', 'good1')
    package_stats.add('Success', 'good2')
    package_stats.add('Success', 'good3')
    package_stats.add('Success', 'good4')
    package_stats.add('Failure', 'bad1')
    print(package_stats)
mit
fast90/youtube-dl
youtube_dl/extractor/awaan.py
3
8019
# coding: utf-8 from __future__ import unicode_literals import re import base64 from .common import InfoExtractor from ..compat import ( compat_urllib_parse_urlencode, compat_str, ) from ..utils import ( int_or_none, parse_iso8601, smuggle_url, unsmuggle_url, urlencode_postdata, ) class AWAANIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?(?:awaan|dcndigital)\.ae/(?:#/)?show/(?P<show_id>\d+)/[^/]+(?:/(?P<video_id>\d+)/(?P<season_id>\d+))?' def _real_extract(self, url): show_id, video_id, season_id = re.match(self._VALID_URL, url).groups() if video_id and int(video_id) > 0: return self.url_result( 'http://awaan.ae/media/%s' % video_id, 'AWAANVideo') elif season_id and int(season_id) > 0: return self.url_result(smuggle_url( 'http://awaan.ae/program/season/%s' % season_id, {'show_id': show_id}), 'AWAANSeason') else: return self.url_result( 'http://awaan.ae/program/%s' % show_id, 'AWAANSeason') class AWAANBaseIE(InfoExtractor): def _parse_video_data(self, video_data, video_id, is_live): title = video_data.get('title_en') or video_data['title_ar'] img = video_data.get('img') return { 'id': video_id, 'title': self._live_title(title) if is_live else title, 'description': video_data.get('description_en') or video_data.get('description_ar'), 'thumbnail': 'http://admin.mangomolo.com/analytics/%s' % img if img else None, 'duration': int_or_none(video_data.get('duration')), 'timestamp': parse_iso8601(video_data.get('create_time'), ' '), 'is_live': is_live, } def _extract_video_formats(self, webpage, video_id, m3u8_entry_protocol): formats = [] format_url_base = 'http' + self._html_search_regex( [ r'file\s*:\s*"https?(://[^"]+)/playlist.m3u8', r'<a[^>]+href="rtsp(://[^"]+)"' ], webpage, 'format url') formats.extend(self._extract_mpd_formats( format_url_base + '/manifest.mpd', video_id, mpd_id='dash', fatal=False)) formats.extend(self._extract_m3u8_formats( format_url_base + '/playlist.m3u8', video_id, 'mp4', m3u8_entry_protocol, m3u8_id='hls', fatal=False)) 
formats.extend(self._extract_f4m_formats( format_url_base + '/manifest.f4m', video_id, f4m_id='hds', fatal=False)) self._sort_formats(formats) return formats class AWAANVideoIE(AWAANBaseIE): IE_NAME = 'awaan:video' _VALID_URL = r'https?://(?:www\.)?(?:awaan|dcndigital)\.ae/(?:#/)?(?:video(?:/[^/]+)?|media|catchup/[^/]+/[^/]+)/(?P<id>\d+)' _TESTS = [{ 'url': 'http://www.dcndigital.ae/#/video/%D8%B1%D8%AD%D9%84%D8%A9-%D8%A7%D9%84%D8%B9%D9%85%D8%B1-%D8%A7%D9%84%D8%AD%D9%84%D9%82%D8%A9-1/17375', 'md5': '5f61c33bfc7794315c671a62d43116aa', 'info_dict': { 'id': '17375', 'ext': 'mp4', 'title': 'رحلة العمر : الحلقة 1', 'description': 'md5:0156e935d870acb8ef0a66d24070c6d6', 'duration': 2041, 'timestamp': 1227504126, 'upload_date': '20081124', }, }, { 'url': 'http://awaan.ae/video/26723981/%D8%AF%D8%A7%D8%B1-%D8%A7%D9%84%D8%B3%D9%84%D8%A7%D9%85:-%D8%AE%D9%8A%D8%B1-%D8%AF%D9%88%D8%B1-%D8%A7%D9%84%D8%A3%D9%86%D8%B5%D8%A7%D8%B1', 'only_matching': True, }] def _real_extract(self, url): video_id = self._match_id(url) video_data = self._download_json( 'http://admin.mangomolo.com/analytics/index.php/plus/video?id=%s' % video_id, video_id, headers={'Origin': 'http://awaan.ae'}) info = self._parse_video_data(video_data, video_id, False) webpage = self._download_webpage( 'http://admin.mangomolo.com/analytics/index.php/customers/embed/video?' 
+ compat_urllib_parse_urlencode({ 'id': video_data['id'], 'user_id': video_data['user_id'], 'signature': video_data['signature'], 'countries': 'Q0M=', 'filter': 'DENY', }), video_id) info['formats'] = self._extract_video_formats(webpage, video_id, 'm3u8_native') return info class AWAANLiveIE(AWAANBaseIE): IE_NAME = 'awaan:live' _VALID_URL = r'https?://(?:www\.)?(?:awaan|dcndigital)\.ae/(?:#/)?live/(?P<id>\d+)' _TEST = { 'url': 'http://awaan.ae/live/6/dubai-tv', 'info_dict': { 'id': '6', 'ext': 'mp4', 'title': 're:Dubai Al Oula [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$', 'upload_date': '20150107', 'timestamp': 1420588800, }, 'params': { # m3u8 download 'skip_download': True, }, } def _real_extract(self, url): channel_id = self._match_id(url) channel_data = self._download_json( 'http://admin.mangomolo.com/analytics/index.php/plus/getchanneldetails?channel_id=%s' % channel_id, channel_id, headers={'Origin': 'http://awaan.ae'}) info = self._parse_video_data(channel_data, channel_id, True) webpage = self._download_webpage( 'http://admin.mangomolo.com/analytics/index.php/customers/embed/index?' 
+ compat_urllib_parse_urlencode({ 'id': base64.b64encode(channel_data['user_id'].encode()).decode(), 'channelid': base64.b64encode(channel_data['id'].encode()).decode(), 'signature': channel_data['signature'], 'countries': 'Q0M=', 'filter': 'DENY', }), channel_id) info['formats'] = self._extract_video_formats(webpage, channel_id, 'm3u8') return info class AWAANSeasonIE(InfoExtractor): IE_NAME = 'awaan:season' _VALID_URL = r'https?://(?:www\.)?(?:awaan|dcndigital)\.ae/(?:#/)?program/(?:(?P<show_id>\d+)|season/(?P<season_id>\d+))' _TEST = { 'url': 'http://dcndigital.ae/#/program/205024/%D9%85%D8%AD%D8%A7%D8%B6%D8%B1%D8%A7%D8%AA-%D8%A7%D9%84%D8%B4%D9%8A%D8%AE-%D8%A7%D9%84%D8%B4%D8%B9%D8%B1%D8%A7%D9%88%D9%8A', 'info_dict': { 'id': '7910', 'title': 'محاضرات الشيخ الشعراوي', }, 'playlist_mincount': 27, } def _real_extract(self, url): url, smuggled_data = unsmuggle_url(url, {}) show_id, season_id = re.match(self._VALID_URL, url).groups() data = {} if season_id: data['season'] = season_id show_id = smuggled_data.get('show_id') if show_id is None: season = self._download_json( 'http://admin.mangomolo.com/analytics/index.php/plus/season_info?id=%s' % season_id, season_id, headers={'Origin': 'http://awaan.ae'}) show_id = season['id'] data['show_id'] = show_id show = self._download_json( 'http://admin.mangomolo.com/analytics/index.php/plus/show', show_id, data=urlencode_postdata(data), headers={ 'Origin': 'http://awaan.ae', 'Content-Type': 'application/x-www-form-urlencoded' }) if not season_id: season_id = show['default_season'] for season in show['seasons']: if season['id'] == season_id: title = season.get('title_en') or season['title_ar'] entries = [] for video in show['videos']: video_id = compat_str(video['id']) entries.append(self.url_result( 'http://awaan.ae/media/%s' % video_id, 'AWAANVideo', video_id)) return self.playlist_result(entries, season_id, title)
unlicense