text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
"""
This program tests the correct addition and removal of components to the InstalledComponentsDB,
as well as the components
CLI functions are used to ensure the test is as similar as possible to a real user-to-cli interaction
This test assumes that there is a DIRAC master server running on the local machine
This test assumes that the Notification service is not installed
This test assumes that the FTS3DB database is not installed and doesn't exist in MySQL
"""
# pylint: disable=invalid-name,wrong-import-position
import sys
import unittest
from DIRAC.Core.Base.Script import parseCommandLine
parseCommandLine()
from DIRAC.FrameworkSystem.Client.ComponentInstaller import gComponentInstaller
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.FrameworkSystem.Client.ComponentMonitoringClient import ComponentMonitoringClient
from DIRAC.FrameworkSystem.Client.SystemAdministratorClientCLI import SystemAdministratorClientCLI
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getUsernameForDN
class TestComponentInstallation(unittest.TestCase):
    """
    Base fixture for the component install/uninstall integration tests.

    setUp collects everything the concrete tests need from a running DIRAC
    master server: the current CS contents, the Framework setup name, the
    MySQL password, and the username owning the active proxy (so the tests
    can later verify *who* is recorded as the installer).

    Raises:
        Exception: if any DIRAC call returns a not-OK result structure.
    """

    def setUp(self):
        self.host = 'localhost'
        self.notificationPort = 9154
        self.rootPwd = ''
        self.csClient = CSAPI()
        self.monitoringClient = ComponentMonitoringClient()
        self.client = SystemAdministratorClientCLI(self.host)

        # Refresh the local CS copy and extract setup/password information.
        self.csClient.downloadCSData()
        result = self.csClient.getCurrentCFG()
        if not result['OK']:
            raise Exception(result['Message'])
        cfg = result['Value']

        setup = cfg.getOption('DIRAC/Setup', 'dirac-JenkinsSetup')
        self.frameworkSetup = cfg.getOption('DIRAC/Setups/' + setup + '/Framework')
        self.rootPwd = cfg.getOption('Systems/Databases/Password')
        # Same password is reused for the DIRAC MySQL user in this test setup.
        self.diracPwd = self.rootPwd

        # Resolve the username behind the current proxy: take the last
        # certificate in the chain, read its subject DN, and map DN -> user.
        result = getProxyInfo()
        if not result['OK']:
            raise Exception(result['Message'])
        chain = result['Value']['chain']
        result = chain.getCertInChain(-1)
        if not result['OK']:
            raise Exception(result['Message'])
        result = result['Value'].getSubjectDN()
        if not result['OK']:
            raise Exception(result['Message'])
        userDN = result['Value']
        result = getUsernameForDN(userDN)
        if not result['OK']:
            raise Exception(result['Message'])
        self.user = result['Value']
        # Fall back to a placeholder when the DN maps to no registered user.
        if not self.user:
            self.user = 'unknown'

    def tearDown(self):
        pass
class ComponentInstallationChain(TestComponentInstallation):
    """
    Exercises the full install -> verify -> uninstall chain for a service
    (Notification, plus a second instance Notification2 using the same
    module) and for a database (FTS3DB), checking both the CS and the
    InstalledComponentsDB after every step.
    """

    def testComponent(self):
        """Install, verify and uninstall the Notification service(s)."""
        service1Present = False
        service2Present = False

        # Check whether the service is already present or not
        cfg = self.csClient.getCurrentCFG()['Value']
        if cfg.isSection('Systems/Framework/' +
                         self.frameworkSetup +
                         '/Services/Notification/') and cfg.isOption('Systems/Framework/' +
                                                                     self.frameworkSetup +
                                                                     '/URLs/Notification'):
            service1Present = True

        if not service1Present:
            # Install component
            self.client.do_install('service Framework Notification')

        self.csClient.downloadCSData()

        # Check installation in CS
        cfg = self.csClient.getCurrentCFG()['Value']
        self.assertTrue(cfg.isSection('Systems/Framework/' + self.frameworkSetup + '/Services/Notification/') and
                        cfg.isOption('Systems/Framework/' + self.frameworkSetup + '/URLs/Notification'))
        self.assertTrue(cfg.getOption('Systems/Framework/' + self.frameworkSetup + '/URLs/Notification') ==
                        'dips://' + self.host + ':' + str(self.notificationPort) + '/Framework/Notification')

        # Check installation in database
        if not service1Present:
            result = self.monitoringClient.getInstallations({'Instance': 'Notification',
                                                             'UnInstallationTime': None, 'InstalledBy': self.user},
                                                            {'System': 'Framework', 'Type': 'service',
                                                             'Module': 'Notification'},
                                                            {}, False)
        else:
            # We dont know who made the previous installation
            result = self.monitoringClient.getInstallations({'Instance': 'Notification', 'UnInstallationTime': None},
                                                            {'System': 'Framework',
                                                             'Type': 'service', 'Module': 'Notification'},
                                                            {}, False)
        self.assertTrue(result['OK'] and len(result['Value']) == 1)

        # Check whether the second service is already present or not
        cfg = self.csClient.getCurrentCFG()['Value']
        if cfg.isSection('Systems/Framework/' +
                         self.frameworkSetup +
                         '/Services/Notification2/') and cfg.isOption('Systems/Framework/' +
                                                                      self.frameworkSetup +
                                                                      '/URLs/Notification2'):
            service2Present = True

        if not service2Present:
            # Install second component, reusing the Notification module
            self.client.do_install('service Framework Notification2 -m Notification')

        # Check installation in CS
        self.csClient.downloadCSData()
        cfg = self.csClient.getCurrentCFG()['Value']
        self.assertTrue(cfg.isSection('Systems/Framework/' + self.frameworkSetup + '/Services/Notification2/') and
                        cfg.isOption('Systems/Framework/' + self.frameworkSetup + '/URLs/Notification2'))

        if not service1Present:
            # Uninstall component
            self.client.do_uninstall('-f Framework Notification')

            # Check CS is intact ( there should still be at least one instance of Notification )
            # NOTE(review): the two isSection checks below test the same
            # Notification path twice — possibly one was meant to be
            # Notification2; kept as found to preserve behavior.
            self.csClient.downloadCSData()
            cfg = self.csClient.getCurrentCFG()['Value']
            self.assertTrue(cfg.isSection('Systems/Framework/' +
                                          self.frameworkSetup +
                                          '/Services/Notification/') and cfg.isSection('Systems/Framework/' +
                                                                                       self.frameworkSetup +
                                                                                       '/Services/Notification/') and
                            cfg.isOption('Systems/Framework/' +
                                         self.frameworkSetup +
                                         '/URLs/Notification'))

        if not service2Present:
            # Uninstall second component
            self.client.do_uninstall('-f Framework Notification2')

        if not service1Present and not service2Present:
            # Check uninstallation in CS ( only if the services were not already present )
            self.csClient.downloadCSData()
            cfg = self.csClient.getCurrentCFG()['Value']
            self.assertTrue(not cfg.isSection('Systems/Framework/' +
                                              self.frameworkSetup +
                                              '/Services/Notification/') and not cfg.isSection(
                'Systems/Framework/' +
                self.frameworkSetup +
                '/Services/Notification2/') and not
                cfg.isOption('Systems/Framework/' +
                             self.frameworkSetup +
                             '/URLs/Notification'))

    def testDatabase(self):
        """Install, verify and uninstall the FTS3DB database."""
        gComponentInstaller.setMySQLPasswords(self.rootPwd, self.diracPwd)

        # Install database
        self.client.do_install('db FTS3DB')

        # Check installation in CS
        self.csClient.downloadCSData()
        cfg = self.csClient.getCurrentCFG()['Value']
        self.assertTrue(cfg.isSection('Systems/DataManagement/' + self.frameworkSetup + '/Databases/FTS3DB/'))

        # Check in database
        result = self.monitoringClient.getInstallations({'Instance': 'FTS3DB',
                                                         'UnInstallationTime': None, 'InstalledBy': self.user},
                                                        {'System': 'DataManagement', 'Type': 'DB', 'Module': 'FTS3DB'},
                                                        {}, False)
        self.assertTrue(result['OK'] and len(result['Value']) == 1)

        # Uninstall database
        self.client.do_uninstall('db FTS3DB')

        # Check uninstallation in CS
        self.csClient.downloadCSData()
        cfg = self.csClient.getCurrentCFG()['Value']
        self.assertTrue(not cfg.isSection('Systems/DataManagement/' + self.frameworkSetup + '/Databases/FTS3DB/'))
if __name__ == '__main__':
    # Build a suite with both test cases and exit non-zero on failure,
    # so CI can pick up the result from the process exit code.
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestComponentInstallation)
    suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(ComponentInstallationChain))
    testResult = unittest.TextTestRunner(verbosity=2).run(suite)
    sys.exit(not testResult.wasSuccessful())
|
fstagni/DIRAC
|
tests/Integration/Framework/NotRun_Test_ComponentInstallUninstall.py
|
Python
|
gpl-3.0
| 8,990
|
[
"DIRAC"
] |
0c00b9a619b229509df941a9ca4ae699049b1e545278cba0a38cf16b0dbe01f2
|
# -*- coding: utf-8 -*-
"""
pySEBAL_3.4.0
@author: Tim Hessels, Jonna van Opstal, Patricia Trambauer, Wim Bastiaanssen, Mohamed Faouzi Smiej, Yasir Mohamed, and Ahmed Er-Raji
UNESCO-IHE
June 2018
"""
import sys
import os
import shutil
import numpy as np
import osr
import gdal
from math import sin, cos, pi, tan
import subprocess
import numpy.polynomial.polynomial as poly
from openpyxl import load_workbook
from pyproj import Proj, transform
import warnings
def main(number, inputExcel):
import SEBAL.pySEBAL.pySEBAL_input_LANDSAT as input_LS
import SEBAL.pySEBAL.pySEBAL_input_PROBAV_VIIRS as input_PROBAV_VIIRS
import SEBAL.pySEBAL.pySEBAL_input_MODIS as input_MODIS
# Do not show warnings
warnings.filterwarnings('ignore')
# Open Excel workbook
wb = load_workbook(inputExcel)
# Open the General_Input sheet
ws = wb['General_Input']
# Extract the input and output folder, and Image type from the excel file
input_folder = r"%s" %str(ws['B%d' %number].value)
output_folder = r"%s" %str(ws['C%d' %number].value)
Image_Type = int(ws['D%d' %number].value) # Type of Image (1=Landsat & 2 = VIIRS & PROBA-V)
# Create or empty output folder
if os.path.isdir(output_folder):
shutil.rmtree(output_folder)
os.makedirs(output_folder)
# Start log file
filename_logfile = os.path.join(output_folder, 'log.txt')
sys.stdout = open(filename_logfile, 'w')
# Print data used from sheet General_Input
print('.................................................................. ')
print('......................SEBAL Model running ........................ ')
print('.................................................................. ')
print('pySEBAL version 3.4.0 Github')
print('General Input:')
print('input_folder = %s' %str(input_folder))
print('output_folder = %s' %str(output_folder))
print('Image_Type = %s' %int(Image_Type))
print('.................................................................. ')
print('...........................Parameters ............................ ')
print('.................................................................. ')
# ------------------------------------------------------------------------
# General constants that could be changed by the user:
print(' ')
print('...................... General Constants ......................... ')
print(' ')
# Data for Module 1 - Open DEM and reproject
print('General Constants: Open DEM and reproject (Part 1)')
print(' ')
# Data for Module 2 - Radiation
print('General Constants: Radiation (Part 2)')
print(' ')
# Data for Module 3 - Read Soil and Meteo Input
print('General Constants: Read Soil and Meteo input (Part 3)')
print(' ')
# Data for Module 4 - Calc meteo
Temp_lapse_rate = 0.0065 #0.01199 # Temperature lapse rate (°K/m)
Gsc = 1367 # Solar constant (W / m2)
SB_const = 5.6703E-8 # Stefan-Bolzmann constant (watt/m2/°K4)
print('General Constants: Calc Meteo (Part 4)')
print('Lapse Rate Temperature = %s Kelvin/m' %Temp_lapse_rate)
print('Solar Constant = %s W/m2' %Gsc)
print('Stefan Bolzmann Constant = %s watt/m2/°K4' %SB_const)
print(' ')
# Data for Module 5 - Open VIS
Apparent_atmosf_transm = 0.89 # This value is used for atmospheric correction of broad band albedo. This value is used for now, would be better to use tsw.
path_radiance = 0.03 # Recommended, Range: [0.025 - 0.04], based on Bastiaanssen (2000).
print('General Constants: Open VIS (Part 5)')
print('Atmospheric correction of broad band albedo = %s' %Apparent_atmosf_transm)
print('Path Radiance = %s' %path_radiance)
print(' ')
# Data for Module 6 - Open Thermal
Thermal_Sharpening_not_needed = 0# (1 == off 0 == on)
Rp = 0.91 # Path radiance in the 10.4-12.5 µm band (W/m2/sr/µm)
tau_sky = 0.866 # Narrow band transmissivity of air, range: [10.4-12.5 µm]
surf_temp_offset = 3 # Surface temperature offset for water
Temperature_offset_shadow = -1 # Temperature offset for detecting shadow
Maximum_shadow_albedo = 0.1 # Minimum albedo value for shadow
Temperature_offset_clouds = -3 # Temperature offset for detecting clouds
Minimum_cloud_albedo = 0.4 # Minimum albedo value for clouds
print('General Constants: Open Thermal (Part 6)')
print('Thermal Sharpening 0:on/1:off = %s' %Thermal_Sharpening_not_needed)
print('Path Radiance in the 10.4-12.5 band = %s (W/m2/sr/µm)' %Rp)
print('Narrow band transmissivity of air = %s' %tau_sky)
print('Surface temperature offset for water = %s (Kelvin)' %surf_temp_offset)
print('Temperature offset for detecting shadow = %s (Kelvin)' %Temperature_offset_shadow)
print('Maximum albedo value for shadow = %s' %Maximum_shadow_albedo)
print('Temperature offset for detecting clouds = %s (Kelvin)' %Temperature_offset_clouds)
print('Minimum albedo value for clouds = %s' %Minimum_cloud_albedo)
print(' ')
# Data for Module 7 - Apply Thermal Sharpening
print('Apply Thermal Sharpening (Part 7)')
print(' ')
# Data for Module 8 - Create Masks and Quality Layers
print('Create Masks and Quality Layers (Part 8)')
print(' ')
# Data for Module 9 - Calc meteo and radiation
print('General Constants: Calc meteo and radiation (Part 9)')
print(' ')
# Data for Module 10 - Calc Hot/Cold Pixel
NDVIhot_low = 0.03 # Lower NDVI treshold for hot pixels
NDVIhot_high = 0.25 # Higher NDVI treshold for hot pixels
print('General Constants: Calc Hot/Cold Pixel (Part 10)')
print('Lower NDVI treshold for hot pixels = %s' %NDVIhot_low)
print('Higher NDVI treshold for hot pixels = %s' %NDVIhot_high)
print(' ')
# Data for Module 11 - Sensible Heat Flux
surf_roughness_equation_used = 2 # NDVI model = 1, Raupach model = 2
print('General Constants: Sensible Heat Flux (Part 11)')
print('NDVI model(1), Raupach model(2) = %s' %surf_roughness_equation_used)
print(' ')
# Data for Module 12 - Evapotranspiration
print('General Constants: Evapotranspiration (Part 12)')
print(' ')
# Data for Module 13 - Soil Moisture
print('General Constants: Soil Moisture (Part 13)')
print(' ')
# Data for Module 14 - Biomass
Th = 35.0 # Upper limit of stomatal activity
Kt = 23.0 # Optimum conductance temperature (°C), range: [17 - 19]
Tl = 0.0 # Lower limit of stomatal activity
rl = 130 # Bulk stomatal resistance of the well-illuminated leaf (s/m)
Light_use_extinction_factor = 0.5 # Light use extinction factor for Bear's Law
print('General Constants: Biomass (Part 14)')
print('Upper limit of stomatal activity = %s' %Th)
print('Optimum conductance temperature = %s (Celcius Degrees)' %Kt)
print('Lower limit of stomatal activity= %s' %Tl)
print('Bulk stomatal resistance of the well-illuminated leaf = %s (s/m)' %rl)
print('Light use extinction factor for Bears Law = %s' %(Light_use_extinction_factor))
print(' ')
print('.................... Input Satellite ........................ ')
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
# --- Extract general info from Landsat or VIIRS metadata: DOY, hour, minutes
if Image_Type is 1:
year, DOY, hour, minutes, UTM_Zone, Sun_elevation, Landsat_nr = input_LS.Get_Time_Info(wb, number)
# define the kind of sensor and resolution of the sensor
pixel_spacing = int(30)
sensor1 = 'LS%d' %Landsat_nr
sensor2 = 'LS%s' %Landsat_nr
res1 = '30m'
res2 = '30m'
res3 = '30m'
# Print data used from sheet General_Input
print('LANDSAT model Input:')
print('Landsat number = %s' %str(Landsat_nr))
print('UTM Zone = %s' %(UTM_Zone))
print('Pixel size model = %s (Meters)' %(pixel_spacing))
# Open the Landsat_Input sheet
ws = wb['Landsat_Input']
if Image_Type is 2:
year, DOY, hour, minutes, UTM_Zone = input_PROBAV_VIIRS.Get_Time_Info(wb, number)
# define the kind of sensor and resolution of the sensor
pixel_spacing = int(100)
sensor1 = 'PROBAV'
sensor2 = 'VIIRS'
res1 = '375m'
res2 = '100m'
res3 = '30m'
# Print data used from sheet General_Input
print('PROBA-V VIIRS model Input:')
print('UTM Zone = %s' %(UTM_Zone))
print('Pixel size model = %s (Meters)' %(pixel_spacing))
# Open the VIIRS_PROBAV_Input sheet
ws = wb['VIIRS_PROBAV_Input']
if Image_Type is 3:
year, DOY, UTM_Zone = input_MODIS.Get_Time_Info(wb, number)
# define the kind of sensor and resolution of the sensor
pixel_spacing = int(250)
sensor1 = 'MODIS'
sensor2 = 'MODIS'
res1 = '1000m'
res2 = '250m'
res3 = '500m'
# Print data used from sheet General_Input
print('MODIS model Input:')
print('UTM Zone = %s' %(UTM_Zone))
print('Pixel size model = %s (Meters)' %(pixel_spacing))
# Open the MODIS_Input sheet
ws = wb['MODIS_Input']
# Calibartion constants Hot Pixels extracted from the excel file
Hot_Pixel_Constant = float(ws['E%d' %number].value) # Hot Pixel Value = Mean_Hot_Pixel + Hot_Pixel_Constant * Std_Hot_Pixel (only for VIIRS images)
# Calibartion constants Cold Pixels from the excel file
Cold_Pixel_Constant = float(ws['F%d' %number].value) # Cold Pixel Value = Mean_Cold_Pixel + Cold_Pixel_Constant * Std_Cold_Pixel (only for VIIRS images)
# ------------------------------------------------------------------------
# Define the output maps names
# output radiation balance
proyDEM_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'proy_DEM_%s.tif' %res2)
slope_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'slope_%s.tif' %res2)
aspect_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'aspect_%s.tif' %res2)
radiation_inst_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'Ra_inst_%s_%s_%s.tif' %(res2, year, DOY))
phi_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'phi_%s_%s_%s.tif' %(res2, year, DOY))
radiation_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'Ra24_mountain_%s_%s_%s.tif' %(res2, year, DOY))
cos_zn_fileName = os.path.join(output_folder, 'Output_radiation_balance', 'cos_zn_%s_%s_%s.tif' %(res2, year, DOY))
lon_fileName_rep = os.path.join(output_folder, 'Output_radiation_balance', 'longitude_proj_%s_%s_%s.tif' %(res1, year, DOY))
lat_fileName_rep = os.path.join(output_folder, 'Output_radiation_balance', 'latitude_proj_%s_%s_%s.tif' %(res1, year, DOY))
# output meteo
Atmos_pressure_fileName = os.path.join(output_folder, 'Output_meteo', 'atmos_pressure_%s_%s_%s.tif' %(res2, year, DOY))
Psychro_c_fileName = os.path.join(output_folder, 'Output_meteo', 'psychro_%s_%s_%s.tif' %(res2, year, DOY))
# output soil moisture
water_mask_temp_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_Water_mask_temporary_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
snow_mask_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_snow_mask_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
water_mask_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_water_mask_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
total_soil_moisture_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_%s_Total_soil_moisture_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
top_soil_moisture_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_%s_Top_soil_moisture_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
RZ_SM_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_%s_Root_zone_moisture_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
SM_stress_trigger_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_%s_Moisture_stress_trigger_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
irrigation_needs_fileName = os.path.join(output_folder, 'Output_soil_moisture', '%s_%s_irrigation_needs_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
# output vegetation
veg_cover_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_vegt_cover_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
lai_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_lai_average_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
nitrogen_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_nitrogen_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
tir_emissivity_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_tir_emissivity_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
fpar_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_fpar_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
b10_emissivity_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_b10_emissivity_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
surf_temp_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_%s_surface_temp_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
temp_surface_sharpened_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_%s_surface_temp_sharpened_%s_%s_%s.tif' %(sensor1, sensor2, res1, year, DOY))
surf_rough_fileName = os.path.join(output_folder, 'Output_vegetation', '%s_%s_surface_roughness_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
surface_albedo_fileName = os.path.join(output_folder, 'Output_vegetation','%s_surface_albedo_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
ndvi_fileName = os.path.join(output_folder, 'Output_vegetation','%s_ndvi_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
# output cloud mask
cloud_mask_fileName = os.path.join(output_folder, 'Output_cloud_masked', '%s_cloud_mask_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
shadow_mask_fileName = os.path.join(output_folder, 'Output_cloud_masked', '%s_shadow_mask_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
QC_Map_fileName = os.path.join(output_folder, 'Output_cloud_masked', '%s_quality_mask_%s_%s_%s.tif.tif' %(sensor1, res2, year, DOY))
# output energy balance
Rn_24_fileName = os.path.join(output_folder, 'Output_energy_balance', '%s_%s_Rn_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
rn_inst_fileName = os.path.join(output_folder, 'Output_energy_balance', '%s_%s_Rn_inst_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
g_inst_fileName = os.path.join(output_folder, 'Output_energy_balance', '%s_%s_G_inst_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
h_inst_fileName = os.path.join(output_folder, 'Output_energy_balance', '%s_%s_h_inst_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
EF_inst_fileName = os.path.join(output_folder, 'Output_energy_balance', '%s_%s_EFinst_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
LE_inst_fileName = os.path.join(output_folder, 'Output_energy_balance', '%s_%s_LEinst_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
# output temporary
temp_corr_fileName = os.path.join(output_folder, 'Output_temporary', '%s_%s_temp_corr_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
ts_dem_fileName = os.path.join(output_folder, 'Output_temporary', '%s_%s_ts_dem_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
hot_pixels_fileName = os.path.join(output_folder, 'Output_temporary', '%s_%s_hot_pixels_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
cold_pixels_fileName = os.path.join(output_folder, 'Output_temporary', '%s_%s_cold_pixels_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
QC_Map_after_VIS = os.path.join(output_folder, 'Output_temporary', '%s_QC_MAP_After_VIS_%s_%s_%s.tif' %(sensor1, res1, year, DOY))
proyDEM_fileName_up = os.path.join(output_folder, 'Output_temporary', 'proy_DEM_up.tif')
# output evapotranspiration
min_bulk_surf_res_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_%s_min_bulk_surf_resis_24_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
ETref_24_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_ETref_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
ETA_24_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_ETact_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
ETP_24_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_ETpot_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
ET_24_deficit_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_ET_24_deficit_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
AF_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_Advection_Factor_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
kc_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_kc_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
kc_max_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_kc_max_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
bulk_surf_res_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_bulk_surf_resis_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
Tact24_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_Tact_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
Eact24_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_Eact_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
Tpot24_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_Tpot_24_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
T24_deficit_fileName = os.path.join(output_folder, 'Output_evapotranspiration', '%s_%s_T_24_deficit_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
# output biomass production
moisture_stress_biomass_fileName = os.path.join(output_folder, 'Output_biomass_production', '%s_%s_Moisture_stress_biomass_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
LUE_fileName = os.path.join(output_folder, 'Output_biomass_production', '%s_%s_LUE_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
Biomass_prod_fileName = os.path.join(output_folder, 'Output_biomass_production', '%s_%s_Biomass_production_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
Biomass_wp_fileName = os.path.join(output_folder, 'Output_biomass_production', '%s_%s_Biomass_wp_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
Biomass_deficit_fileName = os.path.join(output_folder, 'Output_biomass_production', '%s_%s_Biomass_deficit_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
print('---------------------------------------------------------')
print('------------------ General info -------------------------')
print('---------------------------------------------------------')
print('General info: ')
print(' DOY: ', DOY)
if not Image_Type == 3:
print(' Hour: ', hour)
print(' Minutes: ', '%0.3f' % minutes)
print(' UTM_Zone: ', UTM_Zone)
print('---------------------------------------------------------')
print('---------- Open DEM and reproject (Part 1) --------------')
print('---------------------------------------------------------')
ws = wb['General_Input']
# Extract the Path to the DEM map from the excel file
DEM_fileName = r"%s" %str(ws['E%d' %number].value) #'DEM_HydroShed_m'
print('Path to DEM file = %s' %str(DEM_fileName))
# Open DEM and create Latitude and longitude files
lat, lon, lat_fileName, lon_fileName = DEM_lat_lon(DEM_fileName, output_folder)
# Reproject from Geog Coord Syst to UTM -
# 1) DEM - Original DEM coordinates is Geographic: lat, lon
lsc, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset(
DEM_fileName, pixel_spacing, UTM_Zone = UTM_Zone)
band = lsc.GetRasterBand(1) # Get the reprojected dem band
ncol = lsc.RasterXSize # Get the reprojected dem column size
nrow = lsc.RasterYSize # Get the reprojected dem row size
shape_lsc = [ncol, nrow]
# Read out the DEM band and print the DEM properties
DEM_resh = band.ReadAsArray(0, 0, ncol, nrow)
#DEM_resh[DEM_resh<0] = 1
print('Projected DEM - ')
print(' Size: ', ncol, nrow)
print(' Upper Left corner x, y: ', ulx_dem, ',', uly_dem)
print(' Lower right corner x, y: ', lrx_dem, ',', lry_dem)
# 2) Latitude File - reprojection
# reproject latitude to the landsat projection and save as tiff file
lat_rep, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset(
lat_fileName, pixel_spacing, UTM_Zone=UTM_Zone)
# Get the reprojected latitude data
lat_proy = lat_rep.GetRasterBand(1).ReadAsArray(0, 0, ncol, nrow)
# 3) Longitude file - reprojection
# reproject longitude to the landsat projection and save as tiff file
lon_rep, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset(lon_fileName, pixel_spacing, UTM_Zone)
# Get the reprojected longitude data
lon_proy = lon_rep.GetRasterBand(1).ReadAsArray(0, 0, ncol, nrow)
# Calculate slope and aspect from the reprojected DEM
deg2rad, rad2deg, slope, aspect = Calc_Gradient(DEM_resh, pixel_spacing)
# Saving the reprojected maps
save_GeoTiff_proy(lsc, DEM_resh, proyDEM_fileName, shape_lsc, nband = 1)
save_GeoTiff_proy(lsc, slope, slope_fileName, shape_lsc, nband = 1)
save_GeoTiff_proy(lsc, aspect, aspect_fileName, shape_lsc, nband = 1)
save_GeoTiff_proy(lon_rep, lon_proy, lon_fileName_rep, shape_lsc, nband = 1)
save_GeoTiff_proy(lat_rep, lat_proy, lat_fileName_rep, shape_lsc, nband = 1)
print('---------------------------------------------------------')
print('---------------- Radiation (Part 2) ---------------------')
print('---------------------------------------------------------')
# now we can also get the time for a MODIS run
if Image_Type == 3:
        hour, minutes = input_MODIS.Modis_Time(wb, epsg_to, number, proyDEM_fileName)
        # Modis_Time returns per-pixel values; collapse to scene averages.
        hour = np.nanmean(hour)
        minutes = np.nanmean(minutes)
# Calculation of extraterrestrial solar radiation for slope and aspect
Ra_mountain_24, Ra_inst, cos_zn, dr, phi, delta = Calc_Ra_Mountain(lon, DOY, hour, minutes, lon_proy, lat_proy, slope, aspect)
# For VIIRS/PROBA-V (2) and MODIS (3) the sun elevation is derived from the
# mean solar zenith; Landsat (1) gets it from its metadata elsewhere.
if Image_Type == 2 or Image_Type == 3:
        Sun_elevation = 90 - (np.nanmean(cos_zn) * 180/np.pi)
# Save files created in module 1
save_GeoTiff_proy(lsc, cos_zn, cos_zn_fileName, shape_lsc, nband = 1)
save_GeoTiff_proy(lsc, Ra_mountain_24, radiation_fileName, shape_lsc, nband = 1)
save_GeoTiff_proy(lsc, Ra_inst, radiation_inst_fileName, shape_lsc, nband = 1 )
save_GeoTiff_proy(lsc, phi, phi_fileName, shape_lsc, nband = 1 )
print('---------------------------------------------------------')
print('------- Read Meteo and Soil inputs (Part 3) -------------')
print('---------------------------------------------------------')
# Open the Meteo_Input sheet
ws = wb['Meteo_Input']
# 6a) Instantanious Temperature
# BUGFIX: this intermediate map was previously written to 'Temp_24_input.tif',
# the same path used by the daily temperature map in section 6b, so the
# instantaneous input map was silently overwritten. Use a distinct file name.
Output_filename_temp_inst = os.path.join(output_folder, 'Output_radiation_balance', 'Temp_inst_input.tif')
Temp_inst, Temp_inst_source = Open_constant_or_spatial_map(ws, "B%d" %number, Output_filename_temp_inst, proyDEM_fileName)
print('_____________________Instantanious Temperature______________________')
print('Source of instantanious temperature = %s' %str(Temp_inst_source))
print('Average instantanious temperature = %s Kelvin\n' %float(np.nanmean(Temp_inst)))
# 6b) Daily Temperature
Output_filename_temp_24 = os.path.join(output_folder, 'Output_radiation_balance', 'Temp_24_input.tif')
Temp_24, Temp_24_source = Open_constant_or_spatial_map(ws, "C%d" %number, Output_filename_temp_24, proyDEM_fileName)
print('__________________________Daily Temperature_________________________')
print('Source of daily temperature = %s' %str(Temp_24_source))
print('Average daily temperature = %s Kelvin\n' %float(np.nanmean(Temp_24)))
# 6c) Instantanious Relative Humidity
Output_filename_RH_inst = os.path.join(output_folder, 'Output_radiation_balance', 'RH_inst_input.tif')
RH_inst, RH_inst_source = Open_constant_or_spatial_map(ws, "D%d" %number, Output_filename_RH_inst, proyDEM_fileName)
print('________________Instantanious Relative Humidity_____________________')
print('Source of instantanious relative humidity = %s' %str(RH_inst_source))
print('Average instantanious relative humidity = %s Procent\n' %float(np.nanmean(RH_inst)))
# 6d) Daily Relative Humidity
Output_filename_RH_24 = os.path.join(output_folder, 'Output_radiation_balance', 'RH_24_input.tif')
RH_24, RH_24_source = Open_constant_or_spatial_map(ws, "E%d" %number, Output_filename_RH_24, proyDEM_fileName)
print('____________________Daily Relative Humidity_________________________')
print('Source of daily relative humidity = %s' %str(RH_24_source))
print('Average daily relative humidity = %s Procent\n' %float(np.nanmean(RH_24)))
# 6) Wind speed measurement height (m above ground)
zx = float(ws['F%d' %number].value)
print('___________________Measurement Height Wind Speed____________________')
print('Height at which wind speed is measured = %s (m)\n' %(zx))
# 6e) Instantanious wind speed
Output_filename_wind_inst = os.path.join(output_folder, 'Output_radiation_balance', 'Wind_inst_input.tif')
Wind_inst, Wind_inst_source = Open_constant_or_spatial_map(ws, "G%d" %number, Output_filename_wind_inst, proyDEM_fileName)
print('_____________________Instantanious Wind Speed_______________________')
print('Source of instantanious wind speed = %s' %str(Wind_inst_source))
print('Average instantanious wind speed = %s m/s\n' %float(np.nanmean(Wind_inst)))
# 6f) Daily wind speed
Output_filename_wind_24 = os.path.join(output_folder, 'Output_radiation_balance', 'Wind_24_input.tif')
Wind_24, Wind_24_source = Open_constant_or_spatial_map(ws, "H%d" %number, Output_filename_wind_24, proyDEM_fileName)
print('__________________________Daily Wind Speed__________________________')
print('Source of daily wind speed = %s' %str(Wind_24_source))
print('Average daily wind speed = %s m/s\n' %float(np.nanmean(Wind_24)))
# 6g) instantanious radiation or transmissivity
# Define the method of radiation (1 or 2):
#   1 = Rs_inst is given, Transm_inst will be calculated later (Part 9)
#   2 = Transm_inst is given, Rs_inst will be derived later (Part 9)
Method_Radiation_inst=int(ws['I%d' %number].value)
print('________________________Instantanious Solar_________________________')
print('Method for instantanious radiation (1=Rs_inst, 2=Transm_inst) = %s\n' %(Method_Radiation_inst))
if Method_Radiation_inst == 1:
        Output_filename_radiation_inst = os.path.join(output_folder, 'Output_radiation_balance', 'Rs_inst_input.tif')
        Rs_inst, Rs_inst_source = Open_constant_or_spatial_map(ws, "J%d" %number, Output_filename_radiation_inst, proyDEM_fileName)
        print('____________________Instantanious Radiation_________________________')
        print('Source of instantanious solar radiation = %s' %str(Rs_inst_source))
        print('Average instantanious solar radiation = %s W/m2\n' %float(np.nanmean(Rs_inst)))
if Method_Radiation_inst == 2:
        Output_filename_transm_inst = os.path.join(output_folder, 'Output_radiation_balance', 'Transm_inst_input.tif')
        Transm_inst, Transm_inst_source = Open_constant_or_spatial_map(ws, "K%d" %number, Output_filename_transm_inst, proyDEM_fileName)
        print('___________________Instantanious Transmissivity_____________________')
        print('Source of instantanious transmissivity = %s' %str(Transm_inst_source))
        print('Average instantanious transmissivity = %s\n' %float(np.nanmean(Transm_inst)))
# 6h) daily radiation or transmissivity
# Define the method of radiation (1 or 2):
#   1 = Rs_24 is given, Transm_24 will be calculated later (Part 9)
#   2 = Transm_24 is given, Rs_24 will be derived later (Part 9)
Method_Radiation_24=int(ws['L%d' %number].value)
print('____________________________Daily Solar_____________________________')
print('Method for daily radiation (1=Rs_24, 2=Transm_24) = %s\n' %(Method_Radiation_24))
if Method_Radiation_24 == 1:
        Output_filename_radiation_24 = os.path.join(output_folder, 'Output_radiation_balance', 'Rs_24_input.tif')
        Rs_24, Rs_24_source = Open_constant_or_spatial_map(ws, "M%d" %number, Output_filename_radiation_24, proyDEM_fileName)
        print('____________________________Daily Radiation_________________________')
        print('Source of daily solar radiation = %s' %str(Rs_24_source))
        print('Average daily solar radiation = %s W/m2\n' %float(np.nanmean(Rs_24)))
if Method_Radiation_24 == 2:
        Output_filename_transm_24 = os.path.join(output_folder, 'Output_radiation_balance', 'Transm_24_input.tif')
        Transm_24, Transm_24_source = Open_constant_or_spatial_map(ws, "N%d" %number, Output_filename_transm_24, proyDEM_fileName)
        print('___________________________Daily Transmissivity_____________________')
        print('Source of daily transmissivity = %s' %str(Transm_24_source))
        print('Average daily transmissivity = %s\n' %float(np.nanmean(Transm_24)))
# 6i) Obstacle height (used for surface roughness in Part 11)
Output_filename_h_obst = os.path.join(output_folder, 'Output_soil_moisture', 'Obst_h_input.tif')
h_obst, h_obst_source = Open_constant_or_spatial_map(ws, "O%d" %number, Output_filename_h_obst, proyDEM_fileName)
print('___________________________Obstacle Height__________________________')
print('Source of obstacle height = %s' %str(h_obst_source))
print('Average obstacle height = %s meter\n' %float(np.nanmean(h_obst)))
# Open the Soil_Input sheet (NOTE: previous comment wrongly said Meteo_Input)
ws = wb['Soil_Input']
# 6j) Saturated Soil Moisture Content topsoil
Output_filename_Theta_sat_top = os.path.join(output_folder, 'Output_soil_moisture', 'Theta_sat_top_input.tif')
Theta_sat_top, Theta_sat_top_source = Open_constant_or_spatial_map(ws, "B%d" %number, Output_filename_Theta_sat_top, proyDEM_fileName)
print('________________Saturated Soil Moisture Content Topsoil_____________')
print('Source of the saturated soil moisture content topsoil = %s' %str(Theta_sat_top_source))
print('Average saturated soil moisture content topsoil = %s\n' %float(np.nanmean(Theta_sat_top)))
# 6k) Saturated Soil Moisture Content subsoil
Output_filename_Theta_sat_sub = os.path.join(output_folder, 'Output_soil_moisture', 'Theta_sat_sub_input.tif')
Theta_sat_sub, Theta_sat_sub_source = Open_constant_or_spatial_map(ws, "C%d" %number, Output_filename_Theta_sat_sub, proyDEM_fileName)
print('________________Saturated Soil Moisture Content Subsoil_____________')
print('Source of the saturated soil moisture content subsoil = %s' %str(Theta_sat_sub_source))
print('Average saturated soil moisture content subsoil = %s\n' %float(np.nanmean(Theta_sat_sub)))
# 6l) Residual Soil Moisture Content topsoil
Output_filename_Theta_res_top = os.path.join(output_folder, 'Output_soil_moisture', 'Theta_res_top_input.tif')
Theta_res_top, Theta_res_top_source = Open_constant_or_spatial_map(ws, "D%d" %number, Output_filename_Theta_res_top, proyDEM_fileName)
print('_________________Residual Soil Moisture Content Topsoil_____________')
print('Source of the residual soil moisture content topsoil = %s' %str(Theta_res_top_source))
print('Average residual soil moisture content topsoil = %s\n' %float(np.nanmean(Theta_res_top)))
# 6m) Residual Soil Moisture Content subsoil
Output_filename_Theta_res_sub = os.path.join(output_folder, 'Output_soil_moisture', 'Theta_res_sub_input.tif')
Theta_res_sub, Theta_res_sub_source = Open_constant_or_spatial_map(ws, "E%d" %number, Output_filename_Theta_res_sub, proyDEM_fileName)
print('_________________Residual Soil Moisture Content Subsoil_____________')
print('Source of the residual soil moisture content subsoil = %s' %str(Theta_res_sub_source))
print('Average residual soil moisture content subsoil = %s\n' %float(np.nanmean(Theta_res_sub)))
# 6n) Soil Moisture Wilting point (column G)
Output_filename_soil_wilting_point = os.path.join(output_folder, 'Output_soil_moisture', 'Soil_moisture_wilting_point_input.tif')
Soil_moisture_wilting_point, Soil_moisture_wilting_point_source = Open_constant_or_spatial_map(ws, "G%d" %number, Output_filename_soil_wilting_point, proyDEM_fileName)
print('_______________________Soil Moisture Wilting point__________________')
print('Source of the soil moisture wilting point = %s' %str(Soil_moisture_wilting_point_source))
print('Average soil moisture wilting point = %s\n' %float(np.nanmean(Soil_moisture_wilting_point)))
# 6o) Fraction Field Capacity (column F)
Output_filename_Field_Capacity = os.path.join(output_folder, 'Output_soil_moisture', 'Fraction_Field_Capacity_input.tif')
Field_Capacity, Field_Capacity_source = Open_constant_or_spatial_map(ws, "F%d" %number, Output_filename_Field_Capacity, proyDEM_fileName)
print('_________________________Fraction Field Capacity____________________')
print('Source of the fraction field capacity = %s' %str(Field_Capacity_source))
print('Average fraction field capacity = %s\n' %float(np.nanmean(Field_Capacity)))
# 6p) Maximum Light Use Efficiency (column I)
Output_filename_LUEmax = os.path.join(output_folder, 'Output_soil_moisture', 'LUEmax_input.tif')
LUEmax, LUEmax_source = Open_constant_or_spatial_map(ws, "I%d" %number, Output_filename_LUEmax, proyDEM_fileName)
print('______________________Maximum Light Use Efficiency__________________')
print('Source of the Maximum Light Use Efficiency = %s' %str(LUEmax_source))
print('Average Maximum Light Use Efficiency = %s\n' %float(np.nanmean(LUEmax)))
# 6q) Depletion Factor (column H; label was duplicated as "6p" before)
Output_filename_depl_factor = os.path.join(output_folder, 'Output_soil_moisture', 'depl_factor_input.tif')
depl_factor, depl_factor_source = Open_constant_or_spatial_map(ws, "H%d" %number, Output_filename_depl_factor, proyDEM_fileName)
print('______________________________Depletion Factor______________________')
print('Source of the Depletion Factor = %s' %str(depl_factor_source))
print('Average Depletion Factor = %s\n' %float(np.nanmean(depl_factor)))
print('---------------------------------------------------------')
print('---------------- Calc Meteo (Part 4) --------------------')
print('---------------------------------------------------------')
# Atmospheric pressure for altitude:
Pair = 101.3 * np.power((293 - Temp_lapse_rate * DEM_resh) / 293, 5.26)
# Psychrometric constant (kPa / °C), FAO 56, eq 8.:
Psychro_c = 0.665E-3 * Pair
# Saturation Vapor Pressure at the air temperature (kPa):
esat_inst = 0.6108 * np.exp(17.27 * Temp_inst / (Temp_inst + 237.3))
esat_24 = 0.6108 * np.exp(17.27 * Temp_24 / (Temp_24 + 237.3))
# Actual vapour pressure (kPa), FAO 56, eq 19.:
eact_inst = RH_inst * esat_inst / 100
eact_24 = RH_24 * esat_24 / 100
print('Instantaneous Saturation Vapor Pressure = ', '%0.3f (kPa)' % np.nanmean(esat_inst))
print('Instantaneous Actual vapour pressure = ', '%0.3f (kPa)' % np.nanmean(eact_inst))
print('Daily Saturation Vapor Pressure = ', '%0.3f (kPa)' % np.nanmean(esat_24))
print('Daily Actual vapour pressure = ', '%0.3f (kPa)' % np.nanmean(eact_24))
print('---------------------------------------------------------')
print('------------ Open VIS Parameters (Part 5) ---------------')
print('---------------------------------------------------------')
# Sensor-specific extraction of the visible/NIR products:
# 1 = Landsat, 2 = PROBA-V/VIIRS, 3 = MODIS.
if Image_Type == 1:
       Surf_albedo, NDVI, LAI, vegt_cover, FPAR, Nitrogen, tir_emis, b10_emissivity, water_mask_temp, QC_Map = input_LS.Get_LS_Para_Veg(wb, number, proyDEM_fileName, year, DOY, path_radiance, Apparent_atmosf_transm, cos_zn, dr)
if Image_Type == 2:
       Surf_albedo, NDVI, LAI, vegt_cover, FPAR, Nitrogen, tir_emis, b10_emissivity, water_mask_temp, QC_Map = input_PROBAV_VIIRS.Get_PROBAV_Para_Veg(wb, number, proyDEM_fileName, year, DOY, path_radiance, Apparent_atmosf_transm, cos_zn, dr, DEM_resh)
if Image_Type == 3:
       Surf_albedo, NDVI, LAI, vegt_cover, FPAR, Nitrogen, tir_emis, b10_emissivity, water_mask_temp, QC_Map = input_MODIS.Get_MODIS_Para_Veg(wb, number, proyDEM_fileName, year, DOY, path_radiance, Apparent_atmosf_transm, cos_zn, dr, DEM_resh, epsg_to)
# Save output maps
save_GeoTiff_proy(lsc, water_mask_temp, water_mask_temp_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, FPAR, fpar_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, tir_emis, tir_emissivity_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Nitrogen, nitrogen_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, vegt_cover, veg_cover_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, LAI, lai_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, b10_emissivity, b10_emissivity_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, NDVI, ndvi_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Surf_albedo, surface_albedo_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, QC_Map, QC_Map_after_VIS, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('--------- Open Thermal Parameters (Part 6) --------------')
print('---------------------------------------------------------')
# Sensor-specific extraction of the thermal band / surface temperature.
if Image_Type == 1:
        Surface_temp, cloud_mask_temp, Thermal_Sharpening_not_needed = input_LS.Get_LS_Para_Thermal(wb, number, proyDEM_fileName, year, DOY, water_mask_temp, b10_emissivity, Temp_inst, Rp, tau_sky, surf_temp_offset, Thermal_Sharpening_not_needed, DEM_fileName, UTM_Zone, eact_inst, QC_Map)
if Image_Type == 2:
        Surface_temp, cloud_mask_temp , Thermal_Sharpening_not_needed = input_PROBAV_VIIRS.Get_VIIRS_Para_Thermal(wb, number, proyDEM_fileName, year, DOY, water_mask_temp, b10_emissivity, Temp_inst, Rp, tau_sky, surf_temp_offset, Thermal_Sharpening_not_needed)
if Image_Type == 3:
        Surface_temp, cloud_mask_temp, Thermal_Sharpening_not_needed = input_MODIS.Get_MODIS_Para_Thermal(wb, number, proyDEM_fileName, year, DOY, water_mask_temp, b10_emissivity, Temp_inst, Rp, tau_sky, surf_temp_offset, Thermal_Sharpening_not_needed, epsg_to)
# Save output maps
save_GeoTiff_proy(lsc, Surface_temp, surf_temp_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('------ Apply Thermal Sharpening (Part 7) ----------------')
print('---------------------------------------------------------')
# Perform thermal sharpening of the surface-temperature band using the
# NDVI relation at a coarser resolution (Kustas-style sharpening).
# BUGFIX: the flag was tested with "is 1" / "is 0". Identity comparison with
# int literals relies on CPython small-int caching (and fails for numpy ints);
# it also raises a SyntaxWarning on modern Python. Use equality instead.
if Thermal_Sharpening_not_needed == 1:
    # Thermal band already at the target resolution; use it unchanged.
    temp_surface_sharpened = Surface_temp
if Thermal_Sharpening_not_needed == 0:
    # Combined exclusion mask (bad quality + cloud + water) for sharpening
    Total_mask_thermal = QC_Map + cloud_mask_temp + water_mask_temp
    Total_mask_thermal[Total_mask_thermal > 0] = 1
    # Upscale DEM: coarse pixel size and sharpening box depend on the sensor
    if Image_Type == 1:
        pixel_spacing_upscale = 90
        Box = 7
    if Image_Type == 2:
        pixel_spacing_upscale = 400
        Box = 9
    if Image_Type == 3:
        pixel_spacing_upscale = 1000
        Box = 9
    dest_up, ulx_dem_up, lry_dem_up, lrx_dem_up, uly_dem_up, epsg_to = reproject_dataset(
        DEM_fileName, pixel_spacing_upscale, UTM_Zone = UTM_Zone)
    DEM_up = dest_up.GetRasterBand(1).ReadAsArray()
    Y_raster_size_up = dest_up.RasterYSize
    X_raster_size_up = dest_up.RasterXSize
    shape_up = ([X_raster_size_up, Y_raster_size_up])
    save_GeoTiff_proy(dest_up, DEM_up, proyDEM_fileName_up, shape_up, nband=1)
    # save the fine-resolution surface temperature (input to the upscaling)
    surf_temp_fileName = os.path.join(output_folder, 'Output_vegetation','%s_%s_surface_temp_%s_%s_%s.tif' %(sensor1, sensor2, res2, year, DOY))
    save_GeoTiff_proy(lsc, Surface_temp, surf_temp_fileName, shape_lsc, nband=1)
    # Upscale NDVI data
    dest_up, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset_example(
        ndvi_fileName, proyDEM_fileName_up)
    NDVI_Landsat_up = dest_up.GetRasterBand(1).ReadAsArray()
    # upscale the mask to coarser resolution
    Total_mask_thermal_up = resize_array_example(Total_mask_thermal, NDVI_Landsat_up, method=2)
    Total_mask_thermal_up[Total_mask_thermal_up>0]=1
    # Upscale Thermal data
    dest_up, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset_example(
        surf_temp_fileName, proyDEM_fileName_up)
    surface_temp_up = dest_up.GetRasterBand(1).ReadAsArray()
    # Remove wrong values (0/1 are fill values; masked pixels become NaN)
    surface_temp_up[surface_temp_up==0] = np.nan
    NDVI_Landsat_up[NDVI_Landsat_up==0] = np.nan
    surface_temp_up[surface_temp_up==1] = np.nan
    NDVI_Landsat_up[Total_mask_thermal_up==1] = np.nan
    NDVI[Total_mask_thermal==1] = np.nan
    # Apply thermal sharpening
    temp_surface_sharpened = Thermal_Sharpening(surface_temp_up, NDVI_Landsat_up, NDVI, Box, dest_up, output_folder, ndvi_fileName, shape_lsc, lsc)
    # Replace water values to original thermal values
    temp_surface_sharpened[water_mask_temp == 1] = Surface_temp[water_mask_temp == 1]
    temp_surface_sharpened[np.isnan(temp_surface_sharpened)] = Surface_temp[np.isnan(temp_surface_sharpened)]
    # remove low temperature values (below 253 K is not physical here)
    temp_surface_sharpened[temp_surface_sharpened <= 253.0] = np.nan
    # Calculate the temperature of the water
    Temperature_water_std = np.nanstd(temp_surface_sharpened[water_mask_temp != 0])
    Temperature_water_mean = np.nanmean(temp_surface_sharpened[water_mask_temp != 0])
    print('Mean water Temperature = %0.3f (K)' % Temperature_water_mean)
    print('Standard deviation water temperature = %0.3f (K)' % Temperature_water_std)
    # save sharpened surface temperature
    save_GeoTiff_proy(lsc, temp_surface_sharpened, temp_surface_sharpened_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('------- Create Masks and Quality Layers (Part 8) --------')
print('---------------------------------------------------------')
# Build the quality-control (QC), cloud, snow and shadow masks. A user-supplied
# QC map (column F of Additional_Input) takes precedence over the derived one.
try:
    ws = wb['Additional_Input']
    if (ws['F%d' % number].value) is not None:
        # Output folder QC defined by the user
        QC_Map_fileName = os.path.join(output_folder, 'Output_cloud_masked', 'User_quality_mask_%s_%s_%s.tif' %(res2, year, DOY))
        # Reproject and reshape the user's QC map onto the DEM grid
        QC_Map = Reshape_Reproject_Input_data(r'%s' %str(ws['F%d' % number].value), QC_Map_fileName, proyDEM_fileName)
    else:
        # Derive snow/water masks and cold-reference statistics from the scene
        snow_mask, water_mask, ts_moist_veg_min, NDVI_max, NDVI_std = CalculateSnowWaterMask(NDVI,shape_lsc,water_mask_temp,Surface_temp)
        Temperature_water_mean=np.nanmean(temp_surface_sharpened[water_mask != 0])
        # Fall back to cold moist vegetation when no valid cold water exists
        if np.isnan(Temperature_water_mean) == True or Temperature_water_mean < 0.0:
            ts_cold_land=ts_moist_veg_min
        else:
            ts_cold_land=Temperature_water_mean
        # Make shadow mask: cold + dark + not water
        shadow_mask=np.zeros((shape_lsc[1], shape_lsc[0]))
        shadow_mask[np.logical_and.reduce((temp_surface_sharpened < (ts_cold_land+Temperature_offset_shadow),Surf_albedo < Maximum_shadow_albedo,water_mask!=1))]=1
        shadow_mask = Create_Buffer(shadow_mask)
        # Improve cloud mask for Landsat when a BQA band is available
        if Image_Type == 1:
            # open worksheet
            ws = wb['Landsat_Input']
            # Extract Landsat name
            Name_Landsat_Image = str(ws['B%d' %number].value)
            if os.path.exists(os.path.join(input_folder, '%s_BQA.TIF' %Name_Landsat_Image)):
                cloud_mask_temp[np.logical_and.reduce((Surface_temp < (ts_cold_land+Temperature_offset_clouds),Surf_albedo > Minimum_cloud_albedo,NDVI<0.7,snow_mask!=1))]=1
                cloud_mask = Create_Buffer(cloud_mask_temp) # if there are no cold water pixels than use cold vegetation pixels
            else:
                cloud_mask = cloud_mask_temp
        else:
            cloud_mask_temp[np.logical_and.reduce((Surface_temp < (ts_cold_land+Temperature_offset_clouds),Surf_albedo > Minimum_cloud_albedo,NDVI<0.7,snow_mask!=1))]=1
            cloud_mask = Create_Buffer(cloud_mask_temp)
        # Total Quality Mask: any flagged pixel marks QC_Map as bad (1)
        Tot_Masks = cloud_mask + snow_mask + shadow_mask + QC_Map
        QC_Map[Tot_Masks>0] = 1
        # Output file name of the derived quality mask
        # BUGFIX: extension was accidentally doubled ('.tif.tif')
        QC_Map_fileName = os.path.join(output_folder, 'Output_cloud_masked', '%s_quality_mask_%s_%s_%s.tif' %(sensor1, res2, year, DOY))
        # Save output maps
        save_GeoTiff_proy(lsc, cloud_mask, cloud_mask_fileName, shape_lsc, nband=1)
        save_GeoTiff_proy(lsc, snow_mask, snow_mask_fileName, shape_lsc, nband=1)
        save_GeoTiff_proy(lsc, shadow_mask, shadow_mask_fileName, shape_lsc, nband=1)
        save_GeoTiff_proy(lsc, QC_Map, QC_Map_fileName, shape_lsc, nband=1)
except Exception as e:
    # BUGFIX: the original 'assert "..."' was a no-op (a non-empty string is
    # always truthy), so failures were silently swallowed. Keep the
    # best-effort behaviour but report what went wrong.
    print('Please check the quality path (%s)' % e)
# Check Water Mask and replace the temporary mask with a user-supplied one
# (column E of Additional_Input) when present.
try:
    ws = wb['Additional_Input']
    if (ws['E%d' % number].value) is not None:
        # Overwrite the Water mask and change the output name
        water_mask_fileName = os.path.join(output_folder, 'Output_soil_moisture', 'User_Water_mask_temporary_%s_%s_%s.tif' %(res2, year, DOY))
        # NOTE(review): the reprojected user mask is written to
        # water_mask_temp_fileName rather than the water_mask_fileName defined
        # just above (which is only used by the save below) — verify intent.
        water_mask = Reshape_Reproject_Input_data(r'%s' %str(ws['E%d' % number].value), water_mask_temp_fileName, proyDEM_fileName)
except Exception as e:
    # BUGFIX: the original 'assert "..."' was a no-op; report the failure
    # instead of silently swallowing it (the fallback below still applies).
    print('Please check the Water Mask input path (%s)' % e)
# Fall back to the scene-derived mask when no user mask was loaded
if not "water_mask" in locals():
    water_mask = water_mask_temp
# Save output maps
save_GeoTiff_proy(lsc, water_mask, water_mask_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('------- Meteo and Radiation Continue (Part 9) -----------')
print('---------------------------------------------------------')
# Slope of satur vapour pressure curve at air temp (kPa / °C)
sl_es_24 = 4098 * esat_24 / np.power(Temp_24 + 237.3, 2)
# Daily 24 hr radiation - For flat terrain only !
# BUGFIX: the second factor used the bare name 'tan', which is not guaranteed
# to be in scope in this module; use np.tan (delta is a scalar, so the
# numerical result is unchanged).
ws_angle = np.arccos(-np.tan(phi)*np.tan(delta)) # Sunset hour angle ws
# Extraterrestrial daily radiation, Ra (W/m2), evaluated at the scene centre:
Ra24_flat = (Gsc/np.pi * dr * (ws_angle * np.sin(phi[int(nrow/2), int(ncol/2)]) * np.sin(delta) +
            np.cos(phi[int(nrow/2), int(ncol/2)]) * np.cos(delta) * np.sin(ws_angle)))
# calculate the daily radiation or daily transmissivity or daily surface radiation based on the method defined by the user
if Method_Radiation_24==1:
    Transm_24 = Rs_24/Ra_mountain_24
if Method_Radiation_24==2:
    Rs_24 = Ra_mountain_24 * Transm_24
# Solar radiation from extraterrestrial radiation
Rs_24_flat = Ra24_flat * Transm_24
print('Mean Daily Transmissivity = %0.3f (-)' % np.nanmean(Transm_24))
print('Mean Daily incoming net Radiation = %0.3f (W/m2)' % np.nanmean(Rs_24))
print('Mean Daily incoming net Radiation Flat Terrain = %0.3f (W/m2)' % np.nanmean(Rs_24_flat))
# If method of instantaneous radiation 1 is used than calculate the Transmissivity
if Method_Radiation_inst==1:
    Transm_corr=Rs_inst/Ra_inst
# If method of instantaneous radiation 2 is used than calculate the instantaneous incomming Radiation
if Method_Radiation_inst==2:
    # calculate the transmissivity index for direct beam radiation
    Transm_corr = Transm_inst + 2e-5 * DEM_resh
    # Instantaneous incoming short wave radiation (W/m2):
    Rs_inst = Ra_inst * Transm_corr
# Atmospheric emissivity, by Bastiaanssen (1995):
# clip the transmissivity to a physically sensible range first
Transm_corr[Transm_corr<0.001]=0.1
Transm_corr[Transm_corr>1]=1
atmos_emis = 0.85 * np.power(-np.log(Transm_corr), 0.09)
# Instantaneous incoming longwave radiation (Stefan-Boltzmann):
lw_in_inst = atmos_emis * SB_const * np.power(Temp_inst + 273.15, 4)
print('Instantaneous longwave incoming radiation = %0.3f (W/m2)' % np.nanmean(lw_in_inst))
print('Atmospheric emissivity = %0.3f' % np.nanmean(atmos_emis))
# calculates the ground heat flux and the solar radiation
Rn_24,rn_inst,g_inst,Rnl_24_FAO = Calc_Meteo(Rs_24,eact_24,Temp_24,Surf_albedo,dr,tir_emis,temp_surface_sharpened,water_mask,NDVI,Transm_24,SB_const,lw_in_inst,Rs_inst)
print('Mean Daily Net Radiation (FAO) = %0.3f (W/m2)' % np.nanmean(Rnl_24_FAO))
print('Mean Daily Net Radiation = %0.3f (W/m2)' % np.nanmean(Rn_24))
print('Mean instantaneous Net Radiation = %0.3f (W/m2)' % np.nanmean(rn_inst))
print('Mean instantaneous Ground Heat Flux = %0.3f (W/m2)' % np.nanmean(g_inst))
# Save output maps
save_GeoTiff_proy(lsc, Rn_24, Rn_24_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, rn_inst, rn_inst_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, g_inst, g_inst_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Pair, Atmos_pressure_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Psychro_c, Psychro_c_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('---------------- Hot/Cold Pixels (Part 10) --------------')
print('---------------------------------------------------------')
# Surface temperature corrected for elevation (reduced to a common datum)
ts_dem,air_dens,Temp_corr=Correct_Surface_Temp(temp_surface_sharpened,Temp_lapse_rate,DEM_resh,Pair,dr,Transm_corr,cos_zn,Sun_elevation,deg2rad,QC_Map)
# Selection of hot and cold pixels (user overrides in Additional_Input G/H)
# Open Additional_Input sheet in the excel
ws = wb['Additional_Input']
if (ws['G%d' % number].value) is not None:
    # user-provided cold pixel temperature (°C in the sheet, converted to K)
    ts_dem_cold = float(ws['G%d' % number].value) + 273.15
    print('cold pixel defined by the user: value=%0.3f (Kelvin)' %ts_dem_cold)
else:
    # NDVI statistics may already exist from CalculateSnowWaterMask (Part 8)
    if not "NDVI_max" in locals():
        NDVI_max = np.nanmax(NDVI)
        NDVI_std = np.nanstd(NDVI)
    # Cold pixels vegetation
    ts_dem_cold_veg = Calc_Cold_Pixels_Veg(NDVI,NDVI_max,NDVI_std, QC_Map,ts_dem,Image_Type, Cold_Pixel_Constant)
    # Cold pixels water
    ts_dem_cold,cold_pixels,ts_dem_cold_mean = Calc_Cold_Pixels(ts_dem,water_mask,QC_Map,ts_dem_cold_veg,Cold_Pixel_Constant)
    if np.isnan(ts_dem_cold) == True:
        # fall back to the instantaneous air temperature map
        # NOTE(review): ts_dem_cold becomes an array here, and the save below
        # only runs in this NaN branch — possibly an indentation bug; verify.
        ts_dem_cold = Temp_inst
        save_GeoTiff_proy(lsc, cold_pixels, cold_pixels_fileName, shape_lsc, nband=1)
    # NOTE(review): the hot-pixel selection below is nested inside this else
    # branch, so a user-defined hot pixel is ignored when a user cold pixel
    # was given above — verify this is intended.
    if (ws['H%d' % number].value) is not None:
        # user-provided hot pixel temperature (°C in the sheet, converted to K)
        ts_dem_hot = float(ws['H%d' % number].value) + 273.15
        print('hot pixel defined by the user: value=%0.3f (Kelvin)' %ts_dem_hot)
        for_hot = np.copy(ts_dem)
        for_hot[NDVI <= NDVIhot_low] = 0.0
        for_hot[NDVI >= NDVIhot_high] = 0.0
        for_hot[np.logical_or(water_mask != 0.0, QC_Map != 0.0)] = 0.0
        hot_pixels = np.copy(for_hot)
        hot_pixels[for_hot < ts_dem_cold] = np.nan
    else:
        # Hot pixels
        ts_dem_hot,hot_pixels = Calc_Hot_Pixels(ts_dem,QC_Map, water_mask,NDVI,NDVIhot_low,NDVIhot_high, Hot_Pixel_Constant, ts_dem_cold)
    save_GeoTiff_proy(lsc, hot_pixels, hot_pixels_fileName, shape_lsc, nband=1)
# Save files
save_GeoTiff_proy(lsc, Temp_corr, temp_corr_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, ts_dem, ts_dem_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('------------ Sensible heat flux (Part 11) ---------------')
print('---------------------------------------------------------')
# Change the minimum windspeed to prevent high values in further calculations
Wind_inst = np.where(Wind_inst<1.5, 1.5, Wind_inst)
Wind_24 = np.where(Wind_24<1.5, 1.5, Wind_24)
# calculate windspeed at the blending height and the friction velocity by using the Raupach model or NDVI
Surf_roughness,u_200,ustar_1=Calc_Wind_Speed_Friction(h_obst,Wind_inst,zx,LAI,NDVI,Surf_albedo,water_mask,surf_roughness_equation_used)
save_GeoTiff_proy(lsc, Surf_roughness, surf_rough_fileName, shape_lsc, nband=1)
# Computation of surface roughness for momentum transport
k_vk = 0.41 # Von Karman constant
# Sensible heat 1 (Step 5)
# Corrected value for the aerodynamic resistance (eq 41 with psi2 = psi1):
rah1 = np.log(2.0/0.01) / (k_vk * ustar_1)
i=0
# First (neutral-stability) sensible-heat estimate
L, psi_m200_stable, psi, psi_m200,h_inst,dT, slope_dt, offset_dt = sensible_heat(
    rah1, ustar_1, rn_inst, g_inst, ts_dem, ts_dem_hot, ts_dem_cold,
    air_dens, temp_surface_sharpened, k_vk,QC_Map, hot_pixels, slope)
# do the calculation iteratively 10 times (Monin-Obukhov stability correction)
for i in range(1,10):
    L,psi,psi_m200,psi_m200_stable,h_inst,ustar_corr,rah_corr,dT, slope_dt, offset_dt = Iterate_Friction_Velocity(k_vk,u_200,Surf_roughness,g_inst,rn_inst, ts_dem, ts_dem_hot, ts_dem_cold,air_dens, temp_surface_sharpened,L,psi,psi_m200,psi_m200_stable,QC_Map, hot_pixels, slope)
# Save files
save_GeoTiff_proy(lsc, h_inst, h_inst_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('-------------- Evaporation (Part 12) --------------------')
print('---------------------------------------------------------')
# calculate reference net radiation
Rn_ref, Refl_rad_water, rah_grass=Calc_Rn_Ref(shape_lsc,water_mask,Rn_24,Ra_mountain_24,Transm_24,Rnl_24_FAO,Wind_24)
# Calculate rah of PM for the ET act (dT after iteration) and ETpot (4 degrees)
rah_pm_act=((np.log((2.0-0.0)/(Surf_roughness*0.1))*np.log((2.0-0.0)/(Surf_roughness)))/(k_vk*1.5**2))*((1-5*(-9.82*dT*(2.0-0.0))/((273.15+Temp_inst)*1.5**2))**(-0.75))
rah_pm_act[rah_pm_act<25]=25
rah_pm_pot=((np.log((2.0-0.0)/(Surf_roughness*0.1))*np.log((2.0-0.0)/(Surf_roughness)))/(k_vk*1.5**2))*((1-5*(-9.82*4.0*(2.0-0.0))/((273.15+Temp_inst)*1.5**2))**(-0.75))
rah_pm_pot[rah_pm_pot<25]=25
# calculate reference potential evaporation.
ETpot_24,ETref_24,Lhv,rs_min=Calc_Ref_Pot_ET(LAI,temp_surface_sharpened,sl_es_24,Rn_ref,air_dens,esat_24,eact_24,rah_grass,Psychro_c,Rn_24,Refl_rad_water,rah_pm_pot,rl)
# Instantaneous evapotranspiration: latent heat = net radiation - soil - sensible
LE_inst = rn_inst - g_inst - h_inst
# Evaporative fraction
EF_inst=Calc_instantaneous_ET_fraction(LE_inst,rn_inst,g_inst)
# Daily Evaporation and advection factor
ETA_24, AF=Calc_ETact(esat_24,eact_24,EF_inst,Rn_24,Refl_rad_water,Lhv, Image_Type)
# Bulk surface resistance (s/m):
bulk_surf_resis_24=Calc_Bulk_surface_resistance(sl_es_24,Rn_24,Refl_rad_water,air_dens,esat_24,eact_24,rah_pm_act,ETA_24,Lhv,Psychro_c)
# crop factor
kc = ETA_24 / ETref_24 # Crop factor
# potential ET can never be below the actual ET
ETP_24 = np.where(ETpot_24 < ETA_24, ETA_24, ETpot_24)
ET_24_deficit = ETP_24 - ETA_24
kc_max = ETP_24 / ETref_24
# Save files
save_GeoTiff_proy(lsc, rs_min, min_bulk_surf_res_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, EF_inst, EF_inst_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, LE_inst, LE_inst_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, ETref_24, ETref_24_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, ETA_24, ETA_24_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, ETP_24, ETP_24_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, ET_24_deficit, ET_24_deficit_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, AF, AF_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, kc, kc_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, kc_max, kc_max_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, bulk_surf_resis_24, bulk_surf_res_fileName, shape_lsc, nband=1)
print('---------------------------------------------------------')
print('--------------- Soil Moisture (Part 13) -----------------')
print('---------------------------------------------------------')
# Calculate soil properties
#SM_stress_trigger, total_soil_moisture, RZ_SM,moisture_stress_biomass,irrigation_needs,top_soil_moisture=Calc_Soil_Moisture(ETA_24,accum_prec_14d,accum_ETo_14d,EF_inst,water_mask,vegt_cover,Theta_sat,Theta_res)
SM_stress_trigger, total_soil_moisture, root_zone_moisture_first, moisture_stress_biomass_first,top_soil_moisture,RZ_SM_NAN = Calc_Soil_Moisture(ETA_24,EF_inst,QC_Map,water_mask,vegt_cover,Theta_sat_top,Theta_sat_sub, Theta_res_top,Theta_res_sub, depl_factor,Field_Capacity,FPAR, Soil_moisture_wilting_point)
# separation of evaporation (E) and transpiration (T)
Eact_24,Tpot_24,Tact_24,moisture_stress_biomass,T24_deficit,beneficial_fraction,root_zone_moisture_final,top_zone_moisture_final=Separate_E_T(Light_use_extinction_factor,LAI,ETP_24,Theta_res_top, Theta_res_sub,Theta_sat_top,Theta_sat_sub,top_soil_moisture,sl_es_24, Psychro_c,moisture_stress_biomass_first,vegt_cover,ETA_24,SM_stress_trigger,root_zone_moisture_first,total_soil_moisture)
# Irrigation: classify irrigation requirement from moisture stress and cover
irrigation_needs = Classify_Irrigation(moisture_stress_biomass, vegt_cover)
# Save files
save_GeoTiff_proy(lsc, Tact_24, Tact24_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Eact_24, Eact24_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Tpot_24, Tpot24_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, T24_deficit, T24_deficit_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, total_soil_moisture, total_soil_moisture_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, top_zone_moisture_final, top_soil_moisture_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, root_zone_moisture_final, RZ_SM_fileName, shape_lsc,nband=1)
save_GeoTiff_proy(lsc, SM_stress_trigger, SM_stress_trigger_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, moisture_stress_biomass, moisture_stress_biomass_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, irrigation_needs, irrigation_needs_fileName,shape_lsc, nband=1)
print('---------------------------------------------------------')
print('------------------ Biomass (Part 14)---------------------')
print('---------------------------------------------------------')
# calculate biomass production
LUE,Biomass_prod,Biomass_wp,Biomass_deficit = Calc_Biomass_production(LAI,ETP_24,moisture_stress_biomass,ETA_24,Ra_mountain_24,Transm_24,FPAR,esat_24,eact_24,Th,Kt,Tl,Temp_24,LUEmax)
# Save files
save_GeoTiff_proy(lsc, LUE, LUE_fileName,shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Biomass_prod, Biomass_prod_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Biomass_wp, Biomass_wp_fileName, shape_lsc, nband=1)
save_GeoTiff_proy(lsc, Biomass_deficit, Biomass_deficit_fileName,shape_lsc, nband=1)
lsc=None
print('...................................................................')
print('............................DONE!..................................')
print('...................................................................')
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
# FUNCTIONS
#-------------------------------------------------------------------------
def Create_Buffer(Data_In):
    '''
    This function creates a 3D array which is used to apply the moving window

    Dilates the binary input: any pixel within a square block of
    (2*Buffer_area+1) pixels around a nonzero input pixel becomes 1 in the
    returned mask.
    '''
    Buffer_area = 2 # A block of 2 times Buffer_area + 1 will be 1 if there is the pixel in the middle is 1
    Data_Out=np.empty((len(Data_In),len(Data_In[1])))
    Data_Out[:,:] = Data_In
    # Accumulate shifted copies of the input; any positive sum marks a hit.
    # NOTE(review): the inner loops re-bind the outer ypixel/xpixel loop
    # variables, and the ypixel==0 branch repeats identical shifts on every
    # outer xpixel iteration -- this looks redundant; confirm against the
    # intended moving-window logic before changing it.
    for ypixel in range(0,Buffer_area + 1):
        for xpixel in range(1,Buffer_area + 1):
            if ypixel==0:
                for xpixel in range(1,Buffer_area + 1):
                    Data_Out[:,0:-xpixel] += Data_In[:,xpixel:]
                    Data_Out[:,xpixel:] += Data_In[:,:-xpixel]
                for ypixel in range(1,Buffer_area + 1):
                    Data_Out[ypixel:,:] += Data_In[:-ypixel,:]
                    Data_Out[0:-ypixel,:] += Data_In[ypixel:,:]
            else:
                Data_Out[0:-xpixel,ypixel:] += Data_In[xpixel:,:-ypixel]
                Data_Out[xpixel:,ypixel:] += Data_In[:-xpixel,:-ypixel]
                Data_Out[0:-xpixel,0:-ypixel] += Data_In[xpixel:,ypixel:]
                Data_Out[xpixel:,0:-ypixel] += Data_In[:-xpixel,ypixel:]
    # Threshold the accumulated counts back to a 0/1 mask
    Data_Out[Data_Out>0.1] = 1
    Data_Out[Data_Out<=0.1] = 0
    return(Data_Out)
def Calc_Biomass_production(LAI,ETP_24,moisture_stress_biomass,ETA_24,Ra_mountain_24,Transm_24,FPAR,esat_24,eact_24,Th,Kt,Tl,Temp_24,LUEmax):
    """
    Compute biomass production, water productivity and the biomass water
    deficit from absorbed PAR and a set of stress scalars.

    Note: LAI and ETP_24 are accepted for interface compatibility but are
    not used in the computation.
    """
    # Daily incoming shortwave radiation at the surface (W/m2)
    incoming_solar = Ra_mountain_24 * Transm_24
    # Photosynthetically active radiation and the part absorbed by the canopy
    par_incident = 0.48 * incoming_solar
    par_absorbed = FPAR * par_incident
    # Vapour-pressure-deficit stress scalar, clipped to [0, 1]
    vapor_stress_biomass = (0.88 - 0.183 * np.log(esat_24 - eact_24)).clip(0.0, 1.0)
    # Jarvis-type temperature response around the optimum temperature Kt
    jarvis_exponent = (Th - Kt) / (Kt - Tl)
    heat_stress_biomass = ((Temp_24 - Tl) * np.power(Th - Temp_24, jarvis_exponent) /
                           ((Kt - Tl) * np.power(Th - Kt, jarvis_exponent)))
    print('vapor stress biomass =', '%0.3f' % np.nanmean(vapor_stress_biomass))
    print('heat stress biomass =', '%0.3f' % np.nanmean(heat_stress_biomass))
    # Actual light use efficiency: potential LUE reduced by all three stresses
    LUE = LUEmax * heat_stress_biomass * vapor_stress_biomass * moisture_stress_biomass
    # Dry matter production (kg/ha/d), C3 vegetation
    Biomass_prod = par_absorbed * LUE * 0.864
    # Water productivity (biomass per unit of water consumed)
    Biomass_wp = Biomass_prod / (ETA_24 * 10)
    Biomass_wp[ETA_24 == 0.0] = 0.0
    # Production lost to moisture stress
    Biomass_deficit = Biomass_prod / moisture_stress_biomass - Biomass_prod
    return(LUE,Biomass_prod,Biomass_wp,Biomass_deficit)
#------------------------------------------------------------------------------
def Classify_Irrigation(moisture_stress_biomass, vegt_cover):
    '''
    Classify moisture stress into 4 discrete irrigation-need categories:
      0: No need for irrigation (no stress, or bare/sparse soil)
      1: Perhaps irrigate       (stress in [0.9, 1.0))
      2: Irrigate               (stress in [0.8, 0.9))
      3: Irrigate immediately   (stress < 0.8)
    Pixels with vegetation cover <= 0.3 are always class 0.
    NaN stress values are passed through unchanged.
    '''
    for_irrigation = np.copy(moisture_stress_biomass)
    # Classify from the pristine copy so earlier categories cannot cascade
    # into later ones.  (BUGFIX: the original used `==` instead of `=` for
    # the first category, a no-op comparison, so unstressed pixels were
    # never assigned class 0; sequential in-place masking would also have
    # re-classified those freshly-set 0.0 values as class 3.)
    irrigation_needs = np.where(for_irrigation >= 1.0, 0.0,
                       np.where(for_irrigation >= 0.9, 1.0,
                       np.where(for_irrigation >= 0.8, 2.0,
                       np.where(for_irrigation < 0.8, 3.0, for_irrigation))))
    # Sparse or bare pixels need no irrigation class
    irrigation_needs[vegt_cover <= 0.3] = 0.0
    return(irrigation_needs)
#------------------------------------------------------------------------------
def Separate_E_T(Light_use_extinction_factor,LAI,ETP_24,Theta_res_top, Theta_res_sub,Theta_sat_top,Theta_sat_sub,top_soil_moisture,sl_es_24, Psychro_c,moisture_stress_biomass_first,vegt_cover,ETA_24,SM_stress_trigger,root_zone_moisture_first,total_soil_moisture):
    '''
    Separate the Evapotranspiration into evaporation and Transpiration

    First estimates of potential transpiration (canopy light interception)
    and soil evaporation (top-layer effective saturation) are used to
    partition the actual ET; the soil moisture of both layers is then
    re-derived from the resulting moisture stress.

    Returns (Eact_24, Tpot_24, Tact_24, moisture_stress_biomass,
             T24_deficit, beneficial_fraction, root_zone_moisture_final,
             top_zone_moisture_final).
    '''
    # constants
    # Beer's-law canopy light interception gives the potential transpiration
    Tpot_24_estimate=(1-np.exp(-Light_use_extinction_factor*LAI))*ETP_24
    # Effective saturation of the top soil layer
    SE_top = (top_soil_moisture-Theta_res_top)/(Theta_sat_top-Theta_res_top)
    # First estimate of soil evaporation, damped by top-layer saturation
    Eact_24_estimate=np.minimum(1,1 / np.power(SE_top + 0.1,-2.0))*(ETP_24-Tpot_24_estimate)
    #RS_soil = RS_soil_min * np.power(SE_top,-2.0)
    #Eact_24_estimate=(sl_es_24+Psychro_c*(1+RS_soil_min/Rah_PM))/(sl_es_24+Psychro_c*(1+RS_soil/Rah_PM))*(ETP_24-Tpot_24_estimate)
    # Stress-reduced potential transpiration
    n66_memory = moisture_stress_biomass_first * Tpot_24_estimate
    # calulate the first estimation of actual daily tranpiration
    # (capped at 99% of the actual ET; zero where there is no vegetation)
    Tact_24_estimate = np.copy(n66_memory)
    Tact_24_estimate[n66_memory > 0.99*ETA_24]=ETA_24[n66_memory > 0.99*ETA_24]
    Tact_24_estimate[vegt_cover == 0.0] = 0.0
    # calculate the second estimation and end estimation of the actual daily tranpiration
    # (partition the actual ET by the ratio of the two first estimates)
    Tact_24 = np.abs((Tact_24_estimate/(Tact_24_estimate + Eact_24_estimate))*ETA_24)
    # calculate the actual daily potential transpiration
    Tpot_24 = np.copy(Tpot_24_estimate)
    Tpot_24[Tpot_24_estimate < Tact_24] = Tact_24[Tpot_24_estimate < Tact_24]
    # calculate moisture stress biomass
    moisture_stress_biomass = Tact_24 / Tpot_24
    # Calculate root zone moisture final
    # (cubic polynomial inverts the stress back to an effective saturation)
    Se_Poly=2.23*np.power(moisture_stress_biomass,3)-3.35*np.power(moisture_stress_biomass,2)+1.98*moisture_stress_biomass+0.07
    root_zone_moisture1=Se_Poly*(SM_stress_trigger+0.02-Theta_res_sub)+Theta_res_sub
    root_zone_moisture_final=np.where(root_zone_moisture1>root_zone_moisture_first,root_zone_moisture1,root_zone_moisture_first)
    # Calculate top zone moisture final
    # (what remains of the total moisture once the vegetated fraction holds
    # the root-zone moisture)
    top_zone_moisture1=(total_soil_moisture-root_zone_moisture_final*vegt_cover)/(1-vegt_cover)
    top_zone_moisture_final=top_zone_moisture1.clip(Theta_res_top,Theta_sat_top)
    # calculate the actual daily evaporation
    Eact_24 = ETA_24 - Tact_24
    # calculate the Transpiration deficit
    T24_deficit = Tpot_24 - Tact_24
    # calculate the beneficial fraction
    beneficial_fraction=Tact_24 / ETA_24
    beneficial_fraction[ETA_24 == 0.0] = 0.0
    return(Eact_24,Tpot_24,Tact_24,moisture_stress_biomass,T24_deficit,beneficial_fraction,root_zone_moisture_final,top_zone_moisture_final)
#------------------------------------------------------------------------------
def Calc_Soil_Moisture(ETA_24,EF_inst,QC_Map, water_mask,vegt_cover,Theta_sat_top, Theta_sat_sub,Theta_res_top, Theta_res_sub,depl_factor,Field_Capacity,FPAR, Soil_moisture_wilting_point):
    """
    Function to calculate soil characteristics

    Derives total, top-layer and root-zone soil moisture from the
    instantaneous evaporative fraction, plus the moisture-stress trigger
    and a first estimate of the biomass moisture-stress factor.

    NOTE(review): this function mutates the caller's EF_inst array in place
    (values >= 1.0 are capped at 0.999).

    Returns (SM_stress_trigger, total_soil_moisture,
             root_zone_moisture_first, moisture_stress_biomass_first,
             top_soil_moisture, RZ_SM_NAN).
    """
    # constants:
    Veg_Cover_Threshold_RZ = 0.9 # Threshold vegetation cover for root zone moisture
    # Average fraction of TAW that can be depleted from the root zone
    # before stress:
    p_factor = depl_factor + 0.04 * (5.0 - ETA_24) # page 163 of FAO 56
    # The factor p differs from one crop to another. It normally varies from
    # 0.30 for shallow rooted plants at high rates of ETc (> 8 mm d-1)
    # to 0.70 for deep rooted plants at low rates of ETc (< 3 mm d-1)
    # Critical value under which plants get stressed:
    SM_stress_trigger = Field_Capacity - p_factor * (Field_Capacity - Soil_moisture_wilting_point)
    # Cap the evaporative fraction just below 1 (mutates the caller's array)
    EF_inst[EF_inst >= 1.0] = 0.999
    # Total soil water content (cm3/cm3):
    total_soil_moisture = Theta_sat_sub * np.exp((EF_inst - 1.0) / 0.421) # asce paper Scott et al. 2003
    total_soil_moisture[np.logical_or(water_mask == 1.0,QC_Map == 1.0)] = 1.0 # In water and snow is 1
    total_soil_moisture[QC_Map == 1.0] = np.nan # Where clouds no data
    # Root zone soil moisture: sampled only at densely vegetated pixels
    RZ_SM = np.copy(total_soil_moisture)
    RZ_SM[vegt_cover <= Veg_Cover_Threshold_RZ] = np.nan
    # If no pixel is dense enough, relax the threshold to the 80th percentile
    # of the vegetation cover and resample
    if np.isnan(np.nanmean(RZ_SM)) == True:
        Veg_Cover_Threshold_RZ = np.nanpercentile(vegt_cover, 80)
        RZ_SM = np.copy(total_soil_moisture)
        RZ_SM[vegt_cover <= Veg_Cover_Threshold_RZ] = np.nan
        print('No RZ_SM so the vegetation Threshold for RZ is adjusted from 0,9 to =', '%0.3f' % Veg_Cover_Threshold_RZ)
    #RZ_SM = RZ_SM.clip(Theta_res, (0.85 * Theta_sat))
    #RZ_SM[np.logical_or(water_mask == 1.0, water_mask == 2.0)] = 1.0
    RZ_SM_NAN = np.copy(RZ_SM)
    RZ_SM_NAN[RZ_SM==0] = np.nan
    RZ_SM_min = np.nanmin(RZ_SM_NAN)
    RZ_SM_max = np.nanmax(RZ_SM_NAN)
    RZ_SM_mean = np.nanmean(RZ_SM_NAN)
    print('Root Zone Soil moisture mean =', '%0.3f (cm3/cm3)' % RZ_SM_mean)
    print('Root Zone Soil moisture min =', '%0.3f (cm3/cm3)' % RZ_SM_min)
    print('Root Zone Soil moisture max =', '%0.3f (cm3/cm3)' % RZ_SM_max)
    # Scale the root-zone maximum with vegetation cover
    Max_moisture_RZ = vegt_cover * (RZ_SM_max - RZ_SM_mean) + RZ_SM_mean
    # Soil moisture in the top (temporary)
    # (statistics taken over sparsely vegetated pixels only)
    top_soil_moisture_temp = np.copy(total_soil_moisture)
    top_soil_moisture_temp[np.logical_or(vegt_cover <= 0.02, vegt_cover >= 0.1)] = 0
    top_soil_moisture_temp[top_soil_moisture_temp == 0] = np.nan
    top_soil_moisture_std = np.nanstd(top_soil_moisture_temp)
    top_soil_moisture_mean = np.nanmean(top_soil_moisture_temp)
    print('Top Soil moisture mean =', '%0.3f (cm3/cm3)' % top_soil_moisture_mean)
    print('Top Soil moisture Standard Deviation', '%0.3f (cm3/cm3)' % top_soil_moisture_std)
    # calculate root zone moisture
    root_zone_moisture_temp = (total_soil_moisture - (top_soil_moisture_mean + top_soil_moisture_std) * (1-vegt_cover))/vegt_cover # total soil moisture = soil moisture no vegtatation *(1-vegt_cover)+soil moisture root zone * vegt_cover
    # Theta_res_sub may be a map or a scalar; the except branch handles scalars
    try:
        root_zone_moisture_temp[root_zone_moisture_temp <= Theta_res_sub] = Theta_res_sub[root_zone_moisture_temp <= Theta_res_sub]
    except:
        root_zone_moisture_temp[root_zone_moisture_temp <= Theta_res_sub] = Theta_res_sub
    root_zone_moisture_temp[root_zone_moisture_temp >= Max_moisture_RZ] = Max_moisture_RZ[root_zone_moisture_temp >= Max_moisture_RZ]
    root_zone_moisture_first = np.copy(root_zone_moisture_temp)
    root_zone_moisture_first[np.logical_or(QC_Map ==1.0 ,np.logical_or(water_mask == 1.0, vegt_cover < 0.0))] = 0
    # Normalized stress trigger:
    norm_trigger = (root_zone_moisture_first - Soil_moisture_wilting_point)/ (SM_stress_trigger + 0.02 - Soil_moisture_wilting_point)
    norm_trigger[norm_trigger > 1.0] = 1.0
    # moisture stress biomass:
    moisture_stress_biomass_first = norm_trigger - (np.sin(2 * np.pi * norm_trigger)) / (2 * np.pi)
    # Floor at half of FPAR, then clamp to [0, 1]
    moisture_stress_biomass_first=np.where(moisture_stress_biomass_first<0.5*FPAR,0.5*FPAR,moisture_stress_biomass_first)
    moisture_stress_biomass_first[moisture_stress_biomass_first <= 0.0] = 0
    moisture_stress_biomass_first[moisture_stress_biomass_first > 1.0] = 1.0
    # Soil moisture in the top layer - Recalculated ??
    top_soil_moisture = ((total_soil_moisture - root_zone_moisture_first * vegt_cover) / (1.0 - vegt_cover))
    # Theta_sat_top may be a map or a scalar; the except branch handles scalars
    try:
        top_soil_moisture[top_soil_moisture > Theta_sat_top] = Theta_sat_top [top_soil_moisture > Theta_sat_top]
    except:
        top_soil_moisture[top_soil_moisture > Theta_sat_top] = Theta_sat_top
    top_soil_moisture[np.logical_or(water_mask == 1.0, QC_Map == 1.0)] = 1.0
    return(SM_stress_trigger, total_soil_moisture, root_zone_moisture_first, moisture_stress_biomass_first,top_soil_moisture,RZ_SM_NAN)
#------------------------------------------------------------------------------
def Calc_Bulk_surface_resistance(sl_es_24,Rn_24,Refl_rad_water,air_dens,esat_24,eact_24,rah_pm_act,ETA_24,Lhv,Psychro_c):
"""
Function to calculate the bulk surface resistance
"""
# Bulk surface resistance (s/m):
bulk_surf_resis_24 = ((((sl_es_24 * (Rn_24 - Refl_rad_water) + air_dens *
1004 * (esat_24 - eact_24) / rah_pm_act) / (ETA_24 * Lhv / 86400) -
sl_es_24) / Psychro_c - 1.0) * rah_pm_act)
bulk_surf_resis_24[ETA_24 <= 0.0] = 100000.0
bulk_surf_resis_24 = bulk_surf_resis_24.clip(0.0, 100000.0)
return(bulk_surf_resis_24)
#------------------------------------------------------------------------------
def Calc_ETact(esat_24, eact_24, EF_inst, Rn_24, Refl_rad_water, Lhv, Image_Type):
    """
    Daily actual evapotranspiration (mm/d, clipped to [0, 15]) and the
    advection factor AF.  For Image_Type == 2 no advection correction is
    applied (AF = 1 everywhere).
    """
    if Image_Type == 2:
        advection = np.ones(Rn_24.shape)
    else:
        # Advection enhancement grows with the vapour pressure deficit
        vpd_term = np.exp((esat_24 - eact_24) * 0.08) - 1.0
        advection = 1 + 0.985 * vpd_term * EF_inst
    # Convert the available daily energy to an equivalent water depth (mm/d)
    net_energy = Rn_24 - Refl_rad_water
    daily_et = EF_inst * advection * net_energy / (Lhv * 1000) * 86400000
    daily_et = daily_et.clip(0, 15.0)
    return(daily_et, advection)
#------------------------------------------------------------------------------
def Calc_instantaneous_ET_fraction(LE_inst,rn_inst,g_inst):
    """
    Evaporative fraction at satellite overpass: latent heat flux divided
    by the available energy (Rn - G), clipped to [0, 1.8].  A negative
    latent heat flux (condensation) maps to 0.
    """
    available_energy = rn_inst - g_inst
    fraction = np.clip(LE_inst / available_energy, 0.0, 1.8)
    fraction = np.where(LE_inst < 0, 0.0, fraction)
    return(fraction)
#------------------------------------------------------------------------------
def Calc_Ref_Pot_ET(LAI,Surface_temp,sl_es_24,Rn_ref,air_dens,esat_24,eact_24,rah_grass,Psychro_c,Rn_24,Refl_rad_water,rah_pm_pot,rl):
    """
    Penman-Monteith reference ET (grass) and potential ET for the actual
    surface.

    Returns (ETpot_24, ETref_24, Lhv, rs_min):
      ETpot_24 - potential ET (mm/d, capped at 15)
      ETref_24 - reference ET for clipped grass (mm/d)
      Lhv      - latent heat of vaporization (J/kg)
      rs_min   - minimum bulk surface resistance (s/m)
    """
    # Effective LAI (Allen et al., 2006) sets the minimum canopy resistance
    effective_lai = LAI / (0.3 * LAI + 1.2)
    rs_min = rl / effective_lai
    # Latent heat of vaporization (J/kg) from surface temperature in Celsius
    Lhv = (2.501 - 2.361e-3 * (Surface_temp - 273.15)) * 1E6
    # Aerodynamic vapour transport term shared by both PM equations
    vapour_term = air_dens * 1004 * (esat_24 - eact_24)
    # Reference (grass) latent heat flux, FAO-56 combination equation (J/s/m2)
    LET_ref_24 = ((sl_es_24 * Rn_ref + vapour_term / rah_grass) /
                  (sl_es_24 + Psychro_c * (1 + 70.0/rah_grass)))
    # Reference evapotranspiration (mm/d)
    ETref_24 = LET_ref_24 / (Lhv * 1000) * 86400000
    # Potential latent heat flux for the actual surface (J/s/m2)
    LETpot_24 = ((sl_es_24 * (Rn_24 - Refl_rad_water) + vapour_term / rah_pm_pot) /
                 (sl_es_24 + Psychro_c * (1 + rs_min/rah_pm_pot)))
    # Potential evapotranspiration (mm/d), capped at 15
    ETpot_24 = LETpot_24 / (Lhv * 1000) * 86400000
    ETpot_24[ETpot_24 > 15.0] = 15.0
    return(ETpot_24,ETref_24,Lhv,rs_min)
#------------------------------------------------------------------------------
def Calc_Rn_Ref(shape_lsc,water_mask,Rn_24,Ra_mountain_24,Transm_24,Rnl_24_FAO,Wind_24):
    """
    Net radiation for the grass reference surface, reflected radiation over
    water, and the grass aerodynamic resistance.

    shape_lsc is accepted for interface compatibility (the mask shapes are
    taken from the input arrays themselves).
    """
    # Over water a fixed fraction of the daily net radiation is treated as
    # reflected; elsewhere it is zero
    water_flux_ratio = 0.1
    Refl_rad_water = np.where(water_mask != 0.0, water_flux_ratio * Rn_24, 0.0)
    # FAO-56 aerodynamic resistance for 0.12 m clipped grass (s/m)
    rah_grass = 208.0 / Wind_24
    print('rah_grass=', '%0.3f (s/m)' % np.nanmean(rah_grass))
    # Net radiation for grass, eq 40 FAO-56 (grass albedo 0.23)
    Rn_ref = Ra_mountain_24 * Transm_24 * (1 - 0.23) - Rnl_24_FAO
    return(Rn_ref, Refl_rad_water,rah_grass)
#------------------------------------------------------------------------------
def Iterate_Friction_Velocity(k_vk,u_200,Surf_roughness,g_inst,rn_inst, ts_dem, ts_dem_hot, ts_dem_cold,air_dens, Surface_temp,L,psi,psi_m200,psi_m200_stable,QC_Map, hot_pixels, slope):
    """
    One iteration of the stability-corrected friction velocity and
    aerodynamic resistance; feeds the result back through sensible_heat so
    the outputs can serve as the next iteration's inputs.
    """
    # Stability-corrected friction velocity for both regimes
    ustar_unstable = k_vk * u_200 / (np.log(200.0 / Surf_roughness) - psi_m200)
    ustar_stable = k_vk * u_200 / (np.log(200.0 / Surf_roughness) - psi_m200_stable)
    # Pick the regime per pixel from the Monin-Obukhov length sign,
    # then floor at 0.02 m/s
    ustar_corr = np.maximum(np.where(L > 0.0, ustar_stable, ustar_unstable), 0.02)
    # Aerodynamic resistance between 0.01 m and 2 m for both regimes
    rah_unstable = (np.log(2.0/0.01) - psi) / (k_vk * ustar_corr)
    rah_stable = (np.log(2.0/0.01) - 0.0) / (k_vk * ustar_corr)
    rah_corr = np.where(L > 0.0, rah_stable, rah_unstable)
    # Re-solve the sensible heat flux with the corrected transport terms
    L_corr, psi_m200_corr_stable, psi_corr, psi_m200_corr, h, dT, slope_dt, offset_dt = sensible_heat(
        rah_corr, ustar_corr, rn_inst, g_inst, ts_dem, ts_dem_hot, ts_dem_cold,
        air_dens, Surface_temp, k_vk, QC_Map, hot_pixels, slope)
    return(L_corr,psi_corr,psi_m200_corr,psi_m200_corr_stable,h,ustar_corr,rah_corr,dT,slope_dt, offset_dt)
#------------------------------------------------------------------------------
def Calc_Wind_Speed_Friction(h_obst,Wind_inst,zx,LAI,NDVI,Surf_albedo,water_mask,surf_roughness_equation_used):
    """
    Surface roughness (via the Raupach or NDVI model), wind speed at the
    200 m blending height, and the first estimate of the friction velocity.
    """
    # constants
    k_vk = 0.41             # Von Karman constant
    h_grass = 0.12          # Grass height (m)
    cd = 53                 # Free parameter for displacement height, default = 20.6
    # Two candidate roughness models; the flag selects which one is used
    zom_Raupach = Raupach_Model(h_obst, cd, LAI)
    zom_NDVI = NDVI_Model(NDVI, Surf_albedo, water_mask)
    Surf_roughness = zom_NDVI if surf_roughness_equation_used == 1 else zom_Raupach
    zom_grass = 0.123 * h_grass
    # Friction velocity over grass (m/s) at measurement height zx
    ustar_grass = k_vk * Wind_inst / np.log(zx / zom_grass)
    print('u*_grass = ', '%0.3f (m/s)' % np.mean(ustar_grass))
    # Extrapolate to the 200 m blending height with the log wind profile
    u_200 = ustar_grass * np.log(200 / zom_grass) / k_vk
    print('Wind speed at the blending height, u200 =', '%0.3f (m/s)' % np.mean(u_200))
    # Friction velocity over the actual surface (m/s)
    ustar_1 = k_vk * u_200 / np.log(200 / Surf_roughness)
    return(Surf_roughness,u_200,ustar_1)
#------------------------------------------------------------------------------
def Raupach_Model(h_obst,cd,LAI):
    """
    Surface roughness length (m) from obstacle height and LAI following
    Raupach (1994).
    """
    # constants
    cw = 2.0              # roughness-sublayer influence coefficient
    shelter_lai = 2.5     # below this LAI sparse canopies stop sheltering
    # Vegetation influence function
    psi = np.log(cw) - 1 + np.power(2.0, -1)
    # u*/u ratio, floored for sparse canopies
    ustar_ratio = np.power((0.003+0.3*LAI/2), 0.5)
    ustar_ratio[LAI < shelter_lai] = 0.3
    # 1 - d/h: fraction of canopy height above the displacement height
    sqrt_cd_lai = np.power((cd*LAI), 0.5)
    one_minus_d_h = (1-np.exp(-1*sqrt_cd_lai))/sqrt_cd_lai
    # Roughness relative to canopy height, scaled to metres
    zom_relative = one_minus_d_h * np.exp(-0.41/ustar_ratio-psi)
    return(zom_relative * h_obst)
#------------------------------------------------------------------------------
def NDVI_Model(NDVI,Surf_albedo,water_mask):
    """
    Empirical surface roughness (m) from NDVI and albedo.  Water pixels are
    fixed at 1 mm and the result is capped at 10 m.
    """
    roughness = np.exp(1.096 * NDVI / Surf_albedo - 5.307)
    roughness = np.where(water_mask == 1.0, 0.001, roughness)
    roughness = np.where(roughness > 10.0, 10.0, roughness)
    return(roughness)
#------------------------------------------------------------------------------
def Correct_Surface_Temp(Surface_temp,Temp_lapse_rate,DEM_resh,Pair,dr,Transm_corr,cos_zn,Sun_elevation,deg2rad,ClipLandsat):
"""
Function to correct the surface temperature based on the DEM map
"""
#constants:
Gsc = 1367 # Solar constant (W / m2)
cos_zenith_flat = np.cos((90 - Sun_elevation) * deg2rad)
Temp_corr = Surface_temp + Temp_lapse_rate * DEM_resh # rescale everything to sea level
Temp_corr[Surface_temp == 350.0] = 0.0
air_dens = 1000 * Pair / (1.01 * Surface_temp * 287)
#
ts_dem = (Temp_corr + (Gsc * dr * Transm_corr * cos_zn -
Gsc * dr * Transm_corr * cos_zenith_flat) / (air_dens * 1004 * 0.050))
#(Temp_corr - (Gsc * dr * Transm_corr * cos_zn -
# Gsc * dr * Transm_corr * cos_zenith_flat) / (air_dens * 1004 * 0.050))
ts_dem[ClipLandsat==1]=np.nan
ts_dem[ts_dem==0]=np.nan
ts_dem[ts_dem<273]=np.nan
ts_dem[ts_dem>350]=np.nan
return(ts_dem,air_dens,Temp_corr)
#------------------------------------------------------------------------------
def Calc_Hot_Pixels(ts_dem,QC_Map, water_mask, NDVI,NDVIhot_low,NDVIhot_high,Hot_Pixel_Constant, ts_dem_cold):
"""
Function to calculates the hot pixels based on the surface temperature and NDVI
"""
for_hot = np.copy(ts_dem)
for_hot[NDVI <= NDVIhot_low] = 0.0
for_hot[NDVI >= NDVIhot_high] = 0.0
for_hot[np.logical_or(water_mask != 0.0, QC_Map != 0.0)] = 0.0
hot_pixels = np.copy(for_hot)
hot_pixels[for_hot < ts_dem_cold] = np.nan
ts_dem_hot_max = np.nanmax(hot_pixels) # Max
ts_dem_hot_mean = np.nanmean(hot_pixels) # Mean
ts_dem_hot_std = np.nanstd(hot_pixels) # Standard deviation
#ts_dem_hot = ts_dem_hot_max - 0.25 * ts_dem_hot_std
#ts_dem_hot = (ts_dem_hot_max + ts_dem_hot_mean)/2
ts_dem_hot=ts_dem_hot_mean + Hot_Pixel_Constant * ts_dem_hot_std
print('hot : max= %0.3f (Kelvin)' % ts_dem_hot_max, ', sd= %0.3f (Kelvin)' % ts_dem_hot_std, \
', mean= %0.3f (Kelvin)' % ts_dem_hot_mean, ', value= %0.3f (Kelvin)' % ts_dem_hot)
return(ts_dem_hot,hot_pixels)
#------------------------------------------------------------------------------
def Calc_Cold_Pixels(ts_dem,water_mask,QC_Map,ts_dem_cold_veg,Cold_Pixel_Constant):
    """
    Function to calculates the the cold pixels based on the surface temperature

    Selects good-quality water pixels with a plausible temperature
    (278-320 K) and sets the cold-pixel temperature to
    mean + Cold_Pixel_Constant * std, falling back to the vegetation-based
    value when the water statistics are invalid, and never exceeding the
    vegetation-based value.

    Returns (ts_dem_cold, cold_pixels, ts_dem_cold_mean).
    """
    # Candidates: water pixels with good quality
    for_cold = np.copy(ts_dem)
    for_cold[water_mask != 1.0] = 0.0
    for_cold[QC_Map != 0] = 0.0
    cold_pixels = np.copy(for_cold)
    # Physically implausible water temperatures are excluded
    cold_pixels[for_cold < 278.0] = np.nan
    cold_pixels[for_cold > 320.0] = np.nan
    # cold_pixels[for_cold < 285.0] = 285.0
    ts_dem_cold_std = np.nanstd(cold_pixels)    # Standard deviation
    ts_dem_cold_min = np.nanmin(cold_pixels)    # Min
    ts_dem_cold_mean = np.nanmean(cold_pixels)  # Mean
    # If average temperature is below zero or nan then use the vegetation
    # cold pixel.  (BUGFIX: the original used two separate `if` statements
    # with the `else` bound only to the NaN check, so a mean <= 0 that was
    # not NaN silently overwrote the vegetation fallback with the
    # water-based value, contradicting the comment above.)
    if ts_dem_cold_mean <= 0.0 or np.isnan(ts_dem_cold_mean):
        ts_dem_cold = ts_dem_cold_veg + Cold_Pixel_Constant * ts_dem_cold_std
    else:
        ts_dem_cold = ts_dem_cold_mean + Cold_Pixel_Constant * ts_dem_cold_std
    # Never warmer than the vegetation-based cold pixel
    if ts_dem_cold > ts_dem_cold_veg:
        ts_dem_cold = ts_dem_cold_veg
    if np.isnan(ts_dem_cold):
        ts_dem_cold = ts_dem_cold_veg
    print('cold water: min=%0.3f (Kelvin)' %ts_dem_cold_min , ', sd= %0.3f (Kelvin)' % ts_dem_cold_std, \
          ', mean= %0.3f (Kelvin)' % ts_dem_cold_mean, ', value= %0.3f (Kelvin)' % ts_dem_cold)
    return(ts_dem_cold,cold_pixels,ts_dem_cold_mean)
#------------------------------------------------------------------------------
def Calc_Cold_Pixels_Veg(NDVI,NDVI_max,NDVI_std,QC_Map,ts_dem,Image_Type, Cold_Pixel_Constant):
    """
    Function to calculates the the cold pixels based on vegetation

    Takes the densely vegetated, good-quality pixels (NDVI above
    NDVI_max - 0.1 * NDVI_std) and returns
    mean + Cold_Pixel_Constant * std of their surface temperature.

    The formula is identical for every image type: the original branched
    identically on Image_Type 1/2/3 and would have raised NameError for
    any other value; Image_Type is kept only for interface compatibility.
    """
    # Densely vegetated, good-quality pixels; everything else becomes NaN
    cold_pixels_vegetation = np.copy(ts_dem)
    cold_pixels_vegetation[np.logical_or(NDVI <= (NDVI_max-0.1*NDVI_std),QC_Map != 0.0)] = 0.0
    cold_pixels_vegetation[cold_pixels_vegetation==0.0] = np.nan
    ts_dem_cold_std_veg = np.nanstd(cold_pixels_vegetation)
    ts_dem_cold_min_veg = np.nanmin(cold_pixels_vegetation)
    ts_dem_cold_mean_veg = np.nanmean(cold_pixels_vegetation)
    ts_dem_cold_veg = ts_dem_cold_mean_veg + Cold_Pixel_Constant * ts_dem_cold_std_veg
    print('cold vegetation: min=%0.3f (Kelvin)' %ts_dem_cold_min_veg , ', sd= %0.3f (Kelvin)' % ts_dem_cold_std_veg, \
          ', mean= %0.3f (Kelvin)' % ts_dem_cold_mean_veg, ', value= %0.3f (Kelvin)' % ts_dem_cold_veg)
    return(ts_dem_cold_veg)
#------------------------------------------------------------------------------
def Calc_Meteo(Rs_24,eact_24,Temp_24,Surf_albedo,dr,tir_emis,Surface_temp,water_mask,NDVI,Transm_24,SB_const,lw_in_inst,Rs_inst):
"""
Calculates the instantaneous Ground heat flux and solar radiation.
"""
# Net shortwave radiation (W/m2):
Rns_24 = Rs_24 * (1 - Surf_albedo)
# Net outgoing longwave radiation (W/m2):
Rnl_24_FAO = (SB_const * np.power(Temp_24 + 273.15, 4) * (0.34-0.14 *
np.power(eact_24, 0.5)) * (1.35 * Transm_24 / 0.8 - 0.35))
Rnl_24_Slob = 110 * Transm_24
print('Mean Daily Net longwave Radiation (Slob) = %0.3f (W/m2)' % np.nanmean(Rnl_24_Slob))
print('Mean Daily Net longwave Radiation (FAO) = %0.3f (W/m2)' % np.nanmean(Rnl_24_FAO))
# Net 24 hrs radiation (W/m2):
Rn_24_FAO = Rns_24 - Rnl_24_FAO # FAO equation
Rn_24_Slob = Rns_24 - Rnl_24_Slob # Slob equation
Rn_24 = (Rn_24_FAO + Rn_24_Slob) / 2 # Average
print('Mean Daily Net Radiation (Slob) = %0.3f (W/m2)' % np.nanmean(Rn_24_Slob))
print('Mean Daily Net Radiation (FAO) = %0.3f (W/m2)' % np.nanmean(Rn_24_FAO))
# Instantaneous outgoing longwave radiation:
lw_out_inst = tir_emis * SB_const * np.power(Surface_temp, 4)
# Instantaneous net radiation
rn_inst = (Rs_inst * (1 - Surf_albedo) + lw_in_inst - lw_out_inst -
(1 - tir_emis) * lw_in_inst)
# Instantaneous Soil heat flux
g_inst = np.where(water_mask != 0.0, 0.4 * rn_inst,
((Surface_temp - 273.15) * (0.0038 + 0.0074 * Surf_albedo) *
(1 - 0.978 * np.power(NDVI, 4))) * rn_inst)
return(Rn_24,rn_inst,g_inst,Rnl_24_FAO)
#------------------------------------------------------------------------------
def Calc_surface_water_temp(Temp_inst,Landsat_nr,Lmax,Lmin,therm_data,b10_emissivity,k1_c,k2_c,eact,shape_lsc,water_mask_temp,Bands_thermal,Rp,tau_sky,surf_temp_offset,Image_Type):
    """
    Calculates the surface temperature and create a water mask

    Converts the thermal-band DN values to radiance for the given Landsat
    sensor, derives surface temperature (clipped to 230-360 K) and builds
    a cloud mask from the water-temperature statistics.

    NOTE(review): Image_Type is accepted but unused here, and Surface_temp
    is unbound if Landsat_nr is not 5/7/8 (or Bands_thermal not 1/2 for
    Landsat 8) -- callers must pass a supported combination.

    Returns (Surface_temp, cloud_mask).
    """
    # Spectral radiance for termal
    if Landsat_nr == 8:
        if Bands_thermal == 1:
            # Single thermal band (B10): radiometric calibration, then
            # atmospheric/emissivity correction inside Get_Thermal
            k1 = k1_c[0]
            k2 = k2_c[0]
            L_lambda_b10 = (Lmax[-1] - Lmin[-1]) / (65535-1) * therm_data[:, :, 0] + Lmin[-1]
            # Get Temperature
            Surface_temp = Get_Thermal(L_lambda_b10,Rp,Temp_inst,tau_sky,b10_emissivity,k1,k2)
        elif Bands_thermal == 2:
            # Split-window approach combining B10 and B11
            L_lambda_b10 = (Lmax[-2] - Lmin[-2]) / (65535-1) * therm_data[:, :, 0] + Lmin[-2]
            L_lambda_b11 = (Lmax[-1] - Lmin[-1]) / (65535-1) * therm_data[:, :, 1] + Lmin[-1]
            # Brightness temperature
            # From Band 10:
            Temp_TOA_10 = (k2_c[0] / np.log(k1_c[0] / L_lambda_b10 + 1.0))
            # From Band 11:
            Temp_TOA_11 = (k2_c[1] / np.log(k1_c[1] / L_lambda_b11 + 1.0))
            # Combined:
            Surface_temp = (Temp_TOA_10 + 1.378 * (Temp_TOA_10 - Temp_TOA_11) +
                            0.183 * np.power(Temp_TOA_10 - Temp_TOA_11, 2) - 0.268 +
                            (54.30 - 2.238 * eact) * (1 - b10_emissivity))
    elif Landsat_nr == 7:
        # Landsat 7 ETM+ band 6 calibration constants
        k1=666.09
        k2=1282.71
        L_lambda_b6 = (Lmax[-1] - Lmin[-1]) / (256-1) * therm_data[:, :, 0] + Lmin[-1]
        # Brightness temperature - From Band 6:
        Surface_temp = Get_Thermal(L_lambda_b6,Rp,Temp_inst,tau_sky,b10_emissivity,k1,k2)
    elif Landsat_nr == 5:
        # Landsat 5 TM band 6 calibration constants
        k1=607.76
        k2=1260.56
        L_lambda_b6 = ((Lmax[-1] - Lmin[-1]) / (256-1) * therm_data[:, :, 0] +
                       Lmin[-1])
        # Brightness temperature - From Band 6:
        Surface_temp = Get_Thermal(L_lambda_b6,Rp,Temp_inst,tau_sky,b10_emissivity,k1,k2)
    # Surface temperature clipped to a physically plausible range
    Surface_temp = Surface_temp.clip(230.0, 360.0)
    # Cloud mask: statistics of the temperature over flagged water pixels
    temp_water = np.zeros((shape_lsc[1], shape_lsc[0]))
    temp_water = np.copy(Surface_temp)
    temp_water[water_mask_temp == 0.0] = np.nan
    temp_water_sd = np.nanstd(temp_water)     # Standard deviation
    temp_water_mean = np.nanmean(temp_water)  # Mean
    print('Mean water temperature = ', '%0.3f (Kelvin)' % temp_water_mean)
    print('SD water temperature = ', '%0.3f (Kelvin)' % temp_water_sd)
    cloud_mask = np.zeros((shape_lsc[1], shape_lsc[0]))
    # Pixels clearly colder than the water surface (or than 290 K) are clouds
    cloud_mask[Surface_temp < np.minimum((temp_water_mean - 1.0 * temp_water_sd -
               surf_temp_offset),290)] = 1.0
    return(Surface_temp, cloud_mask)
#------------------------------------------------------------------------------
def Get_Thermal(lambda_b10,Rp,Temp_inst,tau_sky,TIR_Emissivity,k1,k2):
# Narrow band downward thermal radiation from clear sky, rsky (W/m2/sr/µm)
rsky = (1.807E-10 * np.power(Temp_inst + 273.15, 4) * (1 - 0.26 *
np.exp(-7.77E-4 * np.power((-Temp_inst), -2))))
print('Rsky = ', '%0.3f (W/m2/sr/µm)' % np.nanmean(rsky))
# Corrected thermal radiance from the surface, Wukelikc et al. (1989):
correc_lambda_b10 = ((lambda_b10 - Rp) / tau_sky -
(1.0 - TIR_Emissivity) * rsky)
# Brightness temperature - From Band 10:
Temp_TOA = (k2 / np.log(TIR_Emissivity * k1 /
correc_lambda_b10 + 1.0))
return(Temp_TOA)
#------------------------------------------------------------------------------
def Calc_vegt_para(NDVI,water_mask_temp,shape_lsc):
"""
Calculates the Fraction of PAR, Thermal infrared emissivity, Nitrogen, Vegetation Cover, LAI, b10_emissivity
"""
# Fraction of PAR absorbed by the vegetation canopy (FPAR):
FPAR = -0.161 + 1.257 * NDVI
FPAR[NDVI < 0.125] = 0.0
# Termal infrared emissivity
tir_emis = 1.009 + 0.047 * np.log(NDVI)
tir_emis[np.logical_or(water_mask_temp == 1.0, water_mask_temp == 2.0)] = 1.0
tir_emis[np.logical_and(NDVI < 0.125, water_mask_temp == 0.0)] = 0.92
# Vegetation Index - Regression model from Bagheri et al. (2013)
VI = 38.764 * np.square(NDVI) - 24.605 * NDVI + 5.8103
# Nitrogen computation
Nitrogen = np.copy(VI)
Nitrogen[VI <= 0.0] = 0.0
Nitrogen[NDVI <= 0.0] = 0.0
# Vegetation cover:
vegt_cover = 1 - np.power((0.8 - NDVI)/(0.8 - 0.125), 0.7)
vegt_cover[NDVI < 0.125] = 0.0
vegt_cover[NDVI > 0.8] = 0.99
# Leaf Area Index (LAI)
LAI_1 = np.log(-(vegt_cover - 1)) / -0.45
LAI_1[LAI_1 > 8] = 8.0
LAI_2 = (9.519 * np.power(NDVI, 3) + 0.104 * np.power(NDVI, 2) +
1.236 * NDVI - 0.257)
LAI = (LAI_1 + LAI_2) / 2.0 # Average LAI
LAI[LAI < 0.001] = 0.001
b10_emissivity = np.zeros((shape_lsc[1], shape_lsc[0]))
b10_emissivity = np.where(LAI <= 3.0, 0.95 + 0.01 * LAI, 0.98)
b10_emissivity[water_mask_temp != 0.0] = 1.0
return(FPAR,tir_emis,Nitrogen,vegt_cover,LAI,b10_emissivity)
#------------------------------------------------------------------------------
def Water_Mask(shape_lsc,Reflect):
    """
    Flag pixels as water (1.0) where reflectance drops from the red to the
    NIR band and from band 5 to band 2 -- a simple spectral water test.
    """
    water = np.zeros((shape_lsc[1], shape_lsc[0]))
    is_water = np.logical_and(Reflect[:, :, 3] < Reflect[:, :, 2],
                              Reflect[:, :, 4] < Reflect[:, :, 1])
    water[is_water] = 1.0
    return(water)
#------------------------------------------------------------------------------
def Calc_albedo(Reflect,path_radiance,Apparent_atmosf_transm):
    """
    Broadband surface albedo as a weighted sum of the six reflective
    bands, corrected for path radiance and apparent atmospheric
    transmittance; clipped to [0, 0.6].
    """
    band_weights = (0.254, 0.149, 0.147, 0.311, 0.103, 0.036)
    # Accumulate the weighted bands in order (band 0 first)
    weighted_sum = band_weights[0] * Reflect[:, :, 0]
    for band in range(1, 6):
        weighted_sum = weighted_sum + band_weights[band] * Reflect[:, :, band]
    # Better tsw instead of Apparent_atmosf_transm ??
    Surf_albedo = (weighted_sum - path_radiance) / np.power(Apparent_atmosf_transm, 2)
    return(Surf_albedo.clip(0.0, 0.6))
#------------------------------------------------------------------------------
def Calc_NDVI(Reflect):
    """
    Compute the Normalized Difference Vegetation Index (NDVI) from the
    reflectance stack: (band[3] - band[2]) / (band[3] + band[2]),
    using 0-based band indices.
    """
    numerator = Reflect[:, :, 3] - Reflect[:, :, 2]
    denominator = Reflect[:, :, 3] + Reflect[:, :, 2]
    return(numerator / denominator)
#------------------------------------------------------------------------------
def CalculateSnowWaterMask(NDVI,shape_lsc,water_mask_temp,Surface_temp):
    '''
    Split the temporary water mask into a snow mask and a water mask using
    the surface temperature, and derive the NDVI statistics that drive the
    cold-pixel selection.

    Returns (snow_mask, water_mask, ts_moist_veg_min, NDVI_max, NDVI_std).
    '''
    # NDVI statistics, ignoring zero-valued (masked-out) pixels
    masked_ndvi = np.float32(np.copy(NDVI))
    masked_ndvi[NDVI == 0] = np.nan
    NDVI_std = np.nanstd(masked_ndvi)
    NDVI_max = np.nanmax(masked_ndvi)
    NDVI_treshold_cold_pixels = NDVI_max - 0.1 * NDVI_std
    print('NDVI treshold for cold pixels = ', '%0.3f' % NDVI_treshold_cold_pixels)
    # Minimum surface temperature over densely vegetated (cold) pixels
    ts_moist_veg_min = np.nanmin(Surface_temp[NDVI > NDVI_treshold_cold_pixels])
    is_flagged = water_mask_temp == 1
    # Snow: flagged pixels that are cold (<= 275 K) with NDVI >= 0.3
    snow_mask = np.zeros((shape_lsc[1], shape_lsc[0]))
    snow_mask[np.logical_and(np.logical_and(is_flagged, Surface_temp <= 275), NDVI >= 0.3)] = 1
    # Water: flagged pixels warm enough (> 273 K) to be liquid
    water_mask = np.zeros((shape_lsc[1], shape_lsc[0]))
    water_mask[np.logical_and(is_flagged, Surface_temp > 273)] = 1
    return(snow_mask,water_mask,ts_moist_veg_min, NDVI_max, NDVI_std)
#------------------------------------------------------------------------------
def Calc_Ra_Mountain(lon,DOY,hour,minutes,lon_proy,lat_proy,slope,aspect):
    """
    Calculates the extraterrestiral solar radiation by using the date, slope and aspect.

    Returns
    -------
    Ra_mountain_24 : 24-hour extraterrestrial radiation on the sloped surface (W/m2), capped at 600
    Ra_inst : instantaneous extraterrestrial radiation (W/m2)
    cos_zn : cosine of the solar incidence angle, clipped to [0.1, 1.0]
    dr : inverse relative Earth-Sun distance (-)
    phi : latitude per pixel (radians)
    delta : solar declination (radians)
    """
    # Constants
    deg2rad = np.pi / 180.0  # Factor to transform from degree to rad
    Min_cos_zn = 0.1  # Min value for cos zenith angle
    Max_cos_zn = 1.0  # Max value for cos zenith angle
    Gsc = 1367        # Solar constant (W / m2)
    # hour/minutes may be scalars or arrays; fall back to numpy conversion
    try:
        Loc_time = float(hour) + float(minutes)/60  # Local time (hours)
    except:
        Loc_time = np.float_(hour) + np.float_(minutes)/60  # Local time (hours)
    # Rounded difference of the local time from Greenwich (GMT) (hours),
    # taken from the longitude of the image centre pixel:
    offset_GTM = round(np.sign(lon[int(lon.shape[0]/2), int(lon.shape[1]/2)]) * lon[int(lon.shape[0]/2),int(lon.shape[1]/2)] * 24 / 360)
    print('  Local time: ', '%0.3f' % np.nanmean(Loc_time))
    print('  Difference of local time (LT) from Greenwich (GMT): ', offset_GTM)
    # 1. Calculation of extraterrestrial solar radiation for slope and aspect
    # Computation of Hour Angle (HRA = w)
    B = 360./365 * (DOY-81)  # (degrees)
    # Computation of cos(theta), where theta is the solar incidence angle
    # relative to the normal to the land surface
    delta=np.arcsin(np.sin(23.45*deg2rad)*np.sin(np.deg2rad(B)))  # Declination angle (radians)
    phi = lat_proy * deg2rad  # latitude of the pixel (radians)
    s = slope * deg2rad  # Surface slope (radians)
    gamma = (aspect-180) * deg2rad  # Surface aspect angle (radians)
    w=w_time(Loc_time, lon_proy, DOY)  # Hour angle (radians)
    a,b,c = Constants(delta,s,gamma,phi)
    cos_zn= AngleSlope(a,b,c,w)
    cos_zn = cos_zn.clip(Min_cos_zn, Max_cos_zn)
    print('Average Cos Zenith Angle: ', '%0.3f (Radians)' % np.nanmean(cos_zn))
    # NOTE(review): bare cos/pi rely on a module-level star import (math) —
    # confirm it exists at the top of the file.
    dr = 1 + 0.033 * cos(DOY*2*pi/365)  # Inverse relative distance Earth-Sun
    # Instant. extraterrestrial solar radiation (W/m2), Allen et al.(2006):
    Ra_inst = Gsc * cos_zn * dr
    # 24-hours extraterrestrial radiation
    # 1.) determine if there are one or two periods of sun
    # 2.) calculate the 24-hours extraterrestrial radiation if there are two periods of sun
    # 3.) calculate the 24-hours extraterrestrial radiation if there is one period of sun
    #1.) determine amount of sun periods
    Ra_24 = np.zeros(np.shape(lat_proy))*np.nan
    constant=Gsc*dr/(2*np.pi)
    TwoPeriod= TwoPeriods(delta,s,phi)  # all input in radians
    #2.) calculate the 24-hours extraterrestrial radiation (2 periods)
    ID = np.where(np.ravel(TwoPeriod==True))
    Ra_24.flat[ID]=TwoPeriodSun(constant,delta,s.flat[ID],gamma.flat[ID],phi.flat[ID])
    #3.) calculate the 24-hours extraterrestrial radiation (1 period)
    ID = np.where(np.ravel(TwoPeriod==False))
    Ra_24.flat[ID]=OnePeriodSun(constant,delta,s.flat[ID],gamma.flat[ID],phi.flat[ID])
    # Horizontal surface
    ws = np.arccos(-np.tan(delta) * np.tan(phi))  # Sunrise/sunset time angle
    # Extraterrestial radiation for a horizontal surface for 24-h period:
    Ra_hor_24 = (Gsc * dr / np.pi * (np.sin(delta) * np.sin(phi) * ws + np.cos(delta) * np.cos(phi) * np.sin(ws)))
    # cos_theta_flat = (np.sin(delta) * np.sin(phi) + np.cos(delta) * np.cos(phi) * np.cos(w))
    # Mountain radiation: use the slope-corrected value only where it is
    # significant relative to the horizontal radiation
    Ra_mountain_24 = np.where(Ra_24 > Min_cos_zn * Ra_hor_24, Ra_24 / np.cos(s),
                              Ra_hor_24)
    Ra_mountain_24[Ra_mountain_24 > 600.0] = 600.0
    return(Ra_mountain_24,Ra_inst,cos_zn,dr,phi,delta)
#------------------------------------------------------------------------------
def OnePeriodSun(constant,delta,s,gamma,phi):
    '''
    Based on Richard G. Allen 2006.
    24-hour extraterrestrial radiation for pixels with a single sunlit
    period: integrate from the slope-corrected sunrise to sunset angle.
    '''
    rise_angle, set_angle = SunHours(delta, s, gamma, phi)
    return(IntegrateSlope(constant, rise_angle, set_angle, delta, s, gamma, phi))
#------------------------------------------------------------------------------
def TwoPeriodSun(constant,delta,s,gamma,phi):
    '''
    Based on Richard G. Allen 2006
    Calculate the 24-hours extraterrestrial radiation when there are two sun periods.

    The day is split at the slope sunrise/sunset candidates (B1/B2); where a
    genuine shadow gap exists the two sunlit intervals [A1,B2] and [B1,A2]
    are integrated separately, otherwise the whole interval [A1,A2] is used.
    '''
    # A1/A2: sunrise/sunset for a single-period day; a,b,c: cos(zenith) coefficients
    A1, A2 = SunHours(delta,s,gamma,phi)
    a,b,c = Constants(delta,s,gamma,phi)
    riseSlope, setSlope = BoundsSlope(a,b,c)
    B1 = np.maximum(riseSlope,setSlope)
    B2 = np.minimum(riseSlope,setSlope)
    Angle_B1 = AngleSlope(a,b,c,B1)
    Angle_B2 = AngleSlope(a,b,c,B2)
    # Reflect candidates that are not actual zero-crossings of cos(zenith)
    B1[abs(Angle_B1) > 0.001] = np.pi - B1[abs(Angle_B1) > 0.001]
    B2[abs(Angle_B2) > 0.001] = -np.pi - B2[abs(Angle_B2) > 0.001]
    # Check if two periods really exist
    ID = np.ravel_multi_index(np.where(np.logical_and(B2 >= A1, B1 >= A2) == True),a.shape)
    Val = IntegrateSlope(constant,B2.flat[ID],B1.flat[ID],delta,s.flat[ID],gamma.flat[ID],phi.flat[ID])
    # Keep only pixels where the middle interval contributes negatively (shadow gap)
    ID = ID[Val < 0]
    # Finally calculate resulting values
    Vals = np.zeros(B1.shape)
    Vals.flat[ID] = (IntegrateSlope(constant,A1.flat[ID],B2.flat[ID],delta,s.flat[ID],gamma.flat[ID],phi.flat[ID]) +
                     IntegrateSlope(constant,B1.flat[ID],A2.flat[ID],delta,s.flat[ID],gamma.flat[ID],phi.flat[ID]))
    # Remaining pixels (no real gap): integrate the single interval [A1, A2]
    ID = np.ravel_multi_index(np.where(Vals == 0),a.shape)
    Vals.flat[ID] = IntegrateSlope(constant,A1.flat[ID],A2.flat[ID],delta,s.flat[ID],gamma.flat[ID],phi.flat[ID])
    return(Vals)
#------------------------------------------------------------------------------
def IntegrateSlope(constant,sunrise,sunset,delta,s,gamma,phi):
    '''
    Based on Richard G. Allen 2006 equation 5
    Calculate the 24 hours extraterrestrial radiation by analytically
    integrating cos(theta) between the sunrise and sunset hour angles.
    '''
    # correct the sunset and sunrise angels for days that have no sunset or no sunrise
    SunOrNoSun = np.logical_or(((np.abs(delta + phi)) > (np.pi/2)),((np.abs(delta - phi)) > (np.pi/2)))
    integral=np.zeros(s.shape)
    ID = np.where(np.ravel(SunOrNoSun==True))
    # No sunset: polar-day pixels integrate over the full [-pi, pi] range.
    # NOTE(review): this `if` applies a scalar truth test to the array
    # phi.flat[ID]; it only works when ID selects at most one pixel (or the
    # condition is uniform) — confirm intended behaviour for mixed scenes.
    if abs(delta+phi.flat[ID])>(np.pi/2):
        sunset1=np.pi
        sunrise1=-np.pi
        integral.flat[ID] = constant * (np.sin(delta)*np.sin(phi)*np.cos(s)*(sunset1-sunrise1)
            - np.sin(delta)*np.cos(phi)*np.sin(s)*np.cos(gamma)*(sunset1-sunrise1)
            + np.cos(delta)*np.cos(phi)*np.cos(s)*(np.sin(sunset1)-np.sin(sunrise1))
            + np.cos(delta)*np.sin(phi)*np.sin(s)*np.cos(gamma)*(np.sin(sunset1)-np.sin(sunrise1))
            - np.cos(delta)*np.sin(s)*np.sin(gamma)*(np.cos(sunset1)-np.cos(sunrise1)))
    # No sunrise: polar-night pixels get a zero-width integration interval
    elif np.abs(delta-phi.flat[ID])>(np.pi/2):
        integral.flat[ID]=constant * (np.sin(delta)*np.sin(phi)*np.cos(s)*(0)
            - np.sin(delta)*np.cos(phi)*np.sin(s)*np.cos(gamma)*(0)
            + np.cos(delta)*np.cos(phi)*np.cos(s)*(np.sin(0)-np.sin(0))
            + np.cos(delta)*np.sin(phi)*np.sin(s)*np.cos(gamma)*(np.sin(0)-np.sin(0))
            - np.cos(delta)*np.sin(s)*np.sin(gamma)*(np.cos(0)-np.cos(0)))
    # Regular pixels: integrate between the supplied sunrise and sunset angles
    ID = np.where(np.ravel(SunOrNoSun==False))
    integral.flat[ID] = constant * (np.sin(delta)*np.sin(phi)*np.cos(s)*(sunset-sunrise)
        - np.sin(delta)*np.cos(phi)*np.sin(s)*np.cos(gamma)*(sunset-sunrise)
        + np.cos(delta)*np.cos(phi)*np.cos(s)*(np.sin(sunset)-np.sin(sunrise))
        + np.cos(delta)*np.sin(phi)*np.sin(s)*np.cos(gamma)*(np.sin(sunset)-np.sin(sunrise))
        - np.cos(delta)*np.sin(s)*np.sin(gamma)*(np.cos(sunset)-np.cos(sunrise)))
    return(integral)
#------------------------------------------------------------------------------
def TwoPeriods(delta,s,phi):
    '''
    Based on Richard G. Allen 2006.
    Boolean map that is True where the surface slope is steep enough to
    produce two separate sunlit periods during the day.
    '''
    # Note: the threshold is ones*sin(phi)*cos(delta) + cos(phi)*sin(delta)
    # (the broadcasting multiply binds only to the first product term).
    threshold = np.ones(s.shape)*np.sin(phi)*np.cos(delta) + np.cos(phi)*np.sin(delta)
    return(np.sin(s) > threshold)
#------------------------------------------------------------------------------
def SunHours(delta,slope,slopedir,lat):
    """
    Determine the sunrise and sunset hour angles (radians) per pixel for a
    single sunlight period, following Richard G. Allen 2006.

    Candidates from the slope geometry (BoundsSlope) are validated against
    the horizontal bounds (BoundsHorizontal); invalid pixels fall back to
    the horizontal sunrise/sunset.
    """
    # Define sun hours in case of one sunlight period
    a,b,c = Constants(delta,slope,slopedir,lat)
    riseSlope, setSlope = BoundsSlope(a,b,c)
    bound = BoundsHorizontal(delta,lat)
    Calculated = np.zeros(slope.shape, dtype = bool)
    RiseFinal = np.zeros(slope.shape)
    SetFinal = np.zeros(slope.shape)
    # First check sunrise is not nan
    # This means that their is either no sunrise (whole day night) or no sunset (whole day light)
    # For whole day light, use the horizontal sunrise and whole day night a zero..
    Angle4 = AngleSlope(a,b,c,-bound)
    RiseFinal[np.logical_and(np.isnan(riseSlope),Angle4 >= 0)] = -bound[np.logical_and(np.isnan(riseSlope),Angle4 >= 0)]
    Calculated[np.isnan(riseSlope)] = True
    # Step 1 > 4: accept the slope sunrise candidate where the sun is still
    # below the slope horizon at the horizontal sunrise
    Angle1 = AngleSlope(a,b,c,riseSlope)
    Angle2 = AngleSlope(a,b,c,-bound)
    ID = np.ravel_multi_index(np.where(np.logical_and(np.logical_and(Angle2 < Angle1+0.001 ,Angle1 < 0.001),Calculated == False) == True),a.shape)
    RiseFinal.flat[ID] = riseSlope.flat[ID]
    Calculated.flat[ID] = True
    # step 5 > 7: otherwise try the mirrored candidate -pi - riseSlope
    Angle3 = AngleSlope(a,b,c,-np.pi - riseSlope)
    ID = np.ravel_multi_index(np.where(np.logical_and(np.logical_and(-bound<(-np.pi-riseSlope),Angle3 <= 0.001),Calculated == False) == True),a.shape)
    RiseFinal.flat[ID] = -np.pi -riseSlope.flat[ID]
    Calculated.flat[ID] = True
    # For all other values we use the horizontal sunset if it is positive, otherwise keep a zero
    RiseFinal[Calculated == False] = -bound[Calculated == False]
    # Then check sunset is not nan or < 0 (same three-step cascade as sunrise)
    Calculated = np.zeros(slope.shape, dtype = bool)
    Angle4 = AngleSlope(a,b,c,bound)
    SetFinal[np.logical_and(np.isnan(setSlope),Angle4 >= 0)] = bound[np.logical_and(np.isnan(setSlope),Angle4 >= 0)]
    Calculated[np.isnan(setSlope)] = True
    # Step 1 > 4
    Angle1 = AngleSlope(a,b,c,setSlope)
    Angle2 = AngleSlope(a,b,c,bound)
    ID = np.ravel_multi_index(np.where(np.logical_and(np.logical_and(Angle2 < Angle1+0.001,Angle1 < 0.001),Calculated == False) == True),a.shape)
    SetFinal.flat[ID] = setSlope.flat[ID]
    Calculated.flat[ID] = True
    # step 5 > 7
    Angle3 = AngleSlope(a,b,c,np.pi - setSlope)
    ID = np.ravel_multi_index(np.where(np.logical_and(np.logical_and(bound>(np.pi-setSlope),Angle3 <= 0.001),Calculated == False) == True),a.shape)
    SetFinal.flat[ID] = np.pi - setSlope.flat[ID]
    Calculated.flat[ID] = True
    # For all other values we use the horizontal sunset if it is positive, otherwise keep a zero
    SetFinal[Calculated == False] = bound[Calculated == False]
    # Angle4 = AngleSlope(a,b,c,bound)
    # SetFinal[np.logical_and(Calculated == False,Angle4 >= 0)] = bound[np.logical_and(Calculated == False,Angle4 >= 0)]
    # If Sunrise is after Sunset there is no sunlight during the day
    SetFinal[SetFinal <= RiseFinal] = 0
    RiseFinal[SetFinal <= RiseFinal] = 0
    return(RiseFinal,SetFinal)
#------------------------------------------------------------------------------
def Constants(delta,s,gamma,phi):
    '''
    Based on Richard G. Allen 2006 equation 11.
    Coefficients (a, b, c) of the incidence-angle expression
    cos(theta) = -a + b*cos(w) + c*sin(w) for a sloped surface, from the
    declination delta, slope s, aspect gamma and latitude phi (all radians).
    '''
    sin_d, cos_d = np.sin(delta), np.cos(delta)
    sin_p, cos_p = np.sin(phi), np.cos(phi)
    sin_s, cos_s = np.sin(s), np.cos(s)
    a = sin_d * cos_p * sin_s * np.cos(gamma) - sin_d * sin_p * cos_s
    b = cos_d * cos_p * cos_s + cos_d * sin_p * sin_s * np.cos(gamma)
    c = cos_d * sin_s * np.sin(gamma)
    return(a,b,c)
#------------------------------------------------------------------------------
def BoundsSlope(a,b,c):
    '''
    Based on Richard G. Allen 2006 equation 13.
    Candidate sunrise and sunset hour angles for a sloped surface from the
    cos(zenith) coefficients; pixels with no real solution yield NaN.
    '''
    denom = b**2 + c**2
    denom[denom <= 0] = 0.00001          # guard against division by zero
    root = np.sqrt(b**2 + c**2 - a**2)   # NaN where no crossing exists
    sinB = (a*c + b*root) / denom
    sinA = (a*c - b*root) / denom
    # Clamp into the arcsin domain, see appendix A.2.i
    sinB[sinB < -1] = -1; sinB[sinB > 1] = 1
    sinA[sinA < -1] = -1; sinA[sinA > 1] = 1
    return(np.arcsin(sinA), np.arcsin(sinB))
#------------------------------------------------------------------------------
def BoundsHorizontal(delta,phi):
    '''
    Based on Richard G. Allen 2006.
    Sunset hour angle for a horizontal surface from declination and
    latitude. Pixels with no sunset (polar day) are set to pi, pixels with
    no sunrise (polar night) to 0.
    '''
    bound = np.arccos(-np.tan(delta) * np.tan(phi))
    polar_day = abs(delta + phi) > np.pi / 2
    polar_night = abs(delta - phi) > np.pi / 2
    bound[polar_day] = np.pi
    bound[polar_night] = 0
    return(bound)
#------------------------------------------------------------------------------
def AngleSlope(a,b,c,w):
    '''
    Based on Richard G. Allen 2006.
    Evaluate cos(zenith) = -a + b*cos(w) + c*sin(w) at hour angle w.
    '''
    cos_term = b * np.cos(w)
    sin_term = c * np.sin(w)
    angle = -a + cos_term + sin_term
    return(angle)
#------------------------------------------------------------------------------
def Calc_Gradient(dataset,pixel_spacing):
    """
    Compute slope and aspect (both in degrees) of a DEM array, together
    with the degree/radian conversion factors used elsewhere in the code.
    """
    deg2rad = np.pi / 180.0  # degrees -> radians
    rad2deg = 180.0 / np.pi  # radians -> degrees
    dz_drow, dz_dcol = np.gradient(dataset)
    # Slope from the magnitude of the elevation gradient
    slope = np.arctan(np.sqrt(np.square(dz_drow / pixel_spacing) +
                              np.square(dz_dcol / pixel_spacing))) * rad2deg
    # Aspect, shifted into the 0-360 degree range
    aspect = 180 + np.arctan2(dz_dcol / pixel_spacing, -dz_drow / pixel_spacing) * rad2deg
    return(deg2rad,rad2deg,slope,aspect)
#------------------------------------------------------------------------------
def DEM_lat_lon(DEM_fileName,output_folder):
    """
    Build per-pixel latitude and longitude rasters (pixel centres) for the
    DEM and save them as GeoTIFFs in the Output_radiation_balance folder.

    Returns (lat, lon, lat_fileName, lon_fileName).
    """
    # Output file names
    lat_fileName = os.path.join(output_folder, 'Output_radiation_balance','latitude.tif')
    lon_fileName = os.path.join(output_folder, 'Output_radiation_balance','longitude.tif')
    g = gdal.Open(DEM_fileName)
    geo_t = g.GetGeoTransform()
    x_size = g.RasterXSize  # columns
    y_size = g.RasterYSize  # rows
    lon = np.zeros((y_size, x_size))
    lat = np.zeros((y_size, x_size))
    # Longitude of each column centre: ULx + col*dx + dx/2
    for col_idx in np.arange(x_size):
        lon[:, col_idx] = geo_t[0] + col_idx * geo_t[1] + geo_t[1]/2
    # Latitude of each row centre: ULy + row*dy + dy/2
    # (dy is negative because rows are counted from the UL corner)
    for row_idx in np.arange(y_size):
        lat[row_idx, :] = geo_t[3] + row_idx * geo_t[5] + geo_t[5]/2
    shape = [x_size, y_size]
    # Save lat and lon rasters in geo-coordinates
    save_GeoTiff_proy(g, lat, lat_fileName, shape, nband=1)
    save_GeoTiff_proy(g, lon, lon_fileName, shape, nband=1)
    return(lat,lon,lat_fileName,lon_fileName)
#------------------------------------------------------------------------------
def reproject_dataset(dataset, pixel_spacing, UTM_Zone):
    """
    A sample function to reproject and resample a GDAL dataset from within
    Python. The idea here is to reproject from one system to another, as well
    as to change the pixel size. The procedure is slightly long-winded, but
    goes like this:
    1. Set up the two Spatial Reference systems.
    2. Open the original dataset, and get the geotransform
    3. Calculate bounds of new geotransform by projecting the UL corners
    4. Calculate the number of pixels with the new projection & spacing
    5. Create an in-memory raster dataset
    6. Perform the projection

    Returns (dest, ulx, lry, lrx, uly, epsg_to) where dest is an in-memory
    GDAL dataset in WGS84/UTM zone `UTM_Zone` (EPSG 326xx).
    """
    # 1) Open the dataset
    g = gdal.Open(dataset)
    if g is None:
        print('input folder does not exist')
    # Define the EPSG code... target is WGS84 / UTM north: 326<zone>
    EPSG_code = '326%02d' % UTM_Zone
    epsg_to = int(EPSG_code)
    # 2) Define the UK OSNG, see <http://spatialreference.org/ref/epsg/27700/>
    # Try to parse the source EPSG code out of the projection WKT;
    # fall back to WGS84 (4326) when it cannot be read.
    try:
        proj = g.GetProjection()
        Proj_in=proj.split('EPSG","')
        epsg_from=int((str(Proj_in[-1]).split(']')[0])[0:-1])
    except:
        epsg_from = int(4326) # Get the Geotransform vector:
    geo_t = g.GetGeoTransform()
    # Vector components:
    # 0- The Upper Left easting coordinate (i.e., horizontal)
    # 1- The E-W pixel spacing
    # 2- The rotation (0 degrees if image is "North Up")
    # 3- The Upper left northing coordinate (i.e., vertical)
    # 4- The rotation (0 degrees)
    # 5- The N-S pixel spacing, negative as it is counted from the UL corner
    x_size = g.RasterXSize  # Raster xsize
    y_size = g.RasterYSize  # Raster ysize
    epsg_to = int(epsg_to)
    # 2) Define the UK OSNG, see <http://spatialreference.org/ref/epsg/27700/>
    osng = osr.SpatialReference()
    osng.ImportFromEPSG(epsg_to)
    wgs84 = osr.SpatialReference()
    wgs84.ImportFromEPSG(epsg_from)
    inProj = Proj(init='epsg:%d' %epsg_from)
    outProj = Proj(init='epsg:%d' %epsg_to)
    # Skip a 3% margin on each side of the image when computing the bounds
    nrow_skip = round((0.06*y_size)/2)
    ncol_skip = round((0.06*x_size)/2)
    # Up to here, all the projection have been defined, as well as a
    # transformation from the from to the to
    ulx, uly = transform(inProj,outProj,geo_t[0] + nrow_skip * geo_t[1], geo_t[3] + nrow_skip * geo_t[5])
    lrx, lry = transform(inProj,outProj,geo_t[0] + geo_t[1] * (x_size-ncol_skip),
                         geo_t[3] + geo_t[5] * (y_size-nrow_skip))
    # See how using 27700 and WGS84 introduces a z-value!
    # Now, we create an in-memory raster
    mem_drv = gdal.GetDriverByName('MEM')
    # The size of the raster is given the new projection and pixel spacing
    # Using the values we calculated above. Also, setting it to store one band
    # and to use Float32 data type.
    col = int((lrx - ulx)/pixel_spacing)
    rows = int((uly - lry)/pixel_spacing)
    # Re-define lr coordinates based on whole number or rows and columns
    (lrx, lry) = (ulx + col * pixel_spacing, uly -
                  rows * pixel_spacing)
    dest = mem_drv.Create('', col, rows, 1, gdal.GDT_Float32)
    if dest is None:
        print('input folder to large for memory, clip input map')
    # Calculate the new geotransform
    new_geo = (ulx, pixel_spacing, geo_t[2], uly,
               geo_t[4], - pixel_spacing)
    # Set the geotransform
    dest.SetGeoTransform(new_geo)
    dest.SetProjection(osng.ExportToWkt())
    # Perform the projection/resampling with bilinear interpolation
    gdal.ReprojectImage(g, dest, wgs84.ExportToWkt(), osng.ExportToWkt(),gdal.GRA_Bilinear)
    return dest, ulx, lry, lrx, uly, epsg_to
#------------------------------------------------------------------------------
def reproject_dataset_example(dataset, dataset_example, method = 1):
    """
    Reproject/resample `dataset` onto the exact grid (extent, resolution,
    projection) of `dataset_example`.

    Both arguments may be either a path to a (tif/TIF) file or an already
    opened GDAL dataset. `method` selects the resampling: 1 = nearest
    neighbour, 2 = average, 3 = cubic.

    Returns (dest1, ulx, lry, lrx, uly, epsg_to) where dest1 is an
    in-memory GDAL dataset on the example grid.
    """
    # Open the dataset to be reprojected (path or already-open dataset)
    try:
        if (os.path.splitext(dataset)[-1] == '.tif' or os.path.splitext(dataset)[-1] == '.TIF'):
            g_in = gdal.Open(dataset)
        else:
            g_in = dataset
    except:
        g_in = dataset
    epsg_from = Get_epsg(g_in)
    # exceptions: remap the generic "unknown" code 9001 to NAD83/Conus Albers
    if epsg_from == 9001:
        epsg_from = 5070
    # open dataset that is used for transforming the dataset
    try:
        if (os.path.splitext(dataset_example)[-1] == '.tif' or os.path.splitext(dataset_example)[-1] == '.TIF'):
            g_ex = gdal.Open(dataset_example)
        else:
            g_ex = dataset_example
    except:
        g_ex = dataset_example
    epsg_to = Get_epsg(g_ex)
    # Grid definition copied from the example dataset
    Y_raster_size = g_ex.RasterYSize
    X_raster_size = g_ex.RasterXSize
    Geo = g_ex.GetGeoTransform()
    ulx = Geo[0]
    uly = Geo[3]
    lrx = ulx + X_raster_size * Geo[1]
    lry = uly + Y_raster_size * Geo[5]
    # Set the EPSG codes
    osng = osr.SpatialReference()
    osng.ImportFromEPSG(epsg_to)
    wgs84 = osr.SpatialReference()
    wgs84.ImportFromEPSG(epsg_from)
    # Create new raster
    mem_drv = gdal.GetDriverByName('MEM')
    dest1 = mem_drv.Create('', X_raster_size, Y_raster_size, 1, gdal.GDT_Float32)
    dest1.SetGeoTransform(Geo)
    dest1.SetProjection(osng.ExportToWkt())
    # Perform the projection/resampling with the selected method
    if method == 1:
        gdal.ReprojectImage(g_in, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(), gdal.GRA_NearestNeighbour)
    if method == 2:
        gdal.ReprojectImage(g_in, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(), gdal.GRA_Average)
    if method == 3:
        gdal.ReprojectImage(g_in, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(), gdal.GRA_Cubic)
    return(dest1, ulx, lry, lrx, uly, epsg_to)
#------------------------------------------------------------------------------
def save_GeoTiff_proy(src_dataset, dst_dataset_array, dst_fileName, shape_lsc, nband):
    """
    This function saves an array dataset in GeoTiff, using the parameters
    from the source dataset, in projected coordinates.

    * src_dataset: opened GDAL dataset supplying geotransform and projection
    * dst_dataset_array: 2D array to write (values < -9999 become NaN)
    * dst_fileName: output path; missing directories are created
    * shape_lsc: [columns, rows] of the output raster
    * nband: number of bands to allocate (only band 1 is written)
    """
    dst_dataset_array = np.float_(dst_dataset_array)
    # Treat extreme negatives as nodata before writing
    dst_dataset_array[dst_dataset_array<-9999] = np.nan
    geotransform = src_dataset.GetGeoTransform()
    spatialreference = src_dataset.GetProjection()
    # create dataset for output
    fmt = 'GTiff'
    driver = gdal.GetDriverByName(fmt)
    dir_name = os.path.dirname(dst_fileName)
    # If the directory does not exist, make it.
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    dst_dataset = driver.Create(dst_fileName, shape_lsc[0], shape_lsc[1], nband,gdal.GDT_Float32)
    dst_dataset.SetGeoTransform(geotransform)
    dst_dataset.SetProjection(spatialreference)
    dst_dataset.GetRasterBand(1).SetNoDataValue(-9999)
    dst_dataset.GetRasterBand(1).WriteArray(dst_dataset_array)
    # Closing the dataset flushes it to disk
    dst_dataset = None
#------------------------------------------------------------------------------
def w_time(LT,lon_proy, DOY):
    """
    Compute the solar hour angle (radians) for every pixel of an image.

    Parameters
    ----------
    LT : float or array
        Local clock time (decimal hours).
    lon_proy : np.ndarray
        Per-pixel longitude (decimal degrees).
    DOY : int or float
        Day of the year.

    Returns
    -------
    np.ndarray
        Hour angle per pixel (radians): 0 at solar noon, negative before,
        positive after.
    """
    nrow, ncol = lon_proy.shape
    # Difference of the local time (LT) from Greenwich Mean Time (GMT) (hours),
    # taken at the image centre pixel using the sign(x)*x convention used
    # throughout this module (see Calc_Ra_Mountain).
    delta_GTM = np.sign(lon_proy[int(nrow/2), int(ncol/2)]) * lon_proy[int(nrow/2), int(ncol/2)] * 24 / 360
    if np.isnan(delta_GTM) == True:
        # Centre pixel is nodata: fall back to the mean longitude.
        # Bug fix: the original multiplied the mean by itself
        # (mean * mean * 24/360), squaring the longitude, instead of
        # applying np.sign as in the centre-pixel branch above.
        mean_lon = np.nanmean(lon_proy)
        delta_GTM = np.sign(mean_lon) * mean_lon * 24 / 360
    # Local Standard Time Meridian (degrees):
    LSTM = 15 * delta_GTM
    # Equation of time (EoT, minutes):
    B = 360./365 * (DOY-81)  # (degrees)
    EoT = 9.87 * np.sin(np.deg2rad(2 * B)) - 7.53 * np.cos(np.deg2rad(B)) - 1.5 * np.sin(np.deg2rad(B))
    # Net Time Correction Factor (minutes) at the center of the image:
    TC = 4 * (lon_proy - LSTM) + EoT  # Difference in time over the longitude
    LST = LT + delta_GTM + TC/60      # Local solar time (hours)
    HRA = 15 * (LST-12)               # Hour angle HRA (degrees)
    deg2rad = np.pi / 180.0           # Factor to transform from degree to rad
    w = HRA * deg2rad                 # Hour angle HRA (radians)
    return w
#------------------------------------------------------------------------------
def sensible_heat(rah, ustar, rn_inst, g_inst, ts_dem, ts_dem_hot, ts_dem_cold,
                  air_dens, Surf_temp, k_vk, QC_Map, hot_pixels, slope):
    """
    This function computes the instantaneous sensible heat given the
    instantaneous net radiation, ground heat flux, and other parameters.

    The near-surface temperature difference dT is calibrated from the hot
    pixels (where all available energy is assumed to go into H) and the
    linear dT(Ts) relation anchored at the hot and cold pixel temperatures.

    Returns (L_MO, psi_200_stable, psi_h, psi_m200, h, dT, slope_dt, offset_dt).
    """
    # Near surface temperature difference (dT):
    dT_ini = (rn_inst - g_inst) * rah / (air_dens * 1004)
    dT_hot = np.copy(dT_ini)
    #dT_hot_fileName = os.path.join(output_folder, 'Output_cloud_masked','test.tif')
    #save_GeoTiff_proy(dest, dT_hot, dT_hot_fileName,shape, nband=1)
    # dT for hot pixels - hot, (dry) agricultural fields with no green veget.:
    dT_hot[ts_dem <= (ts_dem_hot - 0.5)] = np.nan
    dT_hot[QC_Map == 1] = np.nan
    dT_hot[dT_hot == 0] = np.nan
    # No hot pixels survived the filters: relax the hot threshold to the
    # 99.5th percentile of the hot-pixel population and retry
    if np.all(np.isnan(dT_hot)) == True:
        dT_hot = np.copy(dT_ini)
        ts_dem_hot = np.nanpercentile(hot_pixels, 99.5)
        dT_hot[ts_dem <= (ts_dem_hot - 0.5)] = np.nan
        dT_hot[dT_hot == 0] = np.nan
    dT_hot=np.float32(dT_hot)
    # Exclude steep terrain from the hot-pixel calibration
    dT_hot[slope > 10]=np.nan
    dT_hot_mean = np.nanmean(dT_hot)
    # Compute slope and offset of linear relationship dT = b + a * Ts
    slope_dt = (dT_hot_mean - 0.0) / (ts_dem_hot - ts_dem_cold)  # EThot = 0.0
    offset_dt = dT_hot_mean - slope_dt * ts_dem_hot
    dT = offset_dt + slope_dt * ts_dem
    # Sensible heat flux:
    h = air_dens * 1004 * dT / rah
    h[QC_Map == 1] = np.nan
    h[h==0]=np.nan
    h[QC_Map != 0] = np.nan
    # Monin-Obukhov length (m):
    L_MO = ((-1.0 * air_dens * 1004 * np.power(ustar, 3) * Surf_temp) /
            (k_vk * 9.81 * h))
    L_MO[L_MO < -1000] = -1000
    # Stability correction for momentum, stable conditions (L_MO >= 0):
    psi_200_stable = -0.05 * 200 / L_MO
    # Stability correction for momentum and heat transport, unstable
    # conditions (L_MO < 0):
    x2 = np.power((1.0 - 16.0 * (2.0/L_MO)), 0.25)  # x at 2m
    x200 = np.power(1.0 - 16.0 * (200/L_MO), 0.25)  # x at 200m
    psi_h = 2 * np.log((1 + np.power(x2, 2))/2)
    psi_m200 = (2 * np.log((1 + x200) / 2) + np.log((1 + np.power(x200, 2)) /
                2) - 2 * np.arctan(x200) + 0.5*np.pi)
    print('Sensible Heat ', np.nanmean(h))
    print('dT' , np.nanmean(dT))
    return L_MO, psi_200_stable, psi_h, psi_m200, h, dT, slope_dt, offset_dt
#------------------------------------------------------------------------------
def Reshape_Reproject_Input_data(input_File_Name, output_File_Name, Example_extend_fileName):
    """
    Reproject/resample an input raster onto the grid of the example raster,
    save the result to output_File_Name and return it as an array.
    """
    # Reproject the dataset onto the example grid
    data_rep, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset_example(
        input_File_Name, Example_extend_fileName)
    # Read the reprojected band back into an array
    reproj_band = data_rep.GetRasterBand(1)
    ncols = data_rep.RasterXSize
    nrows = data_rep.RasterYSize
    data = reproj_band.ReadAsArray(0, 0, ncols, nrows)
    # Persist the reprojected raster
    save_GeoTiff_proy(data_rep, data, output_File_Name, [ncols, nrows], nband=1)
    return(data)
#------------------------------------------------------------------------------
def Thermal_Sharpening(surface_temp_up, NDVI_up, NDVI, Box, dest_up, output_folder, ndvi_fileName, shape_down, dest_down):
    """
    Sharpen a coarse surface-temperature raster to the NDVI resolution.

    For every coarse pixel a 2nd-order polynomial Ts = A*NDVI^2 + B*NDVI + C
    is fitted over a Box x Box window; the coefficient rasters are saved,
    downscaled to the NDVI grid, and evaluated against the fine NDVI.
    Results outside the plausible 250-400 K range are set to NaN.
    """
    # Creating arrays to store the coefficients
    CoefA=np.zeros((len(surface_temp_up),len(surface_temp_up[1])))
    CoefB=np.zeros((len(surface_temp_up),len(surface_temp_up[1])))
    CoefC=np.zeros((len(surface_temp_up),len(surface_temp_up[1])))
    # Fit a second polynominal fit to the NDVI and Thermal data and save the coefficients for each pixel
    # NOW USING FOR LOOPS PROBABLY NOT THE FASTEST METHOD
    for i in range(0,len(surface_temp_up)):
        for j in range(0,len(surface_temp_up[1])):
            if np.isnan(np.sum(surface_temp_up[i,j]))==False and np.isnan(np.sum(NDVI_up[i,j]))==False:
                # Gather the window's NDVI/temperature pairs where both are valid
                x_data = NDVI_up[int(np.maximum(0, i - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up), i + (Box - 1) / 2 + 1)), int(np.maximum(0, j - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up[1]), j + (Box - 1) / 2 + 1))][np.logical_and(np.logical_not(np.isnan(NDVI_up[int(np.maximum(0, i - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up), i + (Box - 1) / 2 + 1)),int(np.maximum(0, j - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up[1]), j + (Box - 1) / 2 + 1))])), np.logical_not(np.isnan(surface_temp_up[int(np.maximum(0, i - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up), i + (Box - 1) / 2 + 1)),int(np.maximum(0, j - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up[1]),j + (Box - 1) / 2 + 1))])))]
                y_data = surface_temp_up[int(np.maximum(0, i - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up), i + (Box - 1) / 2 + 1)), int(np.maximum(0, j - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up[1]), j + (Box - 1) / 2 + 1))][np.logical_and(np.logical_not(np.isnan(NDVI_up[int(np.maximum(0, i - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up), i + (Box - 1) / 2 + 1)),int(np.maximum(0, j - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up[1]),j + (Box - 1) / 2 + 1))])), np.logical_not(np.isnan(surface_temp_up[int(np.maximum(0, i - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up), i + (Box - 1) / 2 + 1)),int(np.maximum(0, j - (Box - 1) / 2)):int(np.minimum(len(surface_temp_up[1]), j + (Box - 1) / 2 + 1))])))]
                x_data[~np.isnan(x_data)]
                y_data[~np.isnan(y_data)]
                # Fit only when enough samples are available
                if len(x_data)>6:
                    coefs = poly.polyfit(x_data, y_data, 2)
                    CoefA[i,j] = coefs[2]
                    CoefB[i,j] = coefs[1]
                    CoefC[i,j] = coefs[0]
                else:
                    CoefA[i,j] = np.nan
                    CoefB[i,j] = np.nan
                    CoefC[i,j] = np.nan
            else:
                CoefA[i,j] = np.nan
                CoefB[i,j] = np.nan
                CoefC[i,j] = np.nan
    # Define the shape of the surface temperature with the resolution of 400m
    shape_up=[len(surface_temp_up[1]),len(surface_temp_up)]
    # Save the coefficients
    CoefA_fileName_Optie2 = os.path.join(output_folder, 'Output_temporary','coef_A.tif')
    save_GeoTiff_proy(dest_up,CoefA, CoefA_fileName_Optie2,shape_up, nband=1)
    CoefB_fileName_Optie2 = os.path.join(output_folder, 'Output_temporary','coef_B.tif')
    save_GeoTiff_proy(dest_up,CoefB, CoefB_fileName_Optie2,shape_up, nband=1)
    CoefC_fileName_Optie2 = os.path.join(output_folder, 'Output_temporary','coef_C.tif')
    save_GeoTiff_proy(dest_up,CoefC, CoefC_fileName_Optie2,shape_up, nband=1)
    # Downscale the fitted coefficients onto the NDVI grid
    CoefA_Downscale, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset_example(
        CoefA_fileName_Optie2, ndvi_fileName)
    CoefA = CoefA_Downscale.GetRasterBand(1).ReadAsArray()
    CoefB_Downscale, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset_example(
        CoefB_fileName_Optie2, ndvi_fileName)
    CoefB = CoefB_Downscale.GetRasterBand(1).ReadAsArray()
    CoefC_downscale, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = reproject_dataset_example(
        CoefC_fileName_Optie2, ndvi_fileName)
    CoefC = CoefC_downscale.GetRasterBand(1).ReadAsArray()
    # Calculate the surface temperature based on the fitted coefficents and NDVI
    temp_surface_sharpened=CoefA*NDVI**2+CoefB*NDVI+CoefC
    # Discard physically implausible temperatures
    temp_surface_sharpened[temp_surface_sharpened < 250] = np.nan
    temp_surface_sharpened[temp_surface_sharpened > 400] = np.nan
    return(temp_surface_sharpened)
#------------------------------------------------------------------------------
def Run_command_window(argument):
    """
    This function runs the argument in the command window without showing cmd window
    Keyword Arguments:
    argument -- string, name of the adf file
    """
    if os.name == 'posix':
        # .exe suffixes are meaningless on POSIX systems
        os.system(argument.replace(".exe",""))
    else:
        # Hide the console window when spawning the process on Windows
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        proc = subprocess.Popen(argument, startupinfo=startupinfo,
                                stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        proc.wait()
    return()
#------------------------------------------------------------------------------
def Get_epsg(g, extension = 'tiff'):
    """
    This function reads the projection of a GEOGCS file or tiff file
    Keyword arguments:
    g -- string
        Filename to the file that must be read
    extension -- tiff or GEOGCS
        Define the extension of the dataset (default is tiff)

    Falls back to 4326 (WGS84) when the projection cannot be parsed.
    """
    try:
        if extension == 'tiff':
            # Split the WKT projection string around the EPSG authority tag
            Projection = g.GetProjection().split('EPSG","')
        if extension == 'GEOGCS':
            Projection = g
        # Last fragment looks like '<code>"]...' -> strip the closing quote
        epsg_to = int((str(Projection[-1]).split(']')[0])[0:-1])
    except:
        epsg_to = 4326
        #print('Was not able to get the projection, so WGS84 is assumed')
    return(epsg_to)
#------------------------------------------------------------------------------
def Open_constant_or_spatial_map(worksheet, CellID, Output_filename, Example_file):
    """
    Read an input cell that holds either a constant value or a path to a map.

    Returns (Constant_or_Map, Map_file_name) where Constant_or_Map is either
    the float value or the reprojected raster array, and Map_file_name is a
    human-readable description / the raster path.
    """
    try:
        # Numeric cell: use it as a spatially constant value
        Constant_or_Map = float(worksheet['%s' %CellID].value)
        Map_file_name = "Constant value of: " + str(Constant_or_Map)
    except:
        # Otherwise interpret the cell contents as a raster path
        Map_file_name = '%s' %str(worksheet['%s' %CellID].value)
        try:
            Constant_or_Map = Reshape_Reproject_Input_data(Map_file_name, Output_filename, Example_file)
        except:
            print('ERROR: One of the INPUTS is NOT CORRECT')
    return(Constant_or_Map, Map_file_name)
#------------------------------------------------------------------------------
def resize_array_example(Array_in, Array_example, method=1):
    """
    This function resizes an array so it has the same size as an example array
    The extend of the array must be the same
    Keyword arguments:
    Array_in -- []
        Array: 2D or 3D array
    Array_example -- []
        Array: 2D or 3D array
    method: -- 1 ... 5
        int: Resampling method (1 nearest, 2 bicubic, 3 bilinear,
        4 cubic, 5 lanczos)
    """
    # Target shape: keep leading dimensions, copy the last two from the example
    Array_out_shape = np.int_(Array_in.shape)
    Array_out_shape[-1] = Array_example.shape[-1]
    Array_out_shape[-2] = Array_example.shape[-2]
    # Map the method number to the names used by scipy.misc.imresize (py2)
    # and the interpolation order used by skimage.transform.resize (py3)
    if method == 1:
        interpolation_method='nearest'
        interpolation_number = 0
    if method == 2:
        interpolation_method='bicubic'
        interpolation_number = 3
    if method == 3:
        interpolation_method='bilinear'
        interpolation_number = 1
    if method == 4:
        interpolation_method='cubic'
    if method == 5:
        interpolation_method='lanczos'
    # NOTE(review): methods 4 and 5 never set interpolation_number, so the
    # Python 3 branch below raises NameError for them — confirm intended use.
    if len(Array_out_shape) == 3:
        # 3D input: resample each band separately
        Array_out = np.zeros(Array_out_shape)
        for i in range(0, Array_out_shape[0]):
            Array_in_slice = Array_in[i,:,:]
            size=tuple(Array_out_shape[1:])
            if sys.version_info[0] == 2:
                import scipy.misc as misc
                Array_out_slice= misc.imresize(np.float_(Array_in_slice), size, interp=interpolation_method, mode='F')
            if sys.version_info[0] == 3:
                import skimage.transform as transform
                Array_out_slice= transform.resize(np.float_(Array_in_slice), size, order=interpolation_number)
            Array_out[i,:,:] = Array_out_slice
    elif len(Array_out_shape) == 2:
        # 2D input: resample directly
        size=tuple(Array_out_shape)
        if sys.version_info[0] == 2:
            import scipy.misc as misc
            Array_out= misc.imresize(np.float_(Array_in), size, interp=interpolation_method, mode='F')
        if sys.version_info[0] == 3:
            import skimage.transform as transform
            Array_out= transform.resize(np.float_(Array_in), size, order=interpolation_number)
    else:
        print('only 2D or 3D dimensions are supported')
    return(Array_out)
|
wateraccounting/SEBAL
|
pySEBAL/pySEBAL_code.py
|
Python
|
apache-2.0
| 129,049
|
[
"ADF"
] |
d70dd6e216315c8439ed76cb9df5caeef1735fc60991402ed0ea39f0d9c23f36
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 29 21:55:06 2018
@author: leandrodemarcovedelago
"""
import numpy as np
import numpy.matlib as npmatlib
import math
import Utils
class Acor:
    """
    Ant Colony Optimization for continuous domains (ACOR) applied to
    sizing filter components (resistors and capacitors).

    An archive of the best solutions found so far defines Gaussian
    kernels; new candidate solutions ("ants") are sampled from those
    kernels, merged with the archive, and only the best are kept.
    """
    def __init__(self, alg_variant, uses_log):
        """
        * alg_variant should be one of the following strings:
        'ContinuoLibre', 'ContinuoFijo', 'Vecinos', 'DiscretoPuro'
        * uses_log: boolean indicating whether or not to use logarithm
        of components instead of their regular value
        """
        self.alg_variant = alg_variant
        self.uses_log = uses_log
        self.utils = Utils.Utils()
        # 'ContinuoLibre' additionally optimizes R1, adding one dimension
        # (and one resistor) to the search space.
        self.num_dimensions = 5 if alg_variant == 'ContinuoLibre' else 4
        self.num_resistors = 3 if alg_variant == 'ContinuoLibre' else 2
    def _calc_comp_val_discrete(self, means, sigmas, comp_idx, p):
        """
        Discretize the value of a filter component from a continuous
        value calculated by ACOR, for the variant 'DiscretoPuro'.
        * means: array of means
        * sigmas: array of standard deviations
        * comp_idx: index of the component to discretize
        * p: probabilities array
        """
        i = comp_idx
        res_vals, cap_vals = self.utils.res_vals, self.utils.cap_vals
        log_res_vals = self.utils.log_res_vals
        log_cap_vals = self.utils.log_cap_vals
        # Select Gaussian Kernel
        l = Utils.wheel_selection(p)
        # Generate Gaussian Random Variable
        aux = means[l][i] + sigmas[l][i] * np.random.randn()
        # Choose the table of allowed values: resistor vs capacitor,
        # linear vs logarithmic scale.
        is_resistor = i < self.num_resistors
        if (is_resistor and not self.uses_log):
            vals_to_use = res_vals
        elif (is_resistor and self.uses_log):
            vals_to_use = log_res_vals
        elif (not is_resistor and not self.uses_log):
            vals_to_use = cap_vals
        else:
            vals_to_use = log_cap_vals
        # Snap to the nearest allowed value.
        idx = np.abs(vals_to_use - aux).argmin()
        return vals_to_use[idx]
    def _initialize_archive(self, R1):
        """
        Build the initial solution archive: uniformly random component
        values (or their logs) plus, in the last column, each solution's
        cost. R1 is forwarded to the cost function (None unless R1 is
        fixed externally).
        """
        res_min, res_max = self.utils.res_min, self.utils.res_max
        cap_min, cap_max = self.utils.cap_min, self.utils.cap_max
        num_dim = self.num_dimensions
        archive_size = self.utils.archive_size
        cost = self.utils.cost
        empty_ant = np.empty([num_dim + 1])
        archive = npmatlib.repmat(empty_ant, archive_size, 1)
        for i in range(0, archive_size):
            for j in range(0, num_dim + 1):
                if (j < self.num_resistors):
                    # Resistor
                    low = math.log(res_min) if self.uses_log else res_min
                    high = math.log(res_max) if self.uses_log else res_max
                    archive[i][j] = np.random.uniform(low, high)
                elif (j < num_dim):
                    # Capacitor
                    low = math.log(cap_min) if self.uses_log else cap_min
                    high = math.log(cap_max) if self.uses_log else cap_max
                    archive[i][j] = np.random.uniform(low, high)
                else:
                    # Cost
                    archive[i][j] = cost(archive[i][0:num_dim], self.uses_log,
                                         R1)
        return archive
    def main_loop(self, R1 = None):
        """
        Run the ACOR main loop and return the best solution found
        (component values followed by the cost in the last position).
        * R1: optional fixed value for R1; when given it is excluded
          from the search space and forwarded to the cost function.
        """
        archive_size = self.utils.archive_size
        num_dim = self.num_dimensions
        max_iterations = self.utils.max_iterations
        int_factor = self.utils.intensification_factor
        zeta = self.utils.zeta
        sample_size = self.utils.sample_size
        cost = self.utils.cost
        use_log = self.uses_log
        # Hold data of evolution for cost and variables through execution
        self.best_cost = np.zeros([max_iterations])
        self.best_r1 = np.zeros([max_iterations])
        self.best_r2 = np.zeros([max_iterations])
        self.best_r3 = np.zeros([max_iterations])
        self.best_c4 = np.zeros([max_iterations])
        self.best_c5 = np.zeros([max_iterations])
        archive = self._initialize_archive(R1)
        # Keep the archive sorted by cost (last column), best first.
        archive = archive[archive[:,num_dim].argsort()]
        # Weights array: rank-based Gaussian weights favoring top solutions.
        w = np.empty([archive_size])
        for l in range(0, archive_size):
            f_factor = 1/(math.sqrt(2*math.pi)*int_factor*archive_size)
            s_factor = math.exp(-0.5*(l/(int_factor*archive_size))**2)
            w[l] = f_factor * s_factor
        # Selection probabilities
        p = w / np.sum(w)
        # ACOR Main Loop
        empty_ant = np.empty([num_dim + 1])
        for it in range(0, max_iterations):
            # Means
            s = np.zeros([archive_size, num_dim])
            for l in range(0, archive_size):
                s[l] = archive[l][0:num_dim]
            # Standard deviations: mean distance to all other solutions.
            sigma = np.zeros([archive_size, num_dim])
            for l in range(0, archive_size):
                D = 0
                for r in range(0, archive_size):
                    D += abs(s[l]-s[r])
                sigma[l] = zeta * D / (archive_size - 1)
            # Create new population array
            # (was np.matlib.repmat -- use the npmatlib alias consistently)
            new_population = npmatlib.repmat(empty_ant, sample_size, 1)
            # Initialize solution for each new ant
            for t in range(0, sample_size):
                new_population[t][0:num_dim] = np.zeros([num_dim])
                for i in range(0, num_dim):
                    if (self.alg_variant == 'DiscretoPuro'):
                        comp_val = self._calc_comp_val_discrete(s, sigma, i, p)
                        new_population[t][i] = comp_val
                    else:
                        # Select Gaussian Kernel
                        l = Utils.wheel_selection(p)
                        # Generate Gaussian Random Variable
                        new_population[t][i] = (s[l][i]
                                                + sigma[l][i]*np.random.randn())
                # Evaluation of built solution
                filter_comps = new_population[t][0:num_dim]
                new_population[t][num_dim] = cost(filter_comps, use_log, R1)
            # Merge old population (archive) with new one
            merged_pop = np.concatenate([archive, new_population])
            # And sort it again
            merged_pop = merged_pop[merged_pop[:,num_dim].argsort()]
            # Store the bests in the archive and update best sol
            archive = merged_pop[:archive_size]
            best_sol = archive[0][0:num_dim]  # Current best solution, NO cost
            self.best_cost[it] = archive[0][num_dim]  # Current best cost
            # Identity tests ('is not None') instead of '!= None' (PEP 8).
            self.best_r1[it] = R1 if R1 is not None else best_sol[0]
            if self.uses_log and R1 is not None:
                self.best_r1[it] = math.log(R1)
            self.best_r2[it] = best_sol[0] if R1 is not None else best_sol[1]
            self.best_r3[it] = best_sol[1] if R1 is not None else best_sol[2]
            self.best_c4[it] = best_sol[2] if R1 is not None else best_sol[3]
            self.best_c5[it] = best_sol[3] if R1 is not None else best_sol[4]
        return archive[0]  # Best population and cost
|
leandrodemarcovedelago/thesis-aco
|
informe/ACOR.py
|
Python
|
gpl-3.0
| 7,338
|
[
"Gaussian"
] |
1ecdcd9b04a206bc60912b0d12bda10d571f5efe66c59eb219b9ea5f1876cdc8
|
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from os import path as op
from numpy.testing import assert_allclose, assert_array_equal
from vispy.io import write_mesh, read_mesh, load_data_file
from vispy.geometry import _fast_cross_3d
from vispy.util import _TempDir
from vispy.testing import (run_tests_if_main, assert_equal, assert_raises,
requires_ssl)
temp_dir = _TempDir()
@requires_ssl()
def test_wavefront():
    """Test wavefront reader"""
    fname_mesh = load_data_file('orig/triceratops.obj.gz')
    fname_out = op.join(temp_dir, 'temp.obj')
    mesh1 = read_mesh(fname_mesh)
    # Bad inputs: missing file, non-mesh file, unknown output format.
    assert_raises(IOError, read_mesh, 'foo.obj')
    assert_raises(ValueError, read_mesh, op.abspath(__file__))
    assert_raises(ValueError, write_mesh, fname_out, *mesh1, format='foo')
    write_mesh(fname_out, mesh1[0], mesh1[1], mesh1[2], mesh1[3])
    # Refuses to overwrite an existing file unless overwrite=True.
    assert_raises(IOError, write_mesh, fname_out, *mesh1)
    write_mesh(fname_out, *mesh1, overwrite=True)
    # Round trip: the re-read mesh must match what was written
    # (element-wise, allowing for text-format precision loss).
    mesh2 = read_mesh(fname_out)
    assert_equal(len(mesh1), len(mesh2))
    for m1, m2 in zip(mesh1, mesh2):
        if m1 is None:
            assert_equal(m2, None)
        else:
            assert_allclose(m1, m2, rtol=1e-5)
    # test our efficient normal calculation routine
    assert_allclose(mesh1[2], _slow_calculate_normals(mesh1[0], mesh1[1]),
                    rtol=1e-7, atol=1e-7)
def test_wavefront_non_triangular():
    '''Test wavefront writing with non-triangular faces'''
    # Two polygonal faces: one with 6 vertices, one with 5.
    vertices = np.array([[0.5, 1.375, 0.],
                         [0.5, 0.625, 0.],
                         [3.25, 1., 0.],
                         [1., 0.375, 0.],
                         [2., 0.375, 0.],
                         [1.5, 0.625, 0.],
                         [1.5, 1.375, 0.],
                         [1., 1.625, 0.],
                         [2., 1.625, 0.]])
    faces = np.array([[1, 0, 7, 6, 5, 3],
                      [4, 5, 6, 8, 2]], dtype=object)
    fname_out = op.join(temp_dir, 'temp.obj')
    write_mesh(fname_out, vertices=vertices,
               faces=faces, normals=None,
               texcoords=None, overwrite=True,
               reshape_faces=False)
    # The reader rejects non-triangular faces.
    assert_raises(RuntimeError, read_mesh, fname_out)
    # So inspect the raw file instead; OBJ face indices are 1-based,
    # hence each written index is the 0-based index above plus one.
    with open(fname_out, 'r+') as out_file:
        lines = out_file.readlines()
        assert lines[-1].startswith('f 5 6 7 9 3')
        assert lines[-2].startswith('f 2 1 8 7 6 4')
def test_meshio():
    '''Test meshio i/o'''
    # A unit square split into two triangles.
    square_vertices = np.array([[0.0, 0.0, 0.0],
                                [1.0, 0.0, 0.],
                                [-.0, 1.0, 0.],
                                [1.0, 1.0, 0.]])
    square_faces = np.array([[0, 1, 3],
                             [1, 2, 3]])
    fname_out = op.join(temp_dir, 'temp.vtk')
    # Round-trip through the VTK writer/reader and compare.
    write_mesh(fname_out, vertices=square_vertices,
               faces=square_faces, normals=None,
               texcoords=None, overwrite=True,
               reshape_faces=False)
    out_vertices, out_faces, _, _ = read_mesh(fname_out)
    assert np.all(np.abs(out_vertices - square_vertices) < 1.0e-14)
    assert np.all(out_faces == square_faces)
def _slow_calculate_normals(rr, tris):
"""Efficiently compute vertex normals for triangulated surface"""
# first, compute triangle normals
rr = rr.astype(np.float64)
r1 = rr[tris[:, 0], :]
r2 = rr[tris[:, 1], :]
r3 = rr[tris[:, 2], :]
tri_nn = np.cross((r2 - r1), (r3 - r1))
# Triangle normals and areas
size = np.sqrt(np.sum(tri_nn * tri_nn, axis=1))
zidx = np.where(size == 0)[0]
size[zidx] = 1.0 # prevent ugly divide-by-zero
tri_nn /= size[:, np.newaxis]
# accumulate the normals
nn = np.zeros((len(rr), 3))
for p, verts in enumerate(tris):
nn[verts] += tri_nn[p, :]
size = np.sqrt(np.sum(nn * nn, axis=1))
size[size == 0] = 1.0 # prevent ugly divide-by-zero
nn /= size[:, np.newaxis]
return nn
def test_huge_cross():
    """Test cross product with lots of elements
    """
    many_vecs = np.random.rand(100000, 3)
    one_vec = np.random.rand(1, 3)
    expected = np.cross(many_vecs, one_vec)
    # The fast implementation must match numpy exactly.
    assert_array_equal(expected, _fast_cross_3d(many_vecs, one_vec))
run_tests_if_main()
|
Eric89GXL/vispy
|
vispy/io/tests/test_io.py
|
Python
|
bsd-3-clause
| 4,228
|
[
"VTK"
] |
2b7aeabd1241f053e63cd1f06cc3875b0dd040bc3842275b32ec28a0e1385525
|
# (c) 2012-2017, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
from django import test
from galaxy.main import views
class TestErrorHandlers(test.TestCase):
    """Exercise the custom 400/404/500 error-handler views."""

    def setUp(self):
        self.factory = test.RequestFactory()

    def _render(self, handler):
        # Run a dummy GET request through the given handler view.
        return handler(self.factory.get('/path'))

    def test_handle_400_view(self):
        resp = self._render(views.handle_400_view)
        self.assertEqual(resp.status_code, 400)
        self.assertIn("The requested page could not be found.",
                      resp.content)

    def test_handle_404_view(self):
        resp = self._render(views.handle_404_view)
        self.assertEqual(resp.status_code, 404)
        self.assertIn("The requested page could not be found.",
                      resp.content)

    def test_handle_500_view(self):
        resp = self._render(views.handle_500_view)
        self.assertEqual(resp.status_code, 500)
        self.assertIn("An error occurred while loading the requested page.",
                      resp.content)
|
chouseknecht/galaxy
|
galaxy/tests/main/test_views.py
|
Python
|
apache-2.0
| 1,718
|
[
"Galaxy"
] |
9efbf73def989efb65a93aa82336d3fa6203ce7cadb93100f6de74ff826ab652
|
# proxy module
from __future__ import absolute_import
from mayavi.core.file_data_source import *
|
enthought/etsproxy
|
enthought/mayavi/core/file_data_source.py
|
Python
|
bsd-3-clause
| 97
|
[
"Mayavi"
] |
c88be1105eeb98fa116174b3444a7b7d6dc20004ffd6bc2ba5593478f87c37fe
|
"""
Copyright (c) 2017 Sam Witte
Created on Jan 19, 2017
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Results from
"""
from __future__ import absolute_import
from __future__ import division
import numpy as np
from scipy.interpolate import interp1d
pi = np.pi
# Dataset identifier: 3-bin xenon (Z=54 isotope list below) mock experiment.
name = "3BinXe2"
# NOTE(review): presumably disables annual-modulation analysis -- confirm
# against the consuming framework.
modulated = False
# Detector energy-resolution model selector used by the analysis framework.
energy_resolution_type = "Gaussian"
def EnergyResolution(e):
    """Constant fractional energy resolution of 0.15 for every energy in *e*."""
    return 0.15 * np.ones_like(e)
# Form-factor model names: Gaussian for spin-dependent, Helm for
# spin-independent interactions.
FFSD = 'GaussianFFSD'
FFSI = 'HelmFF'
# Interaction type -> form-factor name.
FF = {'SI': FFSI,
      'SDPS': FFSD,
      'SDAV': FFSD,
      }
# Columns: mass number A, atomic number Z (54 = xenon), isotopic abundance.
target_nuclide_AZC_list = \
    np.array([[124, 54, 0.0008966], [126, 54, 0.0008535], [128, 54, 0.018607],
              [129, 54, 0.25920], [130, 54, 0.040280], [131, 54, 0.21170],
              [132, 54, 0.27035], [134, 54, 0.10644], [136, 54, 0.09168]])
# Columns: nuclear spin J and (presumably) proton/neutron spin expectation
# terms scaled by sqrt((2J+1)/2/pi); zeros for the even (spin-0) isotopes.
# TODO(review): confirm normalization convention against the framework.
target_nuclide_JSpSn_list = \
    np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0],
              [1./2, 0.010 * np.sqrt(3./2 / pi), .329 * np.sqrt(3./2 / pi)], [0, 0, 0],
              [3./2, -0.009 * np.sqrt(5./2 / pi), -.272 * np.sqrt(5./2 / pi)], [0, 0, 0],
              [0, 0, 0], [0, 0, 0]])
# Nuclide masses, one per isotope above (units not stated here --
# presumably GeV; confirm with the consuming framework).
target_nuclide_mass_list = np.array([115.418, 117.279, 119.141, 120.074, 121.004,
                                     121.937, 122.868, 124.732, 126.597])
num_target_nuclides = target_nuclide_mass_list.size
def QuenchingFactor(x):
    """Unit quenching factor: recoil energies are used unmodified."""
    return np.full_like(x, 1)
# Analysis energy window (units as used by the framework -- presumably keV;
# confirm with consumer code).
Ethreshold = 3.
Emaximum = 100.
# Maximum recoil energy considered.
ERmaximum = 30.
def Efficiency_ER(er):
    """Unit detection efficiency as a function of recoil energy.

    A scalar input is promoted to a length-1 sequence so the return
    value is always array-like.
    """
    return np.ones_like(np.atleast_1d(er))
def Efficiency(er):
    """Unit overall detection efficiency.

    A scalar input is promoted to a length-1 sequence so the return
    value is always array-like.
    """
    return np.ones_like(np.atleast_1d(er))
# Exposure computed as 1 * 1000 * 365.24 -- presumably kg x days
# (one tonne-year); confirm units with the consuming framework.
Exposure = 1. * 1000. * 365.24
#ERecoilList = np.array([])
#Expected_limit = 1.
# Per-bin event counts (presumably observed counts -- confirm).
BinData = np.array([1., 4., 6.])
# Left/right bin edges of the three analysis bins.
BinEdges_left = np.array([1., 2.5, 4.])
BinEdges_right = np.array([2.5, 4., 5.5])
# Per-bin expected background.
BinBkgr = np.array([1., 1., 1.])
BinSize = 3.
# Identical exposure assumed for each bin.
BinExposure = np.array([Exposure, Exposure, Exposure])
Nbins=3.
|
SamWitte/Codds_DarkMatter
|
src/Data/3BinXe2.py
|
Python
|
gpl-2.0
| 2,545
|
[
"Gaussian"
] |
44fce778221c44153f7265cfa0ca0efd8477eb8d857bfb9285dfd950f96772be
|
# -*- coding: utf-8 -*-
'''
Copyright (C) 2011-2021 Maximilian Maahn, U Leipzig
maximilian.maahn_AT_uni-leipzig.de
example script for converting mrrRaw data to netcdf using IMProToos
'''
from __future__ import print_function
import sys
import numpy as np
import glob
import os
import datetime
import IMProToo
import gzip
version = IMProToo.__version__
# Command line: batch_convert_rawData.py pathIn pathOut site
if len(sys.argv) < 4:
    sys.exit('use: python batch_convert_rawData.py pathIn pathOut site')
pathIn = sys.argv[1]
pathOut = sys.argv[2]
site = sys.argv[3]
# Skip input files whose output netCDF (plain or gzipped) already exists.
skipExisting = True
print(pathIn)
try:
    os.mkdir(pathOut)
except OSError:
    # Output directory already exists.
    pass
# go through all gz compressed files in pathIn/year/month/
for nfile in np.sort(glob.glob(pathIn+"/*raw*")):
    # get the timestamp
    timestamp = None
    # Handle both gzip-compressed and plain raw files transparently.
    if nfile.split('.')[-1] == 'gz':
        f = gzip.open(nfile, 'rt')
    else:
        f = open(nfile, 'r')
    # Sometimes the first MRR timestamps are from the day before, so we cannot take the first date we found. get list of line breaks
    line_offset = []
    offset = 0
    for line in f:
        line_offset.append(offset)
        offset += len(line)
    f.seek(0)
    # Now, to skip 20% of the file
    f.seek(line_offset[len(line_offset)//5])
    # now find the date
    try:
        while True:
            string = str(f.readline())
            if not string:
                # End of file without finding a timestamp line.
                break
            if string[:2] == "T:":
                timestamp = datetime.datetime.strptime(
                    string[2:14], "%y%m%d%H%M%S").strftime("%Y%m%d")
                break
            elif string[:4] == "MRR ":
                timestamp = datetime.datetime.strptime(
                    string[4:16], "%y%m%d%H%M%S").strftime("%Y%m%d")
                break
    finally:
        f.close()
    if timestamp is None:
        print("did not find MRR timesamp in %s, Skipping" % nfile)
        continue
    fileOut = pathOut+"/mrr_improtoo_"+version+"_"+site+"_"+timestamp+".nc"
    if skipExisting and (os.path.isfile(fileOut) or os.path.isfile(fileOut+".gz")):
        print("NetCDF file aready exists, skipping: ", timestamp, nfile, fileOut)
        continue
    print(timestamp, nfile, fileOut)
    # load raw data from file
    print("reading...", nfile)
    try:
        rawData = IMProToo.mrrRawData(nfile)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt --
        # consider narrowing to Exception.
        print("could not read data")
        continue
    try:
        # convert rawData object
        processedSpec = IMProToo.MrrZe(rawData)
        # average rawData to 60s
        processedSpec.averageSpectra(60)
        # the MRR at 'lyr' was affected by interference for some days, dealiasing routine needs to know about that:
        if site == "lyr" and timestamp in ['20100620', '20100621', '20100622', '20100623', '20100624', '20100625', '20100626', '20100627', '20100628', '20100629', '20100630', '20100701', '20100702', '20100703', '20100704', '20100705', '20100706', '20100707']:
            processedSpec.co['dealiaseSpectrum_heightsWithInterference'] = processedSpec.co[
                'dealiaseSpectrum_heightsWithInterference'] + [25, 26, 27, 28, 29, 30]
        # creator attribute of netCDF file
        processedSpec.co["ncCreator"] = "M.Maahn, IGM University of Cologne"
        # calculate Ze and other moments
        processedSpec.rawToSnow()
        # write all variables to a netCDF file.
        print("writing...", fileOut)
        processedSpec.writeNetCDF(fileOut, ncForm="NETCDF3_CLASSIC")
    except Exception as error:
        print(str(error))
        print("could not process data")
        continue
|
aronnem/IMProToo
|
examples/batch_convert_rawData.py
|
Python
|
gpl-3.0
| 3,538
|
[
"NetCDF"
] |
3e9bda5ae9431604d48ba241639424a6805fdb0fbb810aa82d6c50741f50be54
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines the astrophysics-specific units. They are also
available in the `astropy.units` namespace.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from . import si
from ..constants import si as _si
from .core import (UnitBase, def_unit, si_prefixes, binary_prefixes,
set_enabled_units)
# To ensure si units of the constants can be interpreted.
set_enabled_units([si])
import numpy as _numpy
# def_unit() injects each unit into this module's namespace via _ns.
_ns = globals()
###########################################################################
# LENGTH
def_unit((['AU', 'au'], ['astronomical_unit']), _si.au, namespace=_ns, prefixes=True,
         doc="astronomical unit: approximately the mean Earth--Sun "
         "distance.")
def_unit(['pc', 'parsec'], _si.pc, namespace=_ns, prefixes=True,
         doc="parsec: approximately 3.26 light-years.")
def_unit(['solRad', 'R_sun', 'Rsun'], _si.R_sun, namespace=_ns,
         doc="Solar radius", prefixes=True,
         format={'latex': r'R_{\odot}', 'unicode': 'R⊙'})
def_unit(['jupiterRad', 'R_jup', 'Rjup','R_jupiter', 'Rjupiter'],
         _si.R_jup, namespace=_ns, prefixes=True, doc="Jupiter radius",
         # LaTeX jupiter symbol requires wasysym
         format={'latex': r'R_{\rm J}', 'unicode': 'R♃'})
def_unit(['earthRad', 'R_earth', 'Rearth'], _si.R_earth, namespace=_ns,
         prefixes=True, doc="Earth radius",
         # LaTeX earth symbol requires wasysym
         format={'latex': r'R_{\oplus}', 'unicode': 'R⊕'})
def_unit(['lyr', 'lightyear'], (_si.c * si.yr).to(si.m),
         namespace=_ns, prefixes=True, doc="Light year")
###########################################################################
# AREAS
def_unit(['barn', 'barn'], 10 ** -28 * si.m ** 2, namespace=_ns, prefixes=True,
         doc="barn: unit of area used in HEP")
###########################################################################
# ANGULAR MEASUREMENTS
def_unit(['cycle', 'cy'], 2.0 * _numpy.pi * si.rad,
         namespace=_ns, prefixes=False,
         doc="cycle: angular measurement, a full turn or rotation")
###########################################################################
# MASS
def_unit(['solMass', 'M_sun', 'Msun'], _si.M_sun, namespace=_ns,
         prefixes=True, doc="Solar mass",
         format={'latex': r'M_{\odot}', 'unicode': 'M⊙'})
def_unit(['jupiterMass', 'M_jup', 'Mjup','M_jupiter', 'Mjupiter'],
         _si.M_jup, namespace=_ns, prefixes=True, doc="Jupiter mass",
         # LaTeX jupiter symbol requires wasysym
         format={'latex': r'M_{\rm J}', 'unicode': 'M♃'})
def_unit(['earthMass', 'M_earth', 'Mearth'], _si.M_earth, namespace=_ns,
         prefixes=True, doc="Earth mass",
         # LaTeX earth symbol requires wasysym
         format={'latex': r'M_{\oplus}', 'unicode': 'M⊕'})
def_unit(['M_p'], _si.m_p, namespace=_ns, doc="Proton mass",
         format={'latex': r'M_{p}', 'unicode': 'Mₚ'})
def_unit(['M_e'], _si.m_e, namespace=_ns, doc="Electron mass",
         format={'latex': r'M_{e}', 'unicode': 'Mₑ'})
# Unified atomic mass unit
def_unit(['u', 'Da', 'Dalton'], _si.u, namespace=_ns,
         prefixes=True, exclude_prefixes=['a', 'da'],
         doc="Unified atomic mass unit")
##########################################################################
# ENERGY
# Here, explicitly convert the planck constant to 'eV s' since the constant
# can override that to give a more precise value that takes into account
# covariances between e and h. Eventually, this may also be replaced with
# just `_si.Ryd.to(eV)`.
def_unit(['Ry', 'rydberg'],
         (_si.Ryd * _si.c * _si.h.to(si.eV * si.s)).to(si.eV),
         namespace=_ns, prefixes=True,
         doc="Rydberg: Energy of a photon whose wavenumber is the Rydberg "
         "constant",
         format={'latex': r'R_{\infty}', 'unicode': 'R∞'})
###########################################################################
# ILLUMINATION
def_unit(['solLum', 'L_sun', 'Lsun'], _si.L_sun, namespace=_ns,
         prefixes=True, doc="Solar luminance",
         format={'latex': r'L_{\odot}', 'unicode': 'L⊙'})
###########################################################################
# SPECTRAL DENSITY
def_unit((['ph', 'photon'], ['photon']),
         format={'ogip': 'photon', 'vounit': 'photon'},
         namespace=_ns, prefixes=True)
def_unit(['Jy', 'Jansky', 'jansky'], 1e-26 * si.W / si.m ** 2 / si.Hz,
         namespace=_ns, prefixes=True,
         doc="Jansky: spectral flux density")
def_unit(['R', 'Rayleigh', 'rayleigh'],
         (1e10 / (4 * _numpy.pi)) *
         ph * si.m ** -2 * si.s ** -1 * si.sr ** -1,
         namespace=_ns, prefixes=True,
         doc="Rayleigh: photon flux")
###########################################################################
# MISCELLANEOUS
# Some of these are very FITS-specific and perhaps considered a mistake.
# Maybe they should be moved into the FITS format class?
# TODO: This is defined by the FITS standard as "relative to the sun".
# Is that mass, volume, what?
def_unit(['Sun'], namespace=_ns)
###########################################################################
# EVENTS
def_unit((['ct', 'count'], ['count']),
         format={'fits': 'count', 'ogip': 'count', 'vounit': 'count'},
         namespace=_ns, prefixes=True, exclude_prefixes=['p'])
def_unit((['pix', 'pixel'], ['pixel']),
         format={'ogip': 'pixel', 'vounit': 'pixel'},
         namespace=_ns, prefixes=True)
###########################################################################
# MISCELLANEOUS
def_unit(['chan'], namespace=_ns, prefixes=True)
def_unit(['bin'], namespace=_ns, prefixes=True)
def_unit((['vox', 'voxel'], ['voxel']),
         format={'fits': 'voxel', 'ogip': 'voxel', 'vounit': 'voxel'},
         namespace=_ns, prefixes=True)
def_unit((['bit', 'b'], ['bit']), namespace=_ns,
         prefixes=si_prefixes + binary_prefixes)
def_unit((['byte', 'B'], ['byte']), 8 * bit, namespace=_ns,
         format={'vounit': 'byte'},
         prefixes=si_prefixes + binary_prefixes,
         exclude_prefixes=['d'])
def_unit(['adu'], namespace=_ns, prefixes=True)
def_unit(['beam'], namespace=_ns, prefixes=True)
def_unit(['electron'], doc="Number of electrons", namespace=_ns,
         format={'latex': r'e^{-}', 'unicode': 'e⁻'})
###########################################################################
# CLEANUP
# Remove setup-only names so they are not mistaken for units in this
# namespace.
del UnitBase
del def_unit
del si
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import generate_unit_summary as _generate_unit_summary
if __doc__ is not None:
    __doc__ += _generate_unit_summary(globals())
|
tbabej/astropy
|
astropy/units/astrophys.py
|
Python
|
bsd-3-clause
| 6,881
|
[
"Dalton"
] |
3faa81935a7a4ef5dd36ba92746a3afd0a535bbc10d8db99e2b4406c5fbdd4ed
|
from sympy import (meijerg, I, S, integrate, Integral, oo, gamma,
hyperexpand, exp, simplify, sqrt, pi, erf, sin, cos,
exp_polar, polar_lift, polygamma, hyper, log)
from sympy.integrals.meijerint import (_rewrite_single, _rewrite1,
meijerint_indefinite, _inflate_g, _create_lookup_table,
meijerint_definite, meijerint_inversion)
from sympy.utilities.randtest import (test_numerically,
random_complex_number as randcplx)
from sympy.abc import x, y, a, b, c, d, s, t, z
def test_rewrite_single():
    # t(): expr must rewrite to a single G-function whose argument has
    # coefficient c and monomial m in x.
    def t(expr, c, m):
        e = _rewrite_single(meijerg([a], [b], [c], [d], expr), x)
        assert e is not None
        assert isinstance(e[0][0][2], meijerg)
        assert e[0][0][2].argument.as_coeff_mul(x) == (c, (m,))
    # tn(): expr must NOT be rewritable as a single G-function.
    def tn(expr):
        assert _rewrite_single(meijerg([a], [b], [c], [d], expr), x) is None
    t(x, 1, x)
    t(x**2, 1, x**2)
    t(x**2 + y*x**2, y + 1, x**2)
    tn(x**2 + x)
    tn(x**y)
    # u(): numerical round-trip check of the rewritten expression.
    def u(expr, x):
        from sympy import Add, exp, exp_polar
        r = _rewrite_single(expr, x)
        e = Add(*[res[0]*res[2] for res in r[0]]).replace(exp_polar, exp)  # XXX Hack?
        assert test_numerically(e, expr, x)
    u(exp(-x)*sin(x), x)
    # The following has stopped working because hyperexpand changed slightly.
    # It is probably not worth fixing
    #u(exp(-x)*sin(x)*cos(x), x)
    # This one cannot be done numerically, since it comes out as a g-function
    # of argument 4*pi
    # NOTE This also tests a bug in inverse mellin transform (which used to
    # turn exp(4*pi*I*t) into a factor of exp(4*pi*I)**t instead of
    # exp_polar).
    #u(exp(x)*sin(x), x)
    assert _rewrite_single(exp(x)*sin(x), x) == \
        ([(-sqrt(2)/(2*sqrt(pi)), 0,
           meijerg(((-S(1)/2, 0, S(1)/4, S(1)/2, S(3)/4), (1,)),
                   ((), (-S(1)/2, 0)), 64*exp_polar(-4*I*pi)/x**4))], True)
def test_rewrite1():
    # _rewrite1 splits fac*G(...) into (constant, power-of-x, G-list, flag).
    expected = (5, x**3,
                [(1, 0, meijerg([a], [b], [c], [d], x**2*(y + 1)))],
                True)
    assert _rewrite1(x**3*meijerg([a], [b], [c], [d], x**2 + y*x**2)*5, x) \
        == expected
def test_meijerint_indefinite_numerically():
    # Integrate fac*G(arg) symbolically and check that the derivative of
    # the result matches the integrand at random complex parameters.
    def t(fac, arg):
        g = meijerg([a], [b], [c], [d], arg)*fac
        subs = {a: randcplx()/10, b:randcplx()/10 + I,
                c: randcplx(), d: randcplx()}
        integral = meijerint_indefinite(g, x)
        assert integral is not None
        assert test_numerically(g.subs(subs), integral.diff(x).subs(subs), x)
    t(1, x)
    t(2, x)
    t(1, 2*x)
    t(1, x**2)
    t(5, x**S('3/2'))
    t(x**3, x)
    t(3*x**S('3/2'), 4*x**S('7/3'))
def test_inflate():
    subs = {a: randcplx()/10, b: randcplx()/10 + I, c: randcplx(),
            d: randcplx(), y:randcplx()/10}
    # Check that the product returned by _inflate_g(m1, n) is numerically
    # equal to the original G-function.
    def t(a, b, arg, n):
        from sympy import Mul
        m1 = meijerg(a, b, arg)
        m2 = Mul(*_inflate_g(m1, n))
        # NOTE: (the random number)**9 must still be on the principal sheet.
        # Thus make b&d small to create random numbers of small imaginary part.
        return test_numerically(m1.subs(subs), m2.subs(subs), x, b=0.1, d=-0.1)
    assert t([[a], [b]], [[c], [d]], x, 3)
    assert t([[a, y], [b]], [[c], [d]], x, 3)
    assert t([[a], [b]], [[c, y], [d]], 2*x**3, 3)
def test_recursive():
    from sympy import symbols, exp_polar, expand
    a, b, c = symbols('a b c', positive=True)
    # Products of Gaussians over the half-line: closed forms involving erf.
    assert simplify(integrate(exp(-(x-a)**2)*exp(-(x - b)**2), (x, 0, oo))) \
        == sqrt(2*pi)/4*(1 + erf(sqrt(2)/2*(a + b))) \
        *exp(-a**2 - b**2 + (a + b)**2/2)
    assert simplify(integrate
                    (exp(-(x - a)**2)*exp(-(x - b)**2)*exp(c*x), (x, 0, oo))) \
        == sqrt(2*pi)/4*(1 + erf(sqrt(2)/4*(2*a + 2*b + c))) \
        *exp(-a**2 - b**2 + (2*a + 2*b + c)**2/8)
    # Shifted Gaussians: complementary erf forms for +/- shifts.
    assert simplify(integrate(exp(-(x - a - b - c)**2), (x, 0, oo))) \
        == sqrt(pi)/2*(1 + erf(a + b + c))
    assert simplify(integrate(exp(-(x + a + b + c)**2), (x, 0, oo))) \
        == sqrt(pi)/2*(1 - erf(a + b + c))
def test_meijerint():
    """Broad regression tests for definite/indefinite Meijer-G integration."""
    from sympy import symbols, expand, arg
    s, t, mu = symbols('s t mu', real=True)
    assert integrate(meijerg([], [], [0], [], s*t)
                     *meijerg([], [], [mu/2], [-mu/2], t**2/4),
                     (t, 0, oo)).is_Piecewise
    s = symbols('s', positive=True)
    assert integrate(x**s*meijerg([[],[]], [[0],[]], x), (x, 0, oo)) \
        == gamma(s + 1)
    assert integrate(x**s*meijerg([[],[]], [[0],[]], x), (x, 0, oo),
                     meijerg=True) == gamma(s + 1)
    assert isinstance(integrate(x**s*meijerg([[],[]], [[0],[]], x),
                                (x, 0, oo), meijerg=False),
                      Integral)
    assert meijerint_indefinite(exp(x), x) == exp(x)
    # TODO what simplifications should be done automatically?
    # This tests "extra case" for antecedents_1.
    a, b = symbols('a b', positive=True)
    assert simplify(meijerint_definite(x**a, x, 0, b)[0]) \
        == b**(a + 1)/(a + 1)
    # This tests various conditions and expansions:
    # (fixed: this comparison was a bare expression, i.e. a no-op test)
    assert meijerint_definite((x+1)**3*exp(-x), x, 0, oo) == (16, True)
    # Again, how about simplifications?
    sigma, mu = symbols('sigma mu', positive=True)
    i, c = meijerint_definite(exp(-((x - mu)/(2*sigma))**2), x, 0, oo)
    assert simplify(i) \
        == sqrt(pi)*sigma*(erf(mu/(2*sigma)) + 1)
    assert c is True
    i, _ = meijerint_definite(exp(-mu*x)*exp(sigma*x), x, 0, oo)
    # TODO it would be nice to test the condition
    assert simplify(i) == 1/(mu - sigma)
    # Test substitutions to change limits
    assert meijerint_definite(exp(x), x, -oo, 2) == (exp(2), True)
    assert expand(meijerint_definite(exp(x), x, 0, I)[0]) == exp(I) - 1
    assert expand(meijerint_definite(exp(-x), x, 0, x)[0]) == \
        1 - exp(-exp(I*arg(x))*abs(x))
    # Test -oo to oo
    assert meijerint_definite(exp(-x**2), x, -oo, oo) == (sqrt(pi), True)
    assert meijerint_definite(exp(-abs(x)), x, -oo, oo) == (2, True)
    assert meijerint_definite(exp(-(2*x-3)**2), x, -oo, oo) == \
        (sqrt(pi)/2, True)
    assert meijerint_definite(exp(-abs(2*x-3)), x, -oo, oo) == (1, True)
    assert meijerint_definite(exp(-((x - mu)/sigma)**2/2)/sqrt(2*pi*sigma**2),
                              x, -oo, oo) == (1, True)
    # Test one of the extra conditions for 2 g-functinos
    assert meijerint_definite(exp(-x)*sin(x), x, 0, oo) == (S(1)/2, True)
    # Test a bug
    def res(n): return (1/(1+x**2)).diff(x, n).subs(x,1)*(-1)**n
    for n in range(6):
        assert integrate(exp(-x)*sin(x)*x**n, (x, 0, oo), meijerg=True) == \
            res(n)
    # This used to test trigexpand... now it is done by linear substitution
    assert simplify(integrate(exp(-x)*sin(x + a), (x, 0, oo), meijerg=True)
                    ).expand().rewrite(sin).expand() == sin(a)/2 + cos(a)/2
    # Test the condition 14 from prudnikov.
    # (This is besselj*besselj in disguise, to stop the product from being
    # recognised in the tables.)
    a, b, s = symbols('a b s')
    from sympy import And, re
    assert meijerint_definite(meijerg([], [], [a/2], [-a/2], x/4) \
                              *meijerg([], [], [b/2], [-b/2], x/4)*x**(s-1), x, 0, oo) == \
        (4*2**(2*s - 2)*gamma(-2*s + 1)*gamma(a/2 + b/2 + s) \
         /(gamma(-a/2 + b/2 - s + 1)*gamma(a/2 - b/2 - s + 1) \
           *gamma(a/2 + b/2 - s + 1)),
         And(0 < -2*re(4*s) + 8, 0 < re(a/2 + b/2 + s), re(2*s) < 1))
    # test a bug
    assert integrate(sin(x**a)*sin(x**b), (x, 0, oo), meijerg=True) == \
        Integral(sin(x**a)*sin(x**b), (x, 0, oo))
    # test better hyperexpand
    assert integrate(exp(-x**2)*log(x), (x, 0, oo), meijerg=True) == \
        (sqrt(pi)*polygamma(0, S(1)/2)/4).expand()
    # Test hyperexpand bug.
    from sympy import lowergamma
    n = symbols('n', integer = True)
    assert simplify(integrate(exp(-x)*x**n, x, meijerg=True)) == \
        lowergamma(n + 1, x)
    # Test a bug with argument 1/x
    alpha = symbols('alpha', positive=True)
    assert meijerint_definite((2-x)**alpha*sin(alpha/x), x, 0, 2) == \
        (sqrt(pi)*gamma(alpha + 1) \
         *meijerg([S(1)/2, 0, S(1)/2], [1], [],
                  [-alpha/2, -alpha/2 - S(1)/2], 16/alpha**2), True)
    # test a bug related to 3016
    a, s = symbols('a s', positive=True)
    assert simplify(integrate(x**s*exp(-a*x**2), (x, -oo, oo))) == \
        a**(-s/2 - S(1)/2)*(exp(I*pi*s) + 1)*gamma(s/2 + S(1)/2)/2
def test_bessel():
    """Bessel-function integrals through the Meijer G-function machinery.

    Covers orthogonality-type definite integrals and a selection of
    indefinite integrals from W. Rosenheinrich's tables.
    """
    from sympy import (besselj, Heaviside, besseli, polar_lift, exp_polar,
                       powdenest)
    # Orthogonality-type definite integrals (conditions suppressed).
    assert simplify(integrate(besselj(a, z)*besselj(b, z)/z, (z, 0, oo),
                              meijerg=True, conds='none')) == \
        2*sin(pi*a/2 - pi*b/2)/(pi*(a - b)*(a + b))
    assert simplify(integrate(besselj(a, z)*besselj(a, z)/z, (z, 0, oo),
                              meijerg=True, conds='none')) == 1/(2*a)
    # TODO more orthogonality integrals
    # Integral representation of besselj (Gegenbauer-type).
    assert simplify(integrate(sin(z*x)*(x**2-1)**(-(y+S(1)/2)),
                              (x, 1, oo), meijerg=True, conds='none')
                    *2/((z/2)**y*sqrt(pi)*gamma(S(1)/2-y))) == \
        besselj(y, z)
    # Werner Rosenheinrich
    # SOME INDEFINITE INTEGRALS OF BESSEL FUNCTIONS
    assert integrate(x*besselj(0, x), x, meijerg=True) == x*besselj(1, x)
    assert integrate(x*besseli(0, x), x, meijerg=True) == x*besseli(1, x)
    # TODO can do higher powers, but come out as high order ... should they be
    #      reduced to order 0, 1?
    assert integrate(besselj(1, x), x, meijerg=True) == -besselj(0, x)
    assert integrate(besselj(1, x)**2/x, x, meijerg=True) == \
        -(besselj(0, x)**2 + besselj(1, x)**2)/2
    # TODO more besseli when tables are extended or recursive mellin works
    assert integrate(besselj(0, x)**2/x**2, x, meijerg=True) == \
        -2*x*besselj(0, x)**2 - 2*x*besselj(1, x)**2 \
        + 2*besselj(0, x)*besselj(1, x) - besselj(0, x)**2/x
    assert integrate(besselj(0, x)*besselj(1, x), x, meijerg=True) == \
        -besselj(0, x)**2/2
    assert integrate(x**2*besselj(0, x)*besselj(1, x), x, meijerg=True) == \
        x**2*besselj(1, x)**2/2
    assert integrate(besselj(0, x)*besselj(1, x)/x, x, meijerg=True) == \
        (x*besselj(0, x)**2 + x*besselj(1, x)**2 - \
            besselj(0, x)*besselj(1, x))
    # TODO how does besselj(0, a*x)*besselj(0, b*x) work?
    # TODO how does besselj(0, x)**2*besselj(1, x)**2 work?
    # TODO sin(x)*besselj(0, x) etc come out a mess
    # TODO can x*log(x)*besselj(0, x) be done?
    # TODO how does besselj(1, x)*besselj(0, x+a) work?
    # TODO more indefinite integrals when struve functions etc are implemented
    # test a substitution
    assert integrate(besselj(1, x**2)*x, x, meijerg=True) == \
        -besselj(0, x**2)/2
def test_inversion():
    """Inverse Laplace transforms computed via meijerint_inversion.

    Results carry Heaviside(t) factors; `inv` folds any Piecewise the
    inversion produces.
    """
    from sympy import piecewise_fold, besselj, sqrt, I, sin, cos, Heaviside
    def inv(f): return piecewise_fold(meijerint_inversion(f, s, t))
    assert inv(1/(s**2 + 1)) == sin(t)*Heaviside(t)
    assert inv(s/(s**2 + 1)) == cos(t)*Heaviside(t)
    assert inv(exp(-s)/s) == Heaviside(t - 1)
    assert inv(1/sqrt(1 + s**2)) == besselj(0, t)*Heaviside(t)
    # Test some antecedents checking: inversion must refuse transforms
    # that do not decay in a right half-plane.
    assert meijerint_inversion(sqrt(s)/sqrt(1 + s**2), s, t) is None
    assert inv(exp(s**2)) is None
    assert meijerint_inversion(exp(-s**2), s, t) is None
def test_lookup_table():
    """Numerically validate every entry of the meijerint lookup table.

    For each (formula, terms) pair: substitute random numeric values for
    the free symbols, check hyperexpand can expand the G-functions, and
    compare formula vs. the expanded sum to ~1e-10 relative accuracy.
    """
    from random import uniform, randrange
    from sympy import Add, unpolarify, exp_polar, exp
    from sympy.integrals.meijerint import z as z_dummy
    table = {}
    _create_lookup_table(table)
    for _, l in sorted(table.items()):
        for formula, terms, cond, hint in sorted(l):
            subs = {}
            for a in list(formula.free_symbols) + [z_dummy]:
                if hasattr(a, 'properties') and a.properties:
                    # these Wilds match positive integers
                    subs[a] = randrange(1, 10)
                else:
                    subs[a] = uniform(1.5, 3.5)
            if not isinstance(terms, list):
                # terms may be a callable producing the list lazily
                terms = terms(subs)
            # First test that hyperexpand can do this.
            expanded = [hyperexpand(g) for (_, g) in terms]
            assert all (x.is_Piecewise or not x.has(meijerg) for x in expanded)
            # Now test that the meijer g-function is indeed as advertised.
            expanded = Add(*[f*x for (f, x) in terms])
            a, b = formula.n(subs=subs), expanded.n(subs=subs)
            r = min(abs(a), abs(b))
            # Relative error when the values are large, absolute otherwise.
            if r < 1:
                assert abs(a - b).n() <= 1e-10
            else:
                assert (abs(a - b)/r).n() <= 1e-10
def test_branch_bug():
    """Regression test for branch-cut handling in integrate(erf(x**3))."""
    from sympy import powdenest, lowergamma
    # TODO combsimp cannot prove that the factor is unity
    assert powdenest(integrate(erf(x**3), x, meijerg=True).diff(x),
           polar=True) == 2*erf(x**3)*gamma(S(2)/3)/3/gamma(S(5)/3)
    assert integrate(erf(x**3), x, meijerg=True) == \
        2*x*erf(x**3)*gamma(S(2)/3)/(3*gamma(S(5)/3)) \
        - 2*gamma(S(2)/3)*lowergamma(S(2)/3, x**6)/(3*sqrt(pi)*gamma(S(5)/3))
def test_linear_subs():
    """Antiderivatives found via a linear substitution (x -> x - 1)."""
    from sympy import besselj
    cases = [
        (sin(x - 1), -cos(1 - x)),
        (besselj(1, x - 1), -besselj(0, 1 - x)),
    ]
    for integrand, expected in cases:
        assert integrate(integrand, x, meijerg=True) == expected
def test_probability():
    """Moments and normalisations of standard probability distributions,
    all evaluated through the Meijer G-function integration code."""
    # various integrals from probability theory
    from sympy.abc import x, y, z
    from sympy import symbols, Symbol, Abs, expand_mul, combsimp, powsimp, sin
    mu1, mu2 = symbols('mu1 mu2', real=True, finite=True, bounded=True)
    sigma1, sigma2 = symbols('sigma1 sigma2', real=True, finite=True,
                             bounded=True, positive=True)
    rate = Symbol('lambda', real=True, positive=True, bounded=True)
    def normal(x, mu, sigma):
        # Gaussian pdf with mean mu and standard deviation sigma.
        return 1/sqrt(2*pi*sigma**2)*exp(-(x - mu)**2/2/sigma**2)
    def exponential(x, rate):
        # Exponential pdf with rate parameter `rate`.
        return rate*exp(-rate*x)
    # Gaussian: normalisation and first three moments, also as iterated
    # 2D integrals (both orders must agree).
    assert integrate(normal(x, mu1, sigma1), (x, -oo, oo), meijerg=True) == 1
    assert integrate(x*normal(x, mu1, sigma1), (x, -oo, oo), meijerg=True) == \
        mu1
    assert integrate(x**2*normal(x, mu1, sigma1), (x, -oo, oo), meijerg=True) \
        == mu1**2 + sigma1**2
    assert integrate(x**3*normal(x, mu1, sigma1), (x, -oo, oo), meijerg=True) \
        == mu1**3 + 3*mu1*sigma1**2
    assert integrate(normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == 1
    assert integrate(x*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == mu1
    assert integrate(y*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == mu2
    assert integrate(x*y*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == mu1*mu2
    assert integrate((x + y + 1)*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == 1 + mu1 + mu2
    assert integrate((x + y - 1)*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == \
        -1 + mu1 + mu2
    i = integrate(x**2*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                  (x, -oo, oo), (y, -oo, oo), meijerg=True)
    assert not i.has(Abs)
    assert simplify(i) == mu1**2 + sigma1**2
    assert integrate(y**2*normal(x, mu1, sigma1)*normal(y, mu2, sigma2),
                     (x, -oo, oo), (y, -oo, oo), meijerg=True) == \
        sigma2**2 + mu2**2
    # Exponential: normalisation and first two moments.
    assert integrate(exponential(x, rate), (x, 0, oo), meijerg=True) == 1
    assert integrate(x*exponential(x, rate), (x, 0, oo), meijerg=True) == \
        1/rate
    assert integrate(x**2*exponential(x, rate), (x, 0, oo), meijerg=True) \
        == 2/rate**2
    def E(expr):
        # Expectation over independent exponential(x) and normal(y);
        # computed in both integration orders to check Fubini symmetry.
        res1 = integrate(expr*exponential(x, rate)*normal(y, mu1, sigma1),
                         (x, 0, oo), (y, -oo, oo), meijerg=True)
        res2 = integrate(expr*exponential(x, rate)*normal(y, mu1, sigma1),
                        (y, -oo, oo), (x, 0, oo), meijerg=True)
        assert expand_mul(res1) == expand_mul(res2)
        return res1
    assert E(1) == 1
    assert E(x*y) == mu1/rate
    assert E(x*y**2) == mu1**2/rate + sigma1**2/rate
    ans = (rate**2*sigma1**2 + 1)/rate**2
    assert simplify(E((x + y + 1)**2) - E(x + y + 1)**2) == ans
    assert simplify(E((x + y - 1)**2) - E(x + y - 1)**2) == ans
    assert simplify(E((x + y)**2) - E(x + y)**2) == ans
    # Beta' distribution
    alpha, beta = symbols('alpha beta', positive=True)
    betadist = x**(alpha-1)*(1+x)**(-alpha - beta)*gamma(alpha + beta) \
        /gamma(alpha)/gamma(beta)
    assert integrate(betadist, (x, 0, oo), meijerg=True) == 1
    i = integrate(x*betadist, (x, 0, oo), meijerg=True, conds='separate')
    assert (combsimp(i[0]), i[1]) == (alpha/(beta - 1), 1 < beta)
    j = integrate(x**2*betadist, (x, 0, oo), meijerg=True, conds='separate')
    assert j[1] == (1 < beta - 1)
    assert combsimp(j[0] - i[0]**2) == (alpha + beta - 1)*alpha \
        /(beta - 2)/(beta - 1)**2
    # Beta distribution
    # NOTE: this is evaluated using antiderivatives. It also tests that
    #       meijerint_indefinite returns the simplest possible answer.
    a, b = symbols('a b', positive=True)
    betadist = x**(a - 1)*(-x + 1)**(b - 1)*gamma(a + b)/(gamma(a)*gamma(b))
    assert simplify(integrate(betadist, (x, 0, 1), meijerg=True)) == 1
    assert simplify(integrate(x*betadist, (x, 0, 1), meijerg=True)) == \
        a/(a + b)
    assert simplify(integrate(x**2*betadist, (x, 0, 1), meijerg=True)) == \
        a*(a + 1)/(a + b)/(a + b + 1)
    assert simplify(integrate(x**y*betadist, (x, 0, 1), meijerg=True)) == \
        gamma(a + b)*gamma(a + y)/gamma(a)/gamma(a + b + y)
    # Chi distribution
    k = Symbol('k', integer=True, positive=True)
    chi = 2**(1-k/2)*x**(k-1)*exp(-x**2/2)/gamma(k/2)
    assert powsimp(integrate(chi, (x, 0, oo), meijerg=True)) == 1
    assert simplify(integrate(x*chi, (x, 0, oo), meijerg=True)) == \
        sqrt(2)*gamma((k + 1)/2)/gamma(k/2)
    assert simplify(integrate(x**2*chi, (x, 0, oo), meijerg=True)) == k
    # Chi^2 distribution
    chisquared = 2**(-k/2)/gamma(k/2)*x**(k/2-1)*exp(-x/2)
    assert powsimp(integrate(chisquared, (x, 0, oo), meijerg=True)) == 1
    assert simplify(integrate(x*chisquared, (x, 0, oo), meijerg=True)) == k
    assert simplify(integrate(x**2*chisquared, (x, 0, oo), meijerg=True)) == \
        k*(k + 2)
    assert combsimp(integrate(((x - k)/sqrt(2*k))**3*chisquared, (x, 0, oo),
                    meijerg=True)) == 2*sqrt(2)/sqrt(k)
    # Dagum distribution
    a, b, p = symbols('a b p', positive=True)
    # XXX (x/b)**a does not work
    dagum = a*p/x*(x/b)**(a*p)/(1 + x**a/b**a)**(p+1)
    assert simplify(integrate(dagum, (x, 0, oo), meijerg=True)) == 1
    # XXX conditions are a mess
    arg = x*dagum
    assert simplify(integrate(arg, (x, 0, oo), meijerg=True, conds='none')
                    ) == b*gamma(1 - 1/a)*gamma(p + 1/a)/gamma(p)
    assert simplify(integrate(x*arg, (x, 0, oo), meijerg=True, conds='none')
                    ) == b**2*gamma(1 - 2/a)*gamma(p + 2/a)/gamma(p)
    # F-distribution
    d1, d2 = symbols('d1 d2', positive=True)
    f = sqrt(((d1*x)**d1 * d2**d2)/(d1*x + d2)**(d1+d2))/x \
        /gamma(d1/2)/gamma(d2/2)*gamma((d1 + d2)/2)
    assert simplify(integrate(f, (x, 0, oo), meijerg=True)) == 1
    # TODO conditions are a mess
    assert simplify(integrate(x*f, (x, 0, oo), meijerg=True, conds='none')
                    ) == d2/(d2 - 2)
    assert simplify(integrate(x**2*f, (x, 0, oo), meijerg=True, conds='none')
                    ) == d2**2*(d1 + 2)/d1/(d2 - 4)/(d2 - 2)
    # TODO gamma, rayleigh
    # inverse gaussian
    lamda, mu = symbols('lamda mu', positive=True)
    dist = sqrt(lamda/2/pi)*x**(-S(3)/2)*exp(-lamda*(x - mu)**2/x/2/mu**2)
    mysimp = lambda expr: simplify(expr.rewrite(exp))
    assert mysimp(integrate(dist, (x, 0, oo))) == 1
    assert mysimp(integrate(x*dist, (x, 0, oo))) == mu
    assert mysimp(integrate((x - mu)**2*dist, (x, 0, oo))) == mu**3/lamda
    assert mysimp(integrate((x - mu)**3*dist, (x, 0, oo))) == 3*mu**5/lamda**2
    # Levi
    c = Symbol('c', positive=True)
    assert integrate(sqrt(c/2/pi)*exp(-c/2/(x - mu))/(x - mu)**S('3/2'),
                    (x, mu, oo)) == 1
    # higher moments oo
    # log-logistic
    distn = (beta/alpha)*x**(beta-1)/alpha**(beta-1)\
        /(1 + x**beta/alpha**beta)**2
    assert simplify(integrate(distn, (x, 0, oo))) == 1
    # NOTE the conditions are a mess, but correctly state beta > 1
    assert simplify(integrate(x*distn, (x, 0, oo), conds='none')) == \
        pi*alpha/beta/sin(pi/beta)
    # (similar comment for conditions applies)
    assert simplify(integrate(x**y*distn, (x, 0, oo), conds='none')) == \
        pi*alpha**y*y/beta/sin(pi*y/beta)
    # weibull
    k = Symbol('k', positive=True)
    n = Symbol('n', positive=True)
    distn = k/lamda*(x/lamda)**(k-1)*exp(-(x/lamda)**k)
    assert simplify(integrate(distn, (x, 0, oo))) == 1
    assert simplify(integrate(x**n*distn, (x, 0, oo))) == \
        lamda**n*gamma(1 + n/k)
    # rice distribution
    from sympy import besseli
    nu, sigma = symbols('nu sigma', positive=True)
    rice = x/sigma**2*exp(-(x**2+ nu**2)/2/sigma**2)*besseli(0, x*nu/sigma**2)
    assert integrate(rice, (x, 0, oo), meijerg=True) == 1
    # can someone verify higher moments?
    # Laplace distribution
    mu = Symbol('mu', real=True)
    b = Symbol('b', positive=True)
    laplace = exp(-abs(x - mu)/b)/2/b
    assert integrate(laplace, (x, -oo, oo), meijerg=True) == 1
    assert integrate(x*laplace, (x, -oo, oo), meijerg=True) == mu
    assert integrate(x**2*laplace, (x, -oo, oo), meijerg=True) == \
        2*b**2 + mu**2
    # TODO are there other distributions supported on (-oo, oo) that we can do?
    # misc tests
    k = Symbol('k', positive=True)
    # E[log X] for a Gamma(k) variable is polygamma(0, k).
    assert combsimp(expand_mul(integrate(log(x)*x**(k - 1)*exp(-x)/gamma(k),
                   (x, 0, oo)))) == polygamma(0, k)
def test_expint():
    """ Test various exponential integrals. """
    from sympy import (expint, unpolarify, Symbol, Ci, Si, Shi, Chi,
                       sin, cos, sinh, cosh, Ei)
    # Laplace-type integrals producing the generalised expint E_n.
    assert simplify(unpolarify(integrate(exp(-z*x)/x**y, (x, 1, oo),
                meijerg=True, conds='none'
                ).rewrite(expint).expand(func=True))) == expint(y, z)
    assert integrate(exp(-z*x)/x, (x, 1, oo), meijerg=True,
                     conds='none').rewrite(expint).expand() == \
        expint(1, z)
    assert integrate(exp(-z*x)/x**2, (x, 1, oo), meijerg=True,
                     conds='none').rewrite(expint).expand() == \
        expint(2, z).rewrite(Ei).rewrite(expint)
    assert integrate(exp(-z*x)/x**3, (x, 1, oo), meijerg=True,
                     conds='none').rewrite(expint).expand() == \
        expint(3, z).rewrite(Ei).rewrite(expint).expand()
    # Trigonometric integrals Si/Ci and their hyperbolic analogues.
    t = Symbol('t', positive=True)
    assert integrate(-cos(x)/x, (x, t, oo), meijerg=True).expand() == Ci(t)
    assert integrate(-sin(x)/x, (x, t, oo), meijerg=True).expand() == \
        Si(t) - pi/2
    assert integrate(sin(x)/x, (x, 0, z), meijerg=True) == Si(z)
    assert integrate(sinh(x)/x, (x, 0, z), meijerg=True) == Shi(z)
    assert integrate(exp(-x)/x, x, meijerg=True).expand().rewrite(expint) == \
        -expint(1, x)
    assert integrate(exp(-x)/x**2, x, meijerg=True).rewrite(expint).expand() \
        == expint(1, x) - exp(-x)/x
    # Polar variable needed so that cos(u)/u integrates to Ci cleanly.
    u = Symbol('u', polar=True)
    assert integrate(cos(u)/u, u, meijerg=True).expand().as_independent(u)[1] \
        == Ci(u)
    assert integrate(cosh(u)/u, u, meijerg=True).expand().as_independent(u)[1]\
        == Chi(u)
    # Antiderivatives of expint and of the Si/Ci/Shi/Chi functions.
    assert integrate(expint(1, x), x, meijerg=True
            ).rewrite(expint).expand() == x*expint(1, x) - exp(-x)
    assert integrate(expint(2, x), x, meijerg=True
            ).rewrite(expint).expand() == \
        -x**2*expint(1, x)/2 + x*exp(-x)/2 - exp(-x)/2
    assert simplify(unpolarify(integrate(expint(y,x), x,
                meijerg=True).rewrite(expint).expand(func=True))) == \
        -expint(y + 1, x)
    assert integrate(Si(x), x, meijerg=True) == x*Si(x) + cos(x)
    assert integrate(Ci(u), u, meijerg=True).expand() == u*Ci(u) - sin(u)
    assert integrate(Shi(x), x, meijerg=True) == x*Shi(x) - cosh(x)
    assert integrate(Chi(u), u, meijerg=True).expand() == u*Chi(u) - sinh(u)
    assert integrate(Si(x)*exp(-x), (x, 0, oo), meijerg=True) == pi/4
    assert integrate(expint(1, x)*sin(x), (x, 0, oo), meijerg=True) == log(2)/2
def test_messy():
    """Assorted transforms/integrals whose results (or conditions) are
    messy but known-correct."""
    from sympy import (laplace_transform, Si, Ci, Shi, Chi, atan, Piecewise,
                       atanh, acoth, E1, besselj, acosh, asin, Ne, And, re,
                       fourier_transform, sqrt, Abs)
    assert laplace_transform(Si(x), x, s) == ((pi - 2*atan(s))/(2*s), 0, True)
    assert laplace_transform(Shi(x), x, s) == (acoth(s)/s, 1, True)
    # where should the logs be simplified?
    assert laplace_transform(Chi(x), x, s) == \
        ((log(s**(-2)) - log((s**2 - 1)/s**2))/(2*s), 1, True)
    # TODO maybe simplify the inequalities?
    assert laplace_transform(besselj(a, x), x, s)[1:] == \
        (0, And(S(0) < re(a/2) + S(1)/2, S(0) < re(a/2) + 1))
    # NOTE s < 0 can be done, but argument reduction is not good enough yet
    assert fourier_transform(besselj(1, x)/x, x, s, noconds=False) == \
        (Piecewise((0, 1 < 4*abs(pi**2*s**2)),
                   (2*sqrt(-4*pi**2*s**2 + 1), True)), 0 < s)
    # TODO FT(besselj(0,x)) - conditions are messy (but for acceptable reasons)
    #      - folding could be better
    assert integrate(E1(x)*besselj(0, x), (x, 0, oo), meijerg=True) \
        == log(1 + sqrt(2))
    assert integrate(E1(x)*besselj(1, x), (x, 0, oo), meijerg=True) \
        == log(S(1)/2 + sqrt(2)/2)
    assert integrate(1/x/sqrt(1 - x**2), x, meijerg=True) == \
        Piecewise((-acosh(1/x), 1 < abs(x**(-2))), (I*asin(1/x), True))
def test_3023():
    """Fresnel-type Gaussian with purely imaginary exponent (issue 3023)."""
    result = integrate(exp(-I*x**2), (x, -oo, oo), meijerg=True)
    expected = -I*sqrt(pi)*exp(I*pi/4)
    assert result == expected
def test_3153():
    """Regression test for issue 3153: the antiderivative must not
    contain unevaluated hypergeometric functions."""
    expr = 1/x/(a + b*x)**(S(1)/3)
    anti = integrate(expr, x, meijerg=True)
    # Bug fix: the original asserted `not expr.has(hyper)`, which is
    # vacuously true (expr is built without hyper).  The point of the
    # test is that hyperexpand removed all hyper() from the RESULT.
    assert not anti.has(hyper)
    # XXX the expression is a mess, but actually upon differentiation and
    # putting in numerical values seems to work...
|
ichuang/sympy
|
sympy/integrals/tests/test_meijerint.py
|
Python
|
bsd-3-clause
| 26,696
|
[
"Gaussian"
] |
51106bf4fbae88fc6ee6f664c6f6c87f10054e29df48ac8af4b6357cc5bc8859
|
from ase import Atoms
from ase.calculators.emt import EMT
from ase.constraints import FixAtoms
from ase.optimize import QuasiNewton
from ase.io import write
# Find the initial and final states for the reaction.
# Set up a two layer slab of Cu and repeat it to a (4 x 4) surface cell.
# (The original comment said "(3 x 3) ... slab of Ru", but the code below
# builds Cu and repeats (4, 4, 1).)
a = 2.70
c = 1.59 * a
sqrt3 = 3. ** .5
bulk = Atoms('2Cu', [(0., 0., 0.), (1./3, 1./3, -0.5*c)],
             tags=(1, 1),
             pbc=(1, 1, 0))
bulk.set_cell([(a, 0, 0),
               (a / 2, sqrt3 * a / 2, 0),
               (0, 0, 1)])
slab = bulk.repeat((4, 4, 1))
# Initial state.
# Add the molecule:
x = a / 2.
y = a * 3. ** .5 / 6.
z = 1.8
d = 1.10  # N2 bond length (Angstrom)
# Molecular state parallel to the surface:
slab += Atoms('2N', [(x, y, z), (x + sqrt3 * d / 2, y + d / 2, z)])
# Use the EMT calculator for the forces and energies:
slab.set_calculator(EMT())
# We don't want to worry about the Cu degrees of freedom:
mask = [atom.symbol == 'Cu' for atom in slab]
slab.set_constraint(FixAtoms(mask=mask))
relax = QuasiNewton(slab)
relax.run(fmax=0.05)
print('initial state:', slab.get_potential_energy())
write('N2.traj', slab)
# Now the final state.
# Move the second N atom to a neighboring hollow site:
slab[-1].position = (x + a, y, z)
# NOTE(review): this run() reuses the optimizer's previous fmax setting --
# presumably intentional; confirm against the ASE tutorial text.
relax.run()
print('final state: ', slab.get_potential_energy())
write('2N.traj', slab)
|
misdoro/python-ase
|
doc/tutorials/N2Ru-Dissociation1.py
|
Python
|
gpl-2.0
| 1,333
|
[
"ASE"
] |
108a855b6fa001fc4fa450f806b898abd7e79550d08e40274053ce66574281db
|
#!/usr/bin/env python
#
# Copyright 2002-2003 by Michael Hoffman. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
Bio.DocSQL: easy access to DB API databases.
>>> import os
>>> import MySQLdb
>>> from Bio import DocSQL
>>> db=MySQLdb.connect(passwd='', db='test')
>>> class CreatePeople(DocSQL.Create):
... '''
... CREATE TEMPORARY TABLE people
... (id INT UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
... last_name TINYTEXT,
... first_name TINYTEXT)
... '''
...
>>> CreatePeople(connection=db)
CreatePeople(message=Success)
"""
__version__ = "$Revision: 1.13 $"
# $Source: /home/bartek/cvs2bzr/biopython_fastimport/cvs_repo/biopython/Bio/DocSQL.py,v $
import sys
from Bio import MissingPythonDependencyError
try:
import MySQLdb
except:
raise MissingPythonDependencyError("Install MySQLdb if you want to use "
"Bio.DocSQL.")
# Module-level default database connection; Query.__init__ falls back to
# this when no 'connection' keyword argument is supplied.
connection = None
class NoInsertionError(Exception):
    """Raised by Insert when the statement affected zero rows."""
    pass
def _check_is_public(name):
    """Raise AttributeError for the private bookkeeping attributes
    (``_names`` and ``_names_hash``) so QueryRow's attribute protocol
    never exposes them as if they were result columns.
    """
    # Idiom: str.startswith instead of the manual slice-compare
    # name[:6] == "_names"; behaviour is identical.
    if name.startswith("_names"):
        raise AttributeError
class QueryRow(list):
    """One result row, supporting attribute access by column name.

    The row values live in the underlying list.  Column names from the
    cursor description are stored in `_names` (with an index map in
    `_names_hash`); both are set via object.__setattr__ so the custom
    __setattr__ below is bypassed while the instance is half-built.
    """
    def __init__(self, cursor):
        try:
            row = cursor.fetchone()
            super(QueryRow, self).__init__(row)
        except TypeError:
            # fetchone() returned None (no more rows): list(None) raises
            # TypeError, which we convert into end-of-iteration.
            raise StopIteration
        object.__setattr__(self, "_names", [x[0] for x in cursor.description]) # FIXME: legacy
        object.__setattr__(self, "_names_hash", {})
        for i, name in enumerate(self._names):
            self._names_hash[name] = i
    def __getattr__(self, name):
        # row.column_name -> row[index of column_name]
        _check_is_public(name)
        try:
            return self[self._names_hash[name]]
        except (KeyError, AttributeError):
            raise AttributeError("'%s' object has no attribute '%s'" \
                                 % (self.__class__.__name__, name))
    def __setattr__(self, name, value):
        # Assign into the row slot when `name` is a known column,
        # otherwise fall back to ordinary attribute assignment.
        try:
            self._names_hash
        except AttributeError:
            # __init__ has not set up the name map yet; behave normally.
            return object.__setattr__(self, name, value)
        _check_is_public(name)
        try:
            index = self._names_hash[name]
            self[index] = value
        except KeyError:
            return object.__setattr__(self, name, value)
# NOTE: Query subclasses keep their SQL in the class docstring; the
# docstring is RUNTIME DATA (see __init__), so never edit it casually.
class Query(object):
    """
    SHOW TABLES
    """
    MSG_FAILURE = "Failure"
    MSG_SUCCESS = "Success"
    # Remains "not executed" until a cursor is successfully obtained.
    message = "not executed"
    error_message = ""
    # prefix/suffix are concatenated around the docstring to form the
    # final SQL statement.
    prefix = ""
    suffix = ""
    row_class = QueryRow
    def __init__(self, *args, **keywds):
        # Recognised keywords: 'connection' (falls back to the module
        # global) and 'diagnostics' (prints statement/params when set).
        # Positional args become the DB-API parameter tuple.
        try:
            self.connection = keywds['connection']
        except KeyError:
            self.connection = connection
        try:
            self.diagnostics = keywds['diagnostics']
        except KeyError:
            self.diagnostics = 0
        self.statement = self.prefix + self.__doc__ + self.suffix
        self.params = args
    def __iter__(self):
        # Each iteration executes the statement on a fresh cursor.
        return IterationCursor(self, self.connection)
    def __repr__(self):
        return "%s(message=%s)" % (self.__class__.__name__, self.message)
    def cursor(self):
        return iter(self).cursor
    def dump(self):
        # Python 2 print statement -- this module predates Python 3.
        for item in self:
            print item
class QueryGeneric(Query):
    """A Query whose SQL statement is supplied at run time instead of
    being taken from the class docstring."""
    def __init__(self, statement, *args, **keywds):
        Query.__init__(self, *args, **keywds)
        # Bug fix: the original line ended with a stray comma
        # ("self.statement = statement,"), storing a 1-tuple instead of
        # the SQL string and breaking cursor.execute(query.statement, ...).
        self.statement = statement
class IterationCursor(object):
    """Iterator over a Query: opens a cursor, executes the statement,
    and yields one row_class instance per next() call."""
    def __init__(self, query, connection=connection):
        if connection is None:
            raise TypeError("database connection is None")
        self.cursor = connection.cursor()
        self.row_class = query.row_class
        if query.diagnostics:
            # Python 2 print-chevron syntax; diagnostics go to stderr.
            print >>sys.stderr, query.statement
            print >>sys.stderr, query.params
        self.cursor.execute(query.statement, query.params)
    def next(self):
        # row_class.__init__ raises StopIteration when rows run out.
        return self.row_class(self.cursor)
class QuerySingle(Query, QueryRow):
    """A Query expected to return exactly one row; the row's columns
    become attributes of this instance (via the QueryRow base)."""
    # When true, MySQL warnings raised during execution are suppressed.
    ignore_warnings = 0
    def __init__(self, *args, **keywds):
        # Cleanup: the original assigned a local `message = self.MSG_FAILURE`
        # that was never read (dead code) -- removed.
        Query.__init__(self, *args, **keywds)
        try:
            self.single_cursor = Query.cursor(self)
        except MySQLdb.Warning:
            if not self.ignore_warnings:
                raise
        self.row_class.__init__(self, self.cursor())
        # Must bypass QueryRow.__setattr__, which would otherwise try to
        # treat "message" as a result column.
        object.__setattr__(self, "message", self.MSG_SUCCESS)
    def cursor(self):
        # Reuse the one cursor created in __init__.
        return self.single_cursor
class QueryAll(list, Query):
    """A Query whose full result set is fetched eagerly into this list,
    one processed row per element."""
    def __init__(self, *args, **keywds):
        Query.__init__(self, *args, **keywds)
        fetched = self.cursor().fetchall()
        processed = [self.process_row(row) for row in fetched]
        list.__init__(self, processed)
    def process_row(self, row):
        """Identity hook; subclasses override to transform each row."""
        return row
class QueryAllFirstItem(QueryAll):
    """Like QueryAll, but keeps only the first column of each row."""
    def process_row(self, row):
        return row[0]
class Create(QuerySingle):
    """Execute a statement (e.g. CREATE TABLE) that returns no rows.

    QuerySingle raises StopIteration when there is no result row; for a
    DDL statement that is the success case, hence the handler below.
    """
    def __init__(self, *args, **keywds):
        try:
            QuerySingle.__init__(self, *args, **keywds)
        except StopIteration:
            self.message = self.MSG_SUCCESS
class Update(Create):
    """Alias of Create: an UPDATE statement also returns no rows."""
    pass
class Insert(Create):
    """Execute an INSERT; tracks the new row id and a running row count.

    Written for Python 2 (``except E, v`` syntax below).
    """
    MSG_INTEGRITY_ERROR = "Couldn't insert: %s. "
    def __init__(self, *args, **keywds):
        try:
            Create.__init__(self, *args, **keywds)
        except MySQLdb.IntegrityError, error_data:
            # Append the DB error text, make sure total_count exists for
            # callers, then re-raise with the accumulated message.
            self.error_message += self.MSG_INTEGRITY_ERROR % error_data[1]
            try:
                self.total_count
            except AttributeError:
                self.total_count = 0
            raise MySQLdb.IntegrityError(self.error_message)
        # MySQLdb-specific: id of the row just inserted.
        self.id = self.cursor().insert_id()
        # Accumulate rowcount across repeated inserts on this object.
        try:
            self.total_count += self.cursor().rowcount
        except AttributeError:
            self.total_count = self.cursor().rowcount
        if self.cursor().rowcount == 0:
            raise NoInsertionError
def _test(*args, **keywds):
    """Run this module's doctests; arguments pass through to testmod."""
    import doctest
    import sys
    doctest.testmod(sys.modules[__name__], *args, **keywds)
# Self-test when executed directly.  Skipped under ``python -O`` because
# __debug__ is False in optimised mode.
if __name__ == "__main__":
    if __debug__:
        _test()
|
BlogomaticProject/Blogomatic
|
opt/blog-o-matic/usr/lib/python/Bio/DocSQL.py
|
Python
|
gpl-2.0
| 5,991
|
[
"Biopython"
] |
ce8803776ec3aa73eb19bb526e79c0e6aa7e335508ae7bbabcc8cd3f6777fa89
|
from django.db import models
from edc_base.model.fields import OtherCharField
from edc_constants.choices import YES_NO, YES_NO_NA
from edc_code_lists.models import WcsDxAdult
from td_list.models import MaternalDiagnoses
class DiagnosesMixin(models.Model):
    """Base Model for forms with diagnosis questions i.e Maternal Diagnoses, Maternal Post Partum Fu1 etc"""
    # Yes/No: any new diagnoses or medical problems since the last visit?
    new_diagnoses = models.CharField(
        max_length=25,
        verbose_name="Have there been any new diagnoses or medical problems in the mother's health since last visit?",
        choices=YES_NO,
        help_text="",
    )
    # Multi-select of diagnoses from the MaternalDiagnoses list table.
    diagnoses = models.ManyToManyField(
        MaternalDiagnoses,
        verbose_name="Have any of the following diagnoses occured since last visit?",
        blank=True,
        help_text="",
    )
    # Free-text specification when "Other" is selected above.
    diagnoses_other = OtherCharField(
        max_length=35,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )
    # Yes/No/N-A: unreported WHO staging diagnoses during this pregnancy.
    has_who_dx = models.CharField(
        verbose_name=(
            "During this pregnancy, did the mother have any new diagnoses "
            "listed in the WHO Adult/Adolescent HIV clinical staging document which "
            "is/are NOT reported?"),
        max_length=3,
        choices=YES_NO_NA)
    # WHO Stage III/IV diagnoses, from the WcsDxAdult code list.
    who = models.ManyToManyField(
        WcsDxAdult,
        verbose_name="List any new WHO Stage III/IV diagnoses that are not reported in Question ?? above:")
    class Meta:
        # Mixin only: no table of its own; concrete models inherit these
        # fields.
        abstract = True
|
botswana-harvard/tshilo-dikotla
|
td_maternal/models/diagnoses_mixin.py
|
Python
|
gpl-2.0
| 1,465
|
[
"VisIt"
] |
8496e637384e062199cf39a17d20bbf90056f2345ebf8174e37d8b56135cb415
|
# -*- coding: utf-8 -*-
'''
Master Reborn Add-on
Copyright (C) 2017 Master Reborn
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys,pkgutil,re,json,urllib,urlparse,random,datetime,time
from resources.lib.modules import dialogs, dialogs_list
from resources.lib.modules.executor import execute
from master_commons import cleantitle_get
from resources.lib.modules import control
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import workers
from resources.lib.modules import unshorten
import nanscrapers
debridstatus = control.setting('debridsources')
import os
from threading import Event
import xbmc
import xbmcaddon
import xbmcvfs
try: from sqlite3 import dbapi2 as database
except: from pysqlite2 import dbapi2 as database
try: import urlresolver
except: pass
try: import xbmc
except: pass
_shst_regex = ['sh.st','viid.me']
class sources:
    def __init__(self):
        # getConstants() is defined elsewhere in this class; presumably it
        # sets itemProperty/metaProperty used by the play* methods -- confirm.
        self.getConstants()
        self.sources = []
    def play(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta, select):
        """Scrape sources for the item and hand the chosen URL to the player.

        `select` defaults to the 'hosts.mode' addon setting and chooses
        between a source-list directory (mode 1 inside the plugin), a
        selection dialog, or direct autoplay.  All errors are swallowed.
        """
        try:
            url = None
            items = self.getSources(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
            select = control.setting('hosts.mode') if select == None else select
            title = tvshowtitle if not tvshowtitle == None else title
            # PseudoTV playback bypasses all dialogs and resolves directly.
            if control.window.getProperty('PseudoTVRunning') == 'True':
                return control.resolve(int(sys.argv[1]), True, control.item(path=str(self.sourcesDirect(items))))
            if len(items) > 0:
                if select == '1' and 'plugin' in control.infoLabel('Container.PluginName'):
                    # Publish items/meta through window properties, then
                    # reload the container to show the source list.
                    control.window.clearProperty(self.itemProperty)
                    control.window.setProperty(self.itemProperty, json.dumps(items))
                    control.window.clearProperty(self.metaProperty)
                    control.window.setProperty(self.metaProperty, meta)
                    control.sleep(200)
                    return control.execute('Container.Update(%s?action=addItem&title=%s)' % (sys.argv[0], urllib.quote_plus(title.encode('utf-8'))))
                elif select == '0' or select == '1' or select == '3' or select == '4':
                    url = self.sourcesDialog(items)
                else:
                    url = self.sourcesDirect(items)
            if url == None:
                return self.errorForSources()
            meta = json.loads(meta)
            from resources.lib.modules.player import player
            player().run(title, year, season, episode, imdb, tvdb, url, meta)
        except:
            pass
    def play_alter(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta):
        """Variant of play() that inverts the selection mode: dialog when
        the setting asks for autoplay ('2'), list/dialog otherwise."""
        try:
            url = None
            items = self.getSources(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
            # Flip the configured mode for this "alternate" entry point.
            if control.setting('hosts.mode') == '2': select = "1"
            else: select = "2"
            title = tvshowtitle if not tvshowtitle == None else title
            if control.window.getProperty('PseudoTVRunning') == 'True':
                return control.resolve(int(sys.argv[1]), True, control.item(path=str(self.sourcesDirect(items))))
            if len(items) > 0:
                if select == '1' and 'plugin' in control.infoLabel('Container.PluginName'):
                    control.window.clearProperty(self.itemProperty)
                    control.window.setProperty(self.itemProperty, json.dumps(items))
                    control.window.clearProperty(self.metaProperty)
                    control.window.setProperty(self.metaProperty, meta)
                    control.sleep(200)
                    return control.execute('Container.Update(%s?action=addItem&title=%s)' % (sys.argv[0], urllib.quote_plus(title.encode('utf-8'))))
                elif select == '0' or select == '1' or select == '3' or select == '4' or select == '5':
                    url = self.sourcesDialog(items)
                else:
                    url = self.sourcesDirect(items)
            if url == None:
                return self.errorForSources()
            meta = json.loads(meta)
            from resources.lib.modules.player import player
            player().run(title, year, season, episode, imdb, tvdb, url, meta)
        except:
            pass
    def play_dialog(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta, select):
        """Resolve sources one by one behind a progress dialog and play
        the first that succeeds.

        Each candidate is resolved on a worker thread; the polling loops
        below wait for the thread while tolerating Kodi keyboard/yes-no
        dialogs that some resolvers pop up.  Errors are swallowed.
        """
        try:
            url = None
            items = self.getSource_dialog(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
            title = tvshowtitle if not tvshowtitle == None else title
            header = control.addonInfo('name')
            header2 = header.upper()
            try: meta = json.loads(meta)
            except: meta = ''
            progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
            progressDialog.create(header, '')
            progressDialog.update(0)
            filter = []
            for i in range(len(items)):
                try:
                    try:
                        label = '[B]%s[/B] | %s | [B][I]%s [/I][/B]' % (items[i]['scraper'], items[i]['source'], items[i]['quality'])
                        if progressDialog.iscanceled(): break
                        progressDialog.update(int((100 / float(len(items))) * i), label.upper(), '')
                    except:
                        progressDialog.update(int((100 / float(len(items))) * i), str(header2), label.upper())
                    # if items[i]['source'] == block: raise Exception()
                    # sourcesResolve presumably stores its result in
                    # self.url -- checked below; confirm.
                    w = workers.Thread(self.sourcesResolve, items[i])
                    w.start()
                    m = ''
                    # Wait up to ~30s (extended while a Kodi keyboard or
                    # yes/no dialog is on screen) for the resolver thread.
                    for x in range(3600):
                        try:
                            if xbmc.abortRequested == True: return sys.exit()
                            if progressDialog.iscanceled(): return progressDialog.close()
                        except:
                            pass
                        k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                        if k: m += '1'; m = m[-1]
                        if (w.is_alive() == False or x > 30) and not k: break
                        k = control.condVisibility('Window.IsActive(yesnoDialog)')
                        if k: m += '1'; m = m[-1]
                        if (w.is_alive() == False or x > 30) and not k: break
                        time.sleep(0.5)
                    # Grace period: if a dialog was seen, wait a little
                    # longer for the thread to finish.
                    for x in range(30):
                        try:
                            if xbmc.abortRequested == True: return sys.exit()
                            if progressDialog.iscanceled(): return progressDialog.close()
                        except:
                            pass
                        if m == '': break
                        if w.is_alive() == False: break
                        time.sleep(0.5)
                    # NOTE(review): `block` is only read in the commented
                    # line above -- apparently a vestigial skip mechanism.
                    if w.is_alive() == True: block = items[i]
                    if self.url == None: raise Exception()
                    try: progressDialog.close()
                    except: pass
                    control.sleep(200)
                    control.execute('Dialog.Close(virtualkeyboard)')
                    control.execute('Dialog.Close(yesnoDialog)')
                    from resources.lib.modules.player import player
                    player().run(title, year, season, episode, imdb, tvdb, self.url, meta)
                    return self.url
                except:
                    pass
            try: progressDialog.close()
            except: pass
            self.errorForSources()
        except:
            pass
    def play_dialog_list(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta, select):
        """Like play(), but always picks the source via sourcesDialog2
        (the list-style selection dialog)."""
        try:
            url = None
            items = self.getSources(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
            select = control.setting('hosts.mode') if select == None else select
            title = tvshowtitle if not tvshowtitle == None else title
            if control.window.getProperty('PseudoTVRunning') == 'True':
                return control.resolve(int(sys.argv[1]), True, control.item(path=str(self.sourcesDirect(items))))
            if len(items) > 0: url = self.sourcesDialog2(items)
            if url == None: return self.errorForSources()
            meta = json.loads(meta)
            from resources.lib.modules.player import player
            player().run(title, year, season, episode, imdb, tvdb, url, meta)
        except:
            pass
    def play_library(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta, select):
        """play() variant for library items: same selection logic, but the
        player is invoked with the literal meta string 'play_library'."""
        try:
            url = None
            items = self.getSources(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
            select = control.setting('hosts.mode') if select == None else select
            title = tvshowtitle if not tvshowtitle == None else title
            if control.window.getProperty('PseudoTVRunning') == 'True':
                return control.resolve(int(sys.argv[1]), True, control.item(path=str(self.sourcesDirect(items))))
            if len(items) > 0:
                if select == '1' and 'plugin' in control.infoLabel('Container.PluginName'):
                    control.window.clearProperty(self.itemProperty)
                    control.window.setProperty(self.itemProperty, json.dumps(items))
                    control.window.clearProperty(self.metaProperty)
                    control.window.setProperty(self.metaProperty, meta)
                    control.sleep(200)
                    return control.execute('Container.Update(%s?action=addItem&title=%s)' % (sys.argv[0], urllib.quote_plus(title.encode('utf-8'))))
                elif select == '0' or select == '1':
                    url = self.sourcesDialog(items)
                else:
                    url = self.sourcesDirect(items)
            if url == None:
                return self.errorForSources()
            # Sentinel string instead of parsed metadata for library playback.
            meta = 'play_library'
            from resources.lib.modules.player import player
            player().run(title, year, season, episode, imdb, tvdb, url, meta)
        except:
            pass
def addItem(self, title):
    """Render the stashed source list (window properties) as a Kodi directory.

    Each source becomes a playable list item pointing at ?action=playItem,
    with an optional Download context-menu entry.
    """
    control.playlist.clear()
    items = control.window.getProperty(self.itemProperty)
    items = json.loads(items)
    if items == None or len(items) == 0: control.idle() ; sys.exit()
    meta = control.window.getProperty(self.metaProperty)
    meta = json.loads(meta)
    sysaddon = sys.argv[0]
    syshandle = int(sys.argv[1])
    # Downloads are offered only when enabled and both download paths are set.
    downloads = True if control.setting('downloads') == 'true' and not (control.setting('movie.download.path') == '' or control.setting('tv.download.path') == '') else False
    if 'tvshowtitle' in meta and 'season' in meta and 'episode' in meta:
        name = '%s S%02dE%02d' % (title, int(meta['season']), int(meta['episode']))
    elif 'year' in meta:
        name = '%s (%s)' % (title, meta['year'])
    else:
        name = title
    systitle = urllib.quote_plus(title.encode('utf-8'))
    sysname = urllib.quote_plus(name.encode('utf-8'))
    # Artwork fallback chain: metadata values, then add-on defaults.
    poster = meta['poster'] if 'poster' in meta else '0'
    banner = meta['banner'] if 'banner' in meta else '0'
    thumb = meta['thumb'] if 'thumb' in meta else poster
    fanart = meta['fanart'] if 'fanart' in meta else '0'
    if poster == '0': poster = control.addonPoster()
    if banner == '0' and poster == '0': banner = control.addonBanner()
    elif banner == '0': banner = poster
    if thumb == '0' and fanart == '0': thumb = control.addonFanart()
    elif thumb == '0': thumb = fanart
    if control.setting('fanart') == 'true' and not fanart == '0': pass
    else: fanart = control.addonFanart()
    sysimage = urllib.quote_plus(poster.encode('utf-8'))
    downloadMenu = control.lang(32403).encode('utf-8')
    for i in range(len(items)):
        try:
            label = items[i]['label']
            # Each list item carries its own single-source JSON payload.
            syssource = urllib.quote_plus(json.dumps([items[i]]))
            sysurl = '%s?action=playItem&title=%s&source=%s' % (sysaddon, systitle, syssource)
            cm = []
            if downloads == True:
                cm.append((downloadMenu, 'RunPlugin(%s?action=download&name=%s&image=%s&source=%s)' % (sysaddon, sysname, sysimage, syssource)))
            item = control.item(label=label)
            item.setArt({'icon': thumb, 'thumb': thumb, 'poster': poster, 'tvshow.poster': poster, 'season.poster': poster, 'banner': banner, 'tvshow.banner': banner, 'season.banner': banner})
            if not fanart == None: item.setProperty('Fanart_Image', fanart)
            item.addContextMenuItems(cm)
            item.setInfo(type='Video', infoLabels = meta)
            control.addItem(handle=syshandle, url=sysurl, listitem=item, isFolder=False)
        except:
            pass
    control.content(syshandle, 'files')
    control.directory(syshandle, cacheToDisc=True)
def playItem(self, title, source):
    """Resolve and play a single directory item, falling back to neighbours.

    Collects the sources of the following (and, in reverse, preceding)
    container items as fallbacks, then resolves candidates one by one until
    a playable URL is found.  Returns the URL on success, None otherwise.
    """
    try:
        meta = control.window.getProperty(self.metaProperty)
        meta = json.loads(meta)
        year = meta['year'] if 'year' in meta else None
        season = meta['season'] if 'season' in meta else None
        episode = meta['episode'] if 'episode' in meta else None
        imdb = meta['imdb'] if 'imdb' in meta else None
        tvdb = meta['tvdb'] if 'tvdb' in meta else None
        next = [] ; prev = [] ; total = []
        # Walk the container forward, harvesting each item's source payload.
        for i in range(1,1000):
            try:
                u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
                if u in total: raise Exception()
                total.append(u)
                u = dict(urlparse.parse_qsl(u.replace('?','')))
                u = json.loads(u['source'])[0]
                next.append(u)
            except:
                break
        # Same walk backwards for the items before the selection.
        for i in range(-1000,0)[::-1]:
            try:
                u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
                if u in total: raise Exception()
                total.append(u)
                u = dict(urlparse.parse_qsl(u.replace('?','')))
                u = json.loads(u['source'])[0]
                prev.append(u)
            except:
                break
        items = json.loads(source)
        # Candidate order: clicked item, then following, then preceding; cap 40.
        items = [i for i in items+next+prev][:40]
        header = control.addonInfo('name')
        header2 = header.upper()
        progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
        progressDialog.create(header, '')
        progressDialog.update(0)
        block = None
        for i in range(len(items)):
            try:
                try:
                    if progressDialog.iscanceled(): break
                    progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
                except:
                    progressDialog.update(int((100 / float(len(items))) * i), str(header2), str(items[i]['label']))
                # Skip a host that already hung once this session.
                if items[i]['source'] == block: raise Exception()
                w = workers.Thread(self.sourcesResolve, items[i])
                w.start()
                # `m` records whether an interactive dialog (keyboard/yes-no)
                # appeared while the resolver thread was running.
                m = ''
                for x in range(3600):
                    try:
                        if xbmc.abortRequested == True: return sys.exit()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                    if k: m += '1'; m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k: break
                    k = control.condVisibility('Window.IsActive(yesnoDialog)')
                    if k: m += '1'; m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k: break
                    time.sleep(0.5)
                # Grace period if an interactive dialog was seen.
                for x in range(30):
                    try:
                        if xbmc.abortRequested == True: return sys.exit()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    if m == '': break
                    if w.is_alive() == False: break
                    time.sleep(0.5)
                if w.is_alive() == True: block = items[i]['source']
                if self.url == None: raise Exception()
                try: progressDialog.close()
                except: pass
                control.sleep(200)
                control.execute('Dialog.Close(virtualkeyboard)')
                control.execute('Dialog.Close(yesnoDialog)')
                from resources.lib.modules.player import player
                player().run(title, year, season, episode, imdb, tvdb, self.url, meta)
                return self.url
            except:
                pass
        try: progressDialog.close()
        except: pass
        self.errorForSources()
    except:
        pass
def getSource_dialog(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, presetDict=[], timeout=30):
    """Collect sources via the nanscrapers selection dialog and flatten them.

    Discovers enabled local provider modules, stashes the request metadata
    on ``self`` for the scrape_* helpers, runs the dialog scrape, and
    flattens the mixed (tuple/list/dict) results into a single list.

    Args:
        presetDict, timeout: unused; kept for interface compatibility.
    Returns:
        list of source dicts selected in the dialog.
    """
    self.__scrapers = []
    sourceDict = []
    for pkg, name, is_pkg in pkgutil.walk_packages(__path__): sourceDict.append((name, is_pkg))
    sourceDict = [i[0] for i in sourceDict if i[1] == False]
    sourceDict = [(i, __import__(i, globals(), locals(), [], -1).source()) for i in sourceDict]
    content = 'movie' if tvshowtitle == None else 'episode'
    # Keep only providers implementing the entry point for this content type.
    if content == 'movie':
        sourceDict = [(i[0], i[1], getattr(i[1], 'movie', None)) for i in sourceDict]
    else:
        sourceDict = [(i[0], i[1], getattr(i[1], 'tvshow', None)) for i in sourceDict]
    sourceDict = [(i[0], i[1]) for i in sourceDict if not i[2] == None]
    # Respect per-provider enable/disable settings; default to enabled.
    try: sourceDict = [(i[0], i[1], control.setting('provider.' + i[0])) for i in sourceDict]
    except: sourceDict = [(i[0], i[1], 'true') for i in sourceDict]
    self.__scrapers = [i[1] for i in sourceDict if not i[2] == 'false']
    # Stash request metadata for scrape_movie_with_dialog / scrape_tv_with_dialog.
    self.title = title
    self.year = year
    self.imdb = imdb
    self.tvdb = tvdb
    self.season = season
    self.episode = episode
    self.tvshowtitle = tvshowtitle
    self.premiered = premiered
    print ("MASTER REBORN SELFSCRAPERS", self.__scrapers)
    sourceDict = [i[0] for i in sourceDict if not i[2] == 'false']
    threads = []
    select_sources = []
    if control.setting('cachesources') == 'true':
        control.makeFile(control.dataPath)
        self.sourceFile = control.providercacheFile
    if content == 'movie':
        scraped_sources = self.scrape_movie_with_dialog()
    else:
        scraped_sources = self.scrape_tv_with_dialog()
    for item in scraped_sources:
        # Dialog results may be (label, payload) tuples; unwrap the payload.
        if type(item) == tuple:
            item = item[1]
        if type(item) == list:
            # Bug fix: the original looped `for subitem in item` and extended
            # with the WHOLE list each iteration, duplicating every entry
            # len(item) times.  Extend once.
            select_sources.extend(item)
        else:
            select_sources.append(item)
    return select_sources
def scrape_tv_with_dialog(self, maximum_age=60, sort_function=None):
    """Scrape episode links via the nanscrapers selection dialog.

    Uses the show metadata previously stashed on ``self`` by
    ``getSource_dialog``.  Returns a list with the chosen link's path first,
    followed by the remaining candidate links.

    Args:
        maximum_age, sort_function: unused; kept for interface compatibility
            (the class-level ``sort_function`` is always passed through).
    """
    # Bug fix: if the settings lookup raised, `timeout` was never bound and
    # the assignment below crashed with NameError.  Default to 30 seconds.
    timeout = 30
    try:
        timeout = int(control.setting('scrapers.timeout.1'))
    except:
        pass
    self.timeout = timeout
    allow_debrid = control.setting("debridsources") == "true"
    scraper = nanscrapers.scrape_episode_with_dialog
    link, rest = scraper(
        self.tvshowtitle,
        self.year,
        self.premiered,
        self.season,
        self.episode,
        self.imdb,
        self.tvdb,
        timeout=self.timeout,
        extended=True,
        sort_function=self.sort_function,
        enable_debrid=allow_debrid)
    # A dialog selection may come back wrapped as {'path': ...}; unwrap it.
    if type(link) == dict and "path" in link:
        link = link["path"]
    result = [link]
    result.extend(rest)
    return result
def scrape_movie_with_dialog(self, maximum_age=60, sort_function=None):
    """Scrape movie links via the nanscrapers selection dialog.

    Uses the movie metadata previously stashed on ``self`` by
    ``getSource_dialog``.  Returns a list with the chosen link's path first,
    followed by the remaining candidate links.

    Args:
        maximum_age, sort_function: unused; kept for interface compatibility
            (the class-level ``sort_function`` is always passed through).
    """
    # Bug fix: if the settings lookup raised, `timeout` was never bound and
    # the assignment below crashed with NameError.  Default to 30 seconds.
    timeout = 30
    try:
        timeout = int(control.setting('scrapers.timeout.1'))
    except:
        pass
    self.timeout = timeout
    allow_debrid = control.setting("debridsources") == "true"
    scraper = nanscrapers.scrape_movie_with_dialog
    link, rest = scraper(
        self.title,
        self.year,
        self.imdb,
        timeout=self.timeout,
        extended=True,
        sort_function=self.sort_function,
        enable_debrid=allow_debrid)
    # A dialog selection may come back wrapped as {'path': ...}; unwrap it.
    if type(link) == dict and "path" in link:
        link = link["path"]
    result = [link]
    result.extend(rest)
    return result
def to_dialog_tuple(self, scraper_array):
    """Filter scraper links down to usable, non-blacklisted entries.

    Args:
        scraper_array: iterable of link dicts; 'url' is required, and links
            missing 'provider'/'source' are dropped (the label build below
            raises into the enclosing except, as in the original).
    Returns:
        list of link dicts with a non-empty URL whose host is not on
        ``self.hostBlackList``.
    """
    results_array = []
    if scraper_array:
        for link in scraper_array:
            try:
                url = link['url']
                try:
                    quality = link['quality']
                except:
                    quality = "SD"
                # The label mirrors the display format used elsewhere; building
                # it also (deliberately, as before) discards links that lack
                # 'provider'/'source' via the enclosing except.
                label = ('%s | %s | %s' % (quality, link['provider'], link['source'])).upper()
                # Bug fix: the original tested `not url == '' or url == None`,
                # which is True for url=None and then (silently) crashed on the
                # substring test below.  Require a non-empty string instead.
                # (The unused `quality2` dead code was removed.)
                if url and not any(value in url for value in self.hostBlackList):
                    results_array.append(link)
            except:
                pass
    return results_array
def getSources(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, presetDict=[], timeout=30):
    """Scrape all providers via nanscrapers and return the filtered sources.

    Spawns a worker thread that drains the scraper generator into
    ``self.sources`` while a progress dialog is shown, then filters and
    returns the collected list.
    """
    progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
    progressDialog.create(control.addonInfo('name'), '')
    progressDialog.update(0, 'Preparing Sources...')
    # if control.setting('cachesources') == 'true': self.prepareSources()
    content = 'movie' if tvshowtitle is None else 'episode'
    try:
        timeout = int(control.setting('scrapers.timeout.1'))
    except:
        pass
    allow_debrid = control.setting("debridsources") == "true"
    if control.setting('cachesources') == 'true':
        control.makeFile(control.dataPath)
        self.sourceFile = control.providercacheFile
    if content == 'movie':
        title = self.getTitle(title)
        scraper = nanscrapers.scrape_movie
        links_scraper = scraper(
            title,
            year,
            imdb,
            timeout=timeout,
            enable_debrid=allow_debrid)
    else:
        tvshowtitle = self.getTitle(tvshowtitle)
        scraper = nanscrapers.scrape_episode
        links_scraper = scraper(
            tvshowtitle,
            year,
            premiered,
            season,
            episode,
            imdb,
            tvdb,
            timeout=timeout,
            enable_debrid=allow_debrid)
    thread = workers.Thread(self.get_nan_sources, links_scraper,
                            progressDialog)
    thread.start()
    # Poll (~timeout seconds in 0.5s ticks) until the scraper thread finishes,
    # the dialog is cancelled, or Kodi shuts down.
    for i in range(0, timeout * 2):
        try:
            if xbmc.abortRequested:
                return sys.exit()
            try:
                if progressDialog.iscanceled():
                    break
            except:
                pass
            if not thread.is_alive(): break
            time.sleep(0.5)
        except:
            pass
    try:
        progressDialog.close()
    except:
        pass
    self.sourcesFilter()
    return self.sources
def get_nan_sources(self, links_scraper, progressDialog):
    """Drain the nanscrapers generator, tally qualities and collect links.

    Appends each scraped link to ``self.sources`` while updating the
    progress dialog with running 1080p/HD/SD counts and the number of
    scrapers still outstanding.  (Two unused status strings from the
    original were removed.)
    """
    num_scrapers = len(nanscrapers.relevant_scrapers())
    index = 0
    string3 = control.lang(32406).encode('utf-8')
    counthd = 0
    count1080 = 0
    countSD = 0
    for scraper_links in links_scraper():
        try:
            if xbmc.abortRequested:
                return sys.exit()
            if progressDialog.iscanceled():
                break
            index = index + 1
            percent = int((index * 100) / num_scrapers)
            if scraper_links is not None:
                random.shuffle(scraper_links)
                for scraper_link in scraper_links:
                    try:
                        q = scraper_link['quality']
                        if "1080" in q:
                            count1080 += 1
                        elif "HD" in q:
                            counthd += 1
                        # Bug fix: the original contained this same "720"
                        # branch twice; the duplicate was unreachable and has
                        # been removed.
                        elif "720" in q:
                            counthd += 1
                            scraper_link["quality"] = "HD"
                        elif "560" in q:
                            counthd += 1
                            scraper_link["quality"] = "HD"
                        else:
                            countSD += 1
                    except:
                        pass
                    progressDialog.update(percent,
                                          "Links: ([B]" + str(count1080) + "/" + str(counthd) + "/" + str(countSD) + "[/B]) (" + str(len(self.sources)) + ")",
                                          string3 % (num_scrapers - index))
                    self.sources.append(scraper_link)
            try:
                if progressDialog.iscanceled():
                    break
            except:
                pass
        except:
            pass
def prepareSources(self):
    """Ensure the add-on data dir exists and record the provider-cache path.

    Best-effort: any failure is ignored.
    """
    try:
        control.makeFile(control.dataPath)
        self.sourceFile = control.providercacheFile
    except:
        pass
def getTitle(self, title):
    """Normalize a title for scraper matching (delegates to cleantitle)."""
    return cleantitle.normalize(title)
def getMovieSource(self, title, year, imdb, source, call):
    """Query one provider module (*call*) for movie sources.

    Resolves the provider's movie URL, asks it for sources and extends
    ``self.sources``.  Each step is best-effort; failures are swallowed.
    """
    source = cleantitle_get(str(source))
    type = "movie"
    try:
        url = None
        if url == None: url = call.movie(imdb, title, year)
        if url == None: raise Exception()
    except:
        pass
    try:
        sources = []
        sources = call.sources(url, self.hostDict, self.hostprDict)
        if sources == None: raise Exception()
        self.sources.extend(sources)
    except:
        pass
def getEpisodeSource(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, source, call):
    """Query one provider module (*call*) for episode sources.

    Resolves the show URL, then the episode URL, then asks the provider for
    sources and extends ``self.sources``.  Each step is best-effort.
    """
    source = cleantitle_get(str(source))
    try:
        url = None
        if url == None: url = call.tvshow(imdb, tvdb, tvshowtitle, year)
        if url == None: raise Exception()
    except:
        pass
    try:
        ep_url = None
        if url == None: raise Exception()
        if ep_url == None: ep_url = call.episode(url, imdb, tvdb, title, premiered, season, episode)
        if ep_url == None: raise Exception()
    except:
        pass
    try:
        sources = []
        sources = call.sources(ep_url, self.hostDict, self.hostprDict)
        if sources == None: raise Exception()
        self.sources.extend(sources)
    except:
        pass
def getMovieSource2(self, title, year, imdb, source, call):
    """Query one provider module (*call*) for movie sources.

    Derives a display name for the provider from its module path, resolves
    the movie URL, collects sources into ``self.sources`` and returns them.
    Returns [] when the provider yields nothing.
    """
    # Bug fix: the original did re.findall(...)[0] unconditionally, which
    # raised IndexError when the provider path did not match, so the
    # "Master Reborn" fallback was unreachable.
    matches = re.findall('resources.lib.sources.(.+?).source', str(call))
    source = matches[0] if matches else "Master Reborn"
    try:
        url = None
        if url == None: url = call.movie(imdb, title, year)
        if url == None: raise Exception()
    except:
        pass
    try:
        sources = []
        sources = call.sources(url, self.hostDict, self.hostprDict)
        if sources == None: raise Exception()
        self.sources.extend(sources)
    except:
        pass
    return sources
def getEpisodeSource2(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, source, call):
    """Query one provider module (*call*) for episode sources.

    Derives a display name for the provider from its module path, resolves
    the show URL then the episode URL, collects sources into
    ``self.sources`` and returns them.  Returns [] when nothing is found.
    """
    # Bug fix: the original did re.findall(...)[0] unconditionally, which
    # raised IndexError when the provider path did not match, so the
    # "Master Reborn" fallback was unreachable.
    matches = re.findall('resources.lib.sources.(.+?).source', str(call))
    source = matches[0] if matches else "Master Reborn"
    try:
        url = None
        if url == None: url = call.tvshow(imdb, tvdb, tvshowtitle, year)
        if url == None: raise Exception()
    except:
        pass
    try:
        ep_url = None
        if url == None: raise Exception()
        if ep_url == None: ep_url = call.episode(url, imdb, tvdb, title, premiered, season, episode)
        if ep_url == None: raise Exception()
    except:
        pass
    try:
        sources = []
        sources = call.sources(ep_url, self.hostDict, self.hostprDict)
        if sources == None: raise Exception()
        self.sources.extend(sources)
    except:
        pass
    return sources
def getURISource(self, url):
    """Resolve a raw URL by matching its domain to a local provider module.

    Returns the filtered sources (marked autoplay), False when no provider
    claims the domain, or None on error.
    """
    try:
        sourceDict = []
        for package, name, is_pkg in pkgutil.walk_packages(__path__): sourceDict.append((name, is_pkg))
        sourceDict = [i[0] for i in sourceDict if i[1] == False]
        # NOTE(review): __import__ level=-1 is Python 2 only (implicit
        # relative import); this module appears to target Kodi's Python 2.
        sourceDict = [(i, __import__(i, globals(), locals(), [], -1).source()) for i in sourceDict]
        domain = (urlparse.urlparse(url).netloc).lower()
        domains = [(i[0], i[1].domains) for i in sourceDict]
        domains = [i[0] for i in domains if any(x in domain for x in i[1])]
        if len(domains) == 0: return False
        call = [i[1] for i in sourceDict if i[0] == domains[0]][0]
        self.sources = call.sources(url, self.hostDict, self.hostprDict)
        for i in range(len(self.sources)):
            try: self.sources[i]['autoplay'] = True
            except: pass
        self.sources = self.sourcesFilter()
        return self.sources
    except:
        pass
def alterSources(self, url, meta):
    """Re-launch the plugin URL forcing an alternative source-selection mode.

    `meta` is unused; kept for interface compatibility.
    """
    try:
        suffix = '&select=1' if control.setting('hosts.mode') == '2' else '&select=2'
        control.execute('RunPlugin(%s)' % (url + suffix))
    except:
        pass
def clearSources(self):
    """Ask the user for confirmation, then wipe the provider cache table."""
    try:
        control.idle()
        yes = control.yesnoDialog(control.lang(32407).encode('utf-8'), '', '')
        if not yes: return
        control.makeFile(control.dataPath)
        dbcon = database.connect(control.providercacheFile)
        dbcur = dbcon.cursor()
        dbcur.execute("DROP TABLE IF EXISTS rel_src")
        # Reclaim the freed space in the SQLite file.
        dbcur.execute("VACUUM")
        dbcon.commit()
        control.infoDialog(control.lang(32408).encode('utf-8'), sound=True, icon='INFO')
    except:
        pass
def sourcesFilter(self):
    """Order, de-duplicate, quality-filter and label ``self.sources``.

    Applies user settings (provider sort, max quality, captcha hosts),
    pushes direct links and debrid-only links to the front of each quality
    tier, drops blacklisted hosts and archive-like URLs, then builds the
    display label for each surviving source.  Returns the filtered list.
    """
    provider = control.setting('hosts.sort.provider')
    quality = control.setting('hosts.quality')
    if quality == '':
        quality = '0'
    captcha = control.setting('hosts.captcha')
    random.shuffle(self.sources)
    if provider == 'true':
        self.sources = sorted(self.sources, key=lambda k: k['scraper'])
    # Local sources always come first, ahead of the quality tiers.
    local = [i for i in self.sources if 'local' in i and i.get('local', False) == True]
    self.sources = [i for i in self.sources if not i in local]
    filter = []
    filter += [i for i in self.sources if i['direct'] == True]
    filter += [i for i in self.sources if i['direct'] == False]
    self.sources = filter
    filter = []
    filter += [i for i in self.sources if not i['source'].lower() in self.hostBlackList]
    self.sources = filter
    # Quality tiers: setting '0' allows everything, '1' caps at 2k, etc.
    # Within each tier debrid-only links come first.
    filter = []
    filter += local
    if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4k' and i.get('debridonly', False) == True]
    if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4k' and i.get('debridonly', False) == False]
    if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '2k' and i.get('debridonly', False) == True]
    if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '2k' and i.get('debridonly', False) == False]
    if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and i.get('debridonly', False) == True]
    if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and i.get('debridonly', False) == False]
    if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == 'HD' and i.get('debridonly', False) == True]
    if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == 'HD' and i.get('debridonly', False) == False]
    filter += [i for i in self.sources if i['quality'] == 'SD' and i.get('debridonly', False) == True]
    filter += [i for i in self.sources if i['quality'] == 'SD' and i.get('debridonly', False) == False]
    # Only pad with screeners/cams when very few proper links exist.
    if len(filter) < 10: filter += [i for i in self.sources if i['quality'] == 'SCR']
    if len(filter) < 10: filter += [i for i in self.sources if i['quality'] == 'CAM']
    self.sources = filter
    if not captcha == 'true':
        filter = [i for i in self.sources if i['source'].lower() in self.hostcapDict and not 'debrid' in i]
        self.sources = [i for i in self.sources if not i in filter]
    self.sources = self.filter_zips(self.sources)
    self.sources = self.sources[:1000]
    for i in range(len(self.sources)):
        u = self.sources[i]['url']
        s = self.sources[i]['scraper'].lower()
        s = s.rsplit('.', 1)[0]
        p = self.sources[i]['source']
        d = self.sources[i].get('debridonly', False)
        d = str(d)
        # Strip version suffixes like "hostv2" from the host name.
        p = re.sub('v\d*$', '', p)
        q = self.sources[i]['quality']
        try:
            f = (' | '.join(['[I]%s [/I]' % info.strip() for info in self.sources[i]['info'].split('|')]))
        except:
            f = ''
        if d == 'True':
            label = '%02d |[I]DEB[/I] | [B]%s[/B] | ' % (int(i+1), p)
        else:
            label = '%02d | [B]%s[/B] | ' % (int(i+1), p)
        # Bug fix: quality values produced by the tiers above are lower-case
        # ('4k'/'2k'); the original compared against upper-case '4K', so 4k
        # links never received the bold high-definition label.
        if q in ['4k', '2k', '1080p', 'HD']:
            label += '%s | %s | [B][I]%s [/I][/B]' % (s, f, q)
        elif q == 'SD':
            label += '%s | %s | [I]%s [/I]' % (s, f, q)
        else:
            label += '%s | %s | [I]%s [/I]' % (s, f, q)
        # Clean up empty/placeholder segments left by the formatting above.
        label = label.replace('| 0 |', '|').replace(' | [I]0 [/I]', '')
        label = label.replace('[I]HEVC [/I]', 'HEVC')
        label = re.sub('\[I\]\s+\[/I\]', ' ', label)
        label = re.sub('\|\s+\|', '|', label)
        label = re.sub('\|(?:\s+|)$', '', label)
        self.sources[i]['label'] = label.upper()
    return self.sources
def filter_zips(self, sources):
    """Drop links whose URL matches an archive/image/subtitle marker.

    Google-hosted URLs are always kept; everything else is checked against
    ``self.blacklist_zips``.
    """
    kept = []
    for entry in sources:
        lowered = entry['url'].encode('utf-8').lower()
        if "google" in lowered:
            kept.append(entry)
        elif not any(marker in lowered for marker in self.blacklist_zips):
            kept.append(entry)
    return kept
def sourcesResolve(self, item, info=False):
    """Resolve one source item to a final playable URL.

    Stores the result on ``self.url`` (None on failure) and returns it.
    The URL is verified with a HEAD-style request before being accepted.
    When *info* is True a failure also pops the user-facing error dialog.
    """
    try:
        self.url = None
        u = url = item['url']
        # d = item['debrid'] ;
        direct = item['direct']
        provider = item['scraper'].lower()
        # if not provider.endswith(('_mv', '_tv', '_mv_tv')):
        # sourceDict = []
        # for package, name, is_pkg in pkgutil.walk_packages(__path__): sourceDict.append((name, is_pkg))
        # provider = [i[0] for i in sourceDict if i[1] == False and i[0].startswith(provider + '_')][0]
        #source = __import__(provider, globals(), locals(), [], -1).source()
        u = url = item["url"]
        if url == None: raise Exception()
        # Expand sh.st-style shortened links before resolving.
        if any(value in url for value in _shst_regex): u = unshorten._unshorten_shst(url)
        # if not d == '':
        # url = debrid.resolver(url, d)
        if not direct == True:
            # NOTE(review): `debridstatus` is a module-level name not defined
            # in this method — presumably set at import time; confirm.
            if not debridstatus == 'true': hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True, include_universal=False)
            else: hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True, include_universal=True)
            if hmf.valid_url() == True: url = hmf.resolve()
        if url == False or url == None: raise Exception()
        ext = url.split('?')[0].split('&')[0].split('|')[0].rsplit('.')[-1].replace('/', '').lower()
        if ext == 'rar': raise Exception()
        # Anything after a trailing '|' is treated as request headers.
        try: headers = url.rsplit('|', 1)[1]
        except: headers = ''
        headers = urllib.quote_plus(headers).replace('%3D', '=') if ' ' in headers else headers
        headers = dict(urlparse.parse_qsl(headers))
        xbmc.log("url3:" + repr(url), xbmc.LOGNOTICE)
        if url.startswith('http') and '.m3u8' in url:
            # HLS playlist: just confirm the URL is reachable.
            result = client.request(url.split('|')[0], headers=headers, output='geturl', timeout='20')
            if result == None: raise Exception()
        elif url.startswith('http'):
            # Plain file: pull the first chunk to verify it streams.
            result = client.request(url.split('|')[0], headers=headers, output='chunk', timeout='30')
            if result == None: raise Exception()
        else:
            raise Exception()
        xbmc.log("url4:" + repr(url), xbmc.LOGNOTICE)
        self.url = url
        xbmc.log("url2:" + repr(url), xbmc.LOGNOTICE)
        return url
    except:
        if info == True: self.errorForSources()
        return
def sourcesDialog(self, items):
    """Show a simple select dialog and resolve the chosen source.

    After the user picks an entry, following entries (then preceding ones,
    in reverse) serve as fallbacks, capped at 40 candidates.  Returns the
    resolved URL, 'close://' if the dialog was dismissed, or None.
    """
    try:
        labels = [i['label'] for i in items]
        select = control.selectDialog(labels)
        if select == -1: return 'close://'
        # Fallback order: selection first, then later items, then earlier ones.
        next = [y for x,y in enumerate(items) if x >= select]
        prev = [y for x,y in enumerate(items) if x < select][::-1]
        items = [items[select]]
        items = [i for i in items+next+prev][:40]
        header = control.addonInfo('name')
        header2 = header.upper()
        progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
        progressDialog.create(header, '')
        progressDialog.update(0)
        block = None
        for i in range(len(items)):
            try:
                # Skip a host that already hung once this session.
                if items[i]['source'] == block: raise Exception()
                w = workers.Thread(self.sourcesResolve, items[i])
                w.start()
                try:
                    if progressDialog.iscanceled(): break
                    progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
                except:
                    progressDialog.update(int((100 / float(len(items))) * i), str(header2), str(items[i]['label']))
                # `m` records whether an interactive dialog (keyboard/yes-no)
                # appeared while the resolver thread was running.
                m = ''
                for x in range(3600):
                    try:
                        if xbmc.abortRequested == True: return sys.exit()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                    if k: m += '1'; m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k: break
                    k = control.condVisibility('Window.IsActive(yesnoDialog)')
                    if k: m += '1'; m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k: break
                    time.sleep(0.5)
                # Grace period if an interactive dialog was seen.
                for x in range(30):
                    try:
                        if xbmc.abortRequested == True: return sys.exit()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    if m == '': break
                    if w.is_alive() == False: break
                    time.sleep(0.5)
                if w.is_alive() == True: block = items[i]['source']
                if self.url == None: raise Exception()
                self.selectedSource = items[i]['label']
                try: progressDialog.close()
                except: pass
                control.execute('Dialog.Close(virtualkeyboard)')
                control.execute('Dialog.Close(yesnoDialog)')
                return self.url
            except:
                pass
        try: progressDialog.close()
        except: pass
    except:
        try: progressDialog.close()
        except: pass
def sourcesDialog2(self, items):
    """Show the extended multi-select dialog and resolve the chosen sources.

    Like ``sourcesDialog`` but the selection comes from
    ``dialogs_list.select_ext`` and may contain several candidates, tried
    in order.  Returns the first resolved URL or None.
    """
    try:
        labels = [i['label'] for i in items]
        select = dialogs_list.select_ext("Select Link", items)
        selected_items = select
        if not len(selected_items) > 1: return self.errorForSources()
        header = control.addonInfo('name')
        header2 = header.upper()
        progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
        progressDialog.create(header, '')
        progressDialog.update(0)
        block = None
        for i in range(len(selected_items)):
            try:
                # Skip a host that already hung once this session.
                if selected_items[i]['source'] == block: raise Exception()
                w = workers.Thread(self.sourcesResolve, selected_items[i])
                w.start()
                try:
                    if progressDialog.iscanceled(): break
                    progressDialog.update(int((100 / float(len(selected_items))) * i), str(selected_items[i]['label']), str(' '))
                except:
                    progressDialog.update(int((100 / float(len(selected_items))) * i), str(header2), str(selected_items[i]['label']))
                # `m` records whether an interactive dialog (keyboard/yes-no)
                # appeared while the resolver thread was running.
                m = ''
                for x in range(3600):
                    try:
                        if xbmc.abortRequested == True: return sys.exit()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                    if k: m += '1'; m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k: break
                    k = control.condVisibility('Window.IsActive(yesnoDialog)')
                    if k: m += '1'; m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k: break
                    time.sleep(0.5)
                # Grace period if an interactive dialog was seen.
                for x in range(30):
                    try:
                        if xbmc.abortRequested == True: return sys.exit()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    if m == '': break
                    if w.is_alive() == False: break
                    time.sleep(0.5)
                if w.is_alive() == True: block = selected_items[i]['source']
                if self.url == None: raise Exception()
                self.selectedSource = selected_items[i]['label']
                try: progressDialog.close()
                except: pass
                control.execute('Dialog.Close(virtualkeyboard)')
                control.execute('Dialog.Close(yesnoDialog)')
                return self.url
            except:
                pass
        try: progressDialog.close()
        except: pass
    except:
        try: progressDialog.close()
        except: pass
def sourcesDirect(self, items):
    """Autoplay helper: resolve items in order, return the first working URL.

    Optionally excludes high-definition links when the 'autoplay.sd'
    setting is enabled.  Returns None when nothing resolves.
    """
    items = [i for i in items]
    if control.setting('autoplay.sd') == 'true':
        # Bug fix: quality tags elsewhere in this class are lower-case
        # ('4k'), but the original list only contained '4K', so 4k links
        # were never excluded in SD-only autoplay mode.  Match both.
        items = [i for i in items if not i['quality'] in ['4k', '4K', '2k', '1080p', 'HD']]
    u = None
    header = control.addonInfo('name')
    header2 = header.upper()
    try:
        control.sleep(1000)
        progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
        progressDialog.create(header, '')
        progressDialog.update(0)
    except:
        pass
    for i in range(len(items)):
        try:
            if progressDialog.iscanceled(): break
            progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
        except:
            progressDialog.update(int((100 / float(len(items))) * i), str(header2), str(items[i]['label']))
        try:
            if xbmc.abortRequested == True: return sys.exit()
            url = self.sourcesResolve(items[i])
            if u == None: u = url
            if not url == None: break
        except:
            pass
    try: progressDialog.close()
    except: pass
    return u
def errorForSources(self):
    """Show a short 'no streams available' notification to the user."""
    message = control.lang(32401).encode('utf-8')
    control.infoDialog(message, sound=False, icon='INFO')
def getConstants(self):
    """Initialise window-property keys and the host allow/block lists."""
    self.itemProperty = 'plugin.video.master.reborn.container.items'
    self.metaProperty = 'plugin.video.master.reborn.container.meta'
    try:
        # Flatten every resolver's domain list into one lower-case,
        # order-preserving, de-duplicated list.
        self.hostDict = urlresolver.relevant_resolvers(order_matters=True)
        self.hostDict = [i.domains for i in self.hostDict if not '*' in i.domains]
        # NOTE(review): relies on the Python 2 builtin `reduce`.
        self.hostDict = [i.lower() for i in reduce(lambda x, y: x+y, self.hostDict)]
        self.hostDict = [x for y,x in enumerate(self.hostDict) if x not in self.hostDict[:y]]
    except:
        self.hostDict = []
    # Hosts never offered to the user.
    self.hostBlackList = ['youtube.com','uploading.site',
    'uploadkadeh.ir','uploadkadeh.com','adf.ly','indishare.me','rlsbb.com','nfo.rlsbb.com','bankupload.com','katfile.com','userboard.org','multiup.org','hitfile.net','letitbit.net','pastebin.com','myvideolinks.userboard.org','arabloads.net','multiup','uppit.com','4upld.com',
    'bdupload.org', 'bdupload.info','ziifile.com','bytewhale.com','go4up.com','file.rocks', 'mylinkgen.com']
    # Premium/direct-file host markers, merged into the premium host list.
    self.hostmyDict = ['uploadrocket.net','userscloud','alfafile','.avi','.mkv','.mov','.mp4','.xvid','.divx','oboom', 'rapidgator', 'rg.to', 'uploaded', 'ul.to', 'filefactory', 'nitroflare', 'turbobit', '1fichier','uptobox', '1fich', 'uploadrocket','uploading','hugefiles', 'uploaded' , 'clicknupload']
    self.hostprDict = self.hostDict + self.hostmyDict
    # Hosts known to require captcha interaction.
    self.hostcapDict = ['hugefiles.net', 'kingfiles.net', 'openload.io', 'openload.co', 'oload.tv', 'thevideo.me', 'vidup.me', 'streamin.to', 'torba.se']
    # URL markers for archives/images/subtitles, filtered in filter_zips().
    self.blacklist_zips = ['.zip', '.rar', '.jpeg', '.img', '.jpg', '.RAR', '.ZIP', '.png' , '.sub', '.srt']
    self.hostblockDict = []
    self.debridDict = debrid.debridDict()
@staticmethod
def sort_function(item):
    """Map a scraper result to a sortable quality token.

    Args:
        item: scraper link tuple; the quality lives in item[1][0]['quality']
            or item[1][0]['path']['quality'].
    Returns:
        A string that sorts HD tiers before SD tiers; unknown qualities are
        returned unchanged.
    """
    payload = item[1][0]
    quality = payload["quality"] if 'quality' in payload else payload["path"]["quality"]
    # Resolution prefixes checked first, exact tags next, SD prefixes last —
    # same precedence as the original if/elif chain.
    for prefix, token in (("1080", "HDa"), ("720", "HDb"), ("560", "HDc")):
        if quality.startswith(prefix):
            return token
    exact = {"DVD": "HDd", "HD": "HDe"}
    if quality in exact:
        return exact[quality]
    for prefix, token in (("480", "SDa"), ("360", "SDb"), ("SD", "SDc")):
        if quality.startswith(prefix):
            return token
    return quality
|
TheWardoctor/Wardoctors-repo
|
plugin.video.master.reborn/resources/lib/sources/__init__.py
|
Python
|
apache-2.0
| 52,029
|
[
"ADF"
] |
19713e508d6a025bf670817fccf09c70f9a6e3385a528aacca362b32730ce68b
|
from rdkit import Chem
from rdkit import rdBase
from rdkit import RDConfig
import os
from rdkit.Chem import rdMolDescriptors as rdMD
from rdkit.Chem import AllChem
haveDescrs3D = hasattr(rdMD, 'CalcAUTOCORR3D')
import time, unittest
def _gen3D(m, is3d, calculator):
    """Apply *calculator* to *m*, embedding a 3D conformer first if needed.

    When is3d is falsy, hydrogens are added and a conformer is generated
    with ETKDG using a fixed random seed for reproducibility.
    """
    if not is3d:
        m = Chem.AddHs(m)
        params = AllChem.ETKDG()
        params.randomSeed = 0xf00d
        AllChem.EmbedMolecule(m, params)
    return calculator(m)
class TestCase(unittest.TestCase):
    """Compare RDKit 2D/3D descriptor output against stored reference files.

    Each test reads a tab-separated reference file (molecule name followed
    by descriptor values) and checks the first 11 molecules of the EGFR
    test set against freshly computed values.
    """

    def setUp(self):
        # Reference data and molecules live in the GraphMol/Descriptors test dir.
        self.dataDir = os.path.join(RDConfig.RDBaseDir, 'Code', 'GraphMol', 'Descriptors', 'test_data')
        self.suppl = Chem.SDMolSupplier(os.path.join(self.dataDir, 'PBF_egfr.sdf'), removeHs=False)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def test1AUTOCORR2D(self):
        """2D autocorrelation descriptors vs. auto2D.out reference values."""
        # not really a 3D descriptor, but this was added at the same time
        with open(os.path.join(self.dataDir, 'auto2D.out')) as refFile:
            for i, m in enumerate(self.suppl):
                if i > 10:
                    break
                nm = m.GetProp('_Name')
                inl = refFile.readline()
                split = inl.split('\t')
                self.assertEqual(split[0], nm)
                split.pop(0)
                vs = rdMD.CalcAUTOCORR2D(m)
                for rv, nv in zip(split, vs):
                    self.assertAlmostEqual(float(rv), nv, delta=0.05)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def test2AUTOCORR3D(self):
        """3D autocorrelation descriptors vs. Dragon reference values."""
        with open(os.path.join(self.dataDir, 'auto3D_dragon.out')) as refFile:
            for i, m in enumerate(self.suppl):
                if i > 10:
                    break
                nm = m.GetProp('_Name')
                inl = refFile.readline()
                split = inl.split('\t')
                self.assertEqual(split[0], nm)
                split.pop(0)
                vs = _gen3D(m, True, rdMD.CalcAUTOCORR3D)
                for rv, nv in zip(split, vs):
                    self.assertAlmostEqual(float(rv), nv, delta=0.05)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def test3GETAWAY(self):
        """GETAWAY descriptors vs. GETAWAY.new.out reference values."""
        with open(os.path.join(self.dataDir, 'GETAWAY.new.out')) as refFile:
            for i, m in enumerate(self.suppl):
                if i > 10:
                    break
                nm = m.GetProp('_Name')
                inl = refFile.readline()
                split = inl.split('\t')
                self.assertEqual(split[0], nm)
                split.pop(0)
                vs = _gen3D(m, True, rdMD.CalcGETAWAY)
                for rv, nv in zip(split, vs):
                    self.assertAlmostEqual(float(rv), nv, delta=0.05)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def test4MORSE(self):
        """MoRSE descriptors; relative tolerance for non-tiny reference values."""
        with open(os.path.join(self.dataDir, 'MORSE.out')) as refFile:
            for i, m in enumerate(self.suppl):
                if i > 10:
                    break
                nm = m.GetProp('_Name')
                inl = refFile.readline()
                split = inl.split('\t')
                self.assertEqual(split[0], nm)
                split.pop(0)
                vs = _gen3D(m, True, rdMD.CalcMORSE)
                for rv, nv in zip(split, vs):
                    ref = float(rv)
                    # Small reference values are skipped; otherwise require <2% error.
                    self.assertTrue(ref < 1 or abs(ref - nv) / ref < 0.02)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def test5RDF(self):
        """RDF descriptors; relative tolerance for non-tiny reference values."""
        with open(os.path.join(self.dataDir, 'RDF.out')) as refFile:
            for i, m in enumerate(self.suppl):
                if i > 10:
                    break
                nm = m.GetProp('_Name')
                inl = refFile.readline()
                split = inl.split('\t')
                self.assertEqual(split[0], nm)
                split.pop(0)
                vs = _gen3D(m, True, rdMD.CalcRDF)
                for rv, nv in zip(split, vs):
                    ref = float(rv)
                    self.assertTrue(ref < 0.5 or abs(ref - nv) / ref < 0.02)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def test6WHIM(self):
        """WHIM descriptors (explicit threshold) vs. whim.new.out reference."""
        with open(os.path.join(self.dataDir, 'whim.new.out')) as refFile:
            for i, m in enumerate(self.suppl):
                if i > 10:
                    break
                nm = m.GetProp('_Name')
                inl = refFile.readline()
                split = inl.split('\t')
                self.assertEqual(split[0], nm)
                split.pop(0)
                vs = _gen3D(m, True, lambda x: rdMD.CalcWHIM(x, thresh=0.01))
                for rv, nv in zip(split, vs):
                    self.assertAlmostEqual(float(rv), nv, delta=0.01)

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def testGithub2037(self):
        """NPR1/NPR2 per-conformer results vs. force=False caching behaviour."""
        m = Chem.AddHs(Chem.MolFromSmiles("CCCCCCC"))
        cids = AllChem.EmbedMultipleConfs(m, 10)
        # start with defaults (which does not cache results):
        npr1s = []
        npr2s = []
        for cid in cids:
            npr1s.append(rdMD.CalcNPR1(m, confId=cid))
            npr2s.append(rdMD.CalcNPR2(m, confId=cid))
        for i in range(1, len(npr1s)):
            self.assertNotAlmostEqual(npr1s[0], npr1s[i])
            self.assertNotAlmostEqual(npr2s[0], npr2s[i])
        # now ensure that we can cache:
        npr1s = []
        npr2s = []
        for cid in cids:
            npr1s.append(rdMD.CalcNPR1(m, confId=cid, force=False))
            npr2s.append(rdMD.CalcNPR2(m, confId=cid, force=False))
        for i in range(1, len(npr1s)):
            self.assertAlmostEqual(npr1s[0], npr1s[i])
            self.assertAlmostEqual(npr2s[0], npr2s[i])

    @unittest.skipIf(not haveDescrs3D, "3d descriptors not present")
    def testGithub4167(self):
        """Spherocity index must honour confId and force across conformers."""
        with Chem.SDMolSupplier(os.path.join(self.dataDir, 'github4167.sdf'), removeHs=False,
                                sanitize=True) as suppl:
            m1 = suppl[0]
            m2 = suppl[1]
        m1.AddConformer(Chem.Conformer(m2.GetConformer()), assignId=True)
        v1_0 = rdMD.CalcSpherocityIndex(m1)
        v1_1 = rdMD.CalcSpherocityIndex(m1, confId=1, force=True)
        v2 = rdMD.CalcSpherocityIndex(m2)
        self.assertNotEqual(v1_0, v1_1)
        self.assertEqual(v1_1, v2)
# Run the test suite when executed as a script.
if __name__ == '__main__':
  unittest.main()
|
ptosco/rdkit
|
Code/GraphMol/Descriptors/Wrap/test3D.py
|
Python
|
bsd-3-clause
| 5,546
|
[
"RDKit"
] |
541c4a27a34d07c4da7df8acb7bcafb72de49160f5bbbfc9609dc72284402216
|
import os
import numpy as np
from os.path import join as pjoin
from dipy.viz import actor, window, widget, fvtk
from dipy.data import DATA_DIR
from dipy.data import fetch_viz_icons, read_viz_icons
import numpy.testing as npt
from dipy.testing.decorators import xvfb_it
# CI can disable these GUI tests entirely by setting TEST_WITH_XVFB=skip.
use_xvfb = os.environ.get('TEST_WITH_XVFB', False)
skip_it = use_xvfb == 'skip'
@npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it)
@xvfb_it
def test_button_and_slider_widgets():
    """Replay a recorded event log against a scene with three buttons and a slider,
    then assert the callback invocation counts and the rendered scene contents.

    Set ``recording = True`` to record a fresh event log into DATA_DIR instead
    of replaying the stored one.
    """
    recording = False
    filename = "test_button_and_slider_widgets.log.gz"
    recording_filename = pjoin(DATA_DIR, filename)
    renderer = window.Renderer()
    # create some minimalistic streamlines
    lines = [np.array([[-1, 0, 0.], [1, 0, 0.]]),
             np.array([[-1, 1, 0.], [1, 1, 0.]])]
    colors = np.array([[1., 0., 0.], [0.3, 0.7, 0.]])
    stream_actor = actor.streamtube(lines, colors)
    # counters incremented by the widget callbacks; asserted against the
    # event counts stored in the recorded log below
    states = {'camera_button_count': 0,
              'plus_button_count': 0,
              'minus_button_count': 0,
              'slider_moved_count': 0,
              }
    renderer.add(stream_actor)
    # the show manager allows to break the rendering process
    # in steps so that the widgets can be added properly
    show_manager = window.ShowManager(renderer, size=(800, 800))
    if recording:
        show_manager.initialize()
        show_manager.render()
    def button_callback(obj, event):
        print('Camera pressed')
        states['camera_button_count'] += 1
    def button_plus_callback(obj, event):
        print('+ pressed')
        states['plus_button_count'] += 1
    def button_minus_callback(obj, event):
        print('- pressed')
        states['minus_button_count'] += 1
    fetch_viz_icons()
    button_png = read_viz_icons(fname='camera.png')
    button = widget.button(show_manager.iren,
                           show_manager.ren,
                           button_callback,
                           button_png, (.98, 1.), (80, 50))
    button_png_plus = read_viz_icons(fname='plus.png')
    button_plus = widget.button(show_manager.iren,
                                show_manager.ren,
                                button_plus_callback,
                                button_png_plus, (.98, .9), (120, 50))
    button_png_minus = read_viz_icons(fname='minus.png')
    button_minus = widget.button(show_manager.iren,
                                 show_manager.ren,
                                 button_minus_callback,
                                 button_png_minus, (.98, .9), (50, 50))
    def print_status(obj, event):
        # slider callback: move the streamtube along x and count the move
        rep = obj.GetRepresentation()
        stream_actor.SetPosition((rep.GetValue(), 0, 0))
        states['slider_moved_count'] += 1
    slider = widget.slider(show_manager.iren, show_manager.ren,
                           callback=print_status,
                           min_value=-1,
                           max_value=1,
                           value=0.,
                           label="X",
                           right_normalized_pos=(.98, 0.6),
                           size=(120, 0), label_format="%0.2lf")
    # This callback is used to update the buttons/sliders' position
    # so they can stay on the right side of the window when the window
    # is being resized.
    global size
    size = renderer.GetSize()
    def win_callback(obj, event):
        global size
        if size != obj.GetSize():
            button.place(renderer)
            button_plus.place(renderer)
            button_minus.place(renderer)
            slider.place(renderer)
            size = obj.GetSize()
    if recording:
        # show_manager.add_window_callback(win_callback)
        # you can also register any callback in a vtk way like this
        # show_manager.window.AddObserver(vtk.vtkCommand.ModifiedEvent,
        #                                 win_callback)
        show_manager.record_events_to_file(recording_filename)
        print(states)
    else:
        # replay mode: the expected counts match the stored recording
        show_manager.play_events_from_file(recording_filename)
        npt.assert_equal(states["camera_button_count"], 7)
        npt.assert_equal(states["plus_button_count"], 3)
        npt.assert_equal(states["minus_button_count"], 4)
        npt.assert_equal(states["slider_moved_count"], 116)
    if not recording:
        button.Off()
        slider.Off()
        # Uncomment below to test the slider and button with analyze
        # button.place(renderer)
        # slider.place(renderer)
        arr = window.snapshot(renderer, size=(800, 800))
        report = window.analyze_snapshot(arr)
        # import pylab as plt
        # plt.imshow(report.labels, origin='lower')
        # plt.show()
        npt.assert_equal(report.objects, 4)
    report = window.analyze_renderer(renderer)
    npt.assert_equal(report.actors, 1)
@npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it)
@xvfb_it
def test_text_widget():
    """Build a scene with a button and a toggling text widget, snapshot it
    offscreen, and verify the number of segmented objects.

    Set ``interactive = True`` to drive the scene manually instead.
    """
    interactive = False
    renderer = window.Renderer()
    axes = fvtk.axes()
    window.add(renderer, axes)
    renderer.ResetCamera()
    show_manager = window.ShowManager(renderer, size=(900, 900))
    if interactive:
        show_manager.initialize()
        show_manager.render()
    fetch_viz_icons()
    button_png = read_viz_icons(fname='home3.png')
    def button_callback(obj, event):
        print('Button Pressed')
    button = widget.button(show_manager.iren,
                           show_manager.ren,
                           button_callback,
                           button_png, (.8, 1.2), (100, 100))
    # module-level flag toggled by the text callback below
    global rulez
    rulez = True
    def text_callback(obj, event):
        # alternate the text actor's message on each selection
        global rulez
        print('Text selected')
        if rulez:
            obj.GetTextActor().SetInput("Diffusion Imaging Rulez!!")
            rulez = False
        else:
            obj.GetTextActor().SetInput("Diffusion Imaging in Python")
            rulez = True
        show_manager.render()
    text = widget.text(show_manager.iren,
                       show_manager.ren,
                       text_callback,
                       message="Diffusion Imaging in Python",
                       left_down_pos=(0., 0.),
                       right_top_pos=(0.4, 0.05),
                       opacity=1.,
                       border=False)
    if not interactive:
        # removed a redundant trailing `pass` that followed these calls
        button.Off()
        text.Off()
    if interactive:
        show_manager.render()
        show_manager.start()
    arr = window.snapshot(renderer, size=(900, 900))
    report = window.analyze_snapshot(arr)
    npt.assert_equal(report.objects, 3)
    # If you want to see the segmented objects after the analysis is finished
    # you can use imshow(report.labels, origin='lower')
# If you want to see the segmented objects after the analysis is finished
# you can use imshow(report.labels, origin='lower')
# Execute this module's tests with numpy's test runner when run directly.
if __name__ == "__main__":
    npt.run_module_suite()
|
villalonreina/dipy
|
dipy/viz/tests/test_fvtk_widgets.py
|
Python
|
bsd-3-clause
| 6,791
|
[
"VTK"
] |
458877fb78099b5d5c46f5e2d84c522902610b5e3c59941434836b0052878e85
|
"""This module holds classes for image loading and manipulation."""
import copy
import io
import pathlib
from collections import Counter, Iterable
from datetime import datetime
from io import BytesIO, BufferedReader
import re
import os.path as osp
import os
from typing import Union, Sequence, List, Any, Tuple, Optional, BinaryIO
import pydicom
from pydicom.errors import InvalidDicomError
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image as pImage
from scipy import ndimage
import scipy.ndimage.filters as spf
import argue
from .utilities import is_close
from .geometry import Point
from .io import get_url, TemporaryZipDirectory, retrieve_filenames, is_dicom_image, retrieve_dicom_file
from .profile import stretch as stretcharray
from .typing import NumberLike
from ..settings import get_dicom_cmap, PATH_TRUNCATION_LENGTH
# Sentinel strings identifying what kind of data an image was built from.
ARRAY = 'Array'
DICOM = 'DICOM'
IMAGE = 'Image'
# Sentinel strings identifying whether the image came from a file path or a stream.
FILE_TYPE = 'file'
STREAM_TYPE = 'stream'
# Conversion factor between dots-per-inch and dots-per-mm.
MM_PER_INCH = 25.4
# Union of the concrete image classes defined in this module (forward references).
ImageLike = Union['DicomImage', 'ArrayImage', 'FileImage', 'LinacDicomImage']
def equate_images(image1: ImageLike, image2: ImageLike) -> Tuple[ImageLike, ImageLike]:
    """Crop and resize two images to make them:

    * The same pixel dimensions
    * The same DPI

    The usefulness of the function comes when trying to compare images from different sources.
    The best example is calculating gamma on a machine log fluence and EPID image. The physical
    and pixel dimensions must be normalized, the SID normalized

    Parameters
    ----------
    image1 : {:class:`~pylinac.core.image.ArrayImage`, :class:`~pylinac.core.image.DicomImage`, :class:`~pylinac.core.image.FileImage`}
        Must have DPI and SID.
    image2 : {:class:`~pylinac.core.image.ArrayImage`, :class:`~pylinac.core.image.DicomImage`, :class:`~pylinac.core.image.FileImage`}
        Must have DPI and SID.

    Returns
    -------
    image1 : :class:`~pylinac.core.image.ArrayImage`
    image2 : :class:`~pylinac.core.image.ArrayImage`
        The returns are new instances of Images.
    """
    # work on copies so the caller's images are untouched
    image1 = copy.deepcopy(image1)
    image2 = copy.deepcopy(image2)
    # crop images to be the same physical size
    # ...crop height
    physical_height_diff = image1.physical_shape[0] - image2.physical_shape[0]
    if physical_height_diff < 0:  # image2 is bigger
        img = image2
    else:
        img = image1
    pixel_height_diff = abs(int(round(-physical_height_diff * img.dpmm / 2)))
    if pixel_height_diff > 0:
        # use crop() (remove_edges is deprecated); skip a 0-pixel crop, which
        # would otherwise empty the array via the `[:-0]` slice
        img.crop(pixel_height_diff, edges=('top', 'bottom'))
    # ...crop width
    physical_width_diff = image1.physical_shape[1] - image2.physical_shape[1]
    if physical_width_diff > 0:
        img = image1
    else:
        img = image2
    pixel_width_diff = abs(int(round(physical_width_diff * img.dpmm / 2)))
    if pixel_width_diff > 0:
        img.crop(pixel_width_diff, edges=('left', 'right'))
    # resize image2 to match image1's pixel dimensions
    zoom_factor = image1.shape[1] / image2.shape[1]
    # ndimage.zoom: the `ndimage.interpolation` namespace is deprecated in scipy
    image2_array = ndimage.zoom(image2.as_type(float), zoom_factor)
    image2 = load(image2_array, dpi=image2.dpi * zoom_factor)
    return image1, image2
def is_image(path: Union[str, io.BytesIO, ImageLike, np.ndarray]) -> bool:
    """Whether *path* is a numpy array, a readable DICOM file, or a Pillow-readable image.

    Returns
    -------
    bool
    """
    # evaluate all three checks eagerly, as the original implementation did
    checks = (_is_array(path), _is_dicom(path), _is_image_file(path))
    return any(checks)
def retrieve_image_files(path: str) -> List[str]:
    """Collect the file names of every valid image file under *path*.

    Returns
    -------
    list
        Contains strings pointing to valid image paths.
    """
    return retrieve_filenames(func=is_image, directory=path)
def load(path: Union[str, ImageLike, np.ndarray, BinaryIO], **kwargs) -> ImageLike:
    """Load a DICOM image, JPG/TIF/BMP image, or numpy 2D array.

    Parameters
    ----------
    path : str, file-object
        The path to the image file or data stream or array.
    kwargs
        See :class:`~pylinac.core.image.FileImage`, :class:`~pylinac.core.image.DicomImage`,
        or :class:`~pylinac.core.image.ArrayImage` for keyword arguments.

    Returns
    -------
    ::class:`~pylinac.core.image.FileImage`, :class:`~pylinac.core.image.ArrayImage`, or :class:`~pylinac.core.image.DicomImage`
        Return type depends on input image.

    Examples
    --------
    Load an image from a file::

        >>> from pylinac.core.image import load
        >>> img = load("image.tif")  # returns a FileImage

    Loading from an array is just like loading from a file::

        >>> arr = np.arange(36).reshape(6, 6)
        >>> img = load(arr)  # returns an ArrayImage
    """
    # an already-constructed image passes straight through
    if isinstance(path, BaseImage):
        return path
    # dispatch in the same priority order as before: array, DICOM, image file
    dispatch = ((_is_array, ArrayImage),
                (_is_dicom, DicomImage),
                (_is_image_file, FileImage))
    for matches, image_cls in dispatch:
        if matches(path):
            return image_cls(path, **kwargs)
    raise TypeError(f"The argument `{path}` was not found to be a valid DICOM file, Image file, or array")
def load_url(url: str, progress_bar: bool = True, **kwargs) -> ImageLike:
    """Download an image from a URL and load it.

    Parameters
    ----------
    url : str
        A string pointing to a valid URL that points to a file.

        .. note:: For some images (e.g. Github), the raw binary URL must be used, not simply the basic link.
    progress_bar: bool
        Whether to display a progress bar of download status.
    """
    local_file = get_url(url, progress_bar=progress_bar)
    return load(local_file, **kwargs)
@argue.options(method=('mean', 'max', 'sum'))
def load_multiples(image_file_list: Sequence, method: str = 'mean', stretch_each: bool = True, **kwargs) -> ImageLike:
    """Superimpose multiple image files into a single combined image.

    Parameters
    ----------
    image_file_list : list
        A list of the files to be superimposed.
    method : {'mean', 'max', 'sum'}
        A string specifying how the image values should be combined.
    stretch_each : bool
        Whether to normalize the images being combined by stretching their high/low values to the same values across images.
    kwargs :
        Further keyword arguments are passed to the load function and stretch function.

    Examples
    --------
    Load multiple images::

        >>> from pylinac.core.image import load_multiples
        >>> paths = ['starshot1.tif', 'starshot2.tif']
        >>> superimposed_img = load_multiples(paths)
    """
    images = [load(f, **kwargs) for f in image_file_list]
    reference = images[0]
    for img in images:
        # all inputs must share one shape to be stacked
        if img.shape != reference.shape:
            raise ValueError("Images were not the same shape")
        if stretch_each:
            img.array = stretcharray(img.array, fill_dtype=kwargs.get('dtype'))
    # stack along a new axis and collapse it with the chosen combiner
    stacked = np.dstack(tuple(img.array for img in images))
    combiner = {'mean': np.mean, 'max': np.max, 'sum': np.sum}[method]
    # replace the first image's array and return it, as before
    reference.array = combiner(stacked, axis=2)
    return reference
def _is_dicom(path: Union[str, io.BytesIO, ImageLike, np.ndarray]) -> bool:
    """True when pydicom can read *path* as a DICOM image file."""
    return is_dicom_image(file=path)
def _is_image_file(path: str) -> bool:
    """Whether the file is a readable image file via Pillow.

    Uses a context manager so the file handle opened by Pillow is always
    closed, and catches ``Exception`` rather than a bare ``except`` so
    ``KeyboardInterrupt``/``SystemExit`` are no longer swallowed.
    """
    try:
        with pImage.open(path):
            return True
    except Exception:
        return False
def _is_array(obj: Any) -> bool:
"""Whether the object is a numpy array."""
return isinstance(obj, np.ndarray)
class BaseImage:
    """Base class for the Image classes.

    Attributes
    ----------
    path : str
        The path to the image file.
    array : numpy.ndarray
        The actual image pixel array.
    """

    def __init__(self, path: Union[str, BytesIO, ImageLike, np.ndarray, BufferedReader]):
        """
        Parameters
        ----------
        path : str
            The path to the image.
        """
        if isinstance(path, (str, pathlib.Path)) and not osp.isfile(path):
            # NOTE(review): historically raises FileExistsError (not FileNotFoundError);
            # the type is kept because callers may catch it.
            raise FileExistsError(f"File `{path}` does not exist. Verify the file path name.")
        elif isinstance(path, (str, pathlib.Path)) and osp.isfile(path):
            self.path = path
            self.base_path = osp.basename(path)
            self.source = FILE_TYPE
        else:
            # anything else is treated as a data stream; rewind it before reading
            self.source = STREAM_TYPE
            path.seek(0)
            try:
                self.path = str(pathlib.Path(path.name))
            except AttributeError:
                self.path = ''

    @property
    def truncated_path(self) -> str:
        """The file path, shortened for display if longer than PATH_TRUNCATION_LENGTH."""
        if self.source == FILE_TYPE:
            if len(self.path) > PATH_TRUNCATION_LENGTH:
                return self.path[:PATH_TRUNCATION_LENGTH // 2] + '...' + self.path[-PATH_TRUNCATION_LENGTH // 2:]
            else:
                return self.path
        else:
            return ''  # was from stream, no path

    @classmethod
    def from_multiples(cls, filelist: List[str], method: str = 'mean', stretch: bool = True, **kwargs) -> ImageLike:
        """Load an instance from multiple image items. See :func:`~pylinac.core.image.load_multiples`."""
        return load_multiples(filelist, method, stretch, **kwargs)

    @property
    def center(self) -> Point:
        """Return the center position of the image array as a Point."""
        x_center = (self.shape[1] / 2) - 0.5
        y_center = (self.shape[0] / 2) - 0.5
        return Point(x_center, y_center)

    @property
    def physical_shape(self) -> Tuple[float, float]:
        """The physical size of the image in mm."""
        return self.shape[0] / self.dpmm, self.shape[1] / self.dpmm

    def date_created(self, format: str = "%A, %B %d, %Y") -> str:
        """Return the creation date of the image, formatted per *format*.

        Tries DICOM InstanceCreationDate/Time first, then StudyDate, then the
        file's ctime; returns 'Unknown' if none are available.
        """
        date = None
        try:
            date = datetime.strptime(self.metadata.InstanceCreationDate + str(round(float(self.metadata.InstanceCreationTime))), "%Y%m%d%H%M%S")
            date = date.strftime(format)
        except (AttributeError, ValueError):
            try:
                date = datetime.strptime(self.metadata.StudyDate, "%Y%m%d")
                date = date.strftime(format)
            except Exception:
                # narrowed from a bare except: don't swallow KeyboardInterrupt/SystemExit
                pass
        if date is None:
            try:
                date = datetime.fromtimestamp(osp.getctime(self.path)).strftime(format)
            except (AttributeError, OSError):
                # OSError covers stream-sourced images whose path is '' (getctime fails)
                date = 'Unknown'
        return date

    def plot(self, ax: plt.Axes = None, show: bool = True, clear_fig: bool = False, **kwargs) -> plt.Axes:
        """Plot the image.

        Parameters
        ----------
        ax : matplotlib.Axes instance
            The axis to plot the image to. If None, creates a new figure.
        show : bool
            Whether to actually show the image. Set to false when plotting multiple items.
        clear_fig : bool
            Whether to clear the prior items on the figure before plotting.
        """
        if ax is None:
            fig, ax = plt.subplots()
        if clear_fig:
            plt.clf()
        ax.imshow(self.array, cmap=get_dicom_cmap(), **kwargs)
        if show:
            plt.show()
        return ax

    @argue.options(kind=('median', 'gaussian'))
    def filter(self, size: Union[float, int] = 0.05, kind: str = 'median') -> None:
        """Filter the profile in-place.

        Parameters
        ----------
        size : int, float
            Size of the median filter to apply.
            If a float, the size is the ratio of the length. Must be in the range 0-1.
            E.g. if size=0.1 for a 1000-element array, the filter will be 100 elements.
            If an int, the filter is the size passed.
        kind : {'median', 'gaussian'}
            The kind of filter to apply. If gaussian, *size* is the sigma value.
        """
        if isinstance(size, float):
            if 0 < size < 1:
                size *= len(self.array)
                size = max(size, 1)
            else:
                raise TypeError("Float was passed but was not between 0 and 1")
        if kind == 'median':
            # median_filter requires an integral size; a fractional input would
            # otherwise raise inside scipy, so round it here
            self.array = ndimage.median_filter(self.array, size=int(round(size)))
        elif kind == 'gaussian':
            # sigma may legitimately be fractional; do not round it
            self.array = ndimage.gaussian_filter(self.array, sigma=size)

    def crop(self, pixels: int = 15, edges: Tuple[str, ...] = ('top', 'bottom', 'left', 'right')) -> None:
        """Removes pixels on all edges of the image in-place.

        Parameters
        ----------
        pixels : int
            Number of pixels to cut off all sides of the image.
        edges : tuple
            Which edges to remove from. Can be any combination of the four edges.
        """
        if pixels < 0:
            raise ValueError("Pixels to remove must be a positive number")
        if pixels == 0:
            # a zero-pixel crop is a no-op; the `[:-0]` slices below would
            # otherwise empty the array
            return
        if 'top' in edges:
            self.array = self.array[pixels:, :]
        if 'bottom' in edges:
            self.array = self.array[:-pixels, :]
        if 'left' in edges:
            self.array = self.array[:, pixels:]
        if 'right' in edges:
            self.array = self.array[:, :-pixels]

    def remove_edges(self, pixels: int = 15, edges: Tuple[str, ...] = ('top', 'bottom', 'left', 'right')) -> None:
        """Deprecated alias of :meth:`crop`; removes pixels from the image edges in-place.

        Parameters
        ----------
        pixels : int
            Number of pixels to cut off all sides of the image.
        edges : tuple
            Which edges to remove from. Can be any combination of the four edges.
        """
        import warnings  # local import: only needed on this deprecated path
        # the original code constructed a DeprecationWarning but never issued it
        warnings.warn("`remove_edges` is deprecated and will be removed in a future version. Use `crop` instead",
                      DeprecationWarning)
        self.crop(pixels=pixels, edges=edges)

    def flipud(self) -> None:
        """ Flip the image array upside down in-place. Wrapper for np.flipud()"""
        self.array = np.flipud(self.array)

    def fliplr(self) -> None:
        """ Flip the image array left-right in-place. Wrapper for np.fliplr()"""
        self.array = np.fliplr(self.array)

    def invert(self) -> None:
        """Invert (imcomplement) the image."""
        orig_array = self.array
        self.array = -orig_array + orig_array.max() + orig_array.min()

    def roll(self, direction: str = 'x', amount: int = 1) -> None:
        """Roll the image array around in-place. Wrapper for np.roll().

        Parameters
        ----------
        direction : {'x', 'y'}
            The axis to roll over.
        amount : int
            The amount of elements to roll over.
        """
        axis = 1 if direction == 'x' else 0
        self.array = np.roll(self.array, amount, axis=axis)

    def rot90(self, n: int = 1) -> None:
        """Wrapper for numpy.rot90; rotate the array by 90 degrees CCW."""
        self.array = np.rot90(self.array, n)

    @argue.options(kind=('high', 'low'))
    def threshold(self, threshold: int, kind: str = 'high') -> None:
        """Apply a high- or low-pass threshold filter.

        Parameters
        ----------
        threshold : int
            The cutoff value.
        kind : str
            If ``high`` (default), will apply a high-pass threshold. All values above the cutoff are left as-is.
            Remaining points are set to 0.
            If ``low``, will apply a low-pass threshold.
        """
        # fixed: the original passed `self` (the image object) to np.where
        # instead of `self.array`, corrupting the result
        if kind == 'high':
            self.array = np.where(self.array >= threshold, self.array, 0)
        else:
            self.array = np.where(self.array <= threshold, self.array, 0)

    def as_binary(self, threshold: int) -> ImageLike:
        """Return a binary (black & white) image based on the given threshold.

        Parameters
        ----------
        threshold : int, float
            The threshold value. If the value is above or equal to the threshold it is set to 1, otherwise to 0.

        Returns
        -------
        ArrayImage
        """
        array = np.where(self.array >= threshold, 1, 0)
        return ArrayImage(array)

    def dist2edge_min(self, point: Union[Point, Tuple]) -> float:
        """Calculates minimum distance from given point to image edges.

        Parameters
        ----------
        point : geometry.Point, tuple

        Returns
        -------
        float
        """
        if isinstance(point, tuple):
            point = Point(point)
        rows = self.shape[0]
        cols = self.shape[1]
        disttoedge = np.zeros(4)
        disttoedge[0] = rows - point.y
        disttoedge[1] = cols - point.x
        disttoedge[2] = point.y
        disttoedge[3] = point.x
        return min(disttoedge)

    def ground(self) -> float:
        """Ground the profile such that the lowest value is 0.

        .. note::
            This will also "ground" profiles that are negative or partially-negative.
            For such profiles, be careful that this is the behavior you desire.

        Returns
        -------
        float
            The amount subtracted from the image.
        """
        min_val = self.array.min()
        self.array -= min_val
        return min_val

    def normalize(self, norm_val: Union[str, NumberLike] = 'max') -> None:
        """Normalize the image values to the given value.

        Parameters
        ----------
        norm_val : str, number
            If a string, must be 'max', which normalizes the values to the maximum value.
            If a number, normalizes all values to that number.
        """
        if norm_val == 'max':
            val = self.array.max()
        else:
            val = norm_val
        self.array = self.array / val

    def check_inversion(self, box_size: int = 20, position: Sequence = (0.0, 0.0)) -> None:
        """Check the image for inversion by sampling the 4 image corners.
        If the average value of the four corners is above the average pixel value, then it is very likely inverted.

        Parameters
        ----------
        box_size : int
            The size in pixels of the corner box to detect inversion.
        position : 2-element sequence
            The location of the sampling boxes.
        """
        row_pos = max(int(position[0] * self.array.shape[0]), 1)
        col_pos = max(int(position[1] * self.array.shape[1]), 1)
        lt_upper = self.array[row_pos: row_pos + box_size, col_pos: col_pos + box_size]
        rt_upper = self.array[row_pos: row_pos + box_size, -col_pos - box_size: -col_pos]
        lt_lower = self.array[-row_pos - box_size:-row_pos, col_pos: col_pos + box_size]
        rt_lower = self.array[-row_pos - box_size:-row_pos, -col_pos - box_size:-col_pos]
        avg = np.mean((lt_upper, lt_lower, rt_upper, rt_lower))
        if avg > np.mean(self.array.flatten()):
            self.invert()

    def check_inversion_by_histogram(self, percentiles=(5, 50, 95)) -> bool:
        """Check the inversion of the image using histogram analysis. The assumption is that the image
        is mostly background-like values and that there is a relatively small amount of dose getting to the image
        (e.g. a picket fence image). This function looks at the distance from one percentile to another to determine
        if the image should be inverted.

        Parameters
        ----------
        percentiles : 3-element tuple
            The 3 percentiles to compare. Default is (5, 50, 95). Recommend using (x, 50, y). To invert the other way
            (where pixel value is *decreasing* with dose, reverse the percentiles, e.g. (95, 50, 5).
        """
        was_inverted = False
        p5 = np.percentile(self.array, percentiles[0])
        p50 = np.percentile(self.array, percentiles[1])
        p95 = np.percentile(self.array, percentiles[2])
        dist_to_5 = abs(p50 - p5)
        dist_to_95 = abs(p50 - p95)
        if dist_to_5 > dist_to_95:
            was_inverted = True
            self.invert()
        return was_inverted

    @argue.bounds(threshold=(0.0, 1.0))
    def gamma(self, comparison_image: ImageLike, doseTA: NumberLike = 1, distTA: NumberLike = 1,
              threshold: NumberLike = 0.1, ground: bool = True, normalize: bool = True) -> np.ndarray:
        """Calculate the gamma between the current image (reference) and a comparison image.

        .. versionadded:: 1.2

        The gamma calculation is based on `Bakai et al
        <http://iopscience.iop.org/0031-9155/48/21/006/>`_ eq.6,
        which is a quicker alternative to the standard Low gamma equation.

        Parameters
        ----------
        comparison_image : {:class:`~pylinac.core.image.ArrayImage`, :class:`~pylinac.core.image.DicomImage`, or :class:`~pylinac.core.image.FileImage`}
            The comparison image. The image must have the same DPI/DPMM to be comparable.
            The size of the images must also be the same.
        doseTA : int, float
            Dose-to-agreement in percent; e.g. 2 is 2%.
        distTA : int, float
            Distance-to-agreement in mm.
        threshold : float
            The dose threshold percentage of the maximum dose, below which is not analyzed.
            Must be between 0 and 1.
        ground : bool
            Whether to "ground" the image values. If true, this sets both datasets to have the minimum value at 0.
            This can fix offset errors in the data.
        normalize : bool
            Whether to normalize the images. This sets the max value of each image to the same value.

        Returns
        -------
        gamma_map : numpy.ndarray
            The calculated gamma map.

        See Also
        --------
        :func:`~pylinac.core.image.equate_images`
        """
        # error checking
        if not is_close(self.dpi, comparison_image.dpi, delta=0.1):
            raise AttributeError(f"The image DPIs to not match: {self.dpi:.2f} vs. {comparison_image.dpi:.2f}")
        same_x = is_close(self.shape[1], comparison_image.shape[1], delta=1.1)
        same_y = is_close(self.shape[0], comparison_image.shape[0], delta=1.1)
        if not (same_x and same_y):
            raise AttributeError(f"The images are not the same size: {self.shape} vs. {comparison_image.shape}")
        # set up reference and comparison images
        ref_img = ArrayImage(copy.copy(self.array))
        ref_img.check_inversion_by_histogram()
        if ground:
            ref_img.ground()
        if normalize:
            ref_img.normalize()
        comp_img = ArrayImage(copy.copy(comparison_image.array))
        comp_img.check_inversion_by_histogram()
        if ground:
            comp_img.ground()
        if normalize:
            comp_img.normalize()
        # invalidate dose values below threshold so gamma doesn't calculate over it
        # NOTE(review): the comparisons below rely on numpy coercing the image
        # objects to arrays; the expressions are kept exactly as-is because
        # ArrayImage's operator support is defined outside this module.
        ref_img.array[ref_img < threshold * np.max(ref_img)] = np.NaN
        # convert distance value from mm to pixels
        distTA_pixels = self.dpmm * distTA
        # construct image gradient using sobel filter
        img_x = spf.sobel(ref_img.as_type(np.float32), 1)
        img_y = spf.sobel(ref_img.as_type(np.float32), 0)
        grad_img = np.hypot(img_x, img_y)
        # equation: (measurement - reference) / sqrt ( doseTA^2 + distTA^2 * image_gradient^2 )
        subtracted_img = np.abs(comp_img - ref_img)
        denominator = np.sqrt(((doseTA / 100.0) ** 2) + ((distTA_pixels ** 2) * (grad_img ** 2)))
        gamma_map = subtracted_img / denominator
        return gamma_map

    def as_type(self, dtype) -> np.ndarray:
        """Return the pixel array cast to *dtype*; does not modify the image."""
        return self.array.astype(dtype)

    @property
    def shape(self) -> Tuple[int, int]:
        """The (rows, cols) shape of the pixel array."""
        return self.array.shape

    @property
    def size(self) -> int:
        """The total number of pixels."""
        return self.array.size

    @property
    def ndim(self) -> int:
        """The number of array dimensions."""
        return self.array.ndim

    @property
    def dtype(self) -> np.dtype:
        """The data type of the pixel array."""
        return self.array.dtype

    def sum(self) -> float:
        """The sum of all pixel values."""
        return self.array.sum()

    def ravel(self) -> np.ndarray:
        """The pixel array flattened to 1D."""
        return self.array.ravel()

    @property
    def flat(self) -> np.ndarray:
        """A flat iterator over the pixel array."""
        return self.array.flat

    def __len__(self):
        return len(self.array)

    def __getitem__(self, item):
        return self.array[item]
class DicomImage(BaseImage):
    """An image from a DICOM RTImage file.

    Attributes
    ----------
    metadata : pydicom Dataset
        The dataset of the file as returned by pydicom without pixel data.
    """
    metadata: pydicom.FileDataset
    # fixed: these were previously `_sid = NumberLike` (assigning the type
    # object as a class attribute) rather than proper annotations
    _sid: NumberLike
    _dpi: NumberLike

    def __init__(self, path: Union[str, BytesIO, BufferedReader], *, dtype=None, dpi: NumberLike = None, sid: NumberLike = None):
        """
        Parameters
        ----------
        path : str, file-object
            The path to the file or the data stream.
        dtype : dtype, None, optional
            The data type to cast the image data as. If None, will use whatever raw image format is.
        dpi : int, float
            The dots-per-inch of the image, defined at isocenter.

            .. note:: If a DPI tag is found in the image, that value will override the parameter, otherwise this one
                will be used.
        sid : int, float
            The Source-to-Image distance in mm.
        """
        super().__init__(path)
        self._sid = sid
        self._dpi = dpi
        # read the file once to get just the DICOM metadata
        self.metadata = retrieve_dicom_file(path)
        self._original_dtype = self.metadata.pixel_array.dtype
        # read a second time to get pixel data; streams must be rewound first
        try:
            path.seek(0)
        except AttributeError:
            pass  # a plain file path, not a stream
        ds = retrieve_dicom_file(path)
        if dtype is not None:
            self.array = ds.pixel_array.astype(dtype)
        else:
            self.array = ds.pixel_array.copy()
        # convert values to HU or CU: real_values = slope * raw + intercept
        has_all_rescale_tags = hasattr(self.metadata, 'RescaleSlope') and hasattr(self.metadata, 'RescaleIntercept') and hasattr(self.metadata, 'PixelIntensityRelationshipSign')
        has_some_rescale_tags = hasattr(self.metadata, 'RescaleSlope') and hasattr(self.metadata, 'RescaleIntercept')
        is_ct_storage = self.metadata.SOPClassUID.name == 'CT Image Storage'
        if has_all_rescale_tags:
            self.array = ((self.metadata.RescaleSlope * self.array) + self.metadata.RescaleIntercept) * self.metadata.PixelIntensityRelationshipSign
        elif is_ct_storage or has_some_rescale_tags:
            self.array = (self.metadata.RescaleSlope * self.array) + self.metadata.RescaleIntercept
        else:
            # no rescale info: invert the image so dose increases with value
            orig_array = self.array
            self.array = -orig_array + orig_array.max() + orig_array.min()

    def save(self, filename: str) -> str:
        """Save the image instance back out to a .dcm file.

        Returns
        -------
        A string pointing to the new filename.
        """
        if self.metadata.SOPClassUID.name == 'CT Image Storage':
            # undo the HU rescaling applied at load time before writing raw pixels
            self.array = (self.array - int(self.metadata.RescaleIntercept)) / int(self.metadata.RescaleSlope)
        self.metadata.PixelData = self.array.astype(self._original_dtype).tobytes()
        self.metadata.save_as(filename)
        return filename

    @property
    def sid(self) -> NumberLike:
        """The Source-to-Image in mm."""
        try:
            return float(self.metadata.RTImageSID)
        except (AttributeError, TypeError, ValueError):
            # narrowed from a bare except: tag missing or unparsable,
            # fall back to the constructor-supplied value
            return self._sid

    @property
    def dpi(self) -> NumberLike:
        """The dots-per-inch of the image, defined at isocenter."""
        try:
            return self.dpmm * MM_PER_INCH
        except TypeError:
            # narrowed from a bare except: dpmm was None (no pixel-spacing
            # tags); fall back to the constructor-supplied value
            return self._dpi

    @property
    def dpmm(self) -> NumberLike:
        """The Dots-per-mm of the image, defined at isocenter. E.g. if an EPID image is taken at 150cm SID,
        the dpmm will scale back to 100cm."""
        dpmm = None
        for tag in ('PixelSpacing', 'ImagePlanePixelSpacing'):
            mmpd = self.metadata.get(tag)
            if mmpd is not None:
                dpmm = 1 / mmpd[0]
                break
        if dpmm is not None and self.sid is not None:
            # scale the spacing back to isocenter using the SID
            dpmm *= self.sid / 1000
        elif dpmm is None and self._dpi is not None:
            dpmm = self._dpi / MM_PER_INCH
        return dpmm

    @property
    def cax(self) -> Point:
        """The position of the beam central axis. If no DICOM translation tags are found then the center is returned.

        Uses this tag: https://dicom.innolitics.com/ciods/rt-beams-delivery-instruction/rt-beams-delivery-instruction/00741020/00741030/3002000d"""
        try:
            x = self.center.x - self.metadata.XRayImageReceptorTranslation[0]
            y = self.center.y - self.metadata.XRayImageReceptorTranslation[1]
        except AttributeError:
            return self.center
        else:
            return Point(x, y)
class LinacDicomImage(DicomImage):
    """DICOM image taken on a linac. Also allows passing of gantry/coll/couch values via the filename."""
    gantry_keyword = 'Gantry'
    collimator_keyword = 'Coll'
    couch_keyword = 'Couch'
    _use_filenames: bool

    def __init__(self, path: str, use_filenames: bool = False):
        super().__init__(path)
        self._use_filenames = use_filenames

    @property
    def gantry_angle(self) -> float:
        """Gantry angle of the irradiation."""
        return self._get_axis_value(self.gantry_keyword.lower(), 'GantryAngle')

    @property
    def collimator_angle(self) -> float:
        """Collimator angle of the irradiation."""
        return self._get_axis_value(self.collimator_keyword.lower(), 'BeamLimitingDeviceAngle')

    @property
    def couch_angle(self) -> float:
        """Couch angle of the irradiation."""
        return self._get_axis_value(self.couch_keyword.lower(), 'PatientSupportAngle')

    def _get_axis_value(self, axis_str: str, axis_dcm_attr: str) -> float:
        """Look up an axis value, preferring the filename over the DICOM metadata.

        Parameters
        ----------
        axis_str : str
            The string to look for in the filename.
        axis_dcm_attr : str
            The DICOM attribute that should contain the axis value.

        Returns
        -------
        float
        """
        axis = None
        if self._use_filenames:
            fname = osp.basename(self.path).lower()
            if axis_str.lower() not in fname:
                # keyword absent from the filename: treat the axis as zero
                axis = 0
            else:
                # keyword present: it must follow the '<axis>###' convention
                match = re.search(r'(?<={})\d+'.format(axis_str.lower()), fname)
                if match is None:
                    raise ValueError(
                        f"The filename contains '{axis_str}' but could not read a number following it. Use the format '...{axis_str}<#>...'")
                axis = float(match.group())
        if axis is None:
            # fall back to the DICOM tag; a missing tag means zero
            try:
                axis = float(getattr(self.metadata, axis_dcm_attr))
            except AttributeError:
                axis = 0
        # snap values within 1 degree of 0 or 360 to exactly 0
        return 0 if is_close(axis, [0, 360], delta=1) else axis
class FileImage(BaseImage):
    """An image from a "regular" file (.tif, .jpg, .bmp).

    Attributes
    ----------
    info : dict
        The info dictionary as generated by Pillow.
    sid : float
        The SID value as passed in upon construction.
    """

    def __init__(self, path: str, *, dpi: NumberLike=None, sid: NumberLike=None, dtype=None):
        """
        Parameters
        ----------
        path : str, file-object
            The path to the file or a data stream.
        dpi : int, float
            The dots-per-inch of the image, defined at isocenter.

            .. note:: If a DPI tag is found in the image, that value will override the parameter, otherwise this one
                will be used.
        sid : int, float
            The Source-to-Image distance in mm.
        dtype : numpy.dtype
            The data type to cast the array as.
        """
        super().__init__(path)
        pil_image = pImage.open(path)
        # Convert anything that isn't already grayscale to 32-bit float gray.
        if pil_image.mode not in ('F', 'L', '1'):
            pil_image = pil_image.convert('F')
        self.info = pil_image.info
        if dtype is None:
            self.array = np.array(pil_image)
        else:
            self.array = np.array(pil_image, dtype=dtype)
        self._dpi = dpi
        self.sid = sid

    @property
    def dpi(self) -> float:
        """The dots-per-inch of the image, defined at isocenter."""
        # A DPI tag embedded in the file wins over the constructor value.
        found = None
        for key in ('dpi', 'resolution'):
            tag = self.info.get(key)
            if tag is not None:
                found = float(tag[0])
                break
        if found is None:
            found = self._dpi
        # Scale from the detector plane back to isocenter when SID is known.
        if found is not None and self.sid is not None:
            found *= self.sid / 1000
        return found

    @property
    def dpmm(self) -> Optional[float]:
        """The Dots-per-mm of the image, defined at isocenter. E.g. if an EPID image is taken at 150cm SID,
        the dpmm will scale back to 100cm."""
        try:
            return self.dpi / MM_PER_INCH
        except TypeError:
            # dpi is None -> not enough information to compute dpmm.
            return None
class ArrayImage(BaseImage):
    """An image constructed solely from a numpy array."""

    def __init__(self, array: np.array, *, dpi: NumberLike=None, sid: NumberLike=None, dtype=None):
        """
        Parameters
        ----------
        array : numpy.ndarray
            The image array.
        dpi : int, float
            The dots-per-inch of the image, defined at isocenter.

            .. note:: If a DPI tag is found in the image, that value will override the parameter, otherwise this one
                will be used.
        sid : int, float
            The Source-to-Image distance in mm.
        dtype : dtype, None, optional
            The data type to cast the image data as. If None, will use whatever raw image format is.
        """
        if dtype is not None:
            self.array = np.array(array, dtype=dtype)
        else:
            self.array = array
        self._dpi = dpi
        self.sid = sid

    @property
    def dpmm(self) -> Optional[float]:
        """The Dots-per-mm of the image, defined at isocenter. E.g. if an EPID image is taken at 150cm SID,
        the dpmm will scale back to 100cm."""
        try:
            return self.dpi / MM_PER_INCH
        except TypeError:
            # dpi is None when no DPI was supplied. Was a bare `except:`,
            # which also hid unrelated errors; narrowed to TypeError to match
            # FileImage.dpmm.
            return None

    @property
    def dpi(self) -> Optional[float]:
        """The dots-per-inch of the image, defined at isocenter."""
        dpi = None
        if self._dpi is not None:
            dpi = self._dpi
            # Scale from the detector plane back to isocenter when SID is known.
            if self.sid is not None:
                dpi *= self.sid / 1000
        return dpi

    def __sub__(self, other):
        """Pixel-wise subtraction; returns a new ArrayImage."""
        return ArrayImage(self.array - other.array)
class DicomImageStack:
    """A class that loads and holds a stack of DICOM images (e.g. a CT dataset). The class can take
    a folder or zip file and will read CT images. The images must all be the same size. Supports
    indexing to individual images.

    Attributes
    ----------
    images : list
        Holds instances of :class:`~pylinac.core.image.DicomImage`. Can be accessed via index;
        i.e. self[0] == self.images[0].

    Examples
    --------
    Load a folder of Dicom images

    >>> from pylinac import image
    >>> img_folder = r"folder/qa/cbct/june"
    >>> dcm_stack = image.DicomImageStack(img_folder)  # loads and sorts the images
    >>> dcm_stack.plot(3)  # plot the 3rd image

    Load a zip archive

    >>> img_folder_zip = r"archive/qa/cbct/june.zip"  # save space and zip your CBCTs
    >>> dcm_stack = image.DicomImageStack.from_zip(img_folder_zip)

    Load as a certain data type

    >>> dcm_stack_uint32 = image.DicomImageStack(img_folder, dtype=np.uint32)
    """
    images: List

    def __init__(self, folder: str, dtype=None, min_number: int=39, check_uid: bool=True):
        """Load a folder with DICOM CT images.

        Parameters
        ----------
        folder : str
            Path to the folder. A list/tuple of file paths is also accepted.
        dtype : dtype, None, optional
            The data type to cast the image data as. If None, will use whatever raw image format is.
        min_number : int
            Minimum number of same-series images required when ``check_uid`` is set.
        check_uid : bool
            Whether to filter the stack down to the most common SeriesInstanceUID.
        """
        # Collect candidate file paths in their received order.
        if isinstance(folder, (list, tuple)):
            candidates = list(folder)
        elif osp.isdir(folder):
            candidates = [osp.join(root, filename)
                          for root, _, filenames in os.walk(folder)
                          for filename in filenames]
        else:
            candidates = []
        # Keep only CT-slice DICOM files.
        self.images = [DicomImage(p, dtype=dtype) for p in candidates if self.is_CT_slice(p)]
        if not self.images:
            raise FileNotFoundError(f"No files were found in the specified location: {folder}")
        if check_uid:
            self.images = self._check_number_and_get_common_uid_imgs(min_number)
        # Sort into physical (z-position) order.
        self.images.sort(key=lambda img: img.metadata.ImagePositionPatient[-1])

    @classmethod
    def from_zip(cls, zip_path: str, dtype=None):
        """Load a DICOM ZIP archive.

        Parameters
        ----------
        zip_path : str
            Path to the ZIP archive.
        dtype : dtype, None, optional
            The data type to cast the image data as. If None, will use whatever raw image format is.
        """
        with TemporaryZipDirectory(zip_path) as unzipped:
            return cls(unzipped, dtype)

    @staticmethod
    def is_CT_slice(file: str) -> bool:
        """Test if the file is a CT Image storage DICOM file."""
        try:
            dataset = pydicom.dcmread(file, force=True, stop_before_pixels=True)
            return dataset.SOPClassUID.name == 'CT Image Storage'
        except (InvalidDicomError, AttributeError, MemoryError):
            return False

    def _check_number_and_get_common_uid_imgs(self, min_number: int) -> List:
        """Keep only the images of the most common series; error if too few."""
        uid, count = Counter(img.metadata.SeriesInstanceUID for img in self.images).most_common(1)[0]
        if count < min_number:
            raise ValueError("The minimum number images from the same study were not found")
        return [img for img in self.images if img.metadata.SeriesInstanceUID == uid]

    def plot(self, slice: int=0) -> None:
        """Plot a slice of the DICOM dataset.

        Parameters
        ----------
        slice : int
            The slice to plot.
        """
        self.images[slice].plot()

    @property
    def metadata(self) -> pydicom.FileDataset:
        """Metadata of the first image; shortcut attribute. Use only for attributes common
        throughout the stack; otherwise read the individual image metadata."""
        return self.images[0].metadata

    def __getitem__(self, item) -> DicomImage:
        return self.images[item]

    def __setitem__(self, key, value: DicomImage):
        self.images[key] = value

    def __len__(self):
        return len(self.images)
|
jrkerns/pylinac
|
pylinac/core/image.py
|
Python
|
mit
| 39,827
|
[
"Gaussian"
] |
17f935ad3049400cc1f73b19edb26a88fa333351d4e917a8c8bf59e15a71fb0e
|
"""
Loadable.Loadable subclass
"""
# This file is part of Munin.
# Munin is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# Munin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Munin; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# This work is Copyright (C)2006 by Andreas Jacobsen
# Individual portions may be copyright by individual contributors, and
# are included in this collective work with permission of the copyright
# owners.
import re
from munin import loadable
class longtel(loadable.loadable):
    """
    Show the stored intel for every planet in a galaxy (long format).
    """

    def __init__(self, cursor):
        loadable.loadable.__init__(self, cursor, 100)
        self.commandre = re.compile(r"^" + self.__class__.__name__ + "(.*)")
        self.paramre = re.compile(r"^\s*(\d+)[. :-](\d+)")
        self.usage = self.__class__.__name__ + " x:y"
        self.helptext = ["Shows the long version of intel on a galaxy."]

    def execute(self, user, access, irc_msg):
        # Access control first; bail out early on failure.
        if access < self.level:
            irc_msg.reply("You do not have enough access to use this command")
            return 0
        # Parse the galaxy coordinates "x:y" from the command parameters.
        match = self.paramre.search(irc_msg.command_parameters)
        if not match:
            irc_msg.reply("Usage: %s" % (self.usage,))
            return 0
        gal_x = int(match.group(1))
        gal_y = int(match.group(2))
        self.exec_gal(irc_msg, gal_x, gal_y)
        return 1

    def exec_gal(self, irc_msg, x, y):
        """Reply with one line of intel per known planet in galaxy x:y."""
        query = "SELECT t2.id AS id, t1.id AS pid, t1.x AS x, t1.y AS y, t1.z AS z, t2.nick AS nick, t2.fakenick AS fakenick, t2.defwhore AS defwhore, t2.gov AS gov, t2.bg AS bg, t2.covop AS covop, t2.alliance_id AS alliance_id, t2.relay AS relay, t2.reportchan AS reportchan, t2.scanner AS scanner, t2.distwhore AS distwhore, t2.comment AS comment, t3.name AS alliance FROM planet_dump as t1, intel as t2 LEFT JOIN alliance_canon AS t3 ON t2.alliance_id=t3.id WHERE tick=(SELECT MAX(tick) FROM updates) AND t1.id=t2.pid AND x=%s AND y=%s ORDER BY y,z,x"
        self.cursor.execute(query, (x, y))
        found_any = False
        for row in self.cursor.fetchall():
            planet_intel = loadable.intel(
                pid=row["pid"],
                nick=row["nick"],
                fakenick=row["fakenick"],
                defwhore=row["defwhore"],
                gov=row["gov"],
                bg=row["bg"],
                covop=row["covop"],
                alliance=row["alliance"],
                relay=row["relay"],
                reportchan=row["reportchan"],
                scanner=row["scanner"],
                distwhore=row["distwhore"],
                comment=row["comment"],
            )
            if planet_intel.is_empty():
                continue
            found_any = True
            reply = "Information stored for %s:%s:%s - " % (row["x"], row["y"], row["z"])
            reply += planet_intel.__str__()
            irc_msg.reply(reply)
        if not found_any:
            irc_msg.reply("No information stored for galaxy %s:%s" % (x, y))
        return 1
|
munin/munin
|
deprecated/longtel.py
|
Python
|
gpl-2.0
| 3,525
|
[
"Galaxy"
] |
a034b0128553d529e2e9441dfd4ed651085f55e4c26c2bcbbb27479e47cecdd6
|
import numpy as np
import pandas as pd
import mdtraj as md
from mixtape.utils import iterobjects, assign
import mixtape.ghmm, mixtape.featurizer
import sklearn.hmm
import os
name = "tica"
json_filename = "./%s.jsonlines" % name
feature_filename = "./%s.pkl" % name

# Load the featurizer and the fitted HMM models (one JSON object per line).
featurizer = mixtape.featurizer.load(feature_filename)
models = list(iterobjects(json_filename))
df = pd.DataFrame(models)
x = df.ix[0]
T = np.array(x["transmat"])
p = np.array(x["populations"])
n_states = len(p)

# Rebuild the fitted Gaussian-fusion HMM from the stored parameters.
model = mixtape.ghmm.GaussianFusionHMM(n_states, featurizer.n_features)
model.means_ = x["means"]
model.vars_ = x["vars"]
model.transmat_ = x["transmat"]
model.populations_ = x["populations"]
means = model.means_
covars = model.vars_

#n_traj = 348
#n_traj = 131
n_traj = 1

# Assign every frame of every trajectory to an HMM state.
all_assignments = []
all_probs = []
for i in range(n_traj):
    print(i)
    traj = md.load("./Trajectories/trj%d.h5" % i)
    ass, probs = assign(featurizer, traj, model)
    all_assignments.extend(ass)  # BUGFIX: was `ass_assignments` (NameError)
    all_probs.extend(probs)
all_assignments = np.array(all_assignments)
all_probs = np.array(all_probs)

traj = md.load("./Trajectories/trj%d.h5" % 50)
# NOTE(review): `trj0`, `atom_indices` and `hmm` below are undefined in this
# script -- presumably a reference trajectory, an atom selection, and a fitted
# HMM were meant to be loaded beforehand. TODO: confirm and restore them.
traj.superpose(trj0, atom_indices=atom_indices)
diff2 = (traj.xyz[:, atom_indices] - trj0.xyz[0, atom_indices]) ** 2
data = np.sqrt(np.sum(diff2, axis=2))
ass = hmm.predict(data)
rmsd = md.rmsd(traj, trj0)
|
hainm/MSMs
|
attic/src/code/hmsm/plot_assign.py
|
Python
|
gpl-2.0
| 1,331
|
[
"MDTraj"
] |
d2024dfa56dc1f9e27bb8beda79e04274d7e33ef0371ce98298a5b6acd1ae14f
|
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""Integration tests for regridding."""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
import iris
from iris.analysis._regrid import RectilinearRegridder as Regridder
from iris.coord_systems import GeogCS
from iris.coords import DimCoord
from iris.cube import Cube
from iris.tests.stock import global_pp, simple_3d
from iris.analysis import UnstructuredNearest
@tests.skip_data
class TestOSGBToLatLon(tests.IrisTest):
    def setUp(self):
        """Load the OSGB visibility cube and build a 73x96 lat/lon target grid."""
        path = tests.get_data_path(
            (
                "NIMROD",
                "uk2km",
                "WO0000000003452",
                "201007020900_u1096_ng_ey00_visibility0180_screen_2km",
            )
        )
        self.src = iris.load_cube(path)[0]
        # Cast up to float64, to work around numpy<=1.8 bug with means of
        # arrays of 32bit floats.
        self.src.data = self.src.data.astype(np.float64)
        coord_system = GeogCS(6370000)
        latitude = DimCoord(
            np.linspace(46, 65, 73),
            "latitude",
            units="degrees",
            coord_system=coord_system,
        )
        longitude = DimCoord(
            np.linspace(-14, 8, 96),
            "longitude",
            units="degrees",
            coord_system=coord_system,
        )
        self.grid = Cube(np.empty((73, 96)))
        self.grid.add_dim_coord(latitude, 0)
        self.grid.add_dim_coord(longitude, 1)

    def _regrid(self, method):
        """Regrid the source onto the target grid with the given method."""
        regridder = Regridder(self.src, self.grid, method, "mask")
        return regridder(self.src)

    def test_linear(self):
        result = self._regrid("linear")
        self.assertArrayShapeStats(result, (73, 96), -16100.351951, 5603.850769)

    def test_nearest(self):
        result = self._regrid("nearest")
        self.assertArrayShapeStats(result, (73, 96), -16095.965585, 5612.657155)
@tests.skip_data
class TestGlobalSubsample(tests.IrisTest):
    def setUp(self):
        """Build a subsampled, shifted copy of the global PP cube as the target."""
        self.src = global_pp()
        _ = self.src.data  # realize the data payload
        # Cast up to float64, to work around numpy<=1.8 bug with means of
        # arrays of 32bit floats.
        self.src.data = self.src.data.astype(np.float64)
        # Subsample and shift the target grid so that we can see a visual
        # difference between regridding scheme methods.
        target = self.src[1::2, 1::3]
        for axis_name in ("latitude", "longitude"):
            target.coord(axis_name).points = target.coord(axis_name).points + 1
        self.grid = target

    def _regrid(self, method):
        """Regrid the source onto the shifted grid with the given method."""
        regridder = Regridder(self.src, self.grid, method, "mask")
        return regridder(self.src)

    def test_linear(self):
        result = self._regrid("linear")
        self.assertArrayShapeStats(result, (36, 32), 280.35907, 15.997223)

    def test_nearest(self):
        result = self._regrid("nearest")
        self.assertArrayShapeStats(result, (36, 32), 280.33726, 16.064001)
@tests.skip_data
class TestUnstructured(tests.IrisTest):
    def setUp(self):
        """Load an unstructured-grid cube and a simple structured target."""
        data_path = tests.get_data_path(
            ("NetCDF", "unstructured_grid", "theta_nodal_xios.nc")
        )
        self.src = iris.load_cube(data_path, "Potential Temperature")
        self.grid = simple_3d()[0, :, :]

    def test_nearest(self):
        result = self.src.regrid(self.grid, UnstructuredNearest())
        self.assertArrayShapeStats(result, (1, 6, 3, 4), 315.890808, 11.000724)
class TestZonalMean_global(tests.IrisTest):
    def setUp(self):
        """Build a 140x1 zonal-mean cube on a global, circular lat/lon grid."""
        np.random.seed(0)
        # np.random.random_integers(0, 10, ...) is deprecated/removed in
        # modern numpy; randint with an exclusive upper bound of 11 draws the
        # identical sequence for the same seed.
        self.src = iris.cube.Cube(np.random.randint(0, 11, (140, 1)))
        s_crs = iris.coord_systems.GeogCS(6371229.0)
        sy_coord = iris.coords.DimCoord(
            np.linspace(-90, 90, 140),
            standard_name="latitude",
            units="degrees",
            coord_system=s_crs,
        )
        # A single, full-width circular longitude cell makes this a zonal mean.
        sx_coord = iris.coords.DimCoord(
            -180,
            bounds=[-180, 180],
            standard_name="longitude",
            units="degrees",
            circular=True,
            coord_system=s_crs,
        )
        self.src.add_dim_coord(sy_coord, 0)
        self.src.add_dim_coord(sx_coord, 1)

    def test_linear_same_crs_global(self):
        # Regrid the zonal mean onto an identical coordinate system target, but
        # on a different set of longitudes - which should result in no change.
        points = [-150, -90, -30, 30, 90, 150]
        bounds = [
            [-180, -120],
            [-120, -60],
            [-60, 0],
            [0, 60],
            [60, 120],
            [120, 180],
        ]
        sx_coord = self.src.coord(axis="x")
        sy_coord = self.src.coord(axis="y")
        x_coord = sx_coord.copy(points, bounds=bounds)
        grid = iris.cube.Cube(
            np.zeros([sy_coord.points.size, x_coord.points.size])
        )
        grid.add_dim_coord(sy_coord, 0)
        grid.add_dim_coord(x_coord, 1)
        res = self.src.regrid(grid, iris.analysis.Linear())
        # Ensure data remains unchanged.
        # (the same along each column)
        self.assertTrue(
            np.array(
                [
                    (res.data[:, 0] - res.data[:, i]).max()
                    for i in range(1, res.shape[1])
                ]
            ).max()
            < 1e-10
        )
        self.assertArrayAlmostEqual(res.data[:, 0], self.src.data.reshape(-1))
class TestZonalMean_regional(TestZonalMean_global, tests.IrisTest):
    # Reuses the zonal-mean source from TestZonalMean_global, but regrids it
    # onto a rotated-pole regional target.
    def setUp(self):
        """Build a rotated regional target grid and the expected result."""
        super().setUp()
        # Define a target grid and a target result (what we expect the
        # regridder to return).
        sx_coord = self.src.coord(axis="x")
        sy_coord = self.src.coord(axis="y")
        grid_crs = iris.coord_systems.RotatedGeogCS(
            37.5, 177.5, ellipsoid=iris.coord_systems.GeogCS(6371229.0)
        )
        grid_x = sx_coord.copy(np.linspace(350, 370, 100))
        grid_x.circular = False
        grid_x.coord_system = grid_crs
        grid_y = sy_coord.copy(np.linspace(-10, 10, 100))
        grid_y.coord_system = grid_crs
        grid = iris.cube.Cube(
            np.zeros([grid_y.points.size, grid_x.points.size])
        )
        grid.add_dim_coord(grid_y, 0)
        grid.add_dim_coord(grid_x, 1)
        # The target result is derived by regridding a multi-column version of
        # the source to the target (i.e. turning a zonal mean regrid into a
        # conventional regrid).
        self.tar = self.zonal_mean_as_multi_column(self.src).regrid(
            grid, iris.analysis.Linear()
        )
        self.grid = grid

    def zonal_mean_as_multi_column(self, src_cube):
        """Return a two-column version of the single-column zonal-mean cube.

        NOTE: mutates ``src_cube`` in place (its x coord is narrowed to the
        [0, 180] half) before concatenating it with a [-180, 0] copy.
        """
        # Munge the source (duplicate source latitudes) so that we can
        # utilise linear regridding as a conventional problem (that is, to
        # duplicate columns so that it is no longer a zonal mean problem).
        src_cube2 = src_cube.copy()
        src_cube2.coord(axis="x").points = -90
        src_cube2.coord(axis="x").bounds = [-180, 0]
        src_cube.coord(axis="x").points = 90
        src_cube.coord(axis="x").bounds = [0, 180]
        src_cubes = iris.cube.CubeList([src_cube, src_cube2])
        return src_cubes.concatenate_cube()

    def test_linear_rotated_regional(self):
        # Ensure that zonal mean source data is linearly interpolated onto a
        # high resolution target.
        regridder = iris.analysis.Linear()
        res = self.src.regrid(self.grid, regridder)
        self.assertArrayAlmostEqual(res.data, self.tar.data)

    def test_linear_rotated_regional_no_extrapolation(self):
        # Capture the case where our source remains circular but we don't use
        # extrapolation.
        regridder = iris.analysis.Linear(extrapolation_mode="nan")
        res = self.src.regrid(self.grid, regridder)
        self.assertArrayAlmostEqual(res.data, self.tar.data)

    def test_linear_rotated_regional_not_circular(self):
        # Capture the case where our source is not circular but we utilise
        # extrapolation.
        regridder = iris.analysis.Linear()
        self.src.coord(axis="x").circular = False
        res = self.src.regrid(self.grid, regridder)
        self.assertArrayAlmostEqual(res.data, self.tar.data)

    def test_linear_rotated_regional_no_extrapolation_not_circular(self):
        # Confirm how zonal mean actually works in so far as, that
        # extrapolation and circular source handling is the means by which
        # these usecases are supported.
        # In the case where the source is neither using extrapolation and is
        # not circular, then 'nan' values will result (as we would expect).
        regridder = iris.analysis.Linear(extrapolation_mode="nan")
        self.src.coord(axis="x").circular = False
        res = self.src.regrid(self.grid, regridder)
        self.assertTrue(np.isnan(res.data).all())
# Allow running this test module directly (outside the full test runner).
if __name__ == "__main__":
    tests.main()
|
ocefpaf/iris
|
lib/iris/tests/integration/test_regridding.py
|
Python
|
lgpl-3.0
| 9,065
|
[
"NetCDF"
] |
97ba1552b4ed14f6532d17980b3ab9ecd6d5285179af5749975af3165e766643
|
import logging
from galaxy.web.form_builder import SelectField
log = logging.getLogger( __name__ )
REPOSITORY_DEPENDENCY_DEFINITION_FILENAME = 'repository_dependencies.xml'
REPOSITORY_SUITE_DEFINITION = 'repository_suite_definition'
TOOL_DEPENDENCY_DEFINITION = 'tool_dependency_definition'
TOOL_DEPENDENCY_DEFINITION_FILENAME = 'tool_dependencies.xml'
UNRESTRICTED = 'unrestricted'
types = [ UNRESTRICTED, TOOL_DEPENDENCY_DEFINITION, REPOSITORY_SUITE_DEFINITION ]
def build_repository_type_select_field( trans, repository=None, name='repository_type' ):
    """Called from the Tool Shed to generate the current list of supported repository types.

    When a repository is supplied, only its current type and the types it may
    validly change to are offered; otherwise every registered type is listed.
    """
    selected_type = str( repository.type ) if repository else None
    repository_type_select_field = SelectField( name=name )
    for type_label, type_class in trans.app.repository_types_registry.repository_types_by_label.items():
        option_label = str( type_class.label )
        option_value = str( type_class.type )
        # Pre-select the repository's current type.
        selected = bool( selected_type and selected_type == option_value )
        if repository:
            # Offer the current type, or any type this repository can become.
            if repository.type == option_value or type_class.is_valid_for_type( trans.app, repository ):
                repository_type_select_field.add_option( option_label, option_value, selected=selected )
        else:
            repository_type_select_field.add_option( option_label, option_value, selected=selected )
    return repository_type_select_field
def generate_message_for_repository_type_change( app, repository ):
    """Return an HTML hint suggesting a more specific repository type, or '' if none applies."""
    if repository.can_change_type_to( app, REPOSITORY_SUITE_DEFINITION ):
        suite_type_class = \
            app.repository_types_registry.get_class_by_label( REPOSITORY_SUITE_DEFINITION )
        message = "This repository currently contains a single file named <b>%s</b>. If the intent of this repository is " % \
            REPOSITORY_DEPENDENCY_DEFINITION_FILENAME
        message += "to define relationships to a collection of repositories that contain related Galaxy utilities with "
        message += "no plans to add additional files, consider setting its type to <b>%s</b>.<br/>" % \
            suite_type_class.label
        return message
    if repository.can_change_type_to( app, TOOL_DEPENDENCY_DEFINITION ):
        dependency_type_class = \
            app.repository_types_registry.get_class_by_label( TOOL_DEPENDENCY_DEFINITION )
        message = "This repository currently contains a single file named <b>%s</b>. If additional files will " % \
            TOOL_DEPENDENCY_DEFINITION_FILENAME
        message += "not be added to this repository, consider setting its type to <b>%s</b>.<br/>" % \
            dependency_type_class.label
        return message
    return ''
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/tool_shed/repository_types/util.py
|
Python
|
gpl-3.0
| 2,985
|
[
"Galaxy"
] |
a13eaac3d6ba070b955077032ce6761ae4390ad8c445eb56c716ae04e567a784
|
# -*- coding: utf-8 -*-
"""
ORCA Open Remote Control Application
Copyright (C) 2013-2020 Carsten Thielepape
Please contact me by : http://www.orca-remote.org/
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
def GetDefaultStretchMode() -> str:
    """Return the default widget stretch-mode identifier for this (generic) platform."""
    return u'RESIZE'
|
thica/ORCA-Remote
|
src/ORCA/utils/Platform/generic/generic_GetDefaultStretchMode.py
|
Python
|
gpl-3.0
| 913
|
[
"ORCA"
] |
bb86feef9bf3fe55d3e99e75351e7f93f071fd15615ca8e0265d10c1ce1b35ee
|
import numpy
import scipy.special
def gauss(hyp, y=None, mu=None, s2=None, inf=None, hi=None, nargout=None):
    """
    Gaussian likelihood function for regression. The expression for the
    likelihood is

        likGauss(t) = exp(-(t-y)^2/2*sn^2) / sqrt(2*pi*sn^2),

    where y is the mean and sn is the standard deviation.

    The hyperparameters are:

        hyp = [ log(sn) ]

    Modes (mirroring the GPML MATLAB interface):
      * ``mu is None``       -> return the number of hyperparameters ('1').
      * ``inf is None``      -> prediction: log density / predictive moments.
      * ``inf == 'laplace'`` -> log likelihood and derivatives wrt the latent
        (``hi is None``) or wrt the hyperparameter (``hi`` set).
      * ``inf == 'ep'``      -> log partition function and derivatives.

    ``nargout`` selects how many outputs are returned. Care is taken to
    avoid numerical issues when the arguments are extreme.
    """
    # Called without data: report the number of hyperparameters.
    if mu is None:
        return '1'
    sn2 = numpy.exp(2 * hyp)  # noise variance sn^2
    if inf is None:
        # Prediction mode.
        if numpy.size(y) == 0:
            y = numpy.zeros(numpy.shape(mu))
        if s2 is not None and numpy.linalg.norm(s2) > 0:
            # Non-degenerate input variance: delegate to the EP computation.
            out = gauss(hyp, y, mu, s2, 'ep')
            lp = out[0]
        else:
            lp = -(y - mu)**2 / sn2 / 2 - numpy.log(2 * numpy.pi * sn2) / 2
            s2 = 0
        if nargout == 1:
            return lp
        elif nargout == 2:
            # NOTE(review): GPML returns (lp, ymu) for two outputs; this port
            # returns (mu, mu). Kept as-is -- confirm against callers.
            return (mu, mu)
        else:
            return (lp, mu, s2 + sn2)
    else:
        if inf == 'laplace':
            if hi is None:
                # Log likelihood and derivatives wrt the latent mean.
                if nargout is None:
                    nargout = 4
                if numpy.size(y) == 0:
                    y = 0
                ymmu = y - mu
                lp = -numpy.power(ymmu, 2) / (2 * sn2) - numpy.log(2 * numpy.pi * sn2) / 2
                res = lp
                if nargout > 1:
                    dlp = ymmu / sn2  # d lp / d mu
                    res = (lp, dlp)
                if nargout > 2:
                    d2lp = -numpy.ones(numpy.shape(ymmu)) / sn2
                    res += (d2lp,)
                if nargout > 3:
                    d3lp = numpy.zeros(numpy.shape(ymmu))
                    res += (d3lp,)  # BUGFIX: was `res += (d3lp)` (missing comma)
            else:
                # Derivatives wrt the hyperparameter log(sn).
                if nargout is None:
                    nargout = 3
                lp_dhyp = numpy.power(y - mu, 2) / sn2 - 1
                res = lp_dhyp  # BUGFIX: was `res = lp` (NameError in this branch)
                if nargout > 1:
                    dlp_dhyp = 2 * (mu - y) / sn2
                    res = (lp_dhyp, dlp_dhyp)  # BUGFIX: was (lp, dlp_dhyp)
                if nargout > 2:
                    d2lp_dhyp = 2 * numpy.ones(numpy.shape(mu)) / sn2
                    res += (d2lp_dhyp,)
            return res
        elif inf == 'ep':
            if hi is None:
                # Log partition function and derivatives wrt the mean.
                if nargout is None:
                    nargout = 3
                lZ = -(y - mu)**2 / (sn2 + s2) / 2 - numpy.log(2 * numpy.pi * (sn2 + s2)) / 2
                dlZ = (y - mu) / (sn2 + s2)
                d2lZ = -1. / (sn2 + s2)
                if nargout == 1:
                    return lZ
                elif nargout == 2:
                    return (lZ, dlZ)
                else:
                    return (lZ, dlZ, d2lZ)
            else:
                # Derivative of the log partition function wrt log(sn).
                if nargout is None:
                    nargout = 1
                dlZhyp = ((y - mu)**2 / (sn2 + s2) - 1) / (1 + s2 / sn2)
                if nargout == 1:
                    return dlZhyp
                else:
                    # Pad with None for unsupported extra outputs.
                    res = (dlZhyp,)
                    for i in range(2, nargout):
                        res += (None,)
                    return res
        # ('infVB' / variational bounds was never ported for this likelihood.)
        else:
            raise AttributeError('Unknown inference')
def erf(hyp, y=None, mu=None, s2=None, inf=None, hi=None, nargout=None):
    """
    Error function or cumulative Gaussian likelihood function for binary
    classification or probit regression:

        likErf(t) = (1+erf(t/sqrt(2)))/2 = normcdf(t).

    There are no hyperparameters. Modes mirror :func:`gauss`: prediction
    (``inf is None``), Laplace and EP inference. Care is taken to avoid
    numerical issues when the arguments are extreme.
    """
    # Called without data: report the number of hyperparameters.
    if mu is None:
        return '0'
    # Normalize labels to +/-1 (zeros become +1); default to +1 when absent.
    if y is not None:
        if numpy.size(y) == 0:
            y = numpy.array([[1]])
        else:
            y = numpy.sign(y)
            y[y == 0] = 1
    else:
        y = numpy.array([[1]])
    # prediction mode if inf is not present
    if inf is None:
        y = y * numpy.ones(numpy.shape(mu))
        if s2 is not None and numpy.linalg.norm(s2) > 0:
            lp = erf(hyp, y, mu, s2, 'ep', nargout=1)
            p = numpy.exp(lp)
        else:
            p, lp = __cumGauss(y, mu, nargout=2)
        if nargout is None:
            nargout = 3
        res = lp
        if nargout > 1:
            ymu = 2 * p - 1  # predictive mean
            res = (lp, ymu)
        if nargout > 2:
            ys2 = 4 * p * (1 - p)  # predictive variance
            res += (ys2,)
        return res
    else:
        if inf == 'laplace':
            # no derivative mode
            if hi is None:
                # BUGFIX: nargout previously had no default in this branch, so
                # calling with nargout=None raised TypeError on `nargout > 1`
                # under Python 3. Default to the full 4 outputs (as in gauss).
                if nargout is None:
                    nargout = 4
                f = mu
                yf = y * f  # product latents and labels
                p, lp = __cumGauss(y, f, nargout=2)
                res = lp
                # derivative of log likelihood
                if nargout > 1:
                    n_p = __gauOverCumGauss(yf, p)
                    dlp = y * n_p  # derivative of log likelihood
                    res = (lp, dlp)
                # 2nd derivative of log likelihood
                if nargout > 2:
                    d2lp = -numpy.power(n_p, 2) - yf * n_p
                    res += (d2lp,)
                # 3rd derivative of log likelihood
                if nargout > 3:
                    d3lp = 2 * y * numpy.power(n_p, 3) + 3 * f * numpy.power(n_p, 2) + y * (numpy.power(f, 2) - 1) * n_p
                    res += (d3lp,)
                return res
            # derivative mode: no hyperparameters -> empty derivative
            else:
                return numpy.array([[]])
        elif inf == 'ep':
            if hi is None:
                if nargout is None:
                    nargout = 3
                z = mu / numpy.sqrt(1 + s2)
                # log partition function
                junk, lZ = __cumGauss(y, z, nargout=2)
                res = lZ
                if numpy.size(y) > 0:
                    z = z * y
                if nargout > 1:
                    if numpy.size(y) == 0:
                        y = 1
                    n_p = __gauOverCumGauss(z, numpy.exp(lZ))
                    # 1st derivative wrt mean
                    dlZ = y * n_p / numpy.sqrt(1 + s2)
                    res = (lZ, dlZ)
                if nargout > 2:
                    # 2nd derivative wrt mean
                    d2lZ = -n_p * (z + n_p) / (1 + s2)
                    res += (d2lZ,)
                return res
            else:
                # No hyperparameters -> empty derivative.
                return numpy.array([[]])
        elif inf == 'vb':
            # Variational bounds are not implemented (falls through -> None).
            a = 0
        else:
            raise AttributeError('Unknown inference')
def __cumGauss(y, f, nargout=1):
    """Cumulative Gaussian p = normcdf(y*f), optionally with its (safe) log."""
    # Product of latents and labels; y may be empty in prediction mode.
    yf = y * f if numpy.size(y) > 0 else f
    # likelihood
    p = (1 + scipy.special.erf(yf / numpy.sqrt(2))) / 2
    if nargout > 1:
        return (p, __logphi(yf, p))
    return p
def __logphi(z, p):
    """
    Numerically safe log of the standard normal CDF:

        logphi(z) = log(normcdf(z))

    Above zmax the direct log is used; below zmin an asymptotic lower bound
    (Abramowitz & Stegun 7.1.13) is used; in between the two are blended.
    """
    lp = numpy.zeros(numpy.shape(z))
    zmin = -6.2
    zmax = -5.5
    ok = z > zmax          # direct evaluation is safe here
    bd = z < zmin          # deep tail: use the asymptotic bound
    ip = ~ok & ~bd         # blend region between the two
    # blend weights
    lam = 1 / (1 + numpy.exp(25 * (1 / 2 - (z[ip] - zmin) / (zmax - zmin))))
    lp[ok] = numpy.log(p[ok])
    # Lower/upper bounds per Abramowitz & Stegun 7.1.13 for z<0:
    #   lower: -log(pi)/2 - z^2/2 - log( sqrt(z^2/2 + 2   ) - z/sqrt(2) )
    #   upper: -log(pi)/2 - z^2/2 - log( sqrt(z^2/2 + 4/pi) - z/sqrt(2) )
    # The lower bound captures the asymptotics.
    lp[~ok] = (-numpy.log(numpy.pi) / 2 - numpy.power(z[~ok], 2) / 2
               - numpy.log(numpy.sqrt(numpy.power(z[~ok], 2) / 2 + 2) - z[~ok] / numpy.sqrt(2)))
    lp[ip] = (1 - lam) * lp[ip] + lam * numpy.log(p[ip])
    return lp
def __gauOverCumGauss(f, p):
    """
    Safely compute the Gaussian density over the cumulative Gaussian, N(f)/p.
    """
    n_p = numpy.zeros(numpy.shape(f))
    ok = f > -5            # naive evaluation is stable for large f
    bd = f < -6            # deep tail: tight upper bound
    interp = ~ok & ~bd     # blend region between the two
    n_p[ok] = (numpy.exp(-numpy.power(f[ok], 2) / 2) / numpy.sqrt(2 * numpy.pi)) / p[ok]
    n_p[bd] = numpy.sqrt(numpy.power(f[bd], 2) / 4 + 1) - f[bd] / 2
    # Linearly interpolate between the two estimates.
    mid = f[interp]
    lam = -5 - mid
    n_p[interp] = ((1 - lam) * (numpy.exp(-numpy.power(mid, 2) / 2) / numpy.sqrt(2 * numpy.pi)) / p[interp]
                   + lam * (numpy.sqrt(numpy.power(mid, 2) / 4 + 1) - mid / 2))
    return n_p
def logistic(hyp, y=None, mu=None, s2=None, inf=None, hi=None, nargout=None):
    """
    Logistic function for binary classification or logit regression:

        logistic(t) = 1/(1+exp(-t)).

    There are no hyperparameters. Modes mirror :func:`gauss`; the moments
    \\int f^k logistic(y,f) N(f|mu,var) df needed for EP are computed via a
    5-component cumulative-Gaussian scale-mixture approximation with an
    analytic tail correction. Care is taken to avoid numerical issues when
    the arguments are extreme.
    """
    # Called without data: report the number of hyperparameters.
    if mu is None:
        return '0'
    # Normalize labels to +/-1 (zeros become +1); default to +1 when absent.
    if y is not None:
        if numpy.size(y) == 0:
            y = numpy.array([[1]])
        else:
            y = numpy.sign(y)
            y[y == 0] = 1
    else:
        y = numpy.array([[1]])
    # prediction mode if inf is not present
    if inf is None:
        y = y * numpy.ones(numpy.shape(mu))
        if s2 is not None and numpy.linalg.norm(s2) > 0:
            lp = logistic(hyp, y, mu, s2, 'ep', nargout=1)
        else:
            yf = y * mu
            lp = yf.copy()
            ok = -35 < yf  # avoid overflow in exp for very negative yf
            lp[ok] = -numpy.log(1 + numpy.exp(-yf[ok]))
        if nargout is None:
            nargout = 3
        res = lp
        if nargout > 1:
            p = numpy.exp(lp)
            ymu = 2 * p - 1  # predictive mean
            res = (lp, ymu)
        if nargout > 2:
            ys2 = 4 * p * (1 - p)  # predictive variance
            res += (ys2,)
        return res
    else:
        if inf == 'laplace':
            # no derivative mode
            if hi is None:
                # BUGFIX: nargout previously had no default in this branch, so
                # calling with nargout=None raised TypeError on `nargout > 1`
                # under Python 3. Default to the full 4 outputs (as in gauss).
                if nargout is None:
                    nargout = 4
                # product latents and labels
                f = mu
                yf = y * f
                s = -yf
                ps = numpy.maximum(0, s)
                # Stable evaluation of lp = -(log(1+exp(s)))
                lp = -(ps + numpy.log(numpy.exp(-ps) + numpy.exp(s - ps)))
                res = lp
                # first derivatives
                if nargout > 1:
                    s = numpy.minimum(0, f)
                    p = numpy.exp(s) / (numpy.exp(s) + numpy.exp(s - f))  # p = 1./(1+exp(-f))
                    dlp = (y + 1) / 2. - p  # derivative of log likelihood
                    res = (lp, dlp)
                # 2nd derivative of log likelihood
                if nargout > 2:
                    d2lp = -numpy.exp(2 * s - f) / numpy.power(numpy.exp(s) + numpy.exp(s - f), 2)
                    res += (d2lp,)
                # 3rd derivative of log likelihood
                if nargout > 3:
                    d3lp = 2 * d2lp * (0.5 - p)
                    res += (d3lp,)  # BUGFIX: was `res += (d3lp)` (missing comma)
                return res
            # derivative mode: no hyperparameters -> empty derivative
            else:
                return numpy.array([[]])
        elif inf == 'ep':
            if hi is None:
                if nargout is None:
                    nargout = 3
                y = y * numpy.ones(numpy.shape(mu))
                # likLogistic(t) \approx 1/2 + \sum_{i=1}^5 (c_i/2) erf(lam_i/sqrt(2)t)
                # approx coeffs lam_i and c_i
                lam = numpy.sqrt(2) * numpy.array([[0.44, 0.41, 0.40, 0.39, 0.36]])
                c = numpy.array([[1.146480988574439e+02, -1.508871030070582e+03, 2.676085036831241e+03, -1.356294962039222e+03, 7.543285642111850e+01]]).T
                lZc, dlZc, d2lZc = erf({'cov': numpy.array([[]]), 'lik': numpy.array([[]]), 'mean': numpy.array([[]])}, numpy.dot(y, numpy.ones((1, 5))), numpy.dot(mu, lam), numpy.dot(s2, numpy.power(lam, 2)), inf, nargout=3)
                # A=lZc, B=dlZc, d=c.*lam', lZ=log(exp(A)*c)
                lZ = __log_expA_x(lZc, c)
                # ((exp(A).*B)*d)./(exp(A)*c)
                dlZ = __expABz_expAx(lZc, c, dlZc, c * lam.T)
                # d2lZ = ((exp(A).*Z)*e)./(exp(A)*c) - dlZ.^2 where e = c.*(lam.^2)'
                d2lZ = __expABz_expAx(lZc, c, numpy.power(dlZc, 2) + d2lZc, c * numpy.power(lam, 2).T) - numpy.power(dlZ, 2)
                # The scale mixture approximation does not capture the correct asymptotic
                # behavior; we have linear decay instead of quadratic decay as suggested
                # by the scale mixture approximation. By observing that for large values
                # of -f*y ln(p(y|f)) of logistic likelihood is linear in f with slope y,
                # we are able to analytically integrate the tail region; there is no
                # contribution to the second derivative
                # empirically determined bound at val==0
                val = numpy.abs(mu) - 196. / 200. * s2 - 4.
                # interpolation weights
                lam = 1 / (1 + numpy.exp(-10 * val))
                # apply the same to p(y|f) = 1 - p(-y|f)
                lZtail = numpy.minimum(s2 / 2 - numpy.abs(mu), -0.1)
                dlZtail = -numpy.sign(mu)
                id = y * mu > 0
                # label and mean agree
                lZtail[id] = numpy.log(1 - numpy.exp(lZtail[id]))
                dlZtail[id] = 0
                # interpolate between scale mixture ..
                lZ = (1 - lam) * lZ + lam * lZtail
                # .. and tail approximation
                dlZ = (1 - lam) * dlZ + lam * dlZtail
                res = lZ
                if nargout > 1:
                    res = (lZ, dlZ)
                if nargout > 2:
                    res += (d2lZ,)
                return res
            else:
                # No hyperparameters -> empty derivative.
                return numpy.array([[]])
        elif inf == 'vb':
            # Variational bounds are not implemented (falls through -> None).
            a = 0
        else:
            raise AttributeError('Unknown inference')
def __log_expA_x(A,x):
    """
    Numerically stable evaluation of y = log( exp(A)*x ).

    The row-wise maximum of A is factored out before exponentiating so the
    exp cannot overflow; it is added back after the log.
    """
    row_max = numpy.reshape(numpy.max(A, 1), (-1, 1))
    # exp(A) = exp(A - max(A)) * exp(max(A)); subtract the per-row maximum
    shifted = numpy.exp(A - numpy.dot(row_max, numpy.ones((1, numpy.size(A, 1)))))
    return numpy.log(numpy.dot(shifted, x)) + row_max
def __expABz_expAx(A,x,B,z):
    """
    Numerically stable evaluation of y = ((exp(A).*B)*z) ./ (exp(A)*x).

    The row-wise maximum of A cancels between numerator and denominator, so
    it is subtracted before exponentiating to avoid overflow. As in the
    original GPML code, this assumes the entries of A are close together.
    """
    row_max = numpy.reshape(numpy.max(A, 1), (-1, 1))
    expA = numpy.exp(A - numpy.dot(row_max, numpy.ones((1, numpy.size(A, 1)))))
    return numpy.dot(expA * B, z) / numpy.dot(expA, x)
def mix():
    # Placeholder for the GPML mixture likelihood; not ported to this module yet.
    raise NotImplementedError('')
# Evaluates lik functions
def feval(fun, hyp=None, y=None, mu=None, s2=None, inff=None, hi=None, nargout=None):
    """
    Dispatch a likelihood-function call.

    *fun* is either a likelihood callable or a tuple whose first element is
    the callable (extra tuple elements are forwarded, as for the mixture
    likelihood). Only callables defined in sklearn.gpml.lik are accepted;
    anything else raises AttributeError.
    """
    if not isinstance(fun, tuple):
        fun = (fun,)
    f = fun[0]
    # NOTE(review): dispatch is tied to this exact module path, and
    # ``lik.mix`` presumably refers to this module's mix() — confirm the
    # ``lik`` name is importable/in scope at call time.
    if f.__module__ == 'sklearn.gpml.lik':
        if len(fun) > 1 and f == lik.mix:
            return f(fun[1], hyp, y, mu, s2, inff, hi, nargout)
        else:
            return f(hyp, y, mu, s2, inff, hi, nargout)
    else:
        raise AttributeError('Unknown function')
|
depet/scikit-learn
|
sklearn/gpml/lik.py
|
Python
|
bsd-3-clause
| 14,213
|
[
"Gaussian"
] |
9c34f7588779898ac0dbce3fc1728f76aa9bd62b40b5bbea65fc7da503b6b075
|
# coding: utf-8
import os, sys, time, re
from Crypto.Cipher import AES
import crypt
import pwd
from binascii import b2a_hex, a2b_hex
import hashlib
import datetime
import random
import subprocess
import uuid
import json
import logging
from settings import *
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.http import HttpResponse, Http404
from django.template import RequestContext
from juser.models import User, UserGroup
from jlog.models import Log, TtyLog
from jasset.models import Asset, AssetGroup
from jperm.models import PermRule, PermRole
from jumpserver.models import Setting
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
def set_log(level, filename='jumpserver.log'):
    """
    Return the shared 'jumpserver' logger, writing to LOG_DIR/filename.

    :param level: 'debug' / 'info' / 'warning' / 'error' / 'critical'
                  (unknown values fall back to DEBUG)
    :param filename: log file name inside LOG_DIR
    :return: a logging.Logger instance

    Fixes: ``0777`` (Python-2-only octal literal) replaced with ``0o777``
    (valid on Python 2.6+ and 3); a guard now prevents attaching a second
    FileHandler when set_log() is called more than once, which used to
    duplicate every log record.
    """
    log_file = os.path.join(LOG_DIR, filename)
    if not os.path.isfile(log_file):
        os.mknod(log_file)
        # world-writable so both the web and ssh workers can append
        os.chmod(log_file, 0o777)
    log_level_total = {'debug': logging.DEBUG, 'info': logging.INFO, 'warning': logging.WARN, 'error': logging.ERROR,
                       'critical': logging.CRITICAL}
    logger_f = logging.getLogger('jumpserver')
    logger_f.setLevel(logging.DEBUG)
    if not logger_f.handlers:
        fh = logging.FileHandler(log_file)
        fh.setLevel(log_level_total.get(level, logging.DEBUG))
        formatter = logging.Formatter('%(asctime)s - %(filename)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        logger_f.addHandler(fh)
    return logger_f
def list_drop_str(a_list, a_str):
    """
    Remove every occurrence of *a_str* from *a_list* in place and return it.

    Fixes: the previous implementation removed elements while iterating
    over the same list, which skips the element following each removal and
    therefore left adjacent duplicates behind.
    """
    # Slice assignment keeps the mutation in place for callers that hold
    # a reference to the original list object.
    a_list[:] = [item for item in a_list if item != a_str]
    return a_list
def get_asset_info(asset):
    """
    Collect connection details (account, password/ssh key, port) for *asset*.

    Falls back to the site-wide 'default' Setting row when the asset is
    flagged to use default auth. NOTE(review): if no 'default' Setting row
    exists and ``asset.port`` is unset, the final ``int(default.field2)``
    raises AttributeError — confirm the row is always provisioned.
    """
    default = get_object(Setting, name='default')
    info = {'hostname': asset.hostname, 'ip': asset.ip}
    if asset.use_default_auth:
        if default:
            info['username'] = default.field1
            try:
                info['password'] = CRYPTOR.decrypt(default.field3)
            except ServerError:
                # undecryptable stored password: leave 'password' unset
                pass
            if os.path.isfile(default.field4):
                info['ssh_key'] = default.field4
    else:
        info['username'] = asset.username
        info['password'] = CRYPTOR.decrypt(asset.password)
    try:
        info['port'] = int(asset.port)
    except TypeError:
        # asset.port is None: fall back to the default port setting
        info['port'] = int(default.field2)
    return info
def get_role_key(user, role):
    """
    Copy a role's private ssh key into a per-user file with 600 permissions.

    The shared role key is readable by everyone, but ansible/ssh require
    mode 600, so a private copy is kept at KEY_DIR/user/<user>_<role>.pem.

    :param user: user who will own the copied key
    :param role: PermRole whose id_rsa is copied
    :return: path of the per-user key file
    """
    user_role_key_dir = os.path.join(KEY_DIR, 'user')
    user_role_key_path = os.path.join(user_role_key_dir, '%s_%s.pem' % (user.username, role.name))
    # mode is given in "decimal looks like octal" form used throughout this file
    mkdir(user_role_key_dir, mode=777)
    if not os.path.isfile(user_role_key_path):
        with open(os.path.join(role.key_path, 'id_rsa')) as fk:
            with open(user_role_key_path, 'w') as fu:
                fu.write(fk.read())
        logger.debug(u"创建新的系统用户key %s, Owner: %s" % (user_role_key_path, user.username))
        # restrict to the owner: ssh refuses group/world-readable private keys
        chown(user_role_key_path, user.username)
        os.chmod(user_role_key_path, 0600)
    return user_role_key_path
def chown(path, user, group=''):
    """
    Change the owner of *path* to *user* (and *group*, defaulting to the
    same name). Unknown user/group names are silently ignored.

    NOTE(review): the gid is looked up through pwd (the *user* database),
    so a real group name would need the grp module; this works because
    group defaults to the user's own name. Behavior kept as-is.
    """
    group = group or user
    try:
        owner_uid = pwd.getpwnam(user).pw_uid
        owner_gid = pwd.getpwnam(group).pw_gid
    except KeyError:
        # user or group unknown on this host: best-effort no-op
        return
    os.chown(path, owner_uid, owner_gid)
def page_list_return(total, current=1):
    """
    Return the window of page numbers (at most five) to render around
    *current*, clamped to [1, total].
    """
    if current - 4 > 0:
        first = current - 2
    else:
        first = 1
    last = first + 4 if first + 4 < total else total
    return range(first, last + 1)
def pages(post_objects, request):
    """
    Paginate *post_objects* (20 per page) driven by request.GET['page'].

    :return: tuple of (all objects, paginator, current page object,
             visible page-number window, current page number,
             show-first-link flag, show-last-link flag)
    """
    paginator = Paginator(post_objects, 20)
    try:
        current_page = int(request.GET.get('page', '1'))
    except ValueError:
        # non-numeric ?page= value: fall back to the first page
        current_page = 1
    page_range = page_list_return(len(paginator.page_range), current_page)
    try:
        page_objects = paginator.page(current_page)
    except (EmptyPage, InvalidPage):
        # out-of-range page number: clamp to the last page
        page_objects = paginator.page(paginator.num_pages)
    if current_page >= 5:
        show_first = 1
    else:
        show_first = 0
    if current_page <= (len(paginator.page_range) - 3):
        show_end = 1
    else:
        show_end = 0
    # all objects, paginator, this page's objects, page window, current page,
    # whether to show a "first page" link, whether to show a "last page" link
    return post_objects, paginator, page_objects, page_range, current_page, show_first, show_end
class PyCrypt(object):
    """
    Symmetric (AES-CBC) and one-way password helpers.

    NOTE(review): the AES IV is a fixed constant, so equal plaintexts
    produce equal ciphertexts, and md5 is used for hashing — both are weak
    by modern standards but kept for compatibility with stored data.
    """
    def __init__(self, key):
        # key: AES key shared by encrypt()/decrypt() (settings.KEY)
        self.key = key
        self.mode = AES.MODE_CBC
    @staticmethod
    def gen_rand_pass(length=16, especial=False):
        """
        Generate a random password of *length* characters; with
        especial=True the last four characters are punctuation.
        """
        salt_key = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_'
        symbol = '!@$%^&*()_'
        salt_list = []
        if especial:
            for i in range(length - 4):
                salt_list.append(random.choice(salt_key))
            for i in range(4):
                salt_list.append(random.choice(symbol))
        else:
            for i in range(length):
                salt_list.append(random.choice(salt_key))
        salt = ''.join(salt_list)
        return salt
    @staticmethod
    def md5_crypt(string):
        """
        Return the hex md5 digest of *string* (one-way hash).
        """
        return hashlib.new("md5", string).hexdigest()
    @staticmethod
    def gen_sha512(salt, password):
        """
        Return a crypt(3)-style SHA-512 password hash ($6$<salt>$...).
        """
        return crypt.crypt(password, '$6$%s$' % salt)
    def encrypt(self, passwd=None, length=32):
        """
        AES-encrypt *passwd* (a random one is generated when omitted) and
        return the ciphertext hex-encoded; the input is NUL-padded up to a
        multiple of *length* bytes first.
        """
        if not passwd:
            passwd = self.gen_rand_pass()
        cryptor = AES.new(self.key, self.mode, b'8122ca7d906ad5e1')
        try:
            count = len(passwd)
        except TypeError:
            raise ServerError('Encrypt password error, TYpe error.')
        # pad with NULs so the length is a multiple of the AES block budget
        add = (length - (count % length))
        passwd += ('\0' * add)
        cipher_text = cryptor.encrypt(passwd)
        return b2a_hex(cipher_text)
    def decrypt(self, text):
        """
        Decrypt hex-encoded *text* produced by encrypt() and strip the NUL
        padding; uses the same fixed key/IV pair.
        """
        cryptor = AES.new(self.key, self.mode, b'8122ca7d906ad5e1')
        try:
            plain_text = cryptor.decrypt(a2b_hex(text))
        except TypeError:
            raise ServerError('Decrypt password error, TYpe error.')
        return plain_text.rstrip('\0')
class ServerError(Exception):
    """Application-specific error raised by the jumpserver API helpers."""
def get_object(model, **kwargs):
    """
    Query *model* with the given filters and return the single match.

    Returns None when any filter value is falsy, when nothing matches, or
    when the filter is ambiguous (more than one row).
    """
    if not all(kwargs.values()):
        # a falsy filter value would match everything — treat as "not found"
        return None
    matches = model.objects.filter(**kwargs)
    if len(matches) == 1:
        return matches[0]
    return None
def require_role(role='user'):
    """
    Decorator factory requiring the session user to hold at least *role*
    ('user', 'admin' or 'super'). Unauthenticated users are redirected to
    the login page; under-privileged users back to the index page.
    """
    def _deco(func):
        def __deco(request, *args, **kwargs):
            # remember the target URL so login can bounce back afterwards
            request.session['pre_url'] = request.path
            if not request.user.is_authenticated():
                return HttpResponseRedirect(reverse('login'))
            if role == 'admin':
                # if request.session.get('role_id', 0) < 1:
                # 'CU' (common user) may not access admin views
                if request.user.role == 'CU':
                    return HttpResponseRedirect(reverse('index'))
            elif role == 'super':
                # if request.session.get('role_id', 0) < 2:
                # only 'SU' (superuser) passes; 'CU'/'GA' are rejected
                if request.user.role in ['CU', 'GA']:
                    return HttpResponseRedirect(reverse('index'))
            return func(request, *args, **kwargs)
        return __deco
    return _deco
def is_role_request(request, role='user'):
    """
    Check whether the requesting user holds exactly the given role.

    :param role: 'user', 'admin' or 'super' (anything else maps to 'user')
    :return: True when request.user.role matches the expected role code
    """
    expected = {'user': 'CU', 'admin': 'GA', 'super': 'SU'}.get(role, 'CU')
    return request.user.role == expected
def get_session_user_dept(request):
    """
    Return (user, department) for the session user.

    Department support has been removed from this code path, so the
    second element is always None.
    """
    return request.user, None
@require_role('user')
def get_session_user_info(request):
    """
    Return [id, username, user] for the authenticated session user.

    Fixes: ``require_role`` is a decorator *factory*; the previous bare
    ``@require_role`` passed this function in as the ``role`` argument, so
    callers received the inner wrapper instead of the user info list.
    """
    return [request.user.id, request.user.username, request.user]
def get_user_dept(request):
    """
    Return the department id of the requesting user, or None when the
    request carries no user id.
    """
    uid = request.user.id
    if not uid:
        return None
    return User.objects.get(id=uid).dept.id
def api_user(request):
    """
    JSON summary of live sessions: the number of unfinished connections
    and the number of distinct users holding them.
    """
    active = Log.objects.filter(is_finished=0)
    payload = {
        'users': active.values('user').distinct().count(),
        'hosts': active.count(),
    }
    return HttpResponse(json.dumps(payload))
def view_splitter(request, su=None, adm=None):
    """
    Dispatch *request* to the superuser view (*su*) or the admin view
    (*adm*) based on the user's role; everyone else is redirected to the
    login page.
    """
    if is_role_request(request, 'super'):
        return su(request)
    if is_role_request(request, 'admin'):
        return adm(request)
    return HttpResponseRedirect(reverse('login'))
def validate(request, user_group=None, user=None, asset_group=None, asset=None, edept=None):
    """
    Check that every id list in the request belongs to the requesting
    user's department.

    Each argument is an iterable of ids given as strings; returns False on
    the first foreign id, True otherwise.

    NOTE(review): get_session_user_dept() currently returns (user, None),
    so ``dept`` is None here and any non-empty argument would raise
    AttributeError — confirm whether department support is still wired up.
    """
    dept = get_session_user_dept(request)[1]
    if edept:
        if dept.id != int(edept[0]):
            return False
    if user_group:
        dept_user_groups = dept.usergroup_set.all()
        user_group_ids = []
        for group in dept_user_groups:
            user_group_ids.append(str(group.id))
        if not set(user_group).issubset(set(user_group_ids)):
            return False
    if user:
        dept_users = dept.user_set.all()
        user_ids = []
        for dept_user in dept_users:
            user_ids.append(str(dept_user.id))
        if not set(user).issubset(set(user_ids)):
            return False
    if asset_group:
        dept_asset_groups = dept.bisgroup_set.all()
        asset_group_ids = []
        for group in dept_asset_groups:
            asset_group_ids.append(str(group.id))
        if not set(asset_group).issubset(set(asset_group_ids)):
            return False
    if asset:
        dept_assets = dept.asset_set.all()
        asset_ids = []
        for dept_asset in dept_assets:
            asset_ids.append(str(dept_asset.id))
        if not set(asset).issubset(set(asset_ids)):
            return False
    return True
def verify(request, user_group=None, user=None, asset_group=None, asset=None, edept=None):
    """
    Like validate(): check that the given user / user-group / asset /
    asset-group ids all belong to the requesting user's department.

    Returns False as soon as a foreign id is found, True otherwise.

    Fixes: removed a leftover Python-2 debug ``print asset, asset_ids``
    statement (a syntax error under Python 3 and noise in production).

    NOTE(review): get_session_user_dept() currently returns (user, None),
    so ``dept`` is None and any non-empty argument would raise — confirm
    whether department support is still active.
    """
    dept = get_session_user_dept(request)[1]
    if edept:
        if dept.id != int(edept[0]):
            return False
    if user_group:
        dept_user_groups = dept.usergroup_set.all()
        user_groups = []
        for user_group_id in user_group:
            user_groups.extend(UserGroup.objects.filter(id=user_group_id))
        if not set(user_groups).issubset(set(dept_user_groups)):
            return False
    if user:
        dept_users = dept.user_set.all()
        users = []
        for user_id in user:
            users.extend(User.objects.filter(id=user_id))
        if not set(users).issubset(set(dept_users)):
            return False
    if asset_group:
        dept_asset_groups = dept.bisgroup_set.all()
        asset_group_ids = []
        for group in dept_asset_groups:
            asset_group_ids.append(str(group.id))
        if not set(asset_group).issubset(set(asset_group_ids)):
            return False
    if asset:
        dept_assets = dept.asset_set.all()
        asset_ids = []
        for a in dept_assets:
            asset_ids.append(str(a.id))
        if not set(asset).issubset(set(asset_ids)):
            return False
    return True
def bash(cmd):
    """
    Execute *cmd* through the system shell and return its exit status.

    NOTE: shell=True — callers must never pass untrusted input.
    """
    status = subprocess.call(cmd, shell=True)
    return status
def mkdir(dir_name, username='', mode=755):
    """
    Ensure *dir_name* exists with the requested permissions and owner.

    :param dir_name: directory to create (parents included)
    :param username: when given, chown the directory to this user
    :param mode: permissions written as decimal digits that are read as
                 octal (755 means 0o755), matching the old shell ``chmod``

    Fixes: the previous implementation interpolated ``dir_name`` into a
    shell command string (command-injection prone); it now uses
    os.makedirs/os.chmod directly. As before, the mode is only applied
    when the directory is freshly created.
    """
    if not os.path.isdir(dir_name):
        os.makedirs(dir_name)
        # "755" decimal digits -> 0o755, same as `chmod 755` in the old shell call
        os.chmod(dir_name, int(str(mode), 8))
    if username:
        chown(dir_name, username)
def http_success(request, msg):
    """Render the generic success page; ``msg`` reaches the template via locals()."""
    return render_to_response('success.html', locals())
def http_error(request, emg):
    """Render the generic error page; ``message`` reaches the template via locals()."""
    # the template expects the name 'message', hence the rebinding
    message = emg
    return render_to_response('error.html', locals())
def my_render(template, data, request):
    """render_to_response wrapper that always supplies a RequestContext."""
    return render_to_response(template, data, context_instance=RequestContext(request))
def get_tmp_dir():
    """
    Create and return a fresh world-writable directory under /tmp, named
    with a timestamp plus a short random suffix.
    """
    suffix = uuid.uuid4().hex[:4]
    stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
    path = os.path.join('/tmp', '%s-%s' % (stamp, suffix))
    mkdir(path, mode=777)
    return path
def defend_attack(func):
    """
    Rate-limiting decorator: allow at most 10 requests per session within
    the 300-second session window; further requests get a 403 response.
    """
    def _deco(request, *args, **kwargs):
        if int(request.session.get('visit', 1)) > 10:
            logger.debug('请求次数: %s' % request.session.get('visit', 1))
            return HttpResponse('Forbidden', status=403)
        request.session['visit'] = request.session.get('visit', 1) + 1
        # short session expiry so the counter resets after 5 minutes
        request.session.set_expiry(300)
        return func(request, *args, **kwargs)
    return _deco
def get_mac_address():
    """Return this host's MAC address as 12 lowercase hex characters."""
    return uuid.UUID(int=uuid.getnode()).hex[-12:]
# Module-level singletons shared across the app: the AES helper keyed from
# settings.KEY, and the common 'jumpserver' logger at settings.LOG_LEVEL.
CRYPTOR = PyCrypt(KEY)
logger = set_log(LOG_LEVEL)
|
786951355/jumpserver
|
jumpserver/api.py
|
Python
|
gpl-2.0
| 14,614
|
[
"VisIt"
] |
df5ef54e0fd9e782b763748b7b697057754b8e6d59194386290c9abd44d5f21d
|
def visit_Call(self, node):
    """
    Visit a Call node, rewriting the connection argument of the calls we
    care about and ignoring every other call.

    :param node: A call node
    :raises Exception: when a second data-wrapper call is encountered

    Fixes: the local previously named ``id`` shadowed the builtin; renamed
    to ``conn_id``.
    """
    name = self.__find_call_name(node)
    if name in ATOMIC_SOURCES:
        conn_id = self.__get_id()
        self.__replace_connection(conn_id, node)
    elif name in WRAPPERS:
        if self.dw_flag:
            raise Exception('There is more than one wrapper in this '
                            'program')
        else:
            self.__replace_connection(self.dw_id, node)
            self.dw_flag = True
|
Betaboxguugi/P6
|
documentation/presentation/code/visit_call.py
|
Python
|
gpl-3.0
| 655
|
[
"VisIt"
] |
1ad654f7e138197a818fe1e80073e1d5e3e36c9a45d5381b3deef9096a132be5
|
# -*- coding:utf-8 -*-
# Copyright (c) 2015, Galaxy Authors. All Rights Reserved
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Author: wangtaize@baidu.com
# Date: 2015-04-01
from django.conf import urls
# Route table for the cluster console app (legacy Django <=1.7 patterns()
# style): '^status' maps to console.cluster.views.get_status.
urlpatterns = urls.patterns("console.cluster.views",
    (r'^status','get_status'),
)
|
fxsjy/galaxy
|
console/backend/src/console/cluster/urls.py
|
Python
|
bsd-3-clause
| 355
|
[
"Galaxy"
] |
789c32ac2b7f4bf1b061a70669d7d88c9944f91b4ad62b45543d3e993ab22fbf
|
"""
Command-line interface for model evaluation
@author Siddharth Reddy <sgr45@cornell.edu>
"""
from __future__ import division
import click
import logging
import math
import pickle
import os
import pandas as pd
import numpy as np
from lentil import datatools
from lentil import models
from lentil import est
from lentil import evaluate
_logger = logging.getLogger(__name__)
@click.command()
# path to interaction history CSV/pickle input file
@click.argument('history_file', type=click.Path(exists=True))
# path to pickled results file
@click.argument('results_file', type=click.Path(exists=False))
@click.option(
    '--verbose', is_flag=True,
    help='Makes debug messages visible')
@click.option(
    '--using-lessons/--no-using-lessons', default=True,
    help='Include embeddings of skill gains from lessons')
@click.option(
    '--using-prereqs/--no-using-prereqs', default=True,
    help='Include embeddings of prerequisites for lessons')
@click.option(
    '--using-bias/--no-using-bias', default=True,
    help='Include bias terms in the item response function')
@click.option(
    '--embedding-dimension', default=2,
    help='Dimensionality of latent skill space')
@click.option(
    '--learning-update-variance', default=0.5,
    help='Constant variance for Gaussian lesson updates')
@click.option(
    '--opt-algo',
    type=click.Choice(['l-bfgs-b', 'batch-gd', 'adagrad']),
    default='l-bfgs-b',
    help='Iterative optimization algorithm used for parameter estimation')
@click.option(
    '--regularization-constant', default=1e-6,
    help='Coefficient of norm regularization terms')
@click.option(
    '--ftol', default=1e-3,
    help='Stopping condition for iterative optimization')
@click.option('--learning-rate', default=5e-3, help='Fixed learning rate')
@click.option('--adagrad-eta', default=1e-3, help='Adagrad learning rate')
@click.option('--adagrad-eps', default=0.1, help='Adagrad epsilon')
@click.option('--num-folds', default=10, help='Number of folds in k-fold cross-validation')
@click.option(
    '--truncation-style',
    type=click.Choice(['random', 'last']),
    default='last',
    help='Truncate student history at random, or just before last assessment interactions')
def cli(
        history_file,
        results_file,
        verbose,
        num_folds,
        truncation_style,
        using_lessons,
        using_prereqs,
        using_bias,
        embedding_dimension,
        learning_update_variance,
        opt_algo,
        regularization_constant,
        ftol,
        learning_rate,
        adagrad_eta,
        adagrad_eps):
    """
    This script provides a command-line interface for model evaluation.
    It reads an interaction history from file, computes the cross-validated AUC of
    an embedding model, and writes the results to file.
    The pickled results will be an object of type :py:class:`evaluate.CVResults`
    :param str history_file: Input path to CSV/pickle file containing interaction history
    :param str results_file: Output path for pickled results of cross-validation
    :param bool verbose: True => logger level set to logging.INFO
    :param int num_folds: Number of folds in k-fold cross-validation
    :param str truncation_style: Hold-out scheme for student histories
    :param bool using_lessons: Including lessons in embedding
    :param bool using_prereqs: Including lesson prereqs in embedding
    :param bool using_bias: Including bias terms in embedding
    :param int embedding_dimension: Number of dimensions in latent skill space
    :param float learning_update_variance: Variance of Gaussian learning update
    :param str opt_algo: Optimization algorithm for parameter estimation
    :param float regularization_constant: Coefficient of regularization term in objective function
    :param float ftol: Stopping condition for iterative optimization
    :param float learning_rate: Fixed learning rate for gradient descent
    :param float adagrad_eta: Base learning rate parameter for Adagrad
    :param float adagrad_eps: Epsilon parameter for Adagrad
    """
    if verbose and opt_algo == 'l-bfgs-b':
        raise ValueError('Verbose mode is not currently supported for L-BFGS-B.\
            Try turning off verbose mode, or change your choice of optimization algorithm.')
    if verbose:
        _logger.setLevel(logging.DEBUG)
    click.echo('Loading interaction history from %s...' % click.format_filename(history_file))
    _, history_file_ext = os.path.splitext(history_file)
    if history_file_ext == '.csv':
        # Fix: dropped a dead `data = pd.DataFrame.from_csv(history_file)` —
        # the result was never used and DataFrame.from_csv is deprecated.
        history = datatools.InteractionHistory(pd.read_csv(history_file))
    elif history_file_ext == '.pkl':
        with open(history_file, 'rb') as f:
            history = pickle.load(f)
    else:
        raise ValueError('Unrecognized file extension for history_file.\
            Please supply a .csv with an interaction history, or a .pkl file containing\
            a datatools.InteractionHistory object.')
    embedding_kwargs = {
        'embedding_dimension' : embedding_dimension,
        'using_lessons' : using_lessons,
        'using_prereqs' : using_prereqs,
        'using_bias' : using_bias,
        'learning_update_variance_constant' : learning_update_variance
    }
    gradient_descent_kwargs = {
        'using_adagrad' : opt_algo == 'adagrad',
        'eta' : adagrad_eta,
        'eps' : adagrad_eps,
        'rate' : learning_rate,
        'verify_gradient' : False,
        'debug_mode_on' : verbose,
        'ftol' : ftol,
        'num_checkpoints' : 100
    }
    estimator = est.EmbeddingMAPEstimator(
        regularization_constant=regularization_constant,
        using_scipy=(opt_algo == 'l-bfgs-b'),
        gradient_descent_kwargs=gradient_descent_kwargs,
        verify_gradient=False,
        debug_mode_on=verbose,
        ftol=ftol)
    def build_embedding(
        embedding_kwargs,
        estimator,
        history,
        filtered_history,
        split_history=None):
        # Fit one embedding model on the (possibly truncated) history fold.
        model = models.EmbeddingModel(history, **embedding_kwargs)
        estimator.filtered_history = filtered_history
        if split_history is not None:
            estimator.split_history = split_history
        model.fit(estimator)
        return model
    model_builders = {
        'model' : (lambda *args, **kwargs: build_embedding(
            embedding_kwargs,
            estimator,
            *args,
            **kwargs))
    }
    click.echo(
        'Computing cross-validated AUC (num_folds=%d, truncation_style=%s)...' % (
            num_folds,
            truncation_style))
    results = evaluate.cross_validated_auc(
        model_builders,
        history,
        num_folds=num_folds,
        random_truncations=(truncation_style == 'random'))
    train_auc_mean = results.training_auc_mean('model')
    val_auc_mean = results.validation_auc_mean('model')
    train_auc_stderr = results.training_auc_stderr('model')
    val_auc_stderr = results.validation_auc_stderr('model')
    click.echo('AUCs with 95% confidence intervals:')
    click.echo('Training AUC = %f (%f, %f)' % (
        train_auc_mean,
        train_auc_mean - 1.96 * train_auc_stderr,
        train_auc_mean + 1.96 * train_auc_stderr))
    click.echo('Validation AUC = %f (%f, %f)' % (
        val_auc_mean,
        val_auc_mean - 1.96 * val_auc_stderr,
        val_auc_mean + 1.96 * val_auc_stderr))
    with open(results_file, 'wb') as f:
        pickle.dump(results, f, pickle.HIGHEST_PROTOCOL)
    click.echo('Results written to %s' % results_file)
if __name__ == '__main__':
    cli()
|
rddy/lentil
|
scripts/lse_eval.py
|
Python
|
apache-2.0
| 7,567
|
[
"Gaussian"
] |
a1317fee94c290e86ef8cd9138a1dbd3e013c4514716b8a9f00e1dfe0304d368
|
# -*- coding: utf-8 -*-
"""
This script opens data files and extracts relevant data. Then, using a sklearn
gaussian process package, fits a gaussian to the crosswind (1d) temperature
measurements and interpolates temparture values (relative, not absolute) at a
2 mm interval.
TODO: make gaussian process it's own script
Created on Mon Dec 01 11:51:44 2014
@authors: Sharri and Richard
"""
from scipy.optimize import curve_fit
from sklearn import gaussian_process
import numpy as np
import matplotlib.pyplot as plt
import scipy.io as io
import os
mydir = os.path.dirname(__file__)
# pull out temperature data
lhstore2_file = os.path.join(mydir, "data", 'lhstore2.mat')
lhstore2_data = io.loadmat(lhstore2_file)
T_raw = lhstore2_data['store2'].T
## lhstore2 is lh50's store with two channel error corrected
### temperatures_raw.shape() ==> (215, 4, 20000)
#TODO: Allow for selection or incorporation of other heights (2nd dimension of temperatures_raw)
T_raw = T_raw[:210,0,:] #subset of data to work with - one height, removed unnecessary points at end of wind tunnel
# pull out positional data
lh50_file = os.path.join(mydir, 'data', 'final-lh50.mat')
lh50_data = io.loadmat(lh50_file)
#==============================================================================
# lh50_data.keys() ==> ['p_in', 'p_mm', 'p', 's', 'store', '__header__', '__globals__', '__version__']
# 'p_in' => pos in inches
# 'p_mm' = > positions in mm
# 'p' => pos in grid
# 's' => time averaged temperature
# 'store' => raw temp data
#==============================================================================
# Make array of observed locations (x,y)
xy_observed = np.zeros((2,210),dtype = float)
observed_data = np.zeros((3,210), dtype = float)
#temperatures_time_avg = lh50_data['s'] #time averaged temperature data
xy_observed[0,:] = lh50_data['p_mm'][:210,0] #x (crosswind) axis, observed data
xy_observed[1,:] = lh50_data['p_mm'][:210,1]
# Get temperature data: time average and per-location standard deviation
T_time_avg = np.mean(T_raw,1)
T_sd = np.std(T_raw,1)
# fit all of the observed data into one array, for ease of use of fitting function
observed_data[:2,:] = xy_observed
observed_data[2,:] = T_time_avg
#TODO: move everything below this to a function and/or separate script
# prediction locations
x_predicted = np.atleast_2d(np.linspace(0, 254, 15)) #2 mm prediction sites
y_predicted = np.atleast_2d(np.linspace(0, 850, 14))
x1,x2 = np.meshgrid(x_predicted, y_predicted)
xy_predicted = np.vstack([x1.reshape(x1.size), x2.reshape(x2.size)]).T
#xy_predicted = track_1[:,:1]
# calculate relative-noise nugget (required by the GP fit)
nugget = (T_sd/T_time_avg)**2
nugget = nugget[:181] #deletes repeated measurement locations
#TODO: make section into separate function
gp = gaussian_process.GaussianProcess(corr = 'absolute_exponential',
                                      theta0 = 1./25,
                                      thetaL = 1e-2,
                                      thetaU = 1,
                                      normalize = True,
                                      random_start = 100,
                                      nugget = nugget)
gp.fit(xy_observed.T[:181,:], T_time_avg[:181])
#gp.fit(xy_observed.T[:181,:], T_raw[:181,:]) #with time variants, this fits each time step...non ideal.
# Target value error will come up with that last repeated row. It can't have
# multiple measurements at the same location. Consider deleting that repeated
# last row of measurements or take a mean or stack the timeseries onto the
# first measurement, which will effectively average the values.
T_prediction, y_prediction_MSE = gp.predict(xy_predicted, eval_MSE = True) #produce predicted y values
sigma = np.sqrt(y_prediction_MSE) #get SD of fit at each x_predicted location (for confidence interval)
|
isomerase/GP-temperature-interpolation
|
gptempdata-2d.py
|
Python
|
mit
| 3,799
|
[
"Gaussian"
] |
c57967fffbf09dc971d54d9d791dc26ee1fd7a1b3c981f9e662cb468bd5dff9e
|
__author__ = 'Christo Robison'
from spectral import *
import numpy as np
#import tensorflow as tf
import h5py
import pylab
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from scipy import fftpack
import time
from skimage import io, exposure, img_as_uint, img_as_float
import png
io.use_plugin('freeimage')
output = r'H:\Results'
def getData(filename=None):
    """
    Load a hyperspectral dataset from an HDF5 file.

    :param filename: path to the .h5 file (defaults to the local test cube)
    :return: dict with keys 'dcb' (data cube), 'labels', 'lambdas'
             (band wavelengths) and 'classLabels'
    """
    if filename is None: filename = 'D:\-_Hyper_Spec_-\HYPER_SPEC_TEST.h5'
    f = h5py.File(filename, 'r')
    dcb = f['data'][:] #Extract normalized data for svm b/c intensity sensitive
    labels = f['labels'][:]
    bands = f['bands'][:]
    classLabels = f['classLabels'][:]
    out = {'dcb': dcb, 'labels': labels, 'lambdas': bands, 'classLabels': classLabels}
    f.close()
    return out
def shapeData(data, labels, numExamples, numBands, altDims = None):
    """
    Flatten a hyperspectral cube into per-pixel endmember rows.

    Reshapes *data* (Fortran order) to *altDims* — default
    [443, 313, numBands, numExamples] — and returns it as an
    (N, numBands) matrix plus the labels as an (N, 1) column.
    """
    dims = [443, 313, numBands, numExamples] if altDims is None else altDims
    cube = np.reshape(data, dims, 'f')
    flat_pixels = np.reshape(cube, [-1, numBands])
    label_col = np.reshape(labels, [-1, 1])
    return {'data': flat_pixels, 'label': label_col}
if __name__ == '__main__':
    # Load the same reduced test cube for both roles; classification below
    # is run on the training cube only.
    trainData = getData(filename='D:\-_Hyper_Spec_-\HYPER_SPEC_TEST_RED.h5')
    testData = getData(filename='D:\-_Hyper_Spec_-\HYPER_SPEC_TEST_RED.h5')
    print(np.shape(trainData['dcb']))
    debug = False
    if debug is True:
        # Dump every spectral band as a 16-bit grayscale PNG for inspection.
        for i in range(np.shape(trainData['dcb'])[2]):
            im = exposure.rescale_intensity(trainData['dcb'][:,:,i], out_range='float')
            im = img_as_uint(im)
            io.imsave((r'HYPER_SPEC_TEST\band_image_' + str(i) + '.png'), im)
            #pf = open(('band_image_' + str(i) + '.png'), 'wb')
            #w = png.Writer(width=313, height=443, bitdepth=16, greyscale=True)
            # w.write(pf, np.reshape(testData['dcb'], (-1, 443 * 372)))
            #pf.close()
    ### Unsupervised Classification
    #img = trainData['dcb'][:,:,1625:1651]
    #(m, c) = kmeans(img, 6, 300)
    # k-means on a band subset: 6 clusters, up to 300 iterations
    img = trainData['dcb'][:,:,343:370]
    (m, c) = kmeans(img, 6, 300)
    fig1 = plt.figure(1)
    fig1.hold(True)
    ax1 = fig1.add_subplot(111)
    for i in range(c.shape[0]):
        ax1.plot(c[i])
    #plt.ion()
    #pylab.show()
    fig1.savefig('kmeans')
    fig1.hold(False)
    ####Supervised Classification
    # Ground truth from slice 11; background (0) is remapped to class 6.
    gt = trainData['classLabels'][:,:,11]
    bkgnd = gt == 0
    gt[bkgnd] = 6
    #pylab.figure()
    fig2 = plt.figure(2)
    fig2.hold(True)
    ax2 = fig2.add_subplot(111)
    v = imshow(classes=gt, fignum=None)
    plt.savefig('ground_truth')
    #plt.show()
    #pylab.hold(1)
    classes = create_training_classes(img, gt)
    ###Gaussian Maximum Likelihood Classifier
    gmlc = GaussianClassifier(classes, min_samples=200)
    clmap = gmlc.classify_image(img)
    #pylab.figure()
    v = imshow(classes=clmap)
    plt.savefig('c_map')
    #pylab.hold(1)
    # Mask the classification map to labeled pixels and plot the errors.
    gtresults = clmap * (gt !=0)
    #pylab.figure()
    v = imshow(classes=gtresults)
    plt.savefig('gtresults')
    #pylab.hold(1)
    #pylab.figure()
    gterrors = gtresults * (gtresults != gt)
    v = imshow(classes=gterrors)
    plt.savefig('gterrors')
    #pylab.hold(1)
    #pylab.figure()
    #F1 = fftpack.fft2(img)
    #F2 = fftpack.fftshift(F1)
    #psd2D = np.abs(F2)**2
    F1 = np.fft.rfft2(img)
    v = imshow(F1)
    plt.savefig('fft2')
    # PCA: keep 99.9% of variance, re-classify in the reduced space.
    pc = principal_components(img)
    v = imshow(pc.cov)
    plt.savefig('covariance_matrix')
    pc_0999 = pc.reduce(fraction=0.999)
    len(pc_0999.eigenvalues)
    img_pc = pc_0999.transform(img)
    v = imshow(img_pc[:,:,:3], stretch_all=True)
    plt.savefig('top3components')
    classes = create_training_classes(img_pc, gt)
    gmlc = GaussianClassifier(classes)
    clmap = gmlc.classify_image(img_pc)
    clmap_training = clmap * (gt !=0)
    v = imshow(classes=clmap_training)
    plt.savefig('trainnigDataC_map')
    training_errors = clmap_training * (clmap_training != gt)
    v = imshow(classes= training_errors)
    plt.savefig('trainingDataErrors')
    #pylab.show()
    time.sleep(1.5)
    #k = input('press to close')
|
Crobisaur/HyperSpec
|
Python/final_tests.py
|
Python
|
gpl-3.0
| 4,173
|
[
"Gaussian"
] |
49d3f765150536f6cb66677c16a9ba765c4813896713759ed3fe2db6d7a38be5
|
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Created on 15.02.2016
@author: marscher
'''
import functools
import itertools
from mdtraj.geometry.dihedral import (indices_phi,
indices_psi,
indices_chi1,
)
import mdtraj
from pyemma.coordinates.data.featurization._base import Feature
from pyemma.coordinates.data.featurization.util import (_hash_numpy_array,
hash_top, _describe_atom)
import numpy as np
class AngleFeature(Feature):
    """
    Feature computing the angles spanned by atom triples of a topology,
    optionally as (cos, sin) pairs and/or in degrees.
    """
    def __init__(self, top, angle_indexes, deg=False, cossin=False, periodic=True):
        # top: topology object; angle_indexes: (n, 3) atom-index triples
        self.top = top
        self.angle_indexes = np.array(angle_indexes)
        if len(self.angle_indexes) == 0:
            raise ValueError("empty indices")
        self.deg = deg
        self.cossin = cossin
        self.periodic = periodic
        self._dim = len(self.angle_indexes)
        if cossin:
            # each angle contributes both a cos and a sin output dimension
            self._dim *= 2
    def describe(self):
        """Return one human-readable label per output dimension."""
        if self.cossin:
            sin_cos = ("ANGLE: COS(%s - %s - %s)",
                       "ANGLE: SIN(%s - %s - %s)")
            labels = [s % (_describe_atom(self.top, triple[0]),
                           _describe_atom(self.top, triple[1]),
                           _describe_atom(self.top, triple[2]))
                      for triple in self.angle_indexes
                      for s in sin_cos]
        else:
            labels = ["ANGLE: %s - %s - %s " %
                      (_describe_atom(self.top, triple[0]),
                       _describe_atom(self.top, triple[1]),
                       _describe_atom(self.top, triple[2]))
                      for triple in self.angle_indexes]
        return labels
    def transform(self, traj):
        """Compute the angle values (radians or, with deg=True, degrees) for *traj*."""
        rad = mdtraj.compute_angles(traj, self.angle_indexes, self.periodic)
        if self.cossin:
            rad = np.dstack((np.cos(rad), np.sin(rad)))
            rad = rad.reshape(functools.reduce(lambda x, y: x * y, rad.shape),)
        if self.deg:
            return np.rad2deg(rad)
        else:
            return rad
    def __hash__(self):
        # Hash combines index array, topology and the option flags so equal
        # feature definitions hash equally.
        hash_value = _hash_numpy_array(self.angle_indexes)
        hash_value ^= hash_top(self.top)
        hash_value ^= hash(self.deg)
        hash_value ^= hash(self.cossin)
        return hash_value
class DihedralFeature(AngleFeature):
    """
    Feature computing dihedral (torsion) angles over atom quadruples,
    reusing AngleFeature's storage/hashing with 4-index tuples.
    """
    def __init__(self, top, dih_indexes, deg=False, cossin=False, periodic=True):
        # dih_indexes: (n, 4) atom-index quadruples
        super(DihedralFeature, self).__init__(top=top,
                                              angle_indexes=dih_indexes,
                                              deg=deg,
                                              cossin=cossin,
                                              periodic=periodic)
    def describe(self):
        """Return one human-readable label per output dimension."""
        if self.cossin:
            sin_cos = (
                "DIH: COS(%s - %s - %s - %s)", "DIH: SIN(%s - %s - %s - %s)")
            labels = [s %
                      (_describe_atom(self.top, quad[0]),
                       _describe_atom(self.top, quad[1]),
                       _describe_atom(self.top, quad[2]),
                       _describe_atom(self.top, quad[3]))
                      for quad in self.angle_indexes
                      for s in sin_cos]
        else:
            labels = ["DIH: %s - %s - %s - %s " %
                      (_describe_atom(self.top, quad[0]),
                       _describe_atom(self.top, quad[1]),
                       _describe_atom(self.top, quad[2]),
                       _describe_atom(self.top, quad[3]))
                      for quad in self.angle_indexes]
        return labels
    def transform(self, traj):
        """Compute the dihedral values (radians or, with deg=True, degrees) for *traj*."""
        rad = mdtraj.compute_dihedrals(traj, self.angle_indexes, self.periodic)
        if self.cossin:
            rad = np.dstack((np.cos(rad), np.sin(rad)))
            rad = rad.reshape(rad.shape[0], rad.shape[1]*rad.shape[2])
        # convert to degrees
        if self.deg:
            rad = np.rad2deg(rad)
        return rad
class BackboneTorsionFeature(DihedralFeature):
    """Backbone phi/psi torsions, interleaved as (phi_1, psi_1, ..., phi_n, psi_n).

    An optional mdtraj selection string restricts the torsions to those
    whose second (anchor) atom lies inside the selection.
    """

    def __init__(self, topology, selstr=None, deg=False, cossin=False, periodic=True):
        def _filtered(indices):
            # keep only torsions whose second atom is in the selection
            if not selstr:
                return indices
            mask = np.in1d(indices[:, 1], topology.select(selstr),
                           assume_unique=True)
            return indices[mask]

        self._phi_inds = _filtered(indices_phi(topology))
        self._psi_inds = _filtered(indices_psi(topology))
        # alternate phi, psi pairs (phi_1, psi_1, ..., phi_n, psi_n)
        dih_indexes = np.array(list(zip(self._phi_inds,
                                        self._psi_inds))).reshape(-1, 4)
        super(BackboneTorsionFeature, self).__init__(topology, dih_indexes,
                                                     deg=deg, cossin=cossin,
                                                     periodic=periodic)

    def describe(self):
        """Labels in the same interleaved order as the torsion indices."""
        top = self.top

        def _lbl(ires):
            at = top.atom(ires[1])
            return "%i %s %i" % (at.residue.chain.index, at.residue.name,
                                 at.residue.resSeq)

        res = []
        if self.cossin:
            # (cos(phi_i), sin(phi_i), cos(psi_i), sin(psi_i)) per pair
            for phi, psi in zip(self._phi_inds, self._psi_inds):
                res.append("COS(PHI %s)" % _lbl(phi))
                res.append("SIN(PHI %s)" % _lbl(phi))
                res.append("COS(PSI %s)" % _lbl(psi))
                res.append("SIN(PSI %s)" % _lbl(psi))
        else:
            for phi, psi in zip(self._phi_inds, self._psi_inds):
                res.append("PHI %s" % _lbl(phi))
                res.append("PSI %s" % _lbl(psi))
        return res
class Chi1TorsionFeature(DihedralFeature):
    """Chi1 side-chain torsions, optionally restricted by a selection string."""

    def __init__(self, topology, selstr=None, deg=False, cossin=False, periodic=True):
        indices = indices_chi1(topology)
        if selstr:
            # keep only torsions whose second atom is in the selection
            mask = np.in1d(indices[:, 1], topology.select(selstr),
                           assume_unique=True)
            indices = indices[mask]
        super(Chi1TorsionFeature, self).__init__(topology, indices,
                                                 deg=deg, cossin=cossin,
                                                 periodic=periodic)

    def describe(self):
        """Return one label per output dimension (COS/SIN pairs if cossin)."""
        def _lbl(ires):
            at = self.top.atom(ires[1])
            return "%i %s %i " % (at.residue.chain.index, at.residue.name,
                                  at.residue.resSeq)

        if self.cossin:
            templates = ("COS(CHI1 %s)", "SIN(CHI1 %s)")
            return [t % _lbl(ires)
                    for ires in self.angle_indexes
                    for t in templates]
        return ["CHI1" + _lbl(ires) for ires in self.angle_indexes]
|
gph82/PyEMMA
|
pyemma/coordinates/data/featurization/angles.py
|
Python
|
lgpl-3.0
| 8,442
|
[
"MDTraj"
] |
a0dfaf8b53f150c1591c38f75a6472574d9669c867fc407e5e470da8d234fab0
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'XtalxplorerMainWindowUI.ui'
#
# Created: Tue Feb 3 00:17:01 2015
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 API-compatibility shim: on API-v1 builds QtCore.QString.fromUtf8
# exists; on API-v2 builds (plain str everywhere) it does not, so fall
# back to an identity function.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # API v2: strings are already unicode; pass through unchanged.
        return s
# Translation helper: include the UnicodeUTF8 encoding flag when the
# running PyQt still provides it (it was removed in newer API versions).
try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_XtalxplorerMainWindow(object):
    """Auto-generated (pyuic4) widget layout for the Xtal-xplore-R main window.

    Do not edit by hand; regenerate from 'XtalxplorerMainWindowUI.ui'
    instead (see the warning in the file header).
    """

    def setupUi(self, XtalxplorerMainWindow):
        """Build the widget tree, menus, actions and signal connections."""
        # Top-level window: object name, size policy, minimum size, icon.
        XtalxplorerMainWindow.setObjectName(_fromUtf8("XtalxplorerMainWindow"))
        XtalxplorerMainWindow.resize(1024, 740)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(XtalxplorerMainWindow.sizePolicy().hasHeightForWidth())
        XtalxplorerMainWindow.setSizePolicy(sizePolicy)
        XtalxplorerMainWindow.setMinimumSize(QtCore.QSize(800, 600))
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/PowPySol.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        XtalxplorerMainWindow.setWindowIcon(icon)
        # Central container widget and master grid layout.
        self.container = QtGui.QWidget(XtalxplorerMainWindow)
        self.container.setMinimumSize(QtCore.QSize(780, 580))
        self.container.setObjectName(_fromUtf8("container"))
        self.gridLayout = QtGui.QGridLayout(self.container)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.hLayout_main = QtGui.QHBoxLayout()
        self.hLayout_main.setObjectName(_fromUtf8("hLayout_main"))
        # Left-hand vertical tool strip: open, copy, vertical progress bar.
        self.vLayout_tools = QtGui.QVBoxLayout()
        self.vLayout_tools.setObjectName(_fromUtf8("vLayout_tools"))
        self.toolButton_open = QtGui.QToolButton(self.container)
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/Document-open.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.toolButton_open.setIcon(icon1)
        self.toolButton_open.setIconSize(QtCore.QSize(24, 24))
        self.toolButton_open.setObjectName(_fromUtf8("toolButton_open"))
        self.vLayout_tools.addWidget(self.toolButton_open)
        self.toolButton_copyStructure = QtGui.QToolButton(self.container)
        self.toolButton_copyStructure.setObjectName(_fromUtf8("toolButton_copyStructure"))
        self.vLayout_tools.addWidget(self.toolButton_copyStructure)
        self.progressBar = QtGui.QProgressBar(self.container)
        self.progressBar.setProperty("value", 0)
        self.progressBar.setOrientation(QtCore.Qt.Vertical)
        self.progressBar.setInvertedAppearance(False)
        self.progressBar.setTextDirection(QtGui.QProgressBar.BottomToTop)
        self.progressBar.setObjectName(_fromUtf8("progressBar"))
        self.vLayout_tools.addWidget(self.progressBar)
        self.hLayout_main.addLayout(self.vLayout_tools)
        self.vLayout_main = QtGui.QVBoxLayout()
        self.vLayout_main.setObjectName(_fromUtf8("vLayout_main"))
        self.hLayout_top = QtGui.QHBoxLayout()
        self.hLayout_top.setObjectName(_fromUtf8("hLayout_top"))
        # Tab widget holding the "Coordinates", "Crystal data" and "Log" tabs.
        self.tabWidget = QtGui.QTabWidget(self.container)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
        self.tabWidget.setSizePolicy(sizePolicy)
        self.tabWidget.setMinimumSize(QtCore.QSize(0, 0))
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        # "Coordinates" tab: atom list plus x/y/z/Uiso spin-sliders.
        self.tabParams = QtGui.QWidget()
        self.tabParams.setObjectName(_fromUtf8("tabParams"))
        self.gridLayout_2 = QtGui.QGridLayout(self.tabParams)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        self.gridLayout_params = QtGui.QGridLayout()
        self.gridLayout_params.setObjectName(_fromUtf8("gridLayout_params"))
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.lstWidget_atoms = QtGui.QListWidget(self.tabParams)
        self.lstWidget_atoms.setMinimumSize(QtCore.QSize(30, 100))
        self.lstWidget_atoms.setMaximumSize(QtCore.QSize(90, 16777215))
        self.lstWidget_atoms.setAlternatingRowColors(True)
        self.lstWidget_atoms.setObjectName(_fromUtf8("lstWidget_atoms"))
        self.horizontalLayout.addWidget(self.lstWidget_atoms)
        # DoubleSpinSlider is a custom promoted widget (imported at file end).
        self.dss_x = DoubleSpinSlider(self.tabParams)
        self.dss_x.setEnabled(False)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.dss_x.sizePolicy().hasHeightForWidth())
        self.dss_x.setSizePolicy(sizePolicy)
        self.dss_x.setMinimumSize(QtCore.QSize(30, 100))
        self.dss_x.setObjectName(_fromUtf8("dss_x"))
        self.horizontalLayout.addWidget(self.dss_x)
        self.dss_y = DoubleSpinSlider(self.tabParams)
        self.dss_y.setEnabled(False)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.dss_y.sizePolicy().hasHeightForWidth())
        self.dss_y.setSizePolicy(sizePolicy)
        self.dss_y.setMinimumSize(QtCore.QSize(30, 100))
        self.dss_y.setObjectName(_fromUtf8("dss_y"))
        self.horizontalLayout.addWidget(self.dss_y)
        self.dss_z = DoubleSpinSlider(self.tabParams)
        self.dss_z.setEnabled(False)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.dss_z.sizePolicy().hasHeightForWidth())
        self.dss_z.setSizePolicy(sizePolicy)
        self.dss_z.setMinimumSize(QtCore.QSize(30, 100))
        self.dss_z.setObjectName(_fromUtf8("dss_z"))
        self.horizontalLayout.addWidget(self.dss_z)
        self.dss_uiso = DoubleSpinSlider(self.tabParams)
        self.dss_uiso.setEnabled(False)
        self.dss_uiso.setMinimumSize(QtCore.QSize(30, 100))
        self.dss_uiso.setObjectName(_fromUtf8("dss_uiso"))
        self.horizontalLayout.addWidget(self.dss_uiso)
        # Structure-selection radio group and parameter control buttons.
        self.vLayout_paramButtons = QtGui.QVBoxLayout()
        self.vLayout_paramButtons.setObjectName(_fromUtf8("vLayout_paramButtons"))
        self.groupBox_structureSelect = QtGui.QGroupBox(self.tabParams)
        self.groupBox_structureSelect.setMinimumSize(QtCore.QSize(0, 70))
        self.groupBox_structureSelect.setObjectName(_fromUtf8("groupBox_structureSelect"))
        self.verticalLayoutWidget = QtGui.QWidget(self.groupBox_structureSelect)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 20, 119, 51))
        self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
        self.vLayout_structureSelect = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.vLayout_structureSelect.setMargin(0)
        self.vLayout_structureSelect.setObjectName(_fromUtf8("vLayout_structureSelect"))
        self.radioButton_structure1 = QtGui.QRadioButton(self.verticalLayoutWidget)
        self.radioButton_structure1.setChecked(True)
        self.radioButton_structure1.setObjectName(_fromUtf8("radioButton_structure1"))
        self.vLayout_structureSelect.addWidget(self.radioButton_structure1)
        self.radioButton_structure2 = QtGui.QRadioButton(self.verticalLayoutWidget)
        self.radioButton_structure2.setEnabled(False)
        self.radioButton_structure2.setObjectName(_fromUtf8("radioButton_structure2"))
        self.vLayout_structureSelect.addWidget(self.radioButton_structure2)
        self.vLayout_paramButtons.addWidget(self.groupBox_structureSelect)
        self.hLayout_Cbuttons = QtGui.QHBoxLayout()
        self.hLayout_Cbuttons.setObjectName(_fromUtf8("hLayout_Cbuttons"))
        self.toolButton_updateParams = QtGui.QToolButton(self.tabParams)
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/Media-playback-start.svg")), QtGui.QIcon.Normal, QtGui.QIcon.On)
        self.toolButton_updateParams.setIcon(icon2)
        self.toolButton_updateParams.setObjectName(_fromUtf8("toolButton_updateParams"))
        self.hLayout_Cbuttons.addWidget(self.toolButton_updateParams)
        self.toolButton_randomStructure = QtGui.QToolButton(self.tabParams)
        self.toolButton_randomStructure.setIcon(icon)
        self.toolButton_randomStructure.setObjectName(_fromUtf8("toolButton_randomStructure"))
        self.hLayout_Cbuttons.addWidget(self.toolButton_randomStructure)
        self.vLayout_paramButtons.addLayout(self.hLayout_Cbuttons)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.vLayout_paramButtons.addItem(spacerItem)
        self.checkBox_autoupdateStructure = QtGui.QCheckBox(self.tabParams)
        self.checkBox_autoupdateStructure.setObjectName(_fromUtf8("checkBox_autoupdateStructure"))
        self.vLayout_paramButtons.addWidget(self.checkBox_autoupdateStructure)
        self.checkBox_showLabels = QtGui.QCheckBox(self.tabParams)
        self.checkBox_showLabels.setObjectName(_fromUtf8("checkBox_showLabels"))
        self.vLayout_paramButtons.addWidget(self.checkBox_showLabels)
        self.checkBox_suffix = QtGui.QCheckBox(self.tabParams)
        self.checkBox_suffix.setObjectName(_fromUtf8("checkBox_suffix"))
        self.vLayout_paramButtons.addWidget(self.checkBox_suffix)
        self.horizontalLayout.addLayout(self.vLayout_paramButtons)
        self.horizontalLayout.setStretch(0, 1)
        self.horizontalLayout.setStretch(1, 2)
        self.horizontalLayout.setStretch(2, 2)
        self.horizontalLayout.setStretch(3, 2)
        self.horizontalLayout.setStretch(4, 2)
        self.horizontalLayout.setStretch(5, 2)
        self.gridLayout_params.addLayout(self.horizontalLayout, 0, 0, 1, 1)
        self.gridLayout_2.addLayout(self.gridLayout_params, 0, 0, 1, 1)
        self.tabWidget.addTab(self.tabParams, _fromUtf8(""))
        # "Crystal data" tab: tree view of crystallographic data.
        self.tab_xtalData = QtGui.QWidget()
        self.tab_xtalData.setObjectName(_fromUtf8("tab_xtalData"))
        self.gridLayout_3 = QtGui.QGridLayout(self.tab_xtalData)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.gridLayout_xtalData = QtGui.QGridLayout()
        self.gridLayout_xtalData.setObjectName(_fromUtf8("gridLayout_xtalData"))
        self.treeView_xtalData = QtGui.QTreeView(self.tab_xtalData)
        self.treeView_xtalData.setObjectName(_fromUtf8("treeView_xtalData"))
        self.gridLayout_xtalData.addWidget(self.treeView_xtalData, 0, 0, 1, 1)
        self.gridLayout_3.addLayout(self.gridLayout_xtalData, 0, 0, 1, 1)
        self.tabWidget.addTab(self.tab_xtalData, _fromUtf8(""))
        # "Log" tab: plain text log output.
        self.tab_log = QtGui.QWidget()
        self.tab_log.setObjectName(_fromUtf8("tab_log"))
        self.gridLayout_4 = QtGui.QGridLayout(self.tab_log)
        self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
        self.gridLayout_log = QtGui.QGridLayout()
        self.gridLayout_log.setObjectName(_fromUtf8("gridLayout_log"))
        self.textEdit_log = QtGui.QTextEdit(self.tab_log)
        self.textEdit_log.setObjectName(_fromUtf8("textEdit_log"))
        self.gridLayout_log.addWidget(self.textEdit_log, 0, 0, 1, 1)
        self.gridLayout_4.addLayout(self.gridLayout_log, 0, 0, 1, 1)
        self.tabWidget.addTab(self.tab_log, _fromUtf8(""))
        self.hLayout_top.addWidget(self.tabWidget)
        # 3D structure view (custom Mayavi widget) to the right of the tabs.
        self.QMayavi_structure = MayaviQStructureWidget(self.container)
        self.QMayavi_structure.setObjectName(_fromUtf8("QMayavi_structure"))
        self.hLayout_top.addWidget(self.QMayavi_structure)
        self.hLayout_top.setStretch(0, 1)
        self.hLayout_top.setStretch(1, 1)
        self.vLayout_main.addLayout(self.hLayout_top)
        self.line = QtGui.QFrame(self.container)
        self.line.setFrameShape(QtGui.QFrame.HLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        self.vLayout_main.addWidget(self.line)
        # Bottom row: axis selectors, R-plot controls, top view and 3D R-plot.
        self.hLayout_bottom = QtGui.QHBoxLayout()
        self.hLayout_bottom.setObjectName(_fromUtf8("hLayout_bottom"))
        self.vLayout_mainvis = QtGui.QVBoxLayout()
        self.vLayout_mainvis.setObjectName(_fromUtf8("vLayout_mainvis"))
        self.hLayout_xyselector = QtGui.QHBoxLayout()
        self.hLayout_xyselector.setObjectName(_fromUtf8("hLayout_xyselector"))
        self.label_xaxis = QtGui.QLabel(self.container)
        self.label_xaxis.setObjectName(_fromUtf8("label_xaxis"))
        self.hLayout_xyselector.addWidget(self.label_xaxis)
        self.comboBox_x = QtGui.QComboBox(self.container)
        self.comboBox_x.setObjectName(_fromUtf8("comboBox_x"))
        self.hLayout_xyselector.addWidget(self.comboBox_x)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.hLayout_xyselector.addItem(spacerItem1)
        self.checkBox_fine = QtGui.QCheckBox(self.container)
        self.checkBox_fine.setObjectName(_fromUtf8("checkBox_fine"))
        self.hLayout_xyselector.addWidget(self.checkBox_fine)
        self.checkBox_autoupdateRPlots = QtGui.QCheckBox(self.container)
        self.checkBox_autoupdateRPlots.setObjectName(_fromUtf8("checkBox_autoupdateRPlots"))
        self.hLayout_xyselector.addWidget(self.checkBox_autoupdateRPlots)
        self.toolButton_updateRPlots = QtGui.QToolButton(self.container)
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/Media-playback-start.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.toolButton_updateRPlots.setIcon(icon3)
        self.toolButton_updateRPlots.setObjectName(_fromUtf8("toolButton_updateRPlots"))
        self.hLayout_xyselector.addWidget(self.toolButton_updateRPlots)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.hLayout_xyselector.addItem(spacerItem2)
        self.label_yaxis = QtGui.QLabel(self.container)
        self.label_yaxis.setObjectName(_fromUtf8("label_yaxis"))
        self.hLayout_xyselector.addWidget(self.label_yaxis)
        self.comboBox_y = QtGui.QComboBox(self.container)
        self.comboBox_y.setObjectName(_fromUtf8("comboBox_y"))
        self.hLayout_xyselector.addWidget(self.comboBox_y)
        self.vLayout_mainvis.addLayout(self.hLayout_xyselector)
        self.hLayout_topView = QtGui.QHBoxLayout()
        self.hLayout_topView.setObjectName(_fromUtf8("hLayout_topView"))
        self.dss_dmin = DoubleSpinSlider(self.container)
        self.dss_dmin.setObjectName(_fromUtf8("dss_dmin"))
        self.hLayout_topView.addWidget(self.dss_dmin)
        self.QMayavi_top = MayaviQRPlotWidget(self.container)
        self.QMayavi_top.setMinimumSize(QtCore.QSize(0, 20))
        self.QMayavi_top.setObjectName(_fromUtf8("QMayavi_top"))
        self.hLayout_topView.addWidget(self.QMayavi_top)
        self.hLayout_topView.setStretch(1, 1)
        self.vLayout_mainvis.addLayout(self.hLayout_topView)
        self.vLayout_mainvis.setStretch(1, 1)
        self.hLayout_bottom.addLayout(self.vLayout_mainvis)
        self.line_2 = QtGui.QFrame(self.container)
        self.line_2.setFrameShape(QtGui.QFrame.VLine)
        self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
        self.line_2.setObjectName(_fromUtf8("line_2"))
        self.hLayout_bottom.addWidget(self.line_2)
        self.QMayavi_3D = MayaviQRPlotWidget(self.container)
        self.QMayavi_3D.setObjectName(_fromUtf8("QMayavi_3D"))
        self.hLayout_bottom.addWidget(self.QMayavi_3D)
        self.hLayout_bottom.setStretch(0, 1)
        self.hLayout_bottom.setStretch(2, 1)
        self.vLayout_main.addLayout(self.hLayout_bottom)
        self.vLayout_main.setStretch(0, 1)
        self.vLayout_main.setStretch(2, 1)
        self.hLayout_main.addLayout(self.vLayout_main)
        self.gridLayout.addLayout(self.hLayout_main, 0, 0, 1, 1)
        XtalxplorerMainWindow.setCentralWidget(self.container)
        # Menu bar, status bar and the File/Job/Help actions.
        self.menubar = QtGui.QMenuBar(XtalxplorerMainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1024, 20))
        self.menubar.setObjectName(_fromUtf8("menubar"))
        self.menu_file = QtGui.QMenu(self.menubar)
        self.menu_file.setObjectName(_fromUtf8("menu_file"))
        self.menu_help = QtGui.QMenu(self.menubar)
        self.menu_help.setObjectName(_fromUtf8("menu_help"))
        self.menu_Job = QtGui.QMenu(self.menubar)
        self.menu_Job.setObjectName(_fromUtf8("menu_Job"))
        XtalxplorerMainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(XtalxplorerMainWindow)
        self.statusbar.setObjectName(_fromUtf8("statusbar"))
        XtalxplorerMainWindow.setStatusBar(self.statusbar)
        self.action_about = QtGui.QAction(XtalxplorerMainWindow)
        self.action_about.setObjectName(_fromUtf8("action_about"))
        self.action_open = QtGui.QAction(XtalxplorerMainWindow)
        self.action_open.setIcon(icon1)
        self.action_open.setPriority(QtGui.QAction.HighPriority)
        self.action_open.setObjectName(_fromUtf8("action_open"))
        self.action_quit = QtGui.QAction(XtalxplorerMainWindow)
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/System-log-out-2.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_quit.setIcon(icon4)
        self.action_quit.setObjectName(_fromUtf8("action_quit"))
        self.action_run = QtGui.QAction(XtalxplorerMainWindow)
        self.action_run.setIcon(icon3)
        self.action_run.setObjectName(_fromUtf8("action_run"))
        self.action_pause = QtGui.QAction(XtalxplorerMainWindow)
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/Media-playback-pause.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_pause.setIcon(icon5)
        self.action_pause.setObjectName(_fromUtf8("action_pause"))
        self.action_stop = QtGui.QAction(XtalxplorerMainWindow)
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/Media-playback-stop.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.action_stop.setIcon(icon6)
        self.action_stop.setObjectName(_fromUtf8("action_stop"))
        self.menu_file.addAction(self.action_open)
        self.menu_file.addSeparator()
        self.menu_file.addAction(self.action_quit)
        self.menu_help.addAction(self.action_about)
        self.menu_Job.addAction(self.action_run)
        self.menu_Job.addAction(self.action_pause)
        self.menu_Job.addAction(self.action_stop)
        self.menubar.addAction(self.menu_file.menuAction())
        self.menubar.addAction(self.menu_Job.menuAction())
        self.menubar.addAction(self.menu_help.menuAction())
        self.retranslateUi(XtalxplorerMainWindow)
        self.tabWidget.setCurrentIndex(0)
        # Wire signals to slots on the main window.
        # NOTE(review): 'activated()' on QAction is the legacy Qt3-support
        # signal name; Qt4's canonical equivalent is 'triggered()'.
        QtCore.QObject.connect(self.action_quit, QtCore.SIGNAL(_fromUtf8("activated()")), XtalxplorerMainWindow.close)
        QtCore.QObject.connect(self.action_about, QtCore.SIGNAL(_fromUtf8("activated()")), XtalxplorerMainWindow.about)
        QtCore.QObject.connect(self.action_open, QtCore.SIGNAL(_fromUtf8("activated()")), XtalxplorerMainWindow.browse_structure)
        QtCore.QObject.connect(self.toolButton_open, QtCore.SIGNAL(_fromUtf8("clicked()")), XtalxplorerMainWindow.browse_structure)
        QtCore.QObject.connect(self.lstWidget_atoms, QtCore.SIGNAL(_fromUtf8("currentRowChanged(int)")), XtalxplorerMainWindow.load_atom_params)
        QtCore.QObject.connect(self.toolButton_updateParams, QtCore.SIGNAL(_fromUtf8("clicked()")), XtalxplorerMainWindow.update_params)
        QtCore.QObject.connect(self.checkBox_autoupdateStructure, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), XtalxplorerMainWindow.handle_autoupdate_structure)
        QtCore.QObject.connect(self.checkBox_showLabels, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), XtalxplorerMainWindow.invalidate_cached_params)
        QtCore.QObject.connect(self.checkBox_suffix, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), XtalxplorerMainWindow.invalidate_cached_params)
        QtCore.QObject.connect(self.checkBox_autoupdateRPlots, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), XtalxplorerMainWindow.handle_autoupdate_r_plots)
        QtCore.QObject.connect(self.radioButton_structure1, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), XtalxplorerMainWindow._load_structure)
        QtCore.QObject.connect(self.toolButton_randomStructure, QtCore.SIGNAL(_fromUtf8("clicked()")), XtalxplorerMainWindow.randomise_structure)
        QtCore.QObject.connect(self.toolButton_copyStructure, QtCore.SIGNAL(_fromUtf8("clicked()")), XtalxplorerMainWindow._copy_structure)
        QtCore.QObject.connect(self.toolButton_updateRPlots, QtCore.SIGNAL(_fromUtf8("clicked()")), XtalxplorerMainWindow.update_rplots)
        QtCore.QObject.connect(self.checkBox_fine, QtCore.SIGNAL(_fromUtf8("clicked()")), XtalxplorerMainWindow.update_rplots)
        QtCore.QMetaObject.connectSlotsByName(XtalxplorerMainWindow)

    def retranslateUi(self, XtalxplorerMainWindow):
        """Set all user-visible strings (called once from setupUi)."""
        XtalxplorerMainWindow.setWindowTitle(_translate("XtalxplorerMainWindow", "MainWindow", None))
        self.toolButton_open.setText(_translate("XtalxplorerMainWindow", "open file", None))
        self.toolButton_copyStructure.setText(_translate("XtalxplorerMainWindow", "Copy", None))
        self.groupBox_structureSelect.setTitle(_translate("XtalxplorerMainWindow", "Structure select:", None))
        self.radioButton_structure1.setText(_translate("XtalxplorerMainWindow", "trial structure", None))
        self.radioButton_structure2.setText(_translate("XtalxplorerMainWindow", "target structure", None))
        self.toolButton_updateParams.setText(_translate("XtalxplorerMainWindow", "...", None))
        self.toolButton_randomStructure.setToolTip(_translate("XtalxplorerMainWindow", "randomise structure", None))
        self.toolButton_randomStructure.setText(_translate("XtalxplorerMainWindow", "...", None))
        self.checkBox_autoupdateStructure.setText(_translate("XtalxplorerMainWindow", "Autoupdate -->", None))
        self.checkBox_showLabels.setText(_translate("XtalxplorerMainWindow", "Atom labels", None))
        self.checkBox_suffix.setToolTip(_translate("XtalxplorerMainWindow", "Add suffixes to symmetry generated atom labels", None))
        self.checkBox_suffix.setText(_translate("XtalxplorerMainWindow", "suffix symm.eq.", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabParams), _translate("XtalxplorerMainWindow", "Coordinates", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_xtalData), _translate("XtalxplorerMainWindow", "Crystal data", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_log), _translate("XtalxplorerMainWindow", "Log", None))
        self.label_xaxis.setText(_translate("XtalxplorerMainWindow", "x-axis:", None))
        self.checkBox_fine.setText(_translate("XtalxplorerMainWindow", "fine", None))
        self.checkBox_autoupdateRPlots.setText(_translate("XtalxplorerMainWindow", "Autoupdate", None))
        self.toolButton_updateRPlots.setText(_translate("XtalxplorerMainWindow", "...", None))
        self.label_yaxis.setText(_translate("XtalxplorerMainWindow", "y-axis:", None))
        self.menu_file.setTitle(_translate("XtalxplorerMainWindow", "&File", None))
        self.menu_help.setTitle(_translate("XtalxplorerMainWindow", "&Help", None))
        self.menu_Job.setTitle(_translate("XtalxplorerMainWindow", "&Job", None))
        self.action_about.setText(_translate("XtalxplorerMainWindow", "&About", None))
        self.action_about.setStatusTip(_translate("XtalxplorerMainWindow", "About this programme", None))
        self.action_about.setShortcut(_translate("XtalxplorerMainWindow", "Ctrl+A", None))
        self.action_open.setText(_translate("XtalxplorerMainWindow", "&Open", None))
        self.action_open.setShortcut(_translate("XtalxplorerMainWindow", "Ctrl+O", None))
        self.action_quit.setText(_translate("XtalxplorerMainWindow", "&Quit", None))
        self.action_quit.setStatusTip(_translate("XtalxplorerMainWindow", "Exit this programme", None))
        self.action_quit.setShortcut(_translate("XtalxplorerMainWindow", "Ctrl+Q", None))
        self.action_run.setText(_translate("XtalxplorerMainWindow", "&Run", None))
        self.action_run.setShortcut(_translate("XtalxplorerMainWindow", "Ctrl+R", None))
        self.action_pause.setText(_translate("XtalxplorerMainWindow", "&Pause", None))
        self.action_pause.setShortcut(_translate("XtalxplorerMainWindow", "Ctrl+P", None))
        self.action_stop.setText(_translate("XtalxplorerMainWindow", "&Stop", None))
        self.action_stop.setShortcut(_translate("XtalxplorerMainWindow", "Ctrl+S", None))
from gui.doublespinslider import DoubleSpinSlider
from gui.mayaviqwidget import MayaviQStructureWidget, MayaviQRPlotWidget
import gui_rc
|
jamasi/Xtal-xplore-R
|
gui/XtalxplorerMainWindowUI.py
|
Python
|
agpl-3.0
| 25,560
|
[
"CRYSTAL"
] |
832ae67ec99f037e6d075d3019b99dff9b8ef1547e002227528942d42be6d29c
|
# coding=utf-8
import argparse
import json
from selenium import webdriver, common
from common.config import Config
from common.proxy import Proxy
class Anime():
    """Scraper for a bilibili anime ("bangumi") page using PhantomJS.

    Drives a headless PhantomJS browser via selenium and extracts the
    page metadata (title, tags, counters, CVs, episodes, seasons, ...)
    into a plain dict. Relies on Config for the PhantomJS path/timeouts
    and on Proxy for optional HTTP proxying.
    """

    # Sentinel values for the proxy_idx parameter of fetch_anime.
    RANDOM_PROXY = -1
    NO_PROXY = -2

    def __init__(self):
        pass

    def fetch_anime(self, url, proxy_idx=NO_PROXY):
        """Fetch *url* and return a dict of scraped fields ({} on failure).

        proxy_idx selects a proxy from the Proxy list; NO_PROXY connects
        directly (RANDOM_PROXY is presumably handled inside
        Proxy.get_proxy — TODO confirm).
        """
        exec_path = Config().get_property("path", "phantomjs_exec_path")
        print("Initializing")
        dcap = dict(webdriver.DesiredCapabilities.PHANTOMJS)
        # Set header of request
        dcap["phantomjs.page.settings.userAgent"] = (
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
            "AppleWebKit/537.36 (KHTML, like Gecko) "
            "Chrome/57.0.2987.133 "
            "Safari/537.36"
        )
        # Not to load images
        dcap["phantomjs.page.settings.loadImages"] = False
        if proxy_idx != self.NO_PROXY:
            # Set proxy which is randomly chosen from proxy list
            proxy = Proxy()
            proxy.load()
            service_args = ['--proxy=' + proxy.get_proxy(proxy_idx), '--proxy-type=http']
            anime_driver = webdriver.PhantomJS(executable_path=exec_path, desired_capabilities=dcap, service_args=service_args)
        else:
            anime_driver = webdriver.PhantomJS(executable_path=exec_path, desired_capabilities=dcap)
        print("Load anime page")
        anime_driver.set_page_load_timeout(Config().get_property("time", "page_load_timeout"))
        anime_driver.get(url)
        print("Resolve information")
        # Sanity check: the main wrapper is missing when the page is
        # region-blocked; bail out with an empty result in that case.
        try:
            anime_driver.find_element_by_class_name("main-container-wrapper")
        except common.exceptions.NoSuchElementException as err:  # NOTE(review): err is unused
            print("Cannot resolve information. It is usually caused by overseas visit.")
            return {}
        # Cover image and title.
        bg = anime_driver.find_element_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                'div[@class="bangumi-preview"]/img').get_property("src")
        title = anime_driver.find_element_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                   'div[@class="bangumi-info-r"]/div[@class="b-head"]/'
                                                   'h1[@class="info-title"]').text
        # Genre/style tags.
        tag_ele = anime_driver.find_elements_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                      'div[@class="bangumi-info-r"]/div[@class="b-head"]/'
                                                      'a/span[@class="info-style-item"]')
        tags = []
        for tag in tag_ele:
            tags.append(tag.text)
        # Play / favorite / danmaku counters.
        play = anime_driver.find_element_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                  'div[@class="bangumi-info-r"]/div[@class="info-count"]/'
                                                  'span[contains(@class, "info-count-item-play")]/em').text
        favorite = anime_driver.find_element_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                      'div[@class="bangumi-info-r"]/div[@class="info-count"]/'
                                                      'span[contains(@class, "info-count-item-fans")]/em').text
        danmaku = anime_driver.find_element_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                     'div[@class="bangumi-info-r"]/div[@class="info-count"]/'
                                                     'span[contains(@class, "info-count-item-review")]/em').text
        # Update schedule (joined into one comma-separated string).
        update_date_ele = anime_driver.find_elements_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                              'div[@class="bangumi-info-r"]/div[@class="info-row info-update"]/'
                                                              'em/span')
        update_dates = ""
        for date in update_date_ele:
            update_dates += ", "+date.text
        # There is a Chinese caesura sign before cv's name.
        # Cv stands for character voice whose Japanese name is Seiyuu(声優).
        # Un-clip the CV box so hidden entries become readable.
        js = 'document.getElementsByClassName("info-cv")[0].style.overflow = "visible"'
        anime_driver.execute_script(js)
        cv_ele = anime_driver.find_elements_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                     'div[@class="bangumi-info-r"]/div[@class="info-row info-cv"]/'
                                                     'em/span[@class="info-cv-item"]')
        # Temporarily force hidden CV entries to display:block so .text is
        # populated; remember which ones we changed to restore them later.
        changed_idx = []
        for i in range(len(cv_ele)):
            if cv_ele[i].is_displayed():
                pass
            else:
                changed_idx.append(str(i))
                js = 'document.getElementsByClassName("info-cv-item")['+str(i)+'].style.display = "block"'
                anime_driver.execute_script(js)
                i -= 1  # NOTE(review): no-op — 'i' is rebound by the for loop; dead statement
        cvs = []
        for i in range(len(cv_ele)):
            # print(cv_ele[i].text, cv_ele[i].is_displayed())
            # All but the first entry start with a caesura sign; strip it.
            if i == 0:
                cvs.append(cv_ele[i].text)
            else:
                cvs.append(cv_ele[i].text[1:])
        # Restore the display style of the entries we un-hid above.
        for idx in changed_idx:
            js = 'document.getElementsByClassName("info-cv-item")[' + idx + '].style.display = "inline"'
            anime_driver.execute_script(js)
        desc = anime_driver.find_element_by_xpath('//div[@class="main-inner"]/div[@class="info-content"]/'
                                                  'div[@class="bangumi-info-r"]/div[@class="info-row info-desc-wrp"]/'
                                                  'div[@class="info-desc"]').text
        # Episode list: link, title and (lazy-loaded) thumbnail.
        episode_ele = anime_driver.find_elements_by_xpath('//a[@class="v1-complete-text"]')
        episodes = []
        for episode_link in episode_ele:
            item = {}
            item["link"] = episode_link.get_attribute("href")
            item["title"] = episode_link.get_attribute("title")
            img_ele = episode_link.find_element_by_tag_name("img")
            # Prefer the lazy-load attribute; fall back to the real src.
            item["image"] = img_ele.get_attribute("data-img")
            if item["image"] == "":
                item["image"] = img_ele.get_attribute("src")
            episodes.append(item)
        sponsor = anime_driver.find_element_by_xpath('//div[contains(@class, "sponsor-tosponsor")]'
                                                     '/span').text
        # Visible "similar anime" recommendations.
        similar_ele = anime_driver.find_elements_by_xpath('//li[@class="similar-list-child"]/a/'
                                                          'div[@class="similar-name"]/'
                                                          'div[@class="similar-name-l"]')
        similar = []
        for s in similar_ele:
            if s.is_displayed():
                similar.append(s.text)
        # Expand the (initially collapsed) season selector before reading it.
        js = 'document.getElementsByClassName("v1-bangumi-list-season-wrapper")[0].style.display="block"'
        anime_driver.execute_script(js)
        anime_driver.save_screenshot("page.png")  # NOTE(review): debug leftover — writes to CWD on every fetch
        season_ele = anime_driver.find_element_by_class_name('v1-bangumi-list-season').find_elements_by_tag_name("li")
        seasons = []
        for season in season_ele:
            item = {"name": season.text,
                    "cur": True if season.get_attribute("class") == "cur" else False,
                    "link": "bangumi.bilibili.com/anime/"+season.get_attribute("data-season-id")}
            seasons.append(item)
        anime_driver.close()
        json_obj = {"bg": bg, "title": title, "tags": tags, "play": play, "favorite": favorite,
                    "danmaku": danmaku, "update_date": update_dates, "cvs": cvs, "desc": desc,
                    "episodes": episodes, "sponsor": sponsor, "similar": similar, "seasons": seasons}
        return json_obj

    def display(self, obj):
        """Pretty-print a dict produced by fetch_anime to stdout."""
        print("Background image URL:", obj["bg"])
        print("Anime title:", obj["title"])
        s = ""
        for i in range(len(obj["tags"])):
            if i == 0:
                s += obj["tags"][i]
            else:
                s += ", " + obj["tags"][i]
        print("Anime tags:", s)
        print("Play times:", obj["play"])
        print("Favorite:", obj["favorite"])
        print("Danmaku count:", obj["danmaku"])
        print("Update date:", obj["update_date"])
        s = ""
        for i in range(len(obj["cvs"])):
            if i == 0:
                s += obj["cvs"][i]
            else:
                s += ", " + obj["cvs"][i]
        print("CVs:", s)
        print("Description:", obj["desc"])
        print("Seasons:")
        for season in obj["seasons"]:
            print("Name: %s%s, Link: %s" % (season["name"], "(current)" if season["cur"] else "", season["link"]))
        print("Episodes:")
        for episode in obj["episodes"]:
            print("Title: %s, Link: %s, Image URL: %s" % (episode["title"], episode["link"], episode["image"]))
        print("Sponsors:", obj["sponsor"])
        s = ""
        for i in range(len(obj["similar"])):
            if i == 0:
                s += obj["similar"][i]
            else:
                s += ", " + obj["similar"][i]
        print("Similar animes:", s)
if __name__ == '__main__':
    # Command-line front end: fetch one anime page and print its info.
    # Fixed: the two description fragments previously concatenated without a
    # separating space ("...URL.For other...").
    parser = argparse.ArgumentParser(
        description="Obtain information of anime using its URL. "
                    "For other operation, please use corresponding module.")
    group1 = parser.add_mutually_exclusive_group()
    group1.add_argument("-j", "--json", action="store_true",
                        help="Display result in json format.")
    group1.add_argument("-l", "--list", action="store_true",
                        help="Display result in list format.")
    group2 = parser.add_mutually_exclusive_group()
    group2.add_argument("-rp", "--randproxy", action="store_true",
                        help="Using random proxy server.")
    group2.add_argument("-p", "--proxy", action="store",
                        help="Using proxy server with specific index",
                        metavar="INDEX", type=int)
    parser.add_argument("URL")
    args = parser.parse_args()
    # Removed the leftover debug `print(args)` that leaked parsed arguments
    # into normal output.
    anime = Anime()
    if args.proxy is not None:
        obj = anime.fetch_anime(args.URL, args.proxy)
    elif args.randproxy:
        obj = anime.fetch_anime(args.URL, Anime.RANDOM_PROXY)
    else:
        obj = anime.fetch_anime(args.URL)
    if args.json:
        print(json.dumps(obj, ensure_ascii=False))
    else:
        anime.display(obj)
|
lchloride/bilibili_bangumi
|
anime/parser.py
|
Python
|
apache-2.0
| 10,500
|
[
"VisIt"
] |
1fe914e75b9a52baa6d57c2cd2f35f4362f4b65b605229454933c3775cc05f05
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to create and manage Courses."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import datetime
import os
import urllib
from common import jinja_filters
from common import safe_dom
from controllers import sites
from controllers.utils import ApplicationHandler
from controllers.utils import HUMAN_READABLE_TIME_FORMAT
from controllers.utils import ReflectiveRequestHandler
import jinja2
import jinja2.exceptions
from models import config
from models import courses
from models import custom_modules
from models import jobs
from models import roles
from models import transforms
from models import utils
from models import vfs
from models.models import Student
from course_settings import CourseSettingsHandler
from course_settings import CourseSettingsRESTHandler
import filer
from filer import AssetItemRESTHandler
from filer import AssetUriRESTHandler
from filer import FileManagerAndEditor
from filer import FilesItemRESTHandler
import messages
from peer_review import AssignmentManager
import unit_lesson_editor
from unit_lesson_editor import AssessmentRESTHandler
from unit_lesson_editor import ImportCourseRESTHandler
from unit_lesson_editor import LessonRESTHandler
from unit_lesson_editor import LinkRESTHandler
from unit_lesson_editor import UnitLessonEditor
from unit_lesson_editor import UnitLessonTitleRESTHandler
from unit_lesson_editor import UnitRESTHandler
from google.appengine.api import users
class DashboardHandler(
    CourseSettingsHandler, FileManagerAndEditor, UnitLessonEditor,
    AssignmentManager, ApplicationHandler, ReflectiveRequestHandler):
    """Handles all pages and actions required for managing a course."""

    # Action used when no ?action= query parameter is supplied.
    default_action = 'outline'
    # GET actions dispatched by ReflectiveRequestHandler to get_<action>().
    get_actions = [
        default_action, 'assets', 'settings', 'analytics',
        'edit_basic_settings', 'edit_settings', 'edit_unit_lesson',
        'edit_unit', 'edit_link', 'edit_lesson', 'edit_assessment',
        'add_asset', 'delete_asset', 'import_course', 'edit_assignment']
    # Requests to these handlers automatically go through an XSRF token check
    # that is implemented in ReflectiveRequestHandler.
    post_actions = [
        'compute_student_stats', 'create_or_edit_settings', 'add_unit',
        'add_link', 'add_assessment', 'add_lesson',
        'edit_basic_course_settings', 'add_reviewer', 'delete_reviewer']
@classmethod
def get_child_routes(cls):
"""Add child handlers for REST."""
return [
(AssessmentRESTHandler.URI, AssessmentRESTHandler),
(AssetItemRESTHandler.URI, AssetItemRESTHandler),
(CourseSettingsRESTHandler.URI, CourseSettingsRESTHandler),
(FilesItemRESTHandler.URI, FilesItemRESTHandler),
(AssetItemRESTHandler.URI, AssetItemRESTHandler),
(AssetUriRESTHandler.URI, AssetUriRESTHandler),
(ImportCourseRESTHandler.URI, ImportCourseRESTHandler),
(LessonRESTHandler.URI, LessonRESTHandler),
(LinkRESTHandler.URI, LinkRESTHandler),
(UnitLessonTitleRESTHandler.URI, UnitLessonTitleRESTHandler),
(UnitRESTHandler.URI, UnitRESTHandler),
]
    def can_view(self):
        """Checks if current user has viewing rights."""
        # Dashboard viewing is limited to course admins; there is no
        # separate read-only role.
        return roles.Roles.is_course_admin(self.app_context)

    def can_edit(self):
        """Checks if current user has editing rights."""
        # Edit rights are identical to view rights for this handler.
        return roles.Roles.is_course_admin(self.app_context)
    def get(self):
        """Enforces rights to all GET operations."""
        if not self.can_view():
            # Non-admins are bounced back to the course landing page.
            self.redirect(self.app_context.get_slug())
            return
        # Force reload of properties. It is expensive, but admin deserves it!
        config.Registry.get_overrides(force_update=True)
        return super(DashboardHandler, self).get()

    def post(self):
        """Enforces rights to all POST operations."""
        if not self.can_edit():
            self.redirect(self.app_context.get_slug())
            return
        return super(DashboardHandler, self).post()
    def get_template(self, template_name, dirs):
        """Sets up an environment and Gets jinja template.

        Args:
            template_name: file name of the template to load.
            dirs: extra directories searched before this module's directory.
        """
        jinja_environment = jinja2.Environment(
            autoescape=True, finalize=jinja_filters.finalize,
            loader=jinja2.FileSystemLoader(dirs + [os.path.dirname(__file__)]))
        # js_string escapes values embedded into inline JavaScript.
        jinja_environment.filters['js_string'] = jinja_filters.js_string
        return jinja_environment.get_template(template_name)
def _get_alerts(self):
alerts = []
if not courses.is_editable_fs(self.app_context):
alerts.append('Read-only course.')
if not self.app_context.now_available:
alerts.append('The course is not publicly available.')
return '\n'.join(alerts)
    def _get_top_nav(self):
        """Builds the dashboard top navigation bar as a safe_dom NodeList."""
        current_action = self.request.get('action')
        # (action query value, link caption); '' maps to the default outline.
        nav_mappings = [
            ('', 'Outline'),
            ('assets', 'Assets'),
            ('settings', 'Settings'),
            ('analytics', 'Analytics'),
            ('edit_assignment', 'Peer Review')]
        nav = safe_dom.NodeList()
        for action, title in nav_mappings:
            # Highlight the tab matching the current request's action.
            class_name = 'selected' if action == current_action else ''
            action_href = 'dashboard?action=%s' % action
            nav.append(safe_dom.Element(
                'a', href=action_href, className=class_name).add_text(
                    title))
        if roles.Roles.is_super_admin():
            # Site-wide admin console is only shown to super admins.
            nav.append(safe_dom.Element(
                'a', href='/admin').add_text('Admin'))
        nav.append(safe_dom.Element(
            'a', href='/studentList').add_text('Student List'))
        nav.append(safe_dom.Element(
            'a', href='https://code.google.com/p/course-builder/wiki/Dashboard',
            target='_blank').add_text('Help'))
        return nav
    def render_page(self, template_values):
        """Renders a page using provided template values."""
        template_values['top_nav'] = self._get_top_nav()
        template_values['gcb_course_base'] = self.get_base_href(self)
        # Current user's email plus a logout link in the header.
        template_values['user_nav'] = safe_dom.NodeList().append(
            safe_dom.Text('%s | ' % users.get_current_user().email())
        ).append(
            safe_dom.Element(
                'a', href=users.create_logout_url(self.request.uri)
            ).add_text('Logout'))
        template_values[
            'page_footer'] = 'Created on: %s' % datetime.datetime.now()
        # The view template iterates 'sections'; guarantee it exists.
        if not template_values.get('sections'):
            template_values['sections'] = []
        self.response.write(
            self.get_template('view.html', []).render(template_values))
def format_title(self, text):
"""Formats standard title."""
title = self.app_context.get_environ()['course']['title']
return safe_dom.NodeList().append(
safe_dom.Text('Course Builder ')
).append(
safe_dom.Entity('>')
).append(
safe_dom.Text(' %s ' % title)
).append(
safe_dom.Entity('>')
).append(
safe_dom.Text(' Dashboard ')
).append(
safe_dom.Entity('>')
).append(
safe_dom.Text(' %s' % text)
)
def _get_edit_link(self, url):
return safe_dom.NodeList().append(
safe_dom.Text(' ')
).append(
safe_dom.Element('a', href=url).add_text('Edit')
)
def _get_availability(self, resource):
if not hasattr(resource, 'now_available'):
return safe_dom.Text('')
if resource.now_available:
return safe_dom.Text('')
else:
return safe_dom.NodeList().append(
safe_dom.Text(' ')
).append(
safe_dom.Element(
'span', className='draft-label'
).add_text('(%s)' % unit_lesson_editor.DRAFT_TEXT)
)
    def render_course_outline_to_html(self):
        """Renders course outline to HTML.

        Returns:
            A safe_dom <ul> element listing units (with nested lessons for
            'U' units), or an empty list when the course has no units.
        """
        course = courses.Course(self)
        if not course.get_units():
            return []
        # Edit links are only added when the course file system is writable.
        is_editable = filer.is_editable_fs(self.app_context)
        lines = safe_dom.Element('ul', style='list-style: none;')
        for unit in course.get_units():
            # 'A' = assessment unit.
            if unit.type == 'A':
                li = safe_dom.Element('li').add_child(
                    safe_dom.Element(
                        'a', href='assessment?name=%s' % unit.unit_id,
                        className='strong'
                    ).add_text(unit.title)
                ).add_child(self._get_availability(unit))
                if is_editable:
                    url = self.canonicalize_url(
                        '/dashboard?%s') % urllib.urlencode({
                            'action': 'edit_assessment',
                            'key': unit.unit_id})
                    li.add_child(self._get_edit_link(url))
                lines.add_child(li)
                continue
            # 'O' = link unit pointing at an external resource.
            if unit.type == 'O':
                li = safe_dom.Element('li').add_child(
                    safe_dom.Element(
                        'a', href=unit.href, className='strong'
                    ).add_text(unit.title)
                ).add_child(self._get_availability(unit))
                if is_editable:
                    url = self.canonicalize_url(
                        '/dashboard?%s') % urllib.urlencode({
                            'action': 'edit_link',
                            'key': unit.unit_id})
                    li.add_child(self._get_edit_link(url))
                lines.add_child(li)
                continue
            # 'U' = regular unit with an ordered list of lessons.
            if unit.type == 'U':
                li = safe_dom.Element('li').add_child(
                    safe_dom.Element(
                        'a', href='unit?unit=%s' % unit.unit_id,
                        className='strong').add_text(
                            'Unit %s - %s' % (unit.index, unit.title))
                ).add_child(self._get_availability(unit))
                if is_editable:
                    url = self.canonicalize_url(
                        '/dashboard?%s') % urllib.urlencode({
                            'action': 'edit_unit',
                            'key': unit.unit_id})
                    li.add_child(self._get_edit_link(url))
                ol = safe_dom.Element('ol')
                for lesson in course.get_lessons(unit.unit_id):
                    li2 = safe_dom.Element('li').add_child(
                        safe_dom.Element(
                            'a',
                            href='unit?unit=%s&lesson=%s' % (
                                unit.unit_id, lesson.lesson_id),
                        ).add_text(lesson.title)
                    ).add_child(self._get_availability(lesson))
                    if is_editable:
                        url = self.get_action_url(
                            'edit_lesson', key=lesson.lesson_id)
                        li2.add_child(self._get_edit_link(url))
                    ol.add_child(li2)
                li.add_child(ol)
                lines.add_child(li)
                continue
            # Defensive: any other unit type is a programming error.
            raise Exception('Unknown unit type: %s.' % unit.type)
        return lines
    def get_outline(self):
        """Renders course outline view."""
        # Static links shown in the 'Pages' section.
        pages_info = [
            safe_dom.Element(
                'a', href=self.canonicalize_url('/announcements')
            ).add_text('Announcements'),
            safe_dom.Element(
                'a', href=self.canonicalize_url('/course')
            ).add_text('Course')]
        outline_actions = []
        # Editing actions only appear on a writable file system.
        if filer.is_editable_fs(self.app_context):
            outline_actions.append({
                'id': 'edit_unit_lesson',
                'caption': 'Organize',
                'href': self.get_action_url('edit_unit_lesson')})
            outline_actions.append({
                'id': 'add_lesson',
                'caption': 'Add Lesson',
                'action': self.get_action_url('add_lesson'),
                'xsrf_token': self.create_xsrf_token('add_lesson')})
            outline_actions.append({
                'id': 'add_unit',
                'caption': 'Add Unit',
                'action': self.get_action_url('add_unit'),
                'xsrf_token': self.create_xsrf_token('add_unit')})
            outline_actions.append({
                'id': 'add_link',
                'caption': 'Add Link',
                'action': self.get_action_url('add_link'),
                'xsrf_token': self.create_xsrf_token('add_link')})
            outline_actions.append({
                'id': 'add_assessment',
                'caption': 'Add Assessment',
                'action': self.get_action_url('add_assessment'),
                'xsrf_token': self.create_xsrf_token('add_assessment')})
            # Importing another course is only offered while this one is
            # still empty.
            if not courses.Course(self).get_units():
                outline_actions.append({
                    'id': 'import_course',
                    'caption': 'Import',
                    'href': self.get_action_url('import_course')
                    })
        data_info = self.list_files('/data/')
        sections = [
            {
                'title': 'Pages',
                'description': messages.PAGES_DESCRIPTION,
                'children': pages_info},
            {
                'title': 'Course Outline',
                'description': messages.COURSE_OUTLINE_DESCRIPTION,
                'actions': outline_actions,
                'pre': self.render_course_outline_to_html()},
            {
                'title': 'Data Files',
                'description': messages.DATA_FILES_DESCRIPTION,
                'children': data_info}]
        template_values = {}
        template_values['page_title'] = self.format_title('Outline')
        template_values['alerts'] = self._get_alerts()
        template_values['sections'] = sections
        self.render_page(template_values)
def get_action_url(self, action, key=None, extra_args=None):
args = {'action': action}
if key:
args['key'] = key
if extra_args:
args.update(extra_args)
url = '/dashboard?%s' % urllib.urlencode(args)
return self.canonicalize_url(url)
    def get_settings(self):
        """Renders course settings view."""
        yaml_actions = []
        basic_setting_actions = []

        # Basic course info.
        course_info = [
            'Course Title: %s' % self.app_context.get_environ()['course'][
                'title'],
            'Context Path: %s' % self.app_context.get_slug(),
            'Datastore Namespace: %s' % self.app_context.get_namespace_name()]

        # Course file system.
        fs = self.app_context.fs.impl
        course_info.append(('File System: %s' % fs.__class__.__name__))
        # Only a local read-only FS has a meaningful on-disk home folder.
        if fs.__class__ == vfs.LocalReadOnlyFileSystem:
            course_info.append(('Home Folder: %s' % sites.abspath(
                self.app_context.get_home_folder(), '/')))

        # Enable editing if supported.
        if filer.is_editable_fs(self.app_context):
            yaml_actions.append({
                'id': 'edit_course_yaml',
                'caption': 'Advanced Edit',
                'action': self.get_action_url('create_or_edit_settings'),
                'xsrf_token': self.create_xsrf_token(
                    'create_or_edit_settings')})
            yaml_actions.append({
                'id': 'edit_basic_course_settings',
                'caption': 'Edit',
                'action': self.get_action_url('edit_basic_course_settings'),
                'xsrf_token': self.create_xsrf_token(
                    'edit_basic_course_settings')})

        # Yaml file content, shown one line per child entry.
        yaml_info = []
        yaml_stream = self.app_context.fs.open(
            self.app_context.get_config_filename())
        if yaml_stream:
            yaml_lines = yaml_stream.read().decode('utf-8')
            for line in yaml_lines.split('\n'):
                yaml_info.append(line)
        else:
            yaml_info.append('< empty file >')

        # Prepare template values.
        template_values = {}
        template_values['page_title'] = self.format_title('Settings')
        template_values['page_description'] = messages.SETTINGS_DESCRIPTION
        template_values['sections'] = [
            {
                'title': 'About the Course',
                'description': messages.ABOUT_THE_COURSE_DESCRIPTION,
                'actions': basic_setting_actions,
                'children': course_info},
            {
                'title': 'Contents of course.yaml file',
                'description': messages.CONTENTS_OF_THE_COURSE_DESCRIPTION,
                'actions': yaml_actions,
                'children': yaml_info}]
        self.render_page(template_values)
def list_files(self, subfolder):
"""Makes a list of files in a subfolder."""
home = sites.abspath(self.app_context.get_home_folder(), '/')
files = self.app_context.fs.list(
sites.abspath(self.app_context.get_home_folder(), subfolder))
result = []
for abs_filename in sorted(files):
filename = os.path.relpath(abs_filename, home)
result.append(vfs.AbstractFileSystem.normpath(filename))
return result
    def list_and_format_file_list(
        self, title, subfolder,
        links=False, upload=False, prefix=None, caption_if_empty='< none >',
        edit_url_template=None, sub_title=None):
        """Walks files in folders and renders their names in a section.

        Args:
            title: section heading; a count is appended when non-zero.
            subfolder: folder (relative to course home) to list.
            links: when True, render each file name as a hyperlink.
            upload: when True (and FS is editable), add an Upload button.
            prefix: only include files whose relative path starts with this.
            caption_if_empty: text shown when no files match (None = nothing).
            edit_url_template: '%s' template producing a per-file [Edit] link.
            sub_title: optional blockquote line under the heading.
        """
        items = safe_dom.NodeList()
        count = 0
        for filename in self.list_files(subfolder):
            if prefix and not filename.startswith(prefix):
                continue
            li = safe_dom.Element('li')
            if links:
                li.add_child(safe_dom.Element(
                    'a', href=urllib.quote(filename)).add_text(filename))
                if edit_url_template:
                    edit_url = edit_url_template % urllib.quote(filename)
                    li.add_child(
                        safe_dom.Entity('&nbsp;')
                    ).add_child(
                        safe_dom.Element('a', href=edit_url).add_text('[Edit]'))
            else:
                li.add_text(filename)
            count += 1
            items.append(li)

        output = safe_dom.NodeList()

        # Upload button floats right above the listing.
        if filer.is_editable_fs(self.app_context) and upload:
            output.append(
                safe_dom.Element(
                    'a', className='gcb-button gcb-pull-right',
                    href='dashboard?%s' % urllib.urlencode(
                        {'action': 'add_asset', 'base': subfolder})
                ).add_text('Upload')
            ).append(
                safe_dom.Element('div', style='clear: both; padding-top: 2px;'))
        if title:
            h3 = safe_dom.Element('h3')
            if count:
                h3.add_text('%s (%s)' % (title, count))
            else:
                h3.add_text(title)
            output.append(h3)
        if sub_title:
            output.append(safe_dom.Element('blockquote').add_text(sub_title))
        if items:
            output.append(safe_dom.Element('ol').add_children(items))
        else:
            if caption_if_empty:
                output.append(
                    safe_dom.Element('blockquote').add_text(caption_if_empty))
        return output
    def get_assets(self):
        """Renders course assets view."""

        def inherits_from(folder):
            # Caption used when a folder's contents come from the base course.
            return '< inherited from %s >' % folder

        # One formatted file listing per asset category.
        items = safe_dom.NodeList().append(
            self.list_and_format_file_list(
                'Assessments', '/assets/js/', links=True,
                prefix='assets/js/assessment-')
        ).append(
            self.list_and_format_file_list(
                'Activities', '/assets/js/', links=True,
                prefix='assets/js/activity-')
        ).append(
            self.list_and_format_file_list(
                'Images & Documents', '/assets/img/', links=True, upload=True,
                edit_url_template='dashboard?action=delete_asset&uri=%s',
                sub_title='< inherited from /assets/img/ >',
                caption_if_empty=None)
        ).append(
            self.list_and_format_file_list(
                'Cascading Style Sheets', '/assets/css/', links=True,
                caption_if_empty=inherits_from('/assets/css/'))
        ).append(
            self.list_and_format_file_list(
                'JavaScript Libraries', '/assets/lib/', links=True,
                caption_if_empty=inherits_from('/assets/lib/'))
        ).append(
            self.list_and_format_file_list(
                'View Templates', '/views/',
                caption_if_empty=inherits_from('/views/'))
        )

        template_values = {}
        template_values['page_title'] = self.format_title('Assets')
        template_values['page_description'] = messages.ASSETS_DESCRIPTION
        template_values['main_content'] = items
        self.render_page(template_values)
    def get_markup_for_basic_analytics(self, job):
        """Renders markup for basic enrollment and assessment analytics.

        Args:
            job: the last ComputeStudentStats job record, or None when the
                statistics job has never been run.
        """
        subtemplate_values = {}
        errors = []
        stats_calculated = False
        update_message = safe_dom.Text('')
        if not job:
            update_message = safe_dom.Text(
                'Enrollment/assessment statistics have not been calculated '
                'yet.')
        else:
            if job.status_code == jobs.STATUS_CODE_COMPLETED:
                stats = transforms.loads(job.output)
                stats_calculated = True
                subtemplate_values['enrolled'] = stats['enrollment']['enrolled']
                subtemplate_values['unenrolled'] = (
                    stats['enrollment']['unenrolled'])
                # 'scores' maps assessment name -> (count, score sum); turn
                # each entry into a row with a rounded average.
                scores = []
                total_records = 0
                for key, value in stats['scores'].items():
                    total_records += value[0]
                    avg = round(value[1] / value[0], 1) if value[0] else 0
                    scores.append({'key': key, 'completed': value[0],
                                   'avg': avg})
                subtemplate_values['scores'] = scores
                subtemplate_values['total_records'] = total_records
                update_message = safe_dom.Text("""
                    Enrollment and assessment statistics were last updated at
                    %s in about %s second(s).""" % (
                        job.updated_on.strftime(HUMAN_READABLE_TIME_FORMAT),
                        job.execution_time_sec))
            elif job.status_code == jobs.STATUS_CODE_FAILED:
                update_message = safe_dom.NodeList().append(
                    safe_dom.Text("""
                        There was an error updating enrollment/assessment
                        statistics.  Here is the message:""")
                ).append(
                    safe_dom.Element('br')
                ).append(
                    safe_dom.Element('blockquote').add_child(
                        safe_dom.Element('pre').add_text('\n%s' % job.output)))
            else:
                # Job exists but is neither finished nor failed: in progress.
                update_message = safe_dom.Text(
                    'Enrollment and assessment statistics update started at %s'
                    ' and is running now. Please come back shortly.' %
                    job.updated_on.strftime(HUMAN_READABLE_TIME_FORMAT))
        subtemplate_values['stats_calculated'] = stats_calculated
        subtemplate_values['errors'] = errors
        subtemplate_values['update_message'] = update_message
        return jinja2.utils.Markup(self.get_template(
            'basic_analytics.html', [os.path.dirname(__file__)]
        ).render(subtemplate_values, autoescape=True))
    def get_analytics(self):
        """Renders course analytics view."""
        template_values = {}
        template_values['page_title'] = self.format_title('Analytics')

        # The recalc button is shown unless some job is still running.
        at_least_one_job_exists = False
        at_least_one_job_finished = False

        # Built-in enrollment/assessment statistics first.
        basic_analytics_job = ComputeStudentStats(self.app_context).load()
        stats_html = self.get_markup_for_basic_analytics(basic_analytics_job)
        if basic_analytics_job:
            at_least_one_job_exists = True
            if basic_analytics_job.status_code == jobs.STATUS_CODE_COMPLETED:
                at_least_one_job_finished = True

        # Then markup contributed by registered custom analytics handlers.
        for callback in DashboardRegistry.analytics_handlers:
            handler = callback()
            handler.app_context = self.app_context
            handler.request = self.request
            handler.response = self.response
            job = handler.stats_computer(self.app_context).load()
            stats_html += handler.get_markup(job)
            if job:
                at_least_one_job_exists = True
                if job.status_code == jobs.STATUS_CODE_COMPLETED:
                    at_least_one_job_finished = True

        template_values['main_content'] = jinja2.utils.Markup(self.get_template(
            'analytics.html', [os.path.dirname(__file__)]
        ).render({
            'show_recalculate_button': (
                at_least_one_job_finished or not at_least_one_job_exists),
            'stats_html': stats_html,
            'xsrf_token': self.create_xsrf_token('compute_student_stats'),
        }, autoescape=True))
        self.render_page(template_values)
def post_compute_student_stats(self):
"""Submits a new student statistics calculation task."""
job = ComputeStudentStats(self.app_context)
job.submit()
for callback in DashboardRegistry.analytics_handlers:
job = callback().stats_computer(self.app_context)
job.submit()
self.redirect('/dashboard?action=analytics')
class ScoresAggregator(object):
    """Aggregates scores statistics."""

    def __init__(self):
        # Maps assessment name -> (student_count, sum_of_scores).
        self.name_to_tuple = {}

    def visit(self, student):
        """Folds one student's serialized scores into the running totals."""
        if not student.scores:
            return
        for name, score in transforms.loads(student.scores).items():
            count, score_sum = self.name_to_tuple.get(name, (0, 0))
            self.name_to_tuple[name] = (count + 1, score_sum + float(score))
class EnrollmentAggregator(object):
    """Aggregates enrollment statistics."""

    def __init__(self):
        # Running counts of currently-enrolled and unenrolled students.
        self.enrolled = 0
        self.unenrolled = 0

    def visit(self, student):
        """Counts one student into the enrolled or unenrolled bucket."""
        bucket = 'enrolled' if student.is_enrolled else 'unenrolled'
        setattr(self, bucket, getattr(self, bucket) + 1)
class ComputeStudentStats(jobs.DurableJob):
    """A job that computes student statistics."""

    def run(self):
        """Computes student statistics.

        Returns:
            A dict with 'enrollment' counts and per-assessment 'scores'.
        """
        enrollment_agg = EnrollmentAggregator()
        scores_agg = ScoresAggregator()

        def visit_student(student):
            # Feed each student to both aggregators in a single pass.
            enrollment_agg.visit(student)
            scores_agg.visit(student)

        utils.QueryMapper(
            Student.all(), batch_size=500, report_every=1000).run(
                visit_student)
        return {
            'enrollment': {
                'enrolled': enrollment_agg.enrolled,
                'unenrolled': enrollment_agg.unenrolled},
            'scores': scores_agg.name_to_tuple}
class DashboardRegistry(object):
    """Holds registered handlers that produce HTML code for the dashboard."""
    analytics_handlers = []

    @classmethod
    def add_custom_analytics_section(cls, handler):
        """Adds handlers that provide additional data for the Analytics page."""
        if handler in cls.analytics_handlers:
            # Re-registering the same handler is a no-op.
            return
        # 'enrollment' and 'scores' are produced by the built-in job, so
        # custom handlers may not reuse those names.
        taken_names = set(h.name for h in cls.analytics_handlers)
        taken_names.update(['enrollment', 'scores'])
        if handler.name in taken_names:
            raise Exception('Stats handler name %s is being duplicated.'
                            % handler.name)
        cls.analytics_handlers.append(handler)
# Module instance shared with the Course Builder module registry; populated
# by register_module() at import time.
custom_module = None


def register_module():
    """Registers this module in the registry."""
    # The dashboard UI is served from /dashboard; REST child routes are
    # contributed separately via DashboardHandler.get_child_routes().
    dashboard_handlers = [('/dashboard', DashboardHandler)]

    global custom_module
    custom_module = custom_modules.Module(
        'Course Dashboard',
        'A set of pages for managing Course Builder course.',
        [], dashboard_handlers)
    return custom_module
|
graemian/ami-mooc-pilot
|
modules/dashboard/dashboard.py
|
Python
|
apache-2.0
| 29,317
|
[
"VisIt"
] |
c5ad63b33b79c4a29799fd2ce8ea3606db40839a974102d033bd3e4c47dfc0a0
|
from cefpython3 import cefpython as cef
import re, os, platform
import sys
import json
from threading import Thread
from subprocess import Popen, PIPE
def get_python_path():
    """Returns the path of the python interpreter next to the stdlib dir.

    NOTE(review): the "/python" suffix uses a forward slash regardless of
    platform — confirm this is intended for the Popen invocation on Windows.
    """
    stdlib_dir = os.path.abspath(os.path.dirname(os.__file__))
    return os.path.split(stdlib_dir)[0] + "/python"
def get_application_path(target=None):
    """Resolves *target* relative to the application's own directory.

    Absolute paths (leading slash or backslash) and URL-like targets (a
    "scheme:" prefix) are returned unchanged, stringified. Relative targets
    are joined onto the application directory; trailing slashes are removed.
    The base directory is detected once and cached on the function object.
    """
    if not hasattr(get_application_path, "dir"):
        if hasattr(sys, "frozen"):
            # Frozen executable (e.g. PyInstaller): next to the binary.
            base_dir = os.path.dirname(sys.executable)
        elif "__file__" in globals():
            base_dir = os.path.dirname(os.path.realpath(__file__))
        else:
            base_dir = os.getcwd()
        get_application_path.dir = base_dir
    # If file is None return current directory without trailing slash.
    target = "" if target is None else target
    is_absolute = target.startswith("/") or target.startswith("\\")
    looks_like_url = re.search(r"^[\w-]+:", target) is not None
    if is_absolute or looks_like_url:
        return str(target)
    path = get_application_path.dir + os.sep + target
    if platform.system() == "Windows":
        # Collapse any run of mixed separators into the native one.
        path = re.sub(r"[/\\]+", re.escape(os.sep), path)
    return re.sub(r"[/\\]+$", "", path)
class BrowserController:
    """Exposes in-page find/search controls to JavaScript via cef bindings."""

    def __init__(self, browser):
        # The cef browser instance this controller drives.
        self.browser = browser

    def search(self, text):
        """Starts highlighting matches of *text* in the page."""
        identifier, forward, match_case, find_next = 123, True, False, False
        self.browser.Find(identifier, text, forward, match_case, find_next)

    def stop_search(self):
        """Stops an active find session, clearing the selection."""
        self.browser.StopFinding(True)
def send_msg(msg):
    """Writes *msg* plus a newline to stdout and flushes immediately.

    The flush matters: the parent process reads these lines from a pipe.
    """
    sys.stdout.write("%s\n" % (msg,))
    sys.stdout.flush()
def set_global_handler():
    """Installs a global OnAfterCreated callback for all cef browsers."""
    def on_after_create(browser, **_):
        # Every newly created window/popup gets the app title and its own
        # JavaScript bindings (controller object + send_msg helper).
        cef.WindowUtils.SetTitle(browser, 'Kam1n0')
        bindings = cef.JavascriptBindings(
            bindToFrames=True, bindToPopups=False)
        bindings.SetObject("browser_controller", BrowserController(browser))
        bindings.SetFunction("send_msg", send_msg)
        browser.SetJavascriptBindings(bindings)

    cef.SetGlobalClientCallback("OnAfterCreated", on_after_create)
def set_client_handlers(browser, request_url, session):
    """Attaches a ClientHandler carrying the session cookie to *browser*."""
    handler = ClientHandler(request_url, session)
    browser.SetClientHandler(handler)
def set_javascript_bindings(browser, request_url, request_method,
                            request_param, external_data):
    """Exposes the request description and helper objects to page JS.

    Args:
        browser: cef browser to bind into.
        request_url: target URL string exposed as window.url.
        request_method: HTTP method string exposed as window.method.
        request_param: JSON string of request parameters (None -> '{}').
        external_data: JSON string of extra payload (None -> '{}').
    """
    # The page-side JS expects JSON strings, so default missing payloads
    # to an empty JSON object rather than None.
    request_param = '{}' if request_param is None else request_param
    external_data = '{}' if external_data is None else external_data
    bindings = cef.JavascriptBindings(
        bindToFrames=True, bindToPopups=False)
    bindings.SetProperty("url", str(request_url))
    bindings.SetProperty("method", str(request_method))
    bindings.SetProperty("param", request_param)
    bindings.SetProperty("external", external_data)
    bindings.SetFunction("send_msg", send_msg)
    bindings.SetObject("browser_controller", BrowserController(browser))
    browser.SetJavascriptBindings(bindings)
class CookieVisitor:
    """Debug visitor that logs each cookie seen by the cef cookie manager."""

    def Visit(self, cookie, count, total, delete_cookie_out):
        """Prints one cookie; returning True continues the iteration."""
        if count == 0:
            # Header line, printed once at the start of the walk.
            print("\n[wxpython.py] CookieVisitor.Visit(): total cookies: %s"
                  % total)
        print("\n[wxpython.py] CookieVisitor.Visit(): cookie:")
        print("    " + str(cookie.Get()))
        # True to continue visiting cookies
        return True
class ClientHandler(object):
    """cef client handler that registers the JSESSIONID cookie for a URL."""

    def __init__(self, request_url, session):
        # URL the session cookie is registered against.
        self.url = request_url
        # Server session id (JSESSIONID value); may be None or blank.
        self.session = session

    def GetCookieManager(self, **_):
        # set cookie in global manager.
        # return None -> all browsers share the same global manager.
        global_manager = cef.CookieManager().GetGlobalManager()
        # global_manager.VisitAllCookies(CookieVisitor())
        if self.session is not None and len(self.session.strip()) > 0:
            # Empty domain means the cookie applies to the cookie's URL host.
            cookie = cef.Cookie()
            cookie.SetDomain('')
            cookie.SetName('JSESSIONID')
            cookie.SetValue(self.session)
            cookie.SetPath('/')
            global_manager = cef.CookieManager().GetGlobalManager()
            global_manager.SetCookie(self.url, cookie)
        return None
def create_form(request_url, request_method='get', request_param=None,
                external_data=None, session=None):
    """Opens the cef window for operations.html and blocks until it closes.

    Intended to run in a dedicated child process: after the message loop
    ends it terminates the whole process with os._exit(1).
    """
    sys.excepthook = cef.ExceptHook
    settings = {
        "product_version": "utilities/2.0.0",
        "user_agent": "utilities/2.0.0",
        'unique_request_context_per_browser': True,
        'persist_session_cookies': False,
        'cache_path': os.path.expanduser("~") + "/Kam1n0/client-web-cache/"
    }
    browser_settings = {
        # enable cross-site scripting. since our request sent from local
        # but the cookie is from remote (different origin)
        "web_security_disabled": True
    }
    cef.Initialize(settings=settings)
    set_global_handler()
    browser = cef.CreateBrowserSync(
        settings=browser_settings,
        url="file://" + get_application_path("resources/operations.html"),
        window_title="Kam1n0")
    set_client_handlers(browser, request_url, session)
    set_javascript_bindings(browser, request_url, request_method,
                            request_param, external_data)
    # Blocks until the window is closed.
    cef.MessageLoop()
    cef.Shutdown()
    # Hard-exit: skip interpreter cleanup; this process exists only for
    # the browser window.
    os._exit(1)
def read_from_std_in():
    """Reads all of stdin and parses it as a single JSON document."""
    # Reading the whole stream at once is equivalent to the original
    # line-by-line concatenation (newlines are preserved either way).
    return json.loads(sys.stdin.read())
def parse():
    """Entry point for child-process mode: argv carries url/method/session,
    stdin carries the JSON payload with 'param' and 'external' keys."""
    cli_args = sys.argv[1:]
    payload = read_from_std_in()
    create_form(request_url=cli_args[0],
                request_method=cli_args[1],
                request_param=payload['param'],
                external_data=payload['external'],
                session=cli_args[2])
def create_form_process(request_url, request_method='get', request_param=None,
                        external_data=None, session=None, queue=None):
    """Spawns RequestPage.py as a child process and forwards its output.

    The request payload is piped to the child as JSON on stdin; the child's
    non-empty stdout lines (emitted through send_msg) are put on *queue*
    as bytes.

    Args:
        request_url: URL the child window should target.
        request_method: HTTP method string, defaults to 'get'.
        request_param: dict of request parameters (None -> {}).
        external_data: dict of extra payload (None -> {}).
        session: JSESSIONID value forwarded to the child.
        queue: queue receiving the child's output lines; required whenever
            the child produces any output.
    """
    if request_param is None:
        request_param = dict()
    if external_data is None:
        external_data = dict()
    param = {'param': request_param, 'external': external_data}
    cmd = [get_python_path(),
           os.path.join(get_application_path(), 'RequestPage.py'),
           request_url,
           request_method,
           session]
    # NOTE(review): shell=True combined with a list argument is only
    # meaningful on Windows; on POSIX the list elements become arguments to
    # /bin/sh. Confirm this code path is Windows-only before changing it.
    p = Popen(cmd,
              shell=True,
              stdin=PIPE,
              stdout=PIPE,
              stderr=PIPE,
              bufsize=1
              )
    # The pipes are binary (no text mode requested), so the JSON payload
    # must be encoded; the original wrote a str, which fails on Python 3.
    p.stdin.write(json.dumps(param).encode('utf-8'))
    p.stdin.close()
    # Stream stdout line by line until EOF (b'' sentinel matches the
    # binary pipe).
    for line in iter(p.stdout.readline, b''):
        stripped = line.rstrip()
        if len(stripped) > 0:
            queue.put(stripped)
    p.stdout.close()
def test():
    """Manual smoke test: opens the user home page with a fixed session id."""
    # Hard-coded local dev server endpoint and JSESSIONID value.
    # NOTE(review): queue is left at its None default; create_form_process
    # will fail with AttributeError on any child output — confirm intended.
    create_form_process(request_url='http://127.0.0.1:8571/userHome',
                        request_method='get',
                        request_param=None,
                        external_data=None,
                        session='2694D98ED7F4CD02E6332CE1292FA6F5')
if __name__ == '__main__':
    # Invoked as a child process by create_form_process(); arguments arrive
    # via argv and the JSON payload via stdin.
    parse()
|
McGill-DMaS/Kam1n0-Plugin-IDA-Pro
|
ida-plugin/Kam1n0/utilities/RequestPage.py
|
Python
|
apache-2.0
| 6,740
|
[
"VisIt"
] |
7eca72ba851daae8785957137d9db7b7580ff4a9f73f034f9f80c99f5e0339f8
|
# original from
# http://tech.octopus.energy/news/2016/01/21/testing-for-missing-migrations-in-django.html
from io import StringIO
from django.core.management import call_command
from django.test import TestCase, override_settings
class MigrationTestCase(TestCase):
    """Fails when model changes exist without a corresponding migration."""

    @override_settings(MIGRATION_MODULES={})
    def test_for_missing_migrations(self):
        captured = StringIO()
        try:
            # --check + --dry-run: exit code 1 signals pending migrations.
            call_command(
                'makemigrations',
                interactive=False,
                dry_run=True,
                stdout=captured,
                check_changes=True,
            )
        except SystemExit as exc:
            exit_code = str(exc)
        else:
            # the "no changes" exit code is 0
            exit_code = '0'
        if exit_code == '1':
            self.fail('There are missing migrations:\n {}'.format(
                captured.getvalue()))
|
webu/django-filer
|
tests/test_migrations.py
|
Python
|
bsd-3-clause
| 885
|
[
"Octopus"
] |
7f17b12f882f804ec7d9c577c886daf3d224d58d2ad31cd57489d330d6c8fc0a
|
"""
Rewrite of rgFastQC.py for Version 0.11.2 of FastQC.
Changes implemented from tmcgowan at
https://testtoolshed.g2.bx.psu.edu/view/tmcgowan/fastqc
and iuc at https://toolshed.g2.bx.psu.edu/view/iuc/fastqc
with minor changes and bug fixes
SYNOPSIS
rgFastQC.py -i input_file -j input_file.name -o output_html_file [-d output_directory]
[-f fastq|bam|sam] [-n job_name] [-c contaminant_file] [-e fastqc_executable]
EXAMPLE (generated by Galaxy)
rgFastQC.py -i path/dataset_1.dat -j 1000gsample.fastq -o path/dataset_3.dat -d path/job_working_directory/subfolder
-f fastq -n FastQC -c path/dataset_2.dat -e fastqc
"""
import bz2
import glob
import gzip
import mimetypes
import optparse
import os
import re
import shutil
import subprocess
import tempfile
import zipfile
class FastQCRunner(object):
    """Wraps configuration and invocation of the FastQC executable in Galaxy."""

    def __init__(self, opts=None):
        '''
        Initializes an object to run FastQC in Galaxy. To start the process, use the function run_fastqc()
        '''
        # Check whether the options are specified and saves them into the object
        assert opts is not None
        self.opts = opts
def prepare_command_line(self):
'''
Develops the Commandline to run FastQC in Galaxy
'''
# Check whether a given file compression format is valid
# This prevents uncompression of already uncompressed files
infname = self.opts.inputfilename
linf = infname.lower()
trimext = False
# decompression at upload currently does NOT remove this now bogus ending - fastqc will barf
# patched may 29 2013 until this is fixed properly
type = mimetypes.guess_type(self.opts.input)
if linf.endswith('.gz') or linf.endswith('.gzip') or type[-1] == "gzip":
f = gzip.open(self.opts.input)
try:
f.readline()
except:
trimext = True
f.close()
elif linf.endswith('bz2'):
f = bz2.BZ2File(self.opts.input, 'r')
try:
f.readline()
except:
trimext = True
f.close()
elif linf.endswith('.zip'):
if not zipfile.is_zipfile(self.opts.input):
trimext = True
if trimext:
f = open(self.opts.input)
try:
f.readline()
except:
raise Exception("Input file corruption, could not identify the filetype")
infname = os.path.splitext(infname)[0]
# Replace unwanted or problematic charaters in the input file name
self.fastqinfilename = re.sub(r'[^a-zA-Z0-9_\-\.]', '_', os.path.basename(infname))
# check that the symbolic link gets a proper ending, fastqc seems to ignore the given format otherwise
if 'fastq' in self.opts.informat:
# with fastq the .ext is ignored, but when a format is actually passed it must comply with fastqc's
# accepted formats..
self.opts.informat = 'fastq'
elif not self.fastqinfilename.endswith(self.opts.informat):
self.fastqinfilename += '.%s' % self.opts.informat
# Build the Commandline from the given parameters
command_line = [opts.executable, '--outdir %s' % self.opts.outputdir]
if self.opts.contaminants is not None:
command_line.append('--contaminants %s' % self.opts.contaminants)
if self.opts.limits is not None:
command_line.append('--limits %s' % self.opts.limits)
command_line.append('--quiet')
command_line.append('--extract') # to access the output text file
if type[-1] != "gzip":
command_line.append('-f %s' % self.opts.informat)
else:
self.fastqinfilename += ".gz"
command_line.append(self.fastqinfilename)
self.command_line = ' '.join(command_line)
def copy_output_file_to_dataset(self):
'''
Retrieves the output html and text files from the output directory and copies them to the Galaxy output files
'''
# retrieve html file
result_file = glob.glob(self.opts.outputdir + '/*html')
with open(result_file[0], 'rb') as fsrc:
with open(self.opts.htmloutput, 'wb') as fdest:
shutil.copyfileobj(fsrc, fdest)
# retrieve text file
text_file = glob.glob(self.opts.outputdir + '/*/fastqc_data.txt')
with open(text_file[0], 'rb') as fsrc:
with open(self.opts.textoutput, 'wb') as fdest:
shutil.copyfileobj(fsrc, fdest)
def run_fastqc(self):
'''
Executes FastQC. Make sure the mandatory import parameters input, inputfilename, outputdir and htmloutput have been specified in the options
'''
# Create a log file
dummy, tlog = tempfile.mkstemp(prefix='rgFastQC', suffix=".log", dir=self.opts.outputdir)
sout = open(tlog, 'w')
self.prepare_command_line()
sout.write(self.command_line)
sout.write('\n')
sout.write("Creating symlink\n") # between the input (.dat) file and the given input file name
os.symlink(self.opts.input, self.fastqinfilename)
sout.write("check_call\n")
subprocess.check_call(self.command_line, shell=True)
sout.write("Copying working %s file to %s \n" % (self.fastqinfilename, self.opts.htmloutput))
self.copy_output_file_to_dataset()
sout.write("Finished")
sout.close()
if __name__ == '__main__':
    # Parse the Galaxy-supplied command line (see the module docstring for
    # an example invocation generated by Galaxy).
    op = optparse.OptionParser()
    op.add_option('-i', '--input', default=None)
    op.add_option('-j', '--inputfilename', default=None)
    op.add_option('-o', '--htmloutput', default=None)
    op.add_option('-t', '--textoutput', default=None)
    op.add_option('-d', '--outputdir', default="/tmp/shortread")
    op.add_option('-f', '--informat', default='fastq')
    op.add_option('-n', '--namejob', default='rgFastQC')
    op.add_option('-c', '--contaminants', default=None)
    op.add_option('-l', '--limits', default=None)
    op.add_option('-e', '--executable', default='fastqc')
    opts, args = op.parse_args()
    # input, inputfilename and htmloutput are mandatory
    assert opts.input is not None
    assert opts.inputfilename is not None
    assert opts.htmloutput is not None
    if not os.path.exists(opts.outputdir):
        os.makedirs(opts.outputdir)
    fastqc_runner = FastQCRunner(opts)
    fastqc_runner.run_fastqc()
|
yhoogstrate/tools-iuc
|
tools/fastqc/rgFastQC.py
|
Python
|
mit
| 6,425
|
[
"Galaxy"
] |
695ccaceff09549b23b2f8d741e6b3e71075db3c7b8a9b314dc3b8093ce97eb0
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2009 Gary Burton
# Copyright (C) 2010 Craig J. Anderson
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2011 Matt Keenan (matt.keenan@gmail.com)
# Copyright (C) 2013-2014 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Reports/Text Reports/Descendant Report.
"""
#------------------------------------------------------------------------
#
# standard python modules
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
#
# GRAMPS modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from gramps.gen.plug.docgen import (IndexMark, FontStyle, ParagraphStyle,
FONT_SANS_SERIF, INDEX_TYPE_TOC,
PARA_ALIGN_CENTER)
from gramps.gen.plug.menu import (NumberOption, PersonOption, BooleanOption,
EnumeratedListOption)
from gramps.gen.errors import ReportError
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import utils as ReportUtils
from gramps.gen.plug.report import MenuReportOptions
from gramps.gen.plug.report import stdoptions
from gramps.gen.sort import Sort
from gramps.gen.utils.db import (get_birth_or_fallback, get_death_or_fallback,
get_marriage_or_fallback,
get_divorce_or_fallback)
#------------------------------------------------------------------------
#
# PrintSimple
# Simple numbering system
#
#------------------------------------------------------------------------
class PrintSimple():
    """Simple descendant numbering: "1.", "2-1.", "2-2.", ..."""

    def __init__(self, showdups):
        # showdups: when True emit plain per-level numbers only
        self.showdups = showdups
        # per-level child counters; key 0 seeds the top level
        self.num = {0: 1}

    def number(self, level):
        """Return the display number for a person at the given level."""
        if self.showdups:
            # Just show original simple numbering
            return "%d." % level
        label = str(level)
        if level > 1:
            label += "-" + str(self.num[level - 1])
        label += "."
        # reset this level's counter and advance the parent's
        self.num[level] = 1
        self.num[level - 1] += 1
        return label
#------------------------------------------------------------------------
#
# PrintVlliers
# de_Villiers_Pama numbering system
#
#------------------------------------------------------------------------
class PrintVilliers():
    """de Villiers/Pama descendant numbering: "a.", "b1.", "b2.", ..."""

    def __init__(self):
        # one letter per generation level
        self.pama = 'abcdefghijklmnopqrstuvwxyz'
        # per-level child counters; key 0 seeds the top level
        self.num = {0: 1}

    def number(self, level):
        """Return the display letter (plus child counter) for this level."""
        label = self.pama[level - 1]
        if level > 1:
            label += str(self.num[level - 1])
        label += "."
        # reset this level's counter and advance the parent's
        self.num[level] = 1
        self.num[level - 1] += 1
        return label
#------------------------------------------------------------------------
#
# class PrintMeurgey
# Meurgey_de_Tupigny numbering system
#
#------------------------------------------------------------------------
class PrintMeurgey():
    """Meurgey de Tupigny numbering: Roman generation + child counter."""

    def __init__(self):
        # childnum[level-1] holds the next child number for each level;
        # the "" seed reproduces the historical level-1 output exactly
        self.childnum = [""]

    def number(self, level):
        """Return the display number for a person at the given level."""
        dash = "" if level == 1 else "-"
        if len(self.childnum) < level:
            self.childnum.append(1)
        label = (ReportUtils.roman(level) + dash +
                 str(self.childnum[level - 1]) + ".")
        if level > 1:
            self.childnum[level - 1] += 1
        return label
#------------------------------------------------------------------------
#
# Printinfo
#
#------------------------------------------------------------------------
class Printinfo():
    """
    A base class used to help make the individual numbering system classes.
    This class must first be initialized with set_class_vars
    """
    def __init__(self, doc, database, numbering, showmarriage, showdivorce,\
                 name_display, rlocale):
        #classes
        self._name_display = name_display
        self.doc = doc
        self.database = database
        self.numbering = numbering
        #variables
        self.showmarriage = showmarriage
        self.showdivorce = showdivorce
        self._ = rlocale.translation.sgettext # needed for English
        self._get_date = rlocale.get_date
    def __date_place(self,event):
        # Format an event as "<abbrev> <date> - <place>" (place optional);
        # returns "" when there is no event.
        if event:
            date = self._get_date(event.get_date_object())
            place_handle = event.get_place_handle()
            if place_handle:
                place = self.database.get_place_from_handle(
                    place_handle).get_title()
                return("%(event_abbrev)s %(date)s - %(place)s" % {
                    'event_abbrev': event.type.get_abbreviation(self._),
                    'date' : date,
                    'place' : place,
                    })
            else:
                return("%(event_abbrev)s %(date)s" % {
                    'event_abbrev': event.type.get_abbreviation(self._),
                    'date' : date
                    })
        return ""
    def dump_string(self, person, family=None):
        # Write " (birth, death[, marriage][, divorce])" for the person;
        # marriage/divorce come from the given family and are gated by the
        # showmarriage/showdivorce options.
        string = self.__date_place(
            get_birth_or_fallback(self.database, person)
        )
        tmp = self.__date_place(get_death_or_fallback(self.database, person))
        if string and tmp:
            string += ", "
        string += tmp
        if string:
            string = " (" + string + ")"
        if family and self.showmarriage:
            tmp = self.__date_place(get_marriage_or_fallback(self.database,
                                                             family))
            if tmp:
                string += ", " + tmp
        if family and self.showdivorce:
            tmp = self.__date_place(get_divorce_or_fallback(self.database,
                                                            family))
            if tmp:
                string += ", " + tmp
        self.doc.write_text(string)
    def print_person(self, level, person):
        # Emit one numbered person paragraph and return its display number
        # so callers can build back-references.
        display_num = self.numbering.number(level)
        self.doc.start_paragraph("DR-Level%d" % min(level, 32), display_num)
        mark = ReportUtils.get_person_mark(self.database, person)
        self.doc.write_text(self._name_display.display(person), mark)
        self.dump_string(person)
        self.doc.end_paragraph()
        return display_num
    def print_spouse(self, level, spouse_handle, family_handle):
        #Currently print_spouses is the same for all numbering systems.
        if spouse_handle:
            spouse = self.database.get_person_from_handle(spouse_handle)
            mark = ReportUtils.get_person_mark(self.database, spouse)
            self.doc.start_paragraph("DR-Spouse%d" % min(level, 32))
            name = self._name_display.display(spouse)
            self.doc.write_text(
                self._("sp. %(spouse)s") % {'spouse':name}, mark)
            self.dump_string(spouse, family_handle)
            self.doc.end_paragraph()
        else:
            # unknown spouse: emit the same paragraph with a placeholder
            self.doc.start_paragraph("DR-Spouse%d" % min(level, 32))
            self.doc.write_text(
                self._("sp. %(spouse)s") % {'spouse':'Unknown'})
            self.doc.end_paragraph()
    def print_reference(self, level, person, display_num):
        #Person and their family have already been printed so
        #print reference here
        if person:
            mark = ReportUtils.get_person_mark(self.database, person)
            self.doc.start_paragraph("DR-Spouse%d" % min(level, 32))
            name = self._name_display.display(person)
            self.doc.write_text(
                self._("sp. see %(reference)s : %(spouse)s") %
                {'reference':display_num, 'spouse':name}, mark)
            self.doc.end_paragraph()
#------------------------------------------------------------------------
#
# RecurseDown
#
#------------------------------------------------------------------------
class RecurseDown():
    """
    A simple object to recurse from a person down through their descendants
    The arguments are:
    max_generations: The max number of generations
    database: The database object
    objPrint: A Printinfo derived class that prints person
    information on the report
    """
    def __init__(self, max_generations, database, objPrint, showdups, rlocale):
        self.max_generations = max_generations
        self.database = database
        self.objPrint = objPrint
        self.showdups = showdups
        # maps person handle -> display number already printed, used to
        # emit "see ..." references instead of duplicate subtrees
        self.person_printed = {}
        self._ = rlocale.translation.sgettext # needed for English
    def recurse(self, level, person, curdepth):
        # Print this person, then each family: spouse (or a reference if
        # already printed and duplicates are suppressed) and children,
        # recursing one level deeper per generation.
        person_handle = person.get_handle()
        display_num = self.objPrint.print_person(level, person)
        if curdepth is None:
            ref_str = display_num
        else:
            ref_str = curdepth + " " + display_num
        if person_handle not in self.person_printed:
            self.person_printed[person_handle] = ref_str
        for family_handle in person.get_family_handle_list():
            family = self.database.get_family_from_handle(family_handle)
            spouse_handle = ReportUtils.find_spouse(person, family)
            if not self.showdups and spouse_handle in self.person_printed:
                # Just print a reference
                spouse = self.database.get_person_from_handle(spouse_handle)
                self.objPrint.print_reference(level, spouse,
                                              self.person_printed[spouse_handle])
            else:
                self.objPrint.print_spouse(level, spouse_handle, family)
                if spouse_handle:
                    # NOTE(review): this translates the already-formatted
                    # string, so the gettext lookup key varies per person
                    # and will rarely match a catalog entry — confirm
                    # whether self._("%s sp.") % ref_str was intended.
                    spouse_num = self._("%s sp." % (ref_str))
                    self.person_printed[spouse_handle] = spouse_num
                if level >= self.max_generations:
                    continue
                childlist = family.get_child_ref_list()[:]
                for child_ref in childlist:
                    child = self.database.get_person_from_handle(child_ref.ref)
                    self.recurse(level+1, child, ref_str)
#------------------------------------------------------------------------
#
# DescendantReport
#
#------------------------------------------------------------------------
class DescendantReport(Report):
    """Produce a text Descendant Report for a chosen center person."""

    def __init__(self, database, options, user):
        """
        Create the DescendantReport object that produces the report.
        The arguments are:
        database - the GRAMPS database instance
        options - instance of the Options class for this report
        user - a gen.user.User() instance
        This report needs the following parameters (class variables)
        that come in the options class.
        gen - Maximum number of generations to include.
        name_format - Preferred format to display names
        dups - Whether to include duplicate descendant trees
        incl_private - Whether to include private data
        """
        Report.__init__(self, database, options, user)
        menu = options.menu
        stdoptions.run_private_data_option(self, menu)
        self.max_generations = menu.get_option_by_name('gen').get_value()
        pid = menu.get_option_by_name('pid').get_value()
        self.center_person = self.database.get_person_from_gramps_id(pid)
        if self.center_person is None:
            raise ReportError(_("Person %s is not in the Database") % pid)
        # NOTE(review): 'sort' is never used below — kept for parity with
        # the original; confirm before removing.
        sort = Sort(self.database)
        lang = menu.get_option_by_name('trans').get_value()
        self._locale = self.set_locale(lang)
        # Initialize the Printinfo class with the selected numbering system
        self._showdups = menu.get_option_by_name('dups').get_value()
        numbering = menu.get_option_by_name('numbering').get_value()
        if numbering == "Simple":
            obj = PrintSimple(self._showdups)
        elif numbering == "de Villiers/Pama":
            obj = PrintVilliers()
        elif numbering == "Meurgey de Tupigny":
            obj = PrintMeurgey()
        else:
            # BUG FIX: the original formatted this message with
            # self.numbering, an attribute that is never set, so the
            # intended error was masked by a misleading AttributeError.
            raise AttributeError("no such numbering: '%s'" % numbering)
        marrs = menu.get_option_by_name('marrs').get_value()
        divs = menu.get_option_by_name('divs').get_value()
        stdoptions.run_name_format_option(self, menu)
        self.objPrint = Printinfo(self.doc, self.database, obj, marrs, divs,
                                  self._name_display, self._locale)

    def write_report(self):
        """Write the report title, then recurse through the descendants."""
        self.doc.start_paragraph("DR-Title")
        name = self._name_display.display(self.center_person)
        # feature request 2356: avoid genitive form
        title = self._("Descendants of %s") % name
        mark = IndexMark(title, INDEX_TYPE_TOC, 1)
        self.doc.write_text(title, mark)
        self.doc.end_paragraph()
        recurse = RecurseDown(self.max_generations, self.database,
                              self.objPrint, self._showdups, self._locale)
        recurse.recurse(1, self.center_person, None)
#------------------------------------------------------------------------
#
# DescendantOptions
#
#------------------------------------------------------------------------
class DescendantOptions(MenuReportOptions):
    """
    Defines options and provides handling interface.
    """
    def __init__(self, name, dbase):
        MenuReportOptions.__init__(self, name, dbase)
    def add_menu_options(self, menu):
        # Build the "Report Options" category: center person, name format,
        # numbering system, generation depth, marriage/divorce/duplicate
        # toggles, privacy and localization options.
        category_name = _("Report Options")
        pid = PersonOption(_("Center Person"))
        pid.set_help(_("The center person for the report"))
        menu.add_option(category_name, "pid", pid)
        stdoptions.add_name_format_option(menu, category_name)
        numbering = EnumeratedListOption(_("Numbering system"), "Simple")
        numbering.set_items([
                ("Simple",      _("Simple numbering")),
                ("de Villiers/Pama", _("de Villiers/Pama numbering")),
                ("Meurgey de Tupigny", _("Meurgey de Tupigny numbering"))])
        numbering.set_help(_("The numbering system to be used"))
        menu.add_option(category_name, "numbering", numbering)
        gen = NumberOption(_("Generations"), 10, 1, 15)
        gen.set_help(_("The number of generations to include in the report"))
        menu.add_option(category_name, "gen", gen)
        marrs = BooleanOption(_('Show marriage info'), False)
        marrs.set_help(_("Whether to show marriage information in the report."))
        menu.add_option(category_name, "marrs", marrs)
        divs = BooleanOption(_('Show divorce info'), False)
        divs.set_help(_("Whether to show divorce information in the report."))
        menu.add_option(category_name, "divs", divs)
        dups = BooleanOption(_('Show duplicate trees'), True)
        dups.set_help(
            _("Whether to show duplicate Family Trees in the report."))
        menu.add_option(category_name, "dups", dups)
        stdoptions.add_private_data_option(menu, category_name)
        stdoptions.add_localization_option(menu, category_name)
    def make_default_style(self, default_style):
        """Make the default output style for the Descendant Report."""
        # Title style: bold sans-serif, centered, with a bottom border.
        f = FontStyle()
        f.set_size(12)
        f.set_type_face(FONT_SANS_SERIF)
        f.set_bold(1)
        p = ParagraphStyle()
        p.set_header_level(1)
        p.set_bottom_border(1)
        p.set_top_margin(ReportUtils.pt2cm(3))
        p.set_bottom_margin(ReportUtils.pt2cm(3))
        p.set_font(f)
        p.set_alignment(PARA_ALIGN_CENTER)
        p.set_description(_("The style used for the title of the page."))
        default_style.add_paragraph_style("DR-Title", p)
        # One indented person style and one spouse style per level (1..32);
        # indentation grows with the level, capped at 10 cm.
        f = FontStyle()
        f.set_size(10)
        for i in range(1, 33):
            p = ParagraphStyle()
            p.set_font(f)
            p.set_top_margin(ReportUtils.pt2cm(f.get_size()*0.125))
            p.set_bottom_margin(ReportUtils.pt2cm(f.get_size()*0.125))
            p.set_first_indent(-0.5)
            p.set_left_margin(min(10.0, float(i-0.5)))
            p.set_description(_("The style used for the "
                                "level %d display.") % i)
            default_style.add_paragraph_style("DR-Level%d" % min(i, 32), p)
            p = ParagraphStyle()
            p.set_font(f)
            p.set_top_margin(ReportUtils.pt2cm(f.get_size()*0.125))
            p.set_bottom_margin(ReportUtils.pt2cm(f.get_size()*0.125))
            p.set_left_margin(min(10.0, float(i-0.5)))
            p.set_description(_("The style used for the "
                                "spouse level %d display.") % i)
            default_style.add_paragraph_style("DR-Spouse%d" % min(i, 32), p)
|
pmghalvorsen/gramps_branch
|
gramps/plugins/textreport/descendreport.py
|
Python
|
gpl-2.0
| 17,729
|
[
"Brian"
] |
a07a90986a12809473ddbb1fc515ba994cc15428a745dbf1e58a0b932392ce44
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""Script for updating tensorflow/tools/compatibility/renames_v2.py.
To update renames_v2.py, run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
"""
# pylint: enable=line-too-long
import sys
import tensorflow as tf
# This import is needed so that TensorFlow python modules are in sys.modules.
from tensorflow import python as tf_python # pylint: disable=unused-import
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import app
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_export
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
from tensorflow.tools.compatibility import all_renames_v2
_OUTPUT_FILE_PATH = 'third_party/tensorflow/tools/compatibility/renames_v2.py'
_FILE_HEADER = """# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
\"\"\"List of renames to apply when converting from TF 1.0 to TF 2.0.
THIS FILE IS AUTOGENERATED: To update, please run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
This file should be updated whenever endpoints are deprecated.
\"\"\"
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""
def get_canonical_name(v2_names, v1_name):
  """Return the preferred TF 2.0 name, or the compat.v1 fallback path."""
  if not v2_names:
    return 'compat.v1.%s' % v1_name
  return v2_names[0]
def get_all_v2_names():
  """Get a set of function/class names available in TensorFlow 2.0."""
  v2_names = set()  # All op names in TensorFlow 2.0
  def visit(unused_path, unused_parent, children):
    """Visitor that collects TF 2.0 names."""
    for child in children:
      _, attr = tf_decorator.unwrap(child[1])
      api_names_v2 = tf_export.get_v2_names(attr)
      for name in api_names_v2:
        v2_names.add(name)
  # Walk the public tf.compat.v2 API, skipping contrib and compat.v1.
  visitor = public_api.PublicAPIVisitor(visit)
  visitor.do_not_descend_map['tf'].append('contrib')
  visitor.do_not_descend_map['tf.compat'] = ['v1']
  traverse.traverse(tf.compat.v2, visitor)
  return v2_names
def collect_constant_renames():
  """Looks for constants that need to be renamed in TF 2.0.

  Returns:
    Set of tuples of the form (current name, new name).
  """
  renames = set()
  for module in sys.modules.values():
    constants_v1_list = tf_export.get_v1_constants(module)
    constants_v2_list = tf_export.get_v2_constants(module)
    # _tf_api_constants attribute contains a list of tuples:
    # (api_names_list, constant_name)
    # We want to find API names that are in V1 but not in V2 for the same
    # constant_names.
    # First, we convert constants_v1_list and constants_v2_list to
    # dictionaries for easier lookup.
    constants_v1 = {constant_name: api_names
                    for api_names, constant_name in constants_v1_list}
    constants_v2 = {constant_name: api_names
                    for api_names, constant_name in constants_v2_list}
    # Second, we look for names that are in V1 but not in V2.
    for constant_name, api_names_v1 in constants_v1.items():
      # BUG FIX: a constant may have no V2 endpoints at all; fall back to
      # an empty list so it maps to compat.v1 via get_canonical_name
      # instead of raising KeyError.
      api_names_v2 = constants_v2.get(constant_name, [])
      for name in api_names_v1:
        if name not in api_names_v2:
          renames.add((name, get_canonical_name(api_names_v2, name)))
  return renames
def collect_function_renames():
  """Looks for functions/classes that need to be renamed in TF 2.0.
  Returns:
    Set of tuples of the form (current name, new name).
  """
  # Set of rename lines to write to output file in the form:
  #   'tf.deprecated_name': 'tf.canonical_name'
  renames = set()
  def visit(unused_path, unused_parent, children):
    """Visitor that collects rename strings to add to rename_line_set."""
    for child in children:
      _, attr = tf_decorator.unwrap(child[1])
      api_names_v1 = tf_export.get_v1_names(attr)
      api_names_v2 = tf_export.get_v2_names(attr)
      # names exported in V1 but no longer in V2 are deprecated
      deprecated_api_names = set(api_names_v1) - set(api_names_v2)
      for name in deprecated_api_names:
        renames.add((name, get_canonical_name(api_names_v2, name)))
  visitor = public_api.PublicAPIVisitor(visit)
  visitor.do_not_descend_map['tf'].append('contrib')
  visitor.do_not_descend_map['tf.compat'] = ['v1', 'v2']
  traverse.traverse(tf, visitor)
  # It is possible that a different function is exported with the
  # same name. For e.g. when creating a different function to
  # rename arguments. Exclude it from renames in this case.
  v2_names = get_all_v2_names()
  renames = set((name, new_name) for name, new_name in renames
                if name not in v2_names)
  return renames
def get_rename_line(name, canonical_name):
  """Format one "'tf.<old>': 'tf.<new>'" entry for the renames dict."""
  return ' \'tf.%s\': \'tf.%s\'' % (name, canonical_name)
def update_renames_v2(output_file_path):
  """Writes a Python dictionary mapping deprecated to canonical API names.
  Args:
    output_file_path: File path to write output to. Any existing contents
      would be replaced.
  """
  function_renames = collect_function_renames()
  constant_renames = collect_constant_renames()
  all_renames = function_renames.union(constant_renames)
  # Symbols with hand-maintained renames are skipped below.
  manual_renames = set(
      all_renames_v2.manual_symbol_renames.keys())
  # List of rename lines to write to output file in the form:
  #   'tf.deprecated_name': 'tf.canonical_name'
  rename_lines = [
      get_rename_line(name, canonical_name)
      for name, canonical_name in all_renames
      if 'tf.' + name not in manual_renames]
  renames_file_text = '%srenames = {\n%s\n}\n' % (
      _FILE_HEADER, ',\n'.join(sorted(rename_lines)))
  file_io.write_string_to_file(output_file_path, renames_file_text)
def main(unused_argv):
  """Entry point: regenerate renames_v2.py at the canonical output path."""
  update_renames_v2(_OUTPUT_FILE_PATH)
if __name__ == '__main__':
  app.run(main=main)
|
ghchinoy/tensorflow
|
tensorflow/tools/compatibility/update/generate_v2_renames_map.py
|
Python
|
apache-2.0
| 7,196
|
[
"VisIt"
] |
e855d4a34aaf2b413ee4e8fb4396b3d1cddb034ce94d650e9f66522948066e71
|
#!/bin/env python
"""
List the number of requests in the caches of all the ReqProxyies
"""
import DIRAC
from DIRAC.Core.Base.Script import Script
@Script()
def main():
    """List the number of requests cached by every configured ReqProxy."""
    Script.registerSwitch("", "Full", "   Print full list of requests")
    Script.parseCommandLine()
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    # --Full switches from a count to the full listing of cached requests
    fullPrint = False
    for switch in Script.getUnprocessedSwitches():
        if switch[0] == "Full":
            fullPrint = True
    reqClient = ReqClient()
    # Query each request proxy in turn; failures are logged and skipped.
    for server, rpcClient in reqClient.requestProxies().items():
        DIRAC.gLogger.always("Checking request cache at %s" % server)
        reqCache = rpcClient.listCacheDir()
        if not reqCache["OK"]:
            DIRAC.gLogger.error("Cannot list request cache", reqCache)
            continue
        reqCache = reqCache["Value"]
        if not reqCache:
            DIRAC.gLogger.always("No request in cache")
        else:
            if fullPrint:
                DIRAC.gLogger.always("List of requests", reqCache)
            else:
                DIRAC.gLogger.always("Number of requests in the cache", len(reqCache))
    DIRAC.exit(0)
if __name__ == "__main__":
    main()
|
DIRACGrid/DIRAC
|
src/DIRAC/RequestManagementSystem/scripts/dirac_rms_list_req_cache.py
|
Python
|
gpl-3.0
| 1,203
|
[
"DIRAC"
] |
53c1d2e5b3e4a25caeb1ba9481c950f2b16734344ed9d4c07c1c5d8127bac4cb
|
#!/usr/bin/env python3
import os
import sys
import gzip
## files from NCBI BLAST tabular format
filename_A = sys.argv[1]
filename_B = sys.argv[2]
def read_tbl(filename):
    """Parse an NCBI BLAST tabular file and keep the best hit per query.

    Each data line is TAB-separated with the query id in column 1, the
    target id in column 2 and the bit score in the last column; lines
    starting with '#' are comments.  Supports plain and .gz files.

    Returns: {query_id: {'t_id': best_target_id, 'bits': best_bit_score}}
    """
    rv = dict()
    # BUG FIX: the original called filename.endsiwth (typo ->
    # AttributeError on every call) and leaked a plain file handle that
    # was opened before the gzip re-open.
    if filename.endswith('.gz'):
        f = gzip.open(filename, 'rt')
    else:
        f = open(filename, 'r')
    try:
        for line in f:
            if line.startswith('#'):
                continue
            tokens = line.strip().split("\t")
            q_id = tokens[0]
            t_id = tokens[1]
            bits = float(tokens[-1])
            # keep only the highest-scoring target for each query
            if q_id not in rv or rv[q_id]['bits'] < bits:
                rv[q_id] = {'t_id': t_id, 'bits': bits}
    finally:
        f.close()
    return rv
# Best hit of each A query against database B, and vice versa.
best_A = read_tbl(filename_A)
best_B = read_tbl(filename_B)
# Report reciprocal best hits: A's best target must name A back as its own
# best target.  Output: A_id, B_id, A->B bits, B->A bits (tab-separated).
for id_A in best_A.keys():
    id_B = best_A[id_A]['t_id']
    tmp_A = best_A[id_A]
    if id_B in best_B and best_B[id_B]['t_id'] == id_A:
        tmp_B = best_B[id_B]
        print("%s\t%s\t%.1f\t%.1f"%(id_A, id_B, tmp_A['bits'], tmp_B['bits']))
|
taejoonlab/taejoonlab-toolbox
|
align/bp_tbl-to-reciprocal_best.py
|
Python
|
gpl-3.0
| 986
|
[
"BLAST"
] |
05a036adc29e1fa28ad774a5a71d2583b8f51f35f9aff76aa5d2021214258a60
|
# -*- coding:Utf-8 -*-
"""
.. currentmodule:: pylayers.util.geomutil
"""
from __future__ import print_function
import shapely.geometry as sh
import scipy.linalg as la
import pdb
import logging
import networkx as nx
import doctest
import os
#import mayavi.mlab as mlab
import matplotlib.pyplot as plt
import numpy as np
from scipy.linalg import toeplitz
import pylayers.util.project as pro
import pylayers.util.pyutil as pyu
import pylayers.util.graphutil as gru
import numpy.ma as ma
# from antenna import *
import shapely.geometry as shg
from descartes.patch import PolygonPatch
from itertools import combinations, permutations, product
COLOR = {
True: '#6699cc',
False: '#ff3333'
}
def ispoint(tpts, pt, tol=0.05):
    """ check if pt matches one of an indexed tuple of points

    Parameters
    ----------
    tpts : tuple (points (2xN), index (1xN))
    pt : point (2,1)
    tol : float
        default (0.05 meters)

    Returns
    -------
    k : the stored (negative) index of the matching point, or 0 when no
        stored point lies within tol of pt.

    Examples
    --------
    >>> from pylayers.util.geomutil.util import *
    >>> tpts = (np.array([[1,2,3],[5,6,7]]),np.array([-1,-2,-3]))
    >>> ispoint(tpts, np.array([[1],[5]]))
    -1

    See Also
    --------
    pylayers.util.geomutil.Polygon.setvnodes
    """
    coords, index = tpts
    # Euclidean distance from pt to every stored point
    delta = coords - pt.reshape(2, 1)
    dist = np.sqrt(np.sum(delta * delta, axis=0))
    close = np.flatnonzero(dist <= tol)
    if close.size == 0:
        # every stored point is farther than tol
        return 0
    if close.size == 1:
        return index[close][0]
    # several candidates within tol: keep the nearest (first on ties)
    return index[close[np.argmin(dist[close])]]
def isconvex(poly, tol=1e-2):
    """ Determine if a polygon is convex

    Parameters
    ----------
    poly : shapely Polygon
    tol : tolerance on aligned points

    Returns
    -------
    True if convex

    Notes
    -----
    Every vertex triplet is tested with isleft in both traversal
    directions; the polygon is declared convex when the accumulated
    deviation in either direction stays below tol, which makes the test
    robust to almost-convex polygons with nearly aligned points.
    """
    pts = np.array(poly.exterior.xy)[:, :-1]
    prev1 = np.roll(pts, 1, axis=1)
    prev2 = np.roll(pts, 2, axis=1)
    forward = np.sum(np.abs(isleft(pts, prev1, prev2, tol=tol))) < tol
    backward = np.sum(np.abs(isleft(prev2, prev1, pts, tol=tol))) < tol
    return forward or backward
def ptconvex(poly):
    """ Determine convex / concave points in the Polygon

    Parameters
    ----------
    poly : shapely.Polygon

    Returns
    -------
    cvex, ccve : lists of the convex and concave points
    """
    pts = np.array(poly.exterior.xy)
    A = pts[:, :-1]
    B = np.roll(A, -1)
    C = np.roll(B, -1)
    # orient the ccw test according to the polygon winding direction
    if signedarea(poly) > 0:
        cw = ccw(C, B, A)
    else:
        cw = ccw(A, B, C)
    # BUG FIX: removed a leftover "import ipdb; ipdb.set_trace()" debugger
    # breakpoint that froze any caller reaching this function.
    # NOTE(review): np.roll without axis= flattens the 2xN array before
    # rolling — verify this vertex pairing is intended.
    cvex = A[:, np.roll(cw, +1)]
    ccve = A[:, np.roll(~cw, +1)]
    return cvex.tolist(), ccve.tolist()
def ndarray(poly):
    """ get a ndarray from a Polygon

    The closing vertex of the exterior ring (a repeat of the first) is
    dropped.

    Returns
    -------
    p : ndarray (2xNp)

    Examples
    --------
    >>> from pylayers.util.geomutil import *
    >>> p1 = np.array([[0,1,1,0],[0,0,1,1]])
    >>> P1 = Polygon(p1)
    """
    x, y = poly.exterior.xy
    return np.array([x[:-1], y[:-1]])
def signedarea(poly):
    """ get the signed area of the polygon

    The sign encodes the winding direction of the exterior ring.
    """
    p = ndarray(poly)
    xs = np.hstack((p[0, 1:], p[0, :1]))
    ys = np.hstack((p[1, 2:], p[1, :2])) - p[1, :]
    return sum(xs * ys) / 2.
class Plot_shapely(pro.PyLayers):
    """draw Shapely with matplotlib - pylab
    Plot_shapely.py
    Author : Martin Laloux 2010
    """
    def __init__(self, obj, ax, coul=None, alph=1):
        """ object constructor

        Parameters
        ----------
        ax :
            pylab Axes
        obj : geometric object
        coul : matplotlib color
        alph : transparency

        Examples
        --------
        >>> from shapely.wkt import loads
        >>> import matplotlib.pylab as plt
        >>> ax = plt.gca()
        >>> ligne = loads('LINESTRING (3 1, 4 4, 5 5, 5 6)')
        >>> a = Plot_shapely(ligne,ax,'r', 0.5)
        >>> a.plot
        >>> Plot_shapely(ligne,ax,'#FFEC00').plot
        >>> plt.show()
        """
        self.obj = obj
        self.type = obj.geom_type
        self.ax = ax
        self.coul = coul
        self.alph = alph
    def plot_coords(self):
        """ draw a single point as a marker
        """
        x, y = self.obj.xy
        self.ax.plot(x, y, 'o', color=self.coul)
    def plot_ligne(self):
        """draw a line (also used for LinearRing)"""
        x, y = self.obj.xy
        self.ax.plot(x, y, color=self.coul, alpha=self.alph, linewidth=3)
    def plot_polygon(self):
        """draw a filled polygon patch"""
        patch = PolygonPatch(self.obj, facecolor=self.coul,
                             edgecolor='#000000', alpha=self.alph)
        self.ax.add_patch(patch)
    def plot_multi(self):
        """multipoints, multilignes, multipolygones + GeometryCollection:
        recurse into each member geometry"""
        for elem in self.obj:
            Plot_shapely(elem, self.ax, self.coul, self.alph).plot
    @property
    def plot(self):
        """draw w.r.t geometrical type"""
        if self.type == 'Point':
            self.plot_coords()
        elif self.type == 'Polygon':
            self.plot_polygon()
        elif self.type == 'LineString':
            self.plot_ligne()
        elif "Multi" in self.type:
            # ex. MultiPolygon
            self.plot_multi()
        elif self.type == 'GeometryCollection':
            self.plot_multi()
        elif self.type == 'LinearRing':
            # BUG FIX: was self.plot_line(), a method that does not exist
            # (AttributeError); a LinearRing is drawn like a LineString.
            self.plot_ligne()
        else:
            raise ValueError("unknown: %s" % self.type)
class LineString(pro.PyLayers, shg.LineString):
    """ Overloaded shapely LineString class
    """

    def __init__(self, p):
        """ constructor

        Parameters
        ----------
        p : shg.Polygon | shg.MultiPoint | list | np.ndarray (2xNp)
            when an ndarray/list is given the line is closed on its
            first point
        """
        if type(p) == shg.polygon.Polygon:
            # BUGFIX: bare `shape(...)` was undefined in this module,
            # every other occurrence uses np.shape
            self.Np = np.shape(p.exterior.xy)[1] - 1
            shg.LineString.__init__(self, p)
        if type(p) == shg.multipoint.MultiPoint:
            self.Np = np.shape(p)[0]
            shg.LineString.__init__(self, p)
        if type(p) == list:
            p = np.array(p)
        if type(p) == np.ndarray:
            self.Np = np.shape(p)[1]
            tp = []
            for k in range(self.Np):
                tp.append(p[:, k])
            # close the line on its first point
            tp.append(tp[0])
            tu = tuple(tp)
            shg.LineString.__init__(self, tu)

    def plot(self, **kwargs):
        """ plot LineString

        Parameters
        ----------
        show : boolean
        fig : figure object
        ax : axes object
        linewidth : int
        color : string
            default #abcdef"
        alpha : float
            transparency  (default 0.8)
        figsize : tuple

        Returns
        -------
        fig, ax

        Examples
        --------
        .. plot::
            :include-source:
            >>> from pylayers.util.geomutil import *
            >>> import matplotlib.pyplot as plt
            >>> import numpy as np
            >>> l1 = np.array([[0,1,1,0],[0,0,1,1]])
            >>> L1 = LineString(l1)
            >>> l2 = [[3,4,4,3],[1,1,2,2]]
            >>> L2 = LineString(l2)
            >>> fig,ax = L1.plot(color='red',alpha=0.3,linewidth=3)
            >>> fig,ax = L2.plot(fig=fig,ax=ax,color='blue',alpha=0.7,linewidth=2)
            >>> title = plt.title('test plotting LineString')
        """
        defaults = {'show': False,
                    'fig': [],
                    'ax': [],
                    'color': '#abcdef',
                    'linewidth': 1,
                    'alpha': 0.8,
                    'figsize': (10, 10)
                    }
        # fill in missing keyword arguments with defaults
        for key, value in defaults.items():
            if key not in kwargs:
                kwargs[key] = value
        # getting fig and ax (create them when not provided)
        if kwargs['fig'] == []:
            fig = plt.figure(figsize=kwargs['figsize'])
            fig.set_frameon(True)
        else:
            fig = kwargs['fig']
        if kwargs['ax'] == []:
            ax = fig.gca()
        else:
            ax = kwargs['ax']
        x, y = self.xy
        ax.plot(x, y,
                color=kwargs['color'],
                alpha=kwargs['alpha'],
                linewidth=kwargs['linewidth'])
        if kwargs['show']:
            plt.show()
        return fig, ax
# -----------------------------------------------------------
# Functions used for calculation of visibility graph Gv
# -----------------------------------------------------------
class Polygon(pro.PyLayers, shg.Polygon):
""" Overloaded shapely Polygon class
Attributes
----------
Methods
-------
plot
ptconvex
buildGv
ndarray :
get a ndarray from a Polygon
signedarea :
get the signed area of the polygon
"""
    def __init__(self, p=[[3, 4, 4, 3], [1, 1, 2, 2]], vnodes=[], delta=0):
        """ object constructor

        Parameters
        ----------
        p : list
            2xNp np.array
            shg.MultiPoint
            shg.Polygon
            tuple : (xmin, xmax, ymin, ymax) bounding box, inflated by delta
        vnodes : list of alternating points and segments numbers
            default = [] in this case a regular ordered sequence
            is generated.
        delta : float
            margin added around the bounding box when p is a tuple

        Notes
        -----
        Convention : a Polygon has an equal number of points and segments.
        There is an implicit closure between first and last point.
        The defaults p=[[...]] and vnodes=[] are mutable but are never
        mutated in place here (only rebound / copied), so they are safe.
        """
        if type(p) == sh.multipolygon.MultiPolygon:
            raise AttributeError('MultiPolygon are not allowed')
        if type(p) == shg.polygon.Polygon:
            # Np : number of points (exterior repeats the closing point)
            self.Np = np.shape(p.exterior.xy)[1] - 1
            # rebind p as a 2xNp array, falling through to the ndarray branch
            p = np.vstack((p.exterior.xy[0][0:-1], p.exterior.xy[1][0:-1]))
        # tuple case : (xmin, xmax, ymin, ymax) expanded to a rectangle
        if type(p) == tuple:
            xmin = p[0] - delta
            xmax = p[1] + delta
            ymin = p[2] - delta
            ymax = p[3] + delta
            p = [[xmin, xmin, xmax, xmax], [ymin, ymax, ymax, ymin]]
        if type(p) == shg.multipoint.MultiPoint:
            self.Np = np.shape(p)[0]
            shg.Polygon.__init__(self, p)
        if type(p) == list:
            # lists (including the rectangle built above) become ndarray
            p = np.array(p)
        if type(p) == np.ndarray:
            # accept Npx2 arrays as well : transpose to 2xNp
            if np.shape(p)[1] == 2:
                p = p.T
            self.Np = np.shape(p)[1]
            tp = []
            for k in range(self.Np):
                tp.append(p[:, k])
            # close the ring on the first point
            tp.append(tp[0])
            tu = tuple(tp)
            shg.Polygon.__init__(self, tu)
        # recompute Np from the actual exterior (shapely may normalize)
        self.Np = np.shape(self.exterior.xy)[1] - 1
        if vnodes != []:
            self.vnodes = np.array(vnodes)
            # check if always True
            # very important fix for buildGv :
            # vnodes must always start with a point (<0)
            if self.vnodes[0] > 0:
                self.vnodes = np.roll(self.vnodes, -1)
                print ('WARNING:Polygon.vnodes == Polygon.ndarray() modulo -1')
        else:
            # create a default alternating sequence
            #
            #   -1 1 -2 2 -3 3 ... -(Np-1) (Np-1)
            #
            u = np.array([-1, 1])
            v = np.arange(self.Np) + 1
            self.vnodes = np.kron(v, u)
        pass
def __reduce__(self):
# Get the parent's __reduce__ tuple
pickled_state = super(Polygon, self).__reduce__()
# Create our own tuple to pass to __setstate__
new_state = (pickled_state[2],) + (self.vnodes,)
# Return a tuple that replaces the parent's __setstate__ tuple
return (pickled_state[0], pickled_state[1], new_state)
def __setstate__(self, state):
self.vnodes = state[-1] # Set the info attribute
staten=state[0:-1][0]
# Call the parent's __setstate__ with the other tuple elements.
super(Polygon, self).__setstate__(staten)
def __add__(self, p):
""" add 2 polygons
Parameters
----------
p : Polygon
Returns
-------
pm : merged polygon or unchanged polygon
"""
pnew = self.union(p)
# v0 = self.vnodes
# v1 = p.vnodes
# nseg0 = filter(lambda x:x>0,v0)
# nseg1 = filter(lambda x:x>0,v1)
# commseg = np.intersect1d(nseg0,nseg1)[0]
# is0 = np.where(nseg0==commseg)[0][0]
# is1 = np.where(nseg1==commseg)[0][0]
# rs0 = np.roll(v0,2*is0-1)[1:]
# rs1 = np.roll(v1,2*is1-1)[1:]
# if rs1[0]==rs0[0]:
# rs1=rs1[::-1]
# print(rs0)
# print(rs1)
# assert(rs0[0]==rs1[-1])
# assert(rs0[-1]==rs1[0])
# vnodes = np.hstack((rs0,rs1[1:-1]))
# self.vnodes = vnodes
# p2 = Polygon(pnew,vnodes=vnodes)
p2 = Polygon(pnew)
#
# Not finished
#
return(p2)
# p1 = np.vstack((pnew.exterior.xy[0],pnew.exterior.xy[1]))
# p2 = Polygon(p1)
# return(p2)
# if isinstance(pnew,sh.polygon.Polygon):
# p1 = np.vstack((pnew.exterior.xy[0],pnew.exterior.xy[1]))
# return(p2)
# else:
# pdb.set_trace()
# return(self)
def __repr__(self):
st = ''
p = self.ndarray()
sh = np.shape(p)
for k in range(sh[1]):
st = st + '(' + str(p[0, k]) + ',' + str(p[1, k]) + ')\n'
# vnodes to link with external nodes numerotation
st = st + '\nvnodes : ('
for k in range(len(self.vnodes)):
st = st + str(self.vnodes[k]) + ' '
st = st + ')\n'
return(st)
@property
def xy(self):
return self._xy
@xy.setter
def xy(self, xy):
self._xy = xy
@xy.getter
def xy(self):
return self._xy
def setvnodes(self, L):
""" update vnodes member from Layout
Parameters
----------
L : pylayers.layout.Layout
See Also
--------
pylayers.layout.Layout.ispoint
vnodes is a list of points and segments of the polygon.
If there are iso-segments the sequence of iso segments
is repeated between the termination points.
L.numseg has been adapted in order to return either the first segment (default)
or the list of all segments
"""
# get coordinates of the exterior of the polygon
x, y = self.exterior.xy
# npts = map(lambda x :
# L.ispoint(np.array(x),tol=0.01),zip(x[0:-1],y[0:-1]))
#
# npts : list of point which are in the layout (with tolerance 1cm) 0 means not in the layout
#
npts = [L.ispoint(np.array(xx), tol=0.01) for xx in zip(x[0:-1], y[0:-1])]
assert (0 not in npts), pdb.set_trace()
# seg list of tuple [(n1,n2),(n2,n3),....(,)]
seg = zip(npts, np.roll(npts, -1))
vnodes = []
for pseg in seg:
vnodes = vnodes + [pseg[0]]
nseg = L.numseg(pseg[0], pseg[1], first=False)
# if nseg==0:
# pdb.set_trace()
if type(nseg) == int:
nseg = [nseg]
else:
nseg = list(nseg)
vnodes = vnodes + nseg
# pdb.set_trace()
# try:
# nseg = map(lambda x : L.numseg(x[0],x[1],first=False),seg)
# except:
# import ipdb
# ipdb.set_trace()
# vnodes = np.kron(npts,np.array([1,0]))+np.kron(nseg,np.array([0,1]))
self.vnodes = np.array(vnodes)
def setvnodes_new(self,tpts,L):
""" update vnodes members from Layout
Parameters
----------
tpts : tuple
tpts[0] : points coordinates
tpts[1] : points index
L : pylayers.layout.Layout
See Also
--------
pylayers.layout.Layout.ispoint
vnodes is a list of point and segments of the polygon.
If there are isosegments the sequence of iso segments
is repeated between the termination points.
L.numseg has been adapted in order to return either the first segment (default)
or the list of all segments
"""
# get coordinates of the exterior of the polygon
x, y = self.exterior.xy
#
# npts : list of points which are in the layout (with tolerance 1cm)
# 0 means not in the layout
#
# TODO : Sometimes polygon points are not exactly correspondong to nodes of Layout (Why ? )
# This is the reason of the applied tolerance of 5cm
#
npts = [ispoint(tpts,np.array(xx), tol=0.05) for xx in zip(x[0:-1], y[0:-1])]
assert (0 not in npts), pdb.set_trace()
# seg list of tuple [(n1,n2),(n2,n3),....(,)]
seg = zip(npts, np.roll(npts, -1))
vnodes = []
for pseg in seg:
vnodes = vnodes + [pseg[0]]
# get the list of associated segments
nseg = L.numseg(pseg[0], pseg[1], first=False)
if type(nseg) == int:
nseg = [nseg]
else:
nseg = list(nseg)
vnodes = vnodes + nseg
# pdb.set_trace()
# try:
# nseg = map(lambda x : L.numseg(x[0],x[1],first=False),seg)
# except:
# import ipdb
# ipdb.set_trace()
# vnodes = np.kron(npts,np.array([1,0]))+np.kron(nseg,np.array([0,1]))
self.vnodes = np.array(vnodes)
# self.
def ndarray(self):
""" get a ndarray from a Polygon
Returns
-------
p : ndarray (2xNp)
Examples
--------
>>> from pylayers.util.geomutil import *
>>> p1 = np.array([[0,1,1,0],[0,0,1,1]])
>>> P1 = Polygon(p1)
"""
lring = self.exterior
x, y = lring.xy
p = np.array([x[0:-1], y[0:-1]])
return(p)
def signedarea(self):
""" get the signed area of the polygon
"""
p = self.ndarray()
return sum(np.hstack((p[0, 1::], p[0, 0:1])) *
(np.hstack((p[1, 2::], p[1, 0:2])) - p[1, :])) / 2.
def coorddeter(self):
""" determine polygon coordinates
"""
self.xy = np.array([self.exterior.xy[0], self.exterior.xy[1]])
def isconvex(self, tol=1e-2):
""" Determine if a polygon is convex
Parameters
----------
tol : tolerance on aligned point
Returns
-------
boolean : True if convex
Notes
-----
the algorithm tests all triplet of points and determines
if the third point is at the left to the 2 first.
a tolerance can be introduce in cases the polygon is
*almost* convex.
"""
self.coorddeter()
p = self.xy[:, :-1]
a = p
b = np.roll(p, 1, axis=1)
c = np.roll(p, 2, axis=1)
return (np.sum(isleft(a, b, c, tol=tol)) == 0 ) or \
(np.sum(isleft(c, b, a, tol=tol)) == 0)
    def reverberation(self, fGHz, L):
        r""" calculate reverberation time of the polygon

        Parameters
        ----------
        fGHz : frequency GHz
        L : Layout

        Returns
        -------
        V : float
            Volume
        A : float
            Area
        eta : float
            absorption coefficient
        tau_sab : float
            Sabine delay
        tau_eyr : float
            Eyring delay

        Notes
        -----
        :math:`\tau_g = \frac{4V}{c\eta A}`

        Sabine's Model, where :math:`\eta` is the absorption coefficient.
        The docstring is raw so the LaTeX backslashes render correctly.
        """
        # get the sequence of segments (positive vnodes entries)
        # handle subsegments
        # (filter is lazy in Python 3 but is only iterated once below)
        lseg = filter(lambda x: x > 0, self.vnodes)
        S1 = []    # subsegmented wall surface pieces
        S2 = []    # plain wall surfaces
        AS2 = []   # absorption-weighted plain wall surfaces
        AS1 = []   # absorption-weighted subsegmented surfaces
        # S unsigned polygon area
        # P polygon Perimeter
        # A unsigned room area
        # V room Volume
        # H room Height
        S = abs(self.area)
        P = 0
        for k in lseg:
            npt = L.Gs.node[k]['connect']
            slname = L.Gs.node[k]['name']
            sl = L.sl[slname]
            # calculate Loss at normal incidence for this slab
            Lo, Lp = sl.loss0(fGHz)
            # Abs : linear power transmission factor of the wall
            Abs = 10**(-Lo[0] / 10.)
            n1 = npt[0]
            n2 = npt[1]
            p1 = L.Gs.pos[n1]
            p2 = L.Gs.pos[n2]
            # segment length, accumulated into the perimeter
            Lseg = np.sqrt((p2[0] - p1[0])**2 + (p2[1] - p1[1])**2)
            P = P + Lseg
            # wall height from the segment z extent
            # NOTE(review): H of the *last* segment is reused after the loop
            # for V and A -- assumes all walls share the same height; confirm
            H = L.Gs.node[k]['z'][1] - L.Gs.node[k]['z'][0]
            if 'ss_z' in L.Gs.node[k]:
                # wall with subsegments (doors, windows ...)
                SS = 0
                for k2, ss in enumerate(L.Gs.node[k]['ss_z']):
                    ssname = L.Gs.node[k]['ss_name'][k2]
                    sssl = L.sl[ssname]
                    Loss, Lpss = sssl.loss0(fGHz)
                    Absss = 10**(-Loss[0] / 10.)
                    # surface of the subsegment
                    val = Lseg * (ss[1] - ss[0])
                    SS = SS + val
                    S1.append(val)
                    AS1.append(val * Absss)
                # remaining wall surface outside the subsegments
                St = H * Lseg
                S1.append(St - SS)
                AS1.append((St - SS) * Abs)
            else:
                S2.append(H * Lseg)
                AS2.append(H * Lseg * Abs)
        # room volume and total surface (walls + floor + ceiling)
        V = S * H
        A = P * H + 2 * S
        sfloor = L.sl['FLOOR']
        sceil = L.sl['CEIL']
        Lofloor, Lpfloor = sfloor.loss0(fGHz)
        Loceil, Lpceil = sceil.loss0(fGHz)
        etaFloor = S * 10**(-Lofloor[0] / 10.)
        etaCeil = S * 10**(-Loceil[0] / 10.)
        # mean absorption coefficient over the whole surface
        eta = (sum(AS1) + sum(AS2) + etaFloor + etaCeil) / A
        # Sabine and Eyring reverberation delays (0.3 = c in m/ns)
        tau_sab = 4 * V / (0.3 * A * eta)
        tau_eyr = -4 * V / (0.3 * A * np.log(1 - eta))
        return(V, A, eta, tau_sab, tau_eyr)
def plot(self, **kwargs):
""" plot function
Parameters
----------
color : string
default #abcdef"
alpha : float
transparency (default 0.8)
vnodes : bool
display vnodes
Examples
--------
.. plot::
:include-source:
>>> from pylayers.util.geomutil import *
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> p1 = np.array([[0,1,1,0],[0,0,1,1]])
>>> P1 = Polygon(p1)
>>> p2 = [[3,4,4,3],[1,1,2,2]]
>>> P2 = Polygon(p2)
>>> p3 = [np.array([10,10]),np.array([11,10]),np.array([11,11]),np.array([10,11])]
>>> P3 = Polygon(p3)
>>> fig,ax = P1.plot(color='red',alpha=0.3)
>>> fig,ax = P2.plot(fig=fig,ax=ax,color='blue',alpha=0.7)
>>> fig,ax = P3.plot(fig=fig,ax=ax,color='green',alpha=1)
>>> title = plt.title('test plotting polygons')
"""
defaults = {'show': False,
'fig': [],
'ax': [],
'vnodes': False,
'color': '#abcdef',
'edgecolor': '#000000',
'alpha': 0.8,
'figsize': (10, 10)
}
#
# update default values
#
for key, value in defaults.items():
if key not in kwargs:
kwargs[key] = value
#
# getting fig and ax
#
if kwargs['fig'] == []:
fig = plt.figure(figsize=kwargs['figsize'])
fig.set_frameon(True)
else:
fig = kwargs['fig']
if kwargs['ax'] == []:
ax = fig.gca()
else:
ax = kwargs['ax']
x, y = self.exterior.xy
numpt = filter(lambda z: z < 0, self.vnodes)
ax.fill(x, y,
color=kwargs['color'],
alpha=kwargs['alpha'],
ec=kwargs['edgecolor'])
if kwargs['vnodes']:
for k in range(len(numpt)):
ax.text(x[k], y[k], numpt[k])
if kwargs['show']:
plt.show()
return fig, ax
    def simplify(self):
        """ simplify polygon - suppress adjacent colinear segments

        Returns
        -------
        poly2 : shapely.geometry.Polygon
            simplified polygon (plain shapely Polygon, not this class)

        Notes
        -----
        Two consecutive edge directions are considered colinear when the
        dot product of their unit vectors exceeds 0.98 (~11 degrees).
        """
        p = np.array(self.exterior.xy)
        N = np.shape(p)[1]
        # q accumulates the kept points, starting with the first one
        q = p[:, 0].reshape(2, 1)
        for k in range(N - 2):
            # unit vectors of two consecutive edges
            v1 = p[:, k + 1] - p[:, k]
            v2 = p[:, k + 2] - p[:, k + 1]
            v1n = v1 / np.sqrt(np.dot(v1, v1))
            v2n = v2 / np.sqrt(np.dot(v2, v2))
            u = np.dot(v1n, v2n)
            # keep the middle point only when the edges are not colinear
            if u < 0.98:
                q = np.hstack((q, p[:, k + 1].reshape(2, 1)))
        # check whether the last kept edge is colinear with the very first
        # edge of the simplified ring ; if so drop the first point too
        vini = q[:, 1] - q[:, 0]
        vin = vini / np.sqrt(np.dot(vini, vini))
        v = np.dot(v2n, vin)
        if v > 0.98:
            q = q[:, 1:]
        y = q.T.copy()
        # NOTE(review): shg.asLineString is deprecated in shapely >= 1.8;
        # shg.LineString(y) is the modern equivalent -- confirm before changing
        ls = shg.asLineString(y)
        poly2 = shg.Polygon(ls)
        return(poly2)
def buildGvc(self, **kwargs):
""" Create visibility graph for a convex polygon
Parameters
----------
display : boolean
default : False
fig : matplotlib.figure.pyplot
ax : axes
udeg2 : np.array indexes of points of degree 2
default = []
eded : boolean
default True
indoor : boolean
default True
Examples
--------
.. plot::
:include-source:
>>> from pylayers.util.geomutil import *
>>> import shapely.geometry as shg
>>> import matplotlib.pyplot as plt
>>> points = shg.MultiPoint([(0, 0), (0, 1), (2.5,1), (2.5, 2), \
(2.8,2), (2.8, 1.1), (3.2, 1.1), \
(3.2, 0.7), (0.4, 0.7), (0.4, 0)])
>>> polyg = Polygon(points)
>>> Gv = polyg.buildGv(show=True)
>>> plt.axis('off')
(-0.5, 4.0, -0.5, 2.5)
>>> title = plt.title('Testing buildGv')
Notes
-----
Segment k and (k+1)%N share segment (k+1)%N
The degree of a point is dependent from other polygons around
Topological error can be raised if the point coordinates accuracy
is not limited.
Nodes of polygon are numbered in the global graph in vnodes member.
See Also
--------
pylayers.gis.layout.Layout.buildGv
"""
defaults = {'udeg2': np.array([]),
'eded': True,
'open': True,
'indoor': True
}
# initialize function attributes
for key, value in defaults.items():
if key in kwargs:
setattr(self, key, kwargs[key])
else:
setattr(self, key, value)
kwargs[key] = value
Gv = nx.Graph()
Gv.pos = {}
if kwargs['open']:
pass
else:
pass
lring = self.exterior
#
# Calculate interior normals
#
x, y = lring.xy
p = np.array([x[0:-1], y[0:-1]])
#
# determine convex points
#
# pdb.set_trace()
tcc, n = self.ptconvex()
# Np = self.Np
Np = np.shape(self.exterior.xy)[1] - 1
#
# retrieve
# npt points label sequence
# nseg segments label sequence
#
# vnodes do not necessarily start with a point
#
npt = filter(lambda x: x < 0, self.vnodes)
nseg = filter(lambda x: x > 0, self.vnodes)
#
# in convex case all segments see all segments
#
for nk in combinations(nseg, 2):
Gv.add_edge(nk[0], nk[1], weight=0)
#
# Update position of points in Gv
#
for nk in Gv.node:
Gv.pos[nk] = (p[0, nk], p[1, nk])
xr, yr = lring.xy
#
# Determine diffraction points
#
# deg2 : if null:
# the point is kept
# if convex:
# the point is kept
# else:
# the point is not kept
#
if indoor:
uconvex = np.nonzero(tcc == 1)[0] # convex point position
else:
uconvex = np.nonzero(tcc == -1)[0] # convex point position
# planar point (joining two parallel segment)
uzero = np.nonzero(tcc == 0)[0]
# degree 2 paralell points are often doors and windows
udiffdoor = np.intersect1d(uzero, udeg2)
udiff = np.hstack((uconvex, udiffdoor)).astype(
'int') # diffracting point
#
# 1) Calculate node-node visibility
#
#
# Between all combinations of diffracting points
# create a segment and check whether it is fully included in the
# polygon.
# If verified then there is a visibility between the 2 points.
#
for nk in combinations(udiff, 2):
p1 = p[:, nk[0]]
p2 = p[:, nk[1]]
seg = shg.LineString(((p1[0], p1[1]), (p2[0], p2[1])))
if self.contains(seg):
Gv.add_edge(npt[nk[0]], npt[nk[1]], weight=0)
#
# 2) Calculate edge-edge and node-edge visibility
#
for nk in range(Np): # loop on range of number of points
ptk = p[:, nk] # tail point
# head point (%Np to get 0 as last point)
phk = p[:, (nk + 1) % Np]
# lnk : unitary vector on segment nk
lk = phk - ptk
nlk = np.sqrt(np.dot(lk, lk))
lnk = lk / nlk
# the epsilon is (1/1000) of the segment length
epsilonk = nlk / \
1000. # this can be dangerous (epsilon can be large)
# x--o----------------------o--x
# +eps -eps
pcornert = ptk + lnk * epsilonk # + n[:,nk]*epsilon
pcornerh = phk - lnk * epsilonk # + n[:,nk]*epsilon
#
# in any case no ray towark nk
# if nk is convex no ray toward (nk-1)%Np
#
# start from the two extremity of the segment
for i, pcorner in enumerate([pcornert, pcornerh]):
#
# if tail point
# remove nk segment
# and if the point is convex
# remove previous segment
#
# si point head
#
listpoint = range(Np)
listpoint.remove(nk) # remove current point
if i == 0: # first iteration pcornert
if nk in uconvex: # == 1
listpoint.remove((nk - 1) % Np)
if i == 1: # second iteration pcornerh
if (nk + 1) % Np in uconvex: # ==1
listpoint.remove((nk + 1) % Np)
for ns in listpoint:
pts = p[:, ns]
phs = p[:, (ns + 1) % Np]
# Add B.Uguen 2/01/2014 no possible visibility relation
# between aligned segments
if (not (is_aligned3(pts, phs, ptk) & is_aligned3(pts, phs, phk))):
ls = phs - pts
nls = np.sqrt(np.dot(ls, ls))
lns = ls / nls
epsilons = nls / 1000.
pte = pts + lns * epsilons # + n[:,ns]*epsilon
phe = phs - lns * epsilons # + n[:,ns]*epsilon
tbr = pyu.bitreverse(16, 5) / 16.
for alpha in tbr:
pa = pte + alpha * (phe - pte)
seg = shg.LineString((pcorner, pa))
# print "seg: ",seg.xy
# if npt[nk] == -3:
# plt.plot(np.array([pcorner[0],pa[0]]),np.array([pcorner[1],pa[1]]),linewidth=0.2,color='k')
# plt.draw()
# topological error can be raised here
seg2 = self.intersection(seg)
# if self.contains(seg):
if seg2.almost_equals(seg, decimal=4):
# print alpha,nk,ns
# plt.plot(np.array([pcorner[0],pa[0]]),np.array([pcorner[1],pa[1]]),linewidth=2,color='r')
# Gv.add_edge(-(uconvex[nk]+1),ns+1,weight=10)
if i == 0:
if nk in udiff:
Gv.add_edge(
npt[nk], nseg[ns], weight=1)
# plt.plot(np.array([Gv.pos[npt[nk]][0],Gv.pos[nseg[ns]][0]]),np.array([Gv.pos[npt[nk]][1],Gv.pos[nseg[ns]][1]]),'r')
if i == 1:
if (nk + 1) % Np in udiff:
Gv.add_edge(
npt[(nk + 1) % Np], nseg[ns], weight=1)
# plt.plot(np.array([Gv.pos[npt[(nk+1)%Np]][0],Gv.pos[nseg[ns]][0]]),np.array([Gv.pos[npt[(nk+1)%Np]][1],Gv.pos[nseg[ns]][1]]),'g')
# plt.draw()
# if i==1:
# if (((nseg[nk]==10) & (nseg[ns]==7)) or
# ((nseg[nk]==7) & (nseg[ns]==10))):
# pdb.set_trace()
if nseg[nk] != nseg[ns]:
if kwargs['eded']:
Gv.add_edge(
nseg[nk], nseg[ns], weight=1)
# else:
# print nseg[nk],nseg[ns]
# print pts,phs
# print ptk,phk
# if (((nseg[nk]==10) & (nseg[ns]==7)) or
# ((nseg[nk]==7) & (nseg[ns]==10))):
# plt.plot(np.array([Gv.pos[nseg[nk]][0],Gv.pos[nseg[ns]][0]]),np.array([Gv.pos[nseg[nk]][1],Gv.pos[nseg[ns]][1]]),'b')
# plt.plot(np.array([pcorner[0],pa[0]]),np.array([pcorner[1],pa[1]]),'b')
# print "seg: ",seg.xy
# print "seg2: ",seg2.xy
# print nseg[nk],nseg[ns]
# print pcorner , ptk
# print alpha , pa ,pte
# plt.draw()
# raw_input()
break
# else:
# print p
# print ns
# print nk
# print 'nsegnk : ',nseg[nk]
# print 'nsegns', nseg[ns]
# print 'ptk : ',ptk
# print 'phk : ',phk
# print 'pts : ',pts
# print 'phs : ',phs
# print "aligne :",nseg[nk],nseg[ns]
# pdb.set_trace()
if kwargs['show']:
nodes = np.array(Gv.nodes())
uneg = list(nodes[np.nonzero(nodes < 0)[0]])
upos = list(nodes[np.nonzero(nodes > 0)[0]])
nx.draw_networkx_nodes(Gv, Gv.pos, nodelist=upos,
node_color='blue', node_size=300, alpha=0.3)
nx.draw_networkx_nodes(Gv, Gv.pos, nodelist=uneg,
node_color='red', node_size=300, alpha=0.3)
nx.draw_networkx_labels(Gv, Gv.pos)
ndnd, nded, eded = gru.edgetype(Gv)
nx.draw_networkx_edges(Gv, Gv.pos, edgelist=eded,
edge_color='blue', width=2)
nx.draw_networkx_edges(Gv, Gv.pos, edgelist=ndnd,
edge_color='red', width=2)
nx.draw_networkx_edges(Gv, Gv.pos, edgelist=nded,
edge_color='green', width=2)
# label = {}
# for (u,v) in Gv.edges():
# d = Gv.get_edge_data(u,v)
# label[(u,v)]=d['weight']
# edge_label=nx.draw_networkx_edge_labels(Gv,Gv.pos,edge_labels=label)
return(Gv)
def buildGv(self, **kwargs):
""" Create visibility graph for a polygon
Parameters
----------
display : boolean
default : False
fig : matplotlib.figure.pyplot
ax : axes
udeg2 : np.array indexes of points of degree 2
default = []
eded : boolean
default True
indoor : boolean
default True
Examples
--------
.. plot::
:include-source:
>>> from pylayers.util.geomutil import *
>>> import shapely.geometry as shg
>>> import matplotlib.pyplot as plt
>>> points = shg.MultiPoint([(0, 0), (0, 1), (2.5,1), (2.5, 2), \
(2.8,2), (2.8, 1.1), (3.2, 1.1), \
(3.2, 0.7), (0.4, 0.7), (0.4, 0)])
>>> polyg = Polygon(points)
>>> Gv = polyg.buildGv(show=True)
>>> plt.axis('off')
(-0.5, 4.0, -0.5, 2.5)
>>> title = plt.title('Testing buildGv')
Notes
-----
Segment k and (k+1)%N share segment (k+1)%N
The degree of a point is dependent from other polygons around
Topological error can be raised if the point coordinates accuracy
is not limited.
Nodes of polygon are numbered in the global graph in vnodes member.
See Also
--------
pylayers.gis.layout.Layout.buildGv
"""
defaults = {'show': False,
'fig': [],
'ax': [],
'udeg2': np.array([]),
'eded': True,
'indoor': True
}
# initialize function attributes
for key, value in defaults.items():
if key in kwargs:
setattr(self, key, kwargs[key])
else:
setattr(self, key, value)
kwargs[key] = value
# self.args=args
if kwargs['show']:
if kwargs['fig'] == []:
fig = plt.figure(figsize=(20, 20))
fig.set_frameon(True)
else:
fig = kwargs['fig']
if kwargs['ax'] == []:
ax = fig.gca()
else:
ax = kwargs['ax']
plt.ion()
udeg2 = kwargs['udeg2']
GRAY = '#999999'
Gv = nx.Graph()
Gv.pos = {}
# pdb.set_trace()
lring = self.exterior
#
# Calculate interior normals
#
x, y = lring.xy
p = np.array([x[0:-1], y[0:-1]])
#
# determine convex points
#
# pdb.set_trace()
tcc, n = self.ptconvex()
# Np = self.Np
Np = np.shape(self.exterior.xy)[1] - 1
#
# retrieve
# npt points label sequence
# nseg segments label sequence
#
# vnodes do not necessarily start with a point
#
if self.vnodes[0] < 0:
ipt = 2 * np.arange(Np)
iseg = 2 * np.arange(Np) + 1
else:
ipt = 2 * np.arange(Np) + 1
iseg = 2 * np.arange(Np)
npt = self.vnodes[ipt]
nseg = self.vnodes[iseg]
# print("npt : ",nptr)
# print("nseg : ",nseg)
assert np.all(npt < 0), "something wrong with points"
assert np.all(nseg > 0), "something wrong with segments"
#
#
# Create middle point on lring
#
# Warning lring recopy the node at the end of the sequence
#
# A problem arises from the fact that a vnodes sequence
# do no necessarily starts with a point (negative node)
#
#
tpm = []
for ik, k in enumerate(lring.coords):
pt = np.array(k)
try:
pm = (pt + pm1) / 2.
if self.vnodes[0] < 0:
Gv.pos[nseg[ik - 1]] = (pm[0], pm[1])
else:
Gv.pos[nseg[ik % Np]] = (pm[0], pm[1])
tpm.append(pm)
pm1 = pt
except:
pm1 = pt
#
# Update position of points in Gv
#
for nk in range(Np):
#nnode = -(nk+1)
Gv.pos[npt[nk]] = (p[0, nk], p[1, nk])
xr, yr = lring.xy
#
# Determine diffraction points
#
# deg2 : if null:
# the point is kept
# if convex:
# the point is kept
# else:
# the point is not kept
#
if kwargs['indoor']:
uconvex = np.nonzero(tcc == 1)[0] # convex point position
else:
uconvex = np.nonzero(tcc == -1)[0] # convex point position
# planar point (joining two parallel segment)
uzero = np.nonzero(tcc == 0)[0]
# degree 2 paralell points are often doors and windows
udiffdoor = np.intersect1d(uzero, udeg2)
udiff = np.hstack((uconvex, udiffdoor)).astype(
'int') # diffracting point
# print("vnodes",self.vnodes
# print("tcc : ",tcc
# print("uzero : ",uzero
# print("udiffdoor : ",udiffdoor
# print("udiff",udiff
# print("udeg2",udeg2
# print("npt",npt
# if udiff!=[]:
# print("diff : ",npt[udiff]
# if udeg2!=[]:
# print "deg2 : ",npt[udeg2]
# if uzero!=[]:
# print "zero :",npt[uzero]
#
# if show == True display points and polygon
#
if kwargs['show']:
points1 = shg.MultiPoint(lring)
for k, pt in enumerate(points1):
if k in uconvex:
ax.plot(pt.x, pt.y, 'o', color='red')
elif k in udiffdoor:
ax.plot(pt.x, pt.y, 'o', color='blue')
else:
ax.plot(pt.x, pt.y, 'o', color=GRAY)
patch = PolygonPatch(self, facecolor='#6699cc',
edgecolor='#000000', alpha=0.5, zorder=2)
ax.add_patch(patch)
# pdb.set_trace()
#
# 1) Calculate node-node visibility
#
# The algorithm exploits definition of convexity.
#
# Between all combinations of diffracting points
# create a segment and check whether it is fully included in the
# polygon.
# If verified then there is a visibility between the 2 points.
#
for nk in combinations(udiff, 2):
p1 = p[:, nk[0]]
p2 = p[:, nk[1]]
seg = shg.LineString(((p1[0], p1[1]), (p2[0], p2[1])))
if self.contains(seg):
Gv.add_edge(npt[nk[0]], npt[nk[1]], weight=0)
#
# 2) Calculate edge-edge and node-edge visibility
#
for nk in range(Np): # loop on range of number of points
ptk = p[:, nk] # tail point
# head point (%Np to get 0 as last point)
phk = p[:, (nk + 1) % Np]
# lnk : unitary vector on segment nk
lk = phk - ptk
nlk = np.sqrt(np.dot(lk, lk))
lnk = lk / nlk
# the epsilon is (1/1000) of the segment length
epsilonk = nlk / \
1000. # this can be dangerous (epsilon can be large)
# x--o----------------------o--x
# +eps -eps
pcornert = ptk + lnk * epsilonk # + n[:,nk]*epsilon
pcornerh = phk - lnk * epsilonk # + n[:,nk]*epsilon
#
# in any case no ray towark nk
# if nk is convex no ray toward (nk-1)%Np
#
# start from the two extremity of the segment
for i, pcorner in enumerate([pcornert, pcornerh]):
#
# if tail point
# remove nk segment
# and if the point is convex
# remove previous segment
#
# si point head
#
listpoint = range(Np)
listpoint.remove(nk) # remove current point
if i == 0: # first iteration pcornert
if nk in uconvex: # == 1
listpoint.remove((nk - 1) % Np)
if i == 1: # second iteration pcornerh
if (nk + 1) % Np in uconvex: # ==1
listpoint.remove((nk + 1) % Np)
for ns in listpoint:
pts = p[:, ns]
phs = p[:, (ns + 1) % Np]
# Add B.Uguen 2/01/2014 no possible visibility relation
# between aligned segments
if (not (is_aligned3(pts, phs, ptk) & is_aligned3(pts, phs, phk))):
ls = phs - pts
nls = np.sqrt(np.dot(ls, ls))
lns = ls / nls
epsilons = nls / 1000.
pte = pts + lns * epsilons # + n[:,ns]*epsilon
phe = phs - lns * epsilons # + n[:,ns]*epsilon
tbr = pyu.bitreverse(16, 5) / 16.
for alpha in tbr:
pa = pte + alpha * (phe - pte)
seg = shg.LineString((pcorner, pa))
# print "seg: ",seg.xy
# if npt[nk] == -3:
# plt.plot(np.array([pcorner[0],pa[0]]),np.array([pcorner[1],pa[1]]),linewidth=0.2,color='k')
# plt.draw()
# topological error can be raised here
seg2 = self.intersection(seg)
# if self.contains(seg):
if seg2.almost_equals(seg, decimal=4):
# print alpha,nk,ns
# plt.plot(np.array([pcorner[0],pa[0]]),np.array([pcorner[1],pa[1]]),linewidth=2,color='r')
# Gv.add_edge(-(uconvex[nk]+1),ns+1,weight=10)
if i == 0:
if nk in udiff:
Gv.add_edge(
npt[nk], nseg[ns], weight=1)
# plt.plot(np.array([Gv.pos[npt[nk]][0],Gv.pos[nseg[ns]][0]]),np.array([Gv.pos[npt[nk]][1],Gv.pos[nseg[ns]][1]]),'r')
if i == 1:
if (nk + 1) % Np in udiff:
Gv.add_edge(
npt[(nk + 1) % Np], nseg[ns], weight=1)
# plt.plot(np.array([Gv.pos[npt[(nk+1)%Np]][0],Gv.pos[nseg[ns]][0]]),np.array([Gv.pos[npt[(nk+1)%Np]][1],Gv.pos[nseg[ns]][1]]),'g')
# plt.draw()
# if i==1:
# if (((nseg[nk]==10) & (nseg[ns]==7)) or
# ((nseg[nk]==7) & (nseg[ns]==10))):
# pdb.set_trace()
if nseg[nk] != nseg[ns]:
if kwargs['eded']:
Gv.add_edge(
nseg[nk], nseg[ns], weight=1)
# else:
# print nseg[nk],nseg[ns]
# print pts,phs
# print ptk,phk
# if (((nseg[nk]==10) & (nseg[ns]==7)) or
# ((nseg[nk]==7) & (nseg[ns]==10))):
# plt.plot(np.array([Gv.pos[nseg[nk]][0],Gv.pos[nseg[ns]][0]]),np.array([Gv.pos[nseg[nk]][1],Gv.pos[nseg[ns]][1]]),'b')
# plt.plot(np.array([pcorner[0],pa[0]]),np.array([pcorner[1],pa[1]]),'b')
# print "seg: ",seg.xy
# print "seg2: ",seg2.xy
# print nseg[nk],nseg[ns]
# print pcorner , ptk
# print alpha , pa ,pte
# plt.draw()
# raw_input()
break
# else:
# print p
# print ns
# print nk
# print 'nsegnk : ',nseg[nk]
# print 'nsegns', nseg[ns]
# print 'ptk : ',ptk
# print 'phk : ',phk
# print 'pts : ',pts
# print 'phs : ',phs
# print "aligne :",nseg[nk],nseg[ns]
# pdb.set_trace()
if kwargs['show']:
nodes = np.array(Gv.nodes())
uneg = list(nodes[np.nonzero(nodes < 0)[0]])
upos = list(nodes[np.nonzero(nodes > 0)[0]])
nx.draw_networkx_nodes(Gv, Gv.pos, nodelist=upos,
node_color='blue', node_size=300, alpha=0.3)
nx.draw_networkx_nodes(Gv, Gv.pos, nodelist=uneg,
node_color='red', node_size=300, alpha=0.3)
nx.draw_networkx_labels(Gv, Gv.pos)
ndnd, nded, eded = gru.edgetype(Gv)
nx.draw_networkx_edges(Gv, Gv.pos, edgelist=eded,
edge_color='blue', width=2)
nx.draw_networkx_edges(Gv, Gv.pos, edgelist=ndnd,
edge_color='red', width=2)
nx.draw_networkx_edges(Gv, Gv.pos, edgelist=nded,
edge_color='green', width=2)
#label = {}
# for (u,v) in Gv.edges():
# d = Gv.get_edge_data(u,v)
# label[(u,v)]=d['weight']
# edge_label=nx.draw_networkx_edge_labels(Gv,Gv.pos,edge_labels=label)
return(Gv)
    def showGv(self, **kwargs):
        """ show graph Gv

        Parameters
        ----------
        display
        fig
        ax
        ndnd : boolean
            display node/node
        nded : boolean
            display node/edge
        eded : boolean
            display edge/edge
        linewidth: float
            default 2

        Notes
        -----
        NOTE(review): this method references ``tcc``, ``Np``, ``GRAY`` and
        ``Gv`` which are not defined in this scope, and reads
        ``kwargs['nodes']`` / ``kwargs['eded']`` which have no defaults --
        it cannot run as written; confirm intended source of Gv (probably
        should be a parameter or the return of buildGv).
        """
        defaults = {'display': False,
                    'fig': [],
                    'ax': [],
                    'ndnd': True,
                    'nded': False,
                    # NOTE(review): duplicate 'ndnd' key -- this second entry
                    # silently overrides the True above with False
                    'ndnd': False,
                    'linewidth': 2
                    }
        # merge defaults into kwargs and mirror them onto self
        for key, value in defaults.items():
            if key in kwargs:
                setattr(self, key, kwargs[key])
            else:
                setattr(self, key, value)
                kwargs[key] = value
        # create figure/axes when not provided
        if kwargs['fig'] == []:
            fig = plt.figure()
            fig.set_frameon(True)
        else:
            fig = kwargs['fig']
        if kwargs['ax'] == []:
            ax = fig.gca()
        else:
            ax = kwargs['ax']
        lring = self.exterior
        points = shg.MultiPoint(lring)
        # color points red when convex (tcc==1), gray otherwise
        # NOTE(review): tcc, Np and GRAY are undefined here
        for k, pt in enumerate(points):
            if tcc[k % Np] == 1:
                ax.plot(pt.x, pt.y, 'o', color='red')
            else:
                ax.plot(pt.x, pt.y, 'o', color=GRAY)
            k = k + 1
        patch = PolygonPatch(self, facecolor='#6699cc',
                             edgecolor='#6699cc', alpha=0.5, zorder=2)
        ax.add_patch(patch)
        # NOTE(review): Gv is undefined in this scope
        nodes = np.array(Gv.nodes())
        uneg = list(nodes[np.nonzero(nodes < 0)[0]])
        upos = list(nodes[np.nonzero(nodes > 0)[0]])
        # NOTE(review): 'nodes' has no default in defaults above
        if kwargs['nodes']:
            nx.draw_networkx_nodes(Gv, Gv.pos, nodelist=upos,
                                   node_color='blue', node_size=300, alpha=0.3)
            nx.draw_networkx_nodes(Gv, Gv.pos, nodelist=uneg,
                                   node_color='red', node_size=300, alpha=0.3)
            nx.draw_networkx_labels(Gv, Gv.pos)
        ndnd, nded, eded = gru.edgetype(Gv)
        if kwargs['eded']:
            nx.draw_networkx_edges(Gv, Gv.pos, edgelist=eded,
                                   edge_color='blue', width=2)
        if kwargs['ndnd']:
            nx.draw_networkx_edges(Gv, Gv.pos, edgelist=ndnd,
                                   edge_color='red', width=2)
        if kwargs['nded']:
            nx.draw_networkx_edges(Gv, Gv.pos, edgelist=nded,
                                   edge_color='green', width=2)
        return(fig, ax)
def ptconvex2(self):
    """ Determine convex / concave points of the Polygon.

    Warning: cvex and ccve can be switched depending on the polygon's
    direction of travel.

    Returns
    -------
    cvex : list
        convex point identifiers (negative vnodes)
    ccve : list
        concave point identifiers

    Examples
    --------
    >>> import shapely.geometry as shg
    >>> points = shg.MultiPoint([(0, 0), (0, 1), (3.2, 1), (3.2, 0.7), (0.4, 0.7), (0.4, 0)])
    >>> polyg1 = Polygon(points)
    >>> cvex, ccave = polyg1.ptconvex2()
    """
    if not hasattr(self, 'xy'):
        self.coorddeter()
    # BUG FIX (Python 3): filter() returns an iterator which np.array()
    # would wrap as a 0-d object array; materialize the list instead.
    pts = [x for x in self.vnodes if x < 0]
    A = self.xy[:, :-1]
    # NOTE(review): np.roll without axis= rolls the *flattened* array;
    # preserved as-is, but confirm axis=1 was not intended.
    B = np.roll(A, -1)
    C = np.roll(B, -1)
    # counter-clockwise test, orientation taken from the signed area
    if self.signedarea() > 0:
        cw = ccw(C, B, A)
    else:
        cw = ccw(A, B, C)
    cvex = np.array(pts)[np.roll(cw, +1)]
    ccve = np.array(pts)[np.roll(~cw, +1)]
    return cvex.tolist(), ccve.tolist()
def ptconvex(self, display=False):
    """ Return per-point convexity flags of the polygon.

    Parameters
    ----------
    display : boolean
        default False (kept for interface compatibility; unused here)

    Returns
    -------
    tcc : np.array (1 x Nseg)
        1 if convex, -1 if concave, 0 if flat
    n : np.array (2 x Nseg)
        interior normals of the segments

    Notes
    -----
    There is no orientation convention for the polygon, so the sign of
    the cross product cannot be interpreted directly.  The code exploits
    the property that for N = Nx + Nc points (Nx convex, Nc concave) one
    has Nx >= Nc.  A point common to two parallel segments yields a
    zero cross product (flat transition).

    See Also
    --------
    Lr2n
    """
    lring = self.exterior
    #
    # interior normals of each segment
    #
    x, y = lring.xy
    Np = len(x) - 1
    Nseg = Np
    p = np.array([x[0:-1], y[0:-1]])
    n = Lr2n(p)
    tcc = np.zeros(Np)
    #
    # cross product between two adjacent normals
    #
    for k in range(Nseg):
        nk = n[:, (k - 1) % Nseg]
        nkp1 = n[:, k]
        tcc[k] = np.cross(nk, nkp1)
    #
    # remove flat transitions -- warning: threshold-based, fragile
    #
    upos = np.nonzero(tcc > 1e-2)[0]
    uneg = np.nonzero(tcc < -1e-2)[0]
    if len(upos) > len(uneg):
        nconvex = uneg
        nconcav = upos
    elif len(upos) < len(uneg):
        nconvex = upos
        nconcav = uneg
    else:
        # BUG FIX: nconvex/nconcav were left undefined in this branch,
        # raising NameError below.  The assignment is arbitrary for a
        # "star" polygon -- TODO confirm the intended convention.
        logging.warning("polygon is a star")
        nconvex = upos
        nconcav = uneg
    tcc = np.zeros(Np)
    tcc[nconvex] = 1
    tcc[nconcav] = -1
    return (tcc, n)
class Geomview(pro.PyLayers):
    """ Geomview file class.

    Parent of GeomVect, Geomlist and Geomoff.

    Methods
    -------
    show3
        launch geomview on the file
    """

    def __init__(self, _filename, clear=False):
        # resolve the short name into the project "geom" directory
        self.filename = pyu.getlong(_filename, "geom")
        if clear:
            # truncate the file
            with open(self.filename, 'w'):
                pass

    def show3(self):
        """ Launch geomview on the file.

        .. todo:
            change background
            look for other geomview options
        """
        os.system("geomview -b 1 1 1 " + self.filename + " 2>/dev/null &")
class Geomlist(Geomview):
    """ Geomview list (.list) file class. """

    def __init__(self, _filename, clear=False):
        Geomview.__init__(self, _filename + '.list', clear=clear)

    def append(self, strg):
        """ Append a line to the .list file. """
        with open(self.filename, 'a') as fd:
            fd.write(strg)
class GeomVect(Geomview):
    """ Geomview VECT file class.

    VECT objects represent lists of polylines (strings of connected line
    segments, possibly closed).  A degenerate polyline can represent a
    point.  Layout of a VECT file::

        VECT (or 4VECT)
        NPolylines NVertices NColors
        Nv[0] ... Nv[NPolylines-1]   # vertices per polyline
        Nc[0] ... Nc[NPolylines-1]   # colors per polyline
        Vert[0] ... Vert[NVertices-1]   # 3*NVertices floats (4* if 4VECT)
        Color[0] ... Color[NColors-1]   # 4*NColors floats (RGBA)

    A negative Nv[i] indicates a closed polyline; 1 denotes a
    single-pixel point; the sum of |Nv[i]| must equal NVertices.
    Nc[i] is 0 (reuse previous color), 1 (single color for the whole
    polyline) or abs(Nv[i]) (one color per vertex).

    Methods
    -------
    segments
        display a set of segments
    geomBase
        display a frame
    points
        display a set of points
    """

    def __init__(self, _filename='geomdef', clear=False):
        _filename = _filename + '.vect'
        Geomview.__init__(self, _filename, clear=clear)

    def segments(self, ds, i2d=True, linewidth=2):
        """ Display segments.

        Parameters
        ----------
        ds : dict
            maps a key to a (pta, phe) pair of segment termination points
        i2d : boolean (default True)
            2d indicator; when True the z coordinate is forced to 0
        linewidth : float
            default 2
        """
        Ns = len(ds)
        with open(self.filename, "w") as fo:
            fo.write("appearance { linewidth %d }\n" % linewidth)
            fo.write("VECT\n")
            # Ns polylines, 2*Ns vertices, 0 colors
            fo.write("%d %d %d\n" % (Ns, 2 * Ns, 0))
            # 2 vertices per polyline
            for k in range(Ns):
                fo.write("2 ")
            fo.write("\n")
            # 0 color per polyline
            for k in range(Ns):
                fo.write("0 ")
            fo.write("\n")
            for k in ds:
                (pta, phe) = ds[k]
                if i2d:
                    fo.write("%6.3f %6.3f %6.3f\n" % (pta[0], pta[1], 0.0))
                    fo.write("%6.3f %6.3f %6.3f\n" % (phe[0], phe[1], 0.0))
                else:
                    fo.write("%6.3f %6.3f %6.3f\n" % (pta[0], pta[1], pta[2]))
                    fo.write("%6.3f %6.3f %6.3f\n" % (phe[0], phe[1], phe[2]))

    def geomBase(self, M, pt=np.array([0., 0., 0.]),
                 col=np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]]),
                 linewidth=3, scale=1):
        """ Construct a geomview VECT file to visualize a frame.

        Parameters
        ----------
        M : ndarray (3 x 3)
            [v1, v2, v3] frame vectors (one per column)
        pt : np.array
            origin point (default (0, 0, 0))
        col : ndarray (3 x 3)
            one RGB color per frame vector
        linewidth : int
            default 3
        scale : float
            scaling applied to the frame vectors (default 1)

        Examples
        --------
        >>> from pylayers.util.geomutil import *
        >>> import numpy as np
        >>> M = np.vstack((np.array([1, 0, 0]),
        ...                np.array([0, 1, 0]),
        ...                np.array([0, 0, 1])))
        >>> #gv = GeomVect('test')
        >>> #gv.geomBase(M)
        >>> #gv.show3()
        """
        M = M * scale
        with open(self.filename, "w") as fo:
            fo.write("appearance { linewidth %d }\n" % linewidth)
            fo.write("VECT\n")
            fo.write("3 6 3\n")  # 3 lines, 6 vertices, 3 colors
            fo.write("2 2 2\n")  # 2 points per line
            fo.write("1 1 1\n")  # 1 color per line
            # one segment per frame vector, from pt to pt + M[:, i]
            for i in range(3):
                fo.write("%6.3f %6.3f %6.3f\n" % (pt[0], pt[1], pt[2]))
                fo.write("%6.3f %6.3f %6.3f\n" % (pt[0] + M[0, i],
                                                  pt[1] + M[1, i],
                                                  pt[2] + M[2, i]))
            for i in range(3):
                fo.write("%6.3f %6.3f %6.3f 0.\n" %
                         (col[i, 0], col[i, 1], col[i, 2]))

    def points(self, pt, colorname='blue'):
        """ Display a set of points with a given color.

        Parameters
        ----------
        pt : list | dict | np.ndarray
            sequence of points: an ndarray (3 x npt), a list (single
            point) or a dict whose values are (x, y[, z]) tuples
        colorname : str
            a color name from pyu.coldict() keys

        Examples
        --------
        >>> import numpy as np
        >>> from pylayers.util.geomutil import *
        >>> import scipy as sp
        >>> pt1 = sp.rand(3, 10)
        >>> pt2 = {1: (0, 0, 0), 2: (10, 10, 10), 3: (0, 10, 0), 4: (10, 0, 0)}
        >>> gv1 = GeomVect('test1')
        >>> gv1.points(pt1)
        >>> gv2 = GeomVect('test2')
        >>> gv2.points(pt2)

        .. todo::
            colorbar depending on a value associated with the point
        """
        if type(pt) == list:
            pt = np.array(pt).reshape(3, 1)
        if type(pt) == dict:
            npt = len(pt)
        if type(pt) == np.ndarray:
            npt = np.shape(pt)[1]
        with open(self.filename, "w") as fo:
            if npt > 1:
                fo.write("appearance{\n")
                fo.write("linewidth 8}\n")
                fo.write("VECT\n")
                fo.write(str(npt) + " " + str(npt) + " " + str(npt) + "\n")
                fo.write("1 " * npt + "\n")
                fo.write("1 " * npt + "\n")
            else:
                # a single point is rendered as a small sphere
                fo.write("ESPHERE\n")
                fo.write("0.2\n")
            if type(pt) == dict:
                # BUG FIX (Python 3): dict views are not indexable;
                # materialize the keys once.
                keys = list(pt.keys())
                for i in range(npt):
                    val = pt[keys[i]]
                    x = str(val[0])
                    y = str(val[1])
                    # missing z defaults to 0
                    z = str(val[2]) if len(val) > 2 else str(0.0)
                    fo.write(x + " " + y + " " + z + "\n")
            if type(pt) == np.ndarray:
                for i in range(npt):
                    # replace(',', '.') guards against locale formatting
                    x = str(pt[0, i]).replace(',', '.')
                    y = str(pt[1, i]).replace(',', '.')
                    if np.shape(pt)[0] > 2:
                        z = str(pt[2, i]).replace(',', '.')
                    else:
                        z = str(0.0)
                    fo.write(x + " " + y + " " + z + "\n")
            coldic = pyu.coldict()
            col = pyu.rgb(coldic[colorname], 'float')
            if npt > 1:
                for i in range(npt):
                    fo.write("%6.3f %6.3f %6.3f 1\n" %
                             (col[0], col[1], col[2]))
class Geomoff(Geomview):
    """ Geomview OFF (Object File Format) file class.

    Format summary::

        [ST][C][N][4][n]OFF      # header keyword
        [Ndim]                   # space dimension, only if nOFF
        NVertices NFaces NEdges
        x[0] y[0] z[0]
        ...                      # vertices (with normals/colors/texture
        ...                      # coords if the N/C/ST prefixes are set)
        Nv v[0] v[1] ... v[Nv-1] colorspec

    colorspec may be empty (default color), a colormap index (read from
    cmap.fmap) or 3/4 RGB[A] integers in 0..255.
    """

    def __init__(self, _filename='geomoff'):
        _filename = _filename + '.off'
        Geomview.__init__(self, _filename)

    def loadpt(self):
        """ Load the vertices of the OFF file.

        Returns
        -------
        t : np.ndarray
            vertex coordinates; a 1-d array when there is a single
            vertex (historical behaviour), (nv x ndim) otherwise

        Notes
        -----
        Expects the single-line header ``OFF nv nf ne`` as written by
        e.g. :meth:`cylinder`.
        """
        with open(self.filename, 'r') as fo:
            lis = fo.readlines()
        typ, nv, nf, ne = lis[0].split(' ')
        if typ != 'OFF':
            logging.critical('not an off file')
        # int() instead of eval(): same result, no code execution risk
        nv = int(nv)
        nf = int(nf)
        ne = int(ne)
        rows = [np.array(lis[k + 1].split(), dtype=float)
                for k in range(nv)]
        t = rows[0] if nv == 1 else np.vstack(rows)
        return t

    def savept(self, ptnew, _fileoff):
        """ Save a copy of the OFF file with replaced vertices.

        Parameters
        ----------
        ptnew : np.ndarray (nv x 3)
            new vertex coordinates
        _fileoff : str
            short name of the output file (placed in the geom directory)
        """
        with open(self.filename, 'r') as fo:
            lis = fo.readlines()
        typ, nv, nf, ne = lis[0].split(' ')
        if typ != 'OFF':
            logging.critical('not an off file')
        else:
            try:
                nv = int(nv)
                nf = int(nf)
                ne = int(ne)
            except ValueError:
                logging.critical('load off wrong number of values')
        fileoff = pyu.getlong(_fileoff, "geom")
        with open(fileoff, 'w') as fo:
            fo.write(lis[0])
            for k in range(nv):
                fo.write(str(ptnew[k, 0]) + ' ' + str(ptnew[k, 1]
                         ) + ' ' + str(ptnew[k, 2]) + ' ' + '\n')
            # copy the remaining (face) lines unchanged.
            # BUG FIX: the original indexed lis[k + 2:] with the leftover
            # loop variable, which is a NameError when nv == 0.
            for li in lis[nv + 1:]:
                fo.write(li)

    def polygon(self, p, poly):
        """ Create a geomview OFF file for a single polygon.

        Parameters
        ----------
        p : np.ndarray
            sequence of points (npt x 3)
        poly : list
            point numbers (index starting at 0)
        """
        npt = np.shape(p)[0]
        npoly = len(poly)
        with open(self.filename, 'w') as fo:
            fo.write("OFF\n")
            fo.write("%d 1 \n" % (npt + 1))
            # dummy first vertex: the face indices below are shifted by +1
            fo.write("0.000 0.000 0.000 \n")
            for i in range(npt):
                # NOTE(review): columns 1 and 2 are swapped (y <-> z);
                # presumably a geomview axis convention -- confirm
                fo.write("%6.3f %6.3f %6.3f \n" % (p[i, 0], p[i, 2], p[i, 1]))
            fo.write("%i " % (npoly - 1))
            for k in poly[:-1]:
                fo.write("%i " % (k + 1))
            fo.write("1.0 1.0 1.0 0.4\n")

    def polygons(self, p, polys):
        """ Create a geomview OFF file for a list of polygons.

        Parameters
        ----------
        p : np.ndarray
            sequence of points (npt x 3)
        polys : list of lists
            point numbers of each polygon (index starting at 0)
        """
        npt = np.shape(p)[0]
        npoly = len(polys)
        with open(self.filename, 'w') as fo:
            fo.write("OFF\n")
            fo.write("%d %d \n" % (npt + 1, npoly))
            # dummy first vertex: the face indices below are shifted by +1
            fo.write("0.000 0.000 0.000 \n")
            for i in range(npt):
                # NOTE(review): y <-> z swap as in polygon() -- confirm
                fo.write("%6.3f %6.3f %6.3f \n" % (p[i, 0], p[i, 2], p[i, 1]))
            for poly in polys:
                nv = len(poly)
                fo.write("%i " % (nv))
                for k in poly:
                    fo.write("%i " % (k + 1))
                fo.write("1.0 0.0 1.0 0.4\n")

    def cylinder(self, r, l, nphi=20, nl=3, col=[1., 0.0, 1.0], alpha=0.1):
        """ Create a cylinder.

        Parameters
        ----------
        r : float
            radius
        l : float
            length
        nphi : int
            number of azimuth samples
        nl : int
            number of samples along the axis
        col : list [r, g, b]
            face color (read-only; the default list is never mutated)
        alpha : float
            transparency
        """
        tphi = np.linspace(0, 2 * np.pi, nphi, endpoint=False)
        tz = np.linspace(-l / 2., l / 2., nl)
        npoly = nphi * (nl - 1)
        nedges = nphi * (2 * nl - 1)
        # loop-invariant color specification, hoisted out of the loop
        colorspec = str(col[0]) + ' ' + str(col[1]) + ' ' + \
            str(col[2]) + ' ' + str(alpha) + '\n'
        with open(self.filename, 'w') as fo:
            fo.write("OFF %d %d %d\n" % (nphi * nl + 1, npoly, nedges))
            fo.write("0.000 0.000 0.000 \n")
            for z in tz:
                for phi in tphi:
                    x = r * np.cos(phi)
                    y = r * np.sin(phi)
                    fo.write("%6.3f %6.3f %6.3f \n" % (x, y, z))
            for k in range(npoly):
                # BUG FIX (Python 3): k / nphi is float division and
                # corrupted the quad indices; use integer division.
                il = k // nphi
                iphi = k % nphi
                a = il * nphi + iphi
                b = il * nphi + (iphi + 1) % nphi
                c = (il + 1) * nphi + iphi
                d = (il + 1) * nphi + (iphi + 1) % nphi
                fo.write("4 %i %i %i %i " % (a + 1, b + 1, d + 1, c + 1))
                fo.write(colorspec)

    def box(self, extrem=np.array([-1, 1, -1, 1, -3, 3])):
        """ Create a box.

        Parameters
        ----------
        extrem : ndarray
            (1 x 6) [xmin, xmax, ymin, ymax, zmin, zmax]

        Examples
        --------
        >>> geo = Geomoff('test')
        >>> geo.box()
        """
        xmin, xmax = extrem[0], extrem[1]
        ymin, ymax = extrem[2], extrem[3]
        zmin, zmax = extrem[4], extrem[5]
        # the 8 corners of the box
        p = np.zeros((8, 3))
        p[0, :] = np.array([xmin, ymin, zmin])
        p[1, :] = np.array([xmax, ymin, zmin])
        p[2, :] = np.array([xmax, ymax, zmin])
        p[3, :] = np.array([xmin, ymax, zmin])
        p[4, :] = np.array([xmin, ymin, zmax])
        p[5, :] = np.array([xmax, ymin, zmax])
        p[6, :] = np.array([xmax, ymax, zmax])
        p[7, :] = np.array([xmin, ymax, zmax])
        with open(self.filename, 'w') as fo:
            fo.write("OFF\n")
            fo.write("8 6 12\n")
            for i in range(8):
                # NOTE(review): y <-> z swap as in polygon() -- confirm
                fo.write("%6.3f %6.3f %6.3f \n" % (p[i, 0], p[i, 2], p[i, 1]))
            fo.write("4 0 1 2 3 1 0 0 0.3\n")
            fo.write("4 7 4 0 3 1 0 0 0.3\n")
            fo.write("4 4 5 1 0 1 0 0 0.3\n")
            fo.write("4 5 6 2 1 1 0 0 0.3\n")
            fo.write("4 3 2 6 7 1 0 0 0.3\n")
            fo.write("4 6 5 4 7 1 0 0 0.3\n")

    def pattern(self, theta, phi, E, **kwargs):
        """ Export an antenna pattern in geomview COFF format.

        Parameters
        ----------
        theta : np.array (,Nt)
        phi : np.array (,Np)
        E : np.array complex (Nt, Np)
        po : origin (1 x 3)
        T : rotation matrix (3 x 3)
        minr : radius of the minimum
        maxr : radius of the maximum
        tag : str (kept for interface compatibility; unused)
        ilog : True (log scale) | False (linear)

        Examples
        --------
        >>> from pylayers.util.geomutil import *
        >>> import numpy as np
        >>> th = np.arange(0, np.pi, 0.05)
        >>> ph = np.arange(0, 2 * np.pi, 0.05)
        >>> E = 1.5 * np.sin(th[:, np.newaxis]) * np.cos(0 * ph[np.newaxis, :])
        >>> g = Geomoff('dipole')
        >>> g.pattern(th, ph, E)
        >>> g.show3()
        """
        defaults = {'po': np.array([0, 0, 0]),
                    'T': np.eye(3),
                    'minr': 0.1,
                    'maxr': 1,
                    'tag': 'Pat',
                    'ilog': False}
        for key, value in defaults.items():
            if key not in kwargs:
                kwargs[key] = value
        minr = kwargs['minr']
        maxr = kwargs['maxr']
        ilog = kwargs['ilog']
        po = kwargs['po']
        # T must be a unitary (rotation) matrix
        T = kwargs['T']
        assert (abs(la.det(T)) > 0.99)
        Nt = len(theta)
        Np = len(phi)
        theta = theta[:, np.newaxis]
        phi = phi[np.newaxis, :]
        if ilog:
            R = 10 * np.log10(abs(E))
        else:
            R = abs(E)
        # normalize the radius into [minr, maxr]
        if R.min() != R.max():
            U = (R - R.min()) / (R.max() - R.min())
            Ry = minr + (maxr - minr) * U
        else:
            Ry = maxr
        # cartesian coordinates of the pattern samples, each (Nt x Np)
        x = Ry * np.sin(theta) * np.cos(phi)
        y = Ry * np.sin(theta) * np.sin(phi)
        z = Ry * np.cos(theta) * np.ones(phi.shape)
        # p : Nt x Np x 3
        p = np.concatenate(
            (x[..., np.newaxis], y[..., np.newaxis], z[..., np.newaxis]), axis=2)
        # antenna frame -> global frame, then translation to po
        q = np.einsum('ij,klj->kli', T, p)
        q[..., 0] = q[..., 0] + po[0]
        q[..., 1] = q[..., 1] + po[1]
        q[..., 2] = q[..., 2] + po[2]
        Npoints = Nt * Np
        Nfaces = (Nt - 1) * Np
        Nedge = 0
        # map the normalized radius onto the current colormap
        colmap = plt.get_cmap()
        Ncol = colmap.N
        cmap = colmap(np.arange(Ncol))
        if R.min() != R.max():
            g = np.round(U * (Ncol - 1)).astype(int)
        else:
            g = np.round(np.ones((Nt, Np)) * (Ncol - 1)).astype(int)
        with open(self.filename, 'w') as fd:
            fd.write('COFF\n')
            fd.write(str(Npoints) + ' ' + str(Nfaces) + ' ' + str(Nedge) + '\n')
            # one colored vertex per (theta, phi) sample
            for ii in range(Nt):
                for jj in range(Np):
                    cpos = str(q[ii, jj, 0]) + ' ' + \
                        str(q[ii, jj, 1]) + ' ' + str(q[ii, jj, 2])
                    cpos = cpos.replace(',', '.')
                    ik = g[ii, jj]
                    ccol = str(cmap[ik, 0]) + ' ' + str(cmap[ik, 1]) + \
                        ' ' + str(cmap[ik, 2])
                    ccol = ccol.replace(',', '.')
                    fd.write(cpos + ' ' + ccol + ' 0.2\n')
            # quads connecting adjacent samples, wrapping in phi
            for ii in range(Nt - 1):
                for jj in range(Np):
                    p1 = ii * Np + jj
                    p2 = ii * Np + np.mod(jj + 1, Np)
                    p3 = (ii + 1) * Np + jj
                    p4 = (ii + 1) * Np + np.mod(jj + 1, Np)
                    fd.write('4 ' + str(p1) + ' ' + str(p2) + ' ' +
                             str(p4) + ' ' + str(p3) + ' 0.5\n')
def angular(p1, p2):
    """ Angle of the direction from p1 to p2, in [0, 2*pi].

    DEPRECATED (per original author note): kept for backward
    compatibility; prefer :func:`vecang`.

    Parameters
    ----------
    p1 : np.array
        point p1
    p2 : np.array
        point p2 (origin of the measured direction, oddly)

    Returns
    -------
    angle : float
        arctan2(p2 - p1) shifted by pi, hence in (0, 2*pi]

    Examples
    --------
    >>> import numpy as np
    >>> p1 = np.array([0, 0])
    >>> a1 = angular(np.array([1, 0]), p1)
    >>> a3 = angular(np.array([0, 1]), p1)

    See Also
    --------
    vecang
    """
    # All four quadrant branches of the original computed exactly the
    # same expression, so the conditional chain is collapsed.
    return np.arctan2(p2[1] - p1[1], p2[0] - p1[0]) + np.pi
def vecang(v1, v2):
    """ Angle from v1 to v2, wrapped into [0, 2*pi[.

    Parameters
    ----------
    v1 : np.array (2 x Np) or (2,)
        vector(s)
    v2 : np.array (2 x Np) or (2,)
        vector(s)

    Returns
    -------
    alpha : np.array (Np,)
        angle in radians
    """
    # promote 1-d vectors to a single column
    if len(v1.shape) == 1:
        v1 = v1[:, None]
    if len(v2.shape) == 1:
        v2 = v2[:, None]
    delta = np.arctan2(v2[1, :], v2[0, :]) - np.arctan2(v1[1, :], v1[0, :])
    # fold negative angles into [0, 2*pi[
    return np.where(delta < 0, delta + 2 * np.pi, delta)
def SignedArea(p=np.array([[0, 10, 10, 0], [0, 0, -2, -2]])):
    """ Signed area of a closed sequence of points in the plane.

    Parameters
    ----------
    p : array (2 x Np)

    Returns
    -------
    A : float
        signed area (negative for a clockwise sequence)

    Examples
    --------
    >>> from pylayers.util.geomutil import *
    >>> p = np.array([[0, 10, 10, 0], [0, 0, -2, -2]])
    >>> A = SignedArea(p)
    >>> assert(A + 20 < 1e-15)
    """
    x = p[0, :]
    y = p[1, :]
    # shoelace-style formula written with circular shifts
    return np.sum(np.roll(x, -1) * (np.roll(y, -2) - y)) / 2.
def Centroid(p=np.array([[0, 10, 10, 0], [0, 0, -2, -2]])):
    """ Centroid of the polygon defined by a sequence of points.

    Parameters
    ----------
    p : np.array
        polygon (2 x Np)

    Returns
    -------
    pc : np.array (2,)
        centroid coordinates

    References
    ----------
    http://en.wikipedia.org/wiki/Centroid

    Examples
    --------
    >>> from pylayers.util.geomutil import *
    >>> p = np.array([[0, 10, 10, 0], [0, 0, -2, -2]])
    >>> pc = Centroid(p)
    >>> d = pc - np.array([5., -1])
    >>> assert(np.dot(d, d) < 1e-15)
    """
    A = SignedArea(p)
    assert A != 0
    xs = p[0, :]
    ys = p[1, :]
    xn = np.roll(xs, -1)
    yn = np.roll(ys, -1)
    # per-edge cross terms of the standard centroid formula
    T = xs * yn - ys * xn
    Cx = np.sum(T * (xs + xn)) / (6 * A)
    Cy = np.sum(T * (ys + yn)) / (6 * A)
    return np.array([Cx, Cy])
def Lr2n(p=np.array([[0, 10, 10, 0], [0, 0, -2, -2]]), closed=True):
    """ Linear ring to inward normals.

    Parameters
    ----------
    p : np.array (2 x N)
        sequence of points of the linear ring
    closed : boolean
        default True; when False the wrap-around segment is dropped

    Returns
    -------
    n : np.array (2 x N)
        unit normals pointing towards the interior, whatever the
        orientation of the ring

    Notes
    -----
    The per-segment difference is expressed as a quasi-Toeplitz matrix M
    with -1 on the diagonal and +1 on the superdiagonal::

        [-1  1  0 ...]
        [ 0 -1  1 ...]
        [ ...        ]     (truncate last row if the ring is open)
        [ 1  0 ... -1]     (wrap-around row if the ring is closed)

    Examples
    --------
    >>> import shapely.geometry as shg
    >>> import numpy as np
    >>> points1 = shg.MultiPoint([(0, 0), (0, 1), (1, 1), (1, 0)])
    >>> x1, y1 = shg.Polygon(points1).exterior.xy
    >>> n1 = Lr2n(np.array([x1[0:-1], y1[0:-1]]))
    """
    Np = np.shape(p)[1]
    # difference operator: -1 on the diagonal, +1 on the superdiagonal
    M = np.eye(Np, k=1) - np.eye(Np)
    if closed:
        M[Np - 1, 0] = 1
    else:
        M = M[0:Np - 1, :]
    # rotate each segment vector by 90 degrees: (dx, dy) -> (dy, -dx)
    n = np.dot(M, np.flipud(p).T)
    n[:, 1] = -n[:, 1]
    n = n.T
    # normalize each normal
    modn = np.sqrt(np.sum(n * n, 0))
    assert modn.all() > 0
    nn = n / modn
    # enforce inward orientation whatever the ring's direction of travel
    if SignedArea(p) > 0:
        nn = -nn
    return nn
def isBetween(p1, p2, p, epsilon=1e-5):
    """ Test whether p lies on the segment [p1, p2].

    Parameters
    ----------
    p1 : np.array
    p2 : np.array
    p : np.array
    epsilon : float
        collinearity tolerance, default 1e-5

    Returns
    -------
    boolean

    Examples
    --------
    >>> p1 = np.array([0, 0])
    >>> p2 = np.array([2, 0])
    >>> p = np.array([1, 0])
    >>> assert(isBetween(p1, p2, p)), 'error'
    """
    seg = p2 - p1
    rel = p - p1
    # reject points that are not (nearly) collinear with the segment
    if abs(np.cross(rel, seg)) > epsilon:
        return False
    # projection of rel on seg must fall within [0, |seg|^2]
    proj = np.dot(rel, seg)
    return 0 <= proj <= np.dot(seg, seg)
def pvec(v1, v2):
    """ Cross product of two 3-vectors.

    Parameters
    ----------
    v1 : numpy array
    v2 : numpy array

    Returns
    -------
    v3 = v1 x v2 (float array)

    See Also
    --------
    np.cross

    Examples
    --------
    >>> v1 = np.array([1, 0, 0])
    >>> v2 = np.array([0, 1, 0])
    >>> v3 = pvec(v1, v2)
    """
    # float cast keeps the historical float dtype of the result
    return np.cross(np.asarray(v1, dtype=float), v2)
def pvecn(v1, v2):
    """ Cross product followed by normalization.

    Parameters
    ----------
    v1 : numpy array
    v2 : numpy array

    Returns
    -------
    v3 = v1 x v2 / | v1 x v2 |, or an all-NaN array when v1 and v2 are
    colinear (the norm is zero)

    Examples
    --------
    >>> v1 = np.array([2, 0, 0])
    >>> v2 = np.array([0, 2, 0])
    >>> v3 = pvecn(v1, v2)

    See Also
    --------
    numpy.cross
    """
    v3 = np.cross(v1, v2)
    norm = np.sqrt(np.dot(v3, v3))
    # BUG FIX: the original try/except never triggered (numpy scalar
    # 0/0 only warns) and would have left the result unbound if it had.
    # Keep the historical message and NaN result for colinear inputs.
    if norm == 0:
        print("error divide by zero in pvecn")
        return np.full(np.shape(v3), np.nan)
    return v3 / norm
def onb(A, B, v):
    """ Orthonormal basis from two points defining an axis and a vector.

    Parameters
    ----------
    A : np.array (3 x n)
        tail points of the axes
    B : np.array (3 x n)
        head points of the axes
    v : np.array (3 x n)
        reference vectors

    Returns
    -------
    T : np.array (n x 3 x 3)
        bases (un, vn, wn) stacked as columns; (un, vn) spans the plane
        transverse to the axis and wn is the unit vector along AB

    Examples
    --------
    >>> A = np.array([[0, 0, 0, 0], [1, 2, 3, 4], [0, 0, 0, 0]])
    >>> B = np.array([[0, 0, 0, 0], [1, 2, 3, 4], [10, 10, 10, 10]])
    >>> v = np.array([[1, 1, 1, 1], [0, 0, 0, 0], [0, 0, 0, 0]])
    >>> T = onb(A, B, v)
    >>> T.shape
    (4, 3, 3)

    See Also
    --------
    pylayers.util.geomutil.Geomvect.geomBase
    pylayers.util.mobility.body
    """
    ab = B - A
    # wn : unit vector along each axis (3 x n)
    wn = ab / np.sqrt(np.sum(ab * ab, axis=0))
    # remove from v its component along the axis
    u = v - np.sum(v * wn, axis=0) * wn
    un = u / np.sqrt(np.sum(u * u, axis=0))
    # vn completes the right-handed basis
    vn = np.cross(wn, un, axis=0)
    # stack as (3 x n x 3) then put the axis index first -> (n x 3 x 3)
    return np.dstack((un, vn, wn)).swapaxes(0, 1)
def dist_sph(u1, u2, mode=1):
    """ Distance between points on the unit sphere.

    Parameters
    ----------
    u1 : np.array (N x 2)
        (theta, phi) of the first point set
    u2 : np.array (M x 2)
        (theta, phi) of the second point set
    mode : int
        0 : angular distance normalized by pi
        1 : 1 - cos(angle)          (default)
        2 : (1 - cos(angle)) / 2

    Returns
    -------
    A : np.array (N x M)
        pairwise distances

    Raises
    ------
    ValueError
        if mode is not 0, 1 or 2 (the original raised NameError)
    """
    def _sph2cart(u):
        # spherical (theta, phi) -> cartesian unit vectors (3 x len(u))
        return np.array((np.cos(u[:, 1]) * np.sin(u[:, 0]),
                         np.sin(u[:, 1]) * np.sin(u[:, 0]),
                         np.cos(u[:, 0])))
    v1 = _sph2cart(u1)
    v2 = _sph2cart(u2)
    # BUG FIX: the clipped dot product guarding arccos against rounding
    # was computed but ignored by modes 1 and 2, which recomputed the
    # raw dot product; use the clipped value consistently.
    v1dv2 = np.clip(np.dot(v1.T, v2), -1., 1.)
    if mode == 0:
        return np.arccos(v1dv2) / np.pi
    elif mode == 1:
        return 1. - v1dv2
    elif mode == 2:
        return (1. - v1dv2) / 2.0
    raise ValueError("mode must be 0, 1 or 2")
def vec_sph(th, ph):
    """ Spherical orthonormal frame at direction (th, ph).

    Parameters
    ----------
    th : float
        theta angle (radians)
    ph : float
        phi angle (radians)

    Returns
    -------
    B : np.array (3 x 3)
        rows are (e_theta, e_phi, e_r)

    See Also
    --------
    SphericalBasis
    """
    sth, cth = np.sin(th), np.cos(th)
    sph, cph = np.sin(ph), np.cos(ph)
    e_th = np.array((cth * cph, cth * sph, -sth))
    e_ph = np.array((-sph, cph, 0))
    e_r = np.array((cph * sth, sph * sth, cth))
    return np.vstack((e_th, e_ph, e_r))
def ellipse(fd, p, vth, vph, Eth, Eph, N):
    """ Write a polarization ellipse into an already-open geomview VECT file.

    Parameters
    ----------
    fd : file descriptor
        open, writable file object (not closed here)
    p : np.array
        ellipse center
    vth : np.array
        unit vector along theta
    vph : np.array
        unit vector along phi
    Eth : complex
        field component along vth
    Eph : complex
        field component along vph
    N : int
        discretization step count
    """
    pas = 2 * np.pi / N
    alpha = np.linspace(0, 2 * np.pi - pas, N)
    # magnitudes and phases of the two field components
    Rth = abs(Eth)
    Rph = abs(Eph)
    delta_th = np.arctan2(np.imag(Eth), np.real(Eth))
    delta_ph = np.arctan2(np.imag(Eph), np.real(Eph))
    u3 = np.ones(3)
    uN = np.ones(N)
    Al_th = np.outer(u3, alpha + delta_th)
    Al_ph = np.outer(u3, alpha + delta_ph)
    U1 = np.outer(vth, uN)
    U2 = np.outer(vph, uN)
    P = np.outer(p, uN)
    # points on the ellipse (3 x N)
    # (the original also computed pu1, pu2, vEre, vEim which were never
    # used; they have been removed)
    pc = P + (Rth * U1 * np.cos(Al_th) + Rph * U2 * np.cos(Al_ph))
    fd.write("appearance { linewidth 3 }\n")
    fd.write("VECT\n")
    fd.write("%d %d %d \n" % (N, 2 * N, N))
    fd.write("\n")
    # 2 vertices per polyline
    for i in range(N):
        fd.write("%d " % 2)
    fd.write("\n")
    # 1 color per polyline
    for i in range(N):
        fd.write("%d " % 1)
    fd.write("\n")
    # consecutive segments along the ellipse, closing on the first point
    for i in range(N - 1):
        fd.write("%6.3f %6.3f %6.3f\n" % (pc[0, i], pc[1, i], pc[2, i]))
        fd.write("%6.3f %6.3f %6.3f\n" %
                 (pc[0, i + 1], pc[1, i + 1], pc[2, i + 1]))
        fd.write("\n")
    fd.write("%6.3f %6.3f %6.3f\n" %
             (pc[0, N - 1], pc[1, N - 1], pc[2, N - 1]))
    fd.write("%6.3f %6.3f %6.3f\n" % (pc[0, 0], pc[1, 0], pc[2, 0]))
    fd.write("\n")
    # gray-level ramp; NOTE(review): (i - 1) / N starts below 0,
    # possibly i / N was intended -- preserved as-is
    for i in range(N):
        v = float(i - 1) / N
        fd.write("%g %g %g %g\n" % (v, v, v, 1))
def normalize(vec):
    """ Normalize an array of N ndim vectors.

    Parameters
    ----------
    vec : ndarray (N x ndim)
        N ndim vectors

    Returns
    -------
    vecn : ndarray (N x ndim)
        N normalized ndim vectors

    Example
    -------
    >>> from pylayers.util.geomutil import *
    >>> vec = np.array([[1, 1, 0], [1, 1, 0], [1, 0, 1], [1, 1, 1]])
    >>> normalize(vec)
    array([[ 0.70710678,  0.70710678,  0.        ],
           [ 0.70710678,  0.70710678,  0.        ],
           [ 0.70710678,  0.        ,  0.70710678],
           [ 0.57735027,  0.57735027,  0.57735027]])
    """
    # per-row Euclidean norm, kept as a column for broadcasting
    norms = np.sqrt((vec * vec).sum(axis=1))
    return vec / norms[:, np.newaxis]
def ptonseg(pta, phe, pt):
    """ Orthogonal projection of pt onto the segment (pta, phe).

    Parameters
    ----------
    pta : ndarray
        segment tail
    phe : ndarray
        segment head
    pt : ndarray
        point to project (must differ from pta)

    Returns
    -------
    p : ndarray
        projected point, or an empty array when the projection falls
        strictly outside the open segment
    """
    seg = phe - pta
    rel = pt - pta
    Lseg = np.sqrt(np.dot(seg, seg))
    Lrel = np.sqrt(np.dot(rel, rel))
    assert Lseg != 0
    assert Lrel != 0
    segn = seg / Lseg
    # signed abscissa of the projection along the segment
    alpha = np.dot(rel / Lrel, segn) * Lrel
    if 0 < alpha < Lseg:
        return pta + alpha * segn
    return np.array([])
def dptseg(p, pt, ph):
    """ Distances between a set of points and a segment.

    Parameters
    ----------
    p : ndarray (ndim x Np)
        array of Np points
    pt : ndarray (ndim,)
        tail coordinates of the segment
    ph : ndarray (ndim,)
        head coordinates of the segment

    Returns
    -------
    d1 : (1 x Np)
        distance from pt to the orthogonal projection of each point
    d2 : (1 x Np)
        distance from ph to the orthogonal projection of each point
    h : (Np,)
        distance between each point and its orthogonal projection

    Examples
    --------
    >>> import numpy as np
    >>> from pylayers.util.geomutil import *
    >>> pt = np.array([0, 0])
    >>> ph = np.array([10, 0])
    >>> p = np.array([[-1, 1, 3, 4, 11], [8, 1, 2, 3, 3]])
    >>> d1, d2, h = dptseg(p, pt, ph)
    """
    ndim = len(pt)
    l = ph.reshape(ndim, 1) - pt.reshape(ndim, 1)
    norml = np.sqrt(np.dot(l.T, l))
    ln = l / norml
    # BUG FIX: the original hard-coded pt.reshape(2, 1) here despite
    # computing ndim, silently breaking ndim != 2 inputs.
    ptp = p - pt.reshape(ndim, 1)
    # signed abscissa of each projection along the segment
    d1 = np.dot(ln.T, ptp)
    d2 = norml - d1
    # orthogonal residual gives the point-to-line distance
    ptpo = ptp - d1 * ln
    h = np.sqrt(np.sum(ptpo * ptpo, axis=0))
    return (d1, d2, h)
def linet(ax, p1, p2, al=0.9, color='blue', linewidth=1):
    """ Draw a shortened line segment on an axes.

    Parameters
    ----------
    ax : axes
    p1 : np.array
        start point
    p2 : np.array
        end point
    al : float
        0 < al < 1, fraction of the segment actually drawn (default 0.9);
        the segment is trimmed symmetrically at both ends
    color : string
        default 'blue'
    linewidth : float
        default 1

    Returns
    -------
    ax : Axes instance

    Examples
    --------
    .. plot::
        :include-source:

        >>> from pylayers.util.geomutil import *
        >>> import matplotlib.pyplot as plt
        >>> fig = plt.figure()
        >>> ax = fig.gca()
        >>> ax = linet(ax, np.array([0, 0]), np.array([1, 0]), al=0.7, color='red', linewidth=3)
    """
    direction = p2 - p1
    L = np.sqrt(np.dot(direction, direction))
    # amount trimmed at each end
    trim = (1 - al) * L
    dn = direction / L
    start = p1 + dn * trim
    end = p2 - dn * trim
    ax.plot([start[0], end[0]], [start[1], end[1]],
            color=color, linewidth=linewidth)
    return ax
def ccw(a, b, c):
    """ Counter-clockwise orientation test.

    Parameters
    ----------
    a : ndarray (2, N)
    b : ndarray (2, N)
    c : ndarray (2, N)

    Returns
    -------
    array of booleans
        True where the triple (a, b, c) turns counter-clockwise

    References
    ----------
    `Line Segment Intersection <http://www.bryceboe.com/2006/10/23/line-segment-intersection-algorithm/>`_

    Examples
    --------
    >>> import scipy as sp
    >>> u = ccw(sp.rand(2, 100), sp.rand(2, 100), sp.rand(2, 100))
    """
    for m in (a, b, c):
        assert m.shape[0] == 2
    # sign of the z-component of (b - a) x (c - a)
    lhs = (c[1, ...] - a[1, ...]) * (b[0, ...] - a[0, ...])
    rhs = (b[1, ...] - a[1, ...]) * (c[0, ...] - a[0, ...])
    return lhs > rhs
def are_points_inside_cone_old(points, apex, v, radius=np.inf):
    """ Determine whether a set of points lies inside a cone.

    Parameters
    ----------
    points : np.array (Npoints x Ndim)
        candidate points
    apex : np.array (Ndim,)
        apex of the cone
    v : np.array (Ndim x Nvec)
        edge vectors of the cone, one per column
    radius : float
        maximum accepted distance from the apex (default: no limit)

    Returns
    -------
    np.array of booleans (Npoints,)
        True for points inside the cone
    """
    # vectors from the apex to each point (Npoints x Ndim)
    w = points - apex[None, :]
    Nvec = v.shape[1]
    # cone axis: mean of the normalized edge vectors
    vcone = np.mean(v / np.linalg.norm(v, axis=0), axis=1)
    # clip to the half space in front of the apex
    bhs = np.dot(w, vcone) > 0
    # clip to the given radius
    brad = np.linalg.norm(w[bhs, :], axis=1) < radius
    Npoints = np.sum(brad)
    Ndim = points.shape[1]
    wsel = w[bhs, :][brad, :]
    # cross products of the surviving points with each edge vector
    if Ndim > 2:
        cw = np.empty((Nvec, Npoints, Ndim))
        for k in range(Nvec):
            cw[k, ...] = np.cross(wsel, v[:, k])
        pcw = np.sum(np.prod(cw, axis=0), axis=1)
    else:
        cw = np.empty((Nvec, Npoints))
        for k in range(Nvec):
            cw[k, ...] = np.cross(wsel, v[:, k])
        pcw = np.prod(cw, axis=0)
    # BUG FIX: bcone was only assigned in the Ndim > 2 branch, making
    # the 2D path raise NameError; the sign test is common to both.
    bcone = (pcw < 0)
    #
    # scatter the partial masks back onto the full point set
    # (be careful, avoid np.where here)
    #
    bcone_ = np.zeros(len(brad), dtype=bool)
    bcone__ = np.zeros(len(bhs), dtype=bool)
    bcone_[brad] = bcone
    bcone__[bhs] = bcone_
    return bcone__
def are_points_inside_cone1(points, apex, v, radius=np.inf):
    """ determine if a set of points are inside a cone

    Parameters
    ----------
    points : np.array (Npoints x Ndim)
    apex : np.array (Ndim,)
        cone apex
    v : np.array (Ndim x Nvec)
        cone edge vectors, one per column
    radius : float
        maximum distance from the apex (default : no limit)

    Returns
    -------
    np.array (Npoints,) of booleans

    Notes
    -----
    For every pair of edge vectors, a point is kept when its (projected)
    direction lies angularly between the two edges, i.e. the two cross
    products have opposite signs.
    """
    # w : vectors from apex to each point
    w = points - apex[None, :]
    Nvec = v.shape[1]
    # vcone : cone axis (mean of the normalized edge vectors)
    v_n = v / np.linalg.norm(v, axis=0)
    vcone = np.mean(v_n, axis=1)
    # clipping half space
    bhs = np.dot(w, vcone) > 0
    # clipping distance
    brad = np.linalg.norm(w[bhs, :], axis=1) < radius
    Npoints = np.sum(brad)
    Ndim = points.shape[1]
    # tk : all pairs of edge vector indices
    tk = [c for c in combinations(range(Nvec), 2)]
    bcw = np.empty((len(tk), Npoints), dtype=bool)
    # w_vec : only the points which survived both clippings
    w_vec = w[bhs, :][brad, :]
    for k, (k1, k2) in enumerate(tk):
        if Ndim > 2:
            # normal of the plane spanned by the edge pair (k1, k2)
            zk1k2 = np.cross(v_n[:, k1], v_n[:, k2])
            zk1k2_n = zk1k2 / np.linalg.norm(zk1k2)
            # project w on that plane
            w_proj = w_vec - np.dot(w_vec, zk1k2_n)[:, None] * zk1k2_n[None, :]
        else:
            w_proj = w_vec
        pvec1 = np.cross(w_proj, v_n[:, k1])
        pvec2 = np.cross(w_proj, v_n[:, k2])
        if Ndim > 2:
            # signed components of both cross products along the pair normal
            u1 = np.sum(pvec1 * zk1k2_n[None, :], axis=1)
            u2 = np.sum(pvec2 * zk1k2_n[None, :], axis=1)
            dp1p2 = u1 * u2
        else:
            dp1p2 = pvec1 * pvec2
        # opposite signs <=> w_proj is angularly between the two edges
        bcw[k, ...] = dp1p2 < 0
    #
    # careful part : re-expand the stacked boolean masks back to the
    # original point indexing (avoid np.where)
    #
    bcone = np.prod(bcw, axis=0)
    bcone_ = np.zeros(len(brad), dtype=bool)
    bcone__ = np.zeros(len(bhs), dtype=bool)
    bcone_[brad] = bcone
    bcone__[bhs] = bcone_
    return bcone__
def are_points_inside_cone(points, apex, v, radius=np.inf):
    """ determine if a set of points are inside a cone

    Parameters
    ----------
    points : np.array (Npoints x Ndim)
    apex : np.array (Ndim,)
        cone apex
    v : np.array (Ndim x Nvec)
        cone edge vectors, one per column ; the edge matrix must be
        square (Nvec == Ndim) and invertible
    radius : float
        maximum distance from the apex (default : no limit)

    Returns
    -------
    np.array (Npoints,) of booleans

    Raises
    ------
    numpy.linalg.LinAlgError
        if the edge vector matrix is singular

    """
    assert (type(points) == np.ndarray)
    assert (type(apex) == np.ndarray)
    assert (type(v) == np.ndarray)
    # w : vectors from apex to each point
    w = points - apex[None, :]
    nw = np.linalg.norm(w, axis=1)
    # remove points which are too close to the apex
    bvalid = ~np.isclose(nw, 0)
    # vcone : cone axis (mean of the normalized edge vectors)
    v_n = v / np.linalg.norm(v, axis=0)
    vcone = np.mean(v_n, axis=1)
    # clipping half space
    bhs = bvalid & (np.dot(w, vcone) > 0)
    # clipping distance
    brad = np.linalg.norm(w[bhs, :], axis=1) < radius
    w_vec = w[bhs, :][brad, :]
    # express each remaining point in the (generally non orthogonal)
    # basis of the edge vectors : all coordinates strictly positive
    # <=> the point is a positive combination of the edges, i.e. inside.
    # bug fix : the previous version wrapped this in a bare
    # `except: pdb.set_trace()` which left x unbound after the trace ;
    # a singular edge matrix now raises LinAlgError directly.
    x = np.linalg.solve(v_n, w_vec.T)
    bx = x > 0
    bcone = np.prod(bx, axis=0)
    # re-expand the stacked boolean masks back to the original indexing
    bcone_ = np.zeros(len(brad), dtype=bool)
    bcone__ = np.zeros(len(bhs), dtype=bool)
    bcone_[brad] = bcone
    bcone__[bhs] = bcone_
    return bcone__
def intersect_cone_seg(line0, line1, seg, bvis=False, bbool=False):
    """ intersection of a cone and a segment

    Parameters
    ----------
    line0 : tuple(np.array,np.array)
        ( apex , pt1 ) first edge of the cone
    line1 : tuple(np.array,np.array)
        ( apex , pt2 ) second edge of the cone
    seg : tuple(np.array,np.array)
        (pta , ptb ) segment termination points
    bvis : bool
        visualize the cone and the segment with matplotlib
    bbool : bool
        unused, kept for interface compatibility

    Returns
    -------
    tahe : list
        [] or the two termination points of the part of seg seen by the cone
    ratio : float
        ratio between the visible part of seg and the full cone aperture

    Raises
    ------
    ValueError
        if line0 and line1 share the same termination point (degenerate
        cone) ; the previous version dropped into pdb here.

    See Also
    --------
    Signature.run
    are_points_inside_cone
    intersect_halfline_seg

    """
    tahe = []
    ratio = 0
    # points : (2 x 2) the two segment terminations, one per row
    points = np.vstack((seg[0], seg[1]))
    apex = line0[0]
    # degenerate cone : both edges end on the same point
    if ((line0[1][0] == line1[1][0]) and
            (line0[1][1] == line1[1][1])):
        raise ValueError("intersect_cone_seg : line0 and line1 share the same termination point")
    # v : (2 x 2) edge termination points, one per column
    v = np.vstack((line0[1], line1[1])).T
    bb = are_points_inside_cone(points, apex, v, radius=np.inf)
    x0, p0 = intersect_halfline_seg(line0, seg)
    x1, p1 = intersect_halfline_seg(line1, seg)
    if bb[0] & bb[1]:  # both terminations of the segment inside the cone
        tahe = seg
        if (np.abs(x0) != np.inf) and (np.abs(x1) != np.inf):
            ratio = np.linalg.norm(seg[1] - seg[0]) / np.linalg.norm(p1 - p0)
        else:
            ratio = 1
    if ~bb[0] & ~bb[1]:  # both terminations outside the cone
        # both edge/segment intersections lie within the segment
        if (((((x1 > 0) or np.isclose(x1, 0)) & ((x1 < 1) or np.isclose(x1, 1)))) and
                ((((x0 > 0) or np.isclose(x0, 0)) & ((x0 < 1) or np.isclose(x0, 1))))):
            tahe = [p0, p1]
            ratio = 1
        else:
            tahe = []
            ratio = 0
    if bb[0] & ~bb[1]:  # seg[0] inside , seg[1] outside
        if ((((x1 > 0) or np.isclose(x1, 0)) & ((x1 < 1) or np.isclose(x1, 1)))):
            tahe = [seg[0], p1]
        if ((((x0 > 0) or np.isclose(x0, 0)) & ((x0 < 1) or np.isclose(x0, 1)))):
            tahe = [seg[0], p0]
        if (np.abs(x0) != np.inf) and (np.abs(x1) != np.inf):
            # guard replaces the previous bare `except: pdb.set_trace()` :
            # when neither intersection falls inside the segment, tahe is
            # still empty and ratio keeps its default 0
            if len(tahe) == 2:
                ratio = np.linalg.norm(tahe[1] - tahe[0]) / np.linalg.norm(p1 - p0)
        else:
            ratio = 1
    if ~bb[0] & bb[1]:  # seg[0] outside , seg[1] inside
        if ((((x0 > 0) or np.isclose(x0, 0)) & ((x0 < 1) or np.isclose(x0, 1)))):
            tahe = [seg[1], p0]
        if ((((x1 > 0) or np.isclose(x1, 0)) & ((x1 < 1) or np.isclose(x1, 1)))):
            tahe = [seg[1], p1]
        if (np.abs(x0) != np.inf) and (np.abs(x1) != np.inf):
            # same guard as above (the original raised IndexError here)
            if len(tahe) == 2:
                ratio = np.linalg.norm(tahe[1] - tahe[0]) / np.linalg.norm(p1 - p0)
        else:
            ratio = 1
    if bvis:
        ax = plt.gca()
        linet(ax, line0[0], line0[0] + 10 * line0[1], color='blue', al=1)
        linet(ax, line1[0], line1[0] + 10 * line1[1], color='blue', al=1)
        linet(ax, seg[0], seg[1], color='red', al=1)
    return (tahe, ratio)
def intersect_cone_seg_old(line0, line1, seg, bvis=False, bbool=False):
    """ intersection of a cone and a segment (superseded by intersect_cone_seg)

    Parameters
    ----------
    line0 : tuple(np.array, np.array)
        (apex, termination point) first cone edge
    line1 : tuple(np.array, np.array)
        (apex, termination point) second cone edge
    seg : tuple(np.array, np.array)
        segment termination points
    bvis : bool
        visualize the configuration with matplotlib
    bbool : bool
        print the intermediate boolean flags (debug)

    Returns
    -------
    tahe : list
        [] or the termination points of the intersected part of seg
    ratio : float
        length ratio between the intersected part of seg and the cone
        aperture measured on the segment support

    """
    x0, p0 = intersect_halfline_seg(line0, seg)
    x1, p1 = intersect_halfline_seg(line1, seg)
    tahe = []
    # non degenerated case : both half-lines actually cross the segment support
    if ((np.abs(x0) != np.inf) and (np.abs(x1) != np.inf)):
        v = p1 - p0
        nv2 = np.dot(v, v)
        if not (nv2 == 0):
            # each intersection parameter is classified w.r.t. the segment :
            # a above (x>1) , b below (x<0) , i in [0,1]
            bx0a = x0 > 1
            bx0b = x0 < 0
            bx0i = (not bx0a) and (not bx0b)
            bx1a = x1 > 1
            bx1b = x1 < 0
            bx1i = (not bx1a) and (not bx1b)
            # the 9 combinations of the two classifications
            baa = bx0a and bx1a  #
            bab = bx0a and bx1b  #
            bai = bx0a and bx1i
            bba = bx0b and bx1a  #
            bbb = bx0b and bx1b  #
            bbi = bx0b and bx1i
            bia = bx0i and bx1a
            bib = bx0i and bx1b
            bii = bx0i and bx1i  #
            if bbool:
                print("baa ", baa)
                print("bab ", bab)
                print("bai ", bai)
                print("bba ", bba)
                print("bbb ", bbb)
                print("bbi ", bbi)
                print("bia ", bia)
                print("bib ", bib)
                print("bii ", bii)
            if baa or bbb:  # above and above or below and below -> segment is out
                tahe = []
                bdp0i = False
                bdp1i = False
            elif bab or bba:  # segment is fully inside the cone : take seg
                tahe = seg
                bdp0i = True
                bdp1i = True
            elif bii:  # both intersections inside the segment
                tahe = [p0, p1]
                bdp0i = False
                bdp1i = False
            else:
                # reference point is chosen
                # as the point p0 or p1 which is
                # the farthest from both segment terminations,
                # this is to avoid having a null vector
                d0seg = np.minimum(np.linalg.norm(p0 - seg[0]), np.linalg.norm(p0 - seg[1]))
                d1seg = np.minimum(np.linalg.norm(p1 - seg[0]), np.linalg.norm(p1 - seg[1]))
                if d0seg > d1seg:
                    pref = p0
                else:
                    v = -v
                    pref = p1
                pseg0 = seg[0] - pref
                dp0 = np.dot(v, pseg0) / nv2
                # seg[0] is out of the cone aperture
                bdp0o = (dp0 > 1) or (dp0 < 0)
                # seg[0] is in the cone aperture
                bdp0i = not bdp0o
                pseg1 = seg[1] - pref
                dp1 = np.dot(v, pseg1) / nv2
                # seg[1] is out of the cone aperture
                bdp1o = (dp1 > 1) or (dp1 < 0)
                # seg[1] is in the cone aperture
                bdp1i = not bdp1o
                if bbool:
                    print("bdp0i :", bdp0i)
                    print("bdp1i :", bdp1i)
                if bai or bbi:
                    # half-line 0 misses the segment, half-line 1 hits it
                    if bdp0i:
                        if not np.isclose(p1 - seg[0], 0).all():
                            tahe = [p1, seg[0]]
                        else:
                            tahe = [p1]
                    if bdp1i:
                        if not np.isclose(p1 - seg[1], 0).all():
                            tahe = [p1, seg[1]]
                        elif (len(tahe) < 2):
                            tahe = [p1]
                elif bia or bib:
                    # half-line 0 hits the segment, half-line 1 misses it
                    if bdp0i:
                        if not np.isclose(p0 - seg[0], 0).all():
                            tahe = [p0, seg[0]]
                        else:
                            tahe = [p0]
                    if bdp1i:
                        if not np.isclose(p0 - seg[1], 0).all():
                            tahe = [p0, seg[1]]
                        elif (len(tahe) < 2):
                            tahe = [p0]
            if len(tahe) > 1:
                w = tahe[1] - tahe[0]
                nw = np.linalg.norm(w)
                ratio = nw / np.sqrt(nv2)
            else:
                ratio = 0
        else:
            # both half-lines hit the segment support at the same point
            tahe = seg
            ratio = 1
    else:
        # degenerated case : at least one half-line parallel to the segment
        pt = seg[0] - line0[0]
        pt = pt / np.linalg.norm(pt)
        ph = seg[1] - line0[0]
        ph = ph / np.linalg.norm(ph)
        tahe = seg
        ratio = 1.
    if bvis:
        ax = plt.gca()
        linet(ax, line0[0], line0[0] + 10 * line0[1], color='blue', al=1)
        linet(ax, line1[0], line1[0] + 10 * line1[1], color='blue', al=1)
        linet(ax, seg[0], seg[1], color='red', al=1)
        # NOTE(review): bdp0i/bdp1i are only bound on the non degenerated
        # path ; calling with bvis=True on the degenerated path raises
        # NameError -- confirm intended usage
        if bdp0i:
            ax.scatter(seg[0][0], seg[0][1], s=100, color='green')
        else:
            ax.scatter(seg[0][0], seg[0][1], s=100, color='red')
        if bdp1i:
            ax.scatter(seg[1][0], seg[1][1], s=100, color='green')
        else:
            ax.scatter(seg[1][0], seg[1][1], s=100, color='red')
        if len(tahe) == 2:
            linet(ax, tahe[0], tahe[1], color='green', al=1, linewidth=2)
        plt.show()
    return (tahe, ratio)
def intersect_halfline_seg(line, seg):
    """ intersect a half line and a segment

    Parameters
    ----------
    line : tuple
        (point, vec)
    seg : tuple
        (pta, phe)

    Returns
    -------
    k : intersection parameter (0<k<1 if the intersection falls inside seg),
        +inf if the support line is hit behind the half-line origin,
        -inf if half-line and segment are parallel
    P : intersection point P = pta + k * (phe - pta) ;
        pta itself in the two degenerated cases

    """
    origin, direction = line
    pta, phe = seg
    vseg = phe - pta
    # work with a unit direction so the half-line parameter is a length
    direction = direction / np.linalg.norm(direction)
    # solve origin + t * direction == pta + k * vseg for (t, k)
    A = np.array([[direction[0], -vseg[0]],
                  [direction[1], -vseg[1]]])
    b = np.array([[pta[0] - origin[0]],
                  [pta[1] - origin[1]]])
    if np.isclose(np.linalg.det(A), 0):
        # parallel : no finite intersection
        sol = np.array([[None], [-np.inf]])
        P = seg[0]
    else:
        sol = np.linalg.solve(A, b)
        if sol[0] > 0:
            # intersection on the forward side of the half-line
            P = pta + sol[1] * vseg
        else:
            # intersection behind the half-line origin
            sol = np.array([[None], [+np.inf]])
            P = seg[0]
    return (sol[1][0], P)
def intersect3(a, b, pg, u1, u2, l1, l2, binter=False):
    """ Intersection of a line and a 3D rectangle screen

    Parameters
    ----------
    a : np.array (3,Nseg) of floats
        transmiter coordinates
    b : np.array (3,Nseg) of floats
        receiver coordinates
    pg : np.array (3,Nscreen) of floats
        center of gravity of the screen
    u1 : np.array (3,Nscreen) of floats
        unitary vector along first dimension
    u2 : np.array (3,Nscreen) of floats
        unitary vector along second dimension
    l1 : np.array (,Nscreen)
        length along first dimension in meters
    l2 : np.array (,Nscreen)
        length along second dimension in meters
    binter : bool
        if True the intersection points are also returned

    Returns
    -------
    visi : np.array (Nseg,Nscreen) of bool
        True => intersection (occultation)
        False
    pinter : np.array or None
        intersection points when binter is True, None otherwise

    Examples
    --------

    >>> a = np.array([[1,0,1]]).T
    >>> b = np.array([[10,0,1]]).T
    >>> pg = np.array([[5,0,0]]).T
    >>> u1 = np.array([[0,1,0]]).T
    >>> u2 = np.array([[0,0,1]]).T
    >>> l1 = np.array([3]).T
    >>> l2 = np.array([3]).T
    >>> bo = intersect3(a,b,pg,u1,u2,l1,l2)
    >>> assert bo

    See Also
    --------
    pylayers.gis.layout.Layout.angleonlink3

    """
    Nseg = a.shape[1]
    Nscreen = u1.shape[1]
    ba = b - a  # (3,Nseg) LOS distance
    # For each (segment, screen) pair solve the 3x3 system
    #   alpha * ba - beta * u1 - gamma * u2 = pg - a
    # A : (Nseg,Nscreen,3,3)
    # c : (Nseg,Nscreen,3)
    # ba.T (Nseg,3)
    # u1.T (Nscreen, 3)
    # u2.T (Nscreen, 3)
    # U : Nseg,1,3,1
    U = ba.T[:, None, :, None]
    assert (U.shape == (Nseg, 1, 3, 1))
    # U1 : 1,Nscreen,3,1
    U1 = u1.T[None, :, :, None] + np.zeros((1, Nscreen, 3, 1))
    assert (U1.shape == (1, Nscreen, 3, 1))
    # U2 : 1,Nscreen,3,1
    U2 = u2.T[None, :, :, None] + np.zeros((1, Nscreen, 3, 1))
    assert (U2.shape == (1, Nscreen, 3, 1))
    # broadcast every column to the full (Nseg,Nscreen,3,1) shape
    # U1e : Nseg,Nscreen,3,1
    U1e = U1 + np.zeros(U.shape)
    # U2e : Nseg,Nscreen,3,1
    U2e = U2 + np.zeros(U.shape)
    # Ue : Nseg,Nscreen,3,1
    Ue = U + np.zeros(U2e.shape)
    A = np.concatenate((Ue, -U1e, -U2e), axis=3)
    # visi : Nseg,Nscreen
    visi = np.zeros((A.shape[0], A.shape[1]), dtype=bool)
    pinter = np.nan * np.zeros((Nseg, Nscreen, 3))
    # check non singularity
    # detA (Nseg,Nscreen)
    detA = np.linalg.det(A)
    # matrix A (Nseg,Nscreen,3,3) is valid if not singular
    boolvalid = ~ (np.isclose(detA, 0))
    c = pg.T[None, :, :] - a.T[:, None, :]
    # ba (3,Nseg)
    # ba.T (Nseg,3)
    # ba.T[:,None,:] (Nseg,1,3)
    # x (Nseg,Nscreen,3)
    # pinter (Nseg,Nscreen,3)
    if boolvalid.all():
        # fast path : every (segment, screen) pair is non singular
        x = np.linalg.solve(A, c)
        # calculate intersection points
        pinter = ba.T[:, None, :] * x + a.T[:, None, :]
        # out-of-range tests : along the segment and both screen dimensions
        condseg = ((x[:, :, 0] > 1) + (x[:, :, 0] < 0))
        cond1 = ((x[:, :, 1] > l1[None, :] / 2.) +
                 (x[:, :, 1] < -l1[None, :] / 2.))
        cond2 = ((x[:, :, 2] > l2[None, :] / 2.) +
                 (x[:, :, 2] < -l2[None, :] / 2.))
        # NOTE(review): the % 2 makes this a parity test on the number of
        # violated conditions rather than "no condition violated" --
        # confirm this is the intended occultation criterion
        visi = ~(((condseg + cond1 + cond2) % 2).astype(bool))
    else:
        # slow path : restrict the solve to the non singular pairs
        ui = np.where(boolvalid)
        Am = A[ui[0], ui[1], :, :]
        if len(Am.shape) == 3:
            Am = Am[None, ...]
        cm = c[ui[0], ui[1], :]
        # test if loosing one axis
        if (len(c.shape) != len(cm.shape)):
            cm = cm[None, ...]
        #
        # Warning scipy.linalg do not handle MDA
        #
        # x : Nseg x Nscreen
        if Am.size > 0:
            x = np.linalg.solve(Am, cm)
            # NOTE(review): pinter is rebuilt here only for the valid pairs
            # and its shape differs from the documented (Nseg,Nscreen,3) --
            # confirm callers use binter only on the fast path
            pinter = ba.T[ui[0], None, :] * x + a.T[ui[0], None, :]
            # condition of occultation
            condseg = ((x[:, :, 0] > 1) + (x[:, :, 0] < 0))
            cond1 = ((x[:, :, 1] > l1[None, ui[1]] / 2.) +
                     (x[:, :, 1] < -l1[None, ui[1]] / 2.))
            cond2 = ((x[:, :, 2] > l2[None, ui[1]] / 2.) +
                     (x[:, :, 2] < -l2[None, ui[1]] / 2.))
            visi[ui[0], ui[1]] = ~(((condseg + cond1 + cond2) % 2).astype(bool))
    if binter:
        return visi, pinter
    else:
        return visi, None
def intersect(a, b, c, d):
    """ check if segment AB intersects segment CD in 2D

    Parameters
    ----------
    a : np.array (2xN)
    b : np.array (2xN)
    c : np.array (2xN)
    d : np.array (2xN)

    Returns
    -------
    boolean array (N,) : True where segment [a,b] crosses segment [c,d]

    See Also
    --------
    ccw : counter clock wise detection

    """
    # AB and CD cross iff C and D lie on opposite sides of line AB
    # and A and B lie on opposite sides of line CD
    cd_split_by_ab = ccw(a, b, c) != ccw(a, b, d)
    ab_split_by_cd = ccw(a, c, d) != ccw(b, c, d)
    return ab_split_by_cd & cd_split_by_ab
def is_aligned4(a, b, c, d, tol=1e-2):
    """ test aligment of 4 points

    Parameters
    ----------
    a : np.array
    b : np.array
    c : np.array
    d : np.array
    tol : float
        default 1e-2

    Returns
    -------
    boolean : True when a, b, c and d are colinear within tolerance

    """
    # four points are aligned iff both c and d lie on the line (a, b)
    return is_aligned3(a, b, c, tol=tol) & is_aligned3(a, b, d, tol=tol)
def is_aligned3(a, b, c, tol=1e-2):
    """ test aligment of 3 points

    Parameters
    ----------
    a : np.array
    b : np.array
    c : np.array
    tol : float
        default 1e-2

    Returns
    -------
    boolean : True when the 3 points are colinear within tolerance

    """
    # z component of the cross product (b-a) x (c-a) : 0 <=> colinear
    ab = b - a
    ac = c - a
    cross_z = ab[0] * ac[1] - ab[1] * ac[0]
    return abs(cross_z) < tol
def isleft(a, b, c, tol=0.):
    """ Test if point c is on the left of the vector a-->b

    Parameters
    ----------
    a : np.array (2xN)
    b : np.array (2xN)
    c : np.array (2xN)
    tol : float
        tolerance on the signed area (default 0.)

    Returns
    -------
    boolean array (1xN)

    See Also
    --------
    pylayers.antprop.signature

    """
    # signed area of the parallelogram (b-a, c-a) :
    # strictly positive (beyond tol) <=> c on the left of a-->b
    det = ((b[0, :] - a[0, :]) * (c[1, :] - a[1, :]) -
           (b[1, :] - a[1, :]) * (c[0, :] - a[0, :]))
    return det > tol
def isleftorequal(a, b, c):
    """ Test if point c is on the left of the vector a-->b (or on it)

    Parameters
    ----------
    a : np.array (2xN)
    b : np.array (2xN)
    c : np.array (2xN)

    Returns
    -------
    boolean array (1xN)

    See Also
    --------
    isleft

    """
    # same signed area as isleft but points lying on the line are accepted
    det = ((b[0, :] - a[0, :]) * (c[1, :] - a[1, :]) -
           (b[1, :] - a[1, :]) * (c[0, :] - a[0, :]))
    return det >= 0
def affine(X, Y):
    """ find affine transformation

    Parameters
    ----------
    X : np.array
        3xN
    Y : np.array
        3xN

    Returns
    -------
    A : np.array
        3x3
    B : np.array
        3x1

    Notes
    -----
    Given X and Y find the affine transformation

    Y = A X + B

    """
    # anchor the translation on the first column of Y
    B = Y[:, [0]]
    # least squares fit of the linear part on the translated data
    A = np.dot(Y - B, la.pinv(X))
    return (A, B)
def cylmap(Y, r=0.0625, l=0.5):
    """ find affine transformation for a specific cylinder

    Parameters
    ----------
    Y : np.array
        3xN
    r : float
        cylinder radius
    l : float
        cylinder length

    Returns
    -------
    A : np.array
        3x3
    B : np.array
        3x1

    Notes
    -----
    Y = A X + B

    """
    # canonical cylinder control points : center, both axis terminations,
    # two radial points and one radial point at the top
    X = np.array([[0, 0, 0],
                  [0, 0, -l / 2],
                  [0, 0, l / 2],
                  [r, 0, 0],
                  [0, r, 0],
                  [r, 0, l / 2]]).T
    # anchor the translation on the first column of Y
    B = Y[:, [0]]
    # least squares fit of the linear part on the translated data
    A = np.dot(Y - B, la.pinv(X))
    return (A, B)
def MRot3(a, axe):
    """ Return a 3D rotation matrix along axe 0|1|2

    Parameters
    ----------
    a : float
        angle (radians)
    axe : int
        0:x 1:y 2:z

    Returns
    -------
    np.array (3,3) rotation matrix (identity if axe not in {0,1,2})

    """
    c, s = np.cos(a), np.sin(a)
    # elementary 2D rotation block
    R2 = np.array([[c, -s],
                   [s, c]])
    R3 = np.eye(3)
    # embed the 2D block in the plane orthogonal to the chosen axis
    if axe == 0:
        R3[1:3, 1:3] = R2
    elif axe == 1:
        R3[0::2, 0::2] = R2
    elif axe == 2:
        R3[0:2, 0:2] = R2
    return (R3)
def MATP(sl, el, phi, tilt, pol):
    """ Calculate a rotation matrix for antenna pointing and orientation control

    Parameters
    ----------
    sl : np.array (,3) unitary
        main radiation direction in antenna local frame
    el : np.array (,3) unitary
        main direction in the E field plane
    phi : float 0<phi<2*pi
        pointing azimuth
    tilt : float -pi/2<tilt<pi/2
        pointing tilt
    pol : string
        'H' (Horizontal) or 'V' (Vertical)

    Returns
    -------
    M : np.array (3,3)
        rotation matrix such that Tg = M Tl

    """
    # sanity : sl and el unitary and (almost) orthogonal
    assert np.isclose(np.dot(sl, sl), 1)
    assert np.isclose(np.dot(el, el), 1)
    assert np.isclose(np.dot(sl, el), 0, atol=1e-1)
    # complete the local frame (sl, el, hl) into a direct basis
    hl = np.cross(sl, el)
    Tl = np.vstack((sl, el, hl)).T
    # global pointing direction from azimuth phi and tilt
    z = np.array([0, 0, 1.0])
    vg = np.array([np.cos(phi) * np.cos(tilt),
                   np.sin(phi) * np.cos(tilt),
                   -np.sin(tilt)])
    # horizontal polarization unit vector
    pH = np.cross(vg, z)
    pH = pH / np.linalg.norm(pH)
    assert np.isclose(np.dot(pH, pH), 1)
    # vertical polarization unit vector
    pV = np.cross(pH, vg)
    assert np.isclose(np.dot(pV, pV), 1)
    # global frame : (vg, pV, pH) direct for 'V', (vg, -pH, pV) direct for 'H'
    if pol == 'V':
        Tg = np.vstack([vg, pV, pH]).T
    if pol == 'H':
        Tg = np.vstack([vg, -pH, pV]).T
    # Tg = M Tl  =>  M = Tg . Tl.T
    return np.dot(Tg, Tl.T)
def MEulerAngle(alpha, beta, gamma):
    """ Calculate a rotation matrix from 3 Euler angles

    Parameters
    ----------
    alpha : float
        rotation along axis z
    beta : float
        rotation along axis x
    gamma : float
        rotation along axis y

    Returns
    -------
    T : np.array (3x3)
        rotation matrix T = Rz(alpha) . Rx(beta) . Ry(gamma)

    Examples
    --------

    >>> import numpy as np
    >>> T = MEulerAngle(np.pi/2,np.pi/2,np.pi/2)

    """
    # compose the three elementary rotations : z, then x, then y
    rot_z = MRot3(alpha, 2)
    rot_x = MRot3(beta, 0)
    rot_y = MRot3(gamma, 1)
    return np.dot(np.dot(rot_z, rot_x), rot_y)
def SphericalBasis(a):
    """ construct a spherical basis from a direction theta,phi

    Parameters
    ----------
    a : N x 2
        a[:,0] : N theta angle
        a[:,1] : N phi angle

    Returns
    -------
    M : np.array (3 x 3 x N)
        M[:, 0, n] : unit vector u_theta of direction n
        M[:, 1, n] : unit vector u_phi of direction n
        M[:, 2, n] : unit vector u_s (radial) of direction n

    See Also
    --------
    angledir

    """
    assert (a.shape[1] == 2)
    th = a[:, 0]
    ph = a[:, 1]
    ct, st = np.cos(th), np.sin(th)
    cp, sp = np.cos(ph), np.sin(ph)
    # spherical unit vectors, one row per direction
    u_th = np.vstack((ct * cp, ct * sp, -st)).T
    u_ph = np.vstack((-sp, cp, 0 * th)).T
    u_s = np.vstack((st * cp, st * sp, ct)).T
    # stack as (3 components , 3 vectors , N directions)
    M = np.dstack((u_th, u_ph, u_s)).T
    M = np.swapaxes(M, 0, 1)
    return M
def angledir(s):
    """ evaluate (theta,phi) from direction vector

    Parameters
    ----------
    s : ndarray N x 3
        N direction vector

    Returns
    -------
    a : ndarray (N,2)
        N angle couples (theta, phi) ; (0, 0) for the +z direction

    See Also
    --------
    BTB (Base to base)

    """
    s = normalize(s)
    N = np.shape(s)[0]
    ux = np.array((1, 0, 0)).reshape(1, 3)
    uy = np.array((0, 1, 0)).reshape(1, 3)
    uz = np.array((0, 0, 1)).reshape(1, 3)
    # theta from the projection on z
    theta = np.arccos(np.dot(s, uz.T))
    # v = s - z has the same (x, y) components as s, so its normalized
    # azimuth equals the azimuth of s
    v = s - uz
    n = np.sqrt(np.sum(v * v, axis=1)).reshape(N, 1)
    # directions equal to +z give a null v : avoid division by zero
    inull = np.where(n == 0)[0]
    n[inull] = 1
    vn = v / n
    phi = np.arctan2(np.dot(vn, uy.T), np.dot(vn, ux.T))
    angles = np.hstack((theta, phi))
    # force the (0, 0) convention for the +z direction
    angles[inull, 0] = 0
    angles[inull, 1] = 0
    return (angles)
def Bthph(th, ph, M):
    """ Return theta and phi transformed from a rotation matrix M

    Parameters
    ----------
    th : float or np.array (N)
        theta angles (radians)
    ph : float or np.array (N)
        phi angles (radians)
    M : np.array (3,3)
        rotation matrix

    Returns
    -------
    th_t : np.array (N)
    ph_t : np.array (N)
        transformed angles

    Notes
    -----
    This function is convenient for Antennas in addition of
    MATP.
    MATP returns a rotation matrix M which allow the
    transformation from a local basis to a global basis.
    Using Bthph with MATP allows to evaluate the Antenna
    for given theta phi in a global basis and determine
    associated gain values in the Antenna local basis

    """
    # bug fix : np.ndarray([x]) allocates an *uninitialized* array of
    # shape [x] (and raises TypeError for a float scalar) ; np.array([x])
    # wraps the scalar value as intended
    if not isinstance(th, np.ndarray):
        th = np.array([th])
    if not isinstance(ph, np.ndarray):
        ph = np.array([ph])
    # spherical to cartesian
    sp2cart = np.array([np.cos(ph) * np.sin(th),
                        np.sin(ph) * np.sin(th),
                        np.cos(th)])
    # apply rotation matrix
    Cloc = np.einsum('ij,ik->kj', sp2cart, M)
    # return in spherical coordinates
    cart2sp = np.array([np.arctan2(Cloc[1], Cloc[0]), np.arccos(Cloc[2])])
    return cart2sp[1, :], cart2sp[0, :]
def BTB(a_g, T):
    """ Produce a set of rotation matrices for passage between global and local frame

    Parameters
    ----------
    a_g : np.array (Nx2)
        angle in global reference frame : (theta,phi) in columns
    T : np.array (3x3)
        Tx rotation matrix

    Returns
    -------
    a_l : np.array (Nx2)
        angle in local frame
    R : np.array (2x2xN)
        Rotation matrix in the wave plane

    See Also
    --------
    SphericalBasis

    """
    # G : spherical basis of each global direction, 3 x 3 x r
    G = SphericalBasis(a_g)
    # old version of SphericalBasis
    #th_g = G[0, :, :]
    #ph_g = G[1, :, :]
    th_g = G[:, 0, :]
    ph_g = G[:, 1, :]
    #
    # B_gT : transposed global wave-plane basis, 2 x 3 x r
    #
    B_gT = np.dstack((th_g, ph_g)).transpose((2, 0, 1))
    # express s in the local frame (after rotation T)
    # s = G[:,2,:] 3 x N
    # s_l : N x 3
    #
    #s_l = np.dot(T.T, G[2, :, :]).T
    s_l = np.dot(T.T, G[:, 2, :]).T
    # get the N couples of angles in local frame
    # a_l : r x 2
    a_l = angledir(s_l)
    # L : spherical basis of each local direction
    L = SphericalBasis(a_l)
    # old version of SphericalBasis
    #th_l = L[0, :, :]
    #ph_l = L[1, :, :]
    th_l = L[:, 0, :]
    ph_l = L[:, 1, :]
    #
    # B_l : local wave-plane basis, 3 x 2 x r
    #
    B_l = np.dstack((th_l, ph_l)).transpose((0, 2, 1))
    #
    # R : (2 x 3 x r ) (3 x 3 x r ) ( 3 x 2 x r )
    # R : 2 x 2 x r
    #
    # U : 2 x 3 x r  (T broadcast along the last axis)
    U = np.einsum('ijk,jlk->ilk', B_gT, T[:, :, None])
    R = np.einsum('ijk,jlk->ilk', U, B_l)
    return a_l, R
def plot_coords(ax, ob, color='#999999'):
    """ plotting coord of a `shapely` object

    Parameters
    ----------
    ax : matplotlib axes
    ob : shapely object
    color : string
        default '#999999'

    References
    ----------
    `Shapely <http://pypi.python.org/pypi/Shapely>`_

    """
    xs, ys = ob.xy
    # round markers drawn above the geometry fill
    ax.plot(xs, ys, 'o', color=color, zorder=2)
def plot_bounds(ax, ob, color='#000000'):
    """ plot bounds of a `shapely` object

    Parameters
    ----------
    ax : matplotlib axes
    ob : shapely object
    color : string
        default '#000000'

    References
    ----------
    `Shapely <http://pypi.python.org/pypi/Shapely>`_

    """
    # gather the boundary point coordinates as two parallel sequences
    coords = [(p.x, p.y) for p in ob.boundary]
    xs, ys = zip(*coords)
    ax.plot(xs, ys, color=color, zorder=0.1)
def plot_line(ax, ob, color="#999999"):
    """ plot a `shapely` line

    Parameters
    ----------
    ax : matplotlib axes
    ob : shapely object
    color : string
        default '#999999'

    References
    ----------
    `Shapely <http://pypi.python.org/pypi/Shapely>`_

    Examples
    --------

    >>> from pylayers.util.geomutil import *
    >>> import matplotlib.pyplot as plt
    >>> seg = shg.LineString([(0,0),(1,1)])
    >>> fig = plt.figure()
    >>> ax = fig.gca()
    >>> plot_line(ax,seg)
    >>> plt.show()

    """
    xs, ys = ob.xy
    ax.plot(xs, ys,
            color=color,
            alpha=0.7,
            linewidth=2,
            solid_capstyle='round',
            zorder=0.5)
def v_color(ob):
    """ return color associated with the simplicity of a shapely object

    Parameters
    ----------
    ob : shapely object
        its `is_simple` boolean attribute selects the color

    Returns
    -------
    color string from the module-level COLOR mapping
    (COLOR is defined elsewhere in this module)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    return COLOR[ob.is_simple]
# def createPolygons(
def plotPolygon(poly, color="#abcdef", alpha=0.8):
    """ plot a shapely Polygon

    Parameters
    ----------
    poly : shapely polygon
    color : default "#abcdef"
    alpha : float
        transparency (default 0.8)

    """
    fig = plt.gcf()
    axes = fig.get_axes()
    # reuse the first existing axes, otherwise create one
    if axes:
        ax = axes[0]
    else:
        ax = fig.add_subplot(111)
    ax.add_patch(PolygonPatch(poly, facecolor=color, alpha=alpha))
    plt.show()
def shrinkPolygon(poly, d=0.1):
    """ shrink polygon

    Move every edge of the (simplified) polygon along its normal by
    distance d. If the resulting area grew, the normals were pointing
    outwards and the function recurses once with -d.

    Parameters
    ----------
    poly : shapely polygon
    d : float
        0.1

    Returns
    -------
    poly : shapely polygon

    """
    poly1 = simplifyPolygon(poly)
    A1 = poly1.area
    p = np.array(poly1.exterior.xy)
    # drop the last point (duplicate of the first one) ;
    # note : q is a *view* on p, so displacing p also updates q
    q = p[:, 0:-1]
    n1 = Lr2n(q)
    N = np.shape(q)[1]
    for k in range(N):
        nrm = n1[:, k]
        # displace both terminations of segment k along its normal
        p[:, k] = p[:, k] + d * nrm
        p[:, k + 1] = p[:, k + 1] + d * nrm
        # NOTE(review): the first point is displaced again at *every*
        # iteration here -- looks suspicious, confirm intended
        q[:, 0] = q[:, 0] + d * nrm
    y = q.T.copy()
    ls = shg.asLineString(y)
    poly2 = shg.Polygon(ls)
    A2 = poly2.area
    if (A2 > A1):
        # area grew : offset direction was outwards, retry inverted
        poly2 = shrinkPolygon(poly1, -d)
    return (poly2)
def shrinkPolygon2(poly1, d=0.1):
    """ shrink Polygon

    Displace the polygon vertices along the per-segment normals by
    distance d, smoothing the displacement when two consecutive segments
    share almost the same normal direction.

    Parameters
    ----------
    poly1 : shapely polygon
    d : float
        displacement distance (default 0.1)

    Returns
    -------
    poly2 : shapely polygon

    """
    # bug fix : the previous version called simplifyPolygon(poly) with the
    # undefined name `poly` (unconditional NameError) instead of poly1
    poly1 = simplifyPolygon(poly1)
    p = np.array(poly1.exterior.xy)
    # drop the last point (duplicate of the first one) ;
    # note : q is a *view* on p, so displacing p also updates q
    q = p[:, 0:-1]
    n1 = Lr2n(q)
    N = np.shape(q)[1]
    for k in range(N):
        norm = n1[:, k]
        # u : similarity between current and previous segment normals
        if k > 0:
            u = np.dot(norm, normold)
        else:
            u = 0
        # NOTE(review): norm1 == norm, so np.dot(norm, norm1) below is
        # always 1 and the v < 0.8 branches never fire -- confirm intent
        norm1 = norm
        if u < 0.8:  # direction change
            p[:, k] = p[:, k] + d * norm
            if k != (N - 1):
                p[:, k + 1] = p[:, k + 1] + d * norm
            else:
                v = np.dot(norm, norm1)
                if v < 0.8:
                    p[:, k + 1] = p[:, k + 1] + d * norm
        else:  # same direction : reuse the previous normal for the vertex
            p[:, k] = p[:, k] + d * normold
            if k != (N - 1):
                p[:, k + 1] = p[:, k + 1] + d * norm
            else:
                v = np.dot(norm, norm1)
                if v < 0.8:
                    p[:, k + 1] = p[:, k + 1] + d * norm
        normold = norm
    y = q.T.copy()
    ls = shg.asLineString(y)
    poly2 = shg.Polygon(ls)
    return (poly2)
def simplifyPolygon(poly1):
    """ Simplify polygon : suppress adjacent colinear segments

    Parameters
    ----------
    poly1 : shapely polygon

    Returns
    -------
    poly2 : shapely polygon
        polygon where quasi colinear consecutive segments (direction dot
        product >= 0.98) have been merged

    """
    p = np.array(poly1.exterior.xy)
    N = np.shape(p)[1]
    # always keep the first point
    q = p[:, 0].reshape(2, 1)
    for k in range(N - 2):
        v1 = p[:, k + 1] - p[:, k]
        v2 = p[:, k + 2] - p[:, k + 1]
        v1n = v1 / np.sqrt(np.dot(v1, v1))
        v2n = v2 / np.sqrt(np.dot(v2, v2))
        u = np.dot(v1n, v2n)
        # keep the vertex only when the direction really changes
        if u < 0.98:
            q = np.hstack((q, p[:, k + 1].reshape(2, 1)))
    # if the last examined segment is colinear with the first kept one,
    # the first point is redundant : drop it
    vini = q[:, 1] - q[:, 0]
    vin = vini / np.sqrt(np.dot(vini, vini))
    v = np.dot(v2n, vin)
    if v > 0.98:
        q = q[:, 1:]
    y = q.T.copy()
    ls = shg.asLineString(y)
    poly2 = shg.Polygon(ls)
    return (poly2)
#----------------------
# Taguhi
#----------------------
def wall_delta(x1, y1, x2, y2, delta=0.0001):
    """ Identification of new points

    After defining a tolerance length those points which are situated in
    the extremities of the walls at a distance equivalent to the
    tolerance length are identified.

    Parameters
    ----------
    x1 : float
        The x component of the point of the first extremity
    y1 : float
        The y component of the point of the first extremity
    x2 : float
        The x component of the point of the second extremity
    y2 : float
        The y component of the point of the second extremity.
    delta : float
        tolerance length (default 0.0001)

    Returns
    -------
    bx : float
        The x component of the new point of the first extremity
    by : float
        The y component of the new point of the first extremity
    cx : float
        The x component of the new point of the second extremity
    cy : float
        The y component of the new point of the second extremity.

    Notes
    -----
    .. math:: bx=x1+(x2-x1) \\frac{\\delta}{mod(a)}.

    Examples
    --------

    >>> x1=-2.
    >>> y1=2.
    >>> x2=-1.
    >>> y2=1.
    >>> bx,by,cx,cy = wall_delta(x1,y1,x2,y2,delta=0.0001)
    >>> assert bx==-1.9999292893218814,'Mistake'
    >>> assert by==1.9999292893218814,'Mistake'
    >>> assert cx==-1.0000707106781186,'Mistake'
    >>> assert cy==1.0000707106781186,'Mistake'

    """
    # wall direction vector and its length
    # (unused normalized components a_ch_x / a_ch_y removed)
    ax = x2 - x1
    ay = y2 - y1
    a_mod = np.sqrt(ax ** 2 + ay ** 2)
    # move each extremity towards the other one by the tolerance delta
    bx = x1 + ax * delta / a_mod
    by = y1 + ay * delta / a_mod
    cx = x2 - ax * delta / a_mod
    cy = y2 - ay * delta / a_mod
    return (bx, by, cx, cy)
def plot_coords2(ax, ob):
    """ plot point from coordinates (fixed grey color)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    xs, ys = ob.xy
    ax.plot(xs, ys, 'o', color='#999999', zorder=2)
def plot_bounds2(ax, ob):
    """ plot bounds v2 (fixed black color)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    coords = [(p.x, p.y) for p in ob.boundary]
    xs, ys = zip(*coords)
    ax.plot(xs, ys, color='#000000', zorder=0.1)
def plot_line2(ax, ob):
    """ plot line v2 (color from v_color)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    xs, ys = ob.xy
    ax.plot(xs, ys, color=v_color(ob), alpha=0.7, linewidth=2,
            solid_capstyle='round', zorder=0.5)
def plot_coords3(ax, ob, color):
    """ plot coords v3 (explicit color)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    xs, ys = ob.xy
    ax.plot(xs, ys, 'o', color=color, zorder=2)
def plot_bounds3(ax, ob, color):
    """ plot bounds v3 (explicit color)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    coords = [(p.x, p.y) for p in ob.boundary]
    xs, ys = zip(*coords)
    ax.plot(xs, ys, color=color, zorder=1)
def plot_line3(ax, ob, color):
    """ plot lines v3 (explicit color)

    References
    ----------
    http://pypi.python.org/pypi/Shapely

    """
    xs, ys = ob.xy
    ax.plot(xs, ys,
            color=color,
            alpha=0.7,
            linewidth=2,
            solid_capstyle='round',
            zorder=1)
#
# wedge functions
##
def valid_wedge(ps, pw, p1, p2, grazing):
    """ check set of N wedge sector validity for point ps

    Parameters
    ----------
    ps : source point
    pw : np.array (Nx2) wedge apex point
    p1 : np.array (Nx2) point 1 of wedge
    p2 : np.array (Nx2) point 2 of wedge
    grazing : 0 (without grazing)
              1 (authorize grazing)

             xps
                x pw
               / \
              /   \
             /     \
         x p1       x p2

    Returns
    -------
    valid : np.array (Nx1)
        valid = 1 if ps is in the convex sector
        valid = 0 if ps is in the concav sector

    Examples
    --------

    >>> p1 = np.array([-2,-2]).reshape(1,2)
    >>> p2 = np.array([2,-2]).reshape(1,2)
    >>> pw = np.array([0,0]).reshape(1,2)
    >>> ps1 = np.array([3,3]).reshape(1,2)
    >>> ps2 = np.array([0,-3]).reshape(1,2)
    >>> valid_wedge(ps1,pw,p1,p2,0)[0][0]
    1.0
    >>> valid_wedge(ps2,pw,p1,p2,0)[0][0]
    1.0

    Author
    -------
    Bernard.uguen@univ-rennes1.fr
    """
    # angles of p1, p2 and ps seen from the apex pw
    x1 = p1[:, 0] - pw[:, 0]
    y1 = p1[:, 1] - pw[:, 1]
    a1 = np.arctan2(y1, x1)
    x2 = p2[:, 0] - pw[:, 0]
    y2 = p2[:, 1] - pw[:, 1]
    a2 = np.arctan2(y2, x2)
    xs = ps[:, 0] - pw[:, 0]
    ys = ps[:, 1] - pw[:, 1]
    aas = np.arctan2(ys, xs)
    valid = np.zeros((1, len(x1)))
    # NOTE(review): a1.all()/a2.all() collapse the angle arrays to a single
    # boolean, so b1_I2/b2_I2 are scalars in {0, 1} rather than per-wedge
    # angular bounds -- np.minimum(a1, a2)/np.maximum(a1, a2) looks
    # intended, but "fixing" it changes the documented example ; confirm
    # against callers before touching
    b1_I2 = min(a1.all(), a2.all())
    b2_I2 = max(a1.all(), a2.all())
    mu_I2 = b2_I2 - b1_I2
    #
    # a >= or <= also validates the points lying on a tangent to the wedge
    #
    if (grazing == 0):
        in_I2 = np.nonzero(
            (aas > b1_I2) & (aas < b2_I2) & (mu_I2 > np.pi))[0]
        valid[in_I2] = 1
        out_I2 = np.nonzero(
            ((aas < b1_I2) | (aas > b2_I2)) & (mu_I2 < np.pi))[0]
        valid[out_I2] = 1
    if (grazing == 1):
        in_I2 = np.nonzero(
            (aas >= b1_I2) & (aas <= b2_I2) & (mu_I2 > np.pi))[0]
        valid[in_I2] = 1
        out_I2 = np.nonzero(
            ((aas <= b1_I2) | (aas >= b2_I2)) & (mu_I2 < np.pi))[0]
        valid[out_I2] = 1
    return (valid)
def agwed_old(v, lwe):
    """ angles of a wedge (deprecated scalar version, see agwed)

    Parameters
    ----------
    lwe : np.array
        3x1 wedge vector
    v : np.array(3x4)
        3x4 ( 4 stacked vectors)
        first vector of v is on face 0 perp to lwe
        second vector of v is on face n perp to lwe
        third vector is on the direction of incident ray (-si)
        fourth vector is on the direction of diffracted ray (sd)
        all vectors of v are defined outgoing from the diffracting point

    Returns
    -------
    np.array([N*pi,phi0,phi])

    Example
    -------

    >>> import numpy as np
    >>> import pylayers.util.geomutil as geu
    >>> lwe = np.array([0,0,1])
    >>> u = np.array([1,0,0])
    >>> v1 = np.array([1,1,0])
    >>> si = np.array([-1,-1,0])
    >>> sd = np.array([-1,1,0])
    >>> v = np.vstack([u,v1,si,sd]).T
    >>> M = geu.agwed(v,lwe)

    """
    print(DeprecationWarning('Please use vectorized version : agwed'))
    # lwe : (,3) normalize the wedge axis
    lwe = lwe / np.sqrt(np.sum(lwe * lwe, axis=0))
    # v : (3,4) normalize each stacked vector
    v = v / np.sqrt(np.sum(v * v, axis=0))
    # ps (,4) : component of each vector along the wedge axis
    ps = np.dot(lwe, v)
    # NOTE(review): removing the axial component would be
    # v - lwe[:,None]*ps ; v - v*ps only coincides when ps == 0
    # (vectors already perpendicular to lwe) -- confirm before reuse
    vp1 = v - v * ps
    vpn = vp1 / np.sqrt(np.sum(vp1 * vp1, axis=0))
    # vpt (4,2) : in-plane components, one row per vector
    vpt = vpn[0:2, :].T
    # w (2,4) : each in-plane vector rotated by -pi/2
    w = np.vstack((vpt[:, 1], -vpt[:, 0]))
    # C, D : pairwise cross and dot products (recomputed inline below)
    C = np.dot(vpt, w)
    D = np.dot(vpt, vpt.T)
    # clockwise angles of vectors 1..3 measured from vector 0 (face 0)
    M = np.mod(2 * np.pi - np.arctan2(np.dot(vpt, w),
                                      np.dot(vpt, vpt.T)), 2 * np.pi)[0, 1:]
    return M
def agwed(v, lwe):
    """ angles of a wedge (vectorized)

    Parameters
    ----------

    lwe : np.array
        3xNp wedge vectors
    v : np.array (3x4xNp)
        3x4xNp (4 stacked vectors)
        first vector of v is on face 0 perp to lwe
        second vector of v is on face n perp to lwe
        third vector is on the direction of incident ray (-si)
        fourth vector is on the direction of diffracted ray (sd)
        all vectors of v are defined outgoing from the diffracting point

    Returns
    -------

    np.array (3xNp)
        [[N*pi, phi0, phi], ... xNp]

    Example
    -------

    >>> import pylayers.util.geomutil as geu
    >>> import numpy as np
    >>> lwe = np.array([[0,0,1],[0,0,1]]).T
    >>> u  = np.array([[1,0,0],[1,0,0]]).T
    >>> v1 = np.array([[1,1,0],[1,1,0]]).T
    >>> si = np.array([[-1,-1,0],[-1,1,0]]).T
    >>> sd = np.array([[-1,1,0],[1,-1,0]]).T
    >>> v = np.hstack((u[:,None,:],v1[:,None,:],si[:,None,:],sd[:,None,:]))
    >>> M = geu.agwed(v,lwe)
    >>> print(M*180/np.pi)
    [[ 315.  315.]
     [ 135.  225.]
     [ 225.   45.]]

    """
    # NOTE: leftover ipdb breakpoint removed -- it dropped every caller
    # into the debugger (or raised ImportError without ipdb installed).
    # normalize wedge vectors : lwe (3,N)
    lwe = lwe / np.sqrt(np.sum(lwe * lwe, axis=0))
    # normalize stacked vectors : v (3,4,N)
    v = v / np.sqrt(np.sum(v * v, axis=0))
    # scalar products lwe . v : ps (4,N)
    ps = np.einsum('ik,ijk->jk', lwe, v)
    vp1 = v - v * ps
    vpn = vp1 / np.sqrt(np.sum(vp1 * vp1, axis=0))
    # in-plane (x, y) components : vpt (2,4,N)
    vpt = vpn[0:2, :, :]
    # w : 90-degree rotation of vpt, (2,4,N)
    w = np.dstack((vpt[1, :, :].T, -vpt[0, :, :].T)).T
    # C[i,j,l] = cross_z(vpt_i, vpt_j), D[i,j,l] = dot(vpt_i, vpt_j)
    C = np.einsum('kil,kjl->ijl', vpt, w)
    # D (4,4,N)
    D = np.einsum('kil,kjl->ijl', vpt, vpt)
    # angles of vectors 1..3 measured clockwise from vector 0
    M = np.mod(2 * np.pi - np.arctan2(C, D), 2 * np.pi)[0, 1:, :]
    return M
def sector(p1, p2, pt):
    """ unsigned angular sector between rays (pt->p1) and (pt->p2)

        p1 x-----------x pt
                 |     /
           alpha \\/
                     /
                    x p2

    Parameters
    ----------

    p1 : np.array (2|3 x Np) or (2|3,)
        point
    p2 : np.array (2|3 x Np) or (2|3,)
        point
    pt : np.array (2|3 x Np) or (2|3,)
        point

    Returns
    -------

    alpha : np.array (Np,)
        angle in degrees

    Notes
    -----

    Useful for AAS calculation.
    Only the (x, y) components contribute to the angle.

    """
    # promote single points to column vectors
    if p1.ndim == 1:
        p1 = p1[:, None]
    if p2.ndim == 1:
        p2 = p2[:, None]
    if pt.ndim == 1:
        pt = pt[:, None]
    d1 = p1 - pt
    d2 = p2 - pt
    u = d1 / np.sqrt(np.sum(d1 * d1, axis=0))
    w = d2 / np.sqrt(np.sum(d2 * d2, axis=0))
    ang1 = np.arctan2(u[1], u[0])
    ang2 = np.arctan2(w[1], w[0])
    gap = np.abs(ang1 - ang2)
    # unsigned sector : smallest of the two complementary apertures
    aperture = np.minimum(gap, 2 * np.pi - gap)
    return aperture * 180 / np.pi
def sectorold(p1, p2, pt):
    """ unsigned angular sector p1 - pt - p2 (scalar version)

    Parameters
    ----------

    p1 : np.array
        single point
    p2 : np.array
        single point
    pt : np.array
        apex point

    Returns
    -------

    alpha : float
        angle in degrees

    Notes
    -----

    Useful for AAS calculation.

    """
    d1 = p1 - pt
    d2 = p2 - pt
    u = d1 / np.sqrt(np.dot(d1, d1))
    w = d2 / np.sqrt(np.dot(d2, d2))
    gap = abs(np.arctan2(u[1], u[0]) - np.arctan2(w[1], w[0]))
    # keep the smallest of the two complementary apertures
    return min(gap, 2 * np.pi - gap) * 180 / np.pi
def dist(x, y, ax):
    """ Euclidean distance between two arrays along a given axis

    Parameters
    ----------

    x : numpy.ndarray
    y : numpy.ndarray
        broadcastable against x
    ax : integer (0,1)
        axis holding the coordinates

    Returns
    -------

    d : numpy.ndarray

    Examples
    --------

    .. plot::
        :include-source:

        >>> import numpy as np
        >>> x = np.array([[0., 0., 10., 10.],[0., 10., 10., 0.]])
        >>> y = np.array([[5.],[5.]])
        >>> ax = 0
        >>> d = dist(x,y,ax)

    """
    # sqrt(sum((x-y)**2)) along the coordinate axis
    return np.linalg.norm(x - y, axis=ax)
def angle_intersection(a1, a2, b1, b2):
    """ test whether two angular sectors overlap

    Parameters
    ----------

    a1, a2 : float
        start/end angles of the first sector
    b1, b2 : float
        start/end angles of the second sector

    Returns
    -------

    bool
        True when sectors (a1,a2) and (b1,b2) intersect

    Notes
    -----

    A sector whose start exceeds its end is treated as wrapping
    around 2*pi.

    """
    def _contains(angle, start, end):
        # wrapping sector : membership on either side of the cut
        if start > end:
            return angle >= start or angle <= end
        # NOTE(review): boundary handling is asymmetric (start excluded
        # here, included in the wrapped case) -- preserved as-is.
        return start < angle <= end

    return (_contains(b1, a1, a2) or _contains(b2, a1, a2)
            or _contains(a1, b1, b2) or _contains(a2, b1, b2))
def angle_intersection2(a1,a2,b1,b2):
    """ width of the intersection of two angular sectors

    Parameters
    ----------

    a1 : angle in [0,2*pi] first angular sector
    a2 : angle in [0,2*pi] first angular sector
    b1 : angle in [0,2*pi] second angular sector
    b2 : angle in [0,2*pi] second angular sector

    Returns
    -------

    intersect_angle : float
        width (radians) of the overlap of the two sectors, 0 when
        they do not overlap

    Notes
    -----

    Given 2 angular sectors (a1,a2) and (b1,b2), this function returns the
    intersection of the 2 sectors if it exists.
    Each sector is reduced to a center angle and a half aperture; sectors
    wider than pi are replaced by their complement.

    See Also
    --------

    Signature.run

    Examples
    --------

    >>> from pylayers.util.geomutil import *
    >>> a1 = 0.
    >>> a2 = np.pi/4.
    >>> b1 = np.pi/3.
    >>> b2 = np.pi/2.
    >>> angle_intersection2(a1,a2,b1,b2)
    0
    >>> a1 = 0.
    >>> a2 = np.pi/3.
    >>> b1 = np.pi/4.
    >>> b2 = np.pi/2.
    >>> angle_intersection2(a1,a2,b1,b2)
    0.26179938779914935
    >>> a1 = 0.
    >>> a2 = np.pi-np.pi/3.
    >>> b1 = np.pi/2.
    >>> b2 = 3*np.pi/2.
    >>> angle_intersection2(a1,a2,b1,b2)
    0.5235987755982991

    """
    # half aperture of sector a
    r1 = (max(a1,a2)-min(a1,a2))/2
    if r1 > np.pi/2:
        # sector wider than pi : take the complementary sector
        # (bounds swapped so the sector wraps through 0/2*pi)
        r1 = np.pi-r1
        ainf = max(a1,a2)
        asup = min(a1,a2)
    else:
        ainf = min(a1,a2)
        asup = max(a1,a2)
    # half aperture of sector b, same complement convention
    r2 = (max(b1,b2)-min(b1,b2))/2
    if r2 > np.pi/2.:
        r2 = np.pi-r2
        binf = max(b1,b2)
        bsup = min(b1,b2)
    else:
        binf = min(b1,b2)
        bsup = max(b1,b2)
    # center angle of sector a; shift by pi when it falls outside
    # the (wrapped) sector bounds
    c1 = (ainf+asup)/2
    if (c1<ainf) & (c1>asup):
        c1 = np.mod(c1+np.pi,2*np.pi)
    # center angle of sector b
    c2 = (binf+bsup)/2
    if (c2<binf) & (c2>bsup):
        c2 = np.mod(c2+np.pi,2*np.pi)
    # angular distance between the two centers, wrapped to [0, pi]
    dc = max(c2,c1)-min(c2,c1)
    if dc > np.pi:
        dc = 2*np.pi-dc
    # sectors overlap iff the sum of half apertures exceeds the
    # center-to-center distance; the excess is the overlap width
    if ((r1+r2)-dc)>0:
        return((r1+r2)-dc)
    else:
        return(0)
def line_intersection(l1, l2):
    """ intersection between two 2D segments using shapely

    Parameters
    ----------

    l1 : numpy.ndarray
        2x2 array ; columns are the two end points of the first segment
    l2 : numpy.ndarray
        2x2 array ; columns are the two end points of the second segment

    Returns
    -------

    p : numpy.ndarray (2x1)
        coordinates of the intersection point,
        or None when the segments do not intersect

    """
    seg1 = sh.LineString((l1[:, 0], l1[:, 1]))
    seg2 = sh.LineString((l2[:, 0], l2[:, 1]))
    # early exit when there is no intersection
    if not seg1.intersects(seg2):
        return None
    inter = seg1.intersection(seg2)
    return np.array([[inter.x], [inter.y]])
def linepoly_intersection(l, poly):
    """ intersection between a 2D segment and a 2D triangle using shapely

    Parameters
    ----------

    l : numpy.ndarray
        2x2 array ; columns are the two end points of the segment
    poly : numpy.ndarray
        2x3 array ; columns are the triangle vertices

    Returns
    -------

    p : numpy.ndarray (2x1)
        coordinates of the intersection point,
        or None when there is no intersection

    Notes
    -----

    Fix: shapely exposes the polygon class as ``Polygon`` (capital P) ;
    ``sh.polygon`` is a submodule and is not callable.

    NOTE(review): a segment/polygon intersection is in general a
    LineString ; ``.x``/``.y`` only exist on Point results -- confirm
    intended usage with callers.

    """
    shl = sh.LineString((l[:, 0], l[:, 1]))
    shpoly = sh.Polygon((poly[:, 0], poly[:, 1], poly[:, 2]))
    # guard against the no-intersection case instead of raising
    # AttributeError on the empty geometry
    if not shl.intersects(shpoly):
        return None
    psh = shl.intersection(shpoly)
    return np.array([[psh.x], [psh.y]])
def mirror3b(tp, aplane, pplane):
    """ compute recursively the image of p wrt the list of facets

    Parameters
    ----------

    tp : numpy.ndarray (3 x Ns x Npt)
        Ns  : number of screens
        Npt : number of points
    aplane : numpy.ndarray
        array of planes (3 x Nplane x 2) ; two spanning vectors per plane
    pplane : numpy.ndarray
        array of points (3 x Nplane) ; one reference point per plane

    Returns
    -------

    tp : np.array
        sequence of images
        tp[:,-1] is the final image
        tp[:,0] is the original point

    Examples
    --------

    >>> tp = np.array([[1,1,1]]).T
    >>> p1 = np.array([[0,0] ,[1,0],[0,1]]) #yz
    >>> p2 = np.array([[1,0] ,[0,0],[0,1]]) #yz
    >>> p3 = np.array([[1,0] ,[0,1],[0,0]]) #xy
    >>> aplane = np.hstack((p1,p2,p3))

    """
    # take last points of the sequence
    # p : 3 x 1 x n
    #
    p = tp[:,[-1],:]
    Nplane = aplane.shape[1]# vector plane normalisation
    # norm : 3 x Nplane ; plane normal = cross of the 2 spanning vectors
    norm = np.cross(aplane[:,:,0],aplane[:,:,1],axis=0)
    # T change basis matrix (spanning vectors + normal as third axis)
    # T (3 x Nplane,3)
    T = np.dstack((aplane,norm[:,:,None]))
    # take last transformation matrix
    T_ = T[:,-1,:]
    # v : 3 x 1 x n ; vector from the plane reference point to p
    v = p-pplane[:,[-1]][:,:,None]
    # go to frame attached to reflection plane
    #Tpmp = np.dot(T_.T,v)
    # Tpmp : 3 x 1 x n
    Tpmp = np.einsum('sv,vln->sln',T_.T,v)
    # apply symmetry : flip the component along the plane normal
    R = np.eye(3)
    R[2,2] = -1
    #RTpmp = np.dot(R,Tpmp)
    RTpmp = np.einsum('sv,vln->sln',R,Tpmp)
    #go back to global frame
    #TTRTpmp = np.dot(T_,RTpmp)
    TTRTpmp = np.einsum('sv,vln->sln',T_,RTpmp)
    # append image to list of points
    pim = TTRTpmp + pplane[:,[-1]][:,:,None]
    tp = np.concatenate((tp,pim),axis=1)
    # if there are other planes enter recursion (last plane consumed)
    if Nplane>1:
        tp = mirror3b(tp,aplane[:,0:-1,:],pplane[:,0:-1])
    return tp
def mirror3c(tp, aplane, pplane):
    """ compute recursively the image of p wrt the list of facets

    Parameters
    ----------

    tp : MDA
        Collection of image points from screens in 3D space from a set
        of points
        (3 x Nf x Npt x Nc)
        Nf  : number of screens (grows by one per recursion level)
        Npt : number of points
        Nc  : number of collections (signatures)
        axes : (s x f x p x c)
    aplane : numpy.ndarray
        MDarray of (c)ollections of (v)ectors spanning the (f)aces in
        3D (s)pace
        (3 x Nfaces x 2 x Nc)
        axes : (s x f x v x c)
    pplane : numpy.ndarray
        array of points (3 x Nplane x Nsig) ; one reference point per face

    Returns
    -------

    tp : np.array
        sequence of images
        tp[:,-1] is the final image
        tp[:,0] is the original point

    Examples
    --------

    """
    # take last points of the sequence tp
    # tp : (s x f x p x c )
    # p : s x 1 x p x c
    #
    p = tp[:,[-1],:,:]
    Nplane = aplane.shape[1]# vector plane normalisation
    # norm : 3 x Nplane ; face normal = cross of the 2 spanning vectors
    norm = np.cross(aplane[:,:,0,:],aplane[:,:,1,:],axis=0)
    # T change basis matrix (spanning vectors + normal stacked on axis 2)
    # s x f x v x c
    # T (3 x Nplane,3)
    T = np.concatenate((aplane,norm[:,:,None,:]),axis=2)
    # take last transformation matrix
    # T_ : s x v x c
    T_ = T[:,-1,:,:]
    # pplane : s x f x c
    # pplane[:,[-1],:] : s x 1 x c
    # p : s x 1 x p x c
    # v : s x 1 x p x c ; vector from the face reference point to p
    v = p-pplane[:,[-1],:][:,:,None,:]
    # go to frame attached to reflection plane
    # TT : v x s x c
    #TT = np.swapaxes(T_,0,1)
    # TT
    # Tpmp = np.einsum('svm,vlnm->slnm',TT,v)
    #Tpmp = np.einsum('vsc,sfpc->vfpc',TT,v)
    Tpmp = np.einsum('svc,sfpc->vfpc',T_,v)
    # apply symmetry : flip the component along the face normal
    R = np.eye(3)
    R[2,2] = -1
    R = R[:,:,None]
    #RTpmp = np.dot(R,Tpmp)
    #RTpmp = np.einsum('svm,vlnm->slnm',R,Tpmp)
    RTpmp = np.einsum('svc,vfpc->sfpc',R,Tpmp)
    #go back to global frame
    #TTRTpmp = np.dot(T_,RTpmp)
    #TTRTpmp = np.einsum('svm,vlnm->slnm',T_,RTpmp)
    TTRTpmp = np.einsum('vsc,sfpc->vfpc',T_,RTpmp)
    # append image to list of points
    pim = TTRTpmp + pplane[:,[-1]][:,:,None,:]
    tp = np.concatenate((tp,pim),axis=1)
    # if there are other planes enter recursion (last face consumed)
    if Nplane>1:
        tp = mirror3c(tp,aplane[:,0:-1,:,:],pplane[:,0:-1,:])
    return tp
def intersect3c(tp, ti, aplane, pplane):
    """ compute recursively images of the last points of tp wrt facets

    Parameters
    ----------

    tp : np.ndarray (3 x Nf x Np x Nc)
        collection of image points ; axis 1 grows by one per recursion
    ti : np.ndarray (3 x Nf x Np x Nc)
        companion collection of points.
        NOTE(review): currently unused by the computation -- the
        original bound it to a local and never read it ; kept in the
        signature for compatibility.
    aplane : np.ndarray (3 x Nfaces x 2 x Nc)
        two spanning vectors per face
    pplane : np.ndarray (3 x Nfaces x Nc)
        one reference point per face

    Returns
    -------

    tp : np.ndarray
        tp with the successive images appended along axis 1

    Notes
    -----

    Fixes: the original referenced an undefined name ``p`` (NameError)
    and the recursive call dropped the ``ti`` argument (TypeError).

    """
    # last point(s) of the sequence : (3 x 1 x Np x Nc)
    p0 = tp[:, [-1], :, :]
    Nplane = aplane.shape[1]
    # face normal : cross of the two spanning vectors, (3 x Nf x Nc)
    norm = np.cross(aplane[:, :, 0, :], aplane[:, :, 1, :], axis=0)
    # change of basis matrix T : (3 x Nf x 3 x Nc)
    T = np.concatenate((aplane, norm[:, :, None, :]), axis=2)
    # last transformation matrix : (3 x 3 x Nc)
    T_ = T[:, -1, :, :]
    # vector from the face reference point to p0 : (3 x 1 x Np x Nc)
    v = p0 - pplane[:, [-1], :][:, :, None, :]
    # express v in the frame attached to the reflection plane
    Tpmp = np.einsum('svc,sfpc->vfpc', T_, v)
    # symmetry wrt the plane : flip the normal component
    R = np.eye(3)
    R[2, 2] = -1
    R = R[:, :, None]
    RTpmp = np.einsum('svc,vfpc->sfpc', R, Tpmp)
    # back to the global frame
    TTRTpmp = np.einsum('vsc,sfpc->vfpc', T_, RTpmp)
    # append image to the list of points
    pim = TTRTpmp + pplane[:, [-1]][:, :, None, :]
    tp = np.concatenate((tp, pim), axis=1)
    # recurse on the remaining faces (last face consumed)
    if Nplane > 1:
        tp = intersect3c(tp, ti, aplane[:, 0:-1, :, :], pplane[:, 0:-1, :])
    return tp
def mirror3(tp, aplane, pplane):
    """ compute recursively the image of the last point of tp wrt planes

    Parameters
    ----------

    tp : numpy.ndarray (3 x Ns)
        sequence of points ; one column is appended per recursion
    aplane : numpy.ndarray
        array of planes (3 x Nplane x 2) ; two spanning vectors per plane
    pplane : numpy.ndarray
        array of points (3 x Nplane) ; one reference point per plane

    Returns
    -------

    tp : np.array
        sequence of images
        tp[:,-1] is the final image
        tp[:,0] is the original point

    Examples
    --------

    >>> tp = np.array([[1,1,1]]).T
    >>> p1 = np.array([[0,0] ,[1,0],[0,1]]) #yz
    >>> p2 = np.array([[1,0] ,[0,0],[0,1]]) #yz
    >>> p3 = np.array([[1,0] ,[0,1],[0,0]]) #xy
    >>> aplane = np.hstack((p1,p2,p3))

    """
    # take the last point of the sequence : (3 x 1)
    p = tp[:, [-1]]
    Nplane = aplane.shape[1]
    # plane normal = cross of the 2 spanning vectors : (3 x Nplane)
    norm = np.cross(aplane[:, :, 0], aplane[:, :, 1], axis=0)
    # change of basis matrix T : (3 x Nplane x 3)
    T = np.dstack((aplane, norm[:, :, None]))
    # take the last transformation matrix
    T_ = T[:, -1, :]
    # express p in the frame attached to the reflection plane
    Tpmp = np.dot(T_.T, p - pplane[:, [-1]])
    # apply symmetry : flip the component along the plane normal
    R = np.eye(3)
    R[2, 2] = -1
    RTpmp = np.dot(R, Tpmp)
    # go back to the global frame
    TTRTpmp = np.dot(T_, RTpmp)
    # append image ; ValueError covers an incompatible/empty tp
    # (narrowed from the original bare except)
    try:
        tp = np.hstack((tp, TTRTpmp + pplane[:, [-1]]))
    except ValueError:
        tp = TTRTpmp + pplane[:, [-1]]
    # recurse on the remaining planes (last plane consumed)
    if Nplane > 1:
        tp = mirror3(tp, aplane[:, 0:-1, :], pplane[:, 0:-1])
    return tp
def mirror(p, pa, pb):
    """ compute the image of points p wrt the line through (pa, pb)

    Parameters
    ----------

    p : numpy.ndarray (2,) or (2 x Np)
        points to image
    pa : numpy.ndarray
        segment tail
    pb : numpy.ndarray
        segment head

    Returns
    -------

    M : numpy.ndarray (2 x Np)
        mirrored points

    Examples
    --------

    .. plot::
        :include-source:

        >>> from pylayers.util.geomutil import *
        >>> from pylayers.util.plotutil import *
        >>> import matplotlib.pyplot as plt
        >>> import numpy as np
        >>> np.random.seed(0)
        >>> p = np.random.randint(-2,2,(2,3))
        >>> pa = np.array([-0.5,1])
        >>> pb = np.array([0,0])
        >>> M = mirror(p,pa,pb)
        >>> print(M)
        [[ 2.8 -1.4 -0.2]
         [ 0.4 -0.2  1.4]]
        >>> plt.plot(p[0,:],p[1,:],'or',alpha=0.2)
        >>> plt.plot(M[0,:],M[1,:],'ob',alpha=0.2)
        >>> displot(p,M,alpha=0.2)
        >>> axis = np.vstack((pa,pb))
        >>> plt.plot(axis[:,0],axis[:,1])

    """
    # promote single points to column vectors
    if np.shape(pa) == (2,):
        pa = pa.reshape(2, 1)
    if np.shape(pb) == (2,):
        pb = pb.reshape(2, 1)
    if np.shape(p) == (2,):
        p = p.reshape(2, 1)
    pab = pb - pa
    # squared length of the mirror segment
    alpha = np.sum(pab * pab, axis=0)
    # guard degenerate (pa == pb) segments against division by zero
    zalpha = np.where(alpha == 0.)
    alpha[zalpha] = 1.
    dsa = 2.0 / alpha
    pab0 = pa[0, :] - pb[0, :]
    pab1 = pa[1, :] - pb[1, :]
    # coefficients of the affine reflection  x' = -S.x + vc0
    a = 1 - dsa * (pab1 ** 2)
    b = -dsa * pab0 * pab1
    c = dsa * (pa[0, :] * pab1 ** 2 - pa[1, :] * pab1 * pab0)
    d = dsa * (pa[1, :] * pab0 ** 2 - pa[0, :] * pab1 * pab0)
    # index [0] : only the first segment is used (scalar assignment
    # avoids the deprecated size-1-array-to-scalar conversion)
    S = np.zeros((2, 2))
    S[0, 0] = -a[0]
    S[0, 1] = b[0]
    S[1, 0] = b[0]
    S[1, 1] = a[0]
    vc0 = np.array([c[0], d[0]]).reshape(2, 1)
    return np.dot(-S, p) + vc0
def axmat(pa, pb):
    """ reflection matrix and translation vector for the axis (pa, pb)

    Parameters
    ----------

    pa : numpy.ndarray
        segment tail
    pb : numpy.ndarray
        segment head

    Returns
    -------

    S : numpy.ndarray (2 x 2)
        symmetry matrix
    v : numpy.ndarray (2,)
        translation vector

    Notes
    -----

    If x is the coordinate of the point to mirror, the mirrored point
    x' about the line (pa, pb) is obtained with :

        x' = np.dot(S, x) + v

    Examples
    --------

    .. plot::
        :include-source:

        >>> from pylayers.util.geomutil import *
        >>> from pylayers.util.plotutil import *
        >>> import matplotlib.pyplot as plt
        >>> import numpy as np
        >>> p = np.random.randn(2,10)
        >>> pa = np.array([-0.5,1])
        >>> pb = np.array([0,0])
        >>> S,v = axmat(pa,pb)
        >>> M = np.dot(S,p) + v[:,None]
        >>> plt.plot(p[0,:],p[1,:],'or',alpha=0.2)
        >>> plt.plot(M[0,:],M[1,:],'ob',alpha=0.2)
        >>> displot(p,M,alpha=0.2)
        >>> axis = np.vstack((pa,pb))
        >>> plt.plot(axis[:,0],axis[:,1])

    """
    # promote single points to column vectors
    if np.shape(pa) == (2,):
        pa = pa.reshape(2, 1)
    if np.shape(pb) == (2,):
        pb = pb.reshape(2, 1)
    pab = pb - pa
    # squared length of the axis segment
    alpha = np.sum(pab * pab, axis=0)
    # guard degenerate (pa == pb) axes against division by zero
    zalpha = np.where(alpha == 0.)
    alpha[zalpha] = 1.
    dsal = (2. / alpha)
    pampby = pa[1, :] - pb[1, :]
    pbmpax = pb[0, :] - pa[0, :]
    prod = pbmpax * pampby
    # coefficients of the affine reflection  x' = S.x + vc0
    a = 1 - dsal * (pampby ** 2)
    b = dsal * prod
    c = dsal * (pa[0, :] * (pampby ** 2) + pa[1, :] * prod)
    d = dsal * (pa[1, :] * (pbmpax ** 2) + pa[0, :] * prod)
    # only the first axis is used (index [0])
    S = np.array([[a[0], -b[0]], [-b[0], -a[0]]])
    vc0 = np.array([c[0], d[0]])
    return S, vc0
def distseg(a, b, c, d, alpha, beta):
    """ squared distance between parameterized points on two segment sets

    Parameters
    ----------

    a : (3xN) initial point segment 1
    b : (3xN) end point segment 1
    c : (3xN) starting point segment 2
    d : (3xN) end point segment 2
    alpha : (N,) parameter along (a,b)
    beta : (N,) parameter along (c,d)

    Returns
    -------

    f : (N,)
        squared distance between  a - alpha*(a-b)  and  c + beta*(d-c)

    Examples
    --------

    >>> import numpy as np
    >>> np.random.seed(0)
    >>> a = np.random.rand(3,10)
    >>> b = np.random.rand(3,10)
    >>> c = np.random.rand(3,10)
    >>> d = np.random.rand(3,10)
    >>> alpha,beta,dmin = dmin3d(a,b,c,d)
    >>> alpha[alpha<0]=0
    >>> alpha[alpha>1]=1
    >>> beta[beta<0]=0
    >>> beta[beta>1]=1
    >>> f = distseg(a,b,c,d,alpha,beta)
    >>> p1 = a - alpha*(a-b)
    >>> p2 = c + beta*(d-c)
    >>> v = p1-p2
    >>> g = np.sum(v*v,axis=0)
    >>> diff = np.sum(f-g,axis=0)
    >>> np.testing.assert_almost_equal(diff,0)

    """
    ac = c - a
    cd = d - c
    ba = a - b
    # pairwise dot products of the segment vectors
    u0 = (ac * ac).sum(axis=0)
    u1 = (ba * ac).sum(axis=0)
    u2 = (cd * ac).sum(axis=0)
    u3 = (cd * ba).sum(axis=0)
    u4 = (ba * ba).sum(axis=0)
    u5 = (cd * cd).sum(axis=0)
    # expanded form of |(a - alpha*(a-b)) - (c + beta*(d-c))|^2
    return (u0
            + 2 * (alpha * u1 + beta * u2 + alpha * beta * u3)
            + alpha * alpha * u4
            + beta * beta * u5)
def dmin3d(a, b, c, d):
    """ minimal distance between two sets of 3D segments (line-to-line)

    Parameters
    ----------

    a : (3xN) initial point segment 1
    b : (3xN) end point segment 1
    c : (3xN) starting point segment 2
    d : (3xN) end point segment 2

    Returns
    -------

    alpha : (N,) parameterization of the closest point on (a,b)
    beta : (N,) parameterization of the closest point on (c,d)
    dmin : (N,) minimal distance between the two supporting lines

    """
    ac = c - a
    cd = d - c
    ba = a - b
    # pairwise dot products of the segment vectors
    u0 = (ac * ac).sum(axis=0)
    u1 = (ba * ac).sum(axis=0)
    u2 = (cd * ac).sum(axis=0)
    u3 = (cd * ba).sum(axis=0)
    u4 = (ba * ba).sum(axis=0)
    u5 = (cd * cd).sum(axis=0)
    # solve the 2x2 normal equations for the closest-point parameters
    den = u4 * u5 - u3 * u3
    alpha = (u2 * u3 - u1 * u5) / (1. * den)
    beta = (u1 * u3 - u2 * u4) / (1. * den)
    dmin = np.sqrt(u0
                   + 2 * (alpha * u1 + beta * u2 + alpha * beta * u3)
                   + alpha * alpha * u4
                   + beta * beta * u5)
    return (alpha, beta, dmin)
# def gram_schmid(V):
# """
# Gram-Schmid orthonormalization of a set of `M` vectors, in-place.
# Parameters
# ----------
# V : array, shape (N, M)
# Notes
# -----
# from
# http://numpy-discussion.10968.n7.nabble.com/Efficient-orthogonalisation-with-scipy-numpy-td23635.html
# """
# # XXX: speed can be improved by using routines from scipy.lib.blas
# # XXX: maybe there's an orthonormalization routine in LAPACK, too,
# # apart from QR. too lazy to check...
# n = V.shape[1]
# for k in xrange(n):
# V[:,k] /= np.linalg.norm(V[:,k])
# for j in xrange(k+1, n):
# V[:,j] -= np.vdot(V[:,j], V[:,k]) * V[:,k]
# return V
def gram_schmidt(Vini, force_direct=True):
    """ Gram-Schmidt orthonormalization of stacked 3-vector bases

    Parameters
    ----------

    Vini : array
        shape (3, Nv, nf) where number of vectors Nv = 3 and nf is
        the number of frames
    force_direct : boolean
        reorder the input vectors so the resulting bases are direct
        (det > 0), using the first frame as reference

    Example
    -------

    >>> import pylayers.util.geomutil as geu
    >>> import numpy as np
    >>> Nv = 3
    >>> Nframes = 10
    >>> V = np.random.rand(3,Nv,Nframes)
    >>> VG = geu.gram_schmidt(V)

    """
    if force_direct:
        # pick the first vector ordering whose first frame is direct
        for perm in permutations((0, 1, 2), 3):
            trial = np.vstack((Vini[:, perm[0], 0],
                               Vini[:, perm[1], 0],
                               Vini[:, perm[2], 0]))
            if np.linalg.det(trial) > 0:
                Vini = Vini[:, perm, :]
                break
    u0 = Vini[:, 0, :]
    u1 = Vini[:, 1, :]
    u2 = Vini[:, 2, :]
    e0 = u0 / np.linalg.norm(u0, axis=0)
    # remove the e0 component from u1
    w1 = u1 - np.sum(u1 * e0, axis=0) * e0
    e1 = w1 / np.linalg.norm(w1, axis=0)
    # remove the e0 and e1 components from u2
    w2 = u2 - np.sum(u2 * e0, axis=0) * e0 - np.sum(u2 * e1, axis=0) * e1
    e2 = w2 / np.linalg.norm(w2, axis=0)
    V = np.hstack((e0[:, None, :], e1[:, None, :], e2[:, None, :]))
    if force_direct:
        dets = np.linalg.det(np.rollaxis(V, 2))
        # every basis must be direct and non-degenerate
        assert not np.any(dets < 0)
        assert not np.any(dets == 0.)
    return V
def qrdecomp(V):
    """ orthonormalize sets of vectors via QR decomposition (in place)

    Parameters
    ----------

    V : array
        shape (3, Nv, nf) where number of vectors Nv = 3 and nf is an
        integer (number of frames)

    Returns
    -------

    V : array
        same array with V[:,:,k] replaced by the orthonormal Q factor
        of its QR decomposition

    Notes
    -----

    Fixes: removed leftover ipdb breakpoints (which dropped callers
    into the debugger) and the Python 2 only ``xrange``.

    NOTE(review): the original's commented-out sign-fixing logic
    (re-aligning frames anti-aligned with the cylinder axis) was never
    enabled ; Q columns may point opposite to the input vectors.

    References
    ----------

    from http://numpy-discussion.10968.n7.nabble.com/Efficient-orthogonalisation-with-scipy-numpy-td23635.html

    Example
    -------

    >>> import numpy as np
    >>> import pylayers.util.geomutil as geu
    >>> u=np.random.rand(3,1,10)
    >>> v=np.random.rand(3,1,10)
    >>> w=np.random.rand(3,1,10)
    >>> V = np.hstack((u,v,w))
    >>> W = geu.qrdecomp(V)
    >>> assert np.allclose(abs(np.linalg.det(W[:,:,0])),1.0)

    """
    nf = np.shape(V)[2]
    for k in range(nf):
        # only the orthonormal factor Q is kept; R is discarded
        V[:, :, k], _ = np.linalg.qr(V[:, :, k])
    return V
def check_point_unicity(A):
    """ check if all rows of an array are unique

    Parameters
    ----------

    A : np.ndarray (Npt, 2|3)

    Returns
    -------

    similar : list
        indexes of rows that also appear elsewhere in A
        (empty list when all points are distinct)

    Notes
    -----

    Fixes: Python 2 only ``xrange`` replaced by ``range`` ; docstring
    example typo ``np.np.vstack`` corrected.

    Example
    -------

    >>> import numpy as np
    >>> a = np.arange(10)
    >>> a = np.vstack((a,a))
    >>> check_point_unicity(a.T)
    []
    >>> b=np.array([4,4])
    >>> aa=np.concatenate((a,b[:,None]),axis=1)
    >>> check_point_unicity(aa.T)
    [4, 10]

    """
    similar = []
    for ua in range(len(A)):
        # roll row ua to the front, then compare it to all other rows
        rA = np.roll(A, -ua, axis=0)
        if any((A[ua] == x).all() for x in rA[1:]):
            similar.append(ua)
    return similar
def get_pol_angles(poly, unit='rad', inside=True):
    """ find angles of a single Gt cycle of the layout.

    Parameters
    ----------

    poly : polygon
        shapely-like polygon (must expose ``exterior.xy`` ; an optional
        ``vnodes`` attribute provides point numbering)
    unit : str
        'deg' : degree values
        'rad' : radian values
    inside : boolean
        True : compute the inside angles of the cycle.
            (a.k.a. in regard of the interior of the polygon)
        False : compute the outside angles of the cycle.
            (a.k.a. in regard of the exterior of the polygon)

    Returns
    -------

    (u,a)
    u : int (Np)
        point number
    a : float (Np)
        associated angle to the point

    Notes
    -----

    http://www.mathopenref.com/polygonexteriorangles.html

    """
    # exterior ring without the repeated closing point : (2 x Np)
    pt = np.array(poly.exterior.xy)[:, :-1]
    if hasattr(poly, 'vnodes'):
        # layout polygons : negative vnodes are point numbers
        upt = poly.vnodes[poly.vnodes < 0]
    else:
        # NOTE(review): plain range here has no .astype ; the
        # astype(int) below presumably expects the vnodes branch or a
        # numpy-convertible sequence -- TODO confirm
        upt = range(np.array(poly.exterior.xy).shape[1])
    # flip orientation in case of negative area
    if SignedArea(pt) < 0:
        upt = upt[::-1]
        pt = pt[:, ::-1]
    # edge vectors between consecutive points (wrapping around)
    ptroll = np.roll(pt, 1, axis=1)
    v = pt - ptroll
    v = np.hstack((v, v[:, 0][:, None]))
    # normalized edge vectors
    vn = v / np.sqrt(np.sum((v) * (v), axis=0))
    v0 = vn[:, :-1]
    v1 = vn[:, 1:]
    # signed turn between consecutive edges
    cross = np.cross(v0.T, v1.T)
    dot = np.sum(v0 * v1, axis=0)
    ang = np.arctan2(cross, dot)
    # convert the signed turn into the interior angle at each vertex
    uneg = ang < 0
    ang[uneg] = -ang[uneg] + np.pi
    ang[~uneg] = np.pi - ang[~uneg]
    if not inside:
        # exterior angle is the 2*pi complement
        ang = 2 * np.pi - ang
    if unit == 'deg':
        return upt.astype(int), ang * 180 / np.pi
    elif unit == 'rad':
        return upt.astype(int), ang
def reflection_matrix(U):
    """ 2D reflection matrices about the lines directed by U

    Parameters
    ----------

    U : np.ndarray (2, Nvec)
        direction vectors (need not be normalized)

    Returns
    -------

    M : nd array
        (2, 2, Nvec)

    Notes
    -----

    https://en.wikipedia.org/wiki/Transformation_matrix#Reflection

    u = np.array([2,2])
    U = np.vstack((u, u/2., 2*u)).T

    """
    ux = U[0, :]
    uy = U[1, :]
    # squared norm of each direction vector
    norm2 = np.linalg.norm(U, axis=0) ** 2
    dd = ux ** 2 - uy ** 2
    ad = 2 * ux * uy
    return np.array([[dd, ad], [ad, -dd]]) / norm2
def ellipse2D(pa, pb, l, N):
    """ sample N points on the ellipse of foci (pa, pb) and excess l

    Parameters
    ----------

    pa : np.array
        focus a
    pb : np.array
        focus b
    l : float
        excess (path length above the focal distance)
    N : int
        Number of points

    Returns
    -------

    points : np.array
        2 x N

    Examples
    --------

    >>> import numpy as np
    >>> import matplotlib.pyplot as plt
    >>> pa = np.array([0,1])
    >>> pb = np.array([10,3])
    >>> N = 100
    >>> l = 1
    >>> p = ellipse2D(pa,pb,l,N)
    >>> plt.plot(pa[0],pa[1],'ob')
    >>> plt.plot(pb[0],pb[1],'or')
    >>> plt.plot(p[0,:],p[1,:])

    """
    focal = np.sqrt(np.dot(pb - pa, pb - pa))
    # semi-major and semi-minor axes from the excess path length
    smaj = focal / 2. + l / 4.
    smin = 0.5 * np.sqrt(focal * l)
    center = (pa + pb) / 2.
    # unit vector along the major axis and its in-plane normal
    u = (pb - pa) / focal
    n = np.cross(np.array([0, 0, 1]), u)[0:2]
    t = np.linspace(0, 2 * np.pi, N)
    return (center[:, None]
            + smaj * u[:, None] * np.cos(t[None, :])
            + smin * n[:, None] * np.sin(t[None, :]))
if __name__ == "__main__":
    # interactive matplotlib mode, then run the module doctests
    plt.ion()
    doctest.testmod()
|
dialounke/pylayers
|
pylayers/util/geomutil.py
|
Python
|
mit
| 167,030
|
[
"Mayavi"
] |
720e3d654b271aa310aa567a768e37ccd709da80f1c92ae408980ba66d5fa248
|
# Copyright (C) 2011-2012 Mark Burnett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import operator
import bisect
import datetime
import itertools
import random
import time
import numpy
import scipy
import scipy.stats
from actin_dynamics import database
from actin_dynamics.numerical import interpolation, utils
from . import logger
log = logger.getLogger(__file__)
# parameters to vary
# search method
#     Brent's method
#     successive parabolic interpolation?
# our own brew?
# Optimization targets/fitnesses
# End conditions
# fitness variation across bracket
# parameter difference (bracket midpoint + fitting error (bracket width / 2)
# track running jobs
# use polling
# maintain job queue
# what fitting algorithm am I going to use?
# can I insert fitpy into this whole shebang?
# the real question is job management, etc.
# what am I actually going to use this for right now?
# at least fitting melki rates given ftc
# at most fitting rates + ftcs at once
# so:
# at least: 1 parameter, 1 objective
# at most: 2 parameters, 2 objectives
# therefore, I can scrape by with a simple 1-d search
# this is the best way to get started
# however, it doesn't make sense to use these simple 1-d searches with
# parallel function evaluations
#
# ultimately,
# -> I virtually *have* to use some sort of swarming or genetic algorithm
# -> I don't have to implement a multi-objective evaluator
# having waiting worker proc's does the following:
# means that the algorithm has to work with sparse/random data
# e.g. can't wait for a GA generation to finish
# -> continuous GA
# I want to be able to resume a session (load existing, then add on new jobs)
# Open questions
# How big a problem is our statistical/simulation error?
# Just calculate worst case intervals at each step based on our errors?
# Log warning when interval grows.
# (but *do* allow interval to grow..we might have been wrong)
# calculate optimistic and pessimistic fitness based on reported error?
# when we kill population, remove the ones with the least upside
# (lowest "optimistic" fitness)
# report best fit and best pessimistic fit
# report overlapping range
# min/max parameter which overlaps with the best optimistic fit
# this is our best guess at fitting error
# * actually this would be a good guess of the fitting error
# if we have sampled well near the best fit
class SimplePopulation(object):
def __init__(self, parameter_guess=None,
# gaussian_width_fraction=0.6/2,
gaussian_width_fraction=1.2/2,
parameter_name=None, objective_name=None, dbs=None,
session=None, process=None, plot=False, max_population_size=None,
parameter_tolerance=0.00001, parameter_distance_fraction=0.1):
self.dbs = dbs
self.session = session
self.model = session.models[0]
self.experiment = session.experiments[0]
self.process = process
self.objective_name = objective_name
self.parameter_name = parameter_name
self.parabola_peak = parameter_guess
self.gaussian_width_fraction = gaussian_width_fraction
self.max_population_size = max_population_size
self.parameter_tolerance = parameter_tolerance
self.parameter_distance_fraction = parameter_distance_fraction
self.plot = plot
self.coeffs = None
self.last_parabola_peak = self.parabola_peak * 10
self.pairs = []
self.inverted_parabola = False
self._num_completed_jobs = 0
def log_report(self):
log.critical('Best fit: %s = %s.',
self.parameter_name, self.parabola_peak)
log.critical('Completed %s jobs.', self._num_completed_jobs)
def add_completed_job(self, job):
return self.add_completed_jobs([job])
# It's slightly better to work on a whole list, then update our stats.
def add_completed_jobs(self, jobs):
for job in jobs:
run = job.run
fitness = run.get_objective(self.objective_name)
parameter = run.parameters[self.parameter_name]
if fitness is not None:
bisect.insort(self.pairs, (fitness, parameter))
self.pairs = self.pairs[:self.max_population_size]
self._num_completed_jobs += 1
self.fit_parabola()
def acceptable_fit(self):
ordered_y, ordered_x = zip(*sorted(self.pairs,
key=operator.itemgetter(1)))
parabola_y = scipy.polyval(self.coeffs, ordered_x)
if self.plot:
import matplotlib.pyplot
pyplot = matplotlib.pyplot
pyplot.ion()
pyplot.draw()
a = pyplot.subplot(1, 1, 1)
a.clear()
# a.set_xscale('log')
# a.set_yscale('log')
pyplot.plot(ordered_x, ordered_y, 'ro')
pyplot.plot(ordered_x, parabola_y, 'b-')
pyplot.axvline(self.parabola_peak, 0, 1,
linestyle=':', color='g')
pyplot.draw()
return ((self.last_parabola_peak is not None) and
((abs(self.parabola_peak - self.last_parabola_peak)
/ self.parabola_peak) < self.parameter_tolerance))
# ordered_x = sorted(self._x)
# ordered_y = numpy.array(sorted(self._y))
#
# parabola_fit_differences = numpy.array(self._y
# - scipy.polyval(self.coeffs, self._x))
# parabola_fit_differences -= numpy.mean(parabola_fit_differences)
# parabola_fit_differences /= numpy.sqrt(
# numpy.var(parabola_fit_differences))
# parabola_fit_differences = sorted(parabola_fit_differences**2)
# length = len(parabola_fit_differences)
#
# data = numpy.array(list(utils.running_total(parabola_fit_differences)))
# data /= length
#
# order = 3
# cdf = scipy.stats.chi2.cdf(parabola_fit_differences, order)
# self.chi2_difference = sum((data - cdf)**2)/length
#
# if self.plot:
# log.info('Best parameter = %s, R2/N = %s, expected error = %s',
# self.best_parameter, self.R2/len(self.pairs),
# self.chi2_difference)
# import matplotlib.pyplot
# pyplot = matplotlib.pyplot
#
# pyplot.ion()
# pyplot.draw()
#
# a = pyplot.subplot(2, 1, 1)
# a.clear()
# pyplot.plot(self._x, self._y, 'ro')
# pyplot.plot(ordered_x, scipy.polyval(self.coeffs, ordered_x), 'b-')
#
# pyplot.axvline(self.best_parameter, 0, 1,
# linestyle=':', color='g')
#
#
# a = pyplot.subplot(2, 1, 2)
# a.clear()
# a.set_xscale('log')
# pyplot.plot(parabola_fit_differences, data, 'r-')
# pyplot.plot(parabola_fit_differences, cdf, 'b-')
#
# pyplot.draw()
def fit_parabola(self):
if self.pairs:
# Choose points for parabolic fit
if (self.coeffs is not None and not self.inverted_parabola):
minx = (1 - self.parameter_distance_fraction) * self.parabola_peak
maxx = (1 + self.parameter_distance_fraction) * self.parabola_peak
pairs = [(y, x) for y, x in self.pairs if minx < x < maxx]
if len(pairs) < 5:
pairs = self.pairs
else:
pairs = self.pairs
self._y, self._x = zip(*pairs)
self.coeffs, R2, n, svs, rcond = scipy.polyfit(self._x, self._y,
2, full=True)
self.inverted_parabola = self.coeffs[0] < 0
self.last_parabola_peak = self.parabola_peak
self.parabola_peak = - self.coeffs[1] / (2 * self.coeffs[0])
# self.parabola_peak = scipy.polyval(self.coeffs, self.best_parameter)
# if R2 > 0:
# self.R2 = float(R2 / self.parabola_peak)
# else:
# self.R2 = R2
def create_jobs(self, number):
if self.inverted_parabola or self.parabola_peak < 0:
# Use the best fit parameter as the center of the gaussian
center = min(self.pairs)[1]
return self._gaussian_create_jobs(number, center=center)
else:
return self._gaussian_create_jobs(number)
def _gaussian_create_jobs(self, number, center=None):
        """Create up to *number* jobs with parameters drawn from a Gaussian.

        The Gaussian is centered on *center* (default: the current parabola
        peak) with standard deviation center * gaussian_width_fraction.
        Negative draws are discarded, so fewer than *number* jobs may be
        created.  Returns the set of ids of the jobs that were created.
        """
        if center is None:
            center = self.parabola_peak
        log.debug('Generating new parameters from a Gaussian: center = %s.',
                center)
        parameters = scipy.stats.norm.rvs(loc=center,
                scale=(center * self.gaussian_width_fraction),
                size=number)
        # Parameters must be non-negative; drop any negative draws.
        parameters = filter(lambda x: x >= 0, parameters)
        jobs = []
        with self.dbs.transaction:
            for p in parameters:
                run_pars = {self.parameter_name: p}
                run = _create_run(run_pars, self.model, self.experiment)
                job = database.Job(run=run, creator=self.process)
                jobs.append(job)
        # NOTE(review): *number* is the requested count; the actual count can
        # be lower after the negative-parameter filter above.
        log.info('Created %s new jobs.', number)
        result = set([j.id for j in jobs])
        # A None id means the database did not assign an id to some job.
        if None in result:
            result.discard(None)
            log.error('Some jobs not added to the job queue. Added ids: %s',
                    result)
        return result
class SimpleFitController(object):
    """Drives a fitting run: seeds an initial job population and (in the
    currently commented-out loop in run()) would iteratively refill the job
    queue until the fit converges or max_iterations is reached."""
    def __init__(self, dbs=None, session=None, process=None, population=None,
                 min_queue_size=0, max_queue_size=200, initial_population_size=300,
                 polling_period=5, min_iterations=5, max_iterations=1):
        # NOTE(review): the default max_iterations (1) is smaller than the
        # default min_iterations (5); presumably intentional while the
        # iteration loop in run() is commented out -- confirm.
        self.dbs = dbs
        self.session = session
        self.process = process
        self.population = population
        if initial_population_size:
            # The initial batch must at least fill the queue threshold.
            assert initial_population_size >= min_queue_size
            self.initial_population_size = initial_population_size
        else:
            # Fall back to one full queue's worth of jobs.
            self.initial_population_size = max_queue_size
        self.polling_period = polling_period
        self.min_queue_size = min_queue_size
        self.max_queue_size = max_queue_size
        self.min_iterations = min_iterations
        self.max_iterations = max_iterations
    def run(self):
        """Create the initial job population.  The iterative
        refill-and-converge loop is currently disabled (see below)."""
        t_initial = datetime.datetime.now()
        queued_job_ids = self.population.create_jobs(
                self.initial_population_size)
#        for iteration in xrange(self.max_iterations):
#            # Wait until we drop below our queue size threshold
#            current_queue_size = self.min_queue_size + 1
#            while current_queue_size > self.min_queue_size:
#                time.sleep(self.polling_period)
#
#                current_queue_size = self.dbs.query(database.Job
#                        ).filter_by(creator=self.process
#                        ).filter_by(worker=None
#                        ).filter_by(complete=False).count()
#
#                # While waiting, add the completed jobs to the population
#                newly_completed_jobs = _get_finished_jobs(self.dbs,
#                                                          queued_job_ids)
#                for job in newly_completed_jobs:
#                    queued_job_ids.discard(job.id)
#                self.population.add_completed_jobs(newly_completed_jobs)
#
#            # If this fit is good enough, then break.
#            if (iteration >= self.min_iterations
#                    and self.population.acceptable_fit()):
#                break
#
#            # Otherwise, make more jobs
#            newly_queued_job_ids = self.population.create_jobs(
#                    self.max_queue_size - current_queue_size)
#            for new_job_id in newly_queued_job_ids:
#                queued_job_ids.add(new_job_id)
#            log.info('Added %s jobs to the queue.', len(newly_queued_job_ids))
#
#        t_final = datetime.datetime.now()
#
#        total_runtime = t_final - t_initial
#        self.population.log_report()
#        log.critical('Completed %s iterations in %s.',
#                     iteration + 1, total_runtime)
#
#        return self.population.parabola_peak
def _create_initial_jobs(dbs, model=None, experiment=None,
                         initial_population_size=None,
                         parameter_name=None, process=None,
                         parameter_min=None, parameter_max=None):
    """Seed the job queue with uniformly random parameter values.

    Draws initial_population_size values uniformly from
    [parameter_min, parameter_max) and creates one Run and one Job per
    value inside a single transaction.  Returns the set of ids of the
    created jobs, with any None ids removed (and logged as an error).
    """
    initial_jobs = []
    with dbs.transaction:
        for i in xrange(initial_population_size):
            run_pars = {parameter_name: _random_value(
                parameter_min, parameter_max)}
            run = _create_run(run_pars, model, experiment)
            job = database.Job(run=run, creator=process)
            initial_jobs.append(job)
    log.info('Created %s initial jobs.', initial_population_size)
    result = set([j.id for j in initial_jobs])
    # A None id indicates a job the database failed to register.
    if None in result:
        result.discard(None)
        log.error('Some jobs not added to the job queue. Added ids: %s',
                  result)
    return result
def _random_value(parameter_min, parameter_max):
width = parameter_max - parameter_min
return parameter_min + width * random.random()
def _choose_two(sequence, select_function):
"""
Picks 2 unique items from the sequence.
"""
p1 = select_function(sequence)
p2 = p1
while p1 == p2:
p2 = select_function(sequence)
return p1, p2
def _weighted_choice(sequence, width=None):
"""
Choose a random element from sequence, weighted toward the
front of the list.
"""
if not width:
width = float(len(sequence))/4
j = len(sequence)
while j >= len(sequence):
j = abs(int(random.normalvariate(0, width)))
return sequence[j]
def _get_finished_jobs(dbs, queued_job_ids):
    """Return all completed Job rows whose ids are in *queued_job_ids*."""
    # Query inside a transaction so completeness flags are read consistently.
    with dbs.transaction:
        result = dbs.query(database.Job).filter_by(complete=True
                ).filter(database.Job.id.in_(queued_job_ids)).all()
    return result
def _create_run(parameters, model, experiment):
    """Create a Run for *parameters*, with one empty Objective per bind
    declared by the experiment."""
    run = database.Run(parameters=parameters, model=model,
                       experiment=experiment)
    # Each Objective attaches itself to the run via the run= keyword.
    for bind in experiment.objective_list:
        database.Objective(parameters={}, bind=bind, run=run)
    return run
|
mark-burnett/filament-dynamics
|
actin_dynamics/fitting_controller.py
|
Python
|
gpl-3.0
| 14,927
|
[
"Gaussian"
] |
c6ab78d7c6db2aa5d6dbaf981a30a052e1f33e7c9c88594170318e98c8592390
|
#!/usr/bin/env python
"check build refs"
#
# Copyright (C) 2011-2021 ABINIT Group (Yann Pouillon)
#
# This file is part of the ABINIT software package. For license information,
# please see the COPYING file in the top-level directory of the ABINIT source
# distribution.
#
from __future__ import unicode_literals, division, print_function, absolute_import
from abirules_tools import find_abinit_toplevel_directory
from time import gmtime,strftime
import os
import re
import sys
def getstatusoutput(cmd):
    """
    Return (status, output) of executing cmd in a shell.
    Execute the string 'cmd' in a shell with 'check_output' and
    return a 2-tuple (status, output). Universal newlines mode is used,
    meaning that the result with be decoded to a string.
    A trailing newline is stripped from the output.
    The exit status for the command can be interpreted
    according to the rules for the function 'wait'. Example:
    """
    from subprocess import check_output, STDOUT, CalledProcessError
    try:
        output = check_output(cmd, shell=True, universal_newlines=True, stderr=STDOUT)
        rc = 0
    except CalledProcessError as exc:
        # Non-zero exit: the captured output lives on the exception.
        output = exc.output
        rc = exc.returncode
    # Strip exactly one trailing newline, mirroring commands.getstatusoutput.
    if output.endswith('\n'):
        output = output[:-1]
    return rc, output
def main():
    """Compare generated build-example files against their references.

    Diffs every file in abichecks/buildsys/Refs against its counterpart in
    doc/build/config-examples, reports differing or missing files on
    stderr (diff text goes to stdout), and returns the number of problems
    found (0 on success).
    """
    home_dir = find_abinit_toplevel_directory()

    # Init
    nerr = 0
    bex_diffs = list()
    bex_missing = list()
    bex_dir = os.path.join(home_dir, "doc/build/config-examples")
    ref_dir = os.path.join(home_dir, "abichecks/buildsys/Refs")
    assert os.path.exists(bex_dir) and os.path.exists(ref_dir)

    # Check files: diff each reference against its generated counterpart
    ref_list = sorted(os.listdir(ref_dir))
    for ref_file in ref_list:
        if os.path.exists("%s/%s" % (bex_dir, ref_file)):
            ret, tmp = getstatusoutput("diff %s/%s %s/%s" % (ref_dir, ref_file, bex_dir, ref_file))
            if ret != 0:
                bex_diffs.append(ref_file)
                sys.stdout.write(tmp)
        else:
            bex_missing.append(ref_file)
    nerr = len(bex_diffs) + len(bex_missing)

    # Report any mismatch
    if nerr > 0:
        sys.stderr.write("%s: reporting wrongly generated build examples\n\n" % (os.path.basename(sys.argv[0])))
        # BUGFIX: the original write lacked the trailing "\n", fusing this
        # sentence with the legend line that follows it.
        sys.stderr.write("Reference files are in ~abinit/abichecks/buildsys\n")
        sys.stderr.write("X: D=Difference detected / M=Missing File\n\n")
        sys.stderr.write("%s %-64s\n" % ("X", "File"))
        sys.stderr.write("%s %s\n" % ("-", "-" * 64))
        for bex in bex_diffs:
            sys.stderr.write("%s %-64s\n" % ("D", bex))
        for bex in bex_missing:
            sys.stderr.write("%s %-64s\n" % ("M", bex))
        sys.stderr.write("\n")

    return nerr
# Exit with the number of detected problems so callers (e.g. the build
# system) see a non-zero status on failure.
if __name__ == "__main__":
    sys.exit(main())
|
abinit/abinit
|
abichecks/scripts/check-build-refs.py
|
Python
|
gpl-3.0
| 2,656
|
[
"ABINIT"
] |
feeacb24f78e5a00c280781ec3e0f13a1c7900d8c0ee3c538ff549c7029acf3a
|
import numpy as np
import pylab as pl
import matplotlib
import matplotlib.pyplot as plt
from collections import defaultdict
import glob
import readline # otherwise the wrong readline is imported by rpy2
SAGE_XPS = 11
SAGE = 12
EAGE = 31
N_MONTHS = EAGE-SAGE+1
#TYPES = ["basic", "single-context", "topics"]
#TYPES = ["basic", "topics"]
TYPES = ["basic", "single-context"]
TEST = False # if True, just use the values evaluated on a test test
ITERS = range(499, 520) + range(1000,1005)
#ITERS = range(1000,1005)
#ITERS = range(600, 620)
PREFIX = ""
#PREFIX = "old_naima_XPs/"
TAKE_MAX_SCORE = False # in case of several results, otherwise do the mean+std
SORTED = True # sort the histograms by score, disable at your own risk!
FACTOR_STD = 1. # 1.96 for 95% confidence interval
OLDVERSION = False # version before March 10
LAST_ITERS = 10 # take the last XX iterations as results (considering converged)
# USED ONLY FOR TEST currently
if LAST_ITERS > 1 and TEST:
TAKE_MAX_SCORE = False
DO_ONLY = {'colloc_syll': 'baseline',
't_colloc_syll': 'split vocab',
't_readapt_colloc_syll': 'share vocab',
't_colloc_syll_wth_common': 'with common',
#'t_permuted_colloc_syll': 'permuted split vocab',
### 't_permuted_colloc_syll_wth_common': 'permuted with common',
#'t_random_colloc_syll': 'random split vocab',
### 't_random_colloc_syll_wth_common': 'random with common',
'colloc3_syll': 'colloc3 syll',
't_colloc3_syll_collocs_common': 'colloc3 syll collocs common'}
#'syll': 'syll',
#'t_syll': 'syll split vocab',
#'t_readapt_syll': 'syll share vocab'}
#'unigram': 'unigram', 't_readapt_unigram': 'unigram share vocab',
#'t_unigram': 'unigram split vocab'}
#'t_readapt_colloc_syll_wth_common': 'share vocab with common',
#'t_readapt_colloc_syll_wth_common2': 'share vocab with common 2'}
if OLDVERSION:
DO_ONLY = {'syll': 'syll', 'colloc': 'colloc',
't_readapt_colloc': 't_colloc_shr_vocab',
't_syll': 't_syll_spl_vocab',
't_readapt_colloc_wth_common': 't_colloc_wth_common',
'colloc_syll': 'colloc_syll',
't_colloc_syll': 't_colloc_syll_spl_vocab',
't_readapt_colloc_syll': 't_colloc_syll_shr_vocab',
't_colloc_syll_wth_common': 't_colloc_syll_wth_common'}
if TEST:
DO_ONLY = {'t_nopfx_colloc_syll_wth_common': 'with common no prefix',
't_test_colloc_syll_wth_common': 'with common test',
't_nopfx_colloc_syll': 'split vocab no prefix',
'test_coll_syll': 'baseline test',
't_test_colloc_syll': 'split vocab test'}
if OLDVERSION:
DO_ONLY = {'t_nopfx_coll_syll_wth_common': 't_colloc_syll_wth_common_nopfx',
't_test_coll_syll_wth_common': 't_colloc_syll_wth_common_test',
't_nopfx_coll_syll': 't_colloc_syll_spl_vocab_nopfx',
'test_coll_syll': 'colloc_syll_test',
't_test_coll_syll': 't_colloc_syll_spl_vocab_test'}
#DO_ONLY = {}
# for cosmetics when preparing figures for papers
# e.g. DO_ONLY = {'t_colloc': 'colloc with topics'}
scores_order = "token_f-score token_precision token_recall boundary_f-score boundary_precision boundary_recall".split()
results = defaultdict(lambda: [dict(zip(scores_order, [[] for i in range(len(scores_order))])) for tmp_i in range(N_MONTHS)])
if TAKE_MAX_SCORE:
results = defaultdict(lambda: [dict(zip(scores_order, [0 for i in range(len(scores_order))])) for tmp_i in range(N_MONTHS)])
for month in xrange(SAGE, EAGE+1):
for fname in glob.iglob(PREFIX+'naima_' + str(SAGE_XPS) + 'to' + str(month)
+ 'm/nai*-' + str(SAGE_XPS) + '-' + str(month) + '*.o*'):
if TEST and (not "test" in fname and not "nopfx" in fname):
continue
elif not TEST and ("test" in fname or "nopfx" in fname):
continue
if "-sc" in fname and not "single-context" in TYPES:
continue
if "docs" in fname and not "topics" in TYPES:
continue
# always plots basic results currently
doit = False
with open (fname.replace(".o", ".e")) as f:
line = ""
for line in f:
for iternumber in ITERS:
striter = str(iternumber)
if striter + " iterations" in line or "Iteration " + striter in line:
doit = True
break
if not doit:
print "NOT DOING:", fname
else:
print fname
scores = []
s_dict = {}
with open(fname) as f:
last_lines = []
for line in f:
last_lines.append(line)
try:
if TEST and LAST_ITERS > 1 and len(last_lines) > LAST_ITERS+1:
for iter_to_take in range(1,LAST_ITERS+1):
scores = [float(last_lines[-iter_to_take].split('\t')[i]) for i in range(6)]
if not len(s_dict):
s_dict = [dict(zip(scores_order, scores))]
else:
s_dict.append(dict(zip(scores_order, scores)))
else:
scores = [float(last_lines[-1].split('\t')[i]) for i in range(6)]
s_dict = dict(zip(scores_order, scores))
except:
print "PARSE ERROR: parse went wrongly for", fname
fname = '/'.join(fname.split('/')[1:])
fname = fname.replace('coll-', 'colloc-') # old names
if 'docs' in fname:
condname = '_'.join(fname.split('/')[-1].split('-')[-1].split('.')[0].split('_')[2:])
if condname == '': # topics-based unigram
condname = 'uni'
condname = 'd_' + condname
elif '-sc' in fname:
fname = fname.replace('-sc', '')
condname = 't'
if '-r+' in fname or '-r.' in fname:
condname = 't_readapt'
fname = fname.replace('-r', '')
if '-w+' in fname:
fname = fname.replace('-w+', '_words_common')
elif '-c+' in fname:
fname = fname.replace('-c+', '_collocs_common')
elif '+' in fname:
fname = fname.replace('+', '_wth_common')
condname = '_'.join([condname] + fname.split('/')[-1].split('-')[3:]).split('.')[0]
else:
condname = '_'.join(fname.split('/')[-1].split('-')[3:]).split('.')[0]
########## cosmetic (for legends) ##########
if len(DO_ONLY):
if condname in DO_ONLY:
condname = DO_ONLY[condname]
else:
continue
########## /cosmetic (for legends) ##########
if type(s_dict) == type({}) and len(s_dict) == 6:
if TAKE_MAX_SCORE:
if results[condname][month-SAGE]['token_f-score'] == 0 or s_dict['token_f-score'] > results[condname][month-SAGE]['token_f-score']:
results[condname][month-SAGE] = s_dict
else:
for k, v in s_dict.iteritems():
results[condname][month-SAGE][k].append(v)
elif type(s_dict) == type([]):
for e in s_dict:
for k, v in e.iteritems():
results[condname][month-SAGE][k].append(v)
print results
fig = plt.figure(figsize=(12, 9), dpi=1200)
plt.xticks(xrange(N_MONTHS))
ax = plt.gca()
ax.set_ylim([0.55, 0.90])
ax.set_xlim([-0.1, N_MONTHS - 0.9])
ax.set_xticklabels(map(str, range(SAGE, EAGE+1)))
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(24)
# Plot one token-f-score curve per condition.  Marker encodes the model
# family ('^' for syllable-based conditions, 'v' otherwise); dashed lines
# mark document-/topic-based ('d_'/'t_') conditions.
for cond, a in results.iteritems():
    linetype = ''
    if "syll" in cond:
        linetype = '^-.'
    else:
        linetype = 'v-.'
    # BUGFIX: the original test was `if "d_" or "t_" in cond:`, which is
    # always true because the non-empty literal "d_" is truthy -- every
    # curve was drawn dashed.  Test both substrings against cond.
    if "d_" in cond or "t_" in cond:
        linetype = linetype[0] + '--'
    vals = None
    stddevs = None
    if TAKE_MAX_SCORE:
        vals = [x['token_f-score'] for x in a]
    else:
        vals = [np.mean(x['token_f-score']) for x in a]
        stddevs = [FACTOR_STD*np.std(x['token_f-score']) for x in a] # TODO (gaussian process or some smoothing)
    # Scores <= 0 mean "no data for this month": plot NaN to leave a gap.
    plt.plot(map(lambda x: 'NaN' if x <= 0.0 else x, vals), linetype, linewidth=3.5, alpha=0.8)
plt.xlabel('months')
plt.ylabel('token f-score')
plt.legend([l for l in results.iterkeys()], loc='best', ncol=4)
plt.setp(ax.get_legend().get_texts(), fontsize=20)
plt.savefig('progress_ages.png')
matplotlib.rcParams.update({'font.size': 20})
matplotlib.rcParams.update({'text.color': "black"})
matplotlib.rcParams.update({'axes.labelcolor': "black"})
matplotlib.rcParams.update({'xtick.color': "black"})
matplotlib.rcParams.update({'ytick.color': "black"})
plotted_results = {} # plotted_results[month][cond][score_type] = mean
for month in xrange(SAGE, EAGE+1):
y_pos = [0.5]
scores = []
stddevs = []
conds = []
s_dicts = []
for cond, a in results.iteritems():
score = 0
stddev = 0
if TAKE_MAX_SCORE:
score = a[month-SAGE]['token_f-score']
else:
score = np.mean(a[month-SAGE]['token_f-score'])
stddev = FACTOR_STD*np.std(a[month-SAGE]['token_f-score'])
if score > 0:
y_pos.append(y_pos[-1] + 1)
scores.append(score)
stddevs.append(stddev)
conds.append(cond)
s_dicts.append({'token_f-score': score,
'token_precision': np.mean(a[month-SAGE]['token_precision']),
'token_recall': np.mean(a[month-SAGE]['token_recall']),
'boundary_f-score': np.mean(a[month-SAGE]['boundary_f-score']),
'boundary_precision': np.mean(a[month-SAGE]['boundary_precision']),
'boundary_recall': np.mean(a[month-SAGE]['boundary_recall'])})
plotted_results[month] = dict(zip(conds, s_dicts))
if len(conds) == 0:
continue
y_pos = y_pos[:-1]
fig = plt.figure(figsize=(9, len(y_pos)), dpi=1200)
ax = plt.gca()
ax.set_ylim([0, len(y_pos)+1])
ax.set_xlim([0.6, 0.86])
if TEST:
ax.set_xlim([0.7, 0.86])
tmp = ()
if TAKE_MAX_SCORE:
tmp = zip(y_pos, scores, conds, ['g' for tmp_i in range(len(y_pos))])
if OLDVERSION:
tmp = map(lambda (y, s, cond, color): (y, s, cond, 'b') if 't' == cond[0] or 'd' == cond[0] else (y, s, cond, color), tmp)
else:
tmp = map(lambda (y, s, cond, color): (y, s, cond, 'b') if 'b' != cond[0] or 'd' == cond[0] else (y, s, cond, color), tmp) # cond[0]=='b' for cond=='baseline'
else:
tmp = zip(y_pos, scores, stddevs, conds, ['g' for tmp_i in range(len(y_pos))])
if OLDVERSION:
tmp = map(lambda (y, s, sd, cond, color): (y, s, sd, cond, 'b') if 't' == cond[0] or 'd' == cond[0] else (y, s, sd, cond, color), tmp)
else:
if TEST:
tmp = map(lambda (y, s, sd, cond, color): (y, s, sd, cond, 'b') if 'no prefix' in cond else (y, s, sd, cond, color), tmp) # "no prefix" cond => different color
tmp = map(lambda (y, s, sd, cond, color): (y, s, sd, cond, 'grey') if 'b' == cond[0] else (y, s, sd, cond, color), tmp) # cond[0]=='b' for cond=='baseline'
else:
tmp = map(lambda (y, s, sd, cond, color): (y, s, sd, cond, 'b') if 'b' != cond[0] else (y, s, sd, cond, color), tmp) # cond[0]=='b' for cond=='baseline'
if SORTED:
ys = map(lambda x: x[0], tmp)
tmp = sorted(tmp, key=lambda x: x[1])
tmp = map(lambda y,t: sum(((y,), t[1:]), ()), ys, tmp)
if TAKE_MAX_SCORE:
y_pos, scores, conds, colors = zip(*tmp)
plt.barh(y_pos, scores, color=colors, ecolor='r', alpha=0.8)
else:
y_pos, scores, stddev, conds, colors = zip(*tmp)
plt.barh(y_pos, scores, xerr=stddev, color=colors, ecolor='r', alpha=0.8)
plt.yticks(map(lambda x: x+0.5, y_pos), conds)
plt.xlabel('token f-score')
#plt.title('')
plt.savefig('histogram_' + str(SAGE_XPS) + 'to' + str(month) + 'm.png', bbox_inches='tight')
from pandas import DataFrame
from copy import deepcopy
import pandas as pd
mydata = defaultdict(lambda: [])
ages_max_points = [0 for i in xrange(SAGE, EAGE+1)]
results_m = deepcopy(results)
for cond, a in results_m.iteritems():
for i, x in enumerate(a):
if len(x['token_f-score']) > ages_max_points[i]:
ages_max_points[i] = len(x['token_f-score'])
mydata[cond].append(x['token_f-score'])
mydata['months'] = [[m for i in range(ages_max_points[m-SAGE])] for m in xrange(SAGE, EAGE+1)]
#mydata['months'] = [[str(m) for i in range(ages_max_points[m-SAGE])] for m in xrange(SAGE, EAGE+1)] # TODO if we don't want the stat_smooth to know about X (months)
for key, value in mydata.iteritems():
for i, l in enumerate(value):
value[i] = l + [np.nan for j in range(ages_max_points[i] - len(l))]
mydata[key] = [j for i in value for j in i]
if np.all(map(np.isnan, mydata[key])): # remove data that is only nan
mydata.pop(key)
print mydata
print ">>> conditions that will be plotted"
print mydata.keys()
mydataframe = DataFrame(mydata)
my_lng = pd.melt(mydataframe[['months'] + [k for k in mydata.keys() if k != 'months']], id_vars='months')
#my_lng = pd.melt(mydataframe[['months', 'share vocab', 'baseline', 'with common', 'split vocab']], id_vars='months')
#my_lng = pd.melt(mydataframe[['months', 't_permuted_colloc_syll', 't_permuted_colloc_syll_wth_common', 'unigram', 't_unigram', 't_readapt_unigram', 'colloc_syll', 't_colloc_syll', 't_colloc_syll_wth_common']], id_vars='months')
if OLDVERSION:
my_lng = pd.melt(mydataframe[['months', 't_colloc_syll_shr_vocab', 'colloc_syll', 't_colloc_syll_wth_common', 't_colloc_syll_spl_vocab']], id_vars='months')
# from ggplot_import_*
# #p = ggplot(aes(x='months', y='colloc'), data=mydataframe) + geom_point(color='lightgreen') + stat_smooth(se=True) + xlab('age in months') + ylab('token f-score')
# my_lng = pd.melt(mydataframe[['months', 't_colloc syll shr vocab', 'colloc syll', 't_colloc_syll_wth_common', 't_colloc_syll_spl_vocab', 'colloc', 'syll', 't_syll_spl_vocab']], id_vars='months')
# #p = ggplot(aes(x='months', y='value', color='variable'), data=my_lng) + stat_smooth(se=True, method='lm', level=0.95) + xlab('age in months') + ylab('token f-score')
# p = ggplot(aes(x='months', y='value', color='variable'), data=my_lng) + stat_smooth(se=False) + xlab('age in months') + ylab('token f-score')
# ggsave(p, 'ggplot_progress.png')
import rpy2.robjects as robj
import rpy2.robjects.pandas2ri # for dataframe conversion
from rpy2.robjects.packages import importr
from rpy2.robjects import globalenv
import pandas.rpy.common as com
#grdevices = importr('grDevices')
#robj.pandas2ri.activate()
#data_r = robj.conversion.py2ri(mydata)
lng_r = com.convert_to_r_dataframe(my_lng)
data_r = com.convert_to_r_dataframe(mydataframe)
globalenv['lng_r'] = lng_r
globalenv['data_r'] = data_r
globalenv['eage'] = EAGE
globalenv['sage'] = SAGE
print "==================="
print "and now for the R part"
print "==================="
rstring = """
library("ggplot2")
library("grid")
#print(lng_r)
#print(factor(lng_r$months))
#print(factor(lng_r$variable))
cLevels <- levels(lng_r$variable)
p <- ggplot(data=lng_r, aes(x=months, y=value, group=variable, colour=variable, fill=variable, shape=variable, linetype=variable))\
+ scale_y_continuous(name='token f-score')\
+ scale_x_discrete('age in months', breaks=seq(eage,sage), labels=seq(eage,sage))\
+ coord_cartesian(xlim = c(eage, sage))\
+ theme_bw()\
+ scale_colour_discrete("model", drop=TRUE, limits=cLevels)\
+ scale_fill_discrete("model", drop=TRUE, limits=cLevels)\
+ scale_shape_discrete("model", drop=TRUE, limits=cLevels)\
+ scale_linetype_discrete("model", drop=TRUE, limits=cLevels)\
+ stat_smooth(level=0.68, size=1.8)\
+ theme(text = element_text(size=44))\
"""
#+ geom_point()\
#+ xlab('age in months')\
#+ ylab('token f-score')\
#+ scale_x_continuous('age in months', breaks=seq(eage,sage), limits=c(eage,sage))\
# + scale_x_discrete('age in months')
if len(DO_ONLY) and len(DO_ONLY) < 5:
rstring += """+ opts(legend.position = c(0.96, 0.5),
legend.justification = c(1, 0.5),
legend.background = element_rect(colour = "grey70", fill = "white"),
legend.text=element_text(size=44),
legend.title=element_text(size=44),
legend.key.size=unit(2, "cm"),
plot.margin=unit(c(1,1,1,1), "cm"))
"""
else:
rstring += """+ opts(legend.background = element_rect(colour = "grey70", fill = "white"),
legend.text=element_text(size=44),
legend.title=element_text(size=44),
legend.key.size=unit(2, "cm"),
plot.margin=unit(c(1,1,1,1), "cm"))
"""
rstring += """
ggsave('ggplot2_progress.pdf', plot=p, width=22, height=16)
"""
plotFunc_2 = robj.r(rstring)
print "==================="
print "and now for the LaTeX tables"
print "==================="
header_table = """
\\begin{table*}[ht] \caption{Mean f-scores (f), precisions (p), and recalls (r) for different models depending on the size of dataset}
\\vspace{-0.5cm}
\\begin{center}
\\begin{scriptsize}
\\begin{tabular}{|c|ccc|ccc|ccc|ccc|ccc|ccc|ccc|ccc|}
\hline
& \multicolumn{3}{|c|}{syll}
& \multicolumn{3}{|c|}{t\_syll}
& \multicolumn{3}{|c|}{colloc}
& \multicolumn{3}{|c|}{t\_coll\_wth\_common}
& \multicolumn{3}{|c|}{coll\_syll}
& \multicolumn{3}{|c|}{t\_coll\_syll\_shr\_voc}
& \multicolumn{3}{|c|}{t\_coll\_syll\_spl\_voc}
& \multicolumn{3}{|c|}{t\_coll\_syll\_wth\_com}\\\\
"""
print header_table
for typ in ['token', 'boundary']:
print typ + """ & f & p & r & f & p & r & f & p & r & f & p & r & f & p & r & f & p & r & f & p & r & f & p & r \\\\
\hline """
for month, d in plotted_results.iteritems():
print str(SAGE_XPS) + "-" + str(month),
if OLDVERSION:
listmodels = ['syll', 't_syll_spl_vocab', 'colloc', 't_colloc_wth_common', 'colloc_syll', 't_colloc_syll_shr_vocab', 't_colloc_syll_spl_vocab', 't_colloc_syll_wth_common']
listmodels = ['unigram', 'unigram share vocab', 'unigram split vocab', 'baseline', 'share vocab', 'split vocab', 'with common']
for cond in listmodels:
s_dict = d[cond]
f = s_dict[typ+'_f-score']
p = s_dict[typ+'_precision']
r = s_dict[typ+'_recall']
print " & ",
print "%.3f" % f,
print " & ",
print "%.3f" % p,
print " & ",
print "%.3f" % r,
print "\\\\"
print "\hline"
footer_table = """
\end{tabular}
\label{results}
\end{scriptsize}
\end{center}
\end{table*}
"""
print footer_table
|
SnippyHolloW/contextual_word_segmentation
|
src/plot_AGs_results.py
|
Python
|
mit
| 18,834
|
[
"Gaussian"
] |
742c307bd97ab11476f8dbf82a83dfd13d7ca9b83f1c9576e2d0067535c4c8da
|
import sys
sys.path.append("../")
import numpy as np
import pandas as pd
# from matplotlib.pyplot import plot,show,draw
import scipy.io
from functions import *
from pylab import *
from sklearn.decomposition import PCA
import _pickle as cPickle
import matplotlib.cm as cm
import os
import neuroseries as nts
# def softmax(x, b1 = 10.0, b2 = 0.5, lb = 0.2):
# x -= x.min()
# x /= x.max()
# return (1.0/(1.0+np.exp(-(x-b2)*b1)) + lb)/(1.0+lb)
###############################################################################################################
# TO LOAD
###############################################################################################################
data_directory = '/mnt/DataGuillaume/MergedData/'
datasets = np.loadtxt(data_directory+'datasets_ThalHpc.list', delimiter = '\n', dtype = str, comments = '#')
theta_mod, theta_ses = loadThetaMod('/mnt/DataGuillaume/MergedData/THETA_THAL_mod.pickle', datasets, return_index=True)
swr_mod, swr_ses = loadSWRMod('/mnt/DataGuillaume/MergedData/SWR_THAL_corr.pickle', datasets, return_index=True)
spind_mod, spind_ses = loadSpindMod('/mnt/DataGuillaume/MergedData/SPINDLE_mod.pickle', datasets, return_index=True)
spike_spindle_phase = cPickle.load(open('/mnt/DataGuillaume/MergedData/SPIKE_SPINDLE_PHASE.pickle', 'rb'))
spike_theta_phase = cPickle.load(open('/mnt/DataGuillaume/MergedData/SPIKE_THETA_PHASE.pickle', 'rb'))
nbins = 400
binsize = 5
times = np.arange(0, binsize*(nbins+1), binsize) - (nbins*binsize)/2
theta = pd.DataFrame( index = theta_ses['rem'],
columns = ['phase', 'pvalue', 'kappa'],
data = theta_mod['rem'])
# filtering swr_mod
swr = pd.DataFrame( columns = swr_ses,
index = times,
data = gaussFilt(swr_mod, (5,)).transpose())
# Cut swr_mod from -500 to 500
swr = swr.loc[-500:500]
# CHECK FOR NAN
tmp1 = swr.columns[swr.isnull().any()].values
tmp2 = theta.index[theta.isnull().any(1)].values
# CHECK P-VALUE
tmp3 = theta.index[(theta['pvalue'] > 1).values].values
tmp = np.unique(np.concatenate([tmp1,tmp2,tmp3]))
# copy and delete
if len(tmp):
swr_modth = swr.drop(tmp, axis = 1)
theta_modth = theta.drop(tmp, axis = 0)
swr_modth_copy = swr_modth.copy()
neuron_index = swr_modth.columns
times = swr_modth.loc[-500:500].index.values
m = 'Mouse12'
data = cPickle.load(open("../../data/maps/"+m+".pickle", 'rb'))
theta = data['movies']['theta']
swr = data['movies']['swr']
total = data['total']
x = data['x']
y = data['y']
headdir = data['headdir']
jpc = data['jpc']
interval_to_cut = { 'Mouse12':[88,120],
'Mouse17':[84,123]}
# 'Mouse20':[92,131],
# 'Mouse32':[80,125]}
exemples = {'ldvl':['Mouse12-120807_7', 'Mouse12-120807_8', 'Mouse12-120807_9',
'Mouse12-120807_10', 'Mouse12-120807_11', 'Mouse12-120807_12',
'Mouse12-120807_13'],
're':['Mouse12-120819_3', 'Mouse12-120819_5'],
'av':['Mouse12-120814_20', 'Mouse12-120814_22', 'Mouse12-120814_23',
'Mouse12-120814_24']
}
depths = [0.07, 0.49, 1.61]
shanks = [1.2, 1.0 ,0.8]
nbins = 200
binsize = 5
times = np.arange(0, binsize*(nbins+1), binsize) - (nbins*binsize)/2
times2 = times
space = 0.01
from scipy.ndimage import gaussian_filter
# swr = gaussian_filter(swr, (1,0.2,0.2))
swr_copy = swr.copy()
times = times[interval_to_cut[m][0]:interval_to_cut[m][1]]
swr = swr[:,:,interval_to_cut[m][0]:interval_to_cut[m][1]]
##############################################################################################################
# TOTAL NEURON
##############################################################################################################
total = total / total.max()
xnew, ynew, xytotal = interpolate(total.copy(), x, y, space)
filtotal = gaussian_filter(xytotal, (10, 10))
newtotal = softmax(filtotal, 15.0, 0.25)
# newtotal[newtotal > 0.9] = np.NaN
##############################################################################################################
# HEAD DIRECTION
##############################################################################################################
xnew, ynew, newheaddir = interpolate(headdir.copy(), x, y, space)
newheaddir[newheaddir < np.percentile(newheaddir, 95)] = 0.0
##############################################################################################################
# THALAMUS LINES
##############################################################################################################
thl_lines = scipy.ndimage.imread("../../figures/mapping_to_align/"+m+"_thalamus_lines.png").sum(2)
xlines, ylines, thl_lines = interpolate(thl_lines, np.linspace(x.min(), x.max(), thl_lines.shape[1]),
np.linspace(y.min(), y.max(), thl_lines.shape[0]), space*0.1)
thl_lines[thl_lines < 200] = np.NaN
thl_lines[thl_lines > 200] = 1.0
# thl_lines[thl_lines < 230] = np.NaN
# thl_lines[thl_lines > 230] = 1.0
##############################################################################################################
# SWR
##############################################################################################################
newswr = []
for t in range(len(times)):
xnew, ynew, frame = interpolate(swr[:,:,t].copy(), x, y, space)
frame = gaussian_filter(frame, (10, 10))
newswr.append(frame)
newswr = np.array(newswr)
newswr = gaussian_filter(newswr, (1,0.2,0.2))
newswr = newswr - newswr.min()
newswr = newswr / newswr.max()
##############################################################################################################
# THETA
##############################################################################################################
phase = np.linspace(0, 2*np.pi, theta.shape[-1])
newtheta = []
for i in range(len(phase)):
xnew, ynew, frame = interpolate(theta[:,:,i].copy(), x, y, space)
newtheta.append(frame)
newtheta = np.array(newtheta)
###############################################################################################################
# PLOT
###############################################################################################################
def figsize(scale):
    """Return [width, height] in inches for a figure scaled to the LaTeX
    text width, with a golden-ratio-derived aspect."""
    text_width_pt = 483.69687                   # from LaTeX: \the\textwidth
    pt_to_inch = 1.0/72.27                      # points per inch
    golden_ratio = (np.sqrt(5.0)-1.0)/2.0       # aesthetic height/width ratio
    width_in = text_width_pt*pt_to_inch*scale   # full width in inches
    height_in = width_in*golden_ratio*1.2       # height in inches
    return [width_in*0.9, height_in]
def simpleaxis(ax):
    """Strip the top and right spines from *ax*; keep ticks bottom/left."""
    for side in ('top', 'right'):
        ax.spines[side].set_visible(False)
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
def noaxis(ax):
    """Hide all four spines of *ax* and remove its tick marks and labels."""
    for side in ('top', 'right', 'left', 'bottom'):
        ax.spines[side].set_visible(False)
    ax.get_xaxis().tick_bottom()
    ax.get_yaxis().tick_left()
    ax.set_xticks([])
    ax.set_yticks([])
# ------------------------------------------------------------------
# Per-mouse "figure article 4": jPC orbit of SWR activity, interpolated
# thalamus maps at three time points, example SWR-modulation traces,
# and an annotated thalamus atlas with connecting arrows.
# NOTE(review): indentation below was reconstructed from a
# whitespace-mangled source; the block nesting is the most plausible
# reading, not guaranteed byte-for-byte.  Relies on names defined
# earlier in the file (m, jpc, times, times2, newswr, newtotal, xnew,
# ynew, thl_lines, xlines, ylines, swr_modth, exemples, shanks,
# depths, swr, y, get_rgb, os, sys, ...), and on a plain
# `import matplotlib` presumably done earlier — TODO confirm.
# ------------------------------------------------------------------
import matplotlib as mpl
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.patches as patches
mpl.use("pdf")  # backend must be chosen before pyplot is imported
pdf_with_latex = { # setup matplotlib to use latex for output
    "pgf.texsystem": "pdflatex", # change this if using xetex or lautex
    "text.usetex": True, # use LaTeX to write all text
    "font.family": "serif",
    "font.serif": [], # blank entries should cause plots to inherit fonts from the document
    "font.sans-serif": [],
    "font.monospace": [],
    "axes.labelsize": 9, # LaTeX default is 10pt font.
    "font.size": 8,
    "legend.fontsize": 8, # Make the legend/label fonts a little smaller
    "xtick.labelsize": 5,
    "ytick.labelsize": 5,
    "pgf.preamble": [
        r"\usepackage[utf8x]{inputenc}", # use utf8 fonts becasue your computer can handle it :)
        r"\usepackage[T1]{fontenc}", # plots will be generated using this preamble
    ],
    "lines.markeredgewidth" : 0.2,
    "axes.linewidth" : 0.5,
    "ytick.major.size" : 1.0,
    "xtick.major.size" : 1.0
}
mpl.rcParams.update(pdf_with_latex)
import matplotlib.gridspec as gridspec
from matplotlib.pyplot import *
from mpl_toolkits.axes_grid.inset_locator import inset_axes
fig = figure(figsize = figsize(1))
n = 4
# indices into `times` of the three SWR time points displayed per mouse
# to_plot = [0, 11, 16]
if m == 'Mouse12':
    to_plot = [0, 11, 22]
elif m == 'Mouse17':
    to_plot = [4, 17, 38]
##############################################################
# ORBIT
##############################################################
gs1 = gridspec.GridSpec(2,3)
gs1.update(hspace = 0.4, bottom = 0.01, top = 0.95, right = 0.98, left = 0.04)
ax = subplot(gs1[0, 0])
# axis('off')
start, stop = (10, -65)  # slice of the jPC trajectory actually drawn
simpleaxis(ax)
plot(jpc[start,0], jpc[start,1], 'o', markersize = 3, color = '#5c7d6f')
plot(jpc[start:stop,0], jpc[start:stop,1], linewidth = 0.8, color = '#5c7d6f')
arrow(jpc[stop-2,0],jpc[stop-2,1],jpc[stop-1,0]-jpc[stop-2,0],jpc[stop-1,1]-jpc[stop-2,1], color = '#5c7d6f', head_width = 0.06)
# short axis stubs instead of full spines
ax.spines['left'].set_bounds(np.min(jpc[:,1]), np.min(jpc[:,1]+0.1))
ax.spines['bottom'].set_bounds(np.min(jpc[:,0]), np.min(jpc[:,0]+0.1))
xticks([], [])
yticks([], [])
ax.xaxis.set_label_coords(0.25, -0.02)
ax.yaxis.set_label_coords(-0.02, 0.15)
ylabel('jPC2')
xlabel('jPC1')
xlim(-0.4,0.4)
ylim(-0.4,0.4)
# manual text offsets for the three highlighted time points
specialposition = { to_plot[0]:[-0.15, 0.05],
    to_plot[1]:[-0.10, -0.05],
    to_plot[2]:[0.0, 0.05]}
#to_plot[3]:[0.01, 0.01]}
for i in to_plot:
    idx = np.where(times[i] == times2)[0][0]
    plot(jpc[idx,0], jpc[idx,1], 'o', markersize = 4, color = 'green')
    # NOTE(review): the `i == 11` special case labels Mouse12's middle
    # point as "0 ms"; for Mouse17 no index equals 11 — confirm intended.
    if i == 11:
        text(jpc[idx,0]+specialposition[i][0], jpc[idx,1]+specialposition[i][1], "0 ms")
    else :
        text(jpc[idx,0]+specialposition[i][0], jpc[idx,1]+specialposition[i][1], str(int(times[i]))+" ms")
title("SWR projection \n (one mouse)", y = 0.91)
##############################################################
# MAP
##############################################################
# for i,j in zip(range(4), ((0,2), (1,0), (1,1), (1,2))):
for i,j in zip(range(3), ((1,0), (1,1), (1,2))):
    ax = subplot(gs1[j[0], j[1]])
    frame = newswr[to_plot[i]]
    rgbframe = get_rgb(frame.copy(), np.ones_like(newtotal), newtotal.copy(), 0.65)
    # rgbframe = get_rgb(frame, )
    imshow(rgbframe, aspect = 'equal', extent = (xnew[0], xnew[-1], ynew[-1], ynew[0]))
    # imshow(newtotal, extent = (xnew[0], xnew[-1], ynew[-1], ynew[0]), cmap = 'gist_gray', alpha = 0.64)
    if i == 1:
        title("T = 0 ms")
    else:
        title("T = "+str(int(times[to_plot[i]]))+" ms")
    # contour(newheaddir, aspect = 'equal',origin = 'upper', extent = (xnew[0], xnew[-1], ynew[-1], ynew[0]), cmap = 'winter')
    imshow(thl_lines, aspect = 'equal', origin = 'upper', extent = (xlines[0], xlines[-1], ylines[-1], ylines[0]))
    xticks([], [])
    yticks([], [])
##############################################################
# SWR
##############################################################
if m == 'Mouse12':
    gs00 = gridspec.GridSpecFromSubplotSpec(3, 1, subplot_spec=gs1[0,2])
    titles = ['LVDL', 'AV', 'Re']
    axswr = {}
    for i,z in zip(range(3),['ldvl', 'av', 're']):
        ax1 = subplot(gs00[i,0])
        axswr[z] = ax1
        simpleaxis(ax1)
        if i in [0,1]:
            ax1.set_xticks([])
            ax1.spines['bottom'].set_visible(False)
        # bounds = [-200, 200]
        mean = swr_modth[exemples[z]].mean(1)
        sem = swr_modth[exemples[z]].sem(1)
        # NOTE(review): this rebinds the module-level `times` used above
        times = mean.index.values
        # plot(swr_modth[exemples[z]], linestyle = '--', color = 'red', linewidth = 0.9)
        plot(times, mean, color = 'black', linewidth = 2)
        fill_between(times, mean-sem, mean+sem, alpha = 0.4, color = 'grey')
        title(titles[i], loc = 'right')
        ylim(-4,4)
        if i == 2:
            xlabel("Time from SPWR (ms)", fontsize = 8)
        # axvline(-60, color = 'grey', linewidth = 0.6)
        # axvline(-5, color = 'grey', linewidth = 0.6)
        # axvline(20, color = 'grey', linewidth = 0.6)
        # if i == 1:
        # ax1.set_xticks([-60, -5, 20])
        # fill_between(mean.index.values, mean - sem, mean + sem, alpha = 0.5)
        # ylim(-1.5, 1.5)
##############################################################
# THALAMUS
##############################################################
ax = subplot(gs1[0, 1])
ax.imshow(thl_lines, aspect = 'equal', origin = 'upper', extent = (xlines[0], xlines[-1], ylines[-1], ylines[0]))
ax.contour(newheaddir, origin = 'upper', aspect = 'equal', extent = (xnew[0], xnew[-1], ynew[-1], ynew[0]), cmap = 'winter')
# ax.set_xticks(x)
# ax.set_xticklabels(np.arange(1,9))
ax.set_xlabel("Shanks")
ax.set_ylabel("Depth per session")
ax.set_yticks(y)
ax.set_title("Thalamus Map")
# anatomical nucleus labels placed by hand in data coordinates
ax.text(0.82, 0.21, '$\mathbf{AD}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(0.67, 0.8, '$\mathbf{IAD}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold', rotation = 70)
ax.text(0.8, 1.05, '$\mathbf{AM}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(1.1, 0.4, '$\mathbf{AV}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(1.24, 0.07, '$\mathbf{LDVL}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(0.55, 0.21, '$\mathbf{sm}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(0.45, 0.49, '$\mathbf{MD}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(1.22, 1.13, '$\mathbf{VA}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(0.28, 0.65, '$\mathbf{PVA}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(0.7, 1.53, '$\mathbf{Re}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
ax.text(0.5, 0.77, '$\mathbf{PT}$' , horizontalalignment = 'center', verticalalignment = 'center', fontweight='bold')
scatter(shanks[0], depths[0], 7, color = 'red', zorder = 2)
scatter(shanks[1], depths[1], 7, color = 'red')
scatter(shanks[2], depths[2], 7, color = 'red')
##############################################################
# ARROWS
##############################################################
# Arrows connect the three recording sites on the map to the example
# SWR traces; only drawn for Mouse12 because axswr exists only there.
if m == 'Mouse12':
    ax1tr = ax.transData
    axad = axswr['ldvl'].transData
    axam = axswr['re'].transData
    axav = axswr['av'].transData
    figtr = fig.transFigure.inverted()
    ptB = figtr.transform(ax1tr.transform((shanks[0],depths[0])))
    ptE = figtr.transform(axad.transform((-700,0)))
    style="simple,head_width=2,head_length=3"
    kw = dict(arrowstyle=style, color="k")
    arrow = matplotlib.patches.FancyArrowPatch(
        ptB, ptE, transform=fig.transFigure, # Place arrow in figure coord system
        fc = "None", connectionstyle="arc3,rad=-0.1", alpha = 0.5,
        mutation_scale = 3., **kw)
    fig.patches.append(arrow)
    ptB = figtr.transform(ax1tr.transform((shanks[2],depths[2])))
    ptE = figtr.transform(axam.transform((-700,0)))
    # NOTE(review): `style` is reassigned below but `kw` still carries the
    # first arrowstyle — probably unintended; confirm before changing.
    style="<->,head_width=2,head_length=3"
    arrow = matplotlib.patches.FancyArrowPatch(
        ptB, ptE, transform=fig.transFigure, # Place arrow in figure coord system
        fc = "None", connectionstyle="arc3,rad=-0.1", alpha = 0.5,
        mutation_scale = 3., **kw)
    fig.patches.append(arrow)
    ptB = figtr.transform(ax1tr.transform((shanks[1],depths[1])))
    ptE = figtr.transform(axav.transform((-700,0)))
    style="<->,head_width=2,head_length=3"
    arrow = matplotlib.patches.FancyArrowPatch(
        ptB, ptE, transform=fig.transFigure, # Place arrow in figure coord system
        fc = "None", connectionstyle="arc3,rad=0.0", alpha = 0.5,
        mutation_scale = 3., **kw)
    fig.patches.append(arrow)
# small colorbars for SWR modulation and neuron density
cbaxes = fig.add_axes([0.34, 0.41, 0.01, 0.06])
cmap = cm.jet
norm = matplotlib.colors.Normalize(swr.min(), swr.max())
cb = matplotlib.colorbar.ColorbarBase(cbaxes, cmap = cmap, norm = norm)
cbaxes.axes.set_xlabel('SWR \n mod')
cbaxes = fig.add_axes([0.34, 0.25, 0.01, 0.06])
cmap = cm.gist_gray
norm = matplotlib.colors.Normalize(0, 1)
cb = matplotlib.colorbar.ColorbarBase(cbaxes, cmap = cmap, norm = norm)
cbaxes.axes.set_xlabel('Neurons \n density')
# cbaxes = fig.add_axes([0.34, 0.1, 0.01, 0.06])
# cmap = cm.winter
# norm = matplotlib.colors.Normalize(0, 1)
# cb = matplotlib.colorbar.ColorbarBase(cbaxes, cmap = cmap, norm = norm)
# cbaxes.axes.set_xlabel('HD \n neurons')
savefig("../../figures/figures_articles/figart_4"+m+".pdf", dpi = 900, facecolor = 'white')
os.system("evince ../../figures/figures_articles/figart_4"+m+".pdf &")
sys.exit()
# ------------------------------------------------------------------
# Animated GIF of the interpolated SWR maps over time.
# NOTE(review): this section follows sys.exit() above, so it is dead
# code unless the exit is removed.  `frame = newswr[t]` below reuses
# the leftover loop variable `t`.  Indentation reconstructed.
# Relies on earlier names: interpolate, swr_copy, x, y, space,
# gaussian_filter, get_rgb, newtotal, thl_lines, plt, times, m.
# ------------------------------------------------------------------
newswr = []
for t in range(len(times)):
    xnew, ynew, frame = interpolate(swr_copy[:,:,t].copy(), x, y, space)
    frame = gaussian_filter(frame, (10, 10))
    newswr.append(frame)
newswr = np.array(newswr)
newswr = gaussian_filter(newswr, (0,0.2,0.2))
# normalise the whole stack to [0, 1]
newswr = newswr - newswr.min()
newswr = newswr / newswr.max()
from matplotlib import animation, rc
from IPython.display import HTML, Image
rc('animation', html='html5')
fig, axes = plt.subplots(1,1)
start = 70  # first animated frame index
frame = newswr[t]
rgbframe = get_rgb(frame.copy(), np.ones_like(newtotal), newtotal.copy(), 0.65)
images = [axes.imshow(rgbframe, aspect = 'equal', extent = (xnew[0], xnew[-1], ynew[-1], ynew[0]))]
axes.imshow(thl_lines, aspect = 'equal', origin = 'upper', extent = (xlines[0], xlines[-1], ylines[-1], ylines[0]))
def init():
    # seed FuncAnimation with the first frame
    images[0].set_data(rgbframe)
    return images
def animate(t):
    # replace the image data in place for frame index t
    frame = newswr[t]
    rgbframe = get_rgb(frame.copy(), np.ones_like(newtotal), newtotal.copy(), 0.65)
    images[0].set_data(rgbframe)
    images[0].axes.set_title("time = "+str(times[t]))
    return images
anim = animation.FuncAnimation(fig, animate, init_func=init,
                               frames=range(start,132), interval=10, blit=False, repeat_delay = 1000)
anim.save('../figures/swr_mod_'+m+'.gif', writer='imagemagick', fps=15)
# show()
# sys.exit()
|
gviejo/ThalamusPhysio
|
python/pyfigures/main_article_old_fig_mouse12.py
|
Python
|
gpl-3.0
| 17,980
|
[
"NEURON"
] |
7290bb894c5835e1e7be3baa3903ab075309911a1f636cfd8830c00cb0860e42
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds Status.most_restrictive_tlp and rewrites the choice set of
    # Source.origin for the mantis_actionables app.

    dependencies = [
        ('mantis_actionables', '0023_auto_20150310_1419'),
    ]

    operations = [
        migrations.AddField(
            model_name='status',
            name='most_restrictive_tlp',
            # presumably a lower value means a more restrictive TLP
            # (Red=10 ... White=40) with 0 as the "Unknown" sentinel
            # -- TODO confirm against the model/business logic
            field=models.SmallIntegerField(default=0, choices=[(0, b'Unknown'), (40, b'White'), (30, b'Green'), (20, b'Amber'), (10, b'Red')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='source',
            name='origin',
            # NOTE(review): the value 10 is mapped to two different labels
            # ('Origin external, but provenance uncertain' and
            # 'Origin public'); one of them is almost certainly meant to
            # be a distinct value.  Do not edit this historical migration
            # -- fix it in a follow-up migration instead.
            field=models.SmallIntegerField(choices=[(0, b'Origin unknown'), (10, b'Origin external, but provenance uncertain'), (10, b'Origin public'), (20, b'Provided by vendor'), (30, b'Provided by partner')]),
            preserve_default=True,
        ),
    ]
|
siemens/django-mantis-actionables
|
mantis_actionables/migrations/0024_auto_20150311_1335.py
|
Python
|
gpl-2.0
| 903
|
[
"Amber"
] |
91d8f5510b13db70881eba4fb7de9f4021d7d863998a56a8d6cf3cf8523524ac
|
#!/usr/bin/env python
##########################################################################
#
# Copyright 2008 Tungsten Graphics, Inc., Cedar Park, Texas.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sub license, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice (including the
# next paragraph) shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
# IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
# ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
##########################################################################
'''Trace data model.'''
import sys
import string
import format
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class Node:
    """Base class for every node of the trace data model.

    Subclasses implement ``visit`` to dispatch to the matching method of
    a ``Visitor`` (classic visitor pattern).
    """

    def visit(self, visitor):
        """Dispatch to the appropriate *visitor* method; must be overridden."""
        raise NotImplementedError

    def __str__(self):
        """Render this node to a string via the default pretty printer."""
        buf = StringIO()
        printer = PrettyPrinter(format.DefaultFormatter(buf))
        self.visit(printer)
        return buf.getvalue()
class Literal(Node):
    """Leaf node wrapping a raw literal value."""

    def __init__(self, value):
        self.value = value  # the raw Python value

    def visit(self, visitor):
        visitor.visit_literal(self)
class NamedConstant(Node):
    """Leaf node holding a symbolic constant name."""

    def __init__(self, name):
        self.name = name  # symbolic name as it appears in the trace

    def visit(self, visitor):
        visitor.visit_named_constant(self)
class Array(Node):
    """Aggregate node: an ordered sequence of element nodes."""

    def __init__(self, elements):
        self.elements = elements  # list of child Node instances

    def visit(self, visitor):
        visitor.visit_array(self)
class Struct(Node):
    """Aggregate node: a named structure with (name, value) member pairs."""

    def __init__(self, name, members):
        self.name = name        # structure type name
        self.members = members  # list of (member_name, Node) pairs

    def visit(self, visitor):
        visitor.visit_struct(self)
class Pointer(Node):
    """Leaf node holding a raw pointer address."""

    def __init__(self, address):
        self.address = address

    def visit(self, visitor):
        visitor.visit_pointer(self)
class Call:
    """One traced call: sequence number, optional class, method, args, return.

    Not a ``Node`` subclass, but it follows the same ``visit`` protocol.
    """

    def __init__(self, no, klass, method, args, ret):
        self.no = no          # sequence number of the call in the trace
        self.klass = klass    # class name, or None for free functions
        self.method = method  # method/function name
        self.args = args      # list of (argument_name, Node) pairs
        self.ret = ret        # return-value Node, or None

    def visit(self, visitor):
        visitor.visit_call(self)
class Trace:
    """Top-level container: the ordered list of calls in a trace."""

    def __init__(self, calls):
        self.calls = calls  # list of Call objects

    def visit(self, visitor):
        visitor.visit_trace(self)
class Visitor:
    """Abstract interface for trace-model visitors; every hook must be overridden."""

    def visit_literal(self, node):
        raise NotImplementedError

    def visit_named_constant(self, node):
        raise NotImplementedError

    def visit_array(self, node):
        raise NotImplementedError

    def visit_struct(self, node):
        raise NotImplementedError

    def visit_pointer(self, node):
        raise NotImplementedError

    def visit_call(self, node):
        raise NotImplementedError

    def visit_trace(self, node):
        raise NotImplementedError
class PrettyPrinter:
    """Visitor that renders trace nodes through a formatter object.

    The formatter is expected to expose ``text``, ``literal``,
    ``variable``, ``function``, ``address`` and ``newline`` methods
    (see format.DefaultFormatter).
    """

    def __init__(self, formatter):
        self.formatter = formatter

    def visit_literal(self, node):
        value = node.value
        # NOTE: `basestring` is the Python 2 string base class kept from
        # the original code base.
        if isinstance(value, basestring):
            if len(value) >= 4096 or value.strip(string.printable):
                # huge or non-printable strings are elided
                self.formatter.text('...')
                return
            self.formatter.literal('"' + value + '"')
            return
        self.formatter.literal(repr(value))

    def visit_named_constant(self, node):
        self.formatter.literal(node.name)

    def visit_array(self, node):
        self.formatter.text('{')
        delim = ''
        for element in node.elements:
            self.formatter.text(delim)
            element.visit(self)
            delim = ', '
        self.formatter.text('}')

    def visit_struct(self, node):
        self.formatter.text('{')
        delim = ''
        for member_name, member_value in node.members:
            self.formatter.text(delim)
            self.formatter.variable(member_name)
            self.formatter.text(' = ')
            member_value.visit(self)
            delim = ', '
        self.formatter.text('}')

    def visit_pointer(self, node):
        self.formatter.address(node.address)

    def visit_call(self, node):
        self.formatter.text('%s ' % node.no)
        if node.klass is not None:
            self.formatter.function(node.klass + '::' + node.method)
        else:
            self.formatter.function(node.method)
        self.formatter.text('(')
        delim = ''
        for arg_name, arg_value in node.args:
            self.formatter.text(delim)
            self.formatter.variable(arg_name)
            self.formatter.text(' = ')
            arg_value.visit(self)
            delim = ', '
        self.formatter.text(')')
        if node.ret is not None:
            self.formatter.text(' = ')
            node.ret.visit(self)

    def visit_trace(self, node):
        for entry in node.calls:
            entry.visit(self)
            self.formatter.newline()
|
aYukiSekiguchi/ACCESS-Chromium
|
third_party/mesa/MesaLib/src/gallium/tests/python/retrace/model.py
|
Python
|
bsd-3-clause
| 5,631
|
[
"VisIt"
] |
bd6974c427fc372721d10e99862623492c00ccd0335c859cc551391d869c73a0
|
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module define a WulffShape class to generate the Wulff shape from
a lattice, a list of indices and their corresponding surface energies,
and the total area and volume of the wulff shape, the weighted surface energy,
the anisotropy and shape_factor can also be calculated.
In support of plotting from a given view in terms of miller index.
The lattice is from the conventional unit cell, and (hkil) for hexagonal
lattices.
If you use this code extensively, consider citing the following:
Tran, R.; Xu, Z.; Radhakrishnan, B.; Winston, D.; Persson, K. A.; Ong, S. P.
(2016). Surface energies of elemental crystals. Scientific Data.
"""
import itertools
import logging
import warnings
import numpy as np
import plotly.graph_objs as go
from scipy.spatial import ConvexHull
from pymatgen.core.structure import Structure
from pymatgen.util.coord import get_angle
from pymatgen.util.string import unicodeify_spacegroup
__author__ = "Zihan Xu, Richard Tran, Shyue Ping Ong"
__copyright__ = "Copyright 2013, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Zihan Xu"
__email__ = "zix009@eng.ucsd.edu"
__date__ = "May 5 2016"
logger = logging.getLogger(__name__)
def hkl_tuple_to_str(hkl):
    """Render a Miller index as a LaTeX plot label, e.g. (1, -1, 0) -> "($1\\overline{1}0$)".

    Negative components are typeset with an overline, as is conventional
    for crystallographic surface indices.

    Args:
        hkl: iterable of ints, in the form [h, k, l] or (h, k, l).
    """
    parts = ["($"]
    for component in hkl:
        if component < 0:
            parts.append("\\overline{" + str(-component) + "}")
        else:
            parts.append(str(component))
    parts.append("$)")
    return "".join(parts)
def get_tri_area(pts):
    """Area of the triangle spanned by three 3-D points.

    Args:
        pts: sequence [a, b, c] of three coordinate triples.

    Returns:
        float: half the magnitude of the cross product of the two edge
        vectors from *a*.
    """
    a, b, c = pts[0], pts[1], pts[2]
    edge1 = np.array(b) - np.array(a)
    edge2 = np.array(c) - np.array(a)
    return abs(np.linalg.norm(np.cross(edge1, edge2)) / 2)
class WulffFacet:
    """Lightweight record for one symmetry-expanded Wulff plane.

    Attributes mirror the constructor arguments; ``points`` and
    ``outer_lines`` start empty and are populated later while locating
    hull simplices on each plane.
    """

    def __init__(self, normal, e_surf, normal_pt, dual_pt, index, m_ind_orig, miller):
        """Store the plane data.

        :param normal: unit surface normal.
        :param e_surf: surface energy (the plane's distance from the origin).
        :param normal_pt: normal scaled by the surface energy.
        :param dual_pt: normal divided by the surface energy (dual point).
        :param index: color index of the originating input Miller entry.
        :param m_ind_orig: position of the original Miller index in the input.
        :param miller: the original (h, k, l) tuple.
        """
        self.normal = normal
        self.e_surf = e_surf
        self.normal_pt = normal_pt
        self.dual_pt = dual_pt
        self.index = index
        self.m_ind_orig = m_ind_orig
        self.miller = miller
        self.points = []
        self.outer_lines = []
class WulffShape:
"""
Generate Wulff Shape from list of miller index and surface energies,
with given conventional unit cell.
surface energy (Jm^2) is the length of normal.
Wulff shape is the convex hull.
Based on:
http://scipy.github.io/devdocs/generated/scipy.spatial.ConvexHull.html
Process:
1. get wulff simplices
2. label with color
3. get wulff_area and other properties
.. attribute:: debug (bool)
.. attribute:: alpha
transparency
.. attribute:: color_set
.. attribute:: grid_off (bool)
.. attribute:: axis_off (bool)
.. attribute:: show_area
.. attribute:: off_color
color of facets off wulff
.. attribute:: structure
Structure object, input conventional unit cell (with H ) from lattice
.. attribute:: miller_list
list of input miller index, for hcp in the form of hkil
.. attribute:: hkl_list
modify hkill to hkl, in the same order with input_miller
.. attribute:: e_surf_list
list of input surface energies, in the same order with input_miller
.. attribute:: lattice
Lattice object, the input lattice for the conventional unit cell
.. attribute:: facets
[WulffFacet] for all facets considering symm
.. attribute:: dual_cv_simp
simplices from the dual convex hull (dual_pt)
.. attribute:: wulff_pt_list
.. attribute:: wulff_cv_simp
simplices from the convex hull of wulff_pt_list
.. attribute:: on_wulff
list for all input_miller, True is on wulff.
.. attribute:: color_area
list for all input_miller, total area on wulff, off_wulff = 0.
.. attribute:: miller_area
($hkl$): area for all input_miller
"""
def __init__(self, lattice, miller_list, e_surf_list, symprec=1e-5):
    """
    Build the Wulff construction: expand the input planes by symmetry,
    take the convex hull of the dual points, and recover the Wulff shape
    as the dual of that hull.

    Args:
        lattice: Lattice object of the conventional unit cell
        miller_list ([(hkl), ...]: list of hkl or hkil for hcp
        e_surf_list ([float]): list of corresponding surface energies
        symprec (float): for recp_operation, default is 1e-5.
    """
    if any(se < 0 for se in e_surf_list):
        warnings.warn("Unphysical (negative) surface energy detected.")
    self.color_ind = list(range(len(miller_list)))
    self.input_miller_fig = [hkl_tuple_to_str(x) for x in miller_list]
    # store input data
    # dummy single-H structure just carries the lattice so that the
    # reciprocal-lattice helpers below can be used
    self.structure = Structure(lattice, ["H"], [[0, 0, 0]])
    self.miller_list = tuple(tuple(x) for x in miller_list)
    # collapse (hkil) -> (hkl): drop the redundant i for hexagonal input
    self.hkl_list = tuple((x[0], x[1], x[-1]) for x in miller_list)
    self.e_surf_list = tuple(e_surf_list)
    self.lattice = lattice
    self.symprec = symprec
    # 2. get all the data for wulff construction
    # get all the surface normal from get_all_miller_e()
    self.facets = self._get_all_miller_e()
    logger.debug(len(self.facets))
    # 3. consider the dual condition
    dual_pts = [x.dual_pt for x in self.facets]
    dual_convex = ConvexHull(dual_pts)
    dual_cv_simp = dual_convex.simplices
    # simplices (ndarray of ints, shape (nfacet, ndim))
    # list of [i, j, k] , ndim = 3
    # i, j, k: ind for normal_e_m
    # recalculate the dual of dual, get the wulff shape.
    # conner <-> surface
    # get cross point from the simplices of the dual convex hull
    wulff_pt_list = [self._get_cross_pt_dual_simp(dual_simp) for dual_simp in dual_cv_simp]
    wulff_convex = ConvexHull(wulff_pt_list)
    wulff_cv_simp = wulff_convex.simplices
    logger.debug(", ".join([str(len(x)) for x in wulff_cv_simp]))
    # store simplices and convex
    self.dual_cv_simp = dual_cv_simp
    self.wulff_pt_list = wulff_pt_list
    self.wulff_cv_simp = wulff_cv_simp
    self.wulff_convex = wulff_convex
    self.on_wulff, self.color_area = self._get_simpx_plane()
    # human-readable "($hkl$) : area" strings for legends
    miller_area = []
    for m, in_mill_fig in enumerate(self.input_miller_fig):
        miller_area.append(in_mill_fig + " : " + str(round(self.color_area[m], 4)))
    self.miller_area = miller_area
def _get_all_miller_e(self):
    """
    Expand the input Miller indices by the reciprocal-lattice symmetry
    operations and build one WulffFacet per unique symmetry-equivalent
    plane.

    Each facet satisfies |normal| = 1 with e_surf the plane's distance
    to (0, 0, 0):
        normal[0]x + normal[1]y + normal[2]z = e_surf

    Returns:
        [WulffFacet], sorted by surface energy (ascending).
    """
    all_hkl = []
    color_ind = self.color_ind
    planes = []
    recp = self.structure.lattice.reciprocal_lattice_crystallographic
    recp_symmops = self.lattice.get_recp_symmetry_operation(self.symprec)
    for i, (hkl, energy) in enumerate(zip(self.hkl_list, self.e_surf_list)):
        for op in recp_symmops:
            miller = tuple(int(x) for x in op.operate(hkl))
            # skip planes already produced by another symmetry operation
            if miller not in all_hkl:
                all_hkl.append(miller)
                normal = recp.get_cartesian_coords(miller)
                normal /= np.linalg.norm(normal)
                normal_pt = [x * energy for x in normal]
                dual_pt = [x / energy for x in normal]
                # color index cycles over the input entries
                color_plane = color_ind[divmod(i, len(color_ind))[1]]
                planes.append(WulffFacet(normal, energy, normal_pt, dual_pt, color_plane, i, hkl))
    # sort by e_surf
    planes.sort(key=lambda x: x.e_surf)
    return planes
def _get_cross_pt_dual_simp(self, dual_simp):
"""
|normal| = 1, e_surf is plane's distance to (0, 0, 0),
plane function:
normal[0]x + normal[1]y + normal[2]z = e_surf
from self:
normal_e_m to get the plane functions
dual_simp: (i, j, k) simplices from the dual convex hull
i, j, k: plane index(same order in normal_e_m)
"""
matrix_surfs = [self.facets[dual_simp[i]].normal for i in range(3)]
matrix_e = [self.facets[dual_simp[i]].e_surf for i in range(3)]
cross_pt = np.dot(np.linalg.inv(matrix_surfs), matrix_e)
return cross_pt
def _get_simpx_plane(self):
    """
    Locate the plane for simpx of on wulff_cv, by comparing the center of
    the simpx triangle with the plane functions.

    Returns:
        (on_wulff, surface_area): per-input-miller booleans and summed
        facet areas; entries stay False/0.0 for planes absent from the
        hull.
    """
    on_wulff = [False] * len(self.miller_list)
    surface_area = [0.0] * len(self.miller_list)
    for simpx in self.wulff_cv_simp:
        pts = [self.wulff_pt_list[simpx[i]] for i in range(3)]
        center = np.sum(pts, 0) / 3.0
        # check whether the center of the simplices is on one plane
        for plane in self.facets:
            abs_diff = abs(np.dot(plane.normal, center) - plane.e_surf)
            # 1e-5 tolerance for "center lies on this plane"
            if abs_diff < 1e-5:
                on_wulff[plane.index] = True
                surface_area[plane.index] += get_tri_area(pts)
                plane.points.append(pts)
                plane.outer_lines.append([simpx[0], simpx[1]])
                plane.outer_lines.append([simpx[1], simpx[2]])
                plane.outer_lines.append([simpx[0], simpx[2]])
                # already find the plane, move to the next simplices
                break
    for plane in self.facets:
        plane.outer_lines.sort()
        # edges shared by two triangles are interior; keep only the edges
        # appearing once -- they form the facet's outer boundary
        plane.outer_lines = [line for line in plane.outer_lines if plane.outer_lines.count(line) != 2]
    return on_wulff, surface_area
def _get_colors(self, color_set, alpha, off_color, custom_colors=None):
    """
    Assign a color to every facet according to its surface energy.

    Facets not on the Wulff shape keep ``off_color``; on-Wulff facets are
    colored from the ``color_set`` colormap, normalized over the range of
    on-Wulff surface energies (padded by +/-0.1 when only one facet is on
    the shape).

    Args:
        color_set: matplotlib colormap name.
        alpha (float): facet transparency in [0, 1].
        off_color: color for facets absent from the Wulff shape.
        custom_colors (dict or None): optional {miller tuple: color}
            overrides applied after colormap assignment.

    Returns:
        (color_list, color_proxy, color_proxy_on_wulff, miller_on_wulff,
        e_surf_on_wulff_list)
    """
    import matplotlib as mpl
    import matplotlib.pyplot as plt

    # FIX: the default used to be a mutable `{}`; use None to avoid the
    # shared-mutable-default pitfall (behavior unchanged for callers).
    if custom_colors is None:
        custom_colors = {}
    color_list = [off_color] * len(self.hkl_list)
    color_proxy_on_wulff = []
    miller_on_wulff = []
    e_surf_on_wulff = [(i, e_surf) for i, e_surf in enumerate(self.e_surf_list) if self.on_wulff[i]]
    c_map = plt.get_cmap(color_set)
    e_surf_on_wulff.sort(key=lambda x: x[1], reverse=False)
    e_surf_on_wulff_list = [x[1] for x in e_surf_on_wulff]
    if len(e_surf_on_wulff) > 1:
        cnorm = mpl.colors.Normalize(vmin=min(e_surf_on_wulff_list), vmax=max(e_surf_on_wulff_list))
    else:
        # if there is only one hkl on wulff, choose the color of the median
        cnorm = mpl.colors.Normalize(
            vmin=min(e_surf_on_wulff_list) - 0.1,
            vmax=max(e_surf_on_wulff_list) + 0.1,
        )
    scalar_map = mpl.cm.ScalarMappable(norm=cnorm, cmap=c_map)
    for i, e_surf in e_surf_on_wulff:
        color_list[i] = scalar_map.to_rgba(e_surf, alpha=alpha)
        # custom colors override the colormap assignment
        if tuple(self.miller_list[i]) in custom_colors:
            color_list[i] = custom_colors[tuple(self.miller_list[i])]
        color_proxy_on_wulff.append(plt.Rectangle((2, 2), 1, 1, fc=color_list[i], alpha=alpha))
        miller_on_wulff.append(self.input_miller_fig[i])
    scalar_map.set_array([x[1] for x in e_surf_on_wulff])
    color_proxy = [plt.Rectangle((2, 2), 1, 1, fc=x, alpha=alpha) for x in color_list]
    return (
        color_list,
        color_proxy,
        color_proxy_on_wulff,
        miller_on_wulff,
        e_surf_on_wulff_list,
    )
def show(self, *args, **kwargs):
r"""
Show the Wulff plot.
Args:
*args: Passed to get_plot.
**kwargs: Passed to get_plot.
"""
self.get_plot(*args, **kwargs).show()
def get_line_in_facet(self, facet):
"""
Returns the sorted pts in a facet used to draw a line
"""
lines = list(facet.outer_lines)
pt = []
prev = None
while len(lines) > 0:
if prev is None:
l = lines.pop(0)
else:
for i, l in enumerate(lines):
if prev in l:
l = lines.pop(i)
if l[1] == prev:
l.reverse()
break
# make sure the lines are connected one by one.
# find the way covering all pts and facets
pt.append(self.wulff_pt_list[l[0]].tolist())
pt.append(self.wulff_pt_list[l[1]].tolist())
prev = l[1]
return pt
def get_plot(
    self,
    color_set="PuBu",
    grid_off=True,
    axis_off=True,
    show_area=False,
    alpha=1,
    off_color="red",
    direction=None,
    bar_pos=(0.75, 0.15, 0.05, 0.65),
    bar_on=False,
    units_in_JPERM2=True,
    legend_on=True,
    aspect_ratio=(8, 8),
    custom_colors={},
):
    """
    Get the Wulff shape plot.

    Args:
        color_set: default is 'PuBu'
        grid_off (bool): default is True
        axis_off (bool): default is True
        show_area (bool): default is False
        alpha (float): chosen from 0 to 1 (float), default is 1
        off_color: Default color for facets not present on the Wulff shape.
        direction: viewing direction; defaults to the facet of maximum area.
        bar_pos: default is [0.75, 0.15, 0.05, 0.65]
        bar_on (bool): default is False
        legend_on (bool): default is True
        aspect_ratio: default is (8, 8)
        custom_colors ({(h,k,l}: [r,g,b,alpha}): Customize color of each
            facet with a dictionary. The key is the corresponding Miller
            index and value is the color. Undefined facets will use default
            color site. Note: If you decide to set your own colors, it
            probably won't make any sense to have the color bar on.
        units_in_JPERM2 (bool): Units of surface energy, defaults to
            Joules per square meter (True)

    Return:
        (matplotlib.pyplot)
    """
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import mpl_toolkits.mplot3d as mpl3

    (
        color_list,
        color_proxy,
        color_proxy_on_wulff,
        miller_on_wulff,
        e_surf_on_wulff,
    ) = self._get_colors(color_set, alpha, off_color, custom_colors=custom_colors)
    if not direction:
        # If direction is not specified, use the miller indices of
        # maximum area.
        direction = max(self.area_fraction_dict.items(), key=lambda x: x[1])[0]
    fig = plt.figure()
    fig.set_size_inches(aspect_ratio[0], aspect_ratio[1])
    azim, elev = self._get_azimuth_elev([direction[0], direction[1], direction[-1]])
    wulff_pt_list = self.wulff_pt_list
    ax = mpl3.Axes3D(fig, azim=azim, elev=elev)
    for plane in self.facets:
        # check whether [pts] is empty
        if len(plane.points) < 1:
            # empty, plane is not on_wulff.
            continue
        # assign the color for on_wulff facets according to its
        # index and the color_list for on_wulff
        plane_color = color_list[plane.index]
        pt = self.get_line_in_facet(plane)
        # plot from the sorted pts from [simpx]
        tri = mpl3.art3d.Poly3DCollection([pt])
        tri.set_color(plane_color)
        tri.set_edgecolor("#808080")
        ax.add_collection3d(tri)
    # set ranges of x, y, z
    # find the largest distance between on_wulff pts and the origin,
    # to ensure complete and consistent display for all directions
    r_range = max(np.linalg.norm(x) for x in wulff_pt_list)
    ax.set_xlim([-r_range * 1.1, r_range * 1.1])
    ax.set_ylim([-r_range * 1.1, r_range * 1.1])
    ax.set_zlim([-r_range * 1.1, r_range * 1.1])  # pylint: disable=E1101
    # add legend
    if legend_on:
        color_proxy = color_proxy
        if show_area:
            ax.legend(
                color_proxy,
                self.miller_area,
                loc="upper left",
                bbox_to_anchor=(0, 1),
                fancybox=True,
                shadow=False,
            )
        else:
            ax.legend(
                color_proxy_on_wulff,
                miller_on_wulff,
                loc="upper center",
                bbox_to_anchor=(0.5, 1),
                ncol=3,
                fancybox=True,
                shadow=False,
            )
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("z")
    # Add colorbar
    if bar_on:
        cmap = plt.get_cmap(color_set)
        cmap.set_over("0.25")
        cmap.set_under("0.75")
        bounds = [round(e, 2) for e in e_surf_on_wulff]
        bounds.append(1.2 * bounds[-1])
        norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
        # display surface energies
        ax1 = fig.add_axes(bar_pos)
        cbar = mpl.colorbar.ColorbarBase(
            ax1,
            cmap=cmap,
            norm=norm,
            boundaries=[0] + bounds + [10],
            extend="both",
            ticks=bounds[:-1],
            spacing="proportional",
            orientation="vertical",
        )
        units = "$J/m^2$" if units_in_JPERM2 else r"$eV/\AA^2$"
        cbar.set_label("Surface Energies (%s)" % (units), fontsize=25)
    if grid_off:
        ax.grid("off")
    if axis_off:
        ax.axis("off")
    return plt
def get_plotly(
    self,
    color_set="PuBu",
    off_color="red",
    alpha=1,
    custom_colors={},
    units_in_JPERM2=True,
):
    """
    Get the Wulff shape as a plotly Figure object.

    Args:
        color_set: default is 'PuBu'
        alpha (float): chosen from 0 to 1 (float), default is 1
        off_color: Default color for facets not present on the Wulff shape.
        custom_colors ({(h,k,l}: [r,g,b,alpha}): Customize color of each
            facet with a dictionary. The key is the corresponding Miller
            index and value is the color. Undefined facets will use default
            color site. Note: If you decide to set your own colors, it
            probably won't make any sense to have the color bar on.
        units_in_JPERM2 (bool): Units of surface energy, defaults to
            Joules per square meter (True)

    Return:
        (plotly.graph_objs.Figure)
    """
    units = "Jm⁻²" if units_in_JPERM2 else "eVÅ⁻²"
    (
        color_list,
        color_proxy,
        color_proxy_on_wulff,
        miller_on_wulff,
        e_surf_on_wulff,
    ) = self._get_colors(color_set, alpha, off_color, custom_colors=custom_colors)
    planes_data, color_scale, ticktext, tickvals = [], [], [], []
    for plane in self.facets:
        if len(plane.points) < 1:
            # empty, plane is not on_wulff.
            continue
        plane_color = color_list[plane.index]
        plane_color = (1, 0, 0, 1) if plane_color == off_color else plane_color  # set to red for now
        pt = self.get_line_in_facet(plane)
        x_pts, y_pts, z_pts = [], [], []
        for p in pt:
            x_pts.append(p[0])
            y_pts.append(p[1])
            z_pts.append(p[2])
        # remove duplicate x y z pts to save time
        all_xyz = []
        # pylint: disable=E1133,E1136
        [all_xyz.append(list(coord)) for coord in np.array([x_pts, y_pts, z_pts]).T if list(coord) not in all_xyz]
        all_xyz = np.array(all_xyz).T
        x_pts, y_pts, z_pts = all_xyz[0], all_xyz[1], all_xyz[2]
        index_list = [int(i) for i in np.linspace(0, len(x_pts) - 1, len(x_pts))]
        # triangulate the facet by taking every 3-combination of vertices
        tri_indices = np.array(list(itertools.combinations(index_list, 3))).T
        hkl = self.miller_list[plane.index]
        hkl = unicodeify_spacegroup("(" + "%s" * len(hkl) % hkl + ")")
        color = "rgba(%.5f, %.5f, %.5f, %.5f)" % tuple(np.array(plane_color) * 255)
        # note hoverinfo is incompatible with latex, need unicode instead
        planes_data.append(
            go.Mesh3d(
                x=x_pts,
                y=y_pts,
                z=z_pts,
                i=tri_indices[0],
                j=tri_indices[1],
                k=tri_indices[2],
                hovertemplate="<br>%{text}<br>" + "{}={:.3f} {}<br>".format("\u03b3", plane.e_surf, units),
                color=color,
                text=[r"Miller index: %s" % hkl] * len(x_pts),
                hoverinfo="name",
                name="",
            )
        )
        # normalize surface energy from a scale of 0 to 1 for colorbar
        norm_e = (plane.e_surf - min(e_surf_on_wulff)) / (max(e_surf_on_wulff) - min(e_surf_on_wulff))
        c = [norm_e, color]
        if c not in color_scale:
            color_scale.append(c)
            ticktext.append("%.3f" % plane.e_surf)
            tickvals.append(norm_e)
    # Add colorbar
    color_scale = sorted(color_scale, key=lambda c: c[0])
    colorbar = go.Mesh3d(
        x=[0],
        y=[0],
        z=[0],
        colorbar=go.ColorBar(
            title={
                "text": r"Surface energy %s" % units,
                "side": "right",
                "font": {"size": 25},
            },
            ticktext=ticktext,
            tickvals=tickvals,
        ),
        colorscale=[[0, "rgb(255,255,255, 255)"]] + color_scale,  # fix the scale
        intensity=[0, 0.33, 0.66, 1],
        i=[0],
        j=[0],
        k=[0],
        name="y",
        showscale=True,
    )
    planes_data.append(colorbar)
    # Format aesthetics: background, axis, etc.
    axis_dict = dict(
        title="",
        autorange=True,
        showgrid=False,
        zeroline=False,
        ticks="",
        showline=False,
        showticklabels=False,
        showbackground=False,
    )
    fig = go.Figure(data=planes_data)
    fig.update_layout(
        dict(
            showlegend=True,
            scene=dict(xaxis=axis_dict, yaxis=axis_dict, zaxis=axis_dict),
        )
    )
    return fig
def _get_azimuth_elev(self, miller_index):
    """
    Args:
        miller_index: viewing direction

    Returns:
        azim, elev for plotting
    """
    # Looking down the c-axis is a degenerate case: view from straight above.
    if miller_index in [(0, 0, 1), (0, 0, 0, 1)]:
        return 0, 90
    cartesian = self.lattice.get_cartesian_coords(miller_index)
    in_plane = [cartesian[0], cartesian[1], 0]
    # Azimuth: angle of the in-plane projection from the x-axis.
    # Elevation: angle between the full vector and its in-plane projection.
    azim = get_angle(in_plane, (1, 0, 0))
    elev = get_angle(cartesian, in_plane)
    return azim, elev
@property
def volume(self):
    """Volume enclosed by the Wulff shape (delegates to the convex hull)."""
    hull = self.wulff_convex
    return hull.volume
@property
def miller_area_dict(self):
    """Mapping of Miller index -> area of that facet on the Wulff shape."""
    return {hkl: area for hkl, area in zip(self.miller_list, self.color_area)}
@property
def miller_energy_dict(self):
    """Mapping of Miller index -> surface energy used for that facet."""
    return {hkl: energy for hkl, energy in zip(self.miller_list, self.e_surf_list)}
@property
def surface_area(self):
    """Total area of all facets on the Wulff shape."""
    facet_areas = self.miller_area_dict
    return sum(facet_areas.values())
@property
def weighted_surface_energy(self):
    """
    Returns:
        Area-weighted mean surface energy:
        sum(surface_energy_hkl * area_hkl) / sum(area_hkl)
    """
    total_energy = self.total_surface_energy
    return total_energy / self.surface_area
@property
def area_fraction_dict(self):
    """
    Returns:
        (dict): {hkl: area_hkl / total area on wulff}
    """
    # Hoist the total out of the comprehension: `self.surface_area` is a
    # property that sums all facet areas, so the original evaluated it
    # once per facet (accidental O(n^2)). Values are unchanged.
    total = self.surface_area
    return {hkl: area / total for hkl, area in self.miller_area_dict.items()}
@property
def anisotropy(self):
    """
    Returns:
        (float) Coefficient of Variation from weighted surface energy.
        The ideal sphere is 0.
    """
    mean_energy = self.weighted_surface_energy
    fractions = self.area_fraction_dict
    # Area-weighted variance of the facet surface energies.
    variance = sum(
        (energy - mean_energy) ** 2 * fractions[hkl]
        for hkl, energy in self.miller_energy_dict.items()
    )
    return np.sqrt(variance) / mean_energy
@property
def shape_factor(self):
    """
    Surface area divided by volume**(2/3); a large value indicates great
    anisotropy. Useful for determining the critical nucleus size.
    See Ballufi, R. W., Allen, S. M. & Carter, W. C. Kinetics
    of Materials. (John Wiley & Sons, 2005), p.461.

    Returns:
        (float) Shape factor.
    """
    normalizing_volume = self.volume ** (2 / 3)
    return self.surface_area / normalizing_volume
@property
def effective_radius(self):
    """
    Radius of the Wulff shape when it is approximated as a sphere
    of equal volume (V = 4/3 * pi * r**3).

    Returns:
        (float) radius.
    """
    # Same arithmetic grouping as before to keep floating-point
    # results bit-identical.
    radius_cubed = (3 / 4) * (self.volume / np.pi)
    return radius_cubed ** (1 / 3)
@property
def total_surface_energy(self):
    """
    Total surface energy of the Wulff shape.

    Returns:
        (float) sum(surface_energy_hkl * area_hkl)
    """
    # sum() over a generator replaces the manual accumulator loop;
    # an empty dict still yields 0, matching the old behavior.
    areas = self.miller_area_dict
    return sum(energy * areas[hkl]
               for hkl, energy in self.miller_energy_dict.items())
@property
def tot_corner_sites(self):
    """
    Number of vertices in the convex hull.
    Useful for identifying catalytically active sites.
    """
    hull_vertices = self.wulff_convex.vertices
    return len(hull_vertices)
@property
def tot_edges(self):
    """
    Returns the number of edges in the convex hull.
    Useful for identifying catalytically active sites.
    """
    all_edges = []
    for facet in self.facets:
        edges = []
        pt = self.get_line_in_facet(facet)
        lines = []
        for i, p in enumerate(pt):
            # pt holds segment endpoints pairwise: (pt[0], pt[1]),
            # (pt[2], pt[3]), ... so only len(pt)/2 iterations are needed.
            # NOTE(review): `i == len(pt) / 2` compares an int against a
            # float; the early break only fires when len(pt) is even —
            # presumably always the case here. TODO confirm.
            if i == len(pt) / 2:
                break
            # Canonicalize each segment as a sorted pair of endpoint
            # tuples so the same physical edge seen from two adjacent
            # facets compares equal.
            lines.append(tuple(sorted(tuple([tuple(pt[i * 2]), tuple(pt[i * 2 + 1])]))))
        for i, p in enumerate(lines):
            # Count each edge only once across all facets.
            if p not in all_edges:
                edges.append(p)
        all_edges.extend(edges)
    return len(all_edges)
|
vorwerkc/pymatgen
|
pymatgen/analysis/wulff.py
|
Python
|
mit
| 27,746
|
[
"pymatgen"
] |
e3e034207642e1ceb817f0214573b0e80d895a6f9094d1f595096007fca8de12
|
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# MIT License
#
# Copyright (c) 2018 CMU Locus Lab
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This file is adapted from
# https://github.com/locuslab/TCN/blob/master/TCN/tcn.py
# https://github.com/locuslab/TCN/blob/master/TCN/adding_problem/add_test.py
import warnings
import torch
import torch.nn as nn
from torch.nn.utils import weight_norm
from zoo.orca.automl.model.base_pytorch_model import PytorchBaseModel, \
PYTORCH_REGRESSION_LOSS_MAP
class Chomp1d(nn.Module):
    """Remove trailing padding from a causal Conv1d output.

    The TCN convolutions pad both ends with ``padding`` zeros; dropping the
    last ``chomp_size`` time steps restores causality so the output length
    equals the input length.
    """

    def __init__(self, chomp_size):
        super(Chomp1d, self).__init__()
        # Number of trailing time steps to drop.
        self.chomp_size = chomp_size

    def forward(self, x):
        # Fix: with chomp_size == 0 the original slice `x[:, :, :-0]`
        # evaluates to `x[:, :, :0]` and returns an empty tensor.
        if self.chomp_size == 0:
            return x.contiguous()
        return x[:, :, :-self.chomp_size].contiguous()
class TemporalBlock(nn.Module):
    """One residual block of a TCN: two weight-normalized dilated causal
    convolutions, each followed by a chomp (to restore causality), ReLU and
    dropout, plus a residual connection (1x1 conv when channel counts differ).
    """

    def __init__(self, n_inputs, n_outputs, kernel_size, stride, dilation, padding, dropout=0.2,
                 repo_initialization=True):
        super(TemporalBlock, self).__init__()

        def causal_conv(in_channels):
            # Weight-normalized dilated convolution shared by both stages.
            return weight_norm(nn.Conv1d(in_channels, n_outputs, kernel_size,
                                         stride=stride, padding=padding,
                                         dilation=dilation))

        self.conv1 = causal_conv(n_inputs)
        self.chomp1 = Chomp1d(padding)
        self.relu1 = nn.ReLU()
        self.dropout1 = nn.Dropout(dropout)
        self.conv2 = causal_conv(n_outputs)
        self.chomp2 = Chomp1d(padding)
        self.relu2 = nn.ReLU()
        self.dropout2 = nn.Dropout(dropout)
        self.net = nn.Sequential(self.conv1, self.chomp1, self.relu1, self.dropout1,
                                 self.conv2, self.chomp2, self.relu2, self.dropout2)
        # Residual path: identity when shapes match, 1x1 conv otherwise.
        self.downsample = None if n_inputs == n_outputs else nn.Conv1d(n_inputs, n_outputs, 1)
        self.relu = nn.ReLU()
        if repo_initialization:
            self.init_weights()

    def init_weights(self):
        """Initialize conv weights as in the reference TCN repo: N(0, 0.01)."""
        for conv in (self.conv1, self.conv2, self.downsample):
            if conv is not None:
                conv.weight.data.normal_(0, 0.01)

    def forward(self, x):
        residual = x if self.downsample is None else self.downsample(x)
        return self.relu(self.net(x) + residual)
class TemporalConvNet(nn.Module):
    """Temporal Convolutional Network for sequence forecasting.

    Input is (batch, past_seq_len, input_feature_num); output is
    (batch, future_seq_len, output_feature_num).

    Args:
        past_seq_len: number of input time steps.
        input_feature_num: features per input time step.
        future_seq_len: number of predicted time steps.
        output_feature_num: features per output time step.
        num_channels: hidden channel sizes, one per TemporalBlock; a final
            block with output_feature_num channels is appended internally.
            The caller's list is NOT modified.
        kernel_size: convolution kernel size shared by all blocks.
        dropout: dropout probability inside each block.
        repo_initialization: use the reference repo's N(0, 0.01) init.
    """

    def __init__(self,
                 past_seq_len,
                 input_feature_num,
                 future_seq_len,
                 output_feature_num,
                 num_channels,
                 kernel_size=3,
                 dropout=0.1,
                 repo_initialization=True):
        super(TemporalConvNet, self).__init__()
        # Fix: build a new list instead of `num_channels.append(...)`, which
        # mutated the caller's list in place (model_creator had to defend by
        # passing a copy).
        channels = list(num_channels) + [output_feature_num]
        layers = []
        for i, out_channels in enumerate(channels):
            dilation_size = 2 ** i  # receptive field doubles per level
            in_channels = input_feature_num if i == 0 else channels[i - 1]
            layers.append(TemporalBlock(in_channels, out_channels, kernel_size,
                                        stride=1, dilation=dilation_size,
                                        padding=(kernel_size - 1) * dilation_size,
                                        dropout=dropout,
                                        repo_initialization=repo_initialization))
        self.tcn = nn.Sequential(*layers)
        # Maps the time axis from past_seq_len to future_seq_len.
        self.linear = nn.Linear(past_seq_len, future_seq_len)
        if repo_initialization:
            self.init_weights()

    def init_weights(self):
        """Initialize the output linear layer as in the reference repo."""
        self.linear.weight.data.normal_(0, 0.01)

    def forward(self, x):
        # (batch, time, feature) -> (batch, feature, time) for Conv1d.
        x = x.permute(0, 2, 1)
        y = self.tcn(x)
        y = self.linear(y)
        # Back to (batch, time, feature).
        y = y.permute(0, 2, 1)
        return y
def model_creator(config):
    """Build a TemporalConvNet from an automl config dict.

    `num_channels` takes precedence; otherwise channels are derived from
    `nhid` (default 30) and `levels` (default 8) as [nhid] * (levels - 1).
    """
    if config.get("num_channels") and (config.get("nhid") and config.get("levels")):
        warnings.warn(f"WARNING: You set both num_channels and (nhid, levels) for TCN. "
                      f"Only num_channels={config['num_channels']} will be effective.")
    num_channels = config.get("num_channels")
    if not num_channels:
        # Falsy (missing/0/empty) values fall back to the defaults.
        n_hid = config.get("nhid") or 30
        levels = config.get("levels") or 8
        num_channels = [n_hid] * (levels - 1)
    return TemporalConvNet(past_seq_len=config["past_seq_len"],
                           input_feature_num=config["input_feature_num"],
                           future_seq_len=config["future_seq_len"],
                           output_feature_num=config["output_feature_num"],
                           num_channels=num_channels.copy(),
                           kernel_size=config.get("kernel_size", 7),
                           dropout=config.get("dropout", 0.2),
                           repo_initialization=config.get("repo_initialization", True))
def optimizer_creator(model, config):
    """Create a torch optimizer for `model` from config keys "optim"
    (class name in torch.optim, default "Adam") and "lr" (default 4e-3)."""
    optimizer_cls = getattr(torch.optim, config.get("optim", "Adam"))
    return optimizer_cls(model.parameters(), lr=config.get("lr", 4e-3))
def loss_creator(config):
    """Instantiate the torch.nn loss mapped from config["loss"]
    (one of "mse", "mae", "huber_loss"; default "mse")."""
    loss_name = config.get("loss", "mse")
    if loss_name not in PYTORCH_REGRESSION_LOSS_MAP:
        raise RuntimeError(f"Got \"{loss_name}\" for loss name,\
              where \"mse\", \"mae\" or \"huber_loss\" is expected")
    return getattr(torch.nn, PYTORCH_REGRESSION_LOSS_MAP[loss_name])()
class TCNPytorch(PytorchBaseModel):
    """Automl wrapper exposing the TCN model/optimizer/loss factories to the
    pytorch base-model framework."""

    def __init__(self, check_optional_config=False):
        super().__init__(model_creator=model_creator,
                         optimizer_creator=optimizer_creator,
                         loss_creator=loss_creator,
                         check_optional_config=check_optional_config)

    def _get_required_parameters(self):
        # Data-shape parameters that must always be supplied.
        return {"past_seq_len", "input_feature_num",
                "future_seq_len", "output_feature_num"}

    def _get_optional_parameters(self):
        # TCN-specific tunables on top of the base model's options.
        tcn_options = {"nhid", "levels", "kernel_size"}
        return tcn_options | super()._get_optional_parameters()
|
intel-analytics/analytics-zoo
|
pyzoo/zoo/chronos/model/tcn.py
|
Python
|
apache-2.0
| 7,620
|
[
"ORCA"
] |
112f617f4809fbfe2dbfb03cddc7cce505ac89600a3548a0bdedf62a4d55b2c3
|
"""Contains the MoveIn transition class."""
# move_in.py
# Mission Pinball Framework
# Written by Brian Madden & Gabe Knuth
# Released under the MIT License. (See license info at the end of this file.)
# Documentation and more info at http://missionpinball.com/mpf
import time
import pygame
from mpf.system.timing import Timing
from mpf.media_controller.core.display import Transition
class MoveIn(Transition):
    """Move In Transition. The new slide moves in on top of the current slide.

    Args:
        mpfdisplay: The MPFDIsplay this transition is applying to.
        machine: The main machine object.
        slide_a: Slide object representing the existing (current) slide.
        slide_b: Slide object representing the incoming (new) slide.
        duration: MPF time string of the how long this transition should take.
        direction: String which defines which direction the new slide will come
            in from. Options are 'top', 'bottom', 'left' and 'right'
        **kwargs: Not used but needed because there might be extra kwargs
            depending on how this transition is called.
    """

    def __init__(self, mpfdisplay, machine, slide_a, slide_b, duration='1s',
                 direction='top', **kwargs):
        # Assumes slides are the same size
        self.name = 'Slide_Transition_' + slide_a.name + '_' + slide_b.name
        super(MoveIn, self).__init__(mpfdisplay, machine, slide_a, slide_b,
                                     duration, **kwargs)

        # Start slide_b fully off-screen on the chosen side; exactly one
        # of x/y is non-zero (both zero for an unknown direction).
        self.slide_b_start_x = 0
        self.slide_b_start_y = 0
        surface = self.slide_a.surface
        if direction == 'top':
            self.slide_b_start_y = -surface.get_height()
        elif direction == 'bottom':
            self.slide_b_start_y = surface.get_height()
        elif direction == 'left':
            self.slide_b_start_x = -surface.get_width()
        elif direction == 'right':
            self.slide_b_start_x = surface.get_width()

        self.slide_b_current_x = self.slide_b_start_x
        self.slide_b_current_y = self.slide_b_start_y

    def update(self):
        """Called each display loop to update the slide positions."""
        super(MoveIn, self).update()

        # Shrink the non-zero offset towards zero as the transition
        # progresses (self.percent goes 0 -> 1).
        remaining = 1 - self.percent
        if self.slide_b_current_x:
            self.slide_b_current_x = int(self.slide_b_start_x * remaining)
        if self.slide_b_current_y:
            self.slide_b_current_y = int(self.slide_b_start_y * remaining)

        # slide_a stays put as the background; slide_b is drawn on top
        # at its current offset.
        self.surface.blit(self.slide_a.surface, (0, 0))
        self.surface.blit(self.slide_b.surface,
                          (self.slide_b_current_x, self.slide_b_current_y))
# The MIT License (MIT)
# Copyright (c) 2013-2015 Brian Madden and Gabe Knuth
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
|
jabdoa2/mpf
|
mpf/media_controller/transitions/move_in.py
|
Python
|
mit
| 4,014
|
[
"Brian"
] |
bae8b159a4125f683b5ac2556bf2efa1c2476b67d9064247bf0a249d238cb8cc
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2003-2007 Donald N. Allingham
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2011-2016 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Written by Alex Roitman,
# largely based on the BaseDoc classes by Don Allingham
""" the non-UI-specific (i.e. common, shared) classes for books """
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import copy
import os
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".Book")
#-------------------------------------------------------------------------
#
# SAX interface
#
#-------------------------------------------------------------------------
from xml.sax import make_parser, handler, SAXParseException
from xml.sax.saxutils import escape
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from ...const import HOME_DIR
from ...utils.cast import get_type_converter_by_name, type_name
from ..docgen import StyleSheet, StyleSheetList
from .. import BasePluginManager
from . import book_categories
#------------------------------------------------------------------------
#
# Private Constants
#
#------------------------------------------------------------------------
_UNSUPPORTED = _("Unsupported")
#------------------------------------------------------------------------
#
# Book Item class
#
#------------------------------------------------------------------------
class BookItem:
    """
    Interface into the book item -- a smallest element of the book.
    """

    def __init__(self, dbase, name):
        """
        Create a new empty BookItem.

        TODO: it should be possible to make a non-empty BookItem, a copy

        name: the book item is retrieved
        from the book item registry using name for lookup
        """
        self.dbase = dbase
        self.style_name = "default"
        pmgr = BasePluginManager.get_instance()

        for pdata in pmgr.get_reg_bookitems():
            if pdata.id == name:
                self.translated_name = pdata.name
                if not pdata.supported:
                    self.category = _UNSUPPORTED
                else:
                    self.category = book_categories[pdata.category]
                mod = pmgr.load_plugin(pdata)
                # Fix: use getattr instead of eval('mod.' + ...). Same
                # attribute lookup, but registry-supplied strings are never
                # executed as Python code.
                self.write_item = getattr(mod, pdata.reportclass)
                self.name = pdata.id
                oclass = getattr(mod, pdata.optionclass)
                self.option_class = oclass(self.name, self.dbase)
                self.option_class.load_previous_values()

    def get_name(self):
        """
        Return the name of the item.
        """
        return self.name

    def get_translated_name(self):
        """
        Return the translated name of the item.
        """
        return self.translated_name

    def get_category(self):
        """
        Return the category of the item.
        """
        return self.category

    def get_write_item(self):
        """
        Return the report-writing function of the item.
        """
        return self.write_item

    def set_style_name(self, style_name):
        """
        Set the style name for the item.

        style_name: name of the style to set.
        """
        self.style_name = style_name

    def get_style_name(self):
        """
        Return the style name of the item.
        """
        return self.style_name
#------------------------------------------------------------------------
#
# Book class
#
#------------------------------------------------------------------------
class Book:
    """
    Interface into the user-defined Book -- a collection of book items.
    """

    def __init__(self, obj=None, exact_copy=True):
        """
        Create a new Book, optionally populated from another Book.

        @param obj: if not None, creates the Book from obj, from the
                    items in obj, instead of creating an empty Book.
        @type obj: a :class:`.Book` instance
        @param exact_copy: if True (and obj is not None) the exact same
                           BookItem objects will be in the new Book;
                           if False (and obj is not None) the same number
                           and same type of BookItem objects will be created
        @type exact_copy: boolean
        """
        self.name = ""  # this is tested for, in several places
        self.dbname = ""
        self.paper_name = None
        self.paper_orientation = None
        self.paper_metric = None
        self.paper_custom_size = None
        self.paper_margins = None
        self.paper_format = None
        self.paper_output = None
        self.item_list = []
        if not obj:
            return
        if exact_copy:
            # Share the very same BookItem objects (and list).
            self.item_list = obj.item_list
            return
        # Deep-ish copy: build fresh BookItems carrying the same options.
        for item in obj.get_item_list():
            clone = BookItem(item.dbase, item.get_name())
            source_opts = item.option_class.handler.options_dict
            clone_opts = clone.option_class.handler.options_dict
            menu = clone.option_class.menu
            for optname, value in source_opts.items():
                clone_opts[optname] = value
                menu_option = menu.get_option_by_name(optname)
                if menu_option:
                    menu_option.set_value(value)
            clone.set_style_name(item.get_style_name())
            self.item_list.append(clone)

    def set_name(self, name):
        """Set the name of the book."""
        self.name = name

    def get_name(self):
        """Return the name of the book."""
        return self.name

    def get_dbname(self):
        """Return the name of the database file used for the book."""
        return self.dbname

    def set_dbname(self, name):
        """Set the name of the database file used for the book."""
        self.dbname = name

    def clear(self):
        """Clear the contents of the book."""
        self.item_list = []

    def append_item(self, item):
        """Add an item to the end of the book."""
        self.item_list.append(item)

    def insert_item(self, index, item):
        """Insert an item at the given position in the book."""
        self.item_list.insert(index, item)

    def pop_item(self, index):
        """Remove and return the item at the given position."""
        return self.item_list.pop(index)

    def get_item(self, index):
        """Return the item at the given position."""
        return self.item_list[index]

    def set_item(self, index, item):
        """Replace the item at the given position."""
        self.item_list[index] = item

    def get_item_list(self):
        """Return the list of items in the current book."""
        return self.item_list

    def set_paper_name(self, paper_name):
        """Set the paper name (str) for the Book."""
        self.paper_name = paper_name

    def get_paper_name(self):
        """Return the paper name (str) of the Book."""
        return self.paper_name

    def set_orientation(self, orientation):
        """Set the paper orientation (int: PAPER_LANDSCAPE/PAPER_PORTRAIT)."""
        self.paper_orientation = orientation

    def get_orientation(self):
        """Return the paper orientation (int: PAPER_LANDSCAPE/PAPER_PORTRAIT)."""
        return self.paper_orientation

    def set_paper_metric(self, paper_metric):
        """Set whether the Book uses metric paper sizes (boolean)."""
        self.paper_metric = paper_metric

    def get_paper_metric(self):
        """Return whether the Book uses metric paper sizes (boolean)."""
        return self.paper_metric

    def set_custom_paper_size(self, paper_size):
        """Set the custom paper size ([width, height] floats, in cm)."""
        self.paper_custom_size = paper_size

    def get_custom_paper_size(self):
        """Return the custom paper size ([width, height] floats, in cm)."""
        return self.paper_custom_size

    def set_margins(self, margins):
        """Set the paper margins ([left, right, top, bottom] floats, cm).

        A copy is stored so later caller mutations don't leak in.
        """
        self.paper_margins = copy.copy(margins)

    def get_margins(self):
        """Return a copy of the paper margins ([l, r, t, b] floats, cm)."""
        return copy.copy(self.paper_margins)

    def set_margin(self, pos, value):
        """Set one margin: pos indexes [left, right, top, bottom], value in cm."""
        self.paper_margins[pos] = value

    def get_margin(self, pos):
        """Return one margin: pos indexes [left, right, top, bottom], in cm."""
        return self.paper_margins[pos]

    def set_format_name(self, format_name):
        """Set the output format name (str) for the Book."""
        self.paper_format = format_name

    def get_format_name(self):
        """Return the output format name (str) of the Book."""
        return self.paper_format

    def set_output(self, output):
        """Set the output (str) for the Book."""
        self.paper_output = output

    def get_output(self):
        """Return the output (str) of the Book."""
        return self.paper_output
#------------------------------------------------------------------------
#
# BookList class
#
#------------------------------------------------------------------------
class BookList:
    """
    Interface into the user-defined list of books.

    BookList is loaded from a specified XML file if it exists.
    """

    def __init__(self, filename, dbase):
        """
        Create a new BookList from the books that may be defined in the
        specified file.

        file: XML file that contains book items definitions
        """
        self.dbase = dbase
        self.bookmap = {}  # maps book name -> Book instance
        self._needs_saving = None
        # The book list file lives in the user's Gramps home directory.
        self.file = os.path.join(HOME_DIR, filename)
        self.parse()

    def delete_book(self, name):
        """
        Remove a book from the list. Since each book must have a
        unique name, the name is used to delete the book.

        name: name of the book to delete
        """
        del self.bookmap[name]

    ## 2/2016 the string "get_book_map" appears nowhere else in gramps
    ## def get_book_map(self):
    ##     """
    ##     Return the map of names to books.
    ##     """
    ##     return self.bookmap
    ##

    def get_book(self, name):
        """
        Return the Book associated with the name

        name: name associated with the desired Book.
        """
        return self.bookmap[name]

    def get_book_names(self):
        "Return a list of all the book names in the BookList, sorted"
        return sorted(self.bookmap.keys())

    def set_book(self, name, book):
        """
        Add or replaces a Book in the BookList.

        name: name associated with the Book to add or replace.
        book: definition of the book -- a :class:`.Book` instance
        """
        self.bookmap[name] = book

    def set_needs_saving(self, needs_saving):
        """
        Set the needs_saving flag for the BookList.

        @param needs_saving: whether the current BookList needs saving
        @type needs_saving: boolean
        """
        self._needs_saving = needs_saving

    def get_needs_saving(self):
        """
        Return the needs_saving flag of the BookList.

        @returns: returns whether the current BookList needs saving to a file
        @rtype: boolean
        """
        return self._needs_saving

    def save(self):
        """
        Saves the current BookList to the associated file.

        Books and option names are written in sorted order so that diffs
        of archived copies of the file stay meaningful.
        """
        with open(self.file, "w", encoding="utf-8") as b_f:
            b_f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
            b_f.write('<booklist>\n')
            for name in sorted(self.bookmap):  # enable a diff of archived copies
                book = self.get_book(name)
                dbname = escape(book.get_dbname())
                b_f.write(' <book name="%s" database="%s">'
                          '\n' % (escape(name), dbname))
                for item in book.get_item_list():
                    b_f.write(' <item name="%s" '
                              'trans_name="%s">\n' % (
                                  item.get_name(),
                                  item.get_translated_name()))
                    options = item.option_class.handler.options_dict
                    for option_name in sorted(options.keys()):  # enable a diff
                        option_value = options[option_name]
                        if isinstance(option_value, (list, tuple)):
                            # List-valued options are serialized one
                            # <listitem> per element, each with its type so
                            # parse() can convert it back.
                            b_f.write(' <option name="%s" value="" '
                                      'length="%d">\n' % (
                                          escape(option_name),
                                          len(options[option_name])))
                            for list_index in range(len(option_value)):
                                option_type = type_name(
                                    option_value[list_index])
                                value = escape(str(option_value[list_index]))
                                # escape() does not handle double quotes.
                                value = value.replace('"', '&quot;')
                                b_f.write(' <listitem number="%d" '
                                          'type="%s" value="%s"/>\n' % (
                                              list_index,
                                              option_type,
                                              value))
                            b_f.write(' </option>\n')
                        else:
                            option_type = type_name(option_value)
                            value = escape(str(option_value))
                            # escape() does not handle double quotes.
                            value = value.replace('"', '&quot;')
                            b_f.write(' <option name="%s" type="%s" '
                                      'value="%s"/>\n' % (
                                          escape(option_name),
                                          option_type,
                                          value))
                    b_f.write(' <style name="%s"/>'
                              '\n' % item.get_style_name())
                    b_f.write(' </item>\n')
                # Paper settings are optional; only write the ones set.
                if book.get_paper_name():
                    b_f.write(' <paper name="%s"/>'
                              '\n' % book.get_paper_name())
                if book.get_orientation() is not None:  # 0 is legal
                    b_f.write(' <orientation value="%s"/>'
                              '\n' % book.get_orientation())
                if book.get_paper_metric() is not None:  # 0 is legal
                    b_p_metric = book.get_paper_metric()
                    if isinstance(b_p_metric, bool):
                        b_p_metric = int(b_p_metric)
                    b_f.write(' <metric value="%s"/>'
                              '\n' % b_p_metric)
                if book.get_custom_paper_size():
                    size = book.get_custom_paper_size()
                    b_f.write(' <size value="%f %f"/>'
                              '\n' % (size[0], size[1]))
                if book.get_margins():
                    for pos in range(len(book.get_margins())):
                        b_f.write(' <margin number="%s" '
                                  'value="%f"/>\n' % (
                                      pos, book.get_margin(pos)))
                if book.get_format_name():
                    b_f.write(' <format name="%s"/>'
                              '\n' % book.get_format_name())
                if book.get_output():
                    b_f.write(' <output name="%s"/>'
                              '\n' % escape(book.get_output()))
                b_f.write(' </book>\n')
            b_f.write('</booklist>\n')

    def parse(self):
        """
        Loads the BookList from the associated file, if it exists.

        Any parse/read failure is logged and otherwise ignored: a missing
        or broken file simply yields an empty BookList.
        """
        try:
            parser = make_parser()
            parser.setContentHandler(BookParser(self, self.dbase))
            # bug 10387; XML should be utf8, but was not previously saved
            # that way. So try to read utf8, if fails, try with system
            # encoding. Only an issue on non-utf8 systems.
            try:
                with open(self.file, encoding="utf-8") as the_file:
                    parser.parse(the_file)
            except UnicodeDecodeError:
                with open(self.file) as the_file:
                    parser.parse(the_file)
        except (IOError, OSError, ValueError, SAXParseException, KeyError,
                AttributeError):
            LOG.debug("Failed to parse book list", exc_info=True)
#-------------------------------------------------------------------------
#
# BookParser
#
#-------------------------------------------------------------------------
class BookParser(handler.ContentHandler):
    """
    SAX parsing class for the Books XML file.
    """

    def __init__(self, booklist, dbase):
        """
        Create a BookParser class that populates the passed booklist.

        booklist: BookList to be loaded from the file.
        """
        handler.ContentHandler.__init__(self)
        self.dbase = dbase
        self.booklist = booklist
        # Per-book / per-item parse state.
        self.book = None
        self.item = None
        self.option = None
        self.an_opt_name = None
        self.an_opt_value = None
        self.style = None
        self.bname = None
        self.iname = None
        self.dbname = None
        self._reset_paper_state()

    def _reset_paper_state(self):
        """Clear all per-book paper settings before parsing a new book."""
        self.b_p_name = None
        self.b_p_orient = None
        self.b_p_metric = None
        self.b_p_size = None
        self.b_p_margins = None
        self.b_p_format = None
        self.b_p_output = None

    def startElement(self, tag, attrs):
        """
        Overridden class that handles the start of a XML element
        """
        if tag == "book":
            self.book = Book()
            self.bname = attrs['name']
            self.book.set_name(self.bname)
            self.dbname = attrs['database']
            self.book.set_dbname(self.dbname)
            self._reset_paper_state()
        elif tag == "item":
            self.item = BookItem(self.dbase, attrs['name'])
            self.option = {}
        elif tag == "option":
            self.an_opt_name = attrs['name']
            if 'length' in attrs:
                # List-valued option: elements arrive as <listitem> tags.
                self.an_opt_value = []
            else:
                convert = get_type_converter_by_name(attrs['type'])
                self.an_opt_value = convert(attrs['value'])
        elif tag == "listitem":
            convert = get_type_converter_by_name(attrs['type'])
            self.an_opt_value.append(convert(attrs['value']))
        elif tag == "style":
            self.style = attrs['name']
        elif tag == 'paper':
            self.b_p_name = attrs['name']
        elif tag == 'orientation':
            self.b_p_orient = int(attrs['value'])
        elif tag == 'metric':
            self.b_p_metric = int(attrs['value'])
        elif tag == 'size':
            width, height = attrs['value'].split()
            self.b_p_size = [float(width), float(height)]
        elif tag == 'margin':
            if self.b_p_margins is None:
                self.b_p_margins = [0.0, 0.0, 0.0, 0.0]
            self.b_p_margins[int(attrs['number'])] = float(attrs['value'])
        elif tag == 'format':
            self.b_p_format = attrs['name']
        elif tag == 'output':
            self.b_p_output = attrs['name']

    def endElement(self, tag):
        """
        Overridden class that handles the end of a XML element
        """
        if tag == "option":
            self.option[self.an_opt_name] = self.an_opt_value
        elif tag == "item":
            self.item.option_class.handler.options_dict.update(self.option)
            self.item.set_style_name(self.style)
            self.book.append_item(self.item)
        elif tag == "book":
            # Apply only the paper settings that were actually present.
            if self.b_p_name:
                self.book.set_paper_name(self.b_p_name)
            if self.b_p_orient is not None:  # 0 is legal
                self.book.set_orientation(self.b_p_orient)
            if self.b_p_metric is not None:  # 0 is legal
                self.book.set_paper_metric(self.b_p_metric)
            if self.b_p_size:
                self.book.set_custom_paper_size(self.b_p_size)
            if self.b_p_margins:
                self.book.set_margins(self.b_p_margins)
            if self.b_p_format:
                self.book.set_format_name(self.b_p_format)
            if self.b_p_output:
                self.book.set_output(self.b_p_output)
            self.booklist.set_book(self.bname, self.book)
#-------------------------------------------------------------------------
#
# Functions
#
#-------------------------------------------------------------------------
def append_styles(selected_style, item):
    """
    Append the styles for a book item to the stylesheet.
    """
    ihandler = item.option_class.handler
    # Set up default style
    ihandler.set_default_stylesheet_name(item.get_style_name())
    default_style = StyleSheet()
    item.option_class.make_default_style(default_style)
    # Read all style sheets available for this item
    style_file = ihandler.get_stylesheet_savefile()
    style_list = StyleSheetList(style_file, default_style)
    # Get the selected stylesheet
    style_name = ihandler.get_default_stylesheet_name()
    style_sheet = style_list.get_style_sheet(style_name)
    # Copy every paragraph, draw, table and cell style across, in that
    # order, using one (names-getter, style-getter, adder) spec per kind.
    copy_specs = (
        (style_sheet.get_paragraph_style_names,
         style_sheet.get_paragraph_style,
         selected_style.add_paragraph_style),
        (style_sheet.get_draw_style_names,
         style_sheet.get_draw_style,
         selected_style.add_draw_style),
        (style_sheet.get_table_style_names,
         style_sheet.get_table_style,
         selected_style.add_table_style),
        (style_sheet.get_cell_style_names,
         style_sheet.get_cell_style,
         selected_style.add_cell_style),
    )
    for get_names, get_style, add_style in copy_specs:
        for name in get_names():
            add_style(name, get_style(name))
|
prculley/gramps
|
gramps/gen/plug/report/_book.py
|
Python
|
gpl-2.0
| 25,807
|
[
"Brian"
] |
720a4d6b9b7570c76a88c4ff268921f0d8621493abd39f46d0f89978f1f28f9d
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""
:mod:`MDAnalysis` --- analysis of molecular simulations in python
=================================================================
MDAnalysis (https://www.mdanalysis.org) is a python toolkit to analyze
molecular dynamics trajectories generated by CHARMM, NAMD, Amber,
Gromacs, or LAMMPS.
It allows one to read molecular dynamics trajectories and access the
atomic coordinates through numpy arrays. This provides a flexible and
relatively fast framework for complex analysis tasks. In addition,
CHARMM-style atom selection commands are implemented. Trajectories can
also be manipulated (for instance, fit to a reference structure) and
written out. Time-critical code is written in C for speed.
Help is also available through the mailinglist at
http://groups.google.com/group/mdnalysis-discussion
Please report bugs and feature requests through the issue tracker at
https://github.com/MDAnalysis/mdanalysis/issues
Citation
--------
When using MDAnalysis in published work, please cite
R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
MDAnalysis: A Python package for the rapid analysis of molecular dynamics
simulations. In S. Benthall and S. Rostrup, editors, Proceedings of the 15th
Python in Science Conference, pages 98-105, Austin, TX, 2016. SciPy,
doi:10.25080/majora-629e541a-00e
N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and
O. Beckstein. MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics
Simulations. J. Comput. Chem. 32 (2011), 2319--2327, doi:`10.1002/jcc.21787`_
https://www.mdanalysis.org
For citations of included algorithms and sub-modules please see the references_.
.. _`10.1002/jcc.21787`: http://dx.doi.org/10.1002/jcc.21787
.. _references: https://docs.mdanalysis.org/documentation_pages/references.html
Getting started
---------------
Import the package::
>>> import MDAnalysis
(note that not everything in MDAnalysis is imported right away; for
additional functionality you might have to import sub-modules
separately, e.g. for RMS fitting ``import MDAnalysis.analysis.align``.)
Build a "universe" from a topology (PSF, PDB) and a trajectory (DCD, XTC/TRR);
here we are assuming that PSF, DCD, etc contain file names. If you don't have
trajectories at hand you can play with the ones that come with MDAnalysis for
testing (see below under `Examples`_)::
>>> u = MDAnalysis.Universe(PSF, DCD)
Select the C-alpha atoms and store them as a group of atoms::
>>> ca = u.select_atoms('name CA')
>>> len(ca)
214
Calculate the centre of mass of the CA and of all atoms::
>>> ca.center_of_mass()
array([ 0.06873595, -0.04605918, -0.24643682])
>>> u.atoms.center_of_mass()
array([-0.01094035, 0.05727601, -0.12885778])
Calculate the CA end-to-end distance (in angstroem)::
>>> import numpy as np
>>> coord = ca.positions
>>> v = coord[-1] - coord[0] # last Ca minus first one
>>> np.sqrt(np.dot(v, v,))
10.938133
Define a function eedist():
>>> def eedist(atoms):
... coord = atoms.positions
... v = coord[-1] - coord[0]
... return sqrt(dot(v, v,))
...
>>> eedist(ca)
10.938133
and analyze all timesteps *ts* of the trajectory::
>>> for ts in u.trajectory:
... print eedist(ca)
10.9381
10.8459
10.4141
9.72062
....
See Also
--------
:class:`MDAnalysis.core.universe.Universe` for details
Examples
--------
MDAnalysis comes with a number of real trajectories for testing. You
can also use them to explore the functionality and ensure that
everything is working properly::
from MDAnalysis import *
from MDAnalysis.tests.datafiles import PSF,DCD, PDB,XTC
u_dims_adk = Universe(PSF,DCD)
u_eq_adk = Universe(PDB, XTC)
The PSF and DCD file are a closed-form-to-open-form transition of
Adenylate Kinase (from [Beckstein2009]_) and the PDB+XTC file are ten
frames from a Gromacs simulation of AdK solvated in TIP4P water with
the OPLS/AA force field.
.. [Beckstein2009] O. Beckstein, E.J. Denning, J.R. Perilla and T.B. Woolf,
Zipping and Unzipping of Adenylate Kinase: Atomistic Insights into the
Ensemble of Open <--> Closed Transitions. J Mol Biol 394 (2009), 160--176,
doi:10.1016/j.jmb.2009.09.009
"""
__all__ = ['Universe', 'Writer', 'fetch_mmtf',
           'AtomGroup', 'ResidueGroup', 'SegmentGroup']
import logging
import warnings
# Logger for messages emitted while the package itself is importing.
logger = logging.getLogger("MDAnalysis.__init__")
from .version import __version__
# authors.py may be absent (e.g. raw checkout); tolerate and log.
try:
    from .authors import __authors__
except ImportError:
    logger.info('Could not find authors.py, __authors__ will be empty.')
    __authors__ = []
# Registry of Readers, Parsers and Writers known to MDAnalysis
# Metaclass magic fills these as classes are declared.
_READERS = {}
_READER_HINTS = {}
_SINGLEFRAME_WRITERS = {}
_MULTIFRAME_WRITERS = {}
_PARSERS = {}
_PARSER_HINTS = {}
_SELECTION_WRITERS = {}
_CONVERTERS = {}
# Registry of TopologyAttributes
_TOPOLOGY_ATTRS = {}  # {attrname: cls}
_TOPOLOGY_TRANSPLANTS = {}  # {name: [attrname, method, transplant class]}
_TOPOLOGY_ATTRNAMES = {}  # {lower case name w/o _ : name}
# custom exceptions and warnings
from .exceptions import (
    SelectionError, NoDataError, ApplicationError, SelectionWarning,
    MissingDataWarning, ConversionWarning, FileFormatWarning,
    StreamWarning
)
from .lib import log
from .lib.log import start_logging, stop_logging
# Library-style default: attach a do-nothing handler so users who do not
# configure logging get no spurious warnings.
logging.getLogger("MDAnalysis").addHandler(log.NullHandler())
del logging
# only MDAnalysis DeprecationWarnings are loud by default
warnings.filterwarnings(action='once', category=DeprecationWarning,
                        module='MDAnalysis')
from . import units
# Bring some often used objects into the current namespace
from .core.universe import Universe, Merge
from .core.groups import AtomGroup, ResidueGroup, SegmentGroup
from .coordinates.core import writer as Writer
# After Universe import
from .coordinates.MMTF import fetch_mmtf
from . import converters
# Register citations with duecredit (a no-op unless the user enables it).
from .due import due, Doi, BibTeX
due.cite(Doi("10.25080/majora-629e541a-00e"),
         description="Molecular simulation analysis library",
         path="MDAnalysis", cite_module=True)
due.cite(Doi("10.1002/jcc.21787"),
         description="Molecular simulation analysis library",
         path="MDAnalysis", cite_module=True)
# Doi/BibTeX were only needed for the registrations above.
del Doi, BibTeX
|
MDAnalysis/mdanalysis
|
package/MDAnalysis/__init__.py
|
Python
|
gpl-2.0
| 7,425
|
[
"Amber",
"CHARMM",
"Gromacs",
"LAMMPS",
"MDAnalysis",
"NAMD"
] |
f4cc206f8a5dc0701fc7e027e63bbe19d912debc91b4be2ab185993d4172910a
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import re
import logging
import os
from monty.io import zopen
from monty.json import MSONable
from .utils import read_table_pattern, read_pattern
"""
Classes for reading/manipulating/writing QChem ouput files.
"""
__author__ = "Samuel Blau, Brandon Woods, Shyam Dwaraknath"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
logger = logging.getLogger(__name__)
class QCOutput(MSONable):
    """
    Data from a single QChem calculation output file.

    Args:
        filename (str): QChem output filename to parse.

    Fixes relative to the previous revision:
    - ``is None`` instead of ``== None``.
    - Raw string for the job-delimiter regex in
      ``multiple_outputs_from_file`` (``'\\s'`` etc. were invalid string
      escapes).
    - Sub-files are written via a context manager so they are closed even
      if a write fails.
    - The "multiple outputs" error message now names the real method
      (it previously said ``mulitple_outputs_from_file``).
    """

    def __init__(self, filename):
        self.filename = filename
        self.data = {}
        self.text = ""
        with zopen(filename, 'rt') as f:
            self.text = f.read()

        # Check if output file contains multiple output files. If so, print an error message and exit
        self.data["multiple_outputs"] = read_pattern(
            self.text, {
                "key": r"Job\s+\d+\s+of\s+(\d+)\s+"
            }, terminate_on_match=True).get('key')
        if not (self.data.get('multiple_outputs') is None or self.data.get('multiple_outputs') == [['1']]):
            print("ERROR: multiple calculation outputs found in file " + filename +
                  ". Please instead call QCOutput.multiple_outputs_from_file(QCOutput,'" + filename + "')")
            print("Exiting...")
            exit()

        # Check if calculation finished. If not, proceed with caution
        self.data["completion"] = read_pattern(self.text, {
            "key": r"Thank you very much for using Q-Chem.\s+Have a nice day."
        }).get('key')
        # if not self.data.get('completion'):
        #     print("WARNING: calculation did not reach successful completion")

        # Check if calculation is unrestricted
        self.data["unrestricted"] = read_pattern(
            self.text, {
                "key": r"A(?:n)*\sunrestricted[\s\w\-]+SCF\scalculation\swill\sbe"
            }, terminate_on_match=True).get('key')
        # Check if calculation uses GEN_SCFMAN
        self.data["using_GEN_SCFMAN"] = read_pattern(
            self.text, {
                "key": r"\s+GEN_SCFMAN: A general SCF calculation manager"
            }, terminate_on_match=True).get('key')
        # Parse the SCF
        if self.data.get('using_GEN_SCFMAN', []):
            self._read_GEN_SCFMAN()
        else:
            self._read_SCF()
        # Parse the Mulliken charges
        if self.data.get('unrestricted', []):
            self._read_unrestricted_mulliken()
        else:
            self._read_restricted_mulliken()
        # Parse the final energy
        self.data["final_energy"] = read_pattern(self.text, {"key": r"Final\senergy\sis\s+([\d\-\.]+)"}).get('key')
        # Parse the S2 values in the case of an unrestricted calculation
        if self.data.get('unrestricted', []):
            self.data["S2"] = read_pattern(self.text, {"key": r"<S\^2>\s=\s+([\d\-\.]+)"}).get('key')
        # Check if the calculation is a geometry optimization. If so, parse the relevant output
        self.data["optimization"] = read_pattern(self.text, {"key": r"(?i)\s*job(?:_)*type\s+=\s+opt"}).get('key')
        if self.data.get('optimization', []):
            self.data["energy_trajectory"] = read_pattern(self.text, {"key": r"\sEnergy\sis\s+([\d\-\.]+)"}).get('key')
            self._read_optimized_geometry()
        # Check if the calculation is a frequency analysis. If so, parse the relevant output
        self.data["frequency_job"] = read_pattern(
            self.text, {
                "key": r"(?i)\s*job(?:_)*type\s+=\s+freq"
            }, terminate_on_match=True).get('key')
        if self.data.get('frequency_job', []):
            temp_dict = read_pattern(
                self.text, {
                    "frequencies": r"\s*Frequency:\s+([\d\-\.]+)(?:\s+([\d\-\.]+)(?:\s+([\d\-\.]+))*)*",
                    "enthalpy": r"\s*Total Enthalpy:\s+([\d\-\.]+)\s+kcal/mol",
                    "entropy": r"\s*Total Entropy:\s+([\d\-\.]+)\s+cal/mol\.K"
                })
            for key in temp_dict:
                self.data[key] = temp_dict.get(key)

    @staticmethod
    def multiple_outputs_from_file(cls, filename, keep_sub_files=True):
        """
        Parses a QChem output file with multiple calculations
        1.) Separates the output into sub-files
            e.g. qcout -> qcout.0, qcout.1, qcout.2 ... qcout.N
            a.) Find delimiter for multiple calculations
            b.) Make separate output sub-files
        2.) Creates separate QCOutputs for each one from the sub-files

        NOTE(review): deliberately kept as a staticmethod taking the class
        explicitly, for backward compatibility with callers that invoke
        QCOutput.multiple_outputs_from_file(QCOutput, filename).
        """
        to_return = []
        with zopen(filename, 'rt') as f:
            # Raw string: these are regex escapes, not string escapes.
            text = re.split(r'\s*(?:Running\s+)*Job\s+\d+\s+of\s+\d+\s+', f.read())
        if text[0] == '':
            text = text[1:]
        for i, sub_text in enumerate(text):
            sub_filename = filename + '.' + str(i)
            # Context manager guarantees the sub-file is flushed and closed.
            with open(sub_filename, 'w') as temp:
                temp.write(sub_text)
            to_return.append(cls(sub_filename))
            if not keep_sub_files:
                os.remove(sub_filename)
        return to_return

    def _read_GEN_SCFMAN(self):
        """
        Parses all GEN_SCFMANs
        """
        header_pattern = r"^\s*\-+\s+Cycle\s+Energy\s+(?:(?:DIIS)*\s+[Ee]rror)*(?:RMS Gradient)*\s+\-+(?:\s*\-+\s+OpenMP\s+Integral\s+computing\s+Module\s+(?:Release:\s+version\s+[\d\-\.]+\,\s+\w+\s+[\d\-\.]+\, Q-Chem Inc\. Pittsburgh\s+)*\-+)*\n"
        table_pattern = r"(?:\s*Inaccurate integrated density:\n\s+Number of electrons\s+=\s+[\d\-\.]+\n\s+Numerical integral\s+=\s+[\d\-\.]+\n\s+Relative error\s+=\s+[\d\-\.]+\s+\%\n)*\s*\d+\s+([\d\-\.]+)\s+([\d\-\.]+)e([\d\-\.\+]+)(?:\s+Convergence criterion met)*(?:\s+Preconditoned Steepest Descent)*(?:\s+Roothaan Step)*(?:\s+(?:Normal\s+)*BFGS [Ss]tep)*(?:\s+LineSearch Step)*(?:\s+Line search: overstep)*(?:\s+Descent step)*"
        footer_pattern = r"^\s*\-+\n"
        self.data["GEN_SCFMAN"] = read_table_pattern(self.text, header_pattern, table_pattern, footer_pattern)

    def _read_SCF(self):
        """
        Parses all old-style SCFs. Starts by checking if the SCF failed to converge and setting the footer accordingly.
        """
        self.data["SCF_failed_to_converge"] = read_pattern(
            self.text, {
                "key": r"SCF failed to converge"
            }, terminate_on_match=True).get('key')
        if self.data.get("SCF_failed_to_converge", []):
            footer_pattern = r"^\s*\d+\s*[\d\-\.]+\s+[\d\-\.]+E[\d\-\.]+\s+Convergence\s+failure\n"
        else:
            footer_pattern = r"^\s*\-+\n"
        header_pattern = r"^\s*\-+\s+Cycle\s+Energy\s+DIIS Error\s+\-+\n"
        table_pattern = r"\s*\d+\s*([\d\-\.]+)\s+([\d\-\.]+)E([\d\-\.\+]+)(?:\s*\n\s*cpu\s+[\d\-\.]+\swall\s+[\d\-\.]+)*(?:\nin dftxc\.C, eleTot sum is:[\d\-\.]+, tauTot is\:[\d\-\.]+)*(?:\s+Convergence criterion met)*(?:\s+Done RCA\. Switching to DIIS)*(?:\n\s*Warning: not using a symmetric Q)*(?:\nRecomputing EXC\s*[\d\-\.]+\s*[\d\-\.]+\s*[\d\-\.]+(?:\s*\nRecomputing EXC\s*[\d\-\.]+\s*[\d\-\.]+\s*[\d\-\.]+)*)*"
        self.data["SCF"] = read_table_pattern(self.text, header_pattern, table_pattern, footer_pattern)

    def _read_restricted_mulliken(self):
        """
        Parses Mulliken charges given a restricted SCF.
        """
        header_pattern = r"\-+\s+Ground-State Mulliken Net Atomic Charges\s+Atom\s+Charge \(a\.u\.\)\s+\-+"
        table_pattern = r"\s+\d+\s(\w+)\s+([\d\-\.]+)"
        footer_pattern = r"\s\s\-+\s+Sum of atomic charges"
        self.data["restricted_Mulliken"] = read_table_pattern(self.text, header_pattern, table_pattern, footer_pattern)

    def _read_unrestricted_mulliken(self):
        """
        Parses Mulliken charges and spins given an unrestricted SCF.
        """
        header_pattern = r"\-+\s+Ground-State Mulliken Net Atomic Charges\s+Atom\s+Charge \(a\.u\.\)\s+Spin\s\(a\.u\.\)\s+\-+"
        table_pattern = r"\s+\d+\s(\w+)\s+([\d\-\.]+)\s+([\d\-\.]+)"
        footer_pattern = r"\s\s\-+\s+Sum of atomic charges"
        self.data["unrestricted_Mulliken"] = read_table_pattern(self.text, header_pattern, table_pattern,
                                                                footer_pattern)

    def _read_optimized_geometry(self):
        """
        Parses optimized XYZ coordinates. If not present, parses optimized Z-matrix.
        """
        header_pattern = r"\*+\s+OPTIMIZATION\s+CONVERGED\s+\*+\s+\*+\s+Coordinates \(Angstroms\)\s+ATOM\s+X\s+Y\s+Z"
        table_pattern = r"\s+\d+\s+(\w+)\s+([\d\-\.]+)\s+([\d\-\.]+)\s+([\d\-\.]+)"
        footer_pattern = r"\s+Z-matrix Print:"
        self.data["optimized_geometry"] = read_table_pattern(self.text, header_pattern, table_pattern, footer_pattern)
        if self.data.get('optimized_geometry') == []:
            header_pattern = r"^\s+\*+\s+OPTIMIZATION CONVERGED\s+\*+\s+\*+\s+Z-matrix\s+Print:\s+\$molecule\s+[\d\-]+\s+[\d\-]+\n"
            table_pattern = r"\s*(\w+)(?:\s+(\d+)\s+([\d\-\.]+)(?:\s+(\d+)\s+([\d\-\.]+)(?:\s+(\d+)\s+([\d\-\.]+))*)*)*(?:\s+0)*"
            footer_pattern = r"^\$end\n"
            self.data["optimized_zmat"] = read_table_pattern(self.text, header_pattern, table_pattern, footer_pattern)
|
czhengsci/pymatgen
|
pymatgen/io/qchem_io/outputs.py
|
Python
|
mit
| 9,412
|
[
"Q-Chem",
"pymatgen"
] |
2e5f85e3ed2ff5851fe73273c6714a838d1cb439d65771c2f7045b50051df488
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
The fusion module provides higher-level interfaces to some of the operations
that can be performed with the seg_LabFusion command-line program.
"""
import os
import warnings
from nipype.interfaces.niftyseg.base import NIFTYSEGCommandInputSpec, NIFTYSEGCommand, getNiftySegPath
from nipype.interfaces.base import (TraitedSpec, File, traits, OutputMultiPath, isdefined)
from ...utils.filemanip import (load_json, save_json, split_filename,
fname_presuffix)
warn = warnings.warn
warnings.filterwarnings('always', category=UserWarning)
class STEPSInputSpec(NIFTYSEGCommandInputSpec):
    """Trait inputs for the STEPS label-fusion interface.

    The ``position`` values interleave bare values and flagged arguments
    on the generated seg_LabFusion command line.
    """
    # Target image to segment (bare positional argument).
    in_file = File(argstr='%s', exists=True, mandatory=True,
                   desc='Input image to segment',
                   position=4)
    # Emitted as "-STEPS <kernel_size>" followed by the template count.
    kernel_size = traits.Float(desc="Gaussian kernel size in mm to compute the local similarity",
                               argstr='-STEPS %f', mandatory=True,
                               position=2)
    template_num = traits.Int(desc='Number of images to fuse',
                              argstr='%i', mandatory=True,
                              position=3)
    # 4D stacks: propagated segmentations and the corresponding templates.
    warped_seg_file = File(argstr='-in %s', exists=True, mandatory=True,
                           desc='Input 4D image containing the propagated segmentations',
                           position=1)
    warped_img_file = File(argstr='%s', exists=True, mandatory=True,
                           desc='Input 4D image containing the propagated template images',
                           position=5)
    mask_file = File(argstr='-mask %s', exists=True, mandatory=False,
                     desc='Filename of the ROI for label fusion')
    mrf_value = traits.Float(argstr='-MRF_beta %s', mandatory=False,
                             desc='MRF prior strength (between 0 and 5)')
    # genfile=True: a default name is derived from in_file when unset.
    out_file = File(argstr='-out %s', genfile=True,
                    desc='Output consensus segmentation')
    prob_flag = traits.Bool(desc='Probabilistic/Fuzzy segmented image',
                            argstr='-outProb')
    prob_update_flag = traits.Bool(desc='Update label proportions at each iteration',
                                   argstr='-prop_update')
class STEPSOutputSpec(TraitedSpec):
    """Outputs of the STEPS interface."""
    out_file = File(desc="Output consensus segmentation")
class STEPS(NIFTYSEGCommand):
    """Interface to seg_LabFusion run in STEPS mode.

    Fuses a 4D stack of propagated segmentations into a single consensus
    segmentation of the target image.
    """
    _cmd = getNiftySegPath('seg_LabFusion')
    _suffix = '_steps'
    input_spec = STEPSInputSpec
    output_spec = STEPSOutputSpec

    def _list_outputs(self):
        """Resolve the absolute path of the consensus segmentation."""
        outputs = self.output_spec().get()
        out_file = self.inputs.out_file
        if not isdefined(out_file):
            # No explicit output name: derive one from the target image.
            out_file = self._gen_fname(self.inputs.in_file,
                                       suffix=self._suffix)
        outputs['out_file'] = os.path.abspath(out_file)
        return outputs

    def _gen_filename(self, name):
        """Auto-generate a value for the ``out_file`` trait only."""
        return self._list_outputs()['out_file'] if name == 'out_file' else None
class CalcTopNCCInputSpec(NIFTYSEGCommandInputSpec):
    """Trait inputs for seg_CalcTopNCC (template ranking by NCC)."""
    # Target image the templates are compared against.
    in_file = File(argstr='-target %s', exists=True, mandatory=True,
                   desc='Target file',
                   position=1)
    num_templates = traits.Int(argstr='-templates %s', mandatory=True, position=2,
                               desc='Number of Templates')
    # The template images themselves, appended after the count.
    in_templates = traits.List(File(exists=True), argstr="%s", position=3,
                               mandatory=True)
    top_templates = traits.Int(argstr='-n %s', mandatory=True, position=4,
                               desc='Number of Top Templates')
    mask_file = File(argstr='-mask %s', exists=True, mandatory=False,
                     desc='Filename of the ROI for label fusion')
class CalcTopNCCOutputSpec(TraitedSpec):
    """Outputs of CalcTopNCC."""
    # Either one filename or a list of per-line filename lists, depending
    # on how many entries the command prints (see aggregate_outputs).
    out_files = traits.Any(File(exists=True))
class CalcTopNCC(NIFTYSEGCommand):
    """Interface to seg_CalcTopNCC: rank templates by NCC against a target.

    The command prints the best-matching template filenames on stdout;
    these are captured as ``out_files``.
    """
    _cmd = getNiftySegPath('seg_CalcTopNCC')
    _suffix = '_topNCC'
    input_spec = CalcTopNCCInputSpec
    output_spec = CalcTopNCCOutputSpec

    def aggregate_outputs(self, runtime=None, needed_outputs=None):
        """Collect template names from stdout, or from the cached JSON.

        Bug fix: the cached-result branch previously loaded the JSON into
        ``out_stat`` but then assigned the undefined name ``out_files``,
        raising NameError on every successful cache hit. Both branches now
        bind ``out_files``.
        """
        outputs = self._outputs()
        # local caching for backward compatibility
        outfile = os.path.join(os.getcwd(), 'CalcTopNCC.json')
        if runtime is None:
            try:
                out_files = load_json(outfile)['files']
            except IOError:
                # No cached result: run the command to produce one.
                return self.run().outputs
        else:
            out_files = []
            for line in runtime.stdout.split('\n'):
                if line:
                    values = line.split()
                    if len(values) > 1:
                        # Multi-column line: keep it as a list of names.
                        out_files.append([str(val) for val in values])
                    else:
                        out_files.extend([str(val) for val in values])
            if len(out_files) == 1:
                # Single result: unwrap for convenience.
                out_files = out_files[0]
            save_json(outfile, dict(files=out_files))
        outputs.out_files = out_files
        return outputs
|
fprados/nipype
|
nipype/interfaces/niftyseg/steps.py
|
Python
|
bsd-3-clause
| 5,022
|
[
"Gaussian"
] |
e31567a30a8106039f6d623f9a59716fdd7ce8f05cfbf097f14d2cb7bd5ce953
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import re
import os
import warnings
from string import Template
import numpy as np
from monty.io import zopen
from pymatgen.core.structure import Molecule, Structure
from monty.json import MSONable
from pymatgen.core.units import Energy
from pymatgen.core.units import FloatWithUnit
from pymatgen.analysis.excitation import ExcitationSpectrum
"""
This module implements input and output processing from Nwchem.
2015/09/21 - Xin Chen (chenxin13@mails.tsinghua.edu.cn):
NwOutput will read new kinds of data:
1. normal hessian matrix. ["hessian"]
2. projected hessian matrix. ["projected_hessian"]
3. normal frequencies. ["normal_frequencies"]
For backward compatibility, the key for accessing the projected frequencies
is still 'frequencies'.
2015/10/12 - Xin Chen
NwOutput will read new kinds of data:
1. forces. ["forces"]
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "6/5/13"
# Optional set of basis-set file names from a local NWChem basis library
# directory (pointed to by $NWCHEM_BASIS_LIBRARY); used by NwTask to warn
# when a requested basis set is not available. None when the env var is
# unset or empty.
NWCHEM_BASIS_LIBRARY = None
if os.environ.get("NWCHEM_BASIS_LIBRARY"):
    NWCHEM_BASIS_LIBRARY = set(os.listdir(os.environ["NWCHEM_BASIS_LIBRARY"]))
class NwTask(MSONable):
    """
    Base task for Nwchem.

    Fixes relative to the previous revision: removed two no-op
    self-assignments in ``from_molecule``; classmethods now construct via
    ``cls`` so subclasses round-trip correctly; dict membership tests drop
    the redundant ``.keys()``; fixed the "not in in" warning typo.
    """

    # Supported theory keyword -> short human-readable description.
    theories = {"g3gn": "some description",
                "scf": "Hartree-Fock",
                "dft": "DFT",
                "esp": "ESP",
                "sodft": "Spin-Orbit DFT",
                "mp2": "MP2 using a semi-direct algorithm",
                "direct_mp2": "MP2 using a full-direct algorithm",
                "rimp2": "MP2 using the RI approximation",
                "ccsd": "Coupled-cluster single and double excitations",
                "ccsd(t)": "Coupled-cluster linearized triples approximation",
                "ccsd+t(ccsd)": "Fourth order triples contribution",
                "mcscf": "Multiconfiguration SCF",
                "selci": "Selected CI with perturbation correction",
                "md": "Classical molecular dynamics simulation",
                "pspw": "Pseudopotential plane-wave DFT for molecules and "
                        "insulating solids using NWPW",
                "band": "Pseudopotential plane-wave DFT for solids using NWPW",
                "tce": "Tensor Contraction Engine",
                "tddft": "Time Dependent DFT"}

    # Supported operation keyword -> description.
    operations = {"energy": "Evaluate the single point energy.",
                  "gradient": "Evaluate the derivative of the energy with "
                              "respect to nuclear coordinates.",
                  "optimize": "Minimize the energy by varying the molecular "
                              "structure.",
                  "saddle": "Conduct a search for a transition state (or "
                            "saddle point).",
                  "hessian": "Compute second derivatives.",
                  "frequencies": "Compute second derivatives and print out an "
                                 "analysis of molecular vibrations.",
                  "freq": "Same as frequencies.",
                  "vscf": "Compute anharmonic contributions to the "
                          "vibrational modes.",
                  "property": "Calculate the properties for the wave "
                              "function.",
                  "dynamics": "Perform classical molecular dynamics.",
                  "thermodynamics": "Perform multi-configuration "
                                    "thermodynamic integration using "
                                    "classical MD.",
                  "": "dummy"}

    def __init__(self, charge, spin_multiplicity, basis_set,
                 basis_set_option="cartesian",
                 title=None, theory="dft", operation="optimize",
                 theory_directives=None, alternate_directives=None):
        """
        Very flexible arguments to support many types of potential setups.
        Users should use more friendly static methods unless they need the
        flexibility.

        Args:
            charge: Charge of the molecule. If None, charge on molecule is
                used. Defaults to None. This allows the input file to be set a
                charge independently from the molecule itself.
            spin_multiplicity: Spin multiplicity of molecule. Defaults to None,
                which means that the spin multiplicity is set to 1 if the
                molecule has no unpaired electrons and to 2 if there are
                unpaired electrons.
            basis_set: The basis set used for the task as a dict. E.g.,
                {"C": "6-311++G**", "H": "6-31++G**"}.
            basis_set_option: cartesian (default) | spherical,
            title: Title for the task. Defaults to None, which means a title
                based on the theory and operation of the task is
                autogenerated.
            theory: The theory used for the task. Defaults to "dft".
            operation: The operation for the task. Defaults to "optimize".
            theory_directives: A dict of theory directives. For example,
                if you are running dft calculations, you may specify the
                exchange correlation functional using {"xc": "b3lyp"}.
            alternate_directives: A dict of alternate directives. For
                example, to perform cosmo calculations and dielectric
                constant of 78, you'd supply {'cosmo': {"dielectric": 78}}.

        Raises:
            NwInputError: if *theory* or *operation* is not recognised.
        """
        # Basic checks (dict membership tests the keys directly).
        if theory.lower() not in NwTask.theories:
            raise NwInputError("Invalid theory {}".format(theory))
        if operation.lower() not in NwTask.operations:
            raise NwInputError("Invalid operation {}".format(operation))
        self.charge = charge
        self.spin_multiplicity = spin_multiplicity
        self.title = title if title is not None else "{} {}".format(theory,
                                                                    operation)
        self.theory = theory
        self.basis_set = basis_set or {}
        if NWCHEM_BASIS_LIBRARY is not None:
            # Warn about basis sets that the local NWChem install does not
            # ship ('*' maps to 's' in NWChem basis file names).
            for b in set(self.basis_set.values()):
                if re.sub(r'\*', "s", b.lower()) not in NWCHEM_BASIS_LIBRARY:
                    warnings.warn(
                        "Basis set %s not in NWCHEM_BASIS_LIBRARY" % b)
        self.basis_set_option = basis_set_option
        self.operation = operation
        self.theory_directives = theory_directives or {}
        self.alternate_directives = alternate_directives or {}

    def __str__(self):
        """Render this task as an NWChem input-deck fragment."""
        # Basis block: one "<element> library <basis>" line per element.
        bset_spec = []
        for el, bset in sorted(self.basis_set.items(), key=lambda x: x[0]):
            bset_spec.append(" {} library \"{}\"".format(el, bset))

        theory_spec = []
        if self.theory_directives:
            theory_spec.append("{}".format(self.theory))
            for k in sorted(self.theory_directives.keys()):
                theory_spec.append(" {} {}".format(k, self.theory_directives[k]))
            theory_spec.append("end")
        for k in sorted(self.alternate_directives.keys()):
            theory_spec.append(k)
            for k2 in sorted(self.alternate_directives[k].keys()):
                theory_spec.append(" {} {}".format(
                    k2, self.alternate_directives[k][k2]))
            theory_spec.append("end")

        t = Template("""title "$title"
charge $charge
basis $basis_set_option
$bset_spec
end
$theory_spec
""")
        output = t.substitute(
            title=self.title, charge=int(self.charge),
            spinmult=self.spin_multiplicity,
            basis_set_option=self.basis_set_option,
            bset_spec="\n".join(bset_spec),
            theory_spec="\n".join(theory_spec),
            theory=self.theory)
        if self.operation is not None:
            output += "task %s %s" % (self.theory, self.operation)
        return output

    def as_dict(self):
        """Serialize to an MSONable dict."""
        return {"@module": self.__class__.__module__,
                "@class": self.__class__.__name__,
                "charge": self.charge,
                "spin_multiplicity": self.spin_multiplicity,
                "title": self.title, "theory": self.theory,
                "operation": self.operation, "basis_set": self.basis_set,
                "basis_set_option": self.basis_set_option,
                "theory_directives": self.theory_directives,
                "alternate_directives": self.alternate_directives}

    @classmethod
    def from_dict(cls, d):
        """Reconstruct a task from as_dict() output (subclass-safe)."""
        return cls(charge=d["charge"],
                   spin_multiplicity=d["spin_multiplicity"],
                   title=d["title"], theory=d["theory"],
                   operation=d["operation"], basis_set=d["basis_set"],
                   basis_set_option=d['basis_set_option'],
                   theory_directives=d["theory_directives"],
                   alternate_directives=d["alternate_directives"])

    @classmethod
    def from_molecule(cls, mol, theory, charge=None, spin_multiplicity=None,
                      basis_set="6-31g", basis_set_option="cartesian",
                      title=None, operation="optimize", theory_directives=None,
                      alternate_directives=None):
        """
        Very flexible arguments to support many types of potential setups.
        Users should use more friendly static methods unless they need the
        flexibility.

        Args:
            mol: Input molecule
            charge: Charge of the molecule. If None, charge on molecule is
                used. Defaults to None. This allows the input file to be set a
                charge independently from the molecule itself.
            spin_multiplicity: Spin multiplicity of molecule. Defaults to None,
                which means that the spin multiplicity is set to 1 if the
                molecule has no unpaired electrons and to 2 if there are
                unpaired electrons.
            basis_set: The basis set to be used as string or a dict. E.g.,
                {"C": "6-311++G**", "H": "6-31++G**"} or "6-31G". If string,
                same basis set is used for all elements.
            basis_set_option: cartesian (default) | spherical,
            title: Title for the task. Defaults to None, which means a title
                based on the theory and operation of the task is
                autogenerated.
            theory: The theory used for the task. Defaults to "dft".
            operation: The operation for the task. Defaults to "optimize".
            theory_directives: A dict of theory directives. For example,
                if you are running dft calculations, you may specify the
                exchange correlation functional using {"xc": "b3lyp"}.
            alternate_directives: A dict of alternate directives. For
                example, to perform cosmo calculations with DFT, you'd supply
                {'cosmo': "cosmo"}.
        """
        title = title if title is not None else "{} {} {}".format(
            re.sub(r"\s", "", mol.formula), theory, operation)

        charge = charge if charge is not None else mol.charge
        nelectrons = - charge + mol.charge + mol.nelectrons
        if spin_multiplicity is not None:
            # Explicit multiplicity: verify consistency with the electron
            # count (a redundant self-assignment was removed here).
            if (nelectrons + spin_multiplicity) % 2 != 1:
                raise ValueError(
                    "Charge of {} and spin multiplicity of {} is"
                    " not possible for this molecule".format(
                        charge, spin_multiplicity))
        elif charge == mol.charge:
            spin_multiplicity = mol.spin_multiplicity
        else:
            spin_multiplicity = 1 if nelectrons % 2 == 0 else 2

        elements = set(mol.composition.get_el_amt_dict().keys())
        if isinstance(basis_set, str):
            # Same basis set for every element in the molecule.
            basis_set = {el: basis_set for el in elements}

        return cls(charge, spin_multiplicity, basis_set,
                   basis_set_option=basis_set_option,
                   title=title, theory=theory, operation=operation,
                   theory_directives=theory_directives,
                   alternate_directives=alternate_directives)

    @classmethod
    def dft_task(cls, mol, xc="b3lyp", **kwargs):
        """
        A class method for quickly creating DFT tasks with optional
        cosmo parameter .

        Args:
            mol: Input molecule
            xc: Exchange correlation to use.
            \\*\\*kwargs: Any of the other kwargs supported by NwTask. Note the
                theory is always "dft" for a dft task.
        """
        t = cls.from_molecule(mol, theory="dft", **kwargs)
        t.theory_directives.update({"xc": xc,
                                    "mult": t.spin_multiplicity})
        return t

    @classmethod
    def esp_task(cls, mol, **kwargs):
        """
        A class method for quickly creating ESP tasks with RESP
        charge fitting.

        Args:
            mol: Input molecule
            \\*\\*kwargs: Any of the other kwargs supported by NwTask. Note the
                theory is always "esp" for an esp task.
        """
        return cls.from_molecule(mol, theory="esp", **kwargs)
class NwInput(MSONable):
    """
    An object representing a Nwchem input file, which is essentially a list
    of tasks on a particular molecule.
    Args:
        mol: Input molecule. If molecule is a single string, it is used as a
            direct input to the geometry section of the Gaussian input
            file.
        tasks: List of NwTasks.
        directives: List of root level directives as tuple. E.g.,
            [("start", "water"), ("print", "high")]
        geometry_options: Additional list of options to be supplied to the
            geometry. E.g., ["units", "angstroms", "noautoz"]. Defaults to
            ("units", "angstroms").
        symmetry_options: Addition list of option to be supplied to the
            symmetry. E.g. ["c1"] to turn off the symmetry
        memory_options: Memory controlling options. str.
            E.g "total 1000 mb stack 400 mb"
    """
    def __init__(self, mol, tasks, directives=None,
                 geometry_options=("units", "angstroms"),
                 symmetry_options=None,
                 memory_options=None):
        self._mol = mol
        # Use a fresh list when no directives given (avoids a shared mutable default).
        self.directives = directives if directives is not None else []
        self.tasks = tasks
        self.geometry_options = geometry_options
        self.symmetry_options = symmetry_options
        self.memory_options = memory_options
    @property
    def molecule(self):
        """
        Returns molecule associated with this GaussianInput.
        """
        return self._mol
    def __str__(self):
        # Assemble the NWChem input deck section by section:
        # memory, root directives, geometry block, then one entry per task.
        o = []
        if self.memory_options:
            o.append('memory ' + self.memory_options)
        for d in self.directives:
            o.append("{} {}".format(d[0], d[1]))
        o.append("geometry "
                 + " ".join(self.geometry_options))
        if self.symmetry_options:
            o.append(" symmetry " + " ".join(self.symmetry_options))
        for site in self._mol:
            o.append(" {} {} {} {}".format(site.specie.symbol, site.x, site.y,
                                           site.z))
        o.append("end\n")
        for t in self.tasks:
            o.append(str(t))
            o.append("")
        return "\n".join(o)
    def write_file(self, filename):
        """Write the input deck to `filename` (zopen supports gzipped paths)."""
        with zopen(filename, "w") as f:
            f.write(self.__str__())
    def as_dict(self):
        """Return a JSON-serializable dict representation of this input."""
        return {
            "mol": self._mol.as_dict(),
            "tasks": [t.as_dict() for t in self.tasks],
            "directives": [list(t) for t in self.directives],
            "geometry_options": list(self.geometry_options),
            "symmetry_options": self.symmetry_options,
            "memory_options": self.memory_options
        }
    @classmethod
    def from_dict(cls, d):
        """Reconstruct an NwInput from a dict produced by as_dict()."""
        return NwInput(Molecule.from_dict(d["mol"]),
                       tasks=[NwTask.from_dict(dt) for dt in d["tasks"]],
                       directives=[tuple(li) for li in d["directives"]],
                       geometry_options=d["geometry_options"],
                       symmetry_options=d["symmetry_options"],
                       memory_options=d["memory_options"])
    @classmethod
    def from_string(cls, string_input):
        """
        Read an NwInput from a string. Currently tested to work with
        files generated from this class itself.

        Note: `mol` is only assigned inside the geometry branch, so an input
        with no geometry section raises NameError at the final return.
        Args:
            string_input: string_input to parse.
        Returns:
            NwInput object
        """
        directives = []
        tasks = []
        charge = None
        spin_multiplicity = None
        title = None
        basis_set = None
        basis_set_option = None
        theory_directives = {}
        geom_options = None
        symmetry_options = None
        memory_options = None
        lines = string_input.strip().split("\n")
        # Consume lines one at a time; each keyword may pull further lines
        # for its own block (geometry, basis, theory directives).
        while len(lines) > 0:
            l = lines.pop(0).strip()
            if l == "":
                continue
            toks = l.split()
            if toks[0].lower() == "geometry":
                geom_options = toks[1:]
                l = lines.pop(0).strip()
                toks = l.split()
                if toks[0].lower() == "symmetry":
                    symmetry_options = toks[1:]
                    l = lines.pop(0).strip()
                # Parse geometry
                species = []
                coords = []
                while l.lower() != "end":
                    toks = l.split()
                    species.append(toks[0])
                    coords.append([float(i) for i in toks[1:]])
                    l = lines.pop(0).strip()
                mol = Molecule(species, coords)
            elif toks[0].lower() == "charge":
                charge = int(toks[1])
            elif toks[0].lower() == "title":
                # Drop the leading "title" keyword (5 chars) and quotes.
                title = l[5:].strip().strip("\"")
            elif toks[0].lower() == "basis":
                # Parse basis sets
                l = lines.pop(0).strip()
                basis_set = {}
                while l.lower() != "end":
                    toks = l.split()
                    basis_set[toks[0]] = toks[-1].strip("\"")
                    l = lines.pop(0).strip()
            elif toks[0].lower() in NwTask.theories:
                # read the basis_set_option
                if len(toks) > 1:
                    basis_set_option = toks[1]
                # Parse theory directives.
                theory = toks[0].lower()
                l = lines.pop(0).strip()
                theory_directives[theory] = {}
                while l.lower() != "end":
                    toks = l.split()
                    theory_directives[theory][toks[0]] = toks[-1]
                    if toks[0] == "mult":
                        # NOTE(review): mult is parsed as float here, although a
                        # spin multiplicity is integral — confirm NwTask accepts it.
                        spin_multiplicity = float(toks[1])
                    l = lines.pop(0).strip()
            elif toks[0].lower() == "task":
                # A task line closes over all state accumulated so far.
                tasks.append(
                    NwTask(charge=charge,
                           spin_multiplicity=spin_multiplicity,
                           title=title, theory=toks[1],
                           operation=toks[2], basis_set=basis_set,
                           basis_set_option=basis_set_option,
                           theory_directives=theory_directives.get(toks[1])))
            elif toks[0].lower() == "memory":
                memory_options = ' '.join(toks[1:])
            else:
                # Anything unrecognized is kept as a root-level directive.
                directives.append(l.strip().split())
        return NwInput(mol, tasks=tasks, directives=directives,
                       geometry_options=geom_options,
                       symmetry_options=symmetry_options,
                       memory_options=memory_options)
    @classmethod
    def from_file(cls, filename):
        """
        Read an NwInput from a file. Currently tested to work with
        files generated from this class itself.
        Args:
            filename: Filename to parse.
        Returns:
            NwInput object
        """
        with zopen(filename) as f:
            return cls.from_string(f.read())
class NwInputError(Exception):
    """Exception raised for problems with an NwInput."""
class NwOutput:
    """
    A Nwchem output file parser. Very basic for now - supports only dft and
    only parses energies and geometries. Please note that Nwchem typically
    outputs energies in either au or kJ/mol. All energies are converted to
    eV in the parser.
    Args:
        filename: Filename to read.
    """
    def __init__(self, filename):
        # Keep the path for reference; the whole file is read eagerly.
        self.filename = filename
        with zopen(filename) as f:
            data = f.read()
        # Each "NWChem Input Module" banner starts a new job section.
        chunks = re.split(r"NWChem Input Module", data)
        # The trailing CITATION block is not a job; discard it.
        if re.search(r"CITATION", chunks[-1]):
            chunks.pop()
        preamble = chunks.pop(0)
        self.raw = data
        self.job_info = self._parse_preamble(preamble)
        # One parsed-data dict per job in the output file.
        self.data = [self._parse_job(c) for c in chunks]
    def parse_tddft(self):
        """
        Parses TDDFT roots. Adapted from nw_spectrum.py script.
        Returns:
            {
                "singlet": [
                    {
                        "energy": float,
                        "osc_strength: float
                    }
                ],
                "triplet": [
                    {
                        "energy": float
                    }
                ]
            }
        """
        start_tag = "Convergence criterion met"
        end_tag = "Excited state energy"
        singlet_tag = "singlet excited"
        triplet_tag = "triplet excited"
        state = "singlet"
        inside = False  # true when we are inside output block
        lines = self.raw.split("\n")
        roots = {"singlet": [], "triplet": []}
        # Line-by-line state machine over the raw output text.
        while lines:
            line = lines.pop(0).strip()
            if start_tag in line:
                inside = True
            elif end_tag in line:
                inside = False
            elif singlet_tag in line:
                state = "singlet"
            elif triplet_tag in line:
                state = "triplet"
            elif inside and "Root" in line and "eV" in line:
                # "Root N ... X.XXX eV" — second-to-last token is the energy in eV.
                toks = line.split()
                roots[state].append({"energy": float(toks[-2])})
            elif inside and "Dipole Oscillator Strength" in line:
                # Attach the oscillator strength to the most recent root.
                osc = float(line.split()[-1])
                roots[state][-1]["osc_strength"] = osc
        return roots
    def get_excitation_spectrum(self, width=0.1, npoints=2000):
        """
        Generate an excitation spectra from the singlet roots of TDDFT
        calculations.
        Args:
            width (float): Width for Gaussian smearing.
            npoints (int): Number of energy points. More points => smoother
                curve.
        Returns:
            (ExcitationSpectrum) which can be plotted using
            pymatgen.vis.plotters.SpectrumPlotter.
        """
        roots = self.parse_tddft()
        data = roots["singlet"]
        en = np.array([d["energy"] for d in data])
        osc = np.array([d["osc_strength"] for d in data])
        # Pad the energy window beyond the first/last root.
        epad = 20.0 * width
        emin = en[0] - epad
        emax = en[-1] + epad
        de = (emax - emin) / npoints
        # Use width of at least two grid points
        if width < 2 * de:
            width = 2 * de
        energies = [emin + ie * de for ie in range(npoints)]
        cutoff = 20.0 * width
        # Lorentzian broadening: gamma is the half-width.
        gamma = 0.5 * width
        gamma_sqrd = gamma * gamma
        de = (energies[-1] - energies[0]) / (len(energies) - 1)
        prefac = gamma / np.pi * de
        x = []
        y = []
        for energy in energies:
            xx0 = energy - en
            stot = osc / (xx0 * xx0 + gamma_sqrd)
            # Only roots within `cutoff` of this grid point contribute.
            t = np.sum(stot[np.abs(xx0) <= cutoff])
            x.append(energy)
            y.append(t * prefac)
        return ExcitationSpectrum(x, y)
    def _parse_preamble(self, preamble):
        # Collect "key = value" lines from the header into a dict.
        info = {}
        for l in preamble.split("\n"):
            toks = l.split("=")
            if len(toks) > 1:
                info[toks[0].strip()] = toks[-1].strip()
        return info
    def __iter__(self):
        # Iterate over the per-job data dicts.
        return self.data.__iter__()
    def __getitem__(self, ind):
        # Index into the per-job data dicts.
        return self.data[ind]
    def __len__(self):
        # Number of jobs parsed from the output.
        return len(self.data)
    def _parse_job(self, output):
        """Parse one job section of the output into a dict of results.

        Implemented as a single pass over the lines with boolean flags
        selecting which sub-parser (geometry, forces, frequencies, basis
        set, hessians) is currently active.
        """
        energy_patt = re.compile(r'Total \w+ energy\s+=\s+([.\-\d]+)')
        energy_gas_patt = re.compile(r'gas phase energy\s+=\s+([.\-\d]+)')
        energy_sol_patt = re.compile(r'sol phase energy\s+=\s+([.\-\d]+)')
        coord_patt = re.compile(r'\d+\s+(\w+)\s+[.\-\d]+\s+([.\-\d]+)\s+'
                                r'([.\-\d]+)\s+([.\-\d]+)')
        lat_vector_patt = re.compile(r'a[123]=<\s+([.\-\d]+)\s+'
                                     r'([.\-\d]+)\s+([.\-\d]+)\s+>')
        corrections_patt = re.compile(r'([\w\-]+ correction to \w+)\s+='
                                      r'\s+([.\-\d]+)')
        preamble_patt = re.compile(r'(No. of atoms|No. of electrons'
                                   r'|SCF calculation type|Charge|Spin '
                                   r'multiplicity)\s*:\s*(\S+)')
        force_patt = re.compile(r'\s+(\d+)\s+(\w+)' + 6 * r'\s+([0-9\.\-]+)')
        time_patt = re.compile(
            r'\s+ Task \s+ times \s+ cpu: \s+ ([.\d]+)s .+ ', re.VERBOSE)
        # Substrings that indicate known failure modes -> human-readable labels.
        error_defs = {
            "calculations not reaching convergence": "Bad convergence",
            "Calculation failed to converge": "Bad convergence",
            "geom_binvr: #indep variables incorrect": "autoz error",
            "dft optimize failed": "Geometry optimization failed"}
        def fort2py(x):
            # Fortran exponent notation (1.0D-3) -> Python (1.0e-3).
            return x.replace("D", "e")
        def isfloatstring(s):
            # NOTE: despite the name, returns True when `s` contains NO decimal
            # point (i.e. is *not* a float literal); used to skip index-only rows.
            return s.find(".") == -1
        parse_hess = False
        parse_proj_hess = False
        hessian = None
        projected_hessian = None
        parse_force = False
        all_forces = []
        forces = []
        data = {}
        energies = []
        frequencies = None
        normal_frequencies = None
        corrections = {}
        molecules = []
        structures = []
        species = []
        coords = []
        lattice = []
        errors = []
        basis_set = {}
        bset_header = []
        parse_geom = False
        parse_freq = False
        parse_bset = False
        parse_projected_freq = False
        job_type = ""
        parse_time = False
        time = 0
        for l in output.split("\n"):
            # Error scanning happens on every line, regardless of parser state.
            for e, v in error_defs.items():
                if l.find(e) != -1:
                    errors.append(v)
            if parse_time:
                m = time_patt.search(l)
                if m:
                    # Stored as the matched string, not converted to float.
                    time = m.group(1)
                    parse_time = False
            if parse_geom:
                # "Atomic Mass" marks the end of the coordinate table.
                if l.strip() == "Atomic Mass":
                    if lattice:
                        structures.append(Structure(lattice, species, coords,
                                                    coords_are_cartesian=True))
                    else:
                        molecules.append(Molecule(species, coords))
                    species = []
                    coords = []
                    lattice = []
                    parse_geom = False
                else:
                    m = coord_patt.search(l)
                    if m:
                        species.append(m.group(1).capitalize())
                        coords.append([float(m.group(2)), float(m.group(3)),
                                       float(m.group(4))])
                    m = lat_vector_patt.search(l)
                    if m:
                        lattice.append([float(m.group(1)), float(m.group(2)),
                                        float(m.group(3))])
            if parse_force:
                m = force_patt.search(l)
                if m:
                    # Last three captured groups are the force components.
                    forces.extend(map(float, m.groups()[5:]))
                elif len(forces) > 0:
                    # First non-matching line closes the current force block.
                    all_forces.append(forces)
                    forces = []
                    parse_force = False
            elif parse_freq:
                if len(l.strip()) == 0:
                    # Blank line: skip if no displacements collected yet,
                    # otherwise the frequency block is complete.
                    if len(normal_frequencies[-1][1]) == 0:
                        continue
                    else:
                        parse_freq = False
                else:
                    vibs = [float(vib) for vib in l.strip().split()[1:]]
                    num_vibs = len(vibs)
                    for mode, dis in zip(normal_frequencies[-num_vibs:], vibs):
                        mode[1].append(dis)
            elif parse_projected_freq:
                if len(l.strip()) == 0:
                    if len(frequencies[-1][1]) == 0:
                        continue
                    else:
                        parse_projected_freq = False
                else:
                    vibs = [float(vib) for vib in l.strip().split()[1:]]
                    num_vibs = len(vibs)
                    for mode, dis in zip(
                            frequencies[-num_vibs:], vibs):
                        mode[1].append(dis)
            elif parse_bset:
                if l.strip() == "":
                    parse_bset = False
                else:
                    toks = l.split()
                    if toks[0] != "Tag" and not re.match(r"-+", toks[0]):
                        basis_set[toks[0]] = dict(zip(bset_header[1:],
                                                      toks[1:]))
                    elif toks[0] == "Tag":
                        # Header row defines the column names for the table.
                        bset_header = toks
                        bset_header.pop(4)
                        bset_header = [h.lower() for h in bset_header]
            elif parse_hess:
                if l.strip() == "":
                    continue
                if len(hessian) > 0 and l.find("----------") != -1:
                    parse_hess = False
                    continue
                toks = l.strip().split()
                if len(toks) > 1:
                    try:
                        row = int(toks[0])
                    except Exception:
                        continue
                    if isfloatstring(toks[1]):
                        continue
                    vals = [float(fort2py(x)) for x in toks[1:]]
                    # The (lower-triangular) hessian is printed in column blocks;
                    # extend existing rows on subsequent blocks.
                    if len(hessian) < row:
                        hessian.append(vals)
                    else:
                        hessian[row - 1].extend(vals)
            elif parse_proj_hess:
                if l.strip() == "":
                    continue
                nat3 = len(hessian)
                toks = l.strip().split()
                if len(toks) > 1:
                    try:
                        row = int(toks[0])
                    except Exception:
                        continue
                    if isfloatstring(toks[1]):
                        continue
                    vals = [float(fort2py(x)) for x in toks[1:]]
                    if len(projected_hessian) < row:
                        projected_hessian.append(vals)
                    else:
                        projected_hessian[row - 1].extend(vals)
                    # Done once the last row has one entry per 3N coordinate.
                    if len(projected_hessian[-1]) == nat3:
                        parse_proj_hess = False
            else:
                # Default state: look for energies, headers that switch on one
                # of the sub-parsers above, or correction terms.
                m = energy_patt.search(l)
                if m:
                    energies.append(Energy(m.group(1), "Ha").to("eV"))
                    parse_time = True
                    continue
                m = energy_gas_patt.search(l)
                if m:
                    # COSMO job: replace the scalar energy with a dict of parts.
                    cosmo_scf_energy = energies[-1]
                    energies[-1] = dict()
                    energies[-1].update({"cosmo scf": cosmo_scf_energy})
                    energies[-1].update({"gas phase": Energy(m.group(1), "Ha").to("eV")})
                m = energy_sol_patt.search(l)
                if m:
                    energies[-1].update({"sol phase": Energy(m.group(1), "Ha").to("eV")})
                m = preamble_patt.search(l)
                if m:
                    try:
                        val = int(m.group(2))
                    except ValueError:
                        val = m.group(2)
                    # e.g. "No. of atoms" -> "natoms".
                    k = m.group(1).replace("No. of ", "n").replace(" ", "_")
                    data[k.lower()] = val
                elif l.find("Geometry \"geometry\"") != -1:
                    parse_geom = True
                elif l.find("Summary of \"ao basis\"") != -1:
                    parse_bset = True
                elif l.find("P.Frequency") != -1:
                    parse_projected_freq = True
                    if frequencies is None:
                        frequencies = []
                    toks = l.strip().split()[1:]
                    frequencies.extend([(float(freq), []) for freq in toks])
                elif l.find("Frequency") != -1:
                    toks = l.strip().split()
                    if len(toks) > 1 and toks[0] == "Frequency":
                        parse_freq = True
                        if normal_frequencies is None:
                            normal_frequencies = []
                        normal_frequencies.extend([(float(freq), []) for freq
                                                   in l.strip().split()[1:]])
                elif l.find("MASS-WEIGHTED NUCLEAR HESSIAN") != -1:
                    parse_hess = True
                    if not hessian:
                        hessian = []
                elif l.find("MASS-WEIGHTED PROJECTED HESSIAN") != -1:
                    parse_proj_hess = True
                    if not projected_hessian:
                        projected_hessian = []
                elif l.find("atom coordinates gradient") != -1:
                    parse_force = True
                elif job_type == "" and l.strip().startswith("NWChem"):
                    # First "NWChem ..." banner names the module that ran.
                    job_type = l.strip()
                    if job_type == "NWChem DFT Module" and \
                            "COSMO solvation results" in output:
                        job_type += " COSMO"
                else:
                    m = corrections_patt.search(l)
                    if m:
                        corrections[m.group(1)] = FloatWithUnit(
                            m.group(2), "kJ mol^-1").to("eV atom^-1")
        # Post-processing: group flat displacement lists into (x, y, z) triples.
        if frequencies:
            for freq, mode in frequencies:
                mode[:] = zip(*[iter(mode)] * 3)
        if normal_frequencies:
            for freq, mode in normal_frequencies:
                mode[:] = zip(*[iter(mode)] * 3)
        # Symmetrize: fill the upper triangle from the lower-triangular data.
        if hessian:
            n = len(hessian)
            for i in range(n):
                for j in range(i + 1, n):
                    hessian[i].append(hessian[j][i])
        if projected_hessian:
            n = len(projected_hessian)
            for i in range(n):
                for j in range(i + 1, n):
                    projected_hessian[i].append(projected_hessian[j][i])
        data.update({"job_type": job_type, "energies": energies,
                     "corrections": corrections,
                     "molecules": molecules,
                     "structures": structures,
                     "basis_set": basis_set,
                     "errors": errors,
                     "has_error": len(errors) > 0,
                     "frequencies": frequencies,
                     "normal_frequencies": normal_frequencies,
                     "hessian": hessian,
                     "projected_hessian": projected_hessian,
                     "forces": all_forces,
                     "task_time": time})
        return data
|
fraricci/pymatgen
|
pymatgen/io/nwchem.py
|
Python
|
mit
| 35,922
|
[
"Gaussian",
"NWChem",
"pymatgen"
] |
7c6a362daf6b62e14aba6b5ef42fc41d6ff76ac7ed01f5a15e79e1c79ecf0f86
|
'''
Created on Jul 21, 2011
@author: mkiyer
'''
'''
Created on Jun 4, 2011
@author: mkiyer
chimerascan: chimeric transcript discovery using RNA-seq
Copyright (C) 2011 Matthew Iyer
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import logging
import os
import collections
import itertools
import operator
from chimerascan import pysam
from chimerascan.lib import config
from chimerascan.lib.chimera import Chimera, \
DiscordantTags, DISCORDANT_TAG_NAME, \
OrientationTags, ORIENTATION_TAG_NAME, \
DiscordantRead, ChimeraTypes, ChimeraPartner
from chimerascan.lib.gene_to_genome import build_tid_tx_maps
def parse_pairs(bamfh):
    """Yield consecutive (r1, r2) read pairs from a BAM file iterator.

    Reads are consumed two at a time in file order; a trailing unpaired
    read (odd count) is silently dropped.

    Args:
        bamfh: any iterable of reads (e.g. an open pysam Samfile).

    Yields:
        (r1, r2) tuples of consecutive reads.
    """
    bam_iter = iter(bamfh)
    try:
        while True:
            # FIX: use the next() builtin (works on Python 2.6+ and 3.x)
            # instead of the Python-2-only iterator.next() method.
            r1 = next(bam_iter)
            r2 = next(bam_iter)
            yield r1, r2
    except StopIteration:
        # End of input: stop cleanly (also swallows a final unpaired read).
        pass
def parse_gene_chimeric_reads(bamfh):
    """Group discordant gene-gene read pairs by (5' tid, 3' tid).

    Yields (tid5p, tid3p, pairs) where pairs is a list of
    (DiscordantRead 5', DiscordantRead 3') tuples.
    Python 2 only (uses dict.iteritems()).
    """
    # create a dictionary structure to hold read pairs
    chimera_dict = collections.defaultdict(lambda: [])
    for r1,r2 in parse_pairs(bamfh):
        #
        # TODO:
        # for now we are only going to deal with gene-gene
        # chimeras and leave other chimeras for study at a
        # later time
        #
        dr1 = r1.opt(DISCORDANT_TAG_NAME)
        dr2 = r2.opt(DISCORDANT_TAG_NAME)
        # Skip any pair that is not discordant at the gene level on both mates.
        if (dr1 != DiscordantTags.DISCORDANT_GENE or
            dr2 != DiscordantTags.DISCORDANT_GENE):
            continue
        # organize key in 5' to 3' order
        or1 = r1.opt(ORIENTATION_TAG_NAME)
        or2 = r2.opt(ORIENTATION_TAG_NAME)
        # Exactly one mate should carry the 5' orientation tag.
        assert or1 != or2
        if or1 == OrientationTags.FIVEPRIME:
            pair = (r1,r2)
        else:
            pair = (r2,r1)
        # store pertinent information in lightweight structure
        # convert to DiscordantRead objects
        r5p = DiscordantRead.from_read(pair[0])
        r3p = DiscordantRead.from_read(pair[1])
        # keep list of discordant chimeric reads
        chimera_dict[(r5p.tid, r3p.tid)].append((r5p,r3p))
    for key,pairs in chimera_dict.iteritems():
        rname1,rname2 = key
        yield rname1, rname2, pairs
def get_chimera_type(fiveprime_gene, threeprime_gene, gene_trees):
    """
    return tuple containing ChimeraType and distance
    between 5' and 3' genes
    """
    # get gene information
    chrom5p, start5p, end5p, strand1 = fiveprime_gene.chrom, fiveprime_gene.tx_start, fiveprime_gene.tx_end, fiveprime_gene.strand
    chrom3p, start3p, end3p, strand2 = threeprime_gene.chrom, threeprime_gene.tx_start, threeprime_gene.tx_end, threeprime_gene.strand
    # interchromosomal
    if chrom5p != chrom3p:
        return ChimeraTypes.INTERCHROMOSOMAL, None
    # orientation
    same_strand = strand1 == strand2
    # genes on same chromosome so check overlap
    is_overlapping = (start5p < end3p) and (start3p < end5p)
    if is_overlapping:
        if not same_strand:
            if ((start5p <= start3p and strand1 == "+") or
                (start5p > start3p and strand1 == "-")):
                return (ChimeraTypes.OVERLAP_CONVERGE, 0)
            else:
                return (ChimeraTypes.OVERLAP_DIVERGE, 0)
        else:
            if ((start5p <= start3p and strand1 == "+") or
                (end5p >= end3p and strand1 == "-")):
                return (ChimeraTypes.OVERLAP_SAME, 0)
            else:
                return (ChimeraTypes.OVERLAP_COMPLEX, 0)
    # if code gets here then the genes are on the same chromosome but do not
    # overlap. first calculate distance (minimum distance between genes)
    if start5p <= start3p:
        distance = start3p - end5p
        between_start,between_end = end5p,start3p
    else:
        distance = end3p - start5p
        between_start,between_end = end3p,start5p
    # check whether there are genes intervening between the
    # chimera candidates
    genes_between = []
    genes_between_same_strand = []
    for hit in gene_trees[chrom5p].find(between_start,
                                        between_end):
        if (hit.start > between_start and
            hit.end < between_end):
            if hit.strand == strand1:
                genes_between_same_strand.append(hit)
            genes_between.append(hit)
    if same_strand:
        if len(genes_between_same_strand) == 0:
            return ChimeraTypes.READTHROUGH, distance
        else:
            return ChimeraTypes.INTRACHROMOSOMAL, distance
    else:
        # check for reads between neighboring genes
        if len(genes_between) == 0:
            # NOTE(review): the third and fourth conditions below duplicate the
            # first and second respectively, so the ADJ_SAME and ADJ_COMPLEX
            # branches appear unreachable — confirm the intended predicates.
            if ((start5p <= start3p and strand1 == "+") or
                (start5p > start3p and strand1 == "-")):
                return (ChimeraTypes.ADJ_CONVERGE, distance)
            elif ((start5p >= start3p and strand1 == "+") or
                  (start5p < start3p and strand1 == "-")):
                return (ChimeraTypes.ADJ_DIVERGE, distance)
            elif ((start5p <= start3p and strand1 == "+") or
                  (start5p > start3p and strand1 == "-")):
                return (ChimeraTypes.ADJ_SAME, distance)
            elif ((start5p >= start3p and strand1 == "+") or
                  (start5p < start3p and strand1 == '-')):
                return (ChimeraTypes.ADJ_COMPLEX, distance)
        else:
            return ChimeraTypes.INTRA_COMPLEX, distance
    # Fallback when no category above matched.
    return ChimeraTypes.UNKNOWN, distance
def read_pairs_to_chimera(chimera_name, tid5p, tid3p, readpairs,
                          tid_tx_map, genome_tx_trees, trim_bp):
    """Build a Chimera object from the discordant read pairs of one gene pair.

    Python 2 only (uses itertools.imap).
    """
    # get gene information
    tx5p = tid_tx_map[tid5p]
    tx3p = tid_tx_map[tid3p]
    # categorize chimera type
    chimera_type, distance = get_chimera_type(tx5p, tx3p, genome_tx_trees)
    # create chimera object
    c = Chimera()
    # Lazily project out the 5' and 3' reads from each (r5p, r3p) pair.
    iter5p = itertools.imap(operator.itemgetter(0), readpairs)
    iter3p = itertools.imap(operator.itemgetter(1), readpairs)
    c.partner5p = ChimeraPartner.from_discordant_reads(iter5p, tx5p, trim_bp)
    c.partner3p = ChimeraPartner.from_discordant_reads(iter3p, tx3p, trim_bp)
    c.name = chimera_name
    c.chimera_type = chimera_type
    c.distance = distance
    # raw reads
    c.encomp_read_pairs = readpairs
    return c
def nominate_chimeras(index_dir, input_bam_file, output_file, trim_bp):
    """Scan a discordant-read BAM and write candidate chimeras to a text file.

    One tab-delimited Chimera record per 5'/3' gene pair, named C0000000,
    C0000001, ... in discovery order. Python 2 only (print >> syntax).
    """
    logging.debug("Reading gene information")
    gene_file = os.path.join(index_dir, config.GENE_FEATURE_FILE)
    bamfh = pysam.Samfile(input_bam_file, "rb")
    # build a lookup table to get genomic intervals from transcripts
    tid_tx_map, genome_tx_trees = build_tid_tx_maps(bamfh, gene_file,
                                                    rname_prefix=config.GENE_REF_PREFIX)
    # group discordant read pairs by gene
    chimera_num = 0
    outfh = open(output_file, "w")
    logging.debug("Parsing discordant reads")
    for tid5p,tid3p,readpairs in parse_gene_chimeric_reads(bamfh):
        c = read_pairs_to_chimera("C%07d" % (chimera_num), tid5p, tid3p,
                                  readpairs, tid_tx_map,
                                  genome_tx_trees, trim_bp)
        fields = c.to_list()
        chimera_num += 1
        print >>outfh, '\t'.join(map(str, fields))
    outfh.close()
    bamfh.close()
def main():
    """Command-line entry point.

    Usage: %prog [options] <index> <discordant_reads.srt.bedpe> <chimeras.txt>
    """
    from optparse import OptionParser
    logging.basicConfig(level=logging.DEBUG,
                        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    parser = OptionParser("usage: %prog [options] <index> "
                          "<discordant_reads.srt.bedpe> <chimeras.txt>")
    parser.add_option("--trim", dest="trim", type="int",
                      default=config.EXON_JUNCTION_TRIM_BP)
    options, args = parser.parse_args()
    # FIX: exit with a usage message instead of an IndexError when the three
    # required positional arguments are not all supplied.
    if len(args) != 3:
        parser.error("expected 3 arguments: <index> <discordant_reads> <chimeras>")
    index_dir, input_file, output_file = args
    nominate_chimeras(index_dir, input_file, output_file, options.trim)
if __name__ == '__main__':
    main()
|
tectronics/chimerascan
|
chimerascan/deprecated/nominate_chimeras_v0.4.1.py
|
Python
|
gpl-3.0
| 8,443
|
[
"pysam"
] |
2d330529eb4d8c966340890cc65beaf075091ef4b61cb4904314ad6b1e633df4
|
#
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2015, Mostapha Sadeghipour Roudsari <Sadeghipour@gmail.com>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Use this component to set EnergyPlus Simulation Controls such as whether to run certain types of HVAC sizing calculations, etc.
-
Provided by Honeybee 0.0.57
Args:
doZoneSizingCalculation_: Set to "True" to have EnergyPlus do a sizing calculation for the zones. The default is set to "True."
doSystemSizingCalculation_: Set to "True" to have EnergyPlus do a sizing calculation for the HVAC system. The default is set to "True."
doPlantSizingCalculation_: Set to "True" to have EnergyPlus do a sizing calculation for the HVAC plant (boiler and chiller). The default is set to "True", although with ideal air loads, there is no plant as each zone has its own ideal air system and there is no central plant between zones.
runSimForSizingPeriods_: Set to "True" to have EnergyPlus run a simulation for the Sizing periods specified in the IDF. The default is set to "False." By default, the sizing periods are set to the extreme hot and extreme cold weeks of the weather file but a custom ddy file can also be specified with the "Honeybee_Energy Simulation Par" component.
runSimForRunPeriods_: Set to "True" to have EnergyPlus run the simulation for energy use over the entire year of the EPW. The default is set to "True."
maxWarmupDays_: The maximum number of warmup days that you want the energyplus simulation to run before recording result values. The default is set to 25.
        minWarmupDays_: The minimum number of warmup days that you want the energyplus simulation to run before recording result values. The default is set to 6.
Returns:
simControls: A set of simulation controls tha can be plugged into the "Honeybee_Energy Simulation Par" component.
"""
# Grasshopper component metadata (name shown in the UI, version banner, and
# palette placement). Only meaningful inside the Grasshopper/GhPython host.
ghenv.Component.Name = "Honeybee_Simulation Control"
ghenv.Component.NickName = 'simControl'
ghenv.Component.Message = 'VER 0.0.57\nJUL_06_2015'
ghenv.Component.Category = "Honeybee"
ghenv.Component.SubCategory = "09 | Energy | Energy"
#compatibleHBVersion = VER 0.0.56\nFEB_01_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
# Older Grasshopper builds lack AdditionalHelpFromDocStrings; ignore failures.
try: ghenv.Component.AdditionalHelpFromDocStrings = "0"
except: pass
def main(doZoneSizingCalc, doSystemSizingCalc, doPlantSizingCalc,runSimForSizing, runSimForRunPeriods, maxWarmupDays, minWarmupDays):
    """Fill in defaults for unset simulation-control inputs.

    Any argument that is None is replaced by its default (zone/system/plant
    sizing: True; run sizing periods: False; run run-periods: True;
    max warmup days: 25; min warmup days: 6).

    Returns:
        Tuple of the seven resolved control values, in argument order.
    """
    # I will add check for inputs later
    if doZoneSizingCalc is None: doZoneSizingCalc = True
    if doSystemSizingCalc is None: doSystemSizingCalc = True
    if doPlantSizingCalc is None: doPlantSizingCalc = True
    if runSimForSizing is None: runSimForSizing = False
    if runSimForRunPeriods is None: runSimForRunPeriods = True
    # FIX: the original tested the module-level inputs maxWarmupDays_ /
    # minWarmupDays_ here instead of the function parameters, so the
    # defaults were never applied to the values actually returned.
    if maxWarmupDays is None: maxWarmupDays = 25
    if minWarmupDays is None: minWarmupDays = 6
    return doZoneSizingCalc, doSystemSizingCalc, doPlantSizingCalc,runSimForSizing, runSimForRunPeriods, maxWarmupDays, minWarmupDays
# Resolve the (possibly empty) component inputs into the ordered tuple of
# simulation controls consumed by the "Honeybee_Energy Simulation Par" component.
simControls = main(doZoneSizingCalculation_,
                   doSystemSizingCalculation_,
                   doPlantSizingCalculation_,
                   runSimForSizingPeriods_,
                   runSimForRunPeriods_, maxWarmupDays_, minWarmupDays_)
|
samuto/Honeybee
|
src/Honeybee_Simulation Control.py
|
Python
|
gpl-3.0
| 4,014
|
[
"EPW"
] |
2a99f7fab866856e643f3562617ed65aa41c0b334f7f27ba02bb4417d4915e76
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Processors that need to visit each page of the score in one pass.
These are intended for detecting musical elements, where musical context may
span staff systems and pages (e.g. the time signature). Musical elements (e.g.
notes) are added to the `Score` message directly.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from moonlight.score import reader
def create_processors():
  """Yields the single-pass score processors, currently just a ScoreReader."""
  yield reader.ScoreReader()
def process(score):
  """Processes a Score.
  Runs every processor from create_processors() over the score in sequence,
  feeding each processor's output to the next. Notes are added to the
  `Glyph`s where applicable.
  Args:
    score: A `Score` message.
  Returns:
    The resulting `Score` message.
  """
  result = score
  for processor in create_processors():
    result = processor(result)
  return result
|
tensorflow/moonlight
|
moonlight/score_processors.py
|
Python
|
apache-2.0
| 1,382
|
[
"VisIt"
] |
31439d36a30e7248155267849bff6af954ddbdd3898e3e7b902b54058742a477
|
#!/usr/bin/env python
"""
From lammps logs file(s), finds key output such as system energy and temperature
"""
from __future__ import print_function
import argparse
import os
import sys
import re
from md_utils.md_common import (InvalidDataError, warning, file_rows_to_list, IO_ERROR, GOOD_RET, INPUT_ERROR,
INVALID_DATA, get_fname_root, create_out_fname, write_csv)
try:
# noinspection PyCompatibility
from ConfigParser import ConfigParser, NoSectionError
except ImportError:
# noinspection PyCompatibility
from configparser import ConfigParser, NoSectionError
__author__ = 'hmayes'
# Constants #
# For log file processing
SEC_TIMESTEP = 'timestep'
# For evb processing and output
FILE_NAME = 'filename'
TIMESTEP = 'timestep'
# Marks the start of a thermo block in the LAMMPS log (multi-line style).
STEP_PAT = re.compile(r"^---------------- Step.*")
# Thermo keyword labels as printed by LAMMPS.
TOTENG = 'TotEng'
POTENG = 'PotEng'
E_DIHED = 'E_dihed'
E_COUL = 'E_coul'
KINENG = 'KinEng'
E_BOND = 'E_bond'
E_IMPRO = 'E_impro'
E_LONG = 'E_long'
TEMP = 'Temp'
E_ANGL = 'E_angl'
E_VDWL = 'E_vdwl'
PRESS = 'Press'
# Column order for the summary CSV written by process_log_files.
LOG_FIELDNAMES = [FILE_NAME, TIMESTEP, TOTENG, POTENG, E_DIHED, E_COUL,
                  KINENG, E_BOND, E_IMPRO, E_LONG,
                  TEMP, E_ANGL, E_VDWL, PRESS, ]
def parse_cmdline(argv):
    """
    Returns the parsed argument list and return code.
    `argv` is a list of arguments, or `None` for ``sys.argv[1:]``.
    """
    if argv is None:
        argv = sys.argv[1:]
    # initialize the parser object:
    parser = argparse.ArgumentParser(description='For each timestep, gather the energy information output by LAMMPS '
                                                 'in the log file.')
    parser.add_argument("-f", "--file", help="The log file to be processed.",
                        default=None)
    parser.add_argument("-l", "--list_file", help="The a file with a list of log files to be processes.",
                        default=None)
    args = None
    try:
        args = parser.parse_args(argv)
        if args.file is None:
            args.file_list = []
        else:
            if os.path.isfile(args.file):
                args.file_list = [args.file]
                args.source_name = args.file
            else:
                raise IOError("Could not find specified log file: {}".format(args.file))
        # When both -f and -l are given, the list file wins as source_name.
        if args.list_file is not None:
            args.file_list += file_rows_to_list(args.list_file)
            args.source_name = args.list_file
        if len(args.file_list) < 1:
            raise InvalidDataError("Found no log file names to process. Specify one or more files as specified in "
                                   "the help documentation ('-h').")
    except IOError as e:
        warning("Problems reading file:", e)
        parser.print_help()
        return args, IO_ERROR
    except (KeyError, InvalidDataError, SystemExit) as e:
        # argparse raises SystemExit(code=0) for -h/--help; treat as success.
        if hasattr(e, 'code') and e.code == 0:
            return args, GOOD_RET
        warning(e)
        parser.print_help()
        return args, INPUT_ERROR
    return args, GOOD_RET
def process_log(log_file):
    """
    Gather key info from lammps log file
    @param log_file: name of log file
    @return: lists of dicts of key data extracted; 1 dict per timestep
    """
    # Each thermo row starts with one of these tokens; columns 2, 5, 8 of the
    # split line hold the values for the three keys in the mapped tuple.
    key_groups = {TOTENG: (TOTENG, KINENG, TEMP),
                  POTENG: (POTENG, E_BOND, E_ANGL),
                  E_DIHED: (E_DIHED, E_IMPRO, E_VDWL),
                  E_COUL: (E_COUL, E_LONG, PRESS),
                  }
    summaries = []
    base_name = get_fname_root(log_file)
    with open(log_file) as log_fh:
        in_step_block = False
        current = {}
        for raw_line in log_fh:
            stripped = raw_line.strip()
            if STEP_PAT.match(stripped):
                # New thermo block: record the source file and timestep.
                in_step_block = True
                current[FILE_NAME] = base_name
                current[TIMESTEP] = int(stripped.split()[2])
            elif in_step_block:
                if len(stripped) == 0:
                    break
                tokens = stripped.split()
                group = key_groups.get(tokens[0])
                if group is None:
                    # Unrecognized leading token: thermo output is over
                    # (normal or abnormal termination), so stop reading.
                    break
                for offset, key in enumerate(group):
                    current[key] = float(tokens[2 + offset * 3])
                if tokens[0] == E_COUL:
                    # E_coul is the final row of a block; snapshot the step.
                    summaries.append(dict(current))
    return summaries
def process_log_files(source_name, log_file_list):
    """
    Loops through all files and prints output
    @param source_name: the source name to use as the base for creating an outfile name
    @param log_file_list: list of file names to read and process
    """
    result_list = []
    # Output CSV named after the source, e.g. "foo.log" -> "foo_sum.csv".
    out_fname = create_out_fname(source_name, suffix='_sum', ext=".csv")
    for log_file in log_file_list:
        result_list += process_log(log_file)
    if len(result_list) == 0:
        warning("Found no lammps log data to process from: {}".format(source_name))
    else:
        # extrasaction="ignore" drops any dict keys not in LOG_FIELDNAMES.
        write_csv(result_list, out_fname, LOG_FIELDNAMES, extrasaction="ignore")
def main(argv=None):
    """Parse the command line, process the log files, and return an exit code."""
    # Read input
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret
    try:
        process_log_files(args.source_name, args.file_list, )
    except IOError as e:
        warning("Problems reading file:", e)
        return IO_ERROR
    except InvalidDataError as e:
        warning("Problems reading data:", e)
        return INVALID_DATA
    return GOOD_RET  # success
if __name__ == '__main__':
    status = main()
    sys.exit(status)
|
team-mayes/md_utils
|
md_utils/lammps_log_proc.py
|
Python
|
bsd-3-clause
| 5,922
|
[
"LAMMPS"
] |
90d158388f56ce3acd6be4805c0d8bfd5d67bc97529573e936945354a0712ee3
|
#-*- coding: utf-8 -*-
#! /usr/bin/env python
'''
#------------------------------------------------------------
filename: lab10_runTCcheckReLu_spiraldata.py
To check effect of Relu activation function over
Deep neural networks.
This script wants to see Relu activation can mitigate
Gradient Vanishing problem in
A Multi-Hidden Layers Fully Connected Neural Network.
This example data set is using two class spiral data.
Applying the Relu activation to lab7 example
instead of softmax activation
written by Jaewook Kang @ Jan 2018
#------------------------------------------------------------
'''
from os import getcwd
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pandas import DataFrame
from sklearn import metrics
import tensorflow as tf
from tensorflow.contrib.learn.python.learn import learn_io
# reading data set from csv file ==========================
xsize = 2  # input feature dimension: the two spiral coordinates
ysize = 2  # output dimension: two classes, one-hot encoded
data = pd.read_csv('./data/twospirals_N5000.csv')
data.columns = ['xdata1', 'xdata2', 'tdata']
# Shuffle the rows so the training/validation split is not ordered by class.
permutation_index = np.random.permutation(data.index)
permutated_data = data.reindex(permutation_index)
permutated_data.columns = ['xdata1', 'xdata2', 'tdata']
x_data = np.zeros([permutated_data.xdata1.size, xsize])
x_data[:, 0] = permutated_data.xdata1.values
x_data[:, 1] = permutated_data.xdata2.values
t_data = np.zeros([permutated_data.tdata.size, ysize])
# One-hot encode the labels: column 0 is the raw label, column 1 its
# complement.  np.invert(v) + 2 maps 1 -> 0 and 0 -> 1 -- this assumes
# tdata holds only integer 0s and 1s (TODO confirm against the csv).
t_data[:, 0] = permutated_data.tdata.values
t_data[:, 1] = np.invert(permutated_data.tdata.values) + 2
total_size = permutated_data.xdata1.size
# 80/20 train/validation split
training_size = int(np.floor(permutated_data.xdata1.size * 0.8))
validation_size = total_size - training_size
# data dividing
# Use [training_size:] (not [training_size:-1]) so the validation split
# actually contains `validation_size` samples; the original -1 end index
# silently dropped the last shuffled sample.
x_training_data = x_data[0:training_size, :]
t_training_data = t_data[0:training_size, :]
x_validation_data = x_data[training_size:, :]
t_validation_data = t_data[training_size:, :]
# configure training parameters =====================================
# To see mitigation of vanishing gradient problem
learning_rate = 5E-3
training_epochs = 5000
batch_size = 500
display_step = 1  # evaluate train/validation error every `display_step` epochs
total_batch = int(training_size / batch_size)  # minibatches per epoch
# Xavier initialization keeps activation variance roughly constant across
# layers, which helps gradients survive the deep ReLU stack.
weight_init_fn = tf.contrib.layers.xavier_initializer()
# weight_init_fn = tf.contrib.layers.variance_scaling_initializer()
# weight_init_fn = tf.random_normal_initializer()
## for convergence
# learning_rate = 5E-3
# training_epochs = 5000
# batch_size = 500
# display_step = 1
# total_batch = int(training_size / batch_size)
# computational TF graph construction ================================
# Network Parameters
n_hidden_1 = 10  # 1st layer number of neurons
n_hidden_2 = 7   # 2nd layer number of neurons
n_hidden_3 = 7   # 3rd layer number of neurons
n_hidden_4 = 4   # 4th layer number of neurons
n_hidden_5 = 4   # 5th layer number of neurons
num_input = xsize  # two-dimensional input X = [1x2]
num_classes = ysize  # 2 class
# tf Graph input (None = batch dimension fixed at feed time)
X = tf.placeholder(tf.float32, [None, num_input])
Y = tf.placeholder(tf.float32, [None, num_classes])
# Store layers weight & bias
# (the bare string below keeps the previous random_normal initialization
# for reference)
'''
'h1': tf.Variable(tf.random_normal([num_input, n_hidden_1])),
'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
'h3': tf.Variable(tf.random_normal([n_hidden_2, n_hidden_3])),
'h4': tf.Variable(tf.random_normal([n_hidden_3, n_hidden_4])),
'h5': tf.Variable(tf.random_normal([n_hidden_4, n_hidden_5])),
'out':tf.Variable(tf.random_normal([n_hidden_5, num_classes]))
'''
weights = {
    'h1': tf.get_variable(name='h1_weight',
                          shape=[num_input, n_hidden_1],
                          initializer=weight_init_fn),
    'h2': tf.get_variable(name='h2_weight',
                          shape=[n_hidden_1, n_hidden_2],
                          initializer=weight_init_fn),
    'h3': tf.get_variable(name='h3_weight',
                          shape=[n_hidden_2, n_hidden_3],
                          initializer=weight_init_fn),
    'h4': tf.get_variable(name='h4_weight',
                          shape=[n_hidden_3, n_hidden_4],
                          initializer=weight_init_fn),
    'h5': tf.get_variable(name='h5_weight',
                          shape=[n_hidden_4, n_hidden_5],
                          initializer=weight_init_fn),
    'out': tf.get_variable(name='out_weight',
                           shape=[n_hidden_5, num_classes],
                           initializer=weight_init_fn)
}
'''
'b1': tf.Variable(tf.random_normal([n_hidden_1])),
'b2': tf.Variable(tf.random_normal([n_hidden_2])),
'b3': tf.Variable(tf.random_normal([n_hidden_3])),
'b4': tf.Variable(tf.random_normal([n_hidden_4])),
'b5': tf.Variable(tf.random_normal([n_hidden_5])),
'out': tf.Variable(tf.random_normal([num_classes]))
'''
biases = {
    'b1': tf.get_variable(name='b1_bias',
                          shape=[n_hidden_1],
                          initializer=weight_init_fn),
    'b2': tf.get_variable(name='b2_bias',
                          shape=[n_hidden_2],
                          initializer=weight_init_fn),
    'b3': tf.get_variable(name='b3_bias',
                          shape=[n_hidden_3],
                          initializer=weight_init_fn),
    'b4': tf.get_variable(name='b4_bias',
                          shape=[n_hidden_4],
                          initializer=weight_init_fn),
    'b5': tf.get_variable(name='b5_bias',
                          shape=[n_hidden_5],
                          initializer=weight_init_fn),
    'out': tf.get_variable(name='out_bias',
                           shape=[num_classes],
                           initializer=weight_init_fn)
}
# Create model
def neural_net(x):
    """Five hidden fully connected ReLU layers followed by a linear output.

    x: [batch, num_input] float32 tensor; returns [batch, num_classes] logits
    (no softmax here -- it is applied by the loss / prediction ops outside).
    """
    activation = x
    # hidden layers h1..h5 (10, 7, 7, 4, 4 neurons) with ReLU activations
    for idx in range(1, 6):
        w = weights['h%d' % idx]
        b = biases['b%d' % idx]
        activation = tf.nn.relu(tf.add(tf.matmul(activation, w), b))
    # Output fully connected layer with a neuron for each class
    return tf.matmul(activation, weights['out']) + biases['out']
# Construct model
logits = neural_net(X)
prediction = tf.nn.softmax(logits)
# Define loss and optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=Y))
# optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
# optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate,momentum=0.8).minimize(cost)
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# Evaluate model: fraction of samples whose argmax prediction matches the label
correct_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# per-epoch error-rate history for the final train/validation plot
errRatebyTrainingSet = np.zeros(training_epochs)
errRatebyValidationSet = np.zeros(training_epochs)
# Initialize the variables (i.e. assign their default value)
init = tf.global_variables_initializer()
# for visualization of vanishing gradient problem:
# d(cost)/d(W_k) tensors, one per hidden layer
grad_wrt_weight_layer1_tensor = tf.gradients(cost, weights['h1'],
                                             name='grad_wrt_weight_layer1')
grad_wrt_weight_layer2_tensor = tf.gradients(cost, weights['h2'],
                                             name='grad_wrt_weight_layer2')
grad_wrt_weight_layer3_tensor = tf.gradients(cost, weights['h3'],
                                             name='grad_wrt_weight_layer3')
grad_wrt_weight_layer4_tensor = tf.gradients(cost, weights['h4'],
                                             name='grad_wrt_weight_layer4')
grad_wrt_weight_layer5_tensor = tf.gradients(cost, weights['h5'],
                                             name='grad_wrt_weight_layer5')
# per-minibatch mean gradient magnitude, filled in during the final epoch
grad_wrt_weight_layer1_iter = np.zeros([total_batch, 1])
grad_wrt_weight_layer2_iter = np.zeros([total_batch, 1])
grad_wrt_weight_layer3_iter = np.zeros([total_batch, 1])
grad_wrt_weight_layer4_iter = np.zeros([total_batch, 1])
grad_wrt_weight_layer5_iter = np.zeros([total_batch, 1])
# Start training ===============================================
with tf.Session() as sess:
    # Run the initializer
    sess.run(init)
    print("--------------------------------------------")
    for epoch in range(training_epochs):
        avg_cost = 0.
        for i in range(total_batch):
            data_start_index = i * batch_size
            data_end_index = (i + 1) * batch_size
            # feed training data --------------------------
            batch_xs = x_training_data[data_start_index:data_end_index, :]
            batch_ts = t_training_data[data_start_index:data_end_index, :]
            # ----------------------------------------------
            # Run optimization op (backprop) and cost op (to get loss value)
            # feeding training data
            _, local_batch_cost = sess.run([optimizer, cost], feed_dict={X: batch_xs,
                                                                         Y: batch_ts})
            if epoch == training_epochs - 1:
                # Record layer-wise weight gradients (only on the final epoch)
                # to visualize the vanishing-gradient behaviour.
                # NOTE(review): each of these sess.run calls also executes
                # `optimizer`, so five extra parameter updates happen per
                # minibatch here -- presumably unintended; confirm.
                _, grad_wrt_weight_layer1 = sess.run([optimizer, grad_wrt_weight_layer1_tensor], feed_dict={X: batch_xs,
                                                                                                            Y: batch_ts})
                _, grad_wrt_weight_layer2 = sess.run([optimizer, grad_wrt_weight_layer2_tensor], feed_dict={X: batch_xs,
                                                                                                            Y: batch_ts})
                _, grad_wrt_weight_layer3 = sess.run([optimizer, grad_wrt_weight_layer3_tensor], feed_dict={X: batch_xs,
                                                                                                            Y: batch_ts})
                _, grad_wrt_weight_layer4 = sess.run([optimizer, grad_wrt_weight_layer4_tensor], feed_dict={X: batch_xs,
                                                                                                            Y: batch_ts})
                _, grad_wrt_weight_layer5 = sess.run([optimizer, grad_wrt_weight_layer5_tensor], feed_dict={X: batch_xs,
                                                                                                            Y: batch_ts})
                # tf.gradients returns a list; drop the leading singleton axis
                grad_wrt_weight_layer1 = np.array(grad_wrt_weight_layer1)
                grad_wrt_weight_layer2 = np.array(grad_wrt_weight_layer2)
                grad_wrt_weight_layer3 = np.array(grad_wrt_weight_layer3)
                grad_wrt_weight_layer4 = np.array(grad_wrt_weight_layer4)
                grad_wrt_weight_layer5 = np.array(grad_wrt_weight_layer5)
                grad_wrt_weight_layer1 = grad_wrt_weight_layer1.reshape(grad_wrt_weight_layer1.shape[1],
                                                                        grad_wrt_weight_layer1.shape[2])
                grad_wrt_weight_layer2 = grad_wrt_weight_layer2.reshape(grad_wrt_weight_layer2.shape[1],
                                                                        grad_wrt_weight_layer2.shape[2])
                grad_wrt_weight_layer3 = grad_wrt_weight_layer3.reshape(grad_wrt_weight_layer3.shape[1],
                                                                        grad_wrt_weight_layer3.shape[2])
                grad_wrt_weight_layer4 = grad_wrt_weight_layer4.reshape(grad_wrt_weight_layer4.shape[1],
                                                                        grad_wrt_weight_layer4.shape[2])
                grad_wrt_weight_layer5 = grad_wrt_weight_layer5.reshape(grad_wrt_weight_layer5.shape[1],
                                                                        grad_wrt_weight_layer5.shape[2])
                # one scalar per minibatch: the mean gradient of the layer weights
                grad_wrt_weight_layer1_iter[i] = grad_wrt_weight_layer1.mean()
                grad_wrt_weight_layer2_iter[i] = grad_wrt_weight_layer2.mean()
                grad_wrt_weight_layer3_iter[i] = grad_wrt_weight_layer3.mean()
                grad_wrt_weight_layer4_iter[i] = grad_wrt_weight_layer4.mean()
                grad_wrt_weight_layer5_iter[i] = grad_wrt_weight_layer5.mean()
            # Compute average loss
            avg_cost += local_batch_cost / total_batch
            # print ("At %d-th batch in %d-epoch, avg_cost = %f" % (i,epoch,avg_cost) )
        # Display logs per epoch step
        if display_step == 0:
            continue
        elif (epoch + 1) % display_step == 0:
            # print("Iteration:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
            # evaluate error rates on the full training and validation sets
            batch_train_xs = x_training_data
            batch_train_ys = t_training_data
            batch_valid_xs = x_validation_data
            batch_valid_ys = t_validation_data
            errRatebyTrainingSet[epoch] = 1.0 - accuracy.eval({X: batch_train_xs,
                                                               Y: batch_train_ys}, session=sess)
            errRatebyValidationSet[epoch] = 1.0 - accuracy.eval({X: batch_valid_xs,
                                                                 Y: batch_valid_ys}, session=sess)
            print("Training set Err rate: %s" % errRatebyTrainingSet[epoch])
            print("Validation set Err rate: %s" % errRatebyValidationSet[epoch])
            print("--------------------------------------------")
    print("Optimization Finished!")
# Training result visualization ===============================================
# Figure 1: the raw two-spiral data set.  The first half of the csv is
# class0, the second half class1.
hfig1 = plt.figure(1, figsize=[10, 10])
half = int(data.xdata1.size / 2)
plt.scatter(data.xdata1.values[0:half],
            data.xdata2.values[0:half],
            color='b', label='class0')
# Use [half:] so every class1 point is plotted: the original slice
# [half + 2:-1] silently dropped the first two and the last sample.
plt.scatter(data.xdata1.values[half:],
            data.xdata2.values[half:],
            color='r', label='class1')
plt.title('Two Spiral data Example')
plt.legend()
# Figure 2: mean weight gradient per minibatch for selected layers,
# recorded during the final training epoch.
hfig2 = plt.figure(2, figsize=(10, 10))
batch_index = np.arange(total_batch)
plt.plot(batch_index, grad_wrt_weight_layer1_iter, label='layer1', color='b', marker='o')
plt.plot(batch_index, grad_wrt_weight_layer4_iter, label='layer4', color='y', marker='o')
plt.plot(batch_index, grad_wrt_weight_layer5_iter, label='layer5', color='r', marker='o')
plt.legend()
plt.title('Weight Gradient with ReLu over minibatch iter @ training epoch = %s' % training_epochs)
plt.xlabel('minibatch iter')
plt.ylabel('Weight Gradient')
# Figure 3: training vs validation error rate per epoch.
hfig3 = plt.figure(3, figsize=(10, 10))
epoch_index = np.arange(training_epochs)
plt.plot(epoch_index, errRatebyTrainingSet, label='Training data', color='r', marker='o')
plt.plot(epoch_index, errRatebyValidationSet, label='Validation data', color='b', marker='x')
plt.legend()
plt.title('Train/Valid Err')
plt.xlabel('Iteration epoch')
plt.ylabel('error Rate')
plt.show()
|
jwkanggist/EveryBodyTensorFlow
|
lab10_runTFcheckReLu_spiraldata.py
|
Python
|
unlicense
| 14,810
|
[
"NEURON"
] |
c0fb020959ff578483ccc6e0ccbdb8607f71d9671f83c39762ea400ca25563fb
|
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
from __future__ import print_function
import contextlib
import time
import unittest
from astroid import builder
from astroid import nodes
from astroid import parse
from astroid import transforms
@contextlib.contextmanager
def add_transform(manager, node, transform, predicate=None):
    """Temporarily register *transform* for the *node* type on *manager*.

    The transform is unregistered when the with-block exits, even if the
    body raises, so one test cannot leak transforms into another.
    """
    manager.register_transform(node, transform, predicate)
    try:
        yield
    finally:
        manager.unregister_transform(node, transform, predicate)
class TestTransforms(unittest.TestCase):
    """Tests for transforms.TransformVisitor: node replacement, recursion
    into child fields, predicate filtering, and the fact that transforms
    run as a separate step after the tree is fully built."""

    def setUp(self):
        self.transformer = transforms.TransformVisitor()

    def parse_transform(self, code):
        """Parse *code* with transforms disabled, then apply only ours."""
        module = parse(code, apply_transforms=False)
        return self.transformer.visit(module)

    def test_function_inlining_transform(self):
        """A Call transform may replace the call node with its inferred value."""
        def transform_call(node):
            # Let's do some function inlining
            inferred = next(node.infer())
            return inferred

        self.transformer.register_transform(nodes.Call,
                                            transform_call)

        module = self.parse_transform('''
        def test(): return 42
        test() #@
        ''')

        self.assertIsInstance(module.body[1], nodes.Expr)
        self.assertIsInstance(module.body[1].value, nodes.Const)
        self.assertEqual(module.body[1].value.value, 42)

    def test_recursive_transforms_into_astroid_fields(self):
        # Test that the transformer walks properly the tree
        # by going recursively into the _astroid_fields per each node.
        def transform_compare(node):
            # Let's check the values of the ops
            _, right = node.ops[0]
            # Assume they are Consts and they were transformed before
            # us.
            return nodes.const_factory(node.left.value < right.value)

        def transform_name(node):
            # Should be Consts
            return next(node.infer())

        self.transformer.register_transform(nodes.Compare, transform_compare)
        self.transformer.register_transform(nodes.Name, transform_name)

        module = self.parse_transform('''
        a = 42
        b = 24
        a < b
        ''')

        self.assertIsInstance(module.body[2], nodes.Expr)
        self.assertIsInstance(module.body[2].value, nodes.Const)
        self.assertFalse(module.body[2].value.value)

    def test_transform_patches_locals(self):
        """A transform that mutates a FunctionDef in place is reflected in the tree."""
        def transform_function(node):
            assign = nodes.Assign()
            name = nodes.AssignName()
            name.name = 'value'
            assign.targets = [name]
            assign.value = nodes.const_factory(42)
            node.body.append(assign)

        self.transformer.register_transform(nodes.FunctionDef,
                                            transform_function)

        module = self.parse_transform('''
        def test():
            pass
        ''')

        func = module.body[0]
        self.assertEqual(len(func.body), 2)
        self.assertIsInstance(func.body[1], nodes.Assign)
        self.assertEqual(func.body[1].as_string(), 'value = 42')

    def test_predicates(self):
        """A transform fires only on nodes accepted by its predicate."""
        def transform_call(node):
            inferred = next(node.infer())
            return inferred

        def should_inline(node):
            return node.func.name.startswith('inlineme')

        self.transformer.register_transform(nodes.Call,
                                            transform_call,
                                            should_inline)

        module = self.parse_transform('''
        def inlineme_1():
            return 24
        def dont_inline_me():
            return 42
        def inlineme_2():
            return 2
        inlineme_1()
        dont_inline_me()
        inlineme_2()
        ''')
        values = module.body[-3:]
        self.assertIsInstance(values[0], nodes.Expr)
        self.assertIsInstance(values[0].value, nodes.Const)
        self.assertEqual(values[0].value.value, 24)
        self.assertIsInstance(values[1], nodes.Expr)
        self.assertIsInstance(values[1].value, nodes.Call)
        self.assertIsInstance(values[2], nodes.Expr)
        self.assertIsInstance(values[2].value, nodes.Const)
        self.assertEqual(values[2].value.value, 2)

    def test_transforms_are_separated(self):
        # Test that the transforming is done at a separate
        # step, which means that we are not doing inference
        # on a partially constructed tree anymore, which was the
        # source of crashes in the past when certain inference rules
        # were used in a transform.
        def transform_function(node):
            if node.decorators:
                for decorator in node.decorators.nodes:
                    inferred = next(decorator.infer())
                    if inferred.qname() == 'abc.abstractmethod':
                        return next(node.infer_call_result(node))
            return None

        manager = builder.MANAGER
        with add_transform(manager, nodes.FunctionDef, transform_function):
            module = builder.parse('''
            import abc
            from abc import abstractmethod

            class A(object):
                @abc.abstractmethod
                def ala(self):
                    return 24

                @abstractmethod
                def bala(self):
                    return 42
            ''')

        cls = module['A']
        ala = cls.body[0]
        bala = cls.body[1]
        self.assertIsInstance(ala, nodes.Const)
        self.assertEqual(ala.value, 24)
        self.assertIsInstance(bala, nodes.Const)
        self.assertEqual(bala.value, 42)

    def test_transforms_are_called_for_builtin_modules(self):
        # Test that transforms are called for builtin modules.
        def transform_function(node):
            name = nodes.AssignName()
            name.name = 'value'
            node.args.args = [name]
            return node

        manager = builder.MANAGER
        predicate = lambda node: node.root().name == 'time'
        with add_transform(manager, nodes.FunctionDef,
                           transform_function, predicate):
            builder_instance = builder.AstroidBuilder()
            module = builder_instance.module_build(time)

        asctime = module['asctime']
        self.assertEqual(len(asctime.args.args), 1)
        self.assertIsInstance(asctime.args.args[0], nodes.AssignName)
        self.assertEqual(asctime.args.args[0].name, 'value')

    def test_builder_apply_transforms(self):
        """AstroidBuilder(apply_transforms=False) must skip registered transforms."""
        def transform_function(node):
            return nodes.const_factory(42)

        manager = builder.MANAGER
        with add_transform(manager, nodes.FunctionDef, transform_function):
            astroid_builder = builder.AstroidBuilder(apply_transforms=False)
            module = astroid_builder.string_build('''def test(): pass''')

        # The transform wasn't applied.
        self.assertIsInstance(module.body[0], nodes.FunctionDef)

    def test_transform_crashes_on_is_subtype_of(self):
        # Test that we don't crash when having is_subtype_of
        # in a transform, as per issue #188. This happened
        # before, when the transforms weren't in their own step.
        def transform_class(cls):
            if cls.is_subtype_of('django.db.models.base.Model'):
                return cls
            return cls

        self.transformer.register_transform(nodes.ClassDef,
                                            transform_class)

        self.parse_transform('''
        # Change environ to automatically call putenv() if it exists
        import os
        putenv = os.putenv
        try:
            # This will fail if there's no putenv
            putenv
        except NameError:
            pass
        else:
            import UserDict
        ''')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
lucidmotifs/auto-aoc
|
.venv/lib/python3.5/site-packages/astroid/tests/unittest_transforms.py
|
Python
|
mit
| 8,122
|
[
"VisIt"
] |
5767391f5f24eec28953f71b55a882730beebcdd7b5d2c6b4bd84ce527af2f5c
|
import pathlib
import numpy as np
import pytest
from util import full
from loprop.core import penalty_function, AU2ANG, pairs
from loprop.dalton import MolFragDalton
from .common import LoPropTestCase
from . import h2o_rot_data as ref
# Reference calculation outputs for this test case live in <thisdir>/<case>/tmp.
thisdir = pathlib.Path(__file__).parent
case = "h2o_rot"
tmpdir = thisdir / case / "tmp"
@pytest.fixture
def molfrag(request):
    """Instantiate the MolFrag implementation selected via indirect parametrization.

    request.param is a MolFrag class (here MolFragDalton); it is built from
    the reference calculation in *tmpdir* at zero frequency with the
    default penalty function.
    """
    cls = request.param
    return cls(tmpdir, freqs=(0.0,), pf=penalty_function(2.0 / AU2ANG ** 2))
@pytest.mark.parametrize("molfrag", [MolFragDalton], ids=["dalton"], indirect=True)
class TestNew(LoPropTestCase):
    """LoProp regression tests for a rotated water molecule.

    Each test compares one localized property (charges, dipoles,
    quadrupoles, polarizabilities, potential-file output) of the `molfrag`
    fixture against precomputed reference values in h2o_rot_data (`ref`).
    """

    # def setup(self):
    #     molfrag = MolFrag(tmpdir, freqs=(0, ), pf=penalty_function(2.0/AU2ANG**2))
    #     molfragaxDiff = None

    # def tearDown(self):
    #     pass

    def test_nuclear_charge(self, molfrag):
        """Nuclear charges Z match the reference."""
        Z = molfrag.Z
        self.assert_allclose(Z, ref.Z)

    def test_coordinates_au(self, molfrag):
        """Atomic coordinates (a.u.) match the reference."""
        R = molfrag.R
        self.assert_allclose(R, ref.R)

    def test_default_gauge(self, molfrag):
        """Default gauge origin Rc matches the reference."""
        self.assert_allclose(molfrag.Rc, ref.Rc)

    def test_total_charge(self, molfrag):
        """Sum of all localized charges equals the total molecular charge."""
        Qtot = molfrag.Qab.sum()
        self.assert_allclose(Qtot, ref.Qtot)

    def test_charge(self, molfrag):
        """Per-atom localized charges match the reference."""
        Qaa = molfrag.Qa
        self.assert_allclose(ref.Q, Qaa)

    def test_total_dipole(self, molfrag):
        """Total molecular dipole matches the reference."""
        self.assert_allclose(molfrag.Dtot, ref.Dtot)

    def test_dipole_allbonds(self, molfrag):
        """Bond dipoles folded into the packed atom-pair layout match the reference."""
        D = full.matrix(ref.D.shape)
        Dab = molfrag.Dab
        for ab, a, b in pairs(molfrag.noa):
            D[:, ab] += Dab[:, a, b]
            if a != b:
                # off-diagonal pairs get both (a,b) and (b,a) contributions
                D[:, ab] += Dab[:, b, a]
        self.assert_allclose(D, ref.D)

    def test_dipole_allbonds_sym(self, molfrag):
        """The symmetrized dipole property gives the same packed values."""
        Dsym = molfrag.Dsym
        self.assert_allclose(Dsym, ref.D)

    def test_dipole_nobonds(self, molfrag):
        """Atomic dipoles (bond contributions summed onto atoms)."""
        Daa = molfrag.Dab.sum(axis=2).view(full.matrix)
        self.assert_allclose(Daa, ref.Daa)

    def test_quadrupole_total(self, molfrag):
        """Total quadrupole rebuilt from localized contributions.

        QUc = QUab + dipole translation terms (rRab) + charge translation
        terms (RRab), all referred to the gauge origin Rc.
        """
        rRab = full.matrix((6, molfrag.noa, molfrag.noa))
        RRab = full.matrix((6, molfrag.noa, molfrag.noa))
        Rabc = 1.0 * molfrag.Rab
        for a in range(molfrag.noa):
            for b in range(molfrag.noa):
                Rabc[a, b, :] -= molfrag.Rc
        for a in range(molfrag.noa):
            for b in range(molfrag.noa):
                # ij runs over the six packed upper-triangle components
                ij = 0
                for i in range(3):
                    for j in range(i, 3):
                        rRab[ij, a, b] = (
                            molfrag.Dab[i, a, b] * Rabc[a, b, j]
                            + molfrag.Dab[j, a, b] * Rabc[a, b, i]
                        )
                        RRab[ij, a, b] = (
                            molfrag.Qab[a, b]
                            * (molfrag.R[a, i] - molfrag.Rc[i])
                            * (molfrag.R[b, j] - molfrag.Rc[j])
                        )
                        ij += 1
        QUcab = molfrag.QUab + rRab + RRab
        QUc = QUcab.sum(axis=2).sum(axis=1).view(full.matrix)
        self.assert_allclose(QUc, ref.QUc)

    def test_nuclear_quadrupole(self, molfrag):
        """Nuclear contribution to the quadrupole matches the reference."""
        QUN = molfrag.QUN
        self.assert_allclose(QUN, ref.QUN)

    def test_quadrupole_allbonds(self, molfrag):
        """Bond quadrupoles folded into the packed atom-pair layout."""
        QU = full.matrix(ref.QU.shape)
        QUab = molfrag.QUab
        for ab, a, b in pairs(molfrag.noa):
            QU[:, ab] += QUab[:, a, b]
            if a != b:
                QU[:, ab] += QUab[:, b, a]
        self.assert_allclose(QU, ref.QU)

    def test_quadrupole_allbonds_sym(self, molfrag):
        """The symmetrized quadrupole property gives the same packed values."""
        QUsym = molfrag.QUsym
        self.assert_allclose(QUsym, ref.QU)

    def test_quadrupole_nobonds(self, molfrag):
        """Atomic quadrupoles including the shift correction dQUab."""
        QUaa = (molfrag.QUab + molfrag.dQUab).sum(axis=2).view(full.matrix)
        self.assert_allclose(QUaa, ref.QUaa)

    def test_Fab(self, molfrag):
        """Penalty-function matrix Fab matches the reference."""
        Fab = molfrag.Fab
        self.assert_allclose(Fab, ref.Fab)

    def test_molcas_shift(self, molfrag):
        """Shifted penalty matrix Lab = Fab + sf(Fab) matches Molcas reference."""
        Fab = molfrag.Fab
        Lab = Fab + molfrag.sf(Fab)
        self.assert_allclose(Lab, ref.Lab)

    def test_total_charge_shift(self, molfrag):
        """Field-induced charge shifts sum to zero (charge conservation)."""
        dQ = molfrag.dQa[0].sum(axis=0).view(full.matrix)
        dQref = [0.0, 0.0, 0.0]
        self.assert_allclose(dQref, dQ)

    def test_atomic_charge_shift(self, molfrag):
        """Atomic charge shifts match a central finite difference of ref data."""
        dQa = molfrag.dQa[0]
        dQaref = (ref.dQa[:, 1::2] - ref.dQa[:, 2::2]) * (1 / (2 * ref.ff))
        self.assert_allclose(dQa, dQaref, atol=0.006)

    def test_lagrangian(self, molfrag):
        # values per "perturbation" as in atomic_charge_shift below
        la = molfrag.la[0]
        laref = (ref.la[:, 0:6:2] - ref.la[:, 1:6:2]) * (1 / (2 * ref.ff))
        # The sign difference is because molcas sets up rhs with opposite sign
        # NOTE(review): atol=100 makes this a very loose comparison -- confirm
        # whether only sign/order of magnitude is meant to be checked here.
        self.assert_allclose(-laref, la, atol=100)

    def test_bond_charge_shift(self, molfrag):
        """Bond charge shifts match the finite-difference reference."""
        dQab = molfrag.dQab[0]
        noa = molfrag.noa
        dQabref = (ref.dQab[:, 1:7:2] - ref.dQab[:, 2:7:2]) * (1 / (2 * ref.ff))
        dQabcmp = full.matrix((3, 3))
        ab = 0
        for a in range(noa):
            for b in range(a):
                dQabcmp[ab, :] = dQab[a, b, :]
                ab += 1
        # The sign difference is because molcas sets up rhs with opposite sign
        self.assert_allclose(-dQabref, dQabcmp, atol=0.006)

    def test_bond_charge_shift_sum(self, molfrag):
        """Bond charge shifts summed over partners reproduce the atomic shifts."""
        dQa = molfrag.dQab[0].sum(axis=1).view(full.matrix)
        dQaref = molfrag.dQa[0]
        self.assert_allclose(dQa, dQaref)

    def test_polarizability_total(self, molfrag):
        """Total molecular polarizability matches the reference."""
        Am = molfrag.Am[0]
        self.assert_allclose(Am, ref.Am, 0.015)

    def test_polarizability_allbonds_molcas_internal(self, molfrag):
        """Rebuild every bond polarizability component from the raw
        finite-field multipole data (ref.rMP) and compare component by
        component, using Molcas' internal labelling."""
        O = ref.O
        H1O = ref.H1O
        H1 = ref.H1
        H2O = ref.H2O
        H2H1 = ref.H2H1
        H2 = ref.H2
        rMP = ref.rMP

        RO, RH1, RH2 = molfrag.R
        ROx, ROy, ROz = RO
        RH1x, RH1y, RH1z = RH1
        RH2x, RH2y, RH2z = RH2

        # inverse of twice the finite-field step: central difference factor
        ihff = 1 / (2 * ref.ff)

        # index aliases: charge/dipole components, +/- field displacements,
        # and the six atom/bond labels
        q, x, y, z = range(4)
        dx1, dx2, dy1, dy2, dz1, dz2 = 1, 2, 3, 4, 5, 6
        o, h1o, h1, h2o, h2h1, h2 = range(6)

        Oxx = ihff * (rMP[x, dx1, o] - rMP[x, dx2, o])
        Oyx = (
            ihff
            * (rMP[y, dx1, o] - rMP[y, dx2, o] + rMP[x, dy1, o] - rMP[x, dy2, o])
            / 2
        )
        Oyy = ihff * (rMP[y, dy1, o] - rMP[y, dy2, o])
        Ozx = (
            ihff
            * (rMP[z, dx1, o] - rMP[z, dx2, o] + rMP[x, dz1, o] - rMP[x, dz2, o])
            / 2
        )
        Ozy = (
            ihff
            * (rMP[z, dy1, o] - rMP[z, dy2, o] + rMP[y, dz1, o] - rMP[y, dz2, o])
            / 2
        )
        Ozz = ihff * (rMP[z, dz1, o] - rMP[z, dz2, o])
        H1Oxx = ihff * (
            rMP[x, dx1, h1o]
            - rMP[x, dx2, h1o]
            - (rMP[q, dx1, h1o] - rMP[q, dx2, h1o]) * (RH1x - ROx)
        )
        H1Oyx = ihff * (
            (rMP[y, dx1, h1o] - rMP[y, dx2, h1o] + rMP[x, dy1, h1o] - rMP[x, dy2, h1o])
            / 2
            - (rMP[q, dx1, h1o] - rMP[q, dx2, h1o]) * (RH1y - ROy)
            # - (rMP[0, dy1, h1o] - rMP[0, dy2, h1o])*(RH1x-ROx) THIS IS REALLY... A BUG?
        )
        H1Oyy = ihff * (
            rMP[y, dy1, h1o]
            - rMP[y, dy2, h1o]
            - (rMP[q, dy1, h1o] - rMP[q, dy2, h1o]) * (RH1y - ROy)
        )
        H1Ozx = ihff * (
            (rMP[z, dx1, h1o] - rMP[z, dx2, h1o] + rMP[x, dz1, h1o] - rMP[x, dz2, h1o])
            / 2
            - (rMP[q, dx1, h1o] - rMP[q, dx2, h1o]) * (RH1z - ROz)
            # - (rMP[q, dz1, h1o] - rMP[q, dz2, h1o])*(RH1x-ROx) #THIS IS REALLY... A BUG?
        )
        H1Ozy = ihff * (
            (rMP[z, dy1, h1o] - rMP[z, dy2, h1o] + rMP[y, dz1, h1o] - rMP[y, dz2, h1o])
            / 2
            - (rMP[q, dy1, h1o] - rMP[q, dy2, h1o]) * (RH1z - ROz)
            # - (rMP[q, dz1, h1o] - rMP[q, dz2, h1o])*(RH1y-ROy) THIS IS REALLY... A BUG?
        )
        H1Ozz = ihff * (
            rMP[z, dz1, h1o]
            - rMP[z, dz2, h1o]
            - (rMP[q, dz1, h1o] - rMP[q, dz2, h1o]) * (RH1z - ROz)
        )
        H1xx = ihff * (rMP[x, dx1, h1] - rMP[x, dx2, h1])
        H1yx = (
            ihff * (rMP[y, dx1, h1] - rMP[y, dx2, h1])
            + ihff * (rMP[x, dy1, h1] - rMP[x, dy2, h1])
        ) / 2
        H1yy = ihff * (rMP[y, dy1, h1] - rMP[y, dy2, h1])
        H1zx = (
            ihff * (rMP[z, dx1, h1] - rMP[z, dx2, h1])
            + ihff * (rMP[x, dz1, h1] - rMP[x, dz2, h1])
        ) / 2
        H1zy = (
            ihff * (rMP[z, dy1, h1] - rMP[z, dy2, h1])
            + ihff * (rMP[y, dz1, h1] - rMP[y, dz2, h1])
        ) / 2
        H1zz = ihff * (rMP[z, dz1, h1] - rMP[z, dz2, h1])
        H2Oxx = ihff * (
            rMP[x, dx1, h2o]
            - rMP[x, dx2, h2o]
            - (rMP[q, dx1, h2o] - rMP[q, dx2, h2o]) * (RH2x - ROx)
        )
        H2Oyx = ihff * (
            (rMP[y, dx1, h2o] - rMP[y, dx2, h2o] + rMP[x, dy1, h2o] - rMP[x, dy2, h2o])
            / 2
            - (rMP[q, dx1, h2o] - rMP[q, dx2, h2o]) * (RH2y - ROy)
            # - (rMP[q, dy1, h1o] - rMP[q, dy2, h1o])*(RH2x-ROx) THIS IS REALLY... A BUG?
        )
        H2Oyy = ihff * (
            rMP[y, dy1, h2o]
            - rMP[y, dy2, h2o]
            - (rMP[q, dy1, h2o] - rMP[q, dy2, h2o]) * (RH2y - ROy)
        )
        H2Ozx = ihff * (
            (rMP[z, dx1, h2o] - rMP[z, dx2, h2o] + rMP[x, dz1, h2o] - rMP[x, dz2, h2o])
            / 2
            - (rMP[q, dx1, h2o] - rMP[q, dx2, h2o]) * (RH2z - ROz)
            # - (rMP[q, dz1, h1o] - rMP[q, dz2, h1o])*(RH2x-ROx) #THIS IS REALLY... A BUG?
        )
        H2Ozy = ihff * (
            (rMP[z, dy1, h2o] - rMP[z, dy2, h2o] + rMP[y, dz1, h2o] - rMP[y, dz2, h2o])
            / 2
            - (rMP[q, dy1, h2o] - rMP[q, dy2, h2o]) * (RH2z - ROz)
            # - (rMP[q, dz1, h2o] - rMP[q, dz2, h2o])*(RH2y-ROy) THIS IS REALLY... A BUG?
        )
        H2Ozz = ihff * (
            rMP[z, dz1, h2o]
            - rMP[z, dz2, h2o]
            - (rMP[q, dz1, h2o] - rMP[q, dz2, h2o]) * (RH2z - ROz)
        )
        H2H1xx = ihff * (
            rMP[x, dx1, h2h1]
            - rMP[x, dx2, h2h1]
            - (rMP[q, dx1, h2h1] - rMP[q, dx2, h2h1]) * (RH2x - RH1x)
        )
        H2H1yx = ihff * (
            (
                rMP[y, dx1, h2h1]
                - rMP[y, dx2, h2h1]
                + rMP[x, dy1, h2h1]
                - rMP[x, dy2, h2h1]
            )
            / 2
            - (rMP[q, dx1, h2h1] - rMP[q, dx2, h2h1]) * (RH1y - ROy)
            # - (rMP[q, dy1, h2h1] - rMP[q, dy2, h2h1])*(RH1x-ROx) THIS IS REALLY... A BUG?
        )
        H2H1yy = ihff * (
            rMP[y, dy1, h2h1]
            - rMP[y, dy2, h2h1]
            - (rMP[q, dy1, h2h1] - rMP[q, dy2, h2h1]) * (RH2y - RH1y)
        )
        H2H1zx = ihff * (
            (
                rMP[z, dx1, h2h1]
                - rMP[z, dx2, h2h1]
                + rMP[x, dz1, h2h1]
                - rMP[x, dz2, h2h1]
            )
            / 2
            - (rMP[q, dx1, h2h1] - rMP[q, dx2, h2h1]) * (RH1z - ROz)
            # - (rMP[q, dz1, h2h1] - rMP[q, dz2, h2h1])*(RH1x-ROx) #THIS IS REALLY... A BUG?
        )
        H2H1zy = ihff * (
            (
                rMP[z, dy1, h2h1]
                - rMP[z, dy2, h2h1]
                + rMP[y, dz1, h2h1]
                - rMP[y, dz2, h2h1]
            )
            / 2
            - (rMP[q, dy1, h2h1] - rMP[q, dy2, h2h1]) * (RH1z - ROz)
            # - (rMP[q, dz1, h2h1] - rMP[q, dz2, h2h1])*(RH1y-RO[1]) THIS IS REALLY... A BUG?
        )
        H2H1zz = ihff * (
            rMP[z, dz1, h2h1]
            - rMP[z, dz2, h2h1]
            - (rMP[q, dz1, h2h1] - rMP[q, dz2, h2h1]) * (RH2z - RH1z)
        )
        H2xx = ihff * (rMP[x, dx1, h2] - rMP[x, dx2, h2])
        H2yx = (
            ihff * (rMP[y, dx1, h2] - rMP[y, dx2, h2])
            + ihff * (rMP[x, dy1, h2] - rMP[x, dy2, h2])
        ) / 2
        H2yy = ihff * (rMP[y, dy1, h2] - rMP[y, dy2, h2])
        H2zx = (
            ihff * (rMP[z, dx1, h2] - rMP[z, dx2, h2])
            + ihff * (rMP[x, dz1, h2] - rMP[x, dz2, h2])
        ) / 2
        H2zy = (
            ihff * (rMP[z, dy1, h2] - rMP[z, dy2, h2])
            + ihff * (rMP[y, dz1, h2] - rMP[y, dz2, h2])
        ) / 2
        H2zz = ihff * (rMP[z, dz1, h2] - rMP[z, dz2, h2])
        self.assert_allclose(O[0], Oxx, text="Oxx")
        self.assert_allclose(O[1], Oyx, text="Oyx")
        self.assert_allclose(O[2], Oyy, text="Oyy")
        self.assert_allclose(O[3], Ozx, text="Ozx")
        self.assert_allclose(O[4], Ozy, text="Ozy")
        self.assert_allclose(O[5], Ozz, text="Ozz")
        self.assert_allclose(H1O[0], H1Oxx, text="H1Oxx")
        self.assert_allclose(H1O[1], H1Oyx, text="H1Oyx")
        self.assert_allclose(H1O[2], H1Oyy, text="H1Oyy")
        self.assert_allclose(H1O[3], H1Ozx, text="H1Ozx")
        self.assert_allclose(H1O[4], H1Ozy, text="H1Ozy")
        self.assert_allclose(H1O[5], H1Ozz, text="H1Ozz")
        self.assert_allclose(H1[0], H1xx, text="H1xx")
        self.assert_allclose(H1[1], H1yx, text="H1yx")
        self.assert_allclose(H1[2], H1yy, text="H1yy")
        self.assert_allclose(H1[3], H1zx, text="H1zx")
        self.assert_allclose(H1[4], H1zy, text="H1zy")
        self.assert_allclose(H1[5], H1zz, text="H1zz")
        self.assert_allclose(H2O[0], H2Oxx, text="H2Oxx")
        self.assert_allclose(H2O[1], H2Oyx, text="H2Oyx")
        self.assert_allclose(H2O[2], H2Oyy, text="H2Oyy")
        self.assert_allclose(H2O[3], H2Ozx, text="H2Ozx")
        self.assert_allclose(H2O[4], H2Ozy, text="H2Ozy")
        self.assert_allclose(H2O[5], H2Ozz, text="H2Ozz")
        self.assert_allclose(H2H1[0], H2H1xx, text="H2H1xx")
        self.assert_allclose(H2H1[1], H2H1yx, text="H2H1yx")
        self.assert_allclose(H2H1[2], H2H1yy, text="H2H1yy")
        self.assert_allclose(H2H1[3], H2H1zx, text="H2H1zx")
        self.assert_allclose(H2H1[4], H2H1zy, text="H2H1zy")
        self.assert_allclose(H2H1[5], H2H1zz, text="H2H1zz")
        self.assert_allclose(H2[0], H2xx, text="H2xx")
        self.assert_allclose(H2[1], H2yx, text="H2yx")
        self.assert_allclose(H2[2], H2yy, text="H2yy")
        self.assert_allclose(H2[3], H2zx, text="H2zx")
        self.assert_allclose(H2[4], H2zy, text="H2zy")
        self.assert_allclose(H2[5], H2zz, text="H2zz")

    def test_altint(self, molfrag):
        """Alternative loop-based reconstruction of all polarizability
        components from rMP, checked against ref.Aab."""
        R = molfrag.R
        rMP = ref.rMP
        # column pairs in rMP for the +/- displacement of each field direction
        diff = [(1, 2), (3, 4), (5, 6)]
        bonds = (1, 3, 4)
        ablab = ("O", "H1O", "H1", "H2O", "H2H1", "H2")
        ijlab = ("xx", "yx", "yy", "zx", "zy", "zz")

        pol = np.zeros((6, molfrag.noa * (molfrag.noa + 1) // 2))
        for ab, a, b in pairs(molfrag.noa):
            for ij, i, j in pairs(3):
                i1, i2 = diff[i]
                j1, j2 = diff[j]
                pol[ij, ab] += (
                    rMP[i + 1, j1, ab]
                    - rMP[i + 1, j2, ab]
                    + rMP[j + 1, i1, ab]
                    - rMP[j + 1, i2, ab]
                ) / (4 * ref.ff)
                if ab in bonds:
                    # bond terms carry an extra charge-transfer correction
                    pol[ij, ab] -= (
                        (R[a][i] - R[b][i])
                        * (rMP[0, j1, ab] - rMP[0, j2, ab])
                        / (2 * ref.ff)
                    )
                self.assert_allclose(
                    ref.Aab[ij, ab], pol[ij, ab], text="%s%s" % (ablab[ab], ijlab[ij])
                )

    def test_polarizability_allbonds_atoms(self, molfrag):
        """Atom-diagonal polarizability blocks match the reference."""
        Aab = molfrag.Aab[0]  # + molfrag.dAab[0]
        noa = molfrag.noa

        Acmp = full.matrix(ref.Aab.shape)
        ab = 0
        for a in range(noa):
            for b in range(a):
                Acmp[:, ab] = (Aab[:, :, a, b] + Aab[:, :, b, a]).pack()
                ab += 1
            Acmp[:, ab] = Aab[:, :, a, a].pack()
            ab += 1
        # atoms
        self.assert_allclose(ref.Aab[:, 0], Acmp[:, 0], atol=0.005)
        self.assert_allclose(ref.Aab[:, 2], Acmp[:, 2], atol=0.005)
        self.assert_allclose(ref.Aab[:, 5], Acmp[:, 5], atol=0.005)

    def test_polarizability_allbonds_bonds(self, molfrag):
        """Bond polarizability blocks (including half the shift dAab)."""
        Aab = molfrag.Aab[0] + molfrag.dAab[0] * .5
        noa = molfrag.noa

        Acmp = full.matrix(ref.Aab.shape)
        ab = 0
        for a in range(noa):
            for b in range(a):
                Acmp[:, ab] = (Aab[:, :, a, b] + Aab[:, :, b, a]).pack()
                ab += 1
            Acmp[:, ab] = Aab[:, :, a, a].pack()
            ab += 1
        # bonds
        # NOTE(review): these use err_msg= while other tests use text= -- the
        # two keywords presumably both reach assert_allclose; confirm.
        self.assert_allclose(ref.Aab[:, 1], Acmp[:, 1], atol=0.150, err_msg="H1O")
        self.assert_allclose(ref.Aab[:, 3], Acmp[:, 3], atol=0.150, err_msg="H2O")
        self.assert_allclose(ref.Aab[:, 4], Acmp[:, 4], atol=0.005, err_msg="H2H1")

    def test_polarizability_nobonds(self, molfrag):
        """Atomic polarizabilities (bond contributions summed onto atoms)."""
        Aab = molfrag.Aab[0] + molfrag.dAab[0] * .5
        noa = molfrag.noa

        Acmp = full.matrix((6, noa))
        Aa = Aab.sum(axis=3).view(full.matrix)
        for a in range(noa):
            Acmp[:, a] = Aa[:, :, a].pack()
        # atoms
        self.assert_allclose(Acmp, ref.Aa, atol=0.07)

    def test_potfile_PAn0(self, molfrag):
        """Potential-file output with no multipoles and no polarizabilities."""
        PAn0 = molfrag.output_potential_file(maxl=-1, pol=0, hyper=0)
        assert PAn0 == ref.PAn0

    def test_potfile_PA00(self, molfrag):
        """Potential-file output with charges only."""
        PA00 = molfrag.output_potential_file(maxl=0, pol=0, hyper=0)
        assert PA00 == ref.PA00

    def test_potfile_PA10(self, molfrag):
        """Potential-file output up to dipoles."""
        PA10 = molfrag.output_potential_file(maxl=1, pol=0, hyper=0)
        assert PA10 == ref.PA10

    def test_potfile_PA20(self, molfrag):
        """Potential-file output up to quadrupoles."""
        PA20 = molfrag.output_potential_file(maxl=2, pol=0, hyper=0)
        assert PA20 == ref.PA20

    def test_potfile_PA21(self, molfrag):
        """Potential-file output with quadrupoles and isotropic polarizability."""
        PA21 = molfrag.output_potential_file(maxl=2, pol=1, hyper=0)
        assert PA21 == ref.PA21

    def test_potfile_PA22(self, molfrag):
        """Potential-file output with quadrupoles and full polarizability tensor."""
        PA22 = molfrag.output_potential_file(maxl=2, pol=2, hyper=0)
        assert PA22 == ref.PA22
|
vahtras/loprop
|
tests/test_h2o_rot.py
|
Python
|
gpl-3.0
| 18,025
|
[
"Dalton"
] |
e70bfa3f20d34a117cf74dd1218ff0136b4c1eab85be4feb60f0d1a4f980b1f0
|
import scipy.linalg
import numpy as np
import matplotlib.pyplot as pl
def eta(x, threshold):
    """Soft-thresholding operator: shrink `x` toward zero by `threshold`."""
    shrunk = np.fmax(np.abs(x) - threshold, 0)
    return np.sign(x) * shrunk
def etaprime(x, threshold):
    """Derivative of the soft-threshold: 1 where |x| > threshold, else 0."""
    above = x > threshold
    below = x < -threshold
    return above + below
def largestElement(x, n):
    """Return the n-th largest element of `x` (0-indexed).

    Out-of-range positions are clamped the same way as the original
    implementation: n > len(x) maps to the smallest element, n < 0 to
    the largest.
    """
    count = len(x)
    if n > count:
        n = count - 1
    if n < 0:
        n = 0
    descending = np.sort(x)[::-1]
    return descending[n]
def damp(A, AT, x0, denoiser, b, maxIter=5000, tol=1e-8, alpha=1.0):
    """Solve A x = b under a sparsity constraint with Denoising-based
    Approximate Message Passing (D-AMP).

    Minimizes |Ax-b|_2^2 + lambda*psi(x), where psi is the regularizer
    implicitly defined by the proximal operator `denoiser`.

    Args:
        A (callable): applies the matrix A to a vector (e.g. lambda z: AMat.dot(z))
        AT (callable): applies A.T to a vector (e.g. lambda z: AMat.T.dot(z))
        x0 (array): initial solution estimate
        denoiser (callable): denoising/proximal operator, called as
            denoiser(z, sigma)
        b (array): right-hand-side vector
        maxIter (int, optional): maximum number of iterations
        tol (float, optional): stopping tolerance on the relative residual
        alpha (float, optional): damping factor in (0, 1]; values < 1 help
            convergence when the sensing matrix is not iid Gaussian, the only
            situation in which AMP is proved to converge

    Returns:
        tuple: (xt, err) - the final solution estimate and the list of
        relative residual norms, one entry per iteration.
    """
    m = len(b)
    n = len(x0)
    # BUGFIX: x0 is documented as the initial solution but was previously
    # ignored (xt always started at zero). Start from x0 and keep the
    # residual consistent with it; for x0 = 0 this reproduces the old
    # behavior exactly (zt = b).
    xt = np.array(x0, dtype=float, copy=True)
    zt = b - A(xt)
    eps = np.finfo(1.0).resolution
    err = []
    loop = 0
    continueIteration = True
    while continueIteration:
        pseudoData = AT(zt) + xt
        # noise-level estimate from the current residual
        sigmaHat = np.sqrt(np.sum(zt**2) / m)
        xt = denoiser(pseudoData, sigmaHat)
        # Monte-Carlo estimate of the denoiser divergence (Onsager term)
        epsilon = np.max(pseudoData) / 1000 + eps
        probe = np.random.randn(n)  # renamed from `eta`: avoid shadowing the module-level denoiser
        div = np.sum(probe * (denoiser(pseudoData + epsilon * probe, sigmaHat) - xt) / epsilon)
        # damped residual update; alpha=1 recovers plain AMP
        zt = alpha * (b - A(xt) + 1.0 / m * zt * div) + (1.0 - alpha) * zt
        stopping = np.linalg.norm(b - A(xt)) / np.linalg.norm(b)
        err.append(stopping)
        continueIteration = (stopping > tol) and (loop < maxIter)
        if (loop % 10 == 0):
            print("It: {0} - rel. error: {1}".format(loop, stopping))
        loop += 1
    return xt, err
if __name__ == "__main__":
    # Demo: recover a sparse signal from compressed random measurements.
    n_meas = 200       # number of measurements M
    n_dim = 1000       # signal dimension N
    k_sparse = 20      # number of nonzeros K
    sigma = 0.00000    # observation noise level (currently disabled)
    # Create sparse signal: unit spikes at random positions
    x = np.zeros(n_dim)
    support = np.random.permutation(n_dim)
    x[support[0:k_sparse]] = 1.0
    # Define matrix: random Gaussian, normalized to unit spectral norm
    AMat = np.random.normal(size=(n_meas, n_dim))
    AMat /= np.linalg.norm(AMat, 2)
    # Define observation vector (uncomment below to add Gaussian noise)
    b = AMat.dot(x)
    #b += np.random.normal(scale=sigma, size=b.shape)
    # Initial state
    x0 = np.ones(n_dim)
    A = lambda z: AMat.dot(z)
    At = lambda z: AMat.T.dot(z)
    denoiser = lambda z, t: eta(z, t)
    sol, err = damp(A, At, x0, denoiser, b, maxIter=500, tol=1e-8, alpha=0.5)
    pl.plot(sol)
    pl.plot(x, 'o')
|
aasensio/pyiacsun
|
pyiacsun/sparse/damp.py
|
Python
|
mit
| 3,311
|
[
"Gaussian"
] |
05024f7fc0eacd708c3f2a9e72b02635df38cda2e8175520ce3e21141da45337
|
"""
QAPI event generator
Copyright (c) 2014 Wenchao Xia
Copyright (c) 2015-2018 Red Hat Inc.
Authors:
Wenchao Xia <wenchaoqemu@gmail.com>
Markus Armbruster <armbru@redhat.com>
This work is licensed under the terms of the GNU GPL, version 2.
See the COPYING file in the top-level directory.
"""
from qapi.common import *
from qapi.gen import QAPISchemaModularCVisitor, ifcontext
from qapi.schema import QAPISchemaEnumMember
from qapi.types import gen_enum, gen_enum_lookup
def build_event_send_proto(name, arg_type, boxed):
    """Return the C prototype string for qapi_event_send_NAME()."""
    func = c_name(name.lower())
    params = build_params(arg_type, boxed)
    return 'void qapi_event_send_{0}({1})'.format(func, params)
def gen_event_send_decl(name, arg_type, boxed):
    """Return the extern declaration (prototype + ';') for an event's send function."""
    return mcgen('''

%(proto)s;
''',
                 proto=build_event_send_proto(name, arg_type, boxed))
# Declare and initialize an object 'qapi' using parameters from build_params()
def gen_param_var(typ):
    """Generate C code declaring a local 'param' struct initialized from the
    exploded event arguments and, for non-implicit types, a pointer 'arg'
    aliasing it.
    """
    assert not typ.variants
    ret = mcgen('''
    %(c_name)s param = {
''',
                c_name=typ.c_name())
    sep = '        '
    for memb in typ.members:
        ret += sep
        sep = ', '
        if memb.optional:
            # optional members carry a has_NAME flag alongside the value
            ret += 'has_' + c_name(memb.name) + sep
        if memb.type.name == 'str':
            # Cast away const added in build_params()
            ret += '(char *)'
        ret += c_name(memb.name)
    ret += mcgen('''

    };
''')
    if not typ.is_implicit():
        # non-implicit types are visited through a pointer named 'arg'
        ret += mcgen('''
    %(c_name)s *arg = &param;
''',
                     c_name=typ.c_name())
    return ret
def gen_event_send(name, arg_type, boxed, event_enum_name, event_emit):
    """Generate the C definition of qapi_event_send_FOO().

    The generated function builds the QMP event dictionary, optionally
    visits the event arguments into its 'data' member, and passes the
    result to the emit hook.
    """
    # FIXME: Our declaration of local variables (and of 'errp' in the
    # parameter list) can collide with exploded members of the event's
    # data type passed in as parameters. If this collision ever hits in
    # practice, we can rename our local variables with a leading _ prefix,
    # or split the code into a wrapper function that creates a boxed
    # 'param' object then calls another to do the real work.
    have_args = boxed or (arg_type and not arg_type.is_empty())
    ret = mcgen('''

%(proto)s
{
    QDict *qmp;
''',
                proto=build_event_send_proto(name, arg_type, boxed))
    if have_args:
        ret += mcgen('''
    QObject *obj;
    Visitor *v;
''')
        if not boxed:
            # unboxed arguments: collect them into a local 'param' struct
            ret += gen_param_var(arg_type)
    ret += mcgen('''

    qmp = qmp_event_build_dict("%(name)s");

''',
                 name=name)
    if have_args:
        ret += mcgen('''
    v = qobject_output_visitor_new(&obj);
''')
        if not arg_type.is_implicit():
            # boxed argument: visit the struct through the 'arg' pointer
            ret += mcgen('''
    visit_type_%(c_name)s(v, "%(name)s", &arg, &error_abort);
''',
                         name=name, c_name=arg_type.c_name())
        else:
            # implicit argument type: visit the members of the local
            # 'param' wrapped in an anonymous struct
            ret += mcgen('''

    visit_start_struct(v, "%(name)s", NULL, 0, &error_abort);
    visit_type_%(c_name)s_members(v, &param, &error_abort);
    visit_check_struct(v, &error_abort);
    visit_end_struct(v, NULL);
''',
                         name=name, c_name=arg_type.c_name())
        ret += mcgen('''

    visit_complete(v, &obj);
    qdict_put_obj(qmp, "data", obj);
''')
    ret += mcgen('''
    %(event_emit)s(%(c_enum)s, qmp);

''',
                 event_emit=event_emit,
                 c_enum=c_enum_const(event_enum_name, name))
    if have_args:
        ret += mcgen('''
    visit_free(v);
''')
    ret += mcgen('''
    qobject_unref(qmp);
}
''')
    return ret
class QAPISchemaGenEventVisitor(QAPISchemaModularCVisitor):
    """Schema visitor generating the per-module qapi-events C sources plus
    the shared event enum and emit-hook declarations."""

    def __init__(self, prefix):
        super().__init__(
            prefix, 'qapi-events',
            ' * Schema-defined QAPI/QMP events', None, __doc__)
        # name of the generated event enum, e.g. QAPIEvent
        self._event_enum_name = c_name(prefix + 'QAPIEvent', protect=False)
        # enum members accumulated across all visited modules
        self._event_enum_members = []
        # name of the emit hook the generated send functions call
        self._event_emit_name = c_name(prefix + 'qapi_event_emit')

    def _begin_user_module(self, name):
        # Emit per-module boilerplate includes for the .c and .h files.
        events = self._module_basename('qapi-events', name)
        types = self._module_basename('qapi-types', name)
        visit = self._module_basename('qapi-visit', name)
        self._genc.add(mcgen('''
#include "qemu/osdep.h"
#include "%(prefix)sqapi-emit-events.h"
#include "%(events)s.h"
#include "%(visit)s.h"
#include "qapi/error.h"
#include "qapi/qmp/qdict.h"
#include "qapi/qobject-output-visitor.h"
#include "qapi/qmp-event.h"

''',
                             events=events, visit=visit,
                             prefix=self._prefix))
        self._genh.add(mcgen('''
#include "qapi/util.h"
#include "%(types)s.h"
''',
                             types=types))

    def visit_end(self):
        # The event enum and the emit-hook declaration live in a dedicated
        # 'emit' system module generated once all user modules are done.
        self._add_system_module('emit', ' * QAPI Events emission')
        self._genc.preamble_add(mcgen('''
#include "qemu/osdep.h"
#include "%(prefix)sqapi-emit-events.h"
''',
                                      prefix=self._prefix))
        self._genh.preamble_add(mcgen('''
#include "qapi/util.h"
'''))
        self._genh.add(gen_enum(self._event_enum_name,
                                self._event_enum_members))
        self._genc.add(gen_enum_lookup(self._event_enum_name,
                                       self._event_enum_members))
        self._genh.add(mcgen('''

void %(event_emit)s(%(event_enum)s event, QDict *qdict);
''',
                             event_emit=self._event_emit_name,
                             event_enum=self._event_enum_name))

    def visit_event(self, name, info, ifcond, features, arg_type, boxed):
        # Declaration and definition are wrapped in the event's #if blocks.
        with ifcontext(ifcond, self._genh, self._genc):
            self._genh.add(gen_event_send_decl(name, arg_type, boxed))
            self._genc.add(gen_event_send(name, arg_type, boxed,
                                          self._event_enum_name,
                                          self._event_emit_name))
        # Note: we generate the enum member regardless of @ifcond, to
        # keep the enumeration usable in target-independent code.
        self._event_enum_members.append(QAPISchemaEnumMember(name, None))
def gen_events(schema, output_dir, prefix):
    """Walk *schema* and write the qapi-events sources into *output_dir*."""
    visitor = QAPISchemaGenEventVisitor(prefix)
    schema.visit(visitor)
    visitor.write(output_dir)
|
dslutz/qemu
|
scripts/qapi/events.py
|
Python
|
gpl-2.0
| 6,148
|
[
"VisIt"
] |
88a4f9422d5d2f1b0add22ef140e4619b07020eaa048e0e1d22a16c4ccabe275
|
##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2014 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors: Kyle A. Beauchamp, TJ Lane, Joshua Adelman, Lee-Ping Wang, Jason Swails
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
##############################################################################
# Imports
##############################################################################
from __future__ import print_function, division

import os
import warnings
import functools
from copy import deepcopy
# BUGFIX: the ABCs moved to collections.abc in Python 3.3 and importing them
# from `collections` raises ImportError on Python 3.10+; fall back for py2.
try:
    from collections.abc import Iterable
except ImportError:
    from collections import Iterable

import numpy as np

from mdtraj.formats import DCDTrajectoryFile
from mdtraj.formats import BINPOSTrajectoryFile
from mdtraj.formats import XTCTrajectoryFile
from mdtraj.formats import TRRTrajectoryFile
from mdtraj.formats import HDF5TrajectoryFile
from mdtraj.formats import NetCDFTrajectoryFile
from mdtraj.formats import LH5TrajectoryFile
from mdtraj.formats import PDBTrajectoryFile
from mdtraj.formats import MDCRDTrajectoryFile
from mdtraj.formats import ArcTrajectoryFile
from mdtraj.formats import DTRTrajectoryFile
from mdtraj.formats import LAMMPSTrajectoryFile
from mdtraj.formats import XYZTrajectoryFile
from mdtraj.formats import GroTrajectoryFile
from mdtraj.formats import AmberNetCDFRestartFile
from mdtraj.formats import AmberRestartFile
from mdtraj.formats.prmtop import load_prmtop
from mdtraj.formats.psf import load_psf
from mdtraj.formats.mol2 import load_mol2
from mdtraj.formats.gro import load_gro
from mdtraj.formats.arc import load_arc
from mdtraj.formats.hoomdxml import load_hoomdxml
from mdtraj.core.topology import Topology
from mdtraj.core.residue_names import _SOLVENT_TYPES
from mdtraj.utils import (ensure_type, in_units_of, lengths_and_angles_to_box_vectors,
                          box_vectors_to_lengths_and_angles, cast_indices,
                          deprecated)
from mdtraj.utils.six.moves import xrange
from mdtraj.utils.six import PY3, string_types
from mdtraj import _rmsd
from mdtraj import _FormatRegistry
from mdtraj.geometry import distance
##############################################################################
# Globals
##############################################################################
# Public API of this module (note: `open` deliberately shadows the builtin).
__all__ = ['open', 'load', 'iterload', 'load_frame', 'load_topology', 'Trajectory']
# supported extensions for constructing topologies
_TOPOLOGY_EXTS = ['.pdb', '.pdb.gz', '.h5','.lh5', '.prmtop', '.parm7',
                  '.psf', '.mol2', '.hoomdxml', '.gro', '.arc']
##############################################################################
# Utilities
##############################################################################
def _assert_files_exist(filenames):
    """Raise IOError unless every name is an existing regular file.

    Parameters
    ----------
    filenames : {str, [str]}
        String or list of strings to check
    """
    names = [filenames] if isinstance(filenames, string_types) else filenames
    for name in names:
        if not (os.path.exists(name) and os.path.isfile(name)):
            raise IOError('No such file: %s' % name)
def _assert_files_or_dirs_exist(names):
    """Raise IOError unless every name is an existing file or directory.

    Parameters
    ----------
    names : {str, [str]}
        String or list of strings to check
    """
    to_check = [names] if isinstance(names, string_types) else names
    for name in to_check:
        present = os.path.exists(name) and (os.path.isfile(name)
                                            or os.path.isdir(name))
        if not present:
            raise IOError('No such file: %s' % name)
def load_topology(filename):
    """Load a topology from a file on disk.

    Parameters
    ----------
    filename : str
        Path to a file containing a system topology. The following extensions
        are supported: '.pdb', '.pdb.gz', '.h5','.lh5', '.prmtop', '.parm7',
        '.psf', '.mol2', '.hoomdxml'

    Returns
    -------
    topology : md.Topology
    """
    # delegate to the shared dispatcher used by load()/load_frame()
    return _parse_topology(filename)
def _parse_topology(top):
    """Get the topology from an argument of indeterminate type.

    If `top` is a string we load the file by extension; if it is a
    Trajectory or Topology we extract/return its topology directly.

    Returns
    -------
    topology : md.Topology
    """
    is_string = isinstance(top, string_types)
    ext = _get_extension(top) if is_string else None

    # non-string inputs: already-built objects
    if isinstance(top, Trajectory):
        return top.topology
    if isinstance(top, Topology):
        return top

    if is_string:
        if ext in ('.pdb', '.pdb.gz', '.h5', '.lh5'):
            # these formats embed their topology; read one frame for it
            return load_frame(top, 0).topology
        if ext in ('.prmtop', '.parm7'):
            return load_prmtop(top)
        if ext == '.psf':
            return load_psf(top)
        if ext == '.mol2':
            return load_mol2(top).topology
        if ext == '.gro':
            return load_gro(top).topology
        if ext == '.arc':
            return load_arc(top).topology
        if ext == '.hoomdxml':
            return load_hoomdxml(top).topology
        raise IOError('The topology is loaded by filename extension, and the '
                      'detected "%s" format is not supported. Supported topology '
                      'formats include %s and "%s".' % (
                          ext, ', '.join(['"%s"' % e for e in _TOPOLOGY_EXTS[:-1]]),
                          _TOPOLOGY_EXTS[-1]))

    raise TypeError('A topology is required. You supplied top=%s' % str(top))
def _get_extension(filename):
(base, extension) = os.path.splitext(filename)
if extension == '.gz':
extension2 = os.path.splitext(base)[1]
return extension2 + extension
return extension
##############################################################################
# Utilities
##############################################################################
def open(filename, mode='r', force_overwrite=True, **kwargs):
    """Open a trajectory file-like object.

    This factory returns an open file-like object capable of reading or
    writing the trajectory (depending on `mode`). Nothing is loaded from
    disk (or written) until the object's methods are called.

    Parameters
    ----------
    filename : str
        Path to the trajectory file on disk
    mode : {'r', 'w'}
        The mode in which to open the file, either 'r' for read or 'w' for
        write.
    force_overwrite : bool
        If opened in write mode, and a file by the name of `filename` already
        exists on disk, should we overwrite it?

    Other Parameters
    ----------------
    kwargs : dict
        Other keyword parameters are passed directly to the file object

    Returns
    -------
    fileobject : object
        Open trajectory file; concrete type is chosen by filename extension

    See Also
    --------
    load, ArcTrajectoryFile, BINPOSTrajectoryFile, DCDTrajectoryFile,
    HDF5TrajectoryFile, LH5TrajectoryFile, MDCRDTrajectoryFile,
    NetCDFTrajectoryFile, PDBTrajectoryFile, TRRTrajectoryFile,
    XTCTrajectoryFile
    """
    ext = _get_extension(filename)
    try:
        factory = _FormatRegistry.fileobjects[ext]
    except KeyError:
        raise IOError('Sorry, no loader for filename=%s (extension=%s) '
                      'was found. I can only load files with extensions in %s'
                      % (filename, ext, _FormatRegistry.fileobjects.keys()))
    return factory(filename, mode=mode, force_overwrite=force_overwrite, **kwargs)
def load_frame(filename, index, top=None, atom_indices=None):
    """Load a single frame from a trajectory file.

    Parameters
    ----------
    filename : str
        Path to the trajectory file on disk
    index : int
        Load the `index`-th frame from the specified file
    top : {str, Trajectory, Topology}
        Most trajectory formats do not contain topology information. Pass in
        either the path to a RCSB PDB file, a trajectory, or a topology to
        supply this information.
    atom_indices : array_like, optional
        If not none, then read only a subset of the atoms coordinates from the
        file. These indices are zero-based (not 1 based, as used by the PDB
        format).

    Examples
    --------
    >>> import mdtraj as md
    >>> first_frame = md.load_frame('traj.h5', 0)
    >>> print first_frame
    <mdtraj.Trajectory with 1 frames, 22 atoms>

    See Also
    --------
    load, iterload

    Returns
    -------
    trajectory : md.Trajectory
        The resulting conformation, as an md.Trajectory object containing
        a single frame.
    """
    ext = _get_extension(filename)
    try:
        loader = _FormatRegistry.loaders[ext]
    except KeyError:
        raise IOError('Sorry, no loader for filename=%s (extension=%s) '
                      'was found. I can only load files with extensions in %s'
                      % (filename, ext, _FormatRegistry.loaders.keys()))

    kwargs = {'atom_indices': atom_indices}
    if ext not in _TOPOLOGY_EXTS:
        # formats without an embedded topology need one supplied by the caller
        kwargs['top'] = top

    # DTR "files" are directories on disk, so check existence accordingly
    if loader.__name__ == 'load_dtr':
        _assert_files_or_dirs_exist(filename)
    else:
        _assert_files_exist(filename)

    return loader(filename, frame=index, **kwargs)
def load(filename_or_filenames, discard_overlapping_frames=False, **kwargs):
    """Load a trajectory from one or more files on disk.

    This function dispatches to one of the specialized trajectory loaders based
    on the extension on the filename. Because different trajectory formats save
    different information on disk, the specific keyword argument options supported
    depend on the specific loader.

    Parameters
    ----------
    filename_or_filenames : {str, list of strings}
        Filename or list of filenames containing trajectory files of a single format.
    discard_overlapping_frames : bool, default=False
        Look for overlapping frames between the last frame of one filename and
        the first frame of a subsequent filename and discard them

    Other Parameters
    ----------------
    top : {str, Trajectory, Topology}
        Most trajectory formats do not contain topology information. Pass in
        either the path to a RCSB PDB file, a trajectory, or a topology to
        supply this information. This option is not required for the .h5, .lh5,
        and .pdb formats, which already contain topology information.
    stride : int, default=None
        Only read every stride-th frame
    atom_indices : array_like, optional
        If not none, then read only a subset of the atoms coordinates from the
        file. This may be slightly slower than the standard read because it
        requires an extra copy, but will save memory.

    See Also
    --------
    load_frame, iterload

    Examples
    --------
    >>> import mdtraj as md
    >>> traj = md.load('output.xtc', top='topology.pdb')
    >>> print traj
    <mdtraj.Trajectory with 500 frames, 423 atoms at 0x110740a90>

    >>> traj2 = md.load('output.xtc', stride=2, top='topology.pdb')
    >>> print traj2
    <mdtraj.Trajectory with 250 frames, 423 atoms at 0x11136e410>

    >>> traj3 = md.load_hdf5('output.xtc', atom_indices=[0,1], top='topology.pdb')
    >>> print traj3
    <mdtraj.Trajectory with 500 frames, 2 atoms at 0x18236e4a0>

    Returns
    -------
    trajectory : md.Trajectory
        The resulting trajectory, as an md.Trajectory object.
    """
    if "top" in kwargs:  # If applicable, pre-loads the topology from PDB for major performance boost.
        kwargs["top"] = _parse_topology(kwargs["top"])

    # grab the extension of the filename
    if isinstance(filename_or_filenames, string_types):  # If a single filename
        extension = _get_extension(filename_or_filenames)
        filename = filename_or_filenames
    else:  # If multiple filenames, take the first one.
        extensions = [_get_extension(f) for f in filename_or_filenames]
        if len(set(extensions)) == 0:
            raise ValueError('No trajectories specified. '
                             'filename_or_filenames was an empty list')
        elif len(set(extensions)) > 1:
            raise TypeError("Each filename must have the same extension. "
                            "Received: %s" % ', '.join(set(extensions)))
        else:
            # we know the topology is equal because we sent the same topology
            # kwarg in. Therefore, we explictly throw away the topology on all
            # but the first trajectory and use check_topology=False on the join.
            # Throwing the topology away explictly allows a large number of pdb
            # files to be read in without using ridiculous amounts of memory.
            trajectories = []
            for (i, f) in enumerate(filename_or_filenames):
                t = load(f, **kwargs)
                if i != 0:
                    t.topology = None
                trajectories.append(t)
            return trajectories[0].join(trajectories[1:],
                                        discard_overlapping_frames=discard_overlapping_frames,
                                        check_topology=False)

    try:
        #loader = _LoaderRegistry[extension][0]
        loader = _FormatRegistry.loaders[extension]
    except KeyError:
        raise IOError('Sorry, no loader for filename=%s (extension=%s) '
                      'was found. I can only load files '
                      'with extensions in %s' % (filename, extension, _FormatRegistry.loaders.keys()))

    if extension in _TOPOLOGY_EXTS:
        # this is a little hack that makes calling load() more predicable. since
        # most of the loaders take a kwargs "top" except for load_hdf5, (since
        # it saves the topology inside the file), we often end up calling
        # load_hdf5 via this function with the top kwarg specified. but then
        # there would be a signature binding error. it's easier just to ignore
        # it.
        if 'top' in kwargs:
            warnings.warn('top= kwarg ignored since file contains topology information')
        kwargs.pop('top', None)

    # DTR trajectories are directories on disk, so check existence accordingly
    if loader.__name__ not in ['load_dtr']:
        _assert_files_exist(filename_or_filenames)
    else:
        _assert_files_or_dirs_exist(filename_or_filenames)

    value = loader(filename, **kwargs)
    return value
def iterload(filename, chunk=100, **kwargs):
    """An iterator over a trajectory from one or more files on disk, in fragments.

    This may be more memory efficient than loading an entire trajectory at
    once.

    Parameters
    ----------
    filename : str
        Path to the trajectory file on disk
    chunk : int
        Number of frames to load at once from disk per iteration. If 0, load all.

    Other Parameters
    ----------------
    top : {str, Trajectory, Topology}
        Most trajectory formats do not contain topology information. Pass in
        either the path to a RCSB PDB file, a trajectory, or a topology to
        supply this information. This option is not required for the .h5, .lh5,
        and .pdb formats, which already contain topology information.
    stride : int, default=None
        Only read every stride-th frame.
    atom_indices : array_like, optional
        If not none, then read only a subset of the atoms coordinates from the
        file. This may be slightly slower than the standard read because it
        requires an extra copy, but will save memory.
    skip : int, default=0
        Skip first n frames.

    See Also
    --------
    load, load_frame

    Examples
    --------
    >>> import mdtraj as md
    >>> for chunk in md.iterload('output.xtc', top='topology.pdb')
    ...     print chunk
    <mdtraj.Trajectory with 100 frames, 423 atoms at 0x110740a90>
    """
    stride = kwargs.pop('stride', 1)
    atom_indices = cast_indices(kwargs.pop('atom_indices', None))
    top = kwargs.pop('top', None)
    skip = kwargs.pop('skip', 0)

    extension = _get_extension(filename)
    if extension not in _TOPOLOGY_EXTS:
        topology = _parse_topology(top)

    if chunk % stride != 0:
        raise ValueError('Stride must be a divisor of chunk. stride=%d does not go '
                         'evenly into chunk=%d' % (stride, chunk))
    if chunk == 0:
        # If chunk was 0 then we want to avoid filetype-specific code
        # in case of undefined behavior in various file parsers.
        # TODO: this will first apply stride, then skip!
        if extension not in _TOPOLOGY_EXTS:
            kwargs['top'] = top
        yield load(filename, **kwargs)[skip:]
    elif extension in ('.pdb', '.pdb.gz'):
        # the PDBTrajectortFile class doesn't follow the standard API. Fixing it
        # to support iterload could be worthwhile, but requires a deep refactor.
        t = load(filename, stride=stride, atom_indices=atom_indices)
        for i in range(0, len(t), chunk):
            yield t[i:i+chunk]
    else:
        # MDCRD files need the atom count to be parseable at all
        if extension in ('.crd', '.mdcrd'):
            fileobj = open(filename, n_atoms=topology.n_atoms)
        else:
            fileobj = open(filename)
        with fileobj as f:
            if skip > 0:
                f.seek(skip)
            while True:
                if extension not in _TOPOLOGY_EXTS:
                    traj = f.read_as_traj(topology, n_frames=chunk*stride, stride=stride, atom_indices=atom_indices, **kwargs)
                else:
                    traj = f.read_as_traj(n_frames=chunk*stride, stride=stride, atom_indices=atom_indices, **kwargs)
                if len(traj) == 0:
                    # BUGFIX (PEP 479): raising StopIteration inside a generator
                    # is converted to RuntimeError on Python 3.7+; end the
                    # generator with a plain return instead.
                    return
                yield traj
class Trajectory(object):
"""Container object for a molecular dynamics trajectory
A Trajectory represents a collection of one or more molecular structures,
generally (but not necessarily) from a molecular dynamics trajectory. The
Trajectory stores a number of fields describing the system through time,
including the cartesian coordinates of each atoms (``xyz``), the topology
of the molecular system (``topology``), and information about the
unitcell if appropriate (``unitcell_vectors``, ``unitcell_length``,
``unitcell_angles``).
A Trajectory should generally be constructed by loading a file from disk.
Trajectories can be loaded from (and saved to) the PDB, XTC, TRR, DCD,
binpos, NetCDF or MDTraj HDF5 formats.
Trajectory supports fancy indexing, so you can extract one or more frames
from a Trajectory as a separate trajectory. For example, to form a
trajectory with every other frame, you can slice with ``traj[::2]``.
Trajectory uses the nanometer, degree & picosecond unit system.
Examples
--------
>>> # loading a trajectory
>>> import mdtraj as md
>>> md.load('trajectory.xtc', top='native.pdb')
<mdtraj.Trajectory with 1000 frames, 22 atoms at 0x1058a73d0>
>>> # slicing a trajectory
>>> t = md.load('trajectory.h5')
>>> print(t)
<mdtraj.Trajectory with 100 frames, 22 atoms>
>>> print(t[::2])
<mdtraj.Trajectory with 50 frames, 22 atoms>
>>> # calculating the average distance between two atoms
>>> import mdtraj as md
>>> import numpy as np
>>> t = md.load('trajectory.h5')
>>> np.mean(np.sqrt(np.sum((t.xyz[:, 0, :] - t.xyz[:, 21, :])**2, axis=1)))
See Also
--------
mdtraj.load : High-level function that loads files and returns an ``md.Trajectory``
Attributes
----------
n_frames : int
n_atoms : int
n_residues : int
time : np.ndarray, shape=(n_frames,)
timestep : float
topology : md.Topology
top : md.Topology
xyz : np.ndarray, shape=(n_frames, n_atoms, 3)
unitcell_vectors : {np.ndarray, shape=(n_frames, 3, 3), None}
unitcell_lengths : {np.ndarray, shape=(n_frames, 3), None}
unitcell_angles : {np.ndarray, shape=(n_frames, 3), None}
"""
# this is NOT configurable. if it's set to something else, things will break
# (thus why I make it private)
_distance_unit = 'nanometers'
@property
def topology(self):
"""Topology of the system, describing the organization of atoms into residues, bonds, etc
Returns
-------
topology : md.Topology
The topology object, describing the organization of atoms into
residues, bonds, etc
"""
return self._topology
@topology.setter
def topology(self, value):
"Set the topology of the system, describing the organization of atoms into residues, bonds, etc"
# todo: more typechecking
self._topology = value
@property
def n_frames(self):
"""Number of frames in the trajectory
Returns
-------
n_frames : int
The number of frames in the trajectory
"""
return self._xyz.shape[0]
@property
def n_atoms(self):
"""Number of atoms in the trajectory
Returns
-------
n_atoms : int
The number of atoms in the trajectory
"""
return self._xyz.shape[1]
@property
def n_residues(self):
"""Number of residues (amino acids) in the trajectory
Returns
-------
n_residues : int
The number of residues in the trajectory's topology
"""
if self.top is None:
return 0
return sum([1 for r in self.top.residues])
@property
def n_chains(self):
"""Number of chains in the trajectory
Returns
-------
n_chains : int
The number of chains in the trajectory's topology
"""
if self.top is None:
return 0
return sum([1 for c in self.top.chains])
@property
def top(self):
"""Alias for self.topology, describing the organization of atoms into residues, bonds, etc
Returns
-------
topology : md.Topology
The topology object, describing the organization of atoms into
residues, bonds, etc
"""
return self._topology
@top.setter
def top(self, value):
"Set the topology of the system, describing the organization of atoms into residues, bonds, etc"
# todo: more typechecking
self._topology = value
@property
def timestep(self):
"""Timestep between frames, in picoseconds
Returns
-------
timestep : float
The timestep between frames, in picoseconds.
"""
if self.n_frames <= 1:
raise(ValueError("Cannot calculate timestep if trajectory has one frame."))
return self._time[1] - self._time[0]
@property
def time(self):
"""The simulation time corresponding to each frame, in picoseconds
Returns
-------
time : np.ndarray, shape=(n_frames,)
The simulation time corresponding to each frame, in picoseconds
"""
return self._time
@time.setter
def time(self, value):
"Set the simulation time corresponding to each frame, in picoseconds"
if isinstance(value, list):
value = np.array(value)
if np.isscalar(value) and self.n_frames == 1:
value = np.array([value])
elif not value.shape == (self.n_frames,):
raise ValueError('Wrong shape. Got %s, should be %s' % (value.shape,
(self.n_frames)))
self._time = value
@property
def unitcell_vectors(self):
    """The vectors that define the shape of the unit cell in each frame.

    Returns
    -------
    vectors : np.ndarray, shape(n_frames, 3, 3)
        The three box vectors of frame ``i`` are the rows
        ``value[i, 0, :]``, ``value[i, 1, :]``, and ``value[i, 2, :]``.
        None when the trajectory has no unitcell information.
    """
    lengths = self._unitcell_lengths
    angles = self._unitcell_angles
    if lengths is None or angles is None:
        return None

    # reconstruct box vectors from the stored (a, b, c, alpha, beta, gamma)
    v1, v2, v3 = lengths_and_angles_to_box_vectors(
        lengths[:, 0],  # a
        lengths[:, 1],  # b
        lengths[:, 2],  # c
        angles[:, 0],   # alpha
        angles[:, 1],   # beta
        angles[:, 2],   # gamma
    )
    return np.swapaxes(np.dstack((v1, v2, v3)), 1, 2)
@unitcell_vectors.setter
def unitcell_vectors(self, vectors):
    """Set the unit cell from its three box vectors per frame.

    Parameters
    ----------
    vectors : tuple of three arrays, each of shape=(n_frames, 3)
        Row ``vectors[i, k, :]`` is the k-th box vector of frame ``i``.
        ``None`` (or an all-zero array) clears the unitcell information.
    """
    # None or numerically-zero vectors mean "no unitcell"
    if vectors is None or np.all(np.abs(vectors) < 1e-15):
        self._unitcell_lengths = None
        self._unitcell_angles = None
        return

    if not len(vectors) == len(self):
        raise TypeError('unitcell_vectors must be the same length as '
                        'the trajectory. you provided %s' % str(vectors))

    a, b, c, alpha, beta, gamma = box_vectors_to_lengths_and_angles(
        vectors[:, 0, :], vectors[:, 1, :], vectors[:, 2, :])
    self._unitcell_lengths = np.vstack((a, b, c)).T
    self._unitcell_angles = np.vstack((alpha, beta, gamma)).T
@property
def unitcell_volumes(self):
    """Volumes of unit cell for each frame.

    Returns
    -------
    volumes : {np.ndarray, shape=(n_frames), None}
        Volume of each frame's unit cell in nanometers^3, or None when the
        Trajectory contains no unitcell information.
    """
    if self.unitcell_lengths is None:
        return None
    # determinant of the box-vector matrix gives the cell volume
    return np.array([np.linalg.det(box) for box in self.unitcell_vectors])
@property
def unitcell_lengths(self):
"""Lengths that define the shape of the unit cell in each frame.
Returns
-------
lengths : {np.ndarray, shape=(n_frames, 3), None}
Lengths of the unit cell in each frame, in nanometers, or None
if the Trajectory contains no unitcell information.
"""
return self._unitcell_lengths
@property
def unitcell_angles(self):
"""Angles that define the shape of the unit cell in each frame.
Returns
-------
lengths : np.ndarray, shape=(n_frames, 3)
The angles between the three unitcell vectors in each frame,
``alpha``, ``beta``, and ``gamma``. ``alpha' gives the angle
between vectors ``b`` and ``c``, ``beta`` gives the angle between
vectors ``c`` and ``a``, and ``gamma`` gives the angle between
vectors ``a`` and ``b``. The angles are in degrees.
"""
return self._unitcell_angles
@unitcell_lengths.setter
def unitcell_lengths(self, value):
    """Set the lengths that define the shape of the unit cell in each frame.

    Parameters
    ----------
    value : np.ndarray, shape=(n_frames, 3)
        The distances ``a``, ``b``, and ``c`` that define the shape of the
        unit cell in each frame, or None
    """
    # Validate dtype and shape up front. A bare (3,) vector is promoted
    # to (1, 3) by ``add_newaxis_on_deficient_ndim``; None passes through.
    validated = ensure_type(
        value, np.float32, 2, 'unitcell_lengths', can_be_none=True,
        shape=(len(self), 3), warn_on_cast=False,
        add_newaxis_on_deficient_ndim=True)
    self._unitcell_lengths = validated
@unitcell_angles.setter
def unitcell_angles(self, value):
    """Set the angles that define the shape of the unit cell in each frame.

    Parameters
    ----------
    value : np.ndarray, shape=(n_frames, 3)
        The angles ``alpha``, ``beta`` and ``gamma`` that define the
        shape of the unit cell in each frame. The angles should be in
        degrees.
    """
    # ensure_type validates dtype/shape, promotes a bare (3,) vector to
    # (1, 3), and lets None through unchanged.
    self._unitcell_angles = ensure_type(value, np.float32, 2,
        'unitcell_angles', can_be_none=True, shape=(len(self), 3),
        warn_on_cast=False, add_newaxis_on_deficient_ndim=True)
@property
def xyz(self):
    """Cartesian coordinates of each atom in each simulation frame.

    Returns
    -------
    xyz : np.ndarray, shape=(n_frames, n_atoms, 3)
        A three dimensional numpy array, with the cartesian coordinates
        of each atoms in each frame.
    """
    # Plain accessor; shape/dtype enforcement lives in the setter.
    return self._xyz
@xyz.setter
def xyz(self, value):
    """Set the cartesian coordinates of each atom in each simulation frame."""
    # With a topology attached, pin the atom dimension to its atom count;
    # otherwise accept any (n_frames, n_atoms, 3) array.
    if self.top is not None:
        expected_shape = (None, self.topology._numAtoms, 3)
    else:
        expected_shape = (None, None, 3)
    coords = ensure_type(value, np.float32, 3, 'xyz', shape=expected_shape,
                         warn_on_cast=False,
                         add_newaxis_on_deficient_ndim=True)
    self._xyz = coords
    # Any cached RMSD inner products refer to the old coordinates.
    self._rmsd_traces = None
def _string_summary_basic(self):
    """Basic summary of traj in string form."""
    if self._have_unitcell:
        unitcell_str = 'and unitcells'
    else:
        unitcell_str = 'without unitcells'
    # %-formatting kept so the rendered text is byte-identical.
    return "mdtraj.Trajectory with %d frames, %d atoms, %d residues, %s" % (
        self.n_frames, self.n_atoms, self.n_residues, unitcell_str)
def __len__(self):
    # len(traj) counts simulation frames, not atoms.
    return self.n_frames
def __add__(self, other):
    """Concatenate two trajectories along the time/frame axis."""
    # ``t1 + t2`` delegates to join(); see join() for topology checks.
    return self.join(other)
def __str__(self):
    # Human-readable one-line summary, e.g. frame/atom/residue counts.
    return "<%s>" % (self._string_summary_basic())
def __repr__(self):
    # Same summary as __str__, plus the object's id for debugging.
    return "<%s at 0x%02x>" % (self._string_summary_basic(), id(self))
# def describe(self):
# """Diagnostic summary statistics on the trajectory"""
# # What information do we want to display?
# # Goals: easy to figure out if a trajectory is blowing up or contains
# # bad data, easy to diagonose other problems. Generally give a
# # high-level description of the data in the trajectory.
# # Possibly show std. dev. of differnt coordinates in the trajectory
# # or maybe its RMSD drift or something?
# # Also, check for any NaNs or Infs in the data. Or other common issues
# # like that?
# # Note that pandas.DataFrame has a describe() method, which gives
# # min/max/mean/std.dev./percentiles of each column in a DataFrame.
# raise NotImplementedError()
def superpose(self, reference, frame=0, atom_indices=None, parallel=True):
    """Superpose each conformation in this trajectory upon a reference.

    Modifies ``self.xyz`` in place (least-squares alignment).

    Parameters
    ----------
    reference : md.Trajectory
        Align self to a particular frame in `reference`
    frame : int
        The index of the conformation in `reference` to align to.
    atom_indices : array_like, or None
        The indices of the atoms to superpose. If not
        supplied, all atoms will be used.
    parallel : bool
        Use OpenMP to run the superposition in parallel over multiple cores

    Returns
    -------
    self
    """
    if atom_indices is None:
        atom_indices = slice(None)
    n_frames = self.xyz.shape[0]
    # Two views: the atoms used to *compute* the rotation, and the full
    # coordinate set that the rotation is *applied* to.
    self_align_xyz = np.asarray(self.xyz[:, atom_indices, :], order='c')
    self_displace_xyz = np.asarray(self.xyz, order='c')
    ref_align_xyz = np.array(reference.xyz[frame, atom_indices, :], copy=True, order='c').reshape(1, -1, 3)
    # Center each frame of the alignment atoms at the origin.
    offset = np.mean(self_align_xyz, axis=1, dtype=np.float64).reshape(n_frames, 1, 3)
    self_align_xyz -= offset
    if self_align_xyz.ctypes.data != self_displace_xyz.ctypes.data:
        # when atom_indices is None, these two arrays alias the same memory
        # so we only need to do the centering once
        self_displace_xyz -= offset
    # Center the reference frame too.
    ref_offset = ref_align_xyz[0].astype('float64').mean(0)
    ref_align_xyz[0] -= ref_offset
    # Per-frame inner products ("g" values) required by the QCP RMSD code.
    self_g = np.einsum('ijk,ijk->i', self_align_xyz, self_align_xyz)
    ref_g = np.einsum('ijk,ijk->i', ref_align_xyz , ref_align_xyz)
    # Rotate self_displace_xyz in place to best fit the reference.
    _rmsd.superpose_atom_major(
        ref_align_xyz, self_align_xyz, ref_g, self_g, self_displace_xyz,
        0, parallel=parallel)
    # Translate back into the reference frame's coordinate origin.
    self_displace_xyz += ref_offset
    self.xyz = self_displace_xyz
    return self
def join(self, other, check_topology=True, discard_overlapping_frames=False):
    """Join two trajectories together along the time/frame axis.

    This method joins trajectories along the time axis, giving a new trajectory
    of length equal to the sum of the lengths of `self` and `other`.
    It can also be called by using `self + other`

    Parameters
    ----------
    other : Trajectory or list of Trajectory
        One or more trajectories to join with this one. These trajectories
        are *appended* to the end of this trajectory.
    check_topology : bool
        Ensure that the topology of `self` and `other` are identical before
        joining them. If false, the resulting trajectory will have the
        topology of `self`.
    discard_overlapping_frames : bool, optional
        If True, compare coordinates at trajectory edges to discard overlapping
        frames. Default: False.

    Returns
    -------
    traj : Trajectory
        A new trajectory containing the concatenated frames.

    Raises
    ------
    TypeError
        If `other` is not a Trajectory or a list of Trajectory.
    ValueError
        If atom counts, topologies, or unitcell presence disagree.

    See Also
    --------
    stack : join two trajectories along the atom axis
    """
    if isinstance(other, Trajectory):
        other = [other]
    if isinstance(other, list):
        if not all(isinstance(o, Trajectory) for o in other):
            raise TypeError('You can only join Trajectory instances')
        if not all(self.n_atoms == o.n_atoms for o in other):
            raise ValueError('Number of atoms in self (%d) is not equal '
                             'to number of atoms in other' % (self.n_atoms))
        if check_topology and not all(self.topology == o.topology for o in other):
            raise ValueError('The topologies of the Trajectories are not the same')
        if not all(self._have_unitcell == o._have_unitcell for o in other):
            raise ValueError('Mixing trajectories with and without unitcell')
    else:
        # BUGFIX: the format spec was '%d', which raises its own TypeError
        # when fed type(other); '%s' renders the offending type correctly.
        raise TypeError('`other` must be a list of Trajectory. You supplied %s' % type(other))
    # list containing all of the trajs to merge, including self
    trajectories = [self] + other
    if discard_overlapping_frames:
        for i in range(len(trajectories)-1):
            # last frame of trajectory i
            x0 = trajectories[i].xyz[-1]
            # first frame of trajectory i+1
            x1 = trajectories[i + 1].xyz[0]
            # check that all atoms are within 2e-3 nm
            # (this is kind of arbitrary)
            if np.all(np.abs(x1 - x0) < 2e-3):
                trajectories[i] = trajectories[i][:-1]
    xyz = np.concatenate([t.xyz for t in trajectories])
    time = np.concatenate([t.time for t in trajectories])
    angles = lengths = None
    if self._have_unitcell:
        angles = np.concatenate([t.unitcell_angles for t in trajectories])
        lengths = np.concatenate([t.unitcell_lengths for t in trajectories])
    # use this syntax so that if you subclass Trajectory,
    # the subclass's join() will return an instance of the subclass
    return self.__class__(xyz, deepcopy(self._topology), time=time,
                          unitcell_lengths=lengths, unitcell_angles=angles)
def stack(self, other):
    """Stack two trajectories along the atom axis.

    This method joins trajectories along the atom axis, giving a new trajectory
    with a number of atoms equal to the sum of the number of atoms in
    `self` and `other`.

    Notes
    -----
    The resulting trajectory will have the unitcell and time information
    of the left operand.

    Examples
    --------
    >>> t1 = md.load('traj1.h5')
    >>> t2 = md.load('traj2.h5')
    >>> # even when t2 contains no unitcell information
    >>> t2.unitcell_vectors = None
    >>> stacked = t1.stack(t2)
    >>> # the stacked trajectory inherits the unitcell information
    >>> # from the first trajectory
    >>> np.all(stacked.unitcell_vectors == t1.unitcell_vectors)
    True

    Parameters
    ----------
    other : Trajectory
        The other trajectory to join

    See Also
    --------
    join : join two trajectories along the time/frame axis.
    """
    if not isinstance(other, Trajectory):
        raise TypeError('You can only stack two Trajectory instances')
    if self.n_frames != other.n_frames:
        raise ValueError('Number of frames in self (%d) is not equal '
                         'to number of frames in other (%d)' % (self.n_frames, other.n_frames))
    if self.topology is not None:
        topology = self.topology.join(other.topology)
    else:
        topology = None
    # hstack concatenates along axis 1 (atoms) for (n_frames, n_atoms, 3).
    xyz = np.hstack((self.xyz, other.xyz))
    return self.__class__(xyz=xyz, topology=topology, unitcell_angles=self.unitcell_angles,
                          unitcell_lengths=self.unitcell_lengths, time=self.time)
def __getitem__(self, key):
    """Get a slice of this trajectory (``traj[key]`` == ``traj.slice(key)``)."""
    return self.slice(key)
def slice(self, key, copy=True):
    """Slice trajectory, by extracting one or more frames into a separate object.

    This method can also be called using index bracket notation, i.e
    `traj[1] == traj.slice(1)`

    Parameters
    ----------
    key : {int, np.ndarray, slice}
        The slice to take. Can be either an int, a list of ints, or a slice
        object.
    copy : bool, default=True
        Copy the arrays after slicing. If you set this to false, then if
        you modify a slice, you'll modify the original array since they
        point to the same data.

    Returns
    -------
    traj : Trajectory
        A new trajectory containing the selected frames.
    """
    xyz = self.xyz[key]
    time = self.time[key]
    unitcell_lengths, unitcell_angles = None, None
    if self.unitcell_angles is not None:
        unitcell_angles = self.unitcell_angles[key]
    if self.unitcell_lengths is not None:
        unitcell_lengths = self.unitcell_lengths[key]
    # BUGFIX: bind `topology` unconditionally. Previously it was only
    # assigned inside the `if copy:` branch, so slice(key, copy=False)
    # raised NameError at the constructor call below.
    topology = self._topology
    if copy:
        xyz = xyz.copy()
        time = time.copy()
        topology = deepcopy(self._topology)
        if unitcell_angles is not None:
            unitcell_angles = unitcell_angles.copy()
        if unitcell_lengths is not None:
            unitcell_lengths = unitcell_lengths.copy()
    newtraj = self.__class__(
        xyz, topology, time, unitcell_lengths=unitcell_lengths,
        unitcell_angles=unitcell_angles)
    if self._rmsd_traces is not None:
        # Keep precomputed RMSD traces in sync with the selected frames.
        newtraj._rmsd_traces = np.array(self._rmsd_traces[key],
                                        ndmin=1, copy=True)
    return newtraj
def __init__(self, xyz, topology, time=None, unitcell_lengths=None, unitcell_angles=None):
    """Construct a Trajectory from coordinates and (optionally) a topology,
    per-frame times, and unitcell lengths/angles."""
    # install the topology into the object first, so that when setting
    # the xyz, we can check that it lines up (e.g. n_atoms), with the topology
    self.topology = topology
    self.xyz = xyz
    # _rmsd_traces are the inner product of each centered conformation,
    # which are required for computing RMSD. Normally these values are
    # calculated on the fly in the cython code (rmsd/_rmsd.pyx), but
    # optionally, we enable the use of precomputed values which can speed
    # up the calculation (useful for clustering), but potentially be unsafe
    # if self._xyz is modified without a corresponding change to
    # self._rmsd_traces. This array is populated by
    # center_conformations, and no other methods should really touch it.
    self._rmsd_traces = None
    # box has no default, it'll just be none normally
    self.unitcell_lengths = unitcell_lengths
    self.unitcell_angles = unitcell_angles
    # time will take the default 1..N
    self._time_default_to_arange = (time is None)
    if time is None:
        time = np.arange(len(self.xyz))
    self.time = time
    if (topology is not None) and (topology._numAtoms != self.n_atoms):
        raise ValueError("Number of atoms in xyz (%s) and "
                         "in topology (%s) don't match" % (self.n_atoms, topology._numAtoms))
def openmm_positions(self, frame):
    """OpenMM-compatable positions of a single frame.

    Examples
    --------
    >>> t = md.load('trajectory.h5')
    >>> context.setPositions(t.openmm_positions(0))

    Parameters
    ----------
    frame : int
        The index of frame of the trajectory that you wish to extract

    Returns
    -------
    positions : list
        The cartesian coordinates of specific trajectory frame, formatted
        for input to OpenMM
    """
    from simtk.openmm import Vec3
    from simtk.unit import nanometer
    # One Vec3 per atom from the requested frame; multiplying the list
    # by `nanometer` attaches OpenMM units to the whole sequence.
    positions = [Vec3(x, y, z) for x, y, z in self.xyz[frame]]
    return positions * nanometer
def openmm_boxes(self, frame):
    """OpenMM-compatable box vectors of a single frame.

    Examples
    --------
    >>> t = md.load('trajectory.h5')
    >>> context.setPeriodicBoxVectors(t.openmm_boxes(0))

    Parameters
    ----------
    frame : int
        Return box for this single frame.

    Returns
    -------
    box : tuple
        The periodic box vectors for this frame, formatted for input to
        OpenMM.
    """
    from simtk.openmm import Vec3
    from simtk.unit import nanometer
    vectors = self.unitcell_vectors[frame]
    if vectors is None:
        raise ValueError("this trajectory does not contain box size information")
    # Unpack the (3, 3) frame into three row vectors and attach units.
    v1, v2, v3 = vectors
    return (Vec3(*v1), Vec3(*v2), Vec3(*v3)) * nanometer
@staticmethod
# im not really sure if the load function should be just a function or a method on the class
# so effectively, lets make it both?
def load(filenames, **kwargs):
    """Load a trajectory from disk.

    Parameters
    ----------
    filenames : {str, [str]}
        Either a string or list of strings

    Other Parameters
    ----------------
    As requested by the various load functions -- it depends on the extension
    """
    # Delegates to the module-level load() dispatcher.
    return load(filenames, **kwargs)
def _savers(self):
    """Return a dictionary mapping extensions to the appropriate
    format-specific save function. Used by save() to dispatch on the
    filename extension."""
    return {'.xtc': self.save_xtc,
            '.trr': self.save_trr,
            '.pdb': self.save_pdb,
            '.pdb.gz': self.save_pdb,
            '.dcd': self.save_dcd,
            '.h5': self.save_hdf5,
            '.binpos': self.save_binpos,
            '.nc': self.save_netcdf,
            '.netcdf': self.save_netcdf,
            '.ncrst' : self.save_netcdfrst,
            '.crd': self.save_mdcrd,
            '.mdcrd': self.save_mdcrd,
            '.ncdf': self.save_netcdf,
            '.lh5': self.save_lh5,
            '.lammpstrj': self.save_lammpstrj,
            '.xyz': self.save_xyz,
            '.xyz.gz': self.save_xyz,
            '.gro': self.save_gro,
            '.rst7' : self.save_amberrst7,
            }
def save(self, filename, **kwargs):
    """Save trajectory to disk, in a format determined by the filename extension.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory. The extension will
        be parsed and will control the format.

    Other Parameters
    ----------------
    lossy : bool
        For .h5 or .lh5, whether or not to use compression.
    no_models: bool
        For .pdb. TODO: Document this?
    force_overwrite : bool
        For .binpos, .xtc, .dcd. If `filename` already exists, overwrite it.
    """
    extension = _get_extension(filename)
    savers = self._savers()
    # Dispatch on the extension; unknown extensions are a user error.
    if extension not in savers:
        raise IOError('Sorry, no saver for filename=%s (extension=%s) '
                      'was found. I can only save files '
                      'with extensions in %s' % (filename, extension, savers.keys()))
    return savers[extension](filename, **kwargs)
def save_hdf5(self, filename, force_overwrite=True):
    """Save trajectory to MDTraj HDF5 format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    src_unit = Trajectory._distance_unit
    with HDF5TrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        # Convert distances into the file format's native unit on the way out.
        coordinates = in_units_of(self.xyz, src_unit, f.distance_unit)
        cell_lengths = in_units_of(self.unitcell_lengths, src_unit, f.distance_unit)
        f.write(coordinates=coordinates,
                time=self.time,
                cell_lengths=cell_lengths,
                cell_angles=self.unitcell_angles)
        f.topology = self.topology
def save_lammpstrj(self, filename, force_overwrite=True):
    """Save trajectory to LAMMPS custom dump format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    src_unit = Trajectory._distance_unit
    with LAMMPSTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        # Convert distances into the file format's native unit on the way out.
        xyz = in_units_of(self.xyz, src_unit, f.distance_unit)
        cell_lengths = in_units_of(self.unitcell_lengths, src_unit, f.distance_unit)
        f.write(xyz=xyz, cell_lengths=cell_lengths,
                cell_angles=self.unitcell_angles)
def save_xyz(self, filename, force_overwrite=True):
    """Save trajectory to .xyz format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    # The .xyz format labels each row with an atom name from the topology.
    atom_names = [a.name for a in self.top.atoms]
    with XYZTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        f.write(xyz=in_units_of(self.xyz, Trajectory._distance_unit, f.distance_unit),
                types=atom_names)
def save_pdb(self, filename, force_overwrite=True, bfactors=None):
    """Save trajectory to RCSB PDB format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    bfactors : array_like, default=None, shape=(n_frames, n_atoms) or (n_atoms,)
        Save bfactors with pdb file. If the array is two dimensional it should
        contain a bfactor for each atom in each frame of the trajectory.
        Otherwise, the same bfactor will be saved in each frame.

    Raises
    ------
    ValueError
        If `bfactors` has the wrong shape.
    """
    self._check_valid_unitcell()
    # Normalize bfactors to one sequence per frame (idiom fix: use
    # `is not None`, not `not ... is None`).
    if bfactors is not None:
        if len(np.array(bfactors).shape) == 1:
            if len(bfactors) != self.n_atoms:
                raise ValueError("bfactors %s should be shaped as (n_frames, n_atoms) or (n_atoms,)" % str(np.array(bfactors).shape))
            # A single per-atom vector is replicated across every frame.
            bfactors = [bfactors] * self.n_frames
        else:
            if np.array(bfactors).shape != (self.n_frames, self.n_atoms):
                raise ValueError("bfactors %s should be shaped as (n_frames, n_atoms) or (n_atoms,)" % str(np.array(bfactors).shape))
    else:
        bfactors = [None] * self.n_frames
    with PDBTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        for i in xrange(self.n_frames):
            if self._have_unitcell:
                f.write(in_units_of(self._xyz[i], Trajectory._distance_unit, f.distance_unit),
                        self.topology,
                        modelIndex=i,
                        bfactors=bfactors[i],
                        unitcell_lengths=in_units_of(self.unitcell_lengths[i], Trajectory._distance_unit, f.distance_unit),
                        unitcell_angles=self.unitcell_angles[i])
            else:
                f.write(in_units_of(self._xyz[i], Trajectory._distance_unit, f.distance_unit),
                        self.topology,
                        modelIndex=i,
                        bfactors=bfactors[i])
def save_xtc(self, filename, force_overwrite=True):
    """Save trajectory to Gromacs XTC format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    src_unit = Trajectory._distance_unit
    with XTCTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        # XTC stores full box vectors rather than lengths/angles.
        xyz = in_units_of(self.xyz, src_unit, f.distance_unit)
        box = in_units_of(self.unitcell_vectors, src_unit, f.distance_unit)
        f.write(xyz=xyz, time=self.time, box=box)
def save_trr(self, filename, force_overwrite=True):
    """Save trajectory to Gromacs TRR format.

    Notes
    -----
    Only the xyz coordinates and the time are saved, the velocities
    and forces in the trr will be zeros

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    src_unit = Trajectory._distance_unit
    with TRRTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        # TRR stores full box vectors rather than lengths/angles.
        xyz = in_units_of(self.xyz, src_unit, f.distance_unit)
        box = in_units_of(self.unitcell_vectors, src_unit, f.distance_unit)
        f.write(xyz=xyz, time=self.time, box=box)
def save_dcd(self, filename, force_overwrite=True):
    """Save trajectory to CHARMM/NAMD DCD format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filenames, if its already there
    """
    self._check_valid_unitcell()
    src_unit = Trajectory._distance_unit
    with DCDTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        xyz = in_units_of(self.xyz, src_unit, f.distance_unit)
        cell_lengths = in_units_of(self.unitcell_lengths, src_unit, f.distance_unit)
        f.write(xyz=xyz, cell_lengths=cell_lengths,
                cell_angles=self.unitcell_angles)
def save_dtr(self, filename, force_overwrite=True):
    """Save trajectory to DESMOND DTR format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filenames, if its already there
    """
    self._check_valid_unitcell()
    src_unit = Trajectory._distance_unit
    with DTRTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        xyz = in_units_of(self.xyz, src_unit, f.distance_unit)
        cell_lengths = in_units_of(self.unitcell_lengths, src_unit, f.distance_unit)
        f.write(xyz=xyz, cell_lengths=cell_lengths,
                cell_angles=self.unitcell_angles,
                times=self.time)
def save_binpos(self, filename, force_overwrite=True):
    """Save trajectory to AMBER BINPOS format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    # BINPOS stores coordinates only -- no time or box information.
    with BINPOSTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        xyz = in_units_of(self.xyz, Trajectory._distance_unit, f.distance_unit)
        f.write(xyz)
def save_mdcrd(self, filename, force_overwrite=True):
    """Save trajectory to AMBER mdcrd format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there
    """
    self._check_valid_unitcell()
    # mdcrd can only represent rectilinear (all-90-degree) boxes.
    if self._have_unitcell and not np.all(self.unitcell_angles == 90):
        raise ValueError('Only rectilinear boxes can be saved to mdcrd files')
    src_unit = Trajectory._distance_unit
    with MDCRDTrajectoryFile(filename, mode='w', force_overwrite=force_overwrite) as f:
        xyz = in_units_of(self.xyz, src_unit, f.distance_unit)
        cell_lengths = in_units_of(self.unitcell_lengths, src_unit, f.distance_unit)
        f.write(xyz=xyz, cell_lengths=cell_lengths)
def save_netcdf(self, filename, force_overwrite=True):
    """Save trajectory in AMBER NetCDF format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if it's already there
    """
    self._check_valid_unitcell()
    src_unit = Trajectory._distance_unit
    with NetCDFTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        coordinates = in_units_of(self._xyz, src_unit,
                                  NetCDFTrajectoryFile.distance_unit)
        cell_lengths = in_units_of(self.unitcell_lengths, src_unit,
                                   f.distance_unit)
        f.write(coordinates=coordinates, time=self.time,
                cell_lengths=cell_lengths, cell_angles=self.unitcell_angles)
def save_netcdfrst(self, filename, force_overwrite=True):
    """Save trajectory in AMBER NetCDF restart format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the restart
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if it's already there

    Notes
    -----
    NetCDF restart files can only store a single frame. If only one frame
    exists, "filename" will be written. Otherwise, "filename.#" will be
    written, where # is a zero-padded number from 1 to the total number of
    frames in the trajectory
    """
    self._check_valid_unitcell()
    # The unit conversions do not depend on the output file, so hoist them
    # out of the per-frame loop (previously recomputed every iteration).
    coordinates = in_units_of(self._xyz, Trajectory._distance_unit,
                              AmberNetCDFRestartFile.distance_unit)
    lengths = in_units_of(self.unitcell_lengths, Trajectory._distance_unit,
                          AmberNetCDFRestartFile.distance_unit)
    if self.n_frames == 1:
        with AmberNetCDFRestartFile(filename, 'w', force_overwrite=force_overwrite) as f:
            f.write(coordinates=coordinates, time=self.time[0],
                    cell_lengths=lengths, cell_angles=self.unitcell_angles)
    else:
        # One restart file per frame, with a zero-padded 1-based suffix.
        fmt = '%s.%%0%dd' % (filename, len(str(self.n_frames)))
        for i in xrange(self.n_frames):
            with AmberNetCDFRestartFile(fmt % (i+1), 'w', force_overwrite=force_overwrite) as f:
                f.write(coordinates=coordinates[i], time=self.time[i],
                        cell_lengths=lengths[i], cell_angles=self.unitcell_angles[i])
def save_amberrst7(self, filename, force_overwrite=True):
    """Save trajectory in AMBER ASCII restart format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the restart
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if it's already there

    Notes
    -----
    Amber restart files can only store a single frame. If only one frame
    exists, "filename" will be written. Otherwise, "filename.#" will be
    written, where # is a zero-padded number from 1 to the total number of
    frames in the trajectory
    """
    self._check_valid_unitcell()
    # Conversions are loop-invariant; do them once.
    coordinates = in_units_of(self._xyz, Trajectory._distance_unit,
                              AmberRestartFile.distance_unit)
    lengths = in_units_of(self.unitcell_lengths, Trajectory._distance_unit,
                          AmberRestartFile.distance_unit)
    if self.n_frames == 1:
        with AmberRestartFile(filename, 'w', force_overwrite=force_overwrite) as f:
            f.write(coordinates=coordinates, time=self.time[0],
                    cell_lengths=lengths, cell_angles=self.unitcell_angles)
    else:
        fmt = '%s.%%0%dd' % (filename, len(str(self.n_frames)))
        for i in xrange(self.n_frames):
            with AmberRestartFile(fmt % (i+1), 'w', force_overwrite=force_overwrite) as f:
                # BUGFIX: each per-frame restart previously recorded
                # self.time[0]; use the frame's own time (consistent with
                # save_netcdfrst).
                f.write(coordinates=coordinates[i], time=self.time[i],
                        cell_lengths=lengths[i], cell_angles=self.unitcell_angles[i])
def save_lh5(self, filename, force_overwrite=True):
    """Save trajectory in deprecated MSMBuilder2 LH5 (lossy HDF5) format.

    Parameters
    ----------
    filename : str
        filesystem path in which to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at filename, if its already there.
        (New parameter; the default matches the previously hard-coded
        behavior, so existing callers are unaffected. This also makes
        save_lh5 accept the force_overwrite kwarg that save() documents
        for the other formats.)
    """
    with LH5TrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        f.write(coordinates=self.xyz)
        f.topology = self.topology
def save_gro(self, filename, force_overwrite=True, precision=3):
    """Save trajectory in Gromacs .gro format.

    Parameters
    ----------
    filename : str
        Path to save the trajectory
    force_overwrite : bool, default=True
        Overwrite anything that exists at that filename if it exists
    precision : int, default=3
        The number of decimal places to use for coordinates in GRO file
    """
    self._check_valid_unitcell()
    with GroTrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f:
        # .gro writes topology, times, and full box vectors alongside xyz.
        f.write(self.xyz, self.topology, self.time,
                self.unitcell_vectors, precision=precision)
def center_coordinates(self, mass_weighted=False):
    """Center each trajectory frame at the origin (0,0,0).

    This method acts inplace on the trajectory. The centering can
    be either uniformly weighted (mass_weighted=False) or weighted by
    the mass of each atom (mass_weighted=True).

    Parameters
    ----------
    mass_weighted : bool, optional (default = False)
        If True, weight atoms by mass when removing COM.

    Returns
    -------
    self
    """
    if mass_weighted and self.top is not None:
        # Subtract the per-frame center of mass (requires a topology
        # for the atomic masses).
        self.xyz -= distance.compute_center_of_mass(self)[:, np.newaxis, :]
    else:
        # The cython centering routine also returns the per-frame inner
        # products, which are cached to speed up later RMSD computations.
        self._rmsd_traces = _rmsd._center_inplace_atom_major(self._xyz)
    return self
@deprecated('restrict_atoms was replaced by atom_slice and will be removed in 2.0')
def restrict_atoms(self, atom_indices, inplace=True):
    """Retain only a subset of the atoms in a trajectory.

    Deletes atoms not in `atom_indices`, and re-indexes those that remain

    Parameters
    ----------
    atom_indices : array-like, dtype=int, shape=(n_atoms)
        List of atom indices to keep.
    inplace : bool, default=True
        If ``True``, the operation is done inplace, modifying ``self``.
        Otherwise, a copy is returned with the restricted atoms, and
        ``self`` is not modified.

    Returns
    -------
    traj : md.Trajectory
        The return value is either ``self``, or the new trajectory,
        depending on the value of ``inplace``.
    """
    # Deprecated alias: note the different `inplace` default from atom_slice.
    return self.atom_slice(atom_indices, inplace=inplace)
def atom_slice(self, atom_indices, inplace=False):
    """Create a new trajectory from a subset of atoms.

    Parameters
    ----------
    atom_indices : array-like, dtype=int, shape=(n_atoms)
        List of indices of atoms to retain in the new trajectory.
    inplace : bool, default=False
        If ``True``, the operation is done inplace, modifying ``self``.
        Otherwise, a copy is returned with the sliced atoms, and
        ``self`` is not modified.

    Returns
    -------
    traj : md.Trajectory
        The return value is either ``self``, or the new trajectory,
        depending on the value of ``inplace``.

    See Also
    --------
    stack : stack multiple trajectories along the atom axis
    """
    # C-contiguous copy of just the selected atoms.
    xyz = np.array(self.xyz[:, atom_indices], order='C')
    topology = None
    if self._topology is not None:
        topology = self._topology.subset(atom_indices)
    if inplace:
        if self._topology is not None:
            self._topology = topology
        self._xyz = xyz
        return self
    # Build a fully independent trajectory: copy time and any unitcell data.
    unitcell_lengths = unitcell_angles = None
    if self._have_unitcell:
        unitcell_lengths = self._unitcell_lengths.copy()
        unitcell_angles = self._unitcell_angles.copy()
    return Trajectory(xyz=xyz, topology=topology, time=self._time.copy(),
                      unitcell_lengths=unitcell_lengths,
                      unitcell_angles=unitcell_angles)
def remove_solvent(self, exclude=None, inplace=False):
    """Create a new trajectory without solvent atoms.

    Parameters
    ----------
    exclude : array-like, dtype=str, shape=(n_solvent_types)
        List of solvent residue names to retain in the new trajectory.
    inplace : bool, default=False
        The return value is either ``self``, or the new trajectory,
        depending on the value of ``inplace``.

    Returns
    -------
    traj : md.Trajectory
        The return value is either ``self``, or the new trajectory,
        depending on the value of ``inplace``.

    Raises
    ------
    TypeError
        If `exclude` is a bare string or not iterable.
    ValueError
        If an entry of `exclude` is not a known solvent type.
    """
    solvent_types = list(_SOLVENT_TYPES)
    if exclude is not None:
        if isinstance(exclude, str):
            raise TypeError('exclude must be array-like')
        if not isinstance(exclude, Iterable):
            raise TypeError('exclude is not iterable')
        # Renamed loop variable (was `type`, shadowing the builtin).
        for residue_name in exclude:
            if residue_name not in solvent_types:
                # BUGFIX: the message was missing the leading space,
                # producing e.g. "HOHis not a valid solvent type".
                raise ValueError(residue_name + ' is not a valid solvent type')
            solvent_types.remove(residue_name)
    atom_indices = [atom.index for atom in self.topology.atoms if
                    atom.residue.name not in solvent_types]
    return self.atom_slice(atom_indices, inplace=inplace)
def _check_valid_unitcell(self):
    """Do some sanity checking on self.unitcell_lengths and
    self.unitcell_angles: both or neither must be present, and neither
    may contain negative values."""
    lengths = self.unitcell_lengths
    angles = self.unitcell_angles
    if lengths is not None and angles is None:
        raise AttributeError('unitcell length data exists, but no angles')
    if lengths is None and angles is not None:
        raise AttributeError('unitcell angles data exists, but no lengths')
    if lengths is not None and np.any(lengths < 0):
        raise ValueError('unitcell length < 0')
    if angles is not None and np.any(angles < 0):
        raise ValueError('unitcell angle < 0')
@property
def _have_unitcell(self):
    # True only when BOTH lengths and angles are present (the setters
    # guarantee matching shapes when set).
    return self._unitcell_lengths is not None and self._unitcell_angles is not None
|
hainm/mdtraj
|
mdtraj/core/trajectory.py
|
Python
|
lgpl-2.1
| 66,597
|
[
"Amber",
"CHARMM",
"Desmond",
"Gromacs",
"LAMMPS",
"MDTraj",
"NAMD",
"NetCDF",
"OpenMM"
] |
c335e22901ed42b01822a14f396066f577b15e150446f9d5e54bd39c4d9d798f
|
import os
import os.path as osp
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import sys
if os.getenv("READTHEDOCS") == "True":
    # ReadTheDocs checks out with --depth 50, which leaves a shallow clone.
    # versioneer derives the version from git history, so unshallow first.
    import subprocess as spr
    rootdir = osp.dirname(__file__)
    spr.call(["git", "-C", rootdir, "fetch", "--unshallow", "origin"])
import versioneer
def readme():
    """Return the contents of README.rst for use as the long description."""
    # Pin the encoding: without it, open() falls back to the locale's
    # preferred encoding, which makes builds environment-dependent.
    with open('README.rst', encoding='utf-8') as f:
        return f.read()
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to pytest."""

    # Exposes ``--pytest-args`` so arbitrary arguments can be forwarded.
    user_options = [('pytest-args=', 'a', "Arguments to pass to pytest")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = ''

    def run_tests(self):
        import shlex
        # import here, because outside the eggs aren't loaded yet
        import pytest
        errno = pytest.main(shlex.split(self.pytest_args))
        sys.exit(errno)
# Register the pytest-backed `test` command alongside versioneer's commands.
cmdclass = versioneer.get_cmdclass({'test': PyTest})
setup(name='psy-simple',
      version=versioneer.get_version(),
      description='Psyplot plugin for simple visualization tasks',
      long_description=readme(),
      long_description_content_type="text/x-rst",
      classifiers=[
          'Development Status :: 5 - Production/Stable',
          'Intended Audience :: Developers',
          'Topic :: Scientific/Engineering :: Visualization',
          'Topic :: Scientific/Engineering :: GIS',
          'Topic :: Scientific/Engineering',
          'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3 :: Only',
          'Programming Language :: Python :: 3.7',
          'Programming Language :: Python :: 3.8',
          'Operating System :: OS Independent',
      ],
      keywords='visualization netcdf raster cartopy earth-sciences psyplot',
      url='https://github.com/psyplot/psy-simple',
      author='Philipp S. Sommer',
      author_email='philipp.sommer@hzg.de',
      license="GPLv2",
      packages=find_packages(exclude=['docs', 'tests*', 'examples']),
      install_requires=[
          'psyplot>=1.3.0',
          'matplotlib>=2.0',
      ],
      # NOTE(review): package_data paths are normally relative to the
      # package directory, so the leading 'psy_simple' component here looks
      # doubled -- confirm the icon files are actually picked up in sdists.
      package_data={'psy_simple': [
          osp.join('psy_simple', 'widgets', 'icons', '*.png'),
          osp.join('psy_simple', 'widgets', 'icons', 'cmaps', '*.png'),
      ]},
      project_urls={
          'Documentation': 'https://psyplot.readthedocs.io/projects/psy-simple',
          'Source': 'https://github.com/psyplot/psy-simple',
          'Tracker': 'https://github.com/psyplot/psy-simple/issues',
      },
      python_requires=">=3.7",
      include_package_data=True,
      tests_require=['pytest'],
      cmdclass=cmdclass,
      entry_points={'psyplot': ['plugin=psy_simple.plugin',
                                'patches=psy_simple.plugin:patches']},
      zip_safe=False)
|
Chilipp/psy-simple
|
setup.py
|
Python
|
gpl-2.0
| 2,916
|
[
"NetCDF"
] |
db1db4e8ad675c7af4c8d6a02022bcefe4380d3226f8ec794e23f54b5ac55e25
|
import os
import shutil
import tempfile
import unittest
from collections import Iterable
from PIL import Image
import numpy
from mayavi.sources.vtk_data_source import VTKDataSource
from mayavi.modules.iso_surface import IsoSurface
from mayavi.modules.text3d import Text3D
from mayavi.tests import datasets
from mayavi import mlab
from simphony_mayavi.restore_scene import restore_scene
from simphony_mayavi.tests.testing_utils import is_mayavi_older
def finally_mlab_close(func):
    """Decorator ensuring any mayavi scene opened by *func* is closed.

    The wrapped callable runs inside try/finally so that
    ``mlab.close(all=True)`` executes even when *func* raises.
    """
    from functools import wraps

    # BUGFIX: without functools.wraps the wrapped test methods all report
    # as 'new_func' in unittest output and lose their docstrings.
    @wraps(func)
    def new_func(*args, **kwargs):
        try:
            func(*args, **kwargs)
        finally:
            mlab.close(all=True)
    return new_func
@unittest.skipIf(is_mayavi_older("4.4.4"),
                 "restore_scene is not supported by Mayavi < 4.4.4")
class TestRestoreScene(unittest.TestCase):
    """Tests for ``simphony_mayavi.restore_scene.restore_scene``.

    setUp builds a reference visualisation (two scenes, modules and a
    fixed camera view), saves it to a .mv2 file and snapshots the scene
    as a PNG; the tests restore from that file into fresh scenes and
    compare module types, camera view and rendered pixels.
    """

    @finally_mlab_close
    def setUp(self):
        # set up source
        sgrid = datasets.generateStructuredGrid()
        source = VTKDataSource(data=sgrid)
        self.engine = mlab.get_engine()
        # set up scene, first scene is empty
        # second scene has the settings we want to restore
        for _ in range(2):
            fig = mlab.figure()
            fig.scene.off_screen_rendering = True
        # add source
        self.engine.add_source(source)
        # add more modules
        self.engine.add_module(IsoSurface())
        self.engine.add_module(Text3D())
        self.modules = source.children[0].children
        # set camera
        self.view = (25., 14., 20., [0., 0., 2.5])
        mlab.view(*self.view)
        # save the visualisation
        self.temp_dir = tempfile.mkdtemp()
        self.filename = os.path.join(self.temp_dir, "test_vis.mv2")
        self.engine.save_visualization(self.filename)
        # save the scene as an image for comparison later
        self.ref_saved_filename = os.path.join(self.temp_dir, "ref_saved.png")
        mlab.savefig(self.ref_saved_filename)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    @finally_mlab_close
    def test_restore_scene(self):
        # create a new scene with new data source
        fig = mlab.figure()
        fig.scene.off_screen_rendering = True
        sgrid_2 = datasets.generateStructuredGrid()
        source = VTKDataSource(data=sgrid_2)
        self.engine.add_source(source)
        # when
        restore_scene(self.filename, scene_index=1)
        # then
        modules = source.children[0].children
        self.check_items_same_types(modules, self.modules)
        self.check_items_not_same_object(modules, self.modules)
        self.check_camera_view(mlab.view(), self.view)
        # save the scene to a file
        saved_filename = os.path.join(self.temp_dir, "test_restore.png")
        mlab.savefig(saved_filename)
        # compare the pixels to the desired one
        self.check_images_almost_identical(saved_filename,
                                           self.ref_saved_filename)

    @finally_mlab_close
    def test_pass_restore_scene_with_extra_sources(self):
        # create a new scene
        fig = mlab.figure()
        fig.scene.off_screen_rendering = True
        # add two data sources
        for _ in range(2):
            sgrid_2 = datasets.generateStructuredGrid()
            source = VTKDataSource(data=sgrid_2)
            self.engine.add_source(source)
        # when
        restore_scene(self.filename, scene_index=1)
        # then
        # only the first source is restored
        source = self.engine.current_scene.children[0]
        modules = source.children[0].children
        self.check_items_same_types(modules, self.modules)
        self.check_items_not_same_object(modules, self.modules)
        self.check_camera_view(mlab.view(), self.view)
        # save the scene to a file
        saved_filename = os.path.join(self.temp_dir, "test_extra.png")
        mlab.savefig(saved_filename)
        # compare the pixels to the desired one
        self.check_images_almost_identical(saved_filename,
                                           self.ref_saved_filename)

    @finally_mlab_close
    def test_pass_restore_scene_with_different_source(self):
        # create a new scene
        fig = mlab.figure()
        fig.scene.off_screen_rendering = True
        # add a data source of a different grid type
        sgrid_2 = datasets.generateUnstructuredGrid_mixed()
        source = VTKDataSource(data=sgrid_2)
        self.engine.add_source(source)
        # when
        restore_scene(self.filename, scene_index=1)
        # then
        modules = source.children[0].children
        # the data content is different
        # but the modules should be there anyway
        self.check_items_same_types(modules, self.modules)
        self.check_items_not_same_object(modules, self.modules)
        self.check_camera_view(mlab.view(), self.view)

    @finally_mlab_close
    def test_pass_restore_empty_scene(self):
        # create a new scene
        fig = mlab.figure()
        fig.scene.off_screen_rendering = True
        sgrid_2 = datasets.generateStructuredGrid()
        source = VTKDataSource(data=sgrid_2)
        self.engine.add_source(source)
        # when
        # first scene is empty
        restore_scene(self.filename, scene_index=0)
        # then
        # save the scene to a file
        saved_filename = os.path.join(self.temp_dir, "test_extra.png")
        mlab.savefig(saved_filename)
        # compare the pixels to the desired one
        self.check_images_empty(saved_filename)

    def check_camera_view(self, actual_view, desired_view):
        """Compare two mlab camera views element-wise."""
        for this_view, ref_view in zip(actual_view, desired_view):
            if isinstance(this_view, Iterable):
                # BUGFIX: assertItemsEqual is Python 2 only; it was renamed
                # to assertCountEqual in Python 3's unittest.
                self.assertCountEqual(this_view, ref_view)
            else:
                self.assertEqual(this_view, ref_view)

    def check_items_same_types(self, actual_items, desired_items):
        """Pairwise type-equality check of two sequences."""
        for actual, desired in zip(actual_items, desired_items):
            self.assertEqual(type(actual), type(desired))

    def check_items_not_same_object(self, actual_items, other_items):
        """Restored modules must be new objects, not the saved ones."""
        for actual, other in zip(actual_items, other_items):
            self.assertNotEqual(actual, other)

    def check_images_empty(self, image_file):
        '''Check if the image in `image_file` is blank'''
        image = numpy.array(Image.open(image_file))
        msg = "Image is not empty, min:{}, max:{}"
        # a blank render has (almost) no pixel-value spread
        self.assertAlmostEqual(image.min(), image.max(), places=3,
                               msg=msg.format(image.min(), image.max()))

    def check_images_almost_identical(self, actual_file, desired_file):
        ''' Check if two images are almost identical (within 5% error)'''
        actual = numpy.array(Image.open(actual_file))
        desired = numpy.array(Image.open(desired_file))
        err = float(numpy.abs(actual-desired).sum())/desired.sum()*100.
        message = "Actual image is not close to the desired, error: {}%"
        self.assertTrue(err < 5., message.format(err))
|
simphony/simphony-mayavi
|
simphony_mayavi/tests/test_restore_scene.py
|
Python
|
bsd-2-clause
| 7,067
|
[
"Mayavi"
] |
b8c48b5b44c1f3e43209a21a27ff40b4394924f0be309824b824f030d62aea95
|
"""
example script to show the detector parameter determination for area detectors
from images recorded in the primary beam and at known symmetric coplanar Bragg
reflections of a reference crystal
"""
import os
import numpy
import xrayutilities as xu
Si = xu.materials.Si
datadir = 'data'
specfile = "si_align.spec"
en = 15000 # eV
wl = xu.en2lam(en)
imgdir = os.path.join(datadir, "si_align_") # data path for CCD files
filetmp = "si_align_12_%04d.edf.gz"
qconv = xu.QConversion(['z+', 'y-'], ['z+', 'y-'], [1, 0, 0])
hxrd = xu.HXRD(Si.Q(1, 1, -2), Si.Q(1, 1, 1), wl=wl, qconv=qconv)
# manually selected images
s = xu.io.SPECFile(specfile, path=datadir)
imagenrs = []
for num in [61, 62, 63, 20, 21, 26, 27, 28]:
s[num].ReadData()
imagenrs = numpy.append(imagenrs, s[num].data['ccd_n'])
# avoid images which do not have to full beam on the detector as well as
# other which show signal due to cosmic radiation
avoid_images = [37, 57, 62, 63, 65, 87, 99, 106, 110, 111, 126, 130, 175,
181, 183, 185, 204, 206, 207, 208, 211, 212, 233, 237, 261,
275, 290]
images = []
ang1 = [] # outer detector angle
ang2 = [] # inner detector angle
sang = [] # sample rocking angle
hkls = [] # Miller indices of the reference reflections
def hotpixelkill(ccd):
    """
    Zero out known hot pixels of a CCD frame (modifies *ccd* in place).

    Extend the coordinate list below if further hot pixels show up.
    """
    for row, col in ((304, 97), (303, 96)):
        ccd[row, col] = 0
    return ccd
# read images and angular positions from the data file
# this might differ for data taken at different beamlines since
# they way how motor positions are stored is not always consistent
# NOTE(review): [::4] subsamples the list *before* numpy.sort; since the
# list comes from a set, its order is arbitrary -- confirm this is intended
for imgnr in numpy.sort(list(set(imagenrs) - set(avoid_images))[::4]):
    filename = os.path.join(imgdir, filetmp % imgnr)
    edf = xu.io.EDFFile(filename)
    ccd = hotpixelkill(edf.data)
    images.append(ccd)
    # motor angles: fixed indices into the space-separated 'motor_pos' header
    ang1.append(float(edf.header['motor_pos'].split()[4]))
    ang2.append(float(edf.header['motor_pos'].split()[3]))
    sang.append(float(edf.header['motor_pos'].split()[1]))
    # assign the reference reflection by image-number range:
    # primary beam (0,0,0) for late images, (3,3,3) / (5,5,5) otherwise
    if imgnr > 1293.:
        hkls.append((0, 0, 0))
    elif imgnr < 139:
        hkls.append((0, 0, numpy.sqrt(27)))  # (3,3,3))
    else:
        hkls.append((0, 0, numpy.sqrt(75)))  # (5,5,5))
# call the fit for the detector parameters.
# Detector arm rotations and primary beam direction need to be given
# in total 8 detector parameters + 2 additional parameters for the reference
# crystal orientation and the wavelength are fitted, however the 4 misalignment
# parameters of the detector and the 3 other parameters can be fixed.
# The fixable parameters are detector tilt azimuth, the detector tilt angle,
# the detector rotation around the primary beam, the outer angle offset, sample
# tilt, sample tilt azimuth and the x-ray wavelength
# Additionally if accurately known the detector pixel size can be given and
# fixed and instead the detector distance can be fitted.
param, eps = xu.analysis.area_detector_calib_hkl(
sang, ang1, ang2, images, hkls, hxrd, Si, ['z+', 'y-'], 'x+',
start=(None, None, 1.0, 45, 1.69, -0.55, -1.0, 1.3, 60., wl),
fix=(False, False, True, False, False, False, False, False, False, False),
plot=True)
# Following is an example of the output of the summary of the
# area_detector_calib_hkl function
# total time needed for fit: 624.51sec
# fitted parameters: epsilon: 9.9159e-08 (2,['Parameter convergence'])
# param:
# (cch1,cch2,pwidth1,pwidth2,tiltazimuth,tilt,detrot,outerangle_offset,
# sampletilt,stazimuth,wavelength)
# param: 367.12 349.27 6.8187e-05 6.8405e-05 131.4 2.87 -0.390 -0.061 1.201
# 318.44 0.8254
# please check the resulting data (consider setting plot=True)
# detector rotation axis / primary beam direction (given by user): ['z+', 'y-']
# / x+
# detector pixel directions / distance: z- y+ / 1
# detector initialization with:
# init_area('z-', 'y+', cch1=367.12, cch2=349.27, Nch1=516, Nch2=516,
# pwidth1=6.8187e-05, pwidth2=6.8405e-05, distance=1., detrot=-0.390,
# tiltazimuth=131.4, tilt=2.867)
# AND ALWAYS USE an (additional) OFFSET of -0.0611deg in the OUTER
# DETECTOR ANGLE!
|
dkriegner/xrayutilities
|
doc/source/example_xu_ccd_parameter_hkl.py
|
Python
|
gpl-2.0
| 4,109
|
[
"CRYSTAL"
] |
5c2d3868f4c5db588192848d12d82e79854257817cbfd49d03db4961107d83a3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, Extension
import codecs
import os
import sys
readme_note = """\
.. note::
For the latest source, discussion, etc, please visit the
`GitHub repository <https://github.com/spotify/annoy>`_\n\n
.. image:: https://img.shields.io/github/stars/spotify/annoy.svg
:target: https://github.com/spotify/annoy
"""
# prepend the note/badge block to the README for the PyPI long description
with codecs.open('README.rst', encoding='utf-8') as fobj:
    long_description = readme_note + fobj.read()

setup(name='annoy',
      version='1.5.1',
      description='Approximate Nearest Neighbors in C++/Python optimized for memory usage and loading/saving to disk.',
      packages=['annoy'],
      ext_modules=[
          Extension(
              'annoy.annoylib', ['src/annoymodule.cc'],
              depends=['src/annoylib.h'],
              # NOTE(review): '-march=native' ties the compiled extension to
              # the build host's CPU; binaries built this way may not run on
              # other machines -- confirm this is acceptable for distribution
              extra_compile_args=['-O3', '-march=native', '-ffast-math'],
          )
      ],
      long_description=long_description,
      author='Erik Bernhardsson',
      author_email='mail@erikbern.com',
      url='https://github.com/spotify/annoy',
      license='Apache License 2.0',
      classifiers=[
          'Development Status :: 5 - Production/Stable',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
      ],
      keywords='nns, approximate nearest neighbor search',
      setup_requires=['nose>=1.0']
      )
|
BeifeiZhou/annoy
|
setup.py
|
Python
|
apache-2.0
| 2,123
|
[
"VisIt"
] |
a84af307064c38c5bbab1a93494b071cfac1caefe1f42d03d6bf8fc5df2bef55
|
from __future__ import print_function
import queue
import cancat
import struct
from cancat.J1939db import *
from cancat import *
from cancat.vstruct.bitfield import *
# J1939 PDU-format (PF) bytes of interest
PF_RQST = 0xea
PF_TP_DT = 0xeb
PF_TP_CM = 0xec
PF_ADDRCLAIM = 0xee
PF_PROPRIETRY= 0xef
PF_KWP1 = 0xdb
PF_KWP2 = 0xda
PF_KWP3 = 0xce
PF_KWP4 = 0xcd
# TP.CM (transport protocol, connection management) control bytes
CM_RTS = 0x10
CM_CTS = 0x11
CM_EOM = 0x13
CM_ABORT = 0xff
CM_BAM = 0x20
# internal tags recording how a TP message was transferred
TP_BAM = 20
TP_DIRECT = 10
TP_DIRECT_BROKEN=9
class NAME(VBitField):
    """Bitfield for the 64-bit J1939 NAME, fields in wire order."""
    def __init__(self):
        VBitField.__init__(self)
        # field widths in bits, most-significant first
        self.arbaddrcap = v_bits(1)
        self.ind_group = v_bits(3)
        self.vehicle_system_instance = v_bits(4)
        self.vehicle_system = v_bits(7)
        self.reserved = v_bits(1)
        self.function = v_bits(8)
        self.function_instance = v_bits(5)
        self.ecu_instance = v_bits(3)
        self.mfg_code = v_bits(11)
        self.identity_number = v_bits(21)
    def minrepr(self):
        """Short one-line summary: identity number and manufacturer."""
        # mfg_lookup comes from the J1939db star-import
        mfgname = mfg_lookup.get(self.mfg_code)
        return "id: 0x%x mfg: %s" % (self.identity_number, mfgname)
def parseName(name):
    """Parse a raw 8-byte J1939 NAME into a NAME bitfield object."""
    parsed = NAME()
    # NAME arrives little-endian on the wire; reverse before MSB-first parse
    parsed.vsParse(name[::-1])
    return parsed
def reprExtMsgs(msgs):
    """Render a reassembled TP extended message as a one-line summary.

    Each stored frame starts with a 1-byte sequence index followed by up
    to 7 payload bytes; the payload bytes are hex-dumped, and any ASCII
    strings found in the payload are appended.
    """
    out = ['Ext Msg: %.2x->%.2x (%.2x%.2x%.2x) (len: 0x%x)' % (msgs['sa'], msgs['da'], msgs['pgn2'], msgs['pgn1'], msgs['pgn0'], msgs['totsize'])]
    for arbtup, msg in msgs.get('msgs'):
        # BUGFIX: bytes.encode('hex') is Python 2 only; use bytes.hex()
        out.append(msg[1:].hex())
    # BUGFIX: str.decode('hex') is Python 2 only; use bytes.fromhex()
    data = bytes.fromhex(''.join(out[1:]))
    strings = getAscii(data)
    if len(strings):
        return ' '.join(out) + " %r" % (strings,)
    return ' '.join(out)
def meldExtMsgs(msgs):
    """Concatenate TP data frames into one payload, stripping pad bytes.

    Each TP.DT frame carries a 1-byte sequence index followed by up to 7
    payload bytes; the final frame is padded with 0xff.  The padding is
    stripped only when everything past the advertised total size
    ('totsize') is 0xff.
    """
    # BUGFIX: frames are bytes under Python 3; the original joined with a
    # str separator and compared against a str pad, which raises TypeError.
    parts = [msg[1:] for arbtup, msg in msgs.get('msgs')]
    length = msgs.get('totsize')
    outval = b''.join(parts)
    if outval[length:] == b'\xff' * (len(outval) - length):
        outval = outval[:length]
    return outval
### renderers for specific PF numbers
def pf_c9(idx, ts, arbtup, data, j1939):
    """Render a Request2 (PF 0xc9) message.

    Bytes 0-2 carry the requested PGN; the low two bits of byte 3 select
    how the transfer PGN is to be used.
    """
    b4 = data[3]
    # BUGFIX: the format string needs three scalar args, not a single list
    req = "%.2x %.2x %.2x" % tuple(data[:3])
    # BUGFIX: result was bound to 'usexferpfn' but used as 'usexferpgn'
    usexferpgn = ('', 'Use_Transfer_PGN', 'undef', 'NA')[b4 & 3]
    return "Request2: %s %s" % (req, usexferpgn)
def pf_ea(idx, ts, arbtup, data, j1939):
    """Render a Request (PF 0xea) message: hex dump of the requested PGN."""
    (prio, edp, dp, pf, ps, sa) = arbtup
    # BUGFIX: bytes.encode('hex') is Python 2 only; use bytes.hex()
    return "Request: %s" % (data[:3].hex())
def pf_eb(idx, ts, arbtup, data, j1939):
    """Repr-enhancer for TP.DT (0xeb) data-transfer frames.

    Accumulates frames in the pretty-print reassembly buffer and, once
    'length' frames have arrived, emits the reassembled-message summary
    as an extra line.
    """
    (prio, edp, dp, pf, da, sa) = arbtup
    if len(data) < 1:
        return 'TP ERROR: NO DATA!'
    # first byte of every TP.DT frame is the sequence index
    tpidx = data[0]
    msgdata = 'TP.DT idx: %.x' % tpidx
    nextline = ''
    extmsgs = j1939.getExtMsgs(sa, da)
    extmsgs['msgs'].append((arbtup, data))
    # message complete: flush the buffer and render the whole payload
    if len(extmsgs['msgs']) >= extmsgs['length']:
        j1939.clearExtMsgs(sa, da)
        nextline = ' %3.3f: %s' % (extmsgs['ts'], reprExtMsgs(extmsgs))
        # remembered so _reprCanMsg can SPN-decode the completed message
        j1939._last_extmsgs = idx, extmsgs
    if j1939.skip_TPDT:
        # suppress per-frame lines, but still surface a completed message
        if not len(nextline):
            return cancat.DONT_PRINT_THIS_MESSAGE
        else:
            return (cancat.DONT_PRINT_THIS_MESSAGE, nextline)
    if len(extmsgs['msgs']) > extmsgs['length']:
        #print("ERROR: too many messages in Extended Message between %.2x -> %.2x\n\t%r" % (sa, da, extmsgs['msgs']))
        pass
    if len(nextline):
        return msgdata, nextline+'\n'
    return msgdata
def pf_ec(idx, ts, arbtup, data, j1939):
    """Repr-enhancer for TP.CM (0xec) connection-management frames.

    Dispatches on the control byte (RTS/CTS/EOM/BAM/Abort) and updates
    the pretty-print reassembly metadata for the (sa, da) pair.
    """
    def tp_cm_10(idx, ts, arbtup, data, j1939):
        # RTS: start of a direct (peer-to-peer) transfer
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, totsize, pktct, maxct,
         pgn2, pgn1, pgn0) = struct.unpack('<BHBBBBB', data)
        # check for old stuff
        prefix = ''
        extmsgs = j1939.getExtMsgs(sa, da)
        if len(extmsgs['msgs']):
            extmsgs['sa'] = sa
            extmsgs['da'] = da
            prefix = " new TP message, without closure...: \n\t%r\n" % reprExtMsgs(extmsgs)
            j1939.clearExtMsgs(sa, da)
        # store extended message information for other stuff...
        extmsgs = j1939.getExtMsgs(sa, da)
        extmsgs['sa'] = sa
        extmsgs['da'] = da
        extmsgs['ts'] = ts
        extmsgs['idx'] = idx
        extmsgs['pgn2'] = pgn2
        extmsgs['pgn1'] = pgn1
        extmsgs['pgn0'] = pgn0
        extmsgs['maxct'] = maxct
        extmsgs['length'] = pktct
        extmsgs['totsize'] = totsize
        extmsgs['type'] = TP_DIRECT
        extmsgs['adminmsgs'].append((arbtup, data))
        return prefix + 'TP.CM_RTS size:%.2x pktct:%.2x maxpkt:%.2x PGN: %.2x%.2x%.2x' % \
            (totsize, pktct, maxct, pgn2, pgn1, pgn0)

    def tp_cm_11(idx, ts, arbtup, data, j1939):
        # CTS: receiver grants the sender permission to continue
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, maxpkts, nextpkt, reserved,
         pgn2, pgn1, pgn0) = struct.unpack('<BBBHBBB', data)
        # store extended message information for other stuff...
        extmsgs = j1939.getExtMsgs(sa, da)
        extmsgs['adminmsgs'].append((arbtup, data))
        return 'TP.CM_CTS maxpkt:%.2x nxtpkt:%.2x PGN: %.2x%.2x%.2x' % \
            (maxpkts, nextpkt, pgn2, pgn1, pgn0)

    def tp_cm_13(idx, ts, arbtup, data, j1939):
        # EndOfMsgACK: receiver confirms complete receipt
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, totsize, pktct, maxct,
         pgn2, pgn1, pgn0) = struct.unpack('<BHBBBBB', data)
        # not sure what to do with this now that we've cleared buffers by this point...
        # for now, just drop it.
        #extmsgs = j1939.getExtMsgs(sa, da)
        #extmsgs['adminmsgs'].append((arbtup, data))
        # BUGFIX: the original also formatted an undefined 'msgdata' name,
        # which raised NameError whenever this handler was invoked.
        return 'TP.EndOfMsgACK PGN: %.2x%.2x%.2x' % \
            (pgn2, pgn1, pgn0)

    def tp_cm_20(idx, ts, arbtup, data, j1939):
        # BAM: start of a broadcast transfer (no flow control)
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, totsize, pktct, reserved,
         pgn2, pgn1, pgn0) = struct.unpack('<BHBBBBB', data)
        # check for old stuff
        prefix=''
        extmsgs = j1939.getExtMsgs(sa, da)
        if len(extmsgs['msgs']):
            extmsgs['sa'] = sa
            extmsgs['da'] = da
            prefix = " new TP message, without closure...: \n\t%r\n" % reprExtMsgs(extmsgs)
            j1939.clearExtMsgs(sa, da)
        # store extended message information for other stuff...
        extmsgs = j1939.getExtMsgs(sa, da)
        extmsgs['sa'] = sa
        extmsgs['da'] = da
        extmsgs['ts'] = ts
        extmsgs['idx'] = idx
        extmsgs['pgn2'] = pgn2
        extmsgs['pgn1'] = pgn1
        extmsgs['pgn0'] = pgn0
        extmsgs['maxct'] = reserved
        extmsgs['length'] = pktct
        extmsgs['totsize'] = totsize
        extmsgs['type'] = TP_BAM
        extmsgs['adminmsgs'].append((arbtup, data))
        return prefix + 'TP.CM_BAM-Broadcast size:%.2x pktct:%.2x PGN: %.2x%.2x%.2x' % \
            (totsize, pktct, pgn2, pgn1, pgn0)

    tp_cm_handlers = {
        CM_RTS: ('RTS', tp_cm_10),
        CM_CTS: ('CTS', tp_cm_11),
        CM_EOM: ('EndOfMsgACK', None),
        CM_BAM: ('BAM-Broadcast', tp_cm_20),
        CM_ABORT: ('Abort', None),
    }
    cb = data[0]
    htup = tp_cm_handlers.get(cb)
    if htup != None:
        subname, cb_handler = htup
        if cb_handler == None:
            if j1939.skip_TPDT:
                return cancat.DONT_PRINT_THIS_MESSAGE
            return 'TP.CM_%s' % subname
        newmsg = cb_handler(idx, ts, arbtup, data, j1939)
        if j1939.skip_TPDT:
            return cancat.DONT_PRINT_THIS_MESSAGE
        if newmsg == None:
            return 'TP.CM_%s' % subname
        return newmsg
    return 'TP.CM_%.2x' % cb
def pf_ee(idx, ts, arbtup, data, j1939):
    """Render an Address Claim (PF 0xee) message."""
    (prio, edp, dp, pf, ps, sa) = arbtup
    # source 0xfe with global destination 0xff signals a failed claim
    if ps == 255 and sa == 254:
        return 'CANNOT CLAIM ADDRESS'
    return "Address Claim: %s" % parseName(data).minrepr()
def pf_ef(idx, ts, arbtup, data, j1939):
    """Render a Proprietary A message; the data-page bit picks A1 vs A2."""
    dp = arbtup[2]
    return 'Proprietary A2' if dp else 'Proprietary A1'
def pf_ff(idx, ts, arbtup, data, j1939):
    """Render a Proprietary B (PF 0xff) message with a hex payload dump."""
    (prio, edp, dp, pf, ps, sa) = arbtup
    # BUGFIX: bytes.encode('hex') is Python 2 only; use bytes.hex()
    pgn = "%.2x :: %.2x:%.2x - %s" % (sa, pf, ps, data.hex())
    return "Proprietary B %s" % pgn
# repr dispatch: PDU-format (PF) byte -> (human-readable name, enhancer fn)
pgn_pfs = {
    0x93: ("Name Management", None),
    0xc9: ("Request2", pf_c9),
    0xca: ('Transfer', None),
    0xe8: ("ACK ", None),
    0xea: ("Request ", pf_ea),
    0xeb: ("TP.DT", pf_eb),
    0xec: ("TP.CM", pf_ec),
    0xee: ("Address Claim", pf_ee),
    0xef: ("Proprietary", pf_ef),
    #0xfe: ("Command Address", None),
    0xff: ("Proprietary B", pf_ff),
}
def parseArbid(arbid):
    """Decompose a 29-bit J1939 arbitration id.

    Returns (priority, EDP, DP, PDU format, PDU specific, source address).
    """
    # big-endian pack, then rely on Python 3 bytes yielding ints per byte
    prioPlus, pf, ps, sa = struct.pack('>I', arbid)
    return prioPlus >> 2, (prioPlus >> 1) & 1, prioPlus & 1, pf, ps, sa
def emitArbid(prio, edp, dp, pf, ps, sa):
    """Compose a 29-bit J1939 arbitration id from its component fields."""
    prioPlus = (prio << 2) | (edp << 1) | dp
    # assemble the four byte-groups big-endian, as on the CAN bus
    return int.from_bytes(bytes((prioPlus, pf, ps, sa)), 'big')
def ec_handler(j1939, idx, ts, arbtup, data):
    """Stack-side handler for TP.CM (0xec) connection-management frames.

    Unlike pf_ec (pretty-printing only), this maintains the *real*
    reassembly state (getRealExtMsgs) and transmits protocol responses
    when the destination address is one of ours.
    """
    def tp_cm_10(arbtup, data, j1939, idx, ts):
        # RTS: remote wants to start a direct transfer to us
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, totsize, pktct, maxct,
         pgn2, pgn1, pgn0) = struct.unpack('<BHBBBBB', data)
        # check for old stuff: flush any half-finished transfer as BROKEN
        extmsgs = j1939.getRealExtMsgs(sa, da)
        if len(extmsgs['msgs']):
            if j1939.verbose: print("clearing out old extmsgs: %r" % extmsgs)
            extmsgs['sa'] = sa
            extmsgs['da'] = da
            j1939.saveRealExtMsg(idx-1, ts, sa, da, (0,0,0), meldExtMsgs(extmsgs), TP_DIRECT_BROKEN, idx-1)
            j1939.clearRealExtMsgs(sa, da)
        # store extended message information for other stuff...
        extmsgs = j1939.getRealExtMsgs(sa, da)
        extmsgs['sa'] = sa
        extmsgs['da'] = da
        extmsgs['ts'] = ts
        extmsgs['idx'] = idx
        extmsgs['pgn2'] = pgn2
        extmsgs['pgn1'] = pgn1
        extmsgs['pgn0'] = pgn0
        extmsgs['maxct'] = maxct
        extmsgs['length'] = pktct
        extmsgs['totsize'] = totsize
        extmsgs['type'] = TP_DIRECT
        extmsgs['adminmsgs'].append((arbtup, data))
        if j1939.verbose:
            # NOTE(review): data.encode('hex') is Python 2 only -- this
            # verbose path raises on Python 3; should be data.hex()
            print("new TP_CM message: %r, %r\t\t%r" % (arbtup, data.encode('hex'), extmsgs))
            print('==1 %x %x->%x' % (pf, sa, da), extmsgs)
        # RESPOND! send CTS back to the originator if the message is for us
        if da in j1939.myIDs:
            response = struct.pack('<BBBHBBB', CM_CTS, pktct, 1, 0, pgn2, pgn1, pgn0)
            j1939.J1939xmit(0xec, sa, da, response, prio)

    def tp_cm_11(arbtup, data, j1939, idx, ts):
        # CTS: remote grants us permission to continue transmitting
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, maxpkts, nextpkt, reserved,
         pgn2, pgn1, pgn0) = struct.unpack('<BBBHBBB', data)
        if j1939.verbose: print('==3 %x %x->%x' % (pf, sa, da), j1939.getRealExtMsgs(sa, da))
        # store extended message information for other stuff...
        extmsgs = j1939.getRealExtMsgs(sa, da)
        extmsgs['adminmsgs'].append((arbtup, data))
        # TODO: somehow we trigger the continuation of transmission here

    def tp_cm_13(arbtup, data, j1939, idx, ts):
        # EndOfMsgACK: remote confirms complete receipt
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, totsize, pktct, maxct,
         pgn2, pgn1, pgn0) = struct.unpack('<BHBBBBB', data)
        # record the ack and clear the reassembly buffers
        extmsgs = j1939.getRealExtMsgs(sa, da)
        extmsgs['adminmsgs'].append((arbtup, data))
        j1939.clearRealExtMsgs(sa, da)
        # Coolio, they just confirmed receipt, we're done!
        # Probably need to trigger some mechanism telling the originator

    def tp_cm_20(arbtup, data, j1939, idx, ts):
        # BAM: broadcast transfer announcement (no flow control)
        (prio, edp, dp, pf, da, sa) = arbtup
        (cb, totsize, pktct, reserved,
         pgn2, pgn1, pgn0) = struct.unpack('<BHBBBBB', data)
        # check for old stuff: flush any half-finished transfer as BROKEN
        extmsgs = j1939.getRealExtMsgs(sa, da)
        if len(extmsgs['msgs']):
            extmsgs['sa'] = sa
            extmsgs['da'] = da
            j1939.saveRealExtMsg(idx-1, ts, sa, da, (0,0,0), meldExtMsgs(extmsgs), TP_DIRECT_BROKEN, idx-1)
            j1939.clearRealExtMsgs(sa, da)
        # store extended message information for other stuff...
        extmsgs = j1939.getRealExtMsgs(sa, da)
        extmsgs['sa'] = sa
        extmsgs['da'] = da
        extmsgs['ts'] = ts
        extmsgs['idx'] = idx
        extmsgs['pgn2'] = pgn2
        extmsgs['pgn1'] = pgn1
        extmsgs['pgn0'] = pgn0
        extmsgs['maxct'] = 0
        extmsgs['length'] = pktct
        extmsgs['totsize'] = totsize
        extmsgs['type'] = TP_BAM
        extmsgs['adminmsgs'].append((arbtup, data))

    # control byte -> (name, handler); None means "nothing to do on RX"
    tp_cm_handlers = {
        CM_RTS: ('RTS', tp_cm_10),
        CM_CTS: ('CTS', tp_cm_11),
        CM_EOM: ('EndOfMsgACK', tp_cm_13),
        CM_BAM: ('BAM-Broadcast', tp_cm_20),
        CM_ABORT: ('Abort', None),
    }
    cb = data[0]
    #print("ec: %.2x%.2x %.2x" % (arbtup[3], arbtup[4], cb))
    htup = tp_cm_handlers.get(cb)
    if htup != None:
        subname, cb_handler = htup
        if cb_handler != None:
            cb_handler(arbtup, data, j1939, idx, ts)
    da, sa = arbtup[-2:]
    if j1939.verbose: print('==2 ', j1939.getRealExtMsgs(sa, da))
def eb_handler(j1939, idx, ts, arbtup, data):
    """Stack-side handler for TP.DT (0xeb) data-transfer frames.

    Appends the frame to the real reassembly buffer; when the expected
    number of frames has arrived, the melded message is saved and -- if
    it was addressed to one of our ids -- an EndOfMsgACK is transmitted.
    """
    (prio, edp, dp, pf, da, sa) = arbtup
    if len(data) < 1:
        j1939.log('pf=0xeb: TP ERROR: NO DATA!')
        return
    extmsgs = j1939.getRealExtMsgs(sa, da)
    extmsgs['msgs'].append((arbtup, data))
    # transfer complete: persist the reassembled message
    if len(extmsgs['msgs']) >= extmsgs['length']:
        if j1939.verbose:
            print("eb_handler: saving: %r->%r %r %r" % (sa, da, len(extmsgs['msgs']), extmsgs['length']))
        tidx = extmsgs['idx']
        pgn2 = extmsgs['pgn2']
        pgn1 = extmsgs['pgn1']
        pgn0 = extmsgs['pgn0']
        mtype = extmsgs['type']
        j1939.saveRealExtMsg(tidx, ts, sa, da, (pgn2, pgn1, pgn0), meldExtMsgs(extmsgs), mtype, idx)
        j1939.clearRealExtMsgs(sa, da)
        # if this is the end of a message to *me*, reply accordingly
        if da in j1939.myIDs:
            # idx == -1 means we never saw the opening TP.CM_RTS frame
            if extmsgs['idx'] == -1:
                j1939.log("TP_DT_handler: missed beginning of message, not sending EOM: %r" % \
                        repr(extmsgs), 1)
                return
            j1939.log("tp_stack: sending EOM extmsgs: %r" % extmsgs, 1)
            pgn2 = extmsgs['pgn2']
            pgn1 = extmsgs['pgn1']
            pgn0 = extmsgs['pgn0']
            totsize = extmsgs['totsize']
            maxct = extmsgs['maxct']
            pktct = extmsgs['length']
            data = struct.pack('<BHBBBBB', CM_EOM, totsize, pktct, maxct, pgn2, pgn1, pgn0)
            j1939.J1939xmit(PF_TP_CM, sa, da, data, prio=prio)
# stack dispatch: PDU-format byte -> real (state-mutating) handler
pfhandlers = {
    PF_TP_CM : ec_handler,
    PF_TP_DT : eb_handler,
}
class TimeoutException(Exception):
    """Raised when a blocking J1939 receive operation times out."""
    pass
class J1939(cancat.CanInterface):
def __init__(self, port=None, baud=baud, verbose=False, cmdhandlers=None, comment='', load_filename=None, orig_iface=None):
self.myIDs = []
self.extMsgs = {}
self._RealExtMsgs = {}
self._RealExtMsgParts = {}
self.skip_TPDT = False
self._last_recv_idx = -1
self._repr_spns_by_pgn = {}
self._repr_all_spns = False
self._last_extmsgs = None
self._threads = []
CanInterface.__init__(self, port=port, baud=baud, verbose=verbose, cmdhandlers=cmdhandlers, comment=comment, load_filename=load_filename, orig_iface=orig_iface)
# setup the message handler event offload thread
self._mhe_queue = queue.Queue()
mhethread = threading.Thread(target=self._mhe_runner)
mhethread.setDaemon(True)
mhethread.start()
self._threads.append(mhethread)
self.register_handler(CMD_CAN_RECV, self._j1939_can_handler)
def _reprCanMsg(self, idx, ts, arbid, data, comment=None):
if comment == None:
comment = ''
arbtup = parseArbid(arbid)
prio, edp, dp, pf, ps, sa = arbtup
# give name priority to the Handler, then the manual name (this module), then J1939PGNdb
pfmeaning, handler = pgn_pfs.get(pf, ('', None))
# prepopulate these as they will be checked in a couple places
if pf < 0xec:
pgn = pf << 8
else:
pgn = (pf << 8) | ps
res = J1939PGNdb.get(pgn)
nextline = ''
if handler is not None:
enhanced = handler(idx, ts, arbtup, data, self)
if enhanced == cancat.DONT_PRINT_THIS_MESSAGE:
return enhanced
if enhanced is not None:
if type(enhanced) in (list, tuple) and len(enhanced):
pfmeaning = enhanced[0]
if len(enhanced) > 1:
nextline = '\n'.join(list(enhanced[1:]))
# if we get multiple lines and the first is DONT_PRINT_THIS_MESSAGE,
# then just return nextline
if pfmeaning == cancat.DONT_PRINT_THIS_MESSAGE:
return nextline
nextline = '\n' + nextline
else:
pfmeaning = enhanced
elif not len(pfmeaning):
if res is not None:
pfmeaning = res.get("Name")
# msg will be sent in for SPN parsing, if appropriate
msg = data
# hack to see if this message completed a long message)
#if self._last_extmsgs is not None: print(idx, self._last_extmsgs[0], self._last_extmsgs)
if self._last_extmsgs is not None and self._last_extmsgs[0] == idx:
#print(" DEBUG: SAME INDEX!", self._last_extmsgs)
midx, extmsgs = self._last_extmsgs
if extmsgs['totsize'] > 0:
msg = ''.join([msg for arbtup, msg in extmsgs['msgs']])
pgn1 = extmsgs['pgn1']
pgn0 = extmsgs['pgn0']
if pgn1 < 240:
pgn = pgn1 << 8
else:
pgn = (pgn1 << 8) | pgn0
res = J1939PGNdb.get(pgn)
#print("changing pgn: 0x%x" % pgn)
if (pgn < 0xeb00 or pgn > 0xecff) and res and (self._repr_all_spns or self._repr_spns_by_pgn.get(pgn)):
spnlines = None
spns = res.get("SPNs")
if spns is not None:
spnlines = reprSPNdata(spns, msg)
if spnlines is not None:
nextline = "\n\t" + '\n\t'.join(spnlines)
return "%.8d %8.3f pri/edp/dp: %d/%d/%d, PG: %.2x %.2x Source: %.2x Data: %-18s %s\t\t%s%s" % \
(idx, ts, prio, edp, dp, pf, ps, sa, data.encode('hex'), pfmeaning, comment, nextline)
def _getLocals(self, idx, ts, arbid, data):
prio, edp, dp, pf, ps, sa = parseArbid(arbid)
pgn = (pf<<8) | ps
lcls = {'idx':idx,
'ts':ts,
'arbid':arbid,
'data':data,
'priority':prio,
'edp':edp,
'dp':dp,
'pf':pf,
'ps':ps,
'sa':sa,
'pgn':pgn,
'da':ps,
'ge':ps,
}
return lcls
def _j1939_can_handler(self, message, none):
'''
this function is run for *Every* received CAN message... and is executed from the
XMIT/RECV thread. it *must* be fast!
'''
#print(repr(self), repr(cmd), repr(message))
arbid, data = self._splitCanMsg(message)
idx, ts = self._submitMessage(CMD_CAN_RECV, message)
arbtup = parseArbid(arbid)
prio, edp, dp, pf, ps, sa = arbtup
pfhandler = pfhandlers.get(pf)
if pfhandler is not None:
self.queueMessageHandlerEvent(pfhandler, idx, ts, arbtup, data)
#pfhandler(self, idx, ts, arbtup, data)
#print("submitted message: %r" % (message.encode('hex')))
def queueMessageHandlerEvent(self, pfhandler, idx, ts, arbtup, data):
self._mhe_queue.put((pfhandler, idx, ts, arbtup, data))
def _mhe_runner(self):
while self._config['go']:
worktup = None
try:
worktup = self._mhe_queue.get(1)
if worktup == None:
continue
pfhandler, idx, ts, arbtup, data = worktup
#if self.verbose: print("_mhe_runner: %r %r %r %r %r" % (worktup))
pfhandler(self, idx, ts, arbtup, data)
except Exception as e:
print("(j1939)MsgHandler ERROR: %r (%r)" % (e, worktup))
if self.verbose:
sys.excepthook(*sys.exc_info())
# functions to support the J1939TP Stack (real stuff, not just repr)
def getRealExtMsgs(self, sa, da):
'''
# functions to support the J1939TP Stack (real stuff, not just repr)
returns a message list for a given source and destination (sa, da)
if no list exists for this pairing, one is created and an empty list is returned
'''
#if self.verbose: print('getRealExtMsgs: %r' % (threading.current_thread()))
self.mquelock.acquire()
try:
msglists = self._RealExtMsgParts.get(sa)
if msglists == None:
if self.verbose: print(".get(sa) returned None. creating msglists")
msglists = {}
self._RealExtMsgParts[sa] = msglists
mlist = msglists.get(da)
if mlist == None:
if self.verbose: print("--mlist == None, creating for sa:%x da:%x" % (sa, da))
mlist = {'msgs':[],
'type' : -1,
'adminmsgs' : [],
'sa': -1,
'da': -1,
'ts': -1,
'idx': -1,
'pgn2': -1,
'pgn1': -1,
'pgn0': -1,
'maxct': -1,
'length': 0,
'totsize': 0,
}
msglists[da] = mlist
except Exception as e:
print("getRealExtMsgs: ERROR: %r" % e)
finally:
self.mquelock.release()
return mlist
def clearRealExtMsgs(self, sa, da=None):
'''
# functions to support the J1939TP Stack (real stuff, not just repr)
clear out extended messages metadata.
if da == None, this clears *all* message data for a given source address
returns whether the thing deleted exists previously
* if da == None, returns whether the sa had anything previously
* otherwise, if the list
'''
#if self.verbose: print('clearRealExtMsgs: %r' % (threading.current_thread()))
exists = False
if da != None:
if self.verbose: print("++clearing sa:%x da:%x" % (sa, da))
msglists = self._RealExtMsgParts.get(sa)
exists = bool(msglists != None and len(msglists))
self._RealExtMsgParts[sa] = {}
return exists
if self.verbose: print("++clearing sa:%x COMPLETELY!" % (sa))
msglists = self._RealExtMsgParts.get(sa)
if msglists == None:
msglists = {}
self._RealExtMsgParts[sa] = msglists
mlist = msglists.get(da, {'length':0})
msglists[da] = {'length':0, 'msgs':[], 'type':None, 'adminmsgs':[]}
return bool(mlist['length'])
def saveRealExtMsg(self, idx, ts, sa, da, pgn, msg, tptype, lastidx):
'''
# functions to support the J1939TP Stack (real stuff, not just repr)
store a TP message.
'''
# FIXME: do we need thread-safety wrappers here?
msglist = self._RealExtMsgs.get((sa, da))
if msglist is None:
msglist = []
self._RealExtMsgs[(sa, da)] = msglist
msglist.append((idx, ts, sa, da, pgn, msg, tptype, lastidx))
if self.verbose:
print("-=-= saving sa:%x da:%x" % (sa, da))
# This is for the pretty printing stuff...
def getExtMsgs(self, sa, da):
'''
returns a message list for a given source and destination (sa, da)
if no list exists for this pairing, one is created and an empty list is returned
'''
msglists = self.extMsgs.get(sa)
if msglists is None:
msglists = {}
self.extMsgs[sa] = msglists
mlist = msglists.get(da)
if mlist is None or not len(mlist):
mlist = {'msgs':[],
'type' : -1,
'adminmsgs' : [],
'sa': -1,
'da': -1,
'ts': -1,
'idx': -1,
'pgn2': -1,
'pgn1': -1,
'pgn0': -1,
'maxct': -1,
'length': 0,
'totsize': 0,
}
msglists[da] = mlist
return mlist
def clearExtMsgs(self, sa, da=None):
'''
clear out extended messages metadata.
if da == None, this clears *all* message data for a given source address
returns whether the thing deleted exists previously
* if da == None, returns whether the sa had anything previously
* otherwise, if the list
'''
exists = False
msglists = self.extMsgs.get(sa)
# if da is included, clear only the message
if da != None:
exists = bool(msglists != None and len(msglists))
if msglists is not None:
msglists[da] = {}
return exists
if msglists is not None:
exists = True
msglists = {}
self.extMsgs[sa] = msglists
return exists
def setReprVerbosePGNs(self, pgnlist):
'''
provide a list of s which should be printed
'''
if pgnlist == 'ON':
self._repr_all_spns = True
elif pgnlist == 'OFF':
self._repr_all_spns = False
elif type(pgnlist) == list:
self._repr_spns_by_pgn = {pgn:True for pgn in pgnlist}
elif pgnlist is None:
self._repr_spns_by_pgn = {}
self._repr_all_spns = False
def setReprVerbosePGNs(self, pgnlist):
'''
provide a list of s which should be printed
'''
if pgnlist == 'ON':
self._repr_all_spns = True
elif pgnlist == 'OFF':
self._repr_all_spns = False
elif type(pgnlist) == list:
self._repr_spns_by_pgn = {pgn:True for pgn in pgnlist}
elif pgnlist is None:
self._repr_spns_by_pgn = {}
self._repr_all_spns = False
def addID(self, newid):
if newid not in self.myIDs:
self.myIDs.append(newid)
def delID(self, curid):
if curid in self.myIDs:
self.myIDs.remove(curid)
def J1939xmit(self, pf, ps, sa, data, prio=6, edp=0, dp=0):
arbid = emitArbid(prio, edp, dp, pf, ps, sa)
return self.CANxmit(arbid, data, extflag=1)
def J1939xmit_tp(self, da, sa, pgn2, pgn1, pgn0, message, prio=6, edp=0, dp=0):
msgs = ['%c'%(x+1) + message[x*7:(x*7)+7] for x in range((len(message)+6)//7)]
if len(msgs) > 255:
raise Exception("J1939xmit_tp: attempt to send message that's too large")
cm_msg = struct.pack('<BHBBBBB', CM_RTS, len(message), len(msgs), 0xff,
pgn2, pgn1, pgn0)
self.J1939xmit(PF_TP_CM, da, sa, cm_msg, prio=prio)
time.sleep(.01) # hack: should watch for CM_CTS
for msg in msgs:
self.J1939xmit(PF_TP_DT, da, sa, msg, prio=prio)
# hack: should watch for CM_EOM
    def recvRealExtMsg(self, sa, da, pgn2, pgn1, pgn0, start_msg=None, block=True, timeout=1):
        '''
        Find the first recv'd message from the J1939tp stack after start_msg, for PGN made up of pgn2,pgn1,pgn0
        wait until timeout seconds have lapsed
        if start_msg == None, returns the next message since last J1939recv/tp

        Raises TimeoutException if nothing matching arrives in time.
        '''
        # default to resuming right after the last message handed out
        if start_msg == None:
            start_msg = self._last_recv_idx
            #print("resuming last recv'd index: %d" % start_msg)
        count = 0
        starttime = time.time()
        # always make at least one pass (count==0); with block=True keep
        # polling until the timeout expires
        while (count==0 or (block and time.time()-starttime < timeout)):
            #sys.stderr.write('.')
            count += 1
            # the message store is shared; hold the lock while scanning
            self.mquelock.acquire()
            try:
                msgs = self._RealExtMsgs.get((sa, da))
                if msgs == None or not len(msgs):
                    # nothing stored yet for this pairing; retry
                    # (the finally clause still releases the lock)
                    #print("no message for %.2x -> %.2x" % (sa, da))
                    continue
                if msgs[-1][0] < start_msg:
                    # newest stored message predates our window; retry
                    self.log("last msg before start_msg %r %r" % (msgs[-1][0],start_msg), 2)
                    #sys.stderr.write('.')
                    continue
                # if we have messages, check each for the last idx.
                for midx in range(len(msgs)):
                    msg = msgs[midx]
                    # NOTE(review): midx is rebound here from list index
                    # to the message's own idx field (msg[0])
                    midx = msg[0]
                    mpgn = msg[4]
                    mlastidx = msg[7]
                    #print(" %r ?>= %r" % (midx, start_msg))
                    #print(" %r ?= %r" % (mpgn, (pgn2, pgn1, pgn0)))
                    # skip messages completed before our starting point
                    if mlastidx < start_msg:
                        continue
                    # skip messages for a different PGN
                    if mpgn != (pgn2, pgn1, pgn0):
                        continue
                    #print("success! %s" % repr(msg))
                    #print("setting last recv'd index: %d" % mlastidx)
                    self._last_recv_idx = mlastidx
                    ##FIXME: make this threadsafe
                    #msgs.pop(midx)
                    return msg
            except Exception as e:
                print("recvRealExtMsg: ERROR: %r" % e)
            finally:
                # released on every path, including the `continue`s above
                self.mquelock.release()
            time.sleep(.001)
        raise TimeoutException('recvRealExtMsg: Timeout waiting for message from: 0x%.2x -> 0x%.2x PGN: %.2x%.2x%.2x (%d secs)' % \
                (sa, da, pgn2,pgn1,pgn0, (time.time()-starttime)))
def J1939recv_tp(self, pgn2, pgn1, pgn0, sa=0x0, da=0xf9, msgcount=1, timeout=1, advfilters=[], start_msg=None):
if start_msg == None:
start_msg = self._last_recv_idx
print("J1939recv_tp: Searching for response at or after msg idx: %d" % start_msg)
msg = self.recvRealExtMsg(sa, da, pgn2, pgn1, pgn0, start_msg, timeout=timeout)
if msg == None:
return None
out = msg[5]
return out
def J1939recv(self, msgcount=1, timeout=1, advfilters=[], start_msg=None):
out = []
if start_msg == None:
start_msg = self._last_recv_idx
for msg in self.filterCanMsgs(start_msg=start_msg, advfilters=advfilters, tail=True, maxsecs=timeout):
#(idx, ts, arbid, data) = msg
out.append(msg)
self._last_recv_idx = msg[0]
if len(out) >= msgcount:
return out
return out
def J1939xmit_recv(self, pf, ps, sa, data, recv_arbid=None, recv_count=1, prio=6, edp=0, dp=0, timeout=1, advfilters=[]):
msgidx = self.getCanMsgCount()
res = self.J1939xmit(pf, ps, sa, data, prio, edp, dp)
res = self.J1939recv(recv_count, timeout, advfilters, start_msg=msgidx)
return res
def J1939_Request(self, rpf, rda_ge=0, redp=0, rdp=0, da=0xff, sa=0xfe, prio=0x6, recv_count=255, timeout=2, advfilters=[]):
pgnbytes = [rda_ge, rpf, redp<<1 | rdp]
data = ''.join([chr(x) for x in pgnbytes])
data += '\xff' * (8-len(data))
if not len(advfilters):
advfilters = 'pf in (0x%x, 0xeb, 0xec)' % rpf
# FIXME: this is only good for short requests... anything directed is likely to send back a TP message
msgs = self.J1939xmit_recv(PF_RQST, da, sa, data, recv_count=recv_count, prio=prio, timeout=timeout, advfilters=advfilters)
return msgs
def J1939_ClaimAddress(self, addr, name=0x4040404040404040, prio=6):
data = struct.pack(">Q", name)
out = self.J1939xmit_recv(pf=PF_ADDRCLAIM, ps=0xff, sa=addr, data=data, recv_count=10, prio=prio<<2, timeout=2, advfilters=['pf==0xee'])
self.addID(addr)
return out
def J1939_ArpAddresses(self):
'''
Sends a request for all used addresses... not fully tested
'''
#idx = self.getCanMsgCount()
msgs = self.J1939_Request(PF_ADDRCLAIM, recv_count=255, advfilters=['pf==0xee'])
'''
# FIXME: these are way too loose, for discovery only. tighten down.
recv_filters = [
'pf < 0xf0',
#'pf == 0xee',
]
msgs = self.J1939recv(msgcount=200, timeout=3, advfilters=recv_filters, start_msg=idx)
'''
for msg in msgs:
try:
msgrepr = self._reprCanMsg(*msg)
if msgrepr != cancat.DONT_PRINT_THIS_MESSAGE:
print(msgrepr)
except Exception as e:
print(e)
'''
example (from start of ECU):
00000000 1545142410.990 pri/edp/dp: 6/0/0, PG: ea ff Source: fe Len: 03, Data: 00ee00 Request
00000001 1545142411.077 pri/edp/dp: 6/0/0, PG: ee ff Source: 00 Len: 08, Data: 4cca4d0100000000 Address Claim: id: 0xdca4c mfg: Cummins Inc (formerly Cummins Engine Co) Columbus, IN USA
currently ours:
00001903 1545142785.127 pri/edp/dp: 6/0/0, PG: ea ff Source: fe Len: 03, Data: 00ee00 Request
'''
# widest field we ever need to mask, in bytes
MAX_WORD = 64

# bu_masks[n] is an n-bit all-ones mask: 0x0, 0x1, 0x3, 0x7, ...
# (covers 0 .. 8*MAX_WORD bits inclusive)
bu_masks = [(1 << i) - 1 for i in range(8 * MAX_WORD + 1)]
def reprSPNdata(spnlist, msg):
    '''
    Render the SPN fields contained in a PGN payload.

    spnlist is an iterable of SPN numbers (looked up in J1939SPNdb) and
    msg is the raw PGN payload.  Returns a list of pre-formatted
    "SPN(n): value  name" strings; SPNs without a database entry are
    skipped, and variable-length PGNs are rendered without data.
    '''
    spnlines = []
    # loop through the SPNs listed for this PGN
    for spnum in spnlist:
        spn = J1939SPNdb.get(spnum)
        if spn is None:
            continue
        # graciously refactored code from TruckDevil (hey LBD!)
        pgnlen = spn.get('PGNLength')
        spnName = spn.get('Name')
        spnData = ''
        # skip variable-length PGNs for now
        if (type(pgnlen) == str and 'ariable' in pgnlen):
            pass
        else:
            startBit = spn.get('StartBit')
            endBit = spn.get('EndBit')
            startByte = startBit // 8
            endByte = (endBit + 7) // 8
            endBitO = endBit % 8
            datablob = msg[startByte:endByte]
            units = spn.get("Units")
            if units == 'ASCII':
                spnData = repr(datablob)
            else:
                try:
                    # carve the field out of the blob: accumulate the bytes
                    # big-endian, right-align to the field's end bit, then
                    # mask down to the field width
                    datanum = 0
                    for n in struct.unpack('%dB' % len(datablob), datablob):
                        datanum <<= 8
                        datanum |= n
                    datanum >>= (7 - endBitO)
                    datanum &= bu_masks[endBit - startBit + 1]
                    if units == 'bit':
                        meaning = ''
                        bitdecode = J1939BitDecodings.get(spnum)
                        if bitdecode is not None:
                            meaning = bitdecode.get(datanum)
                        spnData = '0x%x (%s)' % (datanum, meaning)
                    elif units == 'binary':
                        spnData = bin(datanum)
                    else:
                        # scaled physical value.
                        # BUGFIX: the original re-extracted the number here
                        # and discarded the results of both the shift
                        # (`datanum >> ...`) and the offset
                        # (`datanum + offset`), so values were unshifted
                        # and never offset.  Reuse the masked value and
                        # apply scale/offset in place instead.
                        resolution = spn.get('Resolution')
                        if resolution is not None:
                            datanum *= resolution
                        offset = spn.get('Offset')
                        if offset is not None:
                            datanum += offset
                        spnData = '%.3f %s' % (datanum, units)
                except Exception as e:
                    spnData = "ERROR"
                    print("SPN: %r %r (%r)" % (e, msg, spn))
        spnlines.append('  SPN(%d): %-20s\t %s' % (spnum, spnData, spnName))
    return spnlines
|
atlas0fd00m/CanCat
|
cancat/j1939.py
|
Python
|
bsd-2-clause
| 37,051
|
[
"COLUMBUS"
] |
929263b1ab66278ce224d65bfe3460674bab51907d13f5c5967688831f58e3cf
|
"""Forest of trees-based ensemble methods
Those methods include random forests and extremely randomized trees.
The module structure is the following:
- The ``BaseForest`` base class implements a common ``fit`` method for all
the estimators in the module. The ``fit`` method of the base ``Forest``
class calls the ``fit`` method of each sub-estimator on random samples
(with replacement, a.k.a. bootstrap) of the training set.
The init of the sub-estimator is further delegated to the
``BaseEnsemble`` constructor.
- The ``ForestClassifier`` and ``ForestRegressor`` base classes further
implement the prediction logic by computing an average of the predicted
outcomes of the sub-estimators.
- The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived
classes provide the user with concrete implementations of
the forest ensemble method using classical, deterministic
``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as
sub-estimator implementations.
- The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived
classes provide the user with concrete implementations of the
  forest ensemble method using the extremely randomized trees
``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as
sub-estimator implementations.
"""
# Authors: Gilles Louppe, Brian Holt
# License: BSD 3
import itertools
import numpy as np
from ..base import ClassifierMixin, RegressorMixin
from ..externals.joblib import Parallel, delayed, cpu_count
from ..feature_selection.selector_mixin import SelectorMixin
from ..tree import DecisionTreeClassifier, DecisionTreeRegressor, \
ExtraTreeClassifier, ExtraTreeRegressor
from ..utils import check_random_state
from ..metrics import r2_score
from .base import BaseEnsemble
__all__ = ["RandomForestClassifier",
"RandomForestRegressor",
"ExtraTreesClassifier",
"ExtraTreesRegressor"]
MAX_INT = np.iinfo(np.int32).max
def _parallel_build_trees(n_trees, forest, X, y,
                          sample_mask, X_argsorted, seed):
    """Private function used to build a batch of trees within a job.

    Clones `n_trees` sub-estimators from `forest`, seeding each one from
    a per-batch RandomState, and fits them either on bootstrap resamples
    of (X, y) or on the full data, depending on forest.bootstrap.
    """
    rng = check_random_state(seed)
    built = []
    for _ in xrange(n_trees):
        # draw the sub-estimator's seed first so the RNG stream is
        # consumed in the same order for every tree
        subseed = rng.randint(MAX_INT)
        tree = forest._make_estimator(append=False)
        tree.set_params(compute_importances=forest.compute_importances)
        tree.set_params(random_state=check_random_state(subseed))
        if not forest.bootstrap:
            tree.fit(X, y,
                     sample_mask=sample_mask, X_argsorted=X_argsorted)
        else:
            n_samples = X.shape[0]
            sample_idx = rng.randint(0, n_samples, n_samples)
            tree.fit(X[sample_idx], y[sample_idx],
                     sample_mask=sample_mask, X_argsorted=X_argsorted)
            # remember which samples were drawn, for OOB bookkeeping later
            tree.indices_ = sample_idx
        built.append(tree)
    return built
def _parallel_predict_proba(trees, X, n_classes):
"""Private function used to compute a batch of predictions within a job."""
p = np.zeros((X.shape[0], n_classes))
for tree in trees:
if n_classes == tree.n_classes_:
p += tree.predict_proba(X)
else:
proba = tree.predict_proba(X)
for j, c in enumerate(tree.classes_):
p[:, c] += proba[:, j]
return p
def _parallel_predict_regression(trees, X):
"""Private function used to compute a batch of predictions within a job."""
return sum(tree.predict(X) for tree in trees)
def _partition_trees(forest):
    """Private function used to partition trees between jobs.

    Returns (n_jobs, n_trees, starts): the effective worker count, the
    number of trees assigned to each worker (as even as possible, with
    the remainder spread over the first workers), and the cumulative
    start offsets into the flat estimator list.
    """
    # n_jobs == -1 means "one job per core", but never more jobs than trees
    requested = cpu_count() if forest.n_jobs == -1 else forest.n_jobs
    n_jobs = min(requested, forest.n_estimators)

    # spread trees evenly; the first (n_estimators % n_jobs) jobs get one extra
    base = forest.n_estimators / n_jobs
    n_trees = [base] * n_jobs
    for i in xrange(forest.n_estimators % n_jobs):
        n_trees[i] += 1

    # cumulative offsets: starts[i] is where job i's slice begins
    starts = [0] * (n_jobs + 1)
    for i in xrange(1, n_jobs + 1):
        starts[i] = starts[i - 1] + n_trees[i - 1]

    return n_jobs, n_trees, starts
class BaseForest(BaseEnsemble, SelectorMixin):
    """Base class for forests of trees.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    def __init__(self, base_estimator,
                 n_estimators=10,
                 estimator_params=[],
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        super(BaseForest, self).__init__(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params)
        self.bootstrap = bootstrap
        self.compute_importances = compute_importances
        self.oob_score = oob_score
        self.n_jobs = n_jobs
        # NOTE(review): the seed is materialized into a RandomState at
        # construction time, so repeated fit() calls on the same instance
        # consume a single RNG stream rather than restarting from the seed.
        self.random_state = check_random_state(random_state)
        # filled in by fit() only when compute_importances is set
        self.feature_importances_ = None
    def fit(self, X, y):
        """Build a forest of trees from the training set (X, y).

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The training input samples.
        y : array-like, shape = [n_samples]
            The target values (integers that correspond to classes in
            classification, real numbers in regression).

        Returns
        -------
        self : object
            Returns self.
        """
        # Precompute some data
        X = np.atleast_2d(X)
        y = np.atleast_1d(y)
        if self.bootstrap:
            # per-tree resampling: no shared mask/argsort precomputation
            sample_mask = None
            X_argsorted = None
        else:
            if self.oob_score:
                raise ValueError("Out of bag estimation only available"
                        " if bootstrap=True")
            # without bootstrap all trees share the full-sample mask and
            # a single feature-wise argsort of X
            sample_mask = np.ones((X.shape[0],), dtype=np.bool)
            X_argsorted = np.asfortranarray(
                np.argsort(X.T, axis=1).astype(np.int32).T)
        if isinstance(self.base_estimator, ClassifierMixin):
            # encode class labels as indices into the sorted unique labels
            self.classes_ = np.unique(y)
            self.n_classes_ = len(self.classes_)
            y = np.searchsorted(self.classes_, y)
        # Assign chunk of trees to jobs
        n_jobs, n_trees, _ = _partition_trees(self)
        # Parallel loop: one master-seed draw per job so results are
        # reproducible for a fixed random_state
        all_trees = Parallel(n_jobs=n_jobs)(
            delayed(_parallel_build_trees)(
                n_trees[i],
                self,
                X,
                y,
                sample_mask,
                X_argsorted,
                self.random_state.randint(MAX_INT))
            for i in xrange(n_jobs))
        # Reduce: flatten the per-job batches into one estimator list
        self.estimators_ = [tree for tree in itertools.chain(*all_trees)]
        # Calculate out of bag predictions and score
        if self.oob_score:
            if isinstance(self, ClassifierMixin):
                predictions = np.zeros((X.shape[0], self.n_classes_))
                for estimator in self.estimators_:
                    # mask selects the samples this tree did NOT train on
                    mask = np.ones(X.shape[0], dtype=np.bool)
                    mask[estimator.indices_] = False
                    predictions[mask, :] += estimator.predict_proba(X[mask, :])
                # NOTE(review): a sample drawn in every bootstrap keeps an
                # all-zero row here, giving NaN after normalization --
                # confirm acceptable for small forests
                self.oob_decision_function_ = (predictions
                        / predictions.sum(axis=1)[:, np.newaxis])
                self.oob_score_ = np.mean(y == np.argmax(predictions, axis=1))
            else:
                # Regression: average per-sample over the trees that held
                # the sample out
                predictions = np.zeros(X.shape[0])
                n_predictions = np.zeros(X.shape[0])
                for estimator in self.estimators_:
                    mask = np.ones(X.shape[0], dtype=np.bool)
                    mask[estimator.indices_] = False
                    predictions[mask] += estimator.predict(X[mask, :])
                    n_predictions[mask] += 1
                predictions /= n_predictions
                self.oob_prediction_ = predictions
                self.oob_score_ = r2_score(y, predictions)
        # Sum the importances (mean of per-tree importances)
        if self.compute_importances:
            self.feature_importances_ = \
                sum(tree.feature_importances_ for tree in self.estimators_) \
                / self.n_estimators
        return self
class ForestClassifier(BaseForest, ClassifierMixin):
    """Base class for forest of trees-based classifiers.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    def __init__(self, base_estimator,
                 n_estimators=10,
                 estimator_params=[],
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        super(ForestClassifier, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state)

    def predict(self, X):
        """Predict class for X.

        The predicted class is the one with the highest mean probability
        estimate across the trees of the forest.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        y : array of shape = [n_samples]
            The predicted classes.
        """
        proba = self.predict_proba(X)
        best = np.argmax(proba, axis=1)
        # map class indices back to the original labels
        return self.classes_.take(best, axis=0)

    def predict_proba(self, X):
        """Predict class probabilities for X.

        The predicted class probabilities are the mean of the per-tree
        class probabilities over the whole forest.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        p : array of shape = [n_samples]
            The class probabilities of the input samples. Classes are
            ordered by arithmetical order.
        """
        X = np.atleast_2d(X)
        # split the estimator list into contiguous chunks, one per job
        n_jobs, n_trees, starts = _partition_trees(self)
        chunk_sums = Parallel(n_jobs=self.n_jobs)(
            delayed(_parallel_predict_proba)(
                self.estimators_[starts[i]:starts[i + 1]],
                X, self.n_classes_)
            for i in xrange(n_jobs))
        # average the per-chunk probability sums over the whole forest
        return sum(chunk_sums) / self.n_estimators

    def predict_log_proba(self, X):
        """Predict class log-probabilities for X.

        The log of the mean predicted class probabilities of the trees.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        p : array of shape = [n_samples]
            The class log-probabilities of the input samples. Classes are
            ordered by arithmetical order.
        """
        return np.log(self.predict_proba(X))
class ForestRegressor(BaseForest, RegressorMixin):
    """Base class for forest of trees-based regressors.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    def __init__(self, base_estimator,
                 n_estimators=10,
                 estimator_params=[],
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        super(ForestRegressor, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state)

    def predict(self, X):
        """Predict regression target for X.

        The predicted target is the mean of the individual trees'
        predicted targets.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        y: array of shape = [n_samples]
            The predicted values.
        """
        X = np.atleast_2d(X)
        # split the estimator list into contiguous chunks, one per job
        n_jobs, n_trees, starts = _partition_trees(self)
        chunk_sums = Parallel(n_jobs=self.n_jobs)(
            delayed(_parallel_predict_regression)(
                self.estimators_[starts[i]:starts[i + 1]], X)
            for i in xrange(n_jobs))
        # average the per-chunk prediction sums over the whole forest
        return sum(chunk_sums) / self.n_estimators
class RandomForestClassifier(ForestClassifier):
    """A random forest classifier.

    A random forest is a meta estimator that fits a number of classical
    decision trees on various sub-samples of the dataset and uses
    averaging to improve the predictive accuracy and control
    over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="gini")
        Split quality function: "gini" (Gini impurity) or "entropy"
        (information gain).  Note: this parameter is tree-specific.
    max_depth : integer or None, optional (default=None)
        Maximum tree depth; if None, nodes are expanded until all leaves
        are pure or hold fewer than min_samples_split samples.
        Note: this parameter is tree-specific.
    min_samples_split : integer, optional (default=1)
        Minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.
    min_samples_leaf : integer, optional (default=1)
        Minimum number of samples in newly created leaves; splits that
        would leave fewer are discarded.  Note: tree-specific.
    min_density : float, optional (default=0.1)
        Minimum density of the `sample_mask` below which the mask is
        recomputed and the input data packed (copied); 1.0 always
        represents partitions as copies.  Note: tree-specific.
    max_features : int, string or None, optional (default="auto")
        Features considered per split: "auto"/"sqrt" -> sqrt(n_features)
        on classification, "log2" -> log2(n_features), None ->
        n_features.  Note: this parameter is tree-specific.
    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.
    compute_importances : boolean, optional (default=True)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.
    oob_score : bool
        Whether to use out-of-bag samples to estimate the generalization
        error.
    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel; -1 uses all cores.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState instance, or None to use np.random.

    Attributes
    ----------
    `feature_importances_` : array, shape = [n_features]
        The feature importances (the higher, the more important).
    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag
        estimate.
    `oob_decision_function_` : array, shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the
        training set.

    Notes
    -----
    **References**:
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeClassifier, ExtraTreesClassifier
    """
    def __init__(self, n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=True,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        super(RandomForestClassifier, self).__init__(
            base_estimator=DecisionTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density",
                              "max_features", "random_state"),
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state)

        # tree-specific parameters, forwarded to every sub-estimator
        # through estimator_params
        self.criterion = criterion
        self.max_features = max_features
        self.max_depth = max_depth
        self.min_density = min_density
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
class RandomForestRegressor(ForestRegressor):
    """A random forest regressor.

    A random forest is a meta estimator that fits a number of classical
    decision trees on various sub-samples of the dataset and uses
    averaging to improve the predictive accuracy and control
    over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="mse")
        Split quality function; only "mse" (mean squared error) is
        supported.  Note: this parameter is tree-specific.
    max_depth : integer or None, optional (default=None)
        Maximum tree depth; if None, nodes are expanded until all leaves
        are pure or hold fewer than min_samples_split samples.
        Note: this parameter is tree-specific.
    min_samples_split : integer, optional (default=1)
        Minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.
    min_samples_leaf : integer, optional (default=1)
        Minimum number of samples in newly created leaves; splits that
        would leave fewer are discarded.  Note: tree-specific.
    min_density : float, optional (default=0.1)
        Minimum density of the `sample_mask` below which the mask is
        recomputed and the input data packed (copied); 1.0 always
        represents partitions as copies.  Note: tree-specific.
    max_features : int, string or None, optional (default="auto")
        Features considered per split: "auto"/"sqrt" -> sqrt(n_features)
        on classification, "log2" -> log2(n_features), None ->
        n_features.  Note: this parameter is tree-specific.
    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.
    compute_importances : boolean, optional (default=True)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.
    oob_score : bool
        Whether to use out-of-bag samples to estimate the generalization
        error.
    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel; -1 uses all cores.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState instance, or None to use np.random.

    Attributes
    ----------
    `feature_importances_` : array of shape = [n_features]
        The feature importances (the higher, the more important).
    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag
        estimate.
    `oob_prediction_` : array, shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    Notes
    -----
    **References**:
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeRegressor, ExtraTreesRegressor
    """
    def __init__(self, n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=True,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        super(RandomForestRegressor, self).__init__(
            base_estimator=DecisionTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density",
                              "max_features", "random_state"),
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state)

        # tree-specific parameters, forwarded to every sub-estimator
        # through estimator_params
        self.criterion = criterion
        self.max_features = max_features
        self.max_depth = max_depth
        self.min_density = min_density
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
class ExtraTreesClassifier(ForestClassifier):
    """An extra-trees classifier.

    A meta estimator that fits a number of randomized decision trees
    (a.k.a. extra-trees) on various sub-samples of the dataset and uses
    averaging to improve the predictive accuracy and control
    over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.
    criterion : string, optional (default="gini")
        Split quality function: "gini" (Gini impurity) or "entropy"
        (information gain).  Note: this parameter is tree-specific.
    max_depth : integer or None, optional (default=None)
        Maximum tree depth; if None, nodes are expanded until all leaves
        are pure or hold fewer than min_samples_split samples.
        Note: this parameter is tree-specific.
    min_samples_split : integer, optional (default=1)
        Minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.
    min_samples_leaf : integer, optional (default=1)
        Minimum number of samples in newly created leaves; splits that
        would leave fewer are discarded.  Note: tree-specific.
    min_density : float, optional (default=0.1)
        Minimum density of the `sample_mask` below which the mask is
        recomputed and the input data packed (copied); 1.0 always
        represents partitions as copies.  Note: tree-specific.
    max_features : int, string or None, optional (default="auto")
        Features considered per split: "auto"/"sqrt" -> sqrt(n_features)
        on classification, "log2" -> log2(n_features), None ->
        n_features.  Note: this parameter is tree-specific.
    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.
    compute_importances : boolean, optional (default=True)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.
    oob_score : bool
        Whether to use out-of-bag samples to estimate the generalization
        error.
    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel; -1 uses all cores.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState instance, or None to use np.random.

    Attributes
    ----------
    `feature_importances_` : array of shape = [n_features]
        The feature importances (the higher, the more important).
    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag
        estimate.
    `oob_decision_function_` : array, shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the
        training set.

    Notes
    -----
    **References**:
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
    RandomForestClassifier : Ensemble Classifier based on trees with
        optimal splits.
    """
    def __init__(self, n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        super(ExtraTreesClassifier, self).__init__(
            base_estimator=ExtraTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density",
                              "max_features", "random_state"),
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state)

        # tree-specific parameters, forwarded to every sub-estimator
        # through estimator_params
        self.criterion = criterion
        self.max_features = max_features
        self.max_depth = max_depth
        self.min_density = min_density
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
class ExtraTreesRegressor(ForestRegressor):
    """An extra-trees regressor.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and use averaging to improve the predictive accuracy
    and control over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="mse")
        The function to measure the quality of a split. The only supported
        criterion is "mse" for the mean squared error.
        Note: this parameter is tree-specific.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Note: this parameter is tree-specific.

    min_samples_split : integer, optional (default=1)
        The minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.

    min_samples_leaf : integer, optional (default=1)
        The minimum number of samples in newly created leaves. A split is
        discarded if after the split, one of the leaves would contain less then
        ``min_samples_leaf`` samples.
        Note: this parameter is tree-specific.

    min_density : float, optional (default=0.1)
        This parameter controls a trade-off in an optimization heuristic. It
        controls the minimum density of the `sample_mask` (i.e. the
        fraction of samples in the mask). If the density falls below this
        threshold the mask is recomputed and the input data is packed
        which results in data copying. If `min_density` equals to one,
        the partitions are always represented as copies of the original
        data. Otherwise, partitions are represented as bit masks (aka
        sample masks).
        Note: this parameter is tree-specific.

    max_features : int, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
          - If "auto", then `max_features=sqrt(n_features)` on
            classification tasks and `max_features=n_features`
            on regression problems.
          - If "sqrt", then `max_features=sqrt(n_features)`.
          - If "log2", then `max_features=log2(n_features)`.
          - If None, then `max_features=n_features`.
        Note: this parameter is tree-specific.

    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.
        Note: this parameter is tree-specific.

    compute_importances : boolean, optional (default=False)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.

    oob_score : bool
        Whether to use out-of-bag samples to estimate
        the generalization error.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel. If -1, then the number of jobs
        is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    Attributes
    ----------
    `feature_importances_` : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag estimate.

    `oob_prediction_` : array, shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    Notes
    -----
    **References**:

    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeRegressor: Base estimator for this ensemble.
    RandomForestRegressor: Ensemble regressor using trees with optimal splits.
    """
    def __init__(self, n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None):
        """Store ensemble- and tree-level parameters; no fitting happens here."""
        super(ExtraTreesRegressor, self).__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            # Attribute names forwarded onto each individual tree at fit time.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density", "max_features",
                              "random_state"),
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state)

        # Tree-specific parameters (see estimator_params above).
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_density = min_density
        self.max_features = max_features
|
cdegroc/scikit-learn
|
sklearn/ensemble/forest.py
|
Python
|
bsd-3-clause
| 34,977
|
[
"Brian"
] |
3c10492f3755f0e77c1fc55233e8a0f76d105c1828bc6afd6cb94ec82abae59e
|
# PSTH experiments
#
# Copyright (C) 2010-2012 Huang Xin
#
# See LICENSE.TXT that came with this file.
from __future__ import division
import os
import sys
import time
import Pyro.core
import subprocess
from Experiment import ExperimentConfig,Experiment
class PSTHExperiment(Experiment):
    """Base class for experiments that collect PSTH data via a Pyro server.

    A helper application (``self.pyro_source``, launched as a subprocess)
    registers a Pyro object named 'psth_server' on ``PSTH_SERVER_PORT``.
    This class drives the stimulus, pulls PSTH data/results from that
    server, logs them to CSV and exports a chart.

    Fixes vs. the original:
      * ``log_psth_data`` guarded the 'nul rate' row (which reads
        ``data['min_value']``) with ``'max_value' in data`` — a copy-paste
        bug that raised KeyError whenever max_value was present without
        min_value. The guard now checks 'min_value'.
      * ``except Exception, e`` (Python-2-only syntax) replaced with
        ``except Exception as e`` (valid on Python 2.6+ and 3).
    """
    # Handle of the shared psth server subprocess, reused across experiments.
    PSTH_SERVER_PROCESS = None
    # TCP port on which the Pyro psth server listens.
    PSTH_SERVER_PORT = 6743

    def __init__(self, *args, **kwargs):
        super(PSTHExperiment, self).__init__(*args, **kwargs)
        self.pyro_source = ''  # script implementing the Pyro psth app (set by subclasses)
        self.exp_param = ''    # short name of the tuned parameter (set by subclasses)

    def psth_analysis(self, psth_type=None):
        """Run the full psth workflow and return the extracted results.

        Every step is wrapped in its own try/except so that a failure in
        one stage is logged but does not abort the remaining teardown
        steps (stop data, close server).
        """
        try:
            self.psth_server = self.get_psth_server()
        except Exception as e:
            self.logger.error('Failed to get psth app. ' + str(e))
        try:
            self.logger.info('Starting psth data.')
            self.psth_server.start_data()
        except Exception as e:
            self.logger.error('Failed to start psth app. ' + str(e))
        try:
            self.logger.info('Setting up psth app before stimulation.')
            self.pre_stim_setup()
        except Exception as e:
            self.logger.error('Failed to setup psth app. ' + str(e))
        try:
            self.wait_for_stim()
        except Exception as e:
            self.logger.error('Failed to wait for stimulation. ' + str(e))
        try:
            self.logger.info('Setting up psth app after stimulation.')
            self.post_stim_setup()
        except Exception as e:
            self.logger.error('Failed to setup psth app. ' + str(e))
        try:
            data = self.psth_server.get_data()
        except Exception as e:
            self.logger.error('Failed to get data from psth. ' + str(e))
        try:
            self.log_psth_data(data)
        except Exception as e:
            self.logger.error('Failed to log psth data. ' + str(e))
        try:
            results = self.extract_results(data)
        except Exception as e:
            self.logger.error('Failed to extract psth data. ' + str(e))
        try:
            chart_file = ExperimentConfig.CELLDIR + os.path.sep + self.exp_name + '.png'
            self.logger.info('Exporting chart to: ' + chart_file)
            self.psth_server.export_chart(chart_file)
        except Exception as e:
            self.logger.error('Failed to export psth chart. ' + str(e))
        try:
            # wait for complete of preceding pyro operationsg
            time.sleep(3.0)
            self.logger.info('Stopping psth data.')
            self.psth_server.stop_data()
        except Exception as e:
            self.logger.error('Failed to stop psth app. ' + str(e))
        try:
            # wait for complete of preceding pyro operationsg
            time.sleep(3.0)
            self.logger.info('Closing psth server.')
            self.psth_server.close()
        except Exception as e:
            self.logger.error('Failed to close psth server. ' + str(e))
        try:
            return results
        except Exception as e:
            self.logger.error('Failed to return psth result. ' + str(e))

    def log_psth_data(self, data):
        """Dump the tuning-curve dictionary to <CELLDIR>/<exp_name>.csv.

        Each known key in *data* is written as one CSV row; unknown keys
        are ignored.
        """
        data_file = ExperimentConfig.CELLDIR + os.path.sep + self.exp_name + '.csv'
        param = self.exp_param
        with open(data_file, 'w') as data_output:
            if 'param' in data:
                data_output.writelines('param,%s\n' % data['param'])
            if 'x' in data:
                data_output.writelines('%s,%s\n' % (param, ','.join([str(x) for x in data['x']])))
            if 'means' in data:
                data_output.writelines('means,%s\n' % ','.join([str(mean) for mean in data['means']]))
            if 'stds' in data:
                data_output.writelines('stds,%s\n' % ','.join([str(std) for std in data['stds']]))
            if 'max_param' in data:
                data_output.writelines('opt %s,%s\n' % (param, str(data['max_param'])))
            if 'max_value' in data:
                data_output.writelines('opt rate,%s\n' % str(data['max_value']))
            if 'min_param' in data:
                data_output.writelines('nul %s,%s\n' % (param, str(data['min_param'])))
            # Guard on 'min_value' — the value actually read below.
            if 'min_value' in data:
                data_output.writelines('nul rate,%s\n' % str(data['min_value']))
            if 'F1/F0' in data:
                data_output.writelines('F1/F0,%s\n' % str(data['F1/F0']))
            if 'BII' in data:
                data_output.writelines('BII,%s\n' % str(data['BII']))
            if 'S/N' in data:
                data_output.writelines('S/N,%s\n' % str(data['S/N']))

    def get_psth_server(self):
        """Return a Pyro proxy to the psth server, (re)launching the app if needed."""
        self.logger.info('Fetching psth server.')
        try:
            if PSTHExperiment.PSTH_SERVER_PROCESS.poll() is not None:
                self.logger.info('PSTH server is dead.')
                # Deliberate: jump to the except branch to relaunch the app.
                raise
        except:
            # Reached when no process was ever started (AttributeError on
            # None) or the bare raise above fired: launch a fresh app.
            self.logger.info('Creating new psth app.')
            psth_app_path = os.path.dirname(__file__) + os.path.sep + 'app' + os.path.sep + self.pyro_source
            args = [sys.executable, psth_app_path, str(PSTHExperiment.PSTH_SERVER_PORT)]
            PSTHExperiment.PSTH_SERVER_PROCESS = subprocess.Popen(args)
            # Give the subprocess time to register its Pyro object.
            time.sleep(3.0)
        else:
            self.logger.info('Psth app has been launched.')
            assert PSTHExperiment.PSTH_SERVER_PROCESS.poll() is None
        URI = "PYROLOC://localhost:%d/%s" % (PSTHExperiment.PSTH_SERVER_PORT, 'psth_server')
        Pyro.core.initClient()
        return Pyro.core.getProxyForURI(URI)

    def pre_stim_setup(self):
        """Hook run just before stimulation; default sets the chart title."""
        self.psth_server.set_title(self.exp_name)

    def post_stim_setup(self):
        """Hook run just after stimulation; default does nothing."""
        pass

    def extract_results(self, _data):
        raise RuntimeError("Must override extract_results method with exp implementation!")
class ORITunExp(PSTHExperiment):
    """Orientation-tuning experiment for a single eye."""

    def __init__(self, eye, params, *args, **kwargs):
        # eye: 'left' or 'right'; params: stimulus parameters for that eye.
        super(ORITunExp, self).__init__(*args, **kwargs)
        self.pyro_source = 'pyro_psth_tuning.py'
        self.stim_source = 'orientation_tuning.py'
        self.exp_name = ExperimentConfig.CELLPREFIX + '-ori-tun-' + eye
        self.exp_param = 'ori'
        self.eye = eye
        self.params = params
        self.assignments = ["eye = '%s'" % eye]

    def run(self):
        """Run the stimulus for the configured eye and return the optimal orientation."""
        super(ORITunExp, self).run()
        if self.eye in ('left', 'right'):
            stim_kwargs = {'assignments': self.assignments,
                           '%s_params' % self.eye: self.params}
            self.run_stimulus(**stim_kwargs)
        return self.psth_analysis()

    def pre_stim_setup(self):
        super(ORITunExp, self).pre_stim_setup()
        self.logger.info('Choose no curve fitting for this experiment.')
        self.psth_server.check_fitting('none')

    def extract_results(self, data):
        """Pull the optimal orientation out of the server's result dict."""
        if 'max_param' in data:
            self.logger.info('Get optimal parameter from %s experiment: %f' % (self.exp_name, data['max_param']))
        else:
            self.logger.error('Failed to get optimal parameter from %s experiment.' % self.exp_name)
        return float(data['max_param'])
class SPFTunExp(PSTHExperiment):
    """Spatial-frequency-tuning experiment for a single eye."""

    def __init__(self, eye, params, *args, **kwargs):
        # eye: 'left' or 'right'; params: stimulus parameters for that eye.
        super(SPFTunExp, self).__init__(*args, **kwargs)
        self.pyro_source = 'pyro_psth_tuning.py'
        self.stim_source = 'spatial_freq_tuning.py'
        self.exp_name = ExperimentConfig.CELLPREFIX + '-spf-tun-' + eye
        self.exp_param = 'spf'
        self.eye = eye
        self.params = params
        self.assignments = ["eye = '%s'" % eye]

    def run(self):
        """Run the stimulus for the configured eye and return the optimal spatial frequency."""
        super(SPFTunExp, self).run()
        if self.eye in ('left', 'right'):
            stim_kwargs = {'assignments': self.assignments,
                           '%s_params' % self.eye: self.params}
            self.run_stimulus(**stim_kwargs)
        return self.psth_analysis()

    def pre_stim_setup(self):
        super(SPFTunExp, self).pre_stim_setup()
        self.logger.info('Choose Gaussian curve fitting.')
        self.psth_server.check_fitting('gauss')

    def extract_results(self, data):
        """Pull the optimal spatial frequency out of the server's result dict."""
        if 'max_param' in data:
            self.logger.info('Get optimal parameter from %s experiment: %f' % (self.exp_name, data['max_param']))
        else:
            self.logger.error('Failed to get optimal parameter from %s experiment.' % self.exp_name)
        return float(data['max_param'])
class PHATunExp(PSTHExperiment):
    """Phase-tuning experiment for a single eye."""

    def __init__(self, eye, params, *args, **kwargs):
        # eye: 'left' or 'right'; params: stimulus parameters for that eye.
        super(PHATunExp, self).__init__(*args, **kwargs)
        self.pyro_source = 'pyro_psth_tuning.py'
        self.stim_source = 'phase_tuning.py'
        self.exp_name = ExperimentConfig.CELLPREFIX + '-pha-tun-' + eye
        self.exp_param = 'pha'
        self.eye = eye
        self.params = params
        self.assignments = ["eye = '%s'" % eye]

    def run(self):
        """Run the stimulus for the configured eye and return the optimal phase."""
        super(PHATunExp, self).run()
        if self.eye in ('left', 'right'):
            stim_kwargs = {'assignments': self.assignments,
                           '%s_params' % self.eye: self.params}
            self.run_stimulus(**stim_kwargs)
        return self.psth_analysis()

    def pre_stim_setup(self):
        super(PHATunExp, self).pre_stim_setup()
        self.logger.info('Choose no curve fitting for this experiment.')
        self.psth_server.check_fitting('none')

    def extract_results(self, data):
        """Pull the optimal phase out of the server's result dict."""
        if 'max_param' in data:
            self.logger.info('Get optimal parameter from %s experiment: %f' % (self.exp_name, data['max_param']))
        else:
            self.logger.error('Failed to get optimal parameter from %s experiment.' % self.exp_name)
        return float(data['max_param'])
class DSPTunExp(PSTHExperiment):
    """Binocular disparity-tuning experiment."""

    def __init__(self, left_params, right_params, repeats, postfix, *args, **kwargs):
        # Separate stimulus parameters are supplied for each eye.
        super(DSPTunExp, self).__init__(*args, **kwargs)
        self.pyro_source = 'pyro_psth_tuning.py'
        self.stim_source = 'disparity_tuning.py'
        self.exp_name = ExperimentConfig.CELLPREFIX + '-dsp-tun-' + postfix
        self.exp_param = 'dsp'
        self.eye = ['left', 'right']
        self.left_params = left_params
        self.right_params = right_params
        self.repeats = repeats
        self.assignments = ['repeats = %d' % repeats]

    def run(self):
        """Run the binocular stimulus and return the optimal disparity."""
        super(DSPTunExp, self).run()
        self.run_stimulus(self.left_params, self.right_params,
                          assignments=self.assignments)
        return self.psth_analysis()

    def pre_stim_setup(self):
        super(DSPTunExp, self).pre_stim_setup()
        self.logger.info('Choose Sinusoid curve fitting.')
        self.psth_server.check_fitting('sin')

    def extract_results(self, data):
        """Pull the optimal disparity out of the server's result dict."""
        if 'max_param' in data:
            self.logger.info('Get optimal parameter from %s experiment: %f' % (self.exp_name, data['max_param']))
        else:
            self.logger.error('Failed to get optimal parameter from %s experiment.' % self.exp_name)
        return float(data['max_param'])
class SpikeLatencyExp(PSTHExperiment):
    """Spike-latency experiment: runs a random-phase stimulus for one eye and
    reads the time of the first PSTH peak from the psth-average server.
    """
    def __init__(self,eye,params,*args,**kwargs):
        # eye: 'left' or 'right'; params: stimulus parameters for that eye.
        super(SpikeLatencyExp, self).__init__(*args,**kwargs)
        self.pyro_source = 'pyro_psth_average.py'
        self.stim_source = 'rand_phase.py'
        self.exp_name = ExperimentConfig.CELLPREFIX + '-latency-' + eye
        self.exp_param = 'lat'
        self.eye = eye
        self.params = params
        self.assignments = ["eye = '%s'" %eye]
    def run(self):
        """Run the stimulus for the configured eye and return the spike latency."""
        super(SpikeLatencyExp, self).run()
        if self.eye == 'left':
            self.run_stimulus(left_params=self.params, assignments=self.assignments)
        elif self.eye == 'right':
            self.run_stimulus(right_params=self.params, assignments=self.assignments)
        latency = self.psth_analysis()
        return latency
    def pre_stim_setup(self):
        super(SpikeLatencyExp, self).pre_stim_setup()
    def extract_results(self, data):
        """Return the time of the first PSTH maximum, converted to seconds."""
        if 'maxima' not in data:
            self.logger.error('Failed to get spike latency from %s experiment.' %self.exp_name)
        else:
            # NOTE(review): reads 'maxima_index' while log_psth_data below uses
            # 'maxima_indices' — confirm which key the server actually returns.
            first_peak = data['maxima_index'][0]
            self.logger.info('Get spike latency from %s experiment: %f' %(self.exp_name, first_peak))
            # presumably the peak time is in milliseconds — TODO confirm.
            return first_peak/1000.0
    def log_psth_data(self, data):
        """Write the PSTH trace and its extrema to <CELLDIR>/<exp_name>.csv."""
        data_file = ExperimentConfig.CELLDIR + os.path.sep + self.exp_name + '.csv'
        data_lines = ''
        if 'time' in data and 'psth' in data:
            data_lines += 'Time,Value\n'
            for psth_time, psth_value in zip(data['time'], data['psth']):
                data_lines += '{0},{1:.2f}\n'.format(psth_time, psth_value)
        extrima_lines = ''
        if 'maxima_indices' in data and 'maxima' in data:
            extrima_lines += 'Maxima,Value\n'
            for maxima_time,maxima_value in zip(data['maxima_indices'],data['maxima']):
                extrima_lines += '{0},{1:.2f}\n'.format(maxima_time,maxima_value)
        if 'minima_indices' in data and 'minima' in data:
            extrima_lines += 'Minima,Value\n'
            for minima_time,minima_value in zip(data['minima_indices'],data['minima']):
                extrima_lines += '{0},{1:.2f}\n'.format(minima_time,minima_value)
        with open(data_file,'w') as data_output:
            # Extrema rows are written above the trace rows.
            data_output.writelines(extrima_lines + data_lines)
|
chrox/RealTimeElectrophy
|
Experimenter/Experiments/PSTHExperiment.py
|
Python
|
bsd-2-clause
| 13,693
|
[
"Gaussian"
] |
b5e70b4974a85c70f3514b2276ca27aad5eb1d8f93776e2cdfdcdef8c9b91d0e
|
import numpy as np
from numpy.dual import svd
from scipy.spatial.distance import pdist, squareform, cdist
from StringGPy.utilities.gpy_kernels import StringGPKern, string_cov
import sys
from multiprocessing import Pool, cpu_count
'''
Computes the (unconditional) covariance matrix between two vectors.
'''
def covMatrix(X, Y, theta, symmetric=True,
              kernel=lambda u, theta: theta[0]*theta[0]*np.exp(-0.5*u*u/(theta[1]*theta[1])),
              dist_f=None):
    """Compute the (unconditional) covariance matrix between two sets of inputs.

    Parameters
    ----------
    X, Y : array-like
        Input locations; 1-D inputs are promoted to column vectors.
        ``Y`` is ignored when ``symmetric`` is True.
    theta : sequence
        Kernel hyper-parameters passed straight to ``kernel``.
    symmetric : bool
        If True, compute the auto-covariance of ``X`` (via ``pdist``);
        otherwise the cross-covariance between ``X`` and ``Y`` (via ``cdist``).
    kernel : callable (u, theta) -> value
        Applied element-wise to the distance matrix; default is the
        squared-exponential kernel.
    dist_f : callable or None
        Optional custom distance metric; None uses the scipy default
        (Euclidean).

    Returns
    -------
    ndarray of shape (len(X), len(X)) or (len(X), len(Y)).
    """
    # Promote 1-D inputs to (n, 1) column vectors, copying as before.
    _X = np.array(X)
    if _X.ndim == 1:
        _X = _X[:, None]
    _Y = np.array(Y)
    if _Y.ndim == 1:
        _Y = _Y[:, None]
    # Single code path: pass the custom metric only when one was given
    # (the original duplicated both branches and compared with `== None`).
    metric_args = () if dist_f is None else (dist_f,)
    if symmetric:
        M = squareform(pdist(_X, *metric_args))
    else:
        M = cdist(_X, _Y, *metric_args)
    return kernel(M, theta)
def get_kernel_lambda(k_type):
    """From string to functional form of the kernel as a lambda.

    Supported types: 'se' (squared exponential), 'ma32' / 'ma52' (Matern
    3/2 and 5/2), 'rq' (rational quadratic), and the spectral-mixture sums
    'sse', 'sma32', 'sma52' whose theta is a flat list of (amplitude,
    length-scale, frequency) triples.

    Raises
    ------
    ValueError
        For an unknown kernel type (the original fell through and raised
        an obscure NameError at the return).
    """
    if k_type == "se":
        kernel = lambda u, theta: theta[0]*theta[0]*np.exp(-0.5*u*u/(theta[1]*theta[1]))
    elif k_type == "ma32":
        kernel = lambda u, theta: theta[0]*theta[0]*(1+(np.sqrt(3.0)/theta[1])*
            np.abs(u))*np.exp(-(np.sqrt(3.0)/theta[1])*np.abs(u))
    elif k_type == "ma52":
        kernel = lambda u, theta: theta[0]*theta[0]*(1.0 + (np.sqrt(5.0)/theta[1])*np.abs(u) +
            (5.0/(3.0*theta[1]*theta[1]))*u*u)*np.exp(-(np.sqrt(5.0)/theta[1])*np.abs(u))
    elif k_type == "rq":
        # Fixed: the original assigned to `self.kernel`, a NameError in a
        # module-level function.
        kernel = lambda u, theta: theta[0]*theta[0]*((1.0+u*u/(2*theta[2]*theta[1]*theta[1]))**(-theta[2]))
    elif k_type == "sse":
        # len(theta) // 3: integer division (the original used
        # xrange(len(theta)/3), broken under `from __future__ import division`).
        kernel = lambda u, theta: sum([theta[3*i]*theta[3*i]*
            np.exp(-0.5*u*u/(theta[3*i+1]*theta[3*i+1]))*
            np.cos(2.0*np.pi*theta[3*i+2]) for i in range(len(theta) // 3)])
    elif k_type == "sma32":
        kernel = lambda u, theta: sum([theta[3*i]*theta[3*i]*
            (1+(np.sqrt(3.0)/theta[3*i+1])*np.abs(u))*np.exp(-(np.sqrt(3.0)/theta[3*i+1])*
            np.abs(u))*np.cos(2.0*np.pi*theta[3*i+2]) for i in range(len(theta) // 3)])
    elif k_type == "sma52":
        kernel = lambda u, theta: sum([theta[3*i]*theta[3*i]*
            (1.0 + (np.sqrt(5.0)/theta[3*i+1])*np.abs(u) + (5.0/(3.0*theta[3*i+1]*theta[3*i+1]))*u*u)*
            np.exp(-(np.sqrt(5.0)/theta[3*i+1])*np.abs(u))*
            np.cos(2.0*np.pi*theta[3*i+2]) for i in range(len(theta) // 3)])
    else:
        raise ValueError("Unknown kernel type: %s" % (k_type,))
    return kernel
'''
Inverts a positive-definite matrix taking care of conditioning
'''
def inv_cov(cov):
    """Invert a positive-definite matrix robustly via SVD.

    When the condition number exceeds 1e8, a jitter is added to the
    singular values so that the effective condition number is capped.
    """
    U, S, _ = svd(cov)
    s_min = np.min(S)
    cond = np.max(S) / s_min
    if cond > 1e8:
        capped = np.min([cond, 1e8])
        jitter = s_min * (cond - capped) / (capped - 1.0)
    else:
        jitter = 0.0
    # half_inv = S^{-1/2} U^T, so half_inv^T half_inv = U S^{-1} U^T.
    half_inv = np.dot(np.diag(1.0 / (np.sqrt(np.absolute(S) + jitter))), U.T)
    return np.dot(half_inv.T, half_inv)
'''
Computes the inverse and the determinant of a covariance matrix in one go, using
SVD.
Returns a structure containing the following keys:
inv: the inverse of the covariance matrix,
L: the pseudo-cholesky factor US^0.5,
det: the determinant of the covariance matrix.
'''
def SVDFactorise(cov, max_cn=1e8):
    """Invert and factorise a covariance matrix in one SVD pass.

    Adds jitter to the singular values when the condition number exceeds
    ``max_cn``, then returns a dict with:
      'inv'        : inverse of the (jittered) covariance,
      'L'          : pseudo-Cholesky factor U S^0.5,
      'LI'         : inverse factor S^-0.5 U^T,
      'det'        : determinant,
      'log_det'    : log-determinant,
      'eigen_vals' : jittered singular values,
      'u', 'v'     : SVD factors.
    """
    U, S, V = svd(cov)
    smallest = np.min(S)
    cond = np.max(S) / smallest
    eps = 0.0
    if cond > max_cn:
        target = np.min([cond, max_cn])
        eps = smallest * (cond - target) / (target - 1.0)
    vals = S + eps
    L = np.dot(U, np.diag(np.sqrt(vals)))
    LI = np.dot(np.diag(1.0 / (np.sqrt(np.absolute(S) + eps))), U.T)
    return {
        'inv': np.dot(LI.T, LI).copy(),
        'L': L.copy(),
        'det': np.prod(vals),
        'log_det': np.sum(np.log(vals)),
        'LI': LI.copy(),
        'eigen_vals': vals,
        'u': U.copy(),
        'v': V.copy(),
    }
'''
Computes the hyper-parameters and the noise variance of the GP regression model
under i.i.d Gaussian noise.
'''
def gp_regression_calibrate(X, Y, hyper_type='SE', x_0=np.array([1.0, 1.0, 1.0]),
                            penalty_center=0.0):
    """Calibrate GP-regression hyper-parameters and noise variance under
    i.i.d. Gaussian noise by minimising the negative log marginal likelihood.

    Parameters
    ----------
    X, Y : array-like
        Training inputs and targets.
    hyper_type : str
        Kernel family: 'ma32' (case-insensitive) for Matern-3/2, anything
        else for squared exponential.
    x_0 : ndarray
        Starting point [noise std, amplitude, length-scale].
    penalty_center : float or None
        If not None, adds a Gaussian penalty 0.5*(length_scale - center)^2
        to the objective.

    Returns
    -------
    (noise_variance, theta) with theta = |x_opt[1:]|.

    Notes
    -----
    Fixes vs. the original: the unused ``inner1d`` import (removed from
    modern numpy) and unused ``kernel_d2`` locals are gone; ``!= None``
    became ``is not None``; the SVD failure path used a Python-2 ``print``
    (a syntax error on Python 3) and now raises with a message; the second
    optimisation attempt now actually starts from the first optimum, as
    the original comments intended.
    """
    from scipy.optimize import minimize

    if hyper_type.lower() == 'ma32':
        kernel = lambda u, theta: theta[0]*theta[0]*(1+(np.sqrt(3.0)/theta[1])*
            np.abs(u))*np.exp(-(np.sqrt(3.0)/theta[1])*np.abs(u))
    else:
        kernel = lambda u, theta: theta[0]*theta[0]*np.exp(-0.5*u*u/(theta[1]*theta[1]))

    def log_marginal(x):
        # Negative log marginal likelihood (up to an additive constant),
        # plus the optional Gaussian penalty on the length-scale.
        noise_var = x[0]*x[0]
        theta = np.abs(x[1:])
        cov = covMatrix(X, X, theta, symmetric=True, kernel=kernel) + noise_var*np.eye(len(X))
        try:
            svd_factor = SVDFactorise(cov, max_cn=1e6)
        except Exception:
            raise ValueError('SVD factorisation failed for theta=%s, x=%s' % (theta, x))
        cov_i = svd_factor['inv']
        cov_det = svd_factor['det']
        res = np.log(cov_det) + np.dot(Y, np.dot(cov_i, Y))
        if penalty_center is not None:
            res += 0.5*((theta[1]-np.array([penalty_center]))/1.0)**2
        # Return a scalar so scipy.optimize.minimize accepts the objective.
        return float(np.asarray(res).reshape(-1)[0])

    # Attempt 1: warm-up/smart initialisation
    res = minimize(log_marginal, x_0, method='L-BFGS-B')
    x_opt = res.x
    # Attempt 2: refine starting from the warm-up optimum (the original
    # restarted from x_0 again, making this a no-op).
    res = minimize(log_marginal, x_opt, method='L-BFGS-B')
    x_opt = res.x
    return (x_opt[0]*x_opt[0], np.abs(x_opt[1:]))
'''
Computes the hyper-parameters and the noise variance of the GP regression model
under i.i.d Gaussian noise.
'''
def string_gp_regression_calibrate(X, Y, n_string, min_t, max_t, x_0, hyper_type = 'SE', ):
    """Calibrate the hyper-parameters and noise variances of a string-GP
    regression model under i.i.d. Gaussian noise.

    The flat parameter vector x is laid out as (K = n_string):
      x[:K]        per-string noise standard deviations,
      x[K:3K]      K pairs of kernel hyper-parameters,
      x[-K:]       'drivers' mapped to string boundary times via
                   boundaries_from_drivers.

    Returns np.abs of the BFGS optimum of the negative log marginal
    likelihood (two restarts).
    NOTE: written for Python 2 (xrange, print statements).
    """
    from scipy.optimize import fmin_bfgs
    K = n_string # Number of strings
    # Create the array of input string gp indices (X might not be sorted)
    X_couples = [(X[i], i) for i in xrange(len(X))]
    from operator import itemgetter
    X_couples.sort(key=itemgetter(0))
    X_sorted = [elt[0] for elt in X_couples]
    def log_marginal(x):
        # Negative log marginal likelihood for parameter vector x.
        noise_vars = x[:K]**2 # The first K terms are string noise variances
        thetas = []
        for _ in xrange(K):
            thetas += [np.abs([x[K+2*_], x[K+1+2*_]])] # The next 2K are thetas
        thetas = np.array(thetas)
        drvs = x[-n_string:] # The last K are used to determine boundary times
        b_X_sorted = boundaries_from_drivers(drvs, min_t, max_t)
        if n_string > 1:
            # Walk the sorted inputs and assign each to its string interval.
            X_sorted_string_ids = []
            idx = 1
            for x in X_sorted:
                while x > b_X_sorted[idx]:
                    idx += 1
                X_sorted_string_ids += [idx]
        else:
            X_sorted_string_ids = [1]*len(X_sorted)
        # Undo the sort so string ids line up with the original X order.
        X_sorted_string_ids_couples = [(X_sorted_string_ids[i], X_couples[i][1]) for i in xrange(len(X_couples))]
        X_sorted_string_ids_couples.sort(key=itemgetter(1))
        X_string_ids = np.array([elt[0] for elt in X_sorted_string_ids_couples])-1 #String indexed from 0 here
        # Per-string noise variance on the diagonal.
        cov = string_cov(X, X, thetas, b_X_sorted, hyper_type.lower()) + np.diag(noise_vars[X_string_ids])
        try:
            svd_factor = SVDFactorise(cov)
        except:
            print thetas
            print b_X_sorted
            raise ValueError
        cov_i = svd_factor['inv']
        cov_det = svd_factor['det']
        res = np.log(cov_det)+np.dot(Y, np.dot(cov_i, Y))
        return res
    # Attempt 1: warm-up/smart initialisation
    x_opt = fmin_bfgs(log_marginal, x_0, disp=False)
    # Attempt 2: max from smart initialisation
    x_opt = fmin_bfgs(log_marginal, np.abs(x_opt), disp=False)
    return np.abs(x_opt)
'''
Utility function that maps K real numbers (drvs) to a partition
of the interval [min_t, max_t] in K.
'''
def boundaries_from_drivers(drvs, min_t, max_t):
    """Map K real 'driver' values to a partition of [min_t, max_t] into K intervals.

    Each driver is squashed through a sigmoid into (1, 10); the normalised
    cumulative weights give the interior boundary fractions, so the result
    is an array of K+1 boundary times starting at min_t and ending at max_t.
    """
    span = max_t - min_t
    weights = 1.0 + 9.0/(1.0 + np.exp(-drvs))
    fractions = np.cumsum(weights)/sum(weights)
    return np.array([min_t] + list(min_t + span*fractions))
###################################
# LOG TO STD-OUT AND FILE #
###################################
class Tee(object):
    """Mirror everything written to sys.stdout into a log file.

    On construction the instance replaces sys.stdout; every write/flush is
    forwarded to both the log file and the original stdout. Call release()
    (also invoked from __del__) to restore stdout and close the file.
    """

    def __init__(self, fl_name, mode):
        """
        :type mode: str
        :type fl_name: str
        """
        self.file = open(fl_name, mode)
        self.stdout = sys.stdout
        sys.stdout = self

    def write(self, data):
        """Write *data* to the log file and the original stdout."""
        for sink in (self.file, self.stdout):
            sink.write(data)

    def flush(self):
        """Flush both sinks."""
        for sink in (self.file, self.stdout):
            sink.flush()

    def release(self):
        """Restore the original stdout and close the log file."""
        sys.stdout = self.stdout
        self.file.close()

    def __del__(self):
        # Same teardown as release(); closing twice is harmless.
        self.release()
def print_compiler_options():
    """Print the C compiler command line distutils would use on this platform.

    Fixed the Python-2 ``print`` statement (a syntax error on Python 3).
    NOTE(review): distutils is deprecated and removed in Python 3.12+ —
    this helper only works on interpreters that still ship it.
    """
    import distutils.sysconfig
    import distutils.ccompiler
    compiler = distutils.ccompiler.new_compiler()
    distutils.sysconfig.customize_compiler(compiler)
    print(compiler.compiler_so)
def robust_invert_noisy_cov(args):
    """Computes (robustly) the invert of a noisy auto-covariance matrix.

    *args* is a tuple (Xs, hypers, kernel_type, noise_variance) so the
    function can be dispatched with multiprocessing.Pool.map.
    """
    Xs, hypers, k_type, noise_var = args
    kernel = get_kernel_lambda(k_type)
    noisy_cov = covMatrix(Xs, Xs, hypers, symmetric=True, kernel=kernel) \
        + noise_var*np.eye(len(Xs))
    return inv_cov(noisy_cov)
def parallel_invert_noisy_cov(args_list, M):
    """Invert a batch of noisy covariance matrices in parallel.

    Uses at most min(cpu_count()-1, 30, M) worker processes; each element
    of *args_list* is an argument tuple for robust_invert_noisy_cov.
    """
    n_workers = min(cpu_count() - 1, 30, M)
    pool = Pool(n_workers)
    results = pool.map(robust_invert_noisy_cov, args_list)
    pool.close()
    pool.join()
    return results
def robust_neg_log_lik(args):
    """Negative log marginal likelihood of noisy GP observations.

    *args* is a tuple (Xs, hypers, kernel_type, noise_variance, Ys) so the
    function can be dispatched with multiprocessing.Pool.map.

    Returns 0.5 * (log|C| + Ys^T C^-1 Ys + n log 2*pi), where C is the
    kernel covariance of Xs plus noise_variance on the diagonal.

    Fixed vs. the original: the failure path used a Python-2 ``print``
    statement (a syntax error on Python 3); it now raises ValueError with
    the offending hyper-parameters in the message.
    """
    Xs = args[0]
    hypers = args[1]
    k_type = args[2]
    noise_var = args[3]
    Ys = args[4]
    kernel = get_kernel_lambda(k_type)
    cov_train_train = covMatrix(Xs, Xs, hypers, symmetric=True, kernel=kernel) \
        + noise_var*np.eye(len(Xs))
    try:
        svd_factor = SVDFactorise(cov_train_train, max_cn=1e5)
    except Exception:
        raise ValueError("Error in robust_neg_log_lik, hypers=%s" % (hypers,))
    cov_inv = svd_factor['inv']
    log_cov_det = svd_factor['log_det']
    ll = 0.5*(log_cov_det + np.dot(Ys, np.dot(cov_inv, Ys)) + len(Xs)*np.log(2.0*np.pi))
    return ll
def parallel_neg_log_lik(args_list, M):
    """Total negative log marginal likelihood of a batch, computed in parallel.

    Each element of *args_list* is an argument tuple for
    robust_neg_log_lik; returns the sum of the per-item values.

    Bug fix: the original created a Pool but then called the builtin
    ``map``, so all work ran serially on one core (and on Python 3
    produced a lazy iterator). Use pool.map, mirroring
    parallel_invert_noisy_cov.
    """
    pool = Pool(min(cpu_count() - 1, 30, M))
    lls = pool.map(robust_neg_log_lik, args_list)
    pool.close()
    pool.join()
    return np.sum(lls)
|
YLnKS/StringGPy
|
StringGPy/utilities/other_goodies.py
|
Python
|
bsd-3-clause
| 11,053
|
[
"Gaussian"
] |
a2a6ad0230d862e11259cc047d8c60ea5405ce7aa9ac6104c8346056798faa38
|
from DIRAC import gLogger, S_OK
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.StorageManagementSystem.Client.StorageManagerClient import StorageManagerClient
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.StorageManagementSystem.DB.StorageManagementDB import THROTTLING_STEPS, THROTTLING_TIME
import re
AGENT_NAME = "StorageManagement/StageRequestAgent"
class StageRequestAgent(AgentModule):
def initialize(self):
    """One-time agent initialisation: create the StorageManagerClient and
    read agent options.

    :return: S_OK() always.
    """
    self.stagerClient = StorageManagerClient()
    # self.storageDB = StorageManagementDB()
    # pin lifetime = 1 day
    self.pinLifetime = self.am_getOption("PinLifetime", THROTTLING_TIME)
    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption("shifterProxy", "DataManager")
    return S_OK()
def execute(self):
    """One agent cycle: refresh the per-SE cache usage, then submit the
    pending stage requests.

    :return: the failing result from getStorageUsage, or the result of
             submitStageRequests.
    """
    # Get the current submitted stage space and the amount of pinned space
    # for each storage element.
    usage = self.getStorageUsage()
    if not usage["OK"]:
        return usage
    return self.submitStageRequests()
def getStorageUsage(self):
    """Fill the current Status of the SE Caches from the DB"""
    self.storageElementCache = {}
    res = self.stagerClient.getSubmittedStagePins()
    if not res["OK"]:
        gLogger.fatal(
            "StageRequest.getStorageUsage: Failed to obtain submitted requests from StorageManagementDB.",
            res["Message"],
        )
        return res
    self.storageElementUsage = res["Value"]
    if not self.storageElementUsage:
        gLogger.info("StageRequest.getStorageUsage: No active stage/pin requests found.")
        return S_OK()
    gLogger.info("StageRequest.getStorageUsage: Active stage/pin requests found at the following sites:")
    for storageElement in sorted(self.storageElementUsage):
        seDict = self.storageElementUsage[storageElement]
        # Convert to GB for printout
        seDict["TotalSize"] = seDict["TotalSize"] / (1000 * 1000 * 1000.0)
        gLogger.info(
            "StageRequest.getStorageUsage: %s: %s replicas with a size of %.3f GB."
            % (storageElement.ljust(15), str(seDict["Replicas"]).rjust(6), seDict["TotalSize"])
        )
    return S_OK()
def submitStageRequests(self):
    """This manages the following transitions of the Replicas
    * Waiting -> Offline (if the file is not found Cached)
    * Waiting -> StageSubmitted (if the file is found Cached)
    * Offline -> StageSubmitted (if there are not more Waiting replicas)

    :return: S_OK() on success, or the failing DB result (carrying 'Message').
    """
    # Retry Replicas that have not been Staged in a previous attempt
    res = self._getMissingReplicas()
    if not res["OK"]:
        gLogger.fatal(
            "StageRequest.submitStageRequests: Failed to get replicas from StorageManagementDB.", res["Message"]
        )
        return res
    seReplicas = res["Value"]["SEReplicas"]
    allReplicaInfo = res["Value"]["AllReplicaInfo"]
    if seReplicas:
        gLogger.info("StageRequest.submitStageRequests: Completing partially Staged Tasks")
        for storageElement, seReplicaIDs in seReplicas.items():
            gLogger.debug("Staging at %s:" % storageElement, seReplicaIDs)
            self._issuePrestageRequests(storageElement, seReplicaIDs, allReplicaInfo)
    # Check Waiting Replicas and select those found Online and all other Replicas from the same Tasks
    res = self._getOnlineReplicas()
    if not res["OK"]:
        gLogger.fatal(
            "StageRequest.submitStageRequests: Failed to get replicas from StorageManagementDB.", res["Message"]
        )
        return res
    seReplicas = res["Value"]["SEReplicas"]
    allReplicaInfo = res["Value"]["AllReplicaInfo"]
    # Check Offline Replicas that fit in the Cache and all other Replicas from the same Tasks
    res = self._getOfflineReplicas()
    if not res["OK"]:
        gLogger.fatal(
            "StageRequest.submitStageRequests: Failed to get replicas from StorageManagementDB.", res["Message"]
        )
        return res
    # Merge info from both results
    for storageElement, seReplicaIDs in res["Value"]["SEReplicas"].items():
        seReplicas.setdefault(storageElement, []).extend(seReplicaIDs)
    allReplicaInfo.update(res["Value"]["AllReplicaInfo"])
    gLogger.info("StageRequest.submitStageRequests: Obtained %s replicas for staging." % len(allReplicaInfo))
    for storageElement, seReplicaIDs in seReplicas.items():
        gLogger.debug("Staging at %s:" % storageElement, seReplicaIDs)
        self._issuePrestageRequests(storageElement, seReplicaIDs, allReplicaInfo)
    return S_OK()
def _getMissingReplicas(self):
    """This recovers Replicas that were not Staged on a previous attempt (the stage request failed or timed out),
    while other Replicas of the same task are already Staged. If left behind they can produce a deadlock.
    All SEs are considered, even if their Cache is full

    :return: the result of __addAssociatedReplicas (S_OK with SEReplicas /
             AllReplicaInfo, or an error result).
    """
    # Get Replicas that are in Staged/StageSubmitted
    gLogger.info("StageRequest._getMissingReplicas: Checking Staged Replicas")
    res = self.__getStagedReplicas()
    if not res["OK"]:
        gLogger.fatal(
            "StageRequest._getMissingReplicas: Failed to get replicas from StorageManagementDB.", res["Message"]
        )
        return res
    seReplicas = {}
    allReplicaInfo = res["Value"]["AllReplicaInfo"]
    replicasToStage = []
    for seReplicaIDs in res["Value"]["SEReplicas"].values():
        # Consider all SEs
        replicasToStage += seReplicaIDs
    # Get Replicas from the same Tasks as those selected
    # NOTE(review): seReplicas/allReplicaInfo are passed in empty —
    # presumably __addAssociatedReplicas fills them in place; confirm there.
    res = self.__addAssociatedReplicas(replicasToStage, seReplicas, allReplicaInfo)
    if not res["OK"]:
        gLogger.fatal("StageRequest._getMissingReplicas: Failed to get associated Replicas.", res["Message"])
    return res
def _getOnlineReplicas(self):
    """This manages the transition
    * Waiting -> Offline (if the file is not found Cached)
    and returns the list of Cached Replicas for which the pin time has to be extended
    SEs for which the cache is currently full are not considered

    :return: S_OK with {'SEReplicas': ..., 'AllReplicaInfo': ...} or an error result.
    """
    # Get all Replicas in Waiting Status associated to Staging Tasks
    gLogger.verbose("StageRequest._getOnlineReplicas: Checking Online Replicas to be handled")
    res = self.__getWaitingReplicas()
    if not res["OK"]:
        gLogger.fatal(
            "StageRequest._getOnlineReplicas: Failed to get replicas from StorageManagementDB.", res["Message"]
        )
        return res
    seReplicas = {}
    allReplicaInfo = res["Value"]["AllReplicaInfo"]
    if not len(allReplicaInfo):
        gLogger.info("StageRequest._getOnlineReplicas: There were no Waiting replicas found")
        return res
    gLogger.info("StageRequest._getOnlineReplicas: Obtained %s replicas Waiting for staging." % len(allReplicaInfo))
    replicasToStage = []
    for storageElement, seReplicaIDs in res["Value"]["SEReplicas"].items():
        if not self.__usage(storageElement) < self.__cache(storageElement):
            gLogger.info(
                "StageRequest._getOnlineReplicas: Skipping %s, current usage above limit ( %s GB )"
                % (storageElement, self.__cache(storageElement))
            )
            # Do not consider those SE that have the Cache full
            continue
        # Check if the Replica Metadata is OK and find out if they are Online or Offline
        res = self.__checkIntegrity(storageElement, seReplicaIDs, allReplicaInfo)
        if not res["OK"]:
            gLogger.error(
                "StageRequest._getOnlineReplicas: Failed to check Replica Metadata",
                "(%s): %s" % (storageElement, res["Message"]),
            )
        else:
            # keep only Online Replicas
            seReplicas[storageElement] = res["Value"]["Online"]
            replicasToStage += res["Value"]["Online"]
    # Get Replicas from the same Tasks as those selected
    res = self.__addAssociatedReplicas(replicasToStage, seReplicas, allReplicaInfo)
    if not res["OK"]:
        gLogger.fatal("StageRequest._getOnlineReplicas: Failed to get associated Replicas.", res["Message"])
    return res
    def _getOfflineReplicas(self):
        """Select Offline replicas to be staged, respecting each SE's cache limit.

        This checks Replicas in Offline status
        and returns the list of Replicas to be Staged.
        SEs for which the cache is currently full are not considered.

        :return: S_OK({"SEReplicas": {SE: [replicaID, ...]}, "AllReplicaInfo": {replicaID: info}})
                 or the failed result from the underlying DB calls
        """
        # Get all Replicas in Offline Status associated to Staging Tasks
        gLogger.verbose("StageRequest._getOfflineReplicas: Checking Offline Replicas to be handled")
        res = self.__getOfflineReplicas()
        if not res["OK"]:
            gLogger.fatal(
                "StageRequest._getOfflineReplicas: Failed to get replicas from StorageManagementDB.", res["Message"]
            )
            return res
        seReplicas = {}
        allReplicaInfo = res["Value"]["AllReplicaInfo"]
        if not len(allReplicaInfo):
            gLogger.info("StageRequest._getOfflineReplicas: There were no Offline replicas found")
            return res
        gLogger.info(
            "StageRequest._getOfflineReplicas: Obtained %s replicas Offline for staging." % len(allReplicaInfo)
        )
        replicasToStage = []
        for storageElement, seReplicaIDs in res["Value"]["SEReplicas"].items():
            # Skip SEs whose accounted usage has already reached the per-cycle cache limit
            if not self.__usage(storageElement) < self.__cache(storageElement):
                gLogger.info(
                    "StageRequest._getOfflineReplicas: Skipping %s, current usage above limit ( %s GB )"
                    % (storageElement, self.__cache(storageElement))
                )
                # Do not consider those SE that have the Cache full
                continue
            seReplicas[storageElement] = []
            # Accumulate replicas (in deterministic ID order), accounting their size,
            # until this SE's cache limit is reached
            for replicaID in sorted(seReplicaIDs):
                seReplicas[storageElement].append(replicaID)
                replicasToStage.append(replicaID)
                self.__add(storageElement, allReplicaInfo[replicaID]["Size"])
                if not self.__usage(storageElement) < self.__cache(storageElement):
                    # Stop adding Replicas when the cache is full
                    break
        # Get Replicas from the same Tasks as those selected
        res = self.__addAssociatedReplicas(replicasToStage, seReplicas, allReplicaInfo)
        if not res["OK"]:
            gLogger.fatal("StageRequest._getOfflineReplicas: Failed to get associated Replicas.", res["Message"])
        return res
def __usage(self, storageElement):
"""Retrieve current usage of SE"""
# Set it if not yet done
self.storageElementUsage.setdefault(storageElement, {"TotalSize": 0.0})
return self.storageElementUsage[storageElement]["TotalSize"]
def __cache(self, storageElement):
"""Retrieve cache size for SE"""
if storageElement not in self.storageElementCache:
diskCacheTB = float(StorageElement(storageElement).options.get("DiskCacheTB", 1.0))
self.storageElementCache[storageElement] = diskCacheTB * 1000.0 / THROTTLING_STEPS
return self.storageElementCache[storageElement]
def __add(self, storageElement, size):
"""Add size (in bytes) to current usage of storageElement (in GB)"""
self.storageElementUsage.setdefault(storageElement, {"TotalSize": 0.0})
size /= 1000.0 * 1000.0 * 1000.0
self.storageElementUsage[storageElement]["TotalSize"] += size
return size
    def _issuePrestageRequests(self, storageElement, seReplicaIDs, allReplicaInfo):
        """Submit prestage (bring-online) requests to the SE and record them in the DB.

        :param storageElement: name of the SE the replicas live on
        :param seReplicaIDs: replica IDs to prestage at this SE
        :param allReplicaInfo: replicaID -> replica metadata (must contain "LFN")
        :return: the failed DB result on insert error, otherwise None
        """
        # Since we are in a given SE, the lfn is a unique key
        lfnRepIDs = {}
        for replicaID in seReplicaIDs:
            lfn = allReplicaInfo[replicaID]["LFN"]
            lfnRepIDs[lfn] = replicaID
        # Now issue the prestage requests for the remaining replicas
        stageRequestMetadata = {}
        updatedLfnIDs = []
        if lfnRepIDs:
            gLogger.info(
                "StageRequest._issuePrestageRequests: Submitting %s stage requests for %s."
                % (len(lfnRepIDs), storageElement)
            )
            res = StorageElement(storageElement).prestageFile(lfnRepIDs, lifetime=self.pinLifetime)
            gLogger.debug("StageRequest._issuePrestageRequests: StorageElement.prestageStorageFile: res=", res)
            # Daniela: fishy result from ReplicaManager!!! Should NOT return OK
            # res= {'OK': True, 'Value': {'Successful': {}, 'Failed': {'srm://srm-lhcb.cern.ch/castor/cern.ch/grid/lhcb/data/2010/RAW/EXPRESS/LHCb/COLLISION10/71476/071476_0000000241.raw': ' SRM2Storage.__gfal_exec: Failed to perform gfal_prestage.[SE][BringOnline][SRM_INVALID_REQUEST] httpg://srm-lhcb.cern.ch:8443/srm/managerv2: User not able to access specified space token\n'}}}
            # res= {'OK': True, 'Value': {'Successful': {'srm://gridka-dCache.fzk.de/pnfs/gridka.de/lhcb/data/2009/RAW/FULL/LHCb/COLLISION09/63495/063495_0000000001.raw': '-2083846379'}, 'Failed': {}}}
            if not res["OK"]:
                gLogger.error(
                    "StageRequest._issuePrestageRequests: Completely failed to submit stage requests for replicas.",
                    res["Message"],
                )
            else:
                # Group successfully submitted replicas per SRM request ID
                for lfn, requestID in res["Value"]["Successful"].items():
                    stageRequestMetadata.setdefault(requestID, []).append(lfnRepIDs[lfn])
                    updatedLfnIDs.append(lfnRepIDs[lfn])
        if stageRequestMetadata:
            gLogger.info(
                "StageRequest._issuePrestageRequests: %s stage request metadata to be updated."
                % len(stageRequestMetadata)
            )
            res = self.stagerClient.insertStageRequest(stageRequestMetadata, self.pinLifetime)
            if not res["OK"]:
                gLogger.error(
                    "StageRequest._issuePrestageRequests: Failed to insert stage request metadata.", res["Message"]
                )
                return res
            res = self.stagerClient.updateReplicaStatus(updatedLfnIDs, "StageSubmitted")
            if not res["OK"]:
                gLogger.error("StageRequest._issuePrestageRequests: Failed to insert replica status.", res["Message"])
        # NOTE(review): falls through to a bare return (None) on success as well as on
        # updateReplicaStatus failure — callers apparently ignore the return value; confirm
        return
def __sortBySE(self, replicaDict):
seReplicas = {}
replicaIDs = {}
for replicaID, info in replicaDict.items():
lfn = info["LFN"]
storageElement = info["SE"]
size = info["Size"]
pfn = info["PFN"]
replicaIDs[replicaID] = {"LFN": lfn, "PFN": pfn, "Size": size, "StorageElement": storageElement}
seReplicas.setdefault(storageElement, []).append(replicaID)
return S_OK({"SEReplicas": seReplicas, "AllReplicaInfo": replicaIDs})
def __getStagedReplicas(self):
"""This obtains the Staged replicas from the Replicas table and for each LFN the requested storage element"""
# First obtain the Waiting replicas from the Replicas table
res = self.stagerClient.getStagedReplicas()
if not res["OK"]:
gLogger.error(
"StageRequest.__getStagedReplicas: Failed to get replicas with Waiting status.", res["Message"]
)
return res
if not res["Value"]:
gLogger.debug("StageRequest.__getStagedReplicas: No Waiting replicas found to process.")
else:
gLogger.debug(
"StageRequest.__getStagedReplicas: Obtained %s Waiting replicas(s) to process." % len(res["Value"])
)
return self.__sortBySE(res["Value"])
def __getWaitingReplicas(self):
"""This obtains the Waiting replicas from the Replicas table and for each LFN the requested storage element"""
# First obtain the Waiting replicas from the Replicas table
res = self.stagerClient.getWaitingReplicas()
if not res["OK"]:
gLogger.error(
"StageRequest.__getWaitingReplicas: Failed to get replicas with Waiting status.", res["Message"]
)
return res
if not res["Value"]:
gLogger.debug("StageRequest.__getWaitingReplicas: No Waiting replicas found to process.")
else:
gLogger.debug(
"StageRequest.__getWaitingReplicas: Obtained %s Waiting replicas(s) to process." % len(res["Value"])
)
return self.__sortBySE(res["Value"])
def __getOfflineReplicas(self):
"""This obtains the Offline replicas from the Replicas table and for each LFN the requested storage element"""
# First obtain the Waiting replicas from the Replicas table
res = self.stagerClient.getOfflineReplicas()
if not res["OK"]:
gLogger.error(
"StageRequest.__getOfflineReplicas: Failed to get replicas with Waiting status.", res["Message"]
)
return res
if not res["Value"]:
gLogger.debug("StageRequest.__getOfflineReplicas: No Waiting replicas found to process.")
else:
gLogger.debug(
"StageRequest.__getOfflineReplicas: Obtained %s Waiting replicas(s) to process." % len(res["Value"])
)
return self.__sortBySE(res["Value"])
    def __addAssociatedReplicas(self, replicasToStage, seReplicas, allReplicaInfo):
        """Pull in the Replicas that belong to the same Tasks as the provided list.

        Mutates ``replicasToStage``, ``seReplicas`` and ``allReplicaInfo`` in place,
        adding the associated Waiting/Offline replicas and accounting their size
        against the per-SE usage counters.

        :param replicasToStage: list of replica IDs already selected for staging
        :param seReplicas: mapping SE -> [replicaID, ...] already selected
        :param allReplicaInfo: mapping replicaID -> replica metadata
        :return: S_OK({"SEReplicas": seReplicas, "AllReplicaInfo": allReplicaInfo})
                 or the failed DB result
        """
        res = self.stagerClient.getAssociatedReplicas(replicasToStage)
        if not res["OK"]:
            gLogger.fatal("StageRequest.__addAssociatedReplicas: Failed to get associated Replicas.", res["Message"])
            return res
        # Bucket the associated replicas by status, then by SE
        addReplicas = {"Offline": {}, "Waiting": {}}
        replicaIDs = {}
        for replicaID, info in res["Value"].items():
            lfn = info["LFN"]
            storageElement = info["SE"]
            size = info["Size"]
            pfn = info["PFN"]
            status = info["Status"]
            if status in ["Waiting", "Offline"]:
                replicaIDs[replicaID] = {"LFN": lfn, "PFN": pfn, "Size": size, "StorageElement": storageElement}
                addReplicas[status].setdefault(storageElement, []).append(replicaID)
        waitingReplicas = addReplicas["Waiting"]
        offlineReplicas = addReplicas["Offline"]
        newReplicaInfo = replicaIDs
        allReplicaInfo.update(newReplicaInfo)
        # First handle Waiting Replicas for which metadata is to be checked
        for storageElement, seReplicaIDs in waitingReplicas.items():
            # Drop the ones already selected (iterate over a copy while removing)
            for replicaID in list(seReplicaIDs):
                if replicaID in replicasToStage:
                    seReplicaIDs.remove(replicaID)
            res = self.__checkIntegrity(storageElement, seReplicaIDs, allReplicaInfo)
            if not res["OK"]:
                gLogger.error(
                    "StageRequest.__addAssociatedReplicas: Failed to check Replica Metadata",
                    "(%s): %s" % (storageElement, res["Message"]),
                )
            else:
                # keep all Replicas (Online and Offline)
                seReplicas.setdefault(storageElement, []).extend(res["Value"]["Online"])
                replicasToStage.extend(res["Value"]["Online"])
                seReplicas[storageElement].extend(res["Value"]["Offline"])
                replicasToStage.extend(res["Value"]["Offline"])
        # Then handle Offline Replicas for which metadata is already checked
        for storageElement, seReplicaIDs in offlineReplicas.items():
            # sorted() makes a copy, so removing from the original list is safe
            for replicaID in sorted(seReplicaIDs):
                if replicaID in replicasToStage:
                    seReplicaIDs.remove(replicaID)
            seReplicas.setdefault(storageElement, []).extend(seReplicaIDs)
            replicasToStage.extend(seReplicaIDs)
        # Prune metadata for replicas that did not make the final selection
        for replicaID in list(allReplicaInfo):
            if replicaID not in replicasToStage:
                del allReplicaInfo[replicaID]
        # Account the size of everything selected against the per-SE usage counters
        totalSize = 0
        for storageElement in sorted(seReplicas.keys()):
            replicaIDs = seReplicas[storageElement]
            size = 0
            for replicaID in replicaIDs:
                size += self.__add(storageElement, allReplicaInfo[replicaID]["Size"])
            gLogger.info(
                "StageRequest.__addAssociatedReplicas: Considering %s GB to be staged at %s" % (size, storageElement)
            )
            totalSize += size
        gLogger.info("StageRequest.__addAssociatedReplicas: Obtained %s GB for staging." % totalSize)
        return S_OK({"SEReplicas": seReplicas, "AllReplicaInfo": allReplicaInfo})
    def __checkIntegrity(self, storageElement, seReplicaIDs, allReplicaInfo):
        """Check the integrity of the files to ensure they are available.

        Compares SE metadata against the catalog information, marks terminally
        broken replicas as failed in the DB, and updates the status of Offline
        Replicas for a later pass.

        :param storageElement: SE to query for file metadata
        :param seReplicaIDs: replica IDs to check at this SE
        :param allReplicaInfo: mapping replicaID -> replica metadata (with "LFN", "Size")
        :return: S_OK({"Online": [...], "Offline": [...]}) or the failed SE result
        """
        if not seReplicaIDs:
            return S_OK({"Online": [], "Offline": []})
        # Since we are with a given SE, the LFN is a unique key
        lfnRepIDs = {}
        for replicaID in seReplicaIDs:
            lfn = allReplicaInfo[replicaID]["LFN"]
            lfnRepIDs[lfn] = replicaID
        gLogger.info(
            "StageRequest.__checkIntegrity: Checking the integrity of %s replicas at %s."
            % (len(lfnRepIDs), storageElement)
        )
        res = StorageElement(storageElement).getFileMetadata(lfnRepIDs)
        if not res["OK"]:
            gLogger.error(
                "StageRequest.__checkIntegrity: Completely failed to obtain metadata for replicas.", res["Message"]
            )
            return res
        # Classify each replica: terminally failed, Online (cached) or Offline
        terminalReplicaIDs = {}
        onlineReplicaIDs = []
        offlineReplicaIDs = []
        for lfn, metadata in res["Value"]["Successful"].items():
            if metadata["Size"] != allReplicaInfo[lfnRepIDs[lfn]]["Size"]:
                gLogger.error("StageRequest.__checkIntegrity: LFN StorageElement size does not match FileCatalog", lfn)
                terminalReplicaIDs[lfnRepIDs[lfn]] = "LFN StorageElement size does not match FileCatalog"
                lfnRepIDs.pop(lfn)
            elif metadata.get("Lost", False):
                gLogger.error("StageRequest.__checkIntegrity: LFN has been Lost by the StorageElement", lfn)
                terminalReplicaIDs[lfnRepIDs[lfn]] = "LFN has been Lost by the StorageElement"
                lfnRepIDs.pop(lfn)
            elif metadata.get("Unavailable", False):
                gLogger.error("StageRequest.__checkIntegrity: LFN is declared Unavailable by the StorageElement", lfn)
                terminalReplicaIDs[lfnRepIDs[lfn]] = "LFN is declared Unavailable by the StorageElement"
                lfnRepIDs.pop(lfn)
            elif metadata.get("Cached", metadata["Accessible"]):
                # falls back to "Accessible" when the SE does not report "Cached"
                gLogger.verbose("StageRequest.__checkIntegrity: Cache hit for file.")
                onlineReplicaIDs.append(lfnRepIDs[lfn])
            else:
                offlineReplicaIDs.append(lfnRepIDs[lfn])
        for lfn, reason in res["Value"]["Failed"].items():
            if re.search("File does not exist", reason):
                gLogger.error("StageRequest.__checkIntegrity: LFN does not exist in the StorageElement", lfn)
                terminalReplicaIDs[lfnRepIDs[lfn]] = "LFN does not exist in the StorageElement"
            lfnRepIDs.pop(lfn)
        # Update the states of the replicas in the database #TODO Sent status to integrity DB
        if terminalReplicaIDs:
            gLogger.info("StageRequest.__checkIntegrity: %s replicas are terminally failed." % len(terminalReplicaIDs))
            res = self.stagerClient.updateReplicaFailure(terminalReplicaIDs)
            if not res["OK"]:
                gLogger.error("StageRequest.__checkIntegrity: Failed to update replica failures.", res["Message"])
        if onlineReplicaIDs:
            gLogger.info("StageRequest.__checkIntegrity: %s replicas found Online." % len(onlineReplicaIDs))
        if offlineReplicaIDs:
            gLogger.info("StageRequest.__checkIntegrity: %s replicas found Offline." % len(offlineReplicaIDs))
            res = self.stagerClient.updateReplicaStatus(offlineReplicaIDs, "Offline")
        return S_OK({"Online": onlineReplicaIDs, "Offline": offlineReplicaIDs})
|
DIRACGrid/DIRAC
|
src/DIRAC/StorageManagementSystem/Agent/StageRequestAgent.py
|
Python
|
gpl-3.0
| 24,958
|
[
"DIRAC"
] |
7d7d4c8c264f7e661798e80d654bf0d8ebb739ee9b028c7ebce449c47a30c364
|
# -*- coding: utf-8 -*-
'''
Created on 28 Nov 2013
@author: Kimon Tsitsikas
Copyright © 2012-2013 Kimon Tsitsikas, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License version 2 as published by the Free Software
Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
'''
from __future__ import division
import logging
import math
from numpy import random
import numpy
from numpy.random import shuffle
from numpy.random import uniform
from odemis.acq.align import coordinates
from odemis.acq.align import transform
from odemis.dataio import hdf5
from odemis.util import spot
import operator
import unittest
from builtins import range
logging.getLogger().setLevel(logging.DEBUG)
# @unittest.skip("skip")
class TestDivideInNeighborhoods(unittest.TestCase):
    """
    Test DivideInNeighborhoods combined with FindCenterCoordinates on grid
    images: clean, noisy, with a missing spot, and with a cosmic ray.
    """

    def setUp(self):
        # Fixed seed so the Gaussian noise (and thus the detection) is reproducible
        random.seed(0)

    @staticmethod
    def _load_grid(filename):
        """Read an HDF5 acquisition and return the image reshaped to 2D (Y, X)."""
        grid_data = hdf5.read_data(filename)
        C, T, Z, Y, X = grid_data[0].shape
        grid_data[0].shape = Y, X
        return grid_data[0]

    @staticmethod
    def _add_gaussian_noise(data, sigma=40):
        """Return *data* with zero-mean Gaussian noise of std *sigma* added."""
        noise = random.normal(0, sigma, data.size)
        noise_array = noise.reshape(data.shape[0], data.shape[1])
        return data + noise_array

    def _divide_and_count(self, data):
        """Run the divide/find-center/reconstruct pipeline; return the subimage count."""
        subimages, subimage_coordinates = coordinates.DivideInNeighborhoods(data, (10, 10), 40)
        spot_coordinates = [spot.FindCenterCoordinates(i) for i in subimages]
        # Reconstruct, as the real workflow does (result not asserted here)
        coordinates.ReconstructCoordinates(subimage_coordinates, spot_coordinates)
        return len(subimages)

    def test_divide_and_find_center_grid(self):
        """
        Test DivideInNeighborhoods combined with FindCenterCoordinates
        """
        data = self._load_grid("grid_10x10.h5")
        self.assertEqual(self._divide_and_count(data), 100)

    def test_divide_and_find_center_grid_noise(self):
        """
        Test DivideInNeighborhoods combined with FindCenterCoordinates for noisy input
        """
        data = self._add_gaussian_noise(self._load_grid("grid_10x10.h5"))
        self.assertEqual(self._divide_and_count(data), 100)

    def test_divide_and_find_center_grid_missing_point(self):
        """
        Test DivideInNeighborhoods combined with FindCenterCoordinates for grid that misses one point
        """
        data = self._add_gaussian_noise(self._load_grid("grid_missing_point.h5"))
        self.assertEqual(self._divide_and_count(data), 99)

    def test_divide_and_find_center_grid_cosmic_ray(self):
        """
        Test DivideInNeighborhoods combined with FindCenterCoordinates for grid that misses one point
        and contains cosmic ray
        """
        data = self._add_gaussian_noise(self._load_grid("grid_cosmic_ray.h5"))
        self.assertEqual(self._divide_and_count(data), 99)

    def test_divide_and_find_center_grid_noise_missing_point_cosmic_ray(self):
        """
        Test DivideInNeighborhoods combined with FindCenterCoordinates for noisy input that
        misses one point and contains cosmic ray
        """
        data = self._add_gaussian_noise(self._load_grid("grid_cosmic_ray.h5"))
        self.assertEqual(self._divide_and_count(data), 99)
# @unittest.skip("skip")
class TestMatchCoordinates(unittest.TestCase):
    """
    Test MatchCoordinates: matching of transformed/shuffled/distorted optical
    coordinates against the known electron grid, and recovery of the applied
    transformation via transform.CalculateTransform.
    """

    def setUp(self):
        # Fixed seed so the random transformation drawn below is reproducible.
        # NB: the order of the uniform() calls must not change, or every
        # seeded test result changes with it.
        random.seed(0)
        self.electron_coordinates_1x1 = [(1, 1)]
        self.electron_coordinates_3x3 = [(i, j) for i in range(1, 4) for j in range(1, 4)]
        self.electron_coordinates_10x10 = [(i, j) for i in range(1, 11) for j in range(1, 11)]
        self.electron_coordinates_40x40 = [(i, j) for i in range(1, 41) for j in range(1, 41)]
        # Random (but reproducible) transformation applied in the tests
        self.translation_x, self.translation_y = uniform(-20, 20), uniform(-20, 20)
        self.scale = uniform(4, 4.2)
        self.scale_x, self.scale_y = self.scale, self.scale
        self.rotation = math.radians(uniform(-2, 2))

    def _transform(self, electron_coordinates):
        """Apply this test case's translation/rotation/scale to the given grid."""
        return coordinates._TransformCoordinates(
            electron_coordinates,
            (self.translation_x, self.translation_y),
            self.rotation,
            (self.scale_x, self.scale_y))

    def _distort(self, coords):
        """Return coords with uniform noise in [-0.1, 0.1] added to each component."""
        distorted = []
        for c in coords:
            distortion = (uniform(-0.1, 0.1), uniform(-0.1, 0.1))
            distorted.append(tuple(map(operator.add, c, distortion)))
        return distorted

    def _assert_transform_recovered(self, optical_coordinates, electron_coordinates,
                                    known_optical=None, decimal=1):
        """Match optical to electron coordinates and check the transform is recovered.

        known_optical, when given, overrides the matched optical list fed to
        CalculateTransform (used by the distorted tests, which compare against
        the undistorted input). Returns max_dist reported by MatchCoordinates.
        """
        estimated, matched_optical, max_dist = coordinates.MatchCoordinates(
            optical_coordinates, electron_coordinates, 0.25, 0.25)
        source = matched_optical if known_optical is None else known_optical
        (calc_tx, calc_ty), (calc_sx, calc_sy), calc_rot = transform.CalculateTransform(source, estimated)
        numpy.testing.assert_almost_equal(
            (calc_tx, calc_ty, calc_sx, calc_sy, calc_rot),
            (self.translation_x, self.translation_y, self.scale_x, self.scale_y, self.rotation),
            decimal)
        return max_dist

    def test_precomputed_output(self):
        """
        Test MatchCoordinates for precomputed output
        """
        optical_coordinates = [(9.1243, 6.7570), (10.7472, 16.8185), (4.7271, 12.6429), (13.9714, 6.0185), (5.6263, 17.5885), (14.8142, 10.9271), (10.0384, 11.8815), (15.5146, 16.0694), (4.4803, 7.5966)]
        estimated_coordinates, known_optical_coordinates, max_dist = coordinates.MatchCoordinates(
            optical_coordinates, self.electron_coordinates_3x3, 0.25, 0.25)
        assert 0 <= max_dist < 0.25
        numpy.testing.assert_equal(
            estimated_coordinates,
            [(2, 1), (2, 3), (1, 2), (3, 1), (1, 3), (3, 2), (2, 2), (3, 3), (1, 1)])

    def test_single_element(self):
        """
        Test MatchCoordinates for single element lists, error should be raised
        """
        optical_coordinates = [(9.1243, 6.7570)]
        with self.assertRaises(LookupError):
            coordinates.MatchCoordinates(optical_coordinates, self.electron_coordinates_1x1, 0.25, 0.25)

    def test_precomputed_transformation_3x3(self):
        """
        Test MatchCoordinates for applied transformation (3x3 grid)
        """
        transformed = self._transform(self.electron_coordinates_3x3)
        max_dist = self._assert_transform_recovered(transformed, self.electron_coordinates_3x3)
        assert 0 <= max_dist < 0.25

    def test_shuffled_3x3(self):
        """
        Test MatchCoordinates for shuffled optical coordinates (3x3 grid)
        """
        shuffled = self._transform(self.electron_coordinates_3x3)
        shuffle(shuffled)
        self._assert_transform_recovered(shuffled, self.electron_coordinates_3x3)

    def test_shuffled_distorted_3x3(self):
        """
        Test MatchCoordinates for shuffled and distorted optical coordinates (3x3 grid)
        """
        shuffled = self._transform(self.electron_coordinates_3x3)
        shuffle(shuffled)
        distorted = self._distort(shuffled)
        self._assert_transform_recovered(distorted, self.electron_coordinates_3x3,
                                         known_optical=shuffled)

    def test_precomputed_output_missing_point_3x3(self):
        """
        Test MatchCoordinates when one point of the grid is missing
        """
        transformed = self._transform(self.electron_coordinates_3x3)
        del transformed[random.randint(0, len(transformed) - 1)]
        estimated, _, _ = coordinates.MatchCoordinates(transformed, self.electron_coordinates_3x3, 0.25, 0.25)
        self.assertEqual(len(estimated), len(self.electron_coordinates_3x3) - 1)

    def test_precomputed_transformation_10x10(self):
        """
        Test MatchCoordinates for applied transformation (10x10 grid)
        """
        transformed = self._transform(self.electron_coordinates_10x10)
        self._assert_transform_recovered(transformed, self.electron_coordinates_10x10)

    def test_shuffled_10x10(self):
        """
        Test MatchCoordinates for shuffled optical coordinates (10x10 grid)
        """
        shuffled = self._transform(self.electron_coordinates_10x10)
        shuffle(shuffled)
        self._assert_transform_recovered(shuffled, self.electron_coordinates_10x10)

    def test_shuffled__distorted_10x10(self):
        """
        Test MatchCoordinates for shuffled and distorted optical coordinates (10x10 grid)
        """
        shuffled = self._transform(self.electron_coordinates_10x10)
        shuffle(shuffled)
        distorted = self._distort(shuffled)
        self._assert_transform_recovered(distorted, self.electron_coordinates_10x10,
                                         known_optical=shuffled)

    def test_precomputed_transformation_40x40(self):
        """
        Test MatchCoordinates for applied transformation (40x40 grid)
        """
        transformed = self._transform(self.electron_coordinates_40x40)
        # Larger grid accumulates more error: only integer precision expected
        self._assert_transform_recovered(transformed, self.electron_coordinates_40x40, decimal=0)

    def test_shuffled_40x40(self):
        """
        Test MatchCoordinates for shuffled optical coordinates (40x40 grid)
        """
        shuffled = self._transform(self.electron_coordinates_40x40)
        shuffle(shuffled)
        self._assert_transform_recovered(shuffled, self.electron_coordinates_40x40)

    def test_precomputed_output_missing_point_40x40(self):
        """
        Test MatchCoordinates when one point of the grid is missing (40x40 grid)
        """
        transformed = self._transform(self.electron_coordinates_40x40)
        del transformed[random.randint(0, len(transformed) - 1)]
        estimated, _, _ = coordinates.MatchCoordinates(transformed, self.electron_coordinates_40x40, 0.25, 0.25)
        self.assertEqual(len(estimated), len(self.electron_coordinates_40x40) - 1)
# Allow running this test module directly, e.g. ``python coordinates_test.py``
if __name__ == '__main__':
    unittest.main()
|
delmic/odemis
|
src/odemis/acq/align/test/coordinates_test.py
|
Python
|
gpl-2.0
| 19,482
|
[
"Gaussian"
] |
df20a56b91d6e4e96e117a20466fd80a13ab08dfb835b50e299b59e81f1a97f8
|
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__author__ = "Donovan Parks"
__copyright__ = "Copyright 2015"
__credits__ = ["Donovan Parks"]
__license__ = "GPL3"
__maintainer__ = "Donovan Parks"
__email__ = "donovan.parks@gmail.com"
__status__ = "Development"
import gzip
from collections import namedtuple
class BlastParser():
    """Parses output files produced with Blast."""

    def __init__(self):
        """Initialization."""
        # Fields mirror the 12 columns of blast tabular output (format 6),
        # which is also the format produced by Diamond.
        self.BlastHit = namedtuple('BlastHit', ['query_id',
                                                'subject_id',
                                                'perc_identity',
                                                'aln_length',
                                                'mismatch_count',
                                                'gap_open_count',
                                                'query_start',
                                                'query_end',
                                                'subject_start',
                                                'subject_end',
                                                'evalue',
                                                'bitscore'])

    def read_hit(self, table):
        """Generator function to read hits from a blast output table.

        The table should be in blast format 6 (also the format used by
        Diamond), optionally gzip-compressed (``.gz`` suffix).

        Parameters
        ----------
        table : str
            Name of table to read.

        Yields
        ------
        namedtuple
            Information about blast hit.
        """
        if table.endswith('.gz'):
            # Bug fix: gzip.open defaults to binary mode on Python 3, which
            # would yield bytes and break the int()/float() conversions below.
            open_file = lambda f: gzip.open(f, 'rt')
        else:
            open_file = open

        # Context manager ensures the file handle is closed even if the
        # consumer abandons the generator early.
        with open_file(table) as fin:
            for line in fin:
                if not line.strip():
                    # tolerate trailing/blank lines
                    continue
                line_split = line.rstrip('\n').split('\t')
                yield self.BlastHit(query_id=line_split[0],
                                    subject_id=line_split[1],
                                    perc_identity=float(line_split[2]),
                                    aln_length=int(line_split[3]),
                                    mismatch_count=int(line_split[4]),
                                    gap_open_count=int(line_split[5]),
                                    query_start=int(line_split[6]),
                                    query_end=int(line_split[7]),
                                    subject_start=int(line_split[8]),
                                    subject_end=int(line_split[9]),
                                    evalue=float(line_split[10]),
                                    bitscore=float(line_split[11]))

    def identify_homologs(self,
                          blast_table,
                          evalue_threshold,
                          per_identity_threshold,
                          per_aln_len_threshold,
                          seq_lens):
        """Identify homologs among blast hits.

        Identifies hits satisfying the criteria required for a
        gene to be considered a homolog. The table should be in
        blast format 6.

        Parameters
        ----------
        blast_table : str
            File containing blast hits in the custom tabular format produced by BlastRunner.
        evalue_threshold : float
            E-value threshold used to define homologous gene.
        per_identity_threshold : float
            Percent identity threshold used to define a homologous gene.
        per_aln_len_threshold : float
            Alignment length threshold used to define a homologous gene.
        seq_lens : dict
            Length of sequences indexed by their unique id.

        Returns
        -------
        set
            Identifiers for homologous genes.
        """
        homologs = set()
        for hit in self.read_hit(blast_table):
            if hit.evalue <= evalue_threshold and hit.perc_identity >= per_identity_threshold:
                # alignment length expressed as a percentage of the query length
                query_len = seq_lens[hit.query_id]
                per_aln_len = hit.aln_length * 100.0 / query_len
                if per_aln_len >= per_aln_len_threshold:
                    homologs.add(hit.subject_id)
        return homologs
|
dparks1134/biolib
|
biolib/blast_parser.py
|
Python
|
gpl-3.0
| 5,252
|
[
"BLAST"
] |
20d1c10739f75cb4d5b390d68445fd6435d236c887056dc3bb32a5a73d094c2a
|
import numpy
import numpy.linalg
import scipy.linalg
import scipy.interpolate
from scipy.signal import wiener, filtfilt, butter, gaussian
from scipy.ndimage import filters
from matplotlib import pyplot as plt
plt.style.use('classic')
from assimulo.solvers import IDA
from assimulo.problem import Implicit_Problem
from scipy.sparse.linalg import spsolve as sparseSolve
from scipy.sparse import csr_matrix as sparseMat
import scipy.sparse as sps
import scipy.sparse as sparse
import math
from copy import deepcopy
def compute_deriv( func, x0 ) :
    """Numerical Jacobian of ``func`` at ``x0`` by forward differences.

    Perturbs one component of ``x0`` at a time by a fixed step (1e-8) and
    fills the corresponding Jacobian column with (func(x+h) - func(x)) / h.
    Returns an (n, n) float array where n = len(x0).
    """
    base_val = func(x0)
    n = len(x0)
    jac = numpy.zeros( (n, n), dtype='d' )
    step = 1e-8
    x_pert = deepcopy(x0)
    for col in range(n) :
        x_pert[col] = x_pert[col] + step
        # One perturbed evaluation per column of the Jacobian.
        jac[:, col] = (func(x_pert) - base_val) / step
        # Restore the perturbed component before moving on.
        x_pert[col] = x0[col]
    return jac
def right_side_coeffs( h_n, h_n1 ) :
    """One-sided finite-difference weights at a right-hand boundary.

    h_n is the spacing to the boundary point, h_n1 the next interior
    spacing. Returns the (a, b, c) weights applied to the three
    right-most values.
    """
    span = h_n1 + h_n
    a_n = h_n / ( h_n1 * span )
    b_n = -span / ( h_n1 * h_n )
    c_n = ( 2*h_n + h_n1 ) / ( h_n * span )
    return a_n, b_n, c_n
def left_side_coeffs( h_n, h_n1 ) :
    """One-sided finite-difference weights at a left-hand boundary.

    Mirror image of ``right_side_coeffs``: h_n is the spacing to the
    boundary point, h_n1 the next interior spacing.
    """
    span = h_n1 + h_n
    a_n = -( 2*h_n + h_n1 ) / ( h_n * span )
    b_n = span / ( h_n1 * h_n )
    c_n = -h_n / ( h_n1 * span )
    return a_n, b_n, c_n
def build_interp_2d( path ) :
    """Load a 2-D lookup table from CSV and return a RectBivariateSpline.

    Expected layout: first column (rows 1:) holds the first axis values,
    first row (columns 1:) holds the second axis values, and the
    remaining cells hold the data grid. Axes are flipped into ascending
    order when needed, since the spline constructor requires it.
    """
    table = numpy.loadtxt( path, delimiter="," )
    axis1 = table[1:, 0]
    axis2 = table[0, 1:]
    grid = table[1:, 1:]
    if axis1[1] < axis1[0] :
        axis1 = numpy.flipud( axis1 )
        grid = numpy.flipud( grid )
    if axis2[1] < axis2[0] :
        axis2 = numpy.flipud( axis2 )
        grid = numpy.fliplr( grid )
    return scipy.interpolate.RectBivariateSpline( axis1, axis2, grid )
def ButterworthFilter( x, y, ff=0.2 ) :
    """Zero-phase low-pass filter of ``y``.

    Uses a first-order Butterworth design with normalized cutoff ``ff``
    applied forward and backward (filtfilt) so no phase lag is introduced.
    ``x`` is accepted for call-site symmetry but is not used.
    """
    num, den = butter(1, ff)
    return filtfilt( num, den, y )
def get_smooth_Uref_data( Ua_path, Uc_path, ffa=0.4, ffc=0.2 ) :
    """
    Smooth the Uref data to aid in improving numerical stability.
    This should be verified by the user to ensure it is not changing the original
    Uref data beyond a tolerable amount (defined by the user).
    A linear interpolator class is output for Uref and dUref_dx for both anode
    and cathode.
    """
    # Load the two open-circuit-potential tables (stoichiometry, potential).
    anode_map = numpy.loadtxt( Ua_path, delimiter=',' )
    cathode_map = numpy.loadtxt( Uc_path, delimiter=',' )

    # Ensure the stoichiometry axis is ascending for interp1d.
    if anode_map[1,0] < anode_map[0,0] :
        anode_map = numpy.flipud( anode_map )
    if cathode_map[1,0] < cathode_map[0,0] :
        cathode_map = numpy.flipud( cathode_map )

    xa = anode_map[:,0]
    xc = cathode_map[:,0]

    # Zero-phase first-order Butterworth smoothing of each potential curve
    # (inlined equivalent of ButterworthFilter).
    num_a, den_a = butter( 1, ffa )
    Ua_smooth = filtfilt( num_a, den_a, anode_map[:,1] )
    num_c, den_c = butter( 1, ffc )
    Uc_smooth = filtfilt( num_c, den_c, cathode_map[:,1] )

    # Linear interpolators for the smoothed potentials.
    Ua_intp = scipy.interpolate.interp1d( xa, Ua_smooth, kind='linear' )
    Uc_intp = scipy.interpolate.interp1d( xc, Uc_smooth, kind='linear' )

    # Linear interpolators for dU/dx of the smoothed potentials.
    dUa_dx = numpy.gradient( Ua_smooth ) / numpy.gradient( xa )
    dUc_dx = numpy.gradient( Uc_smooth ) / numpy.gradient( xc )
    dUa_intp = scipy.interpolate.interp1d( xa, dUa_dx, kind='linear' )
    dUc_intp = scipy.interpolate.interp1d( xc, dUc_dx, kind='linear' )

    return Ua_intp, Uc_intp, dUa_intp, dUc_intp
def nonlinspace( Rf,k,N ) :
    """Graded mesh of N points from 0 to Rf.

    For k != 1 the spacing follows a geometric progression with ratio k
    (points cluster toward Rf for k < 1); for k == 1 every entry equals
    Rf, matching the original behavior.
    """
    pts = numpy.array( [ (1./k)**(-idx) for idx in range(N) ], dtype='d' )
    if k != 1 :
        # Reverse the geometric decay and rescale so the mesh spans [0, Rf].
        pts = max(pts) - pts
        pts = pts / max(pts) * Rf
    else :
        pts = pts * Rf
    return pts
def mid_to_edge( var_mid, x_e ) :
    """Map cell-centered values onto cell edges.

    Interior edges use a distance-weighted harmonic-style mean of the two
    neighboring cell values; the two outer edges simply copy the first and
    last cell values. Returns an array of length len(var_mid) + 1.
    """
    edge_vals = [ var_mid[0] ]
    for i in range( len(var_mid) - 1 ) :
        # Weight of the right-hand cell, based on the two cell widths.
        w = (x_e[i+1] - x_e[i]) / ( (x_e[i+2] - x_e[i+1]) + (x_e[i+1] - x_e[i]) )
        harmonic = var_mid[i] * var_mid[i+1] / ( w * var_mid[i+1] + (1 - w) * var_mid[i] )
        edge_vals.append( harmonic )
    edge_vals.append( var_mid[-1] )
    return numpy.array( edge_vals )
def flux_mat_builder( N, x_m, vols, P ) :
    """Assemble the tridiagonal finite-volume flux matrix.

    x_m are the N node (cell-center) positions, vols the control-volume
    sizes, and P the N+1 face transport coefficients. Interior rows get a
    flux across both faces; the first and last rows have only one face
    (zero-flux outer boundaries).
    """
    A = numpy.zeros([N,N], dtype='d')

    for row in range(1, N-1) :
        west = (1./vols[row]) * P[row] / (x_m[row] - x_m[row-1])
        east = (1./vols[row]) * P[row+1] / (x_m[row+1] - x_m[row])
        A[row, row-1] = west
        A[row, row] = -west - east
        A[row, row+1] = east

    # Left boundary cell: only the east face carries flux.
    east0 = (1./vols[0]) * P[1] / (x_m[1] - x_m[0])
    A[0,0] = -east0
    A[0,1] = east0

    # Right boundary cell: only the west face carries flux.
    west_last = (1./vols[N-1]) * P[N-1] / (x_m[N-1] - x_m[N-2])
    A[N-1, N-2] = west_last
    A[N-1, N-1] = -west_last

    return A
class MyProblem( Implicit_Problem ) :
def __init__(self, Na, Ns, Nc, Nra, Nrc, X, Ra, Rc, Ac, bsp_dir, y0, yd0, name ) :
Implicit_Problem.__init__(self,y0=y0,yd0=yd0,name=name)
self.T = 298.15 # Cell temperature, [K]
self.Ac = Ac # Cell coated area, [m^2]
### Control volumes and node points (mid node points and edge node points)
self.Ns = Ns
self.Na = Na
self.Nc = Nc
self.N = Na + Ns + Nc
self.X = X
self.x_e = numpy.linspace( 0.0, X, N+1 )
self.x_m = numpy.array( [ 0.5*(self.x_e[i+1]+self.x_e[i]) for i in range(N) ], dtype='d' )
self.vols = numpy.array( [ (self.x_e[i+1] - self.x_e[i]) for i in range(N)], dtype='d' )
# Radial mesh
self.Nra = Nra
self.Nrc = Nrc
k=0.85
self.r_e_a = nonlinspace( Ra, k, Nra+1 )
self.r_m_a = numpy.array( [ 0.5*(self.r_e_a[i+1]+self.r_e_a[i]) for i in range(Nra) ], dtype='d' )
self.r_e_c = nonlinspace( Rc, k, Nrc+1 )
self.r_m_c = numpy.array( [ 0.5*(self.r_e_c[i+1]+self.r_e_c[i]) for i in range(Nrc) ], dtype='d' )
self.vols_ra_m = numpy.array( [ 1/3.*(self.r_e_a[i+1]**3 - self.r_e_a[i]**3) for i in range(Nra)], dtype='d' )
self.vols_rc_m = numpy.array( [ 1/3.*(self.r_e_c[i+1]**3 - self.r_e_c[i]**3) for i in range(Nrc)], dtype='d' )
# Useful sub-meshes for the phi_s functions
self.x_m_a = self.x_m[:Na]
self.x_m_c = self.x_m[-Nc:]
self.x_e_a = self.x_e[:Na+1]
self.x_e_c = self.x_e[-Nc-1:]
self.vols_a = self.vols[:Na]
self.vols_c = self.vols[-Nc:]
self.num_diff_vars = self.N + self.Nra*self.Na + self.Nrc*self.Nc
self.num_algr_vars = self.Na+self.Nc + self.N + self.Na+self.Nc
### Volume fraction vectors and matrices for effective parameters
self.La, self.Ls, self.Lc = self.Na*X/self.N, self.Ns*X/self.N, self.Nc*X/self.N
self.Na, self.Ns, self.Nc = Na, Ns, Nc
eps_a = 0.25
eps_s = 0.45
eps_c = 0.2
ba, bs, bc = 1.2, 0.5, 0.5
eps_a_vec = [ eps_a for i in range(Na) ] # list( eps_a + eps_a/2.*numpy.sin(numpy.linspace(0.,Na/4,Na)) ) # list(eps_a + eps_a*numpy.random.randn(Na)/5.) #
eps_s_vec = [ eps_s for i in range(Ns) ]
eps_c_vec = [ eps_c for i in range(Nc) ] # list( eps_c + eps_c/2.*numpy.sin(numpy.linspace(0.,Nc/4,Nc)) ) # list(eps_c + eps_c*numpy.random.randn(Nc)/5.) #
self.eps_m = numpy.array( eps_a_vec + eps_s_vec + eps_c_vec, dtype='d' )
self.k_m = 1./self.eps_m
self.eps_mb = numpy.array( [ ea**ba for ea in eps_a_vec ] + [ es**bs for es in eps_s_vec ] + [ ec**bc for ec in eps_c_vec ], dtype='d' )
self.eps_eff = numpy.array( [ ea**(1.+ba) for ea in eps_a_vec ] + [ es**(1.+bs) for es in eps_s_vec ] + [ ec**(1.+bc) for ec in eps_c_vec ], dtype='d' )
self.eps_a_eff = self.eps_eff[:Na]
self.eps_c_eff = self.eps_eff[-Nc:]
self.K_m = numpy.diag( self.k_m )
t_plus = 0.36
F = 96485.0
self.t_plus = t_plus
self.F = F
self.R_gas = 8.314
self.Rp_a = Ra
self.Rp_c = Rc
as_a = 3.*(1.0-numpy.array(eps_a_vec, dtype='d'))/self.Rp_a
as_c = 3.*(1.0-numpy.array(eps_c_vec, dtype='d'))/self.Rp_c
self.as_a = as_a
self.as_c = as_c
self.as_a_mean = 1./self.La*sum( [ asa*v for asa,v in zip(as_a, self.vols[:Na]) ] )
self.as_c_mean = 1./self.Lc*sum( [ asc*v for asc,v in zip(as_c, self.vols[-Nc:]) ] )
print 'asa diff', self.as_a_mean - as_a[0]
print 'asc diff', self.as_c_mean - as_c[0]
### Electrolyte constant B_ce matrix
Ba = [ (1.-t_plus)*asa/ea for ea, asa in zip(eps_a_vec,as_a) ]
Bs = [ 0.0 for i in range(Ns) ]
Bc = [ (1.-t_plus)*asc/ec for ec, asc in zip(eps_c_vec,as_c) ]
self.B_ce = numpy.diag( numpy.array(Ba+Bs+Bc, dtype='d') )
Bap = [ asa*F for asa in as_a ]
Bsp = [ 0.0 for i in range(Ns) ]
Bcp = [ asc*F for asc in as_c ]
self.B2_pe = numpy.diag( numpy.array(Bap+Bsp+Bcp, dtype='d') )
# Interpolators for De, ke
self.De_intp = build_interp_2d( bsp_dir+'data/Model_v1/Model_Pars/electrolyte/De.csv' )
self.ke_intp = build_interp_2d( bsp_dir+'data/Model_v1/Model_Pars/electrolyte/kappa.csv' )
self.fca_intp = build_interp_2d( bsp_dir+'data/Model_v1/Model_Pars/electrolyte/fca.csv' )
### Solid phase parameters and j vector matrices
self.sig_a = 100. # [S/m]
self.sig_c = 40. # [S/m]
self.sig_a_eff = self.sig_a * self.eps_a_eff
self.sig_c_eff = self.sig_c * self.eps_c_eff
self.A_ps_a = flux_mat_builder( self.Na, self.x_m_a, numpy.ones_like(self.vols_a), self.sig_a_eff )
self.A_ps_c = flux_mat_builder( self.Nc, self.x_m_c, numpy.ones_like(self.vols_c), self.sig_c_eff )
# Grounding form for BCs (was only needed during testing, before BVK was incorporated for coupling
# self.A_ps_a[-1,-1] = 2*self.A_ps_a[-1,-1]
# self.A_ps_c[ 0, 0] = 2*self.A_ps_c[ 0, 0]
Baps = numpy.array( [ asa*F*dxa for asa,dxa in zip(as_a, self.vols_a) ], dtype='d' )
Bcps = numpy.array( [ asc*F*dxc for asc,dxc in zip(as_c, self.vols_c) ], dtype='d' )
self.B_ps_a = numpy.diag( Baps )
self.B_ps_c = numpy.diag( Bcps )
self.B2_ps_a = numpy.zeros( self.Na, dtype='d' )
self.B2_ps_a[ 0] = -1.
self.B2_ps_c = numpy.zeros( self.Nc, dtype='d' )
self.B2_ps_c[-1] = -1.
### Solid phase diffusion model
# Load the Ds data files
Dsa_map = numpy.loadtxt( bsp_dir+'data/Model_v1/Model_Pars/solid/diffusion/Ds_anode.csv', delimiter="," )
Dsc_map = numpy.loadtxt( bsp_dir+'data/Model_v1/Model_Pars/solid/diffusion/Ds_cathode.csv', delimiter="," )
if Dsa_map[1,0] < Dsa_map[0,0] :
Dsa_map = numpy.flipud( Dsa_map )
if Dsc_map[1,0] < Dsc_map[0,0] :
Dsc_map = numpy.flipud( Dsc_map )
## Create the interpolators
self.Dsa_intp = scipy.interpolate.interp1d( Dsa_map[:,0], Dsa_map[:,1], kind='linear' )
self.Dsc_intp = scipy.interpolate.interp1d( Dsc_map[:,0], Dsc_map[:,1], kind='linear' )
Dsa = numpy.mean(Dsa_map[:,1])
Dsc = numpy.mean(Dsc_map[:,1])
self.Dsa = Dsa
self.Dsc = Dsc
self.csa_max = 30555.0 # [mol/m^3]
self.csc_max = 51554.0 # [mol/m^3]
## Two parameter Solid phase diffusion model
# self.B_cs_a = numpy.diag( numpy.array( [-3.0/self.Rp_a for i in range(Na)], dtype='d' ) )
# self.B_cs_c = numpy.diag( numpy.array( [-3.0/self.Rp_c for i in range(Nc)], dtype='d' ) )
# self.C_cs_a = numpy.eye(Na)
# self.C_cs_c = numpy.eye(Nc)
# self.D_cs_a = numpy.diag( numpy.array( [-self.Rp_a/Dsa/5.0 for i in range(Na)], dtype='d' ) )
# self.D_cs_c = numpy.diag( numpy.array( [-self.Rp_c/Dsc/5.0 for i in range(Nc)], dtype='d' ) )
## 1D spherical diffusion model
# A_cs pre build
self.A_csa_single = flux_mat_builder( Nra, self.r_m_a, self.vols_ra_m, Dsa*(self.r_e_a**2) )
self.A_csc_single = flux_mat_builder( Nrc, self.r_m_c, self.vols_rc_m, Dsc*(self.r_e_c**2) )
# A_cs build up to the stacked full cs size (Nr and Nx)
b = [self.A_csa_single]*Na
self.A_cs_a = scipy.linalg.block_diag( *b )
b = [self.A_csc_single]*Nc
self.A_cs_c = scipy.linalg.block_diag( *b )
# B_cs and C_cs are constant (i.e., are not state-dependent)
self.B_csa_single = numpy.array( [ 0. for i in range(Nra-1) ]+[-1.*self.r_e_a[-1]**2/self.vols_ra_m[-1]], dtype='d' )
self.B_csc_single = numpy.array( [ 0. for i in range(Nrc-1) ]+[-1.*self.r_e_c[-1]**2/self.vols_rc_m[-1]], dtype='d' )
b = [self.B_csa_single]*Na
self.B_cs_a = scipy.linalg.block_diag( *b ).T
b = [self.B_csc_single]*Nc
self.B_cs_c = scipy.linalg.block_diag( *b ).T
# Particle surface concentration
h_na = self.r_e_a[-1] - self.r_m_a[-1]
h_n1a = self.r_m_a[-1] - self.r_m_a[-2]
h_nc = self.r_e_c[-1] - self.r_m_c[-1]
h_n1c = self.r_m_c[-1] - self.r_m_c[-2]
self.a_n_a, self.b_n_a, self.c_n_a = right_side_coeffs( h_na, h_n1a )
self.a_n_c, self.b_n_c, self.c_n_c = right_side_coeffs( h_nc, h_n1c )
self.C_cs_a_single = numpy.array( [0. for i in range(Nra-2)]+[-self.a_n_a/self.c_n_a, -self.b_n_a/self.c_n_a], dtype='d' )
self.C_cs_c_single = numpy.array( [0. for i in range(Nrc-2)]+[-self.a_n_c/self.c_n_c, -self.b_n_c/self.c_n_c], dtype='d' )
self.C_cs_a = scipy.linalg.block_diag( *[self.C_cs_a_single]*Na )
self.C_cs_c = scipy.linalg.block_diag( *[self.C_cs_c_single]*Nc )
# Particle core concentration
h_na = self.r_e_a[0] - self.r_m_a[0]
h_n1a = self.r_m_a[1] - self.r_m_a[0]
h_nc = self.r_e_c[0] - self.r_m_c[0]
h_n1c = self.r_m_c[1] - self.r_m_c[0]
a_n_a, b_n_a, c_n_a = left_side_coeffs( h_na, h_n1a )
a_n_c, b_n_c, c_n_c = left_side_coeffs( h_nc, h_n1c )
C_cso_a_single = numpy.array( [-b_n_a/a_n_a, -c_n_a/a_n_a] + [0. for i in range(Nra-2)], dtype='d' )
C_cso_c_single = numpy.array( [-b_n_c/a_n_c, -c_n_c/a_n_c] + [0. for i in range(Nrc-2)], dtype='d' )
self.C_cso_a = scipy.linalg.block_diag( *[C_cso_a_single]*Na )
self.C_cso_c = scipy.linalg.block_diag( *[C_cso_c_single]*Nc )
# D_cs prelim values, note this is Ds(cs) dependent and therefore requires updating for state dependent Ds
self.D_cs_a = -1.0/(Dsa*self.c_n_a)*numpy.eye( Na )
self.D_cs_c = -1.0/(Dsc*self.c_n_c)*numpy.eye( Nc )
### OCV
Ua_path = bsp_dir+'data/Model_v1/Model_Pars/solid/thermodynamics/uref_anode_bigx.csv'
Uc_path = bsp_dir+'data/Model_v1/Model_Pars/solid/thermodynamics/uref_cathode_bigx.csv'
self.uref_a, self.uref_c, self.duref_a, self.duref_c = get_smooth_Uref_data( Ua_path, Uc_path, ffa=0.4, ffc=0.2 )
### Reaction kinetics parameters
self.io_a = 5.0 # [A/m^2]
self.io_c = 5.0 # [A/m^2]
### System indices
self.ce_inds = range( self.N )
self.ce_inds_r = numpy.reshape( self.ce_inds, [len(self.ce_inds),1] )
self.ce_inds_c = numpy.reshape( self.ce_inds, [1,len(self.ce_inds)] )
self.csa_inds = range( self.N, self.N + (self.Na*self.Nra) )
self.csa_inds_r = numpy.reshape( self.csa_inds, [len(self.csa_inds),1] )
self.csa_inds_c = numpy.reshape( self.csa_inds, [1,len(self.csa_inds)] )
self.csc_inds = range( self.N + (self.Na*self.Nra), self.N + (self.Na*self.Nra) + (self.Nc*self.Nrc) )
self.csc_inds_r = numpy.reshape( self.csc_inds, [len(self.csc_inds),1] )
self.csc_inds_c = numpy.reshape( self.csc_inds, [1,len(self.csc_inds)] )
self.T_ind = self.N + (self.Na*self.Nra) + (self.Nc*self.Nrc)
c_end = self.N + (self.Na*self.Nra) + (self.Nc*self.Nrc) + 1
self.ja_inds = range(c_end, c_end+self.Na)
self.ja_inds_r = numpy.reshape( self.ja_inds, [len(self.ja_inds),1] )
self.ja_inds_c = numpy.reshape( self.ja_inds, [1,len(self.ja_inds)] )
self.jc_inds = range(c_end+self.Na, c_end+self.Na +self.Nc)
self.jc_inds_r = numpy.reshape( self.jc_inds, [len(self.jc_inds),1] )
self.jc_inds_c = numpy.reshape( self.jc_inds, [1,len(self.jc_inds)] )
self.pe_inds = range( c_end+self.Na+self.Nc, c_end+self.Na+self.Nc +self.N )
self.pe_inds_r = numpy.reshape( self.pe_inds, [len(self.pe_inds),1] )
self.pe_inds_c = numpy.reshape( self.pe_inds, [1,len(self.pe_inds)] )
self.pe_a_inds = range( c_end+self.Na+self.Nc, c_end+self.Na+self.Nc +self.Na )
self.pe_a_inds_r = numpy.reshape( self.pe_a_inds, [len(self.pe_a_inds),1] )
self.pe_a_inds_c = numpy.reshape( self.pe_a_inds, [1,len(self.pe_a_inds)] )
self.pe_c_inds = range( c_end+self.Na+self.Nc +self.Na+self.Ns, c_end+self.Na+self.Nc +self.N )
self.pe_c_inds_r = numpy.reshape( self.pe_c_inds, [len(self.pe_c_inds),1] )
self.pe_c_inds_c = numpy.reshape( self.pe_c_inds, [1,len(self.pe_c_inds)] )
self.pa_inds = range( c_end+self.Na+self.Nc+self.N, c_end+self.Na+self.Nc+self.N +self.Na )
self.pa_inds_r = numpy.reshape( self.pa_inds, [len(self.pa_inds),1] )
self.pa_inds_c = numpy.reshape( self.pa_inds, [1,len(self.pa_inds)] )
self.pc_inds = range( c_end+self.Na+self.Nc+self.N+self.Na, c_end+self.Na+self.Nc+self.N+self.Na +self.Nc )
self.pc_inds_r = numpy.reshape( self.pc_inds, [len(self.pc_inds),1] )
self.pc_inds_c = numpy.reshape( self.pc_inds, [1,len(self.pc_inds)] )
# second set for manual jac version
c_end = 0
self.ja_inds2 = range(c_end, c_end+self.Na)
self.ja_inds_r2 = numpy.reshape( self.ja_inds2, [len(self.ja_inds2),1] )
self.ja_inds_c2 = numpy.reshape( self.ja_inds2, [1,len(self.ja_inds2)] )
self.jc_inds2 = range(c_end+self.Na, c_end+self.Na +self.Nc)
self.jc_inds_r2 = numpy.reshape( self.jc_inds2, [len(self.jc_inds2),1] )
self.jc_inds_c2 = numpy.reshape( self.jc_inds2, [1,len(self.jc_inds2)] )
self.pe_inds2 = range( c_end+self.Na+self.Nc, c_end+self.Na+self.Nc +self.N )
self.pe_inds_r2 = numpy.reshape( self.pe_inds2, [len(self.pe_inds2),1] )
self.pe_inds_c2 = numpy.reshape( self.pe_inds2, [1,len(self.pe_inds2)] )
self.pe_a_inds2 = range( c_end+self.Na+self.Nc, c_end+self.Na+self.Nc +self.Na )
self.pe_a_inds_r2 = numpy.reshape( self.pe_a_inds2, [len(self.pe_a_inds2),1] )
self.pe_a_inds_c2 = numpy.reshape( self.pe_a_inds2, [1,len(self.pe_a_inds2)] )
self.pe_c_inds2 = range( c_end+self.Na+self.Nc +self.Na+self.Ns, c_end+self.Na+self.Nc +self.N )
self.pe_c_inds_r2 = numpy.reshape( self.pe_c_inds2, [len(self.pe_c_inds2),1] )
self.pe_c_inds_c2 = numpy.reshape( self.pe_c_inds2, [1,len(self.pe_c_inds2)] )
self.pa_inds2 = range( c_end+self.Na+self.Nc+self.N, c_end+self.Na+self.Nc+self.N +self.Na )
self.pa_inds_r2 = numpy.reshape( self.pa_inds2, [len(self.pa_inds2),1] )
self.pa_inds_c2 = numpy.reshape( self.pa_inds2, [1,len(self.pa_inds2)] )
self.pc_inds2 = range( c_end+self.Na+self.Nc+self.N+self.Na, c_end+self.Na+self.Nc+self.N+self.Na +self.Nc )
self.pc_inds_r2 = numpy.reshape( self.pc_inds2, [len(self.pc_inds2),1] )
self.pc_inds_c2 = numpy.reshape( self.pc_inds2, [1,len(self.pc_inds2)] )
def set_iapp( self, I_app ) :
self.i_app = I_app / self.Ac
# cs mats
def update_cs_mats( self, csa, csc, csa_ss, csc_ss, csa_o, csc_o ) :
Acsa_list = [ [] for i in range(self.Na) ]
Acsc_list = [ [] for i in range(self.Nc) ]
Dsa_ss = [ 0. for i in range(self.Na) ]
Dsc_ss = [ 0. for i in range(self.Nc) ]
for ia in range(self.Na) :
csa_m = csa[ia*self.Nra:(ia+1)*self.Nra]
csa_e = numpy.array( [csa_o[ia]] + [ 0.5*(csa_m[i+1]+csa_m[i]) for i in range(self.Nra-1) ] + [csa_ss[ia]] )
Ua_e = self.uref_a( csa_e/self.csa_max )
Dsa_e = self.Dsa_intp( Ua_e )
Acsa_list[ia] = flux_mat_builder( self.Nra, self.r_m_a, self.vols_ra_m, Dsa_e*(self.r_e_a**2) )
Dsa_ss[ia] = Dsa_e[-1]
for ic in range(self.Nc) :
csc_m = csc[ic*self.Nrc:(ic+1)*self.Nrc]
csc_e = numpy.array( [csc_o[ic]] + [ 0.5*(csc_m[i+1]+csc_m[i]) for i in range(self.Nrc-1) ] + [csc_ss[ic]] )
Uc_e = self.uref_c( csc_e/self.csc_max )
Dsc_e = self.Dsc_intp( Uc_e )
Acsc_list[ic] = flux_mat_builder( self.Nrc, self.r_m_c, self.vols_rc_m, Dsc_e*(self.r_e_c**2) )
Dsc_ss[ic] = Dsc_e[-1]
# b = self.A_csa_single.reshape(1,Nra,Nra).repeat(Na,axis=0)
self.A_cs_a = scipy.linalg.block_diag( *Acsa_list )
self.A_cs_c = scipy.linalg.block_diag( *Acsc_list )
self.D_cs_a = numpy.diag( -1.0/(numpy.array(Dsa_ss)*self.c_n_a) )
self.D_cs_c = numpy.diag( -1.0/(numpy.array(Dsc_ss)*self.c_n_c) )
## Define c_e functions
def build_Ace_mat( self, c ) :
D_eff = self.Diff_ce( c )
A = self.K_m.dot( flux_mat_builder( self.N, self.x_m, self.vols, D_eff ) )
return A
def Diff_ce( self, c ) :
T = self.T
# D_ce = 1e-4 * 10.0**( -4.43 - (54./(T-229.-5e-3*c)) - (0.22e-3*c) ) ## Torchio (LIONSIMBA) ECS paper
D_ce = self.De_intp( c, T, grid=False ).flatten()
D_mid = D_ce * self.eps_eff
if type(c) == float :
D_edge = D_mid
else :
D_edge = mid_to_edge( D_mid, self.x_e )
return D_edge
## Define phi_e functions
def build_Ape_mat( self, c ) :
k_eff = self.kapp_ce( c )
A = flux_mat_builder( self.N, self.x_m, self.vols, k_eff )
A[-1,-1] = 2*A[-1,-1] # BC update for phi_e = 0
return A
def build_Bpe_mat( self, c ) :
gam = 2.*(1.-self.t_plus)*self.R_gas*self.T / self.F
k_eff = self.kapp_ce( c )
c_edge = mid_to_edge( c, self.x_e )
B1 = flux_mat_builder( self.N, self.x_m, self.vols, k_eff*gam/c_edge )
return B1
def kapp_ce( self, c, mid_on=0 ) :
T = self.T
# k_ce = 1e-4 * c *( -10.5 +0.668e-3*c + 0.494e-6*c**2
# + (0.074 - 1.78*1e-5*c - 8.86e-10*c**2)*T
# + (-6.96e-5 + 2.8e-8*c)*T**2 )**2 ## Torchio (LIONSIMBA) ECS paper
k_ce = 1e-1*self.ke_intp( c, T, grid=False ).flatten() # 1e-1 converts from mS/cm to S/m (model uses SI units)
k_mid = k_ce * self.eps_eff
if mid_on :
k_out = k_mid
else :
if type(c) == float :
k_out = k_mid
else :
k_out = mid_to_edge( k_mid, self.x_e )
return k_edge
def build_Bjac_mat( self, eta, a, b ) :
d = a*numpy.cosh( b*eta )*b
return numpy.diag( d )
def get_voltage( self, y ) :
"""
Return the cell potential
"""
pc = y[self.pc_inds]
pa = y[self.pa_inds]
Vcell = pc[-1] - pa[0]
return Vcell
def calc_heat( self, ce, csa, csc, ja, jc, phi, phi_s_a, phi_s_c, eta_a, eta_c ) :
"""
Return the total integrated heat source across the cell sandwich
"""
# Gradients for heat calc
dphi_s_a = numpy.gradient( phi_s_a ) / numpy.gradient( self.x_m_a )
dphi_s_c = numpy.gradient( phi_s_c ) / numpy.gradient( self.x_m_c )
dphi = numpy.gradient( phi ) / numpy.gradient( self.x_m )
dlnce = 1./ce * ( numpy.gradient(ce) / numpy.gradient( self.x_m ) )
kapp_eff_m = self.kapp_ce( c, mid_on=1 ) # kapp_eff at the node points (middle of control volume, rather than edge)
K = numpy.diag(kapp_eff_m)
dp = self.G.dot(phi)
# Reaction kinetics heat
Q_rxn_a = sum( (self.F*self.as_a*ja*eta_a)*self.vols_a )
Q_rxn_c = sum( (self.F*self.as_c*jc*eta_c)*self.vols_c )
Q_rxn = Q_rxn_a + Q_rxn_c
# Ohmic heat in electrolyte and solid
Q_ohm_e = sum( ( kapp_eff_m*(dphi)**2 + (2*kapp_eff_m*self.R*self.T/self.F*(1-self.t_plus))*dlnce*dphi )*self.vols )
Q_ohm_s = sum( (self.sig_a_eff*(dphi_s_a)**2)*self.vols_a ) + sum( (self.sig_c_eff*(dphi_s_c)**2)*self.vols_c )
Q_ohm = Q_ohm_e + Q_ohm_s
# Entropic heat
## ??
# Total heat
Q_tot = Q_ohm + Q_rxn
return Q_tot
## Define system equations
def res( self, t, y, yd ) :
## Parse out the states
# E-lyte conc
ce = y[ self.ce_inds]
c_dots = yd[self.ce_inds]
# Solid conc a:anode, c:cathode
csa = y[ self.csa_inds]
csc = y[ self.csc_inds]
csa_dt = yd[self.csa_inds]
csc_dt = yd[self.csc_inds]
# Reaction (Butler-Volmer Kinetics)
ja_rxn = y[self.ja_inds]
jc_rxn = y[self.jc_inds]
# E-lyte potential
phi = y[self.pe_inds]
# Solid potential
phi_s_a = y[self.pa_inds]
phi_s_c = y[self.pc_inds]
## Grab state dependent matrices
# For E-lyte conc and potential (i.e., De(ce), kapp_e(ce))
A_ce = self.build_Ace_mat( ce )
A_pe = self.build_Ape_mat( ce )
B_pe = self.build_Bpe_mat( ce )
# For Solid conc Ds
csa_ss = (self.C_cs_a.dot(csa)).flatten() + (self.D_cs_a.dot(ja_rxn)).flatten()
csc_ss = (self.C_cs_c.dot(csc)).flatten() + (self.D_cs_c.dot(jc_rxn)).flatten()
csa_o = (self.C_cso_a.dot(csa)).flatten()
csc_o = (self.C_cso_c.dot(csc)).flatten()
self.update_cs_mats( csa, csc, csa_ss, csc_ss, csa_o, csc_o )
## Compute extra variables
# For the reaction kinetics
Uref_a = self.uref_a( csa_ss/self.csa_max ) # anode equilibrium potential
Uref_c = self.uref_c( csc_ss/self.csc_max ) # cathode equilibrium potential
eta_a = phi_s_a - phi[:self.Na] - Uref_a # anode overpotential
eta_c = phi_s_c - phi[-self.Nc:] - Uref_c # cathode overpotential
# ja = 2.0*self.io_a * numpy.sqrt( ce[:self.Na]/self.ce_nom * (1.0 - csa_ss/self.csa_max) * (csa_ss/self.csa_max) ) * numpy.sinh( self.R_gas/(2.0*self.F*self.T)*eta_a )
# jc = 2.0*self.io_c * numpy.sqrt( ce[-self.Nc:]/self.ce_nom * (1.0 - csc_ss/self.csc_max) * (csc_ss/self.csc_max) ) * numpy.sinh( self.R_gas/(2.0*self.F*self.T)*eta_c )
ja = 2.0*self.io_a/self.F * numpy.sinh( 0.5*self.F/(self.R_gas*self.T)*eta_a )
jc = 2.0*self.io_c/self.F * numpy.sinh( 0.5*self.F/(self.R_gas*self.T)*eta_c )
j = numpy.concatenate( [ ja_rxn, numpy.zeros(self.Ns), jc_rxn ] )
## Compute the residuals
# Time deriv components
r1 = c_dots - ( ((A_ce.dot(ce)).flatten() + (self.B_ce.dot(j)).flatten()) ) # E-lyte conc
r2 = csa_dt - (self.A_cs_a.dot(csa).flatten() + self.B_cs_a.dot(ja_rxn).flatten()) # Anode conc
r3 = csc_dt - (self.A_cs_c.dot(csc).flatten() + self.B_cs_c.dot(jc_rxn).flatten()) # Cathode conc
# Algebraic components
r4 = ja_rxn - ja
r5 = jc_rxn - jc
r6 = A_pe.dot(phi).flatten() - B_pe.dot(ce).flatten() + self.B2_pe.dot(j).flatten() # E-lyte potential
r7 = self.A_ps_a.dot(phi_s_a).flatten() - self.B_ps_a.dot(ja_rxn).flatten() - self.B2_ps_a*self.i_app # Anode potential
r8 = self.A_ps_c.dot(phi_s_c).flatten() - self.B_ps_c.dot(jc_rxn).flatten() + self.B2_ps_c*self.i_app # Cathode potential
res_out = numpy.concatenate( [r1, r2, r3, r4, r5, r6, r7, r8] )
return res_out
def jac( self, c, t, y, yd ) :
### Setup
## Parse out the states
# E-lyte conc
ce = y[ self.ce_inds]
c_dots = yd[self.ce_inds]
# Solid conc a:anode, c:cathode
csa = y[ self.csa_inds]
csc = y[ self.csc_inds]
csa_dt = yd[self.csa_inds]
csc_dt = yd[self.csc_inds]
# Reaction (Butler-Volmer Kinetics)
ja_rxn = y[self.ja_inds]
jc_rxn = y[self.jc_inds]
# E-lyte potential
phi = y[self.pe_inds]
# Solid potential
phi_s_a = y[self.pa_inds]
phi_s_c = y[self.pc_inds]
## Grab state dependent matrices
# For E-lyte conc and potential (i.e., De(ce), kapp_e(ce))
A_ce = self.build_Ace_mat( ce )
A_pe = self.build_Ape_mat( ce )
B_pe = self.build_Bpe_mat( ce )
## Compute extra variables
# For the reaction kinetics
# csa_ss = numpy.array( [ csa[(i+1)*(self.Nra)-1] for i in range(self.Na) ] )
# csc_ss = numpy.array( [ csc[(i+1)*(self.Nrc)-1] for i in range(self.Nc) ] )
csa_ss = (self.C_cs_a.dot(csa)).flatten() + (self.D_cs_a.dot(ja_rxn)).flatten()
csc_ss = (self.C_cs_c.dot(csc)).flatten() + (self.D_cs_c.dot(jc_rxn)).flatten()
Uref_a = self.uref_a( csa_ss/self.csa_max ) # anode equilibrium potential
Uref_c = self.uref_c( csc_ss/self.csc_max ) # cathode equilibrium potential
eta_a = phi_s_a - phi[:self.Na] - Uref_a # anode overpotential
eta_c = phi_s_c - phi[-self.Nc:] - Uref_c # cathode overpotential
###
### Build the Jac matrix
## Self coupling
A_dots = numpy.diag( [1*c for i in range(self.num_diff_vars)] )
j_c = A_dots - scipy.linalg.block_diag( A_ce, self.A_cs_a, self.A_cs_c )
Bjac_a = self.build_Bjac_mat( eta_a, 2.0*self.io_a/self.F, 0.5*self.F/(self.R_gas*self.T) )
Bjac_c = self.build_Bjac_mat( eta_c, 2.0*self.io_c/self.F, 0.5*self.F/(self.R_gas*self.T) )
DUDcsa_ss = numpy.diag( (1.0/self.csa_max)*self.duref_a(csa_ss/self.csa_max) )
DUDcsc_ss = numpy.diag( (1.0/self.csc_max)*self.duref_c(csc_ss/self.csc_max) )
A_ja = numpy.diag(numpy.ones(self.Na)) - (Bjac_a.dot(-1.0*DUDcsa_ss*1.0)).dot( self.D_cs_a )
A_jc = numpy.diag(numpy.ones(self.Nc)) - (Bjac_c.dot(-1.0*DUDcsc_ss*1.0)).dot( self.D_cs_c )
j = scipy.linalg.block_diag( j_c, A_ja, A_jc, A_pe, self.A_ps_a, self.A_ps_c )
## Cross coupling
# c_e: j coupling back in
j[ numpy.ix_(self.ce_inds, self.ja_inds) ] = -self.B_ce[:, :self.Na ]
j[ numpy.ix_(self.ce_inds, self.jc_inds) ] = -self.B_ce[:, -self.Nc:]
# cs_a: j coupling
j[ numpy.ix_(self.csa_inds, self.ja_inds) ] = -self.B_cs_a
# cs_c: j coupling
j[ numpy.ix_(self.csc_inds, self.jc_inds) ] = -self.B_cs_c
# T
# j_a: pe, pa, csa coupling
j[numpy.ix_(self.ja_inds, self.pa_inds )] = -Bjac_a*( 1.0)
j[numpy.ix_(self.ja_inds, self.pe_a_inds)] = -Bjac_a*(-1.0)
j[numpy.ix_(self.ja_inds, self.csa_inds )] = -(Bjac_a.dot(-1.0*DUDcsa_ss*1.0)).dot( self.C_cs_a )
# j_c: pe, pc, csc coupling
j[numpy.ix_(self.jc_inds, self.pc_inds )] = -Bjac_c*( 1.0)
j[numpy.ix_(self.jc_inds, self.pe_c_inds)] = -Bjac_c*(-1.0)
j[numpy.ix_(self.jc_inds, self.csc_inds )] = -(Bjac_c.dot(-1.0*DUDcsc_ss*1.0)).dot( self.C_cs_c )
# phi_e: ce coupling into phi_e equation
j[numpy.ix_(self.pe_inds,self.ce_inds)] = -B_pe
j[numpy.ix_(self.pe_inds,self.ja_inds)] = self.B2_pe[:,:self.Na]
j[numpy.ix_(self.pe_inds,self.jc_inds)] = self.B2_pe[:,-self.Nc:]
# phi_s_a: ja
j[numpy.ix_(self.pa_inds,self.ja_inds)] = -self.B_ps_a
# phi_s_c: jc
j[numpy.ix_(self.pc_inds,self.jc_inds)] = -self.B_ps_c
###
return j
csa_max = 30555.0 # [mol/m^3]
csc_max = 51554.0 # [mol/m^3]
#bsp_dir = '/home/m_klein/Projects/battsimpy/'
bsp_dir = '/home/mk-sim-linux/Battery_TempGrad/Python/batt_simulation/battsimpy/'
#bsp_dir = '/Users/mk/Desktop/battsim/battsimpy/'
Ua_path = bsp_dir+'data/Model_v1/Model_Pars/solid/thermodynamics/uref_anode_bigx.csv'
Uc_path = bsp_dir+'data/Model_v1/Model_Pars/solid/thermodynamics/uref_cathode_bigx.csv'
uref_a, uref_c, duref_a, duref_c = get_smooth_Uref_data( Ua_path, Uc_path, ffa=0.4, ffc=0.2 )
xa_init, xc_init = 0.8, 0.37
ca_init = xa_init*csa_max
cc_init = xc_init*csc_max
Ua_init = uref_a( xa_init )
Uc_init = uref_c( xc_init )
print Ua_init
print Uc_init
### Mesh
# Electrode/separator thicknesses in microns: a=anode, s=separator, c=cathode.
La = 65.0
Ls = 25.0
Lc = 55.0
Lt = (La+Ls+Lc)
X = Lt*1e-6 # [m]  total cell sandwich thickness
# Number of through-thickness finite-volume cells, split proportionally by layer.
N = 80
Ns = int(N*(Ls/Lt))
Na = int(N*(La/Lt))
Nc = N - Ns - Na
print 'Na, Ns, Nc:', Na, Ns, Nc
# Radial shells per solid particle (anode/cathode) and particle radii [m].
Nra = 10
Nrc = 15
Ra = 12.0e-6
Rc = 6.5e-6
# Discharge setup.
Crate = 3.
Vcut = 3.0 # [V], cutoff voltage for end of discharge
ce_lims = [50.,3700.]  # [mol/m^3] electrolyte-concentration limits that also stop the sim
cell_coated_area = 1.0 # [m^2]
cell_cap = 29.0        # [Ah] nominal cell capacity
I_app = Crate*cell_cap # A
#i_app = I_app / cell_coated_area # current density, [A/m^2]
### Initial conditions
# E-lyte conc
c_init = 1100.0 # [mol/m^3]
c_centered = c_init*numpy.ones( N, dtype='d' )
# E-lyte potential
p_init = 0.0 # [V]
p_centered = p_init*numpy.ones( N, dtype='d' )
# Solid potential on anode and cathode (start at the equilibrium potentials)
pa_init = Ua_init #0.0 # [V]
pa_centered = pa_init*numpy.ones( Na, dtype='d' )
pc_init = Uc_init #0.0 # [V]
pc_centered = pc_init*numpy.ones( Nc, dtype='d' )
# Solid conc on anode and cathode (uniform through x and r)
ca_centered = ca_init*numpy.ones( Na*Nra, dtype='d' )
cc_centered = cc_init*numpy.ones( Nc*Nrc, dtype='d' )
# j init (Butler-Volmer reaction fluxes start at zero)
ja = numpy.zeros(Na)
jc = numpy.zeros(Nc)
# Differential vs algebraic state counts for the DAE (used for algvar below).
num_diff_vars = len(c_centered)+len(ca_centered)+len(cc_centered)
num_algr_vars = len(ja)+len(jc)+len(p_centered)+len(pa_centered)+len(pc_centered)
#The initial conditons
y0 = numpy.concatenate( [c_centered, ca_centered, cc_centered, ja, jc, p_centered, pa_centered, pc_centered] ) #Initial conditions
yd0 = [0.0 for i in range(len(y0))] #Initial conditions
#Create an Assimulo implicit problem
imp_mod = MyProblem(Na,Ns,Nc,Nra,Nrc,X,Ra,Rc,cell_coated_area,bsp_dir,y0,yd0,'Example using an analytic Jacobian')
#Sets the options to the problem
# algvar: 1.0 marks a differential state, 0.0 an algebraic one (IDA convention).
imp_mod.algvar = [1.0 for i in range(num_diff_vars)] + [0.0 for i in range(num_algr_vars)] #Set the algebraic components
#Create an Assimulo implicit solver (IDA)
imp_sim = IDA(imp_mod) #Create a IDA solver
#Sets the paramters
imp_sim.atol = 1e-5 #Default 1e-6
imp_sim.rtol = 1e-5 #Default 1e-6
imp_sim.suppress_alg = True #Suppres the algebraic variables on the error test
imp_sim.display_progress = False
imp_sim.verbosity = 50
imp_sim.report_continuously = True
imp_sim.time_limit = 10.  # [s] wall-clock cap per simulate() call
### Simulate
# Ramp the applied current in two short sub-steps (10% then 50% of I_app) so
# IDA can find consistent initial conditions before the full-current discharge.
t01, t02 = 0.1, 0.2
imp_mod.set_iapp( I_app/10. )
imp_sim.make_consistent('IDA_YA_YDP_INIT')
ta, ya, yda = imp_sim.simulate(t01,2)
imp_mod.set_iapp( I_app/2. )
imp_sim.make_consistent('IDA_YA_YDP_INIT')
tb, yb, ydb = imp_sim.simulate(t02,2)
# Sim step 1
#imp_mod.set_iapp( I_app )
#imp_sim.make_consistent('IDA_YA_YDP_INIT')
#t1, y1, yd1 = imp_sim.simulate(1.0/Crate*3600.0,100)
# Output grid for the main discharge: NT points up to the 1/Crate-hour mark.
NT = 100
time = numpy.linspace( t02+0.1, 1.0/Crate*3600.0, NT )
t_out = [ 0 for ts in time ]
V_out = [ 0 for ts in time ]
y_out = numpy.zeros( [len(time), yb.shape[ 1]] )
yd_out = numpy.zeros( [len(time), ydb.shape[1]] )
it = 0
# Seed the loop-termination quantities from the end of the ramp step.
V_cell = imp_mod.get_voltage( yb[-1,:].flatten() )
ce_now = yb[-1,imp_mod.ce_inds].flatten()
print 'V_cell prior to time loop:', V_cell
# Switch to the full applied current for the discharge loop.
imp_mod.set_iapp( I_app )
imp_sim.make_consistent('IDA_YA_YDP_INIT')
sim_stopped = 0
# March through the discharge until the voltage cutoff, an electrolyte
# concentration limit, an integrator failure, or the end of the time grid.
while V_cell > Vcut and max(ce_now)<max(ce_lims) and min(ce_now)>min(ce_lims) and not sim_stopped and it<len(time) :
    try :
        ti, yi, ydi = imp_sim.simulate(time[it],1)
    # NOTE(review): bare except deliberately catches any integrator failure
    # (and, unfortunately, everything else) so the run ends gracefully.
    except :
        # Fall back to the previous saved step so the arrays stay consistent.
        ti = [t_out[it-1],t_out[it-1]]
        yi = y_out[ it-2:it,:]
        ydi = yd_out[ it-2:it,:]
        sim_stopped = 1
        print 'Sim stopped due time integration failure.'
    # Record the last state of this step.
    t_out[ it] = ti[ -1 ]
    y_out[ it,:] = yi[ -1,:]
    yd_out[it,:] = ydi[-1,:]
    V_cell = imp_mod.get_voltage( y_out[it,:] )
    V_out[it] = V_cell
    ce_now = y_out[it,imp_mod.ce_inds]
    print 'time:',round(t_out[it],3), ' | Voltage:', round(V_cell,3)
    if V_cell < Vcut :
        print '\n','Vcut stopped simulation.'
    elif max(ce_now)>max(ce_lims) :
        print '\n','ce max stopped simulation.'
    elif min(ce_now)<min(ce_lims) :
        print '\n','ce min stopped simulation.'
    it+=1
# Trim the preallocated output arrays if the loop ended early.
if it < len(time) :
    t_out = t_out[ :it ]
    V_out = V_out[ :it ]
    y_out = y_out[ :it,:]
    yd_out = yd_out[:it,:]
# Quick-look plots: electrolyte concentration profiles and voltage curve.
ce = y_out[:,imp_mod.ce_inds]
f,ax=plt.subplots(1,2)
ax[0].plot( imp_mod.x_m, ce.T )
ax[1].plot( t_out, V_out )
plt.show()
# Alias the discharge results as "step 1" for the plotting section below.
t1 = t_out
y1 = y_out
yd1 = yd_out
print t_out[it-1]
# Sim step 2
# Zero-current rest: relax the cell from the end of discharge out to 1.5x
# the final discharge time.
imp_mod.set_iapp( 0.0 )
imp_sim.make_consistent('IDA_YA_YDP_INIT')
t2, y2, yd2 = imp_sim.simulate(t_out[-1]*1.5,100)
plot_on = 1
if plot_on :
    # extract variables
    # Suffix _1 = discharge step results, _2 = rest step results.
    im = imp_mod
    ce_1 = y1[:,im.ce_inds]
    ca_1 = y1[:,im.csa_inds]
    cc_1 = y1[:,im.csc_inds]
    # Reshape flat solid-conc states to (x-node, radial-shell) per time snapshot.
    ca1_r = [ numpy.reshape( ca_1[it,:], (im.Na, im.Nra) ) for it in range(len(t1)) ]
    cc1_r = [ numpy.reshape( cc_1[it,:], (im.Nc, im.Nrc) ) for it in range(len(t1)) ]
    pe_1 = y1[:,im.pe_inds]
    pa_1 = y1[:,im.pa_inds]
    pc_1 = y1[:,im.pc_inds]
    ja_1 = y1[:,im.ja_inds]
    jc_1 = y1[:,im.jc_inds]
    ce_2 = y2[:,im.ce_inds]
    ca_2 = y2[:,im.csa_inds]
    cc_2 = y2[:,im.csc_inds]
    ca2_r = [ numpy.reshape( ca_2[it,:], (im.Na, im.Nra) ) for it in range(len(t2)) ]
    cc2_r = [ numpy.reshape( cc_2[it,:], (im.Nc, im.Nrc) ) for it in range(len(t2)) ]
    pe_2 = y2[:,im.pe_inds]
    pa_2 = y2[:,im.pa_inds]
    pc_2 = y2[:,im.pc_inds]
    ja_2 = y2[:,im.ja_inds]
    jc_2 = y2[:,im.jc_inds]
    #Plot
    # t1
    # Plot through space
    # Row 0: discharge-step profiles; Row 1: rest-step profiles.
    f, ax = plt.subplots(2,4)
    # ce vs x
    ax[0,0].plot(imp_mod.x_m*1e6,ce_1.T)
    # pe vs x
    ax[0,1].plot(imp_mod.x_m*1e6,pe_1.T)
    # pa vs x
    ax[0,2].plot(imp_mod.x_m_a*1e6,pa_1.T)
    # pc vs x
    ax[0,2].plot(imp_mod.x_m_c*1e6,pc_1.T)
    ax[0,0].set_title('t1 c')
    ax[0,0].set_xlabel('Cell Thickness [$\mu$m]')
    ax[0,0].set_ylabel('E-lyte Conc. [mol/m$^3$]')
    ax[0,1].set_title('t1 p')
    ax[0,1].set_xlabel('Cell Thickness [$\mu$m]')
    ax[0,1].set_ylabel('E-lyte Potential [V]')
    ax[0,2].set_title('t1 p solid')
    ax[0,2].set_xlabel('Cell Thickness [$\mu$m]')
    ax[0,2].set_ylabel('Solid Potential [V]')
    #ax[0,3].set_title('t1 conc solid')
    #ax[0,3].set_xlabel('Cell Thickness [$\mu$m]')
    #ax[0,3].set_ylabel('Solid Conc. [mol/m$^3$]')
    # t2
    ax[1,0].plot(imp_mod.x_m*1e6,ce_2.T)
    ax[1,1].plot(imp_mod.x_m*1e6,pe_2.T)
    ax[1,2].plot(imp_mod.x_m_a*1e6,pa_2.T)
    ax[1,2].plot(imp_mod.x_m_c*1e6,pc_2.T)
    ax[1,0].set_title('t2 c')
    ax[1,0].set_xlabel('Cell Thickness [$\mu$m]')
    ax[1,0].set_ylabel('E-lyte Conc. [mol/m$^3$]')
    ax[1,1].set_title('t2 p e-lyte')
    ax[1,1].set_xlabel('Cell Thickness [$\mu$m]')
    ax[1,1].set_ylabel('E-lyte Potential [V]')
    ax[1,2].set_title('t2 p solid')
    ax[1,2].set_xlabel('Cell Thickness [$\mu$m]')
    ax[1,2].set_ylabel('Solid Potential [V]')
    #ax[1,3].set_title('t2 Solid Conc.')
    #ax[1,3].set_xlabel('Cell Thickness [$\mu$m]')
    #ax[1,3].set_ylabel('Solid Conc. [mol/m$^3$]')
    plt.tight_layout()
fcs, ax = plt.subplots(1,2)
ira, irc = im.Nra-1, im.Nrc-1
for it in range(len(t1)) :
# ca vs x
ax[0].plot(imp_mod.x_m_a*1e6, ca1_r[it][:,ira])
# cc vs x
ax[0].plot(imp_mod.x_m_c*1e6, cc1_r[it][:,irc])
for it in range(len(t1)) :
ax[1].plot(imp_mod.x_m_a*1e6, ca2_r[it][:,ira])
ax[1].plot(imp_mod.x_m_c*1e6, cc2_r[it][:,irc])
ax[0].set_title('t1 Solid Conc.')
ax[1].set_title('t2 Solid Conc.')
ax[0].set_xlabel('Cell Thickness [$\mu$m]')
ax[0].set_ylabel('Solid Conc. [mol/m$^3$]')
plt.tight_layout()
fcsr, ax = plt.subplots(1,2)
ixa, ixc = im.Na-1, 0
for it in range(len(t1)) :
# ca vs x
ax[0].plot(imp_mod.r_m_a*1e6, ca1_r[it][ixa,:])
# cc vs x
ax[0].plot(imp_mod.r_m_c*1e6, cc1_r[it][ixc,:])
for it in range(len(t1)) :
ax[1].plot(imp_mod.r_m_a*1e6, ca2_r[it][ixa,:])
ax[1].plot(imp_mod.r_m_c*1e6, cc2_r[it][ixc,:])
ax[0].set_title('t1 Solid Conc.')
ax[1].set_title('t2 Solid Conc.')
ax[0].set_xlabel('Cell Thickness [$\mu$m]')
ax[0].set_ylabel('Solid Conc. [mol/m$^3$]')
plt.tight_layout()
# Plot through time
f, ax = plt.subplots(1,3)
ax[0].plot(t1,ce_1)
ax[1].plot(t1,pe_1)
ax[2].plot(t1,pa_1)
ax[2].plot(t1,pc_1)
#ax[3].plot(t1,ca_1)
#ax[3].plot(t1,cc_1)
ax[0].plot(t2,ce_2)
ax[1].plot(t2,pe_2)
ax[2].plot(t2,pa_2)
ax[2].plot(t2,pc_2)
#ax[3].plot(t2,ca_2)
#ax[3].plot(t2,cc_2)
ax[0].set_ylabel('E-lyte Conc. [mol/m$^3$]')
ax[0].set_xlabel('Time [s]')
ax[1].set_ylabel('E-lyte Potential [V]')
ax[1].set_xlabel('Time [s]')
ax[2].set_ylabel('Solid Potential [V]')
ax[2].set_xlabel('Time [s]')
#ax[3].set_ylabel('Solid Conc. [mol/m$^3$]')
#ax[3].set_xlabel('Time [s]')
plt.tight_layout()
plt.figure()
plt.plot( t1, pc_1[:,-1] - pa_1[:,0] )
plt.plot( t2, pc_2[:,-1] - pa_2[:,0] )
plt.show()
#
#
#
#imp_mod = MyProblem(Na,Ns,Nc,Nra,Nrc,X,Ra,Rc,cell_coated_area,bsp_dir,y0,yd0,'Example using an analytic Jacobian')
#
## my own time solver
#
#delta_t = 1.0
#tf = 10.
#time = [ i*delta_t for i in range(int(tf/delta_t)+1) ]
#
#print time
#
#x_out = numpy.zeros( [num_diff_vars, len(time)] )
#z_out = numpy.zeros( [num_algr_vars, len(time)] )
#
#x_out[:,0] = numpy.concatenate( [c_centered, ca_centered, cc_centered] )
#z_out[:,0] = numpy.concatenate( [ja, jc, p_centered, pa_centered, pc_centered] )
#
#for it, t in enumerate(time[1:]) :
#
# if it == 0 :
# Cur_vec = [ 0.0, 0.0, 0.1*I_app ]
# elif it == 1 :
# Cur_vec = [ 0.0, 0.1*I_app, 0.5*I_app ]
# elif it == 2 :
# Cur_vec = [ 0.1*I_app, 0.5*I_app, I_app ]
# elif it == 3 :
# Cur_vec = [ 0.5*I_app, I_app, I_app ]
# else :
# Cur_vec = [ I_app, I_app, I_app ]
#
# x_out[:,it+1], z_out[:,it+1], newtonStats = imp_mod.cn_solver( x_out[:,it], z_out[:,it], Cur_vec, delta_t )
#
#plt.close()
#f, ax = plt.subplots(1,3)
#ax[0].plot( imp_mod.x_m, x_out[:imp_mod.N] )
#
#ax[1].plot( imp_mod.x_m, z_out[imp_mod.Na+imp_mod.Nc:imp_mod.Na+imp_mod.Nc+imp_mod.N,:-1] )
#
#ax[2].plot( imp_mod.x_m_a, z_out[-imp_mod.Na-imp_mod.Nc:-imp_mod.Nc,:-1] )
#ax[2].plot( imp_mod.x_m_c, z_out[-imp_mod.Nc:,:-1] )
#plt.show()
#
#print z_out
#
#
# def dae_system( self, x, z, Input, get_mats=0 ) :
#
# self.set_iapp( Input )
#
# y = numpy.concatenate([x,z])
#
# ## Parse out the states
# # E-lyte conc
# ce = y[ self.ce_inds]
#
# # Solid conc a:anode, c:cathode
# csa = y[ self.csa_inds]
# csc = y[ self.csc_inds]
#
# # Reaction (Butler-Volmer Kinetics)
# ja_rxn = y[self.ja_inds]
# jc_rxn = y[self.jc_inds]
#
# # E-lyte potential
# phi = y[self.pe_inds]
#
# # Solid potential
# phi_s_a = y[self.pa_inds]
# phi_s_c = y[self.pc_inds]
#
# ## Grab state dependent matrices
# # For E-lyte conc and potential (i.e., De(ce), kapp_e(ce))
# A_ce = self.build_Ace_mat( ce )
# A_pe = self.build_Ape_mat( ce )
# B_pe = self.build_Bpe_mat( ce )
#
# ## Compute extra variables
# # For the reaction kinetics
## csa_ss = numpy.array( [ csa[(i+1)*(self.Nra)-1] for i in range(self.Na) ] )
## csc_ss = numpy.array( [ csc[(i+1)*(self.Nrc)-1] for i in range(self.Nc) ] )
# csa_ss = (self.C_cs_a.dot(csa)).flatten() + (self.D_cs_a.dot(ja_rxn)).flatten()
# csc_ss = (self.C_cs_c.dot(csc)).flatten() + (self.D_cs_c.dot(jc_rxn)).flatten()
#
# xa = csa /self.csa_max
# xc = csc /self.csc_max
# xa_ss = csa_ss/self.csa_max
# xc_ss = csc_ss/self.csc_max
#
# Uref_a = self.uref_a( xa_ss ) # anode equilibrium potential
# Uref_c = self.uref_c( xc_ss ) # cathode equilibrium potential
#
# eta_a = phi_s_a - phi[:self.Na] - Uref_a # anode overpotential
# eta_c = phi_s_c - phi[-self.Nc:] - Uref_c # cathode overpotential
#
## ja = 2.0*self.io_a * numpy.sqrt( ce[:self.Na]/self.ce_nom * (1.0 - csa_ss/self.csa_max) * (csa_ss/self.csa_max) ) * numpy.sinh( self.R_gas/(2.0*self.F*self.T)*eta_a )
## jc = 2.0*self.io_c * numpy.sqrt( ce[-self.Nc:]/self.ce_nom * (1.0 - csc_ss/self.csc_max) * (csc_ss/self.csc_max) ) * numpy.sinh( self.R_gas/(2.0*self.F*self.T)*eta_c )
# ja = 2.0*self.io_a/self.F * numpy.sinh( 0.5*self.F/(self.R_gas*self.T)*eta_a )
# jc = 2.0*self.io_c/self.F * numpy.sinh( 0.5*self.F/(self.R_gas*self.T)*eta_c )
#
# j = numpy.concatenate( [ ja_rxn, numpy.zeros(self.Ns), jc_rxn ] )
#
# ## Compute the residuals
# # Time deriv components
# r1 = ( ((A_ce.dot(ce)).flatten() + (self.B_ce.dot(j)).flatten()) ) # E-lyte conc
#
# r2 = ( (self.A_cs_a.dot(csa)).flatten() + (self.B_cs_a.dot(ja_rxn)).flatten() ) # Anode conc
# r3 = ( (self.A_cs_c.dot(csc)).flatten() + (self.B_cs_c.dot(jc_rxn)).flatten() ) # Cathode conc
#
# # Algebraic components
# r4 = ja_rxn - ja
# r5 = jc_rxn - jc
#
# r6 = A_pe.dot(phi).flatten() - B_pe.dot(ce).flatten() + self.B2_pe.dot(j).flatten() # E-lyte potential
#
# r7 = self.A_ps_a.dot(phi_s_a).flatten() - self.B_ps_a.dot(ja_rxn).flatten() - self.B2_ps_a*self.i_app # Anode potential
# r8 = self.A_ps_c.dot(phi_s_c).flatten() - self.B_ps_c.dot(jc_rxn).flatten() + self.B2_ps_c*self.i_app # Cathode potential
#
# if get_mats :
# res_out = numpy.concatenate( [r1,r2,r3] ), numpy.concatenate( [r4, r5, r6, r7, r8] ), { 'A_ce':A_ce, 'A_pe':A_pe, 'B_pe':B_pe, 'csa':csa, 'csc':csc, 'csa_ss':csa_ss, 'csc_ss':csc_ss, 'xa':xa, 'xc':xc, 'xa_ss':xa_ss, 'xc_ss':xc_ss, 'eta_a':eta_a, 'eta_c':eta_c }
# else :
# res_out = numpy.concatenate( [r1,r2,r3] ), numpy.concatenate( [r4, r5, r6, r7, r8] )
#
# return res_out
#
# def dae_system_num( self, y ) :
#
# ## Parse out the states
# # E-lyte conc
# ce = y[ self.ce_inds]
#
# # Solid conc a:anode, c:cathode
# csa = y[ self.csa_inds]
# csc = y[ self.csc_inds]
#
# # Reaction (Butler-Volmer Kinetics)
# ja_rxn = y[self.ja_inds]
# jc_rxn = y[self.jc_inds]
#
# # E-lyte potential
# phi = y[self.pe_inds]
#
# # Solid potential
# phi_s_a = y[self.pa_inds]
# phi_s_c = y[self.pc_inds]
#
# ## Grab state dependent matrices
# # For E-lyte conc and potential (i.e., De(ce), kapp_e(ce))
# A_ce = self.build_Ace_mat( ce )
# A_pe = self.build_Ape_mat( ce )
# B_pe = self.build_Bpe_mat( ce )
#
# ## Compute extra variables
# # For the reaction kinetics
## csa_ss = numpy.array( [ csa[(i+1)*(self.Nra)-1] for i in range(self.Na) ] )
## csc_ss = numpy.array( [ csc[(i+1)*(self.Nrc)-1] for i in range(self.Nc) ] )
# csa_ss = (self.C_cs_a.dot(csa)).flatten() + (self.D_cs_a.dot(ja_rxn)).flatten()
# csc_ss = (self.C_cs_c.dot(csc)).flatten() + (self.D_cs_c.dot(jc_rxn)).flatten()
#
# xa = csa /self.csa_max
# xc = csc /self.csc_max
# xa_ss = csa_ss/self.csa_max
# xc_ss = csc_ss/self.csc_max
#
# Uref_a = self.uref_a( xa_ss ) # anode equilibrium potential
# Uref_c = self.uref_c( xc_ss ) # cathode equilibrium potential
#
# eta_a = phi_s_a - phi[:self.Na] - Uref_a # anode overpotential
# eta_c = phi_s_c - phi[-self.Nc:] - Uref_c # cathode overpotential
#
## ja = 2.0*self.io_a * numpy.sqrt( ce[:self.Na]/self.ce_nom * (1.0 - csa_ss/self.csa_max) * (csa_ss/self.csa_max) ) * numpy.sinh( self.R_gas/(2.0*self.F*self.T)*eta_a )
## jc = 2.0*self.io_c * numpy.sqrt( ce[-self.Nc:]/self.ce_nom * (1.0 - csc_ss/self.csc_max) * (csc_ss/self.csc_max) ) * numpy.sinh( self.R_gas/(2.0*self.F*self.T)*eta_c )
# ja = 2.0*self.io_a/self.F * numpy.sinh( 0.5*self.F/(self.R_gas*self.T)*eta_a )
# jc = 2.0*self.io_c/self.F * numpy.sinh( 0.5*self.F/(self.R_gas*self.T)*eta_c )
#
# j = numpy.concatenate( [ ja_rxn, numpy.zeros(self.Ns), jc_rxn ] )
#
# ## Compute the residuals
# # Time deriv components
# r1 = ( ((A_ce.dot(ce)).flatten() + (self.B_ce.dot(j)).flatten()) ) # E-lyte conc
#
# r2 = ( (self.A_cs_a.dot(csa)).flatten() + (self.B_cs_a.dot(ja_rxn)).flatten() ) # Anode conc
# r3 = ( (self.A_cs_c.dot(csc)).flatten() + (self.B_cs_c.dot(jc_rxn)).flatten() ) # Cathode conc
#
# # Algebraic components
# r4 = ja_rxn - ja
# r5 = jc_rxn - jc
#
# r6 = A_pe.dot(phi).flatten() - B_pe.dot(ce).flatten() + self.B2_pe.dot(j).flatten() # E-lyte potential
#
# r7 = self.A_ps_a.dot(phi_s_a).flatten() - self.B_ps_a.dot(ja_rxn).flatten() - self.B2_ps_a*self.i_app # Anode potential
# r8 = self.A_ps_c.dot(phi_s_c).flatten() - self.B_ps_c.dot(jc_rxn).flatten() + self.B2_ps_c*self.i_app # Cathode potential
#
# res_out = numpy.concatenate( [r1,r2,r3, r4, r5, r6, r7, r8] )
#
# return res_out
#
#
# def jac_system( self, mats ) :
#
# A_ce = mats['A_ce']
# A_pe = mats['A_pe']
# B_pe = mats['B_pe']
#
# Bjac_a = self.build_Bjac_mat( mats['eta_a'], 2.0*self.io_a/self.F, 0.5*self.F/(self.R_gas*self.T) )
# Bjac_c = self.build_Bjac_mat( mats['eta_c'], 2.0*self.io_c/self.F, 0.5*self.F/(self.R_gas*self.T) )
#
# DUDcsa_ss = numpy.diag( (1.0/self.csa_max)*self.duref_a(mats['xa_ss']) )
# DUDcsc_ss = numpy.diag( (1.0/self.csc_max)*self.duref_c(mats['xc_ss']) )
#
# A_ja = numpy.diag(numpy.ones(self.Na)) - (Bjac_a.dot(-1.0*DUDcsa_ss*1.0)).dot( self.D_cs_a )
# A_jc = numpy.diag(numpy.ones(self.Nc)) - (Bjac_c.dot(-1.0*DUDcsc_ss*1.0)).dot( self.D_cs_c )
#
# ## fx
# fx = scipy.linalg.block_diag( A_ce, self.A_cs_a, self.A_cs_c )
# ##
#
# ## fz
# fz = numpy.zeros( [self.num_diff_vars, self.num_algr_vars] )
# # ce vs j
# fz[ numpy.ix_(self.ce_inds, self.ja_inds2) ] = self.B_ce[:, :self.Na ]
# fz[ numpy.ix_(self.ce_inds, self.jc_inds2) ] = self.B_ce[:, -self.Nc:]
# # cs vs j
# fz[ numpy.ix_(self.csa_inds, self.ja_inds2) ] = self.B_cs_a
# fz[ numpy.ix_(self.csc_inds, self.jc_inds2) ] = self.B_cs_c
# ##
#
# ## gx
# gx = numpy.zeros( [self.num_algr_vars, self.num_diff_vars] )
# # j vs cs_ss
# gx[ numpy.ix_(self.ja_inds2, self.csa_inds) ] = -(Bjac_a.dot(-1.0*DUDcsa_ss*1.0)).dot(self.C_cs_a)
# gx[ numpy.ix_(self.jc_inds2, self.csc_inds) ] = -(Bjac_c.dot(-1.0*DUDcsc_ss*1.0)).dot(self.C_cs_c)
# # phi_e vs ce
# gx[ numpy.ix_(self.pe_inds2, self.ce_inds) ] = -B_pe
# ##
#
# ## gz
# # z vs z
# gz0 = scipy.linalg.block_diag( A_ja, A_jc, A_pe, self.A_ps_a, self.A_ps_c )
# # z cross coupling
# gz00 = numpy.zeros_like( gz0 )
# # phi_e vs j
# gz00[ numpy.ix_(self.pe_inds2, self.ja_inds2) ] = self.B2_pe[:,:self.Na]
# gz00[ numpy.ix_(self.pe_inds2, self.jc_inds2) ] = self.B2_pe[:,-self.Nc:]
# # phi_s vs j
# gz00[ numpy.ix_(self.pa_inds2, self.ja_inds2) ] = -self.B_ps_a
# gz00[ numpy.ix_(self.pc_inds2, self.jc_inds2) ] = -self.B_ps_c
# # j vs phi_s
# gz00[ numpy.ix_(self.ja_inds2, self.pa_inds2) ] = -Bjac_a*( 1.0)
# gz00[ numpy.ix_(self.jc_inds2, self.pc_inds2) ] = -Bjac_c*( 1.0)
# # j vs phi_e
# gz00[ numpy.ix_(self.ja_inds2, self.pe_a_inds2) ] = -Bjac_a*(-1.0)
# gz00[ numpy.ix_(self.jc_inds2, self.pe_c_inds2) ] = -Bjac_c*(-1.0)
#
# gz = gz0 + gz00
#
# return fx, fz, gx, gz
#
#
# def cn_solver( self, x, z, Cur_vec, delta_t ) :
# """
# Crank-Nicholson solver for marching through time
# """
# Cur_prev, Cur, Cur_nxt = Cur_vec[0], Cur_vec[1], Cur_vec[2]
#
# maxIters = 20
# tol = 1e-5
#
# Nx = self.num_diff_vars
# Nz = self.num_algr_vars
#
# x_nxt = numpy.zeros( (Nx,maxIters), dtype='d' )
# z_nxt = numpy.zeros( (Nz,maxIters), dtype='d' )
#
# relres = numpy.zeros( maxIters, dtype='d' )
# relres[0] = 1.0
#
# var_flag = {'lim_on':0}
#
# # Solve for consistent ICs
# if Cur != Cur_prev :
# z_cons = numpy.zeros( (Nz, maxIters), dtype='d' )
# z_cons[:,0] = deepcopy(z)
#
# junk_f, g, mats = self.dae_system( x, z, Cur, get_mats=1 )
# for idx in range(maxIters-1) :
# (junk_fx, junk_fz, junk_gx, g_z) = self.jac_system( mats )
#
# Delta_z = -sparseSolve( sparseMat(g_z), g )
# z_cons[:,idx+1] = z_cons[:,idx] + Delta_z
#
# relres_z = numpy.linalg.norm(Delta_z,numpy.inf) / numpy.linalg.norm(z,numpy.inf)
# if relres_z < tol :
# break
# elif idx == maxIters-1 :
# print(('Warning: Max Newton iterations reached for consistency | RelChange=',relres_z*100.0))
#
# z = z_cons[:,idx+1]
#
# #print Cur
#
# f, g = self.dae_system( deepcopy(x), deepcopy(z), Cur )
#
# x_nxt[:,0] = deepcopy(x)
# z_nxt[:,0] = deepcopy(z)
#
# # plt.figure(1)
# # plt.plot( x_nxt[:,0] )
# # plt.plot( z_nxt[:,0] )
# # plt.show()
#
# for idx in range(maxIters-1) :
# f_nxt, g_nxt, mats = self.dae_system( x_nxt[:,idx], z_nxt[:,idx], Cur_nxt, get_mats=1 )
#
## print 'x:',x.shape
## print 'xnxt:',x_nxt[:,idx].shape
## print 'f:',f.shape
## print 'fnxt:',f_nxt.shape
#
## print 'z:', z.shape
## print 'g:', g.shape
## print 'znxt:', z_nxt[:,idx].shape
## print 'gnxt:', g_nxt.shape
#
# F1 = x - x_nxt[:,idx] + delta_t/2.*( f+f_nxt )
# F2 = g_nxt
# F = numpy.concatenate( (F1, F2), axis=0 )
#
# fx, fz, gx, gz = self.jac_system( mats )
#
#
# jmat = numpy.concatenate( (numpy.concatenate( (fx, fz), axis=1 ),
# numpy.concatenate( (gx, gz), axis=1 )) )
#
# self.Input = Cur_nxt
# jmat_num = compute_deriv( self.dae_system_num, numpy.concatenate( (x_nxt[:,idx], z_nxt[:,idx]) ) )
#
# fx_num = jmat_num[:self.num_diff_vars,:self.num_diff_vars]
# fz_num = jmat_num[:self.num_diff_vars,self.num_diff_vars:]
# gx_num = jmat_num[self.num_diff_vars:,:self.num_diff_vars]
# gz_num = jmat_num[self.num_diff_vars:,self.num_diff_vars:]
#
# F1x_num = -sparse.eye(len(x)) + delta_t/2. * fx_num
# F1z_num = delta_t/2. * fz_num
#
# F1_x = -sparse.eye(len(x)) + delta_t/2. * fx
# F1_z = delta_t/2. * fz
# F2_x = gx
# F2_z = gz
#
# J = numpy.concatenate( (numpy.concatenate( (F1_x, F1_z), axis=1 ),
# numpy.concatenate( (F2_x, F2_z), axis=1 )) )
#
## Jnum = numpy.concatenate( (numpy.concatenate( (F1x_num, F1z_num), axis=1 ),
## numpy.concatenate( (gx_num , gz_num ), axis=1 )) )
#
#
# Jsp = sparseMat( J )
#
## Jspnum = sparseMat( Jnum )
#
## Delta_y = -sparseSolve( Jspnum, F )
# Delta_y = -sparseSolve( Jsp, F )
#
#
# x_nxt[:,idx+1] = x_nxt[:,idx] + Delta_y[:Nx]
# z_nxt[:,idx+1] = z_nxt[:,idx] + Delta_y[Nx:]
#
# # plt.figure(1)
# # plt.plot(Delta_y)
#
# # plt.figure(2)
# # plt.plot(x_nxt[:,idx])
# # plt.plot(x_nxt[:,idx+1])
#
## plt.show()
#
# y = numpy.concatenate( (x_nxt[:,idx+1], z_nxt[:,idx+1]), axis=0 )
# relres[idx+1] = numpy.linalg.norm( Delta_y, numpy.inf ) / numpy.linalg.norm( y, numpy.inf )
#
# if (relres[idx+1]<tol) and (numpy.linalg.norm(F, numpy.inf)<tol) :
# break
# elif idx==maxIters-1 :
# print( ('Warning: Max Newton iterations reached in main CN loop | RelChange = ',relres[-1]*100.0) )
#
# x_nxtf = x_nxt[:,idx+1]
# z_nxtf = z_nxt[:,idx+1]
#
# newtonStats = {'var_flag':var_flag}
# newtonStats['iters'] = idx
# newtonStats['relres'] = relres
#
# print '###############################################'
# print 'numpy.allclose( fx, fx_num, rtol=0.001 ):', numpy.allclose( fx, fx_num, rtol=0.001 )
#
# print '###############################################'
# print 'numpy.allclose( fz, fz_num, rtol=0.001 ):', numpy.allclose( fz, fz_num, rtol=0.001 )
#
# print '###############################################'
# print 'numpy.allclose( gx, gx_num, rtol=0.001 ):', numpy.allclose( gx, gx_num, rtol=0.001 )
#
# print '###############################################'
# print 'numpy.allclose( gz, gz_num, rtol=0.001 ):', numpy.allclose( gz, gz_num, rtol=0.001 )
#
# print '###############################################'
# print 'numpy.allclose( jmat, jmat_num, rtol=0.001 ):', numpy.allclose( jmat, jmat_num, rtol=0.001 )
#
# jm1_sp = sps.csr_matrix(jmat)
# jm2_sp = sps.csr_matrix(jmat_num)
#
# fig, ax = plt.subplots(1,2)
# ax[0].spy( jm1_sp )
# ax[0].set_title('Analytical Jacobian')
# ax[1].spy( jm2_sp )
# ax[1].set_title('Numerical Jacobian')
# plt.suptitle( 'numpy.allclose( jmat, jmat_num, rtol=0.001 ):' + str(numpy.allclose( jmat, jmat_num, rtol=0.001 )) )
# plt.show()
#
# print 'Finished t_step'
#
# return x_nxtf, z_nxtf, newtonStats
|
matthewpklein/battsimpy
|
tests/dae_genPart.py
|
Python
|
gpl-3.0
| 59,229
|
[
"Gaussian"
] |
2a9fc896ae8822b3b45135ca7e637c430c63e8e7f4fd4c332365e22e5794f799
|
""" FileReport module defines the FileReport class, to report file status to the transformation DB
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from DIRAC import S_OK
from DIRAC.Core.Utilities import DEncode
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
from DIRAC.RequestManagementSystem.Client.Operation import Operation
__RCSID__ = "$Id$"
class FileReport(object):
    """A stateful object for reporting file statuses to the TransformationDB.

    Statuses are accumulated in an internal cache (``statusDict``) and pushed
    to the TransformationManager service on :meth:`commit`.
    """

    def __init__(self, server="Transformation/TransformationManager"):
        """c'tor

        self.transClient is a TransformationClient object

        :param str server: service URL/alias used by the TransformationClient
        """
        self.transClient = TransformationClient()
        self.transClient.setServer(server)
        # lfn -> status, pending until commit()
        self.statusDict = {}
        # Transformation ID, set on the first setFileStatus() call
        self.transformation = None
        # Passed through to setFileStatusForTransformation
        self.force = False

    def setFileStatus(self, transformation, lfn, status, sendFlag=False):
        """Set file status in the context of the given transformation.

        :param transformation: transformation ID (remembered on first call)
        :param lfn: a single LFN, or a list/tuple/dict of LFNs
        :param str status: status to record for the LFN(s)
        :param bool sendFlag: when True, commit immediately
        """
        if not self.transformation:
            self.transformation = transformation
        # Accept either one LFN or any iterable container of LFNs
        # (for a dict, the keys are taken as the LFNs).
        names = lfn if isinstance(lfn, (list, dict, tuple)) else [lfn]
        for name in names:
            self.statusDict[name] = status
        return self.commit() if sendFlag else S_OK()

    def setCommonStatus(self, status):
        """Set a common status for all files in the internal cache."""
        self.statusDict = dict.fromkeys(self.statusDict, status)
        return S_OK()

    def getFiles(self):
        """Return a deep copy of the accumulated lfn -> status mapping."""
        return copy.deepcopy(self.statusDict)

    def commit(self):
        """Commit pending file status update records.

        On success the local cache is cleared; on failure the records are
        kept for a later retry (or for generateForwardDISET).
        """
        if not self.statusDict:
            return S_OK({})
        result = self.transClient.setFileStatusForTransformation(
            self.transformation, self.statusDict, force=self.force)
        if result["OK"]:
            self.statusDict = {}
        return result

    def generateForwardDISET(self):
        """Commit the accumulated records and, on failure, generate a request.

        :return: S_OK(None) if the commit succeeded, otherwise
                 S_OK(Operation) wrapping the pending updates.
        """
        result = self.commit()
        if result["OK"]:
            return S_OK(None)
        # Commit failed: package the remaining updates as a SetFileStatus
        # operation so the RMS can replay them later.
        forwardOp = Operation()
        forwardOp.Type = "SetFileStatus"
        forwardOp.Arguments = DEncode.encode(
            {"transformation": self.transformation, "statusDict": self.statusDict, "force": self.force}
        )
        return S_OK(forwardOp)
|
ic-hep/DIRAC
|
src/DIRAC/TransformationSystem/Client/FileReport.py
|
Python
|
gpl-3.0
| 2,597
|
[
"DIRAC"
] |
37aff056408001bc7e51dcc119aab28f1769f423c5de4b1181574b51a34ea7bc
|
import numpy as np
import time
from astrometry.util.ttime import Time
from astrometry.util.resample import resample_with_wcs, OverlapError
from astrometry.util.fits import fits_table
from astrometry.util.plotutils import dimshow
from tractor import Tractor, PointSource, Image, Catalog, Patch, Galaxy
from tractor.galaxy import (DevGalaxy, ExpGalaxy,
disable_galaxy_cache, enable_galaxy_cache)
from tractor.patch import ModelMask
from tractor.sersic import SersicGalaxy
from legacypipe.survey import (RexGalaxy,
LegacyEllipseWithPriors, LegacySersicIndex, get_rgb)
from legacypipe.bits import IN_BLOB
from legacypipe.coadds import quick_coadds
from legacypipe.runbrick_plots import _plot_mods
rgbkwargs_resid = dict(resids=True)
import logging
logger = logging.getLogger('legacypipe.oneblob')
def info(*args):
    """Log the positional arguments at INFO level via the module logger.

    Note: args are forwarded to log_info as a single tuple; log_info is
    expected to join/unpack them -- TODO confirm against legacypipe.utils.
    """
    from legacypipe.utils import log_info
    log_info(logger, args)
def debug(*args):
    """Log the positional arguments at DEBUG level via the module logger.

    Mirrors info(): args are forwarded as a single tuple to log_debug.
    """
    from legacypipe.utils import log_debug
    log_debug(logger, args)
def is_debug():
    """Return True when DEBUG logging is enabled for this module's logger.

    Cheap guard so callers can skip expensive debug-only work.
    """
    return logger.isEnabledFor(logging.DEBUG)
# Determines the order of elements in the DCHISQ array.
MODEL_NAMES = ['psf', 'rex', 'dev', 'exp', 'ser']
# singleton: cached result of get_cpu_arch(), computed once per process
cpu_arch = None
def get_cpu_arch():
    """Return a short codename for the host CPU architecture.

    Parses /proc/cpuinfo (Linux only) for the first 'cpu family' and
    'model' entries and maps known NERSC Cori CPUs to 'has' (Haswell) or
    'knl' (Knights Landing).  Any other CPU -- or a platform without
    /proc/cpuinfo -- yields ''.  The result is cached in the module-level
    ``cpu_arch`` so the file is read at most once.
    """
    global cpu_arch
    import os
    if cpu_arch is not None:
        return cpu_arch
    family = None
    model = None
    modelname = None
    if os.path.exists('/proc/cpuinfo'):
        # Use a context manager so the file handle is closed deterministically
        # (the original open(...).readlines() leaked it until GC).
        with open('/proc/cpuinfo') as cpuinfo:
            for line in cpuinfo:
                words = [w.strip() for w in line.strip().split(':')]
                # Record only the first occurrence of each field
                # (one block per core; they are all identical).
                if words[0] == 'cpu family' and family is None:
                    family = int(words[1])
                if words[0] == 'model' and model is None:
                    model = int(words[1])
                if words[0] == 'model name' and modelname is None:
                    modelname = words[1]
    codenames = {
        # NERSC Cori machines
        (6, 63): 'has',
        (6, 87): 'knl',
    }
    cpu_arch = codenames.get((family, model), '')
    return cpu_arch
def one_blob(X):
    '''
    Fits sources contained within a "blob" of pixels.

    X is a single work-unit tuple (or None for a no-op); returns a
    fits_table of per-source measurements, or None if there is nothing
    to fit.
    '''
    if X is None:
        return None
    # Unpack the work tuple prepared by the caller (runbrick stage).
    (nblob, iblob, Isrcs, brickwcs, bx0, by0, blobw, blobh, blobmask, timargs,
     srcs, bands, plots, ps, reoptimize, iterative, use_ceres, refmap,
     large_galaxies_force_pointsource, less_masking, frozen_galaxies) = X
    debug('Fitting blob number %i: blobid %i, nsources %i, size %i x %i, %i images, %i frozen galaxies' %
          (nblob, iblob, len(Isrcs), blobw, blobh, len(timargs), len(frozen_galaxies)))
    # Nothing to do without images or without sources.
    if len(timargs) == 0:
        return None
    if len(Isrcs) == 0:
        return None
    for g in frozen_galaxies:
        debug('Frozen galaxy:', g)
    # Step size used for numerical derivatives of the Sersic index.
    LegacySersicIndex.stepsize = 0.001
    if plots:
        import pylab as plt
        plt.figure(2, figsize=(3,3))
        plt.subplots_adjust(left=0.01, right=0.99, bottom=0.01, top=0.99)
        plt.figure(1)
    # CPU accounting for the whole blob (stored in B.cpu_blob below).
    t0 = time.process_time()
    # A local WCS for this blob
    blobwcs = brickwcs.get_subimage(bx0, by0, blobw, blobh)
    # Per-source measurements for this blob
    B = fits_table()
    B.sources = srcs
    B.Isrcs = Isrcs
    B.iblob = iblob
    # Did sources start within the blob?
    _,x0,y0 = blobwcs.radec2pixelxy(
        np.array([src.getPosition().ra for src in srcs]),
        np.array([src.getPosition().dec for src in srcs]))
    # blob-relative initial positions (zero-indexed)
    B.x0 = (x0 - 1.).astype(np.float32)
    B.y0 = (y0 - 1.).astype(np.float32)
    # Clipped integer positions that are safe to use as blobmask indices.
    B.safe_x0 = np.clip(np.round(x0-1).astype(int), 0,blobw-1)
    B.safe_y0 = np.clip(np.round(y0-1).astype(int), 0,blobh-1)
    B.started_in_blob = blobmask[B.safe_y0, B.safe_x0]
    # This uses 'initial' pixel positions, because that's what determines
    # the fitting behaviors.
    ob = OneBlob('%i'%(nblob+1), blobwcs, blobmask, timargs, srcs, bands,
                 plots, ps, use_ceres, refmap,
                 large_galaxies_force_pointsource,
                 less_masking, frozen_galaxies)
    # The actual fitting happens here; B may gain rows (iterative detections).
    B = ob.run(B, reoptimize=reoptimize, iterative_detection=iterative)
    # Recompute pixel positions after fitting to flag sources that moved out.
    _,x1,y1 = blobwcs.radec2pixelxy(
        np.array([src.getPosition().ra for src in B.sources]),
        np.array([src.getPosition().dec for src in B.sources]))
    B.finished_in_blob = blobmask[
        np.clip(np.round(y1-1).astype(int), 0, blobh-1),
        np.clip(np.round(x1-1).astype(int), 0, blobw-1)]
    assert(len(B.finished_in_blob) == len(B))
    assert(len(B.finished_in_blob) == len(B.started_in_blob))
    # Setting values here (after .run() has completed) means that iterative sources
    # (which get merged with the original table B) get values also.
    B.blob_x0 = np.zeros(len(B), np.int16) + bx0
    B.blob_y0 = np.zeros(len(B), np.int16) + by0
    B.blob_width = np.zeros(len(B), np.int16) + blobw
    B.blob_height = np.zeros(len(B), np.int16) + blobh
    B.blob_npix = np.zeros(len(B), np.int32) + np.sum(blobmask)
    B.blob_nimages= np.zeros(len(B), np.int16) + len(timargs)
    B.blob_totalpix = np.zeros(len(B), np.int32) + ob.total_pix
    B.cpu_arch = np.zeros(len(B), dtype='U3')
    B.cpu_arch[:] = get_cpu_arch()
    B.cpu_blob = np.empty(len(B), np.float32)
    # Convert to whole-brick (zero-indexed) pixel positions.
    # (do this here rather than above to ease handling iterative detections)
    B.x0 += bx0
    B.y0 += by0
    # these are now in brick coords... rename for consistency in runbrick.py
    B.rename('x0', 'bx0')
    B.rename('y0', 'by0')
    t1 = time.process_time()
    B.cpu_blob[:] = t1 - t0
    return B
class OneBlob(object):
    def __init__(self, name, blobwcs, blobmask, timargs, srcs, bands,
                 plots, ps, use_ceres, refmap,
                 large_galaxies_force_pointsource,
                 less_masking, frozen_galaxies):
        """Set up a single-blob fitting context.

        Creates the tractor images from *timargs*, subtracts any frozen
        (pre-fit) galaxy models from the image data, and configures the
        optimizer arguments used by run().
        """
        # Identifier string used in log messages and plot titles.
        self.name = name
        self.blobwcs = blobwcs
        self.pixscale = self.blobwcs.pixel_scale()
        self.blobmask = blobmask
        self.srcs = srcs
        self.bands = bands
        self.plots = plots
        self.refmap = refmap
        #self.plots_per_source = False
        self.plots_per_source = plots
        self.plots_per_model = False
        # blob-1-data.png, etc
        self.plots_single = False
        self.ps = ps
        self.use_ceres = use_ceres
        self.deblend = False
        self.large_galaxies_force_pointsource = large_galaxies_force_pointsource
        self.less_masking = less_masking
        self.tims = self.create_tims(timargs)
        # Total number of valid (inverse-error > 0) pixels over all images.
        self.total_pix = sum([np.sum(t.getInvError() > 0) for t in self.tims])
        self.plots2 = False
        # Line-search step fractions for the optimizer.
        alphas = [0.1, 0.3, 1.0]
        self.optargs = dict(priors=True, shared_params=False, alphas=alphas,
                            print_progress=True)
        self.blobh,self.blobw = blobmask.shape
        # "Big" blobs (> 100x100 pix) get different treatment downstream.
        self.bigblob = (self.blobw * self.blobh) > 100*100
        if self.bigblob:
            debug('Big blob:', name)
        self.trargs = dict()
        self.frozen_galaxy_mods = []
        if len(frozen_galaxies):
            # Render each frozen galaxy over the full image footprint and
            # subtract it from the pixel data; keep the models so they can
            # be restored/inspected later.
            debug('Subtracting frozen galaxy models...')
            tr = Tractor(self.tims, Catalog(*frozen_galaxies))
            mm = []
            for tim in self.tims:
                mh,mw = tim.shape
                # Full-image ModelMask: evaluate the model everywhere.
                mm.append(dict([(g, ModelMask(0, 0, mw, mh)) for g in frozen_galaxies]))
            tr.setModelMasks(mm)
            if self.plots:
                mods = []
            for tim in self.tims:
                mod = tr.getModelImage(tim)
                self.frozen_galaxy_mods.append(mod)
                # In-place subtraction from the image data.
                tim.data -= mod
                if self.plots:
                    mods.append(mod)
            if self.plots:
                import pylab as plt
                coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs, images=mods,
                                        fill_holes=False)
                plt.clf()
                dimshow(get_rgb(coimgs, self.bands))
                plt.title('Subtracted frozen galaxies')
                self.ps.savefig()
                coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
                                        fill_holes=False)
                plt.clf()
                dimshow(get_rgb(coimgs, self.bands))
                plt.title('After subtracting frozen galaxies')
                self.ps.savefig()
        # if use_ceres:
        #     from tractor.ceres_optimizer import CeresOptimizer
        #     ceres_optimizer = CeresOptimizer()
        #     self.optargs.update(scale_columns=False,
        #                         scaled=False,
        #                         dynamic_scale=False)
        #     self.trargs.update(optimizer=ceres_optimizer)
        # else:
        #     self.optargs.update(dchisq = 0.1)
        from tractor.dense_optimizer import ConstrainedDenseOptimizer
        self.trargs.update(optimizer=ConstrainedDenseOptimizer())
        self.optargs.update(dchisq = 0.1)
    def run(self, B, reoptimize=False, iterative_detection=True,
            compute_metrics=True):
        """Run the full per-blob fitting pipeline.

        Stages (in order): initialize per-source bookkeeping columns on *B*;
        optimize each source individually (brightest first); set per-source
        fitting behaviors from the geometric reference masks (refmap); compute
        the segmentation map; run model selection (psf/rex/dev/exp/ser); cut to
        kept sources; optionally re-optimize; optionally compute parameter
        inverse-variances and source metrics.

        Parameters
        ----------
        B : table (fits_table-like)
            One row per source in self.srcs; new columns are added in place,
            and rows may be cut.  Note that B may be *replaced* (rebound) by
            run_model_selection when iterative detection appends new sources.
        reoptimize : bool
            If True, run one more round of individual-source optimization
            after model selection.
        iterative_detection : bool
            Passed through to run_model_selection; enables a second round of
            source detection on the residual images.
        compute_metrics : bool
            If True, compute per-parameter inverse-variances and per-source
            metrics; sources with all-zero ivar (and not force-kept) are cut.

        Returns
        -------
        B : the (possibly rebound and row-cut) results table.
        """
        trun = tlast = Time()
        # Not quite so many plots...
        self.plots1 = self.plots
        cat = Catalog(*self.srcs)
        N = len(B)
        # Per-source bookkeeping columns, filled in during the stages below.
        B.cpu_source = np.zeros(N, np.float32)
        B.force_keep_source = np.zeros(N, bool)
        B.fit_background = np.zeros(N, bool)
        B.forced_pointsource = np.zeros(N, bool)
        B.hit_limit = np.zeros(N, bool)
        B.hit_ser_limit = np.zeros(N, bool)
        B.hit_r_limit = np.zeros(N, bool)
        B.blob_symm_width = np.zeros(N, np.int16)
        B.blob_symm_height = np.zeros(N, np.int16)
        B.blob_symm_npix = np.zeros(N, np.int32)
        B.blob_symm_nimages = np.zeros(N, np.int16)
        # Save initial fluxes for all sources (used if we force
        # keeping a reference star)
        for src in self.srcs:
            src.initial_brightness = src.brightness.copy()
        # Set the freezeparams field for each source. (This is set for
        # large galaxies with the 'freeze' column set.)
        for src in self.srcs:
            src.freezeparams = getattr(src, 'freezeparams', False)
        if self.plots:
            import pylab as plt
            self._initial_plots()
            from legacypipe.detection import plot_boundary_map
            plt.clf()
            dimshow(self.rgb)
            ax = plt.axis()
            bitset = ((self.refmap & IN_BLOB['MEDIUM']) != 0)
            plot_boundary_map(bitset, rgb=(255,0,0), iterations=2)
            bitset = ((self.refmap & IN_BLOB['BRIGHT']) != 0)
            plot_boundary_map(bitset, rgb=(200,200,0), iterations=2)
            bitset = ((self.refmap & IN_BLOB['GALAXY']) != 0)
            plot_boundary_map(bitset, rgb=(0,255,0), iterations=2)
            plt.axis(ax)
            plt.title('Reference-source Masks')
            self.ps.savefig()
        tr = self.tractor(self.tims, cat)
        # Fit any sources marked with 'needs_initial_flux' -- saturated, and SGA
        fitflux = [src for src in cat if getattr(src, 'needs_initial_flux', False)]
        if len(fitflux):
            self._fit_fluxes(cat, self.tims, self.bands, fitcat=fitflux)
            if self.plots:
                self._plots(tr, 'Fitting initial fluxes')
        del fitflux
        if self.plots:
            self._plots(tr, 'Initial models')
            plt.clf()
            self._plot_coadd(self.tims, self.blobwcs, model=tr)
            plt.title('Initial models')
            self.ps.savefig()
        # Optimize individual sources, in order of flux.
        # First, choose the ordering...
        Ibright = _argsort_by_brightness(cat, self.bands, ref_first=True)
        # The sizes of the model patches fit here are determined by the
        # sources themselves, ie by the size of the mod patch returned by
        # src.getModelPatch(tim)
        if len(cat) > 1:
            # Multiple sources: fit each one against images with the others
            # subtracted.
            self._optimize_individual_sources_subtract(
                cat, Ibright, B.cpu_source)
        else:
            self._optimize_individual_sources(tr, cat, Ibright, B.cpu_source)
        if self.plots:
            self._plots(tr, 'After source fitting')
            plt.clf()
            self._plot_coadd(self.tims, self.blobwcs, model=tr)
            plt.title('After source fitting')
            self.ps.savefig()
            # Plot source locations
            ax = plt.axis()
            _,xf,yf = self.blobwcs.radec2pixelxy(
                np.array([src.getPosition().ra for src in self.srcs]),
                np.array([src.getPosition().dec for src in self.srcs]))
            plt.plot(xf-1, yf-1, 'r.', label='Sources')
            Ir = np.flatnonzero([is_reference_source(src) for src in self.srcs])
            if len(Ir):
                plt.plot(xf[Ir]-1, yf[Ir]-1, 'o', mec='g', mfc='none', ms=8, mew=2,
                         label='Ref source')
            plt.legend()
            plt.axis(ax)
            plt.title('After source fitting')
            self.ps.savefig()
        if self.plots_single:
            plt.figure(2)
            mods = list(tr.getModelImages())
            coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs, images=mods,
                                    fill_holes=False)
            dimshow(get_rgb(coimgs,self.bands), ticks=False)
            plt.savefig('blob-%s-initmodel.png' % (self.name))
            res = [(tim.getImage() - mod) for tim,mod in zip(self.tims, mods)]
            coresids,_ = quick_coadds(self.tims, self.bands, self.blobwcs, images=res)
            dimshow(get_rgb(coresids, self.bands, resids=True), ticks=False)
            plt.savefig('blob-%s-initresid.png' % (self.name))
            dimshow(get_rgb(coresids, self.bands), ticks=False)
            plt.savefig('blob-%s-initsub.png' % (self.name))
            plt.figure(1)
        debug('Blob', self.name, 'finished initial fitting:', Time()-tlast)
        tlast = Time()
        # Set any fitting behaviors based on geometric masks.
        # Fitting behaviors: force point-source
        force_pointsource_mask = (IN_BLOB['BRIGHT'] | IN_BLOB['CLUSTER'])
        # large_galaxies_force_pointsource is True by default.
        if self.large_galaxies_force_pointsource:
            force_pointsource_mask |= IN_BLOB['GALAXY']
        # Fit background?
        fit_background_mask = IN_BLOB['BRIGHT']
        if not self.less_masking:
            fit_background_mask |= IN_BLOB['MEDIUM']
        ### this variable *also* forces fitting the background.
        if self.large_galaxies_force_pointsource:
            fit_background_mask |= IN_BLOB['GALAXY']
        # Decide, per source, whether to force a point-source model and/or
        # fit a sky background, based on the refmap bits at its position.
        for srci,src in enumerate(cat):
            _,ix,iy = self.blobwcs.radec2pixelxy(src.getPosition().ra,
                                                 src.getPosition().dec)
            ix = int(np.clip(ix-1, 0, self.blobw-1))
            iy = int(np.clip(iy-1, 0, self.blobh-1))
            bits = self.refmap[iy, ix]
            force_pointsource = ((bits & force_pointsource_mask) > 0)
            fit_background = ((bits & fit_background_mask) > 0)
            is_galaxy = isinstance(src, Galaxy)
            if is_galaxy:
                # Known (SGA) galaxies are exempt from both behaviors.
                fit_background = False
                force_pointsource = False
            B.forced_pointsource[srci] = force_pointsource
            B.fit_background[srci] = fit_background
            # Also set a parameter on 'src' for use in compute_segmentation_map()
            src.maskbits_forced_point_source = force_pointsource
        self.compute_segmentation_map()
        # Next, model selections: point source vs dev/exp vs ser.
        B = self.run_model_selection(cat, Ibright, B,
                                     iterative_detection=iterative_detection)
        debug('Blob', self.name, 'finished model selection:', Time()-tlast)
        tlast = Time()
        # Cut down to just the kept sources
        cat = B.sources
        I = np.array([i for i,s in enumerate(cat) if s is not None])
        B.cut(I)
        cat = Catalog(*B.sources)
        tr.catalog = cat
        if self.plots:
            self._plots(tr, 'After model selection')
            plt.clf()
            self._plot_coadd(self.tims, self.blobwcs, model=tr)
            plt.title('After model selection')
            self.ps.savefig()
            plt.clf()
            self._plot_coadd(self.tims, self.blobwcs, model=tr, addnoise=True)
            plt.title('After model selection (+noise)')
            self.ps.savefig()
        if self.plots_single:
            plt.figure(2)
            mods = list(tr.getModelImages())
            coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs, images=mods,
                                    fill_holes=False)
            dimshow(get_rgb(coimgs,self.bands), ticks=False)
            plt.savefig('blob-%s-model.png' % (self.name))
            res = [(tim.getImage() - mod) for tim,mod in zip(self.tims, mods)]
            coresids,_ = quick_coadds(self.tims, self.bands, self.blobwcs, images=res)
            dimshow(get_rgb(coresids, self.bands, resids=True), ticks=False)
            plt.savefig('blob-%s-resid.png' % (self.name))
            plt.figure(1)
        # Do another quick round of flux-only fitting?
        # This does horribly -- fluffy galaxies go out of control because
        # they're only constrained by pixels within this blob.
        #_fit_fluxes(cat, tims, bands, use_ceres, alphas)
        # A final optimization round?
        if reoptimize:
            if self.plots:
                import pylab as plt
                modimgs = list(tr.getModelImages())
                co,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
                                    images=modimgs)
                plt.clf()
                dimshow(get_rgb(co, self.bands))
                plt.title('Before final opt')
                self.ps.savefig()
            # Re-sort by brightness: fluxes have changed since the first sort.
            Ibright = _argsort_by_brightness(cat, self.bands, ref_first=True)
            if len(cat) > 1:
                self._optimize_individual_sources_subtract(
                    cat, Ibright, B.cpu_source)
            else:
                self._optimize_individual_sources(tr, cat, Ibright, B.cpu_source)
            if self.plots:
                import pylab as plt
                modimgs = list(tr.getModelImages())
                co,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
                                    images=modimgs)
                plt.clf()
                dimshow(get_rgb(co, self.bands))
                plt.title('After final opt')
                self.ps.savefig()
        if compute_metrics:
            # Compute variances on all parameters for the kept model
            B.srcinvvars = [None for i in range(len(B))]
            cat.thawAllRecursive()
            cat.freezeAllParams()
            # Thaw one source at a time so tr.getDerivs() only produces
            # derivatives for that source.
            for isub in range(len(B.sources)):
                cat.thawParam(isub)
                src = cat[isub]
                if src is None:
                    cat.freezeParam(isub)
                    continue
                # Convert to "vanilla" ellipse parameterization
                nsrcparams = src.numberOfParams()
                if B.force_keep_source[isub]:
                    # Force-kept sources get zero ivars rather than a fit.
                    B.srcinvvars[isub] = np.zeros(nsrcparams, np.float32)
                    cat.freezeParam(isub)
                    continue
                _convert_ellipses(src)
                assert(src.numberOfParams() == nsrcparams)
                # Compute inverse-variances
                allderivs = tr.getDerivs()
                ivars = _compute_invvars(allderivs)
                assert(len(ivars) == nsrcparams)
                B.srcinvvars[isub] = ivars
                assert(len(B.srcinvvars[isub]) == cat[isub].numberOfParams())
                cat.freezeParam(isub)
            # Check for sources with zero inverse-variance -- I think these
            # can be generated during the "Simultaneous re-opt" stage above --
            # sources can get scattered outside the blob.
            I, = np.nonzero([np.sum(iv) > 0 or force
                             for iv,force in zip(B.srcinvvars, B.force_keep_source)])
            if len(I) < len(B):
                debug('Keeping', len(I), 'of', len(B),'sources with non-zero ivar')
                B.cut(I)
                cat = Catalog(*B.sources)
                tr.catalog = cat
            M = _compute_source_metrics(B.sources, self.tims, self.bands, tr)
            for k,v in M.items():
                B.set(k, v)
        info('Blob', self.name, 'finished, total:', Time()-trun)
        return B
def compute_segmentation_map(self):
from functools import reduce
from legacypipe.detection import detection_maps
from astrometry.util.multiproc import multiproc
from scipy.ndimage.morphology import binary_dilation
# Compute per-band detection maps
mp = multiproc()
detmaps,detivs,satmaps = detection_maps(
self.tims, self.blobwcs, self.bands, mp)
# same as in runbrick.py
saturated_pix = reduce(np.logical_or,
[binary_dilation(satmap > 0, iterations=4) for satmap in satmaps])
del satmaps
maxsn = 0
for i,(detmap,detiv) in enumerate(zip(detmaps,detivs)):
sn = detmap * np.sqrt(detiv)
if self.plots and False:
import pylab as plt
plt.clf()
plt.subplot(2,2,1)
plt.imshow(detmap, interpolation='nearest', origin='lower')
plt.title('detmap %s' % self.bands[i])
plt.colorbar()
plt.subplot(2,2,2)
plt.imshow(detiv, interpolation='nearest', origin='lower')
plt.title('detiv %s' % self.bands[i])
plt.colorbar()
plt.subplot(2,2,3)
plt.imshow(sn, interpolation='nearest', origin='lower')
plt.title('detsn %s' % self.bands[i])
plt.colorbar()
self.ps.savefig()
# HACK - no SEDs...
maxsn = np.maximum(maxsn, sn)
if self.plots:
import pylab as plt
plt.clf()
plt.imshow(saturated_pix, interpolation='nearest', origin='lower',
vmin=0, vmax=1, cmap='gray')
plt.title('saturated pix')
self.ps.savefig()
plt.clf()
plt.imshow(maxsn, interpolation='nearest', origin='lower')
plt.title('max s/n for segmentation')
self.ps.savefig()
ok,ix,iy = self.blobwcs.radec2pixelxy(
np.array([src.getPosition().ra for src in self.srcs]),
np.array([src.getPosition().dec for src in self.srcs]))
ix = np.clip(np.round(ix)-1, 0, self.blobw-1).astype(np.int32)
iy = np.clip(np.round(iy)-1, 0, self.blobh-1).astype(np.int32)
# Do not compute segmentation map for sources in the CLUSTER mask
# (or with very bad coords)
Iseg, = np.nonzero(ok * ((self.refmap[iy, ix] & IN_BLOB['CLUSTER']) == 0))
del ok
# Zero out the S/N in CLUSTER mask
maxsn[(self.refmap & IN_BLOB['CLUSTER']) > 0] = 0.
# (also zero out the satmap in the CLUSTER mask)
saturated_pix[(self.refmap & IN_BLOB['CLUSTER']) > 0] = False
import heapq
H,W = self.blobh, self.blobw
segmap = np.empty((H,W), np.int32)
segmap[:,:] = -1
# Iseg are the indices in self.srcs of sources to segment
sy = iy[Iseg]
sx = ix[Iseg]
segmap[sy, sx] = Iseg
maxr2 = np.zeros(len(Iseg), np.int32)
# Reference sources forced to be point sources get a max radius:
ref_radius = 25
for j,i in enumerate(Iseg):
if getattr(self.srcs[i], 'forced_point_source', False):
maxr2[j] = ref_radius**2
# Sources inside maskbits masks that are forced to be point sources
# also get a max radius.
for j,i in enumerate(Iseg):
if getattr(self.srcs[i], 'maskbits_forced_point_source', False):
maxr2[j] = ref_radius**2
mask = self.blobmask
# Watershed by priority-fill.
# values are (-sn, key, x, y, center_x, center_y, maxr2)
q = [(-maxsn[y,x], segmap[y,x],x,y,x,y,r2)
for x,y,r2 in zip(sx,sy,maxr2)]
heapq.heapify(q)
while len(q):
_,key,x,y,cx,cy,r2 = heapq.heappop(q)
segmap[y,x] = key
# 4-connected neighbours
for x,y in [(x, y-1), (x, y+1), (x-1, y), (x+1, y),]:
# out of bounds?
if x<0 or y<0 or x==W or y==H:
continue
# not in blobmask?
if not mask[y,x]:
continue
# already queued or segmented?
if segmap[y,x] != -1:
continue
# outside the ref source radius?
if r2 > 0 and (x-cx)**2 + (y-cy)**2 > r2:
continue
# mark as queued
segmap[y,x] = -2
# enqueue!
heapq.heappush(q, (-maxsn[y,x], key, x, y, cx, cy, r2))
del q, maxr2
del maxsn, saturated_pix
# ensure that each source owns a tiny radius around its center
# in the segmentation map. If there is more than one source
# in that radius, each pixel gets assigned to its nearest
# source.
radius = 5
Ibright = _argsort_by_brightness([self.srcs[i] for i in Iseg], self.bands)
_set_kingdoms(segmap, radius, Iseg[Ibright], ix, iy)
self.segmap = segmap
if self.plots:
import pylab as plt
plt.clf()
dimshow(segmap)
ax = plt.axis()
from legacypipe.detection import plot_boundary_map
plot_boundary_map(segmap >= 0)
plt.plot(ix, iy, 'r.')
plt.axis(ax)
plt.title('Segmentation map')
self.ps.savefig()
plt.clf()
dimshow(self.rgb)
ax = plt.axis()
for i in range(len(self.srcs)):
plot_boundary_map(segmap == i)
plt.plot(ix, iy, 'r.')
plt.axis(ax)
plt.title('Segments')
self.ps.savefig()
    def run_model_selection(self, cat, Ibright, B, iterative_detection=True):
        """Select the best model (psf/rex/dev/exp/ser) for each source.

        Iterates over sources in decreasing brightness order (*Ibright*),
        delegating the per-source work to model_selection_one_source().
        Frozen sources are kept as-is; dropped reference stars are forcibly
        re-kept with their initial brightness.  If *iterative_detection* is
        enabled, runs a second detection pass on the residual images and
        merges any newly-found sources into *B*.

        Parameters
        ----------
        cat : list of sources; entries are replaced in place by the kept
            model (or None when a source is dropped).
        Ibright : indices into *cat*, brightest first.
        B : results table; per-source columns are filled in, and the table
            may be replaced by a merged table when iterative detection
            finds new sources.

        Returns
        -------
        B : the (possibly merged) results table; B.sources holds the kept
            source objects (None for dropped sources).
        """
        # We compute & subtract initial models for the other sources while
        # fitting each source:
        # -Remember the original images
        # -Compute initial models for each source (in each tim)
        # -Subtract initial models from images
        # -During fitting, for each source:
        #   -add back in the source's initial model (to each tim)
        #   -fit, with Catalog([src])
        #   -subtract final model (from each tim)
        # -Replace original images
        models = SourceModels()
        # Remember original tim images
        models.save_images(self.tims)
        # Create initial models for each tim x each source
        models.create(self.tims, cat, subtract=True)
        N = len(cat)
        # Per-source model-selection outputs; the all_model_* columns are
        # dicts keyed by model name ('psf', 'rex', 'dev', 'exp', 'ser').
        B.dchisq = np.zeros((N, 5), np.float32)
        B.all_models = np.array([{} for i in range(N)])
        B.all_model_ivs = np.array([{} for i in range(N)])
        B.all_model_cpu = np.array([{} for i in range(N)])
        B.all_model_hit_limit = np.array([{} for i in range(N)])
        B.all_model_hit_r_limit = np.array([{} for i in range(N)])
        B.all_model_opt_steps = np.array([{} for i in range(N)])
        # Model selection for sources, in decreasing order of brightness
        for numi,srci in enumerate(Ibright):
            src = cat[srci]
            debug('Model selection for source %i of %i in blob %s; sourcei %i' %
                  (numi+1, len(Ibright), self.name, srci))
            cpu0 = time.process_time()
            if src.freezeparams:
                # Frozen (e.g. large-galaxy) sources are kept untouched.
                info('Frozen source', src, '-- keeping as-is!')
                B.sources[srci] = src
                continue
            # Add this source's initial model back in.
            models.add(srci, self.tims)
            if self.plots_single:
                import pylab as plt
                plt.figure(2)
                coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
                                        fill_holes=False)
                rgb = get_rgb(coimgs,self.bands)
                plt.imsave('blob-%s-%s-bdata.png' % (self.name, srci), rgb,
                           origin='lower')
                plt.figure(1)
            # Model selection for this source.
            keepsrc = self.model_selection_one_source(src, srci, models, B)
            # Definitely keep ref stars (Gaia & Tycho)
            if keepsrc is None and getattr(src, 'reference_star', False):
                info('Dropped reference star:', src)
                # Restore the flux saved at the start of run().
                src.brightness = src.initial_brightness
                info('Reset brightness to', src.brightness)
                src.force_keep_source = True
                keepsrc = src
            B.sources[srci] = keepsrc
            B.force_keep_source[srci] = getattr(keepsrc, 'force_keep_source', False)
            cat[srci] = keepsrc
            # Subtract the selected model so later sources fit residuals.
            models.update_and_subtract(srci, keepsrc, self.tims)
            if self.plots_single:
                plt.figure(2)
                coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
                                        fill_holes=False)
                dimshow(get_rgb(coimgs,self.bands), ticks=False)
                plt.savefig('blob-%s-%i-sub.png' % (self.name, srci))
                plt.figure(1)
            cpu1 = time.process_time()
            B.cpu_source[srci] += (cpu1 - cpu0)
        # At this point, we have subtracted our best model fits for each source
        # to be kept; the tims contain residual images.
        if iterative_detection:
            if self.plots and False:
                # One plot per tim is a little much, even for me...
                import pylab as plt
                for tim in self.tims:
                    plt.clf()
                    plt.suptitle('Iterative detection: %s' % tim.name)
                    plt.subplot(2,2,1)
                    plt.imshow(tim.getImage(), interpolation='nearest', origin='lower',
                               vmin=-5.*tim.sig1, vmax=10.*tim.sig1)
                    plt.title('image')
                    plt.subplot(2,2,2)
                    plt.imshow(tim.getImage(), interpolation='nearest', origin='lower')
                    plt.title('image')
                    plt.colorbar()
                    plt.subplot(2,2,3)
                    plt.imshow(tim.getInvError(), interpolation='nearest', origin='lower')
                    plt.title('inverr')
                    plt.colorbar()
                    plt.subplot(2,2,4)
                    plt.imshow(tim.getImage() * (tim.getInvError() > 0), interpolation='nearest', origin='lower')
                    plt.title('image*(inverr>0)')
                    plt.colorbar()
                    self.ps.savefig()
            Bnew = self.iterative_detection(B, models)
            if Bnew is not None:
                from astrometry.util.fits import merge_tables
                # B.sources is a list of objects... merge() with
                # fillzero doesn't handle them well.
                srcs = B.sources
                newsrcs = Bnew.sources
                B.delete_column('sources')
                Bnew.delete_column('sources')
                # also scalars don't work well
                iblob = B.iblob
                B.delete_column('iblob')
                B = merge_tables([B, Bnew], columns='fillzero')
                # columns not in Bnew:
                # {'safe_x0', 'safe_y0', 'started_in_blob'}
                B.sources = srcs + newsrcs
                B.iblob = iblob
        # Restore the original (unsubtracted) tim images.
        models.restore_images(self.tims)
        del models
        return B
def iterative_detection(self, Bold, models):
# Compute per-band detection maps
from scipy.ndimage.morphology import binary_dilation
from legacypipe.detection import sed_matched_filters, detection_maps, run_sed_matched_filters
from astrometry.util.multiproc import multiproc
if self.plots:
coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
fill_holes=False)
import pylab as plt
plt.clf()
dimshow(get_rgb(coimgs,self.bands), ticks=False)
plt.title('Iterative detection: residuals')
self.ps.savefig()
mp = multiproc()
detmaps,detivs,satmaps = detection_maps(
self.tims, self.blobwcs, self.bands, mp)
# from runbrick.py
satmaps = [binary_dilation(satmap > 0, iterations=4) for satmap in satmaps]
# Also compute detection maps on the (first-round) model images!
# save tim.images (= residuals at this point)
realimages = [tim.getImage() for tim in self.tims]
for itim,(tim,mods) in enumerate(zip(self.tims, models.models)):
modimg = np.zeros_like(tim.getImage())
for mod in mods:
if mod is None:
continue
mod.addTo(modimg)
if len(self.frozen_galaxy_mods):
modimg += self.frozen_galaxy_mods[itim]
tim.data = modimg
if self.plots:
coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
fill_holes=False)
import pylab as plt
plt.clf()
dimshow(get_rgb(coimgs,self.bands), ticks=False)
plt.title('Iterative detection: first-round models')
self.ps.savefig()
mod_detmaps,mod_detivs,_ = detection_maps(
self.tims, self.blobwcs, self.bands, mp)
# revert
for tim,img in zip(self.tims, realimages):
tim.data = img
if self.plots:
import pylab as plt
plt.clf()
dimshow(get_rgb(detmaps,self.bands), ticks=False)
plt.title('Iterative detection: detection maps')
self.ps.savefig()
plt.clf()
dimshow(get_rgb(mod_detmaps,self.bands), ticks=False)
plt.title('Iterative detection: model detection maps')
self.ps.savefig()
# if self.plots:
# import pylab as plt
# plt.clf()
# for det,div,b in zip(detmaps, detivs, self.bands):
# plt.hist((det * np.sqrt(div)).ravel(), range=(-5,10),
# bins=50, histtype='step', color=dict(z='m').get(b, b))
# plt.title('Detection pixel S/N')
# self.ps.savefig()
detlogger = logging.getLogger('legacypipe.detection')
detloglvl = detlogger.getEffectiveLevel()
detlogger.setLevel(detloglvl + 10)
SEDs = sed_matched_filters(self.bands)
# Avoid re-detecting sources at positions close to initial
# source positions (including ones that will get cut!)
avoid_x = Bold.safe_x0
avoid_y = Bold.safe_y0
avoid_r = np.zeros(len(avoid_x), np.float32) + 2.
nsigma = 6.
Tnew,_,_ = run_sed_matched_filters(
SEDs, self.bands, detmaps, detivs, (avoid_x,avoid_y,avoid_r),
self.blobwcs, nsigma=nsigma, saturated_pix=satmaps, veto_map=None,
plots=False, ps=None, mp=mp)
detlogger.setLevel(detloglvl)
if Tnew is None:
debug('No iterative sources detected!')
return None
debug('Found', len(Tnew), 'new sources')
Tnew.cut(self.refmap[Tnew.iby, Tnew.ibx] == 0)
debug('Cut to', len(Tnew), 'on refmap')
if len(Tnew) == 0:
return None
detsns = np.dstack([m*np.sqrt(iv) for m,iv in zip(detmaps, detivs)])
modsns = np.dstack([m*np.sqrt(iv) for m,iv in zip(mod_detmaps, mod_detivs)])
det_max = np.max(detsns[Tnew.iby, Tnew.ibx, :], axis=1)
mod_max = np.max(modsns[Tnew.iby, Tnew.ibx, :], axis=1)
det_sum = np.sum(detsns[Tnew.iby, Tnew.ibx, :], axis=1)
mod_sum = np.sum(modsns[Tnew.iby, Tnew.ibx, :], axis=1)
del detsns, modsns
if self.plots:
coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
fill_holes=False)
import pylab as plt
plt.clf()
dimshow(get_rgb(coimgs,self.bands), ticks=False)
ax = plt.axis()
crossa = dict(ms=10, mew=1.5)
rr = np.array([s.getPosition().ra for s in Bold.sources
if s is not None])
dd = np.array([s.getPosition().dec for s in Bold.sources
if s is not None])
_,xx,yy = self.blobwcs.radec2pixelxy(rr, dd)
plt.plot(Bold.safe_x0, Bold.safe_y0, 'o', ms=5, mec='r',
mfc='none', label='Avoid (r=2)')
plt.plot(xx-1, yy-1, 'r+', label='Old', **crossa)
plt.plot(Tnew.ibx, Tnew.iby, '+', color=(0,1,0), label='New',
**crossa)
plt.axis(ax)
plt.legend()
plt.title('Iterative detections')
self.ps.savefig()
plt.clf()
plt.loglog(mod_max, det_max, 'k.')
ax = plt.axis()
plt.plot([1e-3, 1e6], [1e-3, 1e6], 'b--', lw=3, alpha=0.3)
plt.axis(ax)
plt.xlabel('Model detection S/N: max')
plt.ylabel('Iterative detection S/N: max')
self.ps.savefig()
plt.clf()
plt.loglog(mod_sum, det_sum, 'k.')
ax = plt.axis()
plt.plot([1e-3, 1e6], [1e-3, 1e6], 'b--', lw=3, alpha=0.3)
plt.axis(ax)
plt.xlabel('Model detection S/N: sum')
plt.ylabel('Iterative detection S/N: sum')
self.ps.savefig()
plt.clf()
dimshow(get_rgb(coimgs,self.bands), ticks=False)
ax = plt.axis()
crossa = dict(ms=10, mew=1.5)
plt.plot(xx-1, yy-1, 'r+', label='Old', **crossa)
plt.plot(Tnew.ibx, Tnew.iby, '+', color=(0,1,0), label='New',
**crossa)
for x,y,r1,r2 in zip(Tnew.ibx, Tnew.iby, det_max/np.maximum(mod_max, 1.), det_sum/np.maximum(mod_sum, len(self.bands))):
plt.text(x, y, '%.1f, %.1f' % (r1,r2),
color='k', fontsize=10,
bbox=dict(facecolor='w', alpha=0.5))
plt.axis(ax)
plt.legend()
plt.title('Iterative detections')
self.ps.savefig()
B = 0.2
Tnew.cut(det_max > B * np.maximum(mod_max, 1.))
debug('Cut to', len(Tnew), 'iterative sources compared to model detection map')
if len(Tnew) == 0:
return None
info('Measuring', len(Tnew), 'iterative sources')
from tractor import NanoMaggies, RaDecPos
newsrcs = [PointSource(RaDecPos(t.ra, t.dec),
NanoMaggies(**dict([(b,1) for b in self.bands])))
for t in Tnew]
# Save
oldsrcs = self.srcs
self.srcs = newsrcs
Bnew = fits_table()
Bnew.sources = newsrcs
Bnew.Isrcs = np.array([-1]*len(Bnew))
Bnew.x0 = Tnew.ibx.astype(np.float32)
Bnew.y0 = Tnew.iby.astype(np.float32)
# Be quieter during iterative detection!
bloblogger = logging.getLogger('legacypipe.oneblob')
loglvl = bloblogger.getEffectiveLevel()
bloblogger.setLevel(loglvl + 10)
# Run the whole oneblob pipeline on the iterative sources!
Bnew = self.run(Bnew, iterative_detection=False, compute_metrics=False)
bloblogger.setLevel(loglvl)
# revert
self.srcs = oldsrcs
if len(Bnew) == 0:
return None
return Bnew
def model_selection_one_source(self, src, srci, models, B):
if self.bigblob:
mods = [mod[srci] for mod in models.models]
srctims,modelMasks = _get_subimages(self.tims, mods, src)
# Create a little local WCS subregion for this source, by
# resampling non-zero inverrs from the srctims into blobwcs
insrc = np.zeros((self.blobh,self.blobw), bool)
for tim in srctims:
try:
Yo,Xo,Yi,Xi,_ = resample_with_wcs(
self.blobwcs, tim.subwcs, intType=np.int16)
except OverlapError:
continue
insrc[Yo,Xo] |= (tim.inverr[Yi,Xi] > 0)
if np.sum(insrc) == 0:
# No source pixels touching blob... this can
# happen when a source scatters outside the blob
# in the fitting stage. Drop the source here.
return None
yin = np.max(insrc, axis=1)
xin = np.max(insrc, axis=0)
yl,yh = np.flatnonzero(yin)[np.array([0,-1])]
xl,xh = np.flatnonzero(xin)[np.array([0,-1])]
del insrc
srcwcs = self.blobwcs.get_subimage(xl, yl, 1+xh-xl, 1+yh-yl)
srcwcs_x0y0 = (xl, yl)
# A mask for which pixels in the 'srcwcs' square are occupied.
srcblobmask = self.blobmask[yl:yh+1, xl:xh+1]
else:
modelMasks = models.model_masks(srci, src)
srctims = self.tims
srcwcs = self.blobwcs
srcwcs_x0y0 = (0, 0)
srcblobmask = self.blobmask
if self.plots_per_source:
# This is a handy blob-coordinates plot of the data
# going into the fit.
import pylab as plt
plt.clf()
_,_,coimgs,_ = quick_coadds(srctims, self.bands,self.blobwcs,
fill_holes=False, get_cow=True)
dimshow(get_rgb(coimgs, self.bands))
ax = plt.axis()
pos = src.getPosition()
_,x,y = self.blobwcs.radec2pixelxy(pos.ra, pos.dec)
ix,iy = int(np.round(x-1)), int(np.round(y-1))
plt.plot(x-1, y-1, 'r+')
plt.axis(ax)
plt.title('Model selection: data')
self.ps.savefig()
# Mask out other sources while fitting this one, by
# finding symmetrized blobs of significant pixels
mask_others = True
if mask_others:
from legacypipe.detection import detection_maps
from astrometry.util.multiproc import multiproc
from scipy.ndimage.morphology import binary_dilation, binary_fill_holes
from scipy.ndimage.measurements import label
# Compute per-band detection maps
mp = multiproc()
detmaps,detivs,_ = detection_maps(
srctims, srcwcs, self.bands, mp)
# Compute the symmetric area that fits in this 'srcblobmask' region
pos = src.getPosition()
_,xx,yy = srcwcs.radec2pixelxy(pos.ra, pos.dec)
bh,bw = srcblobmask.shape
ix = int(np.clip(np.round(xx-1), 0, bw-1))
iy = int(np.clip(np.round(yy-1), 0, bh-1))
flipw = min(ix, bw-1-ix)
fliph = min(iy, bh-1-iy)
flipblobs = np.zeros(srcblobmask.shape, bool)
# The slice where we can perform symmetrization
slc = (slice(iy-fliph, iy+fliph+1),
slice(ix-flipw, ix+flipw+1))
# Go through the per-band detection maps, marking significant pixels
for i,(detmap,detiv) in enumerate(zip(detmaps,detivs)):
sn = detmap * np.sqrt(detiv)
# flipsn = np.zeros_like(sn)
# # Symmetrize
# flipsn[slc] = np.minimum(sn[slc],
# np.flipud(np.fliplr(sn[slc])))
# # just OR the detection maps per-band...
# flipblobs |= (flipsn > 5.)
# Symmetrize
sn[slc] = np.minimum(sn[slc],
np.flipud(np.fliplr(sn[slc])))
# just OR the detection maps per-band...
flipblobs |= (sn > 5.)
flipblobs = binary_fill_holes(flipblobs)
blobs,_ = label(flipblobs)
goodblob = blobs[iy,ix]
if self.plots_per_source and True:
# This plot is about the symmetric-blob definitions
# when fitting sources.
import pylab as plt
#from legacypipe.detection import plot_boundary_map
# plt.clf()
# for i,(band,detmap,detiv) in enumerate(zip(self.bands, detmaps, detivs)):
# if i >= 4:
# break
# detsn = detmap * np.sqrt(detiv)
# plt.subplot(2,2, i+1)
# mx = detsn.max()
# dimshow(detsn, vmin=-2, vmax=max(8, mx))
# ax = plt.axis()
# plot_boundary_map(detsn >= 5.)
# plt.plot(ix, iy, 'rx')
# plt.plot([ix-flipw, ix-flipw, ix+flipw, ix+flipw, ix-flipw],
# [iy-fliph, iy+fliph, iy+fliph, iy-fliph, iy-fliph], 'r-')
# plt.axis(ax)
# plt.title('det S/N: ' + band)
# plt.subplot(2,2,4)
# dimshow(flipblobs, vmin=0, vmax=1)
# plt.colorbar()
# ax = plt.axis()
# plot_boundary_map(blobs == goodblob)
# if binary_fill_holes(flipblobs)[iy,ix]:
# fb = (blobs == goodblob)
# di = binary_dilation(fb, iterations=4)
# if np.any(di):
# plot_boundary_map(di, rgb=(255,0,0))
# plt.plot(ix, iy, 'rx')
# plt.plot([ix-flipw, ix-flipw, ix+flipw, ix+flipw, ix-flipw],
# [iy-fliph, iy+fliph, iy+fliph, iy-fliph, iy-fliph], 'r-')
# plt.axis(ax)
# plt.title('good blob')
# self.ps.savefig()
plt.clf()
plt.subplot(2,2,1)
dimshow(blobs)
plt.colorbar()
plt.title('blob map; goodblob=%i' % goodblob)
plt.subplot(2,2,2)
dimshow(flipblobs, vmin=0, vmax=1)
plt.colorbar()
plt.title('symmetric blob mask: 1 = good; red=symm')
ax = plt.axis()
plt.plot(ix, iy, 'rx')
plt.plot([ix-flipw-0.5, ix-flipw-0.5, ix+flipw+0.5, ix+flipw+0.5, ix-flipw-0.5],
[iy-fliph-0.5, iy+fliph+0.5, iy+fliph+0.5, iy-fliph-0.5, iy-fliph-0.5], 'r-')
plt.axis(ax)
plt.subplot(2,2,3)
dh,dw = flipblobs.shape
sx0,sy0 = srcwcs_x0y0
mysegmap = self.segmap[sy0:sy0+dh, sx0:sx0+dw]
# renumber for plotting
_,S = np.unique(mysegmap, return_inverse=True)
dimshow(S.reshape(mysegmap.shape), cmap='tab20',
interpolation='nearest', origin='lower')
ax = plt.axis()
plt.plot(ix, iy, 'kx', ms=15, mew=3)
plt.axis(ax)
plt.title('Segmentation map')
plt.subplot(2,2,4)
dilated = binary_dilation(flipblobs, iterations=4)
s = self.segmap[iy + sy0, ix + sx0]
if s != -1:
dilated *= (self.segmap[sy0:sy0+dh, sx0:sx0+dw] == s)
dimshow(dilated)
if s != -1:
plt.title('Dilated goodblob * Segmentation map')
else:
plt.title('Dilated goodblob (no Segmentation map)')
self.ps.savefig()
# If there is no longer a source detected at the original source
# position, we want to drop this source. However, saturation can
# cause there to be no detection S/N because of masking, so do
# a hole-fill before checking.
if not flipblobs[iy,ix]:
# The hole-fill can still fail (eg, in small test images) if
# the bleed trail splits the blob into two pieces.
# Skip this test for reference sources.
if is_reference_source(src):
debug('Reference source center is outside symmetric blob; keeping')
else:
debug('Source center is not in the symmetric blob mask; skipping')
return None
if goodblob != 0:
flipblobs = (blobs == goodblob)
dilated = binary_dilation(flipblobs, iterations=4)
if not np.any(dilated):
debug('No pixels in dilated symmetric mask')
return None
dh,dw = flipblobs.shape
sx0,sy0 = srcwcs_x0y0
s = self.segmap[iy + sy0, ix + sx0]
if s != -1:
dilated *= (self.segmap[sy0:sy0+dh, sx0:sx0+dw] == s)
if not np.any(dilated):
debug('No pixels in segmented dilated symmetric mask')
return None
yin = np.max(dilated, axis=1)
xin = np.max(dilated, axis=0)
yl,yh = np.flatnonzero(yin)[np.array([0,-1])]
xl,xh = np.flatnonzero(xin)[np.array([0,-1])]
(oldx0,oldy0) = srcwcs_x0y0
srcwcs = srcwcs.get_subimage(xl, yl, 1+xh-xl, 1+yh-yl)
srcwcs_x0y0 = (oldx0 + xl, oldy0 + yl)
srcblobmask = srcblobmask[yl:yh+1, xl:xh+1]
dilated = dilated[yl:yh+1, xl:xh+1]
flipblobs = flipblobs[yl:yh+1, xl:xh+1]
saved_srctim_ies = []
keep_srctims = []
mm = []
totalpix = 0
for tim in srctims:
# Zero out inverse-errors for all pixels outside
# 'dilated'.
try:
Yo,Xo,Yi,Xi,_ = resample_with_wcs(
tim.subwcs, srcwcs, intType=np.int16)
except OverlapError:
continue
ie = tim.getInvError()
newie = np.zeros_like(ie)
good, = np.nonzero(dilated[Yi,Xi] * (ie[Yo,Xo] > 0))
if len(good) == 0:
debug('Tim has inverr all == 0')
continue
yy = Yo[good]
xx = Xo[good]
newie[yy,xx] = ie[yy,xx]
xl,xh = xx.min(), xx.max()
yl,yh = yy.min(), yy.max()
totalpix += len(xx)
d = { src: ModelMask(xl, yl, 1+xh-xl, 1+yh-yl) }
mm.append(d)
saved_srctim_ies.append(ie)
tim.inverr = newie
keep_srctims.append(tim)
srctims = keep_srctims
modelMasks = mm
B.blob_symm_nimages[srci] = len(srctims)
B.blob_symm_npix[srci] = totalpix
sh,sw = srcwcs.shape
B.blob_symm_width [srci] = sw
B.blob_symm_height[srci] = sh
# if self.plots_per_source:
# from legacypipe.detection import plot_boundary_map
# plt.clf()
# dimshow(get_rgb(coimgs, self.bands))
# ax = plt.axis()
# plt.plot(x-1, y-1, 'r+')
# plt.axis(ax)
# sx0,sy0 = srcwcs_x0y0
# sh,sw = srcwcs.shape
# ext = [sx0, sx0+sw, sy0, sy0+sh]
# plot_boundary_map(flipblobs, rgb=(255,255,255), extent=ext)
# plot_boundary_map(dilated, rgb=(0,255,0), extent=ext)
# plt.title('symmetrized blobs')
# self.ps.savefig()
# nil,nil,coimgs,nil = quick_coadds(
# srctims, self.bands, self.blobwcs,
# fill_holes=False, get_cow=True)
# dimshow(get_rgb(coimgs, self.bands))
# ax = plt.axis()
# plt.plot(x-1, y-1, 'r+')
# plt.axis(ax)
# plt.title('Symmetric-blob masked')
# self.ps.savefig()
# plt.clf()
# for tim in srctims:
# ie = tim.getInvError()
# sigmas = (tim.getImage() * ie)[ie > 0]
# plt.hist(sigmas, range=(-5,5), bins=21, histtype='step')
# plt.axvline(np.mean(sigmas), alpha=0.5)
# plt.axvline(0., color='k', lw=3, alpha=0.5)
# plt.xlabel('Image pixels (sigma)')
# plt.title('Symmetrized pixel values')
# self.ps.savefig()
# # plot the modelmasks for each tim.
# plt.clf()
# R = int(np.floor(np.sqrt(len(srctims))))
# C = int(np.ceil(len(srctims) / float(R)))
# for i,tim in enumerate(srctims):
# plt.subplot(R, C, i+1)
# msk = modelMasks[i][src].mask
# print('Mask:', msk)
# if msk is None:
# continue
# plt.imshow(msk, interpolation='nearest', origin='lower', vmin=0, vmax=1)
# plt.title(tim.name)
# plt.suptitle('Model Masks')
# self.ps.savefig()
if self.bigblob and self.plots_per_source:
# This is a local source-WCS plot of the data going into the
# fit.
plt.clf()
coimgs,_ = quick_coadds(srctims, self.bands, srcwcs, fill_holes=False)
dimshow(get_rgb(coimgs, self.bands))
plt.title('Model selection: stage1 data (srcwcs)')
self.ps.savefig()
srctractor = self.tractor(srctims, [src])
srctractor.setModelMasks(modelMasks)
srccat = srctractor.getCatalog()
is_galaxy = isinstance(src, Galaxy)
force_pointsource = B.forced_pointsource[srci]
fit_background = B.fit_background[srci]
_,ix,iy = srcwcs.radec2pixelxy(src.getPosition().ra,
src.getPosition().dec)
ix = int(ix-1)
iy = int(iy-1)
# Start in blob
sh,sw = srcwcs.shape
if is_galaxy:
# allow SGA galaxy sources to start outside the blob
pass
elif ix < 0 or iy < 0 or ix >= sw or iy >= sh or not srcblobmask[iy,ix]:
debug('Source is starting outside blob -- skipping.')
if mask_others:
for ie,tim in zip(saved_srctim_ies, srctims):
tim.inverr = ie
return None
if is_galaxy:
# SGA galaxy: set the maximum allowed r_e.
known_galaxy_logrmax = 0.
if isinstance(src, (DevGalaxy,ExpGalaxy, SersicGalaxy)):
print('Known galaxy. Initial shape:', src.shape)
# MAGIC 2. = factor by which r_e is allowed to grow for an SGA galaxy.
known_galaxy_logrmax = np.log(src.shape.re * 2.)
else:
print('WARNING: unknown galaxy type:', src)
x0,y0 = srcwcs_x0y0
debug('Source at blob coordinates', x0+ix, y0+iy, '- forcing pointsource?', force_pointsource, ', is large galaxy?', is_galaxy, ', fitting sky background:', fit_background)
if fit_background:
for tim in srctims:
tim.freezeAllBut('sky')
srctractor.thawParam('images')
skyparams = srctractor.images.getParams()
enable_galaxy_cache()
# Compute the log-likehood without a source here.
srccat[0] = None
if fit_background:
srctractor.optimize_loop(**self.optargs)
if self.plots_per_source:
model_mod_rgb = {}
model_resid_rgb = {}
# the "none" model
modimgs = list(srctractor.getModelImages())
co,_ = quick_coadds(srctims, self.bands, srcwcs, images=modimgs)
rgb = get_rgb(co, self.bands)
model_mod_rgb['none'] = rgb
res = [(tim.getImage() - mod) for tim,mod in zip(srctims, modimgs)]
co,_ = quick_coadds(srctims, self.bands, srcwcs, images=res)
rgb = get_rgb(co, self.bands)
model_resid_rgb['none'] = rgb
chisqs_none = _per_band_chisqs(srctractor, self.bands)
nparams = dict(psf=2, rex=3, exp=5, dev=5, ser=6)
# This is our "upgrade" threshold: how much better a galaxy
# fit has to be versus psf
galaxy_margin = 3.**2 + (nparams['exp'] - nparams['psf'])
# *chisqs* is actually chi-squared improvement vs no source;
# larger is a better fit.
chisqs = dict(none=0)
oldmodel, psf, rex, dev, exp = _initialize_models(src)
ser = None
trymodels = [('psf', psf)]
if oldmodel == 'psf':
if getattr(src, 'forced_point_source', False):
# This is set in the GaiaSource contructor from
# gaia.pointsource
debug('Gaia source is forced to be a point source -- not trying other models')
elif force_pointsource:
# Geometric mask
debug('Not computing galaxy models due to being in a mask')
else:
trymodels.append(('rex', rex))
# Try galaxy models if rex > psf, or if bright.
# The 'gals' model is just a marker
trymodels.append(('gals', None))
else:
# If the source was initialized as a galaxy, try all models
trymodels.extend([('rex', rex), ('dev', dev), ('exp', exp),
('ser', None)])
cputimes = {}
for name,newsrc in trymodels:
cpum0 = time.process_time()
if name == 'gals':
# If 'rex' was better than 'psf', or the source is
# bright, try the galaxy models.
chi_rex = chisqs.get('rex', 0)
chi_psf = chisqs.get('psf', 0)
margin = 1. # 1 parameter
if chi_rex > (chi_psf+margin) or max(chi_psf, chi_rex) > 400:
trymodels.extend([
('dev', dev), ('exp', exp), ('ser', None)])
continue
if name == 'ser' and newsrc is None:
# Start at the better of exp or dev.
smod = _select_model(chisqs, nparams, galaxy_margin)
if smod not in ['dev', 'exp']:
continue
if smod == 'dev':
newsrc = ser = SersicGalaxy(
dev.getPosition().copy(), dev.getBrightness().copy(),
dev.getShape().copy(), LegacySersicIndex(4.))
elif smod == 'exp':
newsrc = ser = SersicGalaxy(
exp.getPosition().copy(), exp.getBrightness().copy(),
exp.getShape().copy(), LegacySersicIndex(1.))
#print('Initialized SER model:', newsrc)
srccat[0] = newsrc
# Set maximum galaxy model sizes
if is_galaxy:
# This is a known large galaxy -- set max size based on initial size.
logrmax = known_galaxy_logrmax
if name in ('rex', 'exp', 'dev', 'ser'):
newsrc.shape.setMaxLogRadius(logrmax)
else:
# FIXME -- could use different fractions for deV vs exp (or comp)
fblob = 0.8
sh,sw = srcwcs.shape
logrmax = np.log(fblob * max(sh, sw) * self.pixscale)
if name in ['rex', 'exp', 'dev', 'ser']:
if logrmax < newsrc.shape.getMaxLogRadius():
newsrc.shape.setMaxLogRadius(logrmax)
# Use the same modelMask shapes as the original source ('src').
# Need to create newsrc->mask mappings though:
mm = remap_modelmask(modelMasks, src, newsrc)
srctractor.setModelMasks(mm)
enable_galaxy_cache()
if fit_background:
# Reset sky params
srctractor.images.setParams(skyparams)
srctractor.thawParam('images')
# First-round optimization (during model selection)
R = srctractor.optimize_loop(**self.optargs)
#print('Fit result:', newsrc)
#print('Steps:', R['steps'])
hit_limit = R.get('hit_limit', False)
opt_steps = R.get('steps', -1)
hit_ser_limit = False
hit_r_limit = False
if hit_limit:
debug('Source', newsrc, 'hit limit:')
if is_debug():
for nm,p,low,upp in zip(newsrc.getParamNames(), newsrc.getParams(),
newsrc.getLowerBounds(), newsrc.getUpperBounds()):
debug(' ', nm, '=', p, 'bounds', low, upp)
if name == 'ser':
si = newsrc.sersicindex
sival = si.getValue()
# Can end up close, but not exactly at a limit...
if min(sival - si.lower, si.upper - sival) < 1e-3:
hit_ser_limit = True
debug('Hit sersic limit')
if name in ['rex', 'exp', 'dev', 'ser']:
shape = newsrc.shape
logr = shape.logre
if min(logr - shape.getLowerBounds()[0],
shape.getUpperBounds()[0] - logr) < 0.01:
hit_r_limit = True
debug('Hit radius limit')
_,ix,iy = srcwcs.radec2pixelxy(newsrc.getPosition().ra,
newsrc.getPosition().dec)
ix = int(ix-1)
iy = int(iy-1)
sh,sw = srcblobmask.shape
if is_galaxy:
# Allow (SGA) galaxies to exit the blob
pass
elif ix < 0 or iy < 0 or ix >= sw or iy >= sh or not srcblobmask[iy,ix]:
# Exited blob!
debug('Source exited sub-blob!')
if mask_others:
for ie,tim in zip(saved_srctim_ies, srctims):
tim.inverr = ie
continue
disable_galaxy_cache()
if self.plots_per_source:
# save RGB images for the model
modimgs = list(srctractor.getModelImages())
co,_ = quick_coadds(srctims, self.bands, srcwcs, images=modimgs)
rgb = get_rgb(co, self.bands)
model_mod_rgb[name] = rgb
res = [(tim.getImage() - mod) for tim,mod in zip(srctims, modimgs)]
co,_ = quick_coadds(srctims, self.bands, srcwcs, images=res)
rgb = get_rgb(co, self.bands)
model_resid_rgb[name] = rgb
# Compute inverse-variances for each source.
# Convert to "vanilla" ellipse parameterization
# (but save old shapes first)
# we do this (rather than making a copy) because we want to
# use the same modelMask maps.
if isinstance(newsrc, (DevGalaxy, ExpGalaxy, SersicGalaxy)):
oldshape = newsrc.shape
if fit_background:
# We have to freeze the sky here before computing
# uncertainties
srctractor.freezeParam('images')
nsrcparams = newsrc.numberOfParams()
_convert_ellipses(newsrc)
assert(newsrc.numberOfParams() == nsrcparams)
# Compute a very approximate "fracin" metric (fraction of
# flux in masked model image versus total flux of model),
# to avoid wild extrapolation when nearly unconstrained.
fracin = dict([(b, []) for b in self.bands])
fluxes = dict([(b, newsrc.getBrightness().getFlux(b))
for b in self.bands])
for tim,mod in zip(srctims, srctractor.getModelImages(sky=False)):
f = (mod * (tim.getInvError() > 0)).sum() / fluxes[tim.band]
fracin[tim.band].append(f)
for band in self.bands:
if len(fracin[band]) == 0:
continue
f = np.mean(fracin[band])
if f < 1e-6:
debug('Source', newsrc, ': setting flux in band', band,
'to zero based on fracin = %.3g' % f)
newsrc.getBrightness().setFlux(band, 0.)
# Compute inverse-variances
# This uses the second-round modelMasks.
allderivs = srctractor.getDerivs()
ivars = _compute_invvars(allderivs)
assert(len(ivars) == nsrcparams)
# If any fluxes have zero invvar, zero out the flux.
params = newsrc.getParams()
reset = False
for i,(pname,iv) in enumerate(zip(newsrc.getParamNames(), ivars)):
if iv == 0:
debug('Zeroing out flux', pname, 'based on iv==0')
params[i] = 0.
reset = True
if reset:
newsrc.setParams(params)
allderivs = srctractor.getDerivs()
ivars = _compute_invvars(allderivs)
assert(len(ivars) == nsrcparams)
B.all_model_ivs[srci][name] = np.array(ivars).astype(np.float32)
B.all_models[srci][name] = newsrc.copy()
assert(B.all_models[srci][name].numberOfParams() == nsrcparams)
# Now revert the ellipses!
if isinstance(newsrc, (DevGalaxy, ExpGalaxy, SersicGalaxy)):
newsrc.shape = oldshape
# Use the original 'srctractor' here so that the different
# models are evaluated on the same pixels.
ch = _per_band_chisqs(srctractor, self.bands)
chisqs[name] = _chisq_improvement(newsrc, ch, chisqs_none)
cpum1 = time.process_time()
B.all_model_cpu[srci][name] = cpum1 - cpum0
cputimes[name] = cpum1 - cpum0
B.all_model_hit_limit [srci][name] = hit_limit
B.all_model_hit_r_limit[srci][name] = hit_r_limit
B.all_model_opt_steps [srci][name] = opt_steps
if name == 'ser':
B.hit_ser_limit[srci] = hit_ser_limit
if mask_others:
for tim,ie in zip(srctims, saved_srctim_ies):
# revert tim to original (unmasked-by-others)
tim.inverr = ie
# After model selection, revert the sky
# (srctims=tims when not bigblob)
if fit_background:
srctractor.images.setParams(skyparams)
# Actually select which model to keep. The MODEL_NAMES
# array determines the order of the elements in the DCHISQ
# column of the catalog.
keepmod = _select_model(chisqs, nparams, galaxy_margin)
keepsrc = {'none':None, 'psf':psf, 'rex':rex,
'dev':dev, 'exp':exp, 'ser':ser}[keepmod]
bestchi = chisqs.get(keepmod, 0.)
B.dchisq[srci, :] = np.array([chisqs.get(k,0) for k in MODEL_NAMES])
#print('Keeping model', keepmod, '(chisqs: ', chisqs, ')')
if keepsrc is not None and bestchi == 0.:
# Weird edge case, or where some best-fit fluxes go
# negative. eg
# https://github.com/legacysurvey/legacypipe/issues/174
debug('Best dchisq is 0 -- dropping source')
keepsrc = None
B.hit_limit [srci] = B.all_model_hit_limit [srci].get(keepmod, False)
B.hit_r_limit [srci] = B.all_model_hit_r_limit [srci].get(keepmod, False)
if keepmod != 'ser':
B.hit_ser_limit[srci] = False
# This is the model-selection plot
if self.plots_per_source:
import pylab as plt
plt.clf()
rows,cols = 3, 6
modnames = ['none', 'psf', 'rex', 'dev', 'exp', 'ser']
# Top-left: image
plt.subplot(rows, cols, 1)
coimgs,_ = quick_coadds(srctims, self.bands, srcwcs)
rgb = get_rgb(coimgs, self.bands)
dimshow(rgb, ticks=False)
# next over: rgb with same stretch as models
plt.subplot(rows, cols, 2)
rgb = get_rgb(coimgs, self.bands)
dimshow(rgb, ticks=False)
for imod,modname in enumerate(modnames):
if modname != 'none' and not modname in chisqs:
continue
axes = []
# Second row: models
plt.subplot(rows, cols, 1+imod+1*cols)
rgb = model_mod_rgb[modname]
dimshow(rgb, ticks=False)
axes.append(plt.gca())
plt.title(modname)
# Third row: residuals (not chis)
plt.subplot(rows, cols, 1+imod+2*cols)
rgb = model_resid_rgb[modname]
dimshow(rgb, ticks=False)
axes.append(plt.gca())
plt.title('chisq %.0f' % chisqs[modname], fontsize=8)
# Highlight the model to be kept
if modname == keepmod:
for ax in axes:
for spine in ax.spines.values():
spine.set_edgecolor('red')
spine.set_linewidth(2)
plt.suptitle('Blob %s, src %i (psf: %s, fitbg: %s): keep %s\n%s\nwas: %s' %
(self.name, srci, force_pointsource, fit_background,
keepmod, str(keepsrc), str(src)), fontsize=10)
self.ps.savefig()
return keepsrc
def _optimize_individual_sources(self, tr, cat, Ibright, cputime):
    """Optimize each source in *cat* one at a time, without subtracting
    the other sources' models from the images.

    tr: tractor object used for the fits; its model masks are set per source.
    cat: source catalog; all params are frozen except the source being fit.
    Ibright: indices into *cat* giving the fitting order (presumably
        brightest-first -- confirm against caller).
    cputime: per-source accumulator; entry i is incremented by the CPU
        time spent fitting source i.
    """
    # Single source (though this is coded to handle multiple sources)
    # Fit sources one at a time, but don't subtract other models
    cat.freezeAllParams()
    models = SourceModels()
    # Compute (but do not subtract) the initial model patch per tim x source.
    models.create(self.tims, cat)
    enable_galaxy_cache()
    for i in Ibright:
        cpu0 = time.process_time()
        cat.freezeAllBut(i)
        src = cat[i]
        if src.freezeparams:
            # Sources flagged 'freezeparams' are kept exactly as-is.
            debug('Frozen source', src, '-- keeping as-is!')
            continue
        # NOTE(review): model index 0 (not i) -- consistent with the
        # single-source case noted above; verify if ever used with
        # multiple sources.
        modelMasks = models.model_masks(0, cat[i])
        tr.setModelMasks(modelMasks)
        tr.optimize_loop(**self.optargs)
        cpu1 = time.process_time()
        cputime[i] += (cpu1 - cpu0)
    tr.setModelMasks(None)
    disable_galaxy_cache()
def tractor(self, tims, cat):
    """Build a Tractor over *tims* and *cat* (with self.trargs),
    with the image calibration parameters frozen."""
    engine = Tractor(tims, cat, **self.trargs)
    engine.freezeParams('images')
    return engine
def _optimize_individual_sources_subtract(self, cat, Ibright,
                                          cputime):
    """Fit sources one at a time against images with all *other* sources'
    models subtracted.

    Protocol (order matters):
    # -Remember the original images
    # -Compute initial models for each source (in each tim)
    # -Subtract initial models from images
    # -During fitting, for each source:
    #   -add back in the source's initial model (to each tim)
    #   -fit, with Catalog([src])
    #   -subtract final model (from each tim)
    # -Replace original images

    cat: source catalog.
    Ibright: indices into *cat* giving the fitting order.
    cputime: per-source accumulator of CPU seconds spent fitting.
    """
    models = SourceModels()
    # Remember original tim images
    models.save_images(self.tims)
    # Create & subtract initial models for each tim x each source
    models.create(self.tims, cat, subtract=True)
    # For sources, in decreasing order of brightness
    for numi,srci in enumerate(Ibright):
        cpu0 = time.process_time()
        src = cat[srci]
        if src.freezeparams:
            # Sources flagged 'freezeparams' are kept exactly as-is.
            debug('Frozen source', src, '-- keeping as-is!')
            continue
        debug('Fitting source', srci, '(%i of %i in blob %s)' %
              (numi+1, len(Ibright), self.name), ':', src)
        # Add this source's initial model back in.
        models.add(srci, self.tims)
        from tractor import Galaxy
        is_galaxy = isinstance(src, Galaxy)
        if is_galaxy:
            # During SGA pre-burns, limit initial positions (fit
            # other parameters), to avoid problems like NGC0943,
            # where one galaxy in a pair moves a large distance to
            # fit the overall light profile.
            ra,dec = src.pos.getParams()
            cosdec = np.cos(np.deg2rad(dec))
            # max allowed motion in deg
            maxmove = 5. / 3600.
            src.pos.lowers = [ra - maxmove/cosdec, dec - maxmove]
            src.pos.uppers = [ra + maxmove/cosdec, dec + maxmove]
        if self.bigblob:
            # Create super-local sub-sub-tims around this source
            # Make the subimages the same size as the modelMasks.
            mods = [mod[srci] for mod in models.models]
            srctims,modelMasks = _get_subimages(self.tims, mods, src)
            # We plot only the first & last three sources
            if self.plots_per_source and (numi < 3 or numi >= len(Ibright)-3):
                import pylab as plt
                plt.clf()
                # Recompute coadds because of the subtract-all-and-readd shuffle
                coimgs,_ = quick_coadds(self.tims, self.bands, self.blobwcs,
                                        fill_holes=False)
                rgb = get_rgb(coimgs, self.bands)
                dimshow(rgb)
                ax = plt.axis()
                # Outline each sub-tim in blob pixel coordinates.
                for tim in srctims:
                    h,w = tim.shape
                    tx,ty = [0,0,w,w,0], [0,h,h,0,0]
                    rd = [tim.getWcs().pixelToPosition(xi,yi)
                          for xi,yi in zip(tx,ty)]
                    ra = [p.ra for p in rd]
                    dec = [p.dec for p in rd]
                    _,x,y = self.blobwcs.radec2pixelxy(ra, dec)
                    plt.plot(x, y, 'b-')
                    ra,dec = tim.subwcs.pixelxy2radec(tx, ty)
                    _,x,y = self.blobwcs.radec2pixelxy(ra, dec)
                    plt.plot(x, y, 'c-')
                plt.title('source %i of %i' % (numi, len(Ibright)))
                plt.axis(ax)
                self.ps.savefig()
        else:
            srctims = self.tims
            modelMasks = models.model_masks(srci, src)
        srctractor = self.tractor(srctims, [src])
        srctractor.setModelMasks(modelMasks)
        # First-round optimization
        #print('First-round initial log-prob:', srctractor.getLogProb())
        srctractor.optimize_loop(**self.optargs)
        #print('First-round final log-prob:', srctractor.getLogProb())
        if is_galaxy:
            # Drop limits on SGA positions
            src.pos.lowers = [None, None]
            src.pos.uppers = [None, None]
        # Re-remove the final fit model for this source
        models.update_and_subtract(srci, src, self.tims)
        srctractor.setModelMasks(None)
        disable_galaxy_cache()
        debug('Finished fitting:', src)
        cpu1 = time.process_time()
        cputime[srci] += (cpu1 - cpu0)
    models.restore_images(self.tims)
    del models
def _fit_fluxes(self, cat, tims, bands, fitcat=None):
    """Forced-photometry step: fit only the per-band fluxes of the
    sources, one band at a time, with all other parameters frozen.

    cat: full catalog (used only for the count in the debug message
        and as the default pool for *fitcat*).
    tims: images; only those matching the current band are fit.
    bands: band names to fit, in order.
    fitcat: subset of sources to fit; defaults to the non-frozen
        members of *cat*.
    """
    if fitcat is None:
        fitcat = [src for src in cat if not src.freezeparams]
    if len(fitcat) == 0:
        return
    for src in fitcat:
        src.freezeAllBut('brightness')
    debug('Fitting fluxes for %i of %i sources' % (len(fitcat), len(cat)))
    for b in bands:
        for src in fitcat:
            src.getBrightness().freezeAllBut(b)
        # Images for this band
        btims = [tim for tim in tims if tim.band == b]
        btr = self.tractor(btims, fitcat)
        try:
            # Prefer the Ceres-based optimizer when it is importable;
            # fall back to the LSQR optimizer otherwise.
            from tractor.ceres_optimizer import CeresOptimizer
            ceres_block = 8
            btr.optimizer = CeresOptimizer(BW=ceres_block, BH=ceres_block)
        except ImportError:
            from tractor.lsqr_optimizer import LsqrOptimizer
            btr.optimizer = LsqrOptimizer()
        btr.optimize_forced_photometry(shared_params=False, wantims=False)
    # Thaw everything we froze above.
    for src in fitcat:
        src.thawAllParams()
def _plots(self, tr, title):
    """Plot the tractor's current model images via _plot_mods, under the
    given *title* (chi plots disabled)."""
    plotmods = []
    plotmodnames = []
    plotmods.append(list(tr.getModelImages()))
    plotmodnames.append(title)
    # Drop any cached resampling info before and after plotting;
    # presumably _plot_mods recomputes it -- confirm.
    for tim in tr.images:
        if hasattr(tim, 'resamp'):
            del tim.resamp
    _plot_mods(tr.images, plotmods, self.blobwcs, plotmodnames, self.bands,
               None, None, None,
               self.blobw, self.blobh, self.ps, chi_plots=False)
    for tim in tr.images:
        if hasattr(tim, 'resamp'):
            del tim.resamp
def _plot_coadd(self, tims, wcs, model=None, resid=None, addnoise=False):
    """Display an RGB coadd of *tims* in *wcs*.

    If *resid* (a tractor object) is given, show its chi images with the
    residual color stretch and return.  Otherwise show either the data
    (model=None) or the model images of *model* (a tractor object),
    optionally with noise added.
    """
    if resid is not None:
        mods = list(resid.getChiImages())
        coimgs,_ = quick_coadds(tims, self.bands, wcs, images=mods,
                                fill_holes=False)
        dimshow(get_rgb(coimgs, self.bands, **rgbkwargs_resid))
        return
    mods = None
    if model is not None:
        mods = list(model.getModelImages())
    coimgs,_ = quick_coadds(tims, self.bands, wcs, images=mods,
                            fill_holes=False, addnoise=addnoise)
    dimshow(get_rgb(coimgs, self.bands))
def _initial_plots(self):
    """Plot the blob image with initial source positions overlaid.

    Marks all sources, sources whose center pixel is saturated, and
    reference sources.  Also writes a standalone data PNG when
    self.plots_single is set.  Caches the data RGB image as self.rgb.
    """
    import pylab as plt
    debug('Plotting blob image for blob', self.name)
    coimgs,_,sat = quick_coadds(self.tims, self.bands, self.blobwcs,
                                fill_holes=False, get_saturated=True)
    self.rgb = get_rgb(coimgs, self.bands)
    plt.clf()
    dimshow(self.rgb)
    plt.title('Blob: %s' % self.name)
    self.ps.savefig()
    if self.plots_single:
        plt.figure(2)
        dimshow(self.rgb, ticks=False)
        plt.savefig('blob-%s-data.png' % (self.name))
        plt.figure(1)
    # Source positions in blob pixel coordinates.
    _,x0,y0 = self.blobwcs.radec2pixelxy(
        np.array([src.getPosition().ra for src in self.srcs]),
        np.array([src.getPosition().dec for src in self.srcs]))
    h,w = sat.shape
    # Clip to image bounds; the -1 converts from the WCS's 1-indexed
    # pixels to array indices.
    ix = np.clip(np.round(x0)-1, 0, w-1).astype(int)
    iy = np.clip(np.round(y0)-1, 0, h-1).astype(int)
    srcsat = sat[iy,ix]
    ax = plt.axis()
    plt.plot(x0-1, y0-1, 'r.', label='Sources')
    if len(srcsat):
        plt.plot(x0[srcsat]-1, y0[srcsat]-1, 'o', mec='orange', mfc='none', ms=5, mew=2,
                 label='SATUR at center')
    # ref sources
    Ir = np.flatnonzero([is_reference_source(src) for src in self.srcs])
    if len(Ir):
        plt.plot(x0[Ir]-1, y0[Ir]-1, 'o', mec='g', mfc='none', ms=8, mew=2,
                 label='Ref source')
    plt.axis(ax)
    plt.title('initial sources')
    plt.legend()
    self.ps.savefig()
def create_tims(self, timargs):
    """Build local tractor Image objects ('tims') from the plain tuples
    in *timargs*.

    Each tuple unpacks to (img, inverr, dq, twcs, wcsobj, pcal, sky,
    subpsf, name, band, sig1, imobj).  Images that do not overlap the
    blob WCS are skipped; inverse-errors outside the blob mask are
    zeroed so those pixels carry no weight in fits.
    """
    from legacypipe.bits import DQ_BITS
    # In order to make multiprocessing easier, the one_blob method
    # is passed all the ingredients to make local tractor Images
    # rather than the Images themselves. Here we build the
    # 'tims'.
    tims = []
    for (img, inverr, dq, twcs, wcsobj, pcal, sky, subpsf, name,
         band, sig1, imobj) in timargs:
        # Mask out inverr for pixels that are not within the blob.
        try:
            Yo,Xo,Yi,Xi,_ = resample_with_wcs(wcsobj, self.blobwcs,
                                              intType=np.int16)
        except OverlapError:
            # No overlap with the blob: skip this image entirely.
            continue
        if len(Yo) == 0:
            continue
        inverr2 = np.zeros_like(inverr)
        I = np.flatnonzero(self.blobmask[Yi,Xi])
        inverr2[Yo[I],Xo[I]] = inverr[Yo[I],Xo[I]]
        inverr = inverr2
        # If the subimage (blob) is small enough, instantiate a
        # constant PSF model in the center.
        h,w = img.shape
        if h < 400 and w < 400:
            subpsf = subpsf.constantPsfAt(w/2., h/2.)
        tim = Image(data=img, inverr=inverr, wcs=twcs,
                    psf=subpsf, photocal=pcal, sky=sky, name=name)
        tim.band = band
        tim.sig1 = sig1
        tim.subwcs = wcsobj
        tim.meta = imobj
        # FWHM (pixels) -> Gaussian sigma.
        tim.psf_sigma = imobj.fwhm / 2.35
        tim.dq = dq
        tim.dq_saturation_bits = DQ_BITS['satur']
        tims.append(tim)
    return tims
def _set_kingdoms(segmap, radius, I, ix, iy):
'''
radius: int
ix,iy: int arrays
I: indices into ix,iy that will be placed into 'segmap'
'''
# ensure that each source owns a tiny radius around its center
# in the segmentation map. If there is more than one source
# in that radius, each pixel gets assigned to its nearest
# source.
# 'kingdom' records the current distance to nearest source
assert(radius < 255)
kingdom = np.empty(segmap.shape, np.uint8)
kingdom[:,:,] = 255
H,W = segmap.shape
xcoords = np.arange(W)
ycoords = np.arange(H)
for i in I:
x,y = ix[i], iy[i]
yslc = slice(max(0, y-radius), min(H, y+radius+1))
xslc = slice(max(0, x-radius), min(W, x+radius+1))
slc = (yslc, xslc)
# Radius to nearest earlier source
oldr = kingdom[slc]
# Radius to new source
newr = np.hypot(xcoords[np.newaxis, xslc] - x, ycoords[yslc, np.newaxis] - y)
assert(newr.shape == oldr.shape)
newr = (newr + 0.5).astype(np.uint8)
# Pixels that are within range and closer to this source than any other.
owned = (newr <= radius) * (newr < oldr)
segmap[slc][owned] = i
kingdom[slc][owned] = newr[owned]
def _convert_ellipses(src):
    """Convert a galaxy source's shape to the plain EllipseE
    parameterization, in place; for REX galaxies the ellipticity
    components e1,e2 are then frozen."""
    if isinstance(src, (DevGalaxy, ExpGalaxy, SersicGalaxy)):
        src.shape = src.shape.toEllipseE()
    if isinstance(src, RexGalaxy):
        src.shape.freezeParams('e1', 'e2')
def _compute_invvars(allderivs):
ivs = []
for derivs in allderivs:
chisq = 0
for deriv,tim in derivs:
h,w = tim.shape
deriv.clipTo(w,h)
ie = tim.getInvError()
slc = deriv.getSlice(ie)
chi = deriv.patch * ie[slc]
chisq += (chi**2).sum()
ivs.append(chisq)
return ivs
def _argsort_by_brightness(cat, bands, ref_first=False):
fluxes = []
for src in cat:
# HACK -- here we just *sum* the nanomaggies in each band. Bogus!
br = src.getBrightness()
flux = sum([br.getFlux(band) for band in bands])
if ref_first and is_reference_source(src):
# Put the reference sources at the front of the list!
flux += 1e6
fluxes.append(flux)
Ibright = np.argsort(-np.array(fluxes))
return Ibright
def is_reference_source(src):
    """True if *src* carries a truthy 'is_reference_source' attribute
    (sources without the attribute default to False)."""
    return getattr(src, 'is_reference_source', False)
def _compute_source_metrics(srcs, tims, bands, tr):
    """Compute per-source, per-band quality metrics from the fitted models.

    Returns a dict with float32 arrays of shape (len(srcs), len(bands)):
      fracin     -- fraction of the model flux falling inside the patch
      fracflux   -- profile-weighted flux from other sources (blending)
      rchisq     -- profile-weighted chi-squared of the full model
      fracmasked -- profile-weighted fraction of masked (inverr==0) pixels
    """
    # rchi2 quality-of-fit metric
    rchi2_num = np.zeros((len(srcs),len(bands)), np.float32)
    rchi2_den = np.zeros((len(srcs),len(bands)), np.float32)
    # fracflux degree-of-blending metric
    fracflux_num = np.zeros((len(srcs),len(bands)), np.float32)
    fracflux_den = np.zeros((len(srcs),len(bands)), np.float32)
    # fracin flux-inside-blob metric
    fracin_num = np.zeros((len(srcs),len(bands)), np.float32)
    fracin_den = np.zeros((len(srcs),len(bands)), np.float32)
    # fracmasked: fraction of masked pixels metric
    fracmasked_num = np.zeros((len(srcs),len(bands)), np.float32)
    fracmasked_den = np.zeros((len(srcs),len(bands)), np.float32)
    for iband,band in enumerate(bands):
        for tim in tims:
            if tim.band != band:
                continue
            mod = np.zeros(tim.getModelShape(), tr.modtype)
            srcmods = [None for src in srcs]
            counts = np.zeros(len(srcs))
            pcal = tim.getPhotoCal()
            # For each source, compute its model and record its flux
            # in this image. Also compute the full model *mod*.
            for isrc,src in enumerate(srcs):
                patch = tr.getModelPatch(tim, src)
                if patch is None or patch.patch is None:
                    continue
                counts[isrc] = np.sum([np.abs(pcal.brightnessToCounts(b))
                                       for b in src.getBrightnesses()])
                if counts[isrc] == 0:
                    continue
                H,W = mod.shape
                patch.clipTo(W,H)
                srcmods[isrc] = patch
                patch.addTo(mod)
            # Now compute metrics for each source
            for isrc,patch in enumerate(srcmods):
                if patch is None:
                    continue
                if patch.patch is None:
                    continue
                if counts[isrc] == 0:
                    continue
                if np.sum(patch.patch**2) == 0:
                    continue
                slc = patch.getSlice(mod)
                patch = patch.patch
                # (mod - patch) is flux from others
                # (mod - patch) / counts is normalized flux from others
                # We take that and weight it by this source's profile;
                # patch / counts is unit profile
                # But this takes the dot product between the profiles,
                # so we have to normalize appropriately, ie by
                # (patch**2)/counts**2; counts**2 drops out of the
                # denom. If you have an identical source with twice the flux,
                # this results in fracflux being 2.0
                # fraction of this source's flux that is inside this patch.
                # This can be < 1 when the source is near an edge, or if the
                # source is a huge diffuse galaxy in a small patch.
                fin = np.abs(np.sum(patch) / counts[isrc])
                fracflux_num[isrc,iband] += (fin *
                    np.sum((mod[slc] - patch) * np.abs(patch)) /
                    np.sum(patch**2))
                fracflux_den[isrc,iband] += fin
                fracmasked_num[isrc,iband] += (
                    np.sum((tim.getInvError()[slc] == 0) * np.abs(patch)) /
                    np.abs(counts[isrc]))
                fracmasked_den[isrc,iband] += fin
                fracin_num[isrc,iband] += np.abs(np.sum(patch))
                fracin_den[isrc,iband] += np.abs(counts[isrc])
            # Add the sky before computing chi-squared of the full model.
            tim.getSky().addTo(mod)
            chisq = ((tim.getImage() - mod) * tim.getInvError())**2
            for isrc,patch in enumerate(srcmods):
                if patch is None or patch.patch is None:
                    continue
                if counts[isrc] == 0:
                    continue
                slc = patch.getSlice(mod)
                # We compute numerator and denom separately to handle
                # edge objects, where sum(patch.patch) < counts.
                # Also, to normalize by the number of images. (Being
                # on the edge of an image is like being in half an
                # image.)
                rchi2_num[isrc,iband] += (np.sum(chisq[slc] * patch.patch) /
                                          counts[isrc])
                # If the source is not near an image edge,
                # sum(patch.patch) == counts[isrc].
                rchi2_den[isrc,iband] += np.sum(patch.patch) / counts[isrc]
    assert(np.all(np.isfinite(fracflux_den)))
    assert(np.all(np.isfinite(rchi2_den)))
    assert(np.all(np.isfinite(fracmasked_den)))
    fracflux = np.zeros_like(fracflux_num)
    rchi2 = np.zeros_like(rchi2_num)
    fracmasked = np.zeros_like(fracmasked_num)
    # Avoid divide-by-zeros (these happen when, eg, we have no coverage in one band but
    # sources detected in another band, hence denominator is zero)
    I = np.flatnonzero(fracflux_den != 0)
    fracflux.flat[I] = fracflux_num.flat[I] / fracflux_den.flat[I]
    I = np.flatnonzero(rchi2_den != 0)
    rchi2.flat[I] = rchi2_num.flat[I] / rchi2_den.flat[I]
    I = np.flatnonzero(fracmasked_den != 0)
    fracmasked.flat[I] = fracmasked_num.flat[I] / fracmasked_den.flat[I]
    # fracin_{num,den} are in flux * nimages units
    tinyflux = 1e-9
    fracin = fracin_num / np.maximum(tinyflux, fracin_den)
    return dict(fracin=fracin, fracflux=fracflux, rchisq=rchi2,
                fracmasked=fracmasked)
def _initialize_models(src):
    '''
    Given a previously fit source *src*, build the initial candidate
    models for model selection.

    Returns (oldmodel, psf, rex, dev, exp):
      oldmodel -- name of *src*'s current model ('psf', 'dev' or 'exp')
      psf, rex, dev, exp -- freshly initialized sources sharing src's
        position and brightness (one of them being src.copy()).

    Raises ValueError for unsupported source types.
    '''
    from legacypipe.survey import LogRadius
    if isinstance(src, PointSource):
        psf = src.copy()
        rex = RexGalaxy(src.getPosition(), src.getBrightness(),
                        LogRadius(-1.)).copy()
        # logr, ee1, ee2
        shape = LegacyEllipseWithPriors(-1., 0., 0.)
        dev = DevGalaxy(src.getPosition(), src.getBrightness(), shape).copy()
        exp = ExpGalaxy(src.getPosition(), src.getBrightness(), shape).copy()
        oldmodel = 'psf'
    elif isinstance(src, DevGalaxy):
        # BUGFIX: 'psf' was not assigned in this branch, so the return
        # statement raised UnboundLocalError for dev-galaxy inputs.
        psf = PointSource(src.getPosition(), src.getBrightness()).copy()
        rex = RexGalaxy(src.getPosition(), src.getBrightness(),
                        LogRadius(np.log(src.getShape().re))).copy()
        dev = src.copy()
        exp = ExpGalaxy(src.getPosition(), src.getBrightness(),
                        src.getShape()).copy()
        oldmodel = 'dev'
    elif isinstance(src, ExpGalaxy):
        psf = PointSource(src.getPosition(), src.getBrightness()).copy()
        rex = RexGalaxy(src.getPosition(), src.getBrightness(),
                        LogRadius(np.log(src.getShape().re))).copy()
        dev = DevGalaxy(src.getPosition(), src.getBrightness(),
                        src.getShape()).copy()
        exp = src.copy()
        oldmodel = 'exp'
    else:
        # Previously fell through with all locals unbound; fail loudly
        # with a meaningful message instead.
        raise ValueError('_initialize_models: unexpected source type: %s' %
                         type(src))
    return oldmodel, psf, rex, dev, exp
def _get_subimages(tims, mods, src):
    """Cut each tim down to the footprint of its model patch for *src*.

    tims, mods: parallel lists; entries with a None or zero-size model
    are skipped.  Returns (subtims, modelMasks) where each modelMask is
    a dict mapping *src* to a full-subimage ModelMask.
    """
    subtims = []
    masks = []
    #print('Big blob: trimming:')
    for tim, mod in zip(tims, mods):
        if mod is None:
            continue
        mh, mw = mod.shape
        if mh == 0 or mw == 0:
            continue
        # The sub-image exactly covers the model patch, so the mask
        # for *src* spans the whole sub-image.
        masks.append({src: ModelMask(0, 0, mw, mh)})
        x0, y0 = mod.x0, mod.y0
        x1, y1 = x0 + mw, y0 + mh
        sub = _get_subtim(tim, x0, x1, y0, y1)
        if sub.shape != (mh, mw):
            print('Subtim was not the shape expected:', sub.shape,
                  'image shape', tim.getImage().shape, 'slice y', y0, y1,
                  'x', x0, x1, 'mod shape', mh, mw)
        subtims.append(sub)
    return subtims, masks
def _get_subtim(tim, x0, x1, y0, y1):
    """Cut the rectangle [y0:y1, x0:x1] out of *tim*, returning a new
    Image with shifted WCS/sky and the auxiliary attributes copied over."""
    cut = slice(y0, y1), slice(x0, x1)
    sub = Image(data=tim.getImage()[cut],
                inverr=tim.getInvError()[cut],
                wcs=tim.wcs.shifted(x0, y0),
                # Constant PSF evaluated at the cutout center.
                psf=tim.psf.constantPsfAt((x0 + x1) / 2., (y0 + y1) / 2.),
                photocal=tim.getPhotoCal(),
                sky=tim.sky.shifted(x0, y0),
                name=tim.name)
    sh, sw = sub.shape
    sub.subwcs = tim.subwcs.get_subimage(x0, y0, sw, sh)
    sub.band = tim.band
    sub.sig1 = tim.sig1
    sub.x0 = x0
    sub.y0 = y0
    # Keep a handle back to the full image this was cut from.
    sub.fulltim = tim
    sub.meta = tim.meta
    sub.psf_sigma = tim.psf_sigma
    if tim.dq is None:
        sub.dq = None
    else:
        sub.dq = tim.dq[cut]
        sub.dq_saturation_bits = tim.dq_saturation_bits
    return sub
class SourceModels(object):
    '''
    This class maintains a list of the model patches for a set of sources
    in a set of images.

    self.models is a list (per image) of lists (per source) of Patch
    objects (or None where a source has no model in that image).
    '''
    def __init__(self):
        # If True, model_masks() returns filled rectangular masks;
        # otherwise masks follow the non-zero pixels of each patch.
        self.filledModelMasks = True

    def save_images(self, tims):
        """Stash the original pixel arrays and give each tim a private copy
        so subsequent subtraction does not touch the originals."""
        self.orig_images = [tim.getImage() for tim in tims]
        for tim,img in zip(tims, self.orig_images):
            tim.data = img.copy()

    def restore_images(self, tims):
        """Put the saved original pixel arrays back into the tims."""
        for tim,img in zip(tims, self.orig_images):
            tim.data = img

    def create(self, tims, srcs, subtract=False, modelmasks=None):
        '''
        Compute the initial model patch for every tim x source pair.

        Note that this modifies the *tims* if subtract=True.

        modelmasks: optional list (per tim) of dicts mapping source ->
        ModelMask, used to restrict model evaluation.
        '''
        self.models = []
        for itim,tim in enumerate(tims):
            mods = []
            sh = tim.shape
            ie = tim.getInvError()
            for src in srcs:
                mm = None
                if modelmasks is not None:
                    mm = modelmasks[itim].get(src, None)
                mod = src.getModelPatch(tim, modelMask=mm)
                if mod is not None and mod.patch is not None:
                    # Diagnostics before the assertion below fires.
                    if not np.all(np.isfinite(mod.patch)):
                        print('Non-finite mod patch')
                        print('source:', src)
                        print('tim:', tim)
                        print('PSF:', tim.getPsf())
                    assert(np.all(np.isfinite(mod.patch)))
                    mod = _clip_model_to_blob(mod, sh, ie)
                    if subtract and mod is not None:
                        mod.addTo(tim.getImage(), scale=-1)
                mods.append(mod)
            self.models.append(mods)

    def add(self, i, tims):
        '''
        Adds the models for source *i* back into the tims.
        '''
        for tim,mods in zip(tims, self.models):
            mod = mods[i]
            if mod is not None:
                mod.addTo(tim.getImage())

    def update_and_subtract(self, i, src, tims, tim_ies=None, ps=None):
        """Recompute source *i*'s model from *src* and subtract it from the
        tims (src=None clears the stored model instead).

        tim_ies: optional list (per tim) of inverse-error maps; when given,
        the model is only subtracted where the map is non-zero.
        ps: unused here (kept for interface compatibility).
        """
        for itim,(tim,mods) in enumerate(zip(tims, self.models)):
            if src is None:
                mods[i] = None
                continue
            if tim is None:
                continue
            mod = src.getModelPatch(tim)
            mods[i] = mod
            if mod is None:
                continue
            if tim_ies is not None:
                # Apply an extra mask (ie, the mask_others segmentation mask)
                ie = tim_ies[itim]
                if ie is None:
                    continue
                inslice, outslice = mod.getSlices(tim.shape)
                p = mod.patch[inslice]
                img = tim.getImage()
                img[outslice] -= p * (ie[outslice]>0)
            else:
                mod.addTo(tim.getImage(), scale=-1)

    def model_masks(self, i, src):
        """Return a list (per image) of {src: ModelMask} dicts covering
        source *i*'s model patch in each image (empty dict where None)."""
        modelMasks = []
        for mods in self.models:
            d = dict()
            modelMasks.append(d)
            mod = mods[i]
            if mod is not None:
                if self.filledModelMasks:
                    mh,mw = mod.shape
                    d[src] = ModelMask(mod.x0, mod.y0, mw, mh)
                else:
                    d[src] = ModelMask(mod.x0, mod.y0, mod.patch != 0)
        return modelMasks
def remap_modelmask(modelMasks, oldsrc, newsrc):
    """Return a copy of *modelMasks* (one dict per image) where each mask
    keyed by *oldsrc* is re-keyed to *newsrc*; all other entries are
    dropped, and images without *oldsrc* yield an empty dict."""
    remapped = []
    for perim in modelMasks:
        entry = {}
        if oldsrc in perim:
            entry[newsrc] = perim[oldsrc]
        remapped.append(entry)
    return remapped
def _clip_model_to_blob(mod, sh, ie):
    '''
    Clip a model patch to the image bounds and zero it where the
    inverse-error map is zero (masked pixels).

    mod: Patch
    sh: tim shape
    ie: tim invError

    Returns: new Patch, or None if nothing of the patch survives clipping.
    '''
    mslc,islc = mod.getSlices(sh)
    sy,sx = mslc
    # Zero out model pixels that land on masked (ie==0) image pixels.
    patch = mod.patch[mslc] * (ie[islc]>0)
    if patch.shape == (0,0):
        return None
    mod = Patch(mod.x0 + sx.start, mod.y0 + sy.start, patch)
    # Check: the clipped patch must lie entirely within the image.
    mh,mw = mod.shape
    assert(mod.x0 >= 0)
    assert(mod.y0 >= 0)
    ph,pw = sh
    assert(mod.x0 + mw <= pw)
    assert(mod.y0 + mh <= ph)
    return mod
def _select_model(chisqs, nparams, galaxy_margin):
'''
Returns keepmod (string), the name of the preferred model.
'''
keepmod = 'none'
#print('_select_model: chisqs', chisqs)
# This is our "detection threshold": 5-sigma in
# *parameter-penalized* units; ie, ~5.2-sigma for point sources
cut = 5.**2
# Take the best of all models computed
diff = max([chisqs[name] - nparams[name] for name in chisqs.keys()
if name != 'none'] + [-1])
if diff < cut:
# Drop this source
return keepmod
# Now choose between point source and REX
if 'psf' in chisqs and (not 'rex' in chisqs) and (not 'dev' in chisqs) and (not 'exp' in chisqs) and (not 'ser' in chisqs):
# bright stars / reference stars: we don't compute the REX or any other models.
# We also need to check existence of the *other* models because sometimes REX can fail
# in ways where we don't even compute a chisq (eg, source leaves blob)
return 'psf'
#print('PSF', chisqs.get('psf',0)-nparams['psf'], 'vs REX', chisqs.get('rex',0)-nparams['rex'])
# Is PSF good enough to keep?
if 'psf' in chisqs and (chisqs['psf']-nparams['psf'] >= cut):
keepmod = 'psf'
# Now choose between point source and REX
if 'psf' in chisqs and (
chisqs['psf']-nparams['psf'] >= chisqs.get('rex',0)-nparams['rex']):
#print('Keeping PSF')
keepmod = 'psf'
elif 'rex' in chisqs and (
chisqs['rex']-nparams['rex'] > chisqs.get('psf',0)-nparams['psf']):
#print('REX is better fit than PSF.')
oldkeepmod = keepmod
keepmod = 'rex'
# For REX, we also demand a fractionally better fit
dchisq_psf = chisqs.get('psf',0)
dchisq_rex = chisqs.get('rex',0)
if dchisq_psf > 0 and (dchisq_rex - dchisq_psf) < (0.01 * dchisq_psf):
#print('REX is not a fractionally better fit, keeping', oldkeepmod)
keepmod = oldkeepmod
if not ('exp' in chisqs or 'dev' in chisqs):
#print('No EXP or DEV; keeping', keepmod)
return keepmod
# This is our "upgrade" threshold: how much better a galaxy
# fit has to be versus psf
cut = galaxy_margin
# This is the "fractional" upgrade threshold for psf/rex to dev/exp:
# 1% of psf vs nothing
fcut = 0.01 * chisqs.get('psf', 0.)
cut = max(cut, fcut)
expdiff = chisqs.get('exp', 0) - chisqs[keepmod]
devdiff = chisqs.get('dev', 0) - chisqs[keepmod]
#print('EXP vs', keepmod, ':', expdiff)
#print('DEV vs', keepmod, ':', devdiff)
if not (expdiff > cut or devdiff > cut):
#print('Keeping', keepmod)
return keepmod
if expdiff > devdiff:
#print('Upgrading to EXP: diff', expdiff)
keepmod = 'exp'
else:
#print('Upgrading to DEV: diff', expdiff)
keepmod = 'dev'
# Consider Sersic models
if 'ser' not in chisqs:
return keepmod
serdiff = chisqs['ser'] - chisqs[keepmod]
sermargin = 25.
if serdiff < sermargin:
return keepmod
keepmod = 'ser'
return keepmod
def _chisq_improvement(src, chisqs, chisqs_none):
'''
chisqs, chisqs_none: dict of band->chisq
'''
bright = src.getBrightness()
bands = chisqs.keys()
fluxes = dict([(b, bright.getFlux(b)) for b in bands])
dchisq = 0.
for b in bands:
flux = fluxes[b]
if flux == 0:
continue
# this will be positive for an improved model
d = chisqs_none[b] - chisqs[b]
if flux > 0:
dchisq += d
else:
dchisq -= np.abs(d)
return dchisq
def _per_band_chisqs(tractor, bands):
chisqs = dict([(b,0) for b in bands])
for img in tractor.images:
chi = tractor.getChiImage(img=img)
chisqs[img.band] = chisqs[img.band] + (chi ** 2).sum()
return chisqs
|
legacysurvey/legacypipe
|
py/legacypipe/oneblob.py
|
Python
|
bsd-3-clause
| 102,118
|
[
"Galaxy"
] |
2f68d5e59e47673c37ad7e3a11740153236317159f0cfc5211314df492bff638
|
#!/usr/bin/env python
##############################################################################################
#
#
# regrid_emissions_N96e.py
#
#
# Requirements:
# Iris 1.10, cf_units, numpy
#
#
# This Python script has been written by N.L. Abraham as part of the UKCA Tutorials:
# http://www.ukca.ac.uk/wiki/index.php/UKCA_Chemistry_and_Aerosol_Tutorials_at_vn10.4
#
# Copyright (C) 2015 University of Cambridge
#
# This is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# It is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You find a copy of the GNU Lesser General Public License at <http://www.gnu.org/licenses/>.
#
# Written by N. Luke Abraham 2016-10-20 <nla27@cam.ac.uk>
#
#
##############################################################################################
# preamble
import os
import time
import iris
import cf_units
import numpy
# --- CHANGE THINGS BELOW THIS LINE TO WORK WITH YOUR FILES ETC. ---
# name of file containing an ENDGame grid, e.g. your model output
# NOTE: all the fields in the file should be on the same horizontal
# grid, as the field used MAY NOT be the first in order of STASH
grid_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/um/archer/ag542/apm.pp/ag542a.pm1988dec'
#
# name of emissions file
emissions_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/emissions/combined_1960/0.5x0.5/combined_sources_CO_1960_360d.nc'
#
# STASH code emissions are associated with
# 301-320: surface
# m01s00i303: CO surface emissions
#
# 321-340: full atmosphere
#
stash='m01s00i303'
# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---
species_name='CO'
# this is the grid we want to regrid to, e.g. N96 ENDGame
grd=iris.load(grid_file)[0]
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()
# This is the original data
ems=iris.load_cube(emissions_file)
# make intersection between 0 and 360 longitude to ensure that
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))
# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system=grd.coord_system()
nems.coord(axis='y').coord_system=grd.coord_system()
# now guess the bounds of the new grid prior to regridding
# (AreaWeighted regridding requires contiguous bounds on both grids)
nems.coord(axis='x').guess_bounds()
nems.coord(axis='y').guess_bounds()
# now regrid, conserving the area integral of the emissions field
ocube=nems.regrid(grd,iris.analysis.AreaWeighted())
# now add correct attributes and names to netCDF file
ocube.var_name='emissions_'+str.strip(species_name)
ocube.long_name=str.strip(species_name)+' surf emissions'
ocube.units=cf_units.Unit('kg m-2 s-1')
ocube.attributes['vertical_scaling']='surface'
ocube.attributes['um_stash_source']=stash
ocube.attributes['tracer_name']=str.strip(species_name)
# global attributes, so don't set in local_keys
# NOTE: all these should be strings, including the numbers!
# basic emissions type
ocube.attributes['emission_type']='2' # periodic time series
ocube.attributes['update_type']='2' # same as above
ocube.attributes['update_freq_in_hours']='120' # i.e. 5 days
ocube.attributes['um_version']='10.6' # UM version
ocube.attributes['source']='combined_sources_CO_1960_360d.nc'
ocube.attributes['title']='Monthly surface emissions of carbon monoxide for 1960'
ocube.attributes['File_version']='v1'
ocube.attributes['File_creation_date']=time.ctime(time.time())
ocube.attributes['grid']='regular 1.875 x 1.25 degree longitude-latitude grid (N96e)'
ocube.attributes['history']=time.ctime(time.time())+': '+__file__+' \n'+ocube.attributes['history']
ocube.attributes['institution']='Centre for Atmospheric Science, Department of Chemistry, University of Cambridge, U.K.'
ocube.attributes['reference']='Granier et al., Clim. Change, 2011; Lamarque et al., Atmos. Chem. Phys., 2010'
# drop attributes inherited from the source netCDF file that are no
# longer accurate for the regridded output
del ocube.attributes['NCO']
del ocube.attributes['file_creation_date']
del ocube.attributes['description']
# rename and set time coord - 12 mid-month points of a single 360-day
# periodic year (days 15, 45, ..., 345 since 1960-01-01)
# this bit is annoyingly fiddly
ocube.coord(axis='t').var_name='time'
ocube.coord(axis='t').standard_name='time'
ocube.coords(axis='t')[0].units=cf_units.Unit('days since 1960-01-01 00:00:00', calendar='360_day')
ocube.coord(axis='t').points=numpy.array([15, 45, 75, 105, 135, 165, 195, 225, 255, 285, 315, 345])
# make z-direction: single surface level, added as a new leading axis
zdims=iris.coords.DimCoord(numpy.array([0]),standard_name = 'model_level_number',
                           units='1',attributes={'positive':'up'})
ocube.add_aux_coord(zdims)
ocube=iris.util.new_axis(ocube, zdims)
# now transpose cube to put Z 2nd, giving (time, level, lat, lon) order
ocube.transpose([1,0,2,3])
# make coordinates 64-bit
ocube.coord(axis='x').points=ocube.coord(axis='x').points.astype(dtype='float64')
ocube.coord(axis='y').points=ocube.coord(axis='y').points.astype(dtype='float64')
#ocube.coord(axis='z').points=ocube.coord(axis='z').points.astype(dtype='float64') # integer
ocube.coord(axis='t').points=ocube.coord(axis='t').points.astype(dtype='float64')
# for some reason, longitude_bounds are double, but latitude_bounds are float
ocube.coord('latitude').bounds=ocube.coord('latitude').bounds.astype(dtype='float64')
# add forecast_period & forecast_reference_time
# forecast_reference_time: one mid-month value per time point
frt=numpy.array([15, 45, 75, 105, 135, 165, 195, 225, 255, 285, 315, 345], dtype='float64')
frt_dims=iris.coords.AuxCoord(frt,standard_name = 'forecast_reference_time',
                              units=cf_units.Unit('days since 1960-01-01 00:00:00', calendar='360_day'))
ocube.add_aux_coord(frt_dims,data_dims=0)
ocube.coord('forecast_reference_time').guess_bounds()
# forecast_period: scalar coordinate (no data dimension)
fp=numpy.array([-360],dtype='float64')
fp_dims=iris.coords.AuxCoord(fp,standard_name = 'forecast_period',
                             units=cf_units.Unit('hours'),bounds=numpy.array([-720,0],dtype='float64'))
ocube.add_aux_coord(fp_dims,data_dims=None)
# add-in cell_methods
ocube.cell_methods = [iris.coords.CellMethod('mean', 'time')]
# set _FillValue by converting the data to a masked float32 array
fillval=1e+20
ocube.data = numpy.ma.array(data=ocube.data, fill_value=fillval, dtype='float32')
# output file name, based on species
outpath='ukca_emiss_'+species_name+'.nc'
# don't want time to be cattable, as is a periodic emissions file
iris.FUTURE.netcdf_no_unlimited=True
# annoying hack to set a missing_value attribute as well as a _FillValue attribute
# (bypasses iris' attribute validation, which rejects 'missing_value')
dict.__setitem__(ocube.attributes, 'missing_value', fillval)
# now write-out to netCDF (classic format for UM compatibility)
saver = iris.fileformats.netcdf.Saver(filename=outpath, netcdf_format='NETCDF3_CLASSIC')
saver.update_global_attributes(Conventions=iris.fileformats.netcdf.CF_CONVENTIONS_VERSION)
saver.write(ocube, local_keys=['vertical_scaling', 'missing_value','um_stash_source','tracer_name'])
# end of script
|
acsis-project/emissions
|
emissions/python/periodic_1960/regrid_CO_emissions_n96e_360d_1960.py
|
Python
|
gpl-3.0
| 7,011
|
[
"NetCDF"
] |
fc5f314596d51023aee5675f4210b5a0c78da4e2b5e3ef81b96878afe408e3f9
|
# sql/expression.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Defines the base components of SQL expression trees.
All components are derived from a common base class
:class:`.ClauseElement`. Common behaviors are organized
based on class hierarchies, in some cases via mixins.
All object construction from this package occurs via functions which
in some cases will construct composite :class:`.ClauseElement` structures
together, and in other cases simply return a single :class:`.ClauseElement`
constructed directly. The function interface affords a more "DSL-ish"
feel to constructing SQL expressions and also allows future class
reorganizations.
Even though classes are not constructed directly from the outside,
most classes which have additional public methods are considered to be
public (i.e. have no leading underscore). Other classes which are
"semi-public" are marked with a single leading underscore; these
classes usually have few or no public methods and are less guaranteed
to stay the same in future releases.
"""
import itertools
import re
from operator import attrgetter
from .. import util, exc, inspection
from . import operators
from .operators import ColumnOperators
from .visitors import Visitable, cloned_traverse
import operator
# Lazily-resolved module references: importlater defers the actual import
# until first attribute access, breaking circular-import cycles between
# sqlalchemy.sql and these modules.
functions = util.importlater("sqlalchemy.sql", "functions")
sqlutil = util.importlater("sqlalchemy.sql", "util")
sqltypes = util.importlater("sqlalchemy", "types")
default = util.importlater("sqlalchemy.engine", "default")

# Public API of this module; everything else is internal.
__all__ = [
    'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
    'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
    'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between',
    'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct',
    'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
    'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
    'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast',
    'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery',
    'table', 'text',
    'tuple_', 'type_coerce', 'union', 'union_all', 'update', ]

# Sentinel symbols: PARSE_AUTOCOMMIT marks statements whose autocommit
# behavior is determined by parsing; NO_ARG distinguishes "argument not
# supplied" from an explicit None.
PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
NO_ARG = util.symbol('NO_ARG')
def nullsfirst(column):
    """Return a NULLS FIRST ``ORDER BY`` clause element.

    e.g.::

        someselect.order_by(desc(table1.mycol).nullsfirst())

    produces::

        ORDER BY mycol DESC NULLS FIRST

    """
    modifier = operators.nullsfirst_op
    return UnaryExpression(column, modifier=modifier)
def nullslast(column):
    """Return a NULLS LAST ``ORDER BY`` clause element.

    e.g.::

        someselect.order_by(desc(table1.mycol).nullslast())

    produces::

        ORDER BY mycol DESC NULLS LAST

    """
    modifier = operators.nullslast_op
    return UnaryExpression(column, modifier=modifier)
def desc(column):
    """Return a descending ``ORDER BY`` clause element.

    e.g.::

        someselect.order_by(desc(table1.mycol))

    produces::

        ORDER BY mycol DESC

    """
    expr = UnaryExpression(column, modifier=operators.desc_op)
    return expr
def asc(column):
    """Return an ascending ``ORDER BY`` clause element.

    e.g.::

        someselect.order_by(asc(table1.mycol))

    produces::

        ORDER BY mycol ASC

    """
    expr = UnaryExpression(column, modifier=operators.asc_op)
    return expr
def outerjoin(left, right, onclause=None):
    """Return an ``OUTER JOIN`` clause element.

    The returned object is an instance of :class:`.Join`.

    Similar functionality is also available via the
    :meth:`~.FromClause.outerjoin()` method on any
    :class:`.FromClause`.

    :param left: The left side of the join.

    :param right: The right side of the join.

    :param onclause: Optional criterion for the ``ON`` clause, is
      derived from foreign key relationships established between
      left and right otherwise.

    To chain joins together, use the :meth:`.FromClause.join` or
    :meth:`.FromClause.outerjoin` methods on the resulting
    :class:`.Join` object.

    """
    # an outer join is just a Join with the isouter flag set
    join_clause = Join(left, right, onclause, isouter=True)
    return join_clause
def join(left, right, onclause=None, isouter=False):
    """Return a ``JOIN`` clause element (regular inner join).

    The returned object is an instance of :class:`.Join`.

    Similar functionality is also available via the
    :meth:`~.FromClause.join()` method on any
    :class:`.FromClause`.

    :param left: The left side of the join.

    :param right: The right side of the join.

    :param onclause: Optional criterion for the ``ON`` clause, is
      derived from foreign key relationships established between
      left and right otherwise.

    To chain joins together, use the :meth:`.FromClause.join` or
    :meth:`.FromClause.outerjoin` methods on the resulting
    :class:`.Join` object.

    """
    join_clause = Join(left, right, onclause, isouter=isouter)
    return join_clause
def select(columns=None, whereclause=None, from_obj=None, **kwargs):
    """Returns a ``SELECT`` clause element.

    Similar functionality is also available via the :func:`select()`
    method on any :class:`.FromClause`.

    The returned object is an instance of :class:`.Select`.

    All arguments which accept :class:`.ClauseElement` arguments also accept
    string arguments, which will be converted as appropriate into
    either :func:`text()` or :func:`literal_column()` constructs.

    .. seealso::

        :ref:`coretutorial_selecting` - Core Tutorial description of
        :func:`.select`.

    :param columns:
      A list of :class:`.ClauseElement` objects, typically
      :class:`.ColumnElement` objects or subclasses, which will form the
      columns clause of the resulting statement. For all members which are
      instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
      members of the :class:`.Selectable` will be added individually to the
      columns clause. For example, specifying a
      :class:`~sqlalchemy.schema.Table` instance will result in all the
      contained :class:`~sqlalchemy.schema.Column` objects within to be added
      to the columns clause.

      This argument is not present on the form of :func:`select()`
      available on :class:`~sqlalchemy.schema.Table`.

    :param whereclause:
      A :class:`.ClauseElement` expression which will be used to form the
      ``WHERE`` clause.

    :param from_obj:
      A list of :class:`.ClauseElement` objects which will be added to the
      ``FROM`` clause of the resulting statement. Note that "from" objects are
      automatically located within the columns and whereclause ClauseElements.
      Use this parameter to explicitly specify "from" objects which are not
      automatically locatable. This could include
      :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
      or :class:`.Join` objects whose presence will supercede that of the
      :class:`~sqlalchemy.schema.Table` objects already located in the other
      clauses.

    :param autocommit:
      Deprecated.  Use .execution_options(autocommit=<True|False>)
      to set the autocommit option.

    :param bind=None:
      an :class:`~.base.Engine` or :class:`~.base.Connection` instance
      to which the
      resulting :class:`.Select` object will be bound.  The :class:`.Select`
      object will otherwise automatically bind to whatever
      :class:`~.base.Connectable` instances can be located within its contained
      :class:`.ClauseElement` members.

    :param correlate=True:
      indicates that this :class:`.Select` object should have its
      contained :class:`.FromClause` elements "correlated" to an enclosing
      :class:`.Select` object.  This means that any :class:`.ClauseElement`
      instance within the "froms" collection of this :class:`.Select`
      which is also present in the "froms" collection of an
      enclosing select will not be rendered in the ``FROM`` clause
      of this select statement.

    :param distinct=False:
      when ``True``, applies a ``DISTINCT`` qualifier to the columns
      clause of the resulting statement.

      The boolean argument may also be a column expression or list
      of column expressions - this is a special calling form which
      is understood by the Postgresql dialect to render the
      ``DISTINCT ON (<columns>)`` syntax.

      ``distinct`` is also available via the :meth:`~.Select.distinct`
      generative method.

    :param for_update=False:
      when ``True``, applies ``FOR UPDATE`` to the end of the
      resulting statement.

      Certain database dialects also support
      alternate values for this parameter:

      * With the MySQL dialect, the value ``"read"`` translates to
        ``LOCK IN SHARE MODE``.
      * With the Oracle and Postgresql dialects, the value ``"nowait"``
        translates to ``FOR UPDATE NOWAIT``.
      * With the Postgresql dialect, the values "read" and ``"read_nowait"``
        translate to ``FOR SHARE`` and ``FOR SHARE NOWAIT``, respectively.

      .. versionadded:: 0.7.7

    :param group_by:
      a list of :class:`.ClauseElement` objects which will comprise the
      ``GROUP BY`` clause of the resulting select.

    :param having:
      a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
      of the resulting select when ``GROUP BY`` is used.

    :param limit=None:
      a numerical value which usually compiles to a ``LIMIT``
      expression in the resulting select.  Databases that don't
      support ``LIMIT`` will attempt to provide similar
      functionality.

    :param offset=None:
      a numeric value which usually compiles to an ``OFFSET``
      expression in the resulting select.  Databases that don't
      support ``OFFSET`` will attempt to provide similar
      functionality.

    :param order_by:
      a scalar or list of :class:`.ClauseElement` objects which will
      comprise the ``ORDER BY`` clause of the resulting select.

    :param use_labels=False:
      when ``True``, the statement will be generated using labels
      for each column in the columns clause, which qualify each
      column with its parent table's (or aliases) name so that name
      conflicts between columns in different tables don't occur.
      The format of the label is <tablename>_<column>.  The "c"
      collection of the resulting :class:`.Select` object will use these
      names as well for targeting column members.

      use_labels is also available via the :meth:`~.SelectBase.apply_labels`
      generative method.

    """
    # NOTE: from_obj previously defaulted to a mutable list literal ([]),
    # which is shared across all calls of the function; use a None
    # sentinel and create a fresh list per call instead.
    if from_obj is None:
        from_obj = []
    return Select(columns, whereclause=whereclause, from_obj=from_obj,
                  **kwargs)
def subquery(alias, *args, **kwargs):
    """Return an :class:`.Alias` object derived
    from a :class:`.Select`.

    name
      alias name

    \*args, \**kwargs

      all other arguments are delivered to the
      :func:`select` function.

    """
    stmt = Select(*args, **kwargs)
    return stmt.alias(alias)
def insert(table, values=None, inline=False, **kwargs):
    """Represent an ``INSERT`` statement via the :class:`.Insert` SQL
    construct.

    Similar functionality is available via the
    :meth:`~.TableClause.insert` method on
    :class:`~.schema.Table`.

    :param table: :class:`.TableClause` which is the subject of the insert.

    :param values: collection of values to be inserted; see
     :meth:`.Insert.values` for a description of allowed formats here.
     Can be omitted entirely; a :class:`.Insert` construct will also
     dynamically render the VALUES clause at execution time based on
     the parameters passed to :meth:`.Connection.execute`.

    :param inline: if True, SQL defaults will be compiled 'inline' into the
     statement and not pre-executed.

    If both `values` and compile-time bind parameters are present, the
    compile-time bind parameters override the information specified
    within `values` on a per-key basis.

    The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
    objects or their string identifiers. Each key may reference one of:

    * a literal data value (i.e. string, number, etc.);
    * a Column object;
    * a SELECT statement.

    If a ``SELECT`` statement is specified which references this
    ``INSERT`` statement's table, the statement will be correlated
    against the ``INSERT`` statement.

    .. seealso::

        :ref:`coretutorial_insert_expressions` - SQL Expression Tutorial

        :ref:`inserts_and_updates` - SQL Expression Tutorial

    """
    stmt = Insert(table, values, inline=inline, **kwargs)
    return stmt
def update(table, whereclause=None, values=None, inline=False, **kwargs):
    """Represent an ``UPDATE`` statement via the :class:`.Update` SQL
    construct.

    E.g.::

        from sqlalchemy import update

        stmt = update(users).where(users.c.id==5).\\
                values(name='user #5')

    Similar functionality is available via the
    :meth:`~.TableClause.update` method on
    :class:`.Table`::


        stmt = users.update().\\
                    where(users.c.id==5).\\
                    values(name='user #5')

    :param table: A :class:`.Table` object representing the database
     table to be updated.

    :param whereclause: Optional SQL expression describing the ``WHERE``
     condition of the ``UPDATE`` statement. Modern applications
     may prefer to use the generative :meth:`~Update.where()`
     method to specify the ``WHERE`` clause.

     The WHERE clause can refer to multiple tables.
     For databases which support this, an ``UPDATE FROM`` clause will
     be generated, or on MySQL, a multi-table update.  The statement
     will fail on databases that don't have support for multi-table
     update statements.  A SQL-standard method of referring to
     additional tables in the WHERE clause is to use a correlated
     subquery::

        users.update().values(name='ed').where(
                users.c.name==select([addresses.c.email_address]).\\
                            where(addresses.c.user_id==users.c.id).\\
                            as_scalar()
                )

     .. versionchanged:: 0.7.4
         The WHERE clause can refer to multiple tables.

    :param values:
      Optional dictionary which specifies the ``SET`` conditions of the
      ``UPDATE``.  If left as ``None``, the ``SET``
      conditions are determined from those parameters passed to the
      statement during the execution and/or compilation of the
      statement.   When compiled standalone without any parameters,
      the ``SET`` clause generates for all columns.

      Modern applications may prefer to use the generative
      :meth:`.Update.values` method to set the values of the
      UPDATE statement.

    :param inline:
      if True, SQL defaults present on :class:`.Column` objects via
      the ``default`` keyword will be compiled 'inline' into the statement
      and not pre-executed.  This means that their values will not
      be available in the dictionary returned from
      :meth:`.ResultProxy.last_updated_params`.

    If both ``values`` and compile-time bind parameters are present, the
    compile-time bind parameters override the information specified
    within ``values`` on a per-key basis.

    The keys within ``values`` can be either :class:`.Column`
    objects or their string identifiers (specifically the "key" of the
    :class:`.Column`, normally but not necessarily equivalent to
    its "name").  Normally, the
    :class:`.Column` objects used here are expected to be
    part of the target :class:`.Table` that is the table
    to be updated.  However when using MySQL, a multiple-table
    UPDATE statement can refer to columns from any of
    the tables referred to in the WHERE clause.

    The values referred to in ``values`` are typically:

    * a literal data value (i.e. string, number, etc.)
    * a SQL expression, such as a related :class:`.Column`,
      a scalar-returning :func:`.select` construct,
      etc.

    When combining :func:`.select` constructs within the values
    clause of an :func:`.update` construct,
    the subquery represented by the :func:`.select` should be
    *correlated* to the parent table, that is, providing criterion
    which links the table inside the subquery to the outer table
    being updated::

        users.update().values(
                name=select([addresses.c.email_address]).\\
                        where(addresses.c.user_id==users.c.id).\\
                        as_scalar()
            )

    .. seealso::

        :ref:`inserts_and_updates` - SQL Expression
        Language Tutorial

    """
    stmt = Update(table,
                  whereclause=whereclause,
                  values=values,
                  inline=inline,
                  **kwargs)
    return stmt
def delete(table, whereclause=None, **kwargs):
    """Represent a ``DELETE`` statement via the :class:`.Delete` SQL
    construct.

    Similar functionality is available via the
    :meth:`~.TableClause.delete` method on
    :class:`~.schema.Table`.

    :param table: The table to be updated.

    :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
      condition of the ``UPDATE`` statement. Note that the
      :meth:`~Delete.where()` generative method may be used instead.

    .. seealso::

        :ref:`deletes` - SQL Expression Tutorial

    """
    stmt = Delete(table, whereclause, **kwargs)
    return stmt
def and_(*clauses):
    """Join a list of clauses together using the ``AND`` operator.

    The ``&`` operator is also overloaded on all :class:`.ColumnElement`
    subclasses to produce the
    same result.

    """
    # a single clause needs no conjunction wrapper
    if len(clauses) == 1:
        clause, = clauses
        return clause
    return BooleanClauseList(operator=operators.and_, *clauses)
def or_(*clauses):
    """Join a list of clauses together using the ``OR`` operator.

    The ``|`` operator is also overloaded on all
    :class:`.ColumnElement` subclasses to produce the
    same result.

    """
    if len(clauses) != 1:
        return BooleanClauseList(operator=operators.or_, *clauses)
    # a single clause needs no disjunction wrapper
    return clauses[0]
def not_(clause):
    """Return a negation of the given clause, i.e. ``NOT(clause)``.

    The ``~`` operator is also overloaded on all
    :class:`.ColumnElement` subclasses to produce the
    same result.

    """
    bound = _literal_as_binds(clause)
    return operators.inv(bound)
def distinct(expr):
    """Return a ``DISTINCT`` clause.

    e.g.::

        distinct(a)

    renders::

        DISTINCT a

    """
    bound = _literal_as_binds(expr)
    return UnaryExpression(bound,
                           operator=operators.distinct_op,
                           type_=bound.type)
def between(ctest, cleft, cright):
    """Return a ``BETWEEN`` predicate clause.

    Equivalent of SQL ``clausetest BETWEEN clauseleft AND clauseright``.

    The :func:`between()` method on all
    :class:`.ColumnElement` subclasses provides
    similar functionality.

    """
    # coerce plain Python values to a bound expression, then delegate
    return _literal_as_binds(ctest).between(cleft, cright)
def case(whens, value=None, else_=None):
    """Produce a ``CASE`` statement.

    whens
      A sequence of pairs, or alternatively a dict,
      to be translated into "WHEN / THEN" clauses.

    value
      Optional for simple case statements, produces
      a column expression as in "CASE <expr> WHEN ..."

    else\_
      Optional as well, for case defaults produces
      the "ELSE" portion of the "CASE" statement.

    The expressions used for THEN and ELSE,
    when specified as strings, will be interpreted
    as bound values. To specify textual SQL expressions
    for these, use the :func:`literal_column`
    construct.

    The expressions used for the WHEN criterion
    may only be literal strings when "value" is
    present, i.e. CASE table.somecol WHEN "x" THEN "y".
    Otherwise, literal strings are not accepted
    in this position, and either the text(<string>)
    or literal(<string>) constructs must be used to
    interpret raw string values.

    Usage examples::

      case([(orderline.c.qty > 100, item.c.specialprice),
            (orderline.c.qty > 10, item.c.bulkprice)
          ], else_=item.c.regularprice)
      case(value=emp.c.type, whens={
              'engineer': emp.c.salary * 1.1,
              'manager':  emp.c.salary * 3,
          })

    Using :func:`literal_column()`, to allow for databases that
    do not support bind parameters in the ``then`` clause.  The type
    can be specified which determines the type of the :func:`case()` construct
    overall::

        case([(orderline.c.qty > 100,
                literal_column("'greaterthan100'", String)),
              (orderline.c.qty > 10, literal_column("'greaterthan10'",
                String))
            ], else_=literal_column("'lethan10'", String))

    """
    stmt = Case(whens, value=value, else_=else_)
    return stmt
def cast(clause, totype, **kwargs):
    """Return a ``CAST`` function.

    Equivalent of SQL ``CAST(clause AS totype)``.

    Use with a :class:`~sqlalchemy.types.TypeEngine` subclass, i.e::

      cast(table.c.unit_price * table.c.qty, Numeric(10,4))

    or::

      cast(table.c.timestamp, DATE)

    """
    expr = Cast(clause, totype, **kwargs)
    return expr
def extract(field, expr):
    """Return the clause ``extract(field FROM expr)``."""
    clause = Extract(field, expr)
    return clause
def collate(expression, collation):
    """Return the clause ``expression COLLATE collation``.

    e.g.::

        collate(mycolumn, 'utf8_bin')

    produces::

        mycolumn COLLATE utf8_bin

    """
    bound = _literal_as_binds(expression)
    collation_clause = _literal_as_text(collation)
    return BinaryExpression(bound,
                            collation_clause,
                            operators.collate,
                            type_=bound.type)
def exists(*args, **kwargs):
    """Return an ``EXISTS`` clause as applied to a :class:`.Select` object.

    Calling styles are of the following forms::

        # use on an existing select()
        s = select([table.c.col1]).where(table.c.col2==5)
        s = exists(s)

        # construct a select() at once
        exists(['*'], **select_arguments).where(criterion)

        # columns argument is optional, generates "EXISTS (SELECT *)"
        # by default.
        exists().where(table.c.col2==5)

    """
    clause = Exists(*args, **kwargs)
    return clause
def union(*selects, **kwargs):
    """Return a ``UNION`` of multiple selectables.

    The returned object is an instance of
    :class:`.CompoundSelect`.

    A similar :func:`union()` method is available on all
    :class:`.FromClause` subclasses.

    \*selects
      a list of :class:`.Select` instances.

    \**kwargs
       available keyword arguments are the same as those of
       :func:`select`.

    """
    keyword = CompoundSelect.UNION
    return CompoundSelect(keyword, *selects, **kwargs)
def union_all(*selects, **kwargs):
    """Return a ``UNION ALL`` of multiple selectables.

    The returned object is an instance of
    :class:`.CompoundSelect`.

    A similar :func:`union_all()` method is available on all
    :class:`.FromClause` subclasses.

    \*selects
      a list of :class:`.Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    keyword = CompoundSelect.UNION_ALL
    return CompoundSelect(keyword, *selects, **kwargs)
def except_(*selects, **kwargs):
    """Return an ``EXCEPT`` of multiple selectables.

    The returned object is an instance of
    :class:`.CompoundSelect`.

    \*selects
      a list of :class:`.Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    keyword = CompoundSelect.EXCEPT
    return CompoundSelect(keyword, *selects, **kwargs)
def except_all(*selects, **kwargs):
    """Return an ``EXCEPT ALL`` of multiple selectables.

    The returned object is an instance of
    :class:`.CompoundSelect`.

    \*selects
      a list of :class:`.Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    keyword = CompoundSelect.EXCEPT_ALL
    return CompoundSelect(keyword, *selects, **kwargs)
def intersect(*selects, **kwargs):
    """Return an ``INTERSECT`` of multiple selectables.

    The returned object is an instance of
    :class:`.CompoundSelect`.

    \*selects
      a list of :class:`.Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    keyword = CompoundSelect.INTERSECT
    return CompoundSelect(keyword, *selects, **kwargs)
def intersect_all(*selects, **kwargs):
    """Return an ``INTERSECT ALL`` of multiple selectables.

    The returned object is an instance of
    :class:`.CompoundSelect`.

    \*selects
      a list of :class:`.Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    keyword = CompoundSelect.INTERSECT_ALL
    return CompoundSelect(keyword, *selects, **kwargs)
def alias(selectable, name=None):
    """Return an :class:`.Alias` object.

    An :class:`.Alias` represents any :class:`.FromClause`
    with an alternate name assigned within SQL, typically using the ``AS``
    clause when generated, e.g. ``SELECT * FROM table AS aliasname``.

    Similar functionality is available via the
    :meth:`~.FromClause.alias` method
    available on all :class:`.FromClause` subclasses.

    When an :class:`.Alias` is created from a :class:`.Table` object,
    this has the effect of the table being rendered
    as ``tablename AS aliasname`` in a SELECT statement.

    For :func:`.select` objects, the effect is that of creating a named
    subquery, i.e. ``(select ...) AS aliasname``.

    The ``name`` parameter is optional, and provides the name
    to use in the rendered SQL.  If blank, an "anonymous" name
    will be deterministically generated at compile time.
    Deterministic means the name is guaranteed to be unique against
    other constructs used in the same statement, and will also be the
    same name for each successive compilation of the same statement
    object.

    :param selectable: any :class:`.FromClause` subclass,
        such as a table, select statement, etc.

    :param name: string name to be assigned as the alias.
        If ``None``, a name will be deterministically generated
        at compile time.

    """
    aliased = Alias(selectable, name=name)
    return aliased
def literal(value, type_=None):
    """Return a literal clause, bound to a bind parameter.

    Literal clauses are created automatically when non- :class:`.ClauseElement`
    objects (such as strings, ints, dates, etc.) are used in a comparison
    operation with a :class:`.ColumnElement`
    subclass, such as a :class:`~sqlalchemy.schema.Column` object.
    Use this function to force the
    generation of a literal clause, which will be created as a
    :class:`BindParameter` with a bound value.

    :param value: the value to be bound. Can be any Python object supported by
        the underlying DB-API, or is translatable via the given type argument.

    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
        will provide bind-parameter translation for this literal.

    """
    # an anonymous, unique bind parameter carrying the value
    bound = BindParameter(None, value, type_=type_, unique=True)
    return bound
def tuple_(*expr):
    """Return a SQL tuple.

    Main usage is to produce a composite IN construct::

        tuple_(table.c.col1, table.c.col2).in_(
            [(1, 2), (5, 12), (10, 19)]
        )

    .. warning::

        The composite IN construct is not supported by all backends,
        and is currently known to work on Postgresql and MySQL,
        but not SQLite.   Unsupported backends will raise
        a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such
        an expression is invoked.

    """
    clause = Tuple(*expr)
    return clause
def type_coerce(expr, type_):
    """Coerce the given expression into the given type,
    on the Python side only.

    :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
    "CAST" expression is rendered - the given type is only applied towards
    expression typing and against received result values.

    e.g.::

        from sqlalchemy.types import TypeDecorator
        import uuid

        class AsGuid(TypeDecorator):
            impl = String

            def process_bind_param(self, value, dialect):
                if value is not None:
                    return str(value)
                else:
                    return None

            def process_result_value(self, value, dialect):
                if value is not None:
                    return uuid.UUID(value)
                else:
                    return None

        conn.execute(
            select([type_coerce(mytable.c.ident, AsGuid)]).\\
                    where(
                        type_coerce(mytable.c.ident, AsGuid) ==
                        uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
                    )
        )

    """
    type_ = sqltypes.to_instance(type_)

    if hasattr(expr, '__clause_element__'):
        # unwrap ORM-level constructs and retry with the core element
        return type_coerce(expr.__clause_element__(), type_)
    if isinstance(expr, BindParameter):
        # clone the bind so the caller's object keeps its original type
        cloned = expr._clone()
        cloned.type = type_
        return cloned
    if not isinstance(expr, Visitable):
        # plain Python value: None maps to NULL, anything else to a literal
        if expr is None:
            return null()
        return literal(expr, type_=type_)
    # an existing clause element: wrap it in an anonymous Label of the type
    return Label(None, expr, type_=type_)
def label(name, obj):
    """Return a :class:`Label` object for the
    given :class:`.ColumnElement`.

    A label changes the name of an element in the columns clause of a
    ``SELECT`` statement, typically via the ``AS`` SQL keyword.

    This functionality is more conveniently available via the
    :func:`label()` method on :class:`.ColumnElement`.

    name
      label name

    obj
      a :class:`.ColumnElement`.

    """
    labeled = Label(name, obj)
    return labeled
def column(text, type_=None):
    """Return a textual column clause, as would appear in the columns
    clause of a ``SELECT`` statement.

    The result is a :class:`.ColumnClause`, the "syntactical" portion
    of a schema-level :class:`~sqlalchemy.schema.Column`.  It is often
    used directly within :func:`~.expression.select` constructs or with
    lightweight :func:`~.expression.table` constructs.

    Note that :func:`~.expression.column` is not part of the
    ``sqlalchemy`` namespace; import it from ``sqlalchemy.sql``::

        from sqlalchemy.sql import table, column

    :param text: the column name.  Normal quoting rules apply; for a
        textual construct that must not be quoted use
        :func:`literal_column` instead.
    :param type_: optional :class:`~sqlalchemy.types.TypeEngine`
        providing result-set translation for this column.

    See :class:`.ColumnClause` for further examples.
    """
    return ColumnClause(text, type_=type_)
def literal_column(text, type_=None):
    """Return a textual column expression for the columns clause of a
    ``SELECT`` statement.

    The returned object supports further expressions like any other
    column object - comparison, math and string operations.  The
    ``type_`` parameter determines expression behavior (e.g. whether
    ``+`` means string concatenation or numeric addition).

    :param text: the text of the expression; may be any SQL expression.
        Quoting rules are NOT applied - for a quoted column-name
        expression use :func:`column` instead.
    :param type_: optional :class:`~sqlalchemy.types.TypeEngine`
        providing result-set translation and expression semantics;
        defaults to NullType when omitted.
    """
    return ColumnClause(text, type_=type_, is_literal=True)
def table(name, *columns):
    """Represent a textual table clause.

    The result is a :class:`.TableClause`, the "syntactical" portion of
    a schema-level :class:`~.schema.Table`, useful for constructing
    lightweight table constructs.

    Note that :func:`~.expression.table` is not part of the
    ``sqlalchemy`` namespace; import it from ``sqlalchemy.sql``::

        from sqlalchemy.sql import table, column

    :param name: the table name.
    :param columns: a collection of :func:`~.expression.column`
        constructs.

    See :class:`.TableClause` for further examples.
    """
    return TableClause(name, *columns)
def bindparam(key, value=NO_ARG, type_=None, unique=False, required=NO_ARG,
              quote=None, callable_=None):
    """Create a bind parameter clause with the given key.

    :param key: the key for this bind param, used in the generated SQL
        for dialects with named parameters.  May be modified during
        compilation if it collides with another :class:`BindParameter`
        or requires truncation.  A :class:`.ColumnClause` may also be
        passed, in which case its name and type are adopted.
    :param value: initial value, overridable by the parameter
        dictionary at execution time.  Defaults to ``None``; if neither
        ``value`` nor ``callable_`` is given, ``required`` defaults to
        ``True``, demanding a value at execution time.

        .. versionchanged:: 0.8  ``required`` defaults to ``True``
           when neither ``value`` nor ``callable_`` is passed.

    :param callable_: a callable evaluated at statement execution time
        to produce the value, for cases where the value is unknown when
        the construct is created.
    :param type_: a ``TypeEngine`` used to pre-process the value at
        execution time.
    :param unique: if True, the key is uniquified if another
        :class:`BindParameter` of the same name exists in the
        containing :class:`.ClauseElement`.
    :param required: if ``True``, a value is required at execution
        time; when not passed, derived from the absence of both
        ``value`` and ``callable_``.
    :param quote: True if the parameter name requires quoting and is
        not a known SQLAlchemy reserved word (Oracle only).
    """
    if isinstance(key, ColumnClause):
        # a Column was passed - inherit its type and use its name as key
        type_ = key.type
        key = key.name

    if required is NO_ARG:
        # required exactly when no value source was supplied
        required = value is NO_ARG and callable_ is None
    if value is NO_ARG:
        value = None

    return BindParameter(key, value,
                         type_=type_, unique=unique,
                         callable_=callable_, required=required,
                         quote=quote)
def outparam(key, type_=None):
    """Create an 'OUT' parameter for use with stored procedures, on
    databases that support them.

    Used like a regular function parameter.  The "output" value is
    available after execution from the
    :class:`~sqlalchemy.engine.ResultProxy` via its ``out_parameters``
    attribute, a dictionary of result values.
    """
    return BindParameter(
        key, None, type_=type_, unique=False, isoutparam=True)
def text(text, bind=None, *args, **kwargs):
    """Create a SQL construct represented by a literal string.

    E.g.::

        t = text("SELECT * FROM users")
        result = connection.execute(t)

    Compared to a plain string, :func:`text` adds backend-neutral bind
    parameters, per-statement execution options, and bind-parameter /
    result-column typing, letting SQLAlchemy type constructs
    participate in a literally-specified statement.

    Bind parameters use the ``:name`` format::

        t = text("SELECT * FROM users WHERE id=:user_id")
        result = connection.execute(t, user_id=12)

    Typing for bind parameters is supplied via ``bindparams``, a list
    of :func:`bindparam` constructs::

        t = text("SELECT id FROM users WHERE updated_at>:updated",
                    bindparams=[bindparam('updated', DateTime())]
                )

    Result-column types are supplied via ``typemap``, keyed on the
    column names that the DBAPI reports in ``cursor.description``::

        t = text("SELECT id, name FROM users",
                    typemap={
                        'id':Integer,
                        'name':Unicode
                    }
                )

    :func:`text` is also used internally whenever a literal string
    forms part of a larger query, e.g. inside :func:`select()`,
    :func:`update()`, :func:`insert()` or :func:`delete()`, with the
    same bind parameter syntax::

        s = select([users.c.id, users.c.name]).where("id=:user_id")
        result = connection.execute(s, user_id=12)

    An explicit :func:`text` usually implies a full standalone
    statement; SQLAlchemy treats it as an :class:`.Executable`
    supporting :meth:`Executable.execution_options`, e.g. to opt in to
    "autocommit"::

        t = text("EXEC my_procedural_thing()").\\
                execution_options(autocommit=True)

    Note the usual "autocommit" behavior still applies: statements
    beginning with ``INSERT``, ``UPDATE``, ``DELETE`` or other
    backend-specific phrases are autocommit-eligible when no
    transaction is in progress.

    :param text: text of the SQL statement.  Use ``:<param>`` for bind
        parameters; they are compiled to the engine-specific format.
    :param autocommit: deprecated; use
        ``.execution_options(autocommit=<True|False>)``.
    :param bind: optional connection or engine for this text query.
    :param bindparams: list of :func:`bindparam()` instances defining
        types and/or initial values for the statement's bind
        parameters; key names must match those in the text.
    :param typemap: dictionary mapping result-column names to type
        objects used for post-processing; applies to any expression
        returning result sets.
    """
    return TextClause(text, bind=bind, *args, **kwargs)
def over(func, partition_by=None, order_by=None):
    """Produce an OVER clause against a function.

    Used with aggregate or so-called "window" functions, on backends
    that support window functions.  E.g.::

        from sqlalchemy import over
        over(func.row_number(), order_by='x')

    produces ``ROW_NUMBER() OVER(ORDER BY x)``.

    :param func: a :class:`.FunctionElement`, typically produced by
        :data:`~.expression.func`.
    :param partition_by: column element(s) or string(s) forming the
        PARTITION BY clause of the OVER construct.
    :param order_by: column element(s) or string(s) forming the
        ORDER BY clause of the OVER construct.

    Also available via the :meth:`.FunctionElement.over` method on
    :data:`~.expression.func` constructs.

    .. versionadded:: 0.7
    """
    return Over(func, partition_by=partition_by, order_by=order_by)
def null():
    """Return a :class:`Null` construct, compiling to SQL ``NULL``."""
    return Null()
def true():
    """Return a :class:`True_` construct, compiling to ``true`` or the
    target dialect's boolean equivalent.
    """
    return True_()
def false():
    """Return a :class:`False_` construct, compiling to ``false`` or
    the target dialect's boolean equivalent.
    """
    return False_()
class _FunctionGenerator(object):
    """Generate :class:`.Function` objects based on getattr calls."""

    def __init__(self, **opts):
        # accumulated dotted-name tokens, e.g. func.stats.yield_curve
        # builds up ['stats', 'yield_curve'].  NOTE: name-mangled to
        # _FunctionGenerator__names; __getattr__ below relies on this.
        self.__names = []
        self.opts = opts

    def __getattr__(self, name):
        """Return a new generator with *name* appended to the dotted path."""
        # passthru __ attributes; fixes pydoc
        if name.startswith('__'):
            try:
                return self.__dict__[name]
            except KeyError:
                raise AttributeError(name)
        elif name.endswith('_'):
            # trailing underscore allows reserved words, e.g. func.if_()
            name = name[0:-1]
        f = _FunctionGenerator(**self.opts)
        f.__names = list(self.__names) + [name]
        return f

    def __call__(self, *c, **kwargs):
        """Produce the actual function expression for the accumulated name."""
        o = self.opts.copy()
        o.update(kwargs)

        tokens = len(self.__names)
        if tokens == 2:
            package, fname = self.__names
        elif tokens == 1:
            # bare name, e.g. func.count() - look up in "_default" package
            package, fname = "_default", self.__names[0]
        else:
            # deeper dotted paths never map to a registered generic
            package = None

        # a registered "generic function" takes precedence over a
        # plain Function construct
        if package is not None and \
            package in functions._registry and \
            fname in functions._registry[package]:
            func = functions._registry[package][fname]
            return func(*c, **o)

        return Function(self.__names[-1],
                        packagenames=self.__names[0:-1], *c, **o)
# "func" global - i.e. func.count()
func = _FunctionGenerator()
"""Generate SQL function expressions.
:data:`.func` is a special object instance which generates SQL
functions based on name-based attributes, e.g.::
>>> print func.count(1)
count(:param_1)
The element is a column-oriented SQL element like any other, and is
used in that way::
>>> print select([func.count(table.c.id)])
SELECT count(sometable.id) FROM sometable
Any name can be given to :data:`.func`. If the function name is unknown to
SQLAlchemy, it will be rendered exactly as is. For common SQL functions
which SQLAlchemy is aware of, the name may be interpreted as a *generic
function* which will be compiled appropriately to the target database::
>>> print func.current_timestamp()
CURRENT_TIMESTAMP
To call functions which are present in dot-separated packages,
specify them in the same manner::
>>> print func.stats.yield_curve(5, 10)
stats.yield_curve(:yield_curve_1, :yield_curve_2)
SQLAlchemy can be made aware of the return type of functions to enable
type-specific lexical and result-based behavior. For example, to ensure
that a string-based function returns a Unicode value and is similarly
treated as a string in expressions, specify
:class:`~sqlalchemy.types.Unicode` as the type:
>>> print func.my_string(u'hi', type_=Unicode) + ' ' + \
... func.my_string(u'there', type_=Unicode)
my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
The object returned by a :data:`.func` call is usually an instance of
:class:`.Function`.
This object meets the "column" interface, including comparison and labeling
functions. The object can also be passed the :meth:`~.Connectable.execute`
method of a :class:`.Connection` or :class:`.Engine`, where it will be
wrapped inside of a SELECT statement first::
print connection.execute(func.current_timestamp()).scalar()
In a few exception cases, the :data:`.func` accessor
will redirect a name to a built-in expression such as :func:`.cast`
or :func:`.extract`, as these names have well-known meaning
but are not exactly the same as "functions" from a SQLAlchemy
perspective.
.. versionadded:: 0.8 :data:`.func` can return non-function expression
constructs for common quasi-functional names like :func:`.cast`
and :func:`.extract`.
Functions which are interpreted as "generic" functions know how to
calculate their return type automatically. For a listing of known generic
functions, see :ref:`generic_functions`.
"""
# "modifier" global - i.e. modifier.distinct
# TODO: use UnaryExpression for this instead ?
modifier = _FunctionGenerator(group=False)
class _truncated_label(unicode):
    """A unicode subclass used to identify symbolic names that may
    require truncation (e.g. to fit a dialect's identifier length).
    """

    def apply_map(self, map_):
        # plain truncated labels carry no anonymous tokens to
        # interpolate; mapping is a no-op
        return self


# for backwards compatibility in case
# someone is re-implementing the
# _truncated_identifier() sequence in a custom
# compiler
_generated_label = _truncated_label
class _anonymous_label(_truncated_label):
    """A unicode subclass used to identify anonymously
    generated names."""

    # concatenation with ordinary strings stays anonymous in
    # both directions
    def __add__(self, other):
        return _anonymous_label(
            unicode(self) +
            unicode(other))

    def __radd__(self, other):
        return _anonymous_label(
            unicode(other) +
            unicode(self))

    def apply_map(self, map_):
        # anonymous labels embed '%(...)s' tokens; interpolate them here
        return self % map_
def _as_truncated(value):
    """coerce the given value to :class:`._truncated_label`.

    Existing :class:`._truncated_label` and :class:`._anonymous_label`
    objects pass through unchanged.
    """
    if isinstance(value, _truncated_label):
        return value
    return _truncated_label(value)
def _string_or_unprintable(element):
    """Return *element* if it is already a string, else its str() form.

    Falls back to a placeholder when str() itself raises, so that
    diagnostic messages can always be produced.
    """
    if isinstance(element, basestring):
        return element
    else:
        try:
            return str(element)
        except Exception:
            # note: was a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit; only guard against errors
            # raised by the element's __str__/__repr__ here.
            return "unprintable element %r" % element
def _clone(element, **kw):
return element._clone()
def _expand_cloned(elements):
"""expand the given set of ClauseElements to be the set of all 'cloned'
predecessors.
"""
return itertools.chain(*[x._cloned_set for x in elements])
def _select_iterables(elements):
"""expand tables into individual columns in the
given list of column expressions.
"""
return itertools.chain(*[c._select_iterable for c in elements])
def _cloned_intersection(a, b):
    """return the intersection of sets a and b, counting
    any overlap between 'cloned' predecessors.

    The returned set is in terms of the entities present within 'a'.
    """
    overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    return set(
        elem for elem in a
        if overlap.intersection(elem._cloned_set)
    )
def _cloned_difference(a, b):
    """return the elements of set a whose 'cloned' predecessor sets do
    not overlap those of set b.
    """
    overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    return set(
        elem for elem in a
        if not overlap.intersection(elem._cloned_set)
    )
def _from_objects(*elements):
return itertools.chain(*[element._from_objects for element in elements])
def _labeled(element):
if not hasattr(element, 'name'):
return element.label(None)
else:
return element
# there is some inconsistency here between the usage of
# inspect() vs. checking for Visitable and __clause_element__.
# Ideally all functions here would derive from inspect(),
# however the inspect() versions add significant callcount
# overhead for critical functions like _interpret_as_column_or_from().
# Generally, the column-based functions are more performance critical
# and are fine just checking for __clause_element__(). it's only
# _interpret_as_from() where we'd like to be able to receive ORM entities
# that have no defined namespace, hence inspect() is needed there.
def _column_as_key(element):
    """Resolve *element* to a string key: strings pass through,
    otherwise unwrap __clause_element__() and return its .key
    (None when absent)."""
    if isinstance(element, basestring):
        return element
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    # getattr() with a default catches AttributeError just like the
    # try/except form it replaces
    return getattr(element, 'key', None)
def _clause_element_as_expr(element):
if hasattr(element, '__clause_element__'):
return element.__clause_element__()
else:
return element
def _literal_as_text(element):
    """Coerce *element* into a clause element, interpreting plain
    strings as textual SQL and None/bool as SQL constants."""
    if isinstance(element, Visitable):
        return element
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if isinstance(element, basestring):
        return TextClause(unicode(element))
    if isinstance(element, (util.NoneType, bool)):
        return _const_expr(element)
    raise exc.ArgumentError(
        "SQL expression object or string expected."
    )
def _no_literals(element):
    """Coerce *element* to a clause element, rejecting plain Python
    literals with an informative error."""
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if isinstance(element, Visitable):
        return element
    raise exc.ArgumentError("Ambiguous literal: %r.  Use the 'text()' "
                            "function to indicate a SQL expression "
                            "literal, or 'literal()' to indicate a "
                            "bound value." % element)
def _is_literal(element):
    """True when *element* is neither a Visitable nor adaptable via
    __clause_element__(), i.e. a plain Python value."""
    if isinstance(element, Visitable):
        return False
    return not hasattr(element, '__clause_element__')
def _only_column_elements_or_none(element, name):
    """Like _only_column_elements(), but passes None through."""
    return None if element is None \
        else _only_column_elements(element, name)
def _only_column_elements(element, name):
    """Coerce *element* to a ColumnElement, raising ArgumentError
    (mentioning argument *name*) when that is not possible."""
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    if isinstance(element, ColumnElement):
        return element
    raise exc.ArgumentError(
        "Column-based expression object expected for argument "
        "'%s'; got: '%s', type %s" % (name, element, type(element)))
def _literal_as_binds(element, name=None, type_=None):
    """Coerce *element* to a clause element, turning plain Python
    values into unique bound parameters.

    :param element: expression, clause-element-adaptable object, or
        plain value; None maps to SQL NULL.
    :param name: optional key for the generated bound parameter.
    :param type_: optional type for the generated bound parameter.
    """
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        if element is None:
            return null()
        else:
            # use BindParameter directly, consistent with the rest of
            # this module (_BindParamClause is the legacy alias for it).
            return BindParameter(name, element, type_=type_, unique=True)
    else:
        return element
def _interpret_as_column_or_from(element):
    """Coerce *element* into a column expression or FROM clause."""
    if isinstance(element, Visitable):
        return element
    elif hasattr(element, '__clause_element__'):
        return element.__clause_element__()

    insp = inspection.inspect(element, raiseerr=False)
    if insp is None:
        # not inspectable: None/bool become SQL constants; any other
        # plain value falls through to the literal_column() call below
        # via its string form
        if isinstance(element, (util.NoneType, bool)):
            return _const_expr(element)
    elif hasattr(insp, "selectable"):
        # inspectable object (e.g. ORM entity) exposing a selectable
        return insp.selectable
    return literal_column(str(element))
def _interpret_as_from(element):
    """Coerce *element* into a FROM clause, using inspect() so that
    ORM entities with no defined namespace are accepted."""
    insp = inspection.inspect(element, raiseerr=False)
    if insp is None:
        # plain strings are accepted as textual SQL fragments
        if isinstance(element, basestring):
            return TextClause(unicode(element))
    elif hasattr(insp, "selectable"):
        return insp.selectable
    raise exc.ArgumentError("FROM expression expected")
def _interpret_as_select(element):
    """Coerce *element* into a Select construct."""
    element = _interpret_as_from(element)
    if isinstance(element, Alias):
        # unwrap aliases back to the aliased selectable
        element = element.original
    if not isinstance(element, Select):
        # wrap non-SELECT FROM objects (e.g. Table) via .select()
        element = element.select()
    return element
def _const_expr(element):
    """Map None/False/True (or existing constant nodes) to their SQL
    constant constructs."""
    if isinstance(element, (Null, False_, True_)):
        return element
    if element is None:
        return null()
    if element is False:
        return false()
    if element is True:
        return true()
    raise exc.ArgumentError(
        "Expected None, False, or True"
    )
def _type_from_args(args):
    """Return the first non-NullType type among *args*' elements,
    defaulting to NullType when none is found."""
    for arg in args:
        if not isinstance(arg.type, sqltypes.NullType):
            return arg.type
    # original used for/else with no break, which is equivalent to
    # falling through here
    return sqltypes.NullType
def _corresponding_column_or_error(fromclause, column,
require_embedded=False):
c = fromclause.corresponding_column(column,
require_embedded=require_embedded)
if c is None:
raise exc.InvalidRequestError(
"Given column '%s', attached to table '%s', "
"failed to locate a corresponding column from table '%s'"
%
(column,
getattr(column, 'table', None),
fromclause.description)
)
return c
@util.decorator
def _generative(fn, *args, **kw):
    """Mark a method as generative."""
    # produce a copy of self via _generate(), apply the decorated
    # method's changes to the copy, and return the copy - leaving the
    # original object unmodified (args[0] is 'self').
    self = args[0]._generate()
    fn(self, *args[1:], **kw)
    return self
def is_column(col):
    """Return True when ``col`` is an instance of
    :class:`.ColumnElement`."""
    return isinstance(col, ColumnElement)
class ClauseElement(Visitable):
    """Base class for elements of a programmatically constructed SQL
    expression.
    """
    __visit_name__ = 'clause'

    # class-level defaults; instances and subclasses override as needed
    _annotations = {}
    supports_execution = False
    _from_objects = []
    bind = None
    _is_clone_of = None
    is_selectable = False
    is_clause_element = True

    def _clone(self):
        """Create a shallow copy of this ClauseElement.
        This method may be used by a generative API.  Its also used as
        part of the "deep" copy afforded by a traversal that combines
        the _copy_internals() method.
        """
        c = self.__class__.__new__(self.__class__)
        c.__dict__ = self.__dict__.copy()
        # reset memoized values so they are recomputed on the copy
        ClauseElement._cloned_set._reset(c)
        ColumnElement.comparator._reset(c)
        # this is a marker that helps to "equate" clauses to each other
        # when a Select returns its list of FROM clauses.  the cloning
        # process leaves around a lot of remnants of the previous clause
        # typically in the form of column expressions still attached to the
        # old table.
        c._is_clone_of = self
        return c

    @property
    def _constructor(self):
        """return the 'constructor' for this ClauseElement.
        This is for the purposes for creating a new object of
        this type.   Usually, its just the element's __class__.
        However, the "Annotated" version of the object overrides
        to return the class of its proxied element.
        """
        return self.__class__

    @util.memoized_property
    def _cloned_set(self):
        """Return the set consisting all cloned ancestors of this
        ClauseElement.
        Includes this ClauseElement.  This accessor tends to be used for
        FromClause objects to identify 'equivalent' FROM clauses, regardless
        of transformative operations.
        """
        s = util.column_set()
        f = self
        # walk the _is_clone_of chain back to the original element
        while f is not None:
            s.add(f)
            f = f._is_clone_of
        return s

    def __getstate__(self):
        # drop the clone marker when pickling; it would otherwise pin
        # the entire clone ancestry into the pickle
        d = self.__dict__.copy()
        d.pop('_is_clone_of', None)
        return d

    if util.jython:
        def __hash__(self):
            """Return a distinct hash code.
            ClauseElements may have special equality comparisons which
            makes us rely on them having unique hash codes for use in
            hash-based collections. Stock __hash__ doesn't guarantee
            unique values on platforms with moving GCs.
            """
            return id(self)

    def _annotate(self, values):
        """return a copy of this ClauseElement with annotations
        updated by the given dictionary.
        """
        return sqlutil.Annotated(self, values)

    def _with_annotations(self, values):
        """return a copy of this ClauseElement with annotations
        replaced by the given dictionary.
        """
        return sqlutil.Annotated(self, values)

    def _deannotate(self, values=None, clone=False):
        """return a copy of this :class:`.ClauseElement` with annotations
        removed.
        :param values: optional tuple of individual values
        to remove.
        """
        if clone:
            # clone is used when we are also copying
            # the expression for a deep deannotation
            return self._clone()
        else:
            # if no clone, since we have no annotations we return
            # self
            return self

    def unique_params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.
        Same functionality as ``params()``, except adds `unique=True`
        to affected bind parameters so that multiple statements can be
        used.
        """
        return self._params(True, optionaldict, kwargs)

    def params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.
        Returns a copy of this ClauseElement with :func:`bindparam()`
        elements replaced with values taken from the given dictionary::
          >>> clause = column('x') + bindparam('foo')
          >>> print clause.compile().params
          {'foo':None}
          >>> print clause.params({'foo':7}).compile().params
          {'foo':7}
        """
        return self._params(False, optionaldict, kwargs)

    def _params(self, unique, optionaldict, kwargs):
        # merge the single optional positional dictionary into kwargs
        if len(optionaldict) == 1:
            kwargs.update(optionaldict[0])
        elif len(optionaldict) > 1:
            raise exc.ArgumentError(
                "params() takes zero or one positional dictionary argument")

        def visit_bindparam(bind):
            if bind.key in kwargs:
                bind.value = kwargs[bind.key]
                # a parameter that received a value is no longer required
                bind.required = False
            if unique:
                bind._convert_to_unique()
        return cloned_traverse(self, {}, {'bindparam': visit_bindparam})

    def compare(self, other, **kw):
        """Compare this ClauseElement to the given ClauseElement.
        Subclasses should override the default behavior, which is a
        straight identity comparison.
        \**kw are arguments consumed by subclass compare() methods and
        may be used to modify the criteria for comparison.
        (see :class:`.ColumnElement`)
        """
        return self is other

    def _copy_internals(self, clone=_clone, **kw):
        """Reassign internal elements to be clones of themselves.
        Called during a copy-and-traverse operation on newly
        shallow-copied elements to create a deep copy.
        The given clone function should be used, which may be applying
        additional transformations to the element (i.e. replacement
        traversal, cloned traversal, annotations).
        """
        pass

    def get_children(self, **kwargs):
        """Return immediate child elements of this :class:`.ClauseElement`.
        This is used for visit traversal.
        \**kwargs may contain flags that change the collection that is
        returned, for example to return a subset of items in order to
        cut down on larger traversals, or to return child items from a
        different context (such as schema-level collections instead of
        clause-level).
        """
        return []

    def self_group(self, against=None):
        """Apply a 'grouping' to this :class:`.ClauseElement`.
        This method is overridden by subclasses to return a
        "grouping" construct, i.e. parenthesis.   In particular
        it's used by "binary" expressions to provide a grouping
        around themselves when placed into a larger expression,
        as well as by :func:`.select` constructs when placed into
        the FROM clause of another :func:`.select`.  (Note that
        subqueries should be normally created using the
        :func:`.Select.alias` method, as many platforms require
        nested SELECT statements to be named).
        As expressions are composed together, the application of
        :meth:`self_group` is automatic - end-user code should never
        need to use this method directly.  Note that SQLAlchemy's
        clause constructs take operator precedence into account -
        so parenthesis might not be needed, for example, in
        an expression like ``x OR (y AND z)`` - AND takes precedence
        over OR.
        The base :meth:`self_group` method of :class:`.ClauseElement`
        just returns self.
        """
        return self

    def compile(self, bind=None, dialect=None, **kw):
        """Compile this SQL expression.
        The return value is a :class:`~.Compiled` object.
        Calling ``str()`` or ``unicode()`` on the returned value will yield a
        string representation of the result. The
        :class:`~.Compiled` object also can return a
        dictionary of bind parameter names and values
        using the ``params`` accessor.
        :param bind: An ``Engine`` or ``Connection`` from which a
            ``Compiled`` will be acquired. This argument takes precedence over
            this :class:`.ClauseElement`'s bound engine, if any.
        :param column_keys: Used for INSERT and UPDATE statements, a list of
            column names which should be present in the VALUES clause of the
            compiled statement. If ``None``, all columns from the target table
            object are rendered.
        :param dialect: A ``Dialect`` instance from which a ``Compiled``
            will be acquired. This argument takes precedence over the `bind`
            argument as well as this :class:`.ClauseElement`'s bound engine,
            if any.
        :param inline: Used for INSERT statements, for a dialect which does
            not support inline retrieval of newly generated primary key
            columns, will force the expression used to create the new primary
            key value to be rendered inline within the INSERT statement's
            VALUES clause. This typically refers to Sequence execution but may
            also refer to any server-side default generation function
            associated with a primary key `Column`.
        """
        # dialect resolution order: explicit dialect arg, then the
        # bind's dialect, then this element's own bind, else default
        if not dialect:
            if bind:
                dialect = bind.dialect
            elif self.bind:
                dialect = self.bind.dialect
                bind = self.bind
            else:
                dialect = default.DefaultDialect()
        return self._compiler(dialect, bind=bind, **kw)

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""
        return dialect.statement_compiler(dialect, self, **kw)

    def __str__(self):
        # NOTE: the Py2K/Py3K marker comments below drive SQLAlchemy's
        # 2to3 preprocessing - do not alter them.
        # Py3K
        #return unicode(self.compile())
        # Py2K
        return unicode(self.compile()).encode('ascii', 'backslashreplace')
        # end Py2K

    def __and__(self, other):
        # "clause & clause" produces AND
        return and_(self, other)

    def __or__(self, other):
        # "clause | clause" produces OR
        return or_(self, other)

    def __invert__(self):
        # "~clause" produces NOT
        return self._negate()

    def __nonzero__(self):
        # prevent accidental use of a clause in a Python boolean context
        raise TypeError("Boolean value of this clause is not defined")

    def _negate(self):
        # subclasses may provide a precomputed negation; otherwise wrap
        # in a NOT unary expression
        if hasattr(self, 'negation_clause'):
            return self.negation_clause
        else:
            return UnaryExpression(
                self.self_group(against=operators.inv),
                operator=operators.inv,
                negate=None)

    def __repr__(self):
        friendly = getattr(self, 'description', None)
        if friendly is None:
            return object.__repr__(self)
        else:
            return '<%s.%s at 0x%x; %s>' % (
                self.__module__, self.__class__.__name__, id(self), friendly)
# register ClauseElement with the sqlalchemy.inspection system so that
# inspect() on a clause element returns the element itself
inspection._self_inspects(ClauseElement)
class Immutable(object):
    """Mixin marking a ClauseElement as 'immutable': cloning returns
    the element itself and parameter replacement is disallowed."""

    def _clone(self):
        # immutable elements are shared, never copied
        return self

    def unique_params(self, *optionaldict, **kwargs):
        raise NotImplementedError("Immutable objects do not support copying")

    def params(self, *optionaldict, **kwargs):
        raise NotImplementedError("Immutable objects do not support copying")
class _DefaultColumnComparator(operators.ColumnOperators):
    """Defines comparison and math operations.

    See :class:`.ColumnOperators` and :class:`.Operators` for descriptions
    of all operations.
    """

    @util.memoized_property
    def type(self):
        # The comparator's type is that of the wrapped expression.
        return self.expr.type

    def operate(self, op, *other, **kwargs):
        # Dispatch through the ``operators`` table below: o[0] is the
        # implementation method, o[1:] are fixed extra positional
        # arguments (e.g. the negated operator for boolean comparisons).
        o = self.operators[op.__name__]
        return o[0](self, self.expr, op, *(other + o[1:]), **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        # Same dispatch as operate(), but flags the operands as reversed
        # (the wrapped expression sits on the right-hand side).
        o = self.operators[op.__name__]
        return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs)

    def _adapt_expression(self, op, other_comparator):
        """evaluate the return type of <self> <op> <othertype>,
        and apply any adaptations to the given operator.

        This method determines the type of a resulting binary expression
        given two source types and an operator.   For example, two
        :class:`.Column` objects, both of the type :class:`.Integer`, will
        produce a :class:`.BinaryExpression` that also has the type
        :class:`.Integer` when compared via the addition (``+``) operator.
        However, using the addition operator with an :class:`.Integer`
        and a :class:`.Date` object will produce a :class:`.Date`, assuming
        "days delta" behavior by the database (in reality, most databases
        other than Postgresql don't accept this particular operation).

        The method returns a tuple of the form <operator>, <type>.
        The resulting operator and type will be those applied to the
        resulting :class:`.BinaryExpression` as the final operator and the
        right-hand side of the expression.

        Note that only a subset of operators make usage of
        :meth:`._adapt_expression`,
        including math operators and user-defined operators, but not
        boolean comparison or special SQL keywords like MATCH or BETWEEN.
        """
        # Default policy: keep the operator, take the other side's type.
        return op, other_comparator.type

    def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
                         _python_is_types=(util.NoneType, bool),
                         **kwargs):
        # Build a boolean-typed BinaryExpression; None/True/False operands
        # are special-cased into IS / IS NOT or literal true/false.
        if isinstance(obj, _python_is_types + (Null, True_, False_)):

            # allow x ==/!= True/False to be treated as a literal.
            # this comes out to "== / != true/false" or "1/0" if those
            # constants aren't supported and works on all platforms
            if op in (operators.eq, operators.ne) and \
                    isinstance(obj, (bool, True_, False_)):
                return BinaryExpression(expr,
                                        obj,
                                        op,
                                        type_=sqltypes.BOOLEANTYPE,
                                        negate=negate, modifiers=kwargs)
            else:
                # all other None/True/False uses IS, IS NOT
                if op in (operators.eq, operators.is_):
                    return BinaryExpression(expr, _const_expr(obj),
                                            operators.is_,
                                            negate=operators.isnot)
                elif op in (operators.ne, operators.isnot):
                    return BinaryExpression(expr, _const_expr(obj),
                                            operators.isnot,
                                            negate=operators.is_)
                else:
                    raise exc.ArgumentError(
                        "Only '=', '!=', 'is_()', 'isnot()' operators can "
                        "be used with None/True/False")
        else:
            # coerce plain Python values into bind parameters / clauses
            obj = self._check_literal(expr, op, obj)

        if reverse:
            return BinaryExpression(obj,
                                    expr,
                                    op,
                                    type_=sqltypes.BOOLEANTYPE,
                                    negate=negate, modifiers=kwargs)
        else:
            return BinaryExpression(expr,
                                    obj,
                                    op,
                                    type_=sqltypes.BOOLEANTYPE,
                                    negate=negate, modifiers=kwargs)

    def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
                        **kw):
        # Math/concat operators: coerce the operand, order the sides, and
        # let _adapt_expression() pick the result type when not given.
        obj = self._check_literal(expr, op, obj)

        if reverse:
            left, right = obj, expr
        else:
            left, right = expr, obj

        if result_type is None:
            op, result_type = left.comparator._adapt_expression(
                op, right.comparator)

        return BinaryExpression(left, right, op, type_=result_type)

    def _scalar(self, expr, op, fn, **kw):
        # Unary modifiers such as desc()/asc(): just apply the factory.
        return fn(expr)

    def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw):
        # IN / NOT IN against either a selectable or a plain sequence.
        seq_or_selectable = _clause_element_as_expr(seq_or_selectable)

        if isinstance(seq_or_selectable, ScalarSelect):
            return self._boolean_compare(expr, op, seq_or_selectable,
                                         negate=negate_op)
        elif isinstance(seq_or_selectable, SelectBase):

            # TODO: if we ever want to support (x, y, z) IN (select x,
            # y, z from table), we would need a multi-column version of
            # as_scalar() to produce a multi- column selectable that
            # does not export itself as a FROM clause
            return self._boolean_compare(
                expr, op, seq_or_selectable.as_scalar(),
                negate=negate_op, **kw)
        elif isinstance(seq_or_selectable, (Selectable, TextClause)):
            return self._boolean_compare(expr, op, seq_or_selectable,
                                         negate=negate_op, **kw)

        # Handle non selectable arguments as sequences
        args = []
        for o in seq_or_selectable:
            if not _is_literal(o):
                if not isinstance(o, ColumnOperators):
                    raise exc.InvalidRequestError('in() function accept'
                            's either a list of non-selectable values, '
                            'or a selectable: %r' % o)
            elif o is None:
                o = null()
            else:
                o = expr._bind_param(op, o)
            args.append(o)

        if len(args) == 0:

            # Special case handling for empty IN's, behave like
            # comparison against zero row selectable.  We use != to
            # build the contradiction as it handles NULL values
            # appropriately, i.e. "not (x IN ())" should not return NULL
            # values for x.
            util.warn('The IN-predicate on "%s" was invoked with an '
                      'empty sequence. This results in a '
                      'contradiction, which nonetheless can be '
                      'expensive to evaluate. Consider alternative '
                      'strategies for improved performance.' % expr)
            if op is operators.in_op:
                return expr != expr
            else:
                return expr == expr

        return self._boolean_compare(expr, op,
                                     ClauseList(*args).self_group(against=op),
                                     negate=negate_op)

    def _unsupported_impl(self, expr, op, *arg, **kw):
        # Catch-all for operators that make no sense on column expressions.
        raise NotImplementedError("Operator '%s' is not supported on "
                                  "this expression" % op.__name__)

    def _neg_impl(self, expr, op, **kw):
        """See :meth:`.ColumnOperators.__neg__`."""
        return UnaryExpression(expr, operator=operators.neg)

    def _match_impl(self, expr, op, other, **kw):
        """See :meth:`.ColumnOperators.match`."""
        return self._boolean_compare(expr, operators.match_op,
                                     self._check_literal(expr, operators.match_op,
                                                         other))

    def _distinct_impl(self, expr, op, **kw):
        """See :meth:`.ColumnOperators.distinct`."""
        return UnaryExpression(expr, operator=operators.distinct_op,
                               type_=expr.type)

    def _between_impl(self, expr, op, cleft, cright, **kw):
        """See :meth:`.ColumnOperators.between`."""
        # BETWEEN renders as <expr> BETWEEN <cleft> AND <cright>; the two
        # bounds are packed into an ungrouped AND clause list.
        return BinaryExpression(
            expr,
            ClauseList(
                self._check_literal(expr, operators.and_, cleft),
                self._check_literal(expr, operators.and_, cright),
                operator=operators.and_,
                group=False),
            operators.between_op)

    def _collate_impl(self, expr, op, other, **kw):
        return collate(expr, other)

    # a mapping of operators with the method they use, along with
    # their negated operator for comparison operators
    operators = {
        "add": (_binary_operate,),
        "mul": (_binary_operate,),
        "sub": (_binary_operate,),
        "div": (_binary_operate,),
        "mod": (_binary_operate,),
        "truediv": (_binary_operate,),
        "custom_op": (_binary_operate,),
        "concat_op": (_binary_operate,),
        "lt": (_boolean_compare, operators.ge),
        "le": (_boolean_compare, operators.gt),
        "ne": (_boolean_compare, operators.eq),
        "gt": (_boolean_compare, operators.le),
        "ge": (_boolean_compare, operators.lt),
        "eq": (_boolean_compare, operators.ne),
        "like_op": (_boolean_compare, operators.notlike_op),
        "ilike_op": (_boolean_compare, operators.notilike_op),
        "notlike_op": (_boolean_compare, operators.like_op),
        "notilike_op": (_boolean_compare, operators.ilike_op),
        "contains_op": (_boolean_compare, operators.notcontains_op),
        "startswith_op": (_boolean_compare, operators.notstartswith_op),
        "endswith_op": (_boolean_compare, operators.notendswith_op),
        "desc_op": (_scalar, desc),
        "asc_op": (_scalar, asc),
        "nullsfirst_op": (_scalar, nullsfirst),
        "nullslast_op": (_scalar, nullslast),
        "in_op": (_in_impl, operators.notin_op),
        "notin_op": (_in_impl, operators.in_op),
        "is_": (_boolean_compare, operators.is_),
        "isnot": (_boolean_compare, operators.isnot),
        "collate": (_collate_impl,),
        "match_op": (_match_impl,),
        "distinct_op": (_distinct_impl,),
        "between_op": (_between_impl, ),
        "neg": (_neg_impl,),
        "getitem": (_unsupported_impl,),
        "lshift": (_unsupported_impl,),
        "rshift": (_unsupported_impl,),
    }

    def _check_literal(self, expr, operator, other):
        # Normalize the "other" side of an expression: pass ClauseElements
        # through, unwrap __clause_element__() / comparators, scalarize
        # selectables, and wrap plain values in bind parameters.
        if isinstance(other, (ColumnElement, TextClause)):
            if isinstance(other, BindParameter) and \
                    isinstance(other.type, sqltypes.NullType):
                # TODO: perhaps we should not mutate the incoming
                # bindparam() here and instead make a copy of it.
                # this might be the only place that we're mutating
                # an incoming construct.
                other.type = expr.type
            return other
        elif hasattr(other, '__clause_element__'):
            other = other.__clause_element__()
        elif isinstance(other, sqltypes.TypeEngine.Comparator):
            other = other.expr

        if isinstance(other, (SelectBase, Alias)):
            return other.as_scalar()
        elif not isinstance(other, (ColumnElement, TextClause)):
            return expr._bind_param(operator, other)
        else:
            return other
class ColumnElement(ClauseElement, ColumnOperators):
    """Represent a column-oriented SQL expression suitable for usage in the
    "columns" clause, WHERE clause etc. of a statement.

    While the most familiar kind of :class:`.ColumnElement` is the
    :class:`.Column` object, :class:`.ColumnElement` serves as the basis
    for any unit that may be present in a SQL expression, including
    the expressions themselves, SQL functions, bound parameters,
    literal expressions, keywords such as ``NULL``, etc.
    :class:`.ColumnElement` is the ultimate base class for all such elements.

    A :class:`.ColumnElement` provides the ability to generate new
    :class:`.ColumnElement`
    objects using Python expressions.  This means that Python operators
    such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
    and allow the instantiation of further :class:`.ColumnElement` instances
    which are composed from other, more fundamental :class:`.ColumnElement`
    objects.  For example, two :class:`.ColumnClause` objects can be added
    together with the addition operator ``+`` to produce
    a :class:`.BinaryExpression`.
    Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
    of :class:`.ColumnElement`::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print column('a') + column('b')
        a + b

    :class:`.ColumnElement` supports the ability to be a *proxy* element,
    which indicates that the :class:`.ColumnElement` may be associated with
    a :class:`.Selectable` which was derived from another :class:`.Selectable`.
    An example of a "derived" :class:`.Selectable` is an :class:`.Alias` of a
    :class:`~sqlalchemy.schema.Table`.  For the ambitious, an in-depth
    discussion of this concept can be found at
    `Expression Transformations <http://techspot.zzzeek.org/2008/01/23/expression-transformations/>`_.

    """

    __visit_name__ = 'column'

    # class-level defaults; subclasses override as appropriate
    primary_key = False
    foreign_keys = []
    quote = None
    _label = None
    _key_label = None
    _alt_names = ()

    @util.memoized_property
    def type(self):
        # default type for elements that don't establish one
        return sqltypes.NULLTYPE

    @util.memoized_property
    def comparator(self):
        # operator behavior is supplied by the type's comparator_factory
        return self.type.comparator_factory(self)

    def __getattr__(self, key):
        # Fall back to the comparator for unknown attributes, so that
        # type-specific accessors are reachable from the element itself.
        try:
            return getattr(self.comparator, key)
        except AttributeError:
            raise AttributeError(
                'Neither %r object nor %r object has an attribute %r' % (
                    type(self).__name__,
                    type(self.comparator).__name__,
                    key)
            )

    def operate(self, op, *other, **kwargs):
        # delegate operator application to the comparator
        return op(self.comparator, *other, **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        return op(other, self.comparator, **kwargs)

    def _bind_param(self, operator, obj):
        # wrap a plain Python value in an anonymous, uniquely-keyed bind
        # parameter whose type is coerced relative to this element's type
        return BindParameter(None, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type, unique=True)

    @property
    def expression(self):
        """Return a column expression.

        Part of the inspection interface; returns self.

        """
        return self

    @property
    def _select_iterable(self):
        return (self, )

    @util.memoized_property
    def base_columns(self):
        # the "root" columns of this element's proxy chain, i.e. those
        # members of proxy_set which themselves proxy nothing
        return util.column_set(c for c in self.proxy_set
                               if not hasattr(c, '_proxies'))

    @util.memoized_property
    def proxy_set(self):
        # transitive closure: this element plus everything it proxies
        s = util.column_set([self])
        if hasattr(self, '_proxies'):
            for c in self._proxies:
                s.update(c.proxy_set)
        return s

    def shares_lineage(self, othercolumn):
        """Return True if the given :class:`.ColumnElement`
        has a common ancestor to this :class:`.ColumnElement`."""

        return bool(self.proxy_set.intersection(othercolumn.proxy_set))

    def _compare_name_for_result(self, other):
        """Return True if the given column element compares to this one
        when targeting within a result row."""

        return hasattr(other, 'name') and hasattr(self, 'name') and \
            other.name == self.name

    def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw):
        """Create a new :class:`.ColumnElement` representing this
        :class:`.ColumnElement` as it appears in the select list of a
        descending selectable.

        """
        if name is None:
            name = self.anon_label
            # str(self) may fail for constructs that can't be compiled
            # standalone; fall back to the anonymous label as the key
            try:
                key = str(self)
            except exc.UnsupportedCompilationError:
                key = self.anon_label
        else:
            key = name

        co = ColumnClause(_as_truncated(name) if name_is_truncatable else name,
                          selectable,
                          type_=getattr(self,
                                        'type', None))
        co._proxies = [self]
        if selectable._is_clone_of is not None:
            co._is_clone_of = \
                selectable._is_clone_of.columns.get(key)
        selectable._columns[key] = co
        return co

    def compare(self, other, use_proxies=False, equivalents=None, **kw):
        """Compare this ColumnElement to another.

        Special arguments understood:

        :param use_proxies: when True, consider two columns that
          share a common base column as equivalent (i.e. shares_lineage())

        :param equivalents: a dictionary of columns as keys mapped to sets
          of columns. If the given "other" column is present in this
          dictionary, if any of the columns in the corresponding set() pass the
          comparison test, the result is True. This is used to expand the
          comparison to other columns that may be known to be equivalent to
          this one via foreign key or other criterion.

        """
        to_compare = (other, )
        if equivalents and other in equivalents:
            to_compare = equivalents[other].union(to_compare)

        for oth in to_compare:
            if use_proxies and self.shares_lineage(oth):
                return True
            elif hash(oth) == hash(self):
                return True
        else:
            # for/else: reached only after every candidate failed to match
            return False

    def label(self, name):
        """Produce a column label, i.e. ``<columnname> AS <name>``.

        This is a shortcut to the :func:`~.expression.label` function.

        if 'name' is None, an anonymous label name will be generated.

        """
        return Label(name, self, self.type)

    @util.memoized_property
    def anon_label(self):
        """provides a constant 'anonymous label' for this ColumnElement.

        This is a label() expression which will be named at compile time.
        The same label() is returned each time anon_label is called so
        that expressions can reference anon_label multiple times, producing
        the same label name at compile time.

        the compiler uses this function automatically at compile time
        for expressions that are known to be 'unnamed' like binary
        expressions and function calls.

        """
        # the id() of this element keeps the label unique per-instance
        return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self,
                                                'name', 'anon')))
class ColumnCollection(util.OrderedProperties):
    """An ordered dictionary that stores a list of ColumnElement
    instances.

    Overrides the ``__eq__()`` method to produce SQL clauses between
    sets of correlated columns.

    """

    def __init__(self, *cols):
        super(ColumnCollection, self).__init__()
        # keyed storage (_data) plus a parallel set (_all_cols) used for
        # fast identity/membership checks in contains_column()
        self._data.update((c.key, c) for c in cols)
        self.__dict__['_all_cols'] = util.column_set(self)

    def __str__(self):
        return repr([str(c) for c in self])

    def replace(self, column):
        """add the given column to this collection, removing unaliased
        versions of this column  as well as existing columns with the
        same key.

        e.g.::

            t = Table('sometable', metadata, Column('col1', Integer))
            t.columns.replace(Column('col1', Integer, key='columnone'))

        will remove the original 'col1' from the collection, and add
        the new column under the name 'columnone'.

        Used by schema.Column to override columns during table reflection.

        """
        # if a differently-keyed column with the same *name* exists,
        # drop the unaliased original
        if column.name in self and column.key != column.name:
            other = self[column.name]
            if other.name == other.key:
                del self._data[other.name]
                self._all_cols.remove(other)
        # replace any existing entry under the same key
        if column.key in self._data:
            self._all_cols.remove(self._data[column.key])
        self._all_cols.add(column)
        self._data[column.key] = column

    def add(self, column):
        """Add a column to this collection.

        The key attribute of the column will be used as the hash key
        for this dictionary.

        """
        self[column.key] = column

    def __delitem__(self, key):
        # direct deletion is not part of this collection's contract
        raise NotImplementedError()

    def __setattr__(self, key, object):
        raise NotImplementedError()

    def __setitem__(self, key, value):
        if key in self:

            # this warning is primarily to catch select() statements
            # which have conflicting column names in their exported
            # columns collection
            existing = self[key]
            if not existing.shares_lineage(value):
                util.warn('Column %r on table %r being replaced by '
                          '%r, which has the same key. Consider '
                          'use_labels for select() statements.' % (key,
                          getattr(existing, 'table', None), value))
            self._all_cols.remove(existing)
        # pop out memoized proxy_set as this
        # operation may very well be occurring
        # in a _make_proxy operation
        ColumnElement.proxy_set._reset(value)
        self._all_cols.add(value)
        self._data[key] = value

    def clear(self):
        self._data.clear()
        self._all_cols.clear()

    def remove(self, column):
        del self._data[column.key]
        self._all_cols.remove(column)

    def update(self, value):
        # rebuild _all_cols wholesale so it stays in sync with _data
        self._data.update(value)
        self._all_cols.clear()
        self._all_cols.update(self._data.values())

    def extend(self, iter):
        self.update((c.key, c) for c in iter)

    # collections that define __eq__ producing SQL are not hashable
    __hash__ = None

    def __eq__(self, other):
        # produce an AND of equality clauses between lineage-sharing
        # column pairs, rather than a Python boolean
        l = []
        for c in other:
            for local in self:
                if c.shares_lineage(local):
                    l.append(c == local)
        return and_(*l)

    def __contains__(self, other):
        # membership is by key string only (basestring: Python 2)
        if not isinstance(other, basestring):
            raise exc.ArgumentError("__contains__ requires a string argument")
        return util.OrderedProperties.__contains__(self, other)

    def __setstate__(self, state):
        # rebuild the auxiliary set on unpickle
        self.__dict__['_data'] = state['_data']
        self.__dict__['_all_cols'] = util.column_set(self._data.values())

    def contains_column(self, col):
        # this has to be done via set() membership
        return col in self._all_cols

    def as_immutable(self):
        # shares the underlying _data/_all_cols with the immutable view
        return ImmutableColumnCollection(self._data, self._all_cols)
class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
    """A read-only view over a :class:`.ColumnCollection`.

    Shares the underlying ``_data`` dict and ``_all_cols`` set with the
    mutable collection it was created from; mutators are disabled.
    """

    def __init__(self, data, colset):
        # initialize the immutable property store with the shared dict,
        # then install the shared column set directly into __dict__
        # (attribute assignment is disabled on this class)
        util.ImmutableProperties.__init__(self, data)
        self.__dict__['_all_cols'] = colset

    # mutating operations raise on the immutable variant
    extend = util.ImmutableProperties._immutable
    remove = util.ImmutableProperties._immutable
class ColumnSet(util.ordered_column_set):
    """An ordered set of columns supporting SQL-producing comparison."""

    def contains_column(self, col):
        # plain set membership; present for interface symmetry with
        # ColumnCollection.contains_column()
        return col in self

    def extend(self, cols):
        # add each column in order, preserving ordered-set semantics
        for column in cols:
            self.add(column)

    def __add__(self, other):
        # concatenation yields a plain list of the combined members
        return list(self) + list(other)

    def __eq__(self, other):
        # produce an AND of equality clauses between lineage-sharing
        # column pairs, rather than a Python boolean
        clauses = [c == local
                   for c in other
                   for local in self
                   if c.shares_lineage(local)]
        return and_(*clauses)

    def __hash__(self):
        # hash by ordered membership
        return hash(tuple(self))
class Selectable(ClauseElement):
    """Mark a class as being selectable."""

    __visit_name__ = 'selectable'

    is_selectable = True

    @property
    def selectable(self):
        # part of the inspection interface; a Selectable is its own
        # selectable
        return self
class FromClause(Selectable):
    """Represent an element that can be used within the ``FROM``
    clause of a ``SELECT`` statement.

    The most common forms of :class:`.FromClause` are the
    :class:`.Table` and the :func:`.select` constructs.  Key
    features common to all :class:`.FromClause` objects include:

    * a :attr:`.c` collection, which provides per-name access to a collection
      of :class:`.ColumnElement` objects.
    * a :attr:`.primary_key` attribute, which is a collection of all those
      :class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
    * Methods to generate various derivations of a "from" clause, including
      :meth:`.FromClause.alias`, :meth:`.FromClause.join`,
      :meth:`.FromClause.select`.

    """
    __visit_name__ = 'fromclause'
    named_with_column = False
    _hide_froms = []
    quote = None
    schema = None
    # group-expirable memoization: _reset_exported() expires the
    # "_columns" group (columns / primary_key / foreign_keys) at once
    _memoized_property = util.group_expirable_memoized_property(["_columns"])

    def count(self, whereclause=None, **params):
        """return a SELECT COUNT generated against this
        :class:`.FromClause`."""

        # prefer a primary key column as the COUNT target when available
        if self.primary_key:
            col = list(self.primary_key)[0]
        else:
            col = list(self.columns)[0]
        return select(
                    [func.count(col).label('tbl_row_count')],
                    whereclause,
                    from_obj=[self],
                    **params)

    def select(self, whereclause=None, **params):
        """return a SELECT of this :class:`.FromClause`.

        .. seealso::

            :func:`~.sql.expression.select` - general purpose
            method which allows for arbitrary column lists.

        """
        return select([self], whereclause, **params)

    def join(self, right, onclause=None, isouter=False):
        """return a join of this :class:`.FromClause` against another
        :class:`.FromClause`."""

        return Join(self, right, onclause, isouter)

    def outerjoin(self, right, onclause=None):
        """return an outer join of this :class:`.FromClause` against another
        :class:`.FromClause`."""

        return Join(self, right, onclause, True)

    def alias(self, name=None):
        """return an alias of this :class:`.FromClause`.

        This is shorthand for calling::

            from sqlalchemy import alias
            a = alias(self, name=name)

        See :func:`~.expression.alias` for details.

        """

        return Alias(self, name)

    def is_derived_from(self, fromclause):
        """Return True if this FromClause is 'derived' from the given
        FromClause.

        An example would be an Alias of a Table is derived from that Table.

        """
        # this is essentially an "identity" check in the base class.
        # Other constructs override this to traverse through
        # contained elements.
        return fromclause in self._cloned_set

    def _is_lexical_equivalent(self, other):
        """Return True if this FromClause and the other represent
        the same lexical identity.

        This tests if either one is a copy of the other, or
        if they are the same via annotation identity.

        """
        return self._cloned_set.intersection(other._cloned_set)

    def replace_selectable(self, old, alias):
        """replace all occurrences of FromClause 'old' with the given Alias
        object, returning a copy of this :class:`.FromClause`.

        """

        return sqlutil.ClauseAdapter(alias).traverse(self)

    def correspond_on_equivalents(self, column, equivalents):
        """Return corresponding_column for the given column, or if None
        search for a match in the given dictionary.

        """
        col = self.corresponding_column(column, require_embedded=True)
        # NOTE(review): ``col is None and col in equivalents`` tests
        # whether ``None`` is a key of ``equivalents``; it looks like
        # ``column in equivalents`` may have been intended here --
        # confirm against callers before changing.
        if col is None and col in equivalents:
            for equiv in equivalents[col]:
                nc = self.corresponding_column(equiv, require_embedded=True)
                if nc:
                    return nc
        return col

    def corresponding_column(self, column, require_embedded=False):
        """Given a :class:`.ColumnElement`, return the exported
        :class:`.ColumnElement` object from this :class:`.Selectable`
        which corresponds to that original
        :class:`~sqlalchemy.schema.Column` via a common ancestor
        column.

        :param column: the target :class:`.ColumnElement` to be matched

        :param require_embedded: only return corresponding columns for
         the given :class:`.ColumnElement`, if the given
         :class:`.ColumnElement` is actually present within a sub-element
         of this :class:`.FromClause`.  Normally the column will match if
         it merely shares a common ancestor with one of the exported
         columns of this :class:`.FromClause`.

        """

        def embedded(expanded_proxy_set, target_set):
            # True if every member of target_set (beyond the expanded
            # proxy set) still intersects it once clones are expanded
            for t in target_set.difference(expanded_proxy_set):
                if not set(_expand_cloned([t])
                           ).intersection(expanded_proxy_set):
                    return False
            return True

        # don't dig around if the column is locally present
        if self.c.contains_column(column):
            return column
        col, intersect = None, None
        target_set = column.proxy_set
        cols = self.c
        for c in cols:
            expanded_proxy_set = set(_expand_cloned(c.proxy_set))
            i = target_set.intersection(expanded_proxy_set)
            if i and (not require_embedded
                      or embedded(expanded_proxy_set, target_set)):
                if col is None:

                    # no corresponding column yet, pick this one.

                    col, intersect = c, i
                elif len(i) > len(intersect):

                    # 'c' has a larger field of correspondence than
                    # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
                    # matches a1.c.x->table.c.x better than
                    # selectable.c.x->table.c.x does.

                    col, intersect = c, i
                elif i == intersect:

                    # they have the same field of correspondence. see
                    # which proxy_set has fewer columns in it, which
                    # indicates a closer relationship with the root
                    # column. Also take into account the "weight"
                    # attribute which CompoundSelect() uses to give
                    # higher precedence to columns based on vertical
                    # position in the compound statement, and discard
                    # columns that have no reference to the target
                    # column (also occurs with CompoundSelect)

                    col_distance = util.reduce(operator.add,
                            [sc._annotations.get('weight', 1) for sc in
                             col.proxy_set if sc.shares_lineage(column)])
                    c_distance = util.reduce(operator.add,
                            [sc._annotations.get('weight', 1) for sc in
                             c.proxy_set if sc.shares_lineage(column)])
                    if c_distance < col_distance:
                        col, intersect = c, i
        return col

    @property
    def description(self):
        """a brief description of this FromClause.

        Used primarily for error message formatting.

        """
        return getattr(self, 'name', self.__class__.__name__ + " object")

    def _reset_exported(self):
        """delete memoized collections when a FromClause is cloned."""

        self._memoized_property.expire_instance(self)

    @_memoized_property
    def columns(self):
        """A named-based collection of :class:`.ColumnElement` objects
        maintained by this :class:`.FromClause`.

        The :attr:`.columns`, or :attr:`.c` collection, is the gateway
        to the construction of SQL expressions using table-bound or
        other selectable-bound columns::

            select([mytable]).where(mytable.c.somecolumn == 5)

        """

        if '_columns' not in self.__dict__:
            self._init_collections()
            self._populate_column_collection()
        return self._columns.as_immutable()

    @_memoized_property
    def primary_key(self):
        """Return the collection of Column objects which comprise the
        primary key of this FromClause."""

        # _init_collections() assigns self.primary_key as an instance
        # attribute; the final read returns that populated collection,
        # which the memoization then caches
        self._init_collections()
        self._populate_column_collection()
        return self.primary_key

    @_memoized_property
    def foreign_keys(self):
        """Return the collection of ForeignKey objects which this
        FromClause references."""

        # same pattern as primary_key: instance attribute is set by
        # _init_collections(), populated, then returned for memoization
        self._init_collections()
        self._populate_column_collection()
        return self.foreign_keys

    c = property(attrgetter('columns'),
            doc="An alias for the :attr:`.columns` attribute.")
    _select_iterable = property(attrgetter('columns'))

    def _init_collections(self):
        # must only run once per instance, before population
        assert '_columns' not in self.__dict__
        assert 'primary_key' not in self.__dict__
        assert 'foreign_keys' not in self.__dict__

        self._columns = ColumnCollection()
        self.primary_key = ColumnSet()
        self.foreign_keys = set()

    @property
    def _cols_populated(self):
        return '_columns' in self.__dict__

    def _populate_column_collection(self):
        """Called on subclasses to establish the .c collection.

        Each implementation has a different way of establishing
        this collection.

        """

    def _refresh_for_new_column(self, column):
        """Given a column added to the .c collection of an underlying
        selectable, produce the local version of that column, assuming this
        selectable ultimately should proxy this column.

        this is used to "ping" a derived selectable to add a new column
        to its .c. collection when a Column has been added to one of the
        Table objects it ultimately derives from.

        If the given selectable hasn't populated its .c. collection yet,
        it should at least pass on the message to the contained selectables,
        but it will return None.

        This method is currently used by Declarative to allow Table
        columns to be added to a partially constructed inheritance
        mapping that may have already produced joins.  The method
        isn't public right now, as the full span of implications
        and/or caveats aren't yet clear.

        It's also possible that this functionality could be invoked by
        default via an event, which would require that
        selectables maintain a weak referencing collection of all
        derivations.

        """
        if not self._cols_populated:
            return None
        elif column.key in self.columns and self.columns[column.key] is column:
            return column
        else:
            return None
class BindParameter(ColumnElement):
    """Represent a bind parameter.

    Public constructor is the :func:`bindparam()` function.

    """

    __visit_name__ = 'bindparam'
    quote = None

    # set True by crud-compilation paths for INSERT/UPDATE parameters
    _is_crud = False

    def __init__(self, key, value, type_=None, unique=False,
                 callable_=None,
                 isoutparam=False, required=False,
                 quote=None,
                 _compared_to_operator=None,
                 _compared_to_type=None):
        """Construct a BindParameter.

        :param key:
          the key for this bind param.  Will be used in the generated
          SQL statement for dialects that use named parameters.  This
          value may be modified when part of a compilation operation,
          if other :class:`BindParameter` objects exist with the same
          key, or if its length is too long and truncation is
          required.

        :param value:
          Initial value for this bind param.  This value may be
          overridden by the dictionary of parameters sent to statement
          compilation/execution.

        :param callable\_:
          A callable function that takes the place of "value".  The function
          will be called at statement execution time to determine the
          ultimate value.   Used for scenarios where the actual bind
          value cannot be determined at the point at which the clause
          construct is created, but embedded bind values are still desirable.

        :param type\_:
          A ``TypeEngine`` object that will be used to pre-process the
          value corresponding to this :class:`BindParameter` at
          execution time.

        :param unique:
          if True, the key name of this BindParamClause will be
          modified if another :class:`BindParameter` of the same name
          already has been located within the containing
          :class:`.ClauseElement`.

        :param quote:
          True if this parameter name requires quoting and is not
          currently known as a SQLAlchemy reserved word; this currently
          only applies to the Oracle backend.

        :param required:
          a value is required at execution time.

        :param isoutparam:
          if True, the parameter should be treated like a stored procedure
          "OUT" parameter.

        """
        # unique bind params embed id(self) into the key so that clones
        # and same-named params never collide at compile time
        if unique:
            self.key = _anonymous_label('%%(%d %s)s' % (id(self), key
                    or 'param'))
        else:
            self.key = key or _anonymous_label('%%(%d param)s'
                    % id(self))

        # identifying key that won't change across
        # clones, used to identify the bind's logical
        # identity
        self._identifying_key = self.key

        # key that was passed in the first place, used to
        # generate new keys
        self._orig_key = key or 'param'

        self.unique = unique
        self.value = value
        self.callable = callable_
        self.isoutparam = isoutparam
        self.required = required
        self.quote = quote
        if type_ is None:
            if _compared_to_type is not None:
                # let the other side of the comparison coerce a type
                self.type = \
                    _compared_to_type.coerce_compared_value(
                        _compared_to_operator, value)
            else:
                # infer from the Python type of the value
                self.type = sqltypes._type_map.get(type(value),
                        sqltypes.NULLTYPE)
        elif isinstance(type_, type):
            # a type class was passed; instantiate it
            self.type = type_()
        else:
            self.type = type_

    @property
    def effective_value(self):
        """Return the value of this bound parameter,
        taking into account if the ``callable`` parameter
        was set.

        The ``callable`` value will be evaluated
        and returned if present, else ``value``.

        """
        if self.callable:
            return self.callable()
        else:
            return self.value

    def _clone(self):
        c = ClauseElement._clone(self)
        # a cloned unique param gets a fresh key based on the clone's id,
        # but keeps the original _orig_key / _identifying_key
        if self.unique:
            c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key
                    or 'param'))
        return c

    def _convert_to_unique(self):
        if not self.unique:
            self.unique = True
            self.key = _anonymous_label('%%(%d %s)s' % (id(self),
                            self._orig_key or 'param'))

    def compare(self, other, **kw):
        """Compare this :class:`BindParameter` to the given
        clause."""

        # keys are deliberately not compared; logical equivalence is
        # type affinity plus value
        return isinstance(other, BindParameter) \
            and self.type._compare_type_affinity(other.type) \
            and self.value == other.value

    def __getstate__(self):
        """execute a deferred value for serialization purposes."""

        d = self.__dict__.copy()
        v = self.value
        if self.callable:
            # resolve the callable now; callables don't pickle reliably
            v = self.callable()
            d['callable'] = None
        d['value'] = v
        return d

    def __repr__(self):
        return 'BindParameter(%r, %r, type_=%r)' % (self.key,
                self.value, self.type)
class TypeClause(ClauseElement):
    """Handle a type keyword in a SQL statement.

    Used by the ``Case`` statement.

    """

    __visit_name__ = 'typeclause'

    def __init__(self, type):
        # store the type object this clause renders
        self.type = type
class Generative(object):
    """Allow a ClauseElement to generate itself via the
    @_generative decorator.

    """

    def _generate(self):
        # Produce a shallow per-call copy: a fresh instance of the same
        # class (bypassing __init__) whose __dict__ is a copy of ours,
        # so generative methods can mutate the copy safely.
        cls = self.__class__
        copied = cls.__new__(cls)
        copied.__dict__ = dict(self.__dict__)
        return copied
class Executable(Generative):
    """Mark a ClauseElement as supporting execution.

    :class:`.Executable` is a superclass for all "statement" types
    of objects, including :func:`select`, :func:`delete`, :func:`update`,
    :func:`insert`, :func:`text`.

    """

    # indicates to the engine/compiler that this construct may be executed
    supports_execution = True

    # per-statement execution options; an immutable dict so that union()
    # below produces a new mapping rather than mutating shared state
    _execution_options = util.immutabledict()

    # the Engine or Connection this statement is explicitly bound to, if any
    _bind = None

    @_generative
    def execution_options(self, **kw):
        """ Set non-SQL options for the statement which take effect during
        execution.

        Execution options can be set on a per-statement or
        per :class:`.Connection` basis.   Additionally, the
        :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide
        access to execution options which they in turn configure upon
        connections.

        The :meth:`execution_options` method is generative.  A new
        instance of this statement is returned that contains the options::

            statement = select([table.c.x, table.c.y])
            statement = statement.execution_options(autocommit=True)

        Note that only a subset of possible execution options can be applied
        to a statement - these include "autocommit" and "stream_results",
        but not "isolation_level" or "compiled_cache".
        See :meth:`.Connection.execution_options` for a full list of
        possible options.

        .. seealso::

            :meth:`.Connection.execution_options()`

            :meth:`.Query.execution_options()`

        """
        # these two options are connection/engine scoped; reject them
        # early with a pointer to the right API
        if 'isolation_level' in kw:
            raise exc.ArgumentError(
                "'isolation_level' execution option may only be specified "
                "on Connection.execution_options(), or "
                "per-engine using the isolation_level "
                "argument to create_engine()."
            )
        if 'compiled_cache' in kw:
            raise exc.ArgumentError(
                "'compiled_cache' execution option may only be specified "
                "on Connection.execution_options(), not per statement."
            )
        self._execution_options = self._execution_options.union(kw)

    def execute(self, *multiparams, **params):
        """Compile and execute this :class:`.Executable`.

        :raises: :class:`.exc.UnboundExecutionError` when no
         :class:`.Engine` or :class:`.Connection` can be located for
         this statement.
        """
        e = self.bind
        if e is None:
            label = getattr(self, 'description', self.__class__.__name__)
            # fix: the original literals concatenated without a space,
            # producing "...Engine.Use the .execute()..."
            msg = ('This %s is not directly bound to a Connection or Engine. '
                   'Use the .execute() method of a Connection or Engine '
                   'to execute this construct.' % label)
            raise exc.UnboundExecutionError(msg)
        return e._execute_clauseelement(self, multiparams, params)

    def scalar(self, *multiparams, **params):
        """Compile and execute this :class:`.Executable`, returning the
        result's scalar representation.

        """
        return self.execute(*multiparams, **params).scalar()

    @property
    def bind(self):
        """Returns the :class:`.Engine` or :class:`.Connection` to
        which this :class:`.Executable` is bound, or None if none found.

        This is a traversal which checks locally, then
        checks among the "from" clauses of associated objects
        until a bound engine or connection is found.

        """
        if self._bind is not None:
            return self._bind

        for f in _from_objects(self):
            if f is self:
                continue
            engine = f.bind
            if engine is not None:
                return engine
        else:
            # for/else: no bound engine found among the from objects
            return None
# Backwards-compatibility alias: external code may still refer to the
# old private name ``_Executable``; do not remove.
_Executable = Executable
class TextClause(Executable, ClauseElement):
    """Represent a literal SQL text fragment.

    Public constructor is the :func:`text()` function.
    """

    __visit_name__ = 'textclause'

    # Matches ":name" bind-parameter markers, skipping doubled colons
    # ("::", e.g. PostgreSQL casts) and backslash-escaped colons.
    _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)

    # text() constructs are executable; "autocommit" is determined by
    # parsing the statement text at execution time.
    _execution_options = \
        Executable._execution_options.union(
            {'autocommit': PARSE_AUTOCOMMIT})

    @property
    def _select_iterable(self):
        # a text fragment acts as its own single "column" in a
        # select()-like context
        return (self,)

    @property
    def selectable(self):
        return self

    # a raw text fragment contributes no FROM elements of its own
    _hide_froms = []

    def __init__(
        self,
        text='',
        bind=None,
        bindparams=None,
        typemap=None,
        autocommit=None,
    ):
        """Construct a new :class:`.TextClause`.

        :param text: the raw SQL string; ``:name`` markers are
          converted into bind parameters.
        :param bind: optional engine or connection to bind to.
        :param bindparams: optional list of :func:`.bindparam`
          constructs which override the automatically generated ones.
        :param typemap: optional mapping of result-column names to
          types.
        :param autocommit: deprecated; use
          ``.execution_options(autocommit=True)`` instead.
        """
        self._bind = bind
        self.bindparams = {}
        self.typemap = typemap
        if autocommit is not None:
            util.warn_deprecated('autocommit on text() is deprecated. '
                                 'Use .execution_options(autocommit=Tru'
                                 'e)')
            self._execution_options = \
                self._execution_options.union(
                    {'autocommit': autocommit})
        if typemap is not None:
            # coerce all given types to instances in place
            for key in typemap.keys():
                typemap[key] = sqltypes.to_instance(typemap[key])

        def repl(m):
            # record a bindparam() for each ":name" marker encountered,
            # leaving the marker itself in the text
            self.bindparams[m.group(1)] = bindparam(m.group(1))
            return ':%s' % m.group(1)

        # scan the string and search for bind parameter names, add them
        # to the list of bindparams
        self.text = self._bind_params_regex.sub(repl, text)
        if bindparams is not None:
            # explicit bindparams override the auto-generated ones
            for b in bindparams:
                self.bindparams[b.key] = b

    @property
    def type(self):
        if self.typemap is not None and len(self.typemap) == 1:
            # NOTE(review): this returns the sole typemap *key*, not its
            # value; if callers key the typemap by column name this
            # yields the name rather than the type — confirm intended.
            return list(self.typemap)[0]
        else:
            return sqltypes.NULLTYPE

    @property
    def comparator(self):
        return self.type.comparator_factory(self)

    def self_group(self, against=None):
        # parenthesize only when used as the target of an IN
        if against is operators.in_op:
            return Grouping(self)
        else:
            return self

    def _copy_internals(self, clone=_clone, **kw):
        self.bindparams = dict((b.key, clone(b, **kw))
                               for b in self.bindparams.values())

    def get_children(self, **kwargs):
        return self.bindparams.values()
class Null(ColumnElement):
    """The SQL ``NULL`` keyword.

    Instances are produced by the public :func:`null()` function.
    """

    __visit_name__ = 'null'

    def __init__(self):
        # NULL carries the "null" type singleton
        self.type = sqltypes.NULLTYPE

    def compare(self, other):
        # all NULL constructs are interchangeable
        return isinstance(other, Null)
class False_(ColumnElement):
    """The SQL ``false`` keyword.

    Instances are produced by the public :func:`false()` function.
    """

    __visit_name__ = 'false'

    def __init__(self):
        # renders as a boolean-typed expression
        self.type = sqltypes.BOOLEANTYPE

    def compare(self, other):
        # all "false" constructs are interchangeable
        return isinstance(other, False_)
class True_(ColumnElement):
    """The SQL ``true`` keyword.

    Instances are produced by the public :func:`true()` function.
    """

    __visit_name__ = 'true'

    def __init__(self):
        # renders as a boolean-typed expression
        self.type = sqltypes.BOOLEANTYPE

    def compare(self, other):
        # all "true" constructs are interchangeable
        return isinstance(other, True_)
class ClauseList(ClauseElement):
    """Describe a list of clauses, separated by an operator.

    By default, is comma-separated, such as a column listing.

    Keyword arguments:

    :param operator: separating operator; defaults to
      :data:`.operators.comma_op`.
    :param group: if True (the default), :meth:`.self_group` may wrap
      this list in a :class:`.Grouping` when operator precedence
      requires it.
    :param group_contents: if True (the default), each contained clause
      is self-grouped against the separating operator.

    ``None`` entries among the positional clauses are silently
    discarded.
    """
    __visit_name__ = 'clauselist'

    def __init__(self, *clauses, **kwargs):
        self.operator = kwargs.pop('operator', operators.comma_op)
        self.group = kwargs.pop('group', True)
        self.group_contents = kwargs.pop('group_contents', True)
        if self.group_contents:
            self.clauses = [
                _literal_as_text(clause).self_group(against=self.operator)
                for clause in clauses if clause is not None]
        else:
            self.clauses = [
                _literal_as_text(clause)
                for clause in clauses if clause is not None]

    def __iter__(self):
        return iter(self.clauses)

    def __len__(self):
        return len(self.clauses)

    @property
    def _select_iterable(self):
        return iter(self)

    def append(self, clause):
        # TODO: not sure if i like the 'group_contents' flag.  need to
        # define the difference between a ClauseList of ClauseLists,
        # and a "flattened" ClauseList of ClauseLists.  flatten()
        # method ?
        if self.group_contents:
            self.clauses.append(
                _literal_as_text(clause).self_group(against=self.operator))
        else:
            self.clauses.append(_literal_as_text(clause))

    def _copy_internals(self, clone=_clone, **kw):
        self.clauses = [clone(clause, **kw) for clause in self.clauses]

    def get_children(self, **kwargs):
        return self.clauses

    @property
    def _from_objects(self):
        # flatten the _from_objects of every contained clause
        return list(itertools.chain.from_iterable(
            c._from_objects for c in self.clauses))

    def self_group(self, against=None):
        if self.group and operators.is_precedent(self.operator, against):
            return Grouping(self)
        else:
            return self

    def compare(self, other, **kw):
        """Compare this :class:`.ClauseList` to the given
        :class:`.ClauseList`, including a comparison of all the clause
        items.
        """
        if not isinstance(other, ClauseList) and len(self.clauses) == 1:
            # a single-element list compares directly against a lone
            # clause element
            return self.clauses[0].compare(other, **kw)
        elif isinstance(other, ClauseList) and \
                len(self.clauses) == len(other.clauses):
            # pairwise comparison of members, then the operators
            for c1, c2 in zip(self.clauses, other.clauses):
                if not c1.compare(c2, **kw):
                    return False
            return self.operator == other.operator
        else:
            return False
class BooleanClauseList(ClauseList, ColumnElement):
    """A :class:`.ClauseList` which is also a column-level boolean
    expression, e.g. the result of conjunctions such as AND/OR.
    """
    __visit_name__ = 'clauselist'

    def __init__(self, *clauses, **kwargs):
        super(BooleanClauseList, self).__init__(*clauses, **kwargs)
        # defaults to Boolean unless an explicit type_ was supplied
        self.type = sqltypes.to_instance(
            kwargs.get('type_', sqltypes.Boolean))

    @property
    def _select_iterable(self):
        return (self, )

    def self_group(self, against=None):
        # an empty clause list never needs parenthesization
        if self.clauses:
            return super(BooleanClauseList, self).self_group(against=against)
        return self
class Tuple(ClauseList, ColumnElement):
    """Represent a SQL tuple expression, e.g. ``(a, b, c)``."""

    def __init__(self, *clauses, **kw):
        # coerce each member into a bind-capable expression
        coerced = [_literal_as_binds(c) for c in clauses]
        self.type = kw.pop('type_', None)
        if self.type is None:
            # derive a type from the coerced member expressions
            self.type = _type_from_args(coerced)
        super(Tuple, self).__init__(*coerced, **kw)

    @property
    def _select_iterable(self):
        return (self, )

    def _bind_param(self, operator, obj):
        # expand a composite comparison value into a tuple of binds
        params = [
            BindParameter(None, o, _compared_to_operator=operator,
                          _compared_to_type=self.type, unique=True)
            for o in obj
        ]
        return Tuple(*params).self_group()
class Case(ColumnElement):
    """Represent a SQL ``CASE`` expression.

    Produced by the :func:`.case` function; ``whens`` is a sequence of
    (condition, result) pairs or a dict-like mapping thereof, ``value``
    an optional expression to compare against, and ``else_`` the
    optional ``ELSE`` result.
    """
    __visit_name__ = 'case'

    def __init__(self, whens, value=None, else_=None):
        # accept a dict-like "whens"; fall through if it's already a
        # sequence of pairs
        try:
            whens = util.dictlike_iteritems(whens)
        except TypeError:
            pass

        if value is not None:
            # "value" form: conditions may be literals, coerced to binds
            whenlist = [
                (_literal_as_binds(c).self_group(),
                 _literal_as_binds(r)) for (c, r) in whens
            ]
        else:
            # no "value": conditions must be SQL expressions, not
            # plain literals
            whenlist = [
                (_no_literals(c).self_group(),
                 _literal_as_binds(r)) for (c, r) in whens
            ]

        # the overall type is taken from the last WHEN's result
        if whenlist:
            type_ = list(whenlist[-1])[-1].type
        else:
            type_ = None

        if value is None:
            self.value = None
        else:
            self.value = _literal_as_binds(value)

        self.type = type_
        self.whens = whenlist
        if else_ is not None:
            self.else_ = _literal_as_binds(else_)
        else:
            self.else_ = None

    def _copy_internals(self, clone=_clone, **kw):
        if self.value is not None:
            self.value = clone(self.value, **kw)
        self.whens = [(clone(x, **kw), clone(y, **kw))
                      for x, y in self.whens]
        if self.else_ is not None:
            self.else_ = clone(self.else_, **kw)

    def get_children(self, **kwargs):
        # yields value (if any), every when-pair, then else (if any)
        if self.value is not None:
            yield self.value
        for x, y in self.whens:
            yield x
            yield y
        if self.else_ is not None:
            yield self.else_

    @property
    def _from_objects(self):
        return list(itertools.chain(*[x._from_objects for x in
                                      self.get_children()]))
class FunctionElement(Executable, ColumnElement, FromClause):
    """Base for SQL function-oriented constructs.

    .. seealso::

        :class:`.Function` - named SQL function.

        :data:`.func` - namespace which produces registered or ad-hoc
        :class:`.Function` instances.

        :class:`.GenericFunction` - allows creation of registered function
        types.

    """

    packagenames = ()

    def __init__(self, *clauses, **kwargs):
        """Construct a :class:`.FunctionElement`.

        Positional arguments are coerced to bind parameters and
        collected into a single grouped :class:`.ClauseList` stored as
        ``self.clause_expr``.
        """
        args = [_literal_as_binds(c, self.name) for c in clauses]
        self.clause_expr = ClauseList(
            operator=operators.comma_op,
            group_contents=True, *args).\
            self_group()

    @property
    def columns(self):
        """Fulfill the 'columns' contract of :class:`.ColumnElement`.

        Returns a single-element list consisting of this object.

        """
        return [self]

    @util.memoized_property
    def clauses(self):
        """Return the underlying :class:`.ClauseList` which contains
        the arguments for this :class:`.FunctionElement`.

        """
        return self.clause_expr.element

    def over(self, partition_by=None, order_by=None):
        """Produce an OVER clause against this function.

        Used against aggregate or so-called "window" functions,
        for database backends that support window functions.

        The expression::

            func.row_number().over(order_by='x')

        is shorthand for::

            from sqlalchemy import over
            over(func.row_number(), order_by='x')

        See :func:`~.expression.over` for a full description.

        .. versionadded:: 0.7

        """
        return over(self, partition_by=partition_by, order_by=order_by)

    @property
    def _from_objects(self):
        return self.clauses._from_objects

    def get_children(self, **kwargs):
        return self.clause_expr,

    def _copy_internals(self, clone=_clone, **kw):
        self.clause_expr = clone(self.clause_expr, **kw)
        self._reset_exported()
        # invalidate the memoized "clauses" attribute so it re-derives
        # from the newly cloned clause_expr
        FunctionElement.clauses._reset(self)

    def select(self):
        """Produce a :func:`~.expression.select` construct
        against this :class:`.FunctionElement`.

        This is shorthand for::

            s = select([function_element])

        """
        s = select([self])
        if self._execution_options:
            # carry execution options set on the function itself onto
            # the produced select()
            s = s.execution_options(**self._execution_options)
        return s

    def scalar(self):
        """Execute this :class:`.FunctionElement` against an embedded
        'bind' and return a scalar value.

        This first calls :meth:`~.FunctionElement.select` to
        produce a SELECT construct.

        Note that :class:`.FunctionElement` can be passed to
        the :meth:`.Connectable.scalar` method of :class:`.Connection`
        or :class:`.Engine`.

        """
        return self.select().execute().scalar()

    def execute(self):
        """Execute this :class:`.FunctionElement` against an embedded
        'bind'.

        This first calls :meth:`~.FunctionElement.select` to
        produce a SELECT construct.

        Note that :class:`.FunctionElement` can be passed to
        the :meth:`.Connectable.execute` method of :class:`.Connection`
        or :class:`.Engine`.

        """
        return self.select().execute()

    def _bind_param(self, operator, obj):
        return BindParameter(None, obj, _compared_to_operator=operator,
                             _compared_to_type=self.type, unique=True)
class Function(FunctionElement):
    """Describe a named SQL function.

    See the superclass :class:`.FunctionElement` for a description
    of public methods.

    .. seealso::

        :data:`.func` - namespace which produces registered or ad-hoc
        :class:`.Function` instances.

        :class:`.GenericFunction` - allows creation of registered function
        types.

    """

    __visit_name__ = 'function'

    def __init__(self, name, *clauses, **kw):
        """Construct a :class:`.Function`.

        The :data:`.func` construct is normally used to construct
        new :class:`.Function` instances.

        """
        # self.name must be assigned before FunctionElement.__init__
        # runs, as the latter coerces clauses using self.name
        self.packagenames = kw.pop('packagenames', None) or []
        self.name = name
        self._bind = kw.get('bind', None)
        self.type = sqltypes.to_instance(kw.get('type_', None))

        FunctionElement.__init__(self, *clauses, **kw)

    def _bind_param(self, operator, obj):
        # bind parameters generated against this function are named
        # after the function itself
        return BindParameter(self.name, obj,
                             _compared_to_operator=operator,
                             _compared_to_type=self.type,
                             unique=True)
class Cast(ColumnElement):
    """Represent a SQL ``CAST`` expression."""

    __visit_name__ = 'cast'

    def __init__(self, clause, totype, **kwargs):
        # NOTE(review): **kwargs is accepted but ignored — presumably
        # legacy compatibility; confirm before removing.
        self.type = sqltypes.to_instance(totype)
        self.clause = _literal_as_binds(clause, None)
        self.typeclause = TypeClause(self.type)

    def _copy_internals(self, clone=_clone, **kw):
        self.clause = clone(self.clause, **kw)
        self.typeclause = clone(self.typeclause, **kw)

    def get_children(self, **kwargs):
        return (self.clause, self.typeclause)

    @property
    def _from_objects(self):
        return self.clause._from_objects
class Extract(ColumnElement):
    """Represent a SQL ``EXTRACT`` expression, extracting a named field
    from a date/time expression."""

    __visit_name__ = 'extract'

    def __init__(self, field, expr, **kwargs):
        # NOTE(review): **kwargs is accepted but ignored — presumably
        # legacy compatibility; confirm before removing.
        self.type = sqltypes.Integer()
        self.field = field
        self.expr = _literal_as_binds(expr, None)

    def _copy_internals(self, clone=_clone, **kw):
        self.expr = clone(self.expr, **kw)

    def get_children(self, **kwargs):
        return (self.expr,)

    @property
    def _from_objects(self):
        return self.expr._from_objects
class UnaryExpression(ColumnElement):
    """Define a 'unary' expression.

    A unary expression has a single column expression
    and an operator.  The operator can be placed on the left
    (where it is called the 'operator') or right (where it is called the
    'modifier') of the column expression.
    """
    __visit_name__ = 'unary'

    def __init__(self, element, operator=None, modifier=None,
                 type_=None, negate=None):
        self.operator = operator
        self.modifier = modifier
        # group the element against whichever side's operator is present
        self.element = _literal_as_text(element).self_group(
            against=self.operator or self.modifier)
        self.type = sqltypes.to_instance(type_)
        self.negate = negate

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return (self.element,)

    def compare(self, other, **kw):
        """Compare this :class:`UnaryExpression` against the given
        :class:`.ClauseElement`."""
        if not isinstance(other, UnaryExpression):
            return False
        return self.operator == other.operator and \
            self.modifier == other.modifier and \
            self.element.compare(other.element, **kw)

    def _negate(self):
        if self.negate is None:
            return super(UnaryExpression, self)._negate()
        # a dedicated negation operator exists; swap operator/negate
        return UnaryExpression(
            self.element,
            operator=self.negate,
            negate=self.operator,
            modifier=self.modifier,
            type_=self.type)

    def self_group(self, against=None):
        if self.operator and operators.is_precedent(self.operator,
                                                    against):
            return Grouping(self)
        return self
class BinaryExpression(ColumnElement):
    """Represent an expression that is ``LEFT <operator> RIGHT``.

    A :class:`.BinaryExpression` is generated automatically
    whenever two column expressions are used in a Python binary expresion::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print column('a') + column('b')
        a + b

    """

    __visit_name__ = 'binary'

    def __init__(self, left, right, operator, type_=None,
                 negate=None, modifiers=None):
        # allow compatibility with libraries that
        # refer to BinaryExpression directly and pass strings
        if isinstance(operator, basestring):
            operator = operators.custom_op(operator)
        # retain the un-coerced operands for __nonzero__
        self._orig = (left, right)
        self.left = _literal_as_text(left).self_group(against=operator)
        self.right = _literal_as_text(right).self_group(against=operator)
        self.operator = operator
        self.type = sqltypes.to_instance(type_)
        self.negate = negate

        if modifiers is None:
            self.modifiers = {}
        else:
            self.modifiers = modifiers

    def __nonzero__(self):
        # Py2K truth value: defined only for ==/!= expressions, where
        # truth is the hash comparison of the original operands
        if self.operator in (operator.eq, operator.ne):
            return self.operator(hash(self._orig[0]), hash(self._orig[1]))
        else:
            raise TypeError("Boolean value of this clause is not defined")

    @property
    def is_comparison(self):
        return operators.is_comparison(self.operator)

    @property
    def _from_objects(self):
        return self.left._from_objects + self.right._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.left = clone(self.left, **kw)
        self.right = clone(self.right, **kw)

    def get_children(self, **kwargs):
        return self.left, self.right

    def compare(self, other, **kw):
        """Compare this :class:`BinaryExpression` against the
        given :class:`BinaryExpression`."""

        return (
            isinstance(other, BinaryExpression) and
            self.operator == other.operator and
            (
                self.left.compare(other.left, **kw) and
                self.right.compare(other.right, **kw) or
                (
                    # a commutative operator also matches with the
                    # operands swapped
                    operators.is_commutative(self.operator) and
                    self.left.compare(other.right, **kw) and
                    self.right.compare(other.left, **kw)
                )
            )
        )

    def self_group(self, against=None):
        if operators.is_precedent(self.operator, against):
            return Grouping(self)
        else:
            return self

    def _negate(self):
        if self.negate is not None:
            # a dedicated negation operator exists (e.g. != for ==)
            return BinaryExpression(
                self.left,
                self.right,
                self.negate,
                negate=self.operator,
                type_=sqltypes.BOOLEANTYPE,
                modifiers=self.modifiers)
        else:
            return super(BinaryExpression, self)._negate()
class Exists(UnaryExpression):
    """Represent an ``EXISTS`` clause wrapping a scalar select."""

    __visit_name__ = UnaryExpression.__visit_name__

    # an EXISTS contributes no FROM elements to an enclosing statement
    _from_objects = []

    def __init__(self, *args, **kwargs):
        if args and isinstance(args[0], (SelectBase, ScalarSelect)):
            # a pre-built SELECT was passed in directly
            s = args[0]
        else:
            if not args:
                # default form: EXISTS (SELECT *)
                args = ([literal_column('*')],)
            s = select(*args, **kwargs).as_scalar().self_group()

        UnaryExpression.__init__(self, s, operator=operators.exists,
                                 type_=sqltypes.Boolean)

    def select(self, whereclause=None, **params):
        return select([self], whereclause, **params)

    def correlate(self, *fromclause):
        # generative: clone and apply correlate() to the inner select
        e = self._clone()
        e.element = self.element.correlate(*fromclause).self_group()
        return e

    def correlate_except(self, *fromclause):
        # generative: clone and apply correlate_except() to the inner
        # select
        e = self._clone()
        e.element = self.element.correlate_except(*fromclause).self_group()
        return e

    def select_from(self, clause):
        """return a new :class:`.Exists` construct, applying the given
        expression to the :meth:`.Select.select_from` method of the select
        statement contained.

        """
        e = self._clone()
        e.element = self.element.select_from(clause).self_group()
        return e

    def where(self, clause):
        """return a new exists() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.

        """
        e = self._clone()
        e.element = self.element.where(clause).self_group()
        return e
class Join(FromClause):
    """represent a ``JOIN`` construct between two :class:`.FromClause`
    elements.

    The public constructor function for :class:`.Join` is the module-level
    :func:`join()` function, as well as the :func:`join()` method available
    off all :class:`.FromClause` subclasses.

    """
    __visit_name__ = 'join'

    def __init__(self, left, right, onclause=None, isouter=False):
        """Construct a new :class:`.Join`.

        The usual entrypoint here is the :func:`~.expression.join`
        function or the :meth:`.FromClause.join` method of any
        :class:`.FromClause` object.

        :param left: left side of the join.
        :param right: right side; self-grouped so nested constructs
          render parenthesized.
        :param onclause: optional ON criterion; if omitted, one is
          derived via :meth:`._match_primaries`.
        :param isouter: if True, renders a LEFT OUTER JOIN.
        """
        self.left = _interpret_as_from(left)
        self.right = _interpret_as_from(right).self_group()

        if onclause is None:
            self.onclause = self._match_primaries(self.left, self.right)
        else:
            self.onclause = onclause

        self.isouter = isouter

    @property
    def description(self):
        return "Join object on %s(%d) and %s(%d)" % (
            self.left.description,
            id(self.left),
            self.right.description,
            id(self.right))

    def is_derived_from(self, fromclause):
        return fromclause is self or \
            self.left.is_derived_from(fromclause) or \
            self.right.is_derived_from(fromclause)

    def self_group(self, against=None):
        return FromGrouping(self)

    def _populate_column_collection(self):
        # the join exports the columns of both of its sides
        columns = [c for c in self.left.columns] + \
            [c for c in self.right.columns]

        # the primary key is the reduced set of PK columns, taking
        # equivalences stated by the ON clause into account
        self.primary_key.extend(sqlutil.reduce_columns(
            (c for c in columns if c.primary_key), self.onclause))
        self._columns.update((col._label, col) for col in columns)
        self.foreign_keys.update(itertools.chain(
            *[col.foreign_keys for col in columns]))

    def _refresh_for_new_column(self, column):
        # delegate to whichever side knows about the new column
        col = self.left._refresh_for_new_column(column)
        if col is None:
            col = self.right._refresh_for_new_column(column)
        if col is not None:
            if self._cols_populated:
                # keep already-exported collections in sync
                self._columns[col._label] = col
                self.foreign_keys.add(col)
                if col.primary_key:
                    self.primary_key.add(col)
            return col
        return None

    def _copy_internals(self, clone=_clone, **kw):
        self._reset_exported()
        self.left = clone(self.left, **kw)
        self.right = clone(self.right, **kw)
        self.onclause = clone(self.onclause, **kw)

    def get_children(self, **kwargs):
        return self.left, self.right, self.onclause

    def _match_primaries(self, left, right):
        # when the left side is itself a Join, prefer its right-most
        # element when locating a join condition
        if isinstance(left, Join):
            left_right = left.right
        else:
            left_right = None
        return sqlutil.join_condition(left, right, a_subset=left_right)

    def select(self, whereclause=None, **kwargs):
        """Create a :class:`.Select` from this :class:`.Join`.

        The equivalent long-hand form, given a :class:`.Join` object
        ``j``, is::

            from sqlalchemy import select
            j = select([j.left, j.right], **kw).\\
                where(whereclause).\\
                select_from(j)

        :param whereclause: the WHERE criterion that will be sent to
          the :func:`select()` function

        :param \**kwargs: all other kwargs are sent to the
          underlying :func:`select()` function.

        """
        collist = [self.left, self.right]

        return select(collist, whereclause, from_obj=[self], **kwargs)

    @property
    def bind(self):
        # a bound engine may come from either side of the join
        return self.left.bind or self.right.bind

    def alias(self, name=None):
        """return an alias of this :class:`.Join`.

        Used against a :class:`.Join` object,
        :meth:`~.Join.alias` calls the :meth:`~.Join.select`
        method first so that a subquery against a
        :func:`.select` construct is generated.
        the :func:`~expression.select` construct also has the
        ``correlate`` flag set to ``False`` and will not
        auto-correlate inside an enclosing :func:`~expression.select`
        construct.

        The equivalent long-hand form, given a :class:`.Join` object
        ``j``, is::

            from sqlalchemy import select, alias
            j = alias(
                select([j.left, j.right]).\\
                    select_from(j).\\
                    with_labels(True).\\
                    correlate(False),
                name=name
            )

        See :func:`~.expression.alias` for further details on
        aliases.

        """
        return self.select(use_labels=True, correlate=False).alias(name)

    @property
    def _hide_froms(self):
        # when this Join is in a statement's FROM list, its component
        # elements must not also be rendered as separate FROMs
        return itertools.chain(*[_from_objects(x.left, x.right)
                                 for x in self._cloned_set])

    @property
    def _from_objects(self):
        return [self] + \
            self.onclause._from_objects + \
            self.left._from_objects + \
            self.right._from_objects
class Alias(FromClause):
    """Represents an table or selectable alias (AS).

    Represents an alias, as typically applied to any table or
    sub-select within a SQL statement using the ``AS`` keyword (or
    without the keyword on certain databases such as Oracle).

    This object is constructed from the :func:`~.expression.alias` module level
    function as well as the :meth:`.FromClause.alias` method available on all
    :class:`.FromClause` subclasses.

    """

    __visit_name__ = 'alias'
    named_with_column = True

    def __init__(self, selectable, name=None):
        # unwrap nested aliases to locate the ultimate selectable
        baseselectable = selectable
        while isinstance(baseselectable, Alias):
            baseselectable = baseselectable.element
        self.original = baseselectable
        self.supports_execution = baseselectable.supports_execution
        if self.supports_execution:
            self._execution_options = baseselectable._execution_options
        self.element = selectable
        if name is None:
            # generate an anonymous label, seeded by the underlying
            # name when one is available
            if self.original.named_with_column:
                name = getattr(self.original, 'name', None)
            name = _anonymous_label('%%(%d %s)s' % (id(self), name
                                                    or 'anon'))
        self.name = name

    @property
    def description(self):
        # Py3K
        #return self.name
        # Py2K
        return self.name.encode('ascii', 'backslashreplace')
        # end Py2K

    def as_scalar(self):
        # delegate to the wrapped element; not all elements support it
        try:
            return self.element.as_scalar()
        except AttributeError:
            raise AttributeError("Element %s does not support "
                                 "'as_scalar()'" % self.element)

    def is_derived_from(self, fromclause):
        if fromclause in self._cloned_set:
            return True
        return self.element.is_derived_from(fromclause)

    def _populate_column_collection(self):
        # proxy each column of the underlying element onto this alias
        for col in self.element.columns:
            col._make_proxy(self)

    def _refresh_for_new_column(self, column):
        col = self.element._refresh_for_new_column(column)
        if col is not None:
            if not self._cols_populated:
                # columns not yet exported; nothing to refresh
                return None
            else:
                return col._make_proxy(self)
        else:
            return None

    def _copy_internals(self, clone=_clone, **kw):
        # don't apply anything to an aliased Table
        # for now.  May want to drive this from
        # the given **kw.
        if isinstance(self.element, TableClause):
            return
        self._reset_exported()
        self.element = clone(self.element, **kw)
        # re-derive "original" from the newly cloned element
        baseselectable = self.element
        while isinstance(baseselectable, Alias):
            baseselectable = baseselectable.element
        self.original = baseselectable

    def get_children(self, column_collections=True, **kw):
        if column_collections:
            for c in self.c:
                yield c
        yield self.element

    @property
    def _from_objects(self):
        return [self]

    @property
    def bind(self):
        return self.element.bind
class CTE(Alias):
    """Represent a Common Table Expression.

    The :class:`.CTE` object is obtained using the
    :meth:`.SelectBase.cte` method from any selectable.
    See that method for complete examples.

    .. versionadded:: 0.7.6

    """
    __visit_name__ = 'cte'

    def __init__(self, selectable,
                 name=None,
                 recursive=False,
                 _cte_alias=None,
                 _restates=frozenset()):
        self.recursive = recursive
        self._cte_alias = _cte_alias
        self._restates = _restates
        super(CTE, self).__init__(selectable, name=name)

    def alias(self, name=None):
        # an alias of a CTE tracks the original via _cte_alias
        return CTE(
            self.original,
            name=name,
            recursive=self.recursive,
            _cte_alias=self,
        )

    def union(self, other):
        return self._restate(self.original.union(other))

    def union_all(self, other):
        return self._restate(self.original.union_all(other))

    def _restate(self, selectable):
        # produce a new CTE over the given selectable, carrying this
        # CTE's name/recursive flag and recording self as restated
        return CTE(
            selectable,
            name=self.name,
            recursive=self.recursive,
            _restates=self._restates.union([self])
        )
class Grouping(ColumnElement):
    """Represent a grouping within a column expression"""

    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element
        self.type = getattr(element, 'type', sqltypes.NULLTYPE)

    @property
    def _label(self):
        return getattr(self.element, '_label', None) or self.anon_label

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return (self.element,)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # proxy all other attribute access to the contained element
        return getattr(self.element, attr)

    def __getstate__(self):
        # explicit pickle support is required due to __getattr__
        return {'element': self.element, 'type': self.type}

    def __setstate__(self, state):
        self.element = state['element']
        self.type = state['type']

    def compare(self, other, **kw):
        """Return True if the given :class:`.Grouping` compares to
        this one."""
        return isinstance(other, Grouping) and \
            self.element.compare(other.element)
class FromGrouping(FromClause):
    """Represent a grouping of a FROM clause"""
    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element

    def _init_collections(self):
        pass

    def get_children(self, **kwargs):
        return (self.element,)

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    @property
    def columns(self):
        return self.element.columns

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        # this could be
        # self.element.foreign_keys
        # see SelectableTest.test_join_condition
        return set()

    @property
    def _hide_froms(self):
        return self.element._hide_froms

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # proxy all other attribute access to the contained element
        return getattr(self.element, attr)

    def __getstate__(self):
        # explicit pickle support is required due to __getattr__
        return {'element': self.element}

    def __setstate__(self, state):
        self.element = state['element']
class Over(ColumnElement):
    """Represent an OVER clause.

    This is a special operator against a so-called
    "window" function, as well as any aggregate function,
    which produces results relative to the result set
    itself.  It's supported only by certain database
    backends.

    """
    __visit_name__ = 'over'

    order_by = None
    partition_by = None

    def __init__(self, func, partition_by=None, order_by=None):
        self.func = func
        if order_by is not None:
            self.order_by = ClauseList(*util.to_list(order_by))
        if partition_by is not None:
            self.partition_by = ClauseList(*util.to_list(partition_by))

    @util.memoized_property
    def type(self):
        # the OVER expression takes on the type of its function
        return self.func.type

    def get_children(self, **kwargs):
        children = (self.func, self.partition_by, self.order_by)
        return [c for c in children if c is not None]

    def _copy_internals(self, clone=_clone, **kw):
        self.func = clone(self.func, **kw)
        if self.partition_by is not None:
            self.partition_by = clone(self.partition_by, **kw)
        if self.order_by is not None:
            self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        froms = []
        for c in (self.func, self.partition_by, self.order_by):
            if c is not None:
                froms.extend(c._from_objects)
        return froms
class Label(ColumnElement):
    """Represents a column label (AS).

    Represent a label, as typically applied to any column-level
    element using the ``AS`` sql keyword.

    This object is constructed from the :func:`label()` module level
    function as well as the :func:`label()` method available on all
    :class:`.ColumnElement` subclasses.

    """

    __visit_name__ = 'label'

    def __init__(self, name, element, type_=None):
        """Construct a :class:`.Label`.

        :param name: label name; if falsy, an anonymous label is
          generated from the element's name.
        :param element: the column element being labeled; nested
          labels are collapsed to the innermost element.
        :param type_: optional type overriding that of the element.
        """
        while isinstance(element, Label):
            element = element.element
        if name:
            self.name = name
        else:
            # anonymous label, unique per construct instance
            self.name = _anonymous_label('%%(%d %s)s' % (id(self),
                                getattr(element, 'name', 'anon')))
        self.key = self._label = self._key_label = self.name
        self._element = element
        self._type = type_
        self.quote = element.quote
        self._proxies = [element]

    def __reduce__(self):
        return self.__class__, (self.name, self._element, self._type)

    @util.memoized_property
    def type(self):
        return sqltypes.to_instance(
            self._type or getattr(self._element, 'type', None)
        )

    @util.memoized_property
    def element(self):
        # the labeled element, grouped against the "AS" operator
        return self._element.self_group(against=operators.as_)

    def self_group(self, against=None):
        sub_element = self._element.self_group(against=against)
        if sub_element is not self._element:
            # grouping changed the element; re-label the grouped form
            return Label(self.name,
                         sub_element,
                         type_=self._type)
        else:
            return self

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        return self.element.foreign_keys

    def get_children(self, **kwargs):
        return self.element,

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _make_proxy(self, selectable, name=None, **kw):
        # proxy the inner element, then record this label as a proxy too
        e = self.element._make_proxy(selectable,
                                     name=name if name else self.name)
        e._proxies.append(self)
        if self._type is not None:
            e.type = self._type
        return e
class ColumnClause(Immutable, ColumnElement):
"""Represents a generic column expression from any textual string.
This includes columns associated with tables, aliases and select
statements, but also any arbitrary text. May or may not be bound
to an underlying :class:`.Selectable`.
:class:`.ColumnClause` is constructed by itself typically via
the :func:`~.expression.column` function. It may be placed directly
into constructs such as :func:`.select` constructs::
from sqlalchemy.sql import column, select
c1, c2 = column("c1"), column("c2")
s = select([c1, c2]).where(c1==5)
There is also a variant on :func:`~.expression.column` known
as :func:`~.expression.literal_column` - the difference is that
in the latter case, the string value is assumed to be an exact
expression, rather than a column name, so that no quoting rules
or similar are applied::
from sqlalchemy.sql import literal_column, select
s = select([literal_column("5 + 7")])
:class:`.ColumnClause` can also be used in a table-like
fashion by combining the :func:`~.expression.column` function
with the :func:`~.expression.table` function, to produce
a "lightweight" form of table metadata::
from sqlalchemy.sql import table, column
user = table("user",
column("id"),
column("name"),
column("description"),
)
The above construct can be created in an ad-hoc fashion and is
not associated with any :class:`.schema.MetaData`, unlike it's
more full fledged :class:`.schema.Table` counterpart.
:param text: the text of the element.
:param selectable: parent selectable.
:param type: :class:`.types.TypeEngine` object which can associate
this :class:`.ColumnClause` with a type.
:param is_literal: if True, the :class:`.ColumnClause` is assumed to
be an exact expression that will be delivered to the output with no
quoting rules applied regardless of case sensitive settings. the
:func:`literal_column()` function is usually used to create such a
:class:`.ColumnClause`.
"""
__visit_name__ = 'column'
onupdate = default = server_default = server_onupdate = None
_memoized_property = util.group_expirable_memoized_property()
def __init__(self, text, selectable=None, type_=None, is_literal=False):
self.key = self.name = text
self.table = selectable
self.type = sqltypes.to_instance(type_)
self.is_literal = is_literal
def _compare_name_for_result(self, other):
if self.is_literal or \
self.table is None or \
not hasattr(other, 'proxy_set') or (
isinstance(other, ColumnClause) and other.is_literal
):
return super(ColumnClause, self).\
_compare_name_for_result(other)
else:
return other.proxy_set.intersection(self.proxy_set)
def _get_table(self):
return self.__dict__['table']
def _set_table(self, table):
self._memoized_property.expire_instance(self)
self.__dict__['table'] = table
table = property(_get_table, _set_table)
@_memoized_property
def _from_objects(self):
t = self.table
if t is not None:
return [t]
else:
return []
@util.memoized_property
def description(self):
# Py3K
#return self.name
# Py2K
return self.name.encode('ascii', 'backslashreplace')
# end Py2K
@_memoized_property
def _key_label(self):
    """Label generated from ``key`` when it differs from ``name``."""
    if self.key == self.name:
        return self._label
    return self._gen_label(self.key)
@_memoized_property
def _label(self):
    # fully qualified "tablename_columnname" label (or the plain name
    # when unbound; None when literal -- see _gen_label)
    return self._gen_label(self.name)
def _gen_label(self, name):
    """Build a disambiguated label for ``name``.

    Literal columns are never labeled.  When bound to a named table,
    the label is ``[schema_]tablename_name``, uniquified against the
    table's existing columns with a numeric suffix.  Unbound columns
    just use ``name``.
    """
    if self.is_literal:
        return None
    t = self.table
    if t is None or not t.named_with_column:
        return name

    schema = getattr(t, 'schema', None)
    if schema:
        label = schema.replace('.', '_') + "_" + t.name + "_" + name
    else:
        label = t.name + "_" + name

    # ensure the label name doesn't conflict with that of an existing
    # column of the table
    if label in t.c:
        candidate = label
        counter = 1
        while candidate in t.c:
            candidate = label + "_" + str(counter)
            counter += 1
        label = candidate

    return _as_truncated(label)
def _bind_param(self, operator, obj):
    # literal values compared against this column become unique bind
    # parameters named after the column and typed to match it
    return BindParameter(self.name, obj,
                         _compared_to_operator=operator,
                         _compared_to_type=self.type,
                         unique=True)
def _make_proxy(self, selectable, name=None, attach=True,
                name_is_truncatable=False, **kw):
    """Create a copy of this column "proxied" against ``selectable``.

    Used when a select()/alias() exports its column collection.
    """
    # propagate the "is_literal" flag only if we are keeping our name,
    # otherwise its considered to be a label
    is_literal = self.is_literal and (name is None or name == self.name)
    c = self._constructor(
        _as_truncated(name or self.name) if \
            name_is_truncatable else \
            (name or self.name),
        selectable=selectable,
        type_=self.type,
        is_literal=is_literal
    )
    if name is None:
        c.key = self.key
    # record lineage so corresponding_column() can match the proxy
    # back to this column
    c._proxies = [self]
    if selectable._is_clone_of is not None:
        c._is_clone_of = \
            selectable._is_clone_of.columns.get(c.key)

    if attach:
        selectable._columns[c.key] = c
    return c
class TableClause(Immutable, FromClause):
    """Represents a minimal "table" construct.

    The constructor for :class:`.TableClause` is the
    :func:`~.expression.table` function.  This produces
    a lightweight table object that has only a name and a
    collection of columns, which are typically produced
    by the :func:`~.expression.column` function::

        from sqlalchemy.sql import table, column

        user = table("user",
                column("id"),
                column("name"),
                column("description"),
        )

    The :class:`.TableClause` construct serves as the base for
    the more commonly used :class:`~.schema.Table` object, providing
    the usual set of :class:`~.expression.FromClause` services including
    the ``.c.`` collection and statement generation methods.

    It does **not** provide all the additional schema-level services
    of :class:`~.schema.Table`, including constraints, references to other
    tables, or support for :class:`.MetaData`-level services.  It's useful
    on its own as an ad-hoc construct used to generate quick SQL
    statements when a more fully fledged :class:`~.schema.Table`
    is not on hand.

    """

    __visit_name__ = 'table'

    named_with_column = True

    implicit_returning = False
    """:class:`.TableClause` doesn't support having a primary key or column
    -level defaults, so implicit returning doesn't apply."""

    _autoincrement_column = None
    """No PK or default support so no autoincrement column."""

    def __init__(self, name, *columns):
        super(TableClause, self).__init__()
        self.name = self.fullname = name
        self._columns = ColumnCollection()
        # no real PK/FK support; these stay empty but are present so
        # FromClause machinery can treat TableClause uniformly
        self.primary_key = ColumnSet()
        self.foreign_keys = set()
        for c in columns:
            self.append_column(c)

    def _init_collections(self):
        # collections are built eagerly in __init__; nothing to do here
        pass

    @util.memoized_property
    def description(self):
        # Py3K
        #return self.name
        # Py2K
        return self.name.encode('ascii', 'backslashreplace')
        # end Py2K

    def append_column(self, c):
        """Attach column ``c`` to this table's collection and re-parent it."""
        self._columns[c.key] = c
        c.table = self

    def get_children(self, column_collections=True, **kwargs):
        if column_collections:
            return [c for c in self.c]
        else:
            return []

    def count(self, whereclause=None, **params):
        """return a SELECT COUNT generated against this
        :class:`.TableClause`."""

        # count against the first primary key column if one was
        # assigned, else the first column of the table
        if self.primary_key:
            col = list(self.primary_key)[0]
        else:
            col = list(self.columns)[0]
        return select(
            [func.count(col).label('tbl_row_count')],
            whereclause,
            from_obj=[self],
            **params)

    def insert(self, values=None, inline=False, **kwargs):
        """Generate an :func:`.insert` construct against this
        :class:`.TableClause`.

        E.g.::

            table.insert().values(name='foo')

        See :func:`.insert` for argument and usage information.

        """

        return insert(self, values=values, inline=inline, **kwargs)

    def update(self, whereclause=None, values=None, inline=False, **kwargs):
        """Generate an :func:`.update` construct against this
        :class:`.TableClause`.

        E.g.::

            table.update().where(table.c.id==7).values(name='foo')

        See :func:`.update` for argument and usage information.

        """

        return update(self, whereclause=whereclause,
                      values=values, inline=inline, **kwargs)

    def delete(self, whereclause=None, **kwargs):
        """Generate a :func:`.delete` construct against this
        :class:`.TableClause`.

        E.g.::

            table.delete().where(table.c.id==7)

        See :func:`.delete` for argument and usage information.

        """

        return delete(self, whereclause, **kwargs)

    @property
    def _from_objects(self):
        return [self]
class SelectBase(Executable, FromClause):
    """Base class for :class:`.Select` and :class:`.CompoundSelect`."""

    # class-level defaults.  The shared empty ClauseList instances are
    # never mutated in place; append_order_by()/append_group_by() always
    # bind a fresh ClauseList onto the instance.
    _order_by_clause = ClauseList()
    _group_by_clause = ClauseList()
    _limit = None
    _offset = None

    def __init__(self,
                 use_labels=False,
                 for_update=False,
                 limit=None,
                 offset=None,
                 order_by=None,
                 group_by=None,
                 bind=None,
                 autocommit=None):
        self.use_labels = use_labels
        self.for_update = for_update
        if autocommit is not None:
            # legacy flag; routed into execution options instead
            util.warn_deprecated('autocommit on select() is '
                                 'deprecated. Use .execution_options(a'
                                 'utocommit=True)')
            self._execution_options = \
                self._execution_options.union(
                    {'autocommit': autocommit})
        if limit is not None:
            self._limit = util.asint(limit)
        if offset is not None:
            self._offset = util.asint(offset)
        self._bind = bind

        if order_by is not None:
            self._order_by_clause = ClauseList(*util.to_list(order_by))
        if group_by is not None:
            self._group_by_clause = ClauseList(*util.to_list(group_by))

    def as_scalar(self):
        """return a 'scalar' representation of this selectable, which can be
        used as a column expression.

        Typically, a select statement which has only one column in its
        columns clause is eligible to be used as a scalar expression.

        The returned object is an instance of
        :class:`ScalarSelect`.

        """
        return ScalarSelect(self)

    @_generative
    def apply_labels(self):
        """return a new selectable with the 'use_labels' flag set to True.

        This will result in column expressions being generated using labels
        against their table name, such as "SELECT somecolumn AS
        tablename_somecolumn". This allows selectables which contain multiple
        FROM clauses to produce a unique set of column names regardless of
        name conflicts among the individual FROM clauses.

        """
        self.use_labels = True

    def label(self, name):
        """return a 'scalar' representation of this selectable, embedded as a
        subquery with a label.

        .. seealso::

            :meth:`~.SelectBase.as_scalar`.

        """
        return self.as_scalar().label(name)

    def cte(self, name=None, recursive=False):
        """Return a new :class:`.CTE`, or Common Table Expression instance.

        Common table expressions are a SQL standard whereby SELECT
        statements can draw upon secondary statements specified along
        with the primary statement, using a clause called "WITH".
        Special semantics regarding UNION can also be employed to
        allow "recursive" queries, where a SELECT statement can draw
        upon the set of rows that have previously been selected.

        SQLAlchemy detects :class:`.CTE` objects, which are treated
        similarly to :class:`.Alias` objects, as special elements
        to be delivered to the FROM clause of the statement as well
        as to a WITH clause at the top of the statement.

        .. versionadded:: 0.7.6

        :param name: name given to the common table expression.  Like
         :meth:`._FromClause.alias`, the name can be left as ``None``
         in which case an anonymous symbol will be used at query
         compile time.
        :param recursive: if ``True``, will render ``WITH RECURSIVE``.
         A recursive common table expression is intended to be used in
         conjunction with UNION ALL in order to derive rows
         from those already selected.

        The following examples illustrate two examples from
        Postgresql's documentation at
        http://www.postgresql.org/docs/8.4/static/queries-with.html.

        Example 1, non recursive::

            from sqlalchemy import Table, Column, String, Integer, MetaData, \\
                select, func

            metadata = MetaData()

            orders = Table('orders', metadata,
                Column('region', String),
                Column('amount', Integer),
                Column('product', String),
                Column('quantity', Integer)
            )

            regional_sales = select([
                                orders.c.region,
                                func.sum(orders.c.amount).label('total_sales')
                            ]).group_by(orders.c.region).cte("regional_sales")


            top_regions = select([regional_sales.c.region]).\\
                    where(
                        regional_sales.c.total_sales >
                        select([
                            func.sum(regional_sales.c.total_sales)/10
                        ])
                    ).cte("top_regions")

            statement = select([
                        orders.c.region,
                        orders.c.product,
                        func.sum(orders.c.quantity).label("product_units"),
                        func.sum(orders.c.amount).label("product_sales")
                ]).where(orders.c.region.in_(
                    select([top_regions.c.region])
                )).group_by(orders.c.region, orders.c.product)

            result = conn.execute(statement).fetchall()

        Example 2, WITH RECURSIVE::

            from sqlalchemy import Table, Column, String, Integer, MetaData, \\
                select, func

            metadata = MetaData()

            parts = Table('parts', metadata,
                Column('part', String),
                Column('sub_part', String),
                Column('quantity', Integer),
            )

            included_parts = select([
                                parts.c.sub_part,
                                parts.c.part,
                                parts.c.quantity]).\\
                                where(parts.c.part=='our part').\\
                                cte(recursive=True)


            incl_alias = included_parts.alias()
            parts_alias = parts.alias()
            included_parts = included_parts.union_all(
                select([
                    parts_alias.c.part,
                    parts_alias.c.sub_part,
                    parts_alias.c.quantity
                ]).
                    where(parts_alias.c.part==incl_alias.c.sub_part)
            )

            statement = select([
                        included_parts.c.sub_part,
                        func.sum(included_parts.c.quantity).
                            label('total_quantity')
                    ]).\\
                    select_from(included_parts.join(parts,
                                included_parts.c.part==parts.c.part)).\\
                    group_by(included_parts.c.sub_part)

            result = conn.execute(statement).fetchall()

        .. seealso::

            :meth:`.orm.query.Query.cte` - ORM version of
            :meth:`.SelectBase.cte`.

        """
        return CTE(self, name=name, recursive=recursive)

    @_generative
    @util.deprecated('0.6',
                     message=":func:`.autocommit` is deprecated. Use "
                     ":func:`.Executable.execution_options` with the "
                     "'autocommit' flag.")
    def autocommit(self):
        """return a new selectable with the 'autocommit' flag set to
        True."""

        self._execution_options = \
            self._execution_options.union({'autocommit': True})

    def _generate(self):
        """Override the default _generate() method to also clear out
        exported collections."""

        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        # the copy must not share the parent's exported column
        # collection; it is regenerated lazily
        s._reset_exported()
        return s

    @_generative
    def limit(self, limit):
        """return a new selectable with the given LIMIT criterion
        applied."""

        self._limit = util.asint(limit)

    @_generative
    def offset(self, offset):
        """return a new selectable with the given OFFSET criterion
        applied."""

        self._offset = util.asint(offset)

    @_generative
    def order_by(self, *clauses):
        """return a new selectable with the given list of ORDER BY
        criterion applied.

        The criterion will be appended to any pre-existing ORDER BY
        criterion.

        """

        self.append_order_by(*clauses)

    @_generative
    def group_by(self, *clauses):
        """return a new selectable with the given list of GROUP BY
        criterion applied.

        The criterion will be appended to any pre-existing GROUP BY
        criterion.

        """

        self.append_group_by(*clauses)

    def append_order_by(self, *clauses):
        """Append the given ORDER BY criterion applied to this selectable.

        The criterion will be appended to any pre-existing ORDER BY
        criterion.

        This is an **in-place** mutation method; the
        :meth:`~.SelectBase.order_by` method is preferred, as it provides
        standard :term:`method chaining`.

        """
        # a single None argument resets the ORDER BY entirely
        if len(clauses) == 1 and clauses[0] is None:
            self._order_by_clause = ClauseList()
        else:
            if getattr(self, '_order_by_clause', None) is not None:
                clauses = list(self._order_by_clause) + list(clauses)
            self._order_by_clause = ClauseList(*clauses)

    def append_group_by(self, *clauses):
        """Append the given GROUP BY criterion applied to this selectable.

        The criterion will be appended to any pre-existing GROUP BY
        criterion.

        This is an **in-place** mutation method; the
        :meth:`~.SelectBase.group_by` method is preferred, as it provides
        standard :term:`method chaining`.

        """
        # a single None argument resets the GROUP BY entirely
        if len(clauses) == 1 and clauses[0] is None:
            self._group_by_clause = ClauseList()
        else:
            if getattr(self, '_group_by_clause', None) is not None:
                clauses = list(self._group_by_clause) + list(clauses)
            self._group_by_clause = ClauseList(*clauses)

    @property
    def _from_objects(self):
        return [self]
class ScalarSelect(Generative, Grouping):
    """Wraps a SELECT so that it acts as a column-level expression."""

    # a scalar select contributes no FROM elements to an enclosing query
    _from_objects = []

    def __init__(self, element):
        self.element = element
        self.type = element._scalar_type()

    @property
    def columns(self):
        raise exc.InvalidRequestError('Scalar Select expression has no '
                'columns; use this object directly within a '
                'column-level expression.')

    # ``.c`` mirrors ``.columns`` as on other selectables
    c = columns

    @_generative
    def where(self, crit):
        """Apply a WHERE clause to the SELECT statement referred to
        by this :class:`.ScalarSelect`.

        """
        self.element = self.element.where(crit)

    def self_group(self, **kwargs):
        # already parenthesized by virtue of being a Grouping
        return self
class CompoundSelect(SelectBase):
    """Forms the basis of ``UNION``, ``UNION ALL``, and other
    SELECT-based set operations.

    .. seealso::

        :func:`.union`

        :func:`.union_all`

        :func:`.intersect`

        :func:`.intersect_all`

        :func:`.except_`

        :func:`.except_all`

    """

    __visit_name__ = 'compound_select'

    # symbols identifying the set-operation keyword to the compiler
    UNION = util.symbol('UNION')
    UNION_ALL = util.symbol('UNION ALL')
    EXCEPT = util.symbol('EXCEPT')
    EXCEPT_ALL = util.symbol('EXCEPT ALL')
    INTERSECT = util.symbol('INTERSECT')
    INTERSECT_ALL = util.symbol('INTERSECT ALL')

    def __init__(self, keyword, *selects, **kwargs):
        self._auto_correlate = kwargs.pop('correlate', False)
        self.keyword = keyword
        self.selects = []

        numcols = None

        # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
        for n, s in enumerate(selects):
            s = _clause_element_as_expr(s)

            if not numcols:
                numcols = len(s.c)
            elif len(s.c) != numcols:
                # compares the current select against the first one
                raise exc.ArgumentError('All selectables passed to '
                        'CompoundSelect must have identical numbers of '
                        'columns; select #%d has %d columns, select '
                        '#%d has %d' % (1, len(self.selects[0].c), n
                        + 1, len(s.c)))

            # self_group() parenthesizes each embedded SELECT as needed
            self.selects.append(s.self_group(self))

        SelectBase.__init__(self, **kwargs)

    def _scalar_type(self):
        # scalar type is taken from the first SELECT of the set operation
        return self.selects[0]._scalar_type()

    def self_group(self, against=None):
        return FromGrouping(self)

    def is_derived_from(self, fromclause):
        for s in self.selects:
            if s.is_derived_from(fromclause):
                return True
        return False

    def _populate_column_collection(self):
        for cols in zip(*[s.c for s in self.selects]):

            # this is a slightly hacky thing - the union exports a
            # column that resembles just that of the *first* selectable.
            # to get at a "composite" column, particularly foreign keys,
            # you have to dig through the proxies collection which we
            # generate below.  We may want to improve upon this, such as
            # perhaps _make_proxy can accept a list of other columns
            # that are "shared" - schema.column can then copy all the
            # ForeignKeys in.  this would allow the union() to have all
            # those fks too.

            proxy = cols[0]._make_proxy(self,
                    name=cols[0]._label if self.use_labels else None,
                    key=cols[0]._key_label if self.use_labels else None)

            # hand-construct the "_proxies" collection to include all
            # derived columns place a 'weight' annotation corresponding
            # to how low in the list of select()s the column occurs, so
            # that the corresponding_column() operation can resolve
            # conflicts
            proxy._proxies = [c._annotate({'weight': i + 1}) for (i,
                              c) in enumerate(cols)]

    def _refresh_for_new_column(self, column):
        for s in self.selects:
            s._refresh_for_new_column(column)

        # if the exported column collection was never built, there is
        # nothing to refresh
        if not self._cols_populated:
            return None

        raise NotImplementedError("CompoundSelect constructs don't support "
                "addition of columns to underlying selectables")

    def _copy_internals(self, clone=_clone, **kw):
        self._reset_exported()
        self.selects = [clone(s, **kw) for s in self.selects]
        if hasattr(self, '_col_map'):
            # stale after cloning; regenerated on next column access
            del self._col_map
        for attr in ('_order_by_clause', '_group_by_clause'):
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr), **kw))

    def get_children(self, column_collections=True, **kwargs):
        return (column_collections and list(self.c) or []) \
            + [self._order_by_clause, self._group_by_clause] \
            + list(self.selects)

    def bind(self):
        # a directly-bound engine wins; otherwise fall back to the first
        # embedded select that has one
        if self._bind:
            return self._bind
        for s in self.selects:
            e = s.bind
            if e:
                return e
        else:
            return None

    def _set_bind(self, bind):
        self._bind = bind
    bind = property(bind, _set_bind)
class HasPrefixes(object):
    """Mixin supplying the generative :meth:`.prefix_with` method."""

    _prefixes = ()

    @_generative
    def prefix_with(self, *expr, **kw):
        """Add one or more expressions following the statement keyword, i.e.
        SELECT, INSERT, UPDATE, or DELETE. Generative.

        This is used to support backend-specific prefix keywords such as those
        provided by MySQL.

        E.g.::

            stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")

        Multiple prefixes can be specified by multiple calls
        to :meth:`.prefix_with`.

        :param \*expr: textual or :class:`.ClauseElement` construct which
         will be rendered following the INSERT, UPDATE, or DELETE
         keyword.
        :param \**kw: A single keyword 'dialect' is accepted.  This is an
         optional string dialect name which will
         limit rendering of this prefix to only that dialect.

        """
        dialect = kw.pop('dialect', None)
        if kw:
            raise exc.ArgumentError("Unsupported argument(s): %s" %
                                    ",".join(kw))
        self._setup_prefixes(expr, dialect)

    def _setup_prefixes(self, prefixes, dialect=None):
        # each prefix is stored as a (clause, dialect-or-None) pair
        coerced = tuple((_literal_as_text(p), dialect) for p in prefixes)
        self._prefixes = self._prefixes + coerced
class Select(HasPrefixes, SelectBase):
    """Represents a ``SELECT`` statement.

    .. seealso::

        :func:`~.expression.select` - the function which creates
        a :class:`.Select` object.

        :ref:`coretutorial_selecting` - Core Tutorial description
        of :func:`.select`.

    """

    __visit_name__ = 'select'

    _prefixes = ()
    # per-dialect hints: {(selectable, dialect_name): hint_text}
    _hints = util.immutabledict()
    # False, True, or a list of DISTINCT ON expressions
    _distinct = False
    # mapping of original FROM element -> cloned replacement,
    # established by _copy_internals()
    _from_cloned = None
    _correlate = ()
    _correlate_except = None
    _memoized_property = SelectBase._memoized_property
def __init__(self,
             columns,
             whereclause=None,
             from_obj=None,
             distinct=False,
             having=None,
             correlate=True,
             prefixes=None,
             **kwargs):
    """Construct a Select object.

    The public constructor for Select is the
    :func:`select` function; see that function for
    argument descriptions.

    Additional generative and mutator methods are available on the
    :class:`SelectBase` superclass.

    """
    self._auto_correlate = correlate
    if distinct is not False:
        if distinct is True:
            # plain DISTINCT
            self._distinct = True
        else:
            # DISTINCT ON (...) - list of column expressions
            self._distinct = [
                _literal_as_text(e)
                for e in util.to_list(distinct)
            ]

    if from_obj is not None:
        self._from_obj = util.OrderedSet(
            _interpret_as_from(f)
            for f in util.to_list(from_obj))
    else:
        self._from_obj = util.OrderedSet()

    try:
        cols_present = bool(columns)
    except TypeError:
        # NOTE(review): relies on bool() raising for the given object;
        # presumably guards against non-iterable ``columns`` - confirm
        raise exc.ArgumentError("columns argument to select() must "
                                "be a Python list or other iterable")

    if cols_present:
        self._raw_columns = []
        for c in columns:
            c = _interpret_as_column_or_from(c)
            if isinstance(c, ScalarSelect):
                # parenthesize scalar subqueries in the columns clause
                c = c.self_group(against=operators.comma_op)
            self._raw_columns.append(c)
    else:
        self._raw_columns = []

    if whereclause is not None:
        self._whereclause = _literal_as_text(whereclause)
    else:
        self._whereclause = None

    if having is not None:
        self._having = _literal_as_text(having)
    else:
        self._having = None

    if prefixes:
        self._setup_prefixes(prefixes)

    SelectBase.__init__(self, **kwargs)
@property
def _froms(self):
    # would love to cache this,
    # but there's just enough edge cases, particularly now that
    # declarative encourages construction of SQL expressions
    # without tables present, to just regen this each time.
    froms = []
    seen = set()
    translate = self._from_cloned

    def add(items):
        # accumulate FROM elements, de-duplicating across clone lineage
        for item in items:
            if item is self:
                raise exc.InvalidRequestError(
                    "select() construct refers to itself as a FROM")
            if translate and item in translate:
                # substitute the cloned version of this FROM element
                item = translate[item]
            if not seen.intersection(item._cloned_set):
                froms.append(item)
            seen.update(item._cloned_set)

    # FROM elements derive from the columns clause, the WHERE clause,
    # and any explicit select_from() entries, in that order
    add(_from_objects(*self._raw_columns))
    if self._whereclause is not None:
        add(_from_objects(self._whereclause))
    add(self._from_obj)

    return froms
def _get_display_froms(self, explicit_correlate_froms=None,
                       implicit_correlate_froms=None):
    """Return the full list of 'from' clauses to be displayed.

    Takes into account a set of existing froms which may be
    rendered in the FROM clause of enclosing selects; this Select
    may want to leave those absent if it is automatically
    correlating.

    """
    froms = self._froms

    # FROM entries concealed by others (e.g. tables contained in a JOIN)
    toremove = set(itertools.chain(*[
        _expand_cloned(f._hide_froms)
        for f in froms]))
    if toremove:
        # if we're maintaining clones of froms,
        # add the copies out to the toremove list.  only include
        # clones that are lexical equivalents.
        if self._from_cloned:
            toremove.update(
                self._from_cloned[f] for f in
                toremove.intersection(self._from_cloned)
                if self._from_cloned[f]._is_lexical_equivalent(f)
            )
        # filter out to FROM clauses not in the list,
        # using a list to maintain ordering
        froms = [f for f in froms if f not in toremove]

    # explicit correlate() entries: omit those that also appear in the
    # enclosing statement's FROM list
    if self._correlate:
        to_correlate = self._correlate
        if to_correlate:
            froms = [
                f for f in froms if f not in
                _cloned_intersection(
                    _cloned_intersection(froms,
                            explicit_correlate_froms or ()),
                    to_correlate
                )
            ]

    # correlate_except(): correlate everything *except* the given set
    if self._correlate_except is not None:
        froms = [
            f for f in froms if f not in
            _cloned_difference(
                _cloned_intersection(froms,
                        explicit_correlate_froms or ()),
                self._correlate_except
            )
        ]

    # auto-correlation: drop FROMs present in the enclosing statement;
    # the len(froms) > 1 guard keeps at least one FROM candidate
    if self._auto_correlate and \
            implicit_correlate_froms and \
            len(froms) > 1:
        froms = [
            f for f in froms if f not in
            _cloned_intersection(froms, implicit_correlate_froms)
        ]

        if not len(froms):
            raise exc.InvalidRequestError("Select statement '%s"
                    "' returned no FROM clauses due to "
                    "auto-correlation; specify "
                    "correlate(<tables>) to control "
                    "correlation manually." % self)

    return froms
def _scalar_type(self):
    """Type of the leading column expression, for scalar-select use."""
    leading = list(self._raw_columns[0]._select_iterable)
    return leading[0].type
@property
def froms(self):
    """Return the displayed list of FromClause elements."""
    # same as _get_display_froms() with no enclosing-statement context
    return self._get_display_froms()
@_generative
def with_hint(self, selectable, text, dialect_name='*'):
    """Add an indexing hint for the given selectable to this
    :class:`.Select`.

    The text of the hint is rendered in the appropriate
    location for the database backend in use, relative
    to the given :class:`.Table` or :class:`.Alias` passed as the
    ``selectable`` argument. The dialect implementation
    typically uses Python string substitution syntax
    with the token ``%(name)s`` to render the name of
    the table or alias. E.g. when using Oracle, the
    following::

        select([mytable]).\\
            with_hint(mytable, "+ index(%(name)s ix_mytable)")

    Would render SQL as::

        select /*+ index(mytable ix_mytable) */ ... from mytable

    The ``dialect_name`` option will limit the rendering of a particular
    hint to a particular backend. Such as, to add hints for both Oracle
    and Sybase simultaneously::

        select([mytable]).\\
            with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
            with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')

    """
    # keyed by (selectable, dialect) so multiple backends can carry
    # hints on the same statement; '*' applies to all dialects
    self._hints = self._hints.union(
        {(selectable, dialect_name): text})
@property
def type(self):
    # a SELECT is not a column expression; direct users to as_scalar()
    raise exc.InvalidRequestError("Select objects don't have a type.  "
                "Call as_scalar() on this Select object "
                "to return a 'scalar' version of this Select.")
@_memoized_property.method
def locate_all_froms(self):
    """return a Set of all FromClause elements referenced by this Select.

    This set is a superset of that returned by the ``froms`` property,
    which is specifically for those FromClause elements that would
    actually be rendered.

    """
    froms = self._froms
    # NOTE: despite the docstring wording, a list is returned here;
    # it includes FROMs reachable transitively from the direct list
    return froms + list(_from_objects(*froms))
@property
def inner_columns(self):
    """an iterator of all ColumnElement expressions which would
    be rendered into the columns clause of the resulting SELECT
    statement.

    """
    return _select_iterables(self._raw_columns)
def is_derived_from(self, fromclause):
    """True when this select is, or derives from, ``fromclause``."""
    if self in fromclause._cloned_set:
        return True
    return any(f.is_derived_from(fromclause)
               for f in self.locate_all_froms())
def _copy_internals(self, clone=_clone, **kw):
    """Apply ``clone`` to this Select's internal elements in place."""

    # Select() object has been cloned and probably adapted by the
    # given clone function.  Apply the cloning function to internal
    # objects

    # 1. keep a dictionary of the froms we've cloned, and what
    # they've become.  This is consulted later when we derive
    # additional froms from "whereclause" and the columns clause,
    # which may still reference the uncloned parent table.
    # as of 0.7.4 we also put the current version of _froms, which
    # gets cleared on each generation.  previously we were "baking"
    # _froms into self._from_obj.
    self._from_cloned = from_cloned = dict((f, clone(f, **kw))
            for f in self._from_obj.union(self._froms))

    # 3. update persistent _from_obj with the cloned versions.
    self._from_obj = util.OrderedSet(from_cloned[f] for f in
            self._from_obj)

    # the _correlate collection is done separately, what can happen
    # here is the same item is _correlate as in _from_obj but the
    # _correlate version has an annotation on it - (specifically
    # RelationshipProperty.Comparator._criterion_exists() does
    # this). Also keep _correlate liberally open with its previous
    # contents, as this set is used for matching, not rendering.
    self._correlate = set(clone(f) for f in
                          self._correlate).union(self._correlate)

    # 4. clone other things.  The difficulty here is that Column
    # objects are not actually cloned, and refer to their original
    # .table, resulting in the wrong "from" parent after a clone
    # operation.  Hence _from_cloned and _from_obj supersede what is
    # present here.
    self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
    for attr in '_whereclause', '_having', '_order_by_clause', \
            '_group_by_clause':
        if getattr(self, attr) is not None:
            setattr(self, attr, clone(getattr(self, attr), **kw))

    # erase exported column list, _froms collection,
    # etc.
    self._reset_exported()
def get_children(self, column_collections=True, **kwargs):
    """return child elements as per the ClauseElement specification."""
    children = list(self.columns) if column_collections else []
    children.extend(self._raw_columns)
    children.extend(self._froms)
    optional = (self._whereclause, self._having,
                self._order_by_clause, self._group_by_clause)
    children.extend(clause for clause in optional if clause is not None)
    return children
@_generative
def column(self, column):
    """return a new select() construct with the given column expression
    added to its columns clause.

    """
    self.append_column(column)
def reduce_columns(self, only_synonyms=True):
    """Return a new :func:`.select` construct with redundantly
    named, equivalently-valued columns removed from the columns clause.

    "Redundant" here means two columns where one refers to the
    other either based on foreign key, or via a simple equality
    comparison in the WHERE clause of the statement.  The primary purpose
    of this method is to automatically construct a select statement
    with all uniquely-named columns, without the need to use
    table-qualified labels as :meth:`.apply_labels` does.

    When columns are omitted based on foreign key, the referred-to
    column is the one that's kept.  When columns are omitted based on
    WHERE equivalence, the first column in the columns clause is the
    one that's kept.

    :param only_synonyms: when True, limit the removal of columns
     to those which have the same name as the equivalent.  Otherwise,
     all columns that are equivalent to another are removed.

    .. versionadded:: 0.8

    """
    return self.with_only_columns(
        sqlutil.reduce_columns(
            self.inner_columns,
            only_synonyms=only_synonyms,
            # the WHERE clause plus explicit FROMs provide the
            # equivalence information
            *(self._whereclause, ) + tuple(self._from_obj)
        )
    )
@_generative
def with_only_columns(self, columns):
    """Return a new :func:`.select` construct with its columns
    clause replaced with the given columns.

    .. versionchanged:: 0.7.3
        Due to a bug fix, this method has a slight
        behavioral change as of version 0.7.3.
        Prior to version 0.7.3, the FROM clause of
        a :func:`.select` was calculated upfront and as new columns
        were added; in 0.7.3 and later it's calculated
        at compile time, fixing an issue regarding late binding
        of columns to parent tables.  This changes the behavior of
        :meth:`.Select.with_only_columns` in that FROM clauses no
        longer represented in the new list are dropped,
        but this behavior is more consistent in
        that the FROM clauses are consistently derived from the
        current columns clause.  The original intent of this method
        is to allow trimming of the existing columns list to be fewer
        columns than originally present; the use case of replacing
        the columns list with an entirely different one hadn't
        been anticipated until 0.7.3 was released; the usage
        guidelines below illustrate how this should be done.

    This method is exactly equivalent to as if the original
    :func:`.select` had been called with the given columns
    clause.   I.e. a statement::

        s = select([table1.c.a, table1.c.b])
        s = s.with_only_columns([table1.c.b])

    should be exactly equivalent to::

        s = select([table1.c.b])

    This means that FROM clauses which are only derived
    from the column list will be discarded if the new column
    list no longer contains that FROM::

        >>> table1 = table('t1', column('a'), column('b'))
        >>> table2 = table('t2', column('a'), column('b'))
        >>> s1 = select([table1.c.a, table2.c.b])
        >>> print s1
        SELECT t1.a, t2.b FROM t1, t2
        >>> s2 = s1.with_only_columns([table2.c.b])
        >>> print s2
        SELECT t2.b FROM t1

    The preferred way to maintain a specific FROM clause
    in the construct, assuming it won't be represented anywhere
    else (i.e. not in the WHERE clause, etc.) is to set it using
    :meth:`.Select.select_from`::

        >>> s1 = select([table1.c.a, table2.c.b]).\\
        ...         select_from(table1.join(table2,
        ...                 table1.c.a==table2.c.a))
        >>> s2 = s1.with_only_columns([table2.c.b])
        >>> print s2
        SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a

    Care should also be taken to use the correct
    set of column objects passed to :meth:`.Select.with_only_columns`.
    Since the method is essentially equivalent to calling the
    :func:`.select` construct in the first place with the given
    columns, the columns passed to :meth:`.Select.with_only_columns`
    should usually be a subset of those which were passed
    to the :func:`.select` construct, not those which are available
    from the ``.c`` collection of that :func:`.select`.  That
    is::

        s = select([table1.c.a, table1.c.b]).select_from(table1)
        s = s.with_only_columns([table1.c.b])

    and **not**::

        # usually incorrect
        s = s.with_only_columns([s.c.b])

    The latter would produce the SQL::

        SELECT b
        FROM (SELECT t1.a AS a, t1.b AS b
        FROM t1), t1

    Since the :func:`.select` construct is essentially being
    asked to select both from ``table1`` as well as itself.

    """
    # discard the previously exported column collection; it will be
    # rebuilt from the new raw columns list
    self._reset_exported()
    rc = []
    for c in columns:
        c = _interpret_as_column_or_from(c)
        if isinstance(c, ScalarSelect):
            # parenthesize scalar subqueries in the columns clause
            c = c.self_group(against=operators.comma_op)
        rc.append(c)
    self._raw_columns = rc
@_generative
def where(self, whereclause):
    """return a new select() construct with the given expression added to
    its WHERE clause, joined to the existing clause via AND, if any.

    """

    self.append_whereclause(whereclause)
@_generative
def having(self, having):
    """return a new select() construct with the given expression added to
    its HAVING clause, joined to the existing clause via AND, if any.

    """
    self.append_having(having)
@_generative
def distinct(self, *expr):
    """Return a new select() construct which will apply DISTINCT to its
    columns clause.

    :param \*expr: optional column expressions.  When present,
     the Postgresql dialect will render a ``DISTINCT ON (<expressions>>)``
     construct.

    """
    if not expr:
        # plain DISTINCT
        self._distinct = True
    else:
        rendered = [_literal_as_text(e) for e in expr]
        if isinstance(self._distinct, list):
            # accumulate onto an existing DISTINCT ON list
            self._distinct = self._distinct + rendered
        else:
            self._distinct = rendered
@_generative
def select_from(self, fromclause):
    """return a new :func:`.select` construct with the
    given FROM expression
    merged into its list of FROM objects.

    E.g.::

        table1 = table('t1', column('a'))
        table2 = table('t2', column('b'))
        s = select([table1.c.a]).\\
            select_from(
                table1.join(table2, table1.c.a==table2.c.b)
            )

    The "from" list is a unique set on the identity of each element,
    so adding an already present :class:`.Table` or other selectable
    will have no effect.   Passing a :class:`.Join` that refers
    to an already present :class:`.Table` or other selectable will have
    the effect of concealing the presence of that selectable as
    an individual element in the rendered FROM list, instead
    rendering it into a JOIN clause.

    While the typical purpose of :meth:`.Select.select_from` is to
    replace the default, derived FROM clause with a join, it can
    also be called with individual table elements, multiple times
    if desired, in the case that the FROM clause cannot be fully
    derived from the columns clause::

        select([func.count('*')]).select_from(table1)

    """
    self.append_from(fromclause)
@_generative
def correlate(self, *fromclauses):
    r"""Return a new :class:`.Select` which will correlate the given
    FROM clauses to that of an enclosing :class:`.Select`.

    Calling this method turns off the :class:`.Select` object's
    default "auto-correlation": normally, FROM elements which also
    appear in an enclosing :class:`.Select`'s :term:`WHERE clause`,
    ORDER BY, HAVING or :term:`columns clause` are omitted from this
    object's :term:`FROM clause`.  Setting an explicit correlation
    collection fixes the FROM objects that may take part in that
    process, and applies through any level of nesting (0.8.2 and
    later), provided the link between the enclosed and enclosing
    :class:`.Select` goes via at least one WHERE/ORDER BY/HAVING/
    columns clause.

    If ``None`` is passed, this object correlates none of its FROM
    entries and all render unconditionally in the local FROM clause.

    :param \*fromclauses: one or more :class:`.FromClause` constructs,
     or other compatible constructs (i.e. ORM-mapped classes, accepted
     as of 0.8.0) to become the correlate collection.

    .. seealso::

        :meth:`.Select.correlate_except`

        :ref:`correlated_subqueries`
    """
    # raw docstring (r"") avoids the invalid \* escape sequence the
    # original non-raw docstring carried
    self._auto_correlate = False
    if fromclauses and fromclauses[0] is None:
        # correlate(None): disable correlation entirely
        self._correlate = ()
    else:
        self._correlate = set(self._correlate).union(
            _interpret_as_from(f) for f in fromclauses)
@_generative
def correlate_except(self, *fromclauses):
    r"""Return a new :class:`.Select` which will omit the given FROM
    clauses from the auto-correlation process.

    The elements specified here will unconditionally appear in this
    select's FROM list, while all other FROM elements remain subject
    to normal auto-correlation behaviors.  As of 0.8.2 the listed
    clauses are fully exempt from omission, and passing ``None``
    correctly restores auto-correlation for all FROM entries.

    :param \*fromclauses: one or more :class:`.FromClause` constructs,
     or other compatible constructs (i.e. ORM-mapped classes) to
     become the correlate-exception collection.

    .. seealso::

        :meth:`.Select.correlate`

        :ref:`correlated_subqueries`
    """
    # raw docstring (r"") avoids the invalid \* escape sequence the
    # original non-raw docstring carried
    self._auto_correlate = False
    if fromclauses and fromclauses[0] is None:
        self._correlate_except = ()
    else:
        self._correlate_except = set(self._correlate_except or ()).union(
            _interpret_as_from(f) for f in fromclauses)
def append_correlation(self, fromclause):
    """In-place version of :meth:`~.Select.correlate`; mutates this
    select() rather than returning a new construct (the generative
    method is preferred for standard :term:`method chaining`).
    """
    self._auto_correlate = False
    # NOTE(review): `fromclause` is iterated here, so callers appear
    # to pass a sequence of FROM elements despite the singular name —
    # confirm against call sites
    self._correlate = set(self._correlate).union(
        _interpret_as_from(f) for f in fromclause)

def append_column(self, column):
    """In-place version of :meth:`~.Select.column`; appends *column*
    to the columns clause of this select().
    """
    self._reset_exported()
    col = _interpret_as_column_or_from(column)
    if isinstance(col, ScalarSelect):
        # group scalar subqueries so they render parenthesized within
        # the comma-separated columns list
        col = col.self_group(against=operators.comma_op)
    self._raw_columns = self._raw_columns + [col]

def append_prefix(self, clause):
    """In-place version of :meth:`~.Select.prefix_with`; appends the
    given columns-clause prefix expression to this select().
    """
    self._prefixes = self._prefixes + (_literal_as_text(clause),)
def append_whereclause(self, whereclause):
    """In-place version of :meth:`~.Select.where`; ANDs the given
    expression onto this select()'s WHERE criterion.
    """
    self._reset_exported()
    clause = _literal_as_text(whereclause)
    if self._whereclause is None:
        self._whereclause = clause
    else:
        self._whereclause = and_(self._whereclause, clause)

def append_having(self, having):
    """In-place version of :meth:`~.Select.having`; ANDs the given
    expression onto this select()'s HAVING criterion.
    """
    clause = _literal_as_text(having)
    if self._having is None:
        self._having = clause
    else:
        self._having = and_(self._having, clause)
def append_from(self, fromclause):
    """In-place version of :meth:`~.Select.select_from`; merges the
    given FromClause expression into this select()'s FROM clause.
    """
    self._reset_exported()
    self._from_obj = self._from_obj.union(
        [_interpret_as_from(fromclause)])
@_memoized_property
def _columns_plus_names(self):
    # Return [(label_name, column), ...] for the raw columns clause.
    # With use_labels on, each column is named by its _label, falling
    # back to anon_label when that label collides with one already
    # seen in this SELECT; otherwise all names are None.
    if self.use_labels:
        names = set()

        def name_for_col(c):
            # expressions with no derivable label stay unnamed
            if c._label is None:
                return (None, c)
            name = c._label
            if name in names:
                # duplicate label within this SELECT -> anonymize
                name = c.anon_label
            else:
                names.add(name)
            return name, c

        return [
            name_for_col(c)
            for c in util.unique_list(_select_iterables(self._raw_columns))
        ]
    else:
        return [
            (None, c)
            for c in util.unique_list(_select_iterables(self._raw_columns))
        ]
def _populate_column_collection(self):
    # Build the exported .c collection by proxying each raw column
    # onto this select.
    for name, c in self._columns_plus_names:
        # plain text and other non-column elements cannot be proxied
        if not hasattr(c, '_make_proxy'):
            continue
        if name is None:
            key = None
        elif self.use_labels:
            key = c._key_label
            # avoid clobbering an existing key in the collection
            if key is not None and key in self.c:
                key = c.anon_label
        else:
            key = None
        c._make_proxy(self, key=key,
                      name=name,
                      name_is_truncatable=True)
def _refresh_for_new_column(self, column):
    # A column was added to one of our FROM objects after this select
    # was constructed; propagate it into our exported columns if the
    # collection has already been populated.
    for fromclause in self._froms:
        col = fromclause._refresh_for_new_column(column)
        if col is not None:
            if col in self.inner_columns and self._cols_populated:
                our_label = col._key_label if self.use_labels else col.key
                # only export it if its label is not already present
                if our_label not in self.c:
                    return col._make_proxy(self,
                        name=col._label if self.use_labels else None,
                        key=col._key_label if self.use_labels else None,
                        name_is_truncatable=True)
            # the column belongs to one of our froms but needs no
            # export; stop searching
            return None
    return None
def self_group(self, against=None):
    """Return a 'grouping' construct as per the ClauseElement
    specification.

    Produces a parenthesized element embeddable in an enclosing
    expression; invoked automatically during expression construction
    and rarely needed explicitly.
    """
    # a SELECT placed inside a CompoundSelect (UNION etc.) needs no
    # extra grouping
    return self if isinstance(against, CompoundSelect) else FromGrouping(self)
def union(self, other, **kwargs):
    """Return a SQL UNION of this select() against *other*."""
    return union(self, other, **kwargs)

def union_all(self, other, **kwargs):
    """Return a SQL UNION ALL of this select() against *other*."""
    return union_all(self, other, **kwargs)

def except_(self, other, **kwargs):
    """Return a SQL EXCEPT of this select() against *other*."""
    return except_(self, other, **kwargs)

def except_all(self, other, **kwargs):
    """Return a SQL EXCEPT ALL of this select() against *other*."""
    return except_all(self, other, **kwargs)

def intersect(self, other, **kwargs):
    """Return a SQL INTERSECT of this select() against *other*."""
    return intersect(self, other, **kwargs)

def intersect_all(self, other, **kwargs):
    """Return a SQL INTERSECT ALL of this select() against *other*."""
    return intersect_all(self, other, **kwargs)
def bind(self):
    # Locate an engine/connection for this statement: the explicit
    # _bind first, then the bind of the first FROM element, and when
    # there are no FROMs, the bind of any raw column expression.
    # Caches the found bind on self._bind.
    if self._bind:
        return self._bind
    froms = self._froms
    if not froms:
        for c in self._raw_columns:
            e = c.bind
            if e:
                self._bind = e
                return e
    else:
        e = list(froms)[0].bind
        if e:
            self._bind = e
            return e
    return None

def _set_bind(self, bind):
    self._bind = bind

# expose bind as a read/write property using the two methods above
bind = property(bind, _set_bind)
class UpdateBase(HasPrefixes, Executable, ClauseElement):
    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE``
    statements.
    """

    __visit_name__ = 'update_base'

    # DML statements autocommit by default when executed outside an
    # explicit transaction
    _execution_options = \
        Executable._execution_options.union({'autocommit': True})
    kwargs = util.immutabledict()
    _hints = util.immutabledict()
    _prefixes = ()

    def _process_colparams(self, parameters):
        """Normalize *parameters* into dict form.

        Returns a tuple ``(params, is_multi)`` where ``is_multi``
        indicates a list of parameter sets for a multi-VALUES INSERT.
        """
        def process_single(p):
            # a positional tuple/list is zipped onto the target
            # table's columns in order
            if isinstance(p, (list, tuple)):
                return dict(
                    (c.key, pval)
                    for c, pval in zip(self.table.c, p)
                )
            else:
                return p

        # the added `parameters` truthiness check guards against an
        # empty list, which would otherwise raise IndexError on
        # parameters[0]
        if isinstance(parameters, (list, tuple)) and parameters and \
                isinstance(parameters[0], (list, tuple, dict)):
            if not self._supports_multi_parameters:
                raise exc.InvalidRequestError(
                    "This construct does not support "
                    "multiple parameter sets.")
            return [process_single(p) for p in parameters], True
        else:
            return process_single(parameters), False

    def params(self, *arg, **kw):
        """Set the parameters for the statement.

        This method raises ``NotImplementedError`` on the base class,
        and is overridden by :class:`.ValuesBase` to provide the
        SET/VALUES clause of UPDATE and INSERT.
        """
        raise NotImplementedError(
            "params() is not supported for INSERT/UPDATE/DELETE statements."
            " To set the values for an INSERT or UPDATE statement, use"
            " stmt.values(**parameters).")

    def bind(self):
        """Return a 'bind' linked to this :class:`.UpdateBase`
        or a :class:`.Table` associated with it.
        """
        return self._bind or self.table.bind

    def _set_bind(self, bind):
        self._bind = bind
    # expose bind as a read/write property
    bind = property(bind, _set_bind)

    @_generative
    def returning(self, *cols):
        """Add a RETURNING or equivalent clause to this statement.

        Each element of *cols* may be any column expression within the
        target table of the INSERT, UPDATE, or DELETE;
        :class:`~sqlalchemy.schema.Table` objects are expanded into
        their individual columns.  For INSERT and UPDATE the returned
        values are the newly inserted/updated values; for DELETE, the
        values of the deleted rows.  Upon execution the values are
        available via the result set (``fetchone()`` and similar); for
        DBAPIs without native RETURNING support (i.e. cx_oracle),
        SQLAlchemy approximates the behavior at the result level.
        Backends with no support raise an exception upon compilation
        and/or execution, and support varies greatly across backends
        (including restrictions on executemany()), so consult the
        documentation for the database in use.
        """
        self._returning = cols

    @_generative
    def with_hint(self, text, selectable=None, dialect_name="*"):
        """Add a table hint for a single table to this
        INSERT/UPDATE/DELETE statement.

        .. note::

            :meth:`.UpdateBase.with_hint` currently applies only to
            Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE
            hints, use :meth:`.UpdateBase.prefix_with`.

        The hint text is rendered in the backend-appropriate location
        relative to the :class:`.Table` that is the subject of this
        statement, or optionally to the :class:`.Table` passed as the
        ``selectable`` argument.  E.g., a SQL Server-only hint::

            mytable.insert().with_hint("WITH (PAGLOCK)",
                                       dialect_name="mssql")

        .. versionadded:: 0.7.6

        :param text: Text of the hint.
        :param selectable: optional :class:`.Table` within an UPDATE
         or DELETE FROM clause to be the subject of the hint - applies
         only to certain backends.
        :param dialect_name: defaults to ``*``; when set to a
         particular dialect name, the hint applies only when that
         dialect is in use.
        """
        if selectable is None:
            selectable = self.table
        self._hints = self._hints.union(
            {(selectable, dialect_name): text})
class ValuesBase(UpdateBase):
    """Supplies support for :meth:`.ValuesBase.values` to
    INSERT and UPDATE constructs."""

    __visit_name__ = 'values_base'

    _supports_multi_parameters = False
    _has_multi_parameters = False
    # populated by Insert.from_select()
    select = None

    def __init__(self, table, values, prefixes):
        self.table = _interpret_as_from(table)
        self.parameters, self._has_multi_parameters = \
            self._process_colparams(values)
        if prefixes:
            self._setup_prefixes(prefixes)

    @_generative
    def values(self, *args, **kwargs):
        r"""Specify a fixed VALUES clause for an INSERT statement, or
        the SET clause for an UPDATE.

        Each call produces a new construct with the parameter list
        modified to include the newly passed parameters: for a single
        dictionary, newly passed keys replace the same keys of the
        previous construct; for a list-based "multiple values"
        construct, each new list of values is extended onto the
        existing list.

        :param \**kwargs: key/value pairs mapping the string key of a
         :class:`.Column` to the value rendered into the VALUES or SET
         clause::

            users.insert().values(name="some name")

            users.update().where(users.c.id==5).values(name="some name")

        :param \*args: alternatively, a single positional dictionary,
         tuple, or list of dictionaries/tuples forming the VALUES or
         SET clause.  A tuple must contain the same number of columns
         as the target :class:`.Table`.  A list of dictionaries/tuples
         produces a multiply-rendered VALUES construct on supporting
         backends (SQLite, Postgresql, MySQL); an :class:`.Update`
         accepts only the single dictionary/tuple form, and mixing the
         single- and multiple-value styles (via multiple calls or via
         a list plus kwargs) raises an exception.

        .. note::

            A multiple-values list here renders one multi-row INSERT
            statement, whereas a list of parameter sets passed to
            :meth:`.Connection.execute` uses the DBAPI
            ``executemany()`` method, which repeats a single-row
            INSERT and is supported by all backends.

        .. versionadded:: 0.8
            Support for multiple-VALUES INSERT statements.

        .. seealso::

            :ref:`inserts_and_updates` - SQL Expression
            Language Tutorial

            :func:`~.expression.insert` - produce an ``INSERT`` statement

            :func:`~.expression.update` - produce an ``UPDATE`` statement
        """
        if self.select is not None:
            raise exc.InvalidRequestError(
                "This construct already inserts from a SELECT")
        if self._has_multi_parameters and kwargs:
            raise exc.InvalidRequestError(
                "This construct already has multiple parameter sets.")

        if args:
            if len(args) > 1:
                raise exc.ArgumentError(
                    "Only a single dictionary/tuple or list of "
                    "dictionaries/tuples is accepted positionally.")
            v = args[0]
        else:
            v = {}

        if self.parameters is None:
            self.parameters, self._has_multi_parameters = \
                self._process_colparams(v)
        else:
            if self._has_multi_parameters:
                self.parameters = list(self.parameters)
                p, self._has_multi_parameters = self._process_colparams(v)
                if not self._has_multi_parameters:
                    raise exc.ArgumentError(
                        "Can't mix single-values and multiple values "
                        "formats in one statement")
                self.parameters.extend(p)
            else:
                self.parameters = self.parameters.copy()
                p, self._has_multi_parameters = self._process_colparams(v)
                if self._has_multi_parameters:
                    raise exc.ArgumentError(
                        "Can't mix single-values and multiple values "
                        "formats in one statement")
                self.parameters.update(p)

        if kwargs:
            if self._has_multi_parameters:
                # fixed typo in the error message ("simultaenously")
                raise exc.ArgumentError(
                    "Can't pass kwargs and multiple parameter sets "
                    "simultaneously")
            else:
                self.parameters.update(kwargs)
class Insert(ValuesBase):
    """Represent an INSERT construct.

    Created via the :func:`~.expression.insert` function.

    .. seealso::

        :ref:`coretutorial_insert_expressions`
    """

    __visit_name__ = 'insert'

    _supports_multi_parameters = True

    def __init__(self,
                 table,
                 values=None,
                 inline=False,
                 bind=None,
                 prefixes=None,
                 returning=None,
                 **kwargs):
        ValuesBase.__init__(self, table, values, prefixes)
        self._bind = bind
        self.select = None
        self.inline = inline
        self._returning = returning
        self.kwargs = kwargs

    def get_children(self, **kwargs):
        # the embedded SELECT (INSERT..FROM SELECT) is the only child
        return (self.select,) if self.select is not None else ()

    @_generative
    def from_select(self, names, select):
        """Return a new :class:`.Insert` construct which represents
        an ``INSERT...FROM SELECT`` statement.

        e.g.::

            sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
            ins = table2.insert().from_select(['a', 'b'], sel)

        :param names: a sequence of string column names or
         :class:`.Column` objects representing the target columns.
        :param select: a :func:`.select` construct, :class:`.FromClause`
         or other construct resolving into a :class:`.FromClause` (such
         as an ORM :class:`.Query`).  The order of its returned columns
         should correspond to ``names``; this is not checked before
         passing to the database, which would normally raise an
         exception on mismatched column lists.

        .. note::

            Depending on backend, it may be necessary to construct the
            :class:`.Insert` with ``inline=True`` to suppress implicit
            ``RETURNING``, which isn't supported with
            ``INSERT..SELECT`` on backends such as Oracle::

                sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
                ins = table2.insert(inline=True).from_select(['a', 'b'], sel)

        .. versionadded:: 0.8.3
        """
        if self.parameters:
            raise exc.InvalidRequestError(
                "This construct already inserts value expressions")
        self.parameters, self._has_multi_parameters = \
            self._process_colparams(dict((n, null()) for n in names))
        self.select = _interpret_as_select(select)

    def _copy_internals(self, clone=_clone, **kw):
        # TODO: coverage
        self.parameters = self.parameters.copy()
        if self.select is not None:
            # NOTE(review): uses module-level _clone rather than the
            # `clone` argument — confirm this is intentional
            self.select = _clone(self.select)
class Update(ValuesBase):
    """Represent an UPDATE construct.

    Created via the :func:`update` function.
    """

    __visit_name__ = 'update'

    def __init__(self,
                 table,
                 whereclause,
                 values=None,
                 inline=False,
                 bind=None,
                 prefixes=None,
                 returning=None,
                 **kwargs):
        ValuesBase.__init__(self, table, values, prefixes)
        self._bind = bind
        self._returning = returning
        self._whereclause = (_literal_as_text(whereclause)
                             if whereclause is not None else None)
        self.inline = inline
        self.kwargs = kwargs

    def get_children(self, **kwargs):
        # the WHERE clause is the only traversable child
        return (self._whereclause,) if self._whereclause is not None else ()

    def _copy_internals(self, clone=_clone, **kw):
        # TODO: coverage
        self._whereclause = clone(self._whereclause, **kw)
        self.parameters = self.parameters.copy()

    @_generative
    def where(self, whereclause):
        """Return a new update() construct with the given expression
        ANDed onto any existing WHERE criterion.
        """
        clause = _literal_as_text(whereclause)
        if self._whereclause is None:
            self._whereclause = clause
        else:
            self._whereclause = and_(self._whereclause, clause)

    @property
    def _extra_froms(self):
        # FROM objects referenced by the WHERE clause beyond the
        # target table itself (multi-table UPDATE support).
        # TODO: this could be made memoized if the memoization is
        # reset on each generative call.
        froms = []
        seen = set([self.table])
        if self._whereclause is not None:
            for item in _from_objects(self._whereclause):
                if not seen.intersection(item._cloned_set):
                    froms.append(item)
                    seen.update(item._cloned_set)
        return froms
class Delete(UpdateBase):
    """Represent a DELETE construct.

    Created via the :func:`delete` function.
    """

    __visit_name__ = 'delete'

    def __init__(self,
                 table,
                 whereclause,
                 bind=None,
                 returning=None,
                 prefixes=None,
                 **kwargs):
        self._bind = bind
        self.table = _interpret_as_from(table)
        self._returning = returning
        if prefixes:
            self._setup_prefixes(prefixes)
        self._whereclause = (_literal_as_text(whereclause)
                             if whereclause is not None else None)
        self.kwargs = kwargs

    def get_children(self, **kwargs):
        # the WHERE clause is the only traversable child
        return (self._whereclause,) if self._whereclause is not None else ()

    @_generative
    def where(self, whereclause):
        """Add the given WHERE clause to a newly returned delete
        construct, ANDed with any existing criterion."""
        clause = _literal_as_text(whereclause)
        if self._whereclause is None:
            self._whereclause = clause
        else:
            self._whereclause = and_(self._whereclause, clause)

    def _copy_internals(self, clone=_clone, **kw):
        # TODO: coverage
        self._whereclause = clone(self._whereclause, **kw)
class _IdentifiedClause(Executable, ClauseElement):
    # Base for savepoint-related clauses identified by a name.
    __visit_name__ = 'identified'
    # savepoint statements must not trigger autocommit
    _execution_options = \
        Executable._execution_options.union({'autocommit': False})
    quote = None

    def __init__(self, ident):
        # ident: the savepoint name used when rendering the statement
        self.ident = ident


class SavepointClause(_IdentifiedClause):
    # Represents a SAVEPOINT statement.
    __visit_name__ = 'savepoint'


class RollbackToSavepointClause(_IdentifiedClause):
    # Represents a ROLLBACK TO SAVEPOINT statement.
    __visit_name__ = 'rollback_to_savepoint'


class ReleaseSavepointClause(_IdentifiedClause):
    # Represents a RELEASE SAVEPOINT statement.
    __visit_name__ = 'release_savepoint'
# old names for compatibility
# (pre-0.9 private class names; retained so external code importing
# the underscored names continues to work)
_BindParamClause = BindParameter
_Label = Label
_SelectBase = SelectBase
_BinaryExpression = BinaryExpression
_Cast = Cast
_Null = Null
_False = False_
_True = True_
_TextClause = TextClause
_UnaryExpression = UnaryExpression
_Case = Case
_Tuple = Tuple
_Over = Over
_Generative = Generative
_TypeClause = TypeClause
_Extract = Extract
_Exists = Exists
_Grouping = Grouping
_FromGrouping = FromGrouping
_ScalarSelect = ScalarSelect
|
gujiawen/flask_web
|
venv/lib/python2.7/site-packages/sqlalchemy/sql/expression.py
|
Python
|
mit
| 222,486
|
[
"VisIt"
] |
7c750f376070127410e22450f20f9d5e1fa2a94d41e16f41e747c9f2983bbbf8
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# =============================================================================
"""
Example usage::
import numpy as np
from singa import tensor
from singa import device
# create a tensor with shape (2,3), default CppCPU device and float32
x = tensor.Tensor((2, 3))
x.set_value(0.4)
# create a tensor from a numpy array
npy = np.zeros((3, 3), dtype=np.float32)
y = tensor.from_numpy(npy)
y.uniform(-1, 1) # sample values from the uniform distribution
z = tensor.mult(x, y) # gemm -> z of shape (2, 3)
x += z # element-wise addition
dev = device.get_default_device()
x.to_device(dev) # move the data to a gpu device
s = tensor.to_numpy(x) # tensor -> numpy array
There are two sets of tensor functions,
Tensor member functions
which would change the internal state of the Tensor instance.
Tensor module functions
which accept Tensor instances as arguments and return Tensor instances.
Every Tensor instance must be initialized before reading data from it.
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from builtins import object
import numpy as np
from functools import reduce
from .proto import core_pb2
from . import singa_wrap as singa
from .device import get_default_device
int32 = core_pb2.kInt
float32 = core_pb2.kFloat32
CTensor = singa.Tensor
class Tensor(object):
'''Python Tensor, which wraps a swig converted Tensor from CPP Tensor.
Args:
shape (tuple<int>): a tuple of integers for the tensor shape. If shape
is not specified, the created tensor is called a dummy tensor.
device: a swig device. If None, the default host device is used.
dtype: data type. currently, most operations only accept float32.
data: a numpy array or swig tensor.
requires_grad: boolean indicator for computing the gradient.
stores_grad: boolean indicator for storing and returning the gradient.
Some intermediate tensors' gradient can be released
during the backward propagation. A tensor may require
grad but not store grad; But if a tensor stores grad
then it must require grad.
'''
# global counter used to generate unique names for dummy tensors
tensor_count = 0

def __init__(self, shape=(), device=None, dtype=float32,
             data=None, requires_grad=True, stores_grad=False,
             creator=None, name=None):
    if device is None:
        # fall back to the default host device
        device = get_default_device()
    if isinstance(data, np.ndarray):
        # allocate a CTensor of matching shape and copy values in
        self.data = CTensor(list(data.shape), device, dtype)
        copy_from_numpy(self.data, data)
    elif isinstance(data, CTensor):
        # wrap an existing swig tensor; it must already live on `device`
        self.data = data
        assert data.device().id() == device.id(), 'not the same device'
    else:
        # no data given: allocate an uninitialized tensor of `shape`
        self.data = CTensor(list(shape), device, dtype)
    # mirror the CTensor's metadata on this Python wrapper
    self.shape = tuple(self.data.shape())
    self.device = device
    self.dtype = self.data.data_type()
    self.requires_grad = requires_grad
    self.stores_grad = stores_grad
    if name is None:
        # auto-generate a unique placeholder name
        self.name = 'Dummy#{}'.format(Tensor.tensor_count)
        Tensor.tensor_count += 1
    else:
        self.name = name
    if creator is None:
        # imported lazily to avoid a circular import with autograd
        from . import autograd
        self.creator = autograd.Dummy(self, name)
    else:
        self.creator = creator
def ndim(self):
    '''Return the number of dimensions of the tensor.'''
    return self.data.nDim()

def is_empty(self):
    '''Return True if the tensor's shape has zero dimensions.'''
    return self.ndim() == 0

def is_transpose(self):
    '''Return True if the internal data is transposed, else False.'''
    return self.data.transpose()
def transpose(self, axes=None):
    '''Transpose the tensor.

    Args:
        axes (sequence<int> or None): permutation of the axes; when
            None, the backend's default transpose is applied.

    Returns:
        a new, transposed Tensor.

    Raises:
        ValueError: if len(axes) does not match the tensor rank.
    '''
    t = Tensor(self.shape, self.device, self.dtype)
    if axes is None:  # identity comparison instead of `== None`
        # NOTE(review): the shape is copied as-is rather than
        # reversed before DefaultTranspose — confirm against
        # singa.DefaultTranspose semantics
        t.shape = tuple(self.shape[x] for x in range(len(t.shape)))
        t.data = singa.DefaultTranspose(self.data)
    else:
        if len(axes) != len(self.shape):
            raise ValueError('dimensions do not match')
        t.shape = tuple(self.shape[x] for x in axes)
        t.data = singa.Transpose(self.data, list(axes))
    return t
def size(self):  # TODO(wangwei) compute size
    '''Return the number of elements of the tensor.'''
    return self.data.Size()

def memsize(self):
    '''Return the number of bytes allocated for this tensor.'''
    return self.data.MemSize()
def reshape(self, shape):
    '''Return a new tensor with the given shape; this tensor is left
    unchanged.

    Args:
        shape (list<int>): new shape; must have the same volume as
            the original shape.
    '''
    assert product(self.shape) == product(shape), \
        'product of shape should be equal'
    out = Tensor(self.shape, self.device, self.dtype)
    out.shape = shape
    out.data = singa.Reshape(self.data, shape)
    return out

def reset_like(self, t):
    '''Reset the shape, dtype and device to match the given tensor.

    Args:
        t (Tensor): the template tensor.
    '''
    self.data.ResetLike(t.data)
    self.shape = t.shape
    self.device = t.device
    self.dtype = t.dtype
'''
def as_type(self, dtype):
Change the data type.
Args:
dtype:
self.data.AsType(dtype)
'''
def to_device(self, device):
    '''Move the tensor data onto the given device.

    Args:
        device: a swig Device converted from CudaGPU or CppCPU or
            OpenclGPU
    '''
    self.data.ToDevice(device)
    self.device = device

def to_host(self):
    '''Move the tensor data onto the default host CppCPU device.'''
    self.data.ToHost()
    self.device = get_default_device()
def l2(self):
    '''Return the L2 norm of the tensor.'''
    return self.data.L2()

def l1(self):
    '''Return the L1 norm of the tensor.'''
    return self.data.L1()

def set_value(self, x):
    '''Assign the given scalar to every element of the tensor.

    Args:
        x (float): the value assigned to all elements.
    '''
    self.data.SetFloatValue(float(x))
def copy_from_numpy(self, np_array, offset=0):
    '''Copy the data from the numpy array.

    Args:
        np_array: source numpy array; flattened first when
            multi-dimensional, and must have exactly self.size()
            elements
        offset (int): destination offset
            (NOTE(review): currently unused by this implementation)
    '''
    assert np_array.size == self.size(), 'tensor shape should be the same'
    if not np_array.ndim == 1:
        np_array = np_array.flatten()
    dt = np_array.dtype
    if dt == np.float32:
        self.data.CopyFloatDataFromHostPtr(np_array)
    elif dt == np.int32 or dt == np.int64:
        # `np.int` was removed in NumPy 1.24; compare against the
        # concrete dtypes the old alias covered instead
        self.data.CopyIntDataFromHostPtr(np_array)
    else:
        print('Not implemented yet for ', dt)
def copy_data(self, t):
    '''Copy data from another Tensor instance.

    Args:
        t (Tensor): source Tensor.
    '''
    assert isinstance(t, Tensor), 't must be a singa Tensor instance'
    self.data.CopyData(t.data)

def clone(self):
    '''Return a new Tensor holding a deep copy of this tensor.'''
    return _call_singa_func(self.data.Clone)
def repeat(self, repeats, axis):
    '''Repeat data of a tensor along an axis.

    Args:
        repeats (int or a sequence): how many times to repeat each element.
        axis (int or None): the axis along which to repeat.
            If it is None, the repeated tensor will be flattened. If it
            isn't None, repeats may be a sequence, whose size must match
            that axis's shape.

    Return:
        the tensor which has been repeated
    '''
    t = Tensor()
    t_ndim = self.ndim()
    # NOTE: the Python-2-only `long` check was dropped; on Python 3 it
    # raised NameError before any repeat could happen.
    if isinstance(repeats, int):
        if repeats < 0:
            raise ValueError(
                "'repeats' should not be negative: {}".format(repeats))
        if axis is not None and axis < 0:
            axis += t_ndim
        if axis is None:
            # 9999 is the sentinel value the backend uses for "flatten"
            axis = 9999
            t.shape = (product(self.shape) * repeats,)
            t.data = self.data.Repeat([repeats], axis)
        elif axis >= 0:
            t_shape = list(self.shape)
            t_shape[axis] = self.shape[axis] * repeats
            t.shape = tuple(t_shape)
            t.data = self.data.Repeat([repeats], axis)
    elif isinstance(repeats, (tuple, list)):
        for rep in repeats:
            if rep < 0:
                raise ValueError(
                    "'repeats' should be int or sequence: {}".format(repeats))
        if axis is not None and axis < 0:
            axis += t_ndim
        if axis is None:
            # a per-element repeat count is meaningless for a flattened
            # result, so reject it (message typo "us" fixed)
            raise ValueError(
                "when axis is None, 'repeats' should be int: {}".format(repeats))
        elif axis >= 0:
            t_shape = list(self.shape)
            t_shape[axis] = sum(repeats)
            t.shape = tuple(t_shape)
            t.data = self.data.Repeat(list(repeats), axis)
    else:
        raise ValueError('repeats should be int or sequence')
    return t
def T(self):
    ''' shallow copy.

    Returns:
        a new Tensor which shares the underlying data memory (shallow copy).
    '''
    # DefaultTranspose presumably reverses all axes (numpy convention) —
    # confirm against the C++ binding
    return _call_singa_func(singa.DefaultTranspose, self.data)
def copy(self):
    '''shallow copy calls copy constructor of singa::Tensor
    '''
    # the buffer is shared, not duplicated (contrast with clone()/deepcopy())
    return _call_singa_func(CTensor, self.data)
def deepcopy(self):
    '''Same as clone().

    Returns:
        a new Tensor
    '''
    # provided for API symmetry with copy()
    return self.clone()
def bernoulli(self, p):
    '''Sample 0/1 for each element according to the given probability.

    Args:
        p (float): with probability p, each element is sample to 1.
    '''
    # fills this tensor in place
    singa.Bernoulli(float(p), self.data)
def gaussian(self, mean, std):
    '''Generate a value for each element following a Gaussian distribution.

    Args:
        mean (float): mean of the distribution
        std (float): standard variance of the distribution
    '''
    # fills this tensor in place
    singa.Gaussian(float(mean), float(std), self.data)
def uniform(self, low, high):
    '''Generate a value for each element following a uniform distribution.

    Args:
        low (float): the lower bound
        high (float): the higher bound
    '''
    # fills this tensor in place
    singa.Uniform(float(low), float(high), self.data)
def add_column(self, v):
    '''Add a tensor to each column of this tensor.

    Args:
        v (Tensor): a Tensor to be added as a column to this tensor.
    '''
    # in-place on self
    singa.AddColumn(v.data, self.data)
def add_row(self, v):
    '''Add a tensor to each row of this tensor.

    Args:
        v (Tensor): a Tensor to be added as a row to this tensor.
    '''
    # in-place on self
    singa.AddRow(v.data, self.data)
def div_column(self, v):
    '''Divide each column of this tensor by v.

    Args:
        v (Tensor): 1d tensor of the same length the column of self.
    '''
    # in-place on self
    singa.DivColumn(v.data, self.data)
def div_row(self, v):
    '''Divide each row of this tensor by v.

    Args:
        v (Tensor): 1d tensor of the same length the row of self.
    '''
    # in-place on self
    singa.DivRow(v.data, self.data)
def mult_column(self, v):
    '''Multiply each column of this tensor by v element-wisely.

    Args:
        v (Tensor): 1d tensor of the same length the column of self.
    '''
    # in-place on self
    singa.MultColumn(v.data, self.data)
def mult_row(self, v):
    '''Multiply each row of this tensor by v element-wisely.

    Args:
        v (Tensor): 1d tensor of the same length the row of self.
    '''
    # in-place on self
    singa.MultRow(v.data, self.data)
'''
python operators (+=, -=, *=, /=) for singa::Tensor unary operators
'''
def __iadd__(self, x):
    '''In-place element-wise addition with a tensor or a float value.

    Args:
        x (float or Tensor): the addend.
    '''
    if isinstance(x, Tensor):
        addend = x.data
    else:
        addend = float(x)
    self.data += addend
    return self
def __isub__(self, x):
    '''In-place element-wise subtraction with a tensor or a float value.

    Args:
        x (float or Tensor): the subtrahend.
    '''
    if isinstance(x, Tensor):
        subtrahend = x.data
    else:
        subtrahend = float(x)
    self.data -= subtrahend
    return self
def __imul__(self, x):
    '''In-place element-wise multiplication with a tensor or a float value.

    Args:
        x (float or Tensor): the multiplier.
    '''
    if isinstance(x, Tensor):
        factor = x.data
    else:
        factor = float(x)
    self.data *= factor
    return self
def __idiv__(self, x):
    '''In-place element-wise division by a tensor or a float value.

    Args:
        x (float or Tensor): the divisor.
    '''
    # division is implemented as multiplication by the reciprocal
    if isinstance(x, Tensor):
        reciprocal = 1.0 / x.data
    else:
        reciprocal = 1.0 / float(x)
    self.data *= reciprocal
    return self
'''
python operators (+, -, *, /, <, <=, >, >=) for singa binary operators
https://docs.python.org/2/library/operator.html#mapping-operators-to-functions
'''
def __add__(self, rhs):
    # tensor + tensor uses the elementwise overload; tensor + scalar uses
    # the float variant
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__add__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.AddFloat,
                                self.data, rhs)
def __sub__(self, rhs):
    # tensor - tensor uses the elementwise overload; tensor - scalar uses
    # the float variant
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__sub__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.SubFloat,
                                self.data, rhs)
def __mul__(self, rhs):
    # elementwise (Hadamard) product, not matrix multiplication (see mult())
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__mul__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.MultFloat,
                                self.data, rhs)
def __div__(self, rhs):
    # Python 2 division operator; __truediv__ below is the Python 3 twin
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__div__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.DivFloat,
                                self.data, rhs)
def __truediv__(self, rhs):
    # Python 3 division operator; mirrors __div__ above
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__div__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.DivFloat,
                                self.data, rhs)
def __lt__(self, rhs):
    # elementwise comparison producing a tensor mask (see module-level lt())
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__lt__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.LTFloat, self.data, rhs)
def __le__(self, rhs):
    # elementwise comparison producing a tensor mask (see module-level le())
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__le__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.LEFloat, self.data, rhs)
def __gt__(self, rhs):
    # elementwise comparison producing a tensor mask (see module-level gt())
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__gt__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.GTFloat, self.data, rhs)
def __ge__(self, rhs):
    # elementwise comparison producing a tensor mask (see module-level ge())
    if isinstance(rhs, Tensor):
        return from_raw_tensor(
            singa.__ge__(self.data, rhs.data))
    else:
        return _call_singa_func(singa.GEFloat, self.data, rhs)
def __radd__(self, lhs):
    # scalar + tensor: materialize the scalar as a constant tensor first
    lhs = float(lhs)
    one = Tensor(self.shape, self.device, self.dtype)
    one.set_value(lhs)
    one += self
    return one
def __rsub__(self, lhs):
    # scalar - tensor: materialize the scalar as a constant tensor first
    lhs = float(lhs)
    one = Tensor(self.shape, self.device, self.dtype)
    one.set_value(lhs)
    one -= self
    return one
def __rmul__(self, lhs):
    # scalar * tensor: materialize the scalar as a constant tensor first
    lhs = float(lhs)
    one = Tensor(self.shape, self.device, self.dtype)
    one.set_value(lhs)
    one *= self
    return one
def __rdiv__(self, lhs):
    # scalar / tensor (Python 2); materialize the scalar as a tensor first
    lhs = float(lhs)
    one = Tensor(self.shape, self.device, self.dtype)
    one.set_value(lhs)
    one /= self
    return one
def __rtruediv__(self, lhs):
    # scalar / tensor (Python 3); materialize the scalar as a tensor first
    lhs = float(lhs)
    one = Tensor(self.shape, self.device, self.dtype)
    one.set_value(lhs)
    one /= self
    return one
''' python functions for global functions in Tensor.h
'''
def from_raw_tensor(t):
    '''Wrap a swig tensor into a python Tensor without copying the data.'''
    x = Tensor(t.shape(), t.device(), t.data_type())
    # share the underlying buffer instead of the one Tensor() allocated
    x.data = t
    return x
def from_raw_tensors(tt):
    '''Wrap every swig tensor of a sequence as a python Tensor.

    Args:
        tt: an iterable of swig tensors.

    Returns:
        a list of python Tensor instances, in the same order.
    '''
    return [from_raw_tensor(raw) for raw in tt]
def zeros_like(t):
    '''Return a new Tensor with the same shape/device/dtype as t, filled with 0.'''
    ret = Tensor(t.shape, t.device, t.dtype)
    ret.set_value(float(0))
    return ret
def ones_like(t):
    '''Return a new Tensor with the same shape/device/dtype as t, filled with 1.'''
    ret = Tensor(t.shape, t.device, t.dtype)
    ret.set_value(float(1))
    return ret
def product(shape):
    '''Return the number of elements implied by a shape.

    Args:
        shape (sequence of int): tensor dimensions; may be empty.

    Returns:
        int: the product of all entries of shape; 1 for an empty shape
        (the element count of a scalar tensor). Previously an empty shape
        raised TypeError because reduce() had no initializer.
    '''
    return reduce(lambda x, y: x * y, shape, 1)
def sizeof(dtype):
    '''
    Args:
        dtype: a SINGA data type enum value (e.g. core_pb2.kFloat32).

    Returns:
        the number of bytes of the given SINGA data type defined in core.proto
    '''
    return singa.SizeOf(dtype)
def reshape(tensor, shape):
    '''Reshape the input tensor with the given shape and
    the original tensor is not changed.

    Args:
        tensor (Tensor): the tensor to be changed
        shape (list<int>): the new shape, which should have the same volume as
            the old shape.

    Returns:
        the new Tensor
    '''
    return _call_singa_func(singa.Reshape, tensor.data, shape)
def transpose(t, axes=None):
    '''
    Args:
        t (Tensor): input tensor
        axes (sequence of int, optional): the new axis order; None presumably
            reverses the axes (numpy convention) — confirm in Tensor.transpose.

    Returns:
        the transposed tensor
    '''
    ret = t.transpose(axes)
    return ret
def copy_data_to_from(dst, src, size, dst_offset=0, src_offset=0):
    '''Copy the data between two Tensor instances which could be on different
    devices.

    Args:
        dst (Tensor): destination Tensor
        src (Tensor): source Tensor
        size (int) : number of elements to copy
        dst_offset (int): offset in terms of elements to the start of dst
        src_offset (int): offset in terms of elements to the start of src
    '''
    # offsets and size are counted in elements, not bytes
    singa.CopyDataToFrom(dst.data, src.data, size,
                         dst_offset, src_offset)
def from_numpy(np_array):
    '''Create a Tensor instance with the shape, dtype and values from the numpy
    array.

    Args:
        np_array: the numpy array.

    Returns:
        A Tensor instance allocated on the default CppCPU device.
    '''
    assert type(np_array) is np.ndarray, 'Must input numpy array'
    # Convert to float32 / int32. The old np.float / np.int aliases were
    # removed in NumPy 1.24, so match the whole dtype families instead.
    if np.issubdtype(np_array.dtype, np.floating) and \
            np_array.dtype != np.float32:
        np_array = np_array.astype(np.float32)
    elif np.issubdtype(np_array.dtype, np.integer) and \
            np_array.dtype != np.int32:
        np_array = np_array.astype(np.int32)
    if np_array.dtype == np.float32:
        dtype = core_pb2.kFloat32
    else:
        assert np_array.dtype == np.int32, \
            'Only float and int tensors are supported'
        dtype = core_pb2.kInt
    ret = Tensor(np_array.shape, dtype=dtype)
    ret.copy_from_numpy(np_array)
    return ret
def to_host(t):
    '''Copy the data to a host tensor.
    '''
    # clone first so the caller's tensor keeps its original device
    ret = t.clone()
    ret.to_host()
    return ret
def to_numpy(t):
    '''Copy the tensor into a numpy array.

    Args:
        t (Tensor), a Tensor

    Returns:
        a numpy array

    Raises:
        NotImplementedError: if the tensor dtype is neither kFloat32 nor kInt.
    '''
    th = to_host(t)
    if th.dtype == core_pb2.kFloat32:
        np_array = th.data.GetFloatValue(int(th.size()))
    elif th.dtype == core_pb2.kInt:
        np_array = th.data.GetIntValue(int(th.size()))
    else:
        # previously this only printed a message and then crashed with a
        # NameError on the reshape below; fail with a clear error instead
        raise NotImplementedError(
            'Not implemented yet for {}'.format(th.dtype))
    return np_array.reshape(th.shape)
def abs(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = abs(x), x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Abs, t.data)
def exp(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = exp(x), x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Exp, t.data)
def log(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = log(x), x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Log, t.data)
def sigmoid(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = sigmoid(x); x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Sigmoid, t.data)
def sign(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = sign(x)
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Sign, t.data)
def sqrt(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = sqrt(x), x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Sqrt, t.data)
def square(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = x * x, x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Square, t.data)
def tanh(t):
    '''
    Args:
        t (Tensor): input Tensor

    Returns:
        a new Tensor whose element y = tanh(x), x is an element of t
    '''
    # elementwise, computed by the C++ backend
    return _call_singa_func(singa.Tanh, t.data)
def sum(t, axis=None, out=None):
    '''Sum of tensor elements over given axis.

    Args:
        t: Singa.tensor
            The array_like tensor to be summed.
        axis: None or int or tuple of ints, optional
            Axis or axes along which a sum is performed.
            The default, axis=None, will sum all of the elements of the input.
            If axis is negative it counts from the last to the first axis.
            If axis is a tuple of ints, a sum is performed on all of the axes
            specified in the tuple instead of a single axis.
        out: Singa.tensor, optional
            Alternative output tensor in which to place the result.
            It must have the same shape as the expected output.

    Return: sum_along_axis: tensor
        A tensor with the specified axis removed.
        If axis is None, a scalar-like tensor is returned.
        If an output tensor is specified, a reference to out is returned.

    Raises:
        TypeError: if axis is not None, int or tuple (previously this case
            crashed later with an UnboundLocalError).
        ValueError: if out has the wrong shape.
    '''
    t_shape = t.shape
    t_ndim = t.ndim()

    # The reduction is implemented as a tensordot of t with an all-ones
    # tensor spanning the reduced axes.
    if axis is None:
        ones = Tensor(t.shape, t.device)
        ones.set_value(1.0)
        ret = tensordot(t, ones, t_ndim)
    elif isinstance(axis, int):
        if axis < 0:
            axis += t_ndim
        axis_shape = int(t_shape[axis])
        ones = Tensor(shape=(axis_shape,), device=t.device)
        ones.set_value(1.0)
        ret = tensordot(t, ones, axes=([axis], [0]))
    elif isinstance(axis, tuple):
        l_axis = list(axis)
        axis_shape = tuple(t_shape[x] for x in axis)
        ones = Tensor(axis_shape, t.device)
        ones.set_value(1.0)
        one_axis = list(range(ones.ndim()))
        ret = tensordot(t, ones, (l_axis, one_axis))
    else:
        raise TypeError(
            'axis should be None, an int or a tuple of ints, got {}'.format(
                type(axis)))

    if out is not None:
        if out.shape != ret.shape:
            raise ValueError('dimensions do not match')
        out[:] = ret
        return out
    else:
        return ret
def pow(t, x, out=None):
    '''
    Args:
        t (Tensor): input tensor
        x (float or Tensor): y[i] = t[i]^x if x is a float value; otherwise,
            y[i]= t[i]^x[i] if x is a tensor.
        out (None or Tensor): if None, a new Tensor would be constructed to
            store the result; otherwise, the result is put into out.

    Returns:
        the result tensor.
    '''
    if out is None:
        if isinstance(x, Tensor):
            return _call_singa_func(singa.Pow, t.data, x.data)
        else:
            return _call_singa_func(singa.PowFloat, t.data, x)
    else:
        if isinstance(x, Tensor):
            singa.PowWithRet(t.data, x.data, out.data)
        else:
            # 'PowFloatWitRet' (sic) appears to be the actual name exported
            # by the swig binding — confirm before "fixing" the spelling
            singa.PowFloatWitRet(t.data, x, out.data)
        return out
def average(t, axis=None):
    '''
    Args:
        t (Tensor): input Tensor
        axis (int, optional): if None, average all elements; otherwise average
            along the given dimension. 0 for averaging each column; 1 for
            averaging each row.

    Returns:
        a float value if axis is None; otherwise, a new Tensor for the result.
    '''
    if t.ndim() > 1:
        # NOTE(review): axis=None with a multi-dim tensor is forwarded to
        # singa.Average as-is — confirm the binding accepts None here
        return _call_singa_func(singa.Average, t.data, axis)
    else:
        return singa.SumAsFloat(t.data) / t.size()
def softmax(t, out=None):
    '''Apply SoftMax for each row of the Tensor.

    Args:
        t (Tensor): the input 1d or 2d tensor
        out (Tensor, optional): if not None, it is used to store the result

    Returns:
        the result Tensor
    '''
    if out is None:
        return _call_singa_func(singa.SoftMax, t.data)
    else:
        # in-place variant writes directly into out
        singa.SoftMax(t.data, out.data)
        return out
def lt(t, x):
    '''Element-wise comparison for t < x.

    Args:
        t (Tensor): left hand side operand
        x (Tensor or float): right hand side operand

    Returns:
        a Tensor with each element being t[i] < x ? 1.0f:0.0f,
        or t[i] < x[i] ? 1.0f:0.0f
    '''
    # delegates to Tensor.__lt__
    return t < x
def le(t, x):
    '''Element-wise comparison for t <= x.

    Args:
        t (Tensor): left hand side operand
        x (Tensor or float): right hand side operand

    Returns:
        a Tensor with each element being t[i] <= x ? 1.0f:0.0f,
        or t[i] <= x[i] ? 1.0f:0.0f
    '''
    # delegates to Tensor.__le__
    return t <= x
def gt(t, x):
    '''Element-wise comparison for t > x.

    Args:
        t (Tensor): left hand side operand
        x (Tensor or float): right hand side operand

    Returns:
        a Tensor with each element being t[i] > x ? 1.0f:0.0f,
        or t[i] > x[i] ? 1.0f:0.0f
    '''
    # delegates to Tensor.__gt__
    return t > x
def ge(t, x):
    '''Element-wise comparison for t >= x.

    Args:
        t (Tensor): left hand side operand
        x (Tensor or float): right hand side operand

    Returns:
        a Tensor with each element being t[i] >= x ? 1.0f:0.0f,
        or t[i] >= x[i] ? 1.0f:0.0f
    '''
    # delegates to Tensor.__ge__
    return t >= x
def add(lhs, rhs, ret=None):
    '''Element-wise addition.

    Args:
        lhs (Tensor)
        rhs (Tensor)
        ret (Tensor, optional): if not None, the result is stored in it;
            otherwise, a new Tensor would be created for the result.

    Returns:
        the result Tensor
    '''
    if ret is None:
        # delegate to Tensor.__add__()
        return lhs + rhs
    if isinstance(rhs, Tensor):
        singa.Add(lhs.data, rhs.data, ret.data)
    else:
        singa.AddFloatWithRet(lhs.data, rhs, ret.data)
    return ret
def sub(lhs, rhs, ret=None):
    '''Element-wise subtraction.

    Args:
        lhs (Tensor)
        rhs (Tensor)
        ret (Tensor, optional): if not None, the result is stored in it;
            otherwise, a new Tensor would be created for the result.

    Returns:
        the result Tensor
    '''
    if ret is None:
        # delegate to Tensor.__sub__()
        return lhs - rhs
    if isinstance(rhs, Tensor):
        singa.Sub(lhs.data, rhs.data, ret.data)
    else:
        singa.SubFloatWithRet(lhs.data, rhs, ret.data)
    return ret
def eltwise_mult(lhs, rhs, ret=None):
    '''Element-wise multiplication.

    Args:
        lhs (Tensor)
        rhs (Tensor)
        ret (Tensor, optional): if not None, the result is stored in it;
            otherwise, a new Tensor would be created for the result.

    Returns:
        the result Tensor
    '''
    if ret is None:
        # delegate to Tensor.__mul__()
        return lhs * rhs
    if isinstance(rhs, Tensor):
        singa.EltwiseMult(lhs.data, rhs.data, ret.data)
    else:
        singa.EltwiseMultFloatWithRet(lhs.data, rhs, ret.data)
    return ret
def mult(A, B, C=None, alpha=1.0, beta=0.0):
    '''Do matrix-matrix or matrix-vector multiplication.

    This function returns C = alpha * A * B + beta * C

    Args:
        A (Tensor): 2d Tensor
        B (Tensor): If B is a 1d Tensor, GEMV would be invoked for
            matrix-vector multiplication; otherwise GEMM would be invoked.
        C (Tensor, optional): for storing the result; If None, a new Tensor
            would be created.
        alpha (float)
        beta (float)

    Returns:
        the result Tensor
    '''
    if C is None:
        # plain product; alpha/beta are only honoured on the scaled path
        return _call_singa_func(singa.Mult, A.data, B.data)
    else:
        singa.MultWithScale(alpha, A.data, B.data,
                            beta, C.data)
        return C
def einsum(ops, *args):
    '''
    function TODO list to finish the function in cpp(just like numpy function):
    1.sum(A,axis = None)
    2.repeat(A,repeats)
    3.transpose(A,axes = None)
    Do the matrix to matrix einsum calculation according to the operands
    Warning : this function could only support two matrix' einsum calculation

    Args:
        ops(string):
            the string specifies the subscripts for summation such as
            'ki,kj->kij'. Here all the 26 lowercase letters can be used.
        arg(list of array_like):
            These are the tensors for the operation, but here only support
            two tensors.

    Returns: Singa.Tensor
        the output matrix of the einsum calculation

    The best way to understand this function is to try the examples below:
    A_ = [0,1,2,3,4,5,6,7,8,9,10,11]
    A = A_.reshape(4,3)
    B = A_.reshape(3,4)

    Here this einsum calculation is the same as normal 'mult'
    Res = einsum('ij,jk->ik',A,B)

    >>> [[ 20  23  26  29]
         [ 56  68  80  92]
         [ 92 113 134 155]
         [128 158 188 218]]

    A_ = [0,1,2,3,4,5,6,7,8,9,10,11]
    A = A_.reshape(4,3)
    B = A_.reshape(4,3)

    Here the einsum calculation is the same as normal 'eltwise_mult'
    Res = einsum('ki,ki->ki',A,B)

    >>> [[  0   1   4]
         [  9  16  25]
         [ 36  49  64]
         [ 81 100 121]]

    A = [0,1,2,3,4,5,6,7,8,9,10,11]
    A = A.reshape(4,3)

    Res = einsum('ki,kj->kij',A,A)
    >>> [[[  0   0   0]
          [  0   1   2]
          [  0   2   4]]
         [[  9  12  15]
          [ 12  16  20]
          [ 15  20  25]]
         [[ 36  42  48]
          [ 42  49  56]
          [ 48  56  64]]
         [[ 81  90  99]
          [ 90 100 110]
          [ 99 110 121]]]

    A_ = [0,1,2,3,4,5,6,7,8,9,10,11]
    A = A_.reshape(3,2,2)

    Res = einsum('kia,kja->kij',A,A)
    >>> [[[  1   3]
          [  3  13]]
         [[ 41  59]
          [ 59  85]]
         [[145 179]
          [179 221]]]
    '''
    if len(ops) == 0:
        raise ValueError("No input operands")
    if len(args) != 2:
        raise ValueError("Currently only two operands are supported")
    # to get the input and output ops
    inputops, outputops = ops.split('->')
    inputops = inputops.split(',')
    # to get the two input tensor
    A = args[0]
    B = args[1]
    if A.ndim() != len(inputops[0]) or B.ndim() != len(inputops[1]):
        raise ValueError("input dim doesn't match operands")
    # to get the indices in input but not in output (these get summed out)
    sums = sorted(list((set(inputops[0]) | set(inputops[1])) - set(outputops)))
    # to get the indices that A and B use to broadcast to each other
    broadcast_A = sorted(list(set(inputops[1]) - set(inputops[0])))
    broadcast_B = sorted(list(set(inputops[0]) - set(inputops[1])))
    # to get all the indices in input
    outputall = sorted(list(set(inputops[0]) | set(inputops[1])))
    # Map indices to axis integers
    sums = [outputall.index(x) for x in sums]
    broadcast_idA = [inputops[1].find(x) for x in broadcast_A]
    broadcast_idB = [inputops[0].find(x) for x in broadcast_B]
    broadcast_a = [B.shape[x] for x in broadcast_idA]
    broadcast_b = [A.shape[x] for x in broadcast_idB]
    # get the transpose and reshape parameters used in the elementwise
    # calculation
    transpose_A = [(list(inputops[0]) + broadcast_A).index(x)
                   for x in outputall]
    transpose_B = [(list(inputops[1]) + broadcast_B).index(x)
                   for x in outputall]
    reshape_A = list(A.shape) + broadcast_a
    reshape_B = list(B.shape) + broadcast_b
    if len(broadcast_a) == 0:
        broadcast_a = [1]
    if len(broadcast_b) == 0:
        broadcast_b = [1]
    # tile each operand across the axes it lacks, then align the axis order
    mult_A = repeat(A, product(broadcast_a))
    mult_A = mult_A.reshape(reshape_A)
    mult_A = transpose(mult_A, transpose_A)
    mult_B = repeat(B, product(broadcast_b))
    mult_B = mult_B.reshape(reshape_B)
    mult_B = transpose(mult_B, transpose_B)
    if mult_A.shape != mult_B.shape:
        raise ValueError("Error: matrix dimension mismatch")
    # elementwise product, then sum out the contracted axes; iterate from
    # the highest axis down so lower axis numbers stay valid
    res = eltwise_mult(mult_A, mult_B)
    sum_R = sorted(sums, reverse=True)
    for i in sum_R:
        res = sum(res, axis=i)
    transpose_res = [sorted(list(outputops)).index(x) for x in list(outputops)]
    res = transpose(res, transpose_res)
    return res
def repeat(t, repeats, axis=None):
    '''Return the repeated tensor.

    Args:
        t(tensor): the tensor to be repeated
        repeats(int or a sequence): the number that the tensor need to repeat
        axis (int): the axis to do repeat.
            If it is None, then the repeated tensor will be flattened. If it
            isn't None, the repeats could be sequence, but its size should
            match the axis's shape.

    Return:
        the tensor which has been repeated
    '''
    # thin wrapper around Tensor.repeat
    ret = t.repeat(repeats, axis)
    return ret
def tensordot(A, B, axes=2):
"""Returns the tensor multiplication of two tensors along specified axes.
This is equivalent to compute dot product along the specified axes which
are treated as one axis by reshaping.
Args:
A: Singa.Tensor
B: Singa.Tensor
axes:
- If it is an integer, then ''axes'' represent axes at the last of ''a`'' and
the first of ''b'' are used.
- If it is a pair of sequences of integers, then these two
sequences specify the list of axes for ''a'' and ''b''. The
corresponding axes are paired for sum-product.
Return:
singa.tensor: The tensor product of ''A'' and ''B'' along the
axes specified by ''axes''.
Thanks to numpy.tensordot.
the link is https://github.com/numpy/numpy/blob/v1.14.0/numpy/core/numeric.py#L1123-L1306
"""
# when axes is an integer, axes_A and axes_B represent axes at the last of ''A'' and
# the first of ''B''. For example, when axes is 1, we do the normal multiplication :
# if A is in shape(3,2,4), B is in shape(4,2,5), it will return a matrix in shape(3,2,2,5)
# when axes is 2 and A,B are shape (3,2,4) and (2,4,5), it will return a
# matrix in shape(3,5)
if type(axes) == int:
axes_A = list(range(-axes, 0))
axes_B = list(range(0, axes))
axes_B = axes_B
else:
axes_A, axes_B = axes
# when axes is a pair of sequences of integers.For example, A is in shape(3,2,4),
# B is in shape(4,2,5), we set axes as ([1,2],[1,0]), it will return a
# matrix in shape(3,5)
if isinstance(axes_A, list):
na = len(axes_A)
axes_A = list(axes_A)
else:
axes_A = [axes_A]
na = 1
if isinstance(axes_B, list):
nb = len(axes_B)
axes_B = list(axes_B)
else:
axes_B = [axes_B]
nb = 1
# a_shape and b_shape are the shape of tensor A and B, while nda and ndb
# are the dim of A and B
a_shape = A.shape
nda = A.ndim()
b_shape = B.shape
ndb = B.ndim()
equal = True
# to check if the length of axe_A is equal to axes_B
if na != nb:
equal = False
else:
# to make the shape match
for k in range(na):
if a_shape[axes_A[k]] != b_shape[axes_B[k]]:
equal = False
break
if axes_A[k] < 0:
axes_A[k] += nda
if axes_B[k] < 0:
axes_B[k] += ndb
if not equal:
raise ValueError("shape-mismatch for sum")
'''start to do the calculation according to the axes'''
notin = [k for k in range(nda) if k not in axes_A]
# nda is the dim of A, and axes_a is the axis for A, notin is the axis
# which is not in axes_A
newaxes_a = notin + axes_A
N2 = 1
for axis in axes_A:
N2 *= a_shape[axis]
N1 = 1
for ax in notin:
N1 *= a_shape[ax]
# newshape_a is the shape to do multiplication.For example, A is in shape(3,2,4),
# B is in shape(4,2,5), we set axes as ([1,2],[1,0]), then newshape_a should be (3,5)
# olda is the shape that will be shown in the result.
newshape_a = (N1, N2)
olda = [a_shape[axis] for axis in notin]
notin = [k for k in range(ndb) if k not in axes_B]
newaxes_b = axes_B + notin
N2 = 1
for axis in axes_B:
N2 *= b_shape[axis]
N1 = 1
for bx in notin:
N1 *= b_shape[bx]
newshape_b = (N2, N1)
oldb = [b_shape[axis] for axis in notin]
A = transpose(A, newaxes_a)
B = transpose(B, newaxes_b)
at = reshape(A, newshape_a)
bt = reshape(B, newshape_b)
res = mult(at, bt)
if len(olda + oldb) == 0:
olda = [1]
oldb = [1]
res = res.reshape(tuple(olda + oldb))
else:
res = res.reshape(tuple(olda + oldb))
return res
def div(lhs, rhs, ret=None):
    '''Element-wise division.

    Args:
        lhs (Tensor)
        rhs (Tensor)
        ret (Tensor, optional): if not None, the result is stored in it;
            otherwise, a new Tensor would be created for the result.

    Returns:
        the result Tensor
    '''
    if ret is None:
        # delegate to Tensor.__div__()
        return lhs / rhs
    if isinstance(rhs, Tensor):
        singa.Div(lhs.data, rhs.data, ret.data)
    else:
        singa.DivFloatWithRet(lhs.data, rhs, ret.data)
    return ret
def axpy(alpha, x, y):
    '''Element-wise operation for y += alpha * x.

    Args:
        alpha (float)
        x (Tensor)
        y (Tensor)

    Returns:
        y
    '''
    # in-place update of y (BLAS-style axpy)
    singa.Axpy(float(alpha), x.data, y.data)
    return y
def bernoulli(p, t):
    '''Generate a binary value for each element of t.

    Args:
        p (float): each element is 1 with probability p; and 0 with 1 - p
        t (Tensor): the results are put into t

    Returns:
        t
    '''
    # fills t in place
    singa.Bernoulli(float(p), t.data)
    return t
def gaussian(mean, std, t):
    '''Generate values following a Gaussian distribution.

    Args:
        mean (float): the mean of the Gaussian distribution.
        std (float): the standard variance of the Gaussian distribution.
        t (Tensor): the results are put into t

    Returns:
        t
    '''
    # fills t in place
    singa.Gaussian(float(mean), float(std), t.data)
    return t
def uniform(low, high, t):
    '''Generate values following a Uniform distribution.

    Args:
        low (float): the lower bound
        high (float): the higher bound
        t (Tensor): the results are put into t

    Returns:
        t
    '''
    # fills t in place
    singa.Uniform(float(low), float(high), t.data)
    return t
def add_column(alpha, v, beta, M):
    '''Add v to each column of M.

    Denote each column of M as m, m = alpha * v + beta * m

    Args:
        alpha (float)
        v (Tensor)
        beta (float)
        M (Tensor): 2d tensor

    Returns:
        M
    '''
    # in-place update of M
    singa.AddColumnWithScale(float(alpha), float(beta), v.data,
                             M.data)
    return M
def add_row(alpha, v, beta, M):
    '''Add v to each row of M.

    Denote each row of M as m, m = alpha * v + beta * m

    Args:
        alpha (float)
        v (Tensor)
        beta (float)
        M (Tensor): 2d tensor

    Returns:
        M
    '''
    # cast explicitly, mirroring add_column(), so int arguments are
    # accepted by the swig binding as well
    singa.AddRowWithScale(float(alpha), float(beta), v.data, M.data)
    return M
def sum_columns(M):
    '''Sum all columns into a single column.

    Args:
        M (Tensor): the input 2d tensor.

    Returns:
        a new Tensor as the resulted column.
    '''
    assert M.ndim() == 2, 'M.nDim() is supposed to be 2'
    # result has one entry per row of M
    ret = Tensor((M.shape[0], 1), M.data.device())
    singa.SumColumns(M.data, ret.data)
    return ret
def sum_rows(M):
    '''Sum all rows into a single row.

    Args:
        M (Tensor): the input 2d tensor.

    Returns:
        a new Tensor as the resulted row.
    '''
    assert M.ndim() == 2, 'M.nDim() is supposed to be 2'
    # result has one entry per column of M
    ret = Tensor((1, M.shape[1]), M.data.device())
    singa.SumRows(M.data, ret.data)
    return ret
''' private functions, internally used
'''
def _call_singa_func(_singa_func, *args):
    ''' this function calls singa global functions that returns Tensor
        and create new python Tensor instance
        e.g., Tensor [singa_func](args...)
    '''
    new_t = Tensor()
    # replace the freshly-allocated swig tensor with the function's result,
    # then copy its metadata onto the python wrapper
    new_t.data = _singa_func(*args)
    new_t.shape = tuple(new_t.data.shape())
    new_t.device = new_t.data.device()
    new_t.dtype = new_t.data.data_type()
    return new_t
def copy_from_numpy(data, np_array):
    '''
    Copy the data from the numpy array into a swig tensor.

    Args:
        data: destination swig tensor.
        np_array: source numpy array; flattened first if multi-dimensional.
    '''
    assert np_array.size == data.Size(), \
        'tensor shape should be the same'
    if not np_array.ndim == 1:
        np_array = np_array.flatten()
    dt = np_array.dtype
    if dt == np.float32:
        data.CopyFloatDataFromHostPtr(np_array)
    elif np.issubdtype(dt, np.integer):
        # `np.int` was removed in NumPy 1.24, so match the whole integer
        # family; narrow to int32, which is what the int path elsewhere in
        # this module (from_numpy) produces.
        if dt != np.int32:
            np_array = np_array.astype(np.int32)
        data.CopyIntDataFromHostPtr(np_array)
    else:
        # best-effort behavior preserved: report and skip unsupported dtypes
        print('Not implemented yet for ', dt)
|
nusdbsystem/incubator-singa
|
python/singa/tensor.py
|
Python
|
apache-2.0
| 43,493
|
[
"Gaussian"
] |
5f0354ee44d0c10fb19eb81c0b0c5203455a179eab97a0ed46b1755ddc1fe4b4
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests.common import SavepointCase, TransactionCase, HttpCase
class TransactionCaseWithUserDemo(TransactionCase):
    """TransactionCase that guarantees a 'demo' user/partner pair exists,
    creating it on the fly when the database has no demo data."""

    def setUp(self):
        super(TransactionCaseWithUserDemo, self).setUp()
        self.env.ref('base.partner_admin').write({'name': 'Mitchell Admin'})
        self.user_demo = self.env['res.users'].search([('login', '=', 'demo')])
        self.partner_demo = self.user_demo.partner_id

        if not self.user_demo:
            # relax the password policy so the short 'demo' password passes
            self.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            # YTI TODO: This could be factorized between the different classes
            self.partner_demo = self.env['res.partner'].create({
                'name': 'Marc Demo',
                'email': 'mark.brown23@example.com',
            })
            self.user_demo = self.env['res.users'].create({
                'login': 'demo',
                'password': 'demo',
                'partner_id': self.partner_demo.id,
                'groups_id': [(6, 0, [self.env.ref('base.group_user').id, self.env.ref('base.group_partner_manager').id])],
            })
class HttpCaseWithUserDemo(HttpCase):
    """HttpCase that guarantees a 'demo' user/partner pair exists,
    creating it on the fly when the database has no demo data."""

    def setUp(self):
        super(HttpCaseWithUserDemo, self).setUp()
        self.env.ref('base.partner_admin').write({'name': 'Mitchell Admin'})
        self.user_demo = self.env['res.users'].search([('login', '=', 'demo')])
        self.partner_demo = self.user_demo.partner_id

        if not self.user_demo:
            # relax the password policy so the short 'demo' password passes
            self.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            self.partner_demo = self.env['res.partner'].create({
                'name': 'Marc Demo',
                'email': 'mark.brown23@example.com',
            })
            self.user_demo = self.env['res.users'].create({
                'login': 'demo',
                'password': 'demo',
                'partner_id': self.partner_demo.id,
                'groups_id': [(6, 0, [self.env.ref('base.group_user').id, self.env.ref('base.group_partner_manager').id])],
            })
class SavepointCaseWithUserDemo(SavepointCase):
    """SavepointCase that guarantees a 'demo' user/partner pair exists
    (created once per class), plus an optional helper to load a fixed set
    of anonymized demo partners."""

    @classmethod
    def setUpClass(cls):
        super(SavepointCaseWithUserDemo, cls).setUpClass()
        cls.user_demo = cls.env['res.users'].search([('login', '=', 'demo')])
        cls.partner_demo = cls.user_demo.partner_id

        if not cls.user_demo:
            # relax the password policy so the short 'demo' password passes
            cls.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            cls.partner_demo = cls.env['res.partner'].create({
                'name': 'Marc Demo',
                'email': 'mark.brown23@example.com',
            })
            cls.user_demo = cls.env['res.users'].create({
                'login': 'demo',
                'password': 'demo',
                'partner_id': cls.partner_demo.id,
                'groups_id': [(6, 0, [cls.env.ref('base.group_user').id, cls.env.ref('base.group_partner_manager').id])],
            })

    @classmethod
    def _load_partners_set(cls):
        # Categories: one parent with two children, reused by some partners.
        cls.partner_category = cls.env['res.partner.category'].create({
            'name': 'Sellers',
            'color': 2,
        })
        cls.partner_category_child_1 = cls.env['res.partner.category'].create({
            'name': 'Office Supplies',
            'parent_id': cls.partner_category.id,
        })
        cls.partner_category_child_2 = cls.env['res.partner.category'].create({
            'name': 'Desk Manufacturers',
            'parent_id': cls.partner_category.id,
        })

        # Load all the demo partners (names anonymized; the original demo
        # names are kept in the trailing comments for traceability)
        cls.partners = cls.env['res.partner'].create([
            {
                'name': 'Inner Works',  # Wood Corner
                'state_id': cls.env.ref('base.state_us_1').id,
                'category_id': [(6, 0, [cls.partner_category_child_1.id, cls.partner_category_child_2.id,])],
                'child_ids': [(0, 0, {
                    'name': 'Sheila Ruiz',  # 'Willie Burke',
                }), (0, 0, {
                    'name': 'Wyatt Howard',  # 'Ron Gibson',
                }), (0, 0, {
                    'name': 'Austin Kennedy',  # Tom Ruiz
                })],
            }, {
                'name': 'Pepper Street',  # 'Deco Addict',
                'state_id': cls.env.ref('base.state_us_2').id,
                'child_ids': [(0, 0, {
                    'name': 'Liam King',  # 'Douglas Fletcher',
                }), (0, 0, {
                    'name': 'Craig Richardson',  # 'Floyd Steward',
                }), (0, 0, {
                    'name': 'Adam Cox',  # 'Addison Olson',
                })],
            }, {
                'name': 'AnalytIQ',  # 'Gemini Furniture',
                'state_id': cls.env.ref('base.state_us_3').id,
                'child_ids': [(0, 0, {
                    'name': 'Pedro Boyd',  # Edwin Hansen
                }), (0, 0, {
                    'name': 'Landon Roberts',  # 'Jesse Brown',
                    'company_id': cls.env.ref('base.main_company').id,
                }), (0, 0, {
                    'name': 'Leona Shelton',  # 'Soham Palmer',
                }), (0, 0, {
                    'name': 'Scott Kim',  # 'Oscar Morgan',
                })],
            }, {
                'name': 'Urban Trends',  # 'Ready Mat',
                'state_id': cls.env.ref('base.state_us_4').id,
                'category_id': [(6, 0, [cls.partner_category_child_1.id, cls.partner_category_child_2.id,])],
                'child_ids': [(0, 0, {
                    'name': 'Louella Jacobs',  # 'Billy Fox',
                }), (0, 0, {
                    'name': 'Albert Alexander',  # 'Kim Snyder',
                }), (0, 0, {
                    'name': 'Brad Castillo',  # 'Edith Sanchez',
                }), (0, 0, {
                    'name': 'Sophie Montgomery',  # 'Sandra Neal',
                }), (0, 0, {
                    'name': 'Chloe Bates',  # 'Julie Richards',
                }), (0, 0, {
                    'name': 'Mason Crawford',  # 'Travis Mendoza',
                }), (0, 0, {
                    'name': 'Elsie Kennedy',  # 'Theodore Gardner',
                })],
            }, {
                'name': 'Ctrl-Alt-Fix',  # 'The Jackson Group',
                'state_id': cls.env.ref('base.state_us_5').id,
                'child_ids': [(0, 0, {
                    'name': 'carole miller',  # 'Toni Rhodes',
                }), (0, 0, {
                    'name': 'Cecil Holmes',  # 'Gordon Owens',
                })],
            }, {
                'name': 'Ignitive Labs',  # 'Azure Interior',
                'state_id': cls.env.ref('base.state_us_6').id,
                'child_ids': [(0, 0, {
                    'name': 'Jonathan Webb',  # 'Brandon Freeman',
                }), (0, 0, {
                    'name': 'Clinton Clark',  # 'Nicole Ford',
                }), (0, 0, {
                    'name': 'Howard Bryant',  # 'Colleen Diaz',
                })],
            }, {
                'name': 'Amber & Forge',  # 'Lumber Inc',
                'state_id': cls.env.ref('base.state_us_7').id,
                'child_ids': [(0, 0, {
                    'name': 'Mark Webb',  # 'Lorraine Douglas',
                })],
            }, {
                'name': 'Rebecca Day',  # 'Chester Reed',
                'parent_id': cls.env.ref('base.main_partner').id,
            }, {
                'name': 'Gabriella Jennings',  # 'Dwayne Newman',
                'parent_id': cls.env.ref('base.main_partner').id,
            }
        ])
class HttpCaseWithUserPortal(HttpCase):
    """HttpCase variant that guarantees a 'portal' demo user exists.

    Exposes the user with login 'portal' as ``self.user_portal`` and its
    partner as ``self.partner_portal``; if no such user exists, both are
    created during setUp.
    """
    def setUp(self):
        super(HttpCaseWithUserPortal, self).setUp()
        # An empty recordset here means the portal demo user is not
        # present in this database.
        self.user_portal = self.env['res.users'].search([('login', '=', 'portal')])
        # On an empty user recordset this is an empty partner recordset;
        # it is replaced by a freshly created partner below in that case.
        self.partner_portal = self.user_portal.partner_id
        if not self.user_portal:
            # Relax the minimum password length so the short password
            # 'portal' is accepted when the user is created.
            self.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            self.partner_portal = self.env['res.partner'].create({
                'name': 'Joel Willis',
                'email': 'joel.willis63@example.com',
            })
            # NOTE(review): no_reset_password presumably suppresses the
            # signup/reset e-mail during test setup — confirm against the
            # res.users create() implementation.
            self.user_portal = self.env['res.users'].with_context(no_reset_password=True).create({
                'login': 'portal',
                'password': 'portal',
                'partner_id': self.partner_portal.id,
                'groups_id': [(6, 0, [self.env.ref('base.group_portal').id])],
            })
|
ddico/odoo
|
odoo/addons/base/tests/common.py
|
Python
|
agpl-3.0
| 8,468
|
[
"Amber"
] |
8faef9197b1f8d9275db4c3722076a5d2c37c01f7c592945373fce26d715263b
|
# -*- coding: utf-8 -*-
"""
This is a Python implementation of the fast algorithm developed by
Vincent Mazet and Nicolas Chopin
(see http://miv.u-strasbg.fr/mazet/rtnorm/).
The version this code is based on is the Matlab implementation from 2012.
Created on Mon Aug 12 13:48:22 2013
Update on 11/27/2014: Added `erf` fallback implementation for missing
scipy. Thanks to Dr. Cliff Kerr (University of Sydney) for submitting
his patch!
@author: Christoph Lassner
"""
from numpy.random import uniform as rand, normal as randn, randint as randi
from numpy import sqrt, pi, exp, log, floor, array
try:
    from scipy.special import erf
except ImportError:
    # In some situations scipy might not be available or might take too
    # long to compile (e.g. for Amazon Application deployment).
    # Use a fallback implementation relying only on `math.erf` and
    # `numpy.nditer`.
    from numpy import nditer  # Loop over N-dimensional arrays
    import math  # Scalar erf function

    def erf(arr):
        r"""
        Element-wise error function, replicating ``scipy.special.erf``
        for array inputs by applying ``math.erf`` to every element.

        :param arr: array-like of numbers.
        :returns: new float ndarray with erf applied element-wise; the
            input is left untouched.
        """
        # Force a float copy so integer inputs don't truncate the result.
        output = array(arr, dtype=float)
        for cell in nditer(output, op_flags=['readwrite']):
            # Assign through the nditer view (`cell[...] = ...`); a plain
            # name rebinding (`cell = math.erf(cell)`) would leave the
            # array unchanged.
            cell[...] = math.erf(cell)
        return output
def rtnorm(a, b, mu=0., sigma=1., size=1, probabilities=False):
    r"""
    Pseudorandom numbers from a truncated Gaussian distribution.

    ``rtnorm(a, b)`` returns a pseudorandom variable generated from a
    normal distribution with mean zero and variance one (i.e. standard
    normal distribution) truncated to the interval [a, b].

    ``rtnorm(a, b, mu, sigma)`` returns a pseudorandom variable generated
    from a normal distribution with mean ``mu`` and standard deviation
    ``sigma`` truncated to the interval [a, b].

    :param a: lower truncation bound.
    :param b: upper truncation bound (must exceed ``a``).
    :param mu: mean of the (untruncated) Gaussian.
    :param sigma: standard deviation of the (untruncated) Gaussian.
    :param size: number of variates to draw.
    :param probabilities: if True, also return the vector of pdf values
        of the drawn variates.
    :returns: ndarray of draws, or ``(draws, pdf_values)`` when
        ``probabilities`` is True.

    This implements an extension of Chopin's algorithm detailed in
    N. Chopin, "Fast simulation of truncated Gaussian distributions",
    Stat Comput (2011) 21:275-288.

    Copyright (C) 2012 Vincent Mazet (LSIIT, CNRS/Université de Strasbourg),
    Version 2012-07-04, vincent.mazet@unistra.fr

    Licence: GNU General Public License Version 2
    This program is free software; you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by the
    Free Software Foundation; either version 2 of the License, or (at your
    option) any later version. This program is distributed in the hope that
    it will be useful, but WITHOUT ANY WARRANTY; without even the implied
    warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details. You should have received a
    copy of the GNU General Public License along with this program; if not,
    see http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
    """
    # Ensure floats for proper division later on.
    mu = float(mu)
    sigma = float(sigma)
    a = float(a)
    b = float(b)
    # Only rescale when the target is not already the standard normal.
    nonstandard = mu != 0. or sigma != 1.
    if nonstandard:
        # Map the truncation bounds onto standard-normal units.
        a = (a-mu) / sigma
        b = (b-mu) / sigma
    # Draw `size` independent standard truncated-normal variates.
    r = array([rtstdnorm(a, b) for _ in range(size)])
    if nonstandard:
        # Map the draws back to the requested (mu, sigma) scale.
        r = r * sigma + mu
    # Compute the probabilities (pdf of the truncated Gaussian at r).
    if probabilities:
        # Normalization constant of the truncated Gaussian.
        Z = sqrt(pi/2)*sigma * (erf(b/sqrt(2))-erf(a/sqrt(2)))
        Z = max(Z, 1e-15)  # Guard against division by zero / NaN
        p = exp(-(r-mu)**2/2/sigma**2) / Z
        return r, p
    return r
def rtstdnorm(a, b):
    r"""
    Draw one pseudorandom number from the standard Gaussian distribution
    truncated to [a, b] (i.e. ``rtnorm(a, b, 0, 1)``).

    Uses Chopin's table-based algorithm in the central region and
    rejection sampling in the tails.  Relies on the module-level lookup
    tables ``x``, ``yu`` and ``ncell`` defined later in this file.

    :param a: lower truncation bound (standard-normal units).
    :param b: upper truncation bound, must satisfy ``b > a``.
    :returns: a single float draw.
    :raises Exception: if ``a >= b``.
    """
    # Limits of the precomputed table region; outside these the tail
    # rejection samplers are used instead.
    xmin = -2.00443204036
    xmax = 3.48672170399
    # The truncation interval must be non-empty.
    if a >= b:
        raise Exception('For a truncated ndst in [a,b] b must be greater than a.')
    # Exploit symmetry of the Gaussian: mirror so that |a| <= |b|.
    elif abs(a) > abs(b):
        r = -rtstdnorm(-b, -a)
    # If a is in the right tail (a > xmax), use a rejection algorithm with
    # a truncated exponential proposal.
    elif a > xmax:
        stop = False
        twoasq = 2*a**2
        expab = exp(-a*(b-a)) - 1
        while not stop:
            # The rand-function in Matlab that was used here returns values
            # uniformly distributed in (0, 1). The numpy version includes
            # the left border of the interval, so the numbers are drawn from
            # [0, 1). Hence use a low lower border to avoid log(0).
            z = log(1 + rand(low=1E-15)*expab)
            e = -log(rand(low=1E-15))
            stop = (twoasq*e > z ** 2)
        r = a - z/a
    # If a is in the left tail (a < xmin), use a rejection algorithm with
    # a plain Gaussian proposal.
    elif a < xmin:
        stop = False
        while not stop:
            r = randn()
            stop = (r>=a) and (r<=b)
    # In other cases (xmin < a < xmax), use Chopin's algorithm.
    else:
        # Design variables of the precomputed tables.
        kmin = 5 # if kb-ka < kmin then use a rejection algorithm
        INVH = 1631.73284006 # 1/h, h being the minimal interval range
        I0 = 3271 # = - floor(x(1)/h)
        ALPHA = 1.837877066409345 # = log(2*pi)
        N = 4000 # Index of the right tail
        yl0 = 0.053513975472 # y_l of the leftmost rectangle
        ylN = 0.000914116389555 # y_l of the rightmost rectangle
        # Compute ka and kb, the table-cell indices bracketing a and b.
        i = int(I0 + floor(a*INVH))
        ka = ncell[i] # not: +1 due to index offset in Matlab ;-)
        kb = 0
        if b >= xmax:
            kb = N
        else:
            i = int(I0 + floor(b*INVH))
            kb = ncell[i] # not: +1 due to index offset in Matlab
        # If |b-a| is small, use rejection algorithm with a truncated exponential proposal
        if abs(kb-ka) < kmin:
            stop = False
            twoasq = 2 * a**2
            expab = exp(-a*(b-a)) - 1
            while not stop:
                z = log( 1 + rand()*expab )
                e = -log(rand())
                stop = (twoasq*e > z**2)
            r = a - z/a
            return r
        # Accept/reject loop over the precomputed rectangles; exits only
        # via one of the `return r` statements below.
        while True:
            # Sample a rectangle index between ka and kb.
            # Note that while matlab randi has including border, for numpy the high
            # border is exclusive. Hence add one.
            k = randi(low=ka, high=(kb+1)) # not: +1 due to index offset in Matlab
            if k == N:
                # Right tail
                lbound = x[-1]
                z = -log(rand())
                e = -log(rand())
                z = z / lbound
                if (z**2 <= 2*e) and (z < b-lbound):
                    # Accept this proposition, otherwise reject
                    r = lbound + z
                    return r
            elif (k<=ka+2) or (k>=kb and b<xmax):
                # Two leftmost and rightmost regions
                sim = x[k] + (x[k+1]-x[k]) * rand()
                if (sim >= a) and (sim <= b):
                    # Accept this proposition, otherwise reject
                    simy = yu[k]*rand()
                    # Compute y_l from y_k.
                    # NOTE(review): this branch guards the leftmost
                    # rectangle with `k == 0`, while the analogous branch
                    # below uses `k == 1` — looks like a 1-based Matlab
                    # index port inconsistency; confirm against the
                    # reference rtnorm implementation.
                    if k == 0:
                        ylk = yl0
                    elif k == N:
                        ylk = ylN
                    elif k <= 1954:
                        ylk = yu[k-1]
                    else:
                        ylk = yu[k+1]
                    if (simy<ylk) or (sim**2 + 2*log(simy) + ALPHA < 0):
                        r = sim
                        return r
            else:
                # All the other boxes
                u = rand()
                simy = yu[k] * u
                d = x[k+1] - x[k]
                # Compute y_l from y_k (see NOTE above about `k == 0`
                # vs `k == 1`).
                if k == 1:
                    ylk = yl0
                elif k == N:
                    ylk = ylN
                elif k <= 1954:
                    ylk = yu[k-1]
                else:
                    ylk = yu[k+1]
                if simy < ylk: # That's what happens most of the time
                    r = x[k] + u*d*yu[k]/ylk
                    return r
                sim = x[k] + d * rand()
                # Otherwise, check you're below the pdf curve
                if sim**2 + 2*log(simy) + ALPHA < 0:
                    r = sim
                    return r
    # Reached by the mirrored and left-tail branches, which set r
    # without returning early.
    return r
# Tables
x = array([
-2.00443204036, -1.99990455547, -1.99541747213, -1.99096998962, \
-1.98656133124, -1.98219074335, -1.97785749442, -1.97356087419, \
-1.96930019287, -1.96507478031, -1.96088398528, -1.95672717477, \
-1.95260373328, -1.9485130622, -1.94445457918, -1.94042771755, \
-1.93643192574, -1.93246666677, -1.92853141772, -1.92462566922, \
-1.92074892503, -1.91690070156, -1.91308052741, -1.90928794302, \
-1.90552250025, -1.90178376197, -1.89807130174, -1.89438470345, \
-1.89072356098, -1.88708747787, -1.88347606705, -1.8798889505, \
-1.87632575899, -1.87278613181, -1.86926971649, -1.86577616858, \
-1.86230515137, -1.85885633567, -1.8554293996, -1.85202402837, \
-1.84863991405, -1.84527675539, -1.84193425762, -1.83861213227, \
-1.83531009698, -1.83202787533, -1.82876519668, -1.825521796, \
-1.82229741372, -1.81909179558, -1.81590469249, -1.81273586036, \
-1.80958506, -1.80645205698, -1.8033366215, -1.80023852827, \
-1.79715755637, -1.79409348917, -1.79104611422, -1.78801522309, \
-1.78500061134, -1.78200207837, -1.77901942732, -1.77605246501, \
-1.77310100183, -1.77016485166, -1.76724383175, -1.7643377627, \
-1.7614464683, -1.75856977555, -1.75570751448, -1.75285951816, \
-1.75002562257, -1.74720566658, -1.74439949184, -1.74160694276, \
-1.73882786639, -1.7360621124, -1.73330953303, -1.73056998298, \
-1.72784331941, -1.72512940185, -1.72242809217, -1.71973925449, \
-1.71706275519, -1.7143984628, -1.71174624799, -1.70910598353, \
-1.70647754419, -1.70386080677, -1.70125565, -1.69866195455, \
-1.69607960292, -1.69350847947, -1.69094847035, -1.68839946345, \
-1.6858613484, -1.68333401649, -1.68081736069, -1.67831127556, \
-1.67581565725, -1.67333040348, -1.67085541345, -1.6683905879, \
-1.665935829, -1.66349104035, -1.66105612696, -1.65863099522, \
-1.65621555288, -1.65380970898, -1.65141337389, -1.64902645924, \
-1.64664887792, -1.64428054402, -1.64192137286, -1.63957128092, \
-1.63723018585, -1.63489800643, -1.63257466256, -1.63026007522, \
-1.62795416649, -1.62565685948, -1.62336807836, -1.62108774828, \
-1.61881579544, -1.61655214696, -1.61429673098, -1.61204947656, \
-1.60981031368, -1.60757917325, -1.60535598708, -1.60314068784, \
-1.60093320909, -1.59873348523, -1.59654145149, -1.59435704393, \
-1.59218019943, -1.59001085565, -1.58784895103, -1.58569442477, \
-1.58354721686, -1.581407268, -1.57927451964, -1.57714891392, \
-1.57503039371, -1.57291890258, -1.57081438477, -1.56871678519, \
-1.56662604942, -1.56454212368, -1.56246495486, -1.56039449044, \
-1.55833067854, -1.55627346789, -1.55422280782, -1.55217864825, \
-1.55014093969, -1.5481096332, -1.54608468043, -1.54406603357, \
-1.54205364536, -1.54004746908, -1.53804745854, -1.53605356807, \
-1.53406575252, -1.53208396723, -1.53010816806, -1.52813831134, \
-1.52617435391, -1.52421625305, -1.52226396655, -1.52031745264, \
-1.51837667, -1.51644157777, -1.51451213554, -1.51258830332, \
-1.51067004156, -1.50875731112, -1.5068500733, -1.50494828979, \
-1.50305192269, -1.50116093452, -1.49927528818, -1.49739494693, \
-1.49551987447, -1.49365003484, -1.49178539245, -1.48992591209, \
-1.48807155892, -1.48622229844, -1.48437809651, -1.48253891934, \
-1.48070473348, -1.47887550581, -1.47705120356, -1.47523179427, \
-1.47341724582, -1.47160752641, -1.46980260454, -1.46800244903, \
-1.46620702902, -1.46441631394, -1.46263027351, -1.46084887778, \
-1.45907209704, -1.45729990192, -1.4555322633, -1.45376915236, \
-1.45201054053, -1.45025639954, -1.44850670139, -1.44676141832, \
-1.44502052286, -1.44328398779, -1.44155178613, -1.43982389118, \
-1.43810027647, -1.43638091579, -1.43466578316, -1.43295485285, \
-1.43124809936, -1.42954549744, -1.42784702206, -1.4261526484, \
-1.42446235191, -1.42277610822, -1.42109389321, -1.41941568296, \
-1.41774145377, -1.41607118216, -1.41440484485, -1.41274241877, \
-1.41108388106, -1.40942920906, -1.40777838029, -1.40613137251, \
-1.40448816364, -1.40284873181, -1.40121305532, -1.39958111269, \
-1.3979528826, -1.39632834393, -1.39470747574, -1.39309025725, \
-1.39147666789, -1.38986668723, -1.38826029505, -1.38665747129, \
-1.38505819603, -1.38346244956, -1.38187021232, -1.3802814649, \
-1.37869618807, -1.37711436276, -1.37553597004, -1.37396099116, \
-1.37238940752, -1.37082120066, -1.36925635228, -1.36769484423, \
-1.36613665852, -1.36458177727, -1.3630301828, -1.36148185752, \
-1.35993678401, -1.35839494499, -1.35685632332, -1.35532090198, \
-1.3537886641, -1.35225959295, -1.35073367192, -1.34921088453, \
-1.34769121444, -1.34617464545, -1.34466116145, -1.34315074649, \
-1.34164338473, -1.34013906045, -1.33863775808, -1.33713946213, \
-1.33564415726, -1.33415182822, -1.33266245992, -1.33117603734, \
-1.3296925456, -1.32821196994, -1.32673429568, -1.32525950829, \
-1.32378759331, -1.32231853644, -1.32085232344, -1.31938894019, \
-1.3179283727, -1.31647060705, -1.31501562944, -1.31356342618, \
-1.31211398366, -1.31066728839, -1.30922332698, -1.30778208612, \
-1.30634355261, -1.30490771336, -1.30347455533, -1.30204406564, \
-1.30061623144, -1.29919104001, -1.29776847873, -1.29634853503, \
-1.29493119647, -1.29351645068, -1.29210428538, -1.29069468838, \
-1.28928764758, -1.28788315096, -1.28648118658, -1.2850817426, \
-1.28368480725, -1.28229036885, -1.2808984158, -1.27950893658, \
-1.27812191975, -1.27673735394, -1.27535522788, -1.27397553036, \
-1.27259825027, -1.27122337654, -1.2698508982, -1.26848080436, \
-1.26711308419, -1.26574772695, -1.26438472195, -1.26302405859, \
-1.26166572634, -1.26030971474, -1.25895601339, -1.25760461198, \
-1.25625550025, -1.25490866802, -1.25356410517, -1.25222180165, \
-1.25088174749, -1.24954393277, -1.24820834764, -1.24687498231, \
-1.24554382707, -1.24421487225, -1.24288810826, -1.24156352558, \
-1.24024111474, -1.23892086632, -1.23760277098, -1.23628681945, \
-1.23497300248, -1.23366131092, -1.23235173565, -1.23104426764, \
-1.22973889789, -1.22843561746, -1.22713441748, -1.22583528914, \
-1.22453822366, -1.22324321234, -1.22195024653, -1.22065931762, \
-1.21937041707, -1.2180835364, -1.21679866716, -1.21551580096, \
-1.21423492948, -1.21295604444, -1.21167913759, -1.21040420078, \
-1.20913122586, -1.20786020475, -1.20659112944, -1.20532399194, \
-1.20405878432, -1.2027954987, -1.20153412724, -1.20027466216, \
-1.19901709572, -1.19776142023, -1.19650762804, -1.19525571156, \
-1.19400566322, -1.19275747553, -1.19151114101, -1.19026665225, \
-1.18902400188, -1.18778318256, -1.18654418701, -1.18530700798, \
-1.18407163828, -1.18283807074, -1.18160629825, -1.18037631374, \
-1.17914811017, -1.17792168055, -1.17669701793, -1.1754741154, \
-1.17425296609, -1.17303356317, -1.17181589985, -1.17059996938, \
-1.16938576505, -1.16817328018, -1.16696250814, -1.16575344233, \
-1.1645460762, -1.16334040321, -1.16213641689, -1.16093411079, \
-1.1597334785, -1.15853451364, -1.15733720988, -1.1561415609, \
-1.15494756045, -1.1537552023, -1.15256448023, -1.1513753881, \
-1.15018791978, -1.14900206916, -1.1478178302, -1.14663519686, \
-1.14545416315, -1.14427472312, -1.14309687083, -1.14192060039, \
-1.14074590595, -1.13957278166, -1.13840122174, -1.13723122041, \
-1.13606277195, -1.13489587064, -1.13373051083, -1.13256668686, \
-1.13140439313, -1.13024362405, -1.12908437408, -1.12792663769, \
-1.1267704094, -1.12561568374, -1.12446245528, -1.12331071862, \
-1.12216046839, -1.12101169923, -1.11986440583, -1.1187185829, \
-1.11757422519, -1.11643132745, -1.11528988448, -1.11414989111, \
-1.11301134218, -1.11187423257, -1.11073855719, -1.10960431095, \
-1.10847148882, -1.10734008578, -1.10621009684, -1.10508151703, \
-1.10395434141, -1.10282856507, -1.10170418311, -1.10058119068, \
-1.09945958293, -1.09833935506, -1.09722050226, -1.09610301977, \
-1.09498690286, -1.09387214681, -1.09275874692, -1.09164669853, \
-1.09053599698, -1.08942663766, -1.08831861598, -1.08721192734, \
-1.08610656721, -1.08500253104, -1.08389981434, -1.08279841262, \
-1.08169832142, -1.0805995363, -1.07950205283, -1.07840586663, \
-1.07731097332, -1.07621736855, -1.07512504799, -1.07403400732, \
-1.07294424226, -1.07185574854, -1.07076852192, -1.06968255816, \
-1.06859785307, -1.06751440246, -1.06643220217, -1.06535124805, \
-1.06427153597, -1.06319306184, -1.06211582157, -1.06103981109, \
-1.05996502636, -1.05889146336, -1.05781911808, -1.05674798653, \
-1.05567806475, -1.05460934878, -1.0535418347, -1.0524755186, \
-1.05141039657, -1.05034646476, -1.04928371929, -1.04822215635, \
-1.04716177209, -1.04610256273, -1.04504452448, -1.04398765357, \
-1.04293194626, -1.04187739881, -1.04082400752, -1.03977176868, \
-1.03872067861, -1.03767073366, -1.03662193018, -1.03557426455, \
-1.03452773314, -1.03348233237, -1.03243805866, -1.03139490845, \
-1.03035287819, -1.02931196435, -1.02827216342, -1.02723347191, \
-1.02619588633, -1.02515940322, -1.02412401912, -1.02308973062, \
-1.02205653428, -1.02102442671, -1.01999340452, -1.01896346433, \
-1.01793460279, -1.01690681657, -1.01588010232, -1.01485445675, \
-1.01382987655, -1.01280635844, -1.01178389916, -1.01076249545, \
-1.00974214407, -1.0087228418, -1.00770458543, -1.00668737176, \
-1.00567119762, -1.00465605983, -1.00364195524, -1.00262888071, \
-1.00161683312, -1.00060580935, -0.999595806306, -0.9985868209, \
-0.997578850062, -0.996571890733, -0.995565939868, -0.994560994436, \
-0.993557051418, -0.992554107808, -0.991552160613, -0.990551206854, \
-0.989551243564, -0.988552267788, -0.987554276585, -0.986557267027, \
-0.985561236196, -0.984566181188, -0.983572099113, -0.982578987091, \
-0.981586842254, -0.980595661749, -0.979605442731, -0.978616182371, \
-0.977627877849, -0.976640526359, -0.975654125105, -0.974668671305, \
-0.973684162186, -0.972700594988, -0.971717966963, -0.970736275374, \
-0.969755517495, -0.968775690612, -0.967796792022, -0.966818819033, \
-0.965841768964, -0.964865639146, -0.963890426921, -0.962916129641, \
-0.961942744669, -0.960970269379, -0.959998701157, -0.959028037398, \
-0.958058275508, -0.957089412906, -0.956121447017, -0.955154375281, \
-0.954188195145, -0.953222904069, -0.952258499521, -0.951294978982, \
-0.95033233994, -0.949370579895, -0.948409696358, -0.947449686847, \
-0.946490548893, -0.945532280036, -0.944574877824, -0.943618339818, \
-0.942662663587, -0.941707846709, -0.940753886774, -0.939800781378, \
-0.93884852813, -0.937897124647, -0.936946568555, -0.935996857491, \
-0.9350479891, -0.934099961035, -0.933152770962, -0.932206416553, \
-0.931260895491, -0.930316205466, -0.929372344179, -0.928429309338, \
-0.927487098664, -0.926545709881, -0.925605140727, -0.924665388946, \
-0.923726452292, -0.922788328527, -0.921851015421, -0.920914510754, \
-0.919978812315, -0.919043917899, -0.918109825313, -0.917176532369, \
-0.916244036888, -0.915312336703, -0.91438142965, -0.913451313577, \
-0.912521986339, -0.911593445799, -0.910665689828, -0.909738716305, \
-0.908812523118, -0.907887108163, -0.906962469342, -0.906038604567, \
-0.905115511758, -0.90419318884, -0.90327163375, -0.902350844428, \
-0.901430818827, -0.900511554903, -0.899593050622, -0.898675303958, \
-0.897758312891, -0.896842075409, -0.895926589508, -0.895011853191, \
-0.894097864469, -0.893184621359, -0.892272121887, -0.891360364086, \
-0.890449345995, -0.889539065661, -0.888629521138, -0.887720710488, \
-0.886812631779, -0.885905283087, -0.884998662493, -0.884092768089, \
-0.883187597969, -0.882283150238, -0.881379423006, -0.88047641439, \
-0.879574122514, -0.878672545509, -0.877771681512, -0.876871528668, \
-0.875972085128, -0.875073349049, -0.874175318595, -0.873277991937, \
-0.872381367254, -0.871485442727, -0.870590216549, -0.869695686916, \
-0.868801852031, -0.867908710104, -0.86701625935, -0.866124497993, \
-0.865233424261, -0.864343036389, -0.863453332618, -0.862564311196, \
-0.861675970376, -0.860788308418, -0.859901323588, -0.859015014157, \
-0.858129378404, -0.857244414613, -0.856360121074, -0.855476496083, \
-0.854593537942, -0.853711244958, -0.852829615446, -0.851948647726, \
-0.851068340122, -0.850188690965, -0.849309698594, -0.84843136135, \
-0.847553677583, -0.846676645646, -0.845800263899, -0.844924530708, \
-0.844049444444, -0.843175003483, -0.842301206208, -0.841428051007, \
-0.840555536273, -0.839683660404, -0.838812421805, -0.837941818885, \
-0.83707185006, -0.83620251375, -0.83533380838, -0.834465732382, \
-0.833598284192, -0.832731462252, -0.831865265009, -0.830999690914, \
-0.830134738426, -0.829270406006, -0.828406692123, -0.827543595248, \
-0.826681113861, -0.825819246443, -0.824957991484, -0.824097347476, \
-0.823237312917, -0.82237788631, -0.821519066163, -0.82066085099, \
-0.819803239307, -0.818946229639, -0.818089820512, -0.817234010459, \
-0.816378798017, -0.815524181729, -0.814670160142, -0.813816731806, \
-0.812963895279, -0.812111649122, -0.8112599919, -0.810408922185, \
-0.80955843855, -0.808708539576, -0.807859223848, -0.807010489955, \
-0.806162336489, -0.805314762049, -0.804467765238, -0.803621344663, \
-0.802775498936, -0.801930226672, -0.801085526493, -0.800241397023, \
-0.799397836891, -0.798554844732, -0.797712419183, -0.796870558888, \
-0.796029262492, -0.795188528648, -0.79434835601, -0.793508743238, \
-0.792669688996, -0.791831191953, -0.790993250781, -0.790155864155, \
-0.789319030758, -0.788482749274, -0.787647018393, -0.786811836806, \
-0.785977203212, -0.785143116312, -0.784309574812, -0.783476577421, \
-0.782644122852, -0.781812209823, -0.780980837056, -0.780150003277, \
-0.779319707213, -0.7784899476, -0.777660723175, -0.776832032678, \
-0.776003874855, -0.775176248455, -0.774349152231, -0.773522584939, \
-0.772696545341, -0.7718710322, -0.771046044284, -0.770221580367, \
-0.769397639223, -0.768574219631, -0.767751320376, -0.766928940243, \
-0.766107078024, -0.765285732513, -0.764464902507, -0.763644586809, \
-0.762824784223, -0.762005493558, -0.761186713627, -0.760368443246, \
-0.759550681234, -0.758733426414, -0.757916677614, -0.757100433662, \
-0.756284693394, -0.755469455646, -0.754654719259, -0.753840483077, \
-0.753026745948, -0.752213506722, -0.751400764255, -0.750588517404, \
-0.74977676503, -0.748965505998, -0.748154739176, -0.747344463435, \
-0.746534677651, -0.7457253807, -0.744916571465, -0.74410824883, \
-0.743300411682, -0.742493058914, -0.741686189419, -0.740879802096, \
-0.740073895844, -0.739268469569, -0.738463522177, -0.737659052579, \
-0.736855059689, -0.736051542423, -0.735248499702, -0.734445930447, \
-0.733643833587, -0.73284220805, -0.732041052768, -0.731240366677, \
-0.730440148716, -0.729640397826, -0.728841112951, -0.728042293041, \
-0.727243937044, -0.726446043916, -0.725648612612, -0.724851642093, \
-0.724055131321, -0.723259079262, -0.722463484884, -0.721668347159, \
-0.720873665062, -0.720079437569, -0.719285663661, -0.718492342322, \
-0.717699472536, -0.716907053294, -0.716115083586, -0.715323562408, \
-0.714532488756, -0.713741861631, -0.712951680037, -0.712161942978, \
-0.711372649463, -0.710583798504, -0.709795389115, -0.709007420313, \
-0.708219891118, -0.707432800551, -0.706646147638, -0.705859931406, \
-0.705074150887, -0.704288805113, -0.70350389312, -0.702719413947, \
-0.701935366634, -0.701151750226, -0.700368563769, -0.699585806312, \
-0.698803476906, -0.698021574607, -0.69724009847, -0.696459047555, \
-0.695678420925, -0.694898217643, -0.694118436777, -0.693339077397, \
-0.692560138575, -0.691781619384, -0.691003518904, -0.690225836212, \
-0.689448570392, -0.688671720529, -0.687895285708, -0.687119265021, \
-0.686343657558, -0.685568462415, -0.684793678689, -0.684019305478, \
-0.683245341885, -0.682471787013, -0.68169863997, -0.680925899864, \
-0.680153565806, -0.679381636911, -0.678610112294, -0.677838991074, \
-0.677068272372, -0.67629795531, -0.675528039013, -0.674758522611, \
-0.673989405232, -0.67322068601, -0.672452364078, -0.671684438573, \
-0.670916908635, -0.670149773405, -0.669383032026, -0.668616683646, \
-0.66785072741, -0.667085162471, -0.666319987981, -0.665555203094, \
-0.664790806967, -0.66402679876, -0.663263177633, -0.662499942752, \
-0.66173709328, -0.660974628386, -0.66021254724, -0.659450849015, \
-0.658689532883, -0.657928598023, -0.657168043612, -0.656407868831, \
-0.655648072862, -0.654888654892, -0.654129614105, -0.653370949693, \
-0.652612660844, -0.651854746754, -0.651097206616, -0.650340039629, \
-0.64958324499, -0.648826821903, -0.648070769569, -0.647315087195, \
-0.646559773988, -0.645804829157, -0.645050251913, -0.64429604147, \
-0.643542197043, -0.642788717849, -0.642035603108, -0.641282852041, \
-0.640530463871, -0.639778437823, -0.639026773124, -0.638275469004, \
-0.637524524692, -0.636773939423, -0.636023712429, -0.63527384295, \
-0.634524330221, -0.633775173485, -0.633026371984, -0.632277924961, \
-0.631529831662, -0.630782091336, -0.630034703232, -0.629287666601, \
-0.628540980698, -0.627794644778, -0.627048658096, -0.626303019913, \
-0.625557729489, -0.624812786087, -0.62406818897, -0.623323937406, \
-0.622580030661, -0.621836468005, -0.621093248711, -0.62035037205, \
-0.619607837299, -0.618865643733, -0.618123790632, -0.617382277275, \
-0.616641102944, -0.615900266923, -0.615159768498, -0.614419606955, \
-0.613679781584, -0.612940291674, -0.612201136518, -0.61146231541, \
-0.610723827646, -0.609985672522, -0.609247849338, -0.608510357395, \
-0.607773195994, -0.607036364439, -0.606299862036, -0.605563688093, \
-0.604827841918, -0.604092322821, -0.603357130115, -0.602622263113, \
-0.601887721131, -0.601153503486, -0.600419609496, -0.599686038481, \
-0.598952789763, -0.598219862666, -0.597487256514, -0.596754970634, \
-0.596023004354, -0.595291357003, -0.594560027913, -0.593829016416, \
-0.593098321847, -0.592367943541, -0.591637880836, -0.590908133071, \
-0.590178699585, -0.589449579722, -0.588720772824, -0.587992278236, \
-0.587264095305, -0.586536223378, -0.585808661806, -0.585081409939, \
-0.584354467129, -0.58362783273, -0.582901506099, -0.58217548659, \
-0.581449773564, -0.580724366379, -0.579999264397, -0.57927446698, \
-0.578549973492, -0.5778257833, -0.577101895769, -0.576378310269, \
-0.575655026169, -0.57493204284, -0.574209359655, -0.573486975988, \
-0.572764891214, -0.57204310471, -0.571321615855, -0.570600424028, \
-0.56987952861, -0.569158928984, -0.568438624533, -0.567718614641, \
-0.566998898697, -0.566279476088, -0.565560346202, -0.56484150843, \
-0.564122962165, -0.563404706799, -0.562686741727, -0.561969066345, \
-0.56125168005, -0.560534582241, -0.559817772317, -0.559101249681, \
-0.558385013733, -0.557669063878, -0.556953399521, -0.556238020069, \
-0.555522924929, -0.55480811351, -0.554093585223, -0.553379339478, \
-0.552665375689, -0.551951693269, -0.551238291635, -0.550525170202, \
-0.549812328389, -0.549099765614, -0.548387481299, -0.547675474863, \
-0.546963745731, -0.546252293327, -0.545541117075, -0.544830216402, \
-0.544119590737, -0.543409239507, -0.542699162143, -0.541989358077, \
-0.541279826741, -0.540570567568, -0.539861579995, -0.539152863456, \
-0.53844441739, -0.537736241235, -0.537028334431, -0.536320696418, \
-0.535613326639, -0.534906224537, -0.534199389557, -0.533492821143, \
-0.532786518743, -0.532080481805, -0.531374709778, -0.530669202111, \
-0.529963958257, -0.529258977668, -0.528554259797, -0.5278498041, \
-0.527145610031, -0.526441677048, -0.52573800461, -0.525034592175, \
-0.524331439204, -0.523628545158, -0.5229259095, -0.522223531693, \
-0.521521411203, -0.520819547494, -0.520117940035, -0.519416588293, \
-0.518715491738, -0.518014649839, -0.517314062067, -0.516613727896, \
-0.515913646798, -0.515213818248, -0.514514241722, -0.513814916696, \
-0.513115842648, -0.512417019057, -0.511718445402, -0.511020121164, \
-0.510322045826, -0.509624218869, -0.508926639778, -0.508229308038, \
-0.507532223135, -0.506835384556, -0.506138791788, -0.505442444322, \
-0.504746341647, -0.504050483254, -0.503354868635, -0.502659497283, \
-0.501964368692, -0.501269482359, -0.500574837777, -0.499880434446, \
-0.499186271862, -0.498492349525, -0.497798666935, -0.497105223592, \
-0.496412018999, -0.49571905266, -0.495026324076, -0.494333832755, \
-0.493641578201, -0.492949559921, -0.492257777423, -0.491566230217, \
-0.49087491781, -0.490183839715, -0.489492995442, -0.488802384505, \
-0.488112006416, -0.487421860691, -0.486731946843, -0.48604226439, \
-0.48535281285, -0.484663591739, -0.483974600576, -0.483285838883, \
-0.48259730618, -0.481909001988, -0.48122092583, -0.480533077229, \
-0.479845455711, -0.479158060801, -0.478470892024, -0.477783948909, \
-0.477097230982, -0.476410737773, -0.475724468813, -0.47503842363, \
-0.474352601759, -0.473667002729, -0.472981626076, -0.472296471333, \
-0.471611538035, -0.470926825718, -0.470242333919, -0.469558062177, \
-0.468874010028, -0.468190177013, -0.467506562672, -0.466823166546, \
-0.466139988176, -0.465457027107, -0.46477428288, -0.464091755042, \
-0.463409443136, -0.46272734671, -0.46204546531, -0.461363798483, \
-0.460682345779, -0.460001106748, -0.459320080938, -0.458639267901, \
-0.45795866719, -0.457278278357, -0.456598100954, -0.455918134538, \
-0.455238378662, -0.454558832883, -0.453879496757, -0.453200369842, \
-0.452521451697, -0.45184274188, -0.45116423995, -0.45048594547, \
-0.449807858, -0.449129977103, -0.448452302341, -0.447774833279, \
-0.44709756948, -0.446420510511, -0.445743655937, -0.445067005325, \
-0.444390558244, -0.44371431426, -0.443038272944, -0.442362433866, \
-0.441686796595, -0.441011360704, -0.440336125765, -0.43966109135, \
-0.438986257034, -0.43831162239, -0.437637186994, -0.436962950421, \
-0.436288912249, -0.435615072055, -0.434941429417, -0.434267983913, \
-0.433594735123, -0.432921682628, -0.432248826008, -0.431576164846, \
-0.430903698722, -0.430231427222, -0.429559349928, -0.428887466426, \
-0.4282157763, -0.427544279136, -0.426872974521, -0.426201862043, \
-0.42553094129, -0.42486021185, -0.424189673313, -0.423519325269, \
-0.422849167309, -0.422179199024, -0.421509420007, -0.420839829851, \
-0.420170428149, -0.419501214496, -0.418832188486, -0.418163349716, \
-0.417494697781, -0.416826232279, -0.416157952806, -0.415489858963, \
-0.414821950346, -0.414154226557, -0.413486687196, -0.412819331863, \
-0.41215216016, -0.41148517169, -0.410818366055, -0.410151742859, \
-0.409485301706, -0.408819042201, -0.40815296395, -0.407487066559, \
-0.406821349634, -0.406155812784, -0.405490455615, -0.404825277738, \
-0.404160278761, -0.403495458294, -0.402830815949, -0.402166351335, \
-0.401502064066, -0.400837953753, -0.40017402001, -0.399510262451, \
-0.398846680689, -0.39818327434, -0.397520043019, -0.396856986343, \
-0.396194103929, -0.395531395393, -0.394868860354, -0.394206498431, \
-0.393544309243, -0.392882292409, -0.392220447549, -0.391558774287, \
-0.390897272241, -0.390235941036, -0.389574780293, -0.388913789636, \
-0.38825296869, -0.387592317078, -0.386931834425, -0.386271520359, \
-0.385611374504, -0.384951396488, -0.384291585938, -0.383631942482, \
-0.382972465749, -0.382313155368, -0.381654010969, -0.380995032182, \
-0.380336218639, -0.379677569969, -0.379019085806, -0.378360765783, \
-0.377702609531, -0.377044616686, -0.37638678688, -0.37572911975, \
-0.37507161493, -0.374414272056, -0.373757090765, -0.373100070693, \
-0.372443211479, -0.37178651276, -0.371129974175, -0.370473595364, \
-0.369817375965, -0.369161315619, -0.368505413967, -0.36784967065, \
-0.367194085311, -0.36653865759, -0.365883387132, -0.36522827358, \
-0.364573316578, -0.36391851577, -0.363263870801, -0.362609381316, \
-0.361955046963, -0.361300867386, -0.360646842235, -0.359992971155, \
-0.359339253795, -0.358685689804, -0.358032278831, -0.357379020525, \
-0.356725914537, -0.356072960516, -0.355420158116, -0.354767506986, \
-0.354115006779, -0.353462657149, -0.352810457747, -0.352158408227, \
-0.351506508245, -0.350854757453, -0.350203155508, -0.349551702065, \
-0.34890039678, -0.348249239309, -0.34759822931, -0.346947366441, \
-0.346296650358, -0.345646080722, -0.344995657189, -0.344345379421, \
-0.343695247078, -0.343045259818, -0.342395417304, -0.341745719196, \
-0.341096165157, -0.340446754849, -0.339797487934, -0.339148364076, \
-0.338499382938, -0.337850544184, -0.33720184748, -0.33655329249, \
-0.335904878879, -0.335256606314, -0.334608474462, -0.333960482988, \
-0.33331263156, -0.332664919846, -0.332017347514, -0.331369914234, \
-0.330722619673, -0.330075463502, -0.329428445391, -0.328781565009, \
-0.328134822029, -0.32748821612, -0.326841746956, -0.326195414208, \
-0.325549217549, -0.324903156652, -0.32425723119, -0.323611440838, \
-0.322965785269, -0.322320264159, -0.321674877183, -0.321029624016, \
-0.320384504335, -0.319739517815, -0.319094664135, -0.318449942971, \
-0.317805354, -0.317160896902, -0.316516571355, -0.315872377037, \
-0.315228313629, -0.314584380809, -0.313940578259, -0.313296905659, \
-0.312653362689, -0.312009949032, -0.311366664369, -0.310723508383, \
-0.310080480756, -0.309437581171, -0.308794809313, -0.308152164863, \
-0.307509647508, -0.306867256932, -0.306224992819, -0.305582854855, \
-0.304940842726, -0.304298956119, -0.303657194719, -0.303015558215, \
-0.302374046293, -0.301732658641, -0.301091394947, -0.3004502549, \
-0.29980923819, -0.299168344504, -0.298527573533, -0.297886924968, \
-0.297246398498, -0.296605993814, -0.295965710609, -0.295325548572, \
-0.294685507396, -0.294045586775, -0.293405786399, -0.292766105963, \
-0.292126545159, -0.291487103683, -0.290847781227, -0.290208577487, \
-0.289569492157, -0.288930524932, -0.288291675509, -0.287652943584, \
-0.287014328852, -0.28637583101, -0.285737449756, -0.285099184787, \
-0.2844610358, -0.283823002495, -0.283185084568, -0.28254728172, \
-0.28190959365, -0.281272020056, -0.280634560639, -0.279997215099, \
-0.279359983137, -0.278722864454, -0.278085858751, -0.277448965729, \
-0.27681218509, -0.276175516537, -0.275538959773, -0.2749025145, \
-0.274266180422, -0.273629957242, -0.272993844665, -0.272357842394, \
-0.271721950134, -0.271086167591, -0.270450494469, -0.269814930474, \
-0.269179475313, -0.268544128691, -0.267908890315, -0.267273759892, \
-0.26663873713, -0.266003821735, -0.265369013416, -0.264734311881, \
-0.264099716838, -0.263465227997, -0.262830845067, -0.262196567757, \
-0.261562395776, -0.260928328836, -0.260294366646, -0.259660508917, \
-0.25902675536, -0.258393105687, -0.25775955961, -0.25712611684, \
-0.256492777089, -0.25585954007, -0.255226405497, -0.254593373082, \
-0.253960442538, -0.253327613581, -0.252694885923, -0.252062259279, \
-0.251429733364, -0.250797307892, -0.25016498258, -0.249532757143, \
-0.248900631296, -0.248268604756, -0.24763667724, -0.247004848463, \
-0.246373118143, -0.245741485998, -0.245109951745, -0.244478515102, \
-0.243847175788, -0.24321593352, -0.242584788017, -0.241953738999, \
-0.241322786185, -0.240691929294, -0.240061168047, -0.239430502163, \
-0.238799931363, -0.238169455368, -0.237539073899, -0.236908786677, \
-0.236278593424, -0.235648493861, -0.235018487711, -0.234388574696, \
-0.233758754538, -0.233129026961, -0.232499391688, -0.231869848442, \
-0.231240396947, -0.230611036927, -0.229981768106, -0.229352590209, \
-0.22872350296, -0.228094506085, -0.227465599309, -0.226836782357, \
-0.226208054955, -0.22557941683, -0.224950867708, -0.224322407315, \
-0.223694035378, -0.223065751624, -0.222437555781, -0.221809447576, \
-0.221181426737, -0.220553492993, -0.219925646071, -0.219297885701, \
-0.21867021161, -0.218042623529, -0.217415121186, -0.216787704312, \
-0.216160372636, -0.215533125887, -0.214905963798, -0.214278886097, \
-0.213651892517, -0.213024982787, -0.212398156639, -0.211771413806, \
-0.211144754018, -0.210518177008, -0.209891682507, -0.209265270249, \
-0.208638939966, -0.208012691392, -0.207386524258, -0.206760438299, \
-0.206134433249, -0.20550850884, -0.204882664808, -0.204256900887, \
-0.203631216811, -0.203005612315, -0.202380087133, -0.201754641003, \
-0.201129273658, -0.200503984834, -0.199878774268, -0.199253641695, \
-0.198628586852, -0.198003609476, -0.197378709302, -0.196753886069, \
-0.196129139514, -0.195504469373, -0.194879875385, -0.194255357287, \
-0.193630914818, -0.193006547715, -0.192382255718, -0.191758038565, \
-0.191133895995, -0.190509827747, -0.189885833561, -0.189261913176, \
-0.188638066331, -0.188014292767, -0.187390592225, -0.186766964443, \
-0.186143409164, -0.185519926127, -0.184896515074, -0.184273175746, \
-0.183649907884, -0.18302671123, -0.182403585526, -0.181780530513, \
-0.181157545935, -0.180534631532, -0.179911787049, -0.179289012227, \
-0.17866630681, -0.178043670541, -0.177421103162, -0.176798604419, \
-0.176176174053, -0.175553811811, -0.174931517434, -0.174309290669, \
-0.173687131258, -0.173065038948, -0.172443013482, -0.171821054607, \
-0.171199162066, -0.170577335606, -0.169955574973, -0.169333879911, \
-0.168712250167, -0.168090685487, -0.167469185618, -0.166847750305, \
-0.166226379296, -0.165605072338, -0.164983829177, -0.164362649561, \
-0.163741533237, -0.163120479952, -0.162499489456, -0.161878561494, \
-0.161257695816, -0.16063689217, -0.160016150304, -0.159395469966, \
-0.158774850907, -0.158154292873, -0.157533795616, -0.156913358883, \
-0.156292982424, -0.15567266599, -0.155052409329, -0.154432212191, \
-0.153812074328, -0.153191995488, -0.152571975423, -0.151952013883, \
-0.151332110619, -0.150712265381, -0.150092477921, -0.14947274799, \
-0.148853075339, -0.148233459721, -0.147613900885, -0.146994398586, \
-0.146374952573, -0.1457555626, -0.14513622842, -0.144516949783, \
-0.143897726444, -0.143278558154, -0.142659444667, -0.142040385735, \
-0.141421381113, -0.140802430553, -0.140183533808, -0.139564690633, \
-0.138945900782, -0.138327164007, -0.137708480064, -0.137089848707, \
-0.136471269689, -0.135852742766, -0.135234267692, -0.134615844222, \
-0.13399747211, -0.133379151113, -0.132760880985, -0.132142661481, \
-0.131524492358, -0.13090637337, -0.130288304273, -0.129670284824, \
-0.129052314778, -0.128434393892, -0.127816521921, -0.127198698623, \
-0.126580923754, -0.12596319707, -0.125345518329, -0.124727887287, \
-0.124110303701, -0.12349276733, -0.122875277929, -0.122257835256, \
-0.12164043907, -0.121023089128, -0.120405785187, -0.119788527006, \
-0.119171314342, -0.118554146955, -0.117937024601, -0.117319947041, \
-0.116702914032, -0.116085925333, -0.115468980703, -0.1148520799, \
-0.114235222685, -0.113618408815, -0.113001638051, -0.112384910152, \
-0.111768224876, -0.111151581985, -0.110534981238, -0.109918422394, \
-0.109301905213, -0.108685429456, -0.108068994883, -0.107452601254, \
-0.10683624833, -0.10621993587, -0.105603663637, -0.104987431389, \
-0.10437123889, -0.103755085898, -0.103138972176, -0.102522897484, \
-0.101906861584, -0.101290864237, -0.100674905205, -0.100058984249, \
-0.0994431011311, -0.0988272556129, -0.0982114474563, -0.0975956764232, \
-0.0969799422759, -0.0963642447764, -0.0957485836871, -0.0951329587704, \
-0.0945173697886, -0.0939018165045, -0.0932862986806, -0.0926708160798, \
-0.0920553684649, -0.0914399555988, -0.0908245772446, -0.0902092331655, \
-0.0895939231246, -0.0889786468852, -0.0883634042109, -0.0877481948651, \
-0.0871330186113, -0.0865178752133, -0.0859027644347, -0.0852876860396, \
-0.0846726397917, -0.0840576254552, -0.0834426427941, -0.0828276915726, \
-0.0822127715549, -0.0815978825056, -0.0809830241889, -0.0803681963694, \
-0.0797533988117, -0.0791386312806, -0.0785238935406, -0.0779091853568, \
-0.077294506494, -0.0766798567172, -0.0760652357914, -0.0754506434819, \
-0.0748360795539, -0.0742215437727, -0.0736070359035, -0.072992555712, \
-0.0723781029636, -0.0717636774239, -0.0711492788586, -0.0705349070334, \
-0.0699205617141, -0.0693062426667, -0.068691949657, -0.0680776824512, \
-0.0674634408152, -0.0668492245152, -0.0662350333175, -0.0656208669883, \
-0.065006725294, -0.0643926080011, -0.0637785148759, -0.0631644456851, \
-0.0625504001953, -0.0619363781731, -0.0613223793853, -0.0607084035987, \
-0.0600944505801, -0.0594805200964, -0.0588666119147, -0.058252725802, \
-0.0576388615253, -0.0570250188518, -0.0564111975488, -0.0557973973834, \
-0.0551836181231, -0.0545698595352, -0.0539561213871, -0.0533424034463, \
-0.0527287054803, -0.0521150272568, -0.0515013685434, -0.0508877291078, \
-0.0502741087177, -0.0496605071409, -0.0490469241453, -0.0484333594987, \
-0.0478198129692, -0.0472062843246, -0.0465927733331, -0.0459792797628, \
-0.0453658033816, -0.0447523439579, -0.0441389012599, -0.0435254750557, \
-0.0429120651138, -0.0422986712024, -0.04168529309, -0.041071930545, \
-0.0404585833359, -0.0398452512311, -0.0392319339993, -0.0386186314091, \
-0.0380053432289, -0.0373920692277, -0.0367788091739, -0.0361655628365, \
-0.0355523299841, -0.0349391103856, -0.0343259038099, -0.0337127100257, \
-0.0330995288021, -0.032486359908, -0.0318732031123, -0.0312600581842, \
-0.0306469248925, -0.0300338030065, -0.0294206922952, -0.0288075925277, \
-0.0281945034733, -0.0275814249011, -0.0269683565803, -0.0263552982801, \
-0.0257422497699, -0.025129210819, -0.0245161811967, -0.0239031606722, \
-0.0232901490151, -0.0226771459946, -0.0220641513803, -0.0214511649415, \
-0.0208381864477, -0.0202252156684, -0.019612252373, -0.0189992963312, \
-0.0183863473125, -0.0177734050864, -0.0171604694224, -0.0165475400903, \
-0.0159346168595, -0.0153216994998, -0.0147087877807, -0.0140958814719, \
-0.0134829803432, -0.0128700841641, -0.0122571927044, -0.0116443057337, \
-0.0110314230219, -0.0104185443386, -0.00980566945358, -0.00919279813659, \
-0.00857993015739, -0.00796706528575, -0.00735420329145, -0.00674134394428, \
-0.00612848701402, -0.00551563227049, -0.00490277948347, -0.00428992842278, \
-0.00367707885824, -0.00306423055966, -0.00245138329686, -0.00183853683967, \
-0.00122569095791, -0.000612845421414, 0.0, 0.000612845421414, \
0.00122569095791, 0.00183853683967, 0.00245138329686, 0.00306423055966, \
0.00367707885824, 0.00428992842278, 0.00490277948347, 0.00551563227049, \
0.00612848701402, 0.00674134394428, 0.00735420329145, 0.00796706528575, \
0.00857993015739, 0.00919279813659, 0.00980566945358, 0.0104185443386, \
0.0110314230219, 0.0116443057337, 0.0122571927044, 0.0128700841641, \
0.0134829803432, 0.0140958814719, 0.0147087877807, 0.0153216994998, \
0.0159346168595, 0.0165475400903, 0.0171604694224, 0.0177734050864, \
0.0183863473125, 0.0189992963312, 0.019612252373, 0.0202252156684, \
0.0208381864477, 0.0214511649415, 0.0220641513803, 0.0226771459946, \
0.0232901490151, 0.0239031606722, 0.0245161811967, 0.025129210819, \
0.0257422497699, 0.0263552982801, 0.0269683565803, 0.0275814249011, \
0.0281945034733, 0.0288075925277, 0.0294206922952, 0.0300338030065, \
0.0306469248925, 0.0312600581842, 0.0318732031123, 0.032486359908, \
0.0330995288021, 0.0337127100257, 0.0343259038099, 0.0349391103856, \
0.0355523299841, 0.0361655628365, 0.0367788091739, 0.0373920692277, \
0.0380053432289, 0.0386186314091, 0.0392319339993, 0.0398452512311, \
0.0404585833359, 0.041071930545, 0.04168529309, 0.0422986712024, \
0.0429120651138, 0.0435254750557, 0.0441389012599, 0.0447523439579, \
0.0453658033816, 0.0459792797628, 0.0465927733331, 0.0472062843246, \
0.0478198129692, 0.0484333594987, 0.0490469241453, 0.0496605071409, \
0.0502741087177, 0.0508877291078, 0.0515013685434, 0.0521150272568, \
0.0527287054803, 0.0533424034463, 0.0539561213871, 0.0545698595352, \
0.0551836181231, 0.0557973973834, 0.0564111975488, 0.0570250188518, \
0.0576388615253, 0.058252725802, 0.0588666119147, 0.0594805200964, \
0.0600944505801, 0.0607084035987, 0.0613223793853, 0.0619363781731, \
0.0625504001953, 0.0631644456851, 0.0637785148759, 0.0643926080011, \
0.065006725294, 0.0656208669883, 0.0662350333175, 0.0668492245152, \
0.0674634408152, 0.0680776824512, 0.068691949657, 0.0693062426667, \
0.0699205617141, 0.0705349070334, 0.0711492788586, 0.0717636774239, \
0.0723781029636, 0.072992555712, 0.0736070359035, 0.0742215437727, \
0.0748360795539, 0.0754506434819, 0.0760652357914, 0.0766798567172, \
0.077294506494, 0.0779091853568, 0.0785238935406, 0.0791386312806, \
0.0797533988117, 0.0803681963694, 0.0809830241889, 0.0815978825056, \
0.0822127715549, 0.0828276915726, 0.0834426427941, 0.0840576254552, \
0.0846726397917, 0.0852876860396, 0.0859027644347, 0.0865178752133, \
0.0871330186113, 0.0877481948651, 0.0883634042109, 0.0889786468852, \
0.0895939231246, 0.0902092331655, 0.0908245772446, 0.0914399555988, \
0.0920553684649, 0.0926708160798, 0.0932862986806, 0.0939018165045, \
0.0945173697886, 0.0951329587704, 0.0957485836871, 0.0963642447764, \
0.0969799422759, 0.0975956764232, 0.0982114474563, 0.0988272556129, \
0.0994431011311, 0.100058984249, 0.100674905205, 0.101290864237, \
0.101906861584, 0.102522897484, 0.103138972176, 0.103755085898, \
0.10437123889, 0.104987431389, 0.105603663637, 0.10621993587, \
0.10683624833, 0.107452601254, 0.108068994883, 0.108685429456, \
0.109301905213, 0.109918422394, 0.110534981238, 0.111151581985, \
0.111768224876, 0.112384910152, 0.113001638051, 0.113618408815, \
0.114235222685, 0.1148520799, 0.115468980703, 0.116085925333, \
0.116702914032, 0.117319947041, 0.117937024601, 0.118554146955, \
0.119171314342, 0.119788527006, 0.120405785187, 0.121023089128, \
0.12164043907, 0.122257835256, 0.122875277929, 0.12349276733, \
0.124110303701, 0.124727887287, 0.125345518329, 0.12596319707, \
0.126580923754, 0.127198698623, 0.127816521921, 0.128434393892, \
0.129052314778, 0.129670284824, 0.130288304273, 0.13090637337, \
0.131524492358, 0.132142661481, 0.132760880985, 0.133379151113, \
0.13399747211, 0.134615844222, 0.135234267692, 0.135852742766, \
0.136471269689, 0.137089848707, 0.137708480064, 0.138327164007, \
0.138945900782, 0.139564690633, 0.140183533808, 0.140802430553, \
0.141421381113, 0.142040385735, 0.142659444667, 0.143278558154, \
0.143897726444, 0.144516949783, 0.14513622842, 0.1457555626, \
0.146374952573, 0.146994398586, 0.147613900885, 0.148233459721, \
0.148853075339, 0.14947274799, 0.150092477921, 0.150712265381, \
0.151332110619, 0.151952013883, 0.152571975423, 0.153191995488, \
0.153812074328, 0.154432212191, 0.155052409329, 0.15567266599, \
0.156292982424, 0.156913358883, 0.157533795616, 0.158154292873, \
0.158774850907, 0.159395469966, 0.160016150304, 0.16063689217, \
0.161257695816, 0.161878561494, 0.162499489456, 0.163120479952, \
0.163741533237, 0.164362649561, 0.164983829177, 0.165605072338, \
0.166226379296, 0.166847750305, 0.167469185618, 0.168090685487, \
0.168712250167, 0.169333879911, 0.169955574973, 0.170577335606, \
0.171199162066, 0.171821054607, 0.172443013482, 0.173065038948, \
0.173687131258, 0.174309290669, 0.174931517434, 0.175553811811, \
0.176176174053, 0.176798604419, 0.177421103162, 0.178043670541, \
0.17866630681, 0.179289012227, 0.179911787049, 0.180534631532, \
0.181157545935, 0.181780530513, 0.182403585526, 0.18302671123, \
0.183649907884, 0.184273175746, 0.184896515074, 0.185519926127, \
0.186143409164, 0.186766964443, 0.187390592225, 0.188014292767, \
0.188638066331, 0.189261913176, 0.189885833561, 0.190509827747, \
0.191133895995, 0.191758038565, 0.192382255718, 0.193006547715, \
0.193630914818, 0.194255357287, 0.194879875385, 0.195504469373, \
0.196129139514, 0.196753886069, 0.197378709302, 0.198003609476, \
0.198628586852, 0.199253641695, 0.199878774268, 0.200503984834, \
0.201129273658, 0.201754641003, 0.202380087133, 0.203005612315, \
0.203631216811, 0.204256900887, 0.204882664808, 0.20550850884, \
0.206134433249, 0.206760438299, 0.207386524258, 0.208012691392, \
0.208638939966, 0.209265270249, 0.209891682507, 0.210518177008, \
0.211144754018, 0.211771413806, 0.212398156639, 0.213024982787, \
0.213651892517, 0.214278886097, 0.214905963798, 0.215533125887, \
0.216160372636, 0.216787704312, 0.217415121186, 0.218042623529, \
0.21867021161, 0.219297885701, 0.219925646071, 0.220553492993, \
0.221181426737, 0.221809447576, 0.222437555781, 0.223065751624, \
0.223694035378, 0.224322407315, 0.224950867708, 0.22557941683, \
0.226208054955, 0.226836782357, 0.227465599309, 0.228094506085, \
0.22872350296, 0.229352590209, 0.229981768106, 0.230611036927, \
0.231240396947, 0.231869848442, 0.232499391688, 0.233129026961, \
0.233758754538, 0.234388574696, 0.235018487711, 0.235648493861, \
0.236278593424, 0.236908786677, 0.237539073899, 0.238169455368, \
0.238799931363, 0.239430502163, 0.240061168047, 0.240691929294, \
0.241322786185, 0.241953738999, 0.242584788017, 0.24321593352, \
0.243847175788, 0.244478515102, 0.245109951745, 0.245741485998, \
0.246373118143, 0.247004848463, 0.24763667724, 0.248268604756, \
0.248900631296, 0.249532757143, 0.25016498258, 0.250797307892, \
0.251429733364, 0.252062259279, 0.252694885923, 0.253327613581, \
0.253960442538, 0.254593373082, 0.255226405497, 0.25585954007, \
0.256492777089, 0.25712611684, 0.25775955961, 0.258393105687, \
0.25902675536, 0.259660508917, 0.260294366646, 0.260928328836, \
0.261562395776, 0.262196567757, 0.262830845067, 0.263465227997, \
0.264099716838, 0.264734311881, 0.265369013416, 0.266003821735, \
0.26663873713, 0.267273759892, 0.267908890315, 0.268544128691, \
0.269179475313, 0.269814930474, 0.270450494469, 0.271086167591, \
0.271721950134, 0.272357842394, 0.272993844665, 0.273629957242, \
0.274266180422, 0.2749025145, 0.275538959773, 0.276175516537, \
0.27681218509, 0.277448965729, 0.278085858751, 0.278722864454, \
0.279359983137, 0.279997215099, 0.280634560639, 0.281272020056, \
0.28190959365, 0.28254728172, 0.283185084568, 0.283823002495, \
0.2844610358, 0.285099184787, 0.285737449756, 0.28637583101, \
0.287014328852, 0.287652943584, 0.288291675509, 0.288930524932, \
0.289569492157, 0.290208577487, 0.290847781227, 0.291487103683, \
0.292126545159, 0.292766105963, 0.293405786399, 0.294045586775, \
0.294685507396, 0.295325548572, 0.295965710609, 0.296605993814, \
0.297246398498, 0.297886924968, 0.298527573533, 0.299168344504, \
0.29980923819, 0.3004502549, 0.301091394947, 0.301732658641, \
0.302374046293, 0.303015558215, 0.303657194719, 0.304298956119, \
0.304940842726, 0.305582854855, 0.306224992819, 0.306867256932, \
0.307509647508, 0.308152164863, 0.308794809313, 0.309437581171, \
0.310080480756, 0.310723508383, 0.311366664369, 0.312009949032, \
0.312653362689, 0.313296905659, 0.313940578259, 0.314584380809, \
0.315228313629, 0.315872377037, 0.316516571355, 0.317160896902, \
0.317805354, 0.318449942971, 0.319094664135, 0.319739517815, \
0.320384504335, 0.321029624016, 0.321674877183, 0.322320264159, \
0.322965785269, 0.323611440838, 0.32425723119, 0.324903156652, \
0.325549217549, 0.326195414208, 0.326841746956, 0.32748821612, \
0.328134822029, 0.328781565009, 0.329428445391, 0.330075463502, \
0.330722619673, 0.331369914234, 0.332017347514, 0.332664919846, \
0.33331263156, 0.333960482988, 0.334608474462, 0.335256606314, \
0.335904878879, 0.33655329249, 0.33720184748, 0.337850544184, \
0.338499382938, 0.339148364076, 0.339797487934, 0.340446754849, \
0.341096165157, 0.341745719196, 0.342395417304, 0.343045259818, \
0.343695247078, 0.344345379421, 0.344995657189, 0.345646080722, \
0.346296650358, 0.346947366441, 0.34759822931, 0.348249239309, \
0.34890039678, 0.349551702065, 0.350203155508, 0.350854757453, \
0.351506508245, 0.352158408227, 0.352810457747, 0.353462657149, \
0.354115006779, 0.354767506986, 0.355420158116, 0.356072960516, \
0.356725914537, 0.357379020525, 0.358032278831, 0.358685689804, \
0.359339253795, 0.359992971155, 0.360646842235, 0.361300867386, \
0.361955046963, 0.362609381316, 0.363263870801, 0.36391851577, \
0.364573316578, 0.36522827358, 0.365883387132, 0.36653865759, \
0.367194085311, 0.36784967065, 0.368505413967, 0.369161315619, \
0.369817375965, 0.370473595364, 0.371129974175, 0.37178651276, \
0.372443211479, 0.373100070693, 0.373757090765, 0.374414272056, \
0.37507161493, 0.37572911975, 0.37638678688, 0.377044616686, \
0.377702609531, 0.378360765783, 0.379019085806, 0.379677569969, \
0.380336218639, 0.380995032182, 0.381654010969, 0.382313155368, \
0.382972465749, 0.383631942482, 0.384291585938, 0.384951396488, \
0.385611374504, 0.386271520359, 0.386931834425, 0.387592317078, \
0.38825296869, 0.388913789636, 0.389574780293, 0.390235941036, \
0.390897272241, 0.391558774287, 0.392220447549, 0.392882292409, \
0.393544309243, 0.394206498431, 0.394868860354, 0.395531395393, \
0.396194103929, 0.396856986343, 0.397520043019, 0.39818327434, \
0.398846680689, 0.399510262451, 0.40017402001, 0.400837953753, \
0.401502064066, 0.402166351335, 0.402830815949, 0.403495458294, \
0.404160278761, 0.404825277738, 0.405490455615, 0.406155812784, \
0.406821349634, 0.407487066559, 0.40815296395, 0.408819042201, \
0.409485301706, 0.410151742859, 0.410818366055, 0.41148517169, \
0.41215216016, 0.412819331863, 0.413486687196, 0.414154226557, \
0.414821950346, 0.415489858963, 0.416157952806, 0.416826232279, \
0.417494697781, 0.418163349716, 0.418832188486, 0.419501214496, \
0.420170428149, 0.420839829851, 0.421509420007, 0.422179199024, \
0.422849167309, 0.423519325269, 0.424189673313, 0.42486021185, \
0.42553094129, 0.426201862043, 0.426872974521, 0.427544279136, \
0.4282157763, 0.428887466426, 0.429559349928, 0.430231427222, \
0.430903698722, 0.431576164846, 0.432248826008, 0.432921682628, \
0.433594735123, 0.434267983913, 0.434941429417, 0.435615072055, \
0.436288912249, 0.436962950421, 0.437637186994, 0.43831162239, \
0.438986257034, 0.43966109135, 0.440336125765, 0.441011360704, \
0.441686796595, 0.442362433866, 0.443038272944, 0.44371431426, \
0.444390558244, 0.445067005325, 0.445743655937, 0.446420510511, \
0.44709756948, 0.447774833279, 0.448452302341, 0.449129977103, \
0.449807858, 0.45048594547, 0.45116423995, 0.45184274188, \
0.452521451697, 0.453200369842, 0.453879496757, 0.454558832883, \
0.455238378662, 0.455918134538, 0.456598100954, 0.457278278357, \
0.45795866719, 0.458639267901, 0.459320080938, 0.460001106748, \
0.460682345779, 0.461363798483, 0.46204546531, 0.46272734671, \
0.463409443136, 0.464091755042, 0.46477428288, 0.465457027107, \
0.466139988176, 0.466823166546, 0.467506562672, 0.468190177013, \
0.468874010028, 0.469558062177, 0.470242333919, 0.470926825718, \
0.471611538035, 0.472296471333, 0.472981626076, 0.473667002729, \
0.474352601759, 0.47503842363, 0.475724468813, 0.476410737773, \
0.477097230982, 0.477783948909, 0.478470892024, 0.479158060801, \
0.479845455711, 0.480533077229, 0.48122092583, 0.481909001988, \
0.48259730618, 0.483285838883, 0.483974600576, 0.484663591739, \
0.48535281285, 0.48604226439, 0.486731946843, 0.487421860691, \
0.488112006416, 0.488802384505, 0.489492995442, 0.490183839715, \
0.49087491781, 0.491566230217, 0.492257777423, 0.492949559921, \
0.493641578201, 0.494333832755, 0.495026324076, 0.49571905266, \
0.496412018999, 0.497105223592, 0.497798666935, 0.498492349525, \
0.499186271862, 0.499880434446, 0.500574837777, 0.501269482359, \
0.501964368692, 0.502659497283, 0.503354868635, 0.504050483254, \
0.504746341647, 0.505442444322, 0.506138791788, 0.506835384556, \
0.507532223135, 0.508229308038, 0.508926639778, 0.509624218869, \
0.510322045826, 0.511020121164, 0.511718445402, 0.512417019057, \
0.513115842648, 0.513814916696, 0.514514241722, 0.515213818248, \
0.515913646798, 0.516613727896, 0.517314062067, 0.518014649839, \
0.518715491738, 0.519416588293, 0.520117940035, 0.520819547494, \
0.521521411203, 0.522223531693, 0.5229259095, 0.523628545158, \
0.524331439204, 0.525034592175, 0.52573800461, 0.526441677048, \
0.527145610031, 0.5278498041, 0.528554259797, 0.529258977668, \
0.529963958257, 0.530669202111, 0.531374709778, 0.532080481805, \
0.532786518743, 0.533492821143, 0.534199389557, 0.534906224537, \
0.535613326639, 0.536320696418, 0.537028334431, 0.537736241235, \
0.53844441739, 0.539152863456, 0.539861579995, 0.540570567568, \
0.541279826741, 0.541989358077, 0.542699162143, 0.543409239507, \
0.544119590737, 0.544830216402, 0.545541117075, 0.546252293327, \
0.546963745731, 0.547675474863, 0.548387481299, 0.549099765614, \
0.549812328389, 0.550525170202, 0.551238291635, 0.551951693269, \
0.552665375689, 0.553379339478, 0.554093585223, 0.55480811351, \
0.555522924929, 0.556238020069, 0.556953399521, 0.557669063878, \
0.558385013733, 0.559101249681, 0.559817772317, 0.560534582241, \
0.56125168005, 0.561969066345, 0.562686741727, 0.563404706799, \
0.564122962165, 0.56484150843, 0.565560346202, 0.566279476088, \
0.566998898697, 0.567718614641, 0.568438624533, 0.569158928984, \
0.56987952861, 0.570600424028, 0.571321615855, 0.57204310471, \
0.572764891214, 0.573486975988, 0.574209359655, 0.57493204284, \
0.575655026169, 0.576378310269, 0.577101895769, 0.5778257833, \
0.578549973492, 0.57927446698, 0.579999264397, 0.580724366379, \
0.581449773564, 0.58217548659, 0.582901506099, 0.58362783273, \
0.584354467129, 0.585081409939, 0.585808661806, 0.586536223378, \
0.587264095305, 0.587992278236, 0.588720772824, 0.589449579722, \
0.590178699585, 0.590908133071, 0.591637880836, 0.592367943541, \
0.593098321847, 0.593829016416, 0.594560027913, 0.595291357003, \
0.596023004354, 0.596754970634, 0.597487256514, 0.598219862666, \
0.598952789763, 0.599686038481, 0.600419609496, 0.601153503486, \
0.601887721131, 0.602622263113, 0.603357130115, 0.604092322821, \
0.604827841918, 0.605563688093, 0.606299862036, 0.607036364439, \
0.607773195994, 0.608510357395, 0.609247849338, 0.609985672522, \
0.610723827646, 0.61146231541, 0.612201136518, 0.612940291674, \
0.613679781584, 0.614419606955, 0.615159768498, 0.615900266923, \
0.616641102944, 0.617382277275, 0.618123790632, 0.618865643733, \
0.619607837299, 0.62035037205, 0.621093248711, 0.621836468005, \
0.622580030661, 0.623323937406, 0.62406818897, 0.624812786087, \
0.625557729489, 0.626303019913, 0.627048658096, 0.627794644778, \
0.628540980698, 0.629287666601, 0.630034703232, 0.630782091336, \
0.631529831662, 0.632277924961, 0.633026371984, 0.633775173485, \
0.634524330221, 0.63527384295, 0.636023712429, 0.636773939423, \
0.637524524692, 0.638275469004, 0.639026773124, 0.639778437823, \
0.640530463871, 0.641282852041, 0.642035603108, 0.642788717849, \
0.643542197043, 0.64429604147, 0.645050251913, 0.645804829157, \
0.646559773988, 0.647315087195, 0.648070769569, 0.648826821903, \
0.64958324499, 0.650340039629, 0.651097206616, 0.651854746754, \
0.652612660844, 0.653370949693, 0.654129614105, 0.654888654892, \
0.655648072862, 0.656407868831, 0.657168043612, 0.657928598023, \
0.658689532883, 0.659450849015, 0.66021254724, 0.660974628386, \
0.66173709328, 0.662499942752, 0.663263177633, 0.66402679876, \
0.664790806967, 0.665555203094, 0.666319987981, 0.667085162471, \
0.66785072741, 0.668616683646, 0.669383032026, 0.670149773405, \
0.670916908635, 0.671684438573, 0.672452364078, 0.67322068601, \
0.673989405232, 0.674758522611, 0.675528039013, 0.67629795531, \
0.677068272372, 0.677838991074, 0.678610112294, 0.679381636911, \
0.680153565806, 0.680925899864, 0.68169863997, 0.682471787013, \
0.683245341885, 0.684019305478, 0.684793678689, 0.685568462415, \
0.686343657558, 0.687119265021, 0.687895285708, 0.688671720529, \
0.689448570392, 0.690225836212, 0.691003518904, 0.691781619384, \
0.692560138575, 0.693339077397, 0.694118436777, 0.694898217643, \
0.695678420925, 0.696459047555, 0.69724009847, 0.698021574607, \
0.698803476906, 0.699585806312, 0.700368563769, 0.701151750226, \
0.701935366634, 0.702719413947, 0.70350389312, 0.704288805113, \
0.705074150887, 0.705859931406, 0.706646147638, 0.707432800551, \
0.708219891118, 0.709007420313, 0.709795389115, 0.710583798504, \
0.711372649463, 0.712161942978, 0.712951680037, 0.713741861631, \
0.714532488756, 0.715323562408, 0.716115083586, 0.716907053294, \
0.717699472536, 0.718492342322, 0.719285663661, 0.720079437569, \
0.720873665062, 0.721668347159, 0.722463484884, 0.723259079262, \
0.724055131321, 0.724851642093, 0.725648612612, 0.726446043916, \
0.727243937044, 0.728042293041, 0.728841112951, 0.729640397826, \
0.730440148716, 0.731240366677, 0.732041052768, 0.73284220805, \
0.733643833587, 0.734445930447, 0.735248499702, 0.736051542423, \
0.736855059689, 0.737659052579, 0.738463522177, 0.739268469569, \
0.740073895844, 0.740879802096, 0.741686189419, 0.742493058914, \
0.743300411682, 0.74410824883, 0.744916571465, 0.7457253807, \
0.746534677651, 0.747344463435, 0.748154739176, 0.748965505998, \
0.74977676503, 0.750588517404, 0.751400764255, 0.752213506722, \
0.753026745948, 0.753840483077, 0.754654719259, 0.755469455646, \
0.756284693394, 0.757100433662, 0.757916677614, 0.758733426414, \
0.759550681234, 0.760368443246, 0.761186713627, 0.762005493558, \
0.762824784223, 0.763644586809, 0.764464902507, 0.765285732513, \
0.766107078024, 0.766928940243, 0.767751320376, 0.768574219631, \
0.769397639223, 0.770221580367, 0.771046044284, 0.7718710322, \
0.772696545341, 0.773522584939, 0.774349152231, 0.775176248455, \
0.776003874855, 0.776832032678, 0.777660723175, 0.7784899476, \
0.779319707213, 0.780150003277, 0.780980837056, 0.781812209823, \
0.782644122852, 0.783476577421, 0.784309574812, 0.785143116312, \
0.785977203212, 0.786811836806, 0.787647018393, 0.788482749274, \
0.789319030758, 0.790155864155, 0.790993250781, 0.791831191953, \
0.792669688996, 0.793508743238, 0.79434835601, 0.795188528648, \
0.796029262492, 0.796870558888, 0.797712419183, 0.798554844732, \
0.799397836891, 0.800241397023, 0.801085526493, 0.801930226672, \
0.802775498936, 0.803621344663, 0.804467765238, 0.805314762049, \
0.806162336489, 0.807010489955, 0.807859223848, 0.808708539576, \
0.80955843855, 0.810408922185, 0.8112599919, 0.812111649122, \
0.812963895279, 0.813816731806, 0.814670160142, 0.815524181729, \
0.816378798017, 0.817234010459, 0.818089820512, 0.818946229639, \
0.819803239307, 0.82066085099, 0.821519066163, 0.82237788631, \
0.823237312917, 0.824097347476, 0.824957991484, 0.825819246443, \
0.826681113861, 0.827543595248, 0.828406692123, 0.829270406006, \
0.830134738426, 0.830999690914, 0.831865265009, 0.832731462252, \
0.833598284192, 0.834465732382, 0.83533380838, 0.83620251375, \
0.83707185006, 0.837941818885, 0.838812421805, 0.839683660404, \
0.840555536273, 0.841428051007, 0.842301206208, 0.843175003483, \
0.844049444444, 0.844924530708, 0.845800263899, 0.846676645646, \
0.847553677583, 0.84843136135, 0.849309698594, 0.850188690965, \
0.851068340122, 0.851948647726, 0.852829615446, 0.853711244958, \
0.854593537942, 0.855476496083, 0.856360121074, 0.857244414613, \
0.858129378404, 0.859015014157, 0.859901323588, 0.860788308418, \
0.861675970376, 0.862564311196, 0.863453332618, 0.864343036389, \
0.865233424261, 0.866124497993, 0.86701625935, 0.867908710104, \
0.868801852031, 0.869695686916, 0.870590216549, 0.871485442727, \
0.872381367254, 0.873277991937, 0.874175318595, 0.875073349049, \
0.875972085128, 0.876871528668, 0.877771681512, 0.878672545509, \
0.879574122514, 0.88047641439, 0.881379423006, 0.882283150238, \
0.883187597969, 0.884092768089, 0.884998662493, 0.885905283087, \
0.886812631779, 0.887720710488, 0.888629521138, 0.889539065661, \
0.890449345995, 0.891360364086, 0.892272121887, 0.893184621359, \
0.894097864469, 0.895011853191, 0.895926589508, 0.896842075409, \
0.897758312891, 0.898675303958, 0.899593050622, 0.900511554903, \
0.901430818827, 0.902350844428, 0.90327163375, 0.90419318884, \
0.905115511758, 0.906038604567, 0.906962469342, 0.907887108163, \
0.908812523118, 0.909738716305, 0.910665689828, 0.911593445799, \
0.912521986339, 0.913451313577, 0.91438142965, 0.915312336703, \
0.916244036888, 0.917176532369, 0.918109825313, 0.919043917899, \
0.919978812315, 0.920914510754, 0.921851015421, 0.922788328527, \
0.923726452292, 0.924665388946, 0.925605140727, 0.926545709881, \
0.927487098664, 0.928429309338, 0.929372344179, 0.930316205466, \
0.931260895491, 0.932206416553, 0.933152770962, 0.934099961035, \
0.9350479891, 0.935996857491, 0.936946568555, 0.937897124647, \
0.93884852813, 0.939800781378, 0.940753886774, 0.941707846709, \
0.942662663587, 0.943618339818, 0.944574877824, 0.945532280036, \
0.946490548893, 0.947449686847, 0.948409696358, 0.949370579895, \
0.95033233994, 0.951294978982, 0.952258499521, 0.953222904069, \
0.954188195145, 0.955154375281, 0.956121447017, 0.957089412906, \
0.958058275508, 0.959028037398, 0.959998701157, 0.960970269379, \
0.961942744669, 0.962916129641, 0.963890426921, 0.964865639146, \
0.965841768964, 0.966818819033, 0.967796792022, 0.968775690612, \
0.969755517495, 0.970736275374, 0.971717966963, 0.972700594988, \
0.973684162186, 0.974668671305, 0.975654125105, 0.976640526359, \
0.977627877849, 0.978616182371, 0.979605442731, 0.980595661749, \
0.981586842254, 0.982578987091, 0.983572099113, 0.984566181188, \
0.985561236196, 0.986557267027, 0.987554276585, 0.988552267788, \
0.989551243564, 0.990551206854, 0.991552160613, 0.992554107808, \
0.993557051418, 0.994560994436, 0.995565939868, 0.996571890733, \
0.997578850062, 0.9985868209, 0.999595806306, 1.00060580935, \
1.00161683312, 1.00262888071, 1.00364195524, 1.00465605983, \
1.00567119762, 1.00668737176, 1.00770458543, 1.0087228418, \
1.00974214407, 1.01076249545, 1.01178389916, 1.01280635844, \
1.01382987655, 1.01485445675, 1.01588010232, 1.01690681657, \
1.01793460279, 1.01896346433, 1.01999340452, 1.02102442671, \
1.02205653428, 1.02308973062, 1.02412401912, 1.02515940322, \
1.02619588633, 1.02723347191, 1.02827216342, 1.02931196435, \
1.03035287819, 1.03139490845, 1.03243805866, 1.03348233237, \
1.03452773314, 1.03557426455, 1.03662193018, 1.03767073366, \
1.03872067861, 1.03977176868, 1.04082400752, 1.04187739881, \
1.04293194626, 1.04398765357, 1.04504452448, 1.04610256273, \
1.04716177209, 1.04822215635, 1.04928371929, 1.05034646476, \
1.05141039657, 1.0524755186, 1.0535418347, 1.05460934878, \
1.05567806475, 1.05674798653, 1.05781911808, 1.05889146336, \
1.05996502636, 1.06103981109, 1.06211582157, 1.06319306184, \
1.06427153597, 1.06535124805, 1.06643220217, 1.06751440246, \
1.06859785307, 1.06968255816, 1.07076852192, 1.07185574854, \
1.07294424226, 1.07403400732, 1.07512504799, 1.07621736855, \
1.07731097332, 1.07840586663, 1.07950205283, 1.0805995363, \
1.08169832142, 1.08279841262, 1.08389981434, 1.08500253104, \
1.08610656721, 1.08721192734, 1.08831861598, 1.08942663766, \
1.09053599698, 1.09164669853, 1.09275874692, 1.09387214681, \
1.09498690286, 1.09610301977, 1.09722050226, 1.09833935506, \
1.09945958293, 1.10058119068, 1.10170418311, 1.10282856507, \
1.10395434141, 1.10508151703, 1.10621009684, 1.10734008578, \
1.10847148882, 1.10960431095, 1.11073855719, 1.11187423257, \
1.11301134218, 1.11414989111, 1.11528988448, 1.11643132745, \
1.11757422519, 1.1187185829, 1.11986440583, 1.12101169923, \
1.12216046839, 1.12331071862, 1.12446245528, 1.12561568374, \
1.1267704094, 1.12792663769, 1.12908437408, 1.13024362405, \
1.13140439313, 1.13256668686, 1.13373051083, 1.13489587064, \
1.13606277195, 1.13723122041, 1.13840122174, 1.13957278166, \
1.14074590595, 1.14192060039, 1.14309687083, 1.14427472312, \
1.14545416315, 1.14663519686, 1.1478178302, 1.14900206916, \
1.15018791978, 1.1513753881, 1.15256448023, 1.1537552023, \
1.15494756045, 1.1561415609, 1.15733720988, 1.15853451364, \
1.1597334785, 1.16093411079, 1.16213641689, 1.16334040321, \
1.1645460762, 1.16575344233, 1.16696250814, 1.16817328018, \
1.16938576505, 1.17059996938, 1.17181589985, 1.17303356317, \
1.17425296609, 1.1754741154, 1.17669701793, 1.17792168055, \
1.17914811017, 1.18037631374, 1.18160629825, 1.18283807074, \
1.18407163828, 1.18530700798, 1.18654418701, 1.18778318256, \
1.18902400188, 1.19026665225, 1.19151114101, 1.19275747553, \
1.19400566322, 1.19525571156, 1.19650762804, 1.19776142023, \
1.19901709572, 1.20027466216, 1.20153412724, 1.2027954987, \
1.20405878432, 1.20532399194, 1.20659112944, 1.20786020475, \
1.20913122586, 1.21040420078, 1.21167913759, 1.21295604444, \
1.21423492948, 1.21551580096, 1.21679866716, 1.2180835364, \
1.21937041707, 1.22065931762, 1.22195024653, 1.22324321234, \
1.22453822366, 1.22583528914, 1.22713441748, 1.22843561746, \
1.22973889789, 1.23104426764, 1.23235173565, 1.23366131092, \
1.23497300248, 1.23628681945, 1.23760277098, 1.23892086632, \
1.24024111474, 1.24156352558, 1.24288810826, 1.24421487225, \
1.24554382707, 1.24687498231, 1.24820834764, 1.24954393277, \
1.25088174749, 1.25222180165, 1.25356410517, 1.25490866802, \
1.25625550025, 1.25760461198, 1.25895601339, 1.26030971474, \
1.26166572634, 1.26302405859, 1.26438472195, 1.26574772695, \
1.26711308419, 1.26848080436, 1.2698508982, 1.27122337654, \
1.27259825027, 1.27397553036, 1.27535522788, 1.27673735394, \
1.27812191975, 1.27950893658, 1.2808984158, 1.28229036885, \
1.28368480725, 1.2850817426, 1.28648118658, 1.28788315096, \
1.28928764758, 1.29069468838, 1.29210428538, 1.29351645068, \
1.29493119647, 1.29634853503, 1.29776847873, 1.29919104001, \
1.30061623144, 1.30204406564, 1.30347455533, 1.30490771336, \
1.30634355261, 1.30778208612, 1.30922332698, 1.31066728839, \
1.31211398366, 1.31356342618, 1.31501562944, 1.31647060705, \
1.3179283727, 1.31938894019, 1.32085232344, 1.32231853644, \
1.32378759331, 1.32525950829, 1.32673429568, 1.32821196994, \
1.3296925456, 1.33117603734, 1.33266245992, 1.33415182822, \
1.33564415726, 1.33713946213, 1.33863775808, 1.34013906045, \
1.34164338473, 1.34315074649, 1.34466116145, 1.34617464545, \
1.34769121444, 1.34921088453, 1.35073367192, 1.35225959295, \
1.3537886641, 1.35532090198, 1.35685632332, 1.35839494499, \
1.35993678401, 1.36148185752, 1.3630301828, 1.36458177727, \
1.36613665852, 1.36769484423, 1.36925635228, 1.37082120066, \
1.37238940752, 1.37396099116, 1.37553597004, 1.37711436276, \
1.37869618807, 1.3802814649, 1.38187021232, 1.38346244956, \
1.38505819603, 1.38665747129, 1.38826029505, 1.38986668723, \
1.39147666789, 1.39309025725, 1.39470747574, 1.39632834393, \
1.3979528826, 1.39958111269, 1.40121305532, 1.40284873181, \
1.40448816364, 1.40613137251, 1.40777838029, 1.40942920906, \
1.41108388106, 1.41274241877, 1.41440484485, 1.41607118216, \
1.41774145377, 1.41941568296, 1.42109389321, 1.42277610822, \
1.42446235191, 1.4261526484, 1.42784702206, 1.42954549744, \
1.43124809936, 1.43295485285, 1.43466578316, 1.43638091579, \
1.43810027647, 1.43982389118, 1.44155178613, 1.44328398779, \
1.44502052286, 1.44676141832, 1.44850670139, 1.45025639954, \
1.45201054053, 1.45376915236, 1.4555322633, 1.45729990192, \
1.45907209704, 1.46084887778, 1.46263027351, 1.46441631394, \
1.46620702902, 1.46800244903, 1.46980260454, 1.47160752641, \
1.47341724582, 1.47523179427, 1.47705120356, 1.47887550581, \
1.48070473348, 1.48253891934, 1.48437809651, 1.48622229844, \
1.48807155892, 1.48992591209, 1.49178539245, 1.49365003484, \
1.49551987447, 1.49739494693, 1.49927528818, 1.50116093452, \
1.50305192269, 1.50494828979, 1.5068500733, 1.50875731112, \
1.51067004156, 1.51258830332, 1.51451213554, 1.51644157777, \
1.51837667, 1.52031745264, 1.52226396655, 1.52421625305, \
1.52617435391, 1.52813831134, 1.53010816806, 1.53208396723, \
1.53406575252, 1.53605356807, 1.53804745854, 1.54004746908, \
1.54205364536, 1.54406603357, 1.54608468043, 1.5481096332, \
1.55014093969, 1.55217864825, 1.55422280782, 1.55627346789, \
1.55833067854, 1.56039449044, 1.56246495486, 1.56454212368, \
1.56662604942, 1.56871678519, 1.57081438477, 1.57291890258, \
1.57503039371, 1.57714891392, 1.57927451964, 1.581407268, \
1.58354721686, 1.58569442477, 1.58784895103, 1.59001085565, \
1.59218019943, 1.59435704393, 1.59654145149, 1.59873348523, \
1.60093320909, 1.60314068784, 1.60535598708, 1.60757917325, \
1.60981031368, 1.61204947656, 1.61429673098, 1.61655214696, \
1.61881579544, 1.62108774828, 1.62336807836, 1.62565685948, \
1.62795416649, 1.63026007522, 1.63257466256, 1.63489800643, \
1.63723018585, 1.63957128092, 1.64192137286, 1.64428054402, \
1.64664887792, 1.64902645924, 1.65141337389, 1.65380970898, \
1.65621555288, 1.65863099522, 1.66105612696, 1.66349104035, \
1.665935829, 1.6683905879, 1.67085541345, 1.67333040348, \
1.67581565725, 1.67831127556, 1.68081736069, 1.68333401649, \
1.6858613484, 1.68839946345, 1.69094847035, 1.69350847947, \
1.69607960292, 1.69866195455, 1.70125565, 1.70386080677, \
1.70647754419, 1.70910598353, 1.71174624799, 1.7143984628, \
1.71706275519, 1.71973925449, 1.72242809217, 1.72512940185, \
1.72784331941, 1.73056998298, 1.73330953303, 1.7360621124, \
1.73882786639, 1.74160694276, 1.74439949184, 1.74720566658, \
1.75002562257, 1.75285951816, 1.75570751448, 1.75856977555, \
1.7614464683, 1.7643377627, 1.76724383175, 1.77016485166, \
1.77310100183, 1.77605246501, 1.77901942732, 1.78200207837, \
1.78500061134, 1.78801522309, 1.79104611422, 1.79409348917, \
1.79715755637, 1.80023852827, 1.8033366215, 1.80645205698, \
1.80958506, 1.81273586036, 1.81590469249, 1.81909179558, \
1.82229741372, 1.825521796, 1.82876519668, 1.83202787533, \
1.83531009698, 1.83861213227, 1.84193425762, 1.84527675539, \
1.84863991405, 1.85202402837, 1.8554293996, 1.85885633567, \
1.86230515137, 1.86577616858, 1.86926971649, 1.87278613181, \
1.87632575899, 1.8798889505, 1.88347606705, 1.88708747787, \
1.89072356098, 1.89438470345, 1.89807130174, 1.90178376197, \
1.90552250025, 1.90928794302, 1.91308052741, 1.91690070156, \
1.92074892503, 1.92462566922, 1.92853141772, 1.93246666677, \
1.93643192574, 1.94042771755, 1.94445457918, 1.9485130622, \
1.95260373328, 1.95672717477, 1.96088398528, 1.96507478031, \
1.96930019287, 1.97356087419, 1.97785749442, 1.98219074335, \
1.98656133124, 1.99096998962, 1.99541747213, 1.99990455547, \
2.00443204036, 2.00900075251, 2.01361154371, 2.01826529295, \
2.02296290758, 2.02770532458, 2.03249351185, 2.03732846962, \
2.04221123192, 2.0471428681, 2.05212448451, 2.05715722623, \
2.06224227888, 2.06738087065, 2.07257427429, 2.07782380938, \
2.08313084465, 2.08849680045, 2.09392315145, 2.09941142942, \
2.10496322628, 2.11058019727, 2.11626406445, 2.12201662031, \
2.12783973172, 2.13373534416, 2.13970548619, 2.14575227431, \
2.15187791816, 2.1580847261, 2.16437511125, 2.17075159793, \
2.17721682871, 2.18377357191, 2.19042472984, 2.1971733476, \
2.20402262267, 2.21097591537, 2.21803676019, 2.22520887808, \
2.23249618993, 2.23990283128, 2.24743316835, 2.25509181569, \
2.26288365549, 2.27081385882, 2.27888790909, 2.28711162784, \
2.29549120334, 2.30403322228, 2.31274470495, 2.32163314439, \
2.33070655011, 2.33997349698, 2.34944318006, 2.35912547628, \
2.36903101398, 2.37917125159, 2.3895585669, 2.40020635872, \
2.411129163, 2.422342786, 2.43386445755, 2.44571300825, \
2.45790907518, 2.47047534177, 2.48343681896, 2.49682117646, \
2.51065913523, 2.52498493519, 2.53983689632, 2.55525809634, \
2.57129719532, 2.58800944712, 2.60545795099, 2.62371521493, \
2.64286512889, 2.66300548366, 2.68425122698, 2.7067387311, \
2.73063147325, 2.75612772826, 2.78347119119, 2.81296597401, \
2.84499832124, 2.8800689917, 2.91884323355, 2.9622311235, \
3.0115232357, 3.06863405379, 3.13657337257, 3.22045475765, \
3.32996541598, 3.48672170399 ])
yu = array([
0.0540012735356, 0.0544874991381, 0.054972661389, 0.0554567692269, \
0.0559398314244, 0.0564218565922, 0.0569028531841, 0.0573828295011, \
0.0578617936955, 0.0583397537752, 0.058816717607, 0.059292692921, \
0.0597676873138, 0.060241708252, 0.0607147630756, 0.0611868590013, \
0.0616580031256, 0.0621282024276, 0.0625974637723, 0.0630657939132, \
0.0635331994951, 0.0639996870564, 0.0644652630325, 0.0649299337573, \
0.0653937054663, 0.0658565842984, 0.0663185762986, 0.0667796874201, \
0.0672399235259, 0.0676992903918, 0.0681577937072, 0.0686154390782, \
0.0690722320286, 0.0695281780022, 0.0699832823643, 0.0704375504035, \
0.0708909873334, 0.0713435982942, 0.0717953883543, 0.0722463625114, \
0.0726965256949, 0.0731458827663, 0.0735944385211, 0.0740421976905, \
0.074489164942, 0.0749353448811, 0.0753807420524, 0.0758253609412, \
0.076269205974, 0.0767122815202, 0.0771545918932, 0.077596141351, \
0.0780369340979, 0.0784769742851, 0.078916266012, 0.0793548133268, \
0.0797926202279, 0.0802296906646, 0.080666028538, 0.081101637702, \
0.081536521964, 0.0819706850859, 0.082404130785, 0.0828368627345, \
0.0832688845646, 0.0837001998631, 0.0841308121761, 0.084560725009, \
0.0849899418268, 0.0854184660553, 0.0858463010813, 0.0862734502535, \
0.0866999168832, 0.0871257042449, 0.0875508155769, 0.0879752540816, \
0.0883990229268, 0.0888221252457, 0.0892445641375, 0.0896663426683, \
0.0900874638713, 0.0905079307475, 0.0909277462662, 0.0913469133655, \
0.0917654349529, 0.0921833139053, 0.0926005530704, 0.0930171552661, \
0.0934331232819, 0.0938484598786, 0.0942631677893, 0.0946772497194, \
0.0950907083474, 0.095503546325, 0.0959157662776, 0.0963273708049, \
0.0967383624809, 0.0971487438546, 0.0975585174503, 0.0979676857678, \
0.098376251283, 0.098784216448, 0.0991915836918, 0.0995983554201, \
0.100004534016, 0.100410121841, 0.100815121233, 0.10121953451, \
0.101623363968, 0.102026611881, 0.102429280502, 0.102831372066, \
0.103232888785, 0.103633832853, 0.104034206444, 0.10443401171, \
0.104833250787, 0.105231925791, 0.10563003882, 0.106027591953, \
0.106424587249, 0.106821026753, 0.10721691249, 0.107612246467, \
0.108007030675, 0.108401267088, 0.108794957663, 0.10918810434, \
0.109580709043, 0.109972773679, 0.110364300142, 0.110755290307, \
0.111145746034, 0.111535669171, 0.111925061545, 0.112313924974, \
0.112702261257, 0.113090072181, 0.113477359516, 0.113864125022, \
0.11425037044, 0.1146360975, 0.115021307918, 0.115406003396, \
0.115790185624, 0.116173856276, 0.116557017014, 0.11693966949, \
0.117321815339, 0.117703456185, 0.118084593641, 0.118465229306, \
0.118845364768, 0.1192250016, 0.119604141367, 0.119982785621, \
0.120360935901, 0.120738593735, 0.121115760642, 0.121492438126, \
0.121868627682, 0.122244330795, 0.122619548937, 0.12299428357, \
0.123368536146, 0.123742308106, 0.124115600881, 0.12448841589, \
0.124860754545, 0.125232618245, 0.12560400838, 0.125974926331, \
0.126345373469, 0.126715351154, 0.127084860738, 0.127453903564, \
0.127822480963, 0.128190594259, 0.128558244767, 0.128925433793, \
0.129292162631, 0.129658432571, 0.130024244891, 0.13038960086, \
0.130754501741, 0.131118948787, 0.131482943242, 0.131846486342, \
0.132209579317, 0.132572223385, 0.132934419758, 0.133296169642, \
0.13365747423, 0.134018334713, 0.13437875227, 0.134738728074, \
0.13509826329, 0.135457359075, 0.135816016581, 0.13617423695, \
0.136532021316, 0.13688937081, 0.137246286551, 0.137602769653, \
0.137958821225, 0.138314442365, 0.138669634167, 0.139024397717, \
0.139378734095, 0.139732644373, 0.140086129618, 0.14043919089, \
0.140791829241, 0.141144045718, 0.141495841361, 0.141847217205, \
0.142198174276, 0.142548713597, 0.142898836183, 0.143248543043, \
0.143597835179, 0.14394671359, 0.144295179266, 0.144643233193, \
0.144990876349, 0.14533810971, 0.145684934242, 0.146031350908, \
0.146377360665, 0.146722964463, 0.147068163249, 0.147412957962, \
0.147757349537, 0.148101338903, 0.148444926984, 0.148788114699, \
0.149130902961, 0.149473292678, 0.149815284753, 0.150156880085, \
0.150498079565, 0.150838884082, 0.151179294519, 0.151519311753, \
0.151858936658, 0.152198170101, 0.152537012946, 0.152875466051, \
0.153213530271, 0.153551206454, 0.153888495445, 0.154225398084, \
0.154561915205, 0.15489804764, 0.155233796214, 0.155569161751, \
0.155904145066, 0.156238746972, 0.156572968279, 0.156906809791, \
0.157240272307, 0.157573356623, 0.15790606353, 0.158238393817, \
0.158570348265, 0.158901927654, 0.159233132759, 0.159563964351, \
0.159894423197, 0.160224510058, 0.160554225695, 0.160883570863, \
0.161212546311, 0.161541152788, 0.161869391036, 0.162197261796, \
0.162524765803, 0.162851903789, 0.163178676483, 0.163505084608, \
0.163831128886, 0.164156810034, 0.164482128766, 0.164807085792, \
0.165131681818, 0.165455917548, 0.165779793681, 0.166103310913, \
0.166426469936, 0.16674927144, 0.167071716111, 0.167393804631, \
0.167715537679, 0.16803691593, 0.168357940058, 0.168678610731, \
0.168998928615, 0.169318894373, 0.169638508664, 0.169957772145, \
0.170276685469, 0.170595249286, 0.170913464242, 0.171231330982, \
0.171548850146, 0.171866022373, 0.172182848295, 0.172499328546, \
0.172815463755, 0.173131254545, 0.173446701542, 0.173761805364, \
0.174076566628, 0.174390985949, 0.174705063937, 0.175018801202, \
0.175332198348, 0.175645255979, 0.175957974695, 0.176270355092, \
0.176582397766, 0.176894103309, 0.177205472308, 0.177516505351, \
0.177827203022, 0.178137565902, 0.178447594568, 0.178757289598, \
0.179066651564, 0.179375681037, 0.179684378585, 0.179992744774, \
0.180300780166, 0.180608485323, 0.180915860803, 0.18122290716, \
0.181529624949, 0.18183601472, 0.182142077022, 0.182447812399, \
0.182753221396, 0.183058304554, 0.183363062412, 0.183667495505, \
0.183971604368, 0.184275389534, 0.18457885153, 0.184881990885, \
0.185184808123, 0.185487303768, 0.185789478338, 0.186091332353, \
0.186392866329, 0.186694080779, 0.186994976215, 0.187295553146, \
0.18759581208, 0.187895753521, 0.188195377973, 0.188494685937, \
0.188793677911, 0.189092354391, 0.189390715873, 0.18968876285, \
0.189986495811, 0.190283915244, 0.190581021638, 0.190877815475, \
0.191174297238, 0.191470467408, 0.191766326463, 0.192061874879, \
0.192357113132, 0.192652041693, 0.192946661033, 0.193240971621, \
0.193534973925, 0.193828668408, 0.194122055533, 0.194415135763, \
0.194707909556, 0.19500037737, 0.195292539661, 0.195584396882, \
0.195875949485, 0.196167197921, 0.196458142637, 0.196748784081, \
0.197039122697, 0.197329158929, 0.197618893218, 0.197908326003, \
0.198197457722, 0.198486288812, 0.198774819706, 0.199063050838, \
0.199350982639, 0.199638615537, 0.199925949961, 0.200212986337, \
0.200499725089, 0.20078616664, 0.20107231141, 0.20135815982, \
0.201643712287, 0.201928969228, 0.202213931056, 0.202498598186, \
0.202782971029, 0.203067049994, 0.20335083549, 0.203634327924, \
0.203917527701, 0.204200435225, 0.204483050898, 0.204765375121, \
0.205047408293, 0.205329150811, 0.205610603072, 0.205891765471, \
0.2061726384, 0.206453222252, 0.206733517417, 0.207013524284, \
0.207293243239, 0.20757267467, 0.207851818961, 0.208130676495, \
0.208409247653, 0.208687532816, 0.208965532363, 0.209243246671, \
0.209520676117, 0.209797821075, 0.210074681919, 0.210351259021, \
0.210627552752, 0.21090356348, 0.211179291575, 0.211454737402, \
0.211729901327, 0.212004783714, 0.212279384926, 0.212553705325, \
0.21282774527, 0.213101505121, 0.213374985234, 0.213648185967, \
0.213921107674, 0.214193750709, 0.214466115425, 0.214738202173, \
0.215010011303, 0.215281543164, 0.215552798104, 0.215823776468, \
0.216094478602, 0.21636490485, 0.216635055555, 0.216904931057, \
0.217174531699, 0.217443857818, 0.217712909752, 0.21798168784, \
0.218250192415, 0.218518423813, 0.218786382367, 0.219054068409, \
0.21932148227, 0.21958862428, 0.219855494768, 0.220122094062, \
0.220388422488, 0.220654480371, 0.220920268035, 0.221185785805, \
0.221451034002, 0.221716012947, 0.22198072296, 0.222245164359, \
0.222509337463, 0.222773242589, 0.223036880051, 0.223300250165, \
0.223563353244, 0.2238261896, 0.224088759545, 0.224351063389, \
0.224613101442, 0.224874874012, 0.225136381406, 0.22539762393, \
0.22565860189, 0.22591931559, 0.226179765333, 0.226439951422, \
0.226699874157, 0.22695953384, 0.227218930768, 0.227478065241, \
0.227736937556, 0.227995548009, 0.228253896895, 0.22851198451, \
0.228769811145, 0.229027377095, 0.22928468265, 0.229541728101, \
0.229798513738, 0.23005503985, 0.230311306723, 0.230567314646, \
0.230823063904, 0.231078554782, 0.231333787564, 0.231588762534, \
0.231843479974, 0.232097940164, 0.232352143387, 0.232606089921, \
0.232859780045, 0.233113214036, 0.233366392173, 0.233619314731, \
0.233871981984, 0.234124394209, 0.234376551677, 0.234628454662, \
0.234880103436, 0.235131498268, 0.235382639431, 0.235633527192, \
0.23588416182, 0.236134543582, 0.236384672746, 0.236634549577, \
0.23688417434, 0.2371335473, 0.237382668719, 0.237631538861, \
0.237880157987, 0.238128526359, 0.238376644236, 0.238624511878, \
0.238872129544, 0.23911949749, 0.239366615975, 0.239613485254, \
0.239860105583, 0.240106477217, 0.240352600409, 0.240598475413, \
0.240844102481, 0.241089481863, 0.241334613813, 0.241579498578, \
0.241824136409, 0.242068527555, 0.242312672262, 0.242556570778, \
0.24280022335, 0.243043630222, 0.243286791641, 0.243529707849, \
0.24377237909, 0.244014805607, 0.244256987642, 0.244498925435, \
0.244740619229, 0.244982069261, 0.245223275772, 0.245464238999, \
0.24570495918, 0.245945436553, 0.246185671353, 0.246425663816, \
0.246665414177, 0.24690492267, 0.247144189529, 0.247383214985, \
0.247621999273, 0.247860542621, 0.248098845263, 0.248336907427, \
0.248574729343, 0.24881231124, 0.249049653346, 0.249286755888, \
0.249523619094, 0.249760243188, 0.249996628397, 0.250232774945, \
0.250468683057, 0.250704352956, 0.250939784865, 0.251174979007, \
0.251409935601, 0.251644654871, 0.251879137035, 0.252113382314, \
0.252347390927, 0.252581163092, 0.252814699027, 0.253047998949, \
0.253281063075, 0.253513891621, 0.253746484801, 0.253978842831, \
0.254210965925, 0.254442854297, 0.254674508159, 0.254905927723, \
0.255137113202, 0.255368064807, 0.255598782747, 0.255829267233, \
0.256059518475, 0.256289536681, 0.256519322059, 0.256748874817, \
0.256978195162, 0.2572072833, 0.257436139437, 0.257664763779, \
0.25789315653, 0.258121317895, 0.258349248077, 0.258576947278, \
0.258804415703, 0.259031653551, 0.259258661026, 0.259485438327, \
0.259711985655, 0.259938303209, 0.260164391189, 0.260390249794, \
0.260615879221, 0.260841279668, 0.261066451331, 0.261291394408, \
0.261516109095, 0.261740595585, 0.261964854076, 0.26218888476, \
0.262412687831, 0.262636263484, 0.26285961191, 0.263082733302, \
0.263305627851, 0.263528295749, 0.263750737186, 0.263972952353, \
0.264194941439, 0.264416704633, 0.264638242124, 0.2648595541, \
0.265080640748, 0.265301502256, 0.265522138811, 0.265742550598, \
0.265962737803, 0.266182700611, 0.266402439207, 0.266621953774, \
0.266841244498, 0.26706031156, 0.267279155143, 0.26749777543, \
0.267716172603, 0.267934346842, 0.268152298328, 0.268370027242, \
0.268587533763, 0.268804818071, 0.269021880344, 0.269238720761, \
0.2694553395, 0.269671736739, 0.269887912653, 0.270103867421, \
0.270319601217, 0.270535114218, 0.270750406598, 0.270965478533, \
0.271180330196, 0.271394961762, 0.271609373403, 0.271823565293, \
0.272037537604, 0.272251290507, 0.272464824175, 0.272678138779, \
0.272891234489, 0.273104111476, 0.273316769908, 0.273529209956, \
0.273741431789, 0.273953435575, 0.274165221481, 0.274376789677, \
0.274588140328, 0.274799273601, 0.275010189664, 0.275220888681, \
0.275431370818, 0.275641636241, 0.275851685114, 0.2760615176, \
0.276271133865, 0.276480534071, 0.276689718381, 0.276898686958, \
0.277107439965, 0.277315977561, 0.27752429991, 0.277732407172, \
0.277940299507, 0.278147977076, 0.278355440038, 0.278562688553, \
0.278769722779, 0.278976542875, 0.279183149, 0.27938954131, \
0.279595719963, 0.279801685116, 0.280007436926, 0.280212975549, \
0.280418301139, 0.280623413854, 0.280828313848, 0.281033001275, \
0.281237476289, 0.281441739045, 0.281645789695, 0.281849628394, \
0.282053255293, 0.282256670545, 0.282459874302, 0.282662866715, \
0.282865647935, 0.283068218114, 0.283270577402, 0.283472725948, \
0.283674663903, 0.283876391415, 0.284077908635, 0.284279215709, \
0.284480312788, 0.284681200017, 0.284881877546, 0.285082345521, \
0.285282604088, 0.285482653395, 0.285682493588, 0.285882124811, \
0.286081547211, 0.286280760932, 0.286479766119, 0.286678562916, \
0.286877151468, 0.287075531918, 0.287273704409, 0.287471669084, \
0.287669426085, 0.287866975555, 0.288064317636, 0.28826145247, \
0.288458380196, 0.288655100957, 0.288851614893, 0.289047922144, \
0.289244022849, 0.289439917148, 0.289635605182, 0.289831087087, \
0.290026363003, 0.290221433068, 0.290416297419, 0.290610956195, \
0.290805409533, 0.290999657568, 0.291193700439, 0.29138753828, \
0.291581171228, 0.291774599419, 0.291967822987, 0.292160842068, \
0.292353656796, 0.292546267306, 0.292738673731, 0.292930876204, \
0.29312287486, 0.293314669832, 0.293506261251, 0.293697649251, \
0.293888833963, 0.294079815519, 0.294270594051, 0.29446116969, \
0.294651542566, 0.294841712811, 0.295031680553, 0.295221445924, \
0.295411009053, 0.295600370069, 0.295789529101, 0.295978486277, \
0.296167241727, 0.296355795578, 0.296544147958, 0.296732298995, \
0.296920248815, 0.297107997546, 0.297295545315, 0.297482892246, \
0.297670038468, 0.297856984104, 0.298043729282, 0.298230274125, \
0.298416618759, 0.298602763308, 0.298788707897, 0.298974452649, \
0.299159997689, 0.299345343139, 0.299530489123, 0.299715435764, \
0.299900183184, 0.300084731505, 0.30026908085, 0.300453231339, \
0.300637183096, 0.30082093624, 0.301004490893, 0.301187847175, \
0.301371005207, 0.301553965108, 0.301736726999, 0.301919290999, \
0.302101657227, 0.302283825802, 0.302465796843, 0.302647570468, \
0.302829146795, 0.303010525942, 0.303191708028, 0.303372693168, \
0.303553481481, 0.303734073083, 0.30391446809, 0.304094666619, \
0.304274668786, 0.304454474707, 0.304634084497, 0.304813498271, \
0.304992716144, 0.305171738232, 0.305350564647, 0.305529195506, \
0.305707630921, 0.305885871006, 0.306063915875, 0.306241765641, \
0.306419420416, 0.306596880314, 0.306774145446, 0.306951215926, \
0.307128091864, 0.307304773373, 0.307481260563, 0.307657553547, \
0.307833652434, 0.308009557336, 0.308185268362, 0.308360785624, \
0.308536109231, 0.308711239292, 0.308886175918, 0.309060919216, \
0.309235469297, 0.309409826268, 0.309583990239, 0.309757961317, \
0.309931739611, 0.310105325228, 0.310278718275, 0.31045191886, \
0.31062492709, 0.310797743071, 0.310970366911, 0.311142798715, \
0.31131503859, 0.31148708664, 0.311658942973, 0.311830607693, \
0.312002080905, 0.312173362715, 0.312344453226, 0.312515352544, \
0.312686060772, 0.312856578014, 0.313026904375, 0.313197039958, \
0.313366984865, 0.313536739201, 0.313706303067, 0.313875676567, \
0.314044859803, 0.314213852877, 0.31438265589, 0.314551268945, \
0.314719692144, 0.314887925586, 0.315055969374, 0.315223823609, \
0.31539148839, 0.315558963818, 0.315726249993, 0.315893347016, \
0.316060254985, 0.316226974001, 0.316393504163, 0.31655984557, \
0.31672599832, 0.316891962512, 0.317057738245, 0.317223325617, \
0.317388724726, 0.31755393567, 0.317718958546, 0.317883793451, \
0.318048440483, 0.318212899739, 0.318377171315, 0.318541255307, \
0.318705151813, 0.318868860929, 0.319032382749, 0.31919571737, \
0.319358864888, 0.319521825397, 0.319684598993, 0.31984718577, \
0.320009585823, 0.320171799247, 0.320333826135, 0.320495666583, \
0.320657320683, 0.320818788529, 0.320980070215, 0.321141165834, \
0.321302075479, 0.321462799242, 0.321623337217, 0.321783689496, \
0.321943856171, 0.322103837334, 0.322263633077, 0.322423243491, \
0.322582668669, 0.322741908701, 0.322900963677, 0.323059833691, \
0.323218518831, 0.323377019188, 0.323535334852, 0.323693465915, \
0.323851412464, 0.324009174591, 0.324166752385, 0.324324145934, \
0.324481355329, 0.324638380658, 0.324795222009, 0.324951879472, \
0.325108353134, 0.325264643085, 0.325420749411, 0.3255766722, \
0.325732411541, 0.325887967521, 0.326043340226, 0.326198529745, \
0.326353536163, 0.326508359567, 0.326663000045, 0.326817457682, \
0.326971732564, 0.327125824778, 0.327279734408, 0.327433461542, \
0.327587006263, 0.327740368658, 0.327893548812, 0.328046546808, \
0.328199362732, 0.328351996669, 0.328504448702, 0.328656718917, \
0.328808807396, 0.328960714223, 0.329112439483, 0.329263983259, \
0.329415345633, 0.32956652669, 0.329717526511, 0.32986834518, \
0.330018982779, 0.330169439391, 0.330319715097, 0.33046980998, \
0.330619724122, 0.330769457604, 0.330919010508, 0.331068382916, \
0.331217574907, 0.331366586564, 0.331515417967, 0.331664069197, \
0.331812540334, 0.331960831459, 0.332108942652, 0.332256873993, \
0.332404625561, 0.332552197437, 0.332699589699, 0.332846802427, \
0.332993835701, 0.333140689599, 0.3332873642, 0.333433859582, \
0.333580175825, 0.333726313006, 0.333872271204, 0.334018050496, \
0.334163650961, 0.334309072676, 0.334454315719, 0.334599380166, \
0.334744266096, 0.334888973585, 0.33503350271, 0.335177853547, \
0.335322026174, 0.335466020667, 0.335609837101, 0.335753475553, \
0.335896936099, 0.336040218815, 0.336183323776, 0.336326251057, \
0.336469000734, 0.336611572882, 0.336753967576, 0.336896184891, \
0.3370382249, 0.33718008768, 0.337321773304, 0.337463281846, \
0.33760461338, 0.337745767981, 0.337886745721, 0.338027546675, \
0.338168170916, 0.338308618517, 0.33844888955, 0.338588984091, \
0.33872890221, 0.33886864398, 0.339008209475, 0.339147598766, \
0.339286811925, 0.339425849025, 0.339564710138, 0.339703395335, \
0.339841904688, 0.339980238268, 0.340118396147, 0.340256378395, \
0.340394185084, 0.340531816284, 0.340669272067, 0.340806552503, \
0.340943657662, 0.341080587614, 0.34121734243, 0.341353922179, \
0.341490326932, 0.341626556758, 0.341762611726, 0.341898491907, \
0.342034197368, 0.34216972818, 0.342305084412, 0.342440266131, \
0.342575273407, 0.342710106308, 0.342844764904, 0.342979249261, \
0.343113559448, 0.343247695533, 0.343381657583, 0.343515445668, \
0.343649059853, 0.343782500207, 0.343915766796, 0.344048859689, \
0.344181778951, 0.344314524649, 0.344447096851, 0.344579495623, \
0.344711721031, 0.344843773142, 0.344975652022, 0.345107357736, \
0.345238890351, 0.345370249932, 0.345501436546, 0.345632450257, \
0.34576329113, 0.345893959232, 0.346024454627, 0.346154777379, \
0.346284927555, 0.346414905218, 0.346544710432, 0.346674343264, \
0.346803803775, 0.346933092032, 0.347062208097, 0.347191152035, \
0.347319923909, 0.347448523782, 0.34757695172, 0.347705207784, \
0.347833292037, 0.347961204544, 0.348088945366, 0.348216514568, \
0.34834391221, 0.348471138357, 0.348598193069, 0.348725076411, \
0.348851788443, 0.348978329228, 0.349104698827, 0.349230897303, \
0.349356924717, 0.349482781131, 0.349608466606, 0.349733981203, \
0.349859324983, 0.349984498007, 0.350109500337, 0.350234332033, \
0.350358993156, 0.350483483765, 0.350607803923, 0.350731953687, \
0.35085593312, 0.350979742281, 0.351103381229, 0.351226850025, \
0.351350148728, 0.351473277398, 0.351596236094, 0.351719024875, \
0.3518416438, 0.351964092929, 0.35208637232, 0.352208482032, \
0.352330422124, 0.352452192653, 0.35257379368, 0.352695225262, \
0.352816487456, 0.352937580322, 0.353058503916, 0.353179258297, \
0.353299843523, 0.353420259651, 0.353540506738, 0.353660584842, \
0.35378049402, 0.353900234329, 0.354019805826, 0.354139208567, \
0.35425844261, 0.354377508012, 0.354496404828, 0.354615133114, \
0.354733692928, 0.354852084326, 0.354970307363, 0.355088362094, \
0.355206248577, 0.355323966867, 0.355441517019, 0.355558899089, \
0.355676113131, 0.355793159202, 0.355910037356, 0.356026747648, \
0.356143290133, 0.356259664866, 0.356375871902, 0.356491911294, \
0.356607783098, 0.356723487367, 0.356839024156, 0.356954393519, \
0.35706959551, 0.357184630183, 0.35729949759, 0.357414197787, \
0.357528730826, 0.357643096761, 0.357757295645, 0.357871327531, \
0.357985192472, 0.358098890522, 0.358212421732, 0.358325786157, \
0.358438983847, 0.358552014857, 0.358664879237, 0.358777577041, \
0.358890108321, 0.359002473128, 0.359114671515, 0.359226703533, \
0.359338569235, 0.359450268671, 0.359561801893, 0.359673168954, \
0.359784369903, 0.359895404792, 0.360006273672, 0.360116976594, \
0.36022751361, 0.360337884769, 0.360448090122, 0.360558129721, \
0.360668003615, 0.360777711854, 0.360887254489, 0.360996631571, \
0.361105843148, 0.361214889271, 0.36132376999, 0.361432485354, \
0.361541035413, 0.361649420216, 0.361757639813, 0.361865694253, \
0.361973583586, 0.362081307859, 0.362188867123, 0.362296261425, \
0.362403490816, 0.362510555343, 0.362617455054, 0.36272419, \
0.362830760226, 0.362937165783, 0.363043406718, 0.363149483079, \
0.363255394914, 0.36336114227, 0.363466725196, 0.36357214374, \
0.363677397947, 0.363782487867, 0.363887413546, 0.363992175032, \
0.364096772372, 0.364201205612, 0.364305474799, 0.364409579981, \
0.364513521204, 0.364617298515, 0.364720911959, 0.364824361585, \
0.364927647437, 0.365030769562, 0.365133728007, 0.365236522816, \
0.365339154037, 0.365441621714, 0.365543925894, 0.365646066623, \
0.365748043945, 0.365849857906, 0.365951508551, 0.366052995927, \
0.366154320077, 0.366255481046, 0.366356478881, 0.366457313625, \
0.366557985323, 0.36665849402, 0.366758839761, 0.366859022589, \
0.36695904255, 0.367058899687, 0.367158594044, 0.367258125667, \
0.367357494598, 0.367456700881, 0.367555744561, 0.36765462568, \
0.367753344284, 0.367851900414, 0.367950294114, 0.368048525428, \
0.368146594399, 0.36824450107, 0.368342245484, 0.368439827684, \
0.368537247713, 0.368634505613, 0.368731601427, 0.368828535197, \
0.368925306967, 0.369021916778, 0.369118364673, 0.369214650693, \
0.369310774882, 0.36940673728, 0.36950253793, 0.369598176874, \
0.369693654153, 0.369788969809, 0.369884123883, 0.369979116417, \
0.370073947452, 0.37016861703, 0.370263125191, 0.370357471977, \
0.370451657429, 0.370545681587, 0.370639544492, 0.370733246185, \
0.370826786707, 0.370920166098, 0.371013384399, 0.371106441649, \
0.37119933789, 0.371292073161, 0.371384647502, 0.371477060953, \
0.371569313555, 0.371661405347, 0.371753336368, 0.371845106659, \
0.371936716259, 0.372028165207, 0.372119453543, 0.372210581305, \
0.372301548534, 0.372392355268, 0.372483001547, 0.372573487408, \
0.372663812892, 0.372753978036, 0.372843982879, 0.372933827461, \
0.373023511819, 0.373113035991, 0.373202400017, 0.373291603934, \
0.373380647781, 0.373469531595, 0.373558255414, 0.373646819277, \
0.373735223221, 0.373823467284, 0.373911551503, 0.373999475916, \
0.37408724056, 0.374174845473, 0.374262290692, 0.374349576255, \
0.374436702197, 0.374523668557, 0.374610475371, 0.374697122676, \
0.374783610509, 0.374869938907, 0.374956107906, 0.375042117542, \
0.375127967852, 0.375213658872, 0.37529919064, 0.375384563189, \
0.375469776558, 0.375554830782, 0.375639725896, 0.375724461937, \
0.37580903894, 0.375893456941, 0.375977715976, 0.37606181608, \
0.376145757289, 0.376229539637, 0.37631316316, 0.376396627894, \
0.376479933873, 0.376563081133, 0.376646069707, 0.376728899632, \
0.376811570942, 0.376894083671, 0.376976437854, 0.377058633526, \
0.377140670721, 0.377222549474, 0.377304269818, 0.377385831788, \
0.377467235419, 0.377548480743, 0.377629567795, 0.377710496609, \
0.377791267219, 0.377871879658, 0.37795233396, 0.378032630158, \
0.378112768286, 0.378192748378, 0.378272570466, 0.378352234585, \
0.378431740766, 0.378511089043, 0.378590279449, 0.378669312017, \
0.37874818678, 0.378826903771, 0.378905463021, 0.378983864565, \
0.379062108433, 0.37914019466, 0.379218123276, 0.379295894315, \
0.379373507808, 0.379450963788, 0.379528262286, 0.379605403335, \
0.379682386967, 0.379759213212, 0.379835882104, 0.379912393674, \
0.379988747952, 0.380064944972, 0.380140984763, 0.380216867358, \
0.380292592787, 0.380368161083, 0.380443572275, 0.380518826396, \
0.380593923475, 0.380668863545, 0.380743646634, 0.380818272776, \
0.380892741999, 0.380967054335, 0.381041209813, 0.381115208466, \
0.381189050322, 0.381262735412, 0.381336263766, 0.381409635414, \
0.381482850387, 0.381555908715, 0.381628810426, 0.381701555551, \
0.381774144121, 0.381846576163, 0.381918851709, 0.381990970787, \
0.382062933426, 0.382134739657, 0.382206389509, 0.38227788301, \
0.382349220191, 0.382420401079, 0.382491425704, 0.382562294094, \
0.38263300628, 0.382703562288, 0.382773962149, 0.382844205891, \
0.382914293541, 0.382984225129, 0.383054000684, 0.383123620232, \
0.383193083804, 0.383262391426, 0.383331543127, 0.383400538934, \
0.383469378877, 0.383538062982, 0.383606591278, 0.383674963791, \
0.383743180551, 0.383811241584, 0.383879146918, 0.383946896581, \
0.384014490599, 0.384081929, 0.384149211811, 0.38421633906, \
0.384283310773, 0.384350126978, 0.384416787701, 0.384483292969, \
0.384549642809, 0.384615837248, 0.384681876312, 0.384747760028, \
0.384813488423, 0.384879061522, 0.384944479352, 0.38500974194, \
0.385074849312, 0.385139801493, 0.38520459851, 0.385269240389, \
0.385333727156, 0.385398058837, 0.385462235457, 0.385526257042, \
0.385590123617, 0.385653835209, 0.385717391843, 0.385780793545, \
0.385844040338, 0.38590713225, 0.385970069305, 0.386032851528, \
0.386095478944, 0.386157951579, 0.386220269457, 0.386282432603, \
0.386344441042, 0.386406294798, 0.386467993897, 0.386529538363, \
0.386590928221, 0.386652163494, 0.386713244207, 0.386774170385, \
0.386834942052, 0.386895559231, 0.386956021948, 0.387016330226, \
0.387076484089, 0.387136483561, 0.387196328665, 0.387256019426, \
0.387315555867, 0.387374938013, 0.387434165885, 0.387493239508, \
0.387552158906, 0.387610924101, 0.387669535118, 0.387727991978, \
0.387786294705, 0.387844443323, 0.387902437854, 0.387960278321, \
0.388017964748, 0.388075497156, 0.388132875569, 0.38819010001, \
0.3882471705, 0.388304087062, 0.38836084972, 0.388417458495, \
0.388473913409, 0.388530214485, 0.388586361746, 0.388642355212, \
0.388698194907, 0.388753880852, 0.388809413069, 0.38886479158, \
0.388920016407, 0.388975087572, 0.389030005096, 0.389084769, \
0.389139379307, 0.389193836037, 0.389248139213, 0.389302288855, \
0.389356284985, 0.389410127623, 0.389463816792, 0.389517352512, \
0.389570734804, 0.389623963688, 0.389677039187, 0.38972996132, \
0.389782730109, 0.389835345574, 0.389887807735, 0.389940116614, \
0.38999227223, 0.390044274604, 0.390096123756, 0.390147819707, \
0.390199362477, 0.390250752086, 0.390301988554, 0.390353071901, \
0.390404002147, 0.390454779312, 0.390505403415, 0.390555874477, \
0.390606192517, 0.390656357554, 0.390706369609, 0.390756228701, \
0.390805934848, 0.390855488072, 0.39090488839, 0.390954135822, \
0.391003230387, 0.391052172104, 0.391100960993, 0.391149597072, \
0.391198080361, 0.391246410877, 0.39129458864, 0.391342613669, \
0.391390485981, 0.391438205597, 0.391485772534, 0.39153318681, \
0.391580448445, 0.391627557456, 0.391674513861, 0.39172131768, \
0.39176796893, 0.391814467628, 0.391860813794, 0.391907007445, \
0.391953048599, 0.391998937274, 0.392044673487, 0.392090257256, \
0.392135688599, 0.392180967534, 0.392226094077, 0.392271068247, \
0.392315890061, 0.392360559535, 0.392405076688, 0.392449441536, \
0.392493654097, 0.392537714388, 0.392581622425, 0.392625378225, \
0.392668981806, 0.392712433185, 0.392755732377, 0.3927988794, \
0.392841874271, 0.392884717005, 0.392927407619, 0.392969946131, \
0.393012332555, 0.393054566909, 0.393096649209, 0.39313857947, \
0.39318035771, 0.393221983944, 0.393263458188, 0.393304780457, \
0.393345950769, 0.393386969139, 0.393427835581, 0.393468550113, \
0.39350911275, 0.393549523507, 0.3935897824, 0.393629889444, \
0.393669844655, 0.393709648048, 0.393749299638, 0.393788799441, \
0.393828147471, 0.393867343743, 0.393906388274, 0.393945281077, \
0.393984022167, 0.39402261156, 0.39406104927, 0.394099335311, \
0.394137469699, 0.394175452448, 0.394213283572, 0.394250963087, \
0.394288491006, 0.394325867343, 0.394363092114, 0.394400165331, \
0.39443708701, 0.394473857165, 0.394510475809, 0.394546942956, \
0.394583258621, 0.394619422817, 0.394655435558, 0.394691296858, \
0.39472700673, 0.394762565188, 0.394797972246, 0.394833227916, \
0.394868332214, 0.394903285151, 0.394938086741, 0.394972736997, \
0.395007235934, 0.395041583563, 0.395075779897, 0.395109824951, \
0.395143718736, 0.395177461266, 0.395211052553, 0.39524449261, \
0.39527778145, 0.395310919086, 0.39534390553, 0.395376740795, \
0.395409424892, 0.395441957836, 0.395474339637, 0.395506570308, \
0.395538649862, 0.39557057831, 0.395602355665, 0.395633981938, \
0.395665457143, 0.39569678129, 0.395727954391, 0.395758976459, \
0.395789847504, 0.39582056754, 0.395851136577, 0.395881554626, \
0.395911821701, 0.395941937811, 0.395971902968, 0.396001717184, \
0.39603138047, 0.396060892837, 0.396090254297, 0.396119464859, \
0.396148524537, 0.396177433339, 0.396206191278, 0.396234798364, \
0.396263254608, 0.396291560021, 0.396319714613, 0.396347718395, \
0.396375571378, 0.396403273572, 0.396430824988, 0.396458225635, \
0.396485475525, 0.396512574667, 0.396539523072, 0.39656632075, \
0.396592967711, 0.396619463965, 0.396645809523, 0.396672004393, \
0.396698048586, 0.396723942111, 0.396749684979, 0.396775277199, \
0.396800718781, 0.396826009734, 0.396851150068, 0.396876139792, \
0.396900978915, 0.396925667448, 0.396950205399, 0.396974592777, \
0.396998829592, 0.397022915853, 0.397046851568, 0.397070636747, \
0.397094271399, 0.397117755533, 0.397141089157, 0.39716427228, \
0.39718730491, 0.397210187058, 0.39723291873, 0.397255499936, \
0.397277930684, 0.397300210983, 0.39732234084, 0.397344320264, \
0.397366149264, 0.397387827847, 0.397409356022, 0.397430733797, \
0.397451961179, 0.397473038177, 0.397493964799, 0.397514741051, \
0.397535366943, 0.397555842482, 0.397576167675, 0.397596342531, \
0.397616367056, 0.397636241258, 0.397655965145, 0.397675538724, \
0.397694962001, 0.397714234986, 0.397733357684, 0.397752330103, \
0.39777115225, 0.397789824132, 0.397808345755, 0.397826717128, \
0.397844938256, 0.397863009146, 0.397880929806, 0.397898700242, \
0.39791632046, 0.397933790467, 0.39795111027, 0.397968279875, \
0.397985299288, 0.398002168517, 0.398018887566, 0.398035456442, \
0.398051875152, 0.398068143701, 0.398084262096, 0.398100230343, \
0.398116048447, 0.398131716414, 0.398147234251, 0.398162601962, \
0.398177819554, 0.398192887033, 0.398207804404, 0.398222571672, \
0.398237188843, 0.398251655922, 0.398265972915, 0.398280139828, \
0.398294156664, 0.39830802343, 0.398321740131, 0.398335306771, \
0.398348723357, 0.398361989891, 0.398375106381, 0.39838807283, \
0.398400889243, 0.398413555626, 0.398426071982, 0.398438438316, \
0.398450654634, 0.398462720938, 0.398474637235, 0.398486403528, \
0.398498019821, 0.39850948612, 0.398520802428, 0.398531968749, \
0.398542985087, 0.398553851447, 0.398564567832, 0.398575134247, \
0.398585550695, 0.398595817181, 0.398605933707, 0.398615900278, \
0.398625716898, 0.398635383569, 0.398644900296, 0.398654267082, \
0.39866348393, 0.398672550844, 0.398681467827, 0.398690234883, \
0.398698852014, 0.398707319224, 0.398715636516, 0.398723803893, \
0.398731821357, 0.398739688913, 0.398747406562, 0.398754974308, \
0.398762392153, 0.3987696601, 0.398776778152, 0.398783746311, \
0.398790564579, 0.39879723296, 0.398803751456, 0.398810120068, \
0.3988163388, 0.398822407654, 0.398828326631, 0.398834095735, \
0.398839714966, 0.398845184327, 0.39885050382, 0.398855673448, \
0.39886069321, 0.398865563111, 0.398870283151, 0.398874853332, \
0.398879273655, 0.398883544123, 0.398887664737, 0.398891635497, \
0.398895456407, 0.398899127466, 0.398902648676, 0.398906020039, \
0.398909241556, 0.398912313228, 0.398915235055, 0.398918007039, \
0.398920629181, 0.398923101482, 0.398925423943, 0.398927596563, \
0.398929619345, 0.398931492289, 0.398933215395, 0.398934788664, \
0.398936212097, 0.398937485693, 0.398938609454, 0.398939583379, \
0.39894040747, 0.398941081725, 0.398941606146, 0.398941980732, \
0.398942205484, 0.398942280401, 0.398942280401, 0.398942205484, \
0.398941980732, 0.398941606146, 0.398941081725, 0.39894040747, \
0.398939583379, 0.398938609454, 0.398937485693, 0.398936212097, \
0.398934788664, 0.398933215395, 0.398931492289, 0.398929619345, \
0.398927596563, 0.398925423943, 0.398923101482, 0.398920629181, \
0.398918007039, 0.398915235055, 0.398912313228, 0.398909241556, \
0.398906020039, 0.398902648676, 0.398899127466, 0.398895456407, \
0.398891635497, 0.398887664737, 0.398883544123, 0.398879273655, \
0.398874853332, 0.398870283151, 0.398865563111, 0.39886069321, \
0.398855673448, 0.39885050382, 0.398845184327, 0.398839714966, \
0.398834095735, 0.398828326631, 0.398822407654, 0.3988163388, \
0.398810120068, 0.398803751456, 0.39879723296, 0.398790564579, \
0.398783746311, 0.398776778152, 0.3987696601, 0.398762392153, \
0.398754974308, 0.398747406562, 0.398739688913, 0.398731821357, \
0.398723803893, 0.398715636516, 0.398707319224, 0.398698852014, \
0.398690234883, 0.398681467827, 0.398672550844, 0.39866348393, \
0.398654267082, 0.398644900296, 0.398635383569, 0.398625716898, \
0.398615900278, 0.398605933707, 0.398595817181, 0.398585550695, \
0.398575134247, 0.398564567832, 0.398553851447, 0.398542985087, \
0.398531968749, 0.398520802428, 0.39850948612, 0.398498019821, \
0.398486403528, 0.398474637235, 0.398462720938, 0.398450654634, \
0.398438438316, 0.398426071982, 0.398413555626, 0.398400889243, \
0.39838807283, 0.398375106381, 0.398361989891, 0.398348723357, \
0.398335306771, 0.398321740131, 0.39830802343, 0.398294156664, \
0.398280139828, 0.398265972915, 0.398251655922, 0.398237188843, \
0.398222571672, 0.398207804404, 0.398192887033, 0.398177819554, \
0.398162601962, 0.398147234251, 0.398131716414, 0.398116048447, \
0.398100230343, 0.398084262096, 0.398068143701, 0.398051875152, \
0.398035456442, 0.398018887566, 0.398002168517, 0.397985299288, \
0.397968279875, 0.39795111027, 0.397933790467, 0.39791632046, \
0.397898700242, 0.397880929806, 0.397863009146, 0.397844938256, \
0.397826717128, 0.397808345755, 0.397789824132, 0.39777115225, \
0.397752330103, 0.397733357684, 0.397714234986, 0.397694962001, \
0.397675538724, 0.397655965145, 0.397636241258, 0.397616367056, \
0.397596342531, 0.397576167675, 0.397555842482, 0.397535366943, \
0.397514741051, 0.397493964799, 0.397473038177, 0.397451961179, \
0.397430733797, 0.397409356022, 0.397387827847, 0.397366149264, \
0.397344320264, 0.39732234084, 0.397300210983, 0.397277930684, \
0.397255499936, 0.39723291873, 0.397210187058, 0.39718730491, \
0.39716427228, 0.397141089157, 0.397117755533, 0.397094271399, \
0.397070636747, 0.397046851568, 0.397022915853, 0.396998829592, \
0.396974592777, 0.396950205399, 0.396925667448, 0.396900978915, \
0.396876139792, 0.396851150068, 0.396826009734, 0.396800718781, \
0.396775277199, 0.396749684979, 0.396723942111, 0.396698048586, \
0.396672004393, 0.396645809523, 0.396619463965, 0.396592967711, \
0.39656632075, 0.396539523072, 0.396512574667, 0.396485475525, \
0.396458225635, 0.396430824988, 0.396403273572, 0.396375571378, \
0.396347718395, 0.396319714613, 0.396291560021, 0.396263254608, \
0.396234798364, 0.396206191278, 0.396177433339, 0.396148524537, \
0.396119464859, 0.396090254297, 0.396060892837, 0.39603138047, \
0.396001717184, 0.395971902968, 0.395941937811, 0.395911821701, \
0.395881554626, 0.395851136577, 0.39582056754, 0.395789847504, \
0.395758976459, 0.395727954391, 0.39569678129, 0.395665457143, \
0.395633981938, 0.395602355665, 0.39557057831, 0.395538649862, \
0.395506570308, 0.395474339637, 0.395441957836, 0.395409424892, \
0.395376740795, 0.39534390553, 0.395310919086, 0.39527778145, \
0.39524449261, 0.395211052553, 0.395177461266, 0.395143718736, \
0.395109824951, 0.395075779897, 0.395041583563, 0.395007235934, \
0.394972736997, 0.394938086741, 0.394903285151, 0.394868332214, \
0.394833227916, 0.394797972246, 0.394762565188, 0.39472700673, \
0.394691296858, 0.394655435558, 0.394619422817, 0.394583258621, \
0.394546942956, 0.394510475809, 0.394473857165, 0.39443708701, \
0.394400165331, 0.394363092114, 0.394325867343, 0.394288491006, \
0.394250963087, 0.394213283572, 0.394175452448, 0.394137469699, \
0.394099335311, 0.39406104927, 0.39402261156, 0.393984022167, \
0.393945281077, 0.393906388274, 0.393867343743, 0.393828147471, \
0.393788799441, 0.393749299638, 0.393709648048, 0.393669844655, \
0.393629889444, 0.3935897824, 0.393549523507, 0.39350911275, \
0.393468550113, 0.393427835581, 0.393386969139, 0.393345950769, \
0.393304780457, 0.393263458188, 0.393221983944, 0.39318035771, \
0.39313857947, 0.393096649209, 0.393054566909, 0.393012332555, \
0.392969946131, 0.392927407619, 0.392884717005, 0.392841874271, \
0.3927988794, 0.392755732377, 0.392712433185, 0.392668981806, \
0.392625378225, 0.392581622425, 0.392537714388, 0.392493654097, \
0.392449441536, 0.392405076688, 0.392360559535, 0.392315890061, \
0.392271068247, 0.392226094077, 0.392180967534, 0.392135688599, \
0.392090257256, 0.392044673487, 0.391998937274, 0.391953048599, \
0.391907007445, 0.391860813794, 0.391814467628, 0.39176796893, \
0.39172131768, 0.391674513861, 0.391627557456, 0.391580448445, \
0.39153318681, 0.391485772534, 0.391438205597, 0.391390485981, \
0.391342613669, 0.39129458864, 0.391246410877, 0.391198080361, \
0.391149597072, 0.391100960993, 0.391052172104, 0.391003230387, \
0.390954135822, 0.39090488839, 0.390855488072, 0.390805934848, \
0.390756228701, 0.390706369609, 0.390656357554, 0.390606192517, \
0.390555874477, 0.390505403415, 0.390454779312, 0.390404002147, \
0.390353071901, 0.390301988554, 0.390250752086, 0.390199362477, \
0.390147819707, 0.390096123756, 0.390044274604, 0.38999227223, \
0.389940116614, 0.389887807735, 0.389835345574, 0.389782730109, \
0.38972996132, 0.389677039187, 0.389623963688, 0.389570734804, \
0.389517352512, 0.389463816792, 0.389410127623, 0.389356284985, \
0.389302288855, 0.389248139213, 0.389193836037, 0.389139379307, \
0.389084769, 0.389030005096, 0.388975087572, 0.388920016407, \
0.38886479158, 0.388809413069, 0.388753880852, 0.388698194907, \
0.388642355212, 0.388586361746, 0.388530214485, 0.388473913409, \
0.388417458495, 0.38836084972, 0.388304087062, 0.3882471705, \
0.38819010001, 0.388132875569, 0.388075497156, 0.388017964748, \
0.387960278321, 0.387902437854, 0.387844443323, 0.387786294705, \
0.387727991978, 0.387669535118, 0.387610924101, 0.387552158906, \
0.387493239508, 0.387434165885, 0.387374938013, 0.387315555867, \
0.387256019426, 0.387196328665, 0.387136483561, 0.387076484089, \
0.387016330226, 0.386956021948, 0.386895559231, 0.386834942052, \
0.386774170385, 0.386713244207, 0.386652163494, 0.386590928221, \
0.386529538363, 0.386467993897, 0.386406294798, 0.386344441042, \
0.386282432603, 0.386220269457, 0.386157951579, 0.386095478944, \
0.386032851528, 0.385970069305, 0.38590713225, 0.385844040338, \
0.385780793545, 0.385717391843, 0.385653835209, 0.385590123617, \
0.385526257042, 0.385462235457, 0.385398058837, 0.385333727156, \
0.385269240389, 0.38520459851, 0.385139801493, 0.385074849312, \
0.38500974194, 0.384944479352, 0.384879061522, 0.384813488423, \
0.384747760028, 0.384681876312, 0.384615837248, 0.384549642809, \
0.384483292969, 0.384416787701, 0.384350126978, 0.384283310773, \
0.38421633906, 0.384149211811, 0.384081929, 0.384014490599, \
0.383946896581, 0.383879146918, 0.383811241584, 0.383743180551, \
0.383674963791, 0.383606591278, 0.383538062982, 0.383469378877, \
0.383400538934, 0.383331543127, 0.383262391426, 0.383193083804, \
0.383123620232, 0.383054000684, 0.382984225129, 0.382914293541, \
0.382844205891, 0.382773962149, 0.382703562288, 0.38263300628, \
0.382562294094, 0.382491425704, 0.382420401079, 0.382349220191, \
0.38227788301, 0.382206389509, 0.382134739657, 0.382062933426, \
0.381990970787, 0.381918851709, 0.381846576163, 0.381774144121, \
0.381701555551, 0.381628810426, 0.381555908715, 0.381482850387, \
0.381409635414, 0.381336263766, 0.381262735412, 0.381189050322, \
0.381115208466, 0.381041209813, 0.380967054335, 0.380892741999, \
0.380818272776, 0.380743646634, 0.380668863545, 0.380593923475, \
0.380518826396, 0.380443572275, 0.380368161083, 0.380292592787, \
0.380216867358, 0.380140984763, 0.380064944972, 0.379988747952, \
0.379912393674, 0.379835882104, 0.379759213212, 0.379682386967, \
0.379605403335, 0.379528262286, 0.379450963788, 0.379373507808, \
0.379295894315, 0.379218123276, 0.37914019466, 0.379062108433, \
0.378983864565, 0.378905463021, 0.378826903771, 0.37874818678, \
0.378669312017, 0.378590279449, 0.378511089043, 0.378431740766, \
0.378352234585, 0.378272570466, 0.378192748378, 0.378112768286, \
0.378032630158, 0.37795233396, 0.377871879658, 0.377791267219, \
0.377710496609, 0.377629567795, 0.377548480743, 0.377467235419, \
0.377385831788, 0.377304269818, 0.377222549474, 0.377140670721, \
0.377058633526, 0.376976437854, 0.376894083671, 0.376811570942, \
0.376728899632, 0.376646069707, 0.376563081133, 0.376479933873, \
0.376396627894, 0.37631316316, 0.376229539637, 0.376145757289, \
0.37606181608, 0.375977715976, 0.375893456941, 0.37580903894, \
0.375724461937, 0.375639725896, 0.375554830782, 0.375469776558, \
0.375384563189, 0.37529919064, 0.375213658872, 0.375127967852, \
0.375042117542, 0.374956107906, 0.374869938907, 0.374783610509, \
0.374697122676, 0.374610475371, 0.374523668557, 0.374436702197, \
0.374349576255, 0.374262290692, 0.374174845473, 0.37408724056, \
0.373999475916, 0.373911551503, 0.373823467284, 0.373735223221, \
0.373646819277, 0.373558255414, 0.373469531595, 0.373380647781, \
0.373291603934, 0.373202400017, 0.373113035991, 0.373023511819, \
0.372933827461, 0.372843982879, 0.372753978036, 0.372663812892, \
0.372573487408, 0.372483001547, 0.372392355268, 0.372301548534, \
0.372210581305, 0.372119453543, 0.372028165207, 0.371936716259, \
0.371845106659, 0.371753336368, 0.371661405347, 0.371569313555, \
0.371477060953, 0.371384647502, 0.371292073161, 0.37119933789, \
0.371106441649, 0.371013384399, 0.370920166098, 0.370826786707, \
0.370733246185, 0.370639544492, 0.370545681587, 0.370451657429, \
0.370357471977, 0.370263125191, 0.37016861703, 0.370073947452, \
0.369979116417, 0.369884123883, 0.369788969809, 0.369693654153, \
0.369598176874, 0.36950253793, 0.36940673728, 0.369310774882, \
0.369214650693, 0.369118364673, 0.369021916778, 0.368925306967, \
0.368828535197, 0.368731601427, 0.368634505613, 0.368537247713, \
0.368439827684, 0.368342245484, 0.36824450107, 0.368146594399, \
0.368048525428, 0.367950294114, 0.367851900414, 0.367753344284, \
0.36765462568, 0.367555744561, 0.367456700881, 0.367357494598, \
0.367258125667, 0.367158594044, 0.367058899687, 0.36695904255, \
0.366859022589, 0.366758839761, 0.36665849402, 0.366557985323, \
0.366457313625, 0.366356478881, 0.366255481046, 0.366154320077, \
0.366052995927, 0.365951508551, 0.365849857906, 0.365748043945, \
0.365646066623, 0.365543925894, 0.365441621714, 0.365339154037, \
0.365236522816, 0.365133728007, 0.365030769562, 0.364927647437, \
0.364824361585, 0.364720911959, 0.364617298515, 0.364513521204, \
0.364409579981, 0.364305474799, 0.364201205612, 0.364096772372, \
0.363992175032, 0.363887413546, 0.363782487867, 0.363677397947, \
0.36357214374, 0.363466725196, 0.36336114227, 0.363255394914, \
0.363149483079, 0.363043406718, 0.362937165783, 0.362830760226, \
0.36272419, 0.362617455054, 0.362510555343, 0.362403490816, \
0.362296261425, 0.362188867123, 0.362081307859, 0.361973583586, \
0.361865694253, 0.361757639813, 0.361649420216, 0.361541035413, \
0.361432485354, 0.36132376999, 0.361214889271, 0.361105843148, \
0.360996631571, 0.360887254489, 0.360777711854, 0.360668003615, \
0.360558129721, 0.360448090122, 0.360337884769, 0.36022751361, \
0.360116976594, 0.360006273672, 0.359895404792, 0.359784369903, \
0.359673168954, 0.359561801893, 0.359450268671, 0.359338569235, \
0.359226703533, 0.359114671515, 0.359002473128, 0.358890108321, \
0.358777577041, 0.358664879237, 0.358552014857, 0.358438983847, \
0.358325786157, 0.358212421732, 0.358098890522, 0.357985192472, \
0.357871327531, 0.357757295645, 0.357643096761, 0.357528730826, \
0.357414197787, 0.35729949759, 0.357184630183, 0.35706959551, \
0.356954393519, 0.356839024156, 0.356723487367, 0.356607783098, \
0.356491911294, 0.356375871902, 0.356259664866, 0.356143290133, \
0.356026747648, 0.355910037356, 0.355793159202, 0.355676113131, \
0.355558899089, 0.355441517019, 0.355323966867, 0.355206248577, \
0.355088362094, 0.354970307363, 0.354852084326, 0.354733692928, \
0.354615133114, 0.354496404828, 0.354377508012, 0.35425844261, \
0.354139208567, 0.354019805826, 0.353900234329, 0.35378049402, \
0.353660584842, 0.353540506738, 0.353420259651, 0.353299843523, \
0.353179258297, 0.353058503916, 0.352937580322, 0.352816487456, \
0.352695225262, 0.35257379368, 0.352452192653, 0.352330422124, \
0.352208482032, 0.35208637232, 0.351964092929, 0.3518416438, \
0.351719024875, 0.351596236094, 0.351473277398, 0.351350148728, \
0.351226850025, 0.351103381229, 0.350979742281, 0.35085593312, \
0.350731953687, 0.350607803923, 0.350483483765, 0.350358993156, \
0.350234332033, 0.350109500337, 0.349984498007, 0.349859324983, \
0.349733981203, 0.349608466606, 0.349482781131, 0.349356924717, \
0.349230897303, 0.349104698827, 0.348978329228, 0.348851788443, \
0.348725076411, 0.348598193069, 0.348471138357, 0.34834391221, \
0.348216514568, 0.348088945366, 0.347961204544, 0.347833292037, \
0.347705207784, 0.34757695172, 0.347448523782, 0.347319923909, \
0.347191152035, 0.347062208097, 0.346933092032, 0.346803803775, \
0.346674343264, 0.346544710432, 0.346414905218, 0.346284927555, \
0.346154777379, 0.346024454627, 0.345893959232, 0.34576329113, \
0.345632450257, 0.345501436546, 0.345370249932, 0.345238890351, \
0.345107357736, 0.344975652022, 0.344843773142, 0.344711721031, \
0.344579495623, 0.344447096851, 0.344314524649, 0.344181778951, \
0.344048859689, 0.343915766796, 0.343782500207, 0.343649059853, \
0.343515445668, 0.343381657583, 0.343247695533, 0.343113559448, \
0.342979249261, 0.342844764904, 0.342710106308, 0.342575273407, \
0.342440266131, 0.342305084412, 0.34216972818, 0.342034197368, \
0.341898491907, 0.341762611726, 0.341626556758, 0.341490326932, \
0.341353922179, 0.34121734243, 0.341080587614, 0.340943657662, \
0.340806552503, 0.340669272067, 0.340531816284, 0.340394185084, \
0.340256378395, 0.340118396147, 0.339980238268, 0.339841904688, \
0.339703395335, 0.339564710138, 0.339425849025, 0.339286811925, \
0.339147598766, 0.339008209475, 0.33886864398, 0.33872890221, \
0.338588984091, 0.33844888955, 0.338308618517, 0.338168170916, \
0.338027546675, 0.337886745721, 0.337745767981, 0.33760461338, \
0.337463281846, 0.337321773304, 0.33718008768, 0.3370382249, \
0.336896184891, 0.336753967576, 0.336611572882, 0.336469000734, \
0.336326251057, 0.336183323776, 0.336040218815, 0.335896936099, \
0.335753475553, 0.335609837101, 0.335466020667, 0.335322026174, \
0.335177853547, 0.33503350271, 0.334888973585, 0.334744266096, \
0.334599380166, 0.334454315719, 0.334309072676, 0.334163650961, \
0.334018050496, 0.333872271204, 0.333726313006, 0.333580175825, \
0.333433859582, 0.3332873642, 0.333140689599, 0.332993835701, \
0.332846802427, 0.332699589699, 0.332552197437, 0.332404625561, \
0.332256873993, 0.332108942652, 0.331960831459, 0.331812540334, \
0.331664069197, 0.331515417967, 0.331366586564, 0.331217574907, \
0.331068382916, 0.330919010508, 0.330769457604, 0.330619724122, \
0.33046980998, 0.330319715097, 0.330169439391, 0.330018982779, \
0.32986834518, 0.329717526511, 0.32956652669, 0.329415345633, \
0.329263983259, 0.329112439483, 0.328960714223, 0.328808807396, \
0.328656718917, 0.328504448702, 0.328351996669, 0.328199362732, \
0.328046546808, 0.327893548812, 0.327740368658, 0.327587006263, \
0.327433461542, 0.327279734408, 0.327125824778, 0.326971732564, \
0.326817457682, 0.326663000045, 0.326508359567, 0.326353536163, \
0.326198529745, 0.326043340226, 0.325887967521, 0.325732411541, \
0.3255766722, 0.325420749411, 0.325264643085, 0.325108353134, \
0.324951879472, 0.324795222009, 0.324638380658, 0.324481355329, \
0.324324145934, 0.324166752385, 0.324009174591, 0.323851412464, \
0.323693465915, 0.323535334852, 0.323377019188, 0.323218518831, \
0.323059833691, 0.322900963677, 0.322741908701, 0.322582668669, \
0.322423243491, 0.322263633077, 0.322103837334, 0.321943856171, \
0.321783689496, 0.321623337217, 0.321462799242, 0.321302075479, \
0.321141165834, 0.320980070215, 0.320818788529, 0.320657320683, \
0.320495666583, 0.320333826135, 0.320171799247, 0.320009585823, \
0.31984718577, 0.319684598993, 0.319521825397, 0.319358864888, \
0.31919571737, 0.319032382749, 0.318868860929, 0.318705151813, \
0.318541255307, 0.318377171315, 0.318212899739, 0.318048440483, \
0.317883793451, 0.317718958546, 0.31755393567, 0.317388724726, \
0.317223325617, 0.317057738245, 0.316891962512, 0.31672599832, \
0.31655984557, 0.316393504163, 0.316226974001, 0.316060254985, \
0.315893347016, 0.315726249993, 0.315558963818, 0.31539148839, \
0.315223823609, 0.315055969374, 0.314887925586, 0.314719692144, \
0.314551268945, 0.31438265589, 0.314213852877, 0.314044859803, \
0.313875676567, 0.313706303067, 0.313536739201, 0.313366984865, \
0.313197039958, 0.313026904375, 0.312856578014, 0.312686060772, \
0.312515352544, 0.312344453226, 0.312173362715, 0.312002080905, \
0.311830607693, 0.311658942973, 0.31148708664, 0.31131503859, \
0.311142798715, 0.310970366911, 0.310797743071, 0.31062492709, \
0.31045191886, 0.310278718275, 0.310105325228, 0.309931739611, \
0.309757961317, 0.309583990239, 0.309409826268, 0.309235469297, \
0.309060919216, 0.308886175918, 0.308711239292, 0.308536109231, \
0.308360785624, 0.308185268362, 0.308009557336, 0.307833652434, \
0.307657553547, 0.307481260563, 0.307304773373, 0.307128091864, \
0.306951215926, 0.306774145446, 0.306596880314, 0.306419420416, \
0.306241765641, 0.306063915875, 0.305885871006, 0.305707630921, \
0.305529195506, 0.305350564647, 0.305171738232, 0.304992716144, \
0.304813498271, 0.304634084497, 0.304454474707, 0.304274668786, \
0.304094666619, 0.30391446809, 0.303734073083, 0.303553481481, \
0.303372693168, 0.303191708028, 0.303010525942, 0.302829146795, \
0.302647570468, 0.302465796843, 0.302283825802, 0.302101657227, \
0.301919290999, 0.301736726999, 0.301553965108, 0.301371005207, \
0.301187847175, 0.301004490893, 0.30082093624, 0.300637183096, \
0.300453231339, 0.30026908085, 0.300084731505, 0.299900183184, \
0.299715435764, 0.299530489123, 0.299345343139, 0.299159997689, \
0.298974452649, 0.298788707897, 0.298602763308, 0.298416618759, \
0.298230274125, 0.298043729282, 0.297856984104, 0.297670038468, \
0.297482892246, 0.297295545315, 0.297107997546, 0.296920248815, \
0.296732298995, 0.296544147958, 0.296355795578, 0.296167241727, \
0.295978486277, 0.295789529101, 0.295600370069, 0.295411009053, \
0.295221445924, 0.295031680553, 0.294841712811, 0.294651542566, \
0.29446116969, 0.294270594051, 0.294079815519, 0.293888833963, \
0.293697649251, 0.293506261251, 0.293314669832, 0.29312287486, \
0.292930876204, 0.292738673731, 0.292546267306, 0.292353656796, \
0.292160842068, 0.291967822987, 0.291774599419, 0.291581171228, \
0.29138753828, 0.291193700439, 0.290999657568, 0.290805409533, \
0.290610956195, 0.290416297419, 0.290221433068, 0.290026363003, \
0.289831087087, 0.289635605182, 0.289439917148, 0.289244022849, \
0.289047922144, 0.288851614893, 0.288655100957, 0.288458380196, \
0.28826145247, 0.288064317636, 0.287866975555, 0.287669426085, \
0.287471669084, 0.287273704409, 0.287075531918, 0.286877151468, \
0.286678562916, 0.286479766119, 0.286280760932, 0.286081547211, \
0.285882124811, 0.285682493588, 0.285482653395, 0.285282604088, \
0.285082345521, 0.284881877546, 0.284681200017, 0.284480312788, \
0.284279215709, 0.284077908635, 0.283876391415, 0.283674663903, \
0.283472725948, 0.283270577402, 0.283068218114, 0.282865647935, \
0.282662866715, 0.282459874302, 0.282256670545, 0.282053255293, \
0.281849628394, 0.281645789695, 0.281441739045, 0.281237476289, \
0.281033001275, 0.280828313848, 0.280623413854, 0.280418301139, \
0.280212975549, 0.280007436926, 0.279801685116, 0.279595719963, \
0.27938954131, 0.279183149, 0.278976542875, 0.278769722779, \
0.278562688553, 0.278355440038, 0.278147977076, 0.277940299507, \
0.277732407172, 0.27752429991, 0.277315977561, 0.277107439965, \
0.276898686958, 0.276689718381, 0.276480534071, 0.276271133865, \
0.2760615176, 0.275851685114, 0.275641636241, 0.275431370818, \
0.275220888681, 0.275010189664, 0.274799273601, 0.274588140328, \
0.274376789677, 0.274165221481, 0.273953435575, 0.273741431789, \
0.273529209956, 0.273316769908, 0.273104111476, 0.272891234489, \
0.272678138779, 0.272464824175, 0.272251290507, 0.272037537604, \
0.271823565293, 0.271609373403, 0.271394961762, 0.271180330196, \
0.270965478533, 0.270750406598, 0.270535114218, 0.270319601217, \
0.270103867421, 0.269887912653, 0.269671736739, 0.2694553395, \
0.269238720761, 0.269021880344, 0.268804818071, 0.268587533763, \
0.268370027242, 0.268152298328, 0.267934346842, 0.267716172603, \
0.26749777543, 0.267279155143, 0.26706031156, 0.266841244498, \
0.266621953774, 0.266402439207, 0.266182700611, 0.265962737803, \
0.265742550598, 0.265522138811, 0.265301502256, 0.265080640748, \
0.2648595541, 0.264638242124, 0.264416704633, 0.264194941439, \
0.263972952353, 0.263750737186, 0.263528295749, 0.263305627851, \
0.263082733302, 0.26285961191, 0.262636263484, 0.262412687831, \
0.26218888476, 0.261964854076, 0.261740595585, 0.261516109095, \
0.261291394408, 0.261066451331, 0.260841279668, 0.260615879221, \
0.260390249794, 0.260164391189, 0.259938303209, 0.259711985655, \
0.259485438327, 0.259258661026, 0.259031653551, 0.258804415703, \
0.258576947278, 0.258349248077, 0.258121317895, 0.25789315653, \
0.257664763779, 0.257436139437, 0.2572072833, 0.256978195162, \
0.256748874817, 0.256519322059, 0.256289536681, 0.256059518475, \
0.255829267233, 0.255598782747, 0.255368064807, 0.255137113202, \
0.254905927723, 0.254674508159, 0.254442854297, 0.254210965925, \
0.253978842831, 0.253746484801, 0.253513891621, 0.253281063075, \
0.253047998949, 0.252814699027, 0.252581163092, 0.252347390927, \
0.252113382314, 0.251879137035, 0.251644654871, 0.251409935601, \
0.251174979007, 0.250939784865, 0.250704352956, 0.250468683057, \
0.250232774945, 0.249996628397, 0.249760243188, 0.249523619094, \
0.249286755888, 0.249049653346, 0.24881231124, 0.248574729343, \
0.248336907427, 0.248098845263, 0.247860542621, 0.247621999273, \
0.247383214985, 0.247144189529, 0.24690492267, 0.246665414177, \
0.246425663816, 0.246185671353, 0.245945436553, 0.24570495918, \
0.245464238999, 0.245223275772, 0.244982069261, 0.244740619229, \
0.244498925435, 0.244256987642, 0.244014805607, 0.24377237909, \
0.243529707849, 0.243286791641, 0.243043630222, 0.24280022335, \
0.242556570778, 0.242312672262, 0.242068527555, 0.241824136409, \
0.241579498578, 0.241334613813, 0.241089481863, 0.240844102481, \
0.240598475413, 0.240352600409, 0.240106477217, 0.239860105583, \
0.239613485254, 0.239366615975, 0.23911949749, 0.238872129544, \
0.238624511878, 0.238376644236, 0.238128526359, 0.237880157987, \
0.237631538861, 0.237382668719, 0.2371335473, 0.23688417434, \
0.236634549577, 0.236384672746, 0.236134543582, 0.23588416182, \
0.235633527192, 0.235382639431, 0.235131498268, 0.234880103436, \
0.234628454662, 0.234376551677, 0.234124394209, 0.233871981984, \
0.233619314731, 0.233366392173, 0.233113214036, 0.232859780045, \
0.232606089921, 0.232352143387, 0.232097940164, 0.231843479974, \
0.231588762534, 0.231333787564, 0.231078554782, 0.230823063904, \
0.230567314646, 0.230311306723, 0.23005503985, 0.229798513738, \
0.229541728101, 0.22928468265, 0.229027377095, 0.228769811145, \
0.22851198451, 0.228253896895, 0.227995548009, 0.227736937556, \
0.227478065241, 0.227218930768, 0.22695953384, 0.226699874157, \
0.226439951422, 0.226179765333, 0.22591931559, 0.22565860189, \
0.22539762393, 0.225136381406, 0.224874874012, 0.224613101442, \
0.224351063389, 0.224088759545, 0.2238261896, 0.223563353244, \
0.223300250165, 0.223036880051, 0.222773242589, 0.222509337463, \
0.222245164359, 0.22198072296, 0.221716012947, 0.221451034002, \
0.221185785805, 0.220920268035, 0.220654480371, 0.220388422488, \
0.220122094062, 0.219855494768, 0.21958862428, 0.21932148227, \
0.219054068409, 0.218786382367, 0.218518423813, 0.218250192415, \
0.21798168784, 0.217712909752, 0.217443857818, 0.217174531699, \
0.216904931057, 0.216635055555, 0.21636490485, 0.216094478602, \
0.215823776468, 0.215552798104, 0.215281543164, 0.215010011303, \
0.214738202173, 0.214466115425, 0.214193750709, 0.213921107674, \
0.213648185967, 0.213374985234, 0.213101505121, 0.21282774527, \
0.212553705325, 0.212279384926, 0.212004783714, 0.211729901327, \
0.211454737402, 0.211179291575, 0.21090356348, 0.210627552752, \
0.210351259021, 0.210074681919, 0.209797821075, 0.209520676117, \
0.209243246671, 0.208965532363, 0.208687532816, 0.208409247653, \
0.208130676495, 0.207851818961, 0.20757267467, 0.207293243239, \
0.207013524284, 0.206733517417, 0.206453222252, 0.2061726384, \
0.205891765471, 0.205610603072, 0.205329150811, 0.205047408293, \
0.204765375121, 0.204483050898, 0.204200435225, 0.203917527701, \
0.203634327924, 0.20335083549, 0.203067049994, 0.202782971029, \
0.202498598186, 0.202213931056, 0.201928969228, 0.201643712287, \
0.20135815982, 0.20107231141, 0.20078616664, 0.200499725089, \
0.200212986337, 0.199925949961, 0.199638615537, 0.199350982639, \
0.199063050838, 0.198774819706, 0.198486288812, 0.198197457722, \
0.197908326003, 0.197618893218, 0.197329158929, 0.197039122697, \
0.196748784081, 0.196458142637, 0.196167197921, 0.195875949485, \
0.195584396882, 0.195292539661, 0.19500037737, 0.194707909556, \
0.194415135763, 0.194122055533, 0.193828668408, 0.193534973925, \
0.193240971621, 0.192946661033, 0.192652041693, 0.192357113132, \
0.192061874879, 0.191766326463, 0.191470467408, 0.191174297238, \
0.190877815475, 0.190581021638, 0.190283915244, 0.189986495811, \
0.18968876285, 0.189390715873, 0.189092354391, 0.188793677911, \
0.188494685937, 0.188195377973, 0.187895753521, 0.18759581208, \
0.187295553146, 0.186994976215, 0.186694080779, 0.186392866329, \
0.186091332353, 0.185789478338, 0.185487303768, 0.185184808123, \
0.184881990885, 0.18457885153, 0.184275389534, 0.183971604368, \
0.183667495505, 0.183363062412, 0.183058304554, 0.182753221396, \
0.182447812399, 0.182142077022, 0.18183601472, 0.181529624949, \
0.18122290716, 0.180915860803, 0.180608485323, 0.180300780166, \
0.179992744774, 0.179684378585, 0.179375681037, 0.179066651564, \
0.178757289598, 0.178447594568, 0.178137565902, 0.177827203022, \
0.177516505351, 0.177205472308, 0.176894103309, 0.176582397766, \
0.176270355092, 0.175957974695, 0.175645255979, 0.175332198348, \
0.175018801202, 0.174705063937, 0.174390985949, 0.174076566628, \
0.173761805364, 0.173446701542, 0.173131254545, 0.172815463755, \
0.172499328546, 0.172182848295, 0.171866022373, 0.171548850146, \
0.171231330982, 0.170913464242, 0.170595249286, 0.170276685469, \
0.169957772145, 0.169638508664, 0.169318894373, 0.168998928615, \
0.168678610731, 0.168357940058, 0.16803691593, 0.167715537679, \
0.167393804631, 0.167071716111, 0.16674927144, 0.166426469936, \
0.166103310913, 0.165779793681, 0.165455917548, 0.165131681818, \
0.164807085792, 0.164482128766, 0.164156810034, 0.163831128886, \
0.163505084608, 0.163178676483, 0.162851903789, 0.162524765803, \
0.162197261796, 0.161869391036, 0.161541152788, 0.161212546311, \
0.160883570863, 0.160554225695, 0.160224510058, 0.159894423197, \
0.159563964351, 0.159233132759, 0.158901927654, 0.158570348265, \
0.158238393817, 0.15790606353, 0.157573356623, 0.157240272307, \
0.156906809791, 0.156572968279, 0.156238746972, 0.155904145066, \
0.155569161751, 0.155233796214, 0.15489804764, 0.154561915205, \
0.154225398084, 0.153888495445, 0.153551206454, 0.153213530271, \
0.152875466051, 0.152537012946, 0.152198170101, 0.151858936658, \
0.151519311753, 0.151179294519, 0.150838884082, 0.150498079565, \
0.150156880085, 0.149815284753, 0.149473292678, 0.149130902961, \
0.148788114699, 0.148444926984, 0.148101338903, 0.147757349537, \
0.147412957962, 0.147068163249, 0.146722964463, 0.146377360665, \
0.146031350908, 0.145684934242, 0.14533810971, 0.144990876349, \
0.144643233193, 0.144295179266, 0.14394671359, 0.143597835179, \
0.143248543043, 0.142898836183, 0.142548713597, 0.142198174276, \
0.141847217205, 0.141495841361, 0.141144045718, 0.140791829241, \
0.14043919089, 0.140086129618, 0.139732644373, 0.139378734095, \
0.139024397717, 0.138669634167, 0.138314442365, 0.137958821225, \
0.137602769653, 0.137246286551, 0.13688937081, 0.136532021316, \
0.13617423695, 0.135816016581, 0.135457359075, 0.13509826329, \
0.134738728074, 0.13437875227, 0.134018334713, 0.13365747423, \
0.133296169642, 0.132934419758, 0.132572223385, 0.132209579317, \
0.131846486342, 0.131482943242, 0.131118948787, 0.130754501741, \
0.13038960086, 0.130024244891, 0.129658432571, 0.129292162631, \
0.128925433793, 0.128558244767, 0.128190594259, 0.127822480963, \
0.127453903564, 0.127084860738, 0.126715351154, 0.126345373469, \
0.125974926331, 0.12560400838, 0.125232618245, 0.124860754545, \
0.12448841589, 0.124115600881, 0.123742308106, 0.123368536146, \
0.12299428357, 0.122619548937, 0.122244330795, 0.121868627682, \
0.121492438126, 0.121115760642, 0.120738593735, 0.120360935901, \
0.119982785621, 0.119604141367, 0.1192250016, 0.118845364768, \
0.118465229306, 0.118084593641, 0.117703456185, 0.117321815339, \
0.11693966949, 0.116557017014, 0.116173856276, 0.115790185624, \
0.115406003396, 0.115021307918, 0.1146360975, 0.11425037044, \
0.113864125022, 0.113477359516, 0.113090072181, 0.112702261257, \
0.112313924974, 0.111925061545, 0.111535669171, 0.111145746034, \
0.110755290307, 0.110364300142, 0.109972773679, 0.109580709043, \
0.10918810434, 0.108794957663, 0.108401267088, 0.108007030675, \
0.107612246467, 0.10721691249, 0.106821026753, 0.106424587249, \
0.106027591953, 0.10563003882, 0.105231925791, 0.104833250787, \
0.10443401171, 0.104034206444, 0.103633832853, 0.103232888785, \
0.102831372066, 0.102429280502, 0.102026611881, 0.101623363968, \
0.10121953451, 0.100815121233, 0.100410121841, 0.100004534016, \
0.0995983554201, 0.0991915836918, 0.098784216448, 0.098376251283, \
0.0979676857678, 0.0975585174503, 0.0971487438546, 0.0967383624809, \
0.0963273708049, 0.0959157662776, 0.095503546325, 0.0950907083474, \
0.0946772497194, 0.0942631677893, 0.0938484598786, 0.0934331232819, \
0.0930171552661, 0.0926005530704, 0.0921833139053, 0.0917654349529, \
0.0913469133655, 0.0909277462662, 0.0905079307475, 0.0900874638713, \
0.0896663426683, 0.0892445641375, 0.0888221252457, 0.0883990229268, \
0.0879752540816, 0.0875508155769, 0.0871257042449, 0.0866999168832, \
0.0862734502535, 0.0858463010813, 0.0854184660553, 0.0849899418268, \
0.084560725009, 0.0841308121761, 0.0837001998631, 0.0832688845646, \
0.0828368627345, 0.082404130785, 0.0819706850859, 0.081536521964, \
0.081101637702, 0.080666028538, 0.0802296906646, 0.0797926202279, \
0.0793548133268, 0.078916266012, 0.0784769742851, 0.0780369340979, \
0.077596141351, 0.0771545918932, 0.0767122815202, 0.076269205974, \
0.0758253609412, 0.0753807420524, 0.0749353448811, 0.074489164942, \
0.0740421976905, 0.0735944385211, 0.0731458827663, 0.0726965256949, \
0.0722463625114, 0.0717953883543, 0.0713435982942, 0.0708909873334, \
0.0704375504035, 0.0699832823643, 0.0695281780022, 0.0690722320286, \
0.0686154390782, 0.0681577937072, 0.0676992903918, 0.0672399235259, \
0.0667796874201, 0.0663185762986, 0.0658565842984, 0.0653937054663, \
0.0649299337573, 0.0644652630325, 0.0639996870564, 0.0635331994951, \
0.0630657939132, 0.0625974637723, 0.0621282024276, 0.0616580031256, \
0.0611868590013, 0.0607147630756, 0.060241708252, 0.0597676873138, \
0.059292692921, 0.058816717607, 0.0583397537752, 0.0578617936955, \
0.0573828295011, 0.0569028531841, 0.0564218565922, 0.0559398314244, \
0.0554567692269, 0.054972661389, 0.0544874991381, 0.0540012735356, \
0.053513975472, 0.0530255956613, 0.0525361246366, 0.052045552744, \
0.0515538701374, 0.0510610667724, 0.0505671324, 0.0500720565609, \
0.0495758285778, 0.0490784375496, 0.0485798723434, 0.048080121587, \
0.0475791736615, 0.0470770166928, 0.0465736385427, 0.0460690268006, \
0.0455631687734, 0.0450560514761, 0.0445476616215, 0.0440379856092, \
0.0435270095144, 0.0430147190762, 0.0425010996849, 0.0419861363691, \
0.0414698137822, 0.0409521161875, 0.0404330274434, 0.0399125309876, \
0.0393906098197, 0.0388672464844, 0.0383424230521, 0.0378161210995, \
0.037288321689, 0.0367590053466, 0.0362281520387, 0.0356957411476, \
0.0351617514458, 0.034626161068, 0.0340889474822, 0.0335500874589, \
0.033009557038, 0.032467331494, 0.0319233852988, 0.0313776920821, \
0.0308302245895, 0.0302809546372, 0.0297298530646, 0.0291768896823, \
0.0286220332182, 0.0280652512583, 0.0275065101844, 0.0269457751062, \
0.02638300979, 0.0258181765805, 0.0252512363179, 0.0246821482486, \
0.0241108699283, 0.0235373571186, 0.0229615636755, 0.0223834414285, \
0.0218029400512, 0.0212200069209, 0.0206345869682, 0.0200466225144, \
0.0194560530967, 0.0188628152806, 0.0182668424588, 0.0176680646372, \
0.0170664082066, 0.016461795704, 0.0158541455627, 0.015243371858, \
0.0146293840521, 0.0140120867521, 0.0133913794939, 0.0127671565806, \
0.0121393070128, 0.0115077145716, 0.0108722581496, 0.0102328124764, \
0.00958924947315, 0.00894144061242, 0.00828926090091, 0.00763259551896, \
0.00697135089651, 0.00630547338383, 0.00563498133279, 0.00496002177703, \
0.00428097439553, 0.00359865177269, 0.00291471045349, 0.00223256762114, \
0.00155968193081 ])
ncell = array([
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, \
2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, \
5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 8, 8, \
8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 11, \
11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 14, \
14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 17, 17, \
17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 20, 20, 20, \
20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, \
23, 23, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, \
27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 30, 30, \
30, 30, 30, 30, 31, 31, 31, 31, 31, 31, 32, 32, 32, 32, 32, 32, 33, 33, 33, 33, \
33, 34, 34, 34, 34, 34, 34, 35, 35, 35, 35, 35, 35, 36, 36, 36, 36, 36, 37, 37, \
37, 37, 37, 37, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 39, 40, 40, 40, 40, 40, \
40, 41, 41, 41, 41, 41, 42, 42, 42, 42, 42, 43, 43, 43, 43, 43, 43, 44, 44, 44, \
44, 44, 45, 45, 45, 45, 45, 46, 46, 46, 46, 46, 46, 47, 47, 47, 47, 47, 48, 48, \
48, 48, 48, 49, 49, 49, 49, 49, 50, 50, 50, 50, 50, 50, 51, 51, 51, 51, 51, 52, \
52, 52, 52, 52, 53, 53, 53, 53, 53, 54, 54, 54, 54, 54, 55, 55, 55, 55, 55, 56, \
56, 56, 56, 56, 57, 57, 57, 57, 57, 58, 58, 58, 58, 58, 59, 59, 59, 59, 59, 60, \
60, 60, 60, 60, 61, 61, 61, 61, 61, 62, 62, 62, 62, 63, 63, 63, 63, 63, 64, 64, \
64, 64, 64, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 67, 67, 67, 67, 68, 68, 68, \
68, 68, 69, 69, 69, 69, 69, 70, 70, 70, 70, 71, 71, 71, 71, 71, 72, 72, 72, 72, \
72, 73, 73, 73, 73, 74, 74, 74, 74, 74, 75, 75, 75, 75, 76, 76, 76, 76, 76, 77, \
77, 77, 77, 78, 78, 78, 78, 78, 79, 79, 79, 79, 80, 80, 80, 80, 80, 81, 81, 81, \
81, 82, 82, 82, 82, 83, 83, 83, 83, 83, 84, 84, 84, 84, 85, 85, 85, 85, 86, 86, \
86, 86, 86, 87, 87, 87, 87, 88, 88, 88, 88, 89, 89, 89, 89, 89, 90, 90, 90, 90, \
91, 91, 91, 91, 92, 92, 92, 92, 93, 93, 93, 93, 94, 94, 94, 94, 95, 95, 95, 95, \
95, 96, 96, 96, 96, 97, 97, 97, 97, 98, 98, 98, 98, 99, 99, 99, 99, 100, 100, 100, \
100, 101, 101, 101, 101, 102, 102, 102, 102, 103, 103, 103, 103, 104, 104, 104, 104, 105, 105, 105, \
105, 106, 106, 106, 106, 107, 107, 107, 107, 108, 108, 108, 108, 109, 109, 109, 109, 110, 110, 110, \
110, 111, 111, 111, 111, 112, 112, 112, 113, 113, 113, 113, 114, 114, 114, 114, 115, 115, 115, 115, \
116, 116, 116, 116, 117, 117, 117, 117, 118, 118, 118, 119, 119, 119, 119, 120, 120, 120, 120, 121, \
121, 121, 121, 122, 122, 122, 123, 123, 123, 123, 124, 124, 124, 124, 125, 125, 125, 126, 126, 126, \
126, 127, 127, 127, 127, 128, 128, 128, 129, 129, 129, 129, 130, 130, 130, 130, 131, 131, 131, 132, \
132, 132, 132, 133, 133, 133, 134, 134, 134, 134, 135, 135, 135, 136, 136, 136, 136, 137, 137, 137, \
137, 138, 138, 138, 139, 139, 139, 139, 140, 140, 140, 141, 141, 141, 141, 142, 142, 142, 143, 143, \
143, 144, 144, 144, 144, 145, 145, 145, 146, 146, 146, 146, 147, 147, 147, 148, 148, 148, 148, 149, \
149, 149, 150, 150, 150, 151, 151, 151, 151, 152, 152, 152, 153, 153, 153, 154, 154, 154, 154, 155, \
155, 155, 156, 156, 156, 157, 157, 157, 157, 158, 158, 158, 159, 159, 159, 160, 160, 160, 160, 161, \
161, 161, 162, 162, 162, 163, 163, 163, 164, 164, 164, 164, 165, 165, 165, 166, 166, 166, 167, 167, \
167, 168, 168, 168, 169, 169, 169, 169, 170, 170, 170, 171, 171, 171, 172, 172, 172, 173, 173, 173, \
174, 174, 174, 175, 175, 175, 176, 176, 176, 176, 177, 177, 177, 178, 178, 178, 179, 179, 179, 180, \
180, 180, 181, 181, 181, 182, 182, 182, 183, 183, 183, 184, 184, 184, 185, 185, 185, 186, 186, 186, \
187, 187, 187, 188, 188, 188, 189, 189, 189, 190, 190, 190, 191, 191, 191, 192, 192, 192, 193, 193, \
193, 194, 194, 194, 195, 195, 195, 196, 196, 196, 197, 197, 197, 198, 198, 198, 199, 199, 199, 200, \
200, 200, 201, 201, 201, 202, 202, 202, 203, 203, 203, 204, 204, 204, 205, 205, 206, 206, 206, 207, \
207, 207, 208, 208, 208, 209, 209, 209, 210, 210, 210, 211, 211, 211, 212, 212, 213, 213, 213, 214, \
214, 214, 215, 215, 215, 216, 216, 216, 217, 217, 217, 218, 218, 219, 219, 219, 220, 220, 220, 221, \
221, 221, 222, 222, 223, 223, 223, 224, 224, 224, 225, 225, 225, 226, 226, 227, 227, 227, 228, 228, \
228, 229, 229, 229, 230, 230, 231, 231, 231, 232, 232, 232, 233, 233, 234, 234, 234, 235, 235, 235, \
236, 236, 237, 237, 237, 238, 238, 238, 239, 239, 240, 240, 240, 241, 241, 241, 242, 242, 243, 243, \
243, 244, 244, 244, 245, 245, 246, 246, 246, 247, 247, 248, 248, 248, 249, 249, 249, 250, 250, 251, \
251, 251, 252, 252, 253, 253, 253, 254, 254, 254, 255, 255, 256, 256, 256, 257, 257, 258, 258, 258, \
259, 259, 260, 260, 260, 261, 261, 262, 262, 262, 263, 263, 264, 264, 264, 265, 265, 266, 266, 266, \
267, 267, 268, 268, 268, 269, 269, 270, 270, 270, 271, 271, 272, 272, 272, 273, 273, 274, 274, 274, \
275, 275, 276, 276, 276, 277, 277, 278, 278, 278, 279, 279, 280, 280, 280, 281, 281, 282, 282, 283, \
283, 283, 284, 284, 285, 285, 285, 286, 286, 287, 287, 288, 288, 288, 289, 289, 290, 290, 290, 291, \
291, 292, 292, 293, 293, 293, 294, 294, 295, 295, 296, 296, 296, 297, 297, 298, 298, 298, 299, 299, \
300, 300, 301, 301, 301, 302, 302, 303, 303, 304, 304, 304, 305, 305, 306, 306, 307, 307, 307, 308, \
308, 309, 309, 310, 310, 311, 311, 311, 312, 312, 313, 313, 314, 314, 314, 315, 315, 316, 316, 317, \
317, 318, 318, 318, 319, 319, 320, 320, 321, 321, 322, 322, 322, 323, 323, 324, 324, 325, 325, 326, \
326, 326, 327, 327, 328, 328, 329, 329, 330, 330, 330, 331, 331, 332, 332, 333, 333, 334, 334, 335, \
335, 335, 336, 336, 337, 337, 338, 338, 339, 339, 340, 340, 340, 341, 341, 342, 342, 343, 343, 344, \
344, 345, 345, 346, 346, 346, 347, 347, 348, 348, 349, 349, 350, 350, 351, 351, 352, 352, 353, 353, \
353, 354, 354, 355, 355, 356, 356, 357, 357, 358, 358, 359, 359, 360, 360, 361, 361, 361, 362, 362, \
363, 363, 364, 364, 365, 365, 366, 366, 367, 367, 368, 368, 369, 369, 370, 370, 371, 371, 371, 372, \
372, 373, 373, 374, 374, 375, 375, 376, 376, 377, 377, 378, 378, 379, 379, 380, 380, 381, 381, 382, \
382, 383, 383, 384, 384, 385, 385, 386, 386, 387, 387, 388, 388, 389, 389, 390, 390, 391, 391, 392, \
392, 393, 393, 394, 394, 395, 395, 396, 396, 397, 397, 398, 398, 399, 399, 400, 400, 401, 401, 402, \
402, 403, 403, 404, 404, 405, 405, 406, 406, 407, 407, 408, 408, 409, 409, 410, 410, 411, 411, 412, \
412, 413, 413, 414, 414, 415, 415, 416, 416, 417, 417, 418, 418, 419, 419, 420, 420, 421, 421, 422, \
423, 423, 424, 424, 425, 425, 426, 426, 427, 427, 428, 428, 429, 429, 430, 430, 431, 431, 432, 432, \
433, 433, 434, 435, 435, 436, 436, 437, 437, 438, 438, 439, 439, 440, 440, 441, 441, 442, 442, 443, \
444, 444, 445, 445, 446, 446, 447, 447, 448, 448, 449, 449, 450, 450, 451, 452, 452, 453, 453, 454, \
454, 455, 455, 456, 456, 457, 458, 458, 459, 459, 460, 460, 461, 461, 462, 462, 463, 464, 464, 465, \
465, 466, 466, 467, 467, 468, 468, 469, 470, 470, 471, 471, 472, 472, 473, 473, 474, 475, 475, 476, \
476, 477, 477, 478, 478, 479, 480, 480, 481, 481, 482, 482, 483, 483, 484, 485, 485, 486, 486, 487, \
487, 488, 488, 489, 490, 490, 491, 491, 492, 492, 493, 494, 494, 495, 495, 496, 496, 497, 498, 498, \
499, 499, 500, 500, 501, 502, 502, 503, 503, 504, 504, 505, 506, 506, 507, 507, 508, 508, 509, 510, \
510, 511, 511, 512, 512, 513, 514, 514, 515, 515, 516, 517, 517, 518, 518, 519, 519, 520, 521, 521, \
522, 522, 523, 524, 524, 525, 525, 526, 526, 527, 528, 528, 529, 529, 530, 531, 531, 532, 532, 533, \
534, 534, 535, 535, 536, 537, 537, 538, 538, 539, 539, 540, 541, 541, 542, 542, 543, 544, 544, 545, \
545, 546, 547, 547, 548, 548, 549, 550, 550, 551, 551, 552, 553, 553, 554, 555, 555, 556, 556, 557, \
558, 558, 559, 559, 560, 561, 561, 562, 562, 563, 564, 564, 565, 565, 566, 567, 567, 568, 569, 569, \
570, 570, 571, 572, 572, 573, 573, 574, 575, 575, 576, 577, 577, 578, 578, 579, 580, 580, 581, 582, \
582, 583, 583, 584, 585, 585, 586, 586, 587, 588, 588, 589, 590, 590, 591, 591, 592, 593, 593, 594, \
595, 595, 596, 597, 597, 598, 598, 599, 600, 600, 601, 602, 602, 603, 603, 604, 605, 605, 606, 607, \
607, 608, 609, 609, 610, 610, 611, 612, 612, 613, 614, 614, 615, 616, 616, 617, 618, 618, 619, 619, \
620, 621, 621, 622, 623, 623, 624, 625, 625, 626, 627, 627, 628, 629, 629, 630, 630, 631, 632, 632, \
633, 634, 634, 635, 636, 636, 637, 638, 638, 639, 640, 640, 641, 642, 642, 643, 644, 644, 645, 646, \
646, 647, 647, 648, 649, 649, 650, 651, 651, 652, 653, 653, 654, 655, 655, 656, 657, 657, 658, 659, \
659, 660, 661, 661, 662, 663, 663, 664, 665, 665, 666, 667, 667, 668, 669, 669, 670, 671, 671, 672, \
673, 674, 674, 675, 676, 676, 677, 678, 678, 679, 680, 680, 681, 682, 682, 683, 684, 684, 685, 686, \
686, 687, 688, 688, 689, 690, 690, 691, 692, 693, 693, 694, 695, 695, 696, 697, 697, 698, 699, 699, \
700, 701, 701, 702, 703, 704, 704, 705, 706, 706, 707, 708, 708, 709, 710, 710, 711, 712, 713, 713, \
714, 715, 715, 716, 717, 717, 718, 719, 719, 720, 721, 722, 722, 723, 724, 724, 725, 726, 726, 727, \
728, 729, 729, 730, 731, 731, 732, 733, 734, 734, 735, 736, 736, 737, 738, 738, 739, 740, 741, 741, \
742, 743, 743, 744, 745, 746, 746, 747, 748, 748, 749, 750, 751, 751, 752, 753, 753, 754, 755, 756, \
756, 757, 758, 758, 759, 760, 761, 761, 762, 763, 763, 764, 765, 766, 766, 767, 768, 769, 769, 770, \
771, 771, 772, 773, 774, 774, 775, 776, 777, 777, 778, 779, 779, 780, 781, 782, 782, 783, 784, 785, \
785, 786, 787, 787, 788, 789, 790, 790, 791, 792, 793, 793, 794, 795, 796, 796, 797, 798, 798, 799, \
800, 801, 801, 802, 803, 804, 804, 805, 806, 807, 807, 808, 809, 810, 810, 811, 812, 813, 813, 814, \
815, 816, 816, 817, 818, 819, 819, 820, 821, 822, 822, 823, 824, 825, 825, 826, 827, 828, 828, 829, \
830, 831, 831, 832, 833, 834, 834, 835, 836, 837, 837, 838, 839, 840, 840, 841, 842, 843, 843, 844, \
845, 846, 846, 847, 848, 849, 849, 850, 851, 852, 853, 853, 854, 855, 856, 856, 857, 858, 859, 859, \
860, 861, 862, 862, 863, 864, 865, 866, 866, 867, 868, 869, 869, 870, 871, 872, 872, 873, 874, 875, \
876, 876, 877, 878, 879, 879, 880, 881, 882, 883, 883, 884, 885, 886, 886, 887, 888, 889, 890, 890, \
891, 892, 893, 893, 894, 895, 896, 897, 897, 898, 899, 900, 900, 901, 902, 903, 904, 904, 905, 906, \
907, 908, 908, 909, 910, 911, 911, 912, 913, 914, 915, 915, 916, 917, 918, 919, 919, 920, 921, 922, \
923, 923, 924, 925, 926, 927, 927, 928, 929, 930, 931, 931, 932, 933, 934, 935, 935, 936, 937, 938, \
939, 939, 940, 941, 942, 943, 943, 944, 945, 946, 947, 947, 948, 949, 950, 951, 951, 952, 953, 954, \
955, 955, 956, 957, 958, 959, 959, 960, 961, 962, 963, 963, 964, 965, 966, 967, 968, 968, 969, 970, \
971, 972, 972, 973, 974, 975, 976, 976, 977, 978, 979, 980, 981, 981, 982, 983, 984, 985, 985, 986, \
987, 988, 989, 990, 990, 991, 992, 993, 994, 994, 995, 996, 997, 998, 999, 999, 1000, 1001, 1002, 1003, \
1004, 1004, 1005, 1006, 1007, 1008, 1008, 1009, 1010, 1011, 1012, 1013, 1013, 1014, 1015, 1016, 1017, 1018, 1018, 1019, \
1020, 1021, 1022, 1023, 1023, 1024, 1025, 1026, 1027, 1028, 1028, 1029, 1030, 1031, 1032, 1033, 1033, 1034, 1035, 1036, \
1037, 1038, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1044, 1045, 1046, 1047, 1048, 1049, 1049, 1050, 1051, 1052, 1053, \
1054, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1060, 1061, 1062, 1063, 1064, 1065, 1065, 1066, 1067, 1068, 1069, 1070, \
1071, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1077, 1078, 1079, 1080, 1081, 1082, 1082, 1083, 1084, 1085, 1086, 1087, \
1088, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1100, 1101, 1102, 1103, 1104, \
1105, 1106, 1107, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1119, 1120, 1121, \
1122, 1123, 1124, 1125, 1126, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1133, 1134, 1135, 1136, 1137, 1138, 1139, \
1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1153, 1154, 1155, 1156, \
1157, 1158, 1159, 1160, 1161, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1168, 1169, 1170, 1171, 1172, 1173, 1174, \
1175, 1176, 1176, 1177, 1178, 1179, 1180, 1181, 1182, 1183, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1191, \
1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1208, 1209, \
1210, 1211, 1212, 1213, 1214, 1215, 1216, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1225, 1226, 1227, \
1228, 1229, 1230, 1231, 1232, 1233, 1234, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1243, 1244, 1245, \
1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1263, \
1264, 1265, 1266, 1267, 1268, 1269, 1270, 1271, 1272, 1273, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, \
1283, 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1290, 1291, 1292, 1293, 1294, 1294, 1295, 1296, 1297, 1298, 1299, 1300, \
1301, 1302, 1303, 1304, 1305, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1317, 1318, \
1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, \
1338, 1339, 1340, 1341, 1342, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, \
1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1368, 1369, 1370, 1370, 1371, 1372, 1373, 1374, \
1375, 1376, 1377, 1378, 1379, 1380, 1381, 1382, 1383, 1384, 1385, 1385, 1386, 1387, 1388, 1389, 1390, 1391, 1392, 1393, \
1394, 1395, 1396, 1397, 1398, 1399, 1400, 1401, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409, 1410, 1411, 1412, \
1413, 1414, 1415, 1416, 1417, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1427, 1428, 1429, 1430, 1431, \
1432, 1433, 1434, 1435, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, \
1451, 1452, 1453, 1454, 1455, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, \
1470, 1471, 1472, 1473, 1474, 1475, 1476, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, \
1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, \
1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1526, \
1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, \
1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, \
1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, \
1586, 1587, 1588, 1589, 1590, 1591, 1592, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, \
1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, \
1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1637, 1638, 1639, 1640, 1641, 1642, 1643, \
1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, \
1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, \
1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1702, \
1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, 1722, \
1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, \
1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1759, 1760, 1761, 1762, \
1763, 1764, 1765, 1766, 1767, 1768, 1769, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, \
1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, \
1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, \
1823, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, \
1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 1857, 1858, 1859, 1860, 1861, 1862, \
1863, 1864, 1865, 1866, 1867, 1868, 1869, 1870, 1871, 1872, 1873, 1874, 1875, 1876, 1877, 1878, 1879, 1880, 1881, 1882, \
1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1902, \
1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, \
1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1931, 1932, 1933, 1934, 1935, 1936, 1937, 1938, 1939, 1940, 1941, 1942, \
1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1955, 1956, 1957, 1958, 1959, 1960, 1961, \
1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, \
1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, \
2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, \
2022, 2023, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2036, 2037, 2038, 2039, 2040, 2041, \
2042, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, \
2062, 2063, 2064, 2065, 2066, 2067, 2068, 2069, 2070, 2071, 2072, 2073, 2074, 2075, 2076, 2077, 2078, 2079, 2080, 2081, \
2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, 2091, 2092, 2093, 2094, 2095, 2096, 2097, 2098, 2099, 2100, 2101, \
2102, 2103, 2104, 2105, 2106, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2117, 2118, 2119, 2120, 2121, \
2122, 2123, 2124, 2125, 2126, 2127, 2128, 2129, 2130, 2131, 2132, 2133, 2134, 2135, 2136, 2137, 2138, 2139, 2140, 2141, \
2142, 2143, 2144, 2145, 2146, 2147, 2148, 2149, 2150, 2151, 2152, 2153, 2154, 2155, 2156, 2157, 2158, 2159, 2160, 2161, \
2162, 2163, 2164, 2165, 2166, 2167, 2168, 2169, 2170, 2171, 2172, 2173, 2174, 2175, 2176, 2177, 2178, 2179, 2180, 2181, \
2182, 2183, 2184, 2185, 2186, 2187, 2188, 2189, 2190, 2191, 2192, 2193, 2194, 2195, 2196, 2197, 2198, 2199, 2200, 2201, \
2202, 2203, 2204, 2205, 2205, 2206, 2207, 2208, 2209, 2210, 2211, 2212, 2213, 2214, 2215, 2216, 2217, 2218, 2219, 2220, \
2221, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2229, 2230, 2231, 2232, 2233, 2234, 2235, 2236, 2237, 2238, 2239, 2240, \
2241, 2242, 2243, 2244, 2245, 2246, 2247, 2248, 2249, 2250, 2251, 2252, 2253, 2254, 2255, 2256, 2257, 2258, 2259, 2260, \
2261, 2262, 2263, 2264, 2265, 2266, 2267, 2268, 2269, 2270, 2270, 2271, 2272, 2273, 2274, 2275, 2276, 2277, 2278, 2279, \
2280, 2281, 2282, 2283, 2284, 2285, 2286, 2287, 2288, 2289, 2290, 2291, 2292, 2293, 2294, 2295, 2296, 2297, 2298, 2299, \
2300, 2301, 2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309, 2310, 2311, 2312, 2313, 2314, 2315, 2315, 2316, 2317, 2318, \
2319, 2320, 2321, 2322, 2323, 2324, 2325, 2326, 2327, 2328, 2329, 2330, 2331, 2332, 2333, 2334, 2335, 2336, 2337, 2338, \
2339, 2340, 2341, 2342, 2343, 2344, 2345, 2346, 2347, 2348, 2349, 2350, 2351, 2351, 2352, 2353, 2354, 2355, 2356, 2357, \
2358, 2359, 2360, 2361, 2362, 2363, 2364, 2365, 2366, 2367, 2368, 2369, 2370, 2371, 2372, 2373, 2374, 2375, 2376, 2377, \
2378, 2379, 2380, 2381, 2381, 2382, 2383, 2384, 2385, 2386, 2387, 2388, 2389, 2390, 2391, 2392, 2393, 2394, 2395, 2396, \
2397, 2398, 2399, 2400, 2401, 2402, 2403, 2404, 2405, 2406, 2407, 2407, 2408, 2409, 2410, 2411, 2412, 2413, 2414, 2415, \
2416, 2417, 2418, 2419, 2420, 2421, 2422, 2423, 2424, 2425, 2426, 2427, 2428, 2429, 2430, 2431, 2431, 2432, 2433, 2434, \
2435, 2436, 2437, 2438, 2439, 2440, 2441, 2442, 2443, 2444, 2445, 2446, 2447, 2448, 2449, 2450, 2451, 2452, 2452, 2453, \
2454, 2455, 2456, 2457, 2458, 2459, 2460, 2461, 2462, 2463, 2464, 2465, 2466, 2467, 2468, 2469, 2470, 2471, 2472, 2472, \
2473, 2474, 2475, 2476, 2477, 2478, 2479, 2480, 2481, 2482, 2483, 2484, 2485, 2486, 2487, 2488, 2489, 2490, 2490, 2491, \
2492, 2493, 2494, 2495, 2496, 2497, 2498, 2499, 2500, 2501, 2502, 2503, 2504, 2505, 2506, 2506, 2507, 2508, 2509, 2510, \
2511, 2512, 2513, 2514, 2515, 2516, 2517, 2518, 2519, 2520, 2521, 2522, 2522, 2523, 2524, 2525, 2526, 2527, 2528, 2529, \
2530, 2531, 2532, 2533, 2534, 2535, 2536, 2537, 2537, 2538, 2539, 2540, 2541, 2542, 2543, 2544, 2545, 2546, 2547, 2548, \
2549, 2550, 2551, 2551, 2552, 2553, 2554, 2555, 2556, 2557, 2558, 2559, 2560, 2561, 2562, 2563, 2564, 2565, 2565, 2566, \
2567, 2568, 2569, 2570, 2571, 2572, 2573, 2574, 2575, 2576, 2577, 2578, 2578, 2579, 2580, 2581, 2582, 2583, 2584, 2585, \
2586, 2587, 2588, 2589, 2590, 2590, 2591, 2592, 2593, 2594, 2595, 2596, 2597, 2598, 2599, 2600, 2601, 2602, 2602, 2603, \
2604, 2605, 2606, 2607, 2608, 2609, 2610, 2611, 2612, 2613, 2613, 2614, 2615, 2616, 2617, 2618, 2619, 2620, 2621, 2622, \
2623, 2624, 2624, 2625, 2626, 2627, 2628, 2629, 2630, 2631, 2632, 2633, 2634, 2634, 2635, 2636, 2637, 2638, 2639, 2640, \
2641, 2642, 2643, 2644, 2644, 2645, 2646, 2647, 2648, 2649, 2650, 2651, 2652, 2653, 2654, 2654, 2655, 2656, 2657, 2658, \
2659, 2660, 2661, 2662, 2663, 2664, 2664, 2665, 2666, 2667, 2668, 2669, 2670, 2671, 2672, 2673, 2673, 2674, 2675, 2676, \
2677, 2678, 2679, 2680, 2681, 2682, 2682, 2683, 2684, 2685, 2686, 2687, 2688, 2689, 2690, 2691, 2691, 2692, 2693, 2694, \
2695, 2696, 2697, 2698, 2699, 2699, 2700, 2701, 2702, 2703, 2704, 2705, 2706, 2707, 2708, 2708, 2709, 2710, 2711, 2712, \
2713, 2714, 2715, 2716, 2716, 2717, 2718, 2719, 2720, 2721, 2722, 2723, 2724, 2724, 2725, 2726, 2727, 2728, 2729, 2730, \
2731, 2731, 2732, 2733, 2734, 2735, 2736, 2737, 2738, 2739, 2739, 2740, 2741, 2742, 2743, 2744, 2745, 2746, 2746, 2747, \
2748, 2749, 2750, 2751, 2752, 2753, 2754, 2754, 2755, 2756, 2757, 2758, 2759, 2760, 2761, 2761, 2762, 2763, 2764, 2765, \
2766, 2767, 2768, 2768, 2769, 2770, 2771, 2772, 2773, 2774, 2774, 2775, 2776, 2777, 2778, 2779, 2780, 2781, 2781, 2782, \
2783, 2784, 2785, 2786, 2787, 2788, 2788, 2789, 2790, 2791, 2792, 2793, 2794, 2794, 2795, 2796, 2797, 2798, 2799, 2800, \
2800, 2801, 2802, 2803, 2804, 2805, 2806, 2807, 2807, 2808, 2809, 2810, 2811, 2812, 2813, 2813, 2814, 2815, 2816, 2817, \
2818, 2819, 2819, 2820, 2821, 2822, 2823, 2824, 2825, 2825, 2826, 2827, 2828, 2829, 2830, 2830, 2831, 2832, 2833, 2834, \
2835, 2836, 2836, 2837, 2838, 2839, 2840, 2841, 2842, 2842, 2843, 2844, 2845, 2846, 2847, 2847, 2848, 2849, 2850, 2851, \
2852, 2853, 2853, 2854, 2855, 2856, 2857, 2858, 2858, 2859, 2860, 2861, 2862, 2863, 2863, 2864, 2865, 2866, 2867, 2868, \
2869, 2869, 2870, 2871, 2872, 2873, 2874, 2874, 2875, 2876, 2877, 2878, 2879, 2879, 2880, 2881, 2882, 2883, 2884, 2884, \
2885, 2886, 2887, 2888, 2889, 2889, 2890, 2891, 2892, 2893, 2894, 2894, 2895, 2896, 2897, 2898, 2899, 2899, 2900, 2901, \
2902, 2903, 2903, 2904, 2905, 2906, 2907, 2908, 2908, 2909, 2910, 2911, 2912, 2913, 2913, 2914, 2915, 2916, 2917, 2917, \
2918, 2919, 2920, 2921, 2922, 2922, 2923, 2924, 2925, 2926, 2926, 2927, 2928, 2929, 2930, 2931, 2931, 2932, 2933, 2934, \
2935, 2935, 2936, 2937, 2938, 2939, 2939, 2940, 2941, 2942, 2943, 2944, 2944, 2945, 2946, 2947, 2948, 2948, 2949, 2950, \
2951, 2952, 2952, 2953, 2954, 2955, 2956, 2956, 2957, 2958, 2959, 2960, 2960, 2961, 2962, 2963, 2964, 2964, 2965, 2966, \
2967, 2968, 2968, 2969, 2970, 2971, 2972, 2972, 2973, 2974, 2975, 2976, 2976, 2977, 2978, 2979, 2980, 2980, 2981, 2982, \
2983, 2984, 2984, 2985, 2986, 2987, 2988, 2988, 2989, 2990, 2991, 2992, 2992, 2993, 2994, 2995, 2996, 2996, 2997, 2998, \
2999, 2999, 3000, 3001, 3002, 3003, 3003, 3004, 3005, 3006, 3007, 3007, 3008, 3009, 3010, 3010, 3011, 3012, 3013, 3014, \
3014, 3015, 3016, 3017, 3017, 3018, 3019, 3020, 3021, 3021, 3022, 3023, 3024, 3024, 3025, 3026, 3027, 3028, 3028, 3029, \
3030, 3031, 3031, 3032, 3033, 3034, 3035, 3035, 3036, 3037, 3038, 3038, 3039, 3040, 3041, 3041, 3042, 3043, 3044, 3045, \
3045, 3046, 3047, 3048, 3048, 3049, 3050, 3051, 3051, 3052, 3053, 3054, 3054, 3055, 3056, 3057, 3058, 3058, 3059, 3060, \
3061, 3061, 3062, 3063, 3064, 3064, 3065, 3066, 3067, 3067, 3068, 3069, 3070, 3070, 3071, 3072, 3073, 3073, 3074, 3075, \
3076, 3076, 3077, 3078, 3079, 3079, 3080, 3081, 3082, 3082, 3083, 3084, 3085, 3085, 3086, 3087, 3088, 3088, 3089, 3090, \
3091, 3091, 3092, 3093, 3094, 3094, 3095, 3096, 3097, 3097, 3098, 3099, 3100, 3100, 3101, 3102, 3103, 3103, 3104, 3105, \
3106, 3106, 3107, 3108, 3109, 3109, 3110, 3111, 3111, 3112, 3113, 3114, 3114, 3115, 3116, 3117, 3117, 3118, 3119, 3120, \
3120, 3121, 3122, 3122, 3123, 3124, 3125, 3125, 3126, 3127, 3128, 3128, 3129, 3130, 3130, 3131, 3132, 3133, 3133, 3134, \
3135, 3136, 3136, 3137, 3138, 3138, 3139, 3140, 3141, 3141, 3142, 3143, 3144, 3144, 3145, 3146, 3146, 3147, 3148, 3149, \
3149, 3150, 3151, 3151, 3152, 3153, 3154, 3154, 3155, 3156, 3156, 3157, 3158, 3159, 3159, 3160, 3161, 3161, 3162, 3163, \
3164, 3164, 3165, 3166, 3166, 3167, 3168, 3169, 3169, 3170, 3171, 3171, 3172, 3173, 3173, 3174, 3175, 3176, 3176, 3177, \
3178, 3178, 3179, 3180, 3181, 3181, 3182, 3183, 3183, 3184, 3185, 3185, 3186, 3187, 3188, 3188, 3189, 3190, 3190, 3191, \
3192, 3192, 3193, 3194, 3194, 3195, 3196, 3197, 3197, 3198, 3199, 3199, 3200, 3201, 3201, 3202, 3203, 3203, 3204, 3205, \
3206, 3206, 3207, 3208, 3208, 3209, 3210, 3210, 3211, 3212, 3212, 3213, 3214, 3214, 3215, 3216, 3217, 3217, 3218, 3219, \
3219, 3220, 3221, 3221, 3222, 3223, 3223, 3224, 3225, 3225, 3226, 3227, 3227, 3228, 3229, 3229, 3230, 3231, 3231, 3232, \
3233, 3233, 3234, 3235, 3236, 3236, 3237, 3238, 3238, 3239, 3240, 3240, 3241, 3242, 3242, 3243, 3244, 3244, 3245, 3246, \
3246, 3247, 3248, 3248, 3249, 3250, 3250, 3251, 3252, 3252, 3253, 3254, 3254, 3255, 3256, 3256, 3257, 3258, 3258, 3259, \
3260, 3260, 3261, 3261, 3262, 3263, 3263, 3264, 3265, 3265, 3266, 3267, 3267, 3268, 3269, 3269, 3270, 3271, 3271, 3272, \
3273, 3273, 3274, 3275, 3275, 3276, 3277, 3277, 3278, 3278, 3279, 3280, 3280, 3281, 3282, 3282, 3283, 3284, 3284, 3285, \
3286, 3286, 3287, 3288, 3288, 3289, 3289, 3290, 3291, 3291, 3292, 3293, 3293, 3294, 3295, 3295, 3296, 3297, 3297, 3298, \
3298, 3299, 3300, 3300, 3301, 3302, 3302, 3303, 3304, 3304, 3305, 3305, 3306, 3307, 3307, 3308, 3309, 3309, 3310, 3310, \
3311, 3312, 3312, 3313, 3314, 3314, 3315, 3316, 3316, 3317, 3317, 3318, 3319, 3319, 3320, 3321, 3321, 3322, 3322, 3323, \
3324, 3324, 3325, 3325, 3326, 3327, 3327, 3328, 3329, 3329, 3330, 3330, 3331, 3332, 3332, 3333, 3334, 3334, 3335, 3335, \
3336, 3337, 3337, 3338, 3338, 3339, 3340, 3340, 3341, 3342, 3342, 3343, 3343, 3344, 3345, 3345, 3346, 3346, 3347, 3348, \
3348, 3349, 3349, 3350, 3351, 3351, 3352, 3352, 3353, 3354, 3354, 3355, 3356, 3356, 3357, 3357, 3358, 3359, 3359, 3360, \
3360, 3361, 3362, 3362, 3363, 3363, 3364, 3365, 3365, 3366, 3366, 3367, 3368, 3368, 3369, 3369, 3370, 3370, 3371, 3372, \
3372, 3373, 3373, 3374, 3375, 3375, 3376, 3376, 3377, 3378, 3378, 3379, 3379, 3380, 3381, 3381, 3382, 3382, 3383, 3383, \
3384, 3385, 3385, 3386, 3386, 3387, 3388, 3388, 3389, 3389, 3390, 3390, 3391, 3392, 3392, 3393, 3393, 3394, 3395, 3395, \
3396, 3396, 3397, 3397, 3398, 3399, 3399, 3400, 3400, 3401, 3401, 3402, 3403, 3403, 3404, 3404, 3405, 3405, 3406, 3407, \
3407, 3408, 3408, 3409, 3409, 3410, 3411, 3411, 3412, 3412, 3413, 3413, 3414, 3415, 3415, 3416, 3416, 3417, 3417, 3418, \
3419, 3419, 3420, 3420, 3421, 3421, 3422, 3422, 3423, 3424, 3424, 3425, 3425, 3426, 3426, 3427, 3427, 3428, 3429, 3429, \
3430, 3430, 3431, 3431, 3432, 3432, 3433, 3434, 3434, 3435, 3435, 3436, 3436, 3437, 3437, 3438, 3439, 3439, 3440, 3440, \
3441, 3441, 3442, 3442, 3443, 3443, 3444, 3445, 3445, 3446, 3446, 3447, 3447, 3448, 3448, 3449, 3449, 3450, 3451, 3451, \
3452, 3452, 3453, 3453, 3454, 3454, 3455, 3455, 3456, 3457, 3457, 3458, 3458, 3459, 3459, 3460, 3460, 3461, 3461, 3462, \
3462, 3463, 3463, 3464, 3465, 3465, 3466, 3466, 3467, 3467, 3468, 3468, 3469, 3469, 3470, 3470, 3471, 3471, 3472, 3472, \
3473, 3474, 3474, 3475, 3475, 3476, 3476, 3477, 3477, 3478, 3478, 3479, 3479, 3480, 3480, 3481, 3481, 3482, 3482, 3483, \
3483, 3484, 3484, 3485, 3486, 3486, 3487, 3487, 3488, 3488, 3489, 3489, 3490, 3490, 3491, 3491, 3492, 3492, 3493, 3493, \
3494, 3494, 3495, 3495, 3496, 3496, 3497, 3497, 3498, 3498, 3499, 3499, 3500, 3500, 3501, 3501, 3502, 3502, 3503, 3503, \
3504, 3504, 3505, 3505, 3506, 3506, 3507, 3507, 3508, 3508, 3509, 3509, 3510, 3510, 3511, 3511, 3512, 3512, 3513, 3513, \
3514, 3514, 3515, 3515, 3516, 3516, 3517, 3517, 3518, 3518, 3519, 3519, 3520, 3520, 3521, 3521, 3522, 3522, 3523, 3523, \
3524, 3524, 3525, 3525, 3526, 3526, 3527, 3527, 3528, 3528, 3529, 3529, 3530, 3530, 3531, 3531, 3532, 3532, 3533, 3533, \
3534, 3534, 3535, 3535, 3536, 3536, 3536, 3537, 3537, 3538, 3538, 3539, 3539, 3540, 3540, 3541, 3541, 3542, 3542, 3543, \
3543, 3544, 3544, 3545, 3545, 3546, 3546, 3546, 3547, 3547, 3548, 3548, 3549, 3549, 3550, 3550, 3551, 3551, 3552, 3552, \
3553, 3553, 3554, 3554, 3554, 3555, 3555, 3556, 3556, 3557, 3557, 3558, 3558, 3559, 3559, 3560, 3560, 3561, 3561, 3561, \
3562, 3562, 3563, 3563, 3564, 3564, 3565, 3565, 3566, 3566, 3567, 3567, 3567, 3568, 3568, 3569, 3569, 3570, 3570, 3571, \
3571, 3572, 3572, 3572, 3573, 3573, 3574, 3574, 3575, 3575, 3576, 3576, 3577, 3577, 3577, 3578, 3578, 3579, 3579, 3580, \
3580, 3581, 3581, 3581, 3582, 3582, 3583, 3583, 3584, 3584, 3585, 3585, 3585, 3586, 3586, 3587, 3587, 3588, 3588, 3589, \
3589, 3589, 3590, 3590, 3591, 3591, 3592, 3592, 3593, 3593, 3593, 3594, 3594, 3595, 3595, 3596, 3596, 3596, 3597, 3597, \
3598, 3598, 3599, 3599, 3600, 3600, 3600, 3601, 3601, 3602, 3602, 3603, 3603, 3603, 3604, 3604, 3605, 3605, 3606, 3606, \
3606, 3607, 3607, 3608, 3608, 3609, 3609, 3609, 3610, 3610, 3611, 3611, 3611, 3612, 3612, 3613, 3613, 3614, 3614, 3614, \
3615, 3615, 3616, 3616, 3617, 3617, 3617, 3618, 3618, 3619, 3619, 3619, 3620, 3620, 3621, 3621, 3622, 3622, 3622, 3623, \
3623, 3624, 3624, 3624, 3625, 3625, 3626, 3626, 3627, 3627, 3627, 3628, 3628, 3629, 3629, 3629, 3630, 3630, 3631, 3631, \
3631, 3632, 3632, 3633, 3633, 3633, 3634, 3634, 3635, 3635, 3635, 3636, 3636, 3637, 3637, 3637, 3638, 3638, 3639, 3639, \
3639, 3640, 3640, 3641, 3641, 3641, 3642, 3642, 3643, 3643, 3643, 3644, 3644, 3645, 3645, 3645, 3646, 3646, 3647, 3647, \
3647, 3648, 3648, 3649, 3649, 3649, 3650, 3650, 3651, 3651, 3651, 3652, 3652, 3653, 3653, 3653, 3654, 3654, 3654, 3655, \
3655, 3656, 3656, 3656, 3657, 3657, 3658, 3658, 3658, 3659, 3659, 3659, 3660, 3660, 3661, 3661, 3661, 3662, 3662, 3663, \
3663, 3663, 3664, 3664, 3664, 3665, 3665, 3666, 3666, 3666, 3667, 3667, 3667, 3668, 3668, 3669, 3669, 3669, 3670, 3670, \
3670, 3671, 3671, 3672, 3672, 3672, 3673, 3673, 3673, 3674, 3674, 3675, 3675, 3675, 3676, 3676, 3676, 3677, 3677, 3678, \
3678, 3678, 3679, 3679, 3679, 3680, 3680, 3680, 3681, 3681, 3682, 3682, 3682, 3683, 3683, 3683, 3684, 3684, 3684, 3685, \
3685, 3686, 3686, 3686, 3687, 3687, 3687, 3688, 3688, 3688, 3689, 3689, 3690, 3690, 3690, 3691, 3691, 3691, 3692, 3692, \
3692, 3693, 3693, 3693, 3694, 3694, 3694, 3695, 3695, 3696, 3696, 3696, 3697, 3697, 3697, 3698, 3698, 3698, 3699, 3699, \
3699, 3700, 3700, 3700, 3701, 3701, 3701, 3702, 3702, 3703, 3703, 3703, 3704, 3704, 3704, 3705, 3705, 3705, 3706, 3706, \
3706, 3707, 3707, 3707, 3708, 3708, 3708, 3709, 3709, 3709, 3710, 3710, 3710, 3711, 3711, 3711, 3712, 3712, 3712, 3713, \
3713, 3713, 3714, 3714, 3714, 3715, 3715, 3715, 3716, 3716, 3716, 3717, 3717, 3717, 3718, 3718, 3718, 3719, 3719, 3719, \
3720, 3720, 3720, 3721, 3721, 3721, 3722, 3722, 3722, 3723, 3723, 3723, 3724, 3724, 3724, 3725, 3725, 3725, 3726, 3726, \
3726, 3727, 3727, 3727, 3728, 3728, 3728, 3729, 3729, 3729, 3730, 3730, 3730, 3731, 3731, 3731, 3731, 3732, 3732, 3732, \
3733, 3733, 3733, 3734, 3734, 3734, 3735, 3735, 3735, 3736, 3736, 3736, 3737, 3737, 3737, 3738, 3738, 3738, 3738, 3739, \
3739, 3739, 3740, 3740, 3740, 3741, 3741, 3741, 3742, 3742, 3742, 3743, 3743, 3743, 3743, 3744, 3744, 3744, 3745, 3745, \
3745, 3746, 3746, 3746, 3747, 3747, 3747, 3747, 3748, 3748, 3748, 3749, 3749, 3749, 3750, 3750, 3750, 3750, 3751, 3751, \
3751, 3752, 3752, 3752, 3753, 3753, 3753, 3753, 3754, 3754, 3754, 3755, 3755, 3755, 3756, 3756, 3756, 3756, 3757, 3757, \
3757, 3758, 3758, 3758, 3759, 3759, 3759, 3759, 3760, 3760, 3760, 3761, 3761, 3761, 3761, 3762, 3762, 3762, 3763, 3763, \
3763, 3763, 3764, 3764, 3764, 3765, 3765, 3765, 3766, 3766, 3766, 3766, 3767, 3767, 3767, 3768, 3768, 3768, 3768, 3769, \
3769, 3769, 3770, 3770, 3770, 3770, 3771, 3771, 3771, 3771, 3772, 3772, 3772, 3773, 3773, 3773, 3773, 3774, 3774, 3774, \
3775, 3775, 3775, 3775, 3776, 3776, 3776, 3777, 3777, 3777, 3777, 3778, 3778, 3778, 3778, 3779, 3779, 3779, 3780, 3780, \
3780, 3780, 3781, 3781, 3781, 3781, 3782, 3782, 3782, 3783, 3783, 3783, 3783, 3784, 3784, 3784, 3784, 3785, 3785, 3785, \
3786, 3786, 3786, 3786, 3787, 3787, 3787, 3787, 3788, 3788, 3788, 3788, 3789, 3789, 3789, 3790, 3790, 3790, 3790, 3791, \
3791, 3791, 3791, 3792, 3792, 3792, 3792, 3793, 3793, 3793, 3793, 3794, 3794, 3794, 3794, 3795, 3795, 3795, 3796, 3796, \
3796, 3796, 3797, 3797, 3797, 3797, 3798, 3798, 3798, 3798, 3799, 3799, 3799, 3799, 3800, 3800, 3800, 3800, 3801, 3801, \
3801, 3801, 3802, 3802, 3802, 3802, 3803, 3803, 3803, 3803, 3804, 3804, 3804, 3804, 3805, 3805, 3805, 3805, 3806, 3806, \
3806, 3806, 3807, 3807, 3807, 3807, 3808, 3808, 3808, 3808, 3809, 3809, 3809, 3809, 3810, 3810, 3810, 3810, 3811, 3811, \
3811, 3811, 3812, 3812, 3812, 3812, 3812, 3813, 3813, 3813, 3813, 3814, 3814, 3814, 3814, 3815, 3815, 3815, 3815, 3816, \
3816, 3816, 3816, 3817, 3817, 3817, 3817, 3818, 3818, 3818, 3818, 3818, 3819, 3819, 3819, 3819, 3820, 3820, 3820, 3820, \
3821, 3821, 3821, 3821, 3821, 3822, 3822, 3822, 3822, 3823, 3823, 3823, 3823, 3824, 3824, 3824, 3824, 3824, 3825, 3825, \
3825, 3825, 3826, 3826, 3826, 3826, 3827, 3827, 3827, 3827, 3827, 3828, 3828, 3828, 3828, 3829, 3829, 3829, 3829, 3829, \
3830, 3830, 3830, 3830, 3831, 3831, 3831, 3831, 3831, 3832, 3832, 3832, 3832, 3833, 3833, 3833, 3833, 3833, 3834, 3834, \
3834, 3834, 3835, 3835, 3835, 3835, 3835, 3836, 3836, 3836, 3836, 3836, 3837, 3837, 3837, 3837, 3838, 3838, 3838, 3838, \
3838, 3839, 3839, 3839, 3839, 3839, 3840, 3840, 3840, 3840, 3841, 3841, 3841, 3841, 3841, 3842, 3842, 3842, 3842, 3842, \
3843, 3843, 3843, 3843, 3843, 3844, 3844, 3844, 3844, 3844, 3845, 3845, 3845, 3845, 3846, 3846, 3846, 3846, 3846, 3847, \
3847, 3847, 3847, 3847, 3848, 3848, 3848, 3848, 3848, 3849, 3849, 3849, 3849, 3849, 3850, 3850, 3850, 3850, 3850, 3851, \
3851, 3851, 3851, 3851, 3852, 3852, 3852, 3852, 3852, 3853, 3853, 3853, 3853, 3853, 3854, 3854, 3854, 3854, 3854, 3855, \
3855, 3855, 3855, 3855, 3856, 3856, 3856, 3856, 3856, 3857, 3857, 3857, 3857, 3857, 3857, 3858, 3858, 3858, 3858, 3858, \
3859, 3859, 3859, 3859, 3859, 3860, 3860, 3860, 3860, 3860, 3861, 3861, 3861, 3861, 3861, 3861, 3862, 3862, 3862, 3862, \
3862, 3863, 3863, 3863, 3863, 3863, 3864, 3864, 3864, 3864, 3864, 3864, 3865, 3865, 3865, 3865, 3865, 3866, 3866, 3866, \
3866, 3866, 3867, 3867, 3867, 3867, 3867, 3867, 3868, 3868, 3868, 3868, 3868, 3868, 3869, 3869, 3869, 3869, 3869, 3870, \
3870, 3870, 3870, 3870, 3870, 3871, 3871, 3871, 3871, 3871, 3872, 3872, 3872, 3872, 3872, 3872, 3873, 3873, 3873, 3873, \
3873, 3873, 3874, 3874, 3874, 3874, 3874, 3875, 3875, 3875, 3875, 3875, 3875, 3876, 3876, 3876, 3876, 3876, 3876, 3877, \
3877, 3877, 3877, 3877, 3877, 3878, 3878, 3878, 3878, 3878, 3878, 3879, 3879, 3879, 3879, 3879, 3879, 3880, 3880, 3880, \
3880, 3880, 3880, 3881, 3881, 3881, 3881, 3881, 3881, 3882, 3882, 3882, 3882, 3882, 3882, 3883, 3883, 3883, 3883, 3883, \
3883, 3884, 3884, 3884, 3884, 3884, 3884, 3885, 3885, 3885, 3885, 3885, 3885, 3886, 3886, 3886, 3886, 3886, 3886, 3887, \
3887, 3887, 3887, 3887, 3887, 3887, 3888, 3888, 3888, 3888, 3888, 3888, 3889, 3889, 3889, 3889, 3889, 3889, 3890, 3890, \
3890, 3890, 3890, 3890, 3890, 3891, 3891, 3891, 3891, 3891, 3891, 3892, 3892, 3892, 3892, 3892, 3892, 3892, 3893, 3893, \
3893, 3893, 3893, 3893, 3894, 3894, 3894, 3894, 3894, 3894, 3894, 3895, 3895, 3895, 3895, 3895, 3895, 3895, 3896, 3896, \
3896, 3896, 3896, 3896, 3897, 3897, 3897, 3897, 3897, 3897, 3897, 3898, 3898, 3898, 3898, 3898, 3898, 3898, 3899, 3899, \
3899, 3899, 3899, 3899, 3899, 3900, 3900, 3900, 3900, 3900, 3900, 3900, 3901, 3901, 3901, 3901, 3901, 3901, 3901, 3902, \
3902, 3902, 3902, 3902, 3902, 3902, 3903, 3903, 3903, 3903, 3903, 3903, 3903, 3904, 3904, 3904, 3904, 3904, 3904, 3904, \
3905, 3905, 3905, 3905, 3905, 3905, 3905, 3906, 3906, 3906, 3906, 3906, 3906, 3906, 3906, 3907, 3907, 3907, 3907, 3907, \
3907, 3907, 3908, 3908, 3908, 3908, 3908, 3908, 3908, 3908, 3909, 3909, 3909, 3909, 3909, 3909, 3909, 3910, 3910, 3910, \
3910, 3910, 3910, 3910, 3910, 3911, 3911, 3911, 3911, 3911, 3911, 3911, 3912, 3912, 3912, 3912, 3912, 3912, 3912, 3912, \
3913, 3913, 3913, 3913, 3913, 3913, 3913, 3913, 3914, 3914, 3914, 3914, 3914, 3914, 3914, 3914, 3915, 3915, 3915, 3915, \
3915, 3915, 3915, 3915, 3916, 3916, 3916, 3916, 3916, 3916, 3916, 3916, 3917, 3917, 3917, 3917, 3917, 3917, 3917, 3917, \
3918, 3918, 3918, 3918, 3918, 3918, 3918, 3918, 3919, 3919, 3919, 3919, 3919, 3919, 3919, 3919, 3919, 3920, 3920, 3920, \
3920, 3920, 3920, 3920, 3920, 3921, 3921, 3921, 3921, 3921, 3921, 3921, 3921, 3922, 3922, 3922, 3922, 3922, 3922, 3922, \
3922, 3922, 3923, 3923, 3923, 3923, 3923, 3923, 3923, 3923, 3923, 3924, 3924, 3924, 3924, 3924, 3924, 3924, 3924, 3925, \
3925, 3925, 3925, 3925, 3925, 3925, 3925, 3925, 3926, 3926, 3926, 3926, 3926, 3926, 3926, 3926, 3926, 3927, 3927, 3927, \
3927, 3927, 3927, 3927, 3927, 3927, 3928, 3928, 3928, 3928, 3928, 3928, 3928, 3928, 3928, 3929, 3929, 3929, 3929, 3929, \
3929, 3929, 3929, 3929, 3929, 3930, 3930, 3930, 3930, 3930, 3930, 3930, 3930, 3930, 3931, 3931, 3931, 3931, 3931, 3931, \
3931, 3931, 3931, 3931, 3932, 3932, 3932, 3932, 3932, 3932, 3932, 3932, 3932, 3933, 3933, 3933, 3933, 3933, 3933, 3933, \
3933, 3933, 3933, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3935, 3935, 3935, 3935, 3935, 3935, 3935, \
3935, 3935, 3935, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3937, 3937, 3937, 3937, 3937, 3937, 3937, \
3937, 3937, 3937, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3939, 3939, 3939, 3939, 3939, 3939, \
3939, 3939, 3939, 3939, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3941, 3941, 3941, 3941, 3941, \
3941, 3941, 3941, 3941, 3941, 3941, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3943, 3943, 3943, \
3943, 3943, 3943, 3943, 3943, 3943, 3943, 3943, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3945, \
3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3946, 3946, 3946, 3946, 3946, 3946, 3946, 3946, 3946, \
3946, 3946, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3948, 3948, 3948, 3948, 3948, 3948, \
3948, 3948, 3948, 3948, 3948, 3948, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3950, \
3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3951, 3951, 3951, 3951, 3951, 3951, 3951, 3951, 3951, \
3951, 3951, 3951, 3951, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3953, 3953, 3953, \
3953, 3953, 3953, 3953, 3953, 3953, 3953, 3953, 3953, 3953, 3954, 3954, 3954, 3954, 3954, 3954, 3954, 3954, 3954, 3954, \
3954, 3954, 3954, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3956, 3956, 3956, \
3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3957, 3957, 3957, 3957, 3957, 3957, 3957, 3957, 3957, \
3957, 3957, 3957, 3957, 3957, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, \
3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3960, 3960, 3960, 3960, 3960, \
3960, 3960, 3960, 3960, 3960, 3960, 3960, 3960, 3960, 3960, 3961, 3961, 3961, 3961, 3961, 3961, 3961, 3961, 3961, 3961, \
3961, 3961, 3961, 3961, 3961, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, \
3962, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3964, 3964, 3964, \
3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3965, 3965, 3965, 3965, 3965, 3965, \
3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3966, 3966, 3966, 3966, 3966, 3966, 3966, 3966, 3966, \
3966, 3966, 3966, 3966, 3966, 3966, 3966, 3966, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, \
3967, 3967, 3967, 3967, 3967, 3967, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, \
3968, 3968, 3968, 3968, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, \
3969, 3969, 3969, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, \
3970, 3970, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, \
3971, 3971, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, \
3972, 3972, 3972, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, \
3973, 3973, 3973, 3973, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, \
3974, 3974, 3974, 3974, 3974, 3974, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, \
3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, \
3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, \
3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3978, 3978, 3978, 3978, \
3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, \
3978, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, \
3979, 3979, 3979, 3979, 3979, 3979, 3979, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, \
3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3981, 3981, 3981, 3981, 3981, 3981, \
3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, \
3981, 3981, 3981, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, \
3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3983, 3983, 3983, 3983, 3983, 3983, 3983, \
3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, \
3983, 3983, 3983, 3983, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, \
3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3985, 3985, 3985, \
3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, \
3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, \
3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, \
3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, \
3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, \
3987, 3987, 3987, 3987, 3987, 3987, 3987, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, \
3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, \
3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, \
3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, \
3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3990, 3990, 3990, 3990, 3990, 3990, 3990, \
3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, \
3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, \
3990, 3990, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, \
3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, \
3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3992, 3992, 3992, 3992, 3992, 3992, \
3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, \
3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, \
3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, \
3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, \
3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, \
3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000 ])
|
nick-youngblut/SIPSim
|
SIPSim/rtnorm.py
|
Python
|
mit
| 203,109
|
[
"Gaussian"
] |
3090bf5dc13edbb4f194b35e63b8faea428a7103917ad1dc94f8119422075868
|
# special hop classes that have not been moved into MDAnalysis.analysis.density
# Already changed msg --> logger and various other bits and pieces.
import numpy # need v >= 1.0
import sys
import os,os.path,errno
import cPickle
import warnings
from gridData import Grid, OpenDX # http://github.com/orbeckst/GridDataFormats
import MDAnalysis
from MDAnalysis.core.util import fixedwidth_bins, iterable, asiterable
from MDAnalysis import NoDataError
import logging
logger = logging.getLogger("MDAnalysis.analysis.density")
class DensityCollector(object):
    """Collect subsequent coordinate frames to build up a :class:`Density`.

    Usage: construct, call :meth:`init_histogram` once, then
    :meth:`collect` for every frame, :meth:`finish` to average, and
    :meth:`Density` to obtain the result.
    """
    # Use the KD-tree accelerated "not within cutoff" selection in BULK mode.
    use_kdtree = True

    def __init__(self, name, universe, **kwargs):
        """Set up a collector for one selection.

        :Arguments:
           *name*
               label for this collector (e.g. "solvent" or "bulk")
           *universe*
               a proper :class:`MDAnalysis.Universe`
        :Keywords:
           delta, atomselection, cutoff, soluteselection, padding,
           metadata, parameters -- see :class:`DensityCreator`.
        :Raises: :exc:`TypeError` if *universe* is not Universe-like.
        """
        self.name = name
        try:
            # Duck-type check: a real Universe supports selections and has
            # a trajectory with a current timestep.
            universe.selectAtoms('all')
            universe.trajectory.ts
        except AttributeError:
            raise TypeError("The universe must be a proper MDAnalysis.Universe instance.")
        self.universe = u = universe
        self.delta = kwargs.pop('delta', 1.0)
        self.atomselection = kwargs.pop('atomselection', 'name OH2')
        self.cutoff = kwargs.pop('cutoff', 3.5)
        self.soluteselection = kwargs.pop('soluteselection', None) #'protein and not name H*')
        self.padding = kwargs.pop('padding', 2.0)
        self.metadata = kwargs.pop('metadata', {})
        self.parameters = kwargs.pop('parameters',{}) # for advanced fiddling...
        # define the self.current_coordinates() function ... monkey patching!
        if self.cutoff > 0 and self.soluteselection is not None:
            # special fast selection for '<atomsel> not within <cutoff> of <solutesel>'
            # NOTE(review): notwithin_coordinates_factory is not defined in this
            # module as shown -- presumably provided elsewhere; confirm import.
            notwithin_coordinates = notwithin_coordinates_factory(u,self.atomselection,self.soluteselection,
                                                                  self.cutoff,use_kdtree=self.use_kdtree)
            self.current_coordinates = notwithin_coordinates
            self.mode = "BULK"
        else:
            group = u.selectAtoms(self.atomselection)
            self.current_coordinates = group.coordinates
            self.mode = "SOLVENT"
        coord = self.current_coordinates()
        logger.info("%-10s: Selected %d atoms out of %d atoms (%s) from %d total." %
                    (self.name, coord.shape[0],len(u.selectAtoms(self.atomselection)),
                     self.atomselection,len(u.atoms)))
        self.__complete = False

    def init_histogram(self, **kwargs):
        """Allocate the 3D histogram grid.

        :Keywords:
           *smin*, *smax*
               lower/upper corner of the histogramming region; default to
               the current selection's bounding box plus *padding*.
        """
        # needs to be done separately because we might need additional information
        # after init (at least I cannot think of a better way...)
        smin = kwargs.pop("smin", self.min_coordinates(padding=self.padding))
        smax = kwargs.pop("smax", self.max_coordinates(padding=self.padding))
        BINS = fixedwidth_bins(self.delta, smin, smax)
        self.arange = zip(BINS['min'],BINS['max'])
        self.bins = BINS['Nbins']
        # create empty grid with the right dimensions (and get the edges)
        grid,edges = numpy.histogramdd(numpy.zeros((1,3)), bins=self.bins,
                                       range=self.arange, normed=False)
        grid *= 0.0
        h = grid.copy()
        self.grid = grid
        self.edges = edges
        self._h = h # temporary for accumulation

    def min_coordinates(self, **kwargs):
        """Smallest coordinates of the current selection minus padding."""
        return numpy.min(self.current_coordinates(), axis=0) - kwargs.pop('padding', self.padding)

    def max_coordinates(self, **kwargs):
        """Largest coordinates of the current selection plus padding."""
        return numpy.max(self.current_coordinates(), axis=0) + kwargs.pop('padding', self.padding)

    def collect(self):
        """Histogram the current frame into the running grid.

        :Returns: number of atoms histogrammed for this frame.
        """
        assert hasattr(self, 'grid'), "init_histogram() must be called first"
        coord = self.current_coordinates()
        if len(coord) > 0:
            self._h[:],self.edges[:] = numpy.histogramdd(coord, bins=self.bins, range=self.arange, normed=False)
            self.grid += self._h # accumulate average histogram
        return len(coord)

    def finish(self):
        """Average the accumulated histogram over the number of frames.

        Idempotent: does nothing once the collector is complete.
        """
        if self.isComplete():
            return
        u = self.universe
        # NOTE(review): under Python 2 this is integer division when skip > 1;
        # confirm truncation is intended.
        numframes = u.trajectory.numframes / u.trajectory.skip
        self.grid /= float(numframes)
        self.__complete = True

    def Density(self):
        """Return a :class:`Density` from the data."""
        if not hasattr(self, 'grid'):
            raise NoDataError("No data for density available. Run collect() first.")
        u = self.universe
        # Record provenance of the density in its metadata.
        metadata = self.metadata
        metadata['collector'] = self.name
        metadata['collector_mode'] = self.mode
        metadata['topology'] = u.filename
        metadata['trajectory'] = u.trajectory.filename
        metadata['atomselection'] = self.atomselection
        metadata['numframes'] = u.trajectory.numframes
        metadata['dt'] = u.trajectory.dt # in ps for default MDAnalysis
        # totaltime should be in MDAnalysis!
        metadata['totaltime'] = round(u.trajectory.numframes * metadata['dt'] * u.trajectory.skip_timestep, 3)
        metadata['time_unit'] = MDAnalysis.core.flags['time_unit'] # just to make sure we know it...
        metadata['skip_timestep'] = u.trajectory.skip_timestep # frames
        metadata['delta'] = u.trajectory.delta # in native units (?)
        if self.mode == 'BULK':
            metadata['soluteselection'] = self.soluteselection
            metadata['cutoff'] = self.cutoff # in Angstrom
        parameters = self.parameters
        parameters['isDensity'] = False # must override
        # Density automatically converts histogram to density for isDensity=False
        g = Density(grid=self.grid, edges=self.edges,
                    unit=dict(length=MDAnalysis.core.flags['length_unit']),
                    parameters=parameters, metadata=metadata)
        logger.info("%-10s: Histogram completed (initial density in %s**-3)" % (self.name, MDAnalysis.core.flags['length_unit']))
        return g

    def isComplete(self):
        """True once :meth:`finish` has averaged the grid."""
        return self.__complete

    def __repr__(self):
        if self.mode == "BULK":
            return "<DensityCollector %(name)r, delta=%(delta).1f A: "\
                   "'%(atomselection)s and not around %(cutoff).1f (%(soluteselection)s)'>" % vars(self)
        else:
            return "<DensityCollector %(name)r, delta=%(delta).1f A: %(atomselection)r>" % vars(self)
class DensityCreator(object):
    """Drive one or more :class:`DensityCollector` passes over a trajectory."""
    # Recognized operation modes; "all" runs both "bulk" and "solvent".
    modes = ("all", "bulk", "solvent")
    defaults = {'cutoff': 3.5,
                'soluteselection': "protein and not name H*",
                'delta':1.0, 'atomselection': "name OH2",
                'padding': 2.0,
                }

    def __init__(self, *args, **kwargs):
        """Create a density grid from a trajectory.

        density_from_trajectory(PSF, DCD, delta=1.0, atomselection='name OH2', ...) --> density

        or

        density_from_trajectory(PDB, XTC, delta=1.0, atomselection='name OH2', ...) --> density

        :Arguments:
          psf/pdb/gro
            topology file
          dcd/xtc/trr/pdb
            trajectory; if reading a single PDB file it is sufficient to just provide it
            once as a single argument

        :Keywords:
          mode
            'solvent', 'bulk' or 'all' ('all' does both 'solvent' and 'bulk' at the
            same time and thus :meth:`DensityCreator.Density` returns a list of
            densities; this saves time!) ['all']
          atomselection
            selection string (MDAnalysis syntax) for the species to be analyzed
            ["name OH2"]
          delta
            approximate bin size for the density grid in Angstroem (same in x,y,z)
            (It is slightly adjusted when the box length is not an integer multiple
            of delta.) [1.0]
          metadata
            dictionary of additional data to be saved with the object
          padding
            increase histogram dimensions by padding (on top of initial box size)
            in Angstroem [2.0]
          soluteselection
            MDAnalysis selection for the solute, e.g. "protein" [``None``]
          cutoff
            With *cutoff*, select '<atomsel> NOT WITHIN <cutoff> OF <soluteselection>'
            (Special routines that are faster than the standard AROUND selection) [0]
          verbosity: int
            level of chattiness; 0 is silent, 3 is verbose [3]

        :Returns: :class:`hop.sitemap.Density`

        :TODO:
          * Should be able to also set skip and start/stop for data collection.

        .. Note::
            * In order to calculate the bulk density, use
                  atomselection='name OH2',soluteselection='protein and not name H*',cutoff=3.5
              This will select water oxygens not within 3.5 A of the protein heavy atoms.
              Alternatively, use the VMD-based  :func:`density_from_volmap` function.
            * The histogramming grid is determined by the initial frames min and max.
            * metadata will be populated with psf, dcd, and a few other items.
              This allows more compact downstream processing.
        """
        _kwargs = self.defaults.copy()
        _kwargs.update(kwargs)
        kwargs = _kwargs
        # workaround for python 2.5 *args,**kwargs only allowed:
        universe_kwargs = {'permissive':kwargs.pop('permissive',False)}
        self.universe = MDAnalysis.asUniverse(*args, **universe_kwargs)
        self.mode = kwargs.pop("mode", "all") # 'all' runs modes[1:]
        if not self.mode in self.modes:
            raise ValueError("mode must be one of %r, not %r" % (self.modes, self.mode))
        if self.mode == "all":
            modes = self.modes[1:]
        else:
            modes = [self.mode]
        self.collectors = []
        min_coords = []
        max_coords = []
        for mode in modes:
            modeargs = kwargs.copy()
            if mode == "solvent":
                # Plain solvent pass: disable the "not near solute" filtering.
                modeargs['soluteselection'] = None
                modeargs['cutoff'] = 0
            c = DensityCollector(mode, self.universe, **modeargs)
            self.collectors.append(c)
            min_coords.append(c.min_coordinates()) # with default padding from modeargs
            max_coords.append(c.max_coordinates())
        # determine maximum bounding box from initial positions of solvent
        # (add generous padding... probably more than my default 2 A)
        smin = numpy.sort(min_coords, axis=0)[0] # the three smallest values
        smax = numpy.sort(max_coords, axis=0)[-1] # the three largest values
        for c in self.collectors:
            c.init_histogram(smin=smin, smax=smax) # also guarantees compatible grid
        self.densities = {} # densities will be stored with mode as key

    def create(self):
        """Iterate through the trajectory once, feeding every collector.

        :Returns: dict of mode name -> :class:`Density`.
        """
        u = self.universe
        for ts in u.trajectory:
            status = ["Histograming"]
            for c in self.collectors:
                natoms = c.collect()
                status.append("%s=%d" % (c.name, natoms))
            # Progress report every 10 frames and on the last frame.
            if u.trajectory.ts.frame % 10 == 0 or \
               u.trajectory.ts.frame == u.trajectory.numframes:
                message = " ".join(status)
                message += " atoms in frame %5d/%d [%5.1f%%]\r" % (
                    u.trajectory.ts.frame,
                    u.trajectory.numframes,
                    100.0*u.trajectory.ts.frame/u.trajectory.numframes)
                print message
        print
        self.densities = {}
        for c in self.collectors:
            c.finish()
            self.densities[c.name] = c.Density()
        # should save precious files!!!
        return self.densities

    def DensityWithBulk(self, density_unit='water', solvent_threshold=2.72, bulk_threshold=0.6):
        """Return a solvent density with bulk site inserted.

        DensityWithBulk(self, solvent_threshold=2.72, bulk_threshold=0.6) --> Density

        Only works if two densities were generated that are named 'solvent' and
        'bulk' (this is the default for the *mode* = "all" keyword for
        :class:`DensityCreator`.)

        :Arguments:
          *density_unit*
              Measure density in multiples of this unit; possible values are
              'Molar', 'nm', 'Angstrom', or the density at standard conditions
              of 'water' (experimental value), 'TIP3P', 'TIP4P', 'SPC' ['water']
          *solvent_threshold*
              Hydration sites are considered as regions of density > this
              threshold; it is assumed to be given in the *density_unit*.
          *bulk_threshold*
              The bulk site is the largest region with a density >
              *bulk_threshold*; in order to avoid overlap with the hydration
              sites, it is necessary to use a special selection for the solvent
              that excludes it from the vicinity of the solute.

        .. SeeAlso:: This method uses :meth:`hop.sitemap.Density.map_sites` and
                     :meth:`hop.sitemap.Density.site_insert_bulk`.
        """
        if len(self.densities) != 2:
            raise NoDataError("Need exactly two densities.")
        try:
            solvent = self.densities['solvent']
            bulk = self.densities['bulk']
        except KeyError:
            raise NoDataError("Need a 'solvent' and a 'bulk' density in %s.densities" %
                              self.__class__.__name__)
        # Convert both to the same unit before thresholding.
        solvent.convert_density(density_unit)
        solvent.map_sites(solvent_threshold)
        bulk.convert_density(density_unit)
        bulk.map_sites(bulk_threshold)
        # ye olde bulk-hack....
        solvent.site_insert_bulk(bulk)
        # should really save
        # solvent.save()
        return solvent
|
iwelland/hop
|
hop/density_newmda.py
|
Python
|
lgpl-3.0
| 13,736
|
[
"MDAnalysis",
"VMD"
] |
557ccd7ed122856b9e33c0533aa7eb559e58b6431c5bd049f789f5879c340c2b
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Run this test like so:
# vtkpython TestParallelCoordinatesColors.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestParallelCoordinatesColors(vtk.test.Testing.vtkTest):
    """Regression test: a parallel-coordinates chart colored through a LUT."""

    def testLinePlot(self):
        "Test if colored parallel coordinates plots can be built with python"

        # Set up a 2D scene, add a PC chart to it
        view = vtk.vtkContextView()
        view.GetRenderer().SetBackground(1.0, 1.0, 1.0)
        view.GetRenderWindow().SetSize(600,300)
        chart = vtk.vtkChartParallelCoordinates()
        view.GetScene().AddItem(chart)

        # Create a table with some points in it
        arrX = vtk.vtkFloatArray()
        arrX.SetName("XAxis")
        arrC = vtk.vtkFloatArray()
        arrC.SetName("Cosine")
        arrS = vtk.vtkFloatArray()
        arrS.SetName("Sine")
        arrS2 = vtk.vtkFloatArray()
        arrS2.SetName("Tan")
        # Sample cos/sin/tan over [0, 7.5] at 200 points.
        numPoints = 200
        inc = 7.5 / (numPoints-1)
        for i in range(numPoints):
            arrX.InsertNextValue(i * inc)
            arrC.InsertNextValue(math.cos(i * inc) + 0.0)
            arrS.InsertNextValue(math.sin(i * inc) + 0.0)
            arrS2.InsertNextValue(math.tan(i * inc) + 0.5)
        table = vtk.vtkTable()
        table.AddColumn(arrX)
        table.AddColumn(arrC)
        table.AddColumn(arrS)
        table.AddColumn(arrS2)

        # Create blue to gray to red lookup table
        lut = vtk.vtkLookupTable()
        lutNum = 256
        lut.SetNumberOfTableValues(lutNum)
        lut.Build()
        ctf = vtk.vtkColorTransferFunction()
        ctf.SetColorSpaceToDiverging()
        cl = []
        # Variant of Colorbrewer RdBu 5
        cl.append([float(cc)/255.0 for cc in [202, 0, 32]])
        cl.append([float(cc)/255.0 for cc in [244, 165, 130]])
        cl.append([float(cc)/255.0 for cc in [140, 140, 140]])
        cl.append([float(cc)/255.0 for cc in [146, 197, 222]])
        cl.append([float(cc)/255.0 for cc in [5, 113, 176]])
        # Place the five control colors evenly on [0, 1]; reversed so the
        # list order maps red -> high values.
        vv = [float(xx)/float(len(cl)-1) for xx in range(len(cl))]
        vv.reverse()
        for pt,color in zip(vv,cl):
            ctf.AddRGBPoint(pt, color[0], color[1], color[2])
        # Sample the continuous transfer function into the discrete LUT.
        for ii,ss in enumerate([float(xx)/float(lutNum) for xx in range(lutNum)]):
            cc = ctf.GetColor(ss)
            lut.SetTableValue(ii,cc[0],cc[1],cc[2],1.0)
        lut.SetAlpha(0.25)
        lut.SetRange(-1, 1)

        # Color the plot by the "Cosine" column through the lookup table.
        chart.GetPlot(0).SetInputData(table)
        chart.GetPlot(0).SetScalarVisibility(1)
        chart.GetPlot(0).SetLookupTable(lut)
        chart.GetPlot(0).SelectColorArray("Cosine")

        view.GetRenderWindow().SetMultiSamples(0)
        view.GetRenderWindow().Render()

        # Compare the rendered frame against the stored baseline image.
        img_file = "TestParallelCoordinatesColors.png"
        vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
        vtk.test.Testing.interact()
if __name__ == "__main__":
    # Hand control to VTK's test driver (parses the -D/-B data/baseline args).
    vtk.test.Testing.main([(TestParallelCoordinatesColors, 'test')])
|
aashish24/VTK-old
|
Charts/Core/Testing/Python/TestParallelCoordinatesColors.py
|
Python
|
bsd-3-clause
| 3,087
|
[
"VTK"
] |
7c70736b87f006b60c0a5324faed58410af18633133b18280da8c83b3a45c3e9
|
import unittest
# https://www.interviewcake.com/question/python3/mesh-message
# MeshMessage problem from InterviewCake.
# Essentially a breath-first search that keeps track of previous parent.
from collections import deque
def get_path(graph, start_node, end_node):
    """Return the shortest path from start_node to end_node as a list of nodes.

    Performs a breadth-first search over *graph* (an adjacency-list dict
    mapping node -> list of neighbors) while recording each node's parent,
    then reconstructs the route by walking the parent links backwards.

    Returns ``None`` when the two nodes are not connected.
    Raises ``Exception`` if either endpoint is not a key in *graph*.
    """
    if start_node not in graph:
        raise Exception(f'{start_node} not in graph')
    if end_node not in graph:
        raise Exception(f'{end_node} not in graph')
    # Find the shortest route in the network between the two users.
    queue = deque([start_node])
    # parents maps every discovered node to the node it was reached from;
    # the start node's parent is None, which terminates reconstruction.
    parents = {start_node: None}
    while queue:
        node = queue.popleft()  # BFS: FIFO order guarantees shortest path
        if node == end_node:
            # Walk the parent chain back to the start, then reverse it.
            path = []
            while node is not None:
                path.append(node)
                node = parents[node]
            path.reverse()
            return path
        for neighbor in graph[node]:
            if neighbor not in parents:  # not yet discovered
                parents[neighbor] = node
                queue.append(neighbor)
    return None
# Tests
class Test(unittest.TestCase):
    """Tests for get_path: multi-hop, one-hop, zero-hop, disconnected and
    missing-node cases."""

    def setUp(self):
        # Two connected components: {a,b,c,d,e} (hub 'a') and {f,g}.
        self.graph = {
            'a': ['b', 'c', 'd'],
            'b': ['a', 'd'],
            'c': ['a', 'e'],
            'd': ['a', 'b'],
            'e': ['c'],
            'f': ['g'],
            'g': ['f'],
        }

    def test_two_hop_path_1(self):
        actual = get_path(self.graph, 'a', 'e')
        expected = ['a', 'c', 'e']
        self.assertEqual(actual, expected)

    def test_two_hop_path_2(self):
        actual = get_path(self.graph, 'd', 'c')
        expected = ['d', 'a', 'c']
        self.assertEqual(actual, expected)

    def test_one_hop_path_1(self):
        actual = get_path(self.graph, 'a', 'c')
        expected = ['a', 'c']
        self.assertEqual(actual, expected)

    def test_one_hop_path_2(self):
        actual = get_path(self.graph, 'f', 'g')
        expected = ['f', 'g']
        self.assertEqual(actual, expected)

    def test_one_hop_path_3(self):
        actual = get_path(self.graph, 'g', 'f')
        expected = ['g', 'f']
        self.assertEqual(actual, expected)

    def test_zero_hop_path(self):
        # Start equals end: path is the single node itself.
        actual = get_path(self.graph, 'a', 'a')
        expected = ['a']
        self.assertEqual(actual, expected)

    def test_no_path(self):
        # 'a' and 'f' are in different components.
        actual = get_path(self.graph, 'a', 'f')
        expected = None
        self.assertEqual(actual, expected)

    def test_start_node_not_present(self):
        with self.assertRaises(Exception):
            get_path(self.graph, 'h', 'a')

    def test_end_node_not_present(self):
        with self.assertRaises(Exception):
            get_path(self.graph, 'a', 'h')
# Run the suite when the file is executed (no __main__ guard in this script).
unittest.main(verbosity=2)
|
jackchi/interview-prep
|
graph/graph-shortest-path.py
|
Python
|
mit
| 2,958
|
[
"VisIt"
] |
6e097a3ab56e825cf8d38f5b65e11314f0a392d3f321f1034f39dbaca9b50795
|
"""
Code for managing the implementation cache.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from zeroinstall import _
import os
from logging import debug, info, warn
from zeroinstall.support import basedir
from zeroinstall import SafeException, support
class BadDigest(SafeException):
    """Thrown if a digest is invalid (either syntactically or cryptographically)."""
    # Optional extra diagnostic information attached by the raiser.
    detail = None
class NotStored(SafeException):
    """Thrown if a requested implementation isn't in the cache."""
class NonwritableStore(SafeException):
    """Attempt to add to a non-writable store directory."""
def _copytree2(src, dst):
import shutil
names = os.listdir(src)
assert os.path.isdir(dst)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
os.mkdir(dstname)
mtime = int(os.lstat(srcname).st_mtime)
_copytree2(srcname, dstname)
os.utime(dstname, (mtime, mtime))
else:
shutil.copy2(srcname, dstname)
class Store:
    """A directory for storing implementations."""

    def __init__(self, dir, public = False):
        """Create a new Store.
        @param dir: directory to contain the implementations
        @type dir: str
        @param public: deprecated
        @type public: bool"""
        self.dir = dir

    def __str__(self):
        return _("Store '%s'") % self.dir

    def lookup(self, digest):
        """Return the directory of the cached implementation, or None.
        @param digest: digest of the form ALG=HEXVALUE
        @raise BadDigest: if the digest is malformed"""
        try:
            alg, value = digest.split('=', 1)
        except ValueError:
            raise BadDigest(_("Digest must be in the form ALG=VALUE, not '%s'") % digest)
        try:
            # Reject path separators and non-hex values before building a
            # filesystem path (guards against directory traversal).
            assert '/' not in value
            int(value, 16) # Check valid format
        except ValueError, ex:
            raise BadDigest(_("Bad value for digest: %s") % str(ex))
        dir = os.path.join(self.dir, digest)
        if os.path.isdir(dir):
            return dir
        return None

    def get_tmp_dir_for(self, required_digest):
        """Create a temporary directory in the directory where we would store an implementation
        with the given digest. This is used to setup a new implementation before being renamed if
        it turns out OK.
        @raise NonwritableStore: if we can't create it"""
        try:
            if not os.path.isdir(self.dir):
                os.makedirs(self.dir)
            from tempfile import mkdtemp
            tmp = mkdtemp(dir = self.dir, prefix = 'tmp-')
            os.chmod(tmp, 0755)	# r-x for all; needed by 0store-helper
            return tmp
        except OSError, ex:
            raise NonwritableStore(str(ex))

    def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, try_helper = False):
        """Unpack *data* (an archive stream from *url*) and store it under
        *required_digest* after verifying the manifest."""
        import unpack
        info(_("Caching new implementation (digest %s) in %s"), required_digest, self.dir)
        if self.lookup(required_digest):
            info(_("Not adding %s as it already exists!"), required_digest)
            return
        tmp = self.get_tmp_dir_for(required_digest)
        try:
            unpack.unpack_archive(url, data, tmp, extract, type = type, start_offset = start_offset)
        except:
            # Unpacking failed: remove the partial extraction before re-raising.
            import shutil
            shutil.rmtree(tmp)
            raise
        try:
            self.check_manifest_and_rename(required_digest, tmp, extract, try_helper = try_helper)
        except Exception:
            # Keep the directory for debugging a digest mismatch.
            warn(_("Leaving extracted directory as %s"), tmp)
            raise

    def add_dir_to_cache(self, required_digest, path, try_helper = False):
        """Copy the contents of path to the cache.
        @param required_digest: the expected digest
        @type required_digest: str
        @param path: the root of the tree to copy
        @type path: str
        @param try_helper: attempt to use privileged helper before user cache (since 0.26)
        @type try_helper: bool
        @raise BadDigest: if the contents don't match the given digest."""
        if self.lookup(required_digest):
            info(_("Not adding %s as it already exists!"), required_digest)
            return
        tmp = self.get_tmp_dir_for(required_digest)
        try:
            _copytree2(path, tmp)
            self.check_manifest_and_rename(required_digest, tmp, try_helper = try_helper)
        except:
            warn(_("Error importing directory."))
            warn(_("Deleting %s"), tmp)
            support.ro_rmtree(tmp)
            raise

    def _add_with_helper(self, required_digest, path):
        """Use 0store-secure-add to copy 'path' to the system store.
        @param required_digest: the digest for path
        @type required_digest: str
        @param path: root of implementation directory structure
        @type path: str
        @return: True iff the directory was copied into the system cache successfully
        """
        if required_digest.startswith('sha1='):
            return False		# Old digest alg not supported
        helper = support.find_in_path('0store-secure-add-helper')
        if not helper:
            info(_("'0store-secure-add-helper' command not found. Not adding to system cache."))
            return False
        import subprocess
        env = os.environ.copy()
        env['ENV_NOT_CLEARED'] = 'Unclean'	# (warn about insecure configurations)
        env['HOME'] = 'Unclean'	# (warn about insecure configurations)
        dev_null = os.open('/dev/null', os.O_RDONLY)
        try:
            info(_("Trying to add to system cache using %s"), helper)
            child = subprocess.Popen([helper, required_digest],
                                     stdin = dev_null,
                                     cwd = path,
                                     env = env)
            exit_code = child.wait()
        finally:
            os.close(dev_null)
        if exit_code:
            warn(_("0store-secure-add-helper failed."))
            return False
        info(_("Added succcessfully."))
        return True

    def check_manifest_and_rename(self, required_digest, tmp, extract = None, try_helper = False):
        """Check that tmp[/extract] has the required_digest.
        On success, rename the checked directory to the digest, and
        make the whole tree read-only.
        @param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
        @type try_helper: bool
        @raise BadDigest: if the input directory doesn't match the given digest"""
        if extract:
            extracted = os.path.join(tmp, extract)
            if not os.path.isdir(extracted):
                raise Exception(_('Directory %s not found in archive') % extract)
        else:
            extracted = tmp
        import manifest
        # Normalize permissions before hashing so the digest is reproducible.
        manifest.fixup_permissions(extracted)
        alg, required_value = manifest.splitID(required_digest)
        actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
        if actual_digest != required_digest:
            raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
                              'Required digest: %(required_digest)s\n'
                              'Actual digest: %(actual_digest)s\n') %
                            {'required_digest': required_digest, 'actual_digest': actual_digest})
        if try_helper:
            if self._add_with_helper(required_digest, extracted):
                support.ro_rmtree(tmp)
                return
            info(_("Can't add to system store. Trying user store instead."))
        final_name = os.path.join(self.dir, required_digest)
        if os.path.isdir(final_name):
            raise Exception(_("Item %s already stored.") % final_name) # XXX: not really an error
        # If we just want a subdirectory then the rename will change
        # extracted/.. and so we'll need write permission on 'extracted'
        os.chmod(extracted, 0755)
        os.rename(extracted, final_name)
        os.chmod(final_name, 0555)
        if extract:
            os.rmdir(tmp)

    def __repr__(self):
        return "<store: %s>" % self.dir
class Stores(object):
    """A list of L{Store}s. All stores are searched when looking for an implementation.
    When storing, we use the first of the system caches (if writable), or the user's
    cache otherwise."""
    __slots__ = ['stores']

    def __init__(self):
        # The user's own cache always comes first (index 0).
        user_store = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
        self.stores = [Store(user_store)]

        # System stores come from the 'implementation-dirs' config file,
        # falling back to platform defaults.
        impl_dirs = basedir.load_first_config('0install.net', 'injector',
                                              'implementation-dirs')
        debug(_("Location of 'implementation-dirs' config file being used: '%s'"), impl_dirs)
        if impl_dirs:
            # Python 2 file(): iterating yields one line per directory.
            dirs = file(impl_dirs)
        else:
            if os.name == "nt":
                from win32com.shell import shell, shellcon
                localAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, 0, 0)
                commonAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_COMMON_APPDATA, 0, 0)
                userCache = os.path.join(localAppData, "0install.net", "implementations")
                sharedCache = os.path.join(commonAppData, "0install.net", "implementations")
                dirs = [userCache, sharedCache]
            else:
                dirs = ['/var/cache/0install.net/implementations']
        for directory in dirs:
            directory = directory.strip()
            # Blank lines and '#' comments in the config file are skipped.
            if directory and not directory.startswith('#'):
                debug(_("Added system store '%s'"), directory)
                self.stores.append(Store(directory))

    def lookup(self, digest):
        """Search for a single digest in all stores (see lookup_any)."""
        return self.lookup_any([digest])

    def lookup_any(self, digests):
        """Search for digest in all stores.
        @return: path of the first match
        @raise NotStored: if no store contains any of the digests"""
        assert digests
        for digest in digests:
            assert digest
            if '/' in digest or '=' not in digest:
                raise BadDigest(_('Syntax error in digest (use ALG=VALUE, not %s)') % digest)
            for store in self.stores:
                path = store.lookup(digest)
                if path:
                    return path
        raise NotStored(_("Item with digests '%(digests)s' not found in stores. Searched:\n- %(stores)s") %
                        {'digests': digests, 'stores': '\n- '.join([s.dir for s in self.stores])})

    def add_dir_to_cache(self, required_digest, dir):
        """Add to the best writable cache.
        @see: L{Store.add_dir_to_cache}"""
        self._write_store(lambda store, **kwargs: store.add_dir_to_cache(required_digest, dir, **kwargs))

    def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0):
        """Add to the best writable cache.
        @see: L{Store.add_archive_to_cache}"""
        self._write_store(lambda store, **kwargs: store.add_archive_to_cache(required_digest,
                                                  data, url, extract, type = type, start_offset = start_offset, **kwargs))

    def _write_store(self, fn):
        """Call fn(first_system_store). If it's read-only, try again with the user store."""
        if len(self.stores) > 1:
            try:
                fn(self.get_first_system_store())
                return
            except NonwritableStore:
                debug(_("%s not-writable. Trying helper instead."), self.get_first_system_store())
                pass
        # Fall back to the user store, allowing the privileged helper a go.
        fn(self.stores[0], try_helper = True)

    def get_first_system_store(self):
        """The first system store is the one we try writing to first.
        @since: 0.30"""
        try:
            return self.stores[1]
        except IndexError:
            raise SafeException(_("No system stores have been configured"))
|
pombredanne/zero-install
|
zeroinstall/zerostore/__init__.py
|
Python
|
lgpl-2.1
| 10,114
|
[
"VisIt"
] |
5ae1a93d1c93dfcf50128949f67bc8e4e1f67cd285cd407d075c52b2b627f60f
|
"""Module symbol-table generator"""
from compiler import ast
from compiler.consts import SC_LOCAL, SC_GLOBAL_IMPLICIT, SC_GLOBAL_EXPLICT, \
SC_FREE, SC_CELL, SC_UNKNOWN
from compiler.misc import mangle
import types
import sys
MANGLE_LEN = 256
class Scope:
    """Symbol-table information for one lexical scope.

    Tracks which names are defined, used, declared global, passed as
    parameters, free (resolved in an enclosing function scope), or cell
    variables (referenced freely by a nested scope).
    """
    # XXX how much information do I need about each name?
    def __init__(self, name, module, klass=None):
        self.name = name        # human-readable scope name
        self.module = module    # the enclosing ModuleScope
        self.defs = {}          # names bound in this scope (dict used as a set)
        self.uses = {}          # names referenced in this scope
        self.globals = {}       # names declared 'global' here
        self.params = {}        # formal parameters (subset of defs)
        self.frees = {}         # free variables
        self.cells = {}         # names nested scopes reference freely
        self.children = []      # directly nested scopes
        # nested is true if the class could contain free variables,
        # i.e. if it is nested within another function.
        self.nested = None
        self.generator = None
        self.klass = None
        if klass is not None:
            # Strip leading underscores to get the mangling key for
            # class-private names.
            for i in range(len(klass)):
                if klass[i] != '_':
                    self.klass = klass[i:]
                    break

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

    def mangle(self, name):
        """Apply class-private name mangling when inside a class body."""
        if self.klass is None:
            return name
        return mangle(name, self.klass)

    def add_def(self, name):
        """Record that *name* is bound (assigned) in this scope."""
        self.defs[self.mangle(name)] = 1

    def add_use(self, name):
        """Record that *name* is referenced in this scope."""
        self.uses[self.mangle(name)] = 1

    def add_global(self, name):
        """Record a ``global name`` declaration in this scope."""
        name = self.mangle(name)
        if name in self.uses or name in self.defs:
            pass # XXX warn about global following def/use
        if name in self.params:
            raise SyntaxError, "%s in %s is global and parameter" % \
                  (name, self.name)
        self.globals[name] = 1
        self.module.add_def(name)

    def add_param(self, name):
        """Record a formal parameter (both a definition and a param)."""
        name = self.mangle(name)
        self.defs[name] = 1
        self.params[name] = 1

    def get_names(self):
        # Union of definitions, uses, and global declarations.
        d = {}
        d.update(self.defs)
        d.update(self.uses)
        d.update(self.globals)
        return d.keys()

    def add_child(self, child):
        self.children.append(child)

    def get_children(self):
        return self.children

    def DEBUG(self):
        # Dump the full scope state to stderr (Python 2 print-chevron syntax).
        print >> sys.stderr, self.name, self.nested and "nested" or ""
        print >> sys.stderr, "\tglobals: ", self.globals
        print >> sys.stderr, "\tcells: ", self.cells
        print >> sys.stderr, "\tdefs: ", self.defs
        print >> sys.stderr, "\tuses: ", self.uses
        print >> sys.stderr, "\tfrees:", self.frees

    def check_name(self, name):
        """Return scope of name.

        The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
        """
        if name in self.globals:
            return SC_GLOBAL_EXPLICT
        if name in self.cells:
            return SC_CELL
        if name in self.defs:
            return SC_LOCAL
        if self.nested and (name in self.frees or name in self.uses):
            return SC_FREE
        if self.nested:
            return SC_UNKNOWN
        else:
            return SC_GLOBAL_IMPLICIT

    def get_free_vars(self):
        """Names used here that must be supplied by an enclosing scope."""
        if not self.nested:
            return ()
        free = {}
        free.update(self.frees)
        for name in self.uses.keys():
            if name not in self.defs and name not in self.globals:
                free[name] = 1
        return free.keys()

    def handle_children(self):
        # Propagate each child's free variables upward; any that resolve
        # to globals are pushed back down via force_global().
        for child in self.children:
            frees = child.get_free_vars()
            globals = self.add_frees(frees)
            for name in globals:
                child.force_global(name)

    def force_global(self, name):
        """Force name to be global in scope.

        Some child of the current node had a free reference to name.
        When the child was processed, it was labelled a free
        variable. Now that all its enclosing scope have been
        processed, the name is known to be a global or builtin. So
        walk back down the child chain and set the name to be global
        rather than free.

        Be careful to stop if a child does not think the name is
        free.
        """
        self.globals[name] = 1
        if name in self.frees:
            del self.frees[name]
        for child in self.children:
            if child.check_name(name) == SC_FREE:
                child.force_global(name)

    def add_frees(self, names):
        """Process list of free vars from nested scope.

        Returns a list of names that are either 1) declared global in the
        parent or 2) undefined in a top-level parent. In either case,
        the nested scope should treat them as globals.
        """
        child_globals = []
        for name in names:
            sc = self.check_name(name)
            if self.nested:
                if sc == SC_UNKNOWN or sc == SC_FREE \
                   or isinstance(self, ClassScope):
                    self.frees[name] = 1
                elif sc == SC_GLOBAL_IMPLICIT:
                    child_globals.append(name)
                elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
            else:
                if sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
        return child_globals

    def get_cell_vars(self):
        return self.cells.keys()
class ModuleScope(Scope):
    """The outermost (module/global) scope; it acts as its own module."""

    def __init__(self):
        # The module scope is always named "global" and is its own
        # enclosing module reference.
        Scope.__init__(self, "global", self)
class FunctionScope(Scope):
    # Behaves exactly like the generic Scope; exists so scope kinds can be
    # distinguished with isinstance checks (see Scope.add_frees).
    pass
class GenExprScope(Scope):
    """Scope for a generator expression.

    Each instance gets a unique sequence number in its synthesized name;
    the generator's implicit iterator is bound to the parameter ``.0``.
    """
    __super_init = Scope.__init__

    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # Bug fix: the original ``self.__counter += 1`` only created an
        # instance attribute, so the class-level counter never advanced and
        # every generator expression scope was numbered 1.  Increment the
        # class attribute instead.
        GenExprScope.__counter += 1
        self.__super_init("generator expression<%d>" % i, module, klass)
        # Implicit first argument: the outermost iterable.
        self.add_param('.0')

    def get_names(self):
        keys = Scope.get_names(self)
        return keys
class LambdaScope(FunctionScope):
    """Scope for a lambda; named ``lambda.<n>`` with a unique sequence number."""
    __super_init = Scope.__init__

    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # Bug fix: the original ``self.__counter += 1`` wrote an instance
        # attribute, leaving the class counter stuck at 1 so every lambda
        # scope was named "lambda.1".  Increment the class attribute.
        LambdaScope.__counter += 1
        self.__super_init("lambda.%d" % i, module, klass)
class ClassScope(Scope):
    """Scope for a class body; the class's own name is the mangling key."""
    __super_init = Scope.__init__

    def __init__(self, name, module):
        # Pass the class name as *klass* so private names get mangled.
        self.__super_init(name, module, name)
class SymbolVisitor:
    """AST walker that builds a ``Scope`` for every scope-defining node
    (module, function, class, lambda, generator expression).

    The mapping is accumulated in ``self.scopes`` (AST node -> Scope).
    Python 2 era code: ``ast`` here presumably refers to ``compiler.ast``
    (the ``__main__`` block imports from ``compiler``) and ``types.*Type``
    are the Python 2 type aliases -- TODO confirm against the file's
    imports, which are above this chunk.
    """

    def __init__(self):
        # AST node -> Scope, for every scope-defining node visited.
        self.scopes = {}
        # Name of the class currently being visited, or None.
        self.klass = None

    # node that define new scopes

    def visitModule(self, node):
        scope = self.module = self.scopes[node] = ModuleScope()
        self.visit(node.node, scope)

    visitExpression = visitModule

    def visitFunction(self, node, parent):
        if node.decorators:
            self.visit(node.decorators, parent)
        # The function name is bound in the *enclosing* scope; default
        # values are also evaluated in the enclosing scope.
        parent.add_def(node.name)
        for n in node.defaults:
            self.visit(n, parent)
        scope = FunctionScope(node.name, self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExpr(self, node, parent):
        scope = GenExprScope(self.module, self.klass);
        # A generator expression is nested if its parent is nested or is
        # itself a function or generator-expression scope.
        if parent.nested or isinstance(parent, FunctionScope) \
                or isinstance(parent, GenExprScope):
            scope.nested = 1
        self.scopes[node] = scope
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExprInner(self, node, scope):
        for genfor in node.quals:
            self.visit(genfor, scope)
        self.visit(node.expr, scope)

    def visitGenExprFor(self, node, scope):
        # The assignment target of a "for" qualifier binds names (assign=1).
        self.visit(node.assign, scope, 1)
        self.visit(node.iter, scope)
        for if_ in node.ifs:
            self.visit(if_, scope)

    def visitGenExprIf(self, node, scope):
        self.visit(node.test, scope)

    def visitLambda(self, node, parent, assign=0):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed. The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not assign
        # Default values are evaluated in the enclosing scope.
        for n in node.defaults:
            self.visit(n, parent)
        scope = LambdaScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def _do_args(self, scope, args):
        """Register argument names as parameters, recursing into Python 2
        tuple-unpacking arguments."""
        for name in args:
            if type(name) == types.TupleType:
                self._do_args(scope, name)
            else:
                scope.add_param(name)

    def handle_free_vars(self, scope, parent):
        parent.add_child(scope)
        scope.handle_children()

    def visitClass(self, node, parent):
        # The class name is bound in the enclosing scope; base-class
        # expressions are evaluated there too.
        parent.add_def(node.name)
        for n in node.bases:
            self.visit(n, parent)
        scope = ClassScope(node.name, self.module)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        if node.doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        self.scopes[node] = scope
        # Track the current class name while visiting the body (used for
        # private-name handling), restoring it afterwards.
        prev = self.klass
        self.klass = node.name
        self.visit(node.code, scope)
        self.klass = prev
        self.handle_free_vars(scope, parent)

    # name can be a def or a use
    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.

    def visitName(self, node, scope, assign=0):
        if assign:
            scope.add_def(node.name)
        else:
            scope.add_use(node.name)

    # operations that bind new names

    def visitFor(self, node, scope):
        self.visit(node.assign, scope, 1)
        self.visit(node.list, scope)
        self.visit(node.body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    def visitFrom(self, node, scope):
        for name, asname in node.names:
            # "from m import *" binds no specific names here.
            if name == "*":
                continue
            scope.add_def(asname or name)

    def visitImport(self, node, scope):
        for name, asname in node.names:
            # "import a.b" binds only the top-level package name "a".
            i = name.find(".")
            if i > -1:
                name = name[:i]
            scope.add_def(asname or name)

    def visitGlobal(self, node, scope):
        for name in node.names:
            scope.add_global(name)

    def visitAssign(self, node, scope):
        """Propagate assignment flag down to child nodes.

        The Assign node doesn't itself contains the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.

        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        for n in node.nodes:
            self.visit(n, scope, 1)
        self.visit(node.expr, scope)

    def visitAssName(self, node, scope, assign=1):
        scope.add_def(node.name)

    def visitAssAttr(self, node, scope, assign=0):
        # "obj.attr = x" uses obj, it does not bind any name.
        self.visit(node.expr, scope, 0)

    def visitSubscript(self, node, scope, assign=0):
        # "obj[i] = x" uses obj and i, it does not bind any name.
        self.visit(node.expr, scope, 0)
        for n in node.subs:
            self.visit(n, scope, 0)

    def visitSlice(self, node, scope, assign=0):
        # Slice bounds are uses, never bindings.
        self.visit(node.expr, scope, 0)
        if node.lower:
            self.visit(node.lower, scope, 0)
        if node.upper:
            self.visit(node.upper, scope, 0)

    def visitAugAssign(self, node, scope):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        self.visit(node.node, scope)
        if isinstance(node.node, ast.Name):
            self.visit(node.node, scope, 1) # XXX worry about this
        self.visit(node.expr, scope)

    # prune if statements if tests are false

    _const_types = types.StringType, types.IntType, types.FloatType

    def visitIf(self, node, scope):
        for test, body in node.tests:
            # Skip branches guarded by a false constant: names bound only
            # there would never be bound at runtime.
            if isinstance(test, ast.Const):
                if type(test.value) in self._const_types:
                    if not test.value:
                        continue
            self.visit(test, scope)
            self.visit(body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    # a yield statement signals a generator

    def visitYield(self, node, scope):
        scope.generator = 1
        self.visit(node.value, scope)
def list_eq(l1, l2):
    """Return True if *l1* and *l2* hold the same elements, ignoring order."""
    first = sorted(l1)
    second = sorted(l2)
    return first == second
if __name__ == "__main__":
    # Self-test (Python 2 syntax: bare "print" statements): for each file
    # named on the command line, compare the module-level and per-scope
    # names computed by SymbolVisitor against the stdlib symtable module,
    # exiting non-zero on the first mismatch.
    import sys
    from compiler import parseFile, walk
    import symtable

    def get_names(syms):
        # Names starting with '_[' (list-comp temporaries) or '.'
        # (implicit parameters) are implementation artifacts; drop them.
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]

    for file in sys.argv[1:]:
        print file
        f = open(file)
        buf = f.read()
        f.close()
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        tree = parseFile(file)
        s = SymbolVisitor()
        walk(tree, s)

        # compare module-level symbols
        names2 = s.scopes[tree].get_names()

        if not list_eq(mod_names, names2):
            print
            print "oops", file
            print sorted(mod_names)
            print sorted(names2)
            sys.exit(-1)

        # Drop the module scope; compare every remaining scope by name.
        d = {}
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d

        for s in syms.get_symbols():
            if s.is_namespace():
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                # Ambiguous name (several scopes share it): cannot compare.
                if len(l) > 1:
                    print "skipping", s.get_name()
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print s.get_name()
                        print sorted(get_names(s.get_namespace()))
                        print sorted(l[0].get_names())
                        sys.exit(-1)
|
ktan2020/legacy-automation
|
win/Lib/compiler/symbols.py
|
Python
|
mit
| 14,949
|
[
"VisIt"
] |
ec5146c044ebbd07b2f7b83fd95b6bd9487cf5d419468a71aa93e8af89ecbcda
|
# -*- coding: utf-8 -*-
""" Laplacian segmentation """
# Code source: Brian McFee
# License: ISC
from collections import defaultdict
import numpy as np
import scipy
import sklearn.cluster
import librosa
def embed_beats(A_rep, A_loc, config):
    """Embed beat frames as eigenvectors of the Laplacian of a balanced
    recurrence/path graph (Laplacian segmentation).

    A_rep feeds the long-range recurrence graph, A_loc the local path
    graph; config supplies "rec_width", "rec_smooth" and "evec_smooth".
    """
    # Long-range repetition structure: affinity-weighted recurrence matrix.
    rec = librosa.segment.recurrence_matrix(A_rep, width=config["rec_width"],
                                            mode='affinity',
                                            metric='cosine',
                                            sym=True)

    # Enhance diagonals with a median filter (Equation 2).
    lag_filter = librosa.segment.timelag_filter(scipy.ndimage.median_filter)
    rec_smooth = lag_filter(rec, size=(1, config["rec_smooth"]))

    # Local path graph: similarity between consecutive frames of A_loc,
    # with the bandwidth set to the median squared step distance.
    step_dist = np.sum(np.diff(A_loc, axis=1) ** 2, axis=0)
    bandwidth = np.median(step_dist)
    step_sim = np.exp(-step_dist / bandwidth)
    path_graph = np.diag(step_sim, k=1) + np.diag(step_sim, k=-1)

    ##########################################################
    # And compute the balanced combination (Equations 6, 7, 9)
    deg_path = np.sum(path_graph, axis=1)
    deg_rec = np.sum(rec_smooth, axis=1)
    deg_total = deg_path + deg_rec
    mu = deg_path.dot(deg_total) / np.sum(deg_total ** 2)
    combined = mu * rec_smooth + (1 - mu) * path_graph

    #####################################################
    # Now let's compute the normalized Laplacian (Eq. 10)
    lap = scipy.sparse.csgraph.laplacian(combined, normed=True)

    # ... and its spectral decomposition.
    _evals, evecs = scipy.linalg.eigh(lap)

    # Median-filter the eigenvectors to smooth over small discontinuities.
    return scipy.ndimage.median_filter(evecs,
                                       size=(config["evec_smooth"], 1))
def cluster(evecs, Cnorm, k, in_bound_idxs=None):
    """K-means-cluster the first *k* normalized eigenvector dimensions and
    derive segment boundary indices and per-segment labels.

    Returns (bound_idxs, bound_segs); the end frame is appended to
    bound_idxs.  If *in_bound_idxs* is given, those boundaries are used
    instead of being detected from the label sequence.
    """
    # Normalize each frame by its cumulative eigenvector norm.
    features = evecs[:, :k] / (Cnorm[:, k - 1:k] + 1e-5)
    kmeans = sklearn.cluster.KMeans(n_clusters=k, n_init=50, max_iter=500)
    seg_ids = kmeans.fit_predict(features)

    ###############################################################
    # Locate segment boundaries from the label sequence
    if in_bound_idxs is not None:
        bound_idxs = in_bound_idxs
    else:
        # A boundary sits wherever consecutive labels differ; frame 0
        # always counts as a boundary.
        label_changes = 1 + np.flatnonzero(seg_ids[:-1] != seg_ids[1:])
        bound_idxs = librosa.util.fix_frames(label_changes, x_min=0)

    # Segment label at each boundary, then tack on the end-time.
    bound_segs = list(seg_ids[bound_idxs])
    bound_idxs = list(np.append(bound_idxs, len(Cnorm) - 1))

    return bound_idxs, bound_segs
def _reindex_labels(ref_int, ref_lab, est_int, est_lab):
# for each estimated label
# find the reference label that is maximally overlaps with
score_map = defaultdict(lambda: 0)
for r_int, r_lab in zip(ref_int, ref_lab):
for e_int, e_lab in zip(est_int, est_lab):
score_map[(e_lab, r_lab)] += max(0, min(e_int[1], r_int[1]) -
max(e_int[0], r_int[0]))
r_taken = set()
e_map = dict()
hits = [(score_map[k], k) for k in score_map]
hits = sorted(hits, reverse=True)
while hits:
cand_v, (e_lab, r_lab) = hits.pop(0)
if r_lab in r_taken or e_lab in e_map:
continue
e_map[e_lab] = r_lab
r_taken.add(r_lab)
# Anything left over is unused
unused = set(est_lab) - set(ref_lab)
for e, u in zip(set(est_lab) - set(e_map.keys()), unused):
e_map[e] = u
return [e_map[e] for e in est_lab]
def reindex(hierarchy):
    """Relabel every layer of a segmentation hierarchy for consistency.

    Each layer's labels are renamed to agree with the (already relabelled)
    layer above it; the top layer is kept as-is.
    """
    relabelled = [hierarchy[0]]
    for intervals, labels in hierarchy[1:]:
        prev_intervals, prev_labels = relabelled[-1]
        labels = _reindex_labels(prev_intervals, prev_labels,
                                 intervals, labels)
        relabelled.append((intervals, labels))
    return relabelled
def do_segmentation(C, M, config, in_bound_idxs=None):
    """Run spectral-clustering segmentation on features C (repetition) and
    M (local).

    If config["hier"] is true, returns one (idxs, labels) pair per layer
    for layers k = 1 .. config["num_layers"]; otherwise a single flat
    segmentation with config["scluster_k"] clusters, optionally constrained
    to *in_bound_idxs*.  Returns (est_idxs, est_labels, Cnorm).
    """
    embedding = embed_beats(C, M, config)
    # Cumulative norm of the eigenvector embedding, used to normalize the
    # per-k feature matrices inside cluster().
    Cnorm = np.cumsum(embedding ** 2, axis=1) ** 0.5

    if config["hier"]:
        est_idxs = []
        est_labels = []
        for k in range(1, config["num_layers"] + 1):
            est_idx, est_label = cluster(embedding, Cnorm, k)
            est_idxs.append(est_idx)
            # BUG FIX: ``np.int`` was deprecated in NumPy 1.20 and removed
            # in 1.24; the builtin ``int`` is the exact equivalent dtype.
            est_labels.append(np.asarray(est_label, dtype=int))
    else:
        est_idxs, est_labels = cluster(embedding, Cnorm,
                                       config["scluster_k"], in_bound_idxs)
        est_labels = np.asarray(est_labels, dtype=int)

    return est_idxs, est_labels, Cnorm
|
urinieto/msaf
|
msaf/algorithms/scluster/main2.py
|
Python
|
mit
| 4,321
|
[
"Brian"
] |
4e5285e5a4accc4db16fdff101bdfc988862ae538d203a5bca9ba903b7eab43a
|
from unittest import mock
import numpy as np
import pytest
import hyperspy.api as hs
from hyperspy.misc.utils import slugify
from hyperspy.decorators import lazifyTestClass
from hyperspy.misc.test_utils import ignore_warning
RTOL = 1E-6
class TestModelJacobians:
    """Tests for ``Model1D._jacobian`` with a mocked low-loss signal.

    ``sigma`` is twinned to ``centre`` so both gradients accumulate into a
    single free-parameter column; ``channel_switches`` keeps only the
    second channel active.
    """

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.zeros(1))
        m = s.create_model()
        self.low_loss = 7.
        self.weights = 0.3
        m.axis.axis = np.array([1, 0])
        m.channel_switches = np.array([0, 1], dtype=bool)
        m.append(hs.model.components1D.Gaussian())
        m[0].A.value = 1
        m[0].centre.value = 2.
        # Twinning sigma to centre removes sigma from the free parameters;
        # its gradient is folded into centre's Jacobian column.
        m[0].sigma.twin = m[0].centre
        # Replace the low-loss signal with a mock returning a scalar factor.
        m._low_loss = mock.MagicMock()
        m.low_loss.return_value = self.low_loss
        self.model = m
        m.convolution_axis = np.zeros(2)

    def test_jacobian_not_convolved(self):
        m = self.model
        m.convolved = False
        jac = m._jacobian((1, 2, 3), None, weights=self.weights)
        np.testing.assert_array_almost_equal(jac.squeeze(), self.weights *
                                             np.array([m[0].A.grad(0),
                                                       m[0].sigma.grad(0) +
                                                       m[0].centre.grad(0)]))
        # _jacobian must also store the supplied parameter values.
        assert m[0].A.value == 1
        assert m[0].centre.value == 2
        assert m[0].sigma.value == 2

    def test_jacobian_convolved(self):
        m = self.model
        m.convolved = True
        m.append(hs.model.components1D.Gaussian())
        m[0].convolved = False
        m[1].convolved = True
        jac = m._jacobian((1, 2, 3, 4, 5), None, weights=self.weights)
        # The convolved component's gradients are scaled by the (mocked)
        # low-loss factor.
        np.testing.assert_array_almost_equal(jac.squeeze(), self.weights *
                                             np.array([m[0].A.grad(0),
                                                       m[0].sigma.grad(0) +
                                                       m[0].centre.grad(0),
                                                       m[1].A.grad(0) *
                                                       self.low_loss,
                                                       m[1].centre.grad(0) *
                                                       self.low_loss,
                                                       m[1].sigma.grad(0) *
                                                       self.low_loss,
                                                       ]))
        assert m[0].A.value == 1
        assert m[0].centre.value == 2
        assert m[0].sigma.value == 2
        assert m[1].A.value == 3
        assert m[1].centre.value == 4
        assert m[1].sigma.value == 5
class TestModelCallMethod:
    """Tests for ``Model1D.__call__`` (plain, convolved and binned)."""

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.empty(1))
        m = s.create_model()
        # Two identical default Gaussians, so the model total is twice one
        # component's value.
        m.append(hs.model.components1D.Gaussian())
        m.append(hs.model.components1D.Gaussian())
        self.model = m

    def test_call_method_no_convolutions(self):
        m = self.model
        m.convolved = False
        m[1].active = False
        r1 = m()
        r2 = m(onlyactive=True)
        np.testing.assert_allclose(m[0].function(0) * 2, r1)
        np.testing.assert_allclose(m[0].function(0), r2)
        # non_convolved=True must give the same result even when the model
        # is flagged as convolved.
        m.convolved = True
        r1 = m(non_convolved=True)
        r2 = m(non_convolved=True, onlyactive=True)
        np.testing.assert_allclose(m[0].function(0) * 2, r1)
        np.testing.assert_allclose(m[0].function(0), r2)

    def test_call_method_with_convolutions(self):
        m = self.model
        # Mock the low-loss signal so convolution reduces to a 0.3 factor.
        m._low_loss = mock.MagicMock()
        m.low_loss.return_value = 0.3
        m.convolved = True
        m.append(hs.model.components1D.Gaussian())
        m[1].active = False
        m[0].convolved = True
        m[1].convolved = False
        m[2].convolved = False
        m.convolution_axis = np.array([0., ])
        r1 = m()
        r2 = m(onlyactive=True)
        np.testing.assert_allclose(m[0].function(0) * 2.3, r1)
        np.testing.assert_allclose(m[0].function(0) * 1.3, r2)

    def test_call_method_binned(self):
        m = self.model
        m.convolved = False
        m.remove(1)
        # For binned signals the model output is scaled by the axis step.
        m.signal.metadata.Signal.binned = True
        m.signal.axes_manager[-1].scale = 0.3
        r1 = m()
        np.testing.assert_allclose(m[0].function(0) * 0.3, r1)
class TestModelPlotCall:
    """Tests for ``Model1D._model2plot`` using a fully mocked model."""

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.empty(1))
        m = s.create_model()
        m.__call__ = mock.MagicMock()
        m.__call__.return_value = np.array([0.5, 0.25])
        m.axis = mock.MagicMock()
        m.fetch_stored_values = mock.MagicMock()
        # Only channels 1 and 2 are inside the fitting range.
        m.channel_switches = np.array([0, 1, 1, 0, 0], dtype=bool)
        self.model = m

    def test_model2plot_own_am(self):
        m = self.model
        m.axis.axis.shape = (5,)
        res = m._model2plot(m.axes_manager)
        # Channels outside the fitting range are padded with NaN.
        np.testing.assert_array_equal(
            res, np.array([np.nan, 0.5, 0.25, np.nan, np.nan]))
        assert m.__call__.called
        assert (
            m.__call__.call_args[1] == {
                'non_convolved': False, 'onlyactive': True})
        # With the model's own axes_manager no value fetching is needed.
        assert not m.fetch_stored_values.called

    def test_model2plot_other_am(self):
        m = self.model
        res = m._model2plot(m.axes_manager.deepcopy(), out_of_range2nans=False)
        np.testing.assert_array_equal(res, np.array([0.5, 0.25]))
        assert m.__call__.called
        assert (
            m.__call__.call_args[1] == {
                'non_convolved': False, 'onlyactive': True})
        # A foreign axes_manager forces fetching stored values twice
        # (switch to the foreign indices, then restore).
        assert 2 == m.fetch_stored_values.call_count
class TestModelSettingPZero:
    """Tests for the flat parameter vector ``p0`` and the boundary
    structures derived from it (leastsq-style and mpfit-style)."""

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.empty(1))
        m = s.create_model()
        m.append(hs.model.components1D.Gaussian())
        m[0].A.value = 1.1
        # centre holds two values, exercising multi-element parameters.
        m[0].centre._number_of_elements = 2
        m[0].centre.value = (2.2, 3.3)
        m[0].sigma.value = 4.4
        m[0].sigma.free = False
        m[0].A._bounds = (0.1, 0.11)
        m[0].centre._bounds = ((0.2, 0.21), (0.3, 0.31))
        m[0].sigma._bounds = (0.4, 0.41)
        self.model = m

    def test_setting_p0(self):
        m = self.model
        m.append(hs.model.components1D.Gaussian())
        m[-1].active = False
        m.p0 = None
        m._set_p0()
        # Only free parameters of active components enter p0: the fixed
        # sigma and the inactive second component are excluded.
        assert m.p0 == (1.1, 2.2, 3.3)

    def test_fetching_from_p0(self):
        m = self.model
        m.append(hs.model.components1D.Gaussian())
        m[-1].active = False
        m[-1].A.value = 100
        m[-1].sigma.value = 200
        m[-1].centre.value = 300
        m.p0 = (1.2, 2.3, 3.4, 5.6, 6.7, 7.8)
        m._fetch_values_from_p0()
        # Fixed and inactive parameters must keep their previous values.
        assert m[0].A.value == 1.2
        assert m[0].centre.value == (2.3, 3.4)
        assert m[0].sigma.value == 4.4
        assert m[1].A.value == 100
        assert m[1].sigma.value == 200
        assert m[1].centre.value == 300

    def test_setting_boundaries(self):
        m = self.model
        m.append(hs.model.components1D.Gaussian())
        m[-1].active = False
        m.set_boundaries()
        assert (m.free_parameters_boundaries ==
                [(0.1, 0.11), (0.2, 0.21), (0.3, 0.31)])

    def test_setting_mpfit_parameters_info(self):
        m = self.model
        # Open-ended bounds map to limited=False with a placeholder 0.
        m[0].A.bmax = None
        m[0].centre.bmin = None
        m[0].centre.bmax = 0.31
        m.append(hs.model.components1D.Gaussian())
        m[-1].active = False
        m.set_mpfit_parameters_info()
        assert (m.mpfit_parinfo ==
                [{'limited': [True, False],
                  'limits': [0.1, 0]},
                 {'limited': [False, True],
                  'limits': [0, 0.31]},
                 {'limited': [False, True],
                  'limits': [0, 0.31]},
                 ])
class TestModel1D:
    """General behavioural tests for ``Model1D``: low-level fit helpers,
    component management, name lookup, deletion and bounds snapping."""

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.empty(1))
        m = s.create_model()
        self.model = m

    def test_errfunc(self):
        m = self.model
        m._model_function = mock.MagicMock()
        m._model_function.return_value = 3.
        # errfunc = (model - data), optionally scaled by the weights.
        np.testing.assert_equal(m._errfunc(None, 1., None), 2.)
        np.testing.assert_equal(m._errfunc(None, 1., 0.3), 0.6)

    def test_errfunc2(self):
        m = self.model
        m._model_function = mock.MagicMock()
        m._model_function.return_value = 3. * np.ones(2)
        # errfunc2 = sum of (weighted) squared residuals.
        np.testing.assert_equal(m._errfunc2(None, np.ones(2), None), 2 * 4.)
        np.testing.assert_equal(m._errfunc2(None, np.ones(2), 0.3), 2 * 0.36)

    def test_gradient_ls(self):
        m = self.model
        m._errfunc = mock.MagicMock()
        m._errfunc.return_value = 0.1
        m._jacobian = mock.MagicMock()
        m._jacobian.return_value = np.ones((1, 2)) * 7.
        np.testing.assert_equal(m._gradient_ls(None, None), 2 * 0.1 * 7 * 2)

    def test_gradient_ml(self):
        m = self.model
        m._model_function = mock.MagicMock()
        m._model_function.return_value = 3. * np.ones(2)
        m._jacobian = mock.MagicMock()
        m._jacobian.return_value = np.ones((1, 2)) * 7.
        np.testing.assert_equal(
            m._gradient_ml(None, 1.2), -2 * 7 * (1.2 / 3 - 1))

    def test_model_function(self):
        m = self.model
        m.append(hs.model.components1D.Gaussian())
        m[0].A.value = 1.3
        m[0].centre.value = 0.003
        m[0].sigma.value = 0.1
        param = (100, 0.1, 0.2)
        # _model_function both evaluates and stores the parameter values.
        np.testing.assert_array_almost_equal(176.03266338,
                                             m._model_function(param))
        assert m[0].A.value == 100
        assert m[0].centre.value == 0.1
        assert m[0].sigma.value == 0.2

    def test_append_existing_component(self):
        # Appending the same component twice must be rejected.
        g = hs.model.components1D.Gaussian()
        m = self.model
        m.append(g)
        with pytest.raises(ValueError):
            m.append(g)

    def test_append_component(self):
        g = hs.model.components1D.Gaussian()
        m = self.model
        m.append(g)
        assert g in m
        assert g.model is m
        assert g._axes_manager is m.axes_manager
        assert all([hasattr(p, 'map') for p in g.parameters])

    def test_calculating_convolution_axis(self):
        m = self.model
        # setup
        m.axis.offset = 10
        m.axis.size = 10
        ll_axis = mock.MagicMock()
        ll_axis.size = 7
        ll_axis.value2index.return_value = 3
        m._low_loss = mock.MagicMock()
        m.low_loss.axes_manager.signal_axes = [ll_axis, ]
        # calculation
        m.set_convolution_axis()
        # tests
        np.testing.assert_array_equal(m.convolution_axis, np.arange(7, 23))
        np.testing.assert_equal(ll_axis.value2index.call_args[0][0], 0)

    def test_access_component_by_name(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g2.name = "test"
        m.extend((g1, g2))
        assert m["test"] is g2

    def test_access_component_by_index(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g2.name = "test"
        m.extend((g1, g2))
        assert m[1] is g2

    def test_component_name_when_append(self):
        # Duplicate default names get numeric suffixes.
        m = self.model
        gs = [
            hs.model.components1D.Gaussian(),
            hs.model.components1D.Gaussian(),
            hs.model.components1D.Gaussian()]
        m.extend(gs)
        assert m['Gaussian'] is gs[0]
        assert m['Gaussian_0'] is gs[1]
        assert m['Gaussian_1'] is gs[2]

    def test_several_component_with_same_name(self):
        # Forcing identical private names makes lookup ambiguous.
        m = self.model
        gs = [
            hs.model.components1D.Gaussian(),
            hs.model.components1D.Gaussian(),
            hs.model.components1D.Gaussian()]
        m.extend(gs)
        m[0]._name = "hs.model.components1D.Gaussian"
        m[1]._name = "hs.model.components1D.Gaussian"
        m[2]._name = "hs.model.components1D.Gaussian"
        with pytest.raises(ValueError):
            m['Gaussian']

    def test_no_component_with_that_name(self):
        m = self.model
        with pytest.raises(ValueError):
            m['Voigt']

    def test_component_already_in_model(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        with pytest.raises(ValueError):
            m.extend((g1, g1))

    def test_remove_component(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        m.remove(g1)
        assert len(m) == 0

    def test_remove_component_by_index(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        m.remove(0)
        assert len(m) == 0

    def test_remove_component_by_name(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        m.remove(g1.name)
        assert len(m) == 0

    def test_delete_component_by_index(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        del m[0]
        assert g1 not in m

    def test_delete_component_by_name(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        del m[g1.name]
        assert g1 not in m

    def test_delete_slice(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g3 = hs.model.components1D.Gaussian()
        g3.A.twin = g1.A
        g1.sigma.twin = g2.sigma
        m.extend([g1, g2, g3])
        del m[:2]
        assert g1 not in m
        assert g2 not in m
        assert g3 in m
        # Twin links to removed components must be severed.
        assert not g1.sigma.twin
        assert not g1.A._twins

    def test_get_component_by_name(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g2.name = "test"
        m.extend((g1, g2))
        assert m._get_component("test") is g2

    def test_get_component_by_index(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g2.name = "test"
        m.extend((g1, g2))
        assert m._get_component(1) is g2

    def test_get_component_by_component(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g2.name = "test"
        m.extend((g1, g2))
        assert m._get_component(g2) is g2

    def test_get_component_wrong(self):
        # Only str, int or component arguments are accepted.
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        g2 = hs.model.components1D.Gaussian()
        g2.name = "test"
        m.extend((g1, g2))
        with pytest.raises(ValueError):
            m._get_component(1.2)

    def test_components_class_default(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        assert getattr(m.components, g1.name) is g1

    def test_components_class_change_name(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        g1.name = "test"
        assert getattr(m.components, g1.name) is g1

    def test_components_class_change_name_del_default(self):
        # Renaming must remove the old attribute from m.components.
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        g1.name = "test"
        with pytest.raises(AttributeError):
            getattr(m.components, "Gaussian")

    def test_components_class_change_invalid_name(self):
        # Names that are not valid identifiers are slugified.
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        g1.name = "1, Test This!"
        assert (
            getattr(m.components,
                    slugify(g1.name, valid_variable_name=True)) is g1)

    def test_components_class_change_name_del_default2(self):
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        invalid_name = "1, Test This!"
        g1.name = invalid_name
        g1.name = "test"
        with pytest.raises(AttributeError):
            getattr(m.components, slugify(invalid_name))

    def test_snap_parameter_bounds(self):
        # ensure_parameters_in_bounds clips free parameters of active
        # components into [bmin, bmax]; fixed parameters, inactive
        # components and unbounded values stay untouched.
        m = self.model
        g1 = hs.model.components1D.Gaussian()
        m.append(g1)
        g2 = hs.model.components1D.Gaussian()
        m.append(g2)
        g3 = hs.model.components1D.Gaussian()
        m.append(g3)
        g4 = hs.model.components1D.Gaussian()
        m.append(g4)
        p = hs.model.components1D.Polynomial(3, legacy=False)
        m.append(p)

        g1.A.value = 3.
        g1.centre.bmin = 300.
        g1.centre.value = 1.
        g1.sigma.bmax = 15.
        g1.sigma.value = 30

        g2.A.value = 1
        g2.A.bmin = 0.
        g2.A.bmax = 3.
        g2.centre.value = 0
        g2.centre.bmin = 1
        g2.centre.bmax = 3.
        g2.sigma.value = 4
        g2.sigma.bmin = 1
        g2.sigma.bmax = 3.

        # g3: partly non-free parameters, which must not be snapped.
        g3.A.bmin = 0
        g3.A.value = -3
        g3.A.free = False

        g3.centre.value = 15
        g3.centre.bmax = 10
        g3.centre.free = False

        g3.sigma.value = 1
        g3.sigma.bmin = 0
        g3.sigma.bmax = 0

        # g4 is inactive: out-of-bound values must survive untouched.
        g4.active = False
        g4.A.value = 300
        g4.A.bmin = 500
        g4.centre.value = 0
        g4.centre.bmax = -1
        g4.sigma.value = 1
        g4.sigma.bmin = 10

        p.a0.value = 1
        p.a1.value = 2
        p.a2.value = 3
        p.a3.value = 4
        p.a0.bmin = 2
        p.a1.bmin = 2
        p.a2.bmin = 2
        p.a3.bmin = 2
        p.a0.bmax = 3
        p.a1.bmax = 3
        p.a2.bmax = 3
        p.a3.bmax = 3

        m.ensure_parameters_in_bounds()

        np.testing.assert_allclose(g1.A.value, 3.)
        np.testing.assert_allclose(g2.A.value, 1.)
        np.testing.assert_allclose(g3.A.value, -3.)
        np.testing.assert_allclose(g4.A.value, 300.)
        np.testing.assert_allclose(g1.centre.value, 300.)
        np.testing.assert_allclose(g2.centre.value, 1.)
        np.testing.assert_allclose(g3.centre.value, 15.)
        np.testing.assert_allclose(g4.centre.value, 0)
        np.testing.assert_allclose(g1.sigma.value, 15.)
        np.testing.assert_allclose(g2.sigma.value, 3.)
        np.testing.assert_allclose(g3.sigma.value, 0.)
        np.testing.assert_allclose(g4.sigma.value, 1)
        np.testing.assert_almost_equal(p.a0.value, 2)
        np.testing.assert_almost_equal(p.a1.value, 2)
        np.testing.assert_almost_equal(p.a2.value, 3)
        np.testing.assert_almost_equal(p.a3.value, 3)
class TestModel2D:
    """Fit a 2D Gaussian model to a synthetic noise-free image and check
    the recovered parameters."""

    def setup_method(self, method):
        # Build a noise-free 2D Gaussian image on a [-10, 10) grid with
        # 0.01 pixel spacing.
        g = hs.model.components2D.Gaussian2D(
            centre_x=-5.,
            centre_y=-5.,
            sigma_x=1.,
            sigma_y=2.)
        x = np.arange(-10, 10, 0.01)
        y = np.arange(-10, 10, 0.01)
        X, Y = np.meshgrid(x, y)
        im = hs.signals.Signal2D(g.function(X, Y))
        im.axes_manager[0].scale = 0.01
        im.axes_manager[0].offset = -10
        im.axes_manager[1].scale = 0.01
        im.axes_manager[1].offset = -10
        self.im = im

    def test_fitting(self):
        im = self.im
        m = im.create_model()
        # Start from deliberately offset initial values; the fit must
        # recover the generating parameters.
        gt = hs.model.components2D.Gaussian2D(centre_x=-4.5,
                                              centre_y=-4.5,
                                              sigma_x=0.5,
                                              sigma_y=1.5)
        m.append(gt)
        m.fit()
        np.testing.assert_allclose(gt.centre_x.value, -5.)
        np.testing.assert_allclose(gt.centre_y.value, -5.)
        np.testing.assert_allclose(gt.sigma_x.value, 1.)
        np.testing.assert_allclose(gt.sigma_y.value, 2.)
class TestModelPrintCurrentValues:
    """Smoke tests: ``print_current_values`` must run without raising for
    all argument combinations."""

    def setup_method(self, method):
        np.random.seed(1)
        s = hs.signals.Signal1D(np.arange(10, 100, 0.1))
        s.axes_manager[0].scale = 0.1
        s.axes_manager[0].offset = 10
        m = s.create_model()
        # Legacy Polynomial API emits a deprecation warning; silence it.
        with ignore_warning(message="The API of the `Polynomial` component"):
            m.append(hs.model.components1D.Polynomial(1))
        m.append(hs.model.components1D.Offset())
        self.s = s
        self.m = m

    @pytest.mark.parametrize("only_free", [True, False])
    @pytest.mark.parametrize("skip_multi", [True, False])
    def test_print_current_values(self, only_free, skip_multi):
        self.m.print_current_values(only_free, skip_multi)

    def test_print_current_values_component_list(self):
        self.m.print_current_values(component_list=list(self.m))
@lazifyTestClass
class TestModelFitBinned:
    """Fit a Gaussian to a binned histogram of normal samples with every
    supported optimizer, pinning the exact fitted values (seeded RNG)."""

    def setup_method(self, method):
        np.random.seed(1)
        s = hs.signals.Signal1D(
            np.random.normal(
                scale=2,
                size=10000)).get_histogram()
        s.metadata.Signal.binned = True
        g = hs.model.components1D.Gaussian()
        m = s.create_model()
        m.append(g)
        g.sigma.value = 1
        g.centre.value = 0.5
        g.A.value = 1e3
        self.m = m

    def test_fit_neldermead_leastsq(self):
        self.m.fit(fitter="Nelder-Mead", method="ls")
        np.testing.assert_allclose(self.m[0].A.value, 9976.14519369)
        np.testing.assert_allclose(self.m[0].centre.value, -0.110610743285)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380705455)

    def test_fit_neldermead_ml(self):
        self.m.fit(fitter="Nelder-Mead", method="ml")
        np.testing.assert_allclose(self.m[0].A.value, 10001.39613936,
                                   atol=1E-3)
        np.testing.assert_allclose(self.m[0].centre.value, -0.104151206314,
                                   atol=1E-6)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.00053642434)

    def test_fit_leastsq(self):
        self.m.fit(fitter="leastsq")
        np.testing.assert_allclose(self.m[0].A.value, 9976.14526082, RTOL)
        np.testing.assert_allclose(
            self.m[0].centre.value, -0.110610727064, RTOL)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380707571, RTOL)

    def test_fit_mpfit(self):
        self.m.fit(fitter="mpfit")
        np.testing.assert_allclose(self.m[0].A.value, 9976.14526286)
        np.testing.assert_allclose(self.m[0].centre.value, -0.110610718444,
                                   atol=1E-6)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380707614,
                                   atol=1E-6)

    def test_fit_odr(self):
        self.m.fit(fitter="odr")
        np.testing.assert_allclose(self.m[0].A.value, 9976.14531979)
        np.testing.assert_allclose(self.m[0].centre.value, -0.110610724054,
                                   atol=1e-7)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380709939)

    def test_fit_leastsq_grad(self):
        # grad=True enables the analytical Jacobian.
        self.m.fit(fitter="leastsq", grad=True)
        np.testing.assert_allclose(self.m[0].A.value, 9976.14526084)
        np.testing.assert_allclose(self.m[0].centre.value, -0.11061073306)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380707552)

    def test_fit_mpfit_grad(self):
        self.m.fit(fitter="mpfit", grad=True)
        np.testing.assert_allclose(self.m[0].A.value, 9976.14526084)
        np.testing.assert_allclose(self.m[0].centre.value, -0.11061073306)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380707552)

    def test_fit_odr_grad(self):
        self.m.fit(fitter="odr", grad=True)
        np.testing.assert_allclose(self.m[0].A.value, 9976.14531979)
        np.testing.assert_allclose(self.m[0].centre.value, -0.110610724054,
                                   atol=1e-7)
        np.testing.assert_allclose(self.m[0].sigma.value, 1.98380709939)

    def test_fit_bounded_mpfit(self):
        # With bounded=True the fitted centre must land on its bmin.
        self.m[0].centre.bmin = 0.5
        # self.m[0].bounded = True
        self.m.fit(fitter="mpfit", bounded=True)
        np.testing.assert_allclose(self.m[0].A.value, 9991.65422046)
        np.testing.assert_allclose(self.m[0].centre.value, 0.5)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.08398236966)

    def test_fit_bounded_leastsq(self):
        # Bounded leastsq requires scipy >= 0.17 (least_squares).
        pytest.importorskip("scipy", minversion="0.17")
        self.m[0].centre.bmin = 0.5
        # self.m[0].bounded = True
        self.m.fit(fitter="leastsq", bounded=True)
        np.testing.assert_allclose(self.m[0].A.value, 9991.65422046)
        np.testing.assert_allclose(self.m[0].centre.value, 0.5)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.08398236966, RTOL)

    def test_fit_bounded_lbfgs(self):
        self.m[0].centre.bmin = 0.5
        # self.m[0].bounded = True
        self.m.fit(fitter="L-BFGS-B", bounded=True, grad=True)
        np.testing.assert_allclose(self.m[0].A.value, 9991.65422046)
        np.testing.assert_allclose(self.m[0].centre.value, 0.5)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.08398236966)

    def test_fit_bounded_bad_starting_values_mpfit(self):
        # Starting value below bmin: the fitter must still converge to the
        # same bounded solution.
        self.m[0].centre.bmin = 0.5
        self.m[0].centre.value = -1
        # self.m[0].bounded = True
        self.m.fit(fitter="mpfit", bounded=True)
        np.testing.assert_allclose(self.m[0].A.value, 9991.65422046)
        np.testing.assert_allclose(self.m[0].centre.value, 0.5)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.08398236966)

    def test_fit_bounded_bad_starting_values_leastsq(self):
        self.m[0].centre.bmin = 0.5
        self.m[0].centre.value = -1
        # self.m[0].bounded = True
        self.m.fit(fitter="leastsq", bounded=True)
        np.testing.assert_allclose(self.m[0].A.value, 9991.65422046)
        np.testing.assert_allclose(self.m[0].centre.value, 0.5)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.08398236966, RTOL)

    def test_fit_bounded_bad_starting_values_lbfgs(self):
        self.m[0].centre.bmin = 0.5
        self.m[0].centre.value = -1
        # self.m[0].bounded = True
        self.m.fit(fitter="L-BFGS-B", bounded=True, grad=True)
        np.testing.assert_allclose(self.m[0].A.value, 9991.65422046)
        np.testing.assert_allclose(self.m[0].centre.value, 0.5)
        np.testing.assert_allclose(self.m[0].sigma.value, 2.08398236966)

    def test_wrong_method(self):
        with pytest.raises(ValueError):
            self.m.fit(method="dummy")
@lazifyTestClass
class TestModelWeighted:
    """Weighted (variance-aware) polynomial fits in binned and unbinned
    mode, plus chi-squared statistics (seeded RNG, exact expectations)."""

    def setup_method(self, method):
        np.random.seed(1)
        s = hs.signals.Signal1D(np.arange(10, 100, 0.1))
        # Per-channel variance signal drives the fit weighting.
        s.metadata.set_item("Signal.Noise_properties.variance",
                            hs.signals.Signal1D(np.arange(10, 100, 0.01)))
        s.axes_manager[0].scale = 0.1
        s.axes_manager[0].offset = 10
        s.add_poissonian_noise()
        m = s.create_model()
        m.append(hs.model.components1D.Polynomial(1, legacy=False))
        self.m = m

    def test_fit_leastsq_binned(self):
        self.m.signal.metadata.Signal.binned = True
        self.m.fit(fitter="leastsq", method="ls")
        for result, expected in zip([self.m[0].a1.value, self.m[0].a0.value],
                                    (9.9165596693502778, 1.6628238107916631)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_odr_binned(self):
        self.m.signal.metadata.Signal.binned = True
        self.m.fit(fitter="odr", method="ls")
        for result, expected in zip([self.m[0].a1.value, self.m[0].a0.value],
                                    (9.9165596548961972, 1.6628247412317521)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_mpfit_binned(self):
        self.m.signal.metadata.Signal.binned = True
        self.m.fit(fitter="mpfit", method="ls")
        for result, expected in zip([self.m[0].a1.value, self.m[0].a0.value],
                                    (9.9165596607108739, 1.6628243846485873)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_neldermead_binned(self):
        self.m.signal.metadata.Signal.binned = True
        self.m.fit(
            fitter="Nelder-Mead",
            method="ls",
        )
        for result, expected in zip([self.m[0].a1.value, self.m[0].a0.value],
                                    (9.9137288425667442, 1.8446013472266145)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_leastsq_unbinned(self):
        # Unbinned fits return the binned coefficients divided by the
        # axis step (0.1).
        self.m.signal.metadata.Signal.binned = False
        self.m.fit(fitter="leastsq", method="ls")
        for result, expected in zip(
                [self.m[0].a1.value, self.m[0].a0.value],
                (0.99165596391487121, 0.16628254242532492)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_odr_unbinned(self):
        self.m.signal.metadata.Signal.binned = False
        self.m.fit(fitter="odr", method="ls")
        for result, expected in zip(
                [self.m[0].a1.value, self.m[0].a0.value],
                (0.99165596548961943, 0.16628247412317315)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_mpfit_unbinned(self):
        self.m.signal.metadata.Signal.binned = False
        self.m.fit(fitter="mpfit", method="ls")
        for result, expected in zip(
                [self.m[0].a1.value, self.m[0].a0.value],
                (0.99165596295068958, 0.16628257462820528)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_fit_neldermead_unbinned(self):
        self.m.signal.metadata.Signal.binned = False
        self.m.fit(
            fitter="Nelder-Mead",
            method="ls",
        )
        for result, expected in zip(
                [self.m[0].a1.value, self.m[0].a0.value],
                (0.99136169230026261, 0.18483060534056939)):
            np.testing.assert_allclose(result, expected, atol=1E-5)

    def test_chisq(self):
        self.m.signal.metadata.Signal.binned = True
        self.m.fit(fitter="leastsq", method="ls")
        np.testing.assert_allclose(self.m.chisq.data, 3029.16949561)

    def test_red_chisq(self):
        self.m.fit(fitter="leastsq", method="ls")
        np.testing.assert_allclose(self.m.red_chisq.data, 3.37700055)
class TestModelScalarVariance:
    """chisq/red_chisq when the signal declares a scalar noise variance."""
    def setup_method(self, method):
        signal = hs.signals.Signal1D(np.ones(100))
        self.s = signal
        self.m = signal.create_model()
        self.m.append(hs.model.components1D.Offset())
    def _fit_with_noise(self, std):
        # Shared fixture logic: seed, add Gaussian noise of the given std,
        # declare the matching scalar variance and run a leastsq fit.
        np.random.seed(1)
        self.s.add_gaussian_noise(std)
        self.s.metadata.set_item("Signal.Noise_properties.variance", std ** 2)
        self.m.fit(fitter="leastsq", method="ls")
    def test_std1_chisq(self):
        self._fit_with_noise(1)
        np.testing.assert_allclose(self.m.chisq.data, 78.35015229)
    def test_std10_chisq(self):
        # Same chisq as std=1: the declared variance normalises the residuals.
        self._fit_with_noise(10)
        np.testing.assert_allclose(self.m.chisq.data, 78.35015229)
    def test_std1_red_chisq(self):
        self._fit_with_noise(1)
        np.testing.assert_allclose(self.m.red_chisq.data, 0.79949135)
    def test_std10_red_chisq(self):
        self._fit_with_noise(10)
        np.testing.assert_allclose(self.m.red_chisq.data, 0.79949135)
    def test_std1_red_chisq_in_range(self):
        # Range restriction must happen before the fit for it to take effect.
        self.m.set_signal_range(10, 50)
        self._fit_with_noise(1)
        np.testing.assert_allclose(self.m.red_chisq.data, 0.86206965)
@pytest.mark.filterwarnings("ignore:The API of the `Polynomial`")
@lazifyTestClass
class TestModelSignalVariance:
    """red_chisq when the signal carries a per-pixel variance signal."""
    def setup_method(self, method):
        variance = hs.signals.Signal1D(
            np.arange(100, 300, dtype="float64").reshape((2, 100)))
        s = variance.deepcopy()
        # Reseeding before every noise call keeps each draw independently
        # reproducible regardless of how many samples the previous call used.
        np.random.seed(1)
        std = 10
        np.random.seed(1)
        s.add_gaussian_noise(std)
        np.random.seed(1)
        s.add_poissonian_noise()
        # Total variance = Poisson part (the clean data) + Gaussian part.
        s.metadata.set_item("Signal.Noise_properties.variance",
                            variance + std ** 2)
        m = s.create_model()
        m.append(hs.model.components1D.Polynomial(order=1))
        self.s = s
        self.m = m
    def test_std1_red_chisq(self):
        self.m.multifit(fitter="leastsq", method="ls", show_progressbar=None)
        np.testing.assert_allclose(self.m.red_chisq.data[0], 0.813109,
                                   atol=1e-5)
        np.testing.assert_allclose(self.m.red_chisq.data[1], 0.697727,
                                   atol=1e-5)
@lazifyTestClass
class TestMultifit:
    """Tests for Model.multifit: parameter fetching, bounds and as_signal."""
    def setup_method(self, method):
        s = hs.signals.Signal1D(np.zeros((2, 200)))
        s.axes_manager[-1].offset = 1
        s.data[:] = 2 * s.axes_manager[-1].axis ** (-3)
        m = s.create_model()
        m.append(hs.model.components1D.PowerLaw())
        # Store sensible values at index 0 ...
        m[0].A.value = 2
        m[0].r.value = 2
        m.store_current_values()
        # ... and a deliberately bad r at index 1 to exercise fetching.
        m.axes_manager.indices = (1,)
        m[0].r.value = 100
        m[0].A.value = 2
        m.store_current_values()
        m[0].A.free = False
        self.m = m
        m.axes_manager.indices = (0,)
        m[0].A.value = 100
    def test_fetch_only_fixed_false(self):
        self.m.multifit(fetch_only_fixed=False, show_progressbar=None)
        np.testing.assert_array_almost_equal(self.m[0].r.map['values'],
                                             [3., 100.])
        np.testing.assert_array_almost_equal(self.m[0].A.map['values'],
                                             [2., 2.])
    def test_fetch_only_fixed_true(self):
        self.m.multifit(fetch_only_fixed=True, show_progressbar=None)
        np.testing.assert_array_almost_equal(self.m[0].r.map['values'],
                                             [3., 3.])
        np.testing.assert_array_almost_equal(self.m[0].A.map['values'],
                                             [2., 2.])
    def test_parameter_as_signal_values(self):
        # There are more as_signal tests in test_parameters.py
        rs = self.m[0].r.as_signal(field="values")
        np.testing.assert_allclose(rs.data, np.array([2., 100.]))
        # Idiomatic membership test (was `not "..." in`, flake8 E713).
        assert "Signal.Noise_properties.variance" not in rs.metadata
        self.m.multifit(fetch_only_fixed=True, show_progressbar=None)
        rs = self.m[0].r.as_signal(field="values")
        assert "Signal.Noise_properties.variance" in rs.metadata
        assert isinstance(rs.metadata.Signal.Noise_properties.variance,
                          hs.signals.Signal1D)
    def test_bounded_snapping_mpfit(self):
        # A starts below bmin; mpfit must snap it into the bounded region.
        m = self.m
        m[0].A.free = True
        m.signal.data *= 2.
        m[0].A.value = 2.
        m[0].A.bmin = 3.
        m.multifit(fitter='mpfit', bounded=True, show_progressbar=None)
        np.testing.assert_array_almost_equal(self.m[0].r.map['values'],
                                             [3., 3.])
        np.testing.assert_array_almost_equal(self.m[0].A.map['values'],
                                             [4., 4.])
    def test_bounded_snapping_leastsq(self):
        # Same scenario as the mpfit variant, via the leastsq fitter.
        m = self.m
        m[0].A.free = True
        m.signal.data *= 2.
        m[0].A.value = 2.
        m[0].A.bmin = 3.
        m.multifit(fitter='leastsq', bounded=True, show_progressbar=None)
        np.testing.assert_array_almost_equal(self.m[0].r.map['values'],
                                             [3., 3.])
        np.testing.assert_array_almost_equal(self.m[0].A.map['values'],
                                             [4., 4.])
class TestStoreCurrentValues:
    """store_current_values must honour the component's `active` flag."""
    def setup_method(self, method):
        self.m = hs.signals.Signal1D(np.arange(10)).create_model()
        self.o = hs.model.components1D.Offset()
        self.m.append(self.o)
    def test_active(self):
        self.o.offset.value = 2
        self.o.offset.std = 3
        self.m.store_current_values()
        assert self.o.offset.map["values"][0] == 2
        # Truthiness instead of `== True` (flake8 E712).
        assert self.o.offset.map["is_set"][0]
    def test_not_active(self):
        # Values of inactive components must not be stored.
        self.o.active = False
        self.o.offset.value = 2
        self.o.offset.std = 3
        self.m.store_current_values()
        assert self.o.offset.map["values"][0] != 2
class TestSetCurrentValuesTo:
    """Tests for Model.assign_current_values_to_all."""
    def setup_method(self, method):
        model = hs.signals.Signal1D(
            np.arange(10).reshape(2, 5)).create_model()
        self.comps = [hs.model.components1D.Offset(),
                      hs.model.components1D.Offset()]
        model.extend(self.comps)
        self.m = model
    def test_set_all(self):
        # With no component list, every component's map is overwritten.
        for component in self.comps:
            component.offset.value = 2
        self.m.assign_current_values_to_all()
        for component in self.comps:
            assert (component.offset.map["values"] == 2).all()
    def test_set_1(self):
        # With an explicit list, only the listed component is touched.
        self.comps[1].offset.value = 2
        self.m.assign_current_values_to_all([self.comps[1]])
        assert (self.comps[0].offset.map["values"] != 2).all()
        assert (self.comps[1].offset.map["values"] == 2).all()
def test_fetch_values_from_arrays():
    """fetch_values_from_array maps values/stds onto free parameters by name order."""
    model = hs.signals.Signal1D(np.arange(10)).create_model()
    gaussian = hs.model.components1D.Gaussian(A=100, sigma=10, centre=3)
    model.append(gaussian)
    values = np.array([1.2, 3.4, 5.6])
    stds = values - 1
    model.fetch_values_from_array(values, array_std=stds)
    parameters = sorted(gaussian.free_parameters, key=lambda p: p.name)
    for value, std, parameter in zip(values, stds, parameters):
        assert parameter.value == value
        assert parameter.std == std
def sets_second_parameter_to_two(model, parameters, data, weights=None):
    """Custom objective: absolute distance of the second parameter from 2.

    Minimising this drives parameters[1] to exactly 2; used by
    TestCustomOptimisation below.
    """
    second = parameters[1]
    return np.abs(second - 2)
class TestCustomOptimisation:
    """Tests for fitting with a user-supplied objective (method='custom')."""
    def setup_method(self, method):
        s = hs.signals.Signal1D([1., 2, 3, 5, 7, 12, 8, 6, 3, 2, 2])
        # data that should fit with A=49, centre=5.13, sigma=2.0
        self.m = s.create_model()
        self.m.append(hs.model.components1D.Gaussian())
    def test_custom_function(self):
        # The custom objective only penalises the second free parameter
        # (centre), so the optimiser must drive it to exactly 2.
        m = self.m
        m.fit(method='custom', min_function=sets_second_parameter_to_two,
              fitter='TNC')
        assert m[0].centre.value == 2.
    def test_no_function(self):
        # method='custom' without min_function is a usage error.
        with pytest.raises(ValueError):
            self.m.fit(method='custom')
    def test_no_gradient(self):
        # grad=True without min_function_grad is a usage error.
        with pytest.raises(ValueError):
            self.m.fit(method='custom',
                       min_function=lambda *args: 1,
                       grad=True
                       )
    def test_custom_gradient_function(self):
        from unittest import mock
        # Gradient stub: must be called, and always with the model first.
        gradf = mock.Mock(return_value=[10, 1, 10])
        self.m.fit(method='custom',
                   fitter='BFGS',
                   min_function=sets_second_parameter_to_two,
                   grad=True,
                   min_function_grad=gradf)
        assert gradf.called
        assert all([args[0] is self.m for args, kwargs in
                    gradf.call_args_list])
class TestAsSignal:
    """Tests for Model.as_signal with component selection, multidimensional
    `active` arrays and (optional) parallel evaluation."""
    def setup_method(self, method):
        self.m = hs.signals.Signal1D(
            np.arange(20).reshape(2, 2, 5)).create_model()
        self.comps = [
            hs.model.components1D.Offset(),
            hs.model.components1D.Offset()]
        self.m.extend(self.comps)
        # Both offsets contribute 2, so the full model evaluates to 4.
        for c in self.comps:
            c.offset.value = 2
        self.m.assign_current_values_to_all()
    @pytest.mark.parallel
    def test_threaded_identical(self):
        # Parallel evaluation must give bit-compatible results to serial.
        # all components
        s = self.m.as_signal(show_progressbar=False, parallel=True)
        s1 = self.m.as_signal(show_progressbar=False, parallel=False)
        np.testing.assert_allclose(s1.data, s.data)
        # more complicated
        self.m[0].active_is_multidimensional = True
        self.m[0]._active_array[0] = False
        for component in [0, 1]:
            s = self.m.as_signal(component_list=[component],
                                 show_progressbar=False, parallel=True)
            s1 = self.m.as_signal(component_list=[component],
                                  show_progressbar=False, parallel=False)
            np.testing.assert_allclose(s1.data, s.data)
    @pytest.mark.parametrize('parallel',
                             [pytest.param(True, marks=pytest.mark.parallel), False])
    def test_all_components_simple(self, parallel):
        s = self.m.as_signal(show_progressbar=False, parallel=parallel)
        assert np.all(s.data == 4.)
    @pytest.mark.parametrize('parallel',
                             [pytest.param(True, marks=pytest.mark.parallel), False])
    def test_one_component_simple(self, parallel):
        # Selecting component 0 must not deactivate component 1.
        s = self.m.as_signal(component_list=[0], show_progressbar=False,
                             parallel=parallel)
        assert np.all(s.data == 2.)
        assert self.m[1].active
    @pytest.mark.parametrize('parallel',
                             [pytest.param(True, marks=pytest.mark.parallel), False])
    def test_all_components_multidim(self, parallel):
        self.m[0].active_is_multidimensional = True
        s = self.m.as_signal(show_progressbar=False, parallel=parallel)
        assert np.all(s.data == 4.)
        # Deactivating component 0 at nav index 0 halves that row only.
        self.m[0]._active_array[0] = False
        s = self.m.as_signal(show_progressbar=False, parallel=parallel)
        np.testing.assert_array_equal(
            s.data, np.array([np.ones((2, 5)) * 2, np.ones((2, 5)) * 4]))
        assert self.m[0].active_is_multidimensional
    @pytest.mark.parametrize('parallel',
                             [pytest.param(True, marks=pytest.mark.parallel), False])
    def test_one_component_multidim(self, parallel):
        self.m[0].active_is_multidimensional = True
        s = self.m.as_signal(component_list=[0], show_progressbar=False,
                             parallel=parallel)
        assert np.all(s.data == 2.)
        assert self.m[1].active
        assert not self.m[1].active_is_multidimensional
        s = self.m.as_signal(component_list=[1], show_progressbar=False,
                             parallel=parallel)
        np.testing.assert_equal(s.data, 2.)
        assert self.m[0].active_is_multidimensional
        self.m[0]._active_array[0] = False
        s = self.m.as_signal(component_list=[1], show_progressbar=False,
                             parallel=parallel)
        assert np.all(s.data == 2.)
        # Component 0 is inactive at index 0, so that row evaluates to zero.
        s = self.m.as_signal(component_list=[0], show_progressbar=False,
                             parallel=parallel)
        np.testing.assert_array_equal(s.data, np.array([np.zeros((2, 5)),
                                                        np.ones((2, 5)) * 2]))
@lazifyTestClass
class TestCreateModel:
    """create_model returns the model class matching the signal dimension."""
    def setup_method(self, method):
        self.s = hs.signals.Signal1D(np.asarray([0, ]))
        self.im = hs.signals.Signal2D(np.ones([1, 1, ]))
    def test_create_model(self):
        from hyperspy.models.model1d import Model1D
        from hyperspy.models.model2d import Model2D
        model_1d = self.s.create_model()
        model_2d = self.im.create_model()
        assert isinstance(model_1d, Model1D)
        assert isinstance(model_2d, Model2D)
class TestAdjustPosition:
    """Tests for the interactive position widgets of Model1D."""
    def setup_method(self, method):
        self.s = hs.signals.Signal1D(np.random.rand(10, 10, 20))
        self.m = self.s.create_model()
    def test_enable_adjust_position(self):
        self.m.append(hs.model.components1D.Gaussian())
        self.m.enable_adjust_position()
        assert len(self.m._position_widgets) == 1
        # Check that both line and label was added
        assert len(list(self.m._position_widgets.values())[0]) == 2
    def test_disable_adjust_position(self):
        self.m.append(hs.model.components1D.Gaussian())
        self.m.enable_adjust_position()
        self.m.disable_adjust_position()
        assert len(self.m._position_widgets) == 0
    def test_enable_all(self):
        # Components appended after enabling also get widgets.
        self.m.append(hs.model.components1D.Gaussian())
        self.m.enable_adjust_position()
        self.m.append(hs.model.components1D.Gaussian())
        assert len(self.m._position_widgets) == 2
    def test_enable_all_zero_start(self):
        # Enabling on an empty model still tracks later appends.
        self.m.enable_adjust_position()
        self.m.append(hs.model.components1D.Gaussian())
        assert len(self.m._position_widgets) == 1
    def test_manual_close(self):
        self.m.append(hs.model.components1D.Gaussian())
        self.m.append(hs.model.components1D.Gaussian())
        self.m.enable_adjust_position()
        # Closing one widget of a pair keeps the component entry with the
        # surviving widget; closing the second removes the entry entirely.
        list(self.m._position_widgets.values())[0][0].close()
        assert len(self.m._position_widgets) == 2
        assert len(list(self.m._position_widgets.values())[0]) == 1
        list(self.m._position_widgets.values())[0][0].close()
        assert len(self.m._position_widgets) == 1
        assert len(list(self.m._position_widgets.values())[0]) == 2
        self.m.disable_adjust_position()
        assert len(self.m._position_widgets) == 0
def test_as_signal_parallel():
    """Repeated parallel as_signal evaluations are deterministic."""
    import numpy as np
    import hyperspy.api as hs
    np.random.seed(1)
    s = hs.signals.Signal1D(np.random.random((50, 10)))
    m = s.create_model()
    m.append(hs.model.components1D.PowerLaw())
    m.set_signal_range(2, 5)
    m.multifit(show_progressbar=False)
    # NOTE(review): both calls use parallel=True, so this only checks that two
    # parallel runs agree -- possibly one was meant to be parallel=False to
    # compare against the serial path; confirm the intent.
    s1 = m.as_signal(out_of_range_to_nan=True, parallel=True,
                     show_progressbar=False)
    s2 = m.as_signal(out_of_range_to_nan=True, parallel=True,
                     show_progressbar=False)
    np.testing.assert_allclose(s1, s2)
|
sem-geologist/hyperspy
|
hyperspy/tests/model/test_model.py
|
Python
|
gpl-3.0
| 45,608
|
[
"Gaussian"
] |
ff46a00e846313115e34c677e9adb06e812c10af555a8ea4b4c0c6d52361441b
|
import pytest
import numpy as np
import elfi
def simple_gaussian_model(true_param, seed, n_summaries=10):
    """The simple gaussian model that has been used as a toy example in the LFIRE paper.

    Builds an ElfiModel with a uniform prior on `mu` over [-5, 5], a Gaussian
    simulator observed at `true_param`, and the first `n_summaries` powers of
    the simulator output as summary statistics.
    """
    def power(x, y):
        # Summary statistic: element-wise y-th power of the data.
        return x**y
    m = elfi.ElfiModel()
    mu = elfi.Prior('uniform', -5, 10, model=m, name='mu')
    y = elfi.Simulator(gauss, *[mu], observed=gauss(true_param, seed=seed), name='y')
    for i in range(n_summaries):
        elfi.Summary(power, y, i, model=m, name=f'power_{i}')
    return m
def gauss(mu, sigma=3, n_obs=1, batch_size=1, seed=None, *args, **kwargs):
    """Draw a (batch_size, n_obs) array of Gaussian samples.

    `mu` and `sigma` are broadcast as column vectors; an integer `seed`
    reseeds NumPy's global RNG for reproducibility. Extra args are ignored
    (ELFI passes bookkeeping keywords through).
    """
    if isinstance(seed, int):
        np.random.seed(seed)
    mu_col = np.asanyarray(mu).reshape((-1, 1))
    sigma_col = np.asanyarray(sigma).reshape((-1, 1))
    return np.random.normal(mu_col, sigma_col, size=(batch_size, n_obs))
@pytest.fixture
def true_param():
    """True mean used to generate the observed data."""
    return 2.6
@pytest.fixture
def seed():
    """Fixed RNG seed so the observed data is reproducible."""
    return 4
@pytest.fixture
def parameter_values():
    """Parameter point at which the likelihood samples are generated."""
    return {'mu': 1.0}
@pytest.fixture
def bolfire_method(true_param, seed):
    """BOLFIRE instance built on the toy Gaussian model."""
    m = simple_gaussian_model(true_param, seed)
    return elfi.BOLFIRE(m)
def test_generate_marginal(bolfire_method):
    # Marginal sample: default batch of 10 draws x 10 summaries.
    assert bolfire_method._generate_marginal().shape == (10, 10)
def test_generate_likelihood(bolfire_method, parameter_values):
    # Likelihood sample at a fixed parameter: 10 draws x 10 summaries.
    assert bolfire_method._generate_likelihood(parameter_values).shape == (10, 10)
def test_generate_training_data(bolfire_method, parameter_values):
    """Training data stacks likelihood and marginal samples: 20 rows total."""
    likelihood = bolfire_method._generate_likelihood(parameter_values)
    features, labels = bolfire_method._generate_training_data(
        likelihood, bolfire_method.marginal)
    assert features.shape == (20, 10)
    assert labels.shape == (20,)
|
elfi-dev/elfi
|
tests/unit/test_bolfire_unit.py
|
Python
|
bsd-3-clause
| 1,654
|
[
"Gaussian"
] |
818258680d0567afd8ed4b790297b544a0f76f6cc570bec9dd464a587fbb1e65
|
#
# Copyright 2015 LinkedIn Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
from com.ziclix.python.sql import zxJDBC
from wherehows.common import Constant
from org.slf4j import LoggerFactory
import sys, os, datetime
class CodeSearchLoad:
  """Jython ETL step: load SCM repo metadata from CSV into MySQL staging and
  merge repository committers into dataset owners.

  Runs under Jython (zxJDBC / JDBC); `args` is a dict-like of job properties
  keyed by the wherehows Constant names.
  """
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    # DB connection properties supplied by the ETL launcher.
    username = args[Constant.WH_DB_USERNAME_KEY]
    password = args[Constant.WH_DB_PASSWORD_KEY]
    JDBC_DRIVER = args[Constant.WH_DB_DRIVER_KEY]
    JDBC_URL = args[Constant.WH_DB_URL_KEY]
    self.database_scm_repo_file = args[Constant.DATABASE_SCM_REPO_OUTPUT_KEY]
    self.app_id = args[Constant.APP_ID_KEY]
    self.wh_etl_exec_id = args[Constant.WH_EXEC_ID_KEY]
    self.conn_mysql = zxJDBC.connect(JDBC_URL, username, password, JDBC_DRIVER)
    self.conn_cursor = self.conn_mysql.cursor()
    # Optional override for InnoDB lock wait timeout on this session.
    if Constant.INNODB_LOCK_WAIT_TIMEOUT in args:
      lock_wait_time = args[Constant.INNODB_LOCK_WAIT_TIMEOUT]
      self.conn_cursor.execute("SET innodb_lock_wait_timeout = %s;" % lock_wait_time)
    self.logger.info("Load Code Search CSV into {}, app_id {}, wh_exec_id {}"
                     .format(JDBC_URL, self.app_id, self.wh_etl_exec_id))
  def load_database_scm_repo(self):
    """Replace this app's rows in stg_database_scm_map from the CSV file."""
    # NOTE(review): in this non-raw string '\Z' stays a literal backslash-Z
    # but '\0' becomes a NUL character before reaching MySQL -- presumably
    # intentional (NUL escape char); confirm against the file writer.
    load_database_scm_repos_cmd = '''
    DELETE FROM stg_database_scm_map WHERE app_id = {app_id};

    -- load into stg table
    LOAD DATA LOCAL INFILE '{source_file}'
    INTO TABLE stg_database_scm_map
    FIELDS TERMINATED BY '\Z' ESCAPED BY '\0'
    LINES TERMINATED BY '\n'
    (`scm_url`, `database_name`, `database_type`, `app_name`, `filepath`, `committers`, `scm_type`)
    '''.format(source_file=self.database_scm_repo_file, app_id=self.app_id)
    self.executeCommands(load_database_scm_repos_cmd)
    self.logger.info("finish loading SCM metadata.")
  def merge_repo_owners_into_dataset_owners(self):
    """Stamp staging rows, resolve committers to LDAP user/group app_ids and
    upsert them into dataset_owner, then renumber owner sort ids."""
    merge_repo_owners_into_dataset_owners_cmd = '''
    UPDATE stg_database_scm_map stg
    SET stg.app_id = {app_id};

    UPDATE stg_database_scm_map stg
    SET stg.wh_etl_exec_id = {wh_etl_exec_id};

    -- find owner app_id, 300 for USER, 301 for GROUP
    UPDATE stg_database_scm_map stg
    JOIN (select app_id, user_id from dir_external_user_info) ldap
    ON FIND_IN_SET(ldap.user_id,stg.committers)
    SET stg.app_id = ldap.app_id;

    UPDATE stg_database_scm_map stg
    JOIN (select distinct app_id, group_id from dir_external_group_user_map) ldap
    ON FIND_IN_SET(ldap.group_id,stg.committers)
    SET stg.app_id = ldap.app_id;

    -- INSERT/UPDATE into dataset_owner
    INSERT INTO dataset_owner (
    dataset_id, dataset_urn, owner_id, sort_id, namespace, app_id, owner_type, owner_sub_type, owner_id_type,
    owner_source, db_ids, is_group, is_active, source_time, created_time, wh_etl_exec_id, confirmed_by, confirmed_on
    )
    SELECT * FROM (
      SELECT ds.id, ds.urn, u.user_id n_owner_id, '0' n_sort_id,
        'urn:li:corpuser' n_namespace, r.app_id,
        'Owner' n_owner_type,
        null n_owner_sub_type,
        case when r.app_id = 300 then 'USER' when r.app_id = 301 then 'GROUP' else null end n_owner_id_type,
        'SCM' n_owner_source, null db_ids,
        IF(r.app_id = 301, 'Y', 'N') is_group,
        'Y' is_active, 0 source_time, unix_timestamp(NOW()) created_time, r.wh_etl_exec_id,
        'system' confirmed_by, unix_timestamp(NOW()) confirmed_on
      FROM dict_dataset ds
      JOIN stg_database_scm_map r
      ON ds.urn LIKE concat(r.database_type, ':///', r.database_name,'/%')
      JOIN dir_external_user_info u
      ON FIND_IN_SET(u.user_id,r.committers)
    ) n
    ON DUPLICATE KEY UPDATE
    dataset_urn = n.urn,
    sort_id = COALESCE(n.n_sort_id, sort_id),
    owner_type = n.n_owner_type,
    owner_sub_type = COALESCE(owner_sub_type, n.n_owner_sub_type),
    owner_id_type = COALESCE(owner_id_type, n.n_owner_id_type),
    owner_source = CASE WHEN owner_source is null THEN 'SCM'
        WHEN owner_source LIKE '%SCM%' THEN owner_source ELSE CONCAT(owner_source, ',SCM') END,
    namespace = COALESCE(namespace, n.n_namespace),
    wh_etl_exec_id = n.wh_etl_exec_id,
    modified_time = unix_timestamp(NOW()),
    confirmed_by = 'system',
    confirmed_on = unix_timestamp(NOW());

    -- reset dataset owner sort id
    UPDATE dataset_owner d
    JOIN (
    select dataset_urn, dataset_id, owner_type, owner_id, sort_id,
      @owner_rank := IF(@current_dataset_id = dataset_id, @owner_rank + 1, 0) rank,
      @current_dataset_id := dataset_id
    from dataset_owner, (select @current_dataset_id := 0, @owner_rank := 0) t
    where dataset_urn like 'espresso:///%' or dataset_urn like 'oracle:///%'
    order by dataset_id asc, owner_type desc, sort_id asc, owner_id asc
    ) s
    ON d.dataset_id = s.dataset_id AND d.owner_id = s.owner_id
    SET d.sort_id = s.rank;
    '''.format(app_id=self.app_id,wh_etl_exec_id = self.wh_etl_exec_id)
    self.executeCommands(merge_repo_owners_into_dataset_owners_cmd)
    self.logger.info("finish merging repo and dataset owners")
  def executeCommands(self, commands):
    # Execute a multi-statement script one statement at a time.
    # NOTE(review): naive split on ';' -- would break on a literal semicolon
    # inside an SQL string; the scripts above avoid that.
    for cmd in commands.split(";"):
      self.logger.debug(cmd)
      self.conn_cursor.execute(cmd)
      self.conn_mysql.commit()
  def run(self):
    """Run the full load+merge pipeline, always closing the connection."""
    try:
      begin = datetime.datetime.now().strftime("%H:%M:%S")
      self.load_database_scm_repo()
      self.merge_repo_owners_into_dataset_owners()
      end = datetime.datetime.now().strftime("%H:%M:%S")
      self.logger.info("Load Code Search metadata [%s -> %s]" % (str(begin), str(end)))
    finally:
      self.conn_cursor.close()
      self.conn_mysql.close()
if __name__ == "__main__":
  # args is presumably a dict-like properties object passed by the WhereHows
  # Jython launcher via sys.argv[1] (CodeSearchLoad indexes it by Constant
  # keys) -- confirm against the launcher.
  args = sys.argv[1]
  l = CodeSearchLoad(args)
  l.run()
|
thomas-young-2013/wherehowsX
|
metadata-etl/src/main/resources/jython/CodeSearchLoad.py
|
Python
|
apache-2.0
| 6,633
|
[
"ESPResSo"
] |
6c8ec7225d8118f370a3308653fbf3242f7d24c12ddc3de5426e8a0e51b96eed
|
# -*- coding: utf-8 -*-
#
# -----------------------------------------------------------------------------------
# Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------------
import sys
sys.path.append("..")
from hackathon import (
RequiredFeature,
Component,
Context,
)
from hackathon.database.models import (
Experiment,
DockerContainer,
HackathonAzureKey,
PortBinding,
DockerHostServer,
)
from hackathon.constants import (
EStatus,
PortBindingType,
VEStatus,
HEALTH,
)
from compiler.ast import (
flatten,
)
from threading import (
Lock,
)
from hackathon.template.docker_template_unit import (
DockerTemplateUnit,
)
from hackathon.azureformation.endpoint import (
Endpoint
)
from docker_formation_base import (
DockerFormationBase,
)
from hackathon.azureformation.service import (
Service,
)
from hackathon.hackathon_response import (
internal_server_error
)
from hackathon.constants import (
HEALTH_STATUS,
)
import json
import requests
from datetime import timedelta
class HostedDockerFormation(DockerFormationBase, Component):
template_manager = RequiredFeature("template_manager")
hackathon_manager = RequiredFeature("hackathon_manager")
scheduler = RequiredFeature("scheduler")
"""
Docker resource management based on docker remote api v1.18
Host resource are required. Azure key required in case of azure.
"""
application_json = {'content-type': 'application/json'}
host_ports = []
host_port_max_num = 30
docker_host_manager = RequiredFeature("docker_host_manager")
    def __init__(self):
        # Lock guarding the shared class-level host_ports cache
        # (see __get_available_host_port / __clear_ports_cache).
        self.lock = Lock()
    def report_health(self):
        """Report health of DockerHostServers
        :rtype: dict
        :return health status item of docker. OK when all servers running, Warning if some of them working, Error if no server running
        """
        try:
            hosts = self.db.find_all_objects(DockerHostServer)
            alive = 0
            for host in hosts:
                if self.ping(host):
                    alive += 1
            # NOTE(review): with zero configured hosts, alive == len(hosts) == 0
            # and this reports OK -- confirm that is intended.
            if alive == len(hosts):
                return {
                    HEALTH.STATUS: HEALTH_STATUS.OK
                }
            elif alive > 0:
                return {
                    HEALTH.STATUS: HEALTH_STATUS.WARNING,
                    HEALTH.DESCRIPTION: 'at least one docker host servers are down'
                }
            else:
                return {
                    HEALTH.STATUS: HEALTH_STATUS.ERROR,
                    HEALTH.DESCRIPTION: 'all docker host servers are down'
                }
        except Exception as e:
            # e.message is Python 2 only (this module imports compiler.ast,
            # so it targets Python 2); str(e) would be needed on Python 3.
            return {
                HEALTH.STATUS: HEALTH_STATUS.ERROR,
                HEALTH.DESCRIPTION: e.message
            }
    def get_available_host_port(self, docker_host, private_port):
        """
        We use double operation to ensure ports not conflicted, first we get ports from host machine, but in multiple
        threads situation, the interval between two requests is too short, maybe the first thread do not get port
        ended, so the host machine don't update ports in time, thus the second thread may get the same port.
        To avoid this condition, we use static variable host_ports to cache the latest host_port_max_num ports.
        Every thread visit variable host_ports is synchronized.
        To save space, we will release the ports if the number over host_port_max_num.
        :param docker_host:
        :param private_port:
        :return:
        """
        self.log.debug("try to assign docker port %d on server %r" % (private_port, docker_host))
        containers = self.__containers_info(docker_host)
        # flatten comes from compiler.ast (Python 2); eager map returns a list.
        host_ports = flatten(map(lambda p: p['Ports'], containers))

        # todo if azure return -1
        def sub(port):
            # Ports without a public mapping are represented as -1.
            return port["PublicPort"] if "PublicPort" in port else -1
        host_public_ports = map(lambda x: sub(x), host_ports)
        return self.__get_available_host_port(host_public_ports, private_port)
    def stop(self, name, **kwargs):
        """
        stop a container
        :param name: container's name
        :param docker_host: host machine where container running
        :return:
        """
        container = kwargs["container"]
        expr_id = kwargs["expr_id"]
        docker_host = self.docker_host_manager.get_host_server_by_id(container.host_server_id)
        # Only issue the remote-API stop if the container still exists;
        # bookkeeping (__stop_container) runs either way.
        if self.__get_container(name, docker_host) is not None:
            containers_url = '%s/containers/%s/stop' % (self.get_vm_url(docker_host), name)
            req = requests.post(containers_url)
            self.log.debug(req.content)
        self.__stop_container(expr_id, container, docker_host)
    def delete(self, name, **kwargs):
        """
        delete a container
        :param name:
        :param docker_host:
        :return:
        """
        container = kwargs["container"]
        expr_id = kwargs["expr_id"]
        docker_host = self.docker_host_manager.get_host_server_by_id(container.host_server_id)
        # force=1 kills and removes the container in one remote-API call.
        containers_url = '%s/containers/%s?force=1' % (self.get_vm_url(docker_host), name)
        req = requests.delete(containers_url)
        self.log.debug(req.content)
        self.__stop_container(expr_id, container, docker_host)
def start(self, unit, **kwargs):
"""
In this function, we create a container and then start a container
:param unit: docker template unit
:param docker_host:
:return:
"""
virtual_environment = kwargs["virtual_environment"]
hackathon = kwargs["hackathon"]
experiment = kwargs["experiment"]
container_name = unit.get_name()
host_server = self.docker_host_manager.get_available_docker_host(1, hackathon)
container = DockerContainer(experiment,
name=container_name,
host_server_id=host_server.id,
virtual_environment=virtual_environment,
image=unit.get_image_with_tag())
self.db.add_object(container)
self.db.commit()
# port binding
ps = map(lambda p:
[p.port_from, p.port_to],
self.__assign_ports(experiment, host_server, virtual_environment, unit.get_ports()))
# guacamole config
guacamole = unit.get_remote()
port_cfg = filter(lambda p:
p[DockerTemplateUnit.PORTS_PORT] == guacamole[DockerTemplateUnit.REMOTE_PORT],
unit.get_ports())
if len(port_cfg) > 0:
gc = {
"displayname": container_name,
"name": container_name,
"protocol": guacamole[DockerTemplateUnit.REMOTE_PROTOCOL],
"hostname": host_server.public_ip,
"port": port_cfg[0]["public_port"]
}
if DockerTemplateUnit.REMOTE_USERNAME in guacamole:
gc["username"] = guacamole[DockerTemplateUnit.REMOTE_USERNAME]
if DockerTemplateUnit.REMOTE_PASSWORD in guacamole:
gc["password"] = guacamole[DockerTemplateUnit.REMOTE_PASSWORD]
# save guacamole config into DB
virtual_environment.remote_paras = json.dumps(gc)
exist = self.__get_container(container_name, host_server)
if exist is not None:
container.container_id = exist["Id"]
host_server.container_count += 1
self.db.commit()
else:
container_config = unit.get_container_config()
# create container
try:
container_create_result = self.__create(host_server, container_config, container_name)
except Exception as e:
self.log.error(e)
self.log.error("container %s fail to create" % container_name)
return None
container.container_id = container_create_result["Id"]
# start container
try:
self.__start(host_server, container_create_result["Id"])
host_server.container_count += 1
self.db.commit()
except Exception as e:
self.log.error(e)
self.log.error("container %s fail to start" % container["Id"])
return None
# check
if self.__get_container(container_name, host_server) is None:
self.log.error(
"container %s has started, but can not find it in containers' info, maybe it exited again."
% container_name)
return None
self.log.debug("starting container %s is ended ... " % container_name)
virtual_environment.status = VEStatus.RUNNING
self.db.commit()
return container
def get_vm_url(self, docker_host):
return 'http://%s:%d' % (docker_host.public_dns, docker_host.public_docker_api_port)
    def pull_image(self, context):
        """Ask the docker host to pull image_name:tag via the remote API."""
        docker_host, image_name, tag = context.docker_host, context.image_name, context.tag
        pull_image_url = self.get_vm_url(docker_host) + "/images/create?fromImage=" + image_name + '&tag=' + tag
        self.log.debug(" send request to pull image:" + pull_image_url)
        return requests.post(pull_image_url)
    def get_pulled_images(self, docker_host):
        """Return the flat list of image:tag strings present on the host."""
        get_images_url = self.get_vm_url(docker_host) + "/images/json?all=0"
        current_images_info = json.loads(self.util.get_remote(get_images_url))  # [{},{},{}]
        current_images_tags = map(lambda x: x['RepoTags'], current_images_info)  # [[],[],[]]
        return flatten(current_images_tags)  # [ imange:tag, image:tag ]
    def ensure_images(self):
        """Schedule image pulls for every online hackathon."""
        hackathons = self.hackathon_manager.get_online_hackathons()
        # Python 2 map is eager, so the side effects run here; on Python 3
        # this would be a lazy no-op and would need a plain loop.
        map(lambda h: self.__ensure_images_for_hackathon(h), hackathons)
    def check_container_status_is_normal(self, docker_container):
        """check container's running status on docker host
        if status is Running or Restarting returns True , else returns False
        :type docker_container: DockerContainer
        :param docker_container: the container that you want to check
        :type boolean
        :return True: the container running status is running or restarting , else returns False
        """
        docker_host = self.db.find_first_object_by(DockerHostServer, id=docker_container.host_server_id)
        if docker_host is not None:
            container_info = self.__get_container_info_by_container_id(docker_host, docker_container.container_id)
            # Unknown container (e.g. removed out-of-band) counts as abnormal.
            if container_info is None:
                return False
            return container_info['State']['Running'] or container_info['State']['Restarting']
        else:
            return False
    def ping(self, docker_host):
        """Ping docker host to check running status
        :type docker_host : DockerHostServer
        :param docker_host: the hots that you want to check docker service running status
        :type boolean
        :return: True: running status is OK, else return False
        """
        try:
            # NOTE(review): uses the private __get_vm_url while the rest of the
            # class uses get_vm_url -- the two build identical URLs; consider
            # consolidating on one.
            ping_url = '%s/_ping' % self.__get_vm_url(docker_host)
            req = requests.get(ping_url)
            self.log.debug(req.content)
            return req.status_code == 200 and req.content == 'OK'
        except Exception as e:
            self.log.error(e)
            return False
# --------------------------------------------- helper function ---------------------------------------------#
def __name_match(self, id, lists):
for list in lists:
if id in list:
return True
return False
def __get_schedule_job_id(self, hackathon):
return "pull_images_for_hackathon_%s" % hackathon.id
    def __ensure_images_for_hackathon(self, hackathon):
        """Add (or remove) the recurring image-pull job for one hackathon."""
        # only ensure those alauda is disabled
        if hackathon.is_alauda_enabled():
            self.log.debug("schedule job of hackathon '%s(%d)' removed for alauda enabled" %
                           (hackathon.name, hackathon.id))
            self.scheduler.remove_job(self.__get_schedule_job_id(hackathon))
            return

        self.log.debug("adding schedule job to ensure images for hackathon [%d]%s" % (hackathon.id, hackathon.name))
        # First run shortly after scheduling, then hourly.
        next_run_time = self.util.get_now() + timedelta(seconds=3)
        context = Context(hackathon_id=hackathon.id)
        self.scheduler.add_interval(feature="template_manager",
                                    method="pull_images_for_hackathon",
                                    id=self.__get_schedule_job_id(hackathon),
                                    context=context,
                                    next_run_time=next_run_time,
                                    minutes=60)
def __get_vm_url(self, docker_host):
return 'http://%s:%d' % (docker_host.public_dns, docker_host.public_docker_api_port)
    def __clear_ports_cache(self):
        """
        cache ports, if ports' number more than host_port_max_num, release the ports.
        But if there is a thread apply new ports, we will do this operation in the next loop.
        Because the host machine do not update the ports information,
        if we release ports now, the new ports will be lost.
        :return:
        """
        num = self.db.count(Experiment, Experiment.status == EStatus.STARTING)
        if num > 0:
            self.log.debug("there are %d experiment is starting, host ports will updated in next loop" % num)
            return
        self.log.debug("-----release ports cache successfully------")
        # Rebinding creates an instance attribute shadowing the class-level
        # host_ports list; subsequent reads on this instance see the empty list.
        self.host_ports = []
def __stop_container(self, expr_id, container, docker_host):
self.__release_ports(expr_id, docker_host)
docker_host.container_count -= 1
if docker_host.container_count < 0:
docker_host.container_count = 0
self.db.commit()
def __containers_info(self, docker_host):
containers_url = '%s/containers/json' % self.get_vm_url(docker_host)
req = requests.get(containers_url)
self.log.debug(req.content)
return self.util.convert(json.loads(req.content))
    def __get_available_host_port(self, port_bindings, port):
        """
        simple lock mechanism, visit static variable ports synchronize, because port_bindings is not in real-time,
        so we should cache the latest ports, when the cache ports number is more than host_port_max_num,
        we will release it to save space.
        :param port_bindings: ports already bound on the host (container-side view)
        :param port: the container port a host-side port is needed for
        :return: a free host port number (starting the probe at port + 10000)
        """
        self.lock.acquire()
        try:
            host_port = port + 10000
            # Linear probe upward, skipping ports already bound on the host
            # and ports recently handed out (cached in self.host_ports).
            while host_port in port_bindings or host_port in self.host_ports:
                host_port += 1
                if host_port >= 65535:
                    self.log.error("port used up on this host server")
                    raise Exception("no port available")
            if len(self.host_ports) >= self.host_port_max_num:
                # NOTE(review): this drops the whole cache (unless experiments
                # are starting) before appending the new port -- confirm intended.
                self.__clear_ports_cache()
            self.host_ports.append(host_port)
            self.log.debug("host_port is %d " % host_port)
            return host_port
        finally:
            self.lock.release()
def __get_container(self, name, docker_host):
containers = self.__containers_info(docker_host)
return next((c for c in containers if name in c["Names"] or '/' + name in c["Names"]), None)
def __create(self, docker_host, container_config, container_name):
"""
only create a container, in this step, we cannot start a container.
:param docker_host:
:param container_config:
:param container_name:
:return:
"""
containers_url = '%s/containers/create?name=%s' % (self.get_vm_url(docker_host), container_name)
req = requests.post(containers_url, data=json.dumps(container_config), headers=self.application_json)
self.log.debug(req.content)
container = json.loads(req.content)
if container is None:
raise AssertionError("container is none")
return container
def __start(self, docker_host, container_id):
"""
start a container
:param docker_host:
:param container_id:
:return:
"""
url = '%s/containers/%s/start' % (self.get_vm_url(docker_host), container_id)
req = requests.post(url, headers=self.application_json)
self.log.debug(req.content)
def __get_available_public_ports(self, expr_id, host_server, host_ports):
self.log.debug("starting to get azure ports")
ep = Endpoint(Service(self.load_azure_key_id(expr_id)))
host_server_name = host_server.vm_name
host_server_dns = host_server.public_dns.split('.')[0]
public_endpoints = ep.assign_public_endpoints(host_server_dns, 'Production', host_server_name, host_ports)
if not isinstance(public_endpoints, list):
self.log.debug("failed to get public ports")
return internal_server_error('cannot get public ports')
self.log.debug("public ports : %s" % public_endpoints)
return public_endpoints
def load_azure_key_id(self, expr_id):
expr = self.db.get_object(Experiment, expr_id)
hak = self.db.find_first_object_by(HackathonAzureKey, hackathon_id=expr.hackathon_id)
return hak.azure_key_id
def __assign_ports(self, expr, host_server, ve, port_cfg):
"""
assign ports from host server
:param expr:
:param host_server:
:param ve:
:param port_cfg:
:return:
"""
# get 'host_port'
map(lambda p:
p.update(
{DockerTemplateUnit.PORTS_HOST_PORT: self.get_available_host_port(host_server, p[
DockerTemplateUnit.PORTS_PORT])}
),
port_cfg)
# get 'public' cfg
public_ports_cfg = filter(lambda p: DockerTemplateUnit.PORTS_PUBLIC in p, port_cfg)
host_ports = [u[DockerTemplateUnit.PORTS_HOST_PORT] for u in public_ports_cfg]
if self.util.safe_get_config("environment", "prod") == "local":
map(lambda cfg: cfg.update({DockerTemplateUnit.PORTS_PUBLIC_PORT: cfg[DockerTemplateUnit.PORTS_HOST_PORT]}),
public_ports_cfg)
else:
public_ports = self.__get_available_public_ports(expr.id, host_server, host_ports)
for i in range(len(public_ports_cfg)):
public_ports_cfg[i][DockerTemplateUnit.PORTS_PUBLIC_PORT] = public_ports[i]
binding_dockers = []
# update port binding
for public_cfg in public_ports_cfg:
binding_cloud_service = PortBinding(name=public_cfg[DockerTemplateUnit.PORTS_NAME],
port_from=public_cfg[DockerTemplateUnit.PORTS_PUBLIC_PORT],
port_to=public_cfg[DockerTemplateUnit.PORTS_HOST_PORT],
binding_type=PortBindingType.CLOUD_SERVICE,
binding_resource_id=host_server.id,
virtual_environment=ve,
experiment=expr,
url=public_cfg[DockerTemplateUnit.PORTS_URL]
if DockerTemplateUnit.PORTS_URL in public_cfg else None)
binding_docker = PortBinding(name=public_cfg[DockerTemplateUnit.PORTS_NAME],
port_from=public_cfg[DockerTemplateUnit.PORTS_HOST_PORT],
port_to=public_cfg[DockerTemplateUnit.PORTS_PORT],
binding_type=PortBindingType.DOCKER,
binding_resource_id=host_server.id,
virtual_environment=ve,
experiment=expr)
binding_dockers.append(binding_docker)
self.db.add_object(binding_cloud_service)
self.db.add_object(binding_docker)
self.db.commit()
local_ports_cfg = filter(lambda p: DockerTemplateUnit.PORTS_PUBLIC not in p, port_cfg)
for local_cfg in local_ports_cfg:
port_binding = PortBinding(name=local_cfg[DockerTemplateUnit.PORTS_NAME],
port_from=local_cfg[DockerTemplateUnit.PORTS_HOST_PORT],
port_to=local_cfg[DockerTemplateUnit.PORTS_PORT],
binding_type=PortBindingType.DOCKER,
binding_resource_id=host_server.id,
virtual_environment=ve,
experiment=expr)
binding_dockers.append(port_binding)
self.db.add_object(port_binding)
self.db.commit()
return binding_dockers
def __release_ports(self, expr_id, host_server):
"""
release the specified experiment's ports
"""
self.log.debug("Begin to release ports: expr_id: %d, host_server: %r" % (expr_id, host_server))
ports_binding = self.db.find_all_objects_by(PortBinding, experiment_id=expr_id)
if ports_binding is not None:
docker_binding = filter(
lambda u: self.util.safe_get_config("environment", "prod") != "local" and u.binding_type == 1,
ports_binding)
ports_to = [d.port_to for d in docker_binding]
if len(ports_to) != 0:
self.__release_public_ports(expr_id, host_server, ports_to)
for port in ports_binding:
self.db.delete_object(port)
self.db.commit()
self.log.debug("End to release ports: expr_id: %d, host_server: %r" % (expr_id, host_server))
def __release_public_ports(self, expr_id, host_server, host_ports):
ep = Endpoint(Service(self.load_azure_key_id(expr_id)))
host_server_name = host_server.vm_name
host_server_dns = host_server.public_dns.split('.')[0]
self.log.debug("starting to release ports ... ")
ep.release_public_endpoints(host_server_dns, 'Production', host_server_name, host_ports)
def __get_container_info_by_container_id(self, docker_host, container_id):
"""get a container info by container_id from a docker host
:type docker_host: str|unicode
:param: the docker host which you want to search container from
:type container_id: str|unicode
:param as a parameter that you want to search container though docker remote API
:return dic object of the container info if not None
"""
try:
get_container_url = self.get_vm_url(docker_host) + "/container/%s/json?all=0" % container_id
req = requests.get(get_container_url)
if req.status_code >= 200 and req.status_code < 300 :
container_info = json.loads(req.content)
return container_info
return None
except Exception as ex:
self.log.error(ex)
return None
|
xunxunzgq/open-hackathon-bak_01
|
open-hackathon-server/src/hackathon/docker/hosted_docker.py
|
Python
|
mit
| 24,562
|
[
"VisIt"
] |
3aacb61c9d879ea58ed6808783ea654368c5551fa96590b4e87269053e6f3563
|
from ase import Atoms
from ase.data.molecules import molecule
from ase.visualize import view
from gpaw import GPAW
from gpaw.wannier import Wannier

# Set up an H2O molecule with a GPAW calculator (4 bands: the occupied
# valence states of water).
calc = GPAW(nbands=4)
atoms = molecule('H2O')
atoms.center(vacuum=3.)
atoms.set_calculator(calc)
atoms.get_potential_energy()

# Initialize the Wannier class and localize the orbitals.
w = Wannier(calc)
w.localize()
centers = w.get_centers()

# Mark each of the four Wannier centers with a ghost 'X' atom so they are
# visible in the viewer.  `Atoms` must be imported explicitly (the original
# script used it without any import, raising NameError here).
view(atoms + Atoms(symbols='X4', positions=centers))
|
qsnake/gpaw
|
doc/exercises/wannier/wannier-h2o.py
|
Python
|
gpl-3.0
| 397
|
[
"ASE",
"GPAW"
] |
42c8fdce4b828b778f084a0941bcf7dadffbd6f542bedb7e0a76a78877f70948
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"package containing various sphinx extensions"
#legacy aliases
|
rmcgibbo/psi4public
|
doc/sphinxman/source/psi4doc/ext/__init__.py
|
Python
|
lgpl-3.0
| 978
|
[
"Psi4"
] |
785dd09fdca20a408283535bd3eeddbbdfe3991bc9dc79a82929f8bb60e8704e
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import ldap
import re
import sys
import urllib
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
from nose.tools import assert_true, assert_equal, assert_false
import desktop.conf
from desktop.lib.django_test_util import make_logged_in_client
from django.contrib.auth.models import User, Group
from django.utils.encoding import smart_unicode
from django.core.urlresolvers import reverse
from django.test.client import Client
from useradmin.models import HuePermission, GroupPermission, UserProfile
from useradmin.models import get_profile, get_default_user_group
import useradmin.conf
import useradmin.ldap_access
from hadoop import pseudo_hdfs4
from useradmin.password_policy import reset_password_policy
def reset_all_users():
  """Reset to a clean state by deleting all users"""
  all_users = User.objects.all()
  for account in all_users:
    account.delete()
def reset_all_groups():
  """Reset to a clean state by deleting all groups"""
  # Clear the default-group override first so no group is protected.
  useradmin.conf.DEFAULT_USER_GROUP.set_for_testing(None)
  all_groups = Group.objects.all()
  for existing_group in all_groups:
    existing_group.delete()
class LdapTestConnection(object):
  """
  Test class which mimics the behaviour of LdapConnection (from ldap_access.py).
  It also includes functionality to fake modifications to an LDAP server.  It is designed
  as a singleton, to allow for changes to persist across discrete connections.

  This class assumes uid is the user_name_attr.
  """
  def __init__(self):
    # NOTE(review): despite the docstring, each connection gets a fresh
    # Data() instance -- not a true singleton.  Confirm intended.
    self._instance = LdapTestConnection.Data()

  def add_user_group_for_test(self, user, group):
    self._instance.groups[group]['members'].append(user)

  def remove_user_group_for_test(self, user, group):
    self._instance.groups[group]['members'].remove(user)

  def add_posix_user_group_for_test(self, user, group):
    self._instance.groups[group]['posix_members'].append(user)

  def remove_posix_user_group_for_test(self, user, group):
    self._instance.groups[group]['posix_members'].remove(user)

  def find_users(self, username_pattern, search_attr=None, user_name_attr=None, find_by_dn=False, scope=ldap.SCOPE_SUBTREE):
    """ Returns info for a particular user via a case insensitive search """
    if find_by_dn:
      data = filter(lambda attrs: attrs['dn'] == username_pattern, self._instance.users.values())
    else:
      username_pattern = "^%s$" % username_pattern.replace('.','\\.').replace('*', '.*')
      username_fsm = re.compile(username_pattern, flags=re.I)
      usernames = filter(lambda username: username_fsm.match(username), self._instance.users.keys())
      data = [self._instance.users.get(username) for username in usernames]
    return data

  def find_groups(self, groupname_pattern, search_attr=None, group_name_attr=None, group_member_attr=None, group_filter=None, find_by_dn=False, scope=ldap.SCOPE_SUBTREE):
    """ Return all groups in the system with parents and children """
    if find_by_dn:
      data = filter(lambda attrs: attrs['dn'] == groupname_pattern, self._instance.groups.values())
      # SCOPE_SUBTREE means we return all sub-entries of the desired entry along with the desired entry.
      if data and scope == ldap.SCOPE_SUBTREE:
        sub_data = filter(lambda attrs: attrs['dn'].endswith(data[0]['dn']), self._instance.groups.values())
        data.extend(sub_data)
    else:
      groupname_pattern = "^%s$" % groupname_pattern.replace('.','\\.').replace('*', '.*')
      groupnames = filter(lambda username: re.match(groupname_pattern, username), self._instance.groups.keys())
      data = [self._instance.groups.get(groupname) for groupname in groupnames]
    return data

  def find_members_of_group(self, dn, search_attr, ldap_filter, scope=ldap.SCOPE_SUBTREE):
    """Return the user and group records whose DN is a member of the group at `dn`."""
    members = []
    # Iterate the group *records* via .values(): iterating the dict itself
    # (as the original code did) yields the string keys, so the
    # group_info['dn'] lookup below raised TypeError.
    for group_info in self._instance.groups.values():
      if group_info['dn'] == dn:
        members.extend(group_info['members'])

    members = set(members)

    users = []
    for user_info in self._instance.users.values():
      if user_info['dn'] in members:
        users.append(user_info)

    groups = []
    for group_info in self._instance.groups.values():
      if group_info['dn'] in members:
        groups.append(group_info)

    return users + groups

  def find_users_of_group(self, dn):
    members = []
    for group_info in self._instance.groups.values():
      if group_info['dn'] == dn:
        members.extend(group_info['members'])

    members = set(members)

    users = []
    for user_info in self._instance.users.values():
      if user_info['dn'] in members:
        users.append(user_info)

    return users

  def find_groups_of_group(self, dn):
    members = []
    for group_info in self._instance.groups.values():
      if group_info['dn'] == dn:
        members.extend(group_info['members'])

    groups = []
    for group_info in self._instance.groups.values():
      if group_info['dn'] in members:
        groups.append(group_info)

    return groups

  class Data:
    # In-memory fake of the LDAP directory: `users` and `groups` map names to
    # dicts shaped like real LDAP search results (dn, attributes, members).
    def __init__(self):
      self.users = {'moe': {'dn': 'uid=moe,ou=People,dc=example,dc=com', 'username':'moe', 'first':'Moe', 'email':'moe@stooges.com', 'groups': ['cn=TestUsers,ou=Groups,dc=example,dc=com']},
                    'lårry': {'dn': 'uid=lårry,ou=People,dc=example,dc=com', 'username':'lårry', 'first':'Larry', 'last':'Stooge', 'email':'larry@stooges.com', 'groups': ['cn=TestUsers,ou=Groups,dc=example,dc=com', 'cn=Test Administrators,cn=TestUsers,ou=Groups,dc=example,dc=com']},
                    'curly': {'dn': 'uid=curly,ou=People,dc=example,dc=com', 'username':'curly', 'first':'Curly', 'last':'Stooge', 'email':'curly@stooges.com', 'groups': ['cn=TestUsers,ou=Groups,dc=example,dc=com', 'cn=Test Administrators,cn=TestUsers,ou=Groups,dc=example,dc=com']},
                    'Rock': {'dn': 'uid=Rock,ou=People,dc=example,dc=com', 'username':'Rock', 'first':'rock', 'last':'man', 'email':'rockman@stooges.com', 'groups': ['cn=Test Administrators,cn=TestUsers,ou=Groups,dc=example,dc=com']},
                    'nestedguy': {'dn': 'uid=nestedguy,ou=People,dc=example,dc=com', 'username':'nestedguy', 'first':'nested', 'last':'guy', 'email':'nestedguy@stooges.com', 'groups': ['cn=NestedGroup,ou=Groups,dc=example,dc=com']},
                    'otherguy': {'dn': 'uid=otherguy,ou=People,dc=example,dc=com', 'username':'otherguy', 'first':'Other', 'last':'Guy', 'email':'other@guy.com'},
                    'posix_person': {'dn': 'uid=posix_person,ou=People,dc=example,dc=com', 'username': 'posix_person', 'first': 'pos', 'last': 'ix', 'email': 'pos@ix.com'},
                    'posix_person2': {'dn': 'uid=posix_person2,ou=People,dc=example,dc=com', 'username': 'posix_person2', 'first': 'pos', 'last': 'ix', 'email': 'pos@ix.com'},
                    'user with space': {'dn': 'uid=user with space,ou=People,dc=example,dc=com', 'username': 'user with space', 'first': 'user', 'last': 'space', 'email': 'user@space.com'},
                    'spaceless': {'dn': 'uid=user without space,ou=People,dc=example,dc=com', 'username': 'spaceless', 'first': 'user', 'last': 'space', 'email': 'user@space.com'},}

      self.groups = {'TestUsers': {
                       'dn': 'cn=TestUsers,ou=Groups,dc=example,dc=com',
                       'name':'TestUsers',
                       'members':['uid=moe,ou=People,dc=example,dc=com','uid=lårry,ou=People,dc=example,dc=com','uid=curly,ou=People,dc=example,dc=com'],
                       'posix_members':[]},
                     'Test Administrators': {
                       'dn': 'cn=Test Administrators,cn=TestUsers,ou=Groups,dc=example,dc=com',
                       'name':'Test Administrators',
                       'members':['uid=Rock,ou=People,dc=example,dc=com','uid=lårry,ou=People,dc=example,dc=com','uid=curly,ou=People,dc=example,dc=com'],
                       'posix_members':[]},
                     'OtherGroup': {
                       'dn': 'cn=OtherGroup,cn=TestUsers,ou=Groups,dc=example,dc=com',
                       'name':'OtherGroup',
                       'members':[],
                       'posix_members':[]},
                     'NestedGroups': {
                       'dn': 'cn=NestedGroups,ou=Groups,dc=example,dc=com',
                       'name':'NestedGroups',
                       'members':['cn=NestedGroup,ou=Groups,dc=example,dc=com'],
                       'posix_members':[]
                     },
                     'NestedGroup': {
                       'dn': 'cn=NestedGroup,ou=Groups,dc=example,dc=com',
                       'name':'NestedGroup',
                       'members':['uid=nestedguy,ou=People,dc=example,dc=com'],
                       'posix_members':[]
                     },
                     'NestedPosixGroups': {
                       'dn': 'cn=NestedPosixGroups,ou=Groups,dc=example,dc=com',
                       'name':'NestedPosixGroups',
                       'members':['cn=PosixGroup,ou=Groups,dc=example,dc=com'],
                       'posix_members':[]
                     },
                     'PosixGroup': {
                       'dn': 'cn=PosixGroup,ou=Groups,dc=example,dc=com',
                       'name':'PosixGroup',
                       'members':[],
                       'posix_members':['posix_person','lårry']},
                     'PosixGroup1': {
                       'dn': 'cn=PosixGroup1,cn=PosixGroup,ou=Groups,dc=example,dc=com',
                       'name':'PosixGroup1',
                       'members':[],
                       'posix_members':['posix_person2']},
                    }
def test_invalid_username():
  """Usernames with illegal characters must be rejected by the new-user form."""
  invalid_names = ('-foo', 'foo:o', 'foo o', ' foo')
  client = make_logged_in_client(username="test", is_superuser=True)
  for name in invalid_names:
    assert_true(client.get('/useradmin/users/new'))
    response = client.post('/useradmin/users/new',
                           dict(username=name, password1="test", password2="test"))
    assert_true('not allowed' in response.context["form"].errors['username'][0])
def test_group_permissions():
  """App access is granted and revoked through group permissions: superusers
  always have access; non-superusers gain it by joining a permitted group and
  lose it when the permission is revoked from that group."""
  reset_all_users()
  reset_all_groups()

  # Get ourselves set up with a user and a group
  c = make_logged_in_client(username="test", is_superuser=True)
  Group.objects.create(name="test-group")
  test_user = User.objects.get(username="test")
  test_user.groups.add(Group.objects.get(name="test-group"))
  test_user.save()

  # Make sure that a superuser can always access applications
  response = c.get('/useradmin/users')
  assert_true('Hue Users' in response.content)

  assert_true(len(GroupPermission.objects.all()) == 0)
  c.post('/useradmin/groups/edit/test-group',
         dict(name="test-group",
              members=[User.objects.get(username="test").pk],
              permissions=[HuePermission.objects.get(app='useradmin',action='access').pk],
              save="Save"), follow=True)
  assert_true(len(GroupPermission.objects.all()) == 1)

  # Now test that we have limited access
  c1 = make_logged_in_client(username="nonadmin", is_superuser=False)
  response = c1.get('/useradmin/users')
  assert_true('You do not have permission to access the Useradmin application.' in response.content)

  # Add the non-admin to a group that should grant permissions to the app
  test_user = User.objects.get(username="nonadmin")
  test_user.groups.add(Group.objects.get(name='test-group'))
  test_user.save()

  # Check that we have access now
  response = c1.get('/useradmin/users')
  assert_true(get_profile(test_user).has_hue_permission('access','useradmin'))
  assert_true('Hue Users' in response.content)

  # Make sure we can't modify permissions
  response = c1.get('/useradmin/permissions/edit/useradmin/access')
  assert_true('must be a superuser to change permissions' in response.content)

  # And revoke access from the group
  c.post('/useradmin/permissions/edit/useradmin/access',
         dict(app='useradmin',
              priv='access',
              groups=[],
              save="Save"), follow=True)
  assert_true(len(GroupPermission.objects.all()) == 0)
  assert_false(get_profile(test_user).has_hue_permission('access','useradmin'))

  # We should no longer have access to the app
  response = c1.get('/useradmin/users')
  assert_true('You do not have permission to access the Useradmin application.' in response.content)
def test_default_group():
  """The configured default user group is auto-created and protected from
  deletion while it is the configured default; it becomes deletable once the
  configuration points at a different group name."""
  reset_all_users()
  reset_all_groups()

  useradmin.conf.DEFAULT_USER_GROUP.set_for_testing('test_default')
  get_default_user_group()
  c = make_logged_in_client(username='test', is_superuser=True)

  # Create default group if it doesn't already exist.
  assert_true(Group.objects.filter(name='test_default').exists())

  # Try deleting the default group
  assert_true(Group.objects.filter(name='test_default').exists())
  response = c.post('/useradmin/groups/delete', {'group_names': ['test_default']})
  assert_true('default user group may not be deleted' in response.content)
  assert_true(Group.objects.filter(name='test_default').exists())

  # Change the name of the default group, and try deleting again
  useradmin.conf.DEFAULT_USER_GROUP.set_for_testing('new_default')

  response = c.post('/useradmin/groups/delete' , {'group_names': ['test_default']})
  assert_false(Group.objects.filter(name='test_default').exists())
  assert_true(Group.objects.filter(name='new_default').exists())
def test_get_profile():
  """A UserProfile row is created lazily the first time get_profile is called."""
  # Ensure profiles are created after get_profile is called.
  reset_all_users()
  reset_all_groups()

  c = make_logged_in_client(username='test', password='test', is_superuser=True)
  assert_equal(0, UserProfile.objects.count())

  profile = get_profile(User.objects.get(username='test'))
  assert_equal(1, UserProfile.objects.count())
def test_group_admin():
  """End-to-end exercise of the group-admin pages: listing, creation,
  membership editing, superuser-only enforcement, and deletion."""
  reset_all_users()
  reset_all_groups()

  c = make_logged_in_client(username="test", is_superuser=True)
  response = c.get('/useradmin/groups')
  # No groups just yet
  assert_true(len(response.context["groups"]) == 0)
  assert_true("Hue Groups" in response.content)

  # Create a group
  response = c.get('/useradmin/groups/new')
  assert_equal('/useradmin/groups/new', response.context['action'])
  c.post('/useradmin/groups/new', dict(name="testgroup"))

  # We should have an empty group in the DB now
  assert_true(len(Group.objects.all()) == 1)
  assert_true(Group.objects.filter(name="testgroup").exists())
  assert_true(len(Group.objects.get(name="testgroup").user_set.all()) == 0)

  # And now, just for kicks, let's try adding a user
  response = c.post('/useradmin/groups/edit/testgroup',
                    dict(name="testgroup",
                         members=[User.objects.get(username="test").pk],
                         save="Save"), follow=True)
  assert_true(len(Group.objects.get(name="testgroup").user_set.all()) == 1)
  assert_true(Group.objects.get(name="testgroup").user_set.filter(username="test").exists())

  # Test some permissions
  c2 = make_logged_in_client(username="nonadmin", is_superuser=False)

  # Need to give access to the user for the rest of the test
  group = Group.objects.create(name="access-group")
  perm = HuePermission.objects.get(app='useradmin', action='access')
  GroupPermission.objects.create(group=group, hue_permission=perm)
  test_user = User.objects.get(username="nonadmin")
  test_user.groups.add(Group.objects.get(name="access-group"))
  test_user.save()

  # Make sure non-superusers can't do bad things
  response = c2.get('/useradmin/groups/new')
  assert_true("You must be a superuser" in response.content)
  response = c2.get('/useradmin/groups/edit/testgroup')
  assert_true("You must be a superuser" in response.content)

  response = c2.post('/useradmin/groups/new', dict(name="nonsuperuser"))
  assert_true("You must be a superuser" in response.content)
  response = c2.post('/useradmin/groups/edit/testgroup',
                     dict(name="nonsuperuser",
                          members=[User.objects.get(username="test").pk],
                          save="Save"), follow=True)
  assert_true("You must be a superuser" in response.content)

  # Should be one group left, because we created the other group
  response = c.post('/useradmin/groups/delete', {'group_names': ['testgroup']})
  assert_true(len(Group.objects.all()) == 1)

  group_count = len(Group.objects.all())
  response = c.post('/useradmin/groups/new', dict(name="with space"))
  assert_equal(len(Group.objects.all()), group_count + 1)
def test_user_admin_password_policy():
  """With the password policy enabled, first-ever login, password change and
  new-user creation must all reject passwords that miss the length /
  upper+lower / digit / special-character rule, and accept compliant ones."""
  reset_all_users()
  reset_all_groups()

  # Set up password policy
  password_hint = password_error_msg = ("The password must be at least 8 characters long, "
                                        "and must contain both uppercase and lowercase letters, "
                                        "at least one number, and at least one special character.")
  password_rule = "^(?=.*?[A-Z])(?=(.*[a-z]){1,})(?=(.*[\d]){1,})(?=(.*[\W_]){1,}).{8,}$"

  useradmin.conf.PASSWORD_POLICY.IS_ENABLED.set_for_testing(True)
  useradmin.conf.PASSWORD_POLICY.PWD_RULE.set_for_testing(password_rule)
  useradmin.conf.PASSWORD_POLICY.PWD_HINT.set_for_testing(password_hint)
  useradmin.conf.PASSWORD_POLICY.PWD_ERROR_MESSAGE.set_for_testing(password_error_msg)
  reset_password_policy()

  # Test first-ever login with password policy enabled
  c = Client()

  response = c.get('/accounts/login/')
  assert_equal(200, response.status_code)
  assert_true(response.context['first_login_ever'])

  response = c.post('/accounts/login/', dict(username="test_first_login", password="foo"))
  assert_true(response.context['first_login_ever'])
  assert_equal([password_error_msg], response.context["form"]["password"].errors)

  response = c.post('/accounts/login/', dict(username="test_first_login", password="foobarTest1["), follow=True)
  assert_equal(200, response.status_code)
  assert_true(User.objects.get(username="test_first_login").is_superuser)
  assert_true(User.objects.get(username="test_first_login").check_password("foobarTest1["))
  c.get('/accounts/logout')

  # Test changing a user's password
  c = make_logged_in_client('superuser', is_superuser=True)

  # Test password hint is displayed
  response = c.get('/useradmin/users/edit/superuser')
  assert_true(password_hint in response.content)

  # Password is less than 8 characters
  response = c.post('/useradmin/users/edit/superuser',
                    dict(username="superuser",
                         is_superuser=True,
                         password1="foo",
                         password2="foo"))
  assert_equal([password_error_msg], response.context["form"]["password1"].errors)

  # Password is more than 8 characters long but does not have a special character
  response = c.post('/useradmin/users/edit/superuser',
                    dict(username="superuser",
                         is_superuser=True,
                         password1="foobarTest1",
                         password2="foobarTest1"))
  assert_equal([password_error_msg], response.context["form"]["password1"].errors)

  # Password1 and Password2 are valid but they do not match
  response = c.post('/useradmin/users/edit/superuser',
                    dict(username="superuser",
                         is_superuser=True,
                         password1="foobarTest1??",
                         password2="foobarTest1?",
                         password_old="foobarTest1[",
                         is_active=True))
  assert_equal(["Passwords do not match."], response.context["form"]["password2"].errors)

  # Password is valid now
  c.post('/useradmin/users/edit/superuser',
         dict(username="superuser",
              is_superuser=True,
              password1="foobarTest1[",
              password2="foobarTest1[",
              password_old="test",
              is_active=True))
  assert_true(User.objects.get(username="superuser").is_superuser)
  assert_true(User.objects.get(username="superuser").check_password("foobarTest1["))

  # Test creating a new user
  response = c.get('/useradmin/users/new')
  assert_true(password_hint in response.content)

  # Password is more than 8 characters long but does not have a special character
  response = c.post('/useradmin/users/new',
                    dict(username="test_user",
                         is_superuser=False,
                         password1="foo",
                         password2="foo"))
  assert_equal({'password1': [password_error_msg], 'password2': [password_error_msg]},
               response.context["form"].errors)

  # Password is more than 8 characters long but does not have a special character
  response = c.post('/useradmin/users/new',
                    dict(username="test_user",
                         is_superuser=False,
                         password1="foobarTest1",
                         password2="foobarTest1"))
  assert_equal({'password1': [password_error_msg], 'password2': [password_error_msg]},
               response.context["form"].errors)

  # Password1 and Password2 are valid but they do not match
  response = c.post('/useradmin/users/new',
                    dict(username="test_user",
                         is_superuser=False,
                         password1="foobarTest1[",
                         password2="foobarTest1?"))
  assert_equal({'password2': ["Passwords do not match."]}, response.context["form"].errors)

  # Password is valid now
  c.post('/useradmin/users/new',
         dict(username="test_user",
              is_superuser=False,
              password1="foobarTest1[",
              password2="foobarTest1[", is_active=True))
  assert_false(User.objects.get(username="test_user").is_superuser)
  assert_true(User.objects.get(username="test_user").check_password("foobarTest1["))
def test_user_admin():
  """End-to-end exercise of the useradmin views as a superuser:
  editing profile fields, password changes (mismatch / wrong old
  password / success), user creation, default and explicit group
  membership, regular-user permission limits, deactivation, and
  single/bulk deletion.  Steps are order-dependent: each request
  builds on the server state left by the previous one.
  """
  # Username containing every special character the forms/URLs must accept.
  FUNNY_NAME = '~`!@#$%^&*()_-+={}[]|\;"<>?/,.'
  FUNNY_NAME_QUOTED = urllib.quote(FUNNY_NAME)

  reset_all_users()
  reset_all_groups()
  useradmin.conf.DEFAULT_USER_GROUP.set_for_testing('test_default')
  # Disable the password policy so short passwords like "test" are accepted below.
  useradmin.conf.PASSWORD_POLICY.IS_ENABLED.set_for_testing(False)
  reset_password_policy()

  c = make_logged_in_client('test', is_superuser=True)
  user = User.objects.get(username='test')

  # Test basic output.
  response = c.get('/useradmin/')
  assert_true(len(response.context["users"]) > 0)
  assert_true("Hue Users" in response.content)

  # Test editing a superuser
  # Just check that this comes back
  response = c.get('/useradmin/users/edit/test')
  # Edit it, to add a first and last name
  response = c.post('/useradmin/users/edit/test',
                    dict(username="test",
                         first_name=u"Inglés",
                         last_name=u"Español",
                         is_superuser="True",
                         is_active="True"),
                    follow=True)
  assert_true("User information updated" in response.content,
              "Notification should be displayed in: %s" % response.content)
  # Edit it, can't change username
  response = c.post('/useradmin/users/edit/test',
                    dict(username="test2",
                         first_name=u"Inglés",
                         last_name=u"Español",
                         is_superuser="True",
                         is_active="True"),
                    follow=True)
  assert_true("You cannot change a username" in response.content)
  # Now make sure that those were materialized
  response = c.get('/useradmin/users/edit/test')
  assert_equal(smart_unicode("Inglés"), response.context["form"].instance.first_name)
  assert_true("Español" in response.content)
  # Shouldn't be able to demote to non-superuser
  response = c.post('/useradmin/users/edit/test', dict(username="test",
                                                       first_name=u"Inglés", last_name=u"Español",
                                                       is_superuser=False, is_active=True))
  assert_true("You cannot remove" in response.content,
              "Shouldn't be able to remove the last superuser")
  # Shouldn't be able to delete oneself
  response = c.post('/useradmin/users/delete', {u'user_ids': [user.id]})
  assert_true("You cannot remove yourself" in response.content,
              "Shouldn't be able to delete the last superuser")

  # Let's try changing the password
  response = c.post('/useradmin/users/edit/test', dict(username="test", first_name="Tom", last_name="Tester", is_superuser=True, password1="foo", password2="foobar"))
  assert_equal(["Passwords do not match."], response.context["form"]["password2"].errors, "Should have complained about mismatched password")
  # Old password not confirmed
  response = c.post('/useradmin/users/edit/test', dict(username="test", first_name="Tom", last_name="Tester", password1="foo", password2="foo", is_active=True, is_superuser=True))
  assert_equal(["The old password does not match the current password."], response.context["form"]["password_old"].errors, "Should have complained about old password")
  # Good now
  response = c.post('/useradmin/users/edit/test', dict(username="test", first_name="Tom", last_name="Tester", password1="foo", password2="foo", password_old="test", is_active=True, is_superuser=True))
  assert_true(User.objects.get(username="test").is_superuser)
  assert_true(User.objects.get(username="test").check_password("foo"))
  # Change it back!
  response = c.post('/useradmin/users/edit/test', dict(username="test", first_name="Tom", last_name="Tester", password1="test", password2="test", password_old="foo", is_active="True", is_superuser="True"))
  assert_true(User.objects.get(username="test").check_password("test"))
  assert_true(make_logged_in_client(username = "test", password = "test"), "Check that we can still login.")

  # Check new user form for default group
  group = get_default_user_group()
  response = c.get('/useradmin/users/new')
  assert_true(response)
  assert_true(('<option value="1" selected="selected">%s</option>' % group) in str(response))

  # Create a new regular user (duplicate name)
  response = c.post('/useradmin/users/new', dict(username="test", password1="test", password2="test"))
  assert_equal({ 'username': ["User with this Username already exists."]}, response.context["form"].errors)

  # Create a new regular user (for real)
  response = c.post('/useradmin/users/new', dict(username=FUNNY_NAME,
                                                 password1="test",
                                                 password2="test",
                                                 is_active="True"))
  response = c.get('/useradmin/')
  assert_true(FUNNY_NAME_QUOTED in response.content)
  assert_true(len(response.context["users"]) > 1)
  assert_true("Hue Users" in response.content)
  # Validate profile is created.
  assert_true(UserProfile.objects.filter(user__username=FUNNY_NAME).exists())

  # Need to give access to the user for the rest of the test
  group = Group.objects.create(name="test-group")
  perm = HuePermission.objects.get(app='useradmin', action='access')
  GroupPermission.objects.create(group=group, hue_permission=perm)

  # Verify that we can modify user groups through the user admin pages
  response = c.post('/useradmin/users/new', dict(username="group_member", password1="test", password2="test", groups=[group.pk]))
  User.objects.get(username='group_member')
  assert_true(User.objects.get(username='group_member').groups.filter(name='test-group').exists())
  response = c.post('/useradmin/users/edit/group_member', dict(username="group_member", groups=[]))
  assert_false(User.objects.get(username='group_member').groups.filter(name='test-group').exists())

  # Check permissions by logging in as the new user
  c_reg = make_logged_in_client(username=FUNNY_NAME, password="test")
  test_user = User.objects.get(username=FUNNY_NAME)
  test_user.groups.add(Group.objects.get(name="test-group"))
  test_user.save()

  # Regular user should be able to modify oneself
  response = c_reg.post('/useradmin/users/edit/%s' % (FUNNY_NAME_QUOTED,),
                        dict(username = FUNNY_NAME,
                             first_name = "Hello",
                             is_active = True,
                             groups=[group.id for group in test_user.groups.all()]), follow=True)
  assert_equal(response.status_code, 200)
  response = c_reg.get('/useradmin/users/edit/%s' % (FUNNY_NAME_QUOTED,), follow=True)
  assert_equal(response.status_code, 200)
  assert_equal("Hello", response.context["form"].instance.first_name)
  funny_user = User.objects.get(username=FUNNY_NAME)
  # Can't edit other people.
  response = c_reg.post("/useradmin/users/delete", {u'user_ids': [funny_user.id]})
  assert_true("You must be a superuser" in response.content,
              "Regular user can't edit other people")

  # Revert to regular "test" user, that has superuser powers.
  c_su = make_logged_in_client()
  # Inactivate FUNNY_NAME
  c_su.post('/useradmin/users/edit/%s' % (FUNNY_NAME_QUOTED,),
            dict(username = FUNNY_NAME,
                 first_name = "Hello",
                 is_active = False))
  # Now make sure FUNNY_NAME can't log back in
  response = c_reg.get('/useradmin/users/edit/%s' % (FUNNY_NAME_QUOTED,))
  assert_true(response.status_code == 302 and "login" in response["location"],
              "Inactivated user gets redirected to login page")

  # Delete that regular user
  funny_profile = get_profile(test_user)
  response = c_su.post('/useradmin/users/delete', {u'user_ids': [funny_user.id]})
  assert_equal(302, response.status_code)
  assert_false(User.objects.filter(username=FUNNY_NAME).exists())
  assert_false(UserProfile.objects.filter(id=funny_profile.id).exists())

  # Bulk delete users
  u1 = User.objects.create(username='u1', password="u1")
  u2 = User.objects.create(username='u2', password="u2")
  assert_equal(User.objects.filter(username__in=['u1', 'u2']).count(), 2)
  response = c_su.post('/useradmin/users/delete', {u'user_ids': [u1.id, u2.id]})
  assert_equal(User.objects.filter(username__in=['u1', 'u2']).count(), 0)

  # Make sure that user deletion works if the user has never performed a request.
  funny_user = User.objects.create(username=FUNNY_NAME, password='test')
  assert_true(User.objects.filter(username=FUNNY_NAME).exists())
  assert_false(UserProfile.objects.filter(user__username=FUNNY_NAME).exists())
  response = c_su.post('/useradmin/users/delete', {u'user_ids': [funny_user.id]})
  assert_equal(302, response.status_code)
  assert_false(User.objects.filter(username=FUNNY_NAME).exists())
  assert_false(UserProfile.objects.filter(user__username=FUNNY_NAME).exists())

  # You shouldn't be able to create a user without a password
  response = c_su.post('/useradmin/users/new', dict(username="test"))
  assert_true("You must specify a password when creating a new user." in response.content)
@attr('requires_hadoop')
def test_ensure_home_directory():
  """Verify that the `ensure_home_directory` checkbox on the new/edit
  user forms creates the user's HDFS home directory with the expected
  owner, group and mode.

  NOTE: the unconditional `raise SkipTest` below disables this test —
  everything after it is currently unreachable.
  """
  raise SkipTest

  reset_all_users()
  reset_all_groups()
  useradmin.conf.PASSWORD_POLICY.IS_ENABLED.set_for_testing(False)
  reset_password_policy()

  # Cluster and client for home directory creation
  cluster = pseudo_hdfs4.shared_cluster()
  c = make_logged_in_client(cluster.superuser, is_superuser=True, groupname='test1')
  cluster.fs.setuser(cluster.superuser)

  # Create a user with a home directory
  assert_false(cluster.fs.exists('/user/test1'))
  response = c.post('/useradmin/users/new', dict(username="test1", password1='test', password2='test', ensure_home_directory=True))
  assert_true(cluster.fs.exists('/user/test1'))
  dir_stat = cluster.fs.stats('/user/test1')
  assert_equal('test1', dir_stat.user)
  assert_equal('test1', dir_stat.group)
  assert_equal('40755', '%o' % dir_stat.mode)

  # Create a user, then add their home directory
  assert_false(cluster.fs.exists('/user/test2'))
  response = c.post('/useradmin/users/new', dict(username="test2", password1='test', password2='test'))
  # No checkbox -> no directory yet.
  assert_false(cluster.fs.exists('/user/test2'))
  response = c.post('/useradmin/users/edit/%s' % "test2", dict(username="test2", password1='test', password2='test', password_old="test", ensure_home_directory=True))
  assert_true(cluster.fs.exists('/user/test2'))
  dir_stat = cluster.fs.stats('/user/test2')
  assert_equal('test2', dir_stat.user)
  assert_equal('test2', dir_stat.group)
  assert_equal('40755', '%o' % dir_stat.mode)
def test_list_for_autocomplete():
  """Check the autocomplete endpoint: each user sees the *other* users
  (never themselves) and all groups, regardless of group membership.
  """
  reset_all_users()
  reset_all_groups()

  # Now the autocomplete has access to all the users and groups
  c1 = make_logged_in_client('test_list_for_autocomplete', is_superuser=False, groupname='test_list_for_autocomplete')
  c2_same_group = make_logged_in_client('test_list_for_autocomplete2', is_superuser=False, groupname='test_list_for_autocomplete')
  c3_other_group = make_logged_in_client('test_list_for_autocomplete3', is_superuser=False, groupname='test_list_for_autocomplete_other_group')

  # c1 is in the same group as c2
  response = c1.get(reverse('useradmin.views.list_for_autocomplete'), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
  content = json.loads(response.content)

  users = [user['username'] for user in content['users']]
  groups = [user['name'] for user in content['groups']]

  assert_equal(['test_list_for_autocomplete2', 'test_list_for_autocomplete3'], users)
  assert_true('test_list_for_autocomplete' in groups, groups)
  assert_true('test_list_for_autocomplete_other_group' in groups, groups)

  # c2 is in the same group as c1
  response = c2_same_group.get(reverse('useradmin.views.list_for_autocomplete'), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
  content = json.loads(response.content)

  users = [user['username'] for user in content['users']]
  groups = [user['name'] for user in content['groups']]

  assert_equal(['test_list_for_autocomplete', 'test_list_for_autocomplete3'], users)
  assert_true('test_list_for_autocomplete' in groups, groups)
  assert_true('test_list_for_autocomplete_other_group' in groups, groups)

  # c3 is alone except for groups
  response = c3_other_group.get(reverse('useradmin.views.list_for_autocomplete'), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
  content = json.loads(response.content)

  users = [user['username'] for user in content['users']]
  groups = [user['name'] for user in content['groups']]

  assert_equal(['test_list_for_autocomplete', 'test_list_for_autocomplete2'], users)
  assert_true('test_list_for_autocomplete' in groups, groups)
  assert_true('test_list_for_autocomplete_other_group' in groups, groups)
class MockLdapConnection(object):
  """Stand-in for ``LdapConnection`` used by the bind-password tests.

  Instead of opening a real LDAP connection it simply records the
  constructor arguments so the tests can assert which configuration
  values (URL, credentials, certificate) would have been used.
  """

  def __init__(self, ldap_config, ldap_url, username, password, ldap_cert):
    # Capture every argument verbatim for later inspection by the tests.
    self.ldap_config, self.ldap_url = ldap_config, ldap_url
    self.username, self.password = username, password
    self.ldap_cert = ldap_cert
def test_get_connection_bind_password():
  """Verify that get_connection_from_server() picks the bind password
  from the default LDAP config, or from a named server entry when a
  server name is given.
  """
  # Unfortunately our tests leak a cached test ldap connection across functions, so we need to clear it out.
  useradmin.ldap_access.CACHED_LDAP_CONN = None

  # Monkey patch the LdapConnection class as we don't want to make a real connection.
  OriginalLdapConnection = useradmin.ldap_access.LdapConnection
  # Each set_for_testing() returns a callable that restores the old value.
  reset = [
    desktop.conf.LDAP.LDAP_URL.set_for_testing('default.example.com'),
    desktop.conf.LDAP.BIND_PASSWORD.set_for_testing('default-password'),
    desktop.conf.LDAP.LDAP_SERVERS.set_for_testing({
      'test': {
        'ldap_url': 'test.example.com',
        'bind_password': 'test-password',
      }
    })
  ]
  try:
    useradmin.ldap_access.LdapConnection = MockLdapConnection
    connection = useradmin.ldap_access.get_connection_from_server()
    assert_equal(connection.password, 'default-password')

    connection = useradmin.ldap_access.get_connection_from_server('test')
    assert_equal(connection.password, 'test-password')
  finally:
    # Always restore the real class and config, even on assertion failure.
    useradmin.ldap_access.LdapConnection = OriginalLdapConnection
    for f in reset:
      f()
def test_get_connection_bind_password_script():
  """Verify that the bind password can come from an external script
  (bind_password_script), whose stdout is stripped before use, for
  both the default config and a named server entry.
  """
  # Unfortunately our tests leak a cached test ldap connection across functions, so we need to clear it out.
  useradmin.ldap_access.CACHED_LDAP_CONN = None

  # NOTE(review): SCRIPT appears to be unused in this test — presumably a
  # leftover; the actual scripts are configured inline below. Verify.
  SCRIPT = '%s -c "print \'\\n password from script \\n\'"' % sys.executable

  # Monkey patch the LdapConnection class as we don't want to make a real connection.
  OriginalLdapConnection = useradmin.ldap_access.LdapConnection
  # Each set_for_testing() returns a callable that restores the old value.
  reset = [
    desktop.conf.LDAP.LDAP_URL.set_for_testing('default.example.com'),
    desktop.conf.LDAP.BIND_PASSWORD_SCRIPT.set_for_testing(
      '%s -c "print \'\\n default password \\n\'"' % sys.executable
    ),
    desktop.conf.LDAP.LDAP_SERVERS.set_for_testing({
      'test': {
        'ldap_url': 'test.example.com',
        'bind_password_script':
          '%s -c "print \'\\n test password \\n\'"' % sys.executable,
      }
    })
  ]
  try:
    useradmin.ldap_access.LdapConnection = MockLdapConnection
    connection = useradmin.ldap_access.get_connection_from_server()
    # Surrounding newlines are stripped, inner spaces are preserved.
    assert_equal(connection.password, ' default password ')

    connection = useradmin.ldap_access.get_connection_from_server('test')
    assert_equal(connection.password, ' test password ')
  finally:
    # Always restore the real class and config, even on assertion failure.
    useradmin.ldap_access.LdapConnection = OriginalLdapConnection
    for f in reset:
      f()
|
vmanoria/bluemix-hue-filebrowser
|
hue-3.8.1-bluemix/apps/useradmin/src/useradmin/tests.py
|
Python
|
gpl-2.0
| 37,802
|
[
"MOE"
] |
a10296ea3b576e6c831fa0ef296d4b30c3990cb9417d86fe7638f524d191686c
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views

# Non-localised URLs: language switching, a static "about" page, the Django
# admin, plus media files served by Django itself (development only).
urlpatterns = [
    # Multilingual
    url(r'^i18n/', include('django.conf.urls.i18n')),

    url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),

    # Django Admin, use {% url 'admin:index' %}
    url(settings.ADMIN_URL, admin.site.urls),

    # Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

# URLs below are served under a language-code prefix (e.g. /en/..., /de/...).
urlpatterns += i18n_patterns(
    # Homepage app
    url(r'^', include('homepage.urls')),

    # User management
    url(r'^users/', include('users.urls', namespace='users')),
    url(r'^accounts/', include('allauth.urls')),

    # User avatar
    url(r'^avatar/', include('avatar.urls')),

    # Cashfield app
    url(r'^', include('cashfield.urls', namespace='cashfield')),
)

if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
        url(r'^500/$', default_views.server_error),
    ]

    # NOTE(review): nesting of this block under settings.DEBUG reconstructed
    # from the (indentation-lost) source; matches the cookiecutter-django
    # layout this file follows — confirm against the original file.
    if 'debug_toolbar' in settings.INSTALLED_APPS:
        import debug_toolbar

        urlpatterns += [
            url(r'^__debug__/', include(debug_toolbar.urls)),
        ]
|
manax-dojo/cashflow
|
config/urls.py
|
Python
|
bsd-3-clause
| 1,829
|
[
"VisIt"
] |
c2710bcb615576e8bbaf76b62d21a8faff15018795571ea0fcb07136c6272c79
|
import numpy as np
from GPy.core import Param
from GPy.kern.src.grid_kerns import GridRBF
from GPy.kern.src.psi_comp import PSICOMP_RBF, PSICOMP_RBF_GPU
from GPy.kern.src.stationary import Stationary
from paramz.transformations import Logexp
class CausalRBF(Stationary):
    r"""
    Radial Basis Function kernel, aka squared-exponential, exponentiated quadratic or Gaussian kernel:

    .. math::

       k(r) = \sigma^2 \exp \bigg(- \frac{1}{2} r^2 \bigg)

    On top of the stationary RBF term, a caller-supplied
    ``variance_adjustment`` callable contributes an additive,
    input-dependent variance term — see :meth:`K` and :meth:`Kdiag`.

    (Docstring is a raw string: the original non-raw version contained
    invalid escape sequences such as ``\s`` and ``\e``.)
    """

    _support_GPU = True

    def __init__(
        self,
        input_dim,
        variance_adjustment,
        variance=1.0,
        lengthscale=None,
        rescale_variance=1.0,
        ARD=False,
        active_dims=None,
        name="rbf",
        useGPU=False,
        inv_l=False,
    ):
        """
        :param input_dim: dimensionality of the kernel input.
        :param variance_adjustment: callable mapping inputs X to an additive
            variance term (scalar or per-row array).
        :param variance: RBF signal variance.
        :param lengthscale: RBF lengthscale(s).
        :param rescale_variance: initial value for the extra
            ``rescale_variance`` parameter (positivity-constrained).
        :param ARD: one lengthscale per input dimension if True.
        :param inv_l: re-parameterise using the inverse squared lengthscale.
        """
        super(CausalRBF, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name, useGPU=useGPU)
        if self.useGPU:
            self.psicomp = PSICOMP_RBF_GPU()
        else:
            self.psicomp = PSICOMP_RBF()
        self.use_invLengthscale = inv_l
        if inv_l:
            # Swap the lengthscale parameter for inv_lengthscale = 1/l^2.
            self.unlink_parameter(self.lengthscale)
            self.inv_l = Param("inv_lengthscale", 1.0 / self.lengthscale ** 2, Logexp())
            self.link_parameter(self.inv_l)
        self.variance_adjustment = variance_adjustment
        self.rescale_variance = Param("rescale_variance", rescale_variance, Logexp())

    def to_dict(self):
        """
        Convert the object into a json serializable dictionary.

        Note: It uses the private method _save_to_input_dict of the parent.

        :return dict: json serializable dictionary containing the needed information to instantiate the object
        """
        input_dict = super(CausalRBF, self)._save_to_input_dict()
        input_dict["class"] = "GPy.kern.RBF"
        input_dict["inv_l"] = self.use_invLengthscale
        if input_dict["inv_l"]:  # idiomatic truthiness test (was `== True`)
            input_dict["lengthscale"] = np.sqrt(1 / float(self.inv_l))
        return input_dict

    def K(self, X, X2=None):
        """
        Kernel function applied on inputs X and X2.

        In the stationary case there is an inner function depending on the
        distances from X to X2, called r.

        K(X, X2) = K_of_r((X-X2)**2) + sqrt(adj(X)) . sqrt(adj(X2))^T

        where ``adj`` is the ``variance_adjustment`` callable.
        """
        if X2 is None:
            X2 = X
        r = self._scaled_dist(X, X2)
        values = self.variance * np.exp(-0.5 * r ** 2)

        # Additive rank-one term built from the per-input variance adjustment.
        value_diagonal_X = self.variance_adjustment(X)
        value_diagonal_X2 = self.variance_adjustment(X2)
        additional_matrix = np.dot(np.sqrt(value_diagonal_X), np.sqrt(np.transpose(value_diagonal_X2)))

        assert additional_matrix.shape == values.shape, (
            additional_matrix.shape,
            values.shape,
        )
        return values + additional_matrix

    def Kdiag(self, X):
        """Diagonal of K: signal variance plus the per-input adjustment.

        Fix: the original pre-filled a throwaway 0.1-array that was
        overwritten on every code path; that dead code is removed.
        """
        value = self.variance_adjustment(X)
        if X.shape[0] == 1 and X.shape[1] == 1:
            diagonal_terms = value
        elif np.isscalar(value):  # was `== True`
            diagonal_terms = value
        else:
            # assumes variance_adjustment returns a column vector here —
            # TODO confirm against callers.
            diagonal_terms = value[:, 0]
        return self.variance + diagonal_terms

    def K_of_r(self, r):
        return self.variance * np.exp(-0.5 * r ** 2)

    def dK_dr(self, r):
        return -r * self.K_of_r(r)

    def dK2_drdr(self, r):
        return (r ** 2 - 1) * self.K_of_r(r)

    def dK2_drdr_diag(self):
        return -self.variance  # as the diagonal of r is always filled with zeros

    def __getstate__(self):
        dc = super(CausalRBF, self).__getstate__()
        if self.useGPU:
            # GPU helpers are not picklable; fall back to the CPU version.
            dc["psicomp"] = PSICOMP_RBF()
            dc["useGPU"] = False
        return dc

    def __setstate__(self, state):
        self.use_invLengthscale = False
        return super(CausalRBF, self).__setstate__(state)

    def spectrum(self, omega):
        assert self.input_dim == 1
        return self.variance * np.sqrt(2 * np.pi) * self.lengthscale * np.exp(-self.lengthscale * 2 * omega ** 2 / 2)

    def parameters_changed(self):
        # Keep lengthscale in sync with inv_l under the inverse
        # parameterisation (epsilon guards against division by zero).
        if self.use_invLengthscale:
            self.lengthscale[:] = 1.0 / np.sqrt(self.inv_l + 1e-200)
        super(CausalRBF, self).parameters_changed()

    def get_one_dimensional_kernel(self, dim):
        """
        Specially intended for Grid regression.
        """
        oneDkernel = GridRBF(input_dim=1, variance=self.variance.copy(), originalDimensions=dim)
        return oneDkernel

    # ---------------------------------------#
    #             PSI statistics             #
    # ---------------------------------------#

    def psi0(self, Z, variational_posterior):
        return self.psicomp.psicomputations(self, Z, variational_posterior)[0]

    def psi1(self, Z, variational_posterior):
        return self.psicomp.psicomputations(self, Z, variational_posterior)[1]

    def psi2(self, Z, variational_posterior):
        return self.psicomp.psicomputations(self, Z, variational_posterior, return_psi2_n=False)[2]

    def psi2n(self, Z, variational_posterior):
        return self.psicomp.psicomputations(self, Z, variational_posterior, return_psi2_n=True)[2]

    def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
        dL_dvar, dL_dlengscale = self.psicomp.psiDerivativecomputations(
            self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior
        )[:2]
        self.variance.gradient = dL_dvar
        self.lengthscale.gradient = dL_dlengscale
        if self.use_invLengthscale:
            # Chain rule: d l / d(1/l^2) = -l^3 / 2.
            self.inv_l.gradient = dL_dlengscale * (self.lengthscale ** 3 / -2.0)

    def gradients_Z_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
        return self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[2]

    def gradients_qX_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
        return self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[3:]

    def update_gradients_diag(self, dL_dKdiag, X):
        super(CausalRBF, self).update_gradients_diag(dL_dKdiag, X)
        if self.use_invLengthscale:
            self.inv_l.gradient = self.lengthscale.gradient * (self.lengthscale ** 3 / -2.0)

    def update_gradients_full(self, dL_dK, X, X2=None):
        super(CausalRBF, self).update_gradients_full(dL_dK, X, X2)
        if self.use_invLengthscale:
            self.inv_l.gradient = self.lengthscale.gradient * (self.lengthscale ** 3 / -2.0)
|
neildhir/DCBO
|
src/bayes_opt/causal_kernels.py
|
Python
|
mit
| 6,617
|
[
"Gaussian"
] |
535e9381c3e72e0f8c6ba62e487f8bf01d352e8ebc7f4985e43c20d880896ae3
|
__author__ = 'Daan Wierstra and Tom Schaul'
from itertools import chain
from scipy import zeros
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.networks.recurrent import RecurrentNetwork
from pybrain.structure.modules.neuronlayer import NeuronLayer
from pybrain.structure.connections import FullConnection
# CHECKME: allow modules that do not inherit from NeuronLayer? and treat them as single neurons?
class NeuronDecomposableNetwork(object):
    """ A Network, that allows accessing parameters decomposed by their
    corresponding individual neuron. """

    # ESP style treatment: weights of connections that feed an output module
    # are attributed to the *incoming* neuron rather than the output neuron.
    espStyleDecomposition = True

    def addModule(self, m):
        # Decomposition relies on per-neuron indexing within a layer, so only
        # NeuronLayer modules are supported.
        assert isinstance(m, NeuronLayer)
        super(NeuronDecomposableNetwork, self).addModule(m)

    def sortModules(self):
        """Sort modules as usual, then build the neuron -> parameter-index map."""
        super(NeuronDecomposableNetwork, self).sortModules()
        self._constructParameterInfo()
        # contains a list of lists of indices
        self.decompositionIndices = {}
        for neuron in self._neuronIterator():
            self.decompositionIndices[neuron] = []
        for w in range(self.paramdim):
            inneuron, outneuron = self.paramInfo[w]
            if self.espStyleDecomposition and outneuron[0] in self.outmodules:
                self.decompositionIndices[inneuron].append(w)
            else:
                self.decompositionIndices[outneuron].append(w)

    def _neuronIterator(self):
        """Yield every neuron as a (module, index-within-module) tuple."""
        for m in self.modules:
            for n in range(m.dim):
                yield (m, n)

    def _constructParameterInfo(self):
        """ construct a dictionary with information about each parameter:
        The key is the index in self.params, and the value is a tuple containing
        (inneuron, outneuron), where a neuron is a tuple of it's module and an index.
        """
        self.paramInfo = {}
        index = 0
        for x in self._containerIterator():
            if isinstance(x, FullConnection):
                for w in range(x.paramdim):
                    inbuf, outbuf = x.whichBuffers(w)
                    self.paramInfo[index + w] = ((x.inmod, x.inmod.whichNeuron(outputIndex=inbuf)),
                                                 (x.outmod, x.outmod.whichNeuron(inputIndex=outbuf)))
            elif isinstance(x, NeuronLayer):
                for n in range(x.paramdim):
                    self.paramInfo[index + n] = ((x, n), (x, n))
            else:
                # Bug fix: the original used a bare `raise` with no active
                # exception, which itself fails with an uninformative
                # "No active exception to re-raise" RuntimeError.
                raise RuntimeError("cannot decompose parameters of %r: "
                                   "expected FullConnection or NeuronLayer" % (x,))
            index += x.paramdim

    def getDecomposition(self):
        """ return a list of arrays, each corresponding to one neuron's relevant parameters """
        res = []
        for neuron in self._neuronIterator():
            nIndices = self.decompositionIndices[neuron]
            if len(nIndices) > 0:
                tmp = zeros(len(nIndices))
                for i, ni in enumerate(nIndices):
                    tmp[i] = self.params[ni]
                res.append(tmp)
        return res

    def setDecomposition(self, decomposedParams):
        """ set parameters by neuron decomposition,
        each corresponding to one neuron's relevant parameters """
        nindex = 0
        for neuron in self._neuronIterator():
            nIndices = self.decompositionIndices[neuron]
            if len(nIndices) > 0:
                for i, ni in enumerate(nIndices):
                    self.params[ni] = decomposedParams[nindex][i]
                # Only neurons with parameters consume an entry, mirroring
                # the ordering produced by getDecomposition().
                nindex += 1

    @staticmethod
    def convertNormalNetwork(n):
        """ convert a normal network into a decomposable one """
        if isinstance(n, RecurrentNetwork):
            res = RecurrentDecomposableNetwork()
            for c in n.recurrentConns:
                res.addRecurrentConnection(c)
        else:
            res = FeedForwardDecomposableNetwork()
        for m in n.inmodules:
            res.addInputModule(m)
        for m in n.outmodules:
            res.addOutputModule(m)
        for m in n.modules:
            res.addModule(m)
        for c in chain(*n.connections.values()):
            res.addConnection(c)
        res.name = n.name
        res.sortModules()
        return res
class FeedForwardDecomposableNetwork(NeuronDecomposableNetwork, FeedForwardNetwork):
    """Feed-forward network whose parameters can be accessed per neuron."""
    pass
class RecurrentDecomposableNetwork(NeuronDecomposableNetwork, RecurrentNetwork):
    """Recurrent network whose parameters can be accessed per neuron."""
    pass
|
hassaanm/stock-trading
|
src/pybrain/structure/networks/neurondecomposable.py
|
Python
|
apache-2.0
| 4,335
|
[
"NEURON"
] |
037373e8a0658abb5451046c49ccbe16a93f770f5dbc7ed22099ef58482355a5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.