# CHRIS / functions.py
# Author: Robert Elder
# Updates to quantity module (commit e33154d)
import math
import numpy as np
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import io
import base64
from scipy import special
from scipy.optimize import bisect
def SigFigs(number, n):
    """Round *number* to *n* significant figures; zero is returned unchanged."""
    if number == 0:
        return number
    # Exponent of the leading digit fixes how many decimal places to keep.
    magnitude = int(math.floor(math.log10(abs(number))))
    return round(number, n - magnitude - 1)
def HtmlNumber(number, n):
    """Format *number* in scientific notation as HTML with *n* decimal places,
    e.g. 1234 -> '1.23 × 10<sup>3</sup>'."""
    mantissa, _, exponent = f'{number:.{n}e}'.partition('e')
    # int() absorbs both the sign and the zero-padding of the exponent,
    # producing the same text as stripping them by hand.
    return f'{mantissa} × 10<sup>{int(exponent)}</sup>'
def Piringer(Mw, Ap, T=310.):
    """Piringer semi-empirical diffusion coefficient D(Mw) for a polymer with
    property Ap at temperature T (K). Ref: Toxicol. Sci. 2019, 172 (1), 201-212."""
    # Above 1100 g/mol the correlation is capped at that worst-case value.
    mw_eff = min(Mw, 1100.)
    exponent = Ap - 0.1351 * mw_eff ** (2. / 3.) + 0.003 * mw_eff - 10454. / T
    return 1e4 * np.exp(exponent)
def WilkeChang(Mw):
    """Wilke-Chang semi-empirical estimate of D for a solute of molar mass Mw
    diffusing in water at 310 K."""
    # Molar volume from Mw, assuming linear alkanes as the worst case.
    molar_volume = 4.21 + 1.58 * Mw
    temperature = 310.   # K
    solvent_mw = 18.     # g/mol (water)
    viscosity = 0.6913   # water viscosity at 310 K
    association = 2.6    # Wilke-Chang association factor for water
    prefactor = 7.4e-8 * (solvent_mw * association) ** 0.5
    return prefactor * temperature / viscosity / molar_volume ** 0.6
def SheetRelease(amount, vol, area, time, D):
    """Mass released from a plane sheet (Fick's law) after *time* hours.

    amount    : total releasable mass
    vol, area : sheet volume and surface area; L = vol / area
    D         : diffusion coefficient in per-second units (scaled to per-hour here)
    """
    thickness = vol / area
    tau = (D * 3600.) * time / thickness ** 2.  # dimensionless time
    if tau <= 0.2:
        # Early-time (semi-infinite medium) square-root law.
        return 2. * amount * np.sqrt(tau / np.pi)
    # Late-time one-term series solution.
    return amount * (1. - (8. / (np.pi ** 2.)) * np.exp(-tau * np.pi ** 2. / 4.))
def SheetRates(amount, vol, area, time, D):
    """Per-interval release rates from a plane sheet (Fick's law).

    amount    : total releasable mass (mg)
    vol, area : sheet volume and surface area; L = vol / area
    time      : increasing sequence of times in days
    D         : diffusion coefficient in per-second units (scaled to per-day here)

    Returns an array the same length as *time* where entry i is the mass
    released between time[i-1] and time[i] (entry 0 is the release up to
    time[0]).
    """
    L = vol / area
    tau = (D * 86400.) * np.asarray(time, dtype=float) / L ** 2.  # dimensionless times
    # Cumulative release at each time: early-time sqrt law below tau = 0.2,
    # otherwise the one-term series solution (same switch as the original
    # per-point code used at every index).
    cumulative = np.where(
        tau < 0.2,
        2. * amount * np.sqrt(tau / np.pi),
        amount * (1. - (8. / (np.pi ** 2.)) * np.exp(-tau * np.pi ** 2. / 4.)),
    )
    # Per-interval rates are successive differences of the cumulative curve.
    # This replaces the original O(n^2) pattern (np.sum(rates[:i]) inside the
    # loop) and its duplicated index-0 branch with a single O(n) diff.
    return np.diff(cumulative, prepend=0.)
def RatePlot(tarray, rates):
    """Scatter-plot release rate vs. time and return the figure as a
    base64-encoded PNG data URI suitable for an <img> tag."""
    fig, ax = plt.subplots(figsize=(6, 4))
    ax.plot(tarray, rates, 'o')
    ax.set(xlabel='time (days)', ylabel='release rate (mg/day)')
    plt.tight_layout()
    buffer = io.BytesIO()
    FigureCanvas(fig).print_png(buffer)
    encoded = base64.b64encode(buffer.getvalue()).decode('utf8')
    return "data:image/png;base64," + encoded
def CdfPlot(vals, units=None):
    """Plot the empirical CDF of *vals* on a log x-axis, marking the median,
    and return the figure as a base64-encoded PNG data URI."""
    xlabel = 'Total quantity' if units is None else f'Total quantity ({units})'
    fig, ax = plt.subplots(figsize=(6, 4))
    ax.ecdf(vals, c='b', lw=3)
    median = np.nanquantile(vals, 0.5)
    # Dashed guide lines from both axes to the median point on the curve.
    ax.plot([median, median], [0, 0.5], 'k--',
            [median], [0.5], 'ko',
            [ax.get_xlim()[0], median], [0.5, 0.5], 'k--',
            lw=2, ms=10)
    ax.set_xscale('log')
    ax.set(xlabel=xlabel, ylabel='Cumulative probability')
    plt.tight_layout()
    buffer = io.BytesIO()
    FigureCanvas(fig).print_png(buffer)
    encoded = base64.b64encode(buffer.getvalue()).decode('utf8')
    return "data:image/png;base64," + encoded
def PlaneSheetFiniteBathMass(M0, D, K, PolymerVolume, SurfaceArea, SolventVolume, ExtractionTime, nterms):
    """Mass extracted from a plane sheet into a finite bath after ExtractionTime.

    Eigenfunction-series solution: the eigenvalues qn solve tan(q) + alpha*q = 0,
    one root per interval (pi/2 + j*pi, pi*(1+j)).

    M0            : initial mass in the polymer
    D             : diffusion coefficient
    K             : partition coefficient (enters via alpha)
    PolymerVolume, SurfaceArea : geometry; L = PolymerVolume / SurfaceArea
    SolventVolume : bath (solvent) volume
    nterms        : number of series terms to keep
    """
    L = PolymerVolume / SurfaceArea  # effective length scale of the component
    alpha = SolventVolume / PolymerVolume / K
    Minfty = M0 / (1. + 1. / (alpha))  # equilibrium (t -> infinity) extracted mass
    eps = 1e-8  # keep the bracket endpoints clear of the tan() singularities
    f = lambda x: np.tan(x) + alpha * x
    # f goes from -inf to a positive value on each bracket, so bisect always
    # finds the one root there. (The original wrapped this call in a
    # `while not solved` loop that set solved = True unconditionally — dead
    # code, since bisect either returns or raises; removed.)
    qn = np.array([
        bisect(f, np.pi / 2. + j * np.pi + eps, np.pi * (1. + j) - eps, xtol=1e-6)
        for j in range(nterms)
    ])
    # Vectorized series sum over all nterms eigenvalues at once.
    terms = (2. * alpha * (1. + alpha)) * np.exp(-D * qn ** 2. * ExtractionTime / L ** 2.) / (
        1. + alpha + alpha ** 2. * qn ** 2.)
    return Minfty * (1. - np.sum(terms))
def PlaneSheetFiniteBathMassApprox(M0, D, K, PolymerVolume, SurfaceArea, SolventVolume, ExtractionTime):
    """Short-time (erfc) approximation to the finite-bath plane-sheet extraction.

    Same arguments as PlaneSheetFiniteBathMass but without a series truncation;
    intended for small dimensionless times where the series converges slowly.
    """
    L = PolymerVolume / SurfaceArea  # effective length scale of the component
    alpha = SolventVolume / PolymerVolume / K
    Minfty = M0 / (1. + 1. / (alpha))
    T = D * ExtractionTime / L ** 2.
    if (T / alpha ** 2. < 100.):
        # Exact short-time expression: exp * erfc stays finite here.
        factor = 1. - np.exp(T / alpha ** 2.) * special.erfc(np.sqrt(T) / alpha)
    else:
        # exp() would blow up against erfc's tiny tail; switch to the
        # asymptotic expansion of the same expression.
        factor = (1. - alpha / (np.sqrt(np.pi) * np.sqrt(T))
                  + alpha ** 3. / (2. * np.sqrt(np.pi) * (T) ** 1.5)
                  - 3. * alpha ** 5. / (4. * np.sqrt(np.pi) * (T) ** 2.5))
    return Minfty * (1. + alpha) * factor
def PlaneSheetAnalytical(M0, D, K, PolymerVolume, SurfaceArea, SolventVolume, ExtractionTime, nterms):
    """Finite-bath plane-sheet extraction, choosing the better-conditioned form:
    the eigenfunction series for tau > 0.05, the erfc approximation otherwise."""
    L = PolymerVolume / SurfaceArea  # effective length scale of the component
    tau = D * ExtractionTime / L ** 2.
    if tau > 0.05:
        return PlaneSheetFiniteBathMass(M0, D, K, PolymerVolume, SurfaceArea, SolventVolume, ExtractionTime, nterms)
    return PlaneSheetFiniteBathMassApprox(M0, D, K, PolymerVolume, SurfaceArea, SolventVolume, ExtractionTime)
def Piecewise(Mw, params):
    """Evaluate a two-piece function of Mw.

    params = (mw_cut, func_lo, func_hi, params_lo, params_hi): func_lo is used
    for Mw <= mw_cut and func_hi above it, each called with its own extra args.
    """
    mw_cut, func_lo, func_hi, params_lo, params_hi = params
    branch, extra = (func_lo, params_lo) if Mw <= mw_cut else (func_hi, params_hi)
    return branch(Mw, *extra)
def PowerLaw(Mw, A, B):
    """Power-law D(Mw): computed as exp(A + B*ln(Mw)), i.e. e^A * Mw^B."""
    return np.exp(A + B * np.log(Mw))
def weight_func(dists):
    """
    Row-normalized inverse-distance weights, following the OPERA method.

    The offset f caps the maximum weight so 1/dist stays finite at dist = 0;
    a very small f mimics sklearn's default behavior when dist = 0.
    (Other values tried for OPERA variants: 1e-15, 5e-2, 1e-3.)
    """
    f = 5e-3
    raw = 1 / (f + dists)
    # Normalize each row so its weights sum to one.
    return raw / raw.sum(axis=1)[..., None]
# NOTE: superseded by Piringer() above — same correlation, with T as an
# explicit argument instead of defaulting to 310 K; kept for reference.
#def func_piringer(Mw,T,Ap):
#    if Mw > 1100.: # if molecule is greater than 1100 g/mol, default to that value as worst case
#        Mw = 1100.
#    D = 1e4*np.exp(Ap-0.1351*Mw**(2/3)+0.003*Mw-10454/T)
#    return D