repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu1623.py | itur/models/itu1623.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
import numpy as np
from astropy import units as u
from scipy.optimize import fsolve
from scipy.special import erf as erf
from itur.utils import get_input_type, prepare_quantity, prepare_output_array, prepare_input_array
def Qfunc(z):
    """Tail distribution function of the standard normal distribution.

    Q(z) is the probability that a standard normal (Gaussian) random
    variable takes a value larger than z standard deviations.

    The Q-function can be expressed in terms of the error function as

    .. math::
        Q(z) = \\frac{1}{2} \\left(1 - erf\\left(\\frac{z}{\\sqrt{2}}\\right)\\right)

    Parameters
    ----------
    z: float
        Value to evaluate Q at.

    Returns
    -------
    q : float
        Value of the Q function evaluated at z.
    """
    return (1.0 - erf(z / np.sqrt(2.0))) / 2.0
class __ITU1623__:
    """Prediction method of fade dynamics on Earth-space paths.

    Dispatcher that holds a single concrete implementation of the
    ITU-R P.1623 recommendation and forwards every call to it.

    Available versions:
    * P.1623-0 (04/2003) (Superseded)
    * P.1623-1 (03/2005) (Current version)
    """

    def __init__(self, version=1):
        if version == 0:
            self.instance = _ITU1623_0_()
        elif version == 1:
            self.instance = _ITU1623_1_()
        else:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.1623 model."
            )
        # NOTE(review): appears unused by this model — possibly carried
        # over from another recommendation; confirm before removing.
        self._zero_isoterm_data = {}

    @property
    def __version__(self):
        # Version number of the concrete implementation in use.
        return self.instance.__version__

    def fade_duration(self, D_arr, A, el, f, T_tot):
        """Delegate the fade-duration computation to the active version."""
        return self.instance.fade_duration(D_arr, A, el, f, T_tot)

    def fade_slope(self, z, A, f_B, delta_t):
        """Delegate the fade-slope computation to the active version."""
        return self.instance.fade_slope(z, A, f_B, delta_t)

    def fade_depth(self, N_target, D_target, A, PofA, el, f):
        """Delegate the fade-depth computation to the active version."""
        return self.instance.fade_depth(N_target, D_target, A, PofA, el, f)
class _ITU1623_1_:
    """Implementation of recommendation ITU-R P.1623-1 (03/2005).

    Provides the prediction methods for fade dynamics on Earth-space
    paths described in Annex 1 of the recommendation: fade duration
    statistics, fade slope statistics, and fade depth.
    """

    def __init__(self):
        self.__version__ = 1
        self.year = 2005
        self.month = 3
        self.link = "https://www.itu.int/rec/R-REC-P.1623-1-200503-I/en"

    @classmethod
    def fade_duration(cls, D_arr, A, el, f, T_tot):
        """Compute fade duration statistics (Annex 1, Section 2.2).

        Parameters
        ----------
        D_arr : numpy.ndarray
            Event durations (s).
        A : number
            Attenuation threshold (dB).
        el : number
            Elevation angle (deg), recommended range 5-60.
        f : number
            Frequency (GHz), recommended range 10-50.
        T_tot : number
            Total exceedance time of threshold A (s).

        Returns
        -------
        numpy.ndarray
            Array [p, F, N, T]: probability of occurrence of fades of
            duration d > D, cumulative exceedance probability, total
            number of fades of duration d > D, and total fading time
            due to fades of duration d > D.
        """
        if np.any(f < 10) or np.any(f > 50):
            warnings.warn(
                RuntimeWarning(
                    'The method to compute fade duration parameters '
                    'in recommendation ITU-P 1623-1 is only '
                    'recommended for frequencies in the 10-50GHz range'))

        if np.any(el < 5) or np.any(el > 60):
            warnings.warn(
                RuntimeWarning(
                    'The method to compute fade duration parameters '
                    'in recommendation ITU-P 1623-1 is only '
                    'recommended for elevation angles in the 5-60deg range'))

        # Step 1: Calculate the mean duration D0 of the log-normal
        # distribution of the fraction of fading time due to fades of long
        # duration, given that the attenuation is greater than A:
        D_0 = 80 * (el ** (-0.4)) * (f ** (1.4)) * (A ** (-0.39))  # seconds

        # Step 2: Calculate the standard deviation sigma of the log-normal
        # distribution of the fraction of fading time due to fades of long
        # duration:
        sigma = 1.85 * (f ** (-0.05)) * (A ** (-0.027))

        # Step 3: Calculate the exponent gamma of the power-law
        # distribution of the fraction of fading time due to fades of
        # short duration:
        gamma = 0.055 * (f ** 0.65) * (A ** (-0.003))

        # Step 4: Calculate the boundary between short and long fade
        # durations, Dt:
        p_1 = (0.885 * gamma) - 0.814
        p_2 = (-1.05 * (gamma ** 2)) + (2.23 * gamma) - 1.61
        D_t = D_0 * np.exp(p_1 * sigma ** 2 + p_2 * sigma - 0.39)

        # Step 5: Calculate the mean duration D2 of the log-normal
        # distribution of the probability of occurrence of fading events
        # of long duration:
        D_2 = D_0 * np.exp(-(sigma ** 2))

        # Step 6: Calculate the fraction of time k due to fades of
        # duration less than Dt:
        Q_1 = Qfunc((np.log(D_t) - np.log(D_0)) / sigma)
        Q_2 = Qfunc((np.log(D_t) - np.log(D_2)) / sigma)
        k = 1. / (1 + ((np.sqrt(D_0 * D_2) *
                        (1 - gamma) * Q_1) / (D_t * gamma * Q_2)))

        # Step 7: Probability of occurrence of fade events of duration d
        # longer than D given that attenuation a is greater than A.
        # Power-law regime for 1 <= D <= Dt, log-normal regime above Dt:
        Q_ratio_p = (Qfunc(np.log(D_arr / D_2) / sigma) /
                     Qfunc(np.log(D_t / D_2) / sigma))
        p = np.where(
            np.logical_and(D_arr >= 1, D_arr <= D_t),
            D_arr ** -gamma,
            (D_t ** -gamma) * Q_ratio_p,
        )

        # Step 8: Cumulative probability of exceedance, i.e. the total
        # fraction of fade time due to fades of duration d longer than D:
        Q_ratio_F = (Qfunc(np.log(D_arr / D_0) / sigma) /
                     Qfunc(np.log(D_t / D_0) / sigma))  # or divide by Q_2
        F = np.where(
            np.logical_and(D_arr >= 1, D_arr <= D_t),
            (1 - (k * (D_arr / D_t) ** (1 - gamma))),
            ((1 - k) * Q_ratio_F),
        )

        # Step 9: Compute N(D, A), the total number of fades of duration d
        # longer than D for a given threshold A.
        # Step 9a: Compute Ntot using the Ttot(A) parameter:
        N_tot = T_tot * (k / gamma) * ((1 - gamma) / (D_t ** (1 - gamma)))

        # Compute number of fades N(D, A):
        N = N_tot * p

        # Compute T(d > D | a > A), total fading time due to fades of
        # duration d longer than D for the threshold A:
        T = T_tot * F

        return np.array([p, F, N, T])

    @classmethod
    def fade_slope(cls, z, A, f_B, delta_t):
        """Compute fade slope statistics (Annex 1, Section 3.2).

        Parameters
        ----------
        z : number or numpy.ndarray
            Fade slope values (dB/s).
        A : number
            Attenuation value (dB).
        f_B : number
            3 dB cut-off frequency of the low-pass filter (Hz).
        delta_t : number
            Time interval over which fade slope is calculated (s).

        Returns
        -------
        tuple
            (p, P, P2, sigma_z): conditional probability density of the
            fade slope, complementary CDF of the fade slope, complementary
            CDF of the absolute fade slope, and standard deviation of the
            conditional fade slope.
        """
        # Step 1: Calculate F (eq. 18), the dependence of the fade slope
        # on the filter cut-off frequency and the time interval:
        b = 2.3
        F = np.sqrt(
            (2 * np.pi) ** 2 / ((1 / f_B ** b) + (2 * delta_t) ** b) ** (1 / b)
        )

        # Step 2: Calculate STD of the conditional fade slope.
        # s is a parameter which depends on climate and elevation angle;
        # an overall average value in Europe and the United States of
        # America, at elevations between 10 and 50 deg, is s = 0.01.
        s = 0.01
        sigma_z = s * F * A

        z_over_sigmaz = z / sigma_z
        abs_z_over_sigmaz = np.abs(z) / sigma_z

        # Step 3a: Conditional probability density (eq. 20). The density
        # is p(z|A) = 2 / (pi * sigma_z * (1 + (z/sigma_z)**2)**2).
        # (Fixed: the previous code squared z / sigma_z**2 instead of
        # (z / sigma_z), which is inconsistent with the closed-form CDF
        # below — differentiating P recovers exactly this density.)
        p = 2 / (np.pi * sigma_z * (1 + z_over_sigmaz ** 2) ** 2)  # eq. 20

        # Step 3b: Calculate P(z|A), the conditional probability
        # (complementary cumulative distribution function) that the fade
        # slope z is exceeded for a given attenuation value, A:
        P = (
            0.5
            - z_over_sigmaz / (np.pi * (1 + z_over_sigmaz ** 2))
            - (np.arctan(z_over_sigmaz) / np.pi)
        )

        # Step 3b2: Calculate the conditional probability that the
        # absolute value of the fade slope is exceeded for a given
        # attenuation value, A:
        P2 = (
            1
            - 2 * abs_z_over_sigmaz / (np.pi * (1 + abs_z_over_sigmaz ** 2))
            - (2 * np.arctan(abs_z_over_sigmaz) / np.pi)
        )
        return p, P, P2, sigma_z

    @classmethod
    def fade_depth(cls, N_target, D_target, A, PofA, el, f):
        """Solve for the minimum attenuation meeting an outage target.

        Numerically inverts the fade-duration method with scipy's fsolve
        so that the predicted number of events of duration longer than
        D_target equals N_target.

        Parameters
        ----------
        N_target : int
            Target outage intensity (number of events).
        D_target : int
            Target event duration (s).
        A : numpy.ndarray
            Attenuation distribution (CDF, dB) for the link.
        PofA : numpy.ndarray
            Probability that each value of A is exceeded (%).
        el : number
            Elevation angle (deg).
        f : number
            Frequency (GHz).

        Returns
        -------
        float
            Minimum attenuation the link must tolerate.
        """
        d_target = np.atleast_1d(D_target)

        def delta_N_events(x):
            # Interpolate the exceedance probability of attenuation x on a
            # logarithmic scale, convert to yearly exceedance time, and
            # compare the predicted number of events with the target.
            P_it = 10 ** (np.interp(x, A, np.log10(PofA)))
            T_tot_it = (P_it / 100) * 365.25 * 86400
            _, _, N_it, _ = cls.fade_duration(d_target, x, el, f, T_tot_it)
            return N_target - N_it

        a_min = fsolve(delta_N_events, 1)  # a_min has shape (1,)
        return a_min.item()
class _ITU1623_0_:
    """Implementation of recommendation ITU-R P.1623-0 (04/2003).

    The computational procedures coincide with those of version 1, so
    every method delegates directly to _ITU1623_1_.
    """

    def __init__(self):
        self.__version__ = 0
        self.year = 2003
        self.month = 4
        self.link = "https://www.itu.int/rec/R-REC-P.1623-0-200304-S/en"

    @staticmethod
    def fade_duration(*args, **kwargs):
        # Same procedure as in recommendation ITU-R P.1623-1.
        return _ITU1623_1_.fade_duration(*args, **kwargs)

    @staticmethod
    def fade_slope(*args, **kwargs):
        # Same procedure as in recommendation ITU-R P.1623-1.
        return _ITU1623_1_.fade_slope(*args, **kwargs)

    @staticmethod
    def fade_depth(*args, **kwargs):
        # Same procedure as in recommendation ITU-R P.1623-1.
        return _ITU1623_1_.fade_depth(*args, **kwargs)
# Module-level singleton holding the active ITU-R P.1623 implementation;
# replaced by change_version() and queried by the wrapper functions below.
__model = __ITU1623__()
def change_version(new_version):
    """
    Change the version of the ITU-R P.1623 recommendation currently being used.

    This function changes the model used for the ITU-R P.1623 recommendation
    to a different version.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 1: Activates recommendation ITU-R P.1623-1 (03/2005) (Current version)
        * 0: Activates recommendation ITU-R P.1623-0 (04/2003) (Superseded)

    Raises
    ------
    ValueError
        If new_version is not one of the implemented versions.
    """
    # Rebind the module-level singleton used by all wrapper functions.
    global __model
    __model = __ITU1623__(new_version)
def get_version():
    """
    Obtain the version of the ITU-R P.1623 recommendation currently being used.

    Returns
    -------
    version: int
        Version currently being used.
    """
    return __model.__version__
def fade_duration_probability(D, A, el, f):
    """Compute the probability of occurrence of fades of duration longer than D.

    Compute the probability of occurrence of fades of duration d longer than
    D (s), given that the attenuation a is greater than A (dB).

    This probability can be estimated from the ratio of the number of fades
    of duration longer than D to the total number of fades observed,
    given that the threshold A is exceeded.

    Parameters
    ----------
    D: number, sequence, or numpy.ndarray
        Event durations, array, (s)
    A: number
        Attenuation threshold, scalar, (dB)
    el: number
        Elevation angle towards the satellite, deg (5 - 60)
    f: number
        Frequency, GHz (between 10 and 50 GHz)

    Returns
    -------
    p: number, sequence, or numpy.ndarray
        Probability of occurrence of fade events of duration d longer than D
        given a>A, P(d > D|a > A)

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    type_output = get_input_type(D)
    # The attenuation threshold is a level in dB, not a rate in dB/s
    # (fixed: it was prepared with u.dB / u.s, inconsistent with the
    # documented units).
    A = prepare_quantity(A, u.dB, 'Attenuation threshold')
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    # T_tot = 1 is a placeholder: index [0] of the result (the
    # probability) does not depend on the total exceedance time.
    val = __model.fade_duration(D, A, el, f, 1)[0]
    return prepare_output_array(val, type_output) * u.dimensionless_unscaled
def fade_duration_cummulative_probability(D, A, el, f):
    """Compute the cumulative probability of exceedance of fades of duration
    longer than D.

    Compute the cumulative exceedance probability F(d > D|a > A),
    the total fraction (between 0 and 1) of fade time due to fades of duration
    d longer than D (s), given that the attenuation a is greater than A (dB).

    Parameters
    ----------
    D: number, sequence, or numpy.ndarray
        Event durations, array, (s)
    A: number
        Attenuation threshold, scalar, (dB)
    el: number
        Elevation angle towards the satellite, deg (5 - 60)
    f: number
        Frequency, GHz (between 10 and 50 GHz)

    Returns
    -------
    F: number, sequence, or numpy.ndarray
        Cumulative probability of exceedance, total fraction of fade time
        due to fades of d > D

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    type_output = get_input_type(D)
    # The attenuation threshold is a level in dB, not a rate in dB/s
    # (fixed: it was prepared with u.dB / u.s, inconsistent with the
    # documented units).
    A = prepare_quantity(A, u.dB, 'Attenuation threshold')
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    # T_tot = 1 is a placeholder: index [1] of the result (the cumulative
    # probability) does not depend on the total exceedance time.
    val = __model.fade_duration(D, A, el, f, 1)[1]
    return prepare_output_array(val, type_output) * u.dimensionless_unscaled
def fade_duration_number_fades(D, A, el, f, T_tot):
    """Compute the number of fades of duration longer than D.

    For a given reference period, the number of fades of duration longer than
    D is estimated by multiplying the probability of occurrence
    P(d > D|a > A) by the total number of fades exceeding the threshold,
    Ntot(A).

    Parameters
    ----------
    D: number, sequence, or numpy.ndarray
        Event durations, array, (s)
    A: number
        Attenuation threshold, scalar, (dB)
    el: number
        Elevation angle towards the satellite, deg (5 - 60)
    f: number
        Frequency, GHz (between 10 and 50 GHz)
    T_tot: number
        Total fade time from cumulative distribution (P(A)/100)*Reference time
        period. T_tot should be obtained from local data. If this long-term
        statistic is not available, an estimate can be calculated from
        Recommendation ITU-R P.618. In this case the procedure consists in
        calculating the CDF of total attenuation, deriving the percentage of
        time the considered attenuation threshold A is exceeded and then the
        associated total exceedance time T_tot for the reference period
        considered.
        For a reference period of a year,
        T_tot = ((100-availability_in_pctg)/100)*365.25*24*3600 [s]

    Returns
    -------
    N: Total number of fades of duration d longer than D, for a given
        threshold A

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    type_output = get_input_type(D)
    D = prepare_input_array(D)
    # The attenuation threshold is a level in dB, not a rate in dB/s
    # (fixed: it was prepared with u.dB / u.s, inconsistent with the
    # documented units).
    A = prepare_quantity(A, u.dB, 'Attenuation threshold')
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    val = __model.fade_duration(D, A, el, f, T_tot)[2]
    return prepare_output_array(val, type_output) * u.dimensionless_unscaled
def fade_duration_total_exceedance_time(D, A, el, f, T_tot):
    """Compute the total exceedance time of fades of duration longer than D.

    The total exceedance time due to fade events of duration longer than D is
    obtained by multiplying the fraction of time F(d > D|a > A) by the total
    time that the threshold is exceeded, Ttot(A).

    Parameters
    ----------
    D: number, sequence, or numpy.ndarray
        Event durations, array, (s)
    A: number
        Attenuation threshold, scalar, (dB)
    el: number
        Elevation angle towards the satellite, deg (5 - 60)
    f: number
        Frequency, GHz (between 10 and 50 GHz)
    T_tot: number
        Total fade time from cumulative distribution (P(A)/100)*Reference time
        period. T_tot should be obtained from local data. If this long-term
        statistic is not available, an estimate can be calculated from
        Recommendation ITU-R P.618. In this case the procedure consists in
        calculating the CDF of total attenuation, deriving the percentage of
        time the considered attenuation threshold A is exceeded and then the
        associated total exceedance time T_tot for the reference period
        considered.
        For a reference period of a year,
        T_tot = ((100-availability_in_pctg)/100)*365.25*24*3600 [s]

    Returns
    -------
    T: Total fading time due to fades of d > D for A threshold.

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    type_output = get_input_type(D)
    # The attenuation threshold is a level in dB, not a rate in dB/s
    # (fixed: it was prepared with u.dB / u.s, inconsistent with the
    # documented units).
    A = prepare_quantity(A, u.dB, 'Attenuation threshold')
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    val = __model.fade_duration(D, A, el, f, T_tot)[3]
    return prepare_output_array(val, type_output) * u.s
def fade_duration(D, A, el, f, T_tot):
    """Compute the probability of occurrence of fades of duration longer than D.

    Compute the probability of occurrence of fades of duration d longer than
    D (s), given that the attenuation a is greater than A (dB) and
    F(d > D|a > A), the cumulative exceedance probability, or, equivalently,
    the total fraction (between 0 and 1) of fade time due to fades of duration
    d longer than D (s), given that the attenuation a is greater than A (dB).

    The function also returns other parameters associated to the fade duration
    prediction method. See ITU-R P.1623 Annex 1 Section 2.2

    Parameters
    ----------
    D: number, sequence, or numpy.ndarray
        Event durations, array, (s)
    A: number
        Attenuation threshold, scalar, (dB)
    el: number
        Elevation angle towards the satellite, deg (5 - 60)
    f: number
        Frequency, GHz (between 10 and 50 GHz)
    T_tot: number
        Total fade time from cumulative distribution (P(A)/100)*Reference time
        period. T_tot should be obtained from local data. If this long-term
        statistic is not available, an estimate can be calculated from
        Recommendation ITU-R P.618. In this case the procedure consists in
        calculating the CDF of total attenuation, deriving the percentage of
        time the considered attenuation threshold A is exceeded and then the
        associated total exceedance time T_tot for the reference period
        considered.
        For a reference period of a year,
        T_tot = ((100-availability_in_pctg)/100)*365.25*24*3600 [s]

    Returns
    -------
    p: probability of occurrence of fade events of
        duration d longer than D given a>A, P(d > D|a > A)
    F: cumulative probability of exceedance, total
        fraction of fade time due to fades of d > D
    N: total number of fades of duration d longer than D, for a given
        threshold A
    T: total fading time due to fades of d > D for A threshold

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    # Result intentionally unused: the raw [p, F, N, T] array from the
    # model is returned without output-type conversion.
    get_input_type(D)
    # The attenuation threshold is a level in dB, not a rate in dB/s
    # (fixed: it was prepared with u.dB / u.s, inconsistent with the
    # documented units).
    A = prepare_quantity(A, u.dB, 'Attenuation threshold')
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    val = __model.fade_duration(D, A, el, f, T_tot)
    return val
def fade_slope(z, A, f_B, delta_t):
    """Compute the probability of exceeding a value of fade slope.

    Fade slope is defined as the rate of change of attenuation with time.
    Information about the expected fade slope is essential to assess the
    required minimum tracking rate of a fade mitigation system.

    The model is valid for the following ranges of parameters:
    * frequencies from 10 to 30 GHz
    * elevation angles from 10 deg to 50 deg.

    See ITU-R P.1623 Annex 1 Section 3.2

    Parameters
    ----------
    z: number, sequence, or numpy.ndarray
        array of fade slope values (dB/s)
    A: number
        attenuation threshold, scalar, dB (range 0 - 20 dB)
    f_B: number
        3 dB cut-off frequency of the low pass filter (Hz, range 0.001 - 1)
        used to remove tropospheric scintillation and rapid variations of rain
        attenuation from the signal. Experimental results show that a 3 dB
        cut-off frequency of 0.02 Hz allows scintillation and rapid variations
        of rain attenuation to be filtered out adequately.
    delta_t: number
        Time interval length over which fade slope is calculated (s), 2-200 s

    Returns
    -------
    p: conditional probability (probability density function)
        that the fade slope is equal to the fade slope for
        a given attenuation value, A
    P: conditional probability (complementary cumulative
        distribution function) that the fade slope is exceeded
        for a given attenuation value, A
    P2: conditional probability that the absolute value of
        the fade slope is exceeded for a given attenuation
        value, A
    sigma_z: standard deviation of the conditional fade slope

    Remark
    ------
    The output is an array of 4 elements.

    Example
    -------
    .. code-block:: python

        import itur.models.itu1623 as itu1623

        z = np.linspace(-2,2,100)
        A = 10
        f_B = 0.02
        delta_t = 1
        p, P, P2, sigma_z = itu1623.fade_slope(z, A, f_B, delta_t)

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    # Result intentionally unused: the raw (p, P, P2, sigma_z) tuple from
    # the model is returned without output-type conversion.
    get_input_type(z)
    z = prepare_quantity(z, u.dB / u.s, 'Fade slope values')
    # The attenuation threshold is a level in dB, not a rate in dB/s
    # (fixed: it was prepared with u.dB / u.s, inconsistent with the
    # documented units).
    A = prepare_quantity(A, u.dB, 'Attenuation threshold')
    delta_t = prepare_quantity(delta_t, u.s, 'Time interval')
    # The cut-off frequency of the low-pass filter is documented in Hz
    # (fixed: it was prepared with u.GHz).
    f_B = prepare_quantity(f_B, u.Hz, 'Cut-off Frequency')
    val = __model.fade_slope(z, A, f_B, delta_t)
    return val
def fade_depth(N_target, D_target, A, PofA, el, f):
    """Compute the maximum fade a link must tolerate given a target outage
    intensity value (number of events) and a target duration of event.

    The fade depth is obtained by numerically solving the fade_duration
    problem. See ITU-R P.1623 Annex 1 Section 3.2

    Parameters
    ----------
    N_target : int
        Target outage intensity (scalar)
    D_target : int
        Event duration (scalar)
    A : number, sequence, or numpy.ndarray
        Attenuation distribution (CDF, A) for the link under analysis
    PofA : number, sequence, or numpy.ndarray
        Probability that A is exceeded (CDF, probability)
    el : number
        Elevation angle (deg)
    f : number
        Frequency (GHz)

    Returns
    -------
    a_min: number
        Minimum attenuation the link must tolerate to meet the OI target

    Remark
    ------
    This function uses scipy's fsolve as optimizer.

    Example
    -------
    .. code-block:: python

        import itur.models.itu1623 as itu1623

        N_target = 25
        D_target = 60
        PofA = np.array([50, 30, 20, 10, 5, 3, 2, 1, .5, .3, .2, .1, .05, .03,
                         .02, .01, .005, .003, .002, .001])
        A = np.array([0.4, 0.6, 0.8, 1.8, 2.70, 3.5, 4.20, 5.7, 7.4, 9, 10.60,
                      14, 18.3, 22.3, 25.8, 32.6, 40.1, 46.1, 50.8, 58.8])
        el = 38.5
        f = 28
        itu1623.fade_depth(N_target, D_target, A, PofA, el, f)  # 21.6922280

    References
    ----------
    [1] Prediction method of fade dynamics on Earth-space paths:
    https://www.itu.int/rec/R-REC-P.1623/en
    """
    # Result intentionally unused; the scalar fade depth is returned as-is.
    get_input_type(A)
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    return __model.fade_depth(N_target, D_target, A, PofA, el, f)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu836.py | itur/models/itu836.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from astropy import units as u
from itur.models.itu1511 import topographic_altitude
from itur.models.itu1144 import (bilinear_2D_interpolator,
bicubic_2D_interpolator)
from itur.utils import (prepare_input_array, prepare_output_array,
dataset_dir, prepare_quantity, get_input_type,
load_data_interpolator)
def __interpolator_836__(self, data, lat, lon, p, alt=None,
                         alt_res_fcn=topographic_altitude):
    """Interpolate an ITU-R P.836 data map at the requested points.

    Bilinearly interpolates the map returned by `data` on its
    1.125-degree lat/lon grid, rescales the corner values from the grid
    altitude to the target altitude using the water vapour scale height
    (VSCH), and, when p is not one of the precomputed exceedance
    probabilities, interpolates between the two bracketing probability
    maps on a log(p) scale.

    Parameters
    ----------
    self : object
        Instance of a versioned _ITU836_x class (must provide VSCH).
    data : callable
        Map accessor (lats, lons, p) -> values, e.g. self.rho or self.V.
    lat, lon : numpy.ndarray
        Latitudes and longitudes of the points of interest (deg).
    p : number
        Percentage of time exceeded for p% of the average year.
    alt : numpy.ndarray or None
        Altitude of the points. If None, the reference altitude of the
        dataset grid is used (i.e. no altitude rescaling takes place).
    alt_res_fcn : callable
        Function giving the reference altitude of the dataset grid.
    """
    lat_f = lat.flatten()
    lon_f = lon.flatten()

    # Exceedance probabilities for which maps are available.
    available_p = np.array([0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10,
                            20, 30, 50, 60, 70, 80, 90, 95, 99])

    if p in available_p:
        p_below = p_above = p
        pExact = True
    else:
        # Find the two available probabilities bracketing p, clipped to
        # the ends of the table.
        pExact = False
        idx = available_p.searchsorted(p, side='right') - 1
        idx = np.clip(idx, 0, len(available_p) - 1)
        p_below = available_p[idx]
        idx = np.clip(idx + 1, 0, len(available_p) - 1)
        p_above = available_p[idx]

    # Integer row/column indices of the grid cell containing each point
    # (1.125-degree grid, rows counted from 90 deg N downwards).
    R = -(lat_f - 90) // 1.125
    C = lon_f // 1.125

    # Latitude/longitude of the four corners of each enclosing cell.
    lats = np.array([90 - R * 1.125, 90 - (R + 1) * 1.125,
                     90 - R * 1.125, 90 - (R + 1) * 1.125])

    lons = np.mod(np.array([C * 1.125, C * 1.125,
                            (C + 1) * 1.125, (C + 1) * 1.125]), 360)

    # Fractional grid coordinates used as bilinear weights.
    r = - (lat_f - 90) / 1.125
    c = lon_f / 1.125

    data_a = data(lats, lons, p_above)
    VSCH_a = self.VSCH(lats, lons, p_above)

    # Compute the altitude of the data point
    if alt_res_fcn is topographic_altitude:
        # .value strips the units from the Quantity returned by
        # topographic_altitude.
        altitude_res = alt_res_fcn(lats, lons).value.reshape(lats.shape)
    else:
        altitude_res = alt_res_fcn(lats, lons)

    if alt is None:
        alt = altitude_res
    else:
        alt = alt.flatten()

    # Rescale the corner values from the grid altitude to the target
    # altitude with the exponential scale-height model, then combine
    # them with the bilinear weights.
    data_a = data_a * np.exp(- (alt - altitude_res) * 1.0 / (VSCH_a))
    data_a = (data_a[0, :] * ((R + 1 - r) * (C + 1 - c)) +
              data_a[1, :] * ((r - R) * (C + 1 - c)) +
              data_a[2, :] * ((R + 1 - r) * (c - C)) +
              data_a[3, :] * ((r - R) * (c - C)))

    if not pExact:
        # Same procedure for the probability level below p.
        data_b = data(lats, lons, p_below)
        VSCH_b = self.VSCH(lats, lons, p_below)
        data_b = data_b * np.exp(- (alt - altitude_res) / (VSCH_b))
        data_b = (data_b[0, :] * ((R + 1 - r) * (C + 1 - c)) +
                  data_b[1, :] * ((r - R) * (C + 1 - c)) +
                  data_b[2, :] * ((R + 1 - r) * (c - C)) +
                  data_b[3, :] * ((r - R) * (c - C)))

    # Compute the values of Lred_a: interpolate between the two
    # probability levels on a log(p) scale when p was not exact.
    if not pExact:
        rho = data_b + (data_a - data_b) * (np.log(p) - np.log(p_below)) / \
            (np.log(p_above) - np.log(p_below))
        return rho.reshape(lat.shape)
    else:
        return data_a.reshape(lat.shape)
class __ITU836():
    """Private class to model the ITU-R P.836 recommendations.

    Water vapour: surface density and total columnar content

    Available versions:
    * P.836-6 (12/17) (Current version)
    * P.836-5 (09/13) (Superseded)
    * P.836-4 (10/09) (Superseded)

    Not available versions:
    * P.836-0 (03/92) (Superseded)
    * P.836-1 (08/97) (Superseded)
    * P.836-2 (02/01) (Superseded)
    * P.836-3 (11/01) (Superseded)
    """

    def __init__(self, version=6):
        # Instantiate the concrete implementation for the requested version.
        if version == 4:
            self.instance = _ITU836_4()
        elif version == 5:
            self.instance = _ITU836_5()
        elif version == 6:
            self.instance = _ITU836_6()
        else:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.836 model."
            )
        self._V = {}
        self._VSCH = {}
        self._rho = {}
        self._topo_alt = None

    @property
    def __version__(self):
        # Version number of the concrete implementation in use.
        return self.instance.__version__

    def _vectorized_call(self, fcn, lat, lon, p, alt):
        # Vectorize over p only; lat, lon and alt (positions 0, 1 and 3)
        # are handled inside the concrete implementation.
        vec = np.vectorize(fcn, excluded=[0, 1, 3], otypes=[np.ndarray])
        return np.array(vec(lat, lon, p, alt).tolist())

    def surface_water_vapour_density(self, lat, lon, p, alt):
        """Delegate to the active version, vectorizing over p."""
        return self._vectorized_call(
            self.instance.surface_water_vapour_density, lat, lon, p, alt)

    def total_water_vapour_content(self, lat, lon, p, alt):
        """Delegate to the active version, vectorizing over p."""
        return self._vectorized_call(
            self.instance.total_water_vapour_content, lat, lon, p, alt)
class _ITU836_6():
    """Implementation of recommendation ITU-R P.836-6 (12/2017)."""

    # Percentages of time for which digital maps are provided with the
    # recommendation.
    _available_p = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                    50, 60, 70, 80, 90, 95, 99]

    def __init__(self):
        self.__version__ = 6
        self.year = 2017
        self.month = 12
        self.link = 'https://www.itu.int/rec/R-REC-P.836-6-201712-I/en'
        # Lazily-populated caches of per-probability 2-D interpolators.
        self._V = {}
        self._VSCH = {}
        self._rho = {}
        self._topo_alt = None

    def _interpolator(self, cache, name, lat, lon, p):
        # Load (once) the interpolators for every available probability of
        # the map family `name` ('v', 'vsch' or 'rho'), then evaluate the
        # one corresponding to p on the (lat, lon) points. Factored out of
        # the previously triplicated V / VSCH / rho bodies.
        if not cache:
            d_dir = os.path.join(dataset_dir, '836/v6_%s_%%s.npz' % name)
            for p_loads in self._available_p:
                cache[float(p_loads)] = load_data_interpolator(
                    '836/v6_lat.npz', '836/v6_lon.npz',
                    d_dir % (str(p_loads).replace('.', '')),
                    bilinear_2D_interpolator, flip_ud=False)
        return cache[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def V(self, lat, lon, p):
        """Total water vapour content map evaluated at (lat, lon) for p."""
        return self._interpolator(self._V, 'v', lat, lon, p)

    def VSCH(self, lat, lon, p):
        """Water vapour scale height map evaluated at (lat, lon) for p."""
        return self._interpolator(self._VSCH, 'vsch', lat, lon, p)

    def rho(self, lat, lon, p):
        """Surface water vapour density map evaluated at (lat, lon) for p."""
        return self._interpolator(self._rho, 'rho', lat, lon, p)

    def topo_alt(self, lat, lon):
        """Topographic altitude of the v6 dataset grid at (lat, lon)."""
        if self._topo_alt is None:
            self._topo_alt = load_data_interpolator(
                '836/v6_topolat.npz', '836/v6_topolon.npz',
                '836/v6_topo_0dot5.npz', bicubic_2D_interpolator)
        return self._topo_alt(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def surface_water_vapour_density(self, lat, lon, p, alt=None):
        """Surface water vapour density exceeded for p% of the year."""
        return __interpolator_836__(
            self, data=self.rho, lat=lat, lon=lon, p=p, alt=alt,
            alt_res_fcn=self.topo_alt)

    def total_water_vapour_content(self, lat, lon, p, alt=None):
        """Total columnar water vapour content exceeded for p% of the year."""
        return __interpolator_836__(
            self, data=self.V, lat=lat, lon=lon, p=p, alt=alt,
            alt_res_fcn=self.topo_alt)
class _ITU836_5():
    """Implementation of recommendation ITU-R P.836-5 (09/2013)."""

    # Percentages of time for which digital maps are provided with the
    # recommendation.
    _available_p = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                    50, 60, 70, 80, 90, 95, 99]

    def __init__(self):
        self.__version__ = 5
        self.year = 2013
        self.month = 9
        self.link = 'https://www.itu.int/rec/R-REC-P.836-5-201309-I/en'
        # Lazily-populated caches of per-probability 2-D interpolators.
        self._V = {}
        self._VSCH = {}
        self._rho = {}

    def _interpolator(self, cache, name, lat, lon, p):
        # Load (once) the interpolators for every available probability of
        # the map family `name` ('v', 'vsch' or 'rho'), then evaluate the
        # one corresponding to p on the (lat, lon) points. Factored out of
        # the previously triplicated V / VSCH / rho bodies.
        if not cache:
            d_dir = os.path.join(dataset_dir, '836/v5_%s_%%s.npz' % name)
            for p_loads in self._available_p:
                cache[float(p_loads)] = load_data_interpolator(
                    '836/v5_lat.npz', '836/v5_lon.npz',
                    d_dir % (str(p_loads).replace('.', '')),
                    bilinear_2D_interpolator, flip_ud=False)
        return cache[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def V(self, lat, lon, p):
        """Total water vapour content map evaluated at (lat, lon) for p."""
        return self._interpolator(self._V, 'v', lat, lon, p)

    def VSCH(self, lat, lon, p):
        """Water vapour scale height map evaluated at (lat, lon) for p."""
        return self._interpolator(self._VSCH, 'vsch', lat, lon, p)

    def rho(self, lat, lon, p):
        """Surface water vapour density map evaluated at (lat, lon) for p."""
        return self._interpolator(self._rho, 'rho', lat, lon, p)

    def surface_water_vapour_density(self, lat, lon, p, alt=None):
        """Surface water vapour density exceeded for p% of the year."""
        return __interpolator_836__(
            self, data=self.rho, lat=lat, lon=lon, p=p, alt=alt,
            alt_res_fcn=topographic_altitude)

    def total_water_vapour_content(self, lat, lon, p, alt=None):
        """Total columnar water vapour content exceeded for p% of the year."""
        return __interpolator_836__(
            self, data=self.V, lat=lat, lon=lon, p=p, alt=alt,
            alt_res_fcn=topographic_altitude)
class _ITU836_4():
    """Implementation of recommendation ITU-R P.836-4 (10/2009)."""

    # Percentages of time for which digital maps are provided with the
    # recommendation.
    _available_p = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                    50, 60, 70, 80, 90, 95, 99]

    def __init__(self):
        self.__version__ = 4
        self.year = 2009
        self.month = 10
        self.link = 'https://www.itu.int/rec/R-REC-P.836-4-200910-S/en'
        # Lazily-populated caches of per-probability 2-D interpolators.
        self._V = {}
        self._VSCH = {}
        self._rho = {}

    def _interpolator(self, cache, name, lat, lon, p):
        # Load (once) the interpolators for every available probability of
        # the map family `name` ('v', 'vsch' or 'rho'), then evaluate the
        # one corresponding to p on the (lat, lon) points. Factored out of
        # the previously triplicated V / VSCH / rho bodies.
        if not cache:
            d_dir = os.path.join(dataset_dir, '836/v4_%s_%%s.npz' % name)
            for p_loads in self._available_p:
                cache[float(p_loads)] = load_data_interpolator(
                    '836/v4_lat.npz', '836/v4_lon.npz',
                    d_dir % (str(p_loads).replace('.', '')),
                    bilinear_2D_interpolator, flip_ud=False)
        return cache[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def V(self, lat, lon, p):
        """Total water vapour content map evaluated at (lat, lon) for p."""
        return self._interpolator(self._V, 'v', lat, lon, p)

    def VSCH(self, lat, lon, p):
        """Water vapour scale height map evaluated at (lat, lon) for p."""
        return self._interpolator(self._VSCH, 'vsch', lat, lon, p)

    def rho(self, lat, lon, p):
        """Surface water vapour density map evaluated at (lat, lon) for p."""
        return self._interpolator(self._rho, 'rho', lat, lon, p)

    # The procedure to compute the surface water vapour density and the
    # total water vapour content is similar to the ones in recommendation
    # ITU-P R.836-5.
    def surface_water_vapour_density(self, lat, lon, p, alt=None):
        """Surface water vapour density exceeded for p% of the year."""
        return __interpolator_836__(
            self, data=self.rho, lat=lat, lon=lon, p=p, alt=alt,
            alt_res_fcn=topographic_altitude)

    def total_water_vapour_content(self, lat, lon, p, alt=None):
        """Total columnar water vapour content exceeded for p% of the year."""
        return __interpolator_836__(
            self, data=self.V, lat=lat, lon=lon, p=p, alt=alt,
            alt_res_fcn=topographic_altitude)
# Module-level singleton holding the active ITU-R P.836 implementation;
# replaced by change_version() and queried by the wrapper functions below.
__model = __ITU836()
def change_version(new_version):
    """
    Change the version of the ITU-R P.836 recommendation currently being used.

    This function changes the model used for the ITU-R P.836 recommendation
    to a different version.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 6: Activates recommendation ITU-R P.836-6 (12/17) (Current version)
        * 5: Activates recommendation ITU-R P.836-5 (09/13) (Superseded)
        * 4: Activates recommendation ITU-R P.836-4 (10/09) (Superseded)

    Raises
    ------
    ValueError
        If new_version is not one of the implemented versions.
    """
    # Rebind the module-level singleton used by all wrapper functions.
    global __model
    __model = __ITU836(new_version)
def get_version():
    """
    Obtain the version of the ITU-R P.836 recommendation currently being used.

    Returns
    -------
    version: int
        Version currently being used.
    """
    return __model.__version__
def surface_water_vapour_density(lat, lon, p, alt=None):
    """
    Compute the surface water vapour density along a path.

    This method computes the surface water vapour density along a path at a
    desired location on the surface of the Earth.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    p : number
        Percentage of time exceeded for p% of the average year
    alt : number, sequence, or numpy.ndarray
        Altitude of the receivers. If None, use the topographical altitude as
        described in recommendation ITU-R P.1511

    Returns
    -------
    rho: Quantity
        Surface water vapour density (g/m3)

    References
    ----------
    [1] Water vapour: surface density and total columnar content
    https://www.itu.int/rec/R-REC-P.836/en
    """
    # Remember whether the caller passed scalars, sequences or arrays so
    # the output can be returned in the same form.
    output_type = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Normalize longitudes to the [0, 360) range used by the maps.
    lon = np.mod(prepare_input_array(lon), 360)
    alt = prepare_quantity(
        prepare_input_array(alt), u.km, 'Altitude of the receivers')
    result = __model.surface_water_vapour_density(lat, lon, p, alt)
    return prepare_output_array(result, output_type) * u.g / u.m ** 3
def total_water_vapour_content(lat, lon, p, alt=None):
    """
    Compute the total water vapour content along a path.

    Evaluates the total columnar water vapour content at the given
    locations on the surface of the Earth for an exceedance probability p.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    p : number
        Percentage of time exceeded for p% of the average year
    alt : number, sequence, or numpy.ndarray
        Altitude of the receivers. If None, use the topographical altitude as
        described in recommendation ITU-R P.1511

    Returns
    -------
    V: Quantity
        Total water vapour content (kg/m2)

    References
    ----------
    [1] Water vapour: surface density and total columnar content
    https://www.itu.int/rec/R-REC-P.836/en
    """
    output_type = get_input_type(lat)
    lat, lon = prepare_input_array(lat), prepare_input_array(lon)
    # Digital maps expect longitudes in [0, 360).
    lon = np.mod(lon, 360)
    alt = prepare_quantity(prepare_input_array(alt), u.km,
                           'Altitude of the receivers')
    content = __model.total_water_vapour_content(lat, lon, p, alt)
    return prepare_output_array(content, output_type) * u.kg / u.m**2
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu839.py | itur/models/itu839.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from astropy import units as u
from itur.models.itu1144 import bilinear_2D_interpolator
from itur.utils import (prepare_input_array, prepare_output_array,
load_data_interpolator, get_input_type)
class __ITU839__():
    """Rain height model for prediction methods (ITU-R P.839).

    Available versions include:
    * P.839-2 (10/99) (Superseded)
    * P.839-3 (02/01) (Superseded)
    * P.839-4 (09/2013) (Current version)

    Versions P.839-0 (03/92) and P.839-1 (83/97) are not available.
    """
    # Dispatcher class: holds an instance of the concrete implementation
    # of the requested version of the ITU-R P.839 recommendation.
    def __init__(self, version=4):
        implementations = {4: _ITU839_4_, 3: _ITU839_3_, 2: _ITU839_2_}
        cls = implementations.get(version)
        if cls is None:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.839 model."
            )
        self.instance = cls()
        self._zero_isoterm_data = {}

    @property
    def __version__(self):
        return self.instance.__version__

    def rain_height(self, lat, lon):
        """Delegate the rain-height computation to the active version."""
        return self.instance.rain_height(lat, lon)

    def isoterm_0(self, lat, lon):
        """Delegate the 0C-isotherm computation to the active version."""
        return self.instance.isoterm_0(lat, lon)
class _ITU839_4_():
    """Implementation of ITU-R P.839-4 (09/2013)."""

    def __init__(self):
        self.__version__ = 4
        self.year = 2013
        self.month = 9
        self.link = 'https://www.itu.int/rec/R-REC-P.839/' +\
            'recommendation.asp?lang=en&parent=R-REC-P.839-4-201309-I'
        # Lazily-built interpolator over the digital maps of the mean
        # annual 0C isotherm height.
        self._zero_isoterm_data = {}

    def isoterm_0(self, lat, lon):
        """Mean annual 0C isotherm height (km) at the given points."""
        if not self._zero_isoterm_data:
            self._zero_isoterm_data = load_data_interpolator(
                '839/v4_esalat.npz', '839/v4_esalon.npz',
                '839/v4_esa0height.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._zero_isoterm_data(points).reshape(lat.shape)

    def rain_height(self, lat_d, lon_d):
        """Mean annual rain height: h_r = h_0 + 0.36 (km)."""
        return self.isoterm_0(lat_d, lon_d) + 0.36
class _ITU839_3_():
    """Implementation of ITU-R P.839-3 (02/01)."""

    def __init__(self):
        self.__version__ = 3
        self.year = 2001
        self.month = 2
        self.link = 'https://www.itu.int/rec/R-REC-P.839-3-200102-S/en'
        # Lazily-built interpolator over the digital maps of the mean
        # annual 0C isotherm height.
        self._zero_isoterm_data = {}

    def isoterm_0(self, lat, lon):
        """Mean annual 0C isotherm height (km) at the given points."""
        if not self._zero_isoterm_data:
            self._zero_isoterm_data = load_data_interpolator(
                '839/v3_esalat.npz', '839/v3_esalon.npz',
                '839/v3_esa0height.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._zero_isoterm_data(points).reshape(lat.shape)

    def rain_height(self, lat_d, lon_d):
        """Mean annual rain height: h_r = h_0 + 0.36 (km)."""
        return self.isoterm_0(lat_d, lon_d) + 0.36
class _ITU839_2_():
    """Implementation of ITU-R P.839-2 (10/99)."""

    def __init__(self):
        self.__version__ = 2
        self.year = 1999
        self.month = 10
        self.link = 'https://www.itu.int/rec/R-REC-P.839-2-199910-S/en'

    @staticmethod
    def isoterm_0(lat_d, lon_d):
        """Mean 0C isotherm height (km), latitude-only approximation.

        Piecewise definition (lat in degrees):
        * 5 - 0.075 (lat - 23)   for lat > 23
        * 5                      for 0 <= lat <= 23
        * 5                      for -21 <= lat < 0
        * 5 + 0.1 (lat + 21)     for -71 <= lat < -21
        * 0                      for lat < -71
        """
        # Bug fix: the previous conditions used strict inequalities
        # (0 < lat < 23, -21 < lat < 0, ...), so the boundary latitudes
        # 0, 23 and -21 fell through every branch and returned 0 km
        # (e.g. h0 = 0 at the equator). The intervals are now inclusive,
        # which makes the piecewise function continuous.
        h0 = np.where(
            lat_d > 23, 5 - 0.075 * (lat_d - 23),
            np.where(
                lat_d >= 0, 5.0,
                np.where(
                    lat_d >= -21, 5.0,
                    np.where(lat_d >= -71, 5 + 0.1 * (lat_d + 21), 0.0))))
        return h0

    def rain_height(self, lat_d, lon_d):
        """
        Mean rain height.

        For areas of the world where no specific information is available,
        the mean rain height is approximated by the mean 0C isotherm
        height; for North America and for Europe west of 60 deg E longitude
        the mean rain height is approximated by

        ..math:
            h_r = 3.2 - 0.075 (\\lambda - 35) \\qquad for \\qquad
            35 \\le \\lambda \\le 70 (km)
        """
        h0 = self.isoterm_0(lat_d, lon_d)
        # NOTE(review): lon_d is normalized to [0, 360) by the callers in
        # this module, so "Europe west of 60 deg E" with negative
        # longitudes (e.g. 350 deg == -10 deg) is NOT matched by
        # ``lon_d < 60`` -- confirm against the recommendation text.
        return np.where(np.logical_and(np.logical_and(35 < lat_d, lat_d < 70),
                                       lon_d < 60),
                        3.2 - 0.075 * (lat_d - 35), h0)
# Module-level singleton holding the active ITU-R P.839 model version;
# rebound by ``change_version`` and queried by the public functions below.
__model = __ITU839__()
def change_version(new_version):
    """Set the active version of the ITU-R P.839 recommendation.

    Subsequent calls to the public functions of this module delegate to
    the newly selected model version.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 4: Activates recommendation ITU-R P.839-4 (09/2013) (Current version)
        * 3: Activates recommendation ITU-R P.839-3 (02/01) (Superseded)
        * 2: Activates recommendation ITU-R P.839-2 (10/99) (Superseded)
    """
    # Rebind the module-level singleton so every caller sees the change.
    global __model
    __model = __ITU839__(new_version)
def get_version():
    """Return the version of the ITU-R P.839 recommendation in use.

    Returns
    -------
    version: int
        Version currently being used.
    """
    return __model.__version__
def isoterm_0(lat, lon):
    """
    Estimate the zero degree Celsius isoterm height.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points

    Returns
    -------
    h0: numpy.ndarray
        Zero degree Celsius isoterm height (km)

    References
    ----------
    [1] Rain height model for prediction methods:
    https://www.itu.int/rec/R-REC-P.839/en
    """
    output_type = get_input_type(lat)
    lat, lon = prepare_input_array(lat), prepare_input_array(lon)
    # Digital maps expect longitudes in [0, 360).
    lon = np.mod(lon, 360)
    h0 = __model.isoterm_0(lat, lon)
    return prepare_output_array(h0, output_type) * u.km
def rain_height(lat, lon):
    """
    Estimate the annual mean rain height for propagation prediction.

    The mean annual rain height above mean sea level, :math:`h_R`,
    may be obtained from the 0 degree C isotherm as:

    .. math::
        h_R = h_0 + 0.36 \\qquad \\text{km}

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points

    Returns
    -------
    hR: numpy.ndarray
        Annual mean rain height (km)

    References
    ----------
    [1] Rain height model for prediction methods:
    https://www.itu.int/rec/R-REC-P.839/en
    """
    output_type = get_input_type(lat)
    lat, lon = prepare_input_array(lat), prepare_input_array(lon)
    # Digital maps expect longitudes in [0, 360).
    lon = np.mod(lon, 360)
    hR = __model.rain_height(lat, lon)
    return prepare_output_array(hR, output_type) * u.km
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu840.py | itur/models/itu840.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from astropy import units as u
from itur.models.itu1144 import bilinear_2D_interpolator
from itur.utils import (dataset_dir, prepare_input_array, prepare_output_array,
prepare_quantity, load_data_interpolator,
get_input_type)
def __fcn_columnar_content_reduced_liquid__(Lred, lat, lon, p):
    """Evaluate Lred at exceedance probability ``p``, interpolating in
    log(p) between the probability levels tabulated in the digital maps.

    Parameters
    ----------
    Lred : callable
        Function ``Lred(lat, lon, p)`` returning the columnar content for
        one of the tabulated exceedance probabilities.
    lat, lon : number or numpy.ndarray
        Coordinates of the points of interest.
    p : number
        Exceedance probability (% of the average year).

    Returns
    -------
    number or numpy.ndarray
        Columnar content of reduced cloud liquid water at probability p.
    """
    available_p = np.array(
        [0.1, 0.2, 0.3, 0.5, 1.0, 2.0, 3.0, 5.0, 10.0, 20.0, 30.0, 50.0,
         60.0, 70.0, 80.0, 90.0, 95.0, 99.0])

    # Exact hit on a tabulated level: no interpolation needed.
    if p in available_p:
        return Lred(lat, lon, p)

    # Bracket p between two tabulated levels. Bug fix: the upper clip
    # bound must be len(available_p) - 2 so that ``idx + 1`` stays a
    # valid index; the previous bound of len(available_p) raised
    # IndexError for p > 99. Out-of-range p values are log-extrapolated
    # from the nearest interval.
    idx = available_p.searchsorted(p, side='right') - 1
    idx = np.clip(idx, 0, len(available_p) - 2)
    p_below = available_p[idx]
    p_above = available_p[idx + 1]

    # Linear interpolation in log(p) between the bracketing levels.
    Lred_b = Lred(lat, lon, p_below)
    Lred_a = Lred(lat, lon, p_above)
    return Lred_b + (Lred_a - Lred_b) * (np.log(p) - np.log(p_below)) \
        / (np.log(p_above) - np.log(p_below))
class __ITU840__():
    """Attenuation due to clouds and fog: This Recommendation provides methods
    to predict the attenuation due to clouds and fog on Earth-space paths.
    Available versions include:
    * P.840-4 (10/09) (Superseded)
    * P.840-5 (02/12) (Superseded)
    * P.840-6 (09/13) (Superseded)
    * P.840-7 (12/17) (Superseded)
    * P.840-8 (08/19) (Current version)
    Non-available versions include:
    * P.840-1 (08/94) (Superseded) - Tentative similar to P.840-4
    * P.840-2 (08/97) (Superseded) - Tentative similar to P.840-4
    * P.840-3 (10/99) (Superseded) - Tentative similar to P.840-4
    """
    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.840 recommendation.
    # NOTE(review): the default is version 7 even though the docstring lists
    # P.840-8 as the current version -- confirm the default is intentional.
    def __init__(self, version=7):
        if version == 8:
            self.instance = _ITU840_8_()
        elif version == 7:
            self.instance = _ITU840_7_()
        elif version == 6:
            self.instance = _ITU840_6_()
        elif version == 5:
            self.instance = _ITU840_5_()
        elif version == 4:
            self.instance = _ITU840_4_()
        else:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.840 model."
            )
    @property
    def __version__(self):
        # Version number of the concrete implementation in use.
        return self.instance.__version__
    def specific_attenuation_coefficients(self, f, T):
        # Abstract method to compute the specific attenuation coefficients.
        # Vectorized so that array-valued frequencies/temperatures work.
        fcn = np.vectorize(self.instance.specific_attenuation_coefficients)
        return fcn(f, T)
    def columnar_content_reduced_liquid(self, lat, lon, p):
        # Abstract method to compute the columnar content of reduced liquid.
        # ``excluded=[0, 1, 2]`` keeps the Lred callable and the lat/lon
        # arrays intact and vectorizes only over ``p``; the per-call array
        # results are re-assembled into one ndarray via ``tolist()``.
        fcn = np.vectorize(__fcn_columnar_content_reduced_liquid__,
                           excluded=[0, 1, 2], otypes=[np.ndarray])
        return np.array(fcn(self.instance.Lred, lat, lon, p).tolist())
    def cloud_attenuation(self, lat, lon, el, f, p, Lred=None):
        # Abstract method to compute the cloud attenuation.
        # Kl is evaluated at T = 0 C, i.e. liquid water reduced to 273.15 K.
        Kl = self.specific_attenuation_coefficients(f, T=0)
        if Lred is None:
            Lred = self.columnar_content_reduced_liquid(lat, lon, p)
        # Slant-path attenuation: A = Lred * Kl / sin(el)  (el in degrees).
        A = Lred * Kl / np.sin(np.deg2rad(el))
        return A
    def lognormal_approximation_coefficient(self, lat, lon):
        # Abstract method to compute the lognormal approximation coefficients
        return self.instance.lognormal_approximation_coefficient(lat, lon)
class _ITU840_8_():
    """Implementation of ITU-R P.840-8 (08/19)."""

    def __init__(self):
        self.__version__ = 8
        self.year = 2019
        self.month = 8
        self.link = 'https://www.itu.int/rec/R-REC-P.840-8-201908-I/en'
        # Lazily-built interpolators over the digital maps. The maps
        # shipped with P.840-8 are identical to those of P.840-7, so the
        # v7 dataset files are reused.
        self._Lred = {}
        self._M = None
        self._sigma = None
        self._Pclw = None

    def Lred(self, lat, lon, p):
        """Columnar content of reduced liquid water exceeded for p% of
        the year (one interpolator per tabulated probability level)."""
        if not self._Lred:
            levels = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                      50, 60, 70, 80, 90, 95, 99]
            path_template = os.path.join(dataset_dir, '840/v7_lred_%s.npz')
            for level in levels:
                data_file = path_template % (str(level).replace('.', ''))
                self._Lred[float(level)] = load_data_interpolator(
                    '840/v7_lat.npz', '840/v7_lon.npz', data_file,
                    bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Lred[float(p)](points).reshape(lat.shape)

    def M(self, lat, lon):
        """Mean of the log-normal approximation to the Lred statistics."""
        if not self._M:
            self._M = load_data_interpolator(
                '840/v7_lat.npz', '840/v7_lon.npz',
                '840/v7_m.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._M(points).reshape(lat.shape)

    def sigma(self, lat, lon):
        """Standard deviation of the log-normal approximation."""
        if not self._sigma:
            self._sigma = load_data_interpolator(
                '840/v7_lat.npz', '840/v7_lon.npz',
                '840/v7_sigma.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._sigma(points).reshape(lat.shape)

    def Pclw(self, lat, lon):
        """Probability of non-zero cloud liquid water."""
        if not self._Pclw:
            self._Pclw = load_data_interpolator(
                '840/v7_lat.npz', '840/v7_lon.npz',
                '840/v7_pclw.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Pclw(points).reshape(lat.shape)

    @staticmethod
    def specific_attenuation_coefficients(f, T):
        """Delegate to the (unchanged) P.840-6 double-Debye model."""
        return _ITU840_6_.specific_attenuation_coefficients(f, T)

    def lognormal_approximation_coefficient(self, lat, lon):
        """Return (mean, std-dev, Pclw) of the log-normal approximation."""
        return self.M(lat, lon), self.sigma(lat, lon), self.Pclw(lat, lon)
class _ITU840_7_():
    """Implementation of ITU-R P.840-7 (12/17)."""

    def __init__(self):
        self.__version__ = 7
        self.year = 2017
        self.month = 12
        self.link = 'https://www.itu.int/rec/R-REC-P.840-7-201712-I/en'
        # Lazily-built interpolators over the digital maps.
        self._Lred = {}
        self._M = {}
        self._sigma = {}
        self._Pclw = {}

    def Lred(self, lat, lon, p):
        """Columnar content of reduced liquid water exceeded for p% of
        the year (one interpolator per tabulated probability level)."""
        if not self._Lred:
            levels = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                      50, 60, 70, 80, 90, 95, 99]
            path_template = os.path.join(dataset_dir, '840/v7_lred_%s.npz')
            for level in levels:
                data_file = path_template % (str(level).replace('.', ''))
                self._Lred[float(level)] = load_data_interpolator(
                    '840/v7_lat.npz', '840/v7_lon.npz', data_file,
                    bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Lred[float(p)](points).reshape(lat.shape)

    def M(self, lat, lon):
        """Mean of the log-normal approximation to the Lred statistics."""
        if not self._M:
            self._M = load_data_interpolator(
                '840/v7_lat.npz', '840/v7_lon.npz',
                '840/v7_m.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._M(points).reshape(lat.shape)

    def sigma(self, lat, lon):
        """Standard deviation of the log-normal approximation."""
        if not self._sigma:
            self._sigma = load_data_interpolator(
                '840/v7_lat.npz', '840/v7_lon.npz',
                '840/v7_sigma.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._sigma(points).reshape(lat.shape)

    def Pclw(self, lat, lon):
        """Probability of non-zero cloud liquid water."""
        if not self._Pclw:
            self._Pclw = load_data_interpolator(
                '840/v7_lat.npz', '840/v7_lon.npz',
                '840/v7_pclw.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Pclw(points).reshape(lat.shape)

    @staticmethod
    def specific_attenuation_coefficients(f, T):
        """Delegate to the (unchanged) P.840-6 double-Debye model."""
        return _ITU840_6_.specific_attenuation_coefficients(f, T)

    def lognormal_approximation_coefficient(self, lat, lon):
        # TODO: This is the wrong method, need to update.
        return self.M(lat, lon), self.sigma(lat, lon), self.Pclw(lat, lon)
class _ITU840_6_():
    """Implementation of ITU-R P.840-6 (09/13)."""

    def __init__(self):
        self.__version__ = 6
        self.year = 2013
        self.month = 9
        self.link = 'https://www.itu.int/rec/R-REC-P.840-6-201202-I/en'
        # Lazily-built interpolators over the digital maps.
        self._Lred = {}
        self._M = {}
        self._sigma = {}
        self._Pclw = {}

    def Lred(self, lat, lon, p):
        """Columnar content of reduced liquid water exceeded for p% of
        the year (one interpolator per tabulated probability level)."""
        if not self._Lred:
            levels = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                      50, 60, 70, 80, 90, 95, 99]
            path_template = os.path.join(dataset_dir, '840/v6_lred_%s.npz')
            for level in levels:
                data_file = path_template % (str(level).replace('.', ''))
                self._Lred[float(level)] = load_data_interpolator(
                    '840/v6_lat.npz', '840/v6_lon.npz', data_file,
                    bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Lred[float(p)](points).reshape(lat.shape)

    def M(self, lat, lon):
        """Mean of the log-normal approximation to the Lred statistics."""
        if not self._M:
            self._M = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v6_m.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._M(points).reshape(lat.shape)

    def sigma(self, lat, lon):
        """Standard deviation of the log-normal approximation."""
        if not self._sigma:
            self._sigma = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v6_sigma.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._sigma(points).reshape(lat.shape)

    def Pclw(self, lat, lon):
        """Probability of non-zero cloud liquid water."""
        if not self._Pclw:
            self._Pclw = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v6_pclw.npz', bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Pclw(points).reshape(lat.shape)

    @staticmethod
    def specific_attenuation_coefficients(f, T):
        """Cloud specific attenuation coefficient Kl ((dB/km)/(g/m3)).

        Double-Debye model for the dielectric permittivity of water,
        evaluated at frequency ``f`` (GHz) and temperature ``T`` (deg C).
        """
        if np.any(f > 1000):
            raise ValueError('Frequency must be introduced in GHz and the '
                             'maximum range is 1000 GHz')
        T_kelvin = T + 273.15
        theta = 300.0 / T_kelvin  # Eq. 9
        # Dielectric constants of the double-Debye model.
        epsilon0 = 77.66 + 103.3 * (theta - 1)  # Eq. 6
        epsilon1 = 0.0671 * epsilon0  # Eq. 7
        epsilon2 = 3.52  # Eq. 8
        # Principal and secondary relaxation frequencies.
        fp = 20.20 - 146 * (theta - 1) + 316.0 * (theta - 1)**2  # Eq. 10
        fs = 39.8 * fp  # Eq. 11
        # Real and imaginary parts of the dielectric permittivity of water.
        epsilonp = (epsilon0 - epsilon1) / (1 + (f / fp) ** 2) + \
            (epsilon1 - epsilon2) / (1 + (f / fs) ** 2) + epsilon2  # Eq. 5
        epsilonpp = f * (epsilon0 - epsilon1) / (fp * (1 + (f / fp)**2)) + \
            f * (epsilon1 - epsilon2) / (fs * (1 + (f / fs)**2))  # Eq. 4
        eta = (2 + epsilonp) / epsilonpp  # Eq. 3
        return (0.819 * f) / (epsilonpp * (1 + eta**2))  # Eq. 2

    def lognormal_approximation_coefficient(self, lat, lon):
        """Return (mean, std-dev, Pclw) of the log-normal approximation."""
        return self.M(lat, lon), self.sigma(lat, lon), self.Pclw(lat, lon)
class _ITU840_5_():
    """Implementation of ITU-R P.840-5 (02/12)."""

    def __init__(self):
        self.__version__ = 5
        self.year = 2012
        self.month = 2
        self.link = 'https://www.itu.int/rec/R-REC-P.840-5-201202-S/en'
        # Lazily-built interpolators over the digital maps.
        self._Lred = {}
        self._M = {}
        self._sigma = {}
        self._Pclw = {}

    def Lred(self, lat, lon, p):
        """Columnar content of reduced liquid water exceeded for p% of
        the year (one interpolator per tabulated probability level)."""
        if not self._Lred:
            levels = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                      50, 60, 70, 80, 90, 95, 99]
            path_template = os.path.join(dataset_dir, '840/v4_esawred_%s.npz')
            for level in levels:
                data_file = path_template % (str(level).replace('.', ''))
                self._Lred[float(level)] = load_data_interpolator(
                    '840/v4_lat.npz', '840/v4_lon.npz', data_file,
                    bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Lred[float(p)](points).reshape(lat.shape)

    def M(self, lat, lon):
        """Mean of the log-normal approximation to the Lred statistics."""
        # NOTE(review): the lat/lon grid files are the v6 ones while the
        # data maps are v4 -- verify this pairing is intentional.
        if not self._M:
            self._M = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v4_wred_lognormal_mean.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._M(points).reshape(lat.shape)

    def sigma(self, lat, lon):
        """Standard deviation of the log-normal approximation."""
        if not self._sigma:
            self._sigma = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v4_wred_lognormal_stdev.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._sigma(points).reshape(lat.shape)

    def Pclw(self, lat, lon):
        """Probability of non-zero cloud liquid water."""
        if not self._Pclw:
            self._Pclw = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v4_wred_lognormal_pclw.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Pclw(points).reshape(lat.shape)

    @staticmethod
    def specific_attenuation_coefficients(f, T):
        """Delegate to the P.840-4 double-Debye model."""
        return _ITU840_4_.specific_attenuation_coefficients(f, T)

    def lognormal_approximation_coefficient(self, lat, lon):
        """Return (mean, std-dev, Pclw) of the log-normal approximation."""
        return self.M(lat, lon), self.sigma(lat, lon), self.Pclw(lat, lon)
class _ITU840_4_():
    """Implementation of ITU-R P.840-4 (10/09)."""

    def __init__(self):
        self.__version__ = 4
        # NOTE(review): P.840-4 dates from 10/09, but the year/month/link
        # below point to the P.840-6 revision -- confirm these metadata.
        self.year = 2013
        self.month = 9
        self.link = 'https://www.itu.int/rec/R-REC-P.840-6-201202-I/en'
        # Lazily-built interpolators over the digital maps.
        self._Lred = {}
        self._M = {}
        self._sigma = {}
        self._Pclw = {}

    def Lred(self, lat, lon, p):
        """Columnar content of reduced liquid water exceeded for p% of
        the year (one interpolator per tabulated probability level)."""
        if not self._Lred:
            levels = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30,
                      50, 60, 70, 80, 90, 95, 99]
            path_template = os.path.join(dataset_dir, '840/v4_esawred_%s.npz')
            for level in levels:
                data_file = path_template % (str(level).replace('.', ''))
                self._Lred[float(level)] = load_data_interpolator(
                    '840/v4_lat.npz', '840/v4_lon.npz', data_file,
                    bilinear_2D_interpolator, flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Lred[float(p)](points).reshape(lat.shape)

    def M(self, lat, lon):
        """Mean of the log-normal approximation to the Lred statistics."""
        # NOTE(review): the lat/lon grid files are the v6 ones while the
        # data maps are v4 -- verify this pairing is intentional.
        if not self._M:
            self._M = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v4_wred_lognormal_mean.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._M(points).reshape(lat.shape)

    def sigma(self, lat, lon):
        """Standard deviation of the log-normal approximation."""
        if not self._sigma:
            self._sigma = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v4_wred_lognormal_stdev.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._sigma(points).reshape(lat.shape)

    def Pclw(self, lat, lon):
        """Probability of non-zero cloud liquid water."""
        if not self._Pclw:
            self._Pclw = load_data_interpolator(
                '840/v6_lat.npz', '840/v6_lon.npz',
                '840/v4_wred_lognormal_pclw.npz', bilinear_2D_interpolator,
                flip_ud=False)
        points = np.column_stack((lat.ravel(), lon.ravel()))
        return self._Pclw(points).reshape(lat.shape)

    @staticmethod
    def specific_attenuation_coefficients(f, T):
        """Cloud specific attenuation coefficient Kl ((dB/km)/(g/m3)).

        Double-Debye model for the dielectric permittivity of water
        (pre-P.840-6 coefficients), evaluated at frequency ``f`` (GHz)
        and temperature ``T`` (deg C).
        """
        if np.any(f > 1000):
            raise ValueError('Frequency must be introduced in GHz and the '
                             'maximum range is 1000 GHz')
        T_kelvin = T + 273.15
        theta = 300.0 / T_kelvin  # Eq. 9
        # Dielectric constants of the double-Debye model.
        epsilon0 = 77.66 + 103.3 * (theta - 1)  # Eq. 6
        epsilon1 = 5.48  # Eq. 7
        epsilon2 = 3.51  # Eq. 8
        # Principal and secondary relaxation frequencies.
        fp = 20.09 - 142 * (theta - 1) + 294.0 * (theta - 1)**2  # Eq. 10
        fs = 590 - 1500 * (theta - 1)  # Eq. 11
        # Real and imaginary parts of the dielectric permittivity of water.
        epsilonp = (epsilon0 - epsilon1) / (1 + (f / fp) ** 2) + \
            (epsilon1 - epsilon2) / (1 + (f / fs) ** 2) + epsilon2  # Eq. 5
        epsilonpp = f * (epsilon0 - epsilon1) / (fp * (1 + (f / fp)**2)) + \
            f * (epsilon1 - epsilon2) / (fs * (1 + (f / fs)**2))  # Eq. 4
        eta = (2 + epsilonp) / epsilonpp  # Eq. 3
        return (0.819 * f) / (epsilonpp * (1 + eta**2))  # Eq. 2

    def lognormal_approximation_coefficient(self, lat, lon):
        """Return (mean, std-dev, Pclw) of the log-normal approximation."""
        return self.M(lat, lon), self.sigma(lat, lon), self.Pclw(lat, lon)
# Module-level singleton holding the active ITU-R P.840 model version;
# rebound by ``change_version`` and queried by the public functions below.
__model = __ITU840__()
def change_version(new_version):
    """Set the active version of the ITU-R P.840 recommendation.

    Subsequent calls to the public functions of this module delegate to
    the newly selected model version.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 8: Activates recommendation ITU-R P.840-8 (08/19) (Current version)
        * 7: Activates recommendation ITU-R P.840-7 (12/17) (Superseded)
        * 6: Activates recommendation ITU-R P.840-6 (09/13) (Superseded)
        * 5: Activates recommendation ITU-R P.840-5 (02/12) (Superseded)
        * 4: Activates recommendation ITU-R P.840-4 (10/09) (Superseded)
    """
    # Rebind the module-level singleton so every caller sees the change.
    global __model
    __model = __ITU840__(new_version)
def get_version():
    """Return the version of the ITU-R P.840 recommendation in use.

    Returns
    -------
    version: int
        Version currently being used.
    """
    return __model.__version__
def specific_attenuation_coefficients(f, T):
    """
    Compute the specific attenuation coefficient for cloud attenuation.

    Based on Rayleigh scattering with a double-Debye model for the
    dielectric permittivity of water; valid for frequencies up to
    1000 GHz.

    Parameters
    ----------
    f : number
        Frequency (GHz)
    T : number
        Temperature (degrees C)

    Returns
    -------
    Kl: numpy.ndarray
        Specific attenuation coefficient (dB/km)

    References
    ----------
    [1] Attenuation due to clouds and fog:
    https://www.itu.int/rec/R-REC-P.840/en
    """
    frequency = prepare_quantity(f, u.GHz, 'Frequency')
    temperature = prepare_quantity(T, u.deg_C, 'Temperature')
    return __model.specific_attenuation_coefficients(frequency, temperature)
def columnar_content_reduced_liquid(lat, lon, p):
    """
    Compute the total columnar content of reduced cloud liquid water.

    Returns Lred (kg/m2), the total columnar content of reduced cloud
    liquid water exceeded for p% of the average year.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    p : number
        Percentage of time exceeded for p% of the average year

    Returns
    -------
    Lred: numpy.ndarray
        Total columnar content of reduced cloud liquid water, Lred (kg/m2),
        exceeded for p% of the average year

    References
    ----------
    [1] Attenuation due to clouds and fog:
    https://www.itu.int/rec/R-REC-P.840/en
    """
    output_type = get_input_type(lat)
    lat, lon = prepare_input_array(lat), prepare_input_array(lon)
    # Digital maps expect longitudes in [0, 360).
    lon = np.mod(lon, 360)
    Lred = __model.columnar_content_reduced_liquid(lat, lon, p)
    return prepare_output_array(Lred, output_type) * u.kg / u.m**2
def cloud_attenuation(lat, lon, el, f, p, Lred=None):
    """
    Compute the cloud attenuation in a slant path.

    A method to estimate the attenuation due to clouds along slant paths for
    a given probability. If local measured data of the total columnar content
    of cloud liquid water reduced to a temperature of 273.15 K, Lred, is
    available from other sources, (e.g., from ground radiometric measurements,
    Earth observation products, or meteorological numerical products), the
    value should be used directly.

    The value of the cloud attenuation is computed as:

    .. math::
        A=\\frac{L_{red}(\\text{lat}, \\text{lon}, p, T) \\cdot K_l(f, T)}{\\sin(\\text{el})}

    where:

    * :math:`L_{red}` : total columnar content of liquid water reduced to a
      temperature of 273.15 K (kg/m2);
    * :math:`K_l` : specific attenuation coefficient ((dB/km)/(g/m3));
    * :math:`el` : path elevation angle (deg).
    * :math:`f` : frequency (GHz).
    * :math:`p` : Percentage of time exceeded for p% of the average year (%).
    * :math:`T` : temperature (K). Equal to 273.15 K.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    el : number, sequence, or numpy.ndarray
        Elevation angle of the receiver points (deg)
    f : number
        Frequency (GHz)
    p : number
        Percentage of time exceeded for p% of the average year
    Lred: number, optional
        Total columnar contents of reduced cloud liquid water (kg/m2).
        If None, it is read from the digital maps of the recommendation.

    Returns
    -------
    A: numpy.ndarray
        Cloud attenuation, A (dB), exceeded for p% of the average year

    References
    ----------
    [1] Attenuation due to clouds and fog:
    https://www.itu.int/rec/R-REC-P.840/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    lon = prepare_input_array(lon)
    lon = np.mod(lon, 360)
    el = prepare_quantity(el, u.deg, 'Elevation angle')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    Lred = prepare_quantity(
        Lred, u.kg / u.m**2,
        'Total columnar contents of reduced cloud liquid water.')
    val = __model.cloud_attenuation(lat, lon, el, f, p, Lred)
    # The ITU models can produce (non-physical) negative attenuation
    # values for certain inputs; clip them to zero. np.maximum is used
    # instead of the previous boolean-mask assignment so that scalar/0-d
    # results are handled too and the array returned by the model is not
    # mutated in place.
    val = np.maximum(val, 0)
    return prepare_output_array(val, type_output) * u.dB
def lognormal_approximation_coefficient(lat, lon):
    """
    Log-normal approximation coefficients of the cloud liquid water content.

    The annual statistics of the total columnar content of reduced cloud
    liquid water can be approximated by a log-normal distribution. This
    function returns the mean, :math:`m`, the standard deviation,
    :math:`\\sigma`, and the probability of non-zero reduced total columnar
    content of cloud liquid water, :math:`Pclw`, of that distribution.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points

    Returns
    -------
    m: numpy.ndarray
        Mean of the lognormal distribution
    sigma: numpy.ndarray
        Standard deviation of the lognormal distribution
    Pclw: numpy.ndarray
        Probability of cloud liquid water of the lognormal distribution

    References
    ----------
    [1] Attenuation due to clouds and fog:
    https://www.itu.int/rec/R-REC-P.840/en
    """
    output_type = get_input_type(lat)
    lat, lon = prepare_input_array(lat), prepare_input_array(lon)
    # Digital maps expect longitudes in [0, 360).
    lon = np.mod(lon, 360)
    m, sigma, Pclw = __model.lognormal_approximation_coefficient(lat, lon)
    adim = u.dimensionless_unscaled
    return (prepare_output_array(m, output_type) * adim,
            prepare_output_array(sigma, output_type) * adim,
            prepare_output_array(Pclw, output_type) * adim)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu1510.py | itur/models/itu1510.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from astropy import units as u
from itur.models.itu1144 import (bilinear_2D_interpolator,
bicubic_2D_interpolator)
from itur.utils import (prepare_input_array, prepare_output_array,
load_data_interpolator, get_input_type)
class __ITU1510__():
    """Annual mean surface temperature.

    Available versions include:
    * P.1510-1 (06/17) (Current version)
    * P.1510-0 (02/01) (Superseded)
    """
    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.1510 recommendation.
    def __init__(self, version=1):
        if version == 1:
            self.instance = _ITU1510_1_()
        elif version == 0:
            self.instance = _ITU1510_0_()
        else:
            raise ValueError('Version ' + str(version) + ' is not implemented'
                             ' for the ITU-R P.1510 model.')
    @property
    def __version__(self):
        # Version number of the concrete implementation in use.
        return self.instance.__version__
    def surface_mean_temperature(self, lat, lon):
        """
        Method to compute the annual mean surface temperature (K).
        The temperature is computed at 2 m above the surface of the Earth.
        """
        return self.instance.temperature(lat, lon)
    def surface_month_mean_temperature(self, lat, lon, m):
        # Abstract method to compute the monthly surface mean temperature.
        # ``excluded=[0, 1]`` keeps the lat/lon arrays intact and vectorizes
        # only over the month index ``m``; the per-call array results are
        # re-assembled into a single ndarray via ``tolist()``.
        fcn = np.vectorize(self.instance.surface_month_mean_temperature,
                           excluded=[0, 1], otypes=[np.ndarray])
        return np.array(fcn(lat, lon, m).tolist())
class _ITU1510_1_():
    """Implementation of ITU-R P.1510-1 (06/17)."""

    def __init__(self):
        self.__version__ = 1
        self.year = 2017
        self.month = 6
        self.link = 'https://www.itu.int/rec/R-REC-P.1510/' +\
            'recommendation.asp?lang=en&parent=R-REC-P.1510-1-201706-I'
        # Month indices for which monthly mean temperature maps exist.
        self.__months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
        # Lazily-built map interpolators (annual and per-month).
        self._temperature = {}
        self._month_temperature = {}

    def temperature(self, lat, lon):
        """Annual mean surface temperature (K) at the given points."""
        if not self._temperature:
            self._temperature = load_data_interpolator(
                '1510/v1_lat.npz', '1510/v1_lon.npz',
                '1510/v1_t_annual.npz', bilinear_2D_interpolator)
        # The v1 maps use longitudes in [-180, 180]; remap without mutating
        # the caller's array (bug fix: the previous in-place assignment
        # ``lon[lon > 180] = ...`` clobbered the argument passed in).
        lon = np.where(lon > 180, lon - 360, lon)
        return self._temperature(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def month_temperature(self, lat, lon, m):
        """Monthly mean surface temperature (K) for month index ``m``."""
        if not self._month_temperature:
            for _m in self.__months:
                self._month_temperature[_m] = load_data_interpolator(
                    "1510/v1_lat.npz",
                    "1510/v1_lon.npz",
                    f"1510/v1_t_month{_m:02d}.npz",
                    bilinear_2D_interpolator,
                )
        # Same non-mutating longitude remapping as in ``temperature``.
        lon = np.where(lon > 180, lon - 360, lon)
        return self._month_temperature[m](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def surface_mean_temperature(self, lat, lon):
        """
        Method to compute the annual mean surface temperature (K) at 2 m
        above the surface of the Earth
        """
        return self.temperature(lat, lon)

    def surface_month_mean_temperature(self, lat, lon, m):
        """Monthly mean surface temperature (K) at 2 m above the surface."""
        return self.month_temperature(lat, lon, m)
class _ITU1510_0_():
    """Implementation of ITU-R P.1510-0 (02/01)."""

    def __init__(self):
        self.__version__ = 0
        self.year = 2001
        self.month = 2
        self.link = 'https://www.itu.int/rec/R-REC-P.1510/' +\
            'recommendation.asp?lang=en&parent=R-REC-P.1510-0-200102-I'
        # Lazily-built interpolator over the annual temperature map.
        self._temperature = {}

    def temperature(self, lat, lon):
        """Annual mean surface temperature (K) at the given points."""
        if not self._temperature:
            self._temperature = load_data_interpolator(
                '1510/v0_lat.npz', '1510/v0_lon.npz',
                '1510/v0_temp.npz', bicubic_2D_interpolator)
        return self._temperature(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def surface_month_mean_temperature(self, lat, lon, m):
        """Monthly maps are only provided from P.1510-1 onwards.

        Raises
        ------
        NotImplementedError
            Always; this version ships no monthly temperature maps.
        """
        # Bug fix (message text): "notimplemented" -> "not implemented"
        # and "recomendation" -> "recommendation".
        raise NotImplementedError(
            f"The monthly mean temperature is not implemented in "
            f"recommendation ITU-R P.1510-{self.__version__}"
        )
# Module-level singleton holding the active ITU-R P.1510 model version;
# rebound by ``change_version`` and queried by the public functions below.
__model = __ITU1510__()
def change_version(new_version):
    """Set the active version of the ITU-R P.1510 recommendation.

    Subsequent calls to the public functions of this module delegate to
    the newly selected model version.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 1: Activates recommendation ITU-R P.1510-1 (06/17) (Current version)
        * 0: Activates recommendation ITU-R P.1510-0 (02/01) (Superseded)
    """
    # Rebind the module-level singleton so every caller sees the change.
    global __model
    __model = __ITU1510__(new_version)
def get_version():
    """Return the version of the ITU-R P.1510 recommendation in use.

    Returns
    -------
    version: int
        Version currently being used.
    """
    return __model.__version__
def surface_mean_temperature(lat, lon):
    """
    Annual mean surface temperature (K) at 2 m above the surface of the Earth.

    Estimates the annual mean surface temperature (K) at 2 m above the
    surface of the Earth using the data maps of ITU-R P.1510.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points

    Returns
    -------
    annual_temperature: numpy.ndarray
        Annual mean surface temperature (K). Same dimensions as lat and lon.

    References
    ----------
    [1] Annual mean surface temperature:
    https://www.itu.int/rec/R-REC-P.1510/en
    """
    # Remember the caller's container type so the output can mirror it.
    output_type = get_input_type(lat)
    lat_values = prepare_input_array(lat)
    # The data maps use longitudes in [0, 360); wrap negative values.
    lon_values = np.mod(prepare_input_array(lon), 360)
    temperature = __model.surface_mean_temperature(lat_values, lon_values)
    return prepare_output_array(temperature, output_type) * u.Kelvin
def surface_month_mean_temperature(lat, lon, m):
    """
    Monthly mean surface temperature (K) at 2 m above the surface of the Earth.

    Estimates the monthly mean surface temperature (K) at 2 m above the
    surface of the Earth using the data maps of ITU-R P.1510.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    m : integer
        Index of the month (1=Jan, 2=Feb, 3=Mar, 4=Apr, ...)

    Returns
    -------
    monthly_temperature: numpy.ndarray
        Monthly mean surface temperature (K). Same dimensions as lat and lon.

    References
    ----------
    [1] Annual mean surface temperature:
    https://www.itu.int/rec/R-REC-P.1510/en
    """
    # Remember the caller's container type so the output can mirror it.
    output_type = get_input_type(lat)
    lat_values = prepare_input_array(lat)
    # The data maps use longitudes in [0, 360); wrap negative values.
    lon_values = np.mod(prepare_input_array(lon), 360)
    temperature = __model.surface_month_mean_temperature(lat_values,
                                                         lon_values, m)
    return prepare_output_array(temperature, output_type) * u.Kelvin
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu1853.py | itur/models/itu1853.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import scipy.stats as stats
from scipy.signal import lfilter
from itur.models.itu618 import rain_attenuation, scintillation_attenuation_sigma
from itur.models.itu676 import gamma0_exact, slant_inclined_path_equivalent_height
from itur.models.itu840 import (
lognormal_approximation_coefficient,
specific_attenuation_coefficients,
)
from itur.models.itu835 import standard_pressure, standard_water_vapour_density
from itur.models.itu836 import total_water_vapour_content
from itur.models.itu837 import rainfall_probability
from itur.models.itu676 import zenit_water_vapour_attenuation
from itur.models.itu1510 import surface_mean_temperature
from itur.models.itu1511 import topographic_altitude
from itur.utils import prepare_quantity
from astropy import units as u
class __ITU1853:
    """Tropospheric attenuation time series synthesis
    Available versions include:
    * P.1853-0 (10/09) (Superseded)
    * P.1853-1 (02/12) (Current version)
    """
    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.1853 recommendation.
    def __init__(self, version=1):
        # Select the concrete implementation for the requested version.
        if version == 1:
            self.instance = _ITU1853_1()
        elif version == 0:
            self.instance = _ITU1853_0()
        else:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.1853 model."
            )
    @staticmethod
    def set_seed(seed):
        """Seed numpy's global RNG so the syntheses are reproducible."""
        np.random.seed(seed)
    @property
    def __version__(self):
        # Version number of the currently selected implementation.
        return self.instance.__version__
    def rain_attenuation_synthesis(self, lat, lon, f, el, hs, Ns, Ts=1, tau=45, n=None):
        """Delegate rain attenuation time series synthesis to the active version."""
        return self.instance.rain_attenuation_synthesis(
            lat, lon, f, el, hs, Ns, Ts=Ts, tau=tau, n=n
        )
    def total_attenuation_synthesis(
        self,
        lat,
        lon,
        f,
        el,
        p,
        D,
        Ns,
        Ts=1,
        tau=45,
        hs=None,
        eta=0.65,
        rho=None,
        H=None,
        P=None,
        hL=1000,
        return_contributions=False,
    ):
        """Delegate total attenuation time series synthesis to the active version."""
        # All arguments after Ns are forwarded by keyword so the differing
        # parameter order of the implementations does not matter.
        return self.instance.total_attenuation_synthesis(
            lat,
            lon,
            f,
            el,
            p,
            D,
            Ns,
            Ts=Ts,
            tau=tau,
            hs=hs,
            eta=eta,
            rho=rho,
            H=H,
            P=P,
            hL=hL,
            return_contributions=return_contributions,
        )
    def scintillation_attenuation_synthesis(self, Ns, f_c=0.1, Ts=1):
        """Delegate scintillation time series synthesis to the active version."""
        return self.instance.scintillation_attenuation_synthesis(Ns, f_c=f_c, Ts=Ts)
    def cloud_liquid_water_synthesis(self, lat, lon, Ns, Ts=1, n=None):
        """Delegate cloud liquid water time series synthesis to the active version."""
        return self.instance.cloud_liquid_water_synthesis(lat, lon, Ns, Ts=Ts, n=n)
    def integrated_water_vapour_synthesis(self, lat, lon, Ns, Ts=1, n=None):
        """Delegate water vapour time series synthesis to the active version."""
        return self.instance.integrated_water_vapour_synthesis(lat, lon, Ns, Ts=Ts, n=n)
class _ITU1853_1:
    """Implementation of Recommendation ITU-R P.1853-1 (02/12)."""
    def __init__(self):
        # Metadata identifying the recommendation revision implemented here.
        self.__version__ = 1
        self.year = 2012
        self.month = 2
        self.link = "https://www.itu.int/rec/R-REC-P.1853-1-201202-I/en"
    @staticmethod
    def rain_attenuation_synthesis(lat, lon, f, el, hs, Ns, tau=45, Ts=1, n=None):
        """
        Synthesize a rain attenuation time series (dB).

        For Earth-space paths, the time series synthesis method is valid for
        frequencies between 4 GHz and 55 GHz and elevation angles between
        5 deg and 90 deg.
        """
        # Step A1: Determine Prain (% of time), the probability of rain on the
        # path. Prain can be well approximated as P0(lat, lon)
        P_rain = rainfall_probability(lat, lon).to(u.dimensionless_unscaled).value
        # Step A2: Construct the set of pairs [Pi, Ai] where Pi (% of time) is
        # the probability the attenuation Ai (dB) is exceeded where Pi < P_K
        p_i = np.array([0.01, 0.02, 0.03, 0.05, 0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10])
        Pi = np.array([p for p in p_i if p < P_rain * 100], dtype=float)
        Ai = np.array([0 for p in p_i if p < P_rain * 100], dtype=float)
        for i, p in enumerate(Pi):
            Ai[i] = rain_attenuation(lat, lon, f, el, hs, p, tau=tau).value
        # Step A3: Transform the set of pairs [Pi, Ai] to [Q^{-1}(Pi/P_k),
        # ln(Ai)]
        Q = stats.norm.ppf((Pi / 100))
        lnA = np.log(Ai)
        # Step A4: Determine the variables sigma_lna, m_lna by performing a
        # least-squares fit to lnAi = sigma_lna Q^{-1}(Pi/P_k) + m_lna
        m, sigma = np.linalg.lstsq(np.vstack([np.ones(len(Q)), Q]).T, lnA, rcond=None)[
            0
        ]
        # Step B: Set the low-pass filter parameter
        beta = 2e-4
        # Step C: compute the attenuation offset
        A_offset = np.exp(m + sigma * stats.norm.ppf(P_rain))
        # Step D: Time series synthesis
        # D1: Synthesize a white Gaussian noise time series
        if n is None:
            n = np.random.normal(0, 1, int(Ns * Ts + 2e5))[::Ts]
            discard_samples = True
        else:
            # Caller-supplied noise: the warm-up samples are assumed to have
            # been handled by the caller, so none are discarded below.
            discard_samples = False
        # D2, D3 : Filter the noise time series with a recursive low-pass
        # filter
        rho = np.exp(-beta * Ts)
        X = lfilter([np.sqrt(1 - rho**2)], [1, -rho], n, 0)
        # D4: Compute Y_rain
        Y_rain = np.exp(m + sigma * X)
        # D5: Compute Arain
        A_rain = np.maximum(Y_rain - A_offset, 0)
        # D6: Discard the first 200 000 samples from the synthesized
        if discard_samples:
            A_rain = A_rain[np.ceil(200000 / Ts).astype(int) :]
        return A_rain.flatten()
    @classmethod
    def fftnoise(cls, f):
        """Return colored noise whose spectrum magnitude matches ``f``.

        Random phases are applied to the positive frequencies and mirrored
        (conjugated) onto the negative ones so the inverse FFT is real.
        """
        f = np.array(f, dtype="complex")
        Np = (len(f) - 1) // 2
        phases = np.random.rand(Np) * 2 * np.pi
        phases = np.cos(phases) + 1j * np.sin(phases)
        f[1 : Np + 1] *= phases
        f[-1 : -1 - Np : -1] = np.conj(f[1 : Np + 1])
        return np.fft.ifft(f).real
    @staticmethod
    def scintillation_attenuation_synthesis(Ns, f_c=0.1, Ts=1):
        """
        Synthesize a unit-variance scintillation attenuation time series.

        For Earth-space paths, the time series synthesis method is valid for
        frequencies between 4 GHz and 55 GHz and elevation angles between
        5 deg and 90 deg.
        """
        # Low-pass filter shape: flat below f_c, -8/3 log-log slope above.
        freqs = np.abs(np.fft.fftfreq(2 * int(Ns + 2e5), 1 / Ts))
        H_f = np.where(
            freqs <= f_c, 1, 10 ** ((np.log10(freqs) - np.log10(f_c)) * (-8 / 3))
        )
        H_f = H_f[0 : int(Ns + 2e5)]
        sci = _ITU1853_1.fftnoise(np.fft.fftshift(H_f))
        # Discard the warm-up samples of the synthesized series.
        return sci[200000:].flatten()
    @staticmethod
    def cloud_liquid_water_synthesis(lat, lon, Ns, Ts=1, n=None):
        """Synthesize a cloud liquid water content time series."""
        # Step A: Estimation of m, sigma and Pcwl
        m, sigma, Pcwl = lognormal_approximation_coefficient(lat, lon)
        m = m.value
        sigma = sigma.value
        Pcwl = Pcwl.value / 100
        # Step B: Low pass filter parameters
        beta_1 = 7.17e-4
        beta_2 = 2.01e-5
        gamma_1 = 0.349
        gamma_2 = 0.830
        # Step C: Truncation threshold
        alpha = stats.norm.ppf(1 - Pcwl)
        # Step D: Time series synthesis
        # Step D1: Synthesize a white Gaussian noise time series
        if n is None:
            n = np.random.normal(0, 1, int(Ns * Ts + 5e5))[::Ts]
            discard_samples = True
        else:
            discard_samples = False
        # Step D3: Filter the noise time series, with two recursive low-pass
        # filters
        rho_1 = np.exp(-beta_1 * Ts)
        X_1 = lfilter([np.sqrt(1 - rho_1**2)], [1, -rho_1], n, 0)
        rho_2 = np.exp(-beta_2 * Ts)
        X_2 = lfilter([np.sqrt(1 - rho_2**2)], [1, -rho_2], n, 0)
        # Step D4: Compute Gc(kTs),
        G_c = gamma_1 * X_1 + gamma_2 * X_2
        # Step D5: Compute L(kTs) (dB)
        L = np.where(
            G_c > alpha,
            np.exp(m + sigma * stats.norm.ppf(1 - 1 / Pcwl * stats.norm.sf(G_c))),
            0,
        )
        # D6: Discard the first 500 000 samples from the synthesized
        if discard_samples:
            L = L[np.ceil(500000 / Ts).astype(int) :]
        return L.flatten()
    @staticmethod
    def integrated_water_vapour_coefficients(lat, lon):
        """Fit the Weibull parameters (kappa, lambda) of the IWV distribution."""
        # A Estimation of κ and λ
        ps = np.array([0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30, 50])
        Vi = np.array([total_water_vapour_content(lat, lon, p_i).value for p_i in ps])
        # Linearize the Weibull CDF: ln(-ln(P)) = kappa*ln(V) - kappa*ln(lambda)
        ln_lnPi = np.log(-np.log(ps / 100))
        ln_Vi = np.log(Vi)
        a, b = np.linalg.lstsq(
            np.vstack([ln_Vi, np.ones(len(ln_Vi))]).T, ln_lnPi, rcond=None
        )[0]
        kappa = a
        lambd = np.exp(-b / a)
        return kappa, lambd
    def integrated_water_vapour_synthesis(self, lat, lon, Ns, Ts=1, n=None):
        """Synthesize an integrated water vapour content time series."""
        # A Estimation of κ and λ
        kappa, lambd = self.integrated_water_vapour_coefficients(lat, lon)
        # B Low-pass filter parameter
        beta_V = 3.24e-6
        # Step C: Time series synthesis
        # Step C1: Synthesize a white Gaussian noise time series
        if n is None:
            # NOTE(review): unlike the other synthesis methods the size is not
            # wrapped in int(); a non-integer Ns*Ts would make numpy raise.
            n = np.random.normal(0, 1, (Ns * Ts + 5000000))[::Ts]
            discard_samples = True
        else:
            discard_samples = False
        # Step C3: Filter the noise time series, with two recursive low-pass
        # filters
        rho = np.exp(-beta_V * Ts)
        G_v = lfilter([np.sqrt(1 - rho**2)], [1, -rho], n, 0)
        # Step C4: Compute Compute V(kTs),
        # NOTE(review): the coefficients above were fitted with natural logs,
        # and P.1853 writes V = λ·(ln(1/Q(Gv)))^(1/κ); the use of log10 here
        # looks inconsistent — verify against the recommendation text.
        V = lambd * (-np.log10(stats.norm.sf(G_v))) ** (1 / kappa)
        # Step C5: Discard the first 5 000 000 samples from the synthesized
        if discard_samples:
            V = V[np.ceil(5000000 / Ts).astype(int) :]
        return V.flatten()
    def total_attenuation_synthesis(
        self,
        lat,
        lon,
        f,
        el,
        p,
        D,
        Ns,
        Ts=1,
        hs=None,
        tau=45,
        eta=0.65,
        rho=None,
        H=None,
        P=None,
        hL=1000,
        return_contributions=False,
    ):
        """Synthesize the total tropospheric attenuation time series (dB).

        Combines correlated rain, cloud, water-vapour, oxygen and
        scintillation contributions; the component series are generated at
        1 s resolution and subsampled to Ts at the end.
        """
        # Number of warm-up samples discarded from each component series.
        t_disc = int(5e6)
        # Step A Correlation coefficients:
        C_RC = 1
        C_CV = 0.8
        # Step B Scintillation polynomials
        def a_Fade(p):
            return (
                -0.061 * np.log10(p) ** 3
                + 0.072 * np.log10(p) ** 2
                - 1.71 * np.log10(p)
                + 3
            )
        def a_Enhanc(p):
            return (
                -0.0597 * np.log10(p) ** 3
                - 0.0835 * np.log10(p) ** 2
                - 1.258 * np.log10(p)
                + 2.672
            )
        # Step C1-C3:
        n_R = np.random.normal(0, 1, int((Ns * Ts + t_disc)))
        n_L0 = np.random.normal(0, 1, int((Ns * Ts + t_disc)))
        n_V0 = np.random.normal(0, 1, int((Ns * Ts + t_disc)))
        # Step C4-C5:
        n_L = C_RC * n_R + np.sqrt(1 - C_RC**2) * n_L0
        n_V = C_CV * n_L + np.sqrt(1 - C_CV**2) * n_V0
        # Step C6: Compute the rain attenuation time series
        if hs is None:
            hs = topographic_altitude(lat, lon)
        Ar = self.rain_attenuation_synthesis(
            lat, lon, f, el, hs, Ns, Ts=1, tau=tau, n=n_R
        )
        Ar = Ar[t_disc:]
        # Step C7: Compute the cloud integrated liquid water content time
        # series
        L = self.cloud_liquid_water_synthesis(lat, lon, Ns, Ts=1, n=n_L)
        L = L[t_disc:]
        Ac = L * specific_attenuation_coefficients(f, T=0) / np.sin(np.deg2rad(el))
        Ac = Ac.flatten()
        # Step C9: Identify time stamps where A_R > 0 L > 1
        idx = np.where(np.logical_and(Ar > 0, L > 1))[0]
        idx_no = np.where(np.logical_not(np.logical_and(Ar > 0, L > 1)))[0]
        # Step C10: Discard the previous values of Ac and re-compute them by
        # linear interpolation vs. time starting from the non-discarded cloud
        # attenuations values
        Ac[idx] = np.interp(idx, idx_no, Ac[idx_no])
        # Step C11: Compute the integrated water vapour content time series
        V = self.integrated_water_vapour_synthesis(lat, lon, Ns, Ts=1, n=n_V)
        V = V[t_disc:]
        # Step C12: Convert the integrated water vapour content time series
        # V into water vapour attenuation time series AV(kTs)
        Av = zenit_water_vapour_attenuation(lat, lon, p, f, V_t=V).value
        # Step C13: Compute the mean annual temperature Tm for the location of
        # interest using experimental values if available.
        Tm = surface_mean_temperature(lat, lon).value
        # Step C14: Convert the mean annual temperature Tm into mean annual
        # oxygen attenuation AO following the method recommended in
        # Recommendation ITU-R P.676.
        if P is None:
            P = standard_pressure(hs).value
        if rho is None:
            rho = standard_water_vapour_density(hs).value
        e = Tm * rho / 216.7
        go = gamma0_exact(f, P, rho, Tm).value
        ho, _ = slant_inclined_path_equivalent_height(f, P + e, rho).value
        # Oxygen attenuation is constant over the series.
        Ao = ho * go * np.ones_like(Ar)
        # Step C15: Synthesize unit variance scintillation time series
        sci_0 = self.scintillation_attenuation_synthesis(Ns * Ts, Ts=1)
        # Step C16: Compute the correction coefficient time series Cx(kTs) in
        # order to distinguish between scintillation fades and enhancements:
        Q_sci = 100 * stats.norm.sf(sci_0)
        C_x = np.where(sci_0 > 0, a_Fade(Q_sci) / a_Enhanc(Q_sci), 1)
        # Step C17: Transform the integrated water vapour content time series
        # V(kTs) into the Gamma distributed time series Z(kTs) as follows:
        kappa, lambd = self.integrated_water_vapour_coefficients(lat, lon)
        Z = stats.gamma.ppf(1 - np.exp(-((V / lambd) ** kappa)), 10, 0.1)
        # Step C18: Compute the scintillation standard deviation σ following
        # the method recommended in Recommendation ITU-R P.618.
        sigma = scintillation_attenuation_sigma(
            lat, lon, f, el, p, D, eta, Tm, H, P, hL
        ).value
        # Step C19: Compute the scintillation time series sci:
        As = np.where(
            Ar > 1, sigma * sci_0 * C_x * Z * Ar ** (5 / 12), sigma * sci_0 * C_x * Z
        )
        # Step C20: Compute total tropospheric attenuation time series A(kTs)
        # as follows:
        A = Ar + Ac + Av + Ao + As
        if return_contributions:
            # Gaseous (oxygen + vapour), cloud, rain, scintillation, total.
            return (Ao + Av)[::Ts], Ac[::Ts], Ar[::Ts], As[::Ts], A[::Ts]
        else:
            return A[::Ts]
class _ITU1853_0:
    """Implementation of Recommendation ITU-R P.1853-0 (10/09).

    This revision only covers the rain and scintillation syntheses (the
    implemented procedures match version 1 and are reused from it); the
    remaining syntheses raise ``NotImplementedError``.
    """

    def __init__(self):
        # Metadata identifying the recommendation revision implemented here.
        self.__version__ = 0
        self.year = 2009
        self.month = 10
        self.link = "https://www.itu.int/rec/R-REC-P.1853-0-200910-I/en"

    @staticmethod
    def rain_attenuation_synthesis(*args, **kwargs):
        # Same procedure in both revisions; delegate to the version-1 code.
        return _ITU1853_1.rain_attenuation_synthesis(*args, **kwargs)

    @staticmethod
    def scintillation_attenuation_synthesis(*args, **kwargs):
        # Same procedure in both revisions; delegate to the version-1 code.
        return _ITU1853_1.scintillation_attenuation_synthesis(*args, **kwargs)

    @staticmethod
    def cloud_liquid_water_synthesis(*args, **kwargs):
        message = (
            "Recommendation ITU-R P.1853 does not specify a method to compute "
            "time series for the cloud liquid water content."
        )
        raise NotImplementedError(message)

    @staticmethod
    def integrated_water_vapour_synthesis(*args, **kwargs):
        message = (
            "Recommendation ITU-R P.1853 does not specify a method to compute "
            "time series for the water vapour content."
        )
        raise NotImplementedError(message)

    @staticmethod
    def total_attenuation_synthesis(*args, **kwargs):
        message = (
            "Recommendation ITU-R P.1853 does not specify a method to compute "
            "time series for the total atmospheric attenuation."
        )
        raise NotImplementedError(message)
# Module-level singleton holding the active ITU-R P.1853 model version;
# rebound by change_version() and read by the public wrapper functions below.
__model = __ITU1853()
def change_version(new_version):
    """
    Change the version of the ITU-R P.1853 recommendation currently being used.
    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 1: Activates recommendation ITU-R P.1853-1 (02/12) (Current version)
        * 0: Activates recommendation ITU-R P.1853-0 (10/09) (Superseded)
    """
    # Rebind the module-level singleton so subsequent calls use new_version.
    global __model
    __model = __ITU1853(new_version)
def get_version():
    """
    Obtain the version of the ITU-R P.1853 recommendation currently being used.

    Returns
    -------
    version: int
        Version currently being used.
    """
    # ``global`` is only needed for assignment; this function merely reads
    # the module-level singleton, so the statement was removed.
    return __model.__version__
def set_seed(seed):
    """
    Set the seed used to generate random numbers.
    Parameters
    ----------
    seed : int
        Seed used to generate random numbers
    """
    # Seeds numpy's global RNG (see __ITU1853.set_seed), which drives all of
    # the synthesis functions in this module.
    __model.set_seed(seed)
def rain_attenuation_synthesis(lat, lon, f, el, hs, Ns, Ts=1, tau=45, n=None):
    """
    A method to generate a synthetic time series of rain attenuation values.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    f : number or Quantity
        Frequency (GHz)
    el : sequence, or number
        Elevation angle (degrees)
    hs : number, sequence, or numpy.ndarray, optional
        Height above mean sea level of the earth station (km). If local data
        for the earth station height above mean sea level is not available, an
        estimate is obtained from the maps of topographic altitude
        given in Recommendation ITU-R P.1511.
    Ns : int
        Number of samples
    Ts : int
        Time step between consecutive samples (seconds)
    tau : number, optional
        Polarization tilt angle relative to the horizontal (degrees)
        (tau = 45 deg for circular polarization). Default value is 45
    n : list, np.array, optional
        Additive White Gaussian Noise used as input for the synthesis of the
        time series. If None, a new noise realization is generated internally.

    Returns
    -------
    rain_att: numpy.ndarray
        Synthesized rain attenuation time series (dB)

    References
    ----------
    [1] Characteristics of precipitation for propagation modelling
    https://www.itu.int/rec/R-REC-P.1853/en
    """
    # Wrap longitudes into [0, 360), the convention used by the data maps.
    lon = np.mod(lon, 360)
    # Normalize possibly unit-tagged inputs to plain magnitudes.
    f = prepare_quantity(f, u.GHz, "Frequency")
    el = prepare_quantity(el, u.deg, "Elevation angle")
    # Fixed typo in the description label ("Heigh" -> "Height"); the label is
    # presumably only surfaced in unit-mismatch error messages — verify.
    hs = prepare_quantity(hs, u.km, "Height above mean sea level of the earth station")
    Ts = prepare_quantity(Ts, u.second, "Time step between samples")
    val = __model.rain_attenuation_synthesis(
        lat, lon, f, el, hs, Ns, Ts=Ts, tau=tau, n=n
    )
    return val * u.dB
def scintillation_attenuation_synthesis(Ns, f_c=0.1, Ts=1):
    """
    Generate a synthetic time series of scintillation attenuation values.

    Parameters
    ----------
    Ns : int
        Number of samples
    f_c : float
        Cut-off frequency for the low pass filter
    Ts : int
        Time step between consecutive samples (seconds)

    Returns
    -------
    sci_att: numpy.ndarray
        Synthesized scintillation attenuation time series (dB)

    References
    ----------
    [1] Characteristics of precipitation for propagation modelling
    https://www.itu.int/rec/R-REC-P.1853/en
    """
    # Delegate to the active model version and attach dB units.
    series = __model.scintillation_attenuation_synthesis(Ns, f_c, Ts)
    return series * u.dB
def integrated_water_vapour_synthesis(lat, lon, Ns, Ts=1, n=None):
    """Generate a synthetic time series of integrated water vapour content.

    The time series synthesis method generates a time series that reproduces
    the spectral characteristics and the distribution of water vapour content.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    Ns : int
        Number of samples
    Ts : int
        Time step between consecutive samples (seconds)
    n : list, np.array, optional
        Additive White Gaussian Noise used as input for the synthesis of the
        time series.

    Returns
    -------
    L: numpy.ndarray
        Synthesized water vapour content time series (kg/m2)

    References
    ----------
    [1] Characteristics of precipitation for propagation modelling
    https://www.itu.int/rec/R-REC-P.1853/en
    """
    # Wrap longitudes into [0, 360), the convention used by the data maps.
    wrapped_lon = np.mod(lon, 360)
    series = __model.integrated_water_vapour_synthesis(lat, wrapped_lon,
                                                       Ns, Ts, n)
    return series * u.kg / u.m**2
def cloud_liquid_water_synthesis(lat, lon, Ns, Ts=1, n=None):
    """Generate a synthetic time series of cloud liquid water content.

    The time series synthesis method generates a time series that reproduces
    the spectral characteristics, rate of change and duration statistics of
    cloud liquid content events.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    Ns : int
        Number of samples
    Ts : int
        Time step between consecutive samples (seconds)
    n : list, np.array, optional
        Additive White Gaussian Noise used as input for the synthesis of the
        time series.

    Returns
    -------
    V: numpy.ndarray
        Synthesized cloud liquid water time series (mm)

    References
    ----------
    [1] Characteristics of precipitation for propagation modelling
    https://www.itu.int/rec/R-REC-P.1853/en
    """
    # Wrap longitudes into [0, 360), the convention used by the data maps.
    wrapped_lon = np.mod(lon, 360)
    series = __model.cloud_liquid_water_synthesis(lat, wrapped_lon, Ns, Ts, n)
    return series * u.mm
def total_attenuation_synthesis(
    lat,
    lon,
    f,
    el,
    p,
    D,
    Ns,
    Ts=1,
    hs=None,
    tau=45,
    eta=0.65,
    rho=None,
    H=None,
    P=None,
    hL=1000,
    return_contributions=False,
):
    """The time series synthesis method generates a time series that
    reproduces the spectral characteristics, rate of change and duration
    statistics of the total atmospheric attenuation events.
    The time series is obtained considering the contributions of gaseous,
    cloud, rain, and scintillation attenuation.
    Parameters
    ----------
    lat : number
        Latitudes of the receiver points
    lon : number
        Longitudes of the receiver points
    f : number or Quantity
        Frequency (GHz)
    el : number
        Elevation angle (degrees)
    p : number
        Percentage of the time the rain attenuation value is exceeded.
    D: number or Quantity
        Physical diameter of the earth-station antenna (m)
    Ns : int
        Number of samples
    Ts : int
        Time step between consecutive samples (seconds)
    tau : number, optional
        Polarization tilt angle relative to the horizontal (degrees)
        (tau = 45 deg for circular polarization). Default value is 45
    hs : number, sequence, or numpy.ndarray, optional
        Height above mean sea level of the earth station (km). If local data
        for the earth station height above mean sea level is not available, an
        estimate is obtained from the maps of topographic altitude
        given in Recommendation ITU-R P.1511.
    eta: number, optional
        Antenna efficiency. Default value 0.65
    rho : number or Quantity, optional
        Water vapor density (g/m3). If not provided, an estimate is obtained
        from Recommendation Recommendation ITU-R P.836.
    H: number, sequence, or numpy.ndarray, optional
        Average surface relative humidity (%) at the site. If None, uses the
        ITU-R P.453 to estimate the wet term of the radio refractivity.
    P: number, sequence, or numpy.ndarray, optional
        Average surface pressure (hPa) at the site. If None, uses the
        ITU-R P.453 to estimate the wet term of the radio refractivity.
    hL : number, optional
        Height of the turbulent layer (m). Default value 1000 m
    return_contributions: bool, optional
        Determines whether individual contributions from gases, rain, clouds
        and scintillation are returned in addition to the total attenuation
        (True), or just the total atmospheric attenuation (False).
        Default is False
    Returns
    ---------
    A : Quantity
        Synthesized total atmospheric attenuation time series (dB)
    Ag, Ac, Ar, As, A : tuple
        Synthesized Gaseous, Cloud, Rain, Scintillation contributions to total
        attenuation time series, and synthesized total attenuation time series
        (dB).
    References
    ----------
    [1] Characteristics of precipitation for propagation modelling
    https://www.itu.int/rec/R-REC-P.1853/en
    """
    global __model
    # NOTE(review): unlike the other wrappers in this module, lon is not
    # wrapped with np.mod(lon, 360) here — confirm whether this is intended.
    # Normalize possibly unit-tagged inputs to plain magnitudes.
    f = prepare_quantity(f, u.GHz, "Frequency")
    el = prepare_quantity(el, u.deg, "Elevation angle")
    D = prepare_quantity(D, u.m, "Antenna diameter")
    # NOTE(review): "Heigh" typo in the runtime description label below is
    # preserved here; it presumably only appears in error messages.
    hs = prepare_quantity(hs, u.km, "Heigh above mean sea level of the earth station")
    eta = prepare_quantity(eta, u.one, "Antenna efficiency")
    rho = prepare_quantity(rho, u.g / u.m**3, "Water vapor density")
    H = prepare_quantity(H, u.percent, "Average surface relative humidity")
    P = prepare_quantity(P, u.hPa, "Average surface pressure")
    hL = prepare_quantity(hL, u.m, "Height of the turbulent layer")
    val = __model.total_attenuation_synthesis(
        lat,
        lon,
        f,
        el,
        p,
        D,
        Ns,
        Ts=Ts,
        tau=tau,
        hs=hs,
        eta=eta,
        rho=rho,
        H=H,
        P=P,
        hL=hL,
        return_contributions=return_contributions,
    )
    if return_contributions:
        # Attach dB units to every contribution in the returned tuple.
        return tuple([v * u.dB for v in val])
    else:
        return val * u.dB
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu530.py | itur/models/itu530.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from astropy import units as u
from scipy.optimize import bisect
from itur.models.itu453 import DN65
from itur.models.itu837 import rainfall_rate
from itur.models.itu1144 import bilinear_2D_interpolator
from itur.models.itu838 import (rain_specific_attenuation,
rain_specific_attenuation_coefficients)
from itur.utils import (prepare_input_array, prepare_quantity,
get_input_type,
prepare_output_array, load_data_interpolator)
class __ITU530__():
    """Private class to model the ITU-R P.530 recommendations.

    Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems

    Available versions:
       * P.530-17 (12/17) (Current version)
       * P.530-16 (07/15) (Superseded)

    This recommendation includes prediction methods for the propagation effects
    that should be taken into account in the design of digital fixed
    line-of-sight links, both in clear-air and rainfall conditions. It also
    provides link design guidance in clear step-by-step procedures including
    the use of mitigation techniques to minimize propagation impairments. The
    final outage predicted is the base for other Recommendations addressing
    error performance and availability.
    """

    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.530 recommendation.
    def __init__(self, version=17):
        if version == 17:
            self.instance = _ITU530_17_()
        elif version == 16:
            self.instance = _ITU530_16()
        else:
            # f-string for consistency with the other ITU-R modules; the
            # resulting message is identical to the previous concatenation.
            raise ValueError(
                f'Version {version} is not implemented'
                f' for the ITU-R P.530 model.')

    @property
    def __version__(self):
        # Version number of the currently selected implementation.
        return self.instance.__version__

    # All public methods below simply dispatch to the selected version
    # instance; see the concrete implementation classes for the algorithms.
    def fresnel_ellipse_radius(self, d1, d2, f):
        return self.instance.fresnel_ellipse_radius(d1, d2, f)

    def diffraction_loss(self, d1, d2, h, f):
        return self.instance.diffraction_loss(d1, d2, h, f)

    def multipath_loss_for_A(self, lat, lon, h_e, h_r, d, f, A):
        return self.instance.multipath_loss_for_A(lat, lon, h_e, h_r, d, f, A)

    def multipath_loss(self, lat, lon, h_e, h_r, d, f, A):
        return self.instance.multipath_loss(lat, lon, h_e, h_r, d, f, A)

    def rain_attenuation(self, lat, lon, d, f, el, p, tau=45, R001=None):
        return self.instance.rain_attenuation(lat, lon, d, f, el, p, tau, R001)

    def inverse_rain_attenuation(self, lat, lon, d, f, el,
                                 Ap, tau=45, R001=None):
        return self.instance.inverse_rain_attenuation(lat, lon, d, f, el,
                                                      Ap, tau, R001)

    def rain_event_count(self, lat, lon, d, f, el, A, tau=45, R001=None):
        return self.instance.rain_event_count(lat, lon, d, f, el, A, tau, R001)

    def XPD_outage_clear_air(self, lat, lon, h_e, h_r,
                             d, f, XPD_g, C0_I, XPIF=0):
        return self.instance.XPD_outage_clear_air(lat, lon, h_e, h_r, d, f,
                                                  XPD_g, C0_I, XPIF)

    def XPD_outage_precipitation(self, lat, lon, d, f, el, C0_I, tau=45,
                                 U0=15, XPIF=0):
        return self.instance.XPD_outage_precipitation(lat, lon, d, f, el, C0_I,
                                                      tau, U0, XPIF)
class _ITU530_17_():
_s_a = {}
    def __init__(self):
        # Metadata identifying the recommendation revision implemented here.
        self.__version__ = 17
        self.year = 2017
        self.month = 12
        self.link = 'https://www.itu.int/rec/R-REC-P.530-17-201712-S/en'
@classmethod
def s_a(self, lat, lon):
"""
Standard deviation of terrain heights.
Computes the Standard deviation of terrain heights (m) within a
110 km × 110 km area with a 30 s resolution (e.g. the Globe “gtopo30”
data).
The value for the mid-path may be obtained from an area roughness
with 0.5 × 0.5 degree resolution of geographical coordinates
using bi-linear interpolation.
"""
if not _ITU530_17_._s_a:
_ITU530_17_._s_a = load_data_interpolator(
'530/v16_lat.npz', '530/v16_lon.npz',
'530/v16_gtopo_30.npz', bilinear_2D_interpolator,
flip_ud=False)
return _ITU530_17_._s_a(
np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)
###########################################################################
# Section 2.2 #
###########################################################################
@classmethod
def fresnel_ellipse_radius(self, d1, d2, f):
"""
Compute the Fresnel ellipse radius at a given frequency.
Implementation of 'fresnel_ellipse_radius' method for recommendation
ITU-P R.530-16. See documentation for function
'ITUR530.fresnel_ellipse_radius'
"""
return 17.3 * np.sqrt(d1 * d2 / (f * (d1 + d2)))
@classmethod
def diffraction_loss(self, d1, d2, h, f):
"""
Compute the diffraction losses at a given frequency.
Implementation of 'diffraction_loss' method for recommendation
ITU-P R.530-16. See documentation for function
'ITUR530.diffraction_loss'
"""
F1 = self.fresnel_ellipse_radius(d1, d2, f) # Eq. 2 [m]
Ad = -20 * h / F1 + 10 # Eq. 3 [dB]
return Ad
###########################################################################
# Section 2.3 #
###########################################################################
@classmethod
def multipath_loss_for_A(self, lat, lon, h_e, h_r, d, f, A):
"""
Computes the percentage of time that a fade depth A is exceeded
due to multi-path losses.
Implementation of 'multipath_loss_for_A' method for recommendation
ITU-P R.530-16. See documentation for function
'ITUR530.multipath_loss_for_A'
"""
# Step 1: Estimate the geoclimatic factor K
# DN1 point refractivity gradient in the lowest 65 m of the atmosphere
# not exceeded for 1% of an average year
# s_a is the area terrain roughness
s_a = self.s_a(lat, lon)
dN1 = DN65(lat, lon, 1).value
K = 10**(-4.4 - 0.0027 * dN1) * (10 + s_a)**(-0.46) # Eq. 4 [-]
# Step 2: Calculate the magnitude of the path inclination
# Eq. 5 [mrad]
e_p = np.abs(h_r - h_e) / d
# Step 3: For detailed link design applications calculate the
# percentage of time (p_W) that fade depth A (dB) is exceeded in the
# average worst month
h_L = np.minimum(h_e, h_r)
p_W = K * d**3.4 * (1 + e_p)**-1.03 * f**0.8 * \
10**(-0.00076 * h_L - A / 10)
# Eq. 7 [%]
return p_W
@classmethod
def multipath_loss(self, lat, lon, h_e, h_r, d, f, A):
"""
Estimate the number of fade events exceeding attenuation 'A'
for 10 seconds or longer.
Implementation of 'multipath_loss' method for recommendation
ITU-P R.530-16. See documentation for function
'ITUR530.multipath_loss'
"""
# Step 1: Using the method multipath_loss_for_A calculate the
# multi-path occurrence factor, p0
p0 = self.multipath_loss_for_A(
lat, lon, h_e, h_r, d, f, 0) # Eq. 10 [%]
# Step 2: Calculate the value of fade depth, At, at which the
# transition occurs between the deep-fading distribution and the
# shallow-fading distribution
At = 25 + 1.2 * np.log10(p0) # Eq. 12 [dB]
# Step 3: Calculate the percentage of time that A is exceeded in the
# average worst month:
def step_3b(p_0, At, A):
p_t = p_0 * 10 ** (-At / 10)
qa_p = -20 * np.log10(-np.log((100 - p_t) / 100)) / At
q_t = ((qa_p - 2) /
(1 + 0.3 * 10 ** (-At / 20) * 10 ** (-0.016 * At)) -
4.3 * (10**(-At / 20) + At / 800))
q_a = 2 + (1 + 0.3 * 10**(-A / 20)) * (10**(-0.016 * A)) *\
(q_t + 4.3 * (10**(-A / 20 + A / 800)))
p_W = 100 * (1 - np.exp(-10 ** (-q_a * A / 20)))
return p_W
p_W = np.where(A >= At, p0 * 10 ** (-A / 10), step_3b(p0, At, A))
# Eq. 13 and Eq. 18 [%]
return p_W
###########################################################################
# Section 2.4 #
###########################################################################
@classmethod
def rain_attenuation(self, lat, lon, d, f, el, p, tau=45, R001=None):
"""
Estimate long-term statistics of rain attenuation.
Implementation of 'rain_attenuation' method for recommendation
ITU-P R.530-16. See documentation for function
'ITUR530.rain_attenuation'
"""
# Step 1: Obtain the rain rate R0.01 exceeded for 0.01% of the time
# (with an integration time of 1 min).
if R001 is None:
R001 = rainfall_rate(lat, lon, 0.01).value
# Step 2: Compute the specific attenuation, 'gammar' (dB/km) for the
# frequency, polarization and rain rate of interest using
# Recommendation ITU-R P.838
gammar = rain_specific_attenuation(R001, f, el, tau).value
_, alpha = rain_specific_attenuation_coefficients(f, el, tau)
# Step 3: Compute the effective path length, 'deff', of the link by
# multiplying the actual path length d by a distance factor r
r = 1 / (0.477 * d ** 0.633 * R001 ** (0.073 * alpha) *
f**(0.123) - 10.579 * (1 - np.exp(-0.024 * d))) # Eq. 32 [-]
deff = np.minimum(r, 2.5) * d
# Step 4: An estimate of the path attenuation exceeded for 0.01% of
# the time is given by:
A001 = gammar * deff # Eq. 33 [dB]
# Step 5: The attenuation exceeded for other percentages of time p in
# the range 0.001% to 1% may be deduced from the following power law
C0 = np.where(f >= 10, 0.12 + 0.4 * (np.log10(f / 10)**0.8), 0.12)
# Eq. 35a [-]
C1 = (0.07**C0) * (0.12**(1 - C0))
# Eq. 35b [-]
C2 = 0.855 * C0 + 0.546 * (1 - C0)
C3 = 0.139 * C0 + 0.043 * (1 - C0) # Eq. 35c [-]
Ap = A001 * C1 * p ** (- (C2 + C3 * np.log10(p))) # Eq. 34 [dB]
return Ap
@staticmethod
def inverse_rain_attenuation(
        lat, lon, d, f, el, Ap, tau=45, R001=None):
    """Return the time percentage for which attenuation Ap is exceeded.

    Inverts the ITU-R P.530-16 rain attenuation power law (Eqs. 32-35)
    numerically by bisection. See documentation for function
    'ITUR530.inverse_rain_attenuation'.
    """
    # Point rainfall rate exceeded for 0.01% of an average year (mm/h),
    # estimated from ITU-R P.837 when not supplied.
    if R001 is None:
        R001 = rainfall_rate(lat, lon, 0.01).value

    # Specific attenuation (dB/km) from Recommendation ITU-R P.838.
    gammar = rain_specific_attenuation(R001, f, el, tau).value
    _, alpha = rain_specific_attenuation_coefficients(f, el, tau)

    # Effective path length: actual length times a distance factor,
    # with the factor capped at 2.5.
    r = 1 / (0.477 * d ** 0.633 * R001 ** (0.073 * alpha) *
             f**(0.123) - 10.579 * (1 - np.exp(-0.024 * d)))  # Eq. 32 [-]
    deff = np.minimum(r, 2.5) * d

    # Path attenuation exceeded for 0.01% of the time.
    A001 = gammar * deff  # Eq. 33 [dB]

    # Coefficients of the power law (Eqs. 35a-35c).
    C0 = np.where(f >= 10, 0.12 + 0.4 * (np.log10(f / 10)**0.8), 0.12)
    C1 = (0.07**C0) * (0.12**(1 - C0))
    C2 = 0.855 * C0 + 0.546 * (1 - C0)
    C3 = 0.139 * C0 + 0.043 * (1 - C0)

    def residual(p):
        # Modelled attenuation at percentage p minus the target Ap.
        return A001 * C1 * p ** (- (C2 + C3 * np.log10(p))) - Ap

    # Without a sign change there is no root inside the bracket.
    if np.sign(residual(1e-9)) == np.sign(residual(100)):
        return 0
    return bisect(residual, 1e-9, 100)
@classmethod
def rain_event_count(self, lat, lon, d, f, el, A, tau=45, R001=None):
    """Estimate the number of fade events exceeding attenuation A.

    Counts events of 10 seconds or longer. See documentation for
    function 'ITUR530.rain_event_count' (ITU-R P.530-16).
    """
    # Percentage of the average year during which the rain attenuation
    # A (dB) is exceeded.
    p_A = self.inverse_rain_attenuation(lat, lon, d, f, el, Ap=A,
                                        tau=tau, R001=R001)
    # Empirical fit for the number of >= 10 s fade events.  Eq. 78 [-]
    return 1 + 1313 * p_A**0.945
###########################################################################
# Section 4 #
###########################################################################
@classmethod
def XPD_outage_clear_air(self, lat, lon, h_e, h_r,
                         d, f, XPD_g, C0_I, XPIF=0):
    """Probability of outage due to clear-air XPD reduction.

    Assumes a target carrier-to-interference ratio C0_I is required.
    See documentation for function 'ITUR530.XPD_outage_clear_air'
    (ITU-R P.530-16, Eqs. 101-106).
    """
    # Step 1: reference cross-polar discrimination.       Eq. 101
    XPD_0 = np.where(XPD_g <= 35, XPD_g + 5, 40)
    # Step 2: multi-path activity parameter.              Eq. 102
    P0 = self.multipath_loss_for_A(lat, lon, h_e, h_r, d, f, 0)
    eta = 1 - np.exp(-0.2 * P0**0.75)
    # Step 3: Q factor with kXP = 0.7.                    Eqs. 103-104
    kXP = 0.7
    Q = -10 * np.log10(kXP * eta / P0)
    # Step 4: parameter C.                                Eq. 105
    C = XPD_0 + Q
    # Step 5: outage probability due to clear-air
    # cross-polarization.                                 Eq. 106 [%]
    M_XPD = C - C0_I + XPIF
    return P0 * 10 ** (-M_XPD / 10)
@classmethod
def XPD_outage_precipitation(self, lat, lon, d, f, el, C0_I, tau=45,
                             U0=15, XPIF=0):
    """
    Estimate the probability of outage due to cross-polar discrimination
    reduction due to precipitation effects, assuming that a target C0_I is
    required.

    Implementation of 'XPD_outage_precipitation' method for recommendation
    ITU-P R.530-16. See documentation for function
    'ITUR530.XPD_outage_precipitation'
    """
    # Step 1: Determine the path attenuation, A0.01 (dB), exceeded
    # for 0.01% of the time
    A001 = self.rain_attenuation(lat, lon, d, f, el, 0.01)
    # Step 2: Determine the equivalent path attenuation, Ap
    U = U0 + 30 * np.log10(f)
    V = np.where(f < 20, 12.8 * f**0.19, 22.6)
    Ap = 10 ** ((U - C0_I + XPIF) / V)  # Eq. 112
    # Step 3: Determine parameters m and n.
    # BUGFIX: use np.minimum instead of the builtin min so that
    # array-valued inputs (multiple locations/frequencies) work;
    # builtin min() raises on arrays with more than one element.
    m = np.minimum(23.26 * np.log10(Ap / (0.12 * A001)), 40)  # Eq. 113
    n = (-12.7 + np.sqrt(161.23 - 4 * m)) / 2  # Eq. 114
    # Step 4: Determine the outage probability
    P_XPR = 10**(n - 2)  # Eq. 115 [%]
    return P_XPR
class _ITU530_16():
    """Implementation of version 16 of recommendation ITU-R P.530 (07/2015).

    The computational procedures of this version are identical to those of
    version 17, so every method simply delegates to ``_ITU530_17_``.
    """

    def __init__(self):
        self.__version__ = 16
        self.year = 2015
        self.month = 7
        self.link = 'https://www.itu.int/rec/R-REC-P.530-16-201507-S/en'
        # Cache dictionary, kept for parity with _ITU530_17_.
        self._s_a = {}

    @staticmethod
    def s_a(self, *args, **kwargs):
        # NOTE(review)/BUGFIX candidate: this delegator is declared
        # @staticmethod yet keeps a 'self' parameter, so when called on an
        # instance the first real argument is consumed by 'self' and never
        # forwarded. The forwarding below is kept, but the stray parameter
        # should be removed once callers are audited.
        return _ITU530_17_.s_a(*args, **kwargs)

    ###########################################################################
    #                               Section 2.2                               #
    ###########################################################################
    @staticmethod
    def fresnel_ellipse_radius(*args, **kwargs):
        return _ITU530_17_.fresnel_ellipse_radius(*args, **kwargs)

    @staticmethod
    def diffraction_loss(*args, **kwargs):
        return _ITU530_17_.diffraction_loss(*args, **kwargs)

    ###########################################################################
    #                               Section 2.3                               #
    ###########################################################################
    @staticmethod
    def multipath_loss_for_A(*args, **kwargs):
        return _ITU530_17_.multipath_loss_for_A(*args, **kwargs)

    @staticmethod
    def multipath_loss(*args, **kwargs):
        return _ITU530_17_.multipath_loss(*args, **kwargs)

    ###########################################################################
    #                               Section 2.4                               #
    ###########################################################################
    @staticmethod
    def rain_attenuation(*args, **kwargs):
        return _ITU530_17_.rain_attenuation(*args, **kwargs)

    @staticmethod
    def inverse_rain_attenuation(*args, **kwargs):
        return _ITU530_17_.inverse_rain_attenuation(*args, **kwargs)

    @staticmethod
    def rain_event_count(*args, **kwargs):
        return _ITU530_17_.rain_event_count(*args, **kwargs)

    ###########################################################################
    #                                Section 4                                #
    ###########################################################################
    @staticmethod
    def XPD_outage_clear_air(*args, **kwargs):
        return _ITU530_17_.XPD_outage_clear_air(*args, **kwargs)

    @staticmethod
    def XPD_outage_precipitation(*args, **kwargs):
        return _ITU530_17_.XPD_outage_precipitation(*args, **kwargs)
# Module-level singleton holding the currently active ITU-R P.530 model;
# swapped out by change_version() below.
__model = __ITU530__()
def change_version(new_version):
    """Select the version of the ITU-R P.530 recommendation to use.

    Replaces the module-level model with an instance of the requested
    version.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 16: Activates recommendation ITU-R P.530-16 (07/15) (Current version)
    """
    # NOTE(review): this module also defines a _ITU530_17_ implementation;
    # confirm whether version 17 should be listed as a valid value here.
    global __model
    __model = __ITU530__(new_version)
def get_version():
    """Return the version number of the ITU-R P.530 recommendation in use.

    Returns
    -------
    version : int
        The version of the ITU-R P.530 recommendation being used.
    """
    return __model.__version__
def fresnel_ellipse_radius(d1, d2, f):
    """Compute the radius of the first Fresnel ellipsoid.

    Parameters
    ----------
    d1 : number, sequence, or numpy.ndarray
        Distances from the first terminal to the path obstruction. [km]
    d2 : number, sequence, or numpy.ndarray
        Distances from the second terminal to the path obstruction. [km]
    f : number
        Frequency of the link [GHz]

    Returns
    -------
    F1 : Quantity
        Radius of the first Fresnel ellipsoid [m]

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems: https://www.itu.int/rec/R-REC-P.530/en
    """
    # Remember whether the caller passed scalars/Quantities so the output
    # can be returned in the same form.
    type_output = get_input_type(d1)
    d1 = prepare_quantity(d1, u.km, 'Distance to the first terminal')
    d2 = prepare_quantity(d2, u.km, 'Distance to the second terminal')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    radius = __model.fresnel_ellipse_radius(d1, d2, f)
    return prepare_output_array(radius, type_output) * u.m
def diffraction_loss(d1, d2, h, f):
    """Estimate the diffraction loss over average terrain.

    Diffraction loss over average terrain. This value is valid for losses
    greater than 15 dB.

    Parameters
    ----------
    d1 : number, sequence, or numpy.ndarray
        Distances from the first terminal to the path obstruction. [km]
    d2 : number, sequence, or numpy.ndarray
        Distances from the second terminal to the path obstruction. [km]
    h : number, sequence, or numpy.ndarray
        Height difference between most significant path blockage
        and the path trajectory. h is negative if the top of the obstruction
        of interest is above the virtual line-of-sight. [m]
    f : number
        Frequency of the link [GHz]

    Returns
    -------
    A_d: Quantity
        Diffraction loss over average terrain [dB]

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems: https://www.itu.int/rec/R-REC-P.530/en
    """
    type_output = get_input_type(d1)
    d1 = prepare_quantity(d1, u.km, 'Distance to the first terminal')
    d2 = prepare_quantity(d2, u.km, 'Distance to the second terminal')
    h = prepare_quantity(h, u.m, 'Height difference')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    val = __model.diffraction_loss(d1, d2, h, f)
    # BUGFIX: the result is an attenuation in dB (see docstring above); the
    # original attached meter units (u.m), apparently copy-pasted from
    # fresnel_ellipse_radius.
    return prepare_output_array(val, type_output) * u.dB
def multipath_loss_for_A(lat, lon, h_e, h_r, d, f, A):
    """Estimate the single-frequency (or narrow-band) fading distribution.

    Predicts, for any part of the world, the percentage of time during the
    average worst month that a given fade depth ``A`` is exceeded, at large
    fade depths. The method does not use the path profile, so it is suited
    to initial planning, licensing, or design purposes, and is only valid
    for small percentages of time. Multi-path fading and enhancement only
    need to be calculated for path lengths longer than 5 km, and can be
    set to zero for shorter paths.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    h_e : number
        Emitter antenna height (above the sea level) [m]
    h_r : number
        Receiver antenna height (above the sea level) [m]
    d : number, sequence, or numpy.ndarray
        Distances between antennas [km]
    f : number
        Frequency of the link [GHz]
    A : number
        Fade depth [dB]

    Returns
    -------
    p_w : Quantity
        Percentage of time that fade depth A is exceeded in the average
        worst month [%]

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems: https://www.itu.int/rec/R-REC-P.530/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Longitudes are wrapped into [0, 360) before being handed to the model.
    lon = np.mod(prepare_input_array(lon), 360)
    h_e = prepare_quantity(
        h_e, u.m, 'Emitter antenna height (above sea level)')
    h_r = prepare_quantity(
        h_r, u.m, 'Receiver antenna height (above sea level)')
    d = prepare_quantity(d, u.km, 'Distance between antennas')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    A = prepare_quantity(A, u.dB, 'Fade depth')
    p_w = __model.multipath_loss_for_A(lat, lon, h_e, h_r, d, f, A)
    return prepare_output_array(p_w, type_output) * u.percent
def multipath_loss(lat, lon, h_e, h_r, d, f, A):
    """Estimate the percentage of time that any fade depth is exceeded.

    Combines the deep-fading distribution of 'multipath_loss_for_A' with an
    empirical interpolation procedure for shallow fading down to 0 dB. The
    method does not use the path profile, so it is suited to initial
    planning, licensing, or design purposes. Multi-path fading and
    enhancement only need to be calculated for path lengths longer than
    5 km, and can be set to zero for shorter paths.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    h_e : number
        Emitter antenna height (above the sea level) [m]
    h_r : number
        Receiver antenna height (above the sea level) [m]
    d : number, sequence, or numpy.ndarray
        Distances between antennas [km]
    f : number
        Frequency of the link [GHz]
    A : number
        Fade depth [dB]

    Returns
    -------
    p_w : Quantity
        Percentage of time that fade depth A is exceeded in the average
        worst month [%]

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems: https://www.itu.int/rec/R-REC-P.530/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Longitudes are wrapped into [0, 360) before being handed to the model.
    lon = np.mod(prepare_input_array(lon), 360)
    h_e = prepare_quantity(
        h_e, u.m, 'Emitter antenna height (above sea level)')
    h_r = prepare_quantity(
        h_r, u.m, 'Receiver antenna height (above sea level)')
    d = prepare_quantity(d, u.km, 'Distance between antennas')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    A = prepare_quantity(A, u.dB, 'Fade depth')
    p_w = __model.multipath_loss(lat, lon, h_e, h_r, d, f, A)
    return prepare_output_array(p_w, type_output) * u.percent
def rain_attenuation(lat, lon, d, f, el, p, tau=45, R001=None):
    """Estimate long-term statistics of rain attenuation.

    Attenuation can also occur as a result of absorption and scattering by
    such hydro-meteors as rain, snow, hail and fog. Although rain
    attenuation can be ignored at frequencies below about 5 GHz, it must be
    included in design calculations at higher frequencies, where its
    importance increases rapidly.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    d : number, sequence, or numpy.ndarray
        Path length [km]
    f : number
        Frequency of the link [GHz]
    el : sequence, or number
        Elevation angle (degrees)
    p : number
        Percentage of the time the rain attenuation value is exceeded.
    tau : number, optional
        Polarization tilt angle relative to the horizontal (degrees)
        (tau = 45 deg for circular polarization). Default value is 45
    R001 : number, optional
        Point rainfall rate for the location for 0.01% of an average year
        (mm/h). If not provided, an estimate is obtained from
        Recommendation ITU-R P.837. Some useful values:
            * 0.25 mm/h : Drizzle
            * 2.5 mm/h : Light rain
            * 12.5 mm/h : Medium rain
            * 25.0 mm/h : Heavy rain
            * 50.0 mm/h : Downpour
            * 100 mm/h : Tropical
            * 150 mm/h : Monsoon

    Returns
    -------
    A_r : Quantity
        Attenuation exceeded during p percent of the time [dB]

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems: https://www.itu.int/rec/R-REC-P.530/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Longitudes are wrapped into [0, 360) before being handed to the model.
    lon = np.mod(prepare_input_array(lon), 360)
    d = prepare_quantity(d, u.km, 'Distance between antennas')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    el = prepare_quantity(el, u.deg, 'Elevation Angle')
    R001 = prepare_quantity(R001, u.mm / u.hr, 'Rainfall Rate')
    A_r = __model.rain_attenuation(lat, lon, d, f, el, p, tau, R001)
    return prepare_output_array(A_r, type_output) * u.dB
def inverse_rain_attenuation(lat, lon, d, f, el, Ap, tau=45, R001=None):
    """Estimate the percentage of time a given attenuation is exceeded.

    Estimates the percentage of time a given attenuation is exceeded due
    to rain events.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    d : number, sequence, or numpy.ndarray
        Path length [km]
    f : number
        Frequency of the link [GHz]
    el : sequence, or number
        Elevation angle (degrees)
    Ap : number
        Fade depth
    tau : number, optional
        Polarization tilt angle relative to the horizontal (degrees)
        (tau = 45 deg for circular polarization). Default value is 45
    R001 : number, optional
        Point rainfall rate for the location for 0.01% of an average year
        (mm/h). If not provided, an estimate is obtained from
        Recommendation ITU-R P.837. Some useful values:
            * 0.25 mm/h : Drizzle
            * 2.5 mm/h : Light rain
            * 12.5 mm/h : Medium rain
            * 25.0 mm/h : Heavy rain
            * 50.0 mm/h : Downpour
            * 100 mm/h : Tropical
            * 150 mm/h : Monsoon

    Returns
    -------
    p : Quantity
        Percentage of time that the attenuation Ap is exceeded.

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    terrestrial line-of-sight systems: https://www.itu.int/rec/R-REC-P.530/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Longitudes are wrapped into [0, 360) before being handed to the model.
    lon = np.mod(prepare_input_array(lon), 360)
    d = prepare_quantity(d, u.km, 'Distance between antennas')
    f = prepare_quantity(f, u.GHz, 'Frequency')
    el = prepare_quantity(el, u.deg, 'Elevation Angle')
    Ap = prepare_quantity(Ap, u.dB, 'Fade depth')
    R001 = prepare_quantity(R001, u.mm / u.hr, 'Rainfall Rate')
    p = __model.inverse_rain_attenuation(lat, lon, d, f, el, Ap, tau, R001)
    return prepare_output_array(p, type_output) * u.percent
def rain_event_count(lat, lon, d, f, el, A, tau=45, R001=None):
"""Estimate the number of fade events exceeding attenuation 'A'.
Estimate the number of fade events exceeding attenuation 'A'
for 10 seconds or longer.
Parameters
----------
lat : number, sequence, or numpy.ndarray
Latitudes of the receiver points
lon : number, sequence, or numpy.ndarray
Longitudes of the receiver points
d : number, sequence, or numpy.ndarray
Path length [km]
f : number
Frequency of the link [GHz]
el : sequence, or number
Elevation angle (degrees)
A : number
Fade depth
R001: number, optional
Point rainfall rate for the location for 0.01% of an average year
(mm/h). If not provided, an estimate is obtained from Recommendation
Recommendation ITU-R P.837. Some useful values:
* 0.25 mm/h : Drizzle
* 2.5 mm/h : Light rain
* 12.5 mm/h : Medium rain
* 25.0 mm/h : Heavy rain
* 50.0 mm/h : Downpour
* 100 mm/h : Tropical
* 150 mm/h : Monsoon
tau : number, optional
Polarization tilt angle relative to the horizontal (degrees)
(tau = 45 deg for circular polarization). Default value is 45
Returns
-------
p: Quantity
Percentage of time that the attenuation A is exceeded.
References
----------
[1] Propagation data and prediction methods required for the design of
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu1511.py | itur/models/itu1511.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from astropy import units as u
from itur.models.itu1144 import bicubic_2D_interpolator
from itur.utils import (prepare_input_array, prepare_output_array,
load_data_interpolator, get_input_type)
class __ITU1511():
    """Topography for Earth-to-space propagation modelling.

    This model shall be used to obtain the height above mean sea level when
    no local data are available or when no data with a better spatial
    resolution is available.

    Available versions include:
    * P.1511-0 (02/01) (Superseded)
    * P.1511-1 (07/15) (Superseded)
    * P.1511-2 (08/19) (Current version)
    """
    # Abstract front-end: holds an instance of one concrete version of the
    # ITU-R P.1511 recommendation and forwards all calls to it.

    def __init__(self, version=2):
        _implementations = {0: _ITU1511_0_, 1: _ITU1511_1_, 2: _ITU1511_2_}
        if version not in _implementations:
            raise ValueError('Version ' + str(version) + ' is not implemented'
                             ' for the ITU-R P.1511 model.')
        self.instance = _implementations[version]()

    @property
    def __version__(self):
        """
        Version of the model (similar to version of the ITU Recommendation)
        """
        return self.instance.__version__

    def topographic_altitude(self, lat, lon):
        # Forward the topographic-altitude computation to the active version.
        return self.instance.topographic_altitude(lat, lon)
class _ITU1511_2_():
    """Implementation of version 2 of recommendation ITU-R P.1511.

    The values of topographical height (km) above mean sea level of the
    surface of the Earth are provided on a 0.5 deg grid in both latitude
    and longitude. For a location different from the gridpoints, the height
    above mean sea level at the desired location can be obtained by
    performing a bi-cubic interpolation.
    """

    def __init__(self):
        self.__version__ = 2
        self.year = 2019
        self.month = 8
        self.link = 'https://www.itu.int/rec/R-REC-P.1511/' +\
            'recommendation.asp?lang=en&parent=R-REC-P.1511-2-201908-I'
        # Interpolators are loaded lazily on first use.
        self._altitude = None
        self._wgs4_altitude = None

    def altitude(self, lat, lon):
        """Bi-cubic interpolation of the topographic-height grid."""
        # BUGFIX/idiom: compare the lazy cache against None explicitly
        # instead of relying on the truthiness of the interpolator object.
        if self._altitude is None:
            self._altitude = load_data_interpolator(
                '1511/v2_lat.npz', '1511/v2_lon.npz',
                '1511/v2_topo.npz', bicubic_2D_interpolator)
        return self._altitude(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def wgs4_altitude(self, lat, lon):
        """Bi-cubic interpolation of the '1511/v2_egm2008.npz' grid."""
        if self._wgs4_altitude is None:
            self._wgs4_altitude = load_data_interpolator(
                '1511/v2_lat.npz', '1511/v2_lon.npz',
                '1511/v2_egm2008.npz', bicubic_2D_interpolator)
        return self._wgs4_altitude(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def topographic_altitude(self, lat_d, lon_d):
        """
        Method to compute the values of topographical height (km) above mean
        sea level of the surface of the Earth.
        """
        # The new recommendation provides the output in meters and uses
        # a -180 to 180 longitude reference, hence the shift and the /1000.
        lon_d = np.where(lon_d > 180, lon_d - 360, lon_d)
        return self.altitude(lat_d, lon_d) / 1000
class _ITU1511_1_():
    """Implementation of version 1 of recommendation ITU-R P.1511.

    The values of topographical height (km) above mean sea level of the
    surface of the Earth are provided on a 0.5 deg grid in both latitude
    and longitude. For a location different from the gridpoints, the height
    above mean sea level at the desired location can be obtained by
    performing a bi-cubic interpolation.
    """

    def __init__(self):
        self.__version__ = 1
        self.year = 2015
        self.month = 7
        self.link = 'https://www.itu.int/rec/R-REC-P.1511/' +\
            'recommendation.asp?lang=en&parent=R-REC-P.1511-1-201507-I'
        # Interpolator is loaded lazily on first use.
        self._altitude = None

    def altitude(self, lat, lon):
        """Bi-cubic interpolation of the topographic-height grid."""
        # BUGFIX/idiom: compare the lazy cache against None explicitly
        # instead of relying on the truthiness of the interpolator object.
        if self._altitude is None:
            self._altitude = load_data_interpolator(
                '1511/v1_lat.npz', '1511/v1_lon.npz',
                '1511/v1_topo_0dot5.npz', bicubic_2D_interpolator)
        return self._altitude(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def topographic_altitude(self, lat_d, lon_d):
        """
        Method to compute the values of topographical height (km) above mean
        sea level of the surface of the Earth.
        """
        return self.altitude(lat_d, lon_d)
class _ITU1511_0_():
    """Implementation of version 0 of recommendation ITU-R P.1511.

    The values of topographical height (km) above mean sea level of the
    surface of the Earth are provided on a 0.5 deg grid in both latitude
    and longitude. For a location different from the gridpoints, the height
    above mean sea level at the desired location can be obtained by
    performing a bi-cubic interpolation.

    Note: this version reuses the same gridded dataset as version 1.
    """

    def __init__(self):
        self.__version__ = 0
        self.year = 2001
        self.month = 2
        self.link = 'https://www.itu.int/rec/R-REC-P.1511/' +\
            'recommendation.asp?lang=en&parent=R-REC-P.1511-0-200102-I'
        # Interpolator is loaded lazily on first use.
        self._altitude = None

    def altitude(self, lat, lon):
        """Bi-cubic interpolation of the topographic-height grid."""
        # BUGFIX/idiom: compare the lazy cache against None explicitly
        # instead of relying on the truthiness of the interpolator object.
        if self._altitude is None:
            self._altitude = load_data_interpolator(
                '1511/v1_lat.npz', '1511/v1_lon.npz',
                '1511/v1_topo_0dot5.npz', bicubic_2D_interpolator)
        return self._altitude(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def topographic_altitude(self, lat_d, lon_d):
        """
        Method to compute the values of topographical height (km) above mean
        sea level of the surface of the Earth.
        """
        return self.altitude(lat_d, lon_d)
# Module-level singleton holding the currently active ITU-R P.1511 model;
# swapped out by change_version() below.
__model = __ITU1511()
def change_version(new_version):
    """
    Change the version of the ITU-R P.1511 recommendation currently being used.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 2: Activates recommendation ITU-R P.1511-2 (08/19) (Current version)
        * 1: Activates recommendation ITU-R P.1511-1 (07/15) (Superseded)
        * 0: Activates recommendation ITU-R P.1511-0 (02/01) (Superseded)
    """
    global __model
    __model = __ITU1511(new_version)
def get_version():
    """Return the version of the ITU-R P.1511 recommendation in use.

    Returns
    -------
    version : int
        Version currently being used.
    """
    return __model.__version__
def topographic_altitude(lat, lon):
    """
    Topographical height (km) above mean sea level of the surface of the Earth.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points

    Returns
    -------
    altitude : numpy.ndarray
        Topographic altitude (km)

    References
    ----------
    [1] Topography for Earth-to-space propagation modelling:
    https://www.itu.int/rec/R-REC-P.1511/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Longitudes are wrapped into [0, 360) before being handed to the model.
    lon = np.mod(prepare_input_array(lon), 360)
    # Floor the altitude at a tiny positive value (1e-9 km).
    val = np.maximum(__model.topographic_altitude(lat, lon), 1e-9)
    return prepare_output_array(val, type_output) * u.km
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/__init__.py | itur/models/__init__.py | __all__ = ['itu453', 'itu530', 'itu618', 'itu676', 'itu835', 'itu836',
'itu837', 'itu838', 'itu839', 'itu840', 'itu1144', 'itu1510',
'itu1511', 'itu1623', 'itu1853']
import itur.models.itu453
import itur.models.itu530
import itur.models.itu618
import itur.models.itu835
import itur.models.itu836
import itur.models.itu837
import itur.models.itu838
import itur.models.itu839
import itur.models.itu840
import itur.models.itu1144
import itur.models.itu1510
import itur.models.itu1511
import itur.models.itu1623
import itur.models.itu1853
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu835.py | itur/models/itu835.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from astropy import units as u
from itur.utils import (prepare_input_array,
prepare_output_array,
get_input_type,
prepare_quantity)
class __ITU835__():
    """Class to model the ITU-R P.835 recommendation.

    The procedures to compute the reference standard atmosphere parameters
    presented in these versions are identical to those included in version
    ITU-R P.835.

    Available versions:
    * P.835-6 (12/17) (Current version)
    * P.835-5 (02/12) (Superseded)

    Not available versions:
    * P.835-1 (08/94) (Superseded)
    * P.835-2 (08/97) (Superseded)
    * P.835-3 (10/99) (Superseded)
    * P.835-4 (03/05) (Superseded)
    """
    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.835 recommendation.

    def __init__(self, version=6):
        if version == 6:
            self.instance = _ITU835_6()
        elif version == 5:
            self.instance = _ITU835_5()
        else:
            raise ValueError(
                'Version ' +
                str(version) +
                ' is not implemented' +
                ' for the ITU-R P.835 model.')

    @property
    def __version__(self):
        # Version number of the active implementation.
        return self.instance.__version__

    def temperature(self, lat, h, season='summer'):
        # Delegate to the active version.
        return self.instance.temperature(lat, h, season)

    def pressure(self, lat, h, season='summer'):
        # Delegate to the active version.
        return self.instance.pressure(lat, h, season)

    def water_vapour_density(self, lat, h, season='summer'):
        # Delegate to the active version.
        return self.instance.water_vapour_density(lat, h, season)

    def standard_temperature(self, h, T_0):
        # Delegate to the active version.
        return self.instance.standard_temperature(h, T_0)

    def standard_pressure(self, h, T_0, P_0):
        # Delegate to the active version.
        return self.instance.standard_pressure(h, T_0, P_0)

    def standard_water_vapour_density(self, h, h_0, rho_0):
        # Delegate to the active version.
        return self.instance.standard_water_vapour_density(h, h_0, rho_0)

    def standard_water_vapour_pressure(self, h, h_0=2, rho_0=7.5):
        # Delegate to the active version.
        return self.instance.standard_water_vapour_pressure(h, h_0, rho_0)
class _ITU835_6():
def __init__(self):
    """Metadata for version 6 of recommendation ITU-R P.835 (12/2017)."""
    self.__version__ = 6
    self.year = 2017
    self.month = 12
    self.link = 'https://www.itu.int/rec/R-REC-P.835-6-201712-I/en'
@staticmethod
def standard_temperature(h, T_0=288.15):
"""
"""
h_p = 6356.766 * h / (6356.766 + h)
# Warnings because of sqrt are expected
with np.errstate(invalid='ignore'):
T = np.where(h_p <= 11, 288.15 - 6.5 * h_p,
np.where(np.logical_and(11 < h_p, h_p <= 20),
216.65,
np.where(np.logical_and(20 < h_p, h_p <= 32),
216.65 + (h_p - 20),
np.where(np.logical_and(32 < h_p, h_p <= 47),
228.65 + 2.8 * (h_p - 32),
np.where(np.logical_and(47 < h_p, h_p <= 51),
270.65,
np.where(np.logical_and(51 < h_p, h_p <= 71),
270.65 - 2.8 * (h_p - 51),
np.where(np.logical_and(71 < h_p, h_p <= 84.852),
214.65 - 2.0 * (h_p - 71),
np.where(np.logical_and(86 <= h, h <= 91),
186.8673,
np.where(np.logical_and(91 < h, h <= 100),
263.1905 - 76.3232 * np.sqrt((1 - ((h - 91)/19.9429)**2)),
195.08134)))))))))
return T
@staticmethod
def standard_pressure(h, T_0=None, P_0=None):
    """Mean annual global reference pressure (hPa) at geometric height h (km).

    Piecewise profile expressed in geopotential height up to 84.852 km'
    and as a polynomial in geometric height for 86-100 km. T_0 and P_0
    are accepted for interface compatibility but not used here.
    """
    # Geopotential height (km') corresponding to the geometric height h (km).
    h_p = 6356.766 * h / (6356.766 + h)
    # All branches are evaluated for every element, so out-of-range lanes
    # can produce invalid-value warnings that are safely suppressed.
    with np.errstate(invalid='ignore'):
        P = np.where(h_p <= 11,
                     1013.25 * (288.15 / (288.15 - 6.5 * h_p))**(-34.1632 / 6.5),
                     np.where(np.logical_and(11 < h_p, h_p <= 20),
                              226.3226 * np.exp(-34.1632 * (h_p - 11) / 216.65),
                              np.where(np.logical_and(20 < h_p, h_p <= 32),
                                       54.74980 * (216.65 / (216.65 + (h_p - 20))) ** 34.1632,
                                       np.where(np.logical_and(32 < h_p, h_p <= 47),
                                                8.680422 * (228.65 / (228.65 + 2.8 * (h_p - 32))) **
                                                (34.1632 / 2.8),
                                                np.where(np.logical_and(47 < h_p, h_p <= 51),
                                                         1.109106 * np.exp(-34.1632 * (h_p - 47) / 270.65),
                                                         np.where(np.logical_and(51 < h_p, h_p <= 71),
                                                                  0.6694167 * (270.65 / (270.65 - 2.8 * (h_p - 51)))**(-34.1632 / 2.8),
                                                                  np.where(np.logical_and(71 < h_p, h_p <= 84.852),
                                                                           0.03956649 * (214.65 / (214.65 - 2.0 * (h_p - 71)))**(-34.1632 / 2.0),
                                                                           # Above 86 km the profile is a polynomial in
                                                                           # geometric height; beyond 100 km a tiny
                                                                           # positive placeholder (1e-62) is returned.
                                                                           np.where(np.logical_and(86 <= h, h <= 100),
                                                                                    np.exp(95.571899 - 4.011801 * h + 6.424731e-2 * h**2 -
                                                                                           4.789660e-4 * h**3 + 1.340543e-6 * h**4),
                                                                                    1e-62)))))))).astype(float)
    return P
@staticmethod
def standard_water_vapour_density(h, h_0=2, rho_0=7.5):
"""
"""
return rho_0 * np.exp(-h / h_0)
def standard_water_vapour_pressure(self, h, h_0=2, rho_0=7.5):
"""
"""
rho_h = self.standard_water_vapour_density(h, h_0, rho_0)
T_h = self.standard_temperature(h)
return rho_h * T_h / 216.7
# Low latitude standard atmosphere functions (Section ITU-R P.835-6 2) #
@staticmethod
def low_latitude_temperature(h):
"""Section 2 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h < 17)),
300.4222 - 6.3533 * h + 0.005886 * h**2,
np.where(np.logical_and((17 <= h), (h < 47)),
194 + (h - 17) * 2.533,
np.where(np.logical_and((47 <= h), (h < 52)), 270,
np.where(np.logical_and((52 <= h), (h < 80)),
270 - (h - 52) * 3.0714,
np.where(np.logical_and((80 <= h), (h <= 100)), 184, 184)))))
def low_latitude_pressure(self, h):
"""Section 2 of Recommendation ITU-R P.835-6."""
P10 = 284.8526 # Pressure at 10 km using the equation below
P72 = 0.0313660 # Pressure at 72 km using the equation below
return np.where(np.logical_and((0 <= h), (h <= 10)),
1012.0306 - 109.0338 * h + 3.6316 * h**2,
np.where(np.logical_and((10 < h), (h <= 72)),
P10 * np.exp(-0.147 * (h - 10)),
np.where(np.logical_and((72 < h), (h <= 100)),
P72 * np.exp(-0.165 * (h - 72)), np.nan)))
@staticmethod
def low_latitude_water_vapour(h):
"""Section 3.1 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h <= 15)), 19.6542 *
np.exp(- 0.2313 * h - 0.1122 * h**2 + 0.01351 * h**3 -
0.0005923 * h**4), 0)
# Mid latitude standard atmosphere functions (Section ITU-R P.835-6 3)
@staticmethod
def mid_latitude_temperature_summer(h):
"""Section 3.1 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h < 13)),
294.9838 - 5.2159 * h - 0.07109 * h**2,
np.where(np.logical_and((13 <= h), (h < 17)), 215.15,
np.where(np.logical_and((17 <= h), (h < 47)),
215.15 * np.exp((h - 17) * 0.008128),
np.where(np.logical_and((47 <= h), (h < 53)), 275,
np.where(np.logical_and((53 <= h), (h < 80)),
275 + 20 * (1 - np.exp((h - 53) * 0.06)),
np.where(np.logical_and((80 <= h), (h <= 100)),
175, np.nan))))))
def mid_latitude_pressure_summer(self, h):
"""Section 3.1 of Recommendation ITU-R P.835-6."""
P10 = 283.7096 # Pressure at 10 km using the equation below
P72 = 0.03124022 # Pressure at 72 km using the equation below
return np.where(
np.logical_and((0 <= h), (h <= 10)),
1012.8186 - 111.5569 * h + 3.8646 * h**2, np.where(
np.logical_and((10 < h), (h <= 72)),
P10 * np.exp(-0.147 * (h - 10)),
np.where(
np.logical_and((72 < h), (h <= 100)),
P72 * np.exp(-0.165 * (h - 72)),
np.nan)))
@staticmethod
def mid_latitude_water_vapour_summer(h):
"""Section 3.1 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h <= 15)),
14.3542 * np.exp(- 0.4174 * h - 0.02290 * h**2 +
0.001007 * h**3), 0)
@staticmethod
def mid_latitude_temperature_winter(h):
"""Section 3.2 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h < 10)),
272.7241 - 3.6217 * h - 0.1759 * h**2,
np.where(np.logical_and((10 <= h), (h < 33)), 218,
np.where(np.logical_and((33 <= h), (h < 47)),
218 + (h - 33) * 3.3571,
np.where(np.logical_and((47 <= h), (h < 53)), 265,
np.where(np.logical_and((53 <= h), (h < 80)),
265 - (h - 53) * 2.0370,
np.where(np.logical_and((80 <= h), (h <= 100)),
210, np.nan))))))
def mid_latitude_pressure_winter(self, h):
"""Section 3.2 of Recommendation ITU-R P.835-6."""
P10 = 258.9787 # Pressure at 10 km using the equation below
P72 = 0.02851702 # Pressure at 72 km using the equation below
return np.where(np.logical_and((0 <= h), (h <= 10)),
1018.8627 - 124.2954 * h + 4.8307 * h**2,
np.where(np.logical_and((10 < h), (h <= 72)),
P10 * np.exp(-0.147 * (h - 10)),
np.where(np.logical_and((72 < h), (h <= 100)),
P72 * np.exp(-0.155 * (h - 72)), np.nan)))
@staticmethod
def mid_latitude_water_vapour_winter(h):
"""Section 3.2 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and(0 <= h, h <= 10),
3.4742 * np.exp(- 0.2697 * h - 0.03604 * h**2 +
0.0004489 * h**3), 0)
# High latitude standard atmosphere functions (Section ITU-R P.835-6 4) #
@staticmethod
def high_latitude_temperature_summer(h):
"""Section 4.1 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h < 10)),
286.8374 - 4.7805 * h - 0.1402 * h**2,
np.where(np.logical_and((10 <= h), (h < 23)), 225,
np.where(np.logical_and((23 <= h), (h < 48)),
225 * np.exp((h - 23) * 0.008317),
np.where(np.logical_and((48 <= h), (h < 53)), 277,
np.where(np.logical_and((53 <= h), (h < 79)),
277 - (h - 53) * 4.0769,
np.where(np.logical_and((79 <= h), (h <= 100)),
171, np.nan))))))
def high_latitude_pressure_summer(self, h):
"""Section 4.1 of Recommendation ITU-R P.835-6."""
P10 = 269.6138 # Pressure at 10 km using the equation below
P72 = 0.04582115 # Pressure at 72 km using the equation below
return np.where(np.logical_and((0 <= h), (h <= 10)),
1008.0278 - 113.2494 * h + 3.9408 * h**2,
np.where(np.logical_and((10 < h), (h <= 72)),
P10 * np.exp(-0.140 * (h - 10)),
np.where(np.logical_and((72 < h), (h <= 100)),
P72 * np.exp(-0.165 * (h - 72)), np.nan)))
@staticmethod
def high_latitude_water_vapour_summer(h):
"""Section 4.1 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h <= 15)),
8.988 * np.exp(- 0.3614 * h - 0.005402 * h**2 -
0.001955 * h**3), 0)
@staticmethod
def high_latitude_temperature_winter(h):
"""Section 4.2 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h < 8.5)),
257.4345 + 2.3474 * h - 1.5479 * h**2 +
0.08473 * h**3,
np.where(np.logical_and((8.5 <= h), (h < 30)), 217.5,
np.where(np.logical_and((30 <= h), (h < 50)),
217.5 + (h - 30) * 2.125,
np.where(np.logical_and((50 <= h), (h < 54)), 260,
np.where(np.logical_and((54 <= h), (h <= 100)),
260 - (h - 54) * 1.667, np.nan)))))
def high_latitude_pressure_winter(self, h):
"""Section 4.2 of Recommendation ITU-R P.835-6."""
P10 = 243.8718 # Pressure at 10 km using the equation below
P72 = 0.02685355 # Pressure at 72 km using the equation below
return np.where(np.logical_and((0 <= h), (h <= 10)),
1010.8828 - 122.2411 * h + 4.554 * h**2,
np.where(np.logical_and((10 < h), (h <= 72)),
P10 * np.exp(-0.147 * (h - 10)),
np.where(np.logical_and((72 < h), (h <= 100)),
P72 * np.exp(-0.150 * (h - 72)), np.nan)))
@staticmethod
def high_latitude_water_vapour_winter(h):
"""Section 4.2 of Recommendation ITU-R P.835-6."""
return np.where(np.logical_and((0 <= h), (h <= 10)),
1.2319 * np.exp(0.07481 * h - 0.0981 * h**2 +
0.00281 * h**3), 0)
def temperature(self, lat, h, season='summer'):
""" Section 2 of Recommendation ITU-R P.835-6."""
if season == 'summer':
return np.where(
np.abs(lat) < 22, self.low_latitude_temperature(h),
np.where(
np.abs(lat) < 45, self.mid_latitude_temperature_summer(h),
self.high_latitude_temperature_summer(h)))
elif season == 'winter':
return np.where(
np.abs(lat) < 22, self.low_latitude_temperature(h),
np.where(
np.abs(lat) < 45, self.mid_latitude_temperature_winter(h),
self.high_latitude_temperature_winter(h)))
else:
raise ValueError("The value for argument 'season' is not correct."
"Valid values are 'summer' and 'winter'.")
def pressure(self, lat, h, season='summer'):
""" Section 2 of Recommendation ITU-R P.835-6."""
if season == 'summer':
return np.where(
np.abs(lat) < 22, self.low_latitude_pressure(h),
np.where(
np.abs(lat) < 45, self.mid_latitude_pressure_summer(h),
self.high_latitude_pressure_summer(h)))
elif season == 'winter':
return np.where(
np.abs(lat) < 22, self.low_latitude_pressure(h),
np.where(
np.abs(lat) < 45, self.mid_latitude_pressure_winter(h),
self.high_latitude_pressure_winter(h)))
else:
raise ValueError("The value for argument 'season' is not correct."
"Valid values are 'summer' and 'winter'")
def water_vapour_density(self, lat, h, season='summer'):
""" Section 2 of Recommendation ITU-R P.835-6."""
if season == 'summer':
return np.where(
np.abs(lat) < 22, self.low_latitude_water_vapour(h),
np.where(
np.abs(lat) < 45, self.mid_latitude_water_vapour_summer(h),
self.high_latitude_water_vapour_summer(h)))
elif season == 'winter':
return np.where(
np.abs(lat) < 22, self.low_latitude_water_vapour(h),
np.where(
np.abs(lat) < 45, self.mid_latitude_water_vapour_winter(h),
self.high_latitude_water_vapour_winter(h)))
else:
raise ValueError("The value for argument 'season' is not correct."
"Valid values are 'summer' and 'winter'")
class _ITU835_5():
    """Implementation of version 5 of recommendation ITU-R P.835 (02/2012).

    Provides the reference standard atmospheres — temperature (K),
    pressure (hPa) and water-vapour density (g/m^3) as a function of
    geometric height (km) — for the mean annual global profile and for
    the low-, mid- and high-latitude seasonal profiles.
    """
    def __init__(self):
        self.__version__ = 5
        self.year = 2012
        self.month = 2
        self.link = 'https://www.itu.int/rec/R-REC-P.835-5-201202-I/en'
    @staticmethod
    def standard_temperature(h, T_0=288.15):
        """Mean annual global reference temperature (K) at height h (km).

        Linear interpolation between the layer-boundary temperature
        offsets, shifted so that the surface temperature equals T_0.
        """
        H = np.array([0, 11, 20, 32, 47, 51, 71, 85])
        T = np.array([0, -65, -65, -53, -11, -11, -67, -95]) + T_0
        return np.interp(h, H, T)
    @staticmethod
    def standard_pressure(h, T_0=288.15, P_0=1013.25):
        """Mean annual global reference pressure (hPa) at height h (km).

        Integrates the hydrostatic relation layer by layer: the power-law
        form for layers with non-zero lapse rate L, the exponential form
        for isothermal layers.
        NOTE(review): assumes h is indexable (array-like) when above the
        surface layer (h[ret_i] below) — confirm scalar inputs are wrapped
        by the caller.
        """
        H = [0, 11, 20, 32, 47, 51, 71, 85]
        L = [-6.5, 0, 1, 2.8, 0, -2.8, -2]
        T = np.array([0, -65, -65, -53, -11, -11, -67, -95]) + T_0
        # Number of full layers below each requested height (capped at 7).
        num_splits = np.minimum(np.searchsorted(H, h), 7)
        if not hasattr(num_splits, '__iter__'):
            num_splits = list([num_splits])
        ret = np.ones_like(h) * P_0
        for ret_i, n in enumerate(num_splits):
            n = n.squeeze()
            P = np.zeros((n + 1))
            P[0] = P_0
            for i in range(n):
                # Integrate up to the layer top, or to h itself in the
                # last (partial) layer.
                h_p = h[ret_i] if i == (n - 1) else H[i + 1]
                if L[i] != 0:
                    P[i + 1] = P[i] * \
                        (T[i] / (T[i] + L[i] * (h_p - H[i])))**(34.163 / L[i])
                else:
                    # NOTE(review): 34.162 here vs 34.163 in the branch
                    # above — P.835 uses the same constant in both; confirm.
                    P[i + 1] = P[i] * np.exp(-34.162 * (h_p - H[i]) / T[i])
            ret[ret_i] = P[-1]
        return ret
    @staticmethod
    def standard_water_vapour_density(h, h_0=2, rho_0=7.5):
        """Mean annual global water-vapour density (g/m^3): exponential
        decay with scale height h_0 (km) from surface value rho_0.
        """
        return rho_0 * np.exp(-h / h_0)
    def standard_water_vapour_pressure(self, h, h_0=2, rho_0=7.5):
        """Water-vapour pressure (hPa) from the standard density profile
        and the standard temperature at height h.
        """
        rho_h = self.standard_water_vapour_density(h, h_0, rho_0)
        T_h = self.standard_temperature(h)
        return rho_h * T_h / 216.7
    # Low latitude standard atmosphere functions (Section ITU-R P.835 5-2) #
    @staticmethod
    def low_latitude_temperature(h):
        """Section 2 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h < 17)),
                        300.4222 - 6.3533 * h + 0.005886 * h**2,
                        np.where(np.logical_and((17 <= h), (h < 47)),
                                 194 + (h - 17) * 2.533,
                                 np.where(np.logical_and((47 <= h), (h < 52)), 270,
                                          np.where(np.logical_and((52 <= h), (h < 80)),
                                                   270 - (h - 52) * 3.0714,
                                                   np.where(np.logical_and((80 <= h), (h <= 100)), 184, np.nan)))))
    def low_latitude_pressure(self, h):
        """Section 2 of Recommendation ITU-R P.835-5."""
        P10 = 284.8526  # Pressure at 10 km using the equation below
        P72 = 0.03136608  # Pressure at 72 km using the equation below
        return np.where(np.logical_and((0 <= h), (h <= 10)),
                        1012.0306 - 109.0338 * h + 3.6316 * h**2,
                        np.where(np.logical_and((10 < h), (h <= 72)),
                                 P10 * np.exp(-0.147 * (h - 10)),
                                 np.where(np.logical_and((72 < h), (h <= 100)),
                                          P72 * np.exp(-0.165 * (h - 72)), np.nan)))
    @staticmethod
    def low_latitude_water_vapour(h):
        """Section 3.1 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h <= 15)), 19.6542 *
                        np.exp(- 0.2313 * h - 0.1122 * h**2 + 0.01351 * h**3 -
                               0.0005923 * h**4), 0)
    # Mid latitude standard atmosphere functions (Section ITU-R P.835-6 3)
    @staticmethod
    def mid_latitude_temperature_summer(h):
        """Section 3.1 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h < 13)),
                        294.9838 - 5.2159 * h - 0.07109 * h**2,
                        np.where(np.logical_and((13 <= h), (h < 17)), 215.15,
                                 np.where(np.logical_and((17 <= h), (h < 47)),
                                          215.15 * np.exp((h - 17) * 0.008128),
                                          np.where(np.logical_and((47 <= h), (h < 53)), 275,
                                                   np.where(np.logical_and((53 <= h), (h < 80)),
                                                            275 + 20 * (1 - np.exp((h - 53) * 0.06)),
                                                            np.where(np.logical_and((80 <= h), (h <= 100)),
                                                                     175, np.nan))))))
    def mid_latitude_pressure_summer(self, h):
        """Section 3.1 of Recommendation ITU-R P.835-5."""
        P10 = 283.7096  # Pressure at 10 km using the equation below
        P72 = 0.031240222  # Pressure at 72 km using the equation below
        return np.where(
            np.logical_and((0 <= h), (h <= 10)),
            1012.8186 - 111.5569 * h + 3.8646 * h**2, np.where(
                np.logical_and((10 < h), (h <= 72)),
                P10 * np.exp(-0.147 * (h - 10)),
                np.where(
                    np.logical_and((72 < h), (h <= 100)),
                    P72 * np.exp(-0.165 * (h - 72)),
                    np.nan)))
    @staticmethod
    def mid_latitude_water_vapour_summer(h):
        """Section 3.1 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h <= 15)),
                        14.3542 * np.exp(- 0.4174 * h - 0.02290 * h**2 +
                                         0.001007 * h**3), 0)
    @staticmethod
    def mid_latitude_temperature_winter(h):
        """Section 3.2 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h < 10)),
                        272.7241 - 3.6217 * h - 0.1759 * h**2,
                        np.where(np.logical_and((10 <= h), (h < 33)), 218,
                                 np.where(np.logical_and((33 <= h), (h < 47)),
                                          218 + (h - 33) * 3.3571,
                                          np.where(np.logical_and((47 <= h), (h < 53)), 265,
                                                   np.where(np.logical_and((53 <= h), (h < 80)),
                                                            265 - (h - 53) * 2.0370,
                                                            np.where(np.logical_and((80 <= h), (h <= 100)),
                                                                     210, np.nan))))))
    def mid_latitude_pressure_winter(self, h):
        """Section 3.2 of Recommendation ITU-R P.835-5."""
        P10 = 258.9787  # Pressure at 10 km using the equation below
        P72 = 0.02851702  # Pressure at 72 km using the equation below
        return np.where(np.logical_and((0 <= h), (h <= 10)),
                        1018.8627 - 124.2954 * h + 4.8307 * h**2,
                        np.where(np.logical_and((10 < h), (h <= 72)),
                                 P10 * np.exp(-0.147 * (h - 10)),
                                 np.where(np.logical_and((72 < h), (h <= 100)),
                                          P72 * np.exp(-0.155 * (h - 72)), np.nan)))
    @staticmethod
    def mid_latitude_water_vapour_winter(h):
        """Section 3.2 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and(0 <= h, h <= 10),
                        3.4742 * np.exp(- 0.2697 * h - 0.03604 * h**2 +
                                        0.0004489 * h**3), 0)
    # High latitude standard atmosphere functions (Section ITU-R P.835-5 4) #
    @staticmethod
    def high_latitude_temperature_summer(h):
        """Section 4.1 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h < 10)),
                        286.8374 - 4.7805 * h - 0.1402 * h**2,
                        np.where(np.logical_and((10 <= h), (h < 23)), 225,
                                 np.where(np.logical_and((23 <= h), (h < 48)),
                                          225 * np.exp((h - 23) * 0.008317),
                                          np.where(np.logical_and((48 <= h), (h < 53)), 277,
                                                   np.where(np.logical_and((53 <= h), (h < 79)),
                                                            277 - (h - 53) * 4.0769,
                                                            np.where(np.logical_and((79 <= h), (h <= 100)),
                                                                     171, np.nan))))))
    def high_latitude_pressure_summer(self, h):
        """Section 4.1 of Recommendation ITU-R P.835-5."""
        P10 = 269.6138  # Pressure at 10 km using the equation below
        P72 = 0.04582115  # Pressure at 72 km using the equation below
        return np.where(np.logical_and((0 <= h), (h <= 10)),
                        1008.0278 - 113.2494 * h + 3.9408 * h**2,
                        np.where(np.logical_and((10 < h), (h <= 72)),
                                 P10 * np.exp(-0.140 * (h - 10)),
                                 np.where(np.logical_and((72 < h), (h <= 100)),
                                          P72 * np.exp(-0.165 * (h - 72)), np.nan)))
    @staticmethod
    def high_latitude_water_vapour_summer(h):
        """Section 4.1 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h <= 15)),
                        8.988 * np.exp(- 0.3614 * h - 0.005402 * h**2 -
                                       0.001955 * h**3), 0)
    @staticmethod
    def high_latitude_temperature_winter(h):
        """Section 4.2 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h < 8.5)),
                        257.4345 + 2.3474 * h - 1.5479 * h**2 +
                        0.08473 * h**3,
                        np.where(np.logical_and((8.5 <= h), (h < 30)), 217.5,
                                 np.where(np.logical_and((30 <= h), (h < 50)),
                                          217.5 + (h - 30) * 2.125,
                                          np.where(np.logical_and((50 <= h), (h < 54)), 260,
                                                   np.where(np.logical_and((54 <= h), (h <= 100)),
                                                            260 - (h - 54) * 1.667, np.nan)))))
    def high_latitude_pressure_winter(self, h):
        """Section 4.2 of Recommendation ITU-R P.835-5."""
        P10 = 243.8718  # Pressure at 10 km using the equation below
        P72 = 0.02685355  # Pressure at 72 km using the equation below
        return np.where(np.logical_and((0 <= h), (h <= 10)),
                        1010.8828 - 122.2411 * h + 4.554 * h**2,
                        np.where(np.logical_and((10 < h), (h <= 72)),
                                 P10 * np.exp(-0.147 * (h - 10)),
                                 np.where(np.logical_and((72 < h), (h <= 100)),
                                          P72 * np.exp(-0.150 * (h - 72)), np.nan)))
    @staticmethod
    def high_latitude_water_vapour_winter(h):
        """Section 4.2 of Recommendation ITU-R P.835-5."""
        return np.where(np.logical_and((0 <= h), (h <= 10)),
                        1.2319 * np.exp(0.07481 * h - 0.0981 * h**2 +
                                        0.00281 * h**3), 0)
    def temperature(self, lat, h, season='summer'):
        """ Section 2 of Recommendation ITU-R P.835-5.

        NOTE(review): unlike the P.835-6 implementation, any season other
        than 'summer' silently falls back to the winter profiles here.
        """
        if season == 'summer':
            return np.where(
                np.abs(lat) < 22, self.low_latitude_temperature(h),
                np.where(
                    np.abs(lat) < 45, self.mid_latitude_temperature_summer(h),
                    self.high_latitude_temperature_summer(h)))
        else:
            return np.where(
                np.abs(lat) < 22, self.low_latitude_temperature(h),
                np.where(
                    np.abs(lat) < 45, self.mid_latitude_temperature_winter(h),
                    self.high_latitude_temperature_winter(h)))
    def pressure(self, lat, h, season='summer'):
        """ Section 2, 3, 4 of Recommendation ITU-R P.835-5."""
        if season == 'summer':
            return np.where(
                np.abs(lat) < 22, self.low_latitude_pressure(h),
                np.where(
                    np.abs(lat) < 45, self.mid_latitude_pressure_summer(h),
                    self.high_latitude_pressure_summer(h)))
        else:
            return np.where(
                np.abs(lat) < 22, self.low_latitude_pressure(h),
                np.where(
                    np.abs(lat) < 45, self.mid_latitude_pressure_winter(h),
                    self.high_latitude_pressure_winter(h)))
    def water_vapour_density(self, lat, h, season='summer'):
        """ Section 2 of Recommendation ITU-R P.835-5."""
        if season == 'summer':
            return np.where(
                np.abs(lat) < 22, self.low_latitude_water_vapour(h),
                np.where(
                    np.abs(lat) < 45, self.mid_latitude_water_vapour_summer(h),
                    self.high_latitude_water_vapour_summer(h)))
        else:
            return np.where(
                np.abs(lat) < 22, self.low_latitude_water_vapour(h),
                np.where(
                    np.abs(lat) < 45, self.mid_latitude_water_vapour_winter(h),
                    self.high_latitude_water_vapour_winter(h)))
# Module-level singleton holding the active ITU-R P.835 implementation;
# reassigned by change_version() and read by the module-level wrappers.
__model = __ITU835__()
def change_version(new_version):
    """
    Change the version of the ITU-R P.835 recommendation currently being used.
    This function changes the model used for the ITU-R P.835 recommendation
    to a different version.
    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:
        * 6: Activates recommendation ITU-R P.835-6 (12/17) (Current version)
        * 5: Activates recommendation ITU-R P.835-5 (02/12) (Superseded)
    """
    global __model
    # Replace the module-level singleton; __ITU835__ raises ValueError for
    # unsupported version numbers.
    __model = __ITU835__(new_version)
def get_version():
    """The version of the model currently in use for the ITU-R P.835 recommendation.
    Obtain the version of the ITU-R P.835 recommendation currently being used.
    Returns
    -------
    version: int
        The version of the ITU-R P.835 recommendation being used.
    """
    global __model
    # Delegates to the active singleton set up at import time / by
    # change_version().
    return __model.__version__
def temperature(lat, h, season='summer'):
    """ Determine the temperature at a given latitude and height.

    Method to determine the temperature as a function of altitude and
    latitude, for calculating gaseous attenuation along an Earth-space
    path. This method is recommended when more reliable local data are
    not available.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    h : number or Quantity
        Height (km)
    season : string
        Season of the year (available values, 'summer', and 'winter').
        Default 'summer'

    Returns
    -------
    T: Quantity
        Temperature (K)

    References
    ----------
    [1] Reference Standard Atmospheres
    https://www.itu.int/rec/R-REC-P.835/en
    """
    global __model
    # Remember the caller's container type so the result can be returned
    # in the same shape.
    output_type = get_input_type(lat)
    lats = prepare_input_array(lat)
    height = prepare_quantity(h, u.km, 'Height')
    values = __model.temperature(lats, height, season)
    return prepare_output_array(values, output_type) * u.Kelvin
def pressure(lat, h, season='summer'):
    """ Determine the atmospheric pressure at a given latitude and height.

    Method to determine the pressure as a function of altitude and
    latitude, for calculating gaseous attenuation along an Earth-space
    path. This method is recommended when more reliable local data are
    not available.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    h : number or Quantity
        Height (km)
    season : string
        Season of the year (available values, 'summer', and 'winter').
        Default 'summer'

    Returns
    -------
    P: Quantity
        Pressure (hPa)

    References
    ----------
    [1] Reference Standard Atmospheres
    https://www.itu.int/rec/R-REC-P.835/en
    """
    global __model
    # Remember the caller's container type so the result can be returned
    # in the same shape.
    output_type = get_input_type(lat)
    lats = prepare_input_array(lat)
    height = prepare_quantity(h, u.km, 'Height')
    values = __model.pressure(lats, height, season)
    return prepare_output_array(values, output_type) * u.hPa
def water_vapour_density(lat, h, season='summer'):
""" Determine the water vapour density at a given latitude and height.
Method to determine the water-vapour density as a
function of altitude and latitude, for calculating gaseous attenuation
along an Earth-space path. This method is recommended when more reliable
local data are not available.
Parameters
----------
lat : number, sequence, or numpy.ndarray
Latitudes of the receiver points
h : number or Quantity
Height (km)
season : string
Season of the year (available values, 'summer', and 'winter').
Default 'summer'
Returns
-------
rho: Quantity
Water vapour density (g/m^3)
References
----------
[1] Reference Standard Atmospheres
https://www.itu.int/rec/R-REC-P.835/en
"""
global __model
type_output = get_input_type(lat)
lat = prepare_input_array(lat)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu618.py | itur/models/itu618.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import scipy.stats as stats
import scipy.special
import scipy.integrate
from astropy import units as u
from itur.models.itu453 import water_vapour_pressure,\
wet_term_radio_refractivity, map_wet_term_radio_refractivity
from itur.models.itu837 import rainfall_rate, rainfall_probability
from itur.models.itu838 import rain_specific_attenuation
from itur.models.itu839 import rain_height
from itur.models.itu1511 import topographic_altitude
from itur.utils import prepare_input_array, prepare_output_array,\
prepare_quantity, compute_distance_earth_to_earth, get_input_type, EPSILON
import warnings
def __CDF_bivariate_normal__(alpha_x, alpha_y, rho):
    # Complementary CDF of the standard bivariate normal distribution:
    # Pr(X > alpha_x, Y > alpha_y) for correlation coefficient rho,
    # computed by nested numerical quadrature over [alpha, inf).
    one_minus_rho2 = 1 - rho**2

    def kernel(x, y):
        return np.exp(-(x**2 - 2 * rho * x * y + y**2) /
                      (2. * one_minus_rho2))

    def inner_integral(y):
        return scipy.integrate.quad(kernel, alpha_x, np.inf, args=(y,))[0]

    outer = scipy.integrate.quad(inner_integral, alpha_y, np.inf)[0]
    return outer / (2 * np.pi * np.sqrt(one_minus_rho2))
class _ITU618():
    """
    Propagation data and prediction methods required for the design of
    Earth-space telecommunication systems.
    Available versions include:
    * P.618-13 (12/17) (Current version)
    * P.618-12 (07/15) (Superseded)
    Versions that need to be implemented
    * P.618-11
    * P.618-10
    * P.618-09
    * P.618-08
    * P.618-07
    * P.618-06
    * P.618-05
    * P.618-04
    * P.618-03
    * P.618-02
    * P.618-01
    Recommendation ITU-R P.618 provides methods to estimate the propagation
    loss on an Earth-space path, relative to the free-space loss. This value
    is the sum of different contributions as follows:
    * attenuation by atmospheric gases;
    * attenuation by rain, other precipitation and clouds;
    * focusing and defocusing;
    * decrease in antenna gain due to wave-front incoherence;
    * scintillation and multipath effects;
    * attenuation by sand and dust storms.
    Each of these contributions has its own characteristics as a function of
    frequency, geographic location and elevation angle. As a rule, at elevation
    angles above 10°, only gaseous attenuation, rain and cloud attenuation and
    possibly scintillation will be significant, depending on propagation
    conditions.
    """
    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.618 recommendation.
    def __init__(self, version=13):
        # Dispatch to the concrete implementation of the requested version.
        if version == 13:
            self.instance = _ITU618_13()
        elif version == 12:
            self.instance = _ITU618_12()
        # elif version == 11:
        #     self.instance = _ITU618_11()
        # elif version == 10:
        #     self.instance = _ITU618_10()
        # elif version == 9:
        #     self.instance = _ITU618_9()
        # elif version == 8:
        #     self.instance = _ITU618_8()
        # elif version == 7:
        #     self.instance = _ITU618_7()
        # elif version == 6:
        #     self.instance = _ITU618_6()
        # elif version == 5:
        #     self.instance = _ITU618_5()
        # elif version == 4:
        #     self.instance = _ITU618_4()
        # elif version == 3:
        #     self.instance = _ITU618_3()
        # elif version == 2:
        #     self.instance = _ITU618_2()
        # elif version == 1:
        #     self.instance = _ITU618_1()
        else:
            raise ValueError(
                f"Version {version} is not implemented" " for the ITU-R P.618 model."
            )
    @property
    def __version__(self):
        # Version number of the underlying implementation.
        return self.instance.__version__
    def rain_attenuation(self, lat, lon, f, el, hs=None, p=0.01, R001=None,
                         tau=45, Ls=None):
        # Vectorized wrapper; `excluded` lists the positional argument
        # indices that are passed through whole instead of broadcast.
        fcn = np.vectorize(self.instance.rain_attenuation,
                           excluded=[0, 1, 3, 4, 6], otypes=[np.ndarray])
        return np.array(fcn(lat, lon, f, el, hs, p, R001, tau, Ls).tolist())
    def rain_attenuation_probability(self, lat, lon, el, hs, Ls, P0=None):
        # Vectorized wrapper around the per-point probability computation.
        fcn = np.vectorize(self.instance.rain_attenuation_probability,
                           excluded=[0, 1, 2], otypes=[np.ndarray])
        return np.array(fcn(lat, lon, el, hs, Ls, P0).tolist())
    def rain_cross_polarization_discrimination(self, Ap, f, el, p, tau):
        fcn = np.vectorize(
            self.instance.rain_cross_polarization_discrimination)
        return fcn(Ap, f, el, p, tau)
    def scintillation_attenuation(self, lat, lon, f, el, p, D, eta,
                                  T, H, P, hL):
        fcn = np.vectorize(self.instance.scintillation_attenuation,
                           excluded=[0, 1, 3, 7, 8, 9], otypes=[np.ndarray])
        return np.array(fcn(lat, lon, f, el, p, D, eta, T, H, P, hL).tolist())
    def scintillation_attenuation_sigma(self, lat, lon, f, el, p, D, eta,
                                        T, H, P, hL):
        fcn = np.vectorize(self.instance.scintillation_attenuation_sigma,
                           excluded=[0, 1, 3, 7, 8, 9], otypes=[np.ndarray])
        return np.array(fcn(lat, lon, f, el, p, D, eta, T, H, P, hL).tolist())
    def fit_rain_attenuation_to_lognormal(self, lat, lon, f, el, hs, P_k, tau):
        fcn = np.vectorize(self.instance.fit_rain_attenuation_to_lognormal)
        return fcn(lat, lon, f, el, hs, P_k, tau)
    def site_diversity_rain_outage_probability(self, lat1, lon1, a1, el1,
                                               lat2, lon2, a2, el2, f, tau=45,
                                               hs1=None, hs2=None):
        # Arguments are forwarded positionally; the concrete implementation
        # must accept them in this same order.
        fcn = np.vectorize(
            self.instance.site_diversity_rain_outage_probability)
        return np.array(fcn(lat1, lon1, a1, el1,
                            lat2, lon2, a2, el2,
                            f, tau, hs1, hs2).tolist())
class _ITU618_13():
    def __init__(self):
        # Version of the implemented recommendation (ITU-R P.618-13).
        self.__version__ = 13
    @classmethod
    def rain_attenuation(self, lat, lon, f, el, hs=None, p=0.01, R001=None,
                         tau=45, Ls=None):
        """Rain attenuation (dB) exceeded for p% of an average year.

        Step-by-step slant-path procedure of ITU-R P.618-13: lat/lon (deg),
        f (GHz), el (deg), hs station altitude (km, from P.1511 when None),
        p exceedance probability (%), R001 rainfall rate exceeded 0.01% of
        the time (mm/h, from P.837 when None), tau polarization tilt (deg),
        Ls slant-path length (km, computed when None).

        NOTE(review): declared @classmethod but the first parameter is named
        ``self``; it receives the class object. The scalar comparison
        ``p >= 1`` below presumably relies on the _ITU618 wrapper passing
        scalar p values via np.vectorize — confirm.
        """
        if np.logical_or(p < 0.001, p > 5).any():
            warnings.warn(
                RuntimeWarning('The method to compute the rain attenuation in '
                               'recommendation ITU-P 618-12 is only valid for '
                               'unavailability values between 0.001 and 5'))
        Re = 8500  # Efective radius of the Earth (8500 km)
        if hs is None:
            hs = topographic_altitude(lat, lon).to(u.km).value
        # Step 1: Compute the rain height (hr) based on ITU - R P.839
        hr = rain_height(lat, lon).value
        # Step 2: Compute the slant path length
        if Ls is None:
            Ls = np.where(
                el >= 5, (hr - hs) / (np.sin(np.deg2rad(el))),  # Eq. 1
                2 * (hr - hs) / (((np.sin(np.deg2rad(el)))**2 +
                                  2 * (hr - hs) / Re)**0.5 + (np.sin(np.deg2rad(el)))))  # Eq. 2
        # Step 3: Calculate the horizontal projection, LG, of the
        # slant-path length
        Lg = np.abs(Ls * np.cos(np.deg2rad(el)))
        # Obtain the raingall rate, exceeded for 0.01% of an average year,
        # if not provided, as described in ITU-R P.837.
        if R001 is None:
            R001 = rainfall_rate(lat, lon, 0.01).to(u.mm / u.hr).value + EPSILON
        # Step 5: Obtain the specific attenuation gammar using the frequency
        # dependent coefficients as given in ITU-R P.838
        # https://www.itu.int/dms_pubrec/itu-r/rec/p/R-REC-P.838-3-200503-I!!PDF-E.pdf
        gammar = rain_specific_attenuation(
            R001, f, el, tau).to(
            u.dB / u.km).value
        # Step 6: Calculate the horizontal reduction factor, r0.01,
        # for 0.01% of the time:
        r001 = 1. / (1 + 0.78 * np.sqrt(Lg * gammar / f) -
                     0.38 * (1 - np.exp(-2 * Lg)))
        # Step 7: Calculate the vertical adjustment factor, v0.01,
        # for 0.01% of the time:
        eta = np.rad2deg(np.arctan2(hr - hs, Lg * r001))
        # Guard against a non-positive rain-height/station-height difference.
        Delta_h = np.where(hr - hs <= 0, EPSILON, (hr - hs))
        Lr = np.where(eta > el, Lg * r001 / np.cos(np.deg2rad(el)),
                      Delta_h / np.sin(np.deg2rad(el)))
        xi = np.where(np.abs(lat) < 36, 36 - np.abs(lat), 0)
        v001 = 1. / (1 + np.sqrt(np.sin(np.deg2rad(el))) *
                     (31 * (1 - np.exp(-(el / (1 + xi)))) *
                      np.sqrt(Lr * gammar) / f**2 - 0.45))
        # Step 8: calculate the effective path length:
        Le = Lr * v001   # (km)
        # Step 9: The predicted attenuation exceeded for 0.01% of an average
        # year
        A001 = gammar * Le   # (dB)
        # Step 10: The estimated attenuation to be exceeded for other
        # percentages of an average year
        if p >= 1:
            beta = np.zeros_like(A001)
        else:
            beta = np.where(np.abs(lat) >= 36,
                            np.zeros_like(A001),
                            np.where((np.abs(lat) < 36) & (el > 25),
                                     -0.005 * (np.abs(lat) - 36),
                                     -0.005 * (np.abs(lat) - 36) + 1.8 -
                                     4.25 * np.sin(np.deg2rad(el))))
        A = A001 * (p / 0.01)**(
            -(0.655 + 0.033 * np.log(p) - 0.045 * np.log(A001) -
              beta * (1 - p) * np.sin(np.deg2rad(el))))
        return A
    @classmethod
    def rain_attenuation_probability(self, lat, lon, el, hs=None,
                                     Ls=None, P0=None):
        """Probability of rain attenuation occurring on a slant path.

        Combines the point rainfall probability P0 (from ITU-R P.837
        unless supplied) with a path-length-dependent spatial correlation
        via the complementary bivariate normal distribution. Returns the
        probability as a fraction (same scale as P0).
        """
        Re = 8500  # Effective Earth radius (km)
        if hs is None:
            hs = topographic_altitude(lat, lon).to(u.km).value
        # Step 1: Estimate the probability of rain, at the earth station either
        # from Recommendation ITU-R P.837 or from local measured rainfall
        # rate data
        if P0 is None:
            P0 = rainfall_probability(lat, lon).\
                to(u.dimensionless_unscaled).value
        # Step 2: Calculate the parameter alpha using the inverse of the
        # Q-function alpha = Q^{-1}(P0) -> Q(alpha) = P0
        alpha = stats.norm.ppf(1 - P0)
        # Step 3: Calculate the spatial correlation function, rho:
        hr = rain_height(lat, lon).value
        if Ls is None:
            Ls = np.where(
                el >= 5, (hr - hs) / (np.sin(np.deg2rad(el))),  # Eq. 1
                2 * (hr - hs) / (((np.sin(np.deg2rad(el)))**2 +
                                  2 * (hr - hs) / Re)**0.5 + (np.sin(np.deg2rad(el)))))  # Eq. 2
        d = Ls * np.cos(np.deg2rad(el))
        rho = 0.59 * np.exp(-abs(d) / 31) + 0.41 * np.exp(-abs(d) / 800)
        # Step 4: Calculate the complementary bivariate normal distribution
        biva_fcn = np.vectorize(__CDF_bivariate_normal__)
        c_B = biva_fcn(alpha, alpha, rho)
        # Step 5: Calculate the probability of rain attenuation on the slant
        # path:
        P = 1 - (1 - P0) * ((c_B - P0**2) / (P0 * (1 - P0)))**P0
        return P
@classmethod
def fit_rain_attenuation_to_lognormal(self, lat, lon, f, el, hs, P_k, tau):
# Performs the log-normal fit of rain attenuation vs. probability of
# occurrence for a particular path
# Step 1: Construct the set of pairs [Pi, Ai] where Pi (% of time) is
# the probability the attenuation Ai (dB) is exceeded where Pi < P_K
p_i = np.array([0.01, 0.02, 0.03, 0.05,
0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10])
Pi = np.array([p for p in p_i if p < P_k], dtype=float)
Ai = np.array([0 for p in p_i if p < P_k], dtype=float)
for i, p in enumerate(Pi):
Ai[i] = self.rain_attenuation(lat, lon, f, el, hs, p, tau=tau)
# Step 2: Transform the set of pairs [Pi, Ai] to [Q^{-1}(Pi/P_k),
# ln(Ai)]
Q = stats.norm.ppf(1 - (Pi / P_k))
lnA = np.log(Ai)
# Step 3: Determine the variables sigma_lna, m_lna by performing a
# least-squares fit to lnAi = sigma_lna Q^{-1}(Pi/P_k) + m_lna
m_lna, sigma_lna = np.linalg.lstsq(np.vstack([np.ones(len(Q)), Q]).T,
lnA, rcond=None)[0]
return sigma_lna, m_lna
@classmethod
def site_diversity_rain_outage_probability(self, lat1, lon1, a1, lat2,
lon2, a2, f, el1, el2, tau=45,
hs1=None, hs2=None):
# The diversity prediction method assumes a log-normal distribution of
# rain intensity and rain attenuation. This method predicts
# Pr(A1 > a1, A2 > a2), the joint probability (%) that the attenuation
# on the path to the first site is greater than a1 and the attenuation
# on the path to the second site is greater than a2.
d = compute_distance_earth_to_earth(lat1, lon1, lat2, lon2)
rho_r = 0.7 * np.exp(-d / 60) + 0.3 * np.exp(-(d / 700)**2)
P_1 = rainfall_probability(lat1, lon1).\
to(u.dimensionless_unscaled).value
P_2 = rainfall_probability(lat2, lon2).\
to(u.dimensionless_unscaled).value
R_1 = stats.norm.ppf(1 - P_1)
R_2 = stats.norm.ppf(1 - P_2)
biva_fcn = np.vectorize(__CDF_bivariate_normal__)
P_r = biva_fcn(R_1, R_2, rho_r)
sigma_lna1, m_lna1 = self.fit_rain_attenuation_to_lognormal(
lat1, lon1, f, el1, hs1, P_1 * 100, tau)
sigma_lna2, m_lna2 = self.fit_rain_attenuation_to_lognormal(
lat2, lon2, f, el2, hs2, P_2 * 100, tau)
rho_a = 0.94 * np.exp(-d / 30) + 0.06 * np.exp(-(d / 500)**2)
lim_1 = (np.log(a1) - m_lna1) / sigma_lna1
lim_2 = (np.log(a2) - m_lna2) / sigma_lna2
P_a = biva_fcn(lim_1, lim_2, rho_a)
return 100 * P_r * P_a
    @classmethod
    def rain_cross_polarization_discrimination(self, Ap, f, el, p, tau):
        """Cross-polarization discrimination XPD (dB) from rain attenuation.

        Ap rain attenuation (dB), f frequency (GHz), el elevation (deg),
        p time percentage (%), tau polarization tilt (deg). Valid for
        6 <= f <= 55 GHz and el <= 60 deg; for 4 <= f < 6 GHz the result
        is scaled from 6 GHz with the semi-empirical formula.

        NOTE(review): for f < 4 or f > 55 the code below only warns and
        then falls through with C_f / V unbound, raising
        UnboundLocalError — consider validating the input instead.
        """
        # Frequency reuse by means of orthogonal polarizations is often used to
        # increase the capacity of space telecommunication systems. This
        # technique is restricted, however, by depolarization on atmospheric
        # propagation paths. Various depolarization mechanisms, especially
        # hydrometeor effects, are important in the troposphere
        # The method described below to calculate cross-polarization
        # discrimination (XPD) statistics from rain attenuation statistics for
        # the same path is valid for 6 < f < 55 GHz and el < 60°.
        if f < 4 or f > 55:
            warnings.warn(
                RuntimeWarning(
                    'The method to compute the cross '
                    'polarization discrimination in recommendation '
                    'ITU-P 618-12 is only valid for frequency values between'
                    ' 4 and 55 GHz'))
        if el > 60:
            warnings.warn(
                RuntimeWarning(
                    'The method to compute thecross '
                    'polarization discrimination in recommendation ITU-P '
                    '618-12 is only valid for elevation angle values below '
                    '60 degrees'))
        # In case that the frequency is comprised between 4 and 6 GHz, scaling
        # is necessary
        scale_to_orig_f = False
        if 4 <= f < 6:
            f_orig = f
            f = 6
            scale_to_orig_f = True
        # Step 1: Calculate the frequency-dependent term:
        if 6 <= f < 9:
            C_f = 60 * np.log10(f) - 28.3
        elif 9 <= f < 36:
            C_f = 26 * np.log10(f) + 4.1
        elif 36 <= f <= 55:
            C_f = 35.9 * np.log10(f) - 11.3
        # Step 2: Calculate the rain attenuation dependent term:
        if 6 <= f < 9:
            V = 30.8 * f**-0.21
        elif 9 <= f < 20:
            V = 12.8 * f**0.19
        elif 20 <= f < 40:
            V = 22.6
        elif 40 <= f <= 55:
            V = 13.0 * f**0.15
        C_a = V * np.log10(Ap)
        # Step 3: Calculate the polarization improvement factor:
        C_tau = -10 * np.log10(1 - 0.484 * (1 + np.cos(np.deg2rad(4 * tau))))
        # Step 4: Calculate the elevation angle-dependent term:
        C_theta = -40 * np.log10(np.cos(np.deg2rad(el)))
        # Step 5: Calculate the canting angle dependent term:
        if p <= 0.001:
            C_sigma = 0.0053 * 15**2
        elif p <= 0.01:
            C_sigma = 0.0053 * 10**2
        elif p <= 0.1:
            C_sigma = 0.0053 * 5**2
        else:
            C_sigma = 0
        # Step 6: Calculate rain XPD not exceeded for p% of the time:
        XPD_rain = C_f - C_a + C_tau + C_theta + C_sigma
        # Step 7: Calculate the ice crystal dependent term:
        C_ice = XPD_rain * (0.3 + 0.1 * np.log10(p)) / 2
        # Step 8: Calculate the XPD not exceeded for p% of the time,
        # including the effects of ice:
        XPD_p = XPD_rain - C_ice
        if scale_to_orig_f:
            # Long-term XPD statistics obtained at one frequency and
            # polarization tilt angle can be scaled to another frequency and
            # polarization tilt angle using the semi-empirical formula:
            XPD_p = XPD_p - 20 * np.log10(
                f_orig * np.sqrt(1 - 0.484 * (1 + np.cos(np.deg2rad(4 * tau)))) /
                (f * np.sqrt(1 - 0.484 * (1 + np.cos(np.deg2rad(4 * tau))))))
        return XPD_p
    @classmethod
    def scintillation_attenuation_sigma(cls, lat, lon, f, el, p, D, eta=0.5,
                                        T=None, H=None, P=None, hL=1000):
        """Standard deviation of tropospheric scintillation amplitude (dB).

        Implements Steps 1-7 of the scintillation fade-depth procedure of
        ITU-R P.618.  If T, H and P are all provided, the wet term of the
        radio refractivity is computed from the local weather data;
        otherwise it is read from the digital maps of ITU-R P.453 at the
        50% exceedance level.

        NOTE(review): the parameter ``p`` is never used in this method; it
        is kept for signature symmetry with ``scintillation_attenuation``.
        """
        # Step 1: For the value of t, calculate the saturation water vapour
        # pressure, es, (hPa), as specified in Recommendation ITU-R P.453.
        if T is not None and H is not None and P is not None:
            e = water_vapour_pressure(T, P, H).value
            # Step 2: Compute the wet term of the radio refractivity, Nwet,
            # corresponding to es, t and H as given in Recommendation ITU-R
            # P.453.
            N_wet = wet_term_radio_refractivity(e, T).value
        else:
            # No local weather data: fall back to the ITU-R P.453 digital
            # map (median value, exceeded 50% of the time).
            N_wet = map_wet_term_radio_refractivity(lat, lon, 50).value
        # Step 3: Calculate the standard deviation of the reference signal
        # amplitude:
        sigma_ref = 3.6e-3 + 1e-4 * N_wet  # Eq. 43 [dB]
        # Step 4: Calculate the effective path length L (hL is the height of
        # the turbulent layer, default 1000 m):
        L = 2 * hL / (np.sqrt(np.sin(np.deg2rad(el))**2 + 2.35e-4) +
                      np.sin(np.deg2rad(el)))  # Eq. 44 [m]
        # Step 5: Estimate the effective antenna diameter, Deff, from the
        # physical diameter D and the antenna efficiency eta:
        D_eff = np.sqrt(eta) * D  # Eq. 45 [m]
        # Step 6: Calculate the antenna averaging factor (clamped to 0 for
        # x >= 7.0, where the Eq. 46 expression would go negative):
        x = 1.22 * D_eff**2 * f / L
        g = np.where(x >= 7.0, 0,
                     np.sqrt(3.86 * (x**2 + 1)**(11. / 12) *
                             np.sin(11. / 6 * np.arctan2(1, x)) -
                             7.08 * x**(5. / 6)))  # Eq. 46 [-]
        # Step 7: Calculate the standard deviation of the signal for the
        # applicable period and propagation path:
        sigma = sigma_ref * f**(7. / 12) * g / np.sin(np.deg2rad(el))**1.2
        return sigma
@classmethod
def scintillation_attenuation(cls, lat, lon, f, el, p, D, eta=0.5, T=None,
H=None, P=None, hL=1000):
# Step 1 - 7: Calculate the standard deviation of the signal for the
# applicable period and propagation path:
sigma = cls.scintillation_attenuation_sigma(lat, lon, f, el, p,
D, eta, T, H, P, hL)
# Step 8: Calculate the time percentage factor, a(p), for the time
# percentage, p, in the range between 0.01% < p < 50%:
a = -0.061 * np.log10(p)**3 + 0.072 * \
np.log10(p)**2 - 1.71 * np.log10(p) + 3
# Step 9: Calculate the fade depth, A(p), exceeded for p% of the time:
A_s = a * sigma # Eq. 49 [dB]
return A_s
class _ITU618_12():
    """Implementation of Recommendation ITU-R P.618-12 (07/15).

    Only the rain-attenuation procedure is specific to this version; every
    other method delegates to the ``_ITU618_13`` implementation.
    """

    def __init__(self):
        # Version number reported through the ``__version__`` property of
        # the dispatching model class.
        self.__version__ = 12

    @classmethod
    def rain_attenuation(self, lat, lon, f, el, hs=None, p=0.01, R001=None,
                         tau=45, Ls=None):
        """Slant-path rain attenuation (dB) exceeded for ``p``% of an
        average year, following the step-by-step procedure of Section
        2.2.1.1 of ITU-R P.618-12.

        NOTE(review): decorated as a classmethod but the first parameter is
        named ``self``; it actually receives the class object.
        """
        if p < 0.001 or p > 5:
            warnings.warn(
                RuntimeWarning('The method to compute the rain attenuation in '
                               'recommendation ITU-P 618-12 is only valid for '
                               'unavailability values between 0.001% and 5%'))
        Re = 8500  # Effective radius of the Earth (km)
        if hs is None:
            # Earth-station altitude from the ITU-R P.1511 topographic maps
            hs = topographic_altitude(lat, lon).to(u.km).value
        # Step 1: Compute the rain height (hr) based on ITU-R P.839
        hr = rain_height(lat, lon).value
        # Step 2: Compute the slant path length below the rain height
        if Ls is None:
            Ls = np.where(
                el >= 5, (hr - hs) / (np.sin(np.deg2rad(el))),  # Eq. 1
                2 * (hr - hs) / (((np.sin(np.deg2rad(el)))**2 +
                                  2 * (hr - hs) / Re)**0.5 +
                                 (np.sin(np.deg2rad(el)))))  # Eq. 2
        # Step 3: Calculate the horizontal projection, LG, of the
        # slant-path length
        Lg = np.abs(Ls * np.cos(np.deg2rad(el)))
        # Step 4: Obtain the rainfall rate, exceeded for 0.01% of an average
        # year, if not provided, as described in ITU-R P.837.  EPSILON is
        # added presumably to avoid log(0) in Step 10 when the map value is
        # exactly zero — TODO confirm.
        if R001 is None:
            R001 = rainfall_rate(lat, lon, 0.01).to(u.mm / u.hr).value + EPSILON
        # Step 5: Obtain the specific attenuation gammar using the frequency
        # dependent coefficients as given in ITU-R P.838
        gammar = rain_specific_attenuation(
            R001, f, el, tau).to(
            u.dB / u.km).value
        # Step 6: Calculate the horizontal reduction factor, r0.01,
        # for 0.01% of the time:
        r001 = 1. / (1 + 0.78 * np.sqrt(Lg * gammar / f) -
                     0.38 * (1 - np.exp(-2 * Lg)))
        # Step 7: Calculate the vertical adjustment factor, v0.01,
        # for 0.01% of the time:
        eta = np.rad2deg(np.arctan2(hr - hs, Lg * r001))
        # Guard: replace a non-positive (hr - hs) with EPSILON before it is
        # used as a numerator below.
        Delta_h = np.where(hr - hs <= 0, EPSILON, (hr - hs))
        Lr = np.where(eta > el, Lg * r001 / np.cos(np.deg2rad(el)),
                      Delta_h / np.sin(np.deg2rad(el)))
        xi = np.where(np.abs(lat) < 36, 36 - np.abs(lat), 0)
        v001 = 1. / (1 + np.sqrt(np.sin(np.deg2rad(el))) *
                     (31 * (1 - np.exp(-(el / (1 + xi)))) *
                      np.sqrt(Lr * gammar) / f**2 - 0.45))
        # Step 8: calculate the effective path length:
        Le = Lr * v001  # (km)
        # Step 9: The predicted attenuation exceeded for 0.01% of an average
        # year
        A001 = gammar * Le  # (dB)
        # Step 10: The estimated attenuation to be exceeded for other
        # percentages of an average year; beta depends on latitude,
        # elevation angle and the requested percentage.
        if p >= 1:
            beta = np.zeros_like(A001)
        else:
            beta = np.where(np.abs(lat) >= 36,
                            np.zeros_like(A001),
                            np.where((np.abs(lat) < 36) & (el > 25),
                                     -0.005 * (np.abs(lat) - 36),
                                     -0.005 * (np.abs(lat) - 36) + 1.8 -
                                     4.25 * np.sin(np.deg2rad(el))))
        A = A001 * (p / 0.01)**(-(0.655 + 0.033 * np.log(p) -
                                  0.045 * np.log(A001) -
                                  beta * (1 - p) * np.sin(np.deg2rad(el))))
        return A

    @classmethod
    def rain_attenuation_probability(self, *args, **kwargs):
        # Delegate to the P.618-13 implementation.
        return _ITU618_13.rain_attenuation_probability(*args, **kwargs)

    @classmethod
    def fit_rain_attenuation_to_lognormal(self, *args, **kwargs):
        # Delegate to the P.618-13 implementation.
        return _ITU618_13.fit_rain_attenuation_to_lognormal(*args, **kwargs)

    @classmethod
    def site_diversity_rain_outage_probability(self, *args, **kwargs):
        # Delegate to the P.618-13 implementation.
        return _ITU618_13.site_diversity_rain_outage_probability(*args,
                                                                 **kwargs)

    @classmethod
    def rain_cross_polarization_discrimination(self, *args, **kwargs):
        # Delegate to the P.618-13 implementation.
        return _ITU618_13.rain_cross_polarization_discrimination(*args,
                                                                 **kwargs)

    @classmethod
    def scintillation_attenuation(self, *args, **kwargs):
        # Delegate to the P.618-13 implementation.
        return _ITU618_13.scintillation_attenuation(*args, **kwargs)
# Module-level singleton holding the active ITU-R P.618 model version;
# replaced by ``change_version`` and queried by the public wrapper functions.
__model = _ITU618()
def change_version(new_version):
    """Switch the ITU-R P.618 model to a different recommendation version.

    Rebinds the module-level model singleton used by all public wrappers.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:

        * 13: Activates recommendation ITU-R P.618-13 (12/17) (Current version)
        * 12: Activates recommendation ITU-R P.618-12 (07/15) (Superseded)
    """
    global __model
    __model = _ITU618(new_version)
def get_version():
    """Return the version of the ITU-R P.618 recommendation in use.

    Returns
    -------
    version : int
        The version of the ITU-R P.618 recommendation being used.
    """
    return __model.__version__
def rain_attenuation(lat, lon, f, el, hs=None, p=0.01, R001=None,
                     tau=45, Ls=None):
    """
    Calculation of long-term rain attenuation statistics from point rainfall
    rate.

    The following procedure provides estimates of the long-term statistics of
    the slant-path rain attenuation at a given location for frequencies up
    to 55 GHz.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    f : number
        Frequency (GHz)
    el : sequence, or number
        Elevation angle (degrees)
    hs : number, sequence, or numpy.ndarray, optional
        Height above mean sea level of the earth station (km). If local data
        for the earth station height above mean sea level is not available,
        an estimate is obtained from the maps of topographic altitude
        given in Recommendation ITU-R P.1511.
    p : number, optional
        Percentage of the time the rain attenuation value is exceeded.
    R001 : number, optional
        Point rainfall rate for the location for 0.01% of an average year
        (mm/h). If not provided, an estimate is obtained from
        Recommendation ITU-R P.837. Some useful values:

        * 0.25 mm/h : Drizzle
        * 2.5  mm/h : Light rain
        * 12.5 mm/h : Medium rain
        * 25.0 mm/h : Heavy rain
        * 50.0 mm/h : Downpour
        * 100  mm/h : Tropical
        * 150  mm/h : Monsoon
    tau : number, optional
        Polarization tilt angle relative to the horizontal (degrees)
        (tau = 45 deg for circular polarization). Default value is 45
    Ls : number, optional
        Slant path length (km). If not provided, it will be computed using
        the rain height and the elevation angle. The ITU model does not
        require this parameter as an input.

    Returns
    -------
    attenuation : Quantity
        Attenuation due to rain (dB)

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    Earth-space telecommunication systems:
    https://www.itu.int/dms_pubrec/itu-r/rec/p/R-REC-P.618-12-201507-I!!PDF-E.pdf
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    lon = prepare_input_array(lon)
    # Normalize longitudes to the [0, 360) convention used by the ITU maps.
    lon = np.mod(lon, 360)
    f = prepare_quantity(f, u.GHz, 'Frequency')
    el = prepare_quantity(prepare_input_array(el), u.deg, 'Elevation angle')
    hs = prepare_quantity(
        hs, u.km, 'Heigh above mean sea level of the earth station')
    R001 = prepare_quantity(R001, u.mm / u.hr, 'Point rainfall rate')
    # NOTE(review): tau is documented in degrees but is prepared as
    # dimensionless (u.one) — confirm whether u.deg was intended.
    tau = prepare_quantity(tau, u.one, 'Polarization tilt angle')
    Ls = prepare_quantity(Ls, u.km, 'Slant path length')
    val = __model.rain_attenuation(lat, lon, f, el, hs=hs, p=p,
                                   R001=R001, tau=tau, Ls=Ls)
    # The values of attenuation cannot be negative. The ITU models end up
    # giving out negative values for certain inputs
    val[val < 0] = 0
    return prepare_output_array(val, type_output) * u.dB
def rain_attenuation_probability(lat, lon, el, hs=None, Ls=None, P0=None):
    """
    Compute the probability of non-zero rain attenuation on a given slant
    path, Pr(Ar > 0).

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    el : sequence, or number
        Elevation angle (degrees)
    hs : number, sequence, or numpy.ndarray, optional
        Height above mean sea level of the earth station (km). If local data
        for the earth station height above mean sea level is not available,
        an estimate is obtained from the maps of topographic altitude
        given in Recommendation ITU-R P.1511.
    Ls : number, sequence, or numpy.ndarray, optional
        Slant path length from the earth station to the rain height (km). If
        data about the rain height is not available, this value is estimated
        automatically using Recommendation ITU-R P.839
    P0 : number, sequence, or numpy.ndarray, optional
        Probability of rain at the earth station, (0 ≤ P0 ≤ 1)

    Returns
    -------
    p : Quantity
        Probability of rain attenuation on the slant path (%)

    References
    ----------
    [1] Propagation data and prediction methods required for the design of
    Earth-space telecommunication systems:
    https://www.itu.int/dms_pubrec/itu-r/rec/p/R-REC-P.618-12-201507-I!!PDF-E.pdf
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    lon = prepare_input_array(lon)
    # Normalize longitudes to the [0, 360) convention used by the ITU maps.
    lon = np.mod(lon, 360)
    el = prepare_quantity(prepare_input_array(el), u.deg, 'Elevation angle')
    hs = prepare_quantity(
        hs, u.km, 'Heigh above mean sea level of the earth station')
    # Fixed copy-paste defects: Ls and P0 previously reused the descriptions
    # of other quantities in their unit-conversion error messages.
    Ls = prepare_quantity(Ls, u.km, 'Slant path length')
    P0 = prepare_quantity(P0, u.pct, 'Probability of rain at the earth station')
    val = __model.rain_attenuation_probability(lat, lon, el, hs, Ls, P0)
    # The model returns a unit fraction; express the result as a percentage.
    return prepare_output_array(val, type_output) * 100 * u.pct
def site_diversity_rain_outage_probability(lat1, lon1, a1, el1, lat2,
lon2, a2, el2, f, tau=45, hs1=None,
hs2=None):
"""
Calculate the link outage probability in a diversity based scenario (two
ground stations) due to rain attenuation. This method is valid for
frequencies below 20 GHz, as at higher frequencies other impairments might
affect affect site diversity performance.
This method predicts Pr(A1 > a1, A2 > a2), the joint probability (%) that
the attenuation on the path to the first site is greater than a1 and the
attenuation on the path to the second site is greater than a2.
Parameters
----------
lat1 : number or Quantity
Latitude of the first ground station (deg)
lon1 : number or Quantity
Longitude of the first ground station (deg)
a1 : number or Quantity
Maximum admissible attenuation of the first ground station (dB)
el1 : number or Quantity
Elevation angle to the first ground station (deg)
lat2 : number or Quantity
Latitude of the second ground station (deg)
lon2 : number or Quantity
Longitude of the second ground station (deg)
a2 : number or Quantity
Maximum admissible attenuation of the second ground station (dB)
el2 : number or Quantity
Elevation angle to the second ground station (deg)
f : number or Quantity
Frequency (GHz)
tau : number, optional
Polarization tilt angle relative to the horizontal (degrees)
(tau = 45 deg for circular polarization). Default value is 45
hs1 : number or Quantity, optional
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu453.py | itur/models/itu453.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from astropy import units as u
from itur.models.itu1144 import bilinear_2D_interpolator
from itur.utils import (prepare_input_array, prepare_quantity, load_data,
prepare_output_array, dataset_dir, get_input_type)
class __ITU453__():
    """ Private class to model the ITU-R P.453 recommendations.

    Implementation of the methods in Recommendation ITU-R P.453
    "The radio refractive index: its formula and refractivity data"

    Available versions:
       * P.453-13 (12/17) (Current version)
       * P.453-12 (07/15) (Superseded)

    Recommendation ITU-R P.453 provides methods to estimate the radio
    refractive index and its behaviour for locations worldwide; describes both
    surface and vertical profile characteristics; and provides global maps for
    the distribution of refractivity parameters and their statistical
    variation.
    """

    # This is an abstract class that contains an instance to a version of the
    # ITU-R P.453 recommendation.
    def __init__(self, version=13):
        # Dispatch to the concrete implementation of the requested version.
        if version == 13:
            self.instance = _ITU453_13_()
        elif version == 12:
            self.instance = _ITU453_12_()
        else:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.453 model."
            )

    @property
    def __version__(self):
        # Version number of the concrete implementation in use.
        return self.instance.__version__

    def wet_term_radio_refractivity(self, e, T):
        # Delegate to the active version.
        return self.instance.wet_term_radio_refractivity(e, T)

    def dry_term_radio_refractivity(self, Pd, T):
        # Delegate to the active version.
        return self.instance.dry_term_radio_refractivity(Pd, T)

    def radio_refractive_index(self, Pd, e, T):
        # Delegate to the active version.
        return self.instance.radio_refractive_index(Pd, e, T)

    def water_vapour_pressure(self, T, P, H, type_hydrometeor='water'):
        # Delegate to the active version.
        return self.instance.water_vapour_pressure(
            T, P, H, type_hydrometeor=type_hydrometeor)

    def saturation_vapour_pressure(self, T, P, type_hydrometeor='water'):
        # Delegate to the active version.
        return self.instance.saturation_vapour_pressure(
            T, P, type_hydrometeor=type_hydrometeor)

    def map_wet_term_radio_refractivity(self, lat, lon, p=50):
        # lat/lon (args 0 and 1) are excluded from vectorization because the
        # instance methods already operate on whole arrays; only p varies.
        fcn = np.vectorize(self.instance.map_wet_term_radio_refractivity,
                           excluded=[0, 1], otypes=[np.ndarray])
        return np.array(fcn(lat, lon, p).tolist())

    def DN65(self, lat, lon, p):
        # Vectorized over p only (see note in map_wet_term_radio_refractivity).
        fcn = np.vectorize(self.instance.DN65, excluded=[0, 1],
                           otypes=[np.ndarray])
        return np.array(fcn(lat, lon, p).tolist())

    def DN1(self, lat, lon, p):
        # Vectorized over p only (see note in map_wet_term_radio_refractivity).
        fcn = np.vectorize(self.instance.DN1, excluded=[0, 1],
                           otypes=[np.ndarray])
        return np.array(fcn(lat, lon, p).tolist())
class _ITU453_13_():
    """Implementation of Recommendation ITU-R P.453-13 (12/17)."""

    def __init__(self):
        self.__version__ = 13
        self.year = 2017
        self.month = 12
        self.link = 'https://www.itu.int/rec/R-REC-P.453-13-201712-I/en'
        # Lazily-populated caches of bilinear map interpolators, keyed by
        # exceedance probability.
        self._N_wet = {}
        self._DN65 = {}
        self._DN1 = {}

    def DN65(self, lat, lon, p):
        """Refractivity-gradient statistics in the lowest 65 m, exceeded for
        p% of the average year.

        On first use, one interpolator per tabulated probability is built
        and cached.  Raises KeyError if ``p`` is not one of the tabulated
        probabilities.
        """
        if not self._DN65:
            ps = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 30, 40, 50, 60, 70, 80,
                  90, 95, 98, 99, 99.5, 99.8, 99.9]
            d_dir = os.path.join(dataset_dir, '453/v12_dn65m_%02dd%02d_v1.npz')
            lats = load_data(os.path.join(dataset_dir, '453/v12_lat0d75.npz'))
            lons = load_data(os.path.join(dataset_dir, '453/v12_lon0d75.npz'))
            for p_loads in ps:
                # File names encode the probability as <int>d<2-digit frac>.
                int_p = p_loads // 1
                frac_p = round((p_loads % 1.0) * 100)
                vals = load_data(d_dir % (int_p, frac_p))
                self._DN65[float(p_loads)] = bilinear_2D_interpolator(
                    lats, lons, vals)
        return self._DN65[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def DN1(self, lat, lon, p):
        """Refractivity-gradient statistics over the lowest 1 km, exceeded
        for p% of the average year (same caching scheme as ``DN65``)."""
        if not self._DN1:
            ps = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 30, 40, 50, 60, 70, 80,
                  90, 95, 98, 99, 99.5, 99.8, 99.9]
            d_dir = os.path.join(dataset_dir, '453/v12_dn_%02dd%02d_v1.npz')
            lats = load_data(os.path.join(dataset_dir, '453/v12_lat0d75.npz'))
            lons = load_data(os.path.join(dataset_dir, '453/v12_lon0d75.npz'))
            for p_loads in ps:
                int_p = p_loads // 1
                frac_p = round((p_loads % 1.0) * 100)
                vals = load_data(d_dir % (int_p, frac_p))
                self._DN1[float(p_loads)] = bilinear_2D_interpolator(
                    lats, lons, vals)
        return self._DN1[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def N_wet(self, lat, lon, p):
        """Wet term of the surface radio refractivity exceeded for p% of
        the average year, from the v13 digital maps.

        NOTE: mutates ``lon`` in place to remap longitudes > 180 into the
        [-180, 180] range used by this data set.
        """
        if not self._N_wet:
            ps = [0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10, 20, 30, 50, 60, 70, 80,
                  90, 95, 99]
            d_dir = os.path.join(dataset_dir, '453/v13_nwet_annual_%s.npz')
            lats = load_data(os.path.join(dataset_dir, '453/v13_lat_n.npz'))
            lons = load_data(os.path.join(dataset_dir, '453/v13_lon_n.npz'))
            for p_loads in ps:
                vals = load_data(d_dir % (str(p_loads).replace('.', '')))
                self._N_wet[float(p_loads)] = bilinear_2D_interpolator(
                    np.flipud(lats), lons, np.flipud(vals))

        lon[lon > 180] = lon[lon > 180] - 360
        return self._N_wet[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    @classmethod
    def wet_term_radio_refractivity(cls, e, T):
        """Wet term of the radio refractivity for vapour pressure ``e``
        (hPa) and temperature ``T`` (deg C)."""
        N_wet = (72 * e / (T + 273.15) + 3.75e5 * e / (T + 273.15)**2) * 1e-6
        return N_wet

    @classmethod
    def dry_term_radio_refractivity(cls, Pd, T):
        """Dry term of the radio refractivity for dry pressure ``Pd`` (hPa)
        and absolute temperature ``T`` (K)."""
        N_dry = 77.6 * Pd / T  # Eq. 3
        return N_dry

    @classmethod
    def radio_refractive_index(cls, Pd, e, T):
        """Radio refractive index n from dry pressure, vapour pressure (hPa)
        and absolute temperature (K)."""
        N = 77.6 * Pd / T + 72 * e / T + 3.75e5 * e / T**2  # Eq. 2 [N-units]
        n = 1 + N * 1e-6  # Eq. 1
        return n

    @classmethod
    def water_vapour_pressure(cls, T, P, H, type_hydrometeor='water'):
        """Water vapour pressure (hPa) from relative humidity ``H`` (%)."""
        e_s = cls.saturation_vapour_pressure(T, P, type_hydrometeor)
        return H * e_s / 100  # Eq. 8

    @classmethod
    def saturation_vapour_pressure(cls, T, P, type_hydrometeor='water'):
        """Saturation water vapour pressure (hPa) for temperature ``T``
        (deg C) and total pressure ``P`` (hPa).

        Raises
        ------
        ValueError
            If ``type_hydrometeor`` is neither 'water' nor 'ice' (this
            previously surfaced as an ``UnboundLocalError``).
        """
        if type_hydrometeor == 'water':
            EF = 1 + 1e-4 * (7.2 + P * (0.0320 + 5.9e-6 * T**2))
            a = 6.1121
            b = 18.678
            c = 257.14
            d = 234.5
        elif type_hydrometeor == 'ice':
            EF = 1 + 1e-4 * (2.2 + P * (0.0383 + 6.4e-6 * T**2))
            a = 6.1115
            b = 23.036
            c = 279.82
            d = 333.7
        else:
            raise ValueError(
                "type_hydrometeor must be 'water' or 'ice', not "
                f"'{type_hydrometeor}'")
        e_s = EF * a * np.exp((b - T / d) * T / (T + c))
        return e_s

    def map_wet_term_radio_refractivity(self, lat, lon, p):
        """Wet term of the radio refractivity exceeded for p% of the year,
        bilinearly interpolated on the 0.75-degree v13 grid with logarithmic
        interpolation between the tabulated probabilities."""
        # Fix lon because the data-set is now indexed -180 to 180 instead
        # of 0 to 360
        lon[lon > 180] = lon[lon > 180] - 360

        lat_f = lat.flatten()
        lon_f = lon.flatten()

        available_p = np.array([0.1, 0.2, 0.3, 0.5, 1, 2, 3, 5, 10,
                                20, 30, 50, 60, 70, 80, 90, 95, 99])

        if p in available_p:
            p_below = p_above = p
            pExact = True
        else:
            pExact = False
            # Bracket p with the nearest tabulated probabilities.
            idx = available_p.searchsorted(p, side='right') - 1
            idx = np.clip(idx, 0, len(available_p) - 1)
            p_below = available_p[idx]
            idx = np.clip(idx + 1, 0, len(available_p) - 1)
            p_above = available_p[idx]

        # Integer cell indices (R, C) and fractional positions (r, c) on the
        # 0.75-degree grid; the four surrounding nodes are interpolated.
        R = -(lat_f - 90) // 0.75
        C = (lon_f + 180) // 0.75

        lats = np.array([90 - R * 0.75, 90 - (R + 1) * 0.75,
                         90 - R * 0.75, 90 - (R + 1) * 0.75])
        lons = np.array([C * 0.75, C * 0.75,
                         (C + 1) * 0.75, (C + 1) * 0.75]) - 180

        r = - (lat_f - 90) / 0.75
        c = (lon_f + 180) / 0.75

        N_wet_a = self.N_wet(lats, lons, p_above)
        N_wet_a = (N_wet_a[0, :] * ((R + 1 - r) * (C + 1 - c)) +
                   N_wet_a[1, :] * ((r - R) * (C + 1 - c)) +
                   N_wet_a[2, :] * ((R + 1 - r) * (c - C)) +
                   N_wet_a[3, :] * ((r - R) * (c - C)))

        if not pExact:
            N_wet_b = self.N_wet(lats, lons, p_below)
            N_wet_b = (N_wet_b[0, :] * ((R + 1 - r) * (C + 1 - c)) +
                       N_wet_b[1, :] * ((r - R) * (C + 1 - c)) +
                       N_wet_b[2, :] * ((R + 1 - r) * (c - C)) +
                       N_wet_b[3, :] * ((r - R) * (c - C)))

        # Interpolate logarithmically in p between the bracketing maps.
        if not pExact:
            rho = N_wet_b + (N_wet_a - N_wet_b) * \
                (np.log(p) - np.log(p_below)) / \
                (np.log(p_above) - np.log(p_below))
            return rho.reshape(lat.shape)
        else:
            return N_wet_a.reshape(lat.shape)
class _ITU453_12_():
    """Implementation of Recommendation ITU-R P.453-12 (09/16).

    The closed-form expressions are delegated to ``_ITU453_13_``; only the
    digital maps differ (this version ships a single N_wet map instead of
    one map per exceedance probability).
    """

    def __init__(self):
        self.__version__ = 12
        self.year = 2016
        self.month = 9
        self.link = 'https://www.itu.int/rec/R-REC-P.453-12-201609-I/en'
        # Lazily-populated caches: _DN65/_DN1 are dicts of interpolators
        # keyed by probability; _N_wet becomes a single interpolator.
        self._N_wet = {}
        self._DN65 = {}
        self._DN1 = {}

    def DN65(self, lat, lon, p):
        """Refractivity-gradient statistics in the lowest 65 m, exceeded for
        p% of the average year.  Interpolators are built on first use and
        cached; KeyError if ``p`` is not a tabulated probability."""
        if not self._DN65:
            ps = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 30, 40, 50, 60, 70, 80,
                  90, 95, 98, 99, 99.5, 99.8, 99.9]
            d_dir = os.path.join(dataset_dir, '453/v12_dn65m_%02dd%02d_v1.npz')
            lats = load_data(os.path.join(dataset_dir, '453/v12_lat0d75.npz'))
            lons = load_data(os.path.join(dataset_dir, '453/v12_lon0d75.npz'))
            for p_loads in ps:
                # File names encode the probability as <int>d<2-digit frac>.
                int_p = p_loads // 1
                frac_p = round((p_loads % 1.0) * 100)
                vals = load_data(d_dir % (int_p, frac_p))
                self._DN65[float(p_loads)] = bilinear_2D_interpolator(
                    lats, lons, vals)
        return self._DN65[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def DN1(self, lat, lon, p):
        """Refractivity-gradient statistics over the lowest 1 km, exceeded
        for p% of the average year (same caching scheme as ``DN65``)."""
        if not self._DN1:
            ps = [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 30, 40, 50, 60, 70, 80,
                  90, 95, 98, 99, 99.5, 99.8, 99.9]
            d_dir = os.path.join(dataset_dir, '453/v12_dn_%02dd%02d_v1.npz')
            lats = load_data(os.path.join(dataset_dir, '453/v12_lat0d75.npz'))
            lons = load_data(os.path.join(dataset_dir, '453/v12_lon0d75.npz'))
            for p_loads in ps:
                int_p = p_loads // 1
                frac_p = round((p_loads % 1.0) * 100)
                vals = load_data(d_dir % (int_p, frac_p))
                self._DN1[float(p_loads)] = bilinear_2D_interpolator(
                    lats, lons, vals)
        return self._DN1[float(p)](
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    def N_wet(self, lat, lon):
        """Median wet term of the surface radio refractivity from the single
        v12 map (no probability dependence in this version)."""
        if not self._N_wet:
            vals = load_data(os.path.join(dataset_dir, '453/v12_esanwet.npz'))
            lats = load_data(os.path.join(dataset_dir, '453/v12_esalat.npz'))
            lons = load_data(os.path.join(dataset_dir, '453/v12_esalon.npz'))
            self._N_wet = bilinear_2D_interpolator(lats, lons, vals)

        return self._N_wet(
            np.array([lat.ravel(), lon.ravel()]).T).reshape(lat.shape)

    @staticmethod
    def wet_term_radio_refractivity(e, T):
        # Same closed-form expression as version 13.
        return _ITU453_13_.wet_term_radio_refractivity(e, T)

    @staticmethod
    def dry_term_radio_refractivity(Pd, T):
        # Same closed-form expression as version 13.
        return _ITU453_13_.dry_term_radio_refractivity(Pd, T)

    @staticmethod
    def radio_refractive_index(Pd, e, T):
        # Same closed-form expression as version 13.
        return _ITU453_13_.radio_refractive_index(Pd, e, T)

    @staticmethod
    def water_vapour_pressure(T, P, H, type_hydrometeor='water'):
        # Same closed-form expression as version 13.
        return _ITU453_13_.water_vapour_pressure(T, P, H, type_hydrometeor)

    @staticmethod
    def saturation_vapour_pressure(T, P, type_hydrometeor='water'):
        # Same closed-form expression as version 13.
        return _ITU453_13_.saturation_vapour_pressure(T, P, type_hydrometeor)

    def map_wet_term_radio_refractivity(self, lat, lon, p):
        # The probability argument p is ignored: version 12 provides a
        # single (median) N_wet map.
        return self.N_wet(lat, lon)
# Module-level singleton holding the active ITU-R P.453 model version;
# replaced by ``change_version`` and queried by the public wrapper functions.
__model = __ITU453__()
def change_version(new_version):
    """Switch the ITU-R P.453 model to a different recommendation version.

    Rebinds the module-level model singleton used by all public wrappers.

    Parameters
    ----------
    new_version : int
        Number of the version to use.
        Valid values are:

        * 13: Activates recommendation ITU-R P.453-13 (12/17)
        * 12: Activates recommendation ITU-R P.453-12 (07/15)
    """
    global __model
    __model = __ITU453__(new_version)
def get_version():
    """Return the version of the ITU-R P.453 recommendation in use.

    Returns
    -------
    version : int
        The version of the ITU-R P.453 recommendation being used.
    """
    return __model.__version__
def wet_term_radio_refractivity(e, T):
    """Compute the wet term of the radio refractivity.

    Parameters
    ----------
    e : number or Quantity
        Water vapour pressure (hPa)
    T : number or Quantity
        Absolute temperature (K)

    Returns
    -------
    N_wet : Quantity
        Wet term of the radio refractivity (-)

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    vapour_pressure = prepare_quantity(e, u.hPa, 'Water vapour pressure ')
    temperature = prepare_quantity(T, u.K, 'Absolute temperature')
    return (__model.wet_term_radio_refractivity(vapour_pressure, temperature)
            * u.dimensionless_unscaled)
def dry_term_radio_refractivity(Pd, T):
    """Compute the dry term of the radio refractivity.

    Parameters
    ----------
    Pd : number or Quantity
        Dry atmospheric pressure (hPa)
    T : number or Quantity
        Absolute temperature (K)

    Returns
    -------
    N_dry : Quantity
        Dry term of the radio refractivity (-)

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    dry_pressure = prepare_quantity(Pd, u.hPa, 'Dry atmospheric pressure')
    temperature = prepare_quantity(T, u.K, 'Absolute temperature')
    return (__model.dry_term_radio_refractivity(dry_pressure, temperature)
            * u.dimensionless_unscaled)
def radio_refractive_index(Pd, e, T):
    """Compute the radio refractive index.

    Parameters
    ----------
    Pd : number or Quantity
        Dry atmospheric pressure (hPa)
    e : number or Quantity
        Water vapour pressure (hPa)
    T : number or Quantity
        Absolute temperature (K)

    Returns
    -------
    n : Quantity
        Radio refractive index (-)

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    dry_pressure = prepare_quantity(Pd, u.hPa, 'Dry atmospheric pressure')
    vapour_pressure = prepare_quantity(e, u.hPa, 'Water vapour pressure ')
    temperature = prepare_quantity(T, u.K, 'Absolute temperature')
    return (__model.radio_refractive_index(dry_pressure, vapour_pressure,
                                           temperature)
            * u.dimensionless_unscaled)
def water_vapour_pressure(T, P, H, type_hydrometeor='water'):
    """Determine the water vapour pressure.

    Parameters
    ----------
    T : number or Quantity
        Temperature (C)
    P : number or Quantity
        Total atmospheric pressure (hPa)
    H : number or Quantity
        Relative humidity (%)
    type_hydrometeor : string
        Type of hydrometeor. Valid strings are 'water' and 'ice'

    Returns
    -------
    e : Quantity
        Water vapour pressure (hPa)

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    T = prepare_quantity(T, u.deg_C, 'Absolute temperature')
    P = prepare_quantity(P, u.hPa, 'Total atmospheric pressure')
    # Fixed copy-paste defect: H was previously described as
    # 'Total atmospheric pressure' in unit-conversion error messages.
    H = prepare_quantity(H, u.percent, 'Relative humidity')
    val = __model.water_vapour_pressure(T, P, H, type_hydrometeor)
    return val * u.hPa
def saturation_vapour_pressure(T, P, type_hydrometeor='water'):
    """Determine the saturation water vapour pressure.

    Parameters
    ----------
    T : number or Quantity
        Temperature (C)
    P : number or Quantity
        Total atmospheric pressure (hPa)
    type_hydrometeor : string
        Type of hydrometeor. Valid strings are 'water' and 'ice'

    Returns
    -------
    e_s : Quantity
        Saturation water vapour pressure (hPa)

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    temperature = prepare_quantity(T, u.deg_C, 'Absolute temperature')
    pressure = prepare_quantity(P, u.hPa, 'Total atmospheric pressure')
    return __model.saturation_vapour_pressure(
        temperature, pressure, type_hydrometeor) * u.hPa
def map_wet_term_radio_refractivity(lat, lon, p=50):
    """Determine the wet term of the radio refractivity from digital maps.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    p : number, optional
        Percentage of time exceeded for p% of the average year. Only used
        by ITU-R P.453-13; version 12 provides a single (median) map and
        ignores this value. Default is 50.

    Returns
    -------
    N_wet : Quantity
        Wet term of the radio refractivity (-)
        NOTE(review): the value is returned with units of g/m**3 although
        N_wet is a refractivity term — confirm the intended units.

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    lon = prepare_input_array(lon)
    # Normalize longitudes to the [0, 360) convention used by the ITU maps.
    lon = np.mod(lon, 360)
    val = __model.map_wet_term_radio_refractivity(lat, lon, p)
    return prepare_output_array(val, type_output) * u.g / u.m**3
def DN65(lat, lon, p):
    """Statistics of the vertical gradient of radio refractivity in the
    lowest 65 m above the surface of the Earth.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    p : number
        Percentage of time exceeded for p% of the average year

    Returns
    -------
    DN65_p : Quantity
        Vertical gradient of radio refractivity in the lowest 65 m from the
        surface of the Earth exceeded for p% of the average year

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Normalize longitudes to the [0, 360) convention used by the ITU maps.
    lon = np.mod(prepare_input_array(lon), 360)
    return prepare_output_array(__model.DN65(lat, lon, p),
                                type_output) * u.one
def DN1(lat, lon, p):
    """Statistics of the vertical gradient of radio refractivity over a
    1 km layer from the surface.

    Parameters
    ----------
    lat : number, sequence, or numpy.ndarray
        Latitudes of the receiver points
    lon : number, sequence, or numpy.ndarray
        Longitudes of the receiver points
    p : number
        Percentage of time exceeded for p% of the average year

    Returns
    -------
    DN1_p : Quantity
        Vertical gradient of radio refractivity over a 1 km layer from the
        surface exceeded for p% of the average year

    References
    ----------
    [1] The radio refractive index: its formula and refractivity data
    https://www.itu.int/rec/R-REC-P.453/en
    """
    type_output = get_input_type(lat)
    lat = prepare_input_array(lat)
    # Normalize longitudes to the [0, 360) convention used by the ITU maps.
    lon = np.mod(prepare_input_array(lon), 360)
    return prepare_output_array(__model.DN1(lat, lon, p),
                                type_output) * u.one
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/itur/models/itu676.py | itur/models/itu676.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import warnings
import numpy as np
from astropy import units as u
from itur.models.itu453 import radio_refractive_index
from itur.models.itu835 import (standard_pressure, standard_temperature,
standard_water_vapour_density)
from itur.models.itu836 import total_water_vapour_content
from itur.models.itu1511 import topographic_altitude
from itur.utils import (prepare_quantity, prepare_output_array, get_input_type,
prepare_input_array, load_data, dataset_dir)
def __gamma0_exact__(self, f, p, rho, T):
    """Line-by-line specific attenuation due to dry air (dB/km).

    Sums the oxygen resonance lines plus the non-resonant Debye/
    pressure-induced nitrogen term.  ``T`` is in Kelvin; the water vapour
    partial pressure e (hPa) is derived from the density ``rho`` (total
    barometric pressure ptot = p + e).  Line data (f_ox, a1..a6) is read
    from ``self``.
    """
    th = 300 / T
    e_wv = rho * T / 216.7
    lines = self.f_ox
    # Line width, with Zeeman-broadening floor.
    width = self.a3 * 1e-4 * (p * (th ** (0.8 - self.a4)) +
                              1.1 * e_wv * th)
    width = np.sqrt(width**2 + 2.25e-6)
    # Interference correction factor.
    corr = (self.a5 + self.a6 * th) * 1e-4 * (p + e_wv) * th**0.8
    # Line shape factor.
    shape = f / lines * ((width - corr * (lines - f)) /
                         ((lines - f) ** 2 + width ** 2) +
                         (width - corr * (lines + f)) /
                         ((lines + f) ** 2 + width ** 2))
    # Line strengths.
    strength = self.a1 * 1e-7 * p * th**3 * np.exp(self.a2 * (1 - th))
    # Debye width and dry continuum term.
    d = 5.6e-4 * (p + e_wv) * th**0.8
    debye = f * p * th**2 * \
        (6.14e-5 / (d * (1 + (f / d)**2)) +
         1.4e-12 * p * th**1.5 / (1 + 1.9e-5 * f**1.5))
    # Total imaginary part of the refractivity and resulting attenuation.
    refractivity = (strength * shape).sum() + debye
    return 0.1820 * f * refractivity  # Eq. 1 [dB/km]
def __gammaw_exact__(self, f, p, rho, T):
    """Line-by-line specific attenuation due to water vapour (dB/km).

    ``T`` is in Kelvin; the water vapour partial pressure e (hPa) is
    derived from the density ``rho`` (total barometric pressure
    ptot = p + e).  Line data (f_wv, b1..b6) is read from ``self``.
    """
    th = 300 / T
    e_wv = rho * T / 216.7
    lines = self.f_wv
    # Line width, including Doppler broadening.
    width = self.b3 * 1e-4 * (p * th ** self.b4 +
                              self.b5 * e_wv * th ** self.b6)
    width = 0.535 * width + \
        np.sqrt(0.217 * width**2 + 2.1316e-12 * lines**2 / th)
    # Line shape factor.
    shape = f / lines * ((width) / ((lines - f)**2 + width**2) +
                         (width) / ((lines + f)**2 + width**2))
    # Line strengths.
    strength = self.b1 * 1e-1 * e_wv * th**3.5 * np.exp(self.b2 * (1 - th))
    # Total imaginary part of the refractivity and resulting attenuation.
    refractivity = (strength * shape).sum()
    return 0.1820 * f * refractivity  # Eq. 1 [dB/km]
class __ITU676__():
"""Attenuation by atmospheric gases.
Available versions include:
* P.676-9 (02/12) (Superseded)
* P.676-10 (09/13) (Superseded)
* P.676-11 (09/16) (Superseded)
* P.676-11 (08/19) (Current version)
Not available versions:
* P.676-1 (03/92) (Superseded)
* P.676-2 (10/95) (Superseded)
* P.676-3 (08/97) (Superseded)
* P.676-4 (10/99) (Superseded)
* P.676-5 (02/01) (Superseded)
* P.676-6 (03/05) (Superseded)
* P.676-7 (02/07) (Superseded)
* P.676-8 (10/09) (Superseded)
"""
# This is an abstract class that contains an instance to a version of the
# ITU-R P.676 recommendation.
    def __init__(self, version=12):
        # Dispatch to the concrete implementation of the requested P.676
        # version; versions 1-8 are not implemented (kept below for
        # reference).
        if version == 12:
            self.instance = _ITU676_12_()
        elif version == 11:
            self.instance = _ITU676_11_()
        elif version == 10:
            self.instance = _ITU676_10_()
        elif version == 9:
            self.instance = _ITU676_9_()
        # elif version == 8:
        #     self.instance = _ITU676_8()
        # elif version == 7:
        #     self.instance = _ITU676_7()
        # elif version == 6:
        #     self.instance = _ITU676_6()
        # elif version == 5:
        #     self.instance = _ITU676_5()
        # elif version == 4:
        #     self.instance = _ITU676_4()
        # elif version == 3:
        #     self.instance = _ITU676_3()
        # elif version == 2:
        #     self.instance = _ITU676_2()
        # elif version == 1:
        #     self.instance = _ITU676_1()
        else:
            raise ValueError(
                f"Version {version} is not implemented for the ITU-R P.676 model."
            )
@property
def __version__(self):
return self.instance.__version__
def gaseous_attenuation_terrestrial_path(self, r, f, el, rho, P, T, mode):
# Abstract method to compute the gaseous attenuation over a slant path
fcn = np.vectorize(self.instance.gaseous_attenuation_terrestrial_path)
return fcn(r, f, el, rho, P, T, mode)
def gaseous_attenuation_inclined_path(
self, f, el, rho, P, T, h1, h2, mode):
# Abstract method to compute the gaseous attenuation over an inclined
# path
fcn = np.vectorize(self.instance.gaseous_attenuation_inclined_path)
return fcn(f, el, rho, P, T, h1, h2, mode)
def gaseous_attenuation_slant_path(self, f, el, rho, P, T, V_t, h, mode):
# Abstract method to compute the gaseous attenuation over a slant path
fcn = np.vectorize(self.instance.gaseous_attenuation_slant_path)
return fcn(f, el, rho, P, T, V_t, h, mode)
def slant_inclined_path_equivalent_height(self, f, P, rho, T):
fcn = np.vectorize(self.instance.slant_inclined_path_equivalent_height,
excluded=[0], otypes=[np.ndarray])
return np.array(fcn(f, P, rho, T).tolist())
def zenit_water_vapour_attenuation(
self, lat, lon, p, f, V_t=None, h=None):
# Abstract method to compute the water vapour attenuation over the
# slant path
fcn = np.vectorize(self.instance.zenit_water_vapour_attenuation,
excluded=[0, 1, 4, 5], otypes=[np.ndarray])
return np.array(fcn(lat, lon, p, f, V_t, h).tolist())
def gamma_exact(self, f, p, rho, t):
# Abstract method to compute the specific attenuation using the
# line-by-line method
fcn = np.vectorize(self.instance.gamma_exact)
return fcn(f, p, rho, t)
def gammaw_exact(self, f, p, rho, t):
# Abstract method to compute the specific attenuation due to water
# vapour
fcn = np.vectorize(self.instance.gammaw_exact)
return fcn(f, p, rho, t)
def gamma0_exact(self, f, p, rho, t):
# Abstract method to compute the specific attenuation due to dry
# atmoshere
fcn = np.vectorize(self.instance.gamma0_exact)
return fcn(f, p, rho, t)
def gammaw_approx(self, f, p, rho, t):
# Abstract method to compute the specific attenuation due to water
# vapour
fcn = np.vectorize(self.instance.gammaw_approx)
with np.errstate(invalid='ignore'):
return fcn(f, p, rho, t)
def gamma0_approx(self, f, p, rho, t):
# Abstract method to compute the specific attenuation due to dry
# atmoshere
fcn = np.vectorize(self.instance.gamma0_approx)
with np.errstate(invalid='ignore'):
return fcn(f, p, rho, t)
class _ITU676_12_():
    """Implementation of Recommendation ITU-R P.676-12 (08/2019)."""

    # Spectroscopic coefficients of the oxygen absorption lines:
    # line frequencies f_ox [GHz] and coefficients a1-a6.
    tmp = load_data(os.path.join(dataset_dir, '676/v12_lines_oxygen.txt'),
                    skip_header=1)
    f_ox = tmp[:, 0]
    a1 = tmp[:, 1]
    a2 = tmp[:, 2]
    a3 = tmp[:, 3]
    a4 = tmp[:, 4]
    a5 = tmp[:, 5]
    a6 = tmp[:, 6]

    # Spectroscopic coefficients of the water-vapour absorption lines:
    # line frequencies f_wv [GHz] and coefficients b1-b6.
    tmp = load_data(os.path.join(dataset_dir,
                                 '676//v12_lines_water_vapour.txt'),
                    skip_header=1)
    f_wv = tmp[:, 0]
    b1 = tmp[:, 1]
    b2 = tmp[:, 2]
    b3 = tmp[:, 3]
    b4 = tmp[:, 4]
    b5 = tmp[:, 5]
    b6 = tmp[:, 6]

    # Coefficients in table 3 (used for the dry-air equivalent height)
    t2_coeffs = [(0.1597, 118.750334),
                 (0.1066, 368.498246),
                 (0.1325, 424.763020),
                 (0.1242, 487.249273),
                 (0.0938, 715.392902),
                 (0.1448, 773.839490),
                 (0.1374, 834.145546)]

    # Coefficients in table 4 (used for the water-vapour equivalent height)
    hw_coeffs = [(22.23508, 1.52, 2.56),
                 (183.310087, 7.62, 10.2),
                 (325.152888, 1.56, 2.7),
                 (380.197353, 4.15, 5.7),
                 (439.150807, 0.2, 0.91),
                 (448.001085, 1.63, 2.46),
                 (474.689092, 0.76, 2.22),
                 (488.490108, 0.26, 2.49),
                 (556.935985, 7.81, 10),
                 (620.70087, 1.25, 2.35),
                 (752.033113, 16.2, 20),
                 (916.171582, 1.47, 2.58),
                 (970.315022, 1.36, 2.44),
                 (987.926764, 1.6, 1.86)]

    def __init__(self):
        self.__version__ = 12
        self.year = 2019
        self.month = 8
        # Fixed: the link previously pointed to the P.676-11 page.
        self.link = 'https://www.itu.int/rec/R-REC-P.676-12-201908-S/en'

    def gammaw_approx(self, f, p, rho, T):
        """Water-vapour specific attenuation [dB/km].

        P.676-12 has no separate approximate formula; the exact line-by-line
        computation is used after warning the caller.
        """
        warnings.warn(
            RuntimeWarning(
                'Recommendation ITU-R P.676-12 does not have an explicit '
                'method to approximate gamma_w. The exact method shall be '
                'used instead.'))
        return self.gamma_exact(f, p, rho, T)

    def gamma0_approx(self, f, p, rho, T):
        """Dry-air specific attenuation [dB/km] (exact method used, see above)."""
        warnings.warn(
            RuntimeWarning(
                'Recommendation ITU-R P.676-12 does not have an explicit '
                # Fixed: message previously referred to gamma_w.
                'method to approximate gamma_0. The exact method shall be '
                'used instead.'))
        return self.gamma_exact(f, p, rho, T)

    @classmethod
    def gamma0_exact(self, f, p, rho, T):
        # Line-by-line dry-air specific attenuation.  The helper is a
        # module-level function shared by all versions; `self` is the class
        # (it only reads the class-level coefficient arrays).
        return __gamma0_exact__(self, f, p, rho, T)

    @classmethod
    def gammaw_exact(self, f, p, rho, T):
        # Line-by-line water-vapour specific attenuation.
        return __gammaw_exact__(self, f, p, rho, T)

    @classmethod
    def gamma_exact(self, f, p, rho, T):
        # Total specific attenuation = dry air + water vapour.
        return (self.gamma0_exact(f, p, rho, T) +
                self.gammaw_exact(f, p, rho, T))

    @classmethod
    def gaseous_attenuation_approximation(self, f, el, rho, P, T):
        """Return (gamma0, gammaw), warning outside the recommended ranges."""
        if np.any(f > 350):
            warnings.warn(
                RuntimeWarning(
                    'The approximated method to compute '
                    # Fixed: message previously referred to 676-11.
                    'the gaseous attenuation in recommendation ITU-P 676-12 '
                    'is only recommended for frequencies below 350GHz'))

        if np.any(5 > el) or np.any(np.mod(el, 90) < 5):
            warnings.warn(
                RuntimeWarning(
                    'The approximated method to compute '
                    'the gaseous attenuation in recommendation ITU-P 676-12 '
                    'is only recommended for elevation angles between '
                    '5 and 90 degrees'))

        # Water vapour attenuation (gammaw) computation as in Section 1 of
        # Annex 2 of [1]
        gamma0 = self.gamma0_exact(f, P, rho, T)
        gammaw = self.gammaw_exact(f, P, rho, T)
        return gamma0, gammaw

    @classmethod
    def slant_inclined_path_equivalent_height(self, f, P, rho, T):
        """Equivalent heights h0 (dry air) and hw (water vapour) [km]
        for the slant/inclined path approximation (Eqs. 30-38).
        """
        e = rho * T / 216.7          # Water-vapour partial pressure [hPa]
        rp = (P + e) / 1013.25       # Normalized total pressure
        # Eq. 31 - 34
        t1 = 5.1040 / (1 + 0.066 * rp**-2.3) * \
            np.exp(-((f - 59.7) / (2.87 + 12.4 * np.exp(-7.9 * rp)))**2)
        t2 = sum([(ci * np.exp(2.12 * rp)) /
                  ((f - fi)**2 + 0.025 * np.exp(2.2 * rp))
                  for ci, fi in self.t2_coeffs])
        t3 = 0.0114 * f / (1 + 0.14 * rp**-2.6) * \
            (15.02 * f**2 - 1353 * f + 5.333e4) / \
            (f**3 - 151.3 * f**2 + 9629 * f - 6803)
        A = 0.7832 + 0.00709 * (T - 273.15)
        # Eq. 30
        h0 = 6.1 * A / (1 + 0.17 * rp**-1.1) * (1 + t1 + t2 + t3)
        # For f < 70 GHz, cap h0 (Eq. 30 condition)
        h0 = np.where(f < 70,
                      np.minimum(h0, 10.7 * rp**0.3),
                      h0)

        # Eq. 36 - 38
        A = 1.9298 - 0.04166 * (T - 273.15) + 0.0517 * rho
        B = 1.1674 - 0.00622 * (T - 273.15) + 0.0063 * rho
        sigmaw = 1.013 / (1 + np.exp(-8.6 * (rp - 0.57)))
        # Eq. 35 b
        hw = A + B * sum([(ai * sigmaw) / ((f - fi)**2 + bi * sigmaw)
                          for fi, ai, bi in self.hw_coeffs])
        return h0, hw

    @classmethod
    def gaseous_attenuation_terrestrial_path(
            self, r, f, el, rho, P, T, mode='approx'):
        """Gaseous attenuation [dB] over a terrestrial path of length r [km]."""
        if mode == 'approx':
            gamma0, gammaw = self.gaseous_attenuation_approximation(
                f, el, rho, P, T)
            return (gamma0 + gammaw) * r
        else:
            gamma = self.gamma_exact(f, P, rho, T)
            return gamma * r

    @classmethod
    def gaseous_attenuation_slant_path(self, f, el, rho, P, T, V_t=None,
                                       h=None, mode='approx'):
        """Gaseous attenuation [dB] over a slant path at elevation el [deg]."""
        if mode == 'approx':
            gamma0, gammaw = self.gaseous_attenuation_approximation(
                f, el, rho, P, T)
            h0, hw = self.slant_inclined_path_equivalent_height(f, P, rho, T)

            # Use the zenit water-vapour method if the values of V_t
            # and h are provided
            if V_t is not None and h is not None:
                Aw = self.zenit_water_vapour_attenuation(None, None, None,
                                                         f, V_t, h)
            else:
                Aw = gammaw * hw

            A0 = gamma0 * h0
            return (A0 + Aw) / np.sin(np.deg2rad(el))

        else:
            # Layer-by-layer numerical integration through a 922-layer
            # reference atmosphere with exponentially increasing thickness.
            delta_h = 0.0001 * \
                np.exp((np.arange(0, 922)) / 100)       # Eq. 14
            h_n = 0.0001 * ((np.exp(np.arange(0, 922) / 100.0) -
                             1.0) / (np.exp(1.0 / 100.0) - 1.0))  # Eq. 15

            T_n = standard_temperature(h_n).to(u.K).value
            press_n = standard_pressure(h_n).value
            rho_n = standard_water_vapour_density(h_n, rho_0=rho).value

            e_n = rho_n * T_n / 216.7
            n_n = radio_refractive_index(press_n, e_n, T_n).value
            # Refractive-index ratio between consecutive layers (last
            # layer padded with itself).
            n_ratio = n_n / np.pad(n_n[1:], (0, 1), mode='edge')
            r_n = 6371 + h_n

            b = np.pi / 2 - np.deg2rad(el)
            Agas = 0
            for t, press, rho, r, delta, n_r in zip(
                    T_n, press_n, rho_n, r_n, delta_h, n_ratio):
                # Path length inside the layer
                a = - r * np.cos(b) + 0.5 * np.sqrt(
                    4 * r**2 * np.cos(b)**2 + 8 * r * delta + 4 * delta**2)  # Eq. 17
                a_cos_arg = np.clip((-a**2 - 2 * r * delta - delta**2) /
                                    (2 * a * r + 2 * a * delta), -1, 1)
                # Eq. 18a
                alpha = np.pi - np.arccos(a_cos_arg)
                gamma = self.gamma_exact(f, press, rho, t)
                Agas += a * gamma                       # Eq. 13
                # Bending at the layer boundary (Snell's law)
                b = np.arcsin(np.sin(alpha) * n_r)      # Eq. 19a
            return Agas

    @classmethod
    def gaseous_attenuation_inclined_path(
            self, f, el, rho, P, T, h1, h2, mode='approx'):
        """Gaseous attenuation [dB] over an inclined path between
        altitudes h1 and h2 [km] (both must be below 10 km)."""
        if h1 > 10 or h2 > 10:
            raise ValueError(
                'Both the transmitter and the receiver must be at'
                'altitude of less than 10 km above the sea level.'
                'Current altitude Tx: %.2f km, Rx: %.2f km' % (h1, h2))

        if mode == 'approx':
            # Scale the surface water-vapour density to altitude h1
            rho = rho * np.exp(h1 / 2)
            gamma0, gammaw = self.gaseous_attenuation_approximation(
                f, el, rho, P, T)
        else:
            gamma0 = self.gamma0_exact(f, P, rho, T)
            gammaw = 0

        e = rho * T / 216.7
        h0, hw = self.slant_inclined_path_equivalent_height(f, P + e, rho, T)

        if 5 < el and el < 90:
            # Cosecant law with altitude-corrected equivalent heights
            h0_p = h0 * (np.exp(-h1 / h0) - np.exp(-h2 / h0))
            hw_p = hw * (np.exp(-h1 / hw) - np.exp(-h2 / hw))
            return (gamma0 * h0_p + gammaw * hw_p) / np.sin(np.deg2rad(el))
        else:
            def F(x):
                return 1 / (0.661 * x + 0.339 * np.sqrt(x**2 + 5.51))

            el1 = el
            Re = 8500  # TODO: change to ITU-R P 834
            # Apparent elevation at the higher terminal
            el2 = np.rad2deg(
                np.arccos(((Re + h1) / (Re + h2)) * np.cos(np.deg2rad(el1))))

            def xi(eli, hi):
                return np.tan(np.deg2rad(eli)) * np.sqrt((Re + hi) / h0)

            def xi_p(eli, hi):
                return np.tan(np.deg2rad(eli)) * np.sqrt((Re + hi) / hw)

            def eq_33(h_num, h_den, el, x):
                return np.sqrt(Re + h_num) * F(x) * \
                    np.exp(-h_num / h_den) / np.cos(np.deg2rad(el))

            A = gamma0 * np.sqrt(h0) * (eq_33(h1, h0, el1, xi(el1, h1)) -
                                        eq_33(h2, h0, el2, xi(el2, h2))) +\
                gammaw * np.sqrt(hw) * (eq_33(h1, hw, el1, xi_p(el1, h1)) -
                                        eq_33(h2, hw, el2, xi_p(el2, h2)))
            return A

    @classmethod
    def zenit_water_vapour_attenuation(
            self, lat, lon, p, f, V_t=None, h=None):
        """Zenith water-vapour attenuation from the integrated water-vapour
        content V_t (statistical method of Annex 2)."""
        f_ref = 20.6   # [GHz]
        p_ref = 845    # [hPa]

        # If not supplied, retrieve altitude and water-vapour content from
        # the companion recommendations.
        if h is None:
            h = topographic_altitude(lat, lon).value
        if V_t is None:
            V_t = total_water_vapour_content(lat, lon, p, h).value

        rho_ref = V_t / 2.38
        t_ref = 14 * np.log(0.22 * V_t / 2.38) + 3   # [Celsius]

        a = (0.2048 * np.exp(- ((f - 22.43) / 3.097)**2) +
             0.2326 * np.exp(- ((f - 183.5) / 4.096)**2) +
             0.2073 * np.exp(- ((f - 325) / 3.651)**2) - 0.1113)

        b = 8.741e4 * np.exp(-0.587 * f) + 312.2 * f**(-2.38) + 0.723
        h = np.clip(h, 0, 4)

        gammaw_approx_vect = np.vectorize(self.gammaw_exact)

        Aw_term1 = (0.0176 * V_t *
                    gammaw_approx_vect(f, p_ref, rho_ref, t_ref + 273.15) /
                    gammaw_approx_vect(f_ref, p_ref, rho_ref, t_ref + 273.15))

        return np.where(f < 20, Aw_term1, Aw_term1 * (a * h ** b + 1))
class _ITU676_11_():
    # Implementation of Recommendation ITU-R P.676-11 (12/2017).

    # Spectroscopic coefficients of the oxygen absorption lines:
    # line frequencies f_ox [GHz] and coefficients a1-a6.
    tmp = load_data(os.path.join(dataset_dir, '676/v11_lines_oxygen.txt'),
                    skip_header=1)
    f_ox = tmp[:, 0]
    a1 = tmp[:, 1]
    a2 = tmp[:, 2]
    a3 = tmp[:, 3]
    a4 = tmp[:, 4]
    a5 = tmp[:, 5]
    a6 = tmp[:, 6]

    # Spectroscopic coefficients of the water-vapour absorption lines:
    # line frequencies f_wv [GHz] and coefficients b1-b6.
    tmp = load_data(os.path.join(dataset_dir,
                                 '676//v11_lines_water_vapour.txt'),
                    skip_header=1)
    f_wv = tmp[:, 0]
    b1 = tmp[:, 1]
    b2 = tmp[:, 2]
    b3 = tmp[:, 3]
    b4 = tmp[:, 4]
    b5 = tmp[:, 5]
    b6 = tmp[:, 6]

    # Subset of water-vapour lines used by the approximate method
    # (the rows marked with an asterisk in the recommendation's table).
    idx_approx = np.zeros_like(b1, dtype=bool).squeeze()
    asterisk_rows = [0, 3, 4, 5, 7, 12, 20, 24, 34]
    idx_approx[np.array(asterisk_rows)] = True

    def __init__(self):
        self.__version__ = 11
        self.year = 2017
        self.month = 12
        self.link = 'https://www.itu.int/rec/R-REC-P.676-11-201712-S/en'

    @classmethod
    def gammaw_approx(self, f, p, rho, T):
        # Approximate water-vapour specific attenuation [dB/km], using only
        # the reduced (asterisk-marked) set of spectral lines.
        # T in Kelvin
        # e : water vapour partial pressure in hPa (total barometric pressure
        # ptot = p + e)
        theta = 300 / T
        e = rho * T / 216.7

        f_wv = self.f_wv[self.idx_approx]
        b1 = self.b1[self.idx_approx]
        b2 = self.b2[self.idx_approx]
        b3 = self.b3[self.idx_approx]
        b4 = self.b4[self.idx_approx]
        b5 = self.b5[self.idx_approx]
        b6 = self.b6[self.idx_approx]

        # Line widths
        D_f_wv = b3 * 1e-4 * (p * theta ** b4 +
                              b5 * e * theta ** b6)
        # Line shape factors
        F_i_wv = f / f_wv * ((D_f_wv) / ((f_wv - f)**2 + D_f_wv**2) +
                             (D_f_wv) / ((f_wv + f)**2 + D_f_wv**2))
        # Line strengths
        Si_wv = b1 * 1e-1 * e * theta**3.5 * np.exp(b2 * (1 - theta))

        N_pp_wv = Si_wv * F_i_wv
        N_pp = N_pp_wv.sum()

        gamma = 0.1820 * f * N_pp    # Eq. 1 [dB/km]
        return gamma

    @classmethod
    def gamma0_approx(self, f, p, rho, T):
        # Approximate dry-air specific attenuation [dB/km].
        # T in Kelvin
        # e : water vapour partial pressure in hPa (total barometric pressure
        # ptot = p + e)
        theta = 300 / T
        e = rho * T / 216.7

        f_ox = self.f_ox

        # Line widths and interference (overlap) corrections
        D_f_ox = self.a3 * 1e-4 * (p * (theta ** (0.8 - self.a4)) +
                                   1.1 * e * theta)
        delta_ox = (self.a5 + self.a6 * theta) * 1e-4 * (p + e) * theta**0.8

        # Line shape factors
        F_i_ox = f / f_ox * ((D_f_ox - delta_ox * (f_ox - f)) /
                             ((f_ox - f) ** 2 + D_f_ox ** 2) +
                             (D_f_ox - delta_ox * (f_ox + f)) /
                             ((f_ox + f) ** 2 + D_f_ox ** 2))
        # Line strengths
        Si_ox = self.a1 * 1e-7 * p * theta**3 * np.exp(self.a2 * (1 - theta))

        N_pp_ox = Si_ox * F_i_ox

        # Non-resonant Debye term and pressure-induced nitrogen attenuation
        d = 5.6e-4 * (p + e) * theta**0.8
        N_d_pp = f * p * theta**2 * \
            (6.14e-5 / (d * (1 + (f / d)**2)) +
             1.4e-12 * p * theta**1.5 / (1 + 1.9e-5 * f**1.5))

        N_pp = N_pp_ox.sum() + N_d_pp

        gamma = 0.1820 * f * N_pp    # Eq. 1 [dB/km]
        return gamma

    @classmethod
    def gamma0_exact(self, f, p, rho, T):
        # Line-by-line dry-air specific attenuation (module-level helper
        # shared by all versions; `self` is the class here).
        return __gamma0_exact__(self, f, p, rho, T)

    @classmethod
    def gammaw_exact(self, f, p, rho, T):
        # Line-by-line water-vapour specific attenuation.
        return __gammaw_exact__(self, f, p, rho, T)

    @classmethod
    def gamma_exact(self, f, p, rho, T):
        # Total specific attenuation = dry air + water vapour.
        return (self.gamma0_exact(f, p, rho, T) +
                self.gammaw_exact(f, p, rho, T))

    @classmethod
    def gaseous_attenuation_approximation(self, f, el, rho, P, T):
        """Return (gamma0, gammaw) using the approximate formulas.

        T goes in Kelvin
        """
        if np.any(f > 350):
            warnings.warn(
                RuntimeWarning(
                    'The approximated method to computes '
                    'the gaseous attenuation in recommendation ITU-P 676-11 '
                    'is only recommended for frequencies below 350GHz'))

        if np.any(5 > el) or np.any(np.mod(el, 90) < 5):
            warnings.warn(
                RuntimeWarning(
                    'The approximated method to compute '
                    'the gaseous attenuation in recommendation ITU-P 676-11 '
                    'is only recommended for elevation angles between '
                    '5 and 90 degrees'))

        # Water vapour attenuation (gammaw) computation as in Section 1 of
        # Annex 2 of [1]
        gamma0 = self.gamma0_approx(f, P, rho, T)
        gammaw = self.gammaw_approx(f, P, rho, T)
        return gamma0, gammaw

    @classmethod
    def slant_inclined_path_equivalent_height(self, f, P, rho=None, T=None):
        """Equivalent heights h0 (dry air) and hw (water vapour) [km].

        P is the total pressure here; rho and T are accepted for interface
        compatibility with P.676-12 but unused in this version.
        """
        rp = P / 1013.25
        t1 = 4.64 / (1 + 0.066 * rp**-2.3) * \
            np.exp(- ((f - 59.7) / (2.87 + 12.4 * np.exp(-7.9 * rp)))**2)
        t2 = (0.14 * np.exp(2.12 * rp)) / \
            ((f - 118.75)**2 + 0.031 * np.exp(2.2 * rp))
        t3 = 0.0114 / (1 + 0.14 * rp**-2.6) * f * \
            (-0.0247 + 0.0001 * f + 1.61e-6 * f**2) / \
            (1 - 0.0169 * f + 4.1e-5 * f**2 + 3.2e-7 * f**3)
        h0 = 6.1 / (1 + 0.17 * rp**-1.1) * (1 + t1 + t2 + t3)
        # For f < 70 GHz, cap h0
        h0 = np.where(f < 70,
                      np.minimum(h0, 10.7 * rp**0.3),
                      h0)

        sigmaw = 1.013 / (1 + np.exp(-8.6 * (rp - 0.57)))
        hw = 1.66 * (1 + (1.39 * sigmaw) / ((f - 22.235)**2 + 2.56 * sigmaw) +
                     (3.37 * sigmaw) / ((f - 183.31)**2 + 4.69 * sigmaw) +
                     (1.58 * sigmaw) / ((f - 325.1)**2 + 2.89 * sigmaw))
        return h0, hw

    @classmethod
    def gaseous_attenuation_terrestrial_path(
            self, r, f, el, rho, P, T, mode='approx'):
        """Gaseous attenuation [dB] over a terrestrial path of length r [km]."""
        if mode == 'approx':
            gamma0, gammaw = self.gaseous_attenuation_approximation(
                f, el, rho, P, T)
            return (gamma0 + gammaw) * r
        else:
            gamma = self.gamma_exact(f, P, rho, T)
            return gamma * r

    @classmethod
    def gaseous_attenuation_slant_path(self, f, el, rho, P, T, V_t=None,
                                       h=None, mode='approx'):
        """Gaseous attenuation [dB] over a slant path at elevation el [deg]."""
        if mode == 'approx':
            gamma0, gammaw = self.gaseous_attenuation_approximation(
                f, el, rho, P, T)
            e = rho * T / 216.7
            h0, hw = self.slant_inclined_path_equivalent_height(f, P + e)

            # Use the zenit water-vapour method if the values of V_t
            # and h are provided
            if V_t is not None and h is not None:
                Aw = self.zenit_water_vapour_attenuation(None, None, None,
                                                         f, V_t, h)
            else:
                Aw = gammaw * hw

            A0 = gamma0 * h0
            return (A0 + Aw) / np.sin(np.deg2rad(el))

        else:
            # Layer-by-layer numerical integration through a 922-layer
            # reference atmosphere with exponentially increasing thickness.
            delta_h = 0.0001 * \
                np.exp((np.arange(0, 922)) / 100)       # Eq. 21
            h_n = 0.0001 * ((np.exp(np.arange(0, 922) / 100.0) -
                             1.0) / (np.exp(1.0 / 100.0) - 1.0))
            T_n = standard_temperature(h_n).to(u.K).value
            press_n = standard_pressure(h_n).value
            rho_n = standard_water_vapour_density(h_n, rho_0=rho).value

            e_n = rho_n * T_n / 216.7
            n_n = radio_refractive_index(press_n, e_n, T_n).value
            # Refractive-index ratio between consecutive layers (last
            # layer padded with itself).
            n_ratio = n_n / np.pad(n_n[1:], (0, 1), mode='edge')
            r_n = 6371 + h_n

            b = np.pi / 2 - np.deg2rad(el)
            Agas = 0
            for t, press, rho, r, delta, n_r in zip(
                    T_n, press_n, rho_n, r_n, delta_h, n_ratio):
                # Path length inside the layer
                a = - r * np.cos(b) + 0.5 * np.sqrt(
                    4 * r**2 * np.cos(b)**2 + 8 * r * delta + 4 * delta**2)  # Eq. 17
                a_cos_arg = np.clip((-a**2 - 2 * r * delta - delta**2) /
                                    (2 * a * r + 2 * a * delta), -1, 1)
                # Eq. 18
                alpha = np.pi - np.arccos(a_cos_arg)
                gamma = self.gamma_exact(f, press, rho, t)
                Agas += a * gamma                       # Eq. 20
                # Bending at the layer boundary (Snell's law)
                b = np.arcsin(np.sin(alpha) *
                              n_r)                      # Eq. 19
            return Agas

    @classmethod
    def gaseous_attenuation_inclined_path(
            self, f, el, rho, P, T, h1, h2, mode='approx'):
        """Gaseous attenuation [dB] over an inclined path between
        altitudes h1 and h2 [km] (both must be below 10 km)."""
        if h1 > 10 or h2 > 10:
            raise ValueError(
                'Both the transmitter and the receiver must be at'
                'altitude of less than 10 km above the sea level.'
                'Current altitude Tx: %.2f km, Rx: %.2f km' % (h1, h2))

        if mode == 'approx':
            # Scale the surface water-vapour density to altitude h1
            rho = rho * np.exp(h1 / 2)
            gamma0, gammaw = self.gaseous_attenuation_approximation(
                f, el, rho, P, T)
        else:
            gamma0 = self.gamma0_exact(f, P, rho, T)
            gammaw = 0

        e = rho * T / 216.7
        h0, hw = self.slant_inclined_path_equivalent_height(f, P + e)

        if 5 < el and el < 90:
            # Cosecant law with altitude-corrected equivalent heights
            h0_p = h0 * (np.exp(-h1 / h0) - np.exp(-h2 / h0))
            hw_p = hw * (np.exp(-h1 / hw) - np.exp(-h2 / hw))
            return (gamma0 * h0_p + gammaw * hw_p) / np.sin(np.deg2rad(el))
        else:
            def F(x):
                return 1 / (0.661 * x + 0.339 * np.sqrt(x**2 + 5.51))

            el1 = el
            Re = 8500  # TODO: change to ITU-R P 834
            # Apparent elevation at the higher terminal
            el2 = np.rad2deg(
                np.arccos(((Re + h1) / (Re + h2)) * np.cos(np.deg2rad(el1))))

            def xi(eli, hi):
                return np.tan(np.deg2rad(eli)) * np.sqrt((Re + hi) / h0)

            def xi_p(eli, hi):
                return np.tan(np.deg2rad(eli)) * np.sqrt((Re + hi) / hw)

            def eq_33(h_num, h_den, el, x):
                return np.sqrt(Re + h_num) * F(x) * \
                    np.exp(-h_num / h_den) / np.cos(np.deg2rad(el))

            A = gamma0 * np.sqrt(h0) * (eq_33(h1, h0, el1, xi(el1, h1)) -
                                        eq_33(h2, h0, el2, xi(el2, h2))) +\
                gammaw * np.sqrt(hw) * (eq_33(h1, hw, el1, xi_p(el1, h1)) -
                                        eq_33(h2, hw, el2, xi_p(el2, h2)))
            return A

    @classmethod
    def zenit_water_vapour_attenuation(
            self, lat, lon, p, f, V_t=None, h=None):
        """Zenith water-vapour attenuation from the integrated water-vapour
        content V_t (statistical method of Annex 2)."""
        f_ref = 20.6   # [GHz]
        p_ref = 815    # [hPa]

        # If not supplied, retrieve altitude and water-vapour content from
        # the companion recommendations.
        if h is None:
            h = topographic_altitude(lat, lon).value
        if V_t is None:
            V_t = total_water_vapour_content(lat, lon, p, h).value

        rho_ref = V_t / 3.67
        t_ref = 14 * np.log(0.22 * V_t / 3.67) + 3   # [Celsius]

        a = (0.2048 * np.exp(- ((f - 22.43) / 3.097)**2) +
             0.2326 * np.exp(- ((f - 183.5) / 4.096)**2) +
             0.2073 * np.exp(- ((f - 325) / 3.651)**2) - 0.113)

        b = 8.741e4 * np.exp(-0.587 * f) + 312.2 * f**(-2.38) + 0.723
        h = np.minimum(h, 4)

        gammaw_approx_vect = np.vectorize(self.gammaw_approx)

        Aw_term1 = (0.0176 * V_t *
                    gammaw_approx_vect(f, p_ref, rho_ref, t_ref + 273.15) /
                    gammaw_approx_vect(f_ref, p_ref, rho_ref, t_ref + 273.15))

        return np.where(f < 20, Aw_term1, Aw_term1 * (a * h ** b + 1))
class _ITU676_10_():
tmp = load_data(os.path.join(dataset_dir, '676/v10_lines_oxygen.txt'),
skip_header=1)
f_ox = tmp[:, 0]
a1 = tmp[:, 1]
a2 = tmp[:, 2]
a3 = tmp[:, 3]
a4 = tmp[:, 4]
a5 = tmp[:, 5]
a6 = tmp[:, 6]
tmp = load_data(os.path.join(dataset_dir,
'676//v10_lines_water_vapour.txt'),
skip_header=1)
f_wv = tmp[:, 0]
b1 = tmp[:, 1]
b2 = tmp[:, 2]
b3 = tmp[:, 3]
b4 = tmp[:, 4]
b5 = tmp[:, 5]
b6 = tmp[:, 6]
def __init__(self):
self.__version__ = 10
self.year = 2013
self.month = 9
self.link = 'https://www.itu.int/rec/R-REC-P.676-10-201309-S/en'
@classmethod
def gammaw_approx(self, f, P, rho, T):
rp = P / 1013
rt = 288 / (T)
eta1 = 0.955 * rp * rt**0.68 + 0.006 * rho
eta2 = 0.735 * rp * rt**0.50 + 0.0353 * rt**4 * rho
def g(f, fi): return 1 + ((f - fi) / (f + fi))**2
gammaw = (
(3.98 * eta1 * np.exp(2.23 * (1 - rt))) /
((f - 22.235) ** 2 + 9.42 * eta1 ** 2) * g(f, 22.0) +
(11.96 * eta1 * np.exp(0.70 * (1 - rt))) /
((f - 183.310) ** 2 + 11.14 * eta1 ** 2) +
(0.081 * eta1 * np.exp(6.44 * (1 - rt))) /
((f - 321.226) ** 2 + 6.29 * eta1 ** 2) +
(3.660 * eta1 * np.exp(1.60 * (1 - rt))) /
((f - 325.153) ** 2 + 9.22 * eta1 ** 2) +
(25.37 * eta1 * np.exp(1.09 * (1 - rt))) / ((f - 380.000) ** 2) +
(17.40 * eta1 * np.exp(1.46 * (1 - rt))) / ((f - 448.000) ** 2) +
(844.6 * eta1 * np.exp(0.17 * (1 - rt))) / ((f - 557.000) ** 2) *
g(f, 557.0) + (290.0 * eta1 * np.exp(0.41 * (1 - rt))) /
((f - 752.000) ** 2) * g(f, 752.0) +
(8.3328e4 * eta2 * np.exp(0.99 * (1 - rt))) /
((f - 1780.00) ** 2) *
g(f, 1780.0)) * f ** 2 * rt ** 2.5 * rho * 1e-4
return gammaw
@classmethod
def gamma0_approx(self, f, P, rho, T):
rp = P / 1013.0
rt = 288.0 / (T)
def phi(rp, rt, a, b, c, d): return (
rp**a * np.power(rt, b) * np.exp(c * (1 - rp) + d * (1 - rt)))
# Dry air attenuation (gamma0) computation as in Section 1 of Annex 2
# of [1]
delta = -0.00306 * phi(rp, rt, 3.211, -14.94, 1.583, -16.37)
xi1 = phi(rp, rt, 0.0717, -1.8132, 0.0156, -1.6515)
xi2 = phi(rp, rt, 0.5146, -4.6368, -0.1921, -5.7416)
xi3 = phi(rp, rt, 0.3414, -6.5851, 0.2130, -8.5854)
xi4 = phi(rp, rt, -0.0112, 0.0092, -0.1033, -0.0009)
xi5 = phi(rp, rt, 0.2705, -2.7192, -0.3016, -4.1033)
xi6 = phi(rp, rt, 0.2445, -5.9191, 0.0422, -8.0719)
xi7 = phi(rp, rt, -0.1833, 6.5589, -0.2402, 6.131)
gamma54 = 2.192 * phi(rp, rt, 1.8286, -1.9487, 0.4051, -2.8509)
gamma58 = 12.59 * phi(rp, rt, 1.0045, 3.5610, 0.1588, 1.2834)
gamma60 = 15.00 * phi(rp, rt, 0.9003, 4.1335, 0.0427, 1.6088)
gamma62 = 14.28 * phi(rp, rt, 0.9886, 3.4176, 0.1827, 1.3429)
gamma64 = 6.819 * phi(rp, rt, 1.4320, 0.6258, 0.3177, -0.5914)
gamma66 = 1.908 * phi(rp, rt, 2.0717, -4.1404, 0.4910, -4.8718)
def fcn_le_54():
return (((7.2 * rt**2.8) / (f**2 + 0.34 * rp**2 * rt**1.6) +
(0.62 * xi3) / ((54 - f)**(1.16 * xi1) + 0.83 * xi2)) *
f**2 * rp**2 * 1e-3)
def fcn_le_60():
return (np.exp(np.log(gamma54) / 24.0 * (f - 58) * (f - 60) -
np.log(gamma58) / 8.0 * (f - 54) * (f - 60) +
np.log(gamma60) / 12.0 * (f - 54) * (f - 58)))
def fcn_le_62():
return (gamma60 + (gamma62 - gamma60) * (f - 60) / 2.0)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/test/itur_test.py | test/itur_test.py | # -*- coding: utf-8 -*-
import os
import sys
import warnings
import numpy as np
import unittest as test
import itur
import itur.models as models
from astropy import units as u
basepath = os.path.dirname(os.path.realpath(__file__))
test_data = os.path.join(basepath, 'test_data')
def suite():
    """Build and return the full unittest suite for the ITU-R
    Recommendations implemented by itur."""
    s = test.TestSuite()

    # Test valid versions
    s.addTest(TestVersions('test_change_to_not_implemented_versions'))

    # For each version test all functions for vectorization and for
    for num in ('453', '530', '618', '676', '835', '836', '837', '838',
                '839', '840', '1510', '1511', '1623', '1853'):
        rec_cls = globals()['TestFunctionsRecommendation' + num]
        s.addTest(rec_cls('test_' + num))

    # Basic import module functionality
    for target in ('itur', 'itur_utils', 'itur_plotting', 'itur_models',
                   'itur_models_1853', 'itur_models_1623',
                   'itur_models_1511', 'itur_models_1510',
                   'itur_models_453', 'itur_models_530',
                   'itur_models_618', 'itur_models_676',
                   'itur_models_835', 'itur_models_836',
                   'itur_models_837', 'itur_models_838',
                   'itur_models_839', 'itur_models_840'):
        s.addTest(TestImportModules('test_import_' + target))

    # Test version of itur
    s.addTest(TestImportModules('test_version'))

    # Test slant_path_attenuation calls
    for case in ('test_slant_path_attenuation',
                 'test_slant_path_attenuation_p_below',
                 'test_slant_path_attenuation_p_above',
                 'test_slant_path_attenuation_without_rain',
                 'test_slant_path_attenuation_without_gas',
                 'test_slant_path_attenuation_without_clouds',
                 'test_slant_path_attenuation_without_scintillation'):
        s.addTest(TestIturMainFunctions(case))

    # Test utils library
    for case in ('test_read_file_npz', 'test_read_file_npy',
                 'test_read_file_txt', 'test_read_file_txt_as_txt',
                 'test_distance_wsg84', 'test_distance_haversine',
                 'test_prepare_quantity', 'test_prepare_output_array',
                 'test_regular_lat_lon_grid'):
        s.addTest(TestIturUtils(case))

    return s
class TestVersions(test.TestCase):
    """Check that change_version rejects version numbers that are not
    implemented by the corresponding recommendation module."""

    def test_change_to_not_implemented_versions(self):
        # Map each recommendation module to the range of version numbers
        # below the oldest implemented one.  (The original test repeated
        # the itu453 and itu835 loops verbatim; the duplicates are removed.)
        not_implemented = [
            (models.itu453, range(1, 12)),
            (models.itu530, range(1, 16)),
            (models.itu618, range(1, 12)),
            (models.itu839, range(1, 2)),
            (models.itu676, range(1, 9)),
            (models.itu835, range(1, 5)),
            (models.itu836, range(1, 4)),
            (models.itu837, range(1, 6)),
            (models.itu840, range(1, 4)),
        ]
        for module, versions in not_implemented:
            for version in versions:
                self.assertRaises(ValueError,
                                  module.change_version, version)
class TestImportModules(test.TestCase):
    """Tests that all submodules are importable.
    """

    @staticmethod
    def test_version():
        # The package exposes a __version__ string.
        import itur as itu
        print(itu.__version__)

    @staticmethod
    def test_import_itur():
        import itur

    @staticmethod
    def test_import_itur_utils():
        import itur.utils

    @staticmethod
    def test_import_itur_plotting():
        import itur.plotting

    @staticmethod
    def test_import_itur_models():
        import itur.models

    @staticmethod
    def test_import_itur_models_1510():
        import itur.models.itu1510

    @staticmethod
    def test_import_itur_models_1511():
        import itur.models.itu1511

    @staticmethod
    def test_import_itur_models_1623():
        # Fixed: this test previously imported itur.models.itu1853
        # (copy-paste bug), leaving itu1623 untested.
        import itur.models.itu1623

    @staticmethod
    def test_import_itur_models_1853():
        import itur.models.itu1853

    @staticmethod
    def test_import_itur_models_453():
        import itur.models.itu453

    @staticmethod
    def test_import_itur_models_530():
        import itur.models.itu530

    @staticmethod
    def test_import_itur_models_618():
        import itur.models.itu618

    @staticmethod
    def test_import_itur_models_676():
        import itur.models.itu676

    @staticmethod
    def test_import_itur_models_835():
        import itur.models.itu835

    @staticmethod
    def test_import_itur_models_836():
        import itur.models.itu836

    @staticmethod
    def test_import_itur_models_837():
        import itur.models.itu837

    @staticmethod
    def test_import_itur_models_838():
        import itur.models.itu838

    @staticmethod
    def test_import_itur_models_839():
        import itur.models.itu839

    @staticmethod
    def test_import_itur_models_840():
        import itur.models.itu840
class TestIturMainFunctions(test.TestCase):
    """Smoke tests for the top-level itur.atmospheric_attenuation_slant_path
    entry point (no return values are checked, only that the calls run and
    warn when expected)."""

    def setUp(self):
        # Link parameters shared by all the test cases below.
        self.lat = 0          # Latitude [deg]
        self.lon = 0          # Longitude [deg]
        self.f = 22 * u.GHz   # Frequency
        self.el = 45          # Elevation angle [deg]
        self.p = 0.01         # Exceedance probability (presumably % of time)
        self.D = 1            # D parameter (presumably antenna diameter [m])

    def test_slant_path_attenuation(self):
        # Baseline call with all attenuation contributions enabled.
        itur.atmospheric_attenuation_slant_path(
            lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=self.p,
            D=self.D)

    @test.skipIf(sys.version_info[0] < 3, "Only supported in Python 3+")
    def test_slant_path_attenuation_p_below(self):
        # A probability below the model's valid range must emit a warning.
        with self.assertWarns(RuntimeWarning):
            itur.atmospheric_attenuation_slant_path(
                lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=1e-4,
                D=self.D)

    @test.skipIf(sys.version_info[0] < 3, "Only supported in Python 3+")
    def test_slant_path_attenuation_p_above(self):
        # A probability above the valid range must emit a warning too.
        with self.assertWarns(RuntimeWarning):
            itur.atmospheric_attenuation_slant_path(
                lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=90,
                D=self.D)

    def test_slant_path_attenuation_without_rain(self):
        # Each contribution can be disabled individually.
        itur.atmospheric_attenuation_slant_path(
            lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=self.p,
            D=self.D, include_rain=False)

    def test_slant_path_attenuation_without_clouds(self):
        itur.atmospheric_attenuation_slant_path(
            lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=self.p,
            D=self.D, include_clouds=False)

    def test_slant_path_attenuation_without_gas(self):
        itur.atmospheric_attenuation_slant_path(
            lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=self.p,
            D=self.D, include_gas=False)

    def test_slant_path_attenuation_without_scintillation(self):
        itur.atmospheric_attenuation_slant_path(
            lat=self.lat, lon=self.lon, f=self.f, el=self.el, p=self.p,
            D=self.D, include_scintillation=False)
class TestIturUtils(test.TestCase):
def test_read_file_npy(self):
path = os.path.join(test_data, 'v3_esalat.npy')
itur.utils.load_data(path)
def test_read_file_npz(self):
path = os.path.join(test_data, 'v3_esalat.npz')
itur.utils.load_data(path)
def test_read_file_txt(self):
path = os.path.join(test_data, 'v12_lines_oxygen.txt')
itur.utils.load_data(path, skip_header=1)
def test_read_file_txt_as_txt(self):
path = os.path.join(test_data, 'v12_lines_oxygen.txt')
itur.utils.load_data(path, is_text=True)
def test_distance_haversine(self):
val = itur.utils.compute_distance_earth_to_earth_haversine(
lat_p=0, lon_p=0, lat_grid=10, lon_grid=10)
np.testing.assert_allclose(val, 1568.5205567985759)
val = itur.utils.compute_distance_earth_to_earth_haversine(
lat_p=0, lon_p=0, lat_grid=np.array([10, 20]),
lon_grid=np.array([10, 20]))
np.testing.assert_allclose(val, np.array([1568.5205567985759,
3112.445040079722]))
val = itur.utils.compute_distance_earth_to_earth_haversine(
lat_p=0, lon_p=0, lat_grid=np.array([[10], [20]]),
lon_grid=np.array([[10], [20]]))
np.testing.assert_allclose(val, np.array([[1568.5205567985759],
[3112.445040079722]]))
def test_distance_wsg84(self):
val = itur.utils.compute_distance_earth_to_earth(
lat_p=0, lon_p=0, lat_grid=10, lon_grid=10,
method='WGS84')
self.assertAlmostEqual(val, 1565.10909921789)
val = itur.utils.compute_distance_earth_to_earth(
lat_p=0, lon_p=0, lat_grid=np.array([10, 20]),
lon_grid=np.array([10, 20]),
method='WGS84')
np.testing.assert_allclose(val, np.array([1565.10909922,
3106.12677679]))
val = itur.utils.compute_distance_earth_to_earth(
lat_p=0, lon_p=0, lat_grid=np.array([[10], [20]]),
lon_grid=np.array([[10], [20]]),
method='WGS84')
np.testing.assert_allclose(val, np.array([[1565.10909922],
[3106.12677679]]))
def test_prepare_quantity(self):
# Test temperature conversion
val = itur.utils.prepare_quantity((273.15, 373.15) * itur.u.K,
units=itur.u.Celsius)
np.testing.assert_array_equal(val, np.array([0, 100]))
# Test individual numbers
val = itur.utils.prepare_quantity(1, units=itur.u.m)
self.assertEqual(val, 1)
val = itur.utils.prepare_quantity(1 * itur.u.km, units=itur.u.m)
self.assertEqual(val, 1000)
val = itur.utils.prepare_quantity(None, units=itur.u.m)
self.assertEqual(val, None)
# Test tuples of values
val = itur.utils.prepare_quantity((1, 2), units=itur.u.m)
np.testing.assert_array_equal(val, np.array([1, 2]))
val = itur.utils.prepare_quantity((1, 2) * itur.u.km, units=itur.u.m)
np.testing.assert_array_equal(val, np.array([1000, 2000]))
# Test numpy arrays
val = itur.utils.prepare_quantity(np.array([1, 2]), units=itur.u.m)
np.testing.assert_array_equal(val, np.array([1, 2]))
val = itur.utils.prepare_quantity(np.array([1, 2]) * itur.u.km,
units=itur.u.m)
np.testing.assert_array_equal(val, np.array([1000, 2000]))
# Test lists of values
val = itur.utils.prepare_quantity([1, 2], units=itur.u.m)
np.testing.assert_array_equal(val, np.array([1, 2]))
val = itur.utils.prepare_quantity([1, 2] * itur.u.km, units=itur.u.m)
np.testing.assert_array_equal(val, np.array([1000, 2000]))
# Check that invalid formats raise an exception
with self.assertRaises(ValueError):
itur.utils.prepare_quantity({}, units=itur.u.m)
def test_prepare_output_array(self):
    """prepare_output_array casts results back to the caller's input type."""
    po = itur.utils.prepare_output_array
    arr = np.array([[1, 2], [3, 4]])
    m = itur.u.m

    # Unit-less values.
    self.assertEqual(po(arr, type_input=list), arr.tolist())
    np.testing.assert_array_equal(po(arr, type_input=np.ndarray), arr)
    self.assertEqual(po(5, type_input=float), 5.0)
    self.assertEqual(po([5, 10], type_input=list), [5, 10])

    # Values carrying astropy units.
    self.assertEqual(po(5 * itur.u.m, type_input=float), 5.0 * itur.u.m)
    np.testing.assert_array_equal(
        po([5, 10] * m, type_input=list), [5, 10] * m)
    np.testing.assert_array_equal(
        po(arr * m, type_input=list), arr.tolist() * m)
    np.testing.assert_array_equal(
        po(arr * m, type_input=np.ndarray), arr.tolist() * m)
def test_regular_lat_lon_grid(self):
    """Grid generation runs for both longitude-origin conventions."""
    for start_at_zero in (True, False):
        itur.utils.regular_lat_lon_grid(lon_start_0=start_at_zero)
class TestFunctionsRecommendation453(test.TestCase):
    """Smoke tests: every public function of ITU-R P.453 runs for all
    supported scalar/vector broadcasting combinations."""

    def setUp(self):
        self.versions = [12, 13]

    @staticmethod
    def test_all_functions_453():
        m453 = models.itu453

        # Representative atmospheric inputs.
        T = 15 * itur.u.deg_C
        # NOTE(review): bare `u.hPa` here vs `itur.u` elsewhere —
        # presumably the same astropy units module; confirm the import.
        e = (T.value * 7.5 / 216.7) * u.hPa
        Pd = 1013.15 * itur.u.hPa
        P = 1013.15 * itur.u.hPa
        H = 60 * itur.u.percent
        lat, lon = 51, -53
        p = 0.51
        p_exact = 0.5

        m453.wet_term_radio_refractivity(e, T)
        m453.wet_term_radio_refractivity([e, e], [T, T])
        m453.wet_term_radio_refractivity(e, [T, T])

        m453.radio_refractive_index(P, e, T)
        m453.radio_refractive_index([P, P], [e, e], [T, T])
        m453.radio_refractive_index([P, P], [e, e], T)
        m453.radio_refractive_index([P, P], e, [T, T])
        m453.radio_refractive_index(P, [e, e], [T, T])

        m453.dry_term_radio_refractivity(Pd, T)
        # (The vector call is duplicated in the original sequence.)
        m453.dry_term_radio_refractivity([Pd, Pd], [T, T])
        m453.dry_term_radio_refractivity([Pd, Pd], [T, T])

        # Water-vapour pressure: each argument combination, water then ice.
        for args in ((T, P, H),
                     ([T, T], [P, P], [H, H]),
                     ([T, T], P, [H, H]),
                     ([T, T], [P, P], H)):
            for hydro in ('water', 'ice'):
                m453.water_vapour_pressure(*args, type_hydrometeor=hydro)

        # Saturation vapour pressure: same pattern.
        for args in ((T, P),
                     ([T, T], [P, P]),
                     ([T, T], P),
                     (T, [P, P])):
            for hydro in ('water', 'ice'):
                m453.saturation_vapour_pressure(*args, type_hydrometeor=hydro)

        m453.map_wet_term_radio_refractivity(lat, lon, p).value
        m453.map_wet_term_radio_refractivity([lat, lat], [lon, lon], [p, p])
        m453.map_wet_term_radio_refractivity(lat, lon, [p, p])

        # DN65 and DN1 share a signature; exercise both identically.
        for fcn in (m453.DN65, m453.DN1):
            fcn(lat, lon, p_exact)
            fcn([lat, lat], [lon, lon], [p_exact, p_exact])
            fcn(lat, lon, [p_exact, p_exact])

    def test_453(self):
        """Run the smoke test against every supported version."""
        for version in self.versions:
            models.itu453.change_version(version)
            self.test_all_functions_453()
            self.assertEqual(models.itu453.get_version(), version)
class TestFunctionsRecommendation530(test.TestCase):
    """Smoke tests for ITU-R P.530 (terrestrial line-of-sight systems)."""

    def setUp(self):
        self.versions = [16, 17]

    @staticmethod
    def test_all_functions_530():
        m530 = models.itu530

        # Representative link geometry and parameters.
        d1 = d2 = d = 10 * itur.u.km
        f = 29 * itur.u.GHz
        h = h_e = h_r = 100 * itur.u.m
        A = Ap = 10 * itur.u.dB
        el = 45
        XPD_g = 20 * itur.u.dB
        C0_I = 20 * itur.u.dB
        lat, lon = 51, -53
        p = 0.05

        m530.fresnel_ellipse_radius(d1, d2, f)
        m530.diffraction_loss(d1, d2, h, f)
        m530.multipath_loss_for_A(lat, lon, h_e, h_r, d, f, A)
        m530.multipath_loss(lat, lon, h_e, h_r, d, f, A)
        m530.rain_attenuation(lat, lon, d, f, el, p)
        m530.inverse_rain_attenuation(lat, lon, d, f, el, Ap)
        m530.rain_event_count(lat, lon, d, f, el, A)
        m530.XPD_outage_clear_air(lat, lon, h_e, h_r, d, f, XPD_g, C0_I)
        m530.XPD_outage_precipitation(lat, lon, d, f, el, C0_I)

    def test_530(self):
        """Run the smoke test against every supported version."""
        for version in self.versions:
            models.itu530.change_version(version)
            self.test_all_functions_530()
            self.assertEqual(models.itu530.get_version(), version)
class TestFunctionsRecommendation618(test.TestCase):
    """Smoke tests for ITU-R P.618 (Earth-space propagation prediction)."""

    def setUp(self):
        self.versions = [12, 13]

    @staticmethod
    def test_all_functions_618():
        m618 = models.itu618

        # Link, geometry and climatic inputs.
        f = 29 * itur.u.GHz
        el = 31
        tau = 45
        Ls = 3 * itur.u.km
        hs = 0.05 * itur.u.km
        D = 1.2 * itur.u.m
        R001 = 34 * itur.u.mm / itur.u.hr
        lat, lon = 51, -53
        p = 0.51
        a1, a2 = 10, 12
        lat1, lon1 = 51, -53
        lat2, lon2 = 52, -53
        el1, el2 = 30, 54
        Ap = 10
        P_k = P0 = 0.5

        # Rain attenuation: scalar/vector broadcasting combinations.
        m618.rain_attenuation(lat, lon, f, el, p=p)
        m618.rain_attenuation([lat, lat], [lon, lon], f, el, p=p)
        m618.rain_attenuation([lat, lat], [lon, lon], [f, f], [el, el], p=p)
        m618.rain_attenuation([lat, lat], [lon, lon], [f, f], [el, el],
                              p=[p, p])
        m618.rain_attenuation(lat, lon, f, el, hs=hs, p=p, R001=R001,
                              tau=tau, Ls=Ls)
        m618.rain_attenuation([lat, lat], [lon, lon], f, el, hs=hs,
                              p=p, R001=R001, tau=tau, Ls=Ls)
        m618.rain_attenuation([lat, lat], [lon, lon], [f, f], [el, el],
                              hs=hs, p=p, R001=R001, tau=tau, Ls=Ls)
        m618.rain_attenuation([lat, lat], [lon, lon], [f, f], [el, el],
                              hs=[hs, hs], p=[p, p], R001=[R001, R001],
                              Ls=[Ls, Ls], tau=[tau, tau])

        # Probability of rain attenuation.
        m618.rain_attenuation_probability(lat, lon, el)
        m618.rain_attenuation_probability([lat, lat], [lon, lon], el)
        m618.rain_attenuation_probability([lat, lat], [lon, lon], [el, el])
        m618.rain_attenuation_probability(lat, lon, el, hs=hs, Ls=Ls, P0=P0)
        m618.rain_attenuation_probability([lat, lat], [lon, lon], el,
                                          hs=hs, Ls=Ls, P0=P0)
        m618.rain_attenuation_probability([lat, lat], [lon, lon], [el, el],
                                          hs=[hs, hs], Ls=[Ls, Ls],
                                          P0=[P0, P0])

        # Two-site diversity outage.
        m618.site_diversity_rain_outage_probability(
            lat1, lon1, a1, el1, lat2, lon2, a2, el2, f,
            tau=45, hs1=None, hs2=None)

        # Scintillation attenuation.
        m618.scintillation_attenuation(lat, lon, f, el, p, D)
        m618.scintillation_attenuation([lat, lat], [lon, lon], [f, f],
                                       [el, el], p, D)
        m618.scintillation_attenuation([lat, lat], [lon, lon], [f, f],
                                       [el, el], [p, p], [D, D])

        # Cross-polarization discrimination.
        m618.rain_cross_polarization_discrimination(Ap, f, el, p, tau=45)
        m618.rain_cross_polarization_discrimination(
            [Ap, Ap], [f, f], [el, el], [p, p], tau=45)
        m618.rain_cross_polarization_discrimination(
            [Ap, Ap], [f, f], [el, el], [p, p], tau=tau)
        m618.rain_cross_polarization_discrimination(
            [Ap, Ap], [f, f], [el, el], [p, p], tau=[tau, tau])

        # Log-normal fit of the rain attenuation distribution.
        m618.fit_rain_attenuation_to_lognormal(lat, lon, f, el, hs, P_k, tau)
        m618.fit_rain_attenuation_to_lognormal(
            [lat, lat], [lon, lon], [f, f], [el, el], hs, P_k, tau)
        m618.fit_rain_attenuation_to_lognormal(
            [lat, lat], [lon, lon], [f, f], [el, el], [hs, hs], [P_k, P_k],
            [tau, tau])

    def test_618(self):
        """Run the smoke test against every supported version."""
        for version in self.versions:
            models.itu618.change_version(version)
            self.test_all_functions_618()
            self.assertEqual(models.itu618.get_version(), version)
class TestFunctionsRecommendation676(test.TestCase):
    """Smoke tests for ITU-R P.676 (attenuation by atmospheric gases)."""

    def setUp(self):
        self.versions = [9, 10, 11, 12]

    @staticmethod
    def test_all_functions_676():
        m676 = models.itu676

        # Path and atmospheric inputs.
        r = 5 * itur.u.km
        f = 29 * itur.u.GHz
        el = 71
        el_low = 4          # low elevation: expected to emit warnings
        rho = 7.5
        P = 1013 * itur.u.hPa
        T = 15 * itur.u.deg_C
        V_t = 20 * itur.u.kg / itur.u.m**2
        h = 0.05 * itur.u.km
        h1 = 0.05 * itur.u.km
        h2 = 0.15 * itur.u.km
        lat, lon = 51, -53
        p = 0.51

        # Terrestrial path: scalar then vector args, both modes each.
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_terrestrial_path(
                r, f, el, rho, P, T, mode)
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_terrestrial_path(
                r, [f, f], [el, el], [rho, rho], [P, P], [T, T], mode)

        # Inclined path (the final scalar pair repeats the first one,
        # exactly as in the original validation sequence).
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_inclined_path(
                f, el, rho, P, T, h1=h1, h2=h2, mode=mode)
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_inclined_path(
                [f, f], [el, el], [rho, rho], [P, P], [T, T],
                h1=h1, h2=h2, mode=mode)
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_inclined_path(
                f, el, rho, P, T, h1=h1, h2=h2, mode=mode)

        # Low elevation angles trigger warnings; record them silently.
        with warnings.catch_warnings(record=True):
            for mode in ('approx', 'exact'):
                m676.gaseous_attenuation_inclined_path(
                    f, el_low, rho, P, T, h1=h1, h2=h2, mode=mode)
            for mode in ('approx', 'exact'):
                m676.gaseous_attenuation_inclined_path(
                    [f, f], [el_low, el_low], [rho, rho], [P, P], [T, T],
                    h1=h1, h2=h2, mode=mode)
            for mode in ('approx', 'exact'):
                m676.gaseous_attenuation_inclined_path(
                    f, el_low, rho, P, T, h1=h1, h2=h2, mode=mode)

        # Slant path, with and without integrated water-vapour content.
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_slant_path(
                f, el, rho, P, T, V_t=None, h=None, mode=mode)
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_slant_path(
                [f, f], [el, el], [rho, rho], [P, P], [T, T],
                V_t=None, h=None, mode=mode)
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_slant_path(
                f, el, rho, P, T, V_t=V_t, h=h, mode=mode)
        for mode in ('approx', 'exact'):
            m676.gaseous_attenuation_slant_path(
                [f, f], [el, el], [rho, rho], [P, P], [T, T],
                V_t=[V_t, V_t], h=[h, h], mode=mode)

        # Zenith water-vapour attenuation.
        m676.zenit_water_vapour_attenuation(lat, lon, p, f, V_t=None, h=None)
        m676.zenit_water_vapour_attenuation(
            [lat, lat], [lon, lon], [p, p], [f, f], V_t=None, h=None)
        m676.zenit_water_vapour_attenuation(lat, lon, p, f, V_t=V_t, h=h)
        m676.zenit_water_vapour_attenuation(
            [lat, lat], [lon, lon], [p, p], [f, f], V_t=[V_t, V_t], h=[h, h])

        # Specific attenuation, approximate formulas.
        m676.gammaw_approx(f, P, rho, T)
        m676.gammaw_approx([f, f], [P, P], [rho, rho], [T, T])
        m676.gamma0_approx(f, P, rho, T)
        m676.gamma0_approx([f, f], [P, P], [rho, rho], [T, T])

        # Exact (line-by-line) helpers are only exercised for version 11.
        if m676.get_version() == 11:
            m676.gamma0_exact(f, P, rho, T)
            m676.gamma0_exact([f, f], [P, P], [rho, rho], [T, T])
            m676.gammaw_exact(f, P, rho, T)
            m676.gammaw_exact([f, f], [P, P], [rho, rho], [T, T])

    def test_676(self):
        """Run the smoke test against every supported version."""
        for version in self.versions:
            models.itu676.change_version(version)
            self.test_all_functions_676()
            self.assertEqual(models.itu676.get_version(), version)
class TestFunctionsRecommendation835(test.TestCase):
    """Smoke tests for ITU-R P.835 (reference standard atmospheres)."""

    def setUp(self):
        self.versions = [5, 6]

    @staticmethod
    def test_all_functions_835():
        m835 = models.itu835

        # Surface reference values and an evaluation height.
        T_0 = 15 * itur.u.deg_C
        h_0 = 2 * itur.u.km
        P_0 = 1013.25 * itur.u.hPa
        rho_0 = 7.5 * itur.u.g / itur.u.m**3
        h = 0.05 * itur.u.km
        lat = 51

        # NOTE(review): the scalar call exercises *_pressure while the
        # vector call exercises *_density — possibly a copy-paste slip in
        # the original sequence; kept as-is to preserve coverage.
        m835.standard_water_vapour_pressure(h, h_0, rho_0)
        m835.standard_water_vapour_density([h, h], h_0, rho_0)

        m835.standard_pressure(h, T_0, P_0)
        m835.standard_pressure([h, h], T_0, P_0)
        m835.standard_temperature(h, T_0)
        m835.standard_temperature([h, h], T_0)

        # Seasonal profiles: every function, both seasons, scalar and
        # vector arguments (same order as the original call list).
        for fcn in (m835.water_vapour_density, m835.pressure,
                    m835.temperature):
            fcn(lat, h, season='summer')
            fcn(lat, h, season='winter')
            fcn([lat, lat], [h, h], season='summer')
            fcn([lat, lat], [h, h], season='winter')

    def test_835(self):
        """Run the smoke test against every supported version."""
        for version in self.versions:
            models.itu835.change_version(version)
            self.test_all_functions_835()
            self.assertEqual(models.itu835.get_version(), version)
class TestFunctionsRecommendation836(test.TestCase):
    """Smoke tests for ITU-R P.836 (surface water-vapour density and
    total columnar water-vapour content)."""

    def setUp(self):
        self.versions = [4, 5, 6]

    @staticmethod
    def test_all_functions_836():
        m836 = models.itu836

        lat, lon = 51, -63
        p = 0.51
        alt = 0.5

        # Both functions share a signature; exercise the same
        # scalar/vector combinations, with and without altitude.
        for fcn in (m836.surface_water_vapour_density,
                    m836.total_water_vapour_content):
            fcn(lat, lon, p)
            fcn([lat, lat], [lon, lon], p)
            fcn([lat, lat], [lon, lon], [p, p])
            fcn(lat, lon, p, alt)
            fcn([lat, lat], [lon, lon], p, [alt, alt])
            fcn([lat, lat], [lon, lon], [p, p], [alt, alt])

    def test_836(self):
        """Run the smoke test against every supported version."""
        for version in self.versions:
            models.itu836.change_version(version)
            self.test_all_functions_836()
            self.assertEqual(models.itu836.get_version(), version)
class TestFunctionsRecommendation837(test.TestCase):
    """Smoke tests for ITU-R P.837 (characteristics of precipitation)."""

    def setUp(self):
        # Recommendation versions to exercise.
        self.versions = [6, 7]

    @staticmethod
    def test_all_functions_837():
        # Mid-latitude test location, exceedance probability p and a
        # rainfall-rate value R used by the calls below.
        lat = 51
        lon = -63
        p = 0.51
        R = 10
        models.itu837.rainfall_probability(lat, lon)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/test/examples_test.py | test/examples_test.py | # -*- coding: utf-8 -*-
import matplotlib
matplotlib.use('agg')
import itur.models.itu835 as itu835
import itur.models.itu676 as itu676
import itur
import unittest as test
import numpy as np
import sys
import matplotlib.pyplot as plt
def suite():
    """Build the test suite for the examples included in itur."""
    example_suite = test.TestSuite()
    # (test-case class, test method) pairs, in execution order.
    registrations = ((TestMapAfrica, 'test_map_africa'),
                     (TestGaseousAttenuation, 'test_gaseous_attenuation'),
                     (TestMultipleLocations, 'test_multiple_locations'),
                     (TestSingleLocation, 'test_single_location'),
                     (TestSingleLocationVsFrequency,
                      'test_single_location_vs_f'),
                     (TestSingleLocationVsUnavailability,
                      'test_single_location_vs_p'))
    for case_cls, method_name in registrations:
        example_suite.addTest(case_cls(method_name))
    return example_suite
class TestMapAfrica(test.TestCase):
    """Example: attenuation and temperature maps over Africa/Europe."""

    @staticmethod
    def test_map_africa():
        # Regular 1-degree lat/lon grid over the region of interest.
        lat, lon = itur.utils.regular_lat_lon_grid(lat_max=60,
                                                   lat_min=-60,
                                                   lon_max=65,
                                                   lon_min=-35,
                                                   resolution_lon=1,
                                                   resolution_lat=1)

        # GEO satellite at 4 deg E.
        lat_sat, lon_sat = 0, 4
        h_sat = 35786 * itur.u.km

        # Elevation angle from every grid point towards the satellite.
        el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat, lat, lon)

        # Link parameters.
        f = 22.5 * itur.u.GHz   # link frequency
        D = 1.2 * itur.u.m      # antenna diameter
        p = 0.1                 # unavailability (% of time exceeded)

        # Atmospheric attenuation over the whole grid.
        Att = itur.atmospheric_attenuation_slant_path(lat, lon, f, el, p, D)

        # Surface mean temperature, converted to Celsius.
        T = itur.surface_mean_temperature(lat, lon).to(
            itur.u.Celsius, equivalencies=itur.u.temperature())

        # Map plotting may fail if the plotting backend is unavailable.
        try:
            m = itur.plotting.plot_in_map(
                Att.value, lat, lon,
                cbar_text='Atmospheric attenuation [dB]',
                cmap='magma')
            # Mark the satellite sub-point.
            m.scatter(lon_sat, lat_sat, c='white', s=20)
            m = itur.plotting.plot_in_map(
                T.value, lat, lon,
                cbar_text='Surface mean temperature [C]',
                cmap='RdBu_r')
        except RuntimeError as e:
            print(e)
class TestMultipleLocations(test.TestCase):
    """Example: attenuation breakdown for several US cities."""

    @staticmethod
    def test_multiple_locations():
        # City name -> (latitude, longitude).
        cities = {'Boston': (42.36, -71.06),
                  'New York': (40.71, -74.01),
                  'Los Angeles': (34.05, -118.24),
                  'Denver': (39.74, -104.99),
                  'Las Vegas': (36.20, -115.14),
                  'Seattle': (47.61, -122.33),
                  'Washington DC': (38.91, -77.04)}
        lat = [c[0] for c in cities.values()]
        lon = [c[1] for c in cities.values()]

        # GEO satellite at 77 deg W.
        lat_sat, lon_sat = 0, -77
        h_sat = 35786 * itur.u.km

        # Elevation angle from each city towards the satellite.
        el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat, lat, lon)

        # Link parameters.
        f = 22.5 * itur.u.GHz   # link frequency
        D = 1.2 * itur.u.m      # antenna diameter
        p = 0.1                 # unavailability (% of time exceeded)

        # Individual contributions plus the combined total.
        Ag, Ac, Ar, As, Att = itur.atmospheric_attenuation_slant_path(
            lat, lon, f, el, p, D, return_contributions=True)

        # Grouped bar chart: one wide total bar plus four narrow ones.
        city_idx = np.arange(len(cities))
        width = 0.15
        fig, ax = plt.subplots(1, 1)
        ax.bar(city_idx, Att.value, 0.6, label='Total atmospheric Attenuation')
        contributions = ((Ar, -1.5, 'Rain attenuation'),
                         (Ag, -0.5, 'Gaseous attenuation'),
                         (Ac, 0.5, 'Clouds attenuation'),
                         (As, 1.5, 'Scintillation attenuation'))
        for contrib, offset, label in contributions:
            ax.bar(city_idx + offset * width, contrib.value, width,
                   label=label)

        # Rotated city names on the x axis.
        ticks = ax.set_xticklabels([''] + list(cities.keys()))
        for t in ticks:
            t.set_rotation(45)
        ax.set_ylabel('Atmospheric attenuation exceeded for 0.1% [dB]')

        # Cosmetics.
        ax.yaxis.grid(which='both', linestyle=':')
        ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.3), ncol=2)
        plt.tight_layout(rect=(0, 0, 1, 0.85))
class TestSingleLocation(test.TestCase):
    """Example: full set of models evaluated for one ground station."""

    @staticmethod
    def test_single_location():
        # Receiver ground-station coordinates.
        lat, lon = 41.39, -71.05

        # Link parameters.
        el = 60                 # elevation angle [deg]
        f = 22.5 * itur.u.GHz   # frequency
        D = 1 * itur.u.m        # receiver antenna diameter
        p = 0.1                 # exceedance [% of the average year]

        # Atmospheric parameters at the station.
        hs = itur.topographic_altitude(lat, lon)
        T = itur.surface_mean_temperature(lat, lon)
        P = itur.models.itu835.pressure(lat, hs)
        rho_p = itur.surface_water_vapour_density(lat, lon, p, hs)
        itur.models.itu835.water_vapour_density(lat, hs)
        itur.models.itu835.temperature(lat, hs)
        itur.models.itu836.total_water_vapour_content(lat, lon, p, hs)

        # Rain- and cloud-related quantities.
        itur.models.itu618.rain_attenuation_probability(lat, lon, el, hs)
        itur.models.itu837.rainfall_probability(lat, lon)
        itur.models.itu837.rainfall_rate(lat, lon, p)
        itur.models.itu839.isoterm_0(lat, lon)
        itur.models.itu839.rain_height(lat, lon)
        itur.models.itu840.columnar_content_reduced_liquid(lat, lon, p)
        itur.models.itu676.zenit_water_vapour_attenuation(lat, lon, p, f,
                                                          h=hs)

        # Attenuation components and the combined slant-path total.
        itur.gaseous_attenuation_slant_path(f, el, rho_p, P, T)
        itur.rain_attenuation(lat, lon, f, el, hs=hs, p=p)
        itur.cloud_attenuation(lat, lon, el, f, p)
        itur.scintillation_attenuation(lat, lon, f, el, p, D)
        itur.atmospheric_attenuation_slant_path(lat, lon, f, el, p, D)
class TestSingleLocationVsFrequency(test.TestCase):
    """Example: attenuation vs. frequency and vs. elevation angle."""

    @staticmethod
    def test_single_location_vs_f():
        # Ground station coordinates (Boston).
        lat_GS, lon_GS = 42.3601, -71.0942

        # ----- Case 1: attenuation as a function of frequency -----------
        # GEO satellite at 77 deg W.
        lat_sat, lon_sat = 0, -77
        h_sat = 35786 * itur.u.km

        # Elevation angle between satellite and ground station.
        el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat,
                                        lat_GS, lon_GS)
        f = 22.5 * itur.u.GHz   # link frequency (replaced by sweep below)
        D = 1.2 * itur.u.m      # antenna diameter
        p = 1

        # Logarithmic frequency sweep.
        f = np.logspace(-0.2, 2, 100) * itur.u.GHz
        Ag, Ac, Ar, As, A = itur.atmospheric_attenuation_slant_path(
            lat_GS, lon_GS, f, el, p, D, return_contributions=True)

        fig, ax = plt.subplots(1, 1)
        for att, label in ((Ag, 'Gaseous attenuation'),
                           (Ac, 'Cloud attenuation'),
                           (Ar, 'Rain attenuation'),
                           (As, 'Scintillation attenuation'),
                           (A, 'Total atmospheric attenuation')):
            ax.loglog(f, att, label=label)
        ax.set_xlabel('Frequency [GHz]')
        ax.set_ylabel('Atmospheric attenuation [dB]')
        ax.grid(which='both', linestyle=':')
        plt.legend()

        # ----- Case 2: attenuation as a function of elevation angle -----
        f = 22.5 * itur.u.GHz
        el = np.linspace(5, 90, 100)
        Ag, Ac, Ar, As, A = itur.atmospheric_attenuation_slant_path(
            lat_GS, lon_GS, f, el, p, D, return_contributions=True)

        fig, ax = plt.subplots(1, 1)
        for att, label in ((Ag, 'Gaseous attenuation'),
                           (Ac, 'Cloud attenuation'),
                           (Ar, 'Rain attenuation'),
                           (As, 'Scintillation attenuation'),
                           (A, 'Total atmospheric attenuation')):
            ax.plot(el, att, label=label)
        ax.set_xlabel('Elevation angle [deg]')
        ax.set_ylabel('Atmospheric attenuation [dB]')
        ax.grid(which='both', linestyle=':')
        plt.legend()
class TestSingleLocationVsUnavailability(test.TestCase):
    """Example: attenuation as a function of the link unavailability."""

    @staticmethod
    def test_single_location_vs_p():
        # Ground station coordinates (Boston).
        lat_GS, lon_GS = 42.3601, -71.0942

        # GEO satellite at 77 deg W.
        lat_sat, lon_sat = 0, -77
        h_sat = 35786 * itur.u.km

        # Elevation angle between satellite and ground station.
        el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat,
                                        lat_GS, lon_GS)
        f = 22.5 * itur.u.GHz   # link frequency
        D = 1.2 * itur.u.m      # antenna diameter

        # Unavailability values, logarithmically spaced.
        p = np.logspace(-1.5, 1.5, 100)
        A_g, A_c, A_r, A_s, A_t = itur.atmospheric_attenuation_slant_path(
            lat_GS, lon_GS, f, el, p, D, return_contributions=True)

        # Semi-logarithmic plot of every contribution.
        f, ax = plt.subplots(1, 1)
        for att, label in ((A_g, 'Gaseous attenuation'),
                           (A_c, 'Cloud attenuation'),
                           (A_r, 'Rain attenuation'),
                           (A_s, 'Scintillation attenuation'),
                           (A_t, 'Total atmospheric attenuation')):
            ax.semilogx(p, att.value, label=label)
        ax.set_xlabel('Percentage of time attenuation value is exceeded [%]')
        ax.set_ylabel('Attenuation [dB]')
        ax.grid(which='both', linestyle=':')
        plt.legend()
class TestGaseousAttenuation(test.TestCase):
    """Example: reproduce the gaseous-attenuation figures of ITU-R P.676."""

    @staticmethod
    def test_gaseous_attenuation():

        def _finish_figure(ylabel, xscale, title, xlim=None, ylim=None):
            """Apply the axis/scale/grid/legend cosmetics shared by all
            three figures, in the same call order as the original code."""
            plt.xlabel('Frequency [GHz]')
            plt.ylabel(ylabel)
            plt.yscale('log')
            plt.xscale(xscale)
            if xlim is not None:
                plt.xlim(*xlim)
            if ylim is not None:
                plt.ylim(*ylim)
            plt.legend()
            plt.grid(which='both', linestyle=':', color='gray',
                     linewidth=0.3, alpha=0.5)
            plt.grid(which='major', linestyle=':', color='black')
            plt.title(title)
            plt.tight_layout()

        #######################################################################
        #               Wet vs. dry atmosphere (Figure 1)                     #
        #######################################################################
        rho_wet = 7.5 * itur.u.g / itur.u.m**3
        rho_dry = 0 * itur.u.g / itur.u.m**3
        P = 1013.25 * itur.u.hPa
        T = 15 * itur.u.deg_C

        # Frequency sweep, 0-1000 GHz.
        N_freq = 1000
        fs = np.linspace(0, 1000, N_freq)

        att_wet = itu676.gamma_exact(fs, P, rho_wet, T)
        att_dry = itu676.gamma_exact(fs, P, rho_dry, T)

        plt.figure()
        plt.plot(fs, att_wet.value, 'b--', label='Wet atmosphere')
        plt.plot(fs, att_dry.value, 'r', label='Dry atmosphere')
        _finish_figure(
            'Specific attenuation [dB/km]', 'linear',
            'FIGURE 1. - Specific attenuation due to atmospheric gases,'
            '\ncalculated at 1 GHz intervals, including line centres',
            xlim=(0, 1000), ylim=(1e-3, 1e5))

        #######################################################################
        #          Specific attenuation at different altitudes                #
        #######################################################################
        hs = np.array([0, 5, 10, 15, 20]) * itur.u.km
        N_freq = 2001
        fs = np.linspace(50, 70, N_freq)

        plt.figure()
        for h in hs:
            # Standard-atmosphere profile at this altitude.
            rho = itu835.standard_water_vapour_density(h)
            P = itu835.standard_pressure(h)
            T = itu835.standard_temperature(h)
            atts = itu676.gamma_exact(fs * itur.u.GHz, P, rho, T)
            plt.plot(fs, atts.value, label=f"Altitude {h.value} km")
        _finish_figure(
            'Specific attenuation [dB/km]', 'linear',
            'FIGURE 2. - Specific attenuation in the range 50-70 GHz'
            ' at the\n altitudes indicated, calculated at intervals of'
            ' 10 MHz\nincluding line centers (0, 5, 10 15, 20) km',
            xlim=(50, 70), ylim=(1e-3, 1e2))

        #######################################################################
        #        Line-by-line versus approximate method                       #
        #######################################################################
        el = 90
        rho = 7.5 * itur.u.g / itur.u.m**3
        P = 1013.25 * itur.u.hPa
        T = 15 * itur.u.deg_C

        N_freq = 350
        fs = np.linspace(0, 350, N_freq)

        # Both computation modes, evaluated over the whole sweep at once.
        atts_approx = itu676.gaseous_attenuation_slant_path(
            fs, el, rho, P, T, mode='approx')
        atts_exact = itu676.gaseous_attenuation_slant_path(
            fs, el, rho, P, T, mode='exact')

        plt.figure()
        plt.plot(fs, atts_approx.value, 'b--',
                 label='Approximate method Annex 2')
        plt.plot(fs, atts_exact.value, 'r', label='Exact line-by-line method')
        _finish_figure(
            'Attenuation [dB]', 'log',
            'Comparison of line-by-line method to approximate method')
if __name__ == '__main__':
    # Build the example suite and report how many cases will run.
    suite = suite()
    print('Test examples of the code')
    print('------------------------')
    print('A total of %d test-cases are going to be tested' %
          suite.countTestCases())
    sys.stdout.flush()
    # verbosity=2 prints one status line per test case.
    test.TextTestRunner(verbosity=2).run(suite)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/test/ITU_validation_report_test.py | test/ITU_validation_report_test.py | # -*- coding: utf-8 -*-
import sys
import numpy as np
import pandas as pd
import os.path as path
from collections import defaultdict, OrderedDict
import unittest as test
import itur.models as models
from itur import atmospheric_attenuation_slant_path
# Render full cell contents (no truncation) when exporting DataFrames.
pd.set_option('display.max_colwidth', None)
# Paths resolved relative to this file: the validation spreadsheets live
# in ./test_data and the generated HTML reports in ../docs/validation.
basepath = path.dirname(path.realpath(__file__))
test_data = path.join(basepath, "test_data")
html_path = path.join(basepath, "../docs/validation")
desc_validation = """This page contains the validation examples for Recommendation {0}: {1}.
All test cases were extracted from the
`ITU Validation examples file (rev 5.1) <https://www.itu.int/en/ITU-R/study-groups/rsg3/ionotropospheric/CG-3M3J-13-ValEx-Rev5_1.xlsx>`_.
.. contents:: Functions tested
:depth: 2
"""
desc_test_case = """The table below contains the results of testing function ``{0}``.
The test cases were extracted from spreadsheet ``{1}`` from the
`ITU Validation examples file (rev 5.1) <https://www.itu.int/en/ITU-R/study-groups/rsg3/ionotropospheric/CG-3M3J-13-ValEx-Rev5_1.xlsx>`_.
In addition to the input-arguments, expected result (``ITU Validation``), and
ITU-Rpy computed result (``ITUR-py Result``), the absolute and relative errors
are shown. Each test case is color-coded depending on the magnitude of the
errors (green = pass, errors are negligible, red = fail, relative error is
above 0.01%).
In addition, the code snippet below shows an example of how to generate the
first row of the results in the table:
.. code-block:: python
{2}
"""
html_header = """
<html>
<head>
<title>Validation results {0}</title>
<style>
table {{
border-collapse: collapse;
font-size: 10px;
width: 100%;
padding: 2px;
}}
th {{
background-color: black;
color: white;
}}
th, td {{
text-align: center;
padding: 2px;
width: 1%;
font-family: Arial, Helvetica, sans-serif;
white-space: nowrap;
}}
tr:nth-child(even) {{background-color: #f2f2f2;}}
tr:hover {{background-color: khaki;}}
</style>
</head>
<body>
"""
html_footer = """
<br><br>
</body>
</html>
"""
def create_ITU_suite():
    """A test suite for the ITU-P Recommendations. Recommendations tested:
    * ITU-P R-676-11
    * ITU-P R-618-13
    * ITU-P R-453-14
    * ITU-P R-837-7
    * ITU-P R-838-3
    * ITU-P R-839-4
    * ITU-P R-840-8
    * ITU-P R-1511-1
    * ITU-P R-1511-2
    """
    suite = ITU_Suite()
    # (test-case class, test methods) pairs; registration order matters
    # for the order of the generated reports.
    registrations = (
        # ITU-R P.453 (radio refractive index)
        (ITUR453_14TestCase, ["test_wet_term_radio_refractivity"]),
        # ITU-R P.618 (Earth-space propagation)
        (ITUR618_13TestCase, ["test_rain_attenuation",
                              "test_rain_probability",
                              "test_scintillation_attenuation",
                              "test_total_attenuation",
                              "test_cross_polarization_discrimination"]),
        # ITU-R P.676 (gaseous attenuation)
        (ITUR676_12TestCase, ["test_gamma0",
                              "test_gammaw",
                              "test_gamma",
                              "test_zenith_attenuation",
                              "test_attenuation_gas"]),
        # ITU-R P.836 (water vapour)
        (ITUR836_6TestCase, ["test_surface_water_vapour_density_annual",
                             "test_total_water_vapour_content_annual"]),
        # ITU-R P.837 (rainfall rate)
        (ITUR837_7TestCase, ["test_rainfall_rate",
                             "test_rainfall_rate_probability",
                             "test_rainfall_rate_R001"]),
        # ITU-R P.838 (rain specific attenuation)
        (ITUR838_3TestCase, ["test_rain_specific_attenuation"]),
        # ITU-R P.839 (rain height)
        (ITUR839_4TestCase, ["test_isoterm_0_deg",
                             "test_rain_height"]),
        # ITU-R P.840 (clouds)
        (ITUR840_8TestCase, ["test_columnar_content_reduced_liquid",
                             "test_cloud_attenuation"]),
        # ITU-R P.1510 (surface temperature)
        (ITUR1510_1TestCase, ["test_surface_mean_temperature"]),
        # ITU-R P.1511 (topography), both versions
        (ITUR1511_1TestCase, ["test_topographic_altitude"]),
        (ITUR1511_2TestCase, ["test_topographic_altitude"]),
        # ITU-R P.1623 (fade dynamics)
        (ITUR1623_1TestCase, ["test_fade_duration_cummulative_probability",
                              "test_fade_duration_number_fades",
                              "test_fade_duration_probability",
                              "test_fade_duration_total_exceedance_time"]),
    )
    for case_cls, method_names in registrations:
        for method_name in method_names:
            suite.add_test(case_cls(method_name))
    return suite
# Format HTML code
def formatter_fcn(s):
    """Render a left-aligned table cell containing *s*."""
    prefix = '\t\t\t<td style="text-align:left">'
    return prefix + str(s)
def formatter_rel_error_cell(s):
    """Render a relative-error HTML cell, color-coded by magnitude.

    Blue for NaN/inf (no meaningful relative error), green when the
    error is below 0.01 %, salmon otherwise.
    """
    value = float(s)
    if np.isnan(value) or np.isinf(value):
        color = "cornflowerblue"
    elif abs(value) < 0.01:
        color = "lightgreen"
    else:
        color = "salmon"
    return '\t\t\t<td bgcolor="{1}">{0:.3f}'.format(s, color)
def formatter_error(s):
    """Render an absolute-error HTML cell, color-coded by magnitude.

    Blue for NaN, green when |error| < 0.1, salmon otherwise. Note the
    salmon branch prints three significant digits instead of two, as in
    the original report layout.
    """
    value = float(s)
    if np.isnan(value):
        return '\t\t\t<td bgcolor="cornflowerblue">{0:.2e}'.format(s)
    if abs(value) < 0.1:
        return '\t\t\t<td bgcolor="lightgreen">{0:.2e}'.format(s)
    return '\t\t\t<td bgcolor="salmon">{0:.3e}'.format(s)
def format_table(table):
    """Clean up a pandas-generated HTML table.

    Collapses accidentally nested ``<td>`` tags and replaces the internal
    column identifiers with human-readable headers.
    """
    # Order matters: 'res_fcn' and 'error_rel' must be replaced before the
    # shorter substrings 'fcn' and 'error'.
    replacements = (
        ("<td><td", "<td"),
        ("res_val", "ITU Validation"),
        ("res_fcn", "ITU-Rpy Result"),
        ("error_rel", "Relative Error"),
        ("error", "Absolute Error"),
        ("fcn", "ITU-Rpy Function"),
    )
    for old, new in replacements:
        table = table.replace(old, new)
    return table
def formatter_digits(fcn, val):
    """Color the digits of each number in *fcn* against *val*.

    For every pair, the leading characters of the 6-decimal rendering
    that match the reference value are wrapped in dark green, and the
    first mismatching character onwards in dark red.
    """
    span = '<span style="color: {1}">{0}</span>'
    out = []
    for f_num, v_num in zip(fcn, val):
        f_str, v_str = f"{f_num:.6f}", f"{v_num:.6f}"
        # Length of the common prefix of the two rendered numbers
        n_common = 0
        for a, b in zip(f_str, v_str):
            if a != b:
                break
            n_common += 1
        if n_common:
            colored = span.format(f_str[:n_common], "darkgreen")
            if n_common < len(f_str):
                colored += span.format(f_str[n_common:], "darkred")
        else:
            colored = span.format(f_str, "darkred")
        out.append(colored)
    return out
class ITU_Suite(test.TestSuite):
    """Test suite that keeps a handle on each test case so that RST
    validation reports can be generated after the run."""

    def __init__(self):
        test.TestSuite.__init__(self)
        self.test_cases = OrderedDict({})

    def add_test(self, test_case):
        """Register *test_case* both in the suite and in the report index."""
        self.test_cases[test_case.__class__.__name__] = test_case
        self.addTest(test_case)

    def rst_reports(self, path_report=None):
        """Produce each test case's RST report; write it to *path_report*
        (as ``<name>.rst``) when a directory is given."""
        for name, case in self.test_cases.items():
            report = case.produce_rst_report()
            if not path_report:
                continue
            fname = name.lower().replace("testcase", "") + ".rst"
            with open(path.join(path_report, fname), "w", encoding="utf-8") as fd:
                fd.write(report)
class ITU_TestCase(test.TestCase):
    """Base class for the ITU-Rpy validation test cases.

    Subclasses load a CSV file with the official ITU validation examples,
    evaluate the corresponding ITU-Rpy function for every row, assert the
    results, and store everything in the class-level ``report`` mapping so
    that HTML/RST validation reports can be generated afterwards.
    """

    # Shared report store: report[class_name][test_name] -> result dict
    report = defaultdict(dict)

    def read_csv(self, path_name, columns):
        """Read the validation CSV at *path_name* and return *columns*.

        The file's second row contains the units of each column; it is
        excluded from the returned data frame and kept in ``self.units``.
        """
        self.path_name = path_name
        # skiprows=range(1, 2) drops the units row from the data itself
        df = pd.read_csv(path_name, sep=",", skiprows=range(1, 2), encoding="cp1252")
        units = pd.read_csv(path_name, sep=",", nrows=2, encoding="cp1252")
        self.units = dict(units[columns].iloc[0])
        return df[columns]

    def setUp(self):
        self.tests = []

    def __run__(self, test_name, test_fcn, df, attributes, result_value, n_places=5):
        """Evaluate *test_fcn* over every row of *df* and check the results.

        Parameters
        ----------
        test_name : str
            Key under which the results are stored in ``report``.
        test_fcn : str
            Dotted name of the function under test, resolved with ``eval``
            (trusted input: it only ever comes from the test definitions).
        df : pandas.DataFrame
            Validation data, one function evaluation per row.
        attributes : list of str
            Column names passed to the function as keyword arguments.
        result_value : str
            Column holding the expected value.
        n_places : int
            Decimal places used by ``assertAlmostEqual``.
        """
        test_fcn_name = test_fcn
        test_fcn = eval(test_fcn)
        # Add units for the result value and function
        self.units["res_val"] = self.units[result_value]
        self.units["res_fcn"] = self.units[result_value]
        # self.units['error'] = self.units[result_value]
        # self.units['error_rel'] = '(%)'
        # Evaluate all the functions
        res = []
        for i, row in df.iterrows():
            args = {a: row[a] for a in attributes}
            # Evaluate function
            res_fcn = test_fcn(**args)
            res_val = row[result_value]
            # Format dictionary to be added to report
            line = dict(args)
            line["fcn"] = test_fcn_name
            line["res_fcn"] = res_fcn.value
            line["res_val"] = res_val
            line["error"] = res_val - res_fcn.value
            # Avoid dividing by zero when the expected value is 0 and the
            # absolute error is negligible
            if res_val == 0 and abs(line["error"]) < 1e-6:
                line["error_rel"] = 0
            else:
                line["error_rel"] = round((res_val - res_fcn.value) / res_val * 100, 3)
            res.append(line)
        # Create data frame with the report
        order = ["fcn"] + attributes + ["res_val", "res_fcn", "error", "error_rel"]
        df = pd.DataFrame(res)
        self.report[self.__class__.__name__][test_name] = {
            "df": df,
            "class_name": self.__class__.__name__,
            "test_name": test_name,
            "units": self.units,
            "path_csv": self.path_name,
            "test_fcn": test_fcn_name,
            "attributes": attributes,
            "n_places": n_places,
            "report_html": df[order],
        }
        # Do the assert equal for all the tests
        for ret in res:
            res_val = ret["res_val"]
            res_fcn = ret["res_fcn"]
            try:
                self.assertAlmostEqual(res_val, res_fcn, places=n_places)
            except AssertionError as e:
                # NOTE(review): failures are printed rather than re-raised,
                # so a single bad row does not abort report generation —
                # confirm this is intentional
                print(e)

    def generate_code_example(self, report):
        """Generate a code example of the call used for this function

        Parameters
        ----------
        report : dict
            The dictionary containing the parameters used in a test case.

        Returns
        -------
        str
            A string with an example of code used to call this test.
        """
        ret = [" import itur", "", " # Define input attributes"]
        attributes = report["attributes"]
        test_fcn = report["test_fcn"]
        test_fcn_name = report["test_fcn"].split(".")[-1]
        row_1 = report["df"].iloc[0]
        units = report["units"]
        # Write the attributes
        for attr_name, attr_val in zip(attributes, row_1[attributes]):
            ret.append(f" {attr_name} = {attr_val} # {units[attr_name]}")
        # Add call to test-function
        ret.extend(
            [
                "",
                f" # Make call to test-function {test_fcn_name}",
                f" itur_val = itur.{test_fcn}({', '.join([att + '=' + att for att in attributes])})",
            ]
        )
        # Compute errors
        ret.extend(
            [
                "",
                " # Compute error with respect to value in ITU example file",
                f" ITU_example_val = {row_1['res_val']} # {units['res_val']}",
                " error = ITU_example_val - itur_val.value",
                " error_rel = error / ITU_example_val * 100 # (%)",
            ]
        )
        return "\n".join(ret)

    def produce_rst_report(self):
        """Build the RST validation report for this test case.

        One HTML results table per test is written to ``html_path`` and
        referenced from the returned RST text.
        """
        ret = []
        title = f"Validation results {self.itu_name}"
        ret.append(title)
        ret.append("=" * len(title))
        ret.append("")
        ret.append(desc_validation.format(self.itu_name, self.itu_description))
        for test_name in self.report[self.__class__.__name__]:
            # Create HTML table for this test
            report = self.report[self.__class__.__name__][test_name]
            table = self.create_html_table(report, include_header=True)
            html_file = path.join(html_path, test_name + "_table.html")
            with open(html_file, "w", encoding="utf-8") as fd:
                fd.write(table)
            # Add an RST section for this test function
            test_fcn_name = report["test_fcn"].split(".")[-1]
            test_case_name = f"Function {test_fcn_name}"
            ret.append(test_case_name)
            ret.append("-" * len(test_case_name))
            ret.append("")
            ret.append(
                desc_test_case.format(
                    test_fcn_name,
                    path.basename(report["path_csv"]),
                    self.generate_code_example(report),
                )
            )
            ret.extend(
                [
                    ".. raw:: html",
                    f" :file: {test_name + '_table.html'}",
                    "",
                    "",
                ]
            )
        return "\n".join(ret)

    def create_html_table(self, report, include_header=False):
        """Render the ``report_html`` data frame of *report* as HTML."""
        fmtrs = {
            "error_rel": formatter_rel_error_cell,
            "error": formatter_error,
            "fcn": formatter_fcn,
        }
        # NOTE(review): when include_header is False, `table` below is
        # used before assignment — confirm all callers pass True
        if include_header:
            table = html_header.format(report["test_fcn"])
        df = report["report_html"]
        # Highlight the digits of the computed value that match the reference
        df["res_fcn"] = formatter_digits(df["res_fcn"], df["res_val"])
        # Add units to header attributes
        col_dict = {}
        for col in df.columns:
            if col in report["units"]:
                col_dict[col] = f"{col} {report['units'][col]}"
            else:
                col_dict[col] = col
        df.rename(columns=col_dict, inplace=True)
        table += df.to_html(
            bold_rows=True,
            index=False,
            justify="center",
            table_id=report["test_name"].lower(),
            escape=False,
            formatters=fmtrs,
        )
        table = format_table(table)
        if include_header:
            table += html_footer
        return table
class ITUR453_14TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.453-14 (radio refractive index)."""

    itu_name = "ITU-R P.453-14"
    itu_description = "TBD"

    def test_wet_term_radio_refractivity(self):
        # Pin the recommendation version under test. The validation data
        # comes from the P.453-14 example file, so version 14 must be
        # active (the original code set version 13 by copy-paste mistake).
        models.itu453.change_version(14)
        # Read the test data
        df = self.read_csv(
            path.join(test_data, "453/ITURP453-14_Nwet.csv"),
            columns=["lat", "lon", "p", "Nwet"],
        )
        # Run test and generate the report
        self.__run__(
            "test_wet_term_radio_refractivity",
            test_fcn="models.itu453.map_wet_term_radio_refractivity",
            df=df,
            attributes=["lat", "lon", "p"],
            result_value="Nwet",
            n_places=5,
        )
class ITUR618_13TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.618-13."""

    itu_name = "ITU-R P.618-13"
    itu_description = (
        "Propagation data and prediction methods required for"
        + " the design of Earth-space telecommunication systems"
    )

    def _validate(self, test_name, test_fcn, csv_file, attrs, out_col,
                  n_places=5):
        """Pin version 13, load *csv_file* and run the standard comparison."""
        models.itu618.change_version(13)
        data = self.read_csv(
            path.join(test_data, csv_file), columns=attrs + [out_col]
        )
        self.__run__(
            test_name,
            test_fcn=test_fcn,
            df=data,
            attributes=attrs,
            result_value=out_col,
            n_places=n_places,
        )

    def test_rain_attenuation(self):
        self._validate(
            "test_rain_attenuation",
            "models.itu618.rain_attenuation",
            "618/ITURP618-13_A_rain.csv",
            ["lat", "lon", "hs", "el", "f", "tau", "p", "R001"],
            "A_rain",
        )

    def test_rain_probability(self):
        self._validate(
            "test_rain_probability",
            "models.itu618.rain_attenuation_probability",
            "618/ITURP618-13_A_rain.csv",
            ["lat", "lon", "hs", "el", "Ls", "P0"],
            "P_rain",
        )

    def test_scintillation_attenuation(self):
        self._validate(
            "test_scintillation_attenuation",
            "models.itu618.scintillation_attenuation",
            "618/ITURP618-13_A_sci.csv",
            ["lat", "lon", "f", "el", "p", "D", "eta"],
            "A_scin",
        )

    def test_cross_polarization_discrimination(self):
        self._validate(
            "test_cross_polarization_discrimination",
            "models.itu618.rain_cross_polarization_discrimination",
            "618/ITURP618-13_A_xpd.csv",
            ["f", "el", "p", "tau", "Ap"],
            "XPD",
        )

    def test_total_attenuation(self):
        # Total attenuation combines several effects, so the comparison is
        # slightly looser (4 decimal places instead of 5).
        self._validate(
            "test_total_attenuation",
            "atmospheric_attenuation_slant_path",
            "618/ITURP618-13_A_total.csv",
            ["lat", "lon", "f", "el", "p", "D", "eta", "tau", "hs"],
            "A_total",
            n_places=4,
        )
class ITUR676_12TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.676-12."""

    itu_name = "ITU-R P.676-12"
    itu_description = "Attenuation by atmospheric gases and related effects"

    def _validate(self, test_name, test_fcn, csv_file, attrs, out_col,
                  columns=None, n_places=5):
        """Pin version 12, load *csv_file* and run the standard comparison.

        *columns* defaults to *attrs* followed by *out_col*; pass it
        explicitly when the CSV column order differs from the argument
        order used by the function under test.
        """
        models.itu676.change_version(12)
        if columns is None:
            columns = attrs + [out_col]
        data = self.read_csv(path.join(test_data, csv_file), columns=columns)
        self.__run__(
            test_name,
            test_fcn=test_fcn,
            df=data,
            attributes=attrs,
            result_value=out_col,
            n_places=n_places,
        )

    def test_gamma0(self):
        self._validate(
            "test_gamma0",
            "models.itu676.gamma0_exact",
            "676/ITURP676-12_gamma.csv",
            ["f", "P", "rho", "T"],
            "gamma0",
        )

    def test_gammaw(self):
        self._validate(
            "test_gammaw",
            "models.itu676.gammaw_exact",
            "676/ITURP676-12_gamma.csv",
            ["f", "P", "rho", "T"],
            "gammaw",
        )

    def test_gamma(self):
        self._validate(
            "test_gamma",
            "models.itu676.gamma_exact",
            "676/ITURP676-12_gamma.csv",
            ["f", "P", "rho", "T"],
            "gamma",
        )

    def test_attenuation_gas(self):
        # The CSV column order (P before rho) differs from the keyword
        # order used for the function call, so both are passed explicitly.
        self._validate(
            "test_attenuation_gas",
            "models.itu676.gaseous_attenuation_slant_path",
            "676/ITURP676-12_A_gas.csv",
            ["f", "el", "rho", "P", "T", "h", "V_t"],
            "A_gas",
            columns=["f", "el", "P", "rho", "T", "h", "V_t", "A_gas"],
        )

    def test_zenith_attenuation(self):
        self._validate(
            "test_zenith_attenuation",
            "models.itu676.zenit_water_vapour_attenuation",
            "676/ITURP676-12_zenith_attenuation.csv",
            ["lat", "lon", "p", "f", "h", "V_t"],
            "Aw",
        )
class ITUR836_6TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.836-6."""

    itu_name = "ITU-R P.836-6"
    itu_description = "Water vapour: surface density and total columnar content"

    def _validate(self, test_name, test_fcn, csv_file, out_col):
        """Pin version 6, load *csv_file* and run the standard comparison."""
        models.itu836.change_version(6)
        attrs = ["lat", "lon", "alt", "p"]
        data = self.read_csv(
            path.join(test_data, csv_file), columns=attrs + [out_col]
        )
        self.__run__(
            test_name,
            test_fcn=test_fcn,
            df=data,
            attributes=attrs,
            result_value=out_col,
            n_places=5,
        )

    def test_surface_water_vapour_density_annual(self):
        self._validate(
            "test_surface_water_vapour_density_annual",
            "models.itu836.surface_water_vapour_density",
            "836/ITURP836-6_surface_water_vapour_density_annual.csv",
            "rho",
        )

    def test_total_water_vapour_content_annual(self):
        self._validate(
            "test_total_water_vapour_content_annual",
            "models.itu836.total_water_vapour_content",
            "836/ITURP836-6_total_water_vapour_content_annual.csv",
            "V",
        )
class ITUR837_7TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.837-7."""

    itu_name = "ITU-R P.837-7"
    itu_description = "Characteristics of precipitation for propagation modelling"

    def _validate(self, test_name, test_fcn, csv_file, attrs, out_col,
                  n_places=5):
        """Pin version 7, load *csv_file* and run the standard comparison."""
        models.itu837.change_version(7)
        data = self.read_csv(
            path.join(test_data, csv_file), columns=attrs + [out_col]
        )
        self.__run__(
            test_name,
            test_fcn=test_fcn,
            df=data,
            attributes=attrs,
            result_value=out_col,
            n_places=n_places,
        )

    def test_rainfall_rate(self):
        # General rainfall rates are compared with a looser tolerance
        # (3 decimal places) than the R001 subset.
        self._validate(
            "test_rainfall_rate",
            "models.itu837.rainfall_rate",
            "837/ITURP837-7_rainfall_rate.csv",
            ["lat", "lon", "p"],
            "Rp",
            n_places=3,
        )

    def test_rainfall_rate_R001(self):
        self._validate(
            "test_rainfall_rate_R001",
            "models.itu837.rainfall_rate",
            "837/ITURP837-7_rainfall_rate_R001.csv",
            ["lat", "lon", "p"],
            "Rp",
        )

    def test_rainfall_rate_probability(self):
        self._validate(
            "test_rainfall_rate_probability",
            "models.itu837.rainfall_probability",
            "837/ITURP837-7_rainfall_rate_probability.csv",
            ["lat", "lon"],
            "p",
        )
class ITUR838_3TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.838-3."""

    itu_name = "ITU-R P.838-3"
    itu_description = "Specific attenuation model for rain for use in prediction methods"

    def test_rain_specific_attenuation(self):
        # Pin the recommendation version under test
        models.itu838.change_version(3)
        csv_file = path.join(
            test_data, "838/ITURP838-3_rain_specific_attenuation.csv"
        )
        data = self.read_csv(csv_file, columns=["el", "f", "R", "tau", "gamma_r"])
        self.__run__(
            "test_rain_specific_attenuation",
            test_fcn="models.itu838.rain_specific_attenuation",
            df=data,
            attributes=["el", "f", "R", "tau"],
            result_value="gamma_r",
            n_places=5,
        )
class ITUR839_4TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.839-4."""

    itu_name = "ITU-R P.839-4"
    itu_description = "Rain height model for prediction methods"

    def _validate(self, test_name, test_fcn, out_col):
        """Pin version 4, load the shared rain-height CSV and compare."""
        models.itu839.change_version(4)
        data = self.read_csv(
            path.join(test_data, "839/ITURP839-4_rain_height.csv"),
            columns=["lat", "lon", out_col],
        )
        self.__run__(
            test_name,
            test_fcn=test_fcn,
            df=data,
            attributes=["lat", "lon"],
            result_value=out_col,
            n_places=5,
        )

    def test_isoterm_0_deg(self):
        self._validate("test_isoterm_0_deg", "models.itu839.isoterm_0", "h0")

    def test_rain_height(self):
        self._validate("test_rain_height", "models.itu839.rain_height", "hr")
class ITUR840_8TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.840-8."""

    itu_name = "ITU-R P.840-8"
    itu_description = "Attenuation due to clouds and fog"

    def test_columnar_content_reduced_liquid(self):
        # Pin the recommendation version under test
        models.itu840.change_version(8)
        csv_file = path.join(
            test_data, "840/ITURP840-8_columnar_content_reduced_liquid.csv"
        )
        data = self.read_csv(csv_file, columns=["lat", "lon", "p", "Lred"])
        self.__run__(
            "test_columnar_content_reduced_liquid",
            test_fcn="models.itu840.columnar_content_reduced_liquid",
            df=data,
            attributes=["lat", "lon", "p"],
            result_value="Lred",
            n_places=5,
        )

    def test_cloud_attenuation(self):
        # Pin the recommendation version under test
        models.itu840.change_version(8)
        csv_file = path.join(test_data, "840/ITURP840-8_cloud_attenuation.csv")
        data = self.read_csv(
            csv_file, columns=["lat", "lon", "f", "el", "p", "Ac"]
        )
        self.__run__(
            "test_cloud_attenuation",
            test_fcn="models.itu840.cloud_attenuation",
            df=data,
            attributes=["lat", "lon", "f", "el", "p"],
            result_value="Ac",
            n_places=5,
        )
class ITUR1510_1TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.1510-1."""

    itu_name = "ITU-R P.1510-1"
    itu_description = "Mean surface temperature"

    def test_surface_mean_temperature(self):
        # Pin the recommendation version under test
        models.itu1510.change_version(1)
        data = self.read_csv(
            path.join(test_data, "1510/ITURP1510-1_temperature.csv"),
            columns=["lat", "lon", "T"],
        )
        self.__run__(
            "test_surface_mean_temperature",
            test_fcn="models.itu1510.surface_mean_temperature",
            df=data,
            attributes=["lat", "lon"],
            result_value="T",
            n_places=5,
        )
class ITUR1511_1TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.1511-1."""

    itu_name = "ITU-R P.1511-1"
    itu_description = "Topography for Earth-to-space propagation modelling"

    def test_topographic_altitude(self):
        # Pin the recommendation version under test
        models.itu1511.change_version(1)
        data = self.read_csv(
            path.join(test_data, "1511/ITURP1511-1_topographic_altitude.csv"),
            columns=["lat", "lon", "hs"],
        )
        self.__run__(
            "test_topographic_altitude",
            test_fcn="models.itu1511.topographic_altitude",
            df=data,
            attributes=["lat", "lon"],
            result_value="hs",
            n_places=5,
        )
class ITUR1511_2TestCase(ITU_TestCase):
    """Validation tests for ITU-R P.1511-2."""

    itu_name = "ITU-R P.1511-2"
    itu_description = "Topography for Earth-to-space propagation modelling"

    def test_topographic_altitude(self):
        # Pin the recommendation version under test
        models.itu1511.change_version(2)
        data = self.read_csv(
            path.join(test_data, "1511/ITURP1511-2_topographic_altitude.csv"),
            columns=["lat", "lon", "hs"],
        )
        self.__run__(
            "test_topographic_altitude",
            test_fcn="models.itu1511.topographic_altitude",
            df=data,
            attributes=["lat", "lon"],
            result_value="hs",
            n_places=5,
        )
class ITUR1623_1TestCase(ITU_TestCase):
itu_name = "ITU-R P.1623-1"
itu_description = "Prediction method of fade dynamics on Earth-space paths"
def test_fade_duration_probability(self):
    """Validate fade_duration_probability against the P.1623-1 examples."""
    # Pin the recommendation version under test
    models.itu1623.change_version(1)
    csv_file = path.join(test_data, "1623/ITURP1623-1_fade_duration_params.csv")
    data = self.read_csv(csv_file, columns=["D", "A", "el", "f", "P"])
    self.__run__(
        "test_fade_duration_probability",
        test_fcn="models.itu1623.fade_duration_probability",
        df=data,
        attributes=["D", "A", "el", "f"],
        result_value="P",
        n_places=5,
    )
def test_fade_duration_cummulative_probability(self):
    """Validate the cumulative fade-duration probability against P.1623-1."""
    # Pin the recommendation version under test
    models.itu1623.change_version(1)
    csv_file = path.join(test_data, "1623/ITURP1623-1_fade_duration_params.csv")
    data = self.read_csv(csv_file, columns=["D", "A", "el", "f", "F"])
    self.__run__(
        "test_fade_duration_cummulative_probability",
        test_fcn="models.itu1623.fade_duration_cummulative_probability",
        df=data,
        attributes=["D", "A", "el", "f"],
        result_value="F",
        n_places=5,
    )
def test_fade_duration_total_exceedance_time(self):
# Set the version to the
models.itu1623.change_version(1)
path_file = "1623/ITURP1623-1_fade_duration_params.csv"
# Read the test data
df = self.read_csv(
path.join(test_data, path_file), columns=["D", "A", "el", "f", "T_tot", "T"]
)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/test/__init__.py | test/__init__.py | __all__ = [
'itur_test',
'ITU_validation_report_test',
'ITU_validation_test',
'examples_test']
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/test/ITU_validation_test.py | test/ITU_validation_test.py | # -*- coding: utf-8 -*-
import unittest as test
import itur
import itur.models as models
import sys
from astropy import units as u
def suite():
    """Build the test suite for the ITU-R P Recommendations.

    Recommendations tested:
    * ITU-R P.453-12 and P.453-13
    * ITU-R P.618-12 and P.618-13
    * ITU-R P.676-9 and P.676-11
    * ITU-R P.836-6
    * ITU-R P.837-6 and P.837-7
    * ITU-R P.838-3
    * ITU-R P.839-4
    * ITU-R P.840-7
    * ITU-R P.1511-1 and P.1511-2
    """
    suite = test.TestSuite()

    # Ensure models are in the right version
    models.itu453.change_version(13)
    models.itu618.change_version(13)
    models.itu676.change_version(11)
    models.itu836.change_version(6)
    models.itu837.change_version(7)
    models.itu838.change_version(3)
    models.itu839.change_version(4)
    models.itu840.change_version(7)
    models.itu1510.change_version(1)
    models.itu1511.change_version(1)

    # ITU-R P.676 tests (Gaseous attenuation)
    suite.addTest(ITUR676_9TestCase('test_gammaw'))
    suite.addTest(ITUR676_9TestCase('test_gamma0'))
    # suite.addTest(ITUR676_9TestCase('test_zenit_water_vapour_attenuation'))
    suite.addTest(ITUR676_11TestCase('test_gammaw_exact'))
    suite.addTest(ITUR676_11TestCase('test_gamma0_exact'))
    suite.addTest(ITUR676_11TestCase('test_gammaw_approx'))
    suite.addTest(ITUR676_11TestCase('test_gamma0_approx'))
    suite.addTest(ITUR676_11TestCase('test_zenit_water_vapour_attenuation'))

    # ITU-R P.618 tests (Rain attenuation)
    suite.addTest(ITUR618_12TestCase(
        'test_rain_cross_polarization_discrimination'))
    suite.addTest(ITUR618_12TestCase('test_rain_attenuation'))
    suite.addTest(ITUR618_12TestCase('test_scintillation_attenuation'))
    suite.addTest(ITUR618_13TestCase('test_rain_attenuation'))
    suite.addTest(ITUR618_13TestCase('test_probability_of_rain_attenuation'))
    # suite.addTest(ITUR618_13TestCase('test_site_diversity'))
    suite.addTest(ITUR618_13TestCase('test_scintillation_attenuation'))
    suite.addTest(ITUR618_13TestCase(
        'test_rain_cross_polarization_discrimination'))
    suite.addTest(ITUR618_13TestCase('test_total_attenuation'))

    # ITU-R P.453 tests (Wet term radio refractivity)
    suite.addTest(ITUR453_12TestCase('test_wet_term_radio_refractivity'))
    suite.addTest(ITUR453_13TestCase('test_wet_term_radio_refractivity'))

    # ITU-R P.836 tests (Water vapour density)
    suite.addTest(ITUR836_6TestCase('test_surface_water_vapour_density'))
    suite.addTest(ITUR836_6TestCase('test_total_water_vapour_content'))

    # ITU-R P.837 tests (Rainfall rate)
    suite.addTest(ITUR837_6TestCase('test_rainfall_rate'))
    suite.addTest(ITUR837_7TestCase('test_rainfall_rate'))
    suite.addTest(ITUR837_7TestCase('test_rainfall_probability'))
    suite.addTest(ITUR837_7TestCase('test_rainfall_rate_R001'))

    # ITU-R P.838 tests (Rainfall specific attenuation)
    suite.addTest(ITUR838_3TestCase('test_rain_specific_attenuation'))

    # ITU-R P.839 tests (Rain height)
    suite.addTest(ITUR839_4TestCase('test_isoterm_0_deg'))
    suite.addTest(ITUR839_4TestCase('test_rain_height'))

    # ITU-R P.840 tests (Clouds attenuation)
    # suite.addTest(ITUR840_4TestCase('test_columnar_content_reduced_liquid'))
    # suite.addTest(ITUR840_4TestCase('test_cloud_attenuation'))
    suite.addTest(ITUR840_7TestCase('test_columnar_content_reduced_liquid'))
    suite.addTest(ITUR840_7TestCase('test_cloud_attenuation'))

    # ITU-R P.1511 tests (Topographic altitude)
    suite.addTest(ITUR1511_1TestCase('test_topographic_altitude'))
    suite.addTest(ITUR1511_2TestCase('test_topographic_altitude'))

    return suite
class ITUR453_12TestCase(test.TestCase):
    """Validation tests for ITU-R P.453-12 wet-term radio refractivity."""

    def setUp(self):
        # Pin the recommendation version under test
        models.itu453.change_version(12)

    def test_wet_term_radio_refractivity(self):
        """Check Nwet at a set of reference locations.

        The original test repeated every assertion verbatim (copy-paste);
        the duplicates are removed and the unique (lat, lon, expected)
        triplets checked in a single data-driven loop.
        """
        cases = [
            (51.5, 359.86, 45.130667),
            (41.9, 12.49, 53.756489),
            (33.94, 18.43, 76.349680),
            (22.9, 316.77, 87.907733),
            (25.78, 279.78, 101.416373),
            (28.717, 77.3, 60.060569),
            (3.133, 101.7, 105.920333),
            (9.05, 38.7, 50.162000),
        ]
        for lat, lon, expected in cases:
            self.assertAlmostEqual(
                models.itu453.map_wet_term_radio_refractivity(lat, lon).value,
                expected, places=5)
class ITUR453_13TestCase(test.TestCase):
    """Validation tests for ITU-R P.453-13 wet-term radio refractivity."""

    def setUp(self):
        # Pin the recommendation version under test
        models.itu453.change_version(13)

    def test_wet_term_radio_refractivity(self):
        # (lat, lon, expected Nwet) triplets, all for p = 50 %
        cases = [
            (3.133, 101.7, 128.14080027),
            (22.9, -43.23, 104.35847467),
            (23, 30, 36.47166667),
            (25.78, -80.22, 113.2738672),
            (28.717, 77.3, 75.66013547),
            (33.94, 18.43, 80.14015964),
            (41.9, 12.49, 61.21890044),
            (51.5, -0.14, 50.38926222),
        ]
        for lat, lon, expected in cases:
            self.assertAlmostEqual(
                models.itu453.map_wet_term_radio_refractivity(
                    lat, lon, 50).value,
                expected, places=5)
class ITUR676_9TestCase(test.TestCase):
def setUp(self):
    # Pin both recommendation versions used by these validation examples
    # (presumably the P.676-9 tests depend on P.836-4 water-vapour data —
    # TODO confirm)
    models.itu676.change_version(9)
    models.itu836.change_version(4)
def test_gammaw(self):
    """Validate the approximate water-vapour specific attenuation."""
    # The ITU validation examples use a Kelvin conversion offset of 273
    # instead of 273.15, hence the -0.15 correction applied below.
    rho = 4.98154290000
    T = (5.9435147000 - 0.15) * u.deg_C
    cases = [
        (12, 0.00705700000),
        (20, 0.06742720000),
        (60, 0.11538020000),
        (90, 0.25568340000),
        (130, 0.56358380000),
    ]
    for f, expected in cases:
        self.assertAlmostEqual(
            models.itu676.gammaw_approx(f, 1013.25, rho, T).value,
            expected, places=5)
def test_gamma0(self):
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
282.724 - 0.15).value,
0.00941327, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.00898682, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.00851359, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
282.724 - 0.15).value,
0.00941327, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.00898682, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.00851359, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
282.724 - 0.15).value,
0.00941327, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.00898682, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.00851359, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
282.724 - 0.15).value,
0.00941327, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.00898682, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.00851359, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
282.724 - 0.15).value,
0.02043748, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.01954568, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.01856193, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
282.724 - 0.15).value,
0.02043748, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.01954568, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.01856193, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
282.724 - 0.15).value,
0.02043748, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.01954568, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.01856193, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
282.724 - 0.15).value,
0.02043748, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.4834667 - 0.15).value,
0.01954568, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
293.1487022 - 0.15).value,
0.01856193, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
296.602 - 0.15).value,
0.00824203, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(
14.25, 1013.25, 7.5, 296.7208533 - 0.15).value,
0.0082329, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
296.602 - 0.15).value,
0.00824203, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
296.7208533 - 0.15).value,
0.0082329, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
296.602 - 0.15).value,
0.00824203, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
296.7208533 - 0.15).value,
0.0082329, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
296.602 - 0.15).value,
0.00824203, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(
14.25, 1013.25, 7.5, 296.7208533 - 0.15).value,
0.0082329, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.602 - 0.15).value,
0.01800011, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.7208533 - 0.15).value,
0.01798125, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.602 - 0.15).value,
0.01800011, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.7208533 - 0.15).value,
0.01798125, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.602 - 0.15).value,
0.01800011, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.7208533 - 0.15).value,
0.01798125, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.602 - 0.15).value,
0.01800011, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
296.7208533 - 0.15).value,
0.01798125, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.00805331, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.00814064, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.444 - 0.15).value,
0.00899025, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.00805331, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.00814064, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.444 - 0.15).value,
0.00899025, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.00805331, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.00814064, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.444 - 0.15).value,
0.00899025, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.00805331, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.00814064, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(14.25, 1013.25, 7.5,
287.444 - 0.15).value,
0.00899025, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.01761077, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.01779083, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.444 - 0.15).value,
0.01955282, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.01761077, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.01779083, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.444 - 0.15).value,
0.01955282, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.01761077, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.01779083, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.444 - 0.15).value,
0.01955282, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
299.0966578 - 0.15).value,
0.01761077, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
297.9322267 - 0.15).value,
0.01779083, places=5)
self.assertAlmostEqual(
models.itu676.gamma0_approx(29, 1013.25, 7.5,
287.444 - 0.15).value,
0.01955282, places=5)
# def zenit_water_vapour_attenuation(self, lat, lon, el, p, f, alt):
# gamma_w = models.itu676.zenit_water_vapour_attenuation(lat,
# lon,
# p,
# f,
# None,
# alt=alt).value
# return gamma_w / np.sin(np.deg2rad(el))
#
# def test_zenit_water_vapour_attenuation(self):
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 1, 14.25, 0.06916422),
# 0.12789267, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 1, 14.25, 0.05670104),
# 0.10865204, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 1, 14.25, 0),
# 0.10205633, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 0.1, 14.25, 0.06916422),
# 0.15315923, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 0.1, 14.25, 0.05670104),
# 0.12223686, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 0.1, 14.25, 0),
# 0.12410189, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 0.01, 14.25, 0.06916422),
# 0.15315923, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 0.01, 14.25, 0.05670104),
# 0.12223686, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 0.01, 14.25, 0),
# 0.12410189, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 0.001, 14.25, 0.06916422),
# 0.15315923, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 0.001, 14.25, 0.05670104),
# 0.12223686, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 0.001, 14.25, 0),
# 0.12410189, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 1, 29, 0.06916422),
# 0.60896934, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 1, 29, 0.05670104),
# 0.51690529, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 1, 29, 0),
# 0.48519817, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 0.1, 29, 0.06916422),
# 0.72784676, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 0.1, 29, 0.05670104),
# 0.58076456, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 0.1, 29, 0),
# 0.58863533, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 0.01, 29, 0.06916422),
# 0.72784676, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 0.01, 29, 0.05670104),
# 0.58076456, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 0.01, 29, 0),
# 0.58863533, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 51.5, 359.86, 30.87067768, 0.001, 29, 0.06916422),
# 0.72784676, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 41.9, 12.49, 40.97052773, 0.001, 29, 0.05670104),
# 0.58076456, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 33.94, 18.43, 47.91280491, 0.001, 29, 0),
# 0.58863533, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 1, 14.25, 0),
# 0.1181882, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 1, 14.25, 0.00007511),
# 0.16093386, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 0.1, 14.25, 0),
# 0.13730617, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 0.1, 14.25, 0.00007511),
# 0.17798382, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 0.01, 14.25, 0),
# 0.13730617, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 0.01, 14.25, 0.00007511),
# 0.17798382, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 0.001, 14.25, 0),
# 0.13730617, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 0.001, 14.25, 0.00007511),
# 0.17798382, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 1, 29, 0),
# 0.55983815, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 1, 29, 0.00007511),
# 0.76047761, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 0.1, 29, 0),
# 0.64906814, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 0.1, 29, 0.00007511),
# 0.83981774, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 0.01, 29, 0),
# 0.64906814, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 0.01, 29, 0.00007511),
# 0.83981774, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 22.9, 316.77, 59.81487174, 0.001, 29, 0),
# 0.64906814, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 25.78, 279.78, 49.20900369, 0.001, 29, 0.00007511),
# 0.83981774, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 1, 14.25, 0.21755946),
# 0.18628614, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 1, 14.25, 0.23610446),
# 0.13468573, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 9.05, 38.7, 38.14104832, 1, 14.25, 2.45000492),
# 0.08369587, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 0.1, 14.25, 0.21755946),
# 0.20242415, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 0.1, 14.25, 0.23610446),
# 0.14372476, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 9.05, 38.7, 38.14104832, 0.1, 14.25, 2.45000492),
# 0.09153026, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 0.01, 14.25, 0.21755946),
# 0.20242415, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 0.01, 14.25, 0.23610446),
# 0.14372476, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 9.05, 38.7, 38.14104832, 0.01, 14.25, 2.45000492),
# 0.09153026, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 0.001, 14.25, 0.21755946),
# 0.20242415, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 0.001, 14.25, 0.23610446),
# 0.14372476, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 9.05, 38.7, 38.14104832, 0.001, 14.25, 2.45000492),
# 0.09153026, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 1, 29, 0.21755946),
# 0.8771945, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 1, 29, 0.23610446),
# 0.63623574, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 9.05, 38.7, 38.14104832, 1, 29, 2.45000492),
# 0.39942177, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 0.1, 29, 0.21755946),
# 0.95194476, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 0.1, 29, 0.23610446),
# 0.67829402, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 9.05, 38.7, 38.14104832, 0.1, 29, 2.45000492),
# 0.43646179, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 28.717, 77.3, 55.90591362, 0.01, 29, 0.21755946),
# 0.95194476, places=5)
# self.assertAlmostEqual(
# self.zenit_water_vapour_attenuation(
# 3.133, 101.7, 67.76751981, 0.01, 29, 0.23610446),
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | true |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/docs/build_module_tables.py | docs/build_module_tables.py | # -*- coding: utf-8 -*-
import re
import requests
import importlib
from functools import reduce
def make_rst_table(grid):
    """Render *grid* (a list of rows of strings) as an RST grid table.

    Every cell is padded to the width of the longest cell in the whole
    table plus two spaces. The divider after the first row uses '=' so
    that row is rendered as the table header.
    """
    cell_width = 2 + max(len(cell) for row in grid for cell in row)
    num_cols = len(grid[0])
    pieces = [table_div(num_cols, cell_width, 0)]
    is_header = 1
    for row in grid:
        body = "| " + "| ".join(
            normalize_cell(cell, cell_width - 1) for cell in row)
        # Pad short rows (fewer cells than num_cols) out to the full width.
        padding = " " * ((cell_width + 1) * num_cols - len(body))
        pieces.append(body + padding + "|\n")
        pieces.append(table_div(num_cols, cell_width, is_header))
        is_header = 0
    return "".join(pieces)
def table_div(num_cols, col_width, header_flag):
    """Return one RST table divider line, e.g. ``+----+----+``.

    A header divider (``header_flag == 1``) uses '=', any other flag
    value produces a regular '-' divider.
    """
    fill = "=" if header_flag == 1 else "-"
    return num_cols * ("+" + fill * col_width) + "+\n"
def normalize_cell(string, length):
    """Left-justify *string* with spaces to *length* (never truncates)."""
    # str.ljust pads with spaces and returns the string unchanged when it
    # is already at least *length* characters long, exactly like the
    # manual ``string + (length - len(string)) * " "`` computation.
    return string.ljust(length)
# URL templates for a recommendation's landing page and its English PDF.
URL = "https://www.itu.int/rec/R-REC-P.{0}/en"
URL_PDF = "https://www.itu.int/dms_pubrec/itu-r/rec/p/R-REC-P.{0}!!PDF-E.pdf"
# Recommendations for which an RST summary table is generated.
RECs = [618, 676, 453, 530, 835, 836, 837, 838, 839, 840, 1510, 1511]
# RECs = [840]
for rec in RECs:
    rec = str(rec)
    url = URL.format(rec)
    # Fetch the ITU landing page and scrape it with regexes.
    # NOTE(review): this assumes the current ITU page markup (<strong> title,
    # "Approved in ..." paragraph, versioned <a href> list) — fragile if the
    # site changes; verify when it breaks.
    resp = requests.get(url)
    txt = resp.text.replace("\xa0", " ")
    header = re.findall("<strong>(.+)</strong>", txt)[0]
    title = header.split(":")[0].strip().capitalize()
    desc = header.split(":")[-1].strip()
    approved_in = re.findall("Approved in (.+)</p>", txt)[0]
    # Every (link, "P.XXX-N (MM/YY)") pair listed on the page.
    recs_and_links = re.findall('<a href="(.+)"><strong>(.+)</strong></a>', txt)
    # Latest version
    link_latest, name_latest = recs_and_links[0]
    name_latest, date_latest = name_latest.strip().split(" ")
    date_latest = date_latest.replace("(", "").replace(")", "")
    code_latest = rec + link_latest.split(rec)[-1]
    link_latest = URL.format(code_latest)
    pdf_latest = URL_PDF.format(code_latest)
    # Header rows of the RST table: title, description and latest version.
    table = [
        ["Title", "PDF", "Latest approved in"],
        [
            f"`Recommendation ITU-R {title} <{url}>`_",
            f"`[PDF] <{pdf_latest}>`_",
            approved_in,
        ],
        [desc],
        ["**Current recommendation version (In force)**", "", "**Date**"],
        [
            f"`Recommendation ITU-R {name_latest} <{link_latest}>`_",
            f"`[PDF] <{pdf_latest}>`_",
            date_latest,
        ],
    ]
    rec_implemented = [["**Recommendations implemented in ITU-Rpy**", "", "**Date**"]]
    rec_not_implemented = [
        ["**Recommendations not implemented in ITU-Rpy**", "", "**Date**"]
    ]
    # The itur model module for this recommendation; change_version() below
    # is used to probe which historical versions ITU-Rpy implements.
    module = importlib.import_module(f"itur.models.itu{rec}")
    # Sort newest-first (longer names sort after shorter within same length).
    recs_and_links = sorted(recs_and_links, key=lambda x: (len(x[1]), x))[::-1]
    for rec_link, rec_name in recs_and_links:
        name, date = rec_name.split(" ")
        date = date.replace(".", "/").replace("(", "").replace(")", "")
        code_rec = rec + rec_link.split(rec)[-1]
        link = URL.format(code_rec)
        pdf_link = URL_PDF.format(code_rec)
        version = int(name.split("-")[-1].strip())
        # Expand two-digit years: '9x' -> 19xx, '0x'/'1x'/'2x' -> 20xx.
        if len(date) == 5 and date[3] == "9":
            date = date[:3] + "19" + date[3:]
        elif len(date) == 5 and date[3] in ["0", "1", "2"]:
            date = date[:3] + "20" + date[3:]
        # Check if version is available in ITU-Rpy: change_version raises
        # for versions the package does not implement.
        try:
            module.change_version(version)
            rec_implemented.append(
                [
                    f"`Recommendation ITU-R {name} <{link}>`_",
                    f"`[PDF] <{pdf_link}>`_",
                    date,
                ]
            )
        except Exception:
            rec_not_implemented.append(
                [
                    f"`Recommendation ITU-R {name} <{link}>`_",
                    f"`[PDF] <{pdf_link}>`_",
                    date,
                ]
            )
    # Join tables that have rows (index 0 is the section header row).
    if len(rec_implemented) > 1:
        table += rec_implemented
    if len(rec_not_implemented) > 1:
        table += rec_not_implemented
    # Write table to a file consumed by the Sphinx docs (docs/apidoc).
    table_str = make_rst_table(table)
    with open(f"./apidoc/itu{rec}_table.rst", "w") as fd:
        fd.write(table_str)
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/docs/conf.py | docs/conf.py | # -*- coding: utf-8 -*-
#
# ITU-Rpy documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 14 12:36:21 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import datetime
import sys
import os
import shlex
import sphinx_rtd_theme
sys.path.insert(0, '..')
import itur
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extensions enabled for the ITU-Rpy documentation build.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'sphinx.ext.viewcode',
    'sphinx.ext.autosummary',
    'sphinx.ext.napoleon',
    'sphinx.ext.autosectionlabel',
]
# Configure autodoc member order: document members in source-file order.
autodoc_member_order = 'bysource'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ITU-Rpy'
# Copyright year range is kept current automatically at build time.
copyright = '2016-%s, %s' % (datetime.datetime.now().year,
                             'Inigo del Portillo')
author = 'Inigo del Portillo'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = itur.__version__
# The full version, including alpha/beta/rc tags.
release = itur.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): Sphinx >= 5 warns on None and expects a string such as
# 'en' — confirm against the pinned Sphinx version before changing.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_static']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    # Keep the full navigation tree expanded in the sidebar.
    'collapse_navigation': False
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
using_rtd_theme = True
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = ""
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'**': ['localtoc.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ITURpydoc'
# -- Options for LaTeX output ---------------------------------------------
# All LaTeX options are left at their Sphinx defaults.
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc,
     'ITU-Rpy.tex',
     u'ITU-Rpy Documentation',
     'Inigo del Portillo',
     'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'itur', u'ITU-Rpy Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'ITU-Rpy', u'ITU-Rpy Documentation',
     author, 'ITU-Rpy', 'A python implementation of ITU-R P. recommendations for atmospheric attenuation.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Show init as well as moduledoc: autodoc concatenates the class docstring
# and the __init__ docstring.
autoclass_content = 'both'
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/gaseous_attenuation.py | examples/gaseous_attenuation.py | # -*- coding: utf-8 -*-
"""
This example reproduces the graphs plotted in ITU-R P.676 recommendation.
"""
import itur
import itur.models.itu676 as itu676
import itur.models.itu835 as itu835
import numpy as np
import matplotlib.pyplot as plt
###############################################################################
# Comparison of wet and dry atmospheres #
###############################################################################
# Define atmospheric parameters
rho_wet = 7.5 * itur.u.g / itur.u.m**3
rho_dry = 0 * itur.u.g / itur.u.m**3
P = 1013.25 * itur.u.hPa
T = 15 * itur.u.deg_C
# Define frequency logspace parameters
N_freq = 1000
fs = np.linspace(0, 1000, N_freq)
# Compute the attenuation values
att_wet = itu676.gamma_exact(fs, P, rho_wet, T)
att_dry = itu676.gamma_exact(fs, P, rho_dry, T)
# Plot the results
plt.figure()
plt.plot(fs, att_wet.value, 'b--', label='Wet atmosphere')
plt.plot(fs, att_dry.value, 'r', label='Dry atmosphere')
plt.xlabel('Frequency [GHz]')
plt.ylabel('Specific attenuation [dB/km]')
plt.yscale('log')
plt.xscale('linear')
plt.xlim(0, 1000)
plt.ylim(1e-3, 1e5)
plt.legend()
plt.grid(which='both', linestyle=':', color='gray', linewidth=0.3, alpha=0.5)
plt.grid(which='major', linestyle=':', color='black')
plt.title('FIGURE 1. - Specific attenuation due to atmospheric gases,\n '
'calculated at 1 GHz intervals, including line centres')
plt.tight_layout()
###############################################################################
# Specific attenuation at different altitudes #
###############################################################################
# Define atmospheric parameters
hs = np.array([0, 5, 10, 15, 20]) * itur.u.km
# Define frequency logspace parameters
N_freq = 2001
fs = np.linspace(50, 70, N_freq)
# Plot the results
plt.figure()
# Loop over heights and compute values
for h in hs:
rho = itu835.standard_water_vapour_density(h)
P = itu835.standard_pressure(h)
T = itu835.standard_temperature(h)
atts = itu676.gamma_exact(fs * itur.u.GHz, P, rho, T)
plt.plot(fs, atts.value, label=f"Altitude {h.value} km")
plt.xlabel('Frequency [GHz]')
plt.ylabel('Specific attenuation [dB/km]')
plt.yscale('log')
plt.xscale('linear')
plt.xlim(50, 70)
plt.ylim(1e-3, 1e2)
plt.legend()
plt.grid(which='both', linestyle=':', color='gray', linewidth=0.3, alpha=0.5)
plt.grid(which='major', linestyle=':', color='black')
plt.title('FIGURE 2. - Specific attenuation in the range 50-70 GHz at the\n'
' altitudes indicated, calculated at intervals of 10 MHz\n'
' including line centers (0, 5, 10 15, 20) km')
plt.tight_layout()
###############################################################################
# Comparison of line-by-line and approximate method #
###############################################################################
# Define atmospheric parameters
el = 90
rho = 7.5 * itur.u.g / itur.u.m**3
P = 1013.25 * itur.u.hPa
T = 15 * itur.u.deg_C
# Define frequency logspace parameters
N_freq = 350
fs = np.linspace(0, 350, N_freq)
# Loop over frequencies and compute values
atts_approx = itu676.gaseous_attenuation_slant_path(fs, el, rho, P, T,
mode='approx')
atts_exact = itu676.gaseous_attenuation_slant_path(fs, el, rho, P, T,
mode='exact')
# Plot the results
plt.figure()
plt.plot(fs, atts_approx.value, 'b--', label='Approximate method Annex 2')
plt.plot(fs, atts_exact.value, 'r', label='Exact line-by-line method')
plt.xlabel('Frequency [GHz]')
plt.ylabel('Attenuation [dB]')
plt.yscale('log')
plt.xscale('log')
plt.legend()
plt.grid(which='both', linestyle=':', color='gray', linewidth=0.3, alpha=0.5)
plt.grid(which='major', linestyle=':', color='black')
plt.title('Comparison of line-by-line method to approximate method')
plt.tight_layout()
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/single_location.py | examples/single_location.py | # -*- coding: utf-8 -*-
"""
In this example we compute different atmospheric parameters for a single ground
station.
The ground station is located in Boston (GPS coordinates 41.36, -71.05) and
the link operates at a frequency of 22.5 GHz, elevation angle of 60 degrees,
and with a receiver antenna of 1 m diameter.
Values exceeded during 0.1 % of the average year are reported.
"""
import itur
# Location of the receiver ground stations
lat = 41.39
lon = -71.05
# Link parameters
el = 60 # Elevation angle equal to 60 degrees
f = 22.5 * itur.u.GHz # Frequency equal to 22.5 GHz
D = 1 * itur.u.m # Receiver antenna diameter of 1 m
p = 0.1 # We compute values exceeded during 0.1 % of the average
# year
# Compute atmospheric parameters
hs = itur.topographic_altitude(lat, lon)
T = itur.surface_mean_temperature(lat, lon)
P = itur.models.itu835.pressure(lat, hs)
rho_p = itur.surface_water_vapour_density(lat, lon, p, hs)
rho_sa = itur.models.itu835.water_vapour_density(lat, hs)
T_sa = itur.models.itu835.temperature(lat, hs)
V = itur.models.itu836.total_water_vapour_content(lat, lon, p, hs)
print(
f"The ITU recommendations predict the following values for the point located at coordinates ({lat}, {lon})"
)
print(
f" - Height above the sea level [ITU-R P.1511] {hs.to(itur.u.m):.1f}"
)
T_C = T.to(itur.u.Celsius, equivalencies=itur.u.temperature())
print(f" - Surface mean temperature [ITU-R P.1510] {T_C:.1f}")
print(f" - Surface pressure [ITU-R P.835] {P:.1f}")
T_sa_C = T_sa.to(itur.u.Celsius, equivalencies=itur.u.temperature())
print(f" - Standard surface temperature [ITU-R P.835] {T_sa_C:.1f}")
print(f" - Standard water vapour density [ITU-R P.835] {rho_sa:.1f}")
print(f" - Water vapor density (p={p}%) [ITU-R P.836] {rho_p:.1f}")
print(f" - Total water vapour content (p={p}%) [ITU-R P.836] {V:.1f}")
# Compute rain and cloud-related parameters
R_prob = itur.models.itu618.rain_attenuation_probability(lat, lon, el, hs)
R_pct = itur.models.itu837.rainfall_probability(lat, lon)
R001 = itur.models.itu837.rainfall_rate(lat, lon, p)
h_0 = itur.models.itu839.isoterm_0(lat, lon)
h_rain = itur.models.itu839.rain_height(lat, lon)
L_red = itur.models.itu840.columnar_content_reduced_liquid(lat, lon, p)
A_w = itur.models.itu676.zenit_water_vapour_attenuation(lat, lon, p, f, h=hs)
print(f" - Rain attenuation probability [ITU-R P.618] {R_prob:.1f}")
print(f" - Rain percentage probability [ITU-R P.837] {R_pct:.1f}")
print(f" - Rainfall rate exceeded for p={p}% [ITU-R P.837] {R001:.1f}")
print(f" - 0 degree C isotherm height [ITU-R P.839] {h_0:.1f}")
print(f" - Rain height [ITU-R P.839] {h_rain:.1f}")
print(f" - Columnar content of reduced liquid (p={p}%) [ITU-R P.840] {L_red:.1f}")
print(f" - Zenit water vapour attenuation (p={p}%) [ITU-R P.676] {A_w:.1f}")
# Compute attenuation values
A_g = itur.gaseous_attenuation_slant_path(f, el, rho_p, P, T)
A_r = itur.rain_attenuation(lat, lon, f, el, hs=hs, p=p)
A_c = itur.cloud_attenuation(lat, lon, el, f, p)
A_s = itur.scintillation_attenuation(lat, lon, f, el, p, D)
A_t = itur.atmospheric_attenuation_slant_path(lat, lon, f, el, p, D)
print(
f"\n\nAttenuation values exceeded for p={p}% of the average year "
f"for a link with el={el} deg, f={f}, \nD={D} and "
f"receiver ground station located at coordinates ({lat}, {lon})"
)
print(f" - Rain attenuation [ITU-R P.618] {A_r:.1f}")
print(f" - Gaseous attenuation [ITU-R P.676] {A_g:.1f}")
print(f" - Clouds attenuation [ITU-R P.840] {A_c:.1f}")
print(f" - Scintillation attenuation [ITU-R P.618] {A_s:.1f}")
print(f" - Total atmospheric attenuation [ITU-R P.618] {A_t:.1f}")
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/itu_837_rainfall_map.py | examples/itu_837_rainfall_map.py | # -*- coding: utf-8 -*-
""" This example shows how to compute the rainfall-rate (mm/hr) exceeded
for 0.01 % of the time of the average year over a large region of the Earth.
This image is similar to the one plotted in page 5 of Recommendation
ITU-R P.837-7.
"""
import itur
import matplotlib.pyplot as plt
# Set Recommendation ITU-R P.837 to version 7
itur.models.itu837.change_version(7)
# Generate a regular grid of latitude and longitudes with 0.1 degree resolution
# for the region of interest.
lat, lon = itur.utils.regular_lat_lon_grid(resolution_lat=0.1,
resolution_lon=0.1)
# Compute the rainfall rate exceeded for 0.01 % of the time.
p = 0.01
R001 = itur.models.itu837.rainfall_rate(lat, lon, p)
# Display the results in a map
fig = plt.figure(figsize=(16, 8))
ax = fig.add_subplot(1, 1, 1)
m = itur.plotting.plot_in_map(
R001, lat, lon, cmap='jet', vmin=0, vmax=90, ax=ax,
cbar_text='Rainfall rate exceeded for 0.01% of an average year [mm/hr]')
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/itu_1510_mean_surface_temperature.py | examples/itu_1510_mean_surface_temperature.py | # -*- coding: utf-8 -*-
""" This example shows how to compute the anuaml mean surface temperature
over a large region of the Earth.
This image is similar to the one plotted in page 3 of Recommendation
ITU-R P.1510-1.
"""
import itur
# Set Recommendation ITU-R P.1510 to version 1
itur.models.itu1510.change_version(1)
# Generate a regular grid of latitude and longitudes with 0.1 degree resolution
# for the region of interest.
lat, lon = itur.utils.regular_lat_lon_grid(resolution_lat=0.1,
resolution_lon=0.1)
# Compute the surface mean temperature
T = itur.models.itu1510.surface_mean_temperature(lat, lon)
# Display the results in a map
ax = itur.plotting.plot_in_map(
T, lat, lon, cmap='jet', vmin=230, vmax=310,
cbar_text='Annual mean surface temperature [K]')
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/single_location_vs_unavailability.py | examples/single_location_vs_unavailability.py | # -*- coding: utf-8 -*-
""" This example shows how to compute the attenuation vs. percentage of time
of the average year that values are exceeded for a single location.
The link is a space-to-Earth link between a ground station in Boston and a
satellite in GEO orbit (slot 77W). The link operates at 22.5 GHz and the
receiver antenna has a 1.2 m diameter.
"""
import itur
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import ScalarFormatter
# Ground station coordinates (Boston)
lat_GS = 42.3601
lon_GS = -71.0942
# Satellite coordinates (GEO, 77 W)
lat_sat = 0
lon_sat = -77
h_sat = 35786 * itur.u.km
# Compute the elevation angle between satellite and ground station
el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat, lat_GS, lon_GS)
f = 22.5 * itur.u.GHz # Link frequency
D = 1.2 * itur.u.m # Antenna diameters
# Define unavailabilities vector in logarithmic scale
p = np.logspace(-1.5, 1.5, 100)
# Compute the attenuation values for different unavailabilities.
# The unavailability is the only parameter that is not vectorized in ITU-Rpy
A_g, A_c, A_r, A_s, A_t = \
itur.atmospheric_attenuation_slant_path(lat_GS, lon_GS, f, el, p, D,
return_contributions=True)
# Plot the results using matplotlib
f, ax = plt.subplots(1, 1)
ax.semilogx(p, A_g.value, label='Gaseous attenuation')
ax.semilogx(p, A_c.value, label='Cloud attenuation')
ax.semilogx(p, A_r.value, label='Rain attenuation')
ax.semilogx(p, A_s.value, label='Scintillation attenuation')
ax.semilogx(p, A_t.value, label='Total atmospheric attenuation')
ax.xaxis.set_major_formatter(ScalarFormatter())
ax.set_xlabel('Percentage of time attenuation value is exceeded [%]')
ax.set_ylabel('Attenuation [dB]')
ax.grid(which='both', linestyle=':')
plt.legend()
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/map_africa.py | examples/map_africa.py | # -*- coding: utf-8 -*-
""" This example shows how to compute the atmospheric attenuation exceeded
for 0.1 % of the time of the average year over a large region of the Earth.
It is assumed that the satellite is located in geostationary orbit, at the
4 E slot, and the link operates at 22.5 GHz with receiver-dishes of 1.2 m
diameter.
The satellite covers Africa, Europe and the Middle East.
Finally, we also plot the surface mean temperature distribution to illustrate
that other variables can also be computed using vectorized operations.
"""
import itur
# Generate a regular grid of latitude and longitudes with 0.1 degree resolution
# for the region of interest.
lat, lon = itur.utils.regular_lat_lon_grid(lat_max=60,
lat_min=-60,
lon_max=65,
lon_min=-35,
resolution_lon=1,
resolution_lat=1)
# Satellite coordinates (GEO, 4 E)
lat_sat = 0
lon_sat = 4
h_sat = 35786 * itur.u.km
# Compute the elevation angle between satellite and ground stations
el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat, lat, lon)
# Set the link parameters
f = 22.5 * itur.u.GHz # Link frequency
D = 1.2 * itur.u.m # Antenna diameters
p = 0.1 # Unavailability (Values exceeded 0.1% of time)
# Compute the atmospheric attenuation
Att = itur.atmospheric_attenuation_slant_path(lat, lon, f, el, p, D)
# Plot the results
m = itur.plotting.plot_in_map(Att, lat, lon,
cbar_text='Atmospheric attenuation [dB]',
cmap='magma')
# Plot the satellite location
m.scatter(lon_sat, lat_sat, c='white', s=20)
# Now we show the surface mean temperature distribution
T = itur.surface_mean_temperature(lat, lon)\
.to(itur.u.Celsius, equivalencies=itur.u.temperature())
m = itur.plotting.plot_in_map(T, lat, lon,
cbar_text='Surface mean temperature [C]',
cmap='RdBu_r')
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/multiple_location.py | examples/multiple_location.py | # -*- coding: utf-8 -*-
""" This example shows how to compute the atmospheric attenuation exceeded
for 0.1 % of the time for multiple ground stations.
It is assumed that the satellite is located in geostationary orbit, at the
77 W slot, and the link operates at 22.5 GHz with receiver-dishes of 1.2 m
diameter.
Finally, we also plot the surface mean temperature distribution to illustrate
that other variables can also be computed using vectorized operations.
"""
import itur
import numpy as np
import matplotlib.pyplot as plt
# Obtain the coordinates of the different cities
cities = {'Boston': (42.36, -71.06),
'New York': (40.71, -74.01),
'Los Angeles': (34.05, -118.24),
'Denver': (39.74, -104.99),
'Las Vegas': (36.20, -115.14),
'Seattle': (47.61, -122.33),
'Washington DC': (38.91, -77.04)}
lat = [coords[0] for coords in cities.values()]
lon = [coords[1] for coords in cities.values()]
# Satellite coordinates (GEO, 4 E)
lat_sat = 0
lon_sat = -77
h_sat = 35786 * itur.u.km
# Compute the elevation angle between satellite and ground stations
el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat, lat, lon)
# Set the link parameters
f = 22.5 * itur.u.GHz # Link frequency
D = 1.2 * itur.u.m # Antenna diameters
p = 0.1 # Unavailability (Values exceeded 0.1% of time)
# Compute the atmospheric attenuation
Ag, Ac, Ar, As, Att = itur.atmospheric_attenuation_slant_path(
lat, lon, f, el, p, D, return_contributions=True)
# Plot the results
city_idx = np.arange(len(cities))
width = 0.15
fig, ax = plt.subplots(1, 1)
ax.bar(city_idx, Att.value, 0.6, label='Total atmospheric Attenuation')
ax.bar(city_idx - 1.5 * width, Ar.value, width, label='Rain attenuation')
ax.bar(city_idx - 0.5 * width, Ag.value, width, label='Gaseous attenuation')
ax.bar(city_idx + 0.5 * width, Ac.value, width, label='Clouds attenuation')
ax.bar(city_idx + 1.5 * width, As.value, width,
label='Scintillation attenuation')
# Set the labels
ticks = ax.set_xticklabels([''] + list(cities.keys()))
for t in ticks:
t.set_rotation(45)
ax.set_ylabel('Atmospheric attenuation exceeded for 0.1% [dB]')
# Format image
ax.yaxis.grid(which='both', linestyle=':')
ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.3), ncol=2)
plt.tight_layout(rect=(0, 0, 1, 0.85))
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
inigodelportillo/ITU-Rpy | https://github.com/inigodelportillo/ITU-Rpy/blob/e69587f75bdb7f8b1049259f36eb31a36ca5c570/examples/single_location_vs_freq_el.py | examples/single_location_vs_freq_el.py | # -*- coding: utf-8 -*-
""" This example shows how to compute the 'attenuation exceeded for 0.1 % of
time of the average year' vs. 'frequency' and 'elevation angle'
for a single location.
For the 'attenuation exceeded for 0.1 % of time of the average year' vs.
'frequency' case the link is assume to be a space-to-Earth link between
a ground station in Boston and a satellite in GEO orbit (slot 77W).
For the 'attenuation exceeded for 0.1 % of time of the average year' vs.
'elevation' case, the link operates at 22.5 GHz.
The receiver antenna has a 1.2 m diameter in both cases.
"""
import itur
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import ScalarFormatter
# Ground station coordinates (Boston)
lat_GS = 42.3601
lon_GS = -71.0942
################################################
# First case: Attenuation vs. frequency #
################################################
# Satellite coordinates (GEO, 77 W)
lat_sat = 0
lon_sat = -77
h_sat = 35786 * itur.u.km
# Compute the elevation angle between satellite and ground station
el = itur.utils.elevation_angle(h_sat, lat_sat, lon_sat, lat_GS, lon_GS)
f = 22.5 * itur.u.GHz # Link frequency
D = 1.2 * itur.u.m # Antenna diameters
p = 1
f = np.logspace(-0.2, 2, 100) * itur.u.GHz
Ag, Ac, Ar, As, A =\
itur.atmospheric_attenuation_slant_path(lat_GS, lon_GS, f, el, p, D,
return_contributions=True)
# Plot the results
fig, ax = plt.subplots(1, 1)
ax.loglog(f, Ag, label='Gaseous attenuation')
ax.loglog(f, Ac, label='Cloud attenuation')
ax.loglog(f, Ar, label='Rain attenuation')
ax.loglog(f, As, label='Scintillation attenuation')
ax.loglog(f, A, label='Total atmospheric attenuation')
ax.xaxis.set_major_formatter(ScalarFormatter())
ax.yaxis.set_major_formatter(ScalarFormatter())
ax.set_xlabel('Frequency [GHz]')
ax.set_ylabel('Atmospheric attenuation [dB]')
ax.grid(which='both', linestyle=':')
plt.legend()
################################################
# Second case: Attenuation vs. elevation angle #
################################################
f = 22.5 * itur.u.GHz
el = np.linspace(5, 90, 100)
Ag, Ac, Ar, As, A =\
itur.atmospheric_attenuation_slant_path(lat_GS, lon_GS, f, el, p, D,
return_contributions=True)
# Plot the results
fig, ax = plt.subplots(1, 1)
ax.plot(el, Ag, label='Gaseous attenuation')
ax.plot(el, Ac, label='Cloud attenuation')
ax.plot(el, Ar, label='Rain attenuation')
ax.plot(el, As, label='Scintillation attenuation')
ax.plot(el, A, label='Total atmospheric attenuation')
ax.xaxis.set_major_formatter(ScalarFormatter())
ax.yaxis.set_major_formatter(ScalarFormatter())
ax.set_xlabel('Elevation angle [deg]')
ax.set_ylabel('Atmospheric attenuation [dB]')
ax.grid(which='both', linestyle=':')
plt.legend()
| python | MIT | e69587f75bdb7f8b1049259f36eb31a36ca5c570 | 2026-01-05T07:12:38.084174Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/python/ipv6.py | python/ipv6.py | import base64
import json
import re
import requests
# Configuration URLs
config_urls = [
"https://raw.githubusercontent.com/Surfboardv2ray/Proxy-sorter/refs/heads/main/submerge/converted.txt",
"https://raw.githubusercontent.com/Surfboardv2ray/TGParse/refs/heads/main/configtg.txt",
# "https://raw.githubusercontent.com/ndsphonemy/proxy-sub/refs/heads/main/my.txt",
"https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/splitted/mixed",
"https://github.com/Surfboardv2ray/v2ray-worker-sub/raw/refs/heads/master/providers/providers",
]
# Helper functions
def is_base64(s):
try:
return base64.b64encode(base64.b64decode(s)).decode() == s
except Exception:
return False
def is_ipv6(address):
return re.match(r'^[0-9a-fA-F:]+$', address) and ':' in address
def parse_config(line):
line = line.strip()
if line.startswith("vmess://"):
config = line[8:]
if is_base64(config):
try:
config_json = json.loads(base64.b64decode(config))
if is_ipv6(config_json.get("add", "")):
return line
except json.JSONDecodeError:
print(f"Invalid JSON format in vmess config.")
elif any(line.startswith(proto) for proto in ["vless://", "trojan://", "ss://", "hy2://", "hysteria2://", "tuic://"]):
at_index = line.find('@')
if at_index != -1:
ip_port_part = line[at_index + 1:]
if ip_port_part.startswith("["):
end_bracket_index = ip_port_part.find("]")
if end_bracket_index > -1:
ip = ip_port_part[1:end_bracket_index]
if is_ipv6(ip):
return line
else:
colon_index = ip_port_part.find(":")
if colon_index > -1:
ip = ip_port_part[:colon_index]
if is_ipv6(ip):
return line
return None
# Fetch and filter configurations
def fetch_and_filter_configs():
ipv6_configs = []
for url in config_urls:
try:
response = requests.get(url)
response.raise_for_status()
text = response.text
lines = base64.b64decode(text).decode().split('\n') if is_base64(text.strip()) else text.split('\n')
for line in lines:
valid_config = parse_config(line)
if valid_config:
ipv6_configs.append(valid_config)
except Exception as e:
print(f"Error processing URL {url}: {e}")
return ipv6_configs
# Save to files
def save_configs(ipv6_configs):
with open("custom/ipv6.txt", "w") as f_ipv6, open("custom/ipv64.txt", "w") as f_ipv64:
ipv6_data = "\n".join(ipv6_configs)
f_ipv6.write(ipv6_data)
f_ipv64.write(base64.b64encode(ipv6_data.encode()).decode())
# Main execution
if __name__ == "__main__":
ipv6_configs = fetch_and_filter_configs()
save_configs(ipv6_configs)
print("Configs saved to ipv6.txt and ipv64.txt")
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/python/sorter.py | python/sorter.py | import base64
import json
import requests
import re
import socket
import os
def get_country_code(ip_address):
try:
# Try to resolve the hostname to an IP address
ip_address = socket.gethostbyname(ip_address)
except socket.gaierror:
print(f"Unable to resolve hostname: {ip_address}")
return None
except UnicodeError:
print(f"Hostname violates IDNA rules: {ip_address}")
return None
try:
# Retrieve the base URL from the environment variable
base_url = os.getenv('GET_IPGEO')
if not base_url:
raise ValueError("Environment variable GET_IPGEO not set")
response = requests.get(f'{base_url}/{ip_address}')
return response.text
except requests.exceptions.RequestException as e:
print(f"Error sending request: {e}")
return None
def country_code_to_emoji(country_code):
# Convert the country code to corresponding Unicode regional indicator symbols
return ''.join(chr(ord(letter) + 127397) for letter in country_code.upper())
# Counter for all proxies
proxy_counter = 0
def process_vmess(proxy):
global proxy_counter
base64_str = proxy.split('://')[1]
missing_padding = len(base64_str) % 4
if missing_padding:
base64_str += '='* (4 - missing_padding)
try:
decoded_str = base64.b64decode(base64_str).decode('utf-8')
proxy_json = json.loads(decoded_str)
ip_address = proxy_json['add']
country_code = get_country_code(ip_address)
if country_code is None:
return None
flag_emoji = country_code_to_emoji(country_code)
proxy_counter += 1
remarks = flag_emoji + country_code + '_' + str(proxy_counter) + '_' + '@Surfboardv2ray'
proxy_json['ps'] = remarks
encoded_str = base64.b64encode(json.dumps(proxy_json).encode('utf-8')).decode('utf-8')
processed_proxy = 'vmess://' + encoded_str
return processed_proxy
except Exception as e:
print("Error processing vmess proxy: ", e)
return None
def process_vless(proxy):
global proxy_counter
ip_address = proxy.split('@')[1].split(':')[0]
country_code = get_country_code(ip_address)
if country_code is None:
return None
flag_emoji = country_code_to_emoji(country_code)
proxy_counter += 1
remarks = flag_emoji + country_code + '_' + str(proxy_counter) + '_' + '@Surfboardv2ray'
processed_proxy = proxy.split('#')[0] + '#' + remarks
return processed_proxy
# Process the proxies and write them to converted.txt
with open('input/proxies.txt', 'r') as f, open('output/converted.txt', 'w') as out_f:
proxies = f.readlines()
for proxy in proxies:
proxy = proxy.strip()
if proxy.startswith('vmess://'):
processed_proxy = process_vmess(proxy)
elif proxy.startswith('vless://'):
processed_proxy = process_vless(proxy)
if processed_proxy is not None:
out_f.write(processed_proxy + '\n')
# Read from converted.txt and separate the proxies based on the country code
with open('output/converted.txt', 'r') as in_f:
proxies = in_f.readlines()
with open('output/IR.txt', 'w') as ir_f, open('output/US.txt', 'w') as us_f:
for proxy in proxies:
if 'IR_' in proxy:
ir_f.write(proxy)
elif 'US_' in proxy:
us_f.write(proxy)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/python/ipv6_ddremoval.py | python/ipv6_ddremoval.py | import base64
import json
import re
import requests
# Configuration URLs
config_urls = [
"https://raw.githubusercontent.com/Surfboardv2ray/Proxy-sorter/refs/heads/main/submerge/converted.txt",
"https://raw.githubusercontent.com/Surfboardv2ray/TGParse/refs/heads/main/configtg.txt",
"https://raw.githubusercontent.com/ndsphonemy/proxy-sub/refs/heads/main/my.txt",
"https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/splitted/mixed",
"https://github.com/Surfboardv2ray/v2ray-worker-sub/raw/refs/heads/master/providers/providers",
"https://github.com/ndsphonemy/proxy-sub/raw/refs/heads/main/lt-sub-all.txt",
]
# Helper functions
def is_base64(s):
try:
return base64.b64encode(base64.b64decode(s)).decode() == s
except Exception:
return False
def is_ipv6(address):
return re.match(r'^[0-9a-fA-F:]+$', address) and ':' in address
def parse_config(line, seen_ipv6):
line = line.strip()
if line.startswith("vmess://"):
config = line[8:]
if is_base64(config):
try:
config_json = json.loads(base64.b64decode(config))
ipv6_address = config_json.get("add", "")
if is_ipv6(ipv6_address) and ipv6_address not in seen_ipv6:
seen_ipv6.add(ipv6_address)
return line
except json.JSONDecodeError:
print("Invalid JSON format in vmess config.")
elif any(line.startswith(proto) for proto in ["vless://", "trojan://", "ss://", "hy2://", "hysteria2://", "tuic://"]):
at_index = line.find('@')
if at_index != -1:
ip_port_part = line[at_index + 1:]
if ip_port_part.startswith("["):
end_bracket_index = ip_port_part.find("]")
if end_bracket_index > -1:
ipv6_address = ip_port_part[1:end_bracket_index]
if is_ipv6(ipv6_address) and ipv6_address not in seen_ipv6:
seen_ipv6.add(ipv6_address)
return line
else:
colon_index = ip_port_part.find(":")
if colon_index > -1:
ipv6_address = ip_port_part[:colon_index]
if is_ipv6(ipv6_address) and ipv6_address not in seen_ipv6:
seen_ipv6.add(ipv6_address)
return line
return None
# Fetch and filter configurations
def fetch_and_filter_configs():
ipv6_configs = []
seen_ipv6 = set()
for url in config_urls:
try:
response = requests.get(url)
response.raise_for_status()
text = response.text
lines = base64.b64decode(text).decode().split('\n') if is_base64(text.strip()) else text.split('\n')
for line in lines:
valid_config = parse_config(line, seen_ipv6)
if valid_config:
ipv6_configs.append(valid_config)
except Exception as e:
print(f"Error processing URL {url}: {e}")
return ipv6_configs
# Save to files
def save_configs(ipv6_configs):
with open("custom/ipv6.txt", "w") as f_ipv6, open("custom/ipv64.txt", "w") as f_ipv64:
ipv6_data = "\n".join(ipv6_configs)
f_ipv6.write(ipv6_data)
f_ipv64.write(base64.b64encode(ipv6_data.encode()).decode())
# Main execution
if __name__ == "__main__":
ipv6_configs = fetch_and_filter_configs()
save_configs(ipv6_configs)
print("Configs saved to ipv6.txt and ipv64.txt")
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/python/7899.py | python/7899.py | import base64
import json
import urllib.parse
def modify_proxies():
with open('ws_tls/proxies/wstls', 'r') as f:
proxies = f.readlines()
modified_proxies = []
for proxy in proxies:
proxy = proxy.strip()
if proxy.startswith('vmess://'):
base64_str = proxy.split('vmess://')[1]
proxy_info = base64.b64decode(base64_str).decode('utf-8')
proxy_dict = json.loads(proxy_info)
proxy_dict['add'] = '127.0.0.1'
proxy_dict['port'] = '7899'
modified_proxy = 'vmess://' + base64.b64encode(json.dumps(proxy_dict).encode('utf-8')).decode('utf-8')
modified_proxies.append(modified_proxy)
elif proxy.startswith('vless://'):
proxy_info = proxy.split('vless://')[1]
proxy_info = urllib.parse.unquote(proxy_info)
uuid, rest = proxy_info.split('@', 1) # Split at the first occurrence of '@'
ip_port, rest = rest.split('?', 1) # Split at the first occurrence of '?'
modified_proxy = 'vless://' + uuid + '@127.0.0.1:7899?' + rest
modified_proxies.append(modified_proxy)
with open('ws_tls/7899/7899', 'w') as f:
for proxy in modified_proxies:
f.write(proxy + '\n')
if __name__ == '__main__':
modify_proxies()
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/python/wstls.py | python/wstls.py | import base64
import json
import urllib.parse
def filter_proxies():
with open('output/converted.txt', 'r') as f:
proxies = f.readlines()
filtered_proxies = []
for proxy in proxies:
proxy = proxy.strip()
if proxy.startswith('vmess://'):
base64_str = proxy.split('vmess://')[1]
proxy_info = base64.b64decode(base64_str).decode('utf-8')
proxy_dict = json.loads(proxy_info)
if proxy_dict.get('tls') == 'tls' and proxy_dict.get('net') == 'ws' and proxy_dict.get('port') == '443':
filtered_proxies.append(proxy)
elif proxy.startswith('vless://'):
proxy_info = proxy.split('vless://')[1]
url_parts = urllib.parse.urlparse('http://' + proxy_info)
query_params = urllib.parse.parse_qs(url_parts.query)
port = url_parts.port # Extract the port directly from the URL
if str(port) == '443' and query_params.get('security') == ['tls'] and query_params.get('type') == ['ws']:
filtered_proxies.append(proxy)
with open('ws_tls/proxies/wstls', 'w') as f:
for proxy in filtered_proxies:
f.write(proxy + '\n')
if __name__ == '__main__':
filter_proxies()
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/xrayping.py | xray/xrayping.py | import sys
sys.path.append('./modules/xray_url_decoder/')
sys.path.append('./modules/clash_meta_url_decoder/')
sys.path.append('./')
sys.path.append('./modules')
sys.path.append('xray/modules/gitRepo.py')
import json
import uuid
from ruamel.yaml import YAML
from modules.gitRepo import commitPushRActiveProxiesFile, getLatestActiveConfigs
from modules.xray_url_decoder.XrayUrlDecoder import XrayUrlDecoder
from modules.XrayPing import XrayPing
from modules.clash_meta_url_decoder.ClashMetaUrlDecoder import ClashMetaDecoder
def is_good_for_game(config: XrayUrlDecoder):
return (config.type in ['tcp', 'grpc']) and (config.security in [None, "tls"])
# for more info, track this issue https://github.com/MetaCubeX/Clash.Meta/issues/801
def is_buggy_in_clash_meta(config: ClashMetaDecoder):
    """Clash.Meta currently mishandles reality-over-grpc configs; flag them."""
    if config.security != "reality":
        return False
    return config.type == "grpc"
# Decode every collected share link into xray + clash-meta configs, probe
# their real latency through a local xray instance, and publish the results.
with open("xray/configs/raw-url/all.txt", 'r') as rowProxiesFile:
    configs = []
    clash_meta_configs = []
    for_game_proxies = []
    for url in rowProxiesFile:
        # Skip blank/garbage lines; real share links are always longer.
        if len(url) > 10:
            try:
                cusTag = uuid.uuid4().hex
                # ############# xray ############
                c = XrayUrlDecoder(url, cusTag)
                c_json = c.generate_json_str()
                if c.isSupported and c.isValid:
                    configs.append(c_json)
                # ############# clash Meta ##########
                # NOTE(review): reuses the xray decoder's validity flags for
                # the clash-meta config too — confirm that is intended.
                ccm = ClashMetaDecoder(url, cusTag)
                ccm_json = ccm.generate_obj_str()
                if c.isSupported and c.isValid and (not is_buggy_in_clash_meta(ccm)):
                    clash_meta_configs.append(json.loads(ccm_json))
                if is_good_for_game(c):
                    for_game_proxies.append(url)
            except:
                print("There is error with this proxy => " + url)

# getLatestGoodForGame()
# with open("xray/configs/row-url/for_game.txt", 'w') as forGameProxiesFile:
#     for forGame in for_game_proxies:
#         forGameProxiesFile.write(forGame)
# commitPushForGameProxiesFile()

# Probe every decoded config; results are bucketed by latency (cap 200 each).
delays = XrayPing(configs, 200)
getLatestActiveConfigs()
yaml = YAML()

# Clash-meta outputs: all proxies, then the under-1000ms / under-1500ms sets.
# Delay results are matched back to clash-meta entries by the uuid prefix of
# the "<uuid>_@_<fragment>" tag.
with open("xray/configs/clash-meta/all.yaml", 'w') as allClashProxiesFile:
    yaml.dump({"proxies": clash_meta_configs}, allClashProxiesFile)
with open("xray/configs/clash-meta/actives_under_1000ms.yaml", 'w') as active1000ClashProxiesFile:
    values_to_filter = {d['proxy']['tag'].split("_@_")[0] for d in delays.realDelay_under_1000}
    filtered_array = [item for item in clash_meta_configs if item['name'].split("_@_")[0] in values_to_filter]
    yaml.dump({"proxies": filtered_array}, active1000ClashProxiesFile)
with open("xray/configs/clash-meta/actives_under_1500ms.yaml", 'w') as active1500ClashProxiesFile:
    values_to_filter = {d['proxy']['tag'].split("_@_")[0] for d in delays.realDelay_under_1500}
    filtered_array = [item for item in clash_meta_configs if item['name'].split("_@_")[0] in values_to_filter]
    yaml.dump({"proxies": filtered_array}, active1500ClashProxiesFile)

# Xray JSON outputs: one serialized outbound per line, per latency bucket.
with open("xray/configs/xray-json/actives_all.txt", 'w') as activeProxiesFile:
    for active in delays.actives:
        activeProxiesFile.write(json.dumps(active['proxy']) + "\n")
with open("xray/configs/xray-json/actives_under_1000ms.txt", 'w') as active1000ProxiesFile:
    for active in delays.realDelay_under_1000:
        active1000ProxiesFile.write(json.dumps(active['proxy']) + "\n")
with open("xray/configs/xray-json/actives_under_1500ms.txt", 'w') as active1500ProxiesFile:
    for active in delays.realDelay_under_1500:
        active1500ProxiesFile.write(json.dumps(active['proxy']) + "\n")
with open("xray/configs/xray-json/actives_no_403_under_1000ms.txt", 'w') as active1000no403ProxiesFile:
    for active in delays.no403_realDelay_under_1000:
        active1000no403ProxiesFile.write(json.dumps(active['proxy']) + "\n")
# The "ir server" output additionally excludes ws/grpc transports
# (presumably unsuitable for the relay servers — confirm with the operator).
with open("xray/configs/xray-json/actives_for_ir_server_no403_u1s.txt",
          'w') as active1000no403ForServerProxiesFile:
    for active in delays.no403_realDelay_under_1000:
        if active['proxy']["streamSettings"]["network"] not in ["ws", "grpc"]:
            active1000no403ForServerProxiesFile.write(json.dumps(active['proxy']) + "\n")

commitPushRActiveProxiesFile()
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/client.py | xray/modules/client.py | from telethon import TelegramClient
from dotenv import load_dotenv
import os

load_dotenv()

# Read and validate the Telegram API credentials before converting them, so a
# missing variable produces the intended error message instead of a TypeError
# from int(None).
# BUG FIX: the old guard was `if not (api_id or api_hash)`, which only raised
# when BOTH were missing; it must raise when EITHER is missing.
_raw_api_id = os.getenv('API_ID')
api_hash = os.getenv('API_HASH')
if not (_raw_api_id and api_hash):
    raise Exception('You have to pass both API_ID and API_HASH env variables')
api_id = int(_raw_api_id)

# Optional proxy for reaching Telegram, e.g. "socks5://host:1080".
# Default to "" so a missing PROXY_URL does not crash len().
proxy = {}
if len(os.getenv('PROXY_URL', '')) > 2:
    p = os.getenv('PROXY_URL')
    [schema, urlPort] = p.split("://", 1)
    [url, port] = urlPort.rsplit(":", 1)
    proxy = {
        "proxy_type": schema,
        "addr": url,
        "port": int(port),
    }
    print("using proxy for telegram connection: ")
    print(proxy)

app = TelegramClient("v2ray-proxy-grabber-telegram", api_id, api_hash, proxy=proxy, auto_reconnect=True, connection_retries=20, retry_delay=5)

# Channels are matched case-insensitively by username; DEBUG_MODE is "0"/"1".
PROXY_CHANNELS = os.getenv('PROXY_CHANNELS').lower().split("@")
IS_DEBUG = bool(int(os.getenv('DEBUG_MODE')))
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/XrayConfig.py | xray/modules/XrayConfig.py | from XrayRouting import *
from XrayInbound import *
class XrayConfigSimple:
    """Minimal top-level Xray config: sensible log/policy defaults plus the
    caller-supplied inbounds, outbounds, routing and stats sections."""
    policy: dict
    log: dict
    inbounds: List[Inbound]
    outbounds: List[dict]
    stats: dict
    routing: XrayRouting

    def __init__(self, inbounds: List[Any], outbounds: List[dict], routing: XrayRouting, stats: dict = None,
                 policy: dict = None, log: dict = None) -> None:
        # Default policy enables per-outbound traffic counters.
        if policy is None:
            policy = {
                "system": {
                    "statsOutboundUplink": True,
                    "statsOutboundDownlink": True
                }
            }
        # Default log config: warnings only, no access/error files.
        if log is None:
            log = {
                "access": "",
                "error": "",
                "loglevel": "warning"
            }
        self.policy = policy
        self.log = log
        self.inbounds = inbounds
        self.outbounds = outbounds
        self.stats = stats
        self.routing = routing
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/cleanProxiesRowUrl.py | xray/modules/cleanProxiesRowUrl.py | import json
import shutil
import sys
from urllib.parse import urlparse
sys.path.append('./xray_url_decoder/')
from gitRepo import getLatestActiveConfigs, getLatestRowProxies, commitPushRowProxiesFile
from xray_url_decoder.XrayUrlDecoder import XrayUrlDecoder
def is_duplicated_config(proxy: str, seen_lines: set[str]):
    """Return True when *proxy* decodes to the same Xray config as any line
    already in *seen_lines*. Undecodable lines are ignored (best effort)."""
    decoded_seen: list[XrayUrlDecoder] = []
    for seen in seen_lines:
        if len(seen) > 10:
            try:
                decoded_seen.append(XrayUrlDecoder(seen))
            except:
                pass
    try:
        candidate_json = XrayUrlDecoder(proxy).generate_json_str()
        for decoded in decoded_seen:
            if decoded.is_equal_to_config(candidate_json):
                return True
    except:
        pass
    return False
def keep_only_lines_and_remove_duplicates(file_path, lines_to_keep):
    """Rewrite *file_path* keeping only the 1-based line numbers listed in
    *lines_to_keep* (all lines when None), then drop exact duplicates and
    lines that decode to an already-seen proxy config."""
    with open(file_path, 'r') as src:
        all_lines = src.readlines()

    if lines_to_keep is None:
        kept = all_lines
    else:
        wanted = set(lines_to_keep)  # set for O(1) membership tests
        kept = [line for number, line in enumerate(all_lines, start=1) if number in wanted]

    unique_lines = []
    seen_lines = set()
    for line in kept:
        if line in seen_lines:
            continue
        if not is_duplicated_config(line, seen_lines):
            unique_lines.append(line)
        seen_lines.add(line)
    new_content = '\n'.join(line.rstrip() for line in unique_lines if line.strip())
    with open(file_path, 'w') as dst:
        dst.write(new_content)
getLatestActiveConfigs()
getLatestRowProxies()

# For each known-active config, find the 1-based line number of the raw
# share link that produced it (O(actives * raw lines) — acceptable for the
# current file sizes).
lineNumberOfFounds = []
with open("collected-proxies/xray-json/actives_all.txt", 'r') as activeProxiesFile:
    for activeConfig in activeProxiesFile:
        if len(activeConfig) < 10: continue
        with open("collected-proxies/row-url/all.txt", 'r') as rowProxiesFile:
            # remove if it's not in active proxies
            for (index, rowProxyUrl) in enumerate(rowProxiesFile):
                if len(rowProxyUrl) < 10: continue
                try:
                    config = XrayUrlDecoder(rowProxyUrl)
                    if config.isSupported and config.isValid and config.is_equal_to_config(activeConfig):
                        lineNumberOfFounds.append(index + 1)
                except:
                    pass

# actives.txt = raw links whose configs are still alive; all.txt is then
# de-duplicated in place; finally push both back to the repo.
shutil.copyfile("collected-proxies/row-url/all.txt", "collected-proxies/row-url/actives.txt")
keep_only_lines_and_remove_duplicates("collected-proxies/row-url/actives.txt", lineNumberOfFounds)
keep_only_lines_and_remove_duplicates("collected-proxies/row-url/all.txt", None)

commitPushRowProxiesFile("------cleaning url list-------")
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/gitRepo.py | xray/modules/gitRepo.py | import shutil
from git import Repo
from dotenv import load_dotenv
import os
load_dotenv()
GITHUB_TOKEN = os.getenv('TOKEN_GITHUB')
REPO = os.getenv('REPO', 'Surfboardv2ray/Proxy-sorter')
# BUG FIX: this was `IS_DEBUG = '0'` — a truthy *string* — so every
# `if not IS_DEBUG:` guard below evaluated False and the git fetch/commit/
# push operations were silently skipped. Use a real boolean, overridable
# via the DEBUG_MODE env variable ("0"/"1") like the other modules.
IS_DEBUG = bool(int(os.getenv('DEBUG_MODE', '0')))

# Reuse an existing clone when present; otherwise clone over HTTPS using the
# access token embedded in the URL.
if os.path.exists("./repo/.git"):
    repo = Repo("./repo/")
else:
    repo = Repo.clone_from(
        "https://mrm:{TOKEN_GITHUB}@github.com/{REPO}".format(TOKEN_GITHUB=GITHUB_TOKEN, REPO=REPO), "./repo")

# Remember the original git identity so commits made as the bot can be
# attributed back afterwards (resetGitUser); "None" acts as a sentinel.
with repo.config_reader() as git_config:
    try:
        mainGitEmail = git_config.get_value('user', 'email')
        mainGitUser = git_config.get_value('user', 'name')
    except:
        mainGitEmail = "None"
        mainGitUser = "None"
def changeGitUserToBot():
    """Switch the repo's commit identity to the automation bot account."""
    with repo.config_writer() as writer:
        writer.set_value('user', 'email', 'bot@auto.com')
        writer.set_value('user', 'name', 'Bot-auto')
def resetGitUser():
    """Restore the git identity that was captured at import time."""
    global mainGitUser, mainGitEmail
    with repo.config_writer() as writer:
        writer.set_value('user', 'email', mainGitEmail)
        writer.set_value('user', 'name', mainGitUser)
def getLatestRowProxies():
    """Pull the latest collected raw proxy URLs from origin/master and copy
    them into the local working tree. No-op in debug mode."""
    if not IS_DEBUG:
        repo.git.execute(["git", "fetch", "--all"])
        repo.git.execute(["git", "checkout", "remotes/origin/master", "xray/configs"])
        shutil.copytree("./repo/xray/configs/raw-url", "xray/configs/raw-url", dirs_exist_ok=True)
def getLatestActiveConfigs():
    """Pull the latest generated outputs (xray-json + clash-meta) from
    origin/master into the local working tree. No-op in debug mode."""
    if not IS_DEBUG:
        repo.git.execute(["git", "fetch", "--all"])
        repo.git.execute(["git", "checkout", "remotes/origin/master", "xray/configs"])
        shutil.copytree("./repo/xray/configs/xray-json", "xray/configs/xray-json", dirs_exist_ok=True)
        shutil.copytree("./repo/xray/configs/clash-meta", "xray/configs/clash-meta", dirs_exist_ok=True)
def commitPushRowProxiesFile(chanelUsername):
    """Commit and push the raw proxy URL files as the bot, attributing the
    update to *chanelUsername* in the commit message. No-op in debug mode."""
    if not IS_DEBUG:
        # Hard-reset to origin/master first so the subsequent push is clean.
        repo.git.execute(["git", "fetch", "--all"])
        repo.git.execute(["git", "reset", "--hard", "origin/master"])
        repo.git.execute(["git", "pull"])
        shutil.copytree("xray/configs/raw-url", "./repo/xray/configs/raw-url", dirs_exist_ok=True)
        repo.index.add([r'xray/configs/raw-url/*'])
        # Commit as the bot identity, then restore the original user.
        changeGitUserToBot()
        repo.index.commit('update proxies from {}'.format(chanelUsername))
        repo.remotes.origin.push()
        resetGitUser()
        print('pushed => update proxies from {}'.format(chanelUsername))
def commitPushRActiveProxiesFile():
    """Commit and push the generated active-proxy outputs (xray-json and
    clash-meta directories) as the bot. No-op in debug mode."""
    if not IS_DEBUG:
        # Hard-reset to origin/master first so the subsequent push is clean.
        repo.git.execute(["git", "fetch", "--all"])
        repo.git.execute(["git", "reset", "--hard", "origin/master"])
        repo.git.execute(["git", "pull"])
        shutil.copytree("xray/configs/xray-json", "./repo/xray/configs/xray-json", dirs_exist_ok=True)
        shutil.copytree("xray/configs/clash-meta", "./repo/xray/configs/clash-meta", dirs_exist_ok=True)
        repo.index.add([r'xray/configs/clash-meta/*'])
        repo.index.add([r'xray/configs/xray-json/*'])
        # Commit as the bot identity, then restore the original user.
        changeGitUserToBot()
        repo.index.commit('update active proxies')
        repo.remotes.origin.push()
        resetGitUser()
        print('pushed => update active proxies')
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/checkProxies.py | xray/modules/checkProxies.py | import json
import sys
import uuid
from ruamel.yaml import YAML
from gitRepo import commitPushRActiveProxiesFile, getLatestActiveConfigs
sys.path.append('./xray_url_decoder/')
sys.path.append('./clash_meta_url_decoder/')
sys.path.append('./')
from xray_url_decoder.XrayUrlDecoder import XrayUrlDecoder
from xray_ping.XrayPing import XrayPing
from clash_meta_url_decoder.ClashMetaUrlDecoder import ClashMetaDecoder
def is_good_for_game(config: XrayUrlDecoder):
    """Game-suitable proxies run over tcp or grpc with plain or TLS security."""
    if config.type not in ['tcp', 'grpc']:
        return False
    return config.security in [None, "tls"]
# for more info, track this issue https://github.com/MetaCubeX/Clash.Meta/issues/801
def is_buggy_in_clash_meta(config: ClashMetaDecoder):
    """True for reality-over-grpc configs, which Clash.Meta cannot handle yet."""
    return (config.security, config.type) == ("reality", "grpc")
# NOTE(review): this script appears to duplicate xray/xrayping.py almost
# verbatim — consider consolidating to avoid drift.
# Decode every collected share link into xray + clash-meta configs, probe
# their real latency through a local xray instance, and publish the results.
with open("xray/configs/raw-url/all.txt", 'r') as rowProxiesFile:
    configs = []
    clash_meta_configs = []
    for_game_proxies = []
    for url in rowProxiesFile:
        # Skip blank/garbage lines; real share links are always longer.
        if len(url) > 10:
            try:
                cusTag = uuid.uuid4().hex
                # ############# xray ############
                c = XrayUrlDecoder(url, cusTag)
                c_json = c.generate_json_str()
                if c.isSupported and c.isValid:
                    configs.append(c_json)
                # ############# clash Meta ##########
                # NOTE(review): reuses the xray decoder's validity flags for
                # the clash-meta config too — confirm that is intended.
                ccm = ClashMetaDecoder(url, cusTag)
                ccm_json = ccm.generate_obj_str()
                if c.isSupported and c.isValid and (not is_buggy_in_clash_meta(ccm)):
                    clash_meta_configs.append(json.loads(ccm_json))
                if is_good_for_game(c):
                    for_game_proxies.append(url)
            except:
                print("There is error with this proxy => " + url)

# getLatestGoodForGame()
# with open("xray/configs/raw-url/for_game.txt", 'w') as forGameProxiesFile:
#     for forGame in for_game_proxies:
#         forGameProxiesFile.write(forGame)
# commitPushForGameProxiesFile()

# Probe every decoded config; results are bucketed by latency (cap 200 each).
delays = XrayPing(configs, 200)
getLatestActiveConfigs()
yaml = YAML()

# Clash-meta outputs: all proxies, then the under-1000ms / under-1500ms sets.
# Delay results are matched back to clash-meta entries by the uuid prefix of
# the "<uuid>_@_<fragment>" tag.
with open("xray/configs/clash-meta/all.yaml", 'w') as allClashProxiesFile:
    yaml.dump({"proxies": clash_meta_configs}, allClashProxiesFile)
with open("xray/configs/clash-meta/actives_under_1000ms.yaml", 'w') as active1000ClashProxiesFile:
    values_to_filter = {d['proxy']['tag'].split("_@_")[0] for d in delays.realDelay_under_1000}
    filtered_array = [item for item in clash_meta_configs if item['name'].split("_@_")[0] in values_to_filter]
    yaml.dump({"proxies": filtered_array}, active1000ClashProxiesFile)
with open("xray/configs/clash-meta/actives_under_1500ms.yaml", 'w') as active1500ClashProxiesFile:
    values_to_filter = {d['proxy']['tag'].split("_@_")[0] for d in delays.realDelay_under_1500}
    filtered_array = [item for item in clash_meta_configs if item['name'].split("_@_")[0] in values_to_filter]
    yaml.dump({"proxies": filtered_array}, active1500ClashProxiesFile)

# Xray JSON outputs: one serialized outbound per line, per latency bucket.
with open("xray/configs/xray-json/actives_all.txt", 'w') as activeProxiesFile:
    for active in delays.actives:
        activeProxiesFile.write(json.dumps(active['proxy']) + "\n")
with open("xray/configs/xray-json/actives_under_1000ms.txt", 'w') as active1000ProxiesFile:
    for active in delays.realDelay_under_1000:
        active1000ProxiesFile.write(json.dumps(active['proxy']) + "\n")
with open("xray/configs/xray-json/actives_under_1500ms.txt", 'w') as active1500ProxiesFile:
    for active in delays.realDelay_under_1500:
        active1500ProxiesFile.write(json.dumps(active['proxy']) + "\n")
with open("xray/configs/xray-json/actives_no_403_under_1000ms.txt", 'w') as active1000no403ProxiesFile:
    for active in delays.no403_realDelay_under_1000:
        active1000no403ProxiesFile.write(json.dumps(active['proxy']) + "\n")
# The "ir server" output additionally excludes ws/grpc transports
# (presumably unsuitable for the relay servers — confirm with the operator).
with open("xray/configs/xray-json/actives_for_ir_server_no403_u1s.txt",
          'w') as active1000no403ForServerProxiesFile:
    for active in delays.no403_realDelay_under_1000:
        if active['proxy']["streamSettings"]["network"] not in ["ws", "grpc"]:
            active1000no403ForServerProxiesFile.write(json.dumps(active['proxy']) + "\n")

commitPushRActiveProxiesFile()
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/XrayPing.py | xray/modules/XrayPing.py | import json
import time
from random import randint
import subprocess
from threading import Thread
from pathlib import Path
import requests
from XrayInbound import *
from XrayRouting import *
from XrayConfig import XrayConfigSimple
def real_delay(port: int, proxy_name: str):
    """Measure the real-world latency of the SOCKS proxy listening on *port*.

    Fetches a tiny captive-portal page through the proxy and times it, then
    probes a URL known to return HTTP 403 from blocked regions. Any failure
    leaves the delay at -1 (callers treat negative delays as "dead").

    Returns a dict: {proxy: tag, realDelay_ms: int (-1 on failure),
    is403: bool}.
    """
    test_url = 'http://detectportal.firefox.com/success.txt'
    err_403_url = 'https://open.spotify.com/'
    proxy = "socks5://127.0.0.1:{}".format(port)
    delay = -1
    statusCode = -1
    try:
        start_time = time.time()
        requests.get(test_url, timeout=10, proxies=dict(http=proxy, https=proxy))
        end_time = time.time()
        delay = end_time - start_time
        err_403_res = requests.get(err_403_url, timeout=10, proxies=dict(http=proxy, https=proxy))
        statusCode = err_403_res.status_code
    except Exception:
        # Best effort: a dead/slow proxy simply keeps delay == -1.
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; Exception keeps those propagating.
        pass
    print(f"Delay of {proxy_name}: {delay} seconds ")
    return dict(proxy=proxy_name, realDelay_ms=round(delay if delay <= 0 else delay * 1000), is403=(statusCode == 403))
def appendBypassMode(config: XrayConfigSimple) -> XrayConfigSimple:
    """Prepend a direct (no-proxy) inbound/outbound/rule to *config*.

    The bypass inbound listens on port 3080 and routes straight to a
    "freedom" outbound; a request through it exercises the generated config
    without any proxy, so a failure there means the config itself is broken
    (see the sanity check in XrayPing.__init__).
    """
    inbounds = [Inbound(
        "bypass_mode_43583495349",
        3080,
        "0.0.0.0",
        "socks",
        Sniffing(),
        SocksSettings()
    )] + config.inbounds
    outbounds = [{'tag': 'direct-out_095667567568', 'protocol': 'freedom'}] + config.outbounds
    rules = [Rule(
        "bypass_mode_43583495349",
        "direct-out_095667567568",
        []
    )] + config.routing.rules
    # (removed a leftover `print(config.outbounds)` debug statement)
    route = XrayRouting(
        "IPIfNonMatch",
        "hybrid",
        rules
    )
    return XrayConfigSimple(inbounds, outbounds, route)
class XrayPing:
    """Spin up one local SOCKS inbound per outbound config, launch xray, and
    measure every proxy's real latency.

    After construction:
      * result  -- every probe result, sorted by measured delay
      * actives -- working proxies (delay > 0), capped at *limit*
      * realDelay_under_1000 / realDelay_under_1500 -- working proxies under
        the given threshold, capped at *limit*
      * no403_realDelay_under_1000 -- sub-1s proxies whose region-block probe
        did not answer with HTTP 403
    """
    result: list[dict]
    actives: list[dict]
    realDelay_under_1000: list[dict]
    realDelay_under_1500: list[dict]
    no403_realDelay_under_1000: list[dict]

    def __init__(self, configs: list[str], limit: int = 200) -> None:
        # BUG FIX: these used to be mutable *class* attributes, so every
        # XrayPing instance (and repeated runs in one process) appended into
        # the same shared lists. They are now per-instance state.
        self.result = []
        self.actives = []
        self.realDelay_under_1000 = []
        self.realDelay_under_1500 = []
        self.no403_realDelay_under_1000 = []

        confs: list[dict] = [json.loads(c) for c in configs]
        socks = []
        rules = []
        # BUG FIX: the old code built `list(set([randint(...)...]))`, which
        # could yield *fewer* unique ports than configs and crash with an
        # IndexError below. Keep drawing until one distinct port per config.
        port_pool: set[int] = set()
        while len(port_pool) < len(confs):
            port_pool.add(randint(2000, 49999))
        socksPorts = list(port_pool)
        for index, outbound in enumerate(confs):
            socksInbound = Inbound(
                "socks__" + outbound["tag"],
                socksPorts[index],
                "0.0.0.0",
                "socks",
                Sniffing(),
                SocksSettings()
            )
            rule = Rule(
                socksInbound.tag,
                outbound["tag"],
                []
            )
            socks.append(socksInbound)
            rules.append(rule)
        route = XrayRouting(
            "IPIfNonMatch",
            "hybrid",
            rules
        )
        xrayConfig = appendBypassMode(XrayConfigSimple(socks, confs, route))
        confFinalStr = json.dumps(xrayConfig, default=lambda x: x.__dict__)

        configFilePath = "./xray_config_ping.json"
        with open(configFilePath, 'w') as f:
            f.write(confFinalStr)

        # Run xray in the background; the daemon thread dies with the process.
        runXrayThread = Thread(target=subprocess.run,
                               args=([Path("xray_ping/xray").resolve(), "run", "-c", configFilePath],))
        runXrayThread.daemon = True
        runXrayThread.start()
        time.sleep(5)  # give xray time to bind all the inbounds

        # Sanity check through the direct bypass inbound: if even the
        # no-proxy path fails, the generated config is malformed.
        if real_delay(3080, "bypass_mode")["realDelay_ms"] < 0:
            print("***************************************************************************")
            print(confFinalStr)
            raise Exception("Created config is incorrect! it's printed above")

        proxiesSorted = []
        for s in socks:
            # maxsplit=1 recovers the full outbound tag even when the tag
            # itself contains "__".
            proxiesSorted.append(real_delay(s.port, s.tag.split("__", 1)[1]))
        proxiesSorted = sorted(proxiesSorted, key=lambda d: d['realDelay_ms'])

        # BUG FIX: results used to be re-attached to configs by *index*
        # AFTER sorting by delay, pairing delays with the wrong configs.
        # Match each result back to its config by outbound tag instead.
        conf_by_tag = {c["tag"]: c for c in confs}
        for r in proxiesSorted:
            r["proxy"] = conf_by_tag[r["proxy"]]
            self.result.append(r)
            if r["realDelay_ms"] > 0 and len(self.actives) < limit:
                self.actives.append(r)
            if 1000 >= r['realDelay_ms'] > 0 and len(self.realDelay_under_1000) < limit:
                self.realDelay_under_1000.append(r)
                if not r["is403"]:
                    self.no403_realDelay_under_1000.append(r)
            if 1500 >= r['realDelay_ms'] > 0 and len(self.realDelay_under_1500) < limit:
                self.realDelay_under_1500.append(r)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/dontLetGrowAllRowProxies.py | xray/modules/dontLetGrowAllRowProxies.py | import shutil
from gitRepo import getLatestRowProxies, commitPushRowProxiesFile
# Trim the ever-growing list of collected proxy URLs: once it exceeds 1000
# lines, replace it with just the proxies that were last verified active.
with open("collected-proxies/row-url/all.txt", 'r') as rowProxiesFile:
    row_proxy_count = len(rowProxiesFile.readlines())

# BUG FIX: the old code called readlines() a second time on the exhausted
# file handle, so the log message always printed an empty list instead of
# the line count. Read once, keep the count.
if row_proxy_count < 1000:
    print("row proxies count(under 1000) => ", row_proxy_count)
    exit(0)

getLatestRowProxies()
shutil.copyfile("collected-proxies/row-url/actives.txt", "collected-proxies/row-url/all.txt")
commitPushRowProxiesFile("------cleaning all row url list base on actives-------")
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/main.py | xray/modules/main.py | from client import app, PROXY_CHANNELS
from telethon.sync import events
import re
import datetime
from gitRepo import commitPushRowProxiesFile, getLatestRowProxies
def extract_v2ray_links(text) -> list[str]:
    """Pull every vless/vmess/trojan share link out of *text*.

    A link runs from its scheme to the end of the line (backslashes and
    newlines terminate it).
    """
    regex = r"(vless|vmess|trojan):\/\/[^\\\n]*"
    links = []
    for match in re.finditer(regex, text, re.MULTILINE):
        links.append(match.group())
    return links
# Flush/commit the buffered proxies once more than this many have been
# collected. (The previous comment said "every 50" but the threshold is 100.)
PROXY_COUNTER_DEFAULT = 100
# Links buffered in memory between commits; flushed and cleared by handler().
temp_proxy_holder = []
@app.on(events.NewMessage())
async def handler(event):
    """Collect v2ray share links from messages posted in the watched channels.

    Links are buffered in the module-level *temp_proxy_holder* and flushed to
    the repo (append + commit + push) once more than PROXY_COUNTER_DEFAULT
    have accumulated.
    """
    # Accept the message when the sender's username (case-insensitive) or
    # numeric id is in the configured channel list.
    # NOTE(review): PROXY_CHANNELS holds lowercased *strings* from the env,
    # so the integer `event.sender.id in PROXY_CHANNELS` test can likely
    # never match — confirm whether id-based matching was intended.
    usernameMatched = (event.sender.username is not None) and (event.sender.username != "") and event.sender.username.lower() in PROXY_CHANNELS
    idMatched = event.sender.id in PROXY_CHANNELS
    if usernameMatched or idMatched:
        global PROXY_COUNTER_DEFAULT, temp_proxy_holder
        messageText = event.text
        # Cheap substring pre-check before running the regex extractor.
        has_v2ray_proxy = "vless://" in messageText or "vmess://" in messageText or "trojan://" in messageText
        if has_v2ray_proxy:
            v2rayProxies = extract_v2ray_links(messageText)
            print(str(datetime.datetime.now()) + " ===> ", end=None)
            print(v2rayProxies)
            temp_proxy_holder = temp_proxy_holder + v2rayProxies
            # Flush: append the buffered links to the repo file and push.
            if len(temp_proxy_holder) > PROXY_COUNTER_DEFAULT:
                getLatestRowProxies()
                with open("collected-proxies/row-url/all.txt", 'a') as f:
                    f.write("\n".join(temp_proxy_holder))
                    f.write("\n")
                commitPushRowProxiesFile(event.sender.username)
                temp_proxy_holder = []
# Start the Telegram client and block until the connection is closed.
app.start()
app.run_until_disconnected()
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/XrayRouting.py | xray/modules/XrayRouting.py | from typing import List, Any
class Rule:
    """A single Xray routing rule binding an inbound tag to an outbound tag."""
    inboundTag: str
    domain: List[Any]
    outboundTag: str
    type: str

    def __init__(self, inbound_tag: str, outbound_tag: str, domain: List[Any], type: str = None) -> None:
        self.inboundTag = inbound_tag
        self.outboundTag = outbound_tag
        self.domain = domain
        # Xray expects "field" for ordinary rules; callers may override.
        self.type = "field" if type is None else type
class XrayRouting:
    """Top-level Xray routing section: strategy, matcher and the rule list.

    Attribute names mirror Xray's JSON schema; serialization uses __dict__.
    """
    domainStrategy: str
    domainMatcher: str
    rules: List[Rule]

    def __init__(self, domain_strategy: str, domain_matcher: str, rules: List[Rule]) -> None:
        self.rules = rules
        self.domainMatcher = domain_matcher
        self.domainStrategy = domain_strategy
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/__init__.py | xray/modules/__init__.py | python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false | |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/XrayInbound.py | xray/modules/XrayInbound.py | from typing import List
class SocksSettings:
    """Settings block for a local SOCKS inbound (defaults: no auth, UDP on,
    transparent proxying off)."""
    auth: str
    udp: bool
    allow_transparent: bool

    def __init__(self, auth: str = None, udp: bool = None, allow_transparent: bool = None) -> None:
        # None means "use the default"; explicit False/"" values are kept.
        self.auth = "noauth" if auth is None else auth
        self.udp = True if udp is None else udp
        self.allow_transparent = False if allow_transparent is None else allow_transparent
class Sniffing:
    """Traffic-sniffing options for an inbound (enabled by default, detecting
    http and tls destinations)."""
    enabled: bool
    destOverride: List[str]
    routeOnly: bool

    def __init__(self, enabled: bool = None, dest_override: List[str] = None, route_only: bool = None) -> None:
        # None means "use the default"; explicit False/[] values are kept.
        self.enabled = True if enabled is None else enabled
        self.destOverride = ["http", "tls"] if dest_override is None else dest_override
        self.routeOnly = False if route_only is None else route_only
class Inbound:
    """One Xray inbound: a tagged listener with protocol, sniffing and
    protocol-specific settings. Plain data holder; attribute names mirror
    Xray's JSON schema."""
    tag: str
    port: int
    listen: str
    protocol: str
    sniffing: Sniffing
    settings: SocksSettings

    def __init__(self, tag: str, port: int, listen: str, protocol: str, sniffing: Sniffing, settings: SocksSettings) -> None:
        self.tag = tag
        self.protocol = protocol
        self.listen = listen
        self.port = port
        self.sniffing = sniffing
        self.settings = settings
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/IsValid.py | xray/modules/xray_url_decoder/IsValid.py | import uuid
from curses.ascii import isalnum
from .XraySetting import TLSSettings, RealitySettings
from .vless import UserVless, VnextVless
def is_valid_uuid(value):
    """Return True when str(*value*) parses as a UUID."""
    try:
        uuid.UUID(str(value))
    except ValueError:
        return False
    return True
def isValid_link(username: str, address: str, port: int) -> bool:
    """A link is considered valid when its user part is a UUID.

    *address* and *port* are accepted for future checks but unused today.
    """
    return is_valid_uuid(username)
def isValid_tls(config: TLSSettings) -> bool:
    """TLS settings are currently always accepted.

    A serverName presence check existed here but was deliberately disabled.
    """
    return True
def isValid_reality(config: RealitySettings) -> bool:
    """Validate REALITY settings: serverName and publicKey must be present
    and non-trivial, and shortId must be empty or alphanumeric."""
    # BUG FIX: this previously called curses.ascii.isalnum(config.shortId),
    # which only accepts a single character (it calls ord() internally) and
    # raised TypeError for the usual multi-character shortIds. Use the str
    # method instead.
    if config.serverName is not None and config.publicKey is not None and len(config.serverName) > 2 and len(
            config.publicKey) > 2 and (config.shortId == "" or config.shortId.isalnum()):
        return True
    return False
def isValid_userVless(config: UserVless) -> bool:
    """A vless user needs a non-trivial id (length > 2)."""
    return config.id is not None and len(config.id) > 2
def isValid_vnextVless(config: VnextVless) -> bool:
    """A vnext entry needs a plausible address, a positive port and at least
    one user."""
    if config.address is None or config.port is None:
        return False
    return len(config.address) > 2 and config.port > 0 and len(config.users) > 0
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/trojan.py | xray/modules/xray_url_decoder/trojan.py | import re
from random import randint
from typing import List
from xray_url_decoder.XraySetting import StreamSettings, Mux
class ServerTrojan:
    """A single trojan upstream: endpoint plus credentials."""
    address: str
    port: int
    password: str
    email: str
    level: int

    def __init__(self, address: str, port: int, password: str, email: str = "t@t.tt", level: int = 1) -> None:
        self.address = address
        self.port = port
        self.password = password
        # Xray wants an email/level per user; these defaults are arbitrary.
        self.level = level
        self.email = email
class SettingsTrojan:
    """Settings payload for a trojan outbound: just the server list."""
    servers: List[ServerTrojan]

    def __init__(self, servers: List[ServerTrojan]) -> None:
        # Stored as-is; serialization relies on the attribute name matching
        # Xray's JSON schema.
        self.servers = servers
class Trojan:
    """A complete trojan outbound, serialized to JSON via __dict__."""
    tag: str
    protocol: str
    settings: SettingsTrojan
    streamSettings: StreamSettings
    mux: Mux

    def __init__(self, name: str, settings: SettingsTrojan, stream_settings: StreamSettings, mux: Mux) -> None:
        # The caller-supplied name is used verbatim as the outbound tag.
        self.tag = name
        self.protocol = "trojan"
        self.settings = settings
        self.streamSettings = stream_settings
        self.mux = mux
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/vless.py | xray/modules/xray_url_decoder/vless.py | import re
from random import randint
from typing import List
from xray_url_decoder.XraySetting import StreamSettings, Mux
class UserVless:
    """Per-user credentials/options inside a vless vnext entry."""
    id: str
    alterId: int
    email: str
    security: str
    encryption: str
    flow: str

    def __init__(self, id: str, alter_id: int = 0, email: str = "t@t.tt", security: str = "auto",
                 encryption: str = "none", flow: str = "") -> None:
        self.id = id
        self.flow = flow
        self.encryption = encryption
        self.security = security
        # Xray requires email/alterId fields; the defaults are arbitrary.
        self.email = email
        self.alterId = alter_id
class VnextVless:
    """One vless server endpoint together with its user list."""
    address: str
    port: int
    users: List[UserVless]

    def __init__(self, address: str, port: int, users: List[UserVless]) -> None:
        self.users = users
        self.port = port
        self.address = address
class SettingsVless:
    """Settings payload for a vless outbound: the vnext server list."""
    vnext: List[VnextVless]

    def __init__(self, vnext: List[VnextVless]) -> None:
        # Stored as-is; serialization relies on the attribute name matching
        # Xray's JSON schema.
        self.vnext = vnext
class Vless:
    """A complete vless outbound, serialized to JSON via __dict__."""
    tag: str
    protocol: str
    settings: SettingsVless
    streamSettings: StreamSettings
    mux: Mux

    def __init__(self, name: str, settings: SettingsVless, stream_settings: StreamSettings, mux: Mux) -> None:
        # The caller-supplied name is used verbatim as the outbound tag.
        self.tag = name
        self.protocol = "vless"
        self.settings = settings
        self.streamSettings = stream_settings
        self.mux = mux
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/XrayUrlDecoder.py | xray/modules/xray_url_decoder/XrayUrlDecoder.py | import ipaddress
import json
import base64
import uuid
from urllib.parse import parse_qs, ParseResult, urlencode, urlparse, urlunparse
from .IsValid import isValid_tls, isValid_reality, isValid_userVless, isValid_vnextVless, isValid_link
from .XraySetting import GrpcSettings, TCPSettings, WsSettingsVless, RealitySettings, TLSSettings, Mux
from .trojan import Trojan, ServerTrojan, SettingsTrojan
from .vless import Vless, UserVless, SettingsVless, VnextVless
from .vmess import Vmess, UserVmess, VnextVmess, SettingsVmess
from .XraySetting import StreamSettings
from collections import namedtuple
def is_ipv6_address(hostname):
    """True when *hostname* parses as a literal IPv6 address."""
    try:
        ipaddress.IPv6Address(hostname)
    except ipaddress.AddressValueError:
        return False
    return True
def convertVmessLinkToStandardLink(link):
    """Convert a base64 vmess:// share link into standard URL form.

    The JSON payload after "vmess://" is decoded and re-emitted as
    ``vmess://<id>@<host>:<port>?<all fields urlencoded>#<name>`` so the
    generic URL parsing in XrayUrlDecoder handles vmess like other schemes.
    """
    payload = link[8:].strip()
    # BUG FIX: share links frequently omit the trailing base64 padding,
    # which makes b64decode raise binascii.Error on otherwise valid links.
    # Restore the padding before decoding.
    payload += "=" * (-len(payload) % 4)
    data: dict = json.loads(base64.b64decode(payload).decode('utf-8'))
    # Mirror vmess JSON fields onto the query-parameter names used by the
    # other schemes' decoders.
    data['type'] = data['net']
    data['path'] = data.get('path', None)
    data['aid'] = data.get('aid', None)
    data['security'] = data.get('tls', None)
    if is_ipv6_address(data["add"]):
        # Bracket IPv6 hosts so the netloc parses correctly.
        data["add"] = "[{}]".format(data["add"])
    Components = namedtuple(
        typename='Components',
        field_names=['scheme', 'netloc', 'url', 'path', 'query', 'fragment']
    )
    url = urlunparse(
        Components(
            scheme='vmess',
            netloc='{username}@{hostname}:{port}'.format(username=data['id'], hostname=data["add"], port=data["port"]),
            query=urlencode(data),
            path='',
            url='',
            fragment=data['ps']
        )
    )
    return url
class XrayUrlDecoder:
    """Decode a v2ray share link (vless/vmess/trojan) into an Xray outbound.

    vmess links are first normalized to standard URL form via
    convertVmessLinkToStandardLink(). After construction, ``isSupported`` /
    ``isValid`` report whether the scheme/transport is handled and whether
    the link passed structural validation.
    """
    url: ParseResult    # parsed form of the (possibly converted) link
    queries: dict       # query string flattened to first-value-per-key
    link: str           # the normalized link text
    name: str           # "<uuid>_@_<fragment>" — used as the outbound tag
    isSupported: bool   # scheme and transport are ones we can translate
    isValid: bool       # link passed the validation checks
    type: str           # transport from the "type" query param (ws/grpc/tcp/...)
    security: str       # "tls", "reality" or None

    def __init__(self, link, tagUUID=None):
        """Parse *link*. *tagUUID* seeds the outbound tag; a random uuid4
        hex is used when it is not provided."""
        match link[:5]:
            case "vmess":
                link = convertVmessLinkToStandardLink(link)
        if tagUUID is None:
            tagUUID = uuid.uuid4().hex
        self.link = link
        self.url = urlparse(self.link)
        self.name = tagUUID + "_@_" + (self.url.fragment if len(self.url.fragment) > 0 else "")
        # parse_qs yields lists; keep only the first value per key.
        q = parse_qs(self.url.query)
        self.queries = {key: value[0] for key, value in q.items()}
        self.isSupported = True
        self.isValid = True
        self.type = self.getQuery("type")
        self.security = self.getQuery("security")
        if not isValid_link(self.url.username, self.url.hostname, self.url.port):
            self.isValid = False

    def setIsValid(self, status: bool):
        """Latch-style setter: once invalid, a later True cannot reset it."""
        if not status:
            self.isValid = status

    def getQuery(self, key) -> str | None:
        """Return the first value of query param *key*, or None when absent."""
        try:
            return self.queries[key]
        except KeyError:
            return None

    def generate_json(self) -> Vless | Vmess | Trojan | None:
        """Build the protocol-specific outbound object; None (and
        isSupported=False) for unknown schemes."""
        match self.url.scheme:
            case "vless":
                return self.vless_json()
            case "vmess":
                return self.vmess_json()
            case "trojan":
                return self.trojan_json()
            case _:
                self.isSupported = False
                print("schema {} is not supported yet".format(self.url.scheme))

    def generate_json_str(self) -> str:
        """JSON-serialize the outbound; empty string when unsupported."""
        json_obj = self.generate_json()
        if json_obj is None:
            return ""
        return json.dumps(json_obj, default=lambda x: x.__dict__, ensure_ascii=False)

    def stream_setting_obj(self) -> StreamSettings | None:
        """Translate the "type" (transport) and "security" query params into
        a StreamSettings object; marks the decoder unsupported/invalid as
        side effects. Returns None for unsupported transports."""
        wsSetting = None
        grpcSettings = None
        tcpSettings = None
        tlsSettings = None
        realitySettings = None
        match self.type:
            case "grpc":
                grpcSettings = GrpcSettings(self.getQuery("serviceName"))
            case "ws":
                headers = {}
                # "host" (when present) wins over "sni" for the Host header.
                if self.getQuery("sni") is not None:
                    headers["Host"] = self.getQuery("sni")
                if self.getQuery("host"):
                    headers["Host"] = self.getQuery("host")
                wsSetting = WsSettingsVless(self.getQuery("path"), headers)
            case "tcp":
                if self.getQuery("headerType") == "http":
                    # Synthesize an HTTP-camouflage request header block.
                    header = {
                        "type": "http",
                        "request": {
                            "version": "1.1",
                            "method": "GET",
                            "path": [
                                (self.getQuery("path") if self.getQuery("path") is not None else "/")
                            ],
                            "headers": {
                                "Host": [
                                    self.getQuery("host")
                                ],
                                "User-Agent": [
                                    ""
                                ],
                                "Accept-Encoding": [
                                    "gzip, deflate"
                                ],
                                "Connection": [
                                    "keep-alive"
                                ],
                                "Pragma": "no-cache"
                            }
                        }
                    }
                    tcpSettings = TCPSettings(None, header)
            case _:
                self.isSupported = False
                print("type '{}' is not supported yet".format(self.type))
                return
        match self.security:
            case "tls":
                alpn = None
                if self.getQuery("alpn") is not None:
                    alpn = self.getQuery("alpn").split(",")
                tlsSettings = TLSSettings(self.getQuery("sni"), fingerprint=self.getQuery("fp"), alpn=alpn)
                self.setIsValid(isValid_tls(tlsSettings))
            case "reality":
                realitySettings = RealitySettings(self.getQuery("sni"), self.getQuery("pbk"),
                                                  fingerprint=self.getQuery("fp"), spider_x=self.getQuery("spx"),
                                                  short_id=self.getQuery("sid"))
                self.setIsValid(isValid_reality(realitySettings))
            # NOTE(review): unknown security values fall through silently;
            # the rejection below was deliberately disabled.
            # case _:
            #     self.isSupported = False
            #     print("security '{}' is not supported yet".format(self.security))
            #     return
        streamSetting = StreamSettings(self.type, self.security, wsSetting, grpcSettings,
                                       tcpSettings, tlsSettings, realitySettings)
        return streamSetting

    def vless_json(self) -> Vless:
        """Assemble a Vless outbound and run vless-specific validation."""
        user = UserVless(self.url.username, flow=self.getQuery("flow"))
        vnext = VnextVless(self.url.hostname, self.url.port, [user])
        setting = SettingsVless([vnext])
        streamSetting = self.stream_setting_obj()
        mux = Mux()
        vless = Vless(self.name, setting, streamSetting, mux)
        self.setIsValid(isValid_userVless(user) and isValid_vnextVless(vnext))
        return vless

    def vmess_json(self) -> Vmess:
        """Assemble a Vmess outbound.

        NOTE(review): unlike vless_json, no validity check is applied here —
        confirm whether that is intentional.
        """
        user = UserVmess(self.url.username, alterId=self.getQuery("aid"), security=self.getQuery("scy"))
        vnext = VnextVmess(self.url.hostname, self.url.port, [user])
        setting = SettingsVmess([vnext])
        streamSetting = self.stream_setting_obj()
        mux = Mux()
        vmess = Vmess(self.name, setting, streamSetting, mux)
        return vmess

    def trojan_json(self) -> Trojan:
        """Assemble a Trojan outbound (the password is the URL username)."""
        server = ServerTrojan(self.url.hostname, self.url.port, self.url.username)
        setting = SettingsTrojan([server])
        streamSetting = self.stream_setting_obj()
        mux = Mux()
        trojan = Trojan(self.name, setting, streamSetting, mux)
        return trojan

    def is_equal_to_config(self, config_srt: str) -> bool:
        """Compare this link against a serialized outbound JSON string: equal
        means same protocol plus same credentials/endpoint."""
        config = json.loads(config_srt)
        if config['protocol'] != self.url.scheme:
            return False
        match self.url.scheme:
            case "vless":
                return (config["settings"]["vnext"][0]["users"][0]["id"] == self.url.username and
                        config["settings"]["vnext"][0]["port"] == self.url.port and
                        config["settings"]["vnext"][0]["address"] == self.url.hostname)
            case "vmess":
                return (config["settings"]["vnext"][0]["users"][0]["id"] == self.url.username and
                        config["settings"]["vnext"][0]["port"] == self.url.port and
                        config["settings"]["vnext"][0]["address"] == self.url.hostname)
            case "trojan":
                return (config["settings"]["servers"][0]["password"] == self.url.username and
                        config["settings"]["servers"][0]["port"] == self.url.port and
                        config["settings"]["servers"][0]["address"] == self.url.hostname)
        return False
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/XraySetting.py | xray/modules/xray_url_decoder/XraySetting.py | from typing import List
class Mux:
    """Connection multiplexing settings; disabled with concurrency -1 by default."""
    enabled: bool
    concurrency: int

    def __init__(self, enabled: bool = None, concurrency: int = None) -> None:
        self.enabled = False if enabled is None else enabled
        self.concurrency = -1 if concurrency is None else concurrency
class TLSSettings:
    """tlsSettings section of an xray streamSettings block.

    Only attributes that were explicitly provided are set on the instance,
    so JSON serialisation via __dict__ omits everything else.
    """
    serverName: str
    rejectUnknownSni: bool
    allowInsecure: bool
    alpn: List[str]
    minVersion: str
    maxVersion: str
    cipherSuites: str
    certificates: List[str]
    disableSystemRoot: bool
    enableSessionResumption: bool
    fingerprint: str
    pinnedPeerCertificateChainSha256: List[str]

    def __init__(self, server_name: str, reject_unknown_sni: bool = None, allow_insecure: bool = None,
                 alpn: List[str] = None, min_version: str = None, max_version: str = None, cipher_suites: str = None,
                 certificates: List[str] = None, disable_system_root: bool = None,
                 enable_session_resumption: bool = None, fingerprint: str = None,
                 pinned_peer_certificate_chain_sha256: List[str] = None) -> None:
        # Map each snake_case argument onto its camelCase attribute and
        # assign only the values the caller actually supplied.
        provided = (
            ("serverName", server_name),
            ("rejectUnknownSni", reject_unknown_sni),
            ("allowInsecure", allow_insecure),
            ("alpn", alpn),
            ("minVersion", min_version),
            ("maxVersion", max_version),
            ("cipherSuites", cipher_suites),
            ("certificates", certificates),
            ("disableSystemRoot", disable_system_root),
            ("enableSessionResumption", enable_session_resumption),
            ("fingerprint", fingerprint),
            ("pinnedPeerCertificateChainSha256", pinned_peer_certificate_chain_sha256),
        )
        for attr, value in provided:
            if value is not None:
                setattr(self, attr, value)
class WsSettingsVless:
    """WebSocket transport settings: request path plus extra HTTP headers."""
    path: str
    headers: dict

    def __init__(self, path: str = None, headers=None) -> None:
        # Default to the root path and an empty header map.
        self.path = "/" if path is None else path
        self.headers = {} if headers is None else headers
class GrpcSettings:
    """gRPC transport settings; only supplied fields are materialised."""
    serviceName: str
    multiMode: bool
    idleTimeout: int
    healthCheckTimeout: int
    permitWithoutStream: bool
    initialWindowsSize: int

    def __init__(self, service_name: str = None, multi_mode: bool = None, idle_timeout: int = None,
                 health_check_timeout: int = None,
                 permit_without_stream: bool = None, initial_windows_size: int = None) -> None:
        # Assign only provided values so serialisation via __dict__ skips the rest.
        optional = (
            ("serviceName", service_name),
            ("multiMode", multi_mode),
            ("idleTimeout", idle_timeout),
            ("healthCheckTimeout", health_check_timeout),
            ("permitWithoutStream", permit_without_stream),
            ("initialWindowsSize", initial_windows_size),
        )
        for attr, value in optional:
            if value is not None:
                setattr(self, attr, value)
class RealitySettings:
    """REALITY TLS-camouflage settings with client-style defaults."""
    serverName: str
    fingerprint: str
    show: bool
    publicKey: str
    shortId: str
    spiderX: str

    def __init__(self, server_name: str, public_key: str, short_id: str = None, fingerprint: str = None,
                 show: bool = None, spider_x: str = None) -> None:
        self.serverName = server_name
        self.publicKey = public_key
        # Optional fields fall back to the defaults commonly used by clients.
        self.fingerprint = "chrome" if fingerprint is None else fingerprint
        self.show = False if show is None else show
        self.shortId = "" if short_id is None else short_id
        self.spiderX = "/" if spider_x is None else spider_x
class TCPSettings:
    """Raw TCP transport settings; the header block is set only when given."""
    acceptProxyProtocol: bool
    header: dict

    def __init__(self, accept_proxy_protocol: bool = None, header: dict = None) -> None:
        self.acceptProxyProtocol = False if accept_proxy_protocol is None else accept_proxy_protocol
        if header is not None:
            self.header = header
class StreamSettings:
    """Transport + security wrapper; optional sub-settings set only if given."""
    network: str
    security: str
    tlsSettings: TLSSettings
    realitySettings: RealitySettings
    wsSettings: WsSettingsVless
    grpcSettings: GrpcSettings
    tcpSettings: TCPSettings

    def __init__(self, network: str, security: str, ws_settings: WsSettingsVless = None,
                 grpc_settings: GrpcSettings = None, tcp_settings: TCPSettings = None, tls_settings: TLSSettings = None,
                 reality_settings: RealitySettings = None) -> None:
        self.network = network
        self.security = security
        # Materialise only the sub-settings the caller supplied so that JSON
        # serialisation via __dict__ omits the rest.
        optional = (
            ("tlsSettings", tls_settings),
            ("wsSettings", ws_settings),
            ("realitySettings", reality_settings),
            ("grpcSettings", grpc_settings),
            ("tcpSettings", tcp_settings),
        )
        for attr, value in optional:
            if value is not None:
                setattr(self, attr, value)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/__init__.py | xray/modules/xray_url_decoder/__init__.py | python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false | |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/xray_url_decoder/vmess.py | xray/modules/xray_url_decoder/vmess.py | import re
from random import randint
from typing import List
from xray_url_decoder.XraySetting import StreamSettings, Mux
class UserVmess:
    """vmess user credential with the protocol's usual defaults."""
    id: str
    security: str
    level: int
    alterId: int

    def __init__(self, id: str, alterId: int = None, security: str = None, level: int = None) -> None:
        self.id = id
        # Defaults: auto cipher, level 0, alterId 0 (modern AEAD mode).
        self.security = "auto" if security is None else security
        self.level = 0 if level is None else level
        self.alterId = 0 if alterId is None else alterId
class VnextVmess:
    """One vmess server endpoint together with its user list."""
    address: str
    port: int
    users: List[UserVmess]

    def __init__(self, address: str, port: int, users: List[UserVmess]) -> None:
        self.users = users
        self.address = address
        self.port = port
class SettingsVmess:
    """settings block of a vmess outbound: the list of vnext endpoints."""
    vnext: List[VnextVmess]

    def __init__(self, vnext: List[VnextVmess]) -> None:
        self.vnext = vnext
class Vmess:
    """Complete vmess outbound object as serialised into an xray config."""
    tag: str
    protocol: str
    settings: SettingsVmess
    streamSettings: StreamSettings
    mux: Mux

    def __init__(self, name: str, settings: SettingsVmess, stream_settings: StreamSettings, mux: Mux) -> None:
        self.protocol = "vmess"
        # The caller-supplied name is used verbatim as the outbound tag.
        self.tag = name
        self.settings = settings
        self.streamSettings = stream_settings
        self.mux = mux
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/VNetwork.py | xray/modules/clash_meta_url_decoder/VNetwork.py | class RealityOpts:
publicKey: str
shortId: str
def __init__(self, publicKey: str, shortId: str = None):
self.publicKey = publicKey
self.shortId = shortId if shortId is not None else ""
class GrpcOpts:
    """Clash.Meta grpc-opts block (service name only)."""
    grpcServiceName: str

    def __init__(self, grpcServiceName: str):
        self.grpcServiceName = grpcServiceName
class WsHeaders:
    """HTTP headers for the WebSocket handshake; only Host is modelled."""
    Host: str

    def __init__(self, Host: str):
        self.Host = Host
class WsOpts:
    """Clash.Meta ws-opts block: optional Host header plus request path."""
    path: str
    headers: WsHeaders

    def __init__(self, host: str = None, path: str = None):
        # Only create a headers object when a Host value was supplied, so
        # serialisation omits the key entirely otherwise.
        if host is not None:
            self.headers = WsHeaders(host)
        self.path = "/" if path is None else path
class VNetwork:
    """Shared transport/security fields for Clash.Meta proxy entries.

    Mandatory fields receive defaults; optional ones are set only when
    supplied so serialisation omits them.
    """
    network: str
    tls: bool
    servername: str
    flow: str
    udp: bool
    clientFingerprint: str
    fingerprint: str
    wsOpts: WsOpts
    grpcOpts: GrpcOpts
    realityOpts: RealityOpts

    def __init__(self,
                 network: str,
                 tls: bool = None,
                 servername: str = None,
                 flow: str = None,
                 udp: bool = None,
                 clientFingerprint: str = None,
                 fingerprint: str = None,
                 wsOpts: WsOpts = None,
                 grpcOpts: GrpcOpts = None,
                 realityOpts: RealityOpts = None) -> None:
        self.network = network
        self.tls = False if tls is None else tls
        self.udp = False if udp is None else udp
        self.clientFingerprint = "chrome" if clientFingerprint is None else clientFingerprint
        # Optional attributes are created only when a value was provided.
        optional = (
            ("servername", servername),
            ("flow", flow),
            ("fingerprint", fingerprint),
            ("wsOpts", wsOpts),
            ("grpcOpts", grpcOpts),
            ("realityOpts", realityOpts),
        )
        for attr, value in optional:
            if value is not None:
                setattr(self, attr, value)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/IsValid.py | xray/modules/clash_meta_url_decoder/IsValid.py | import uuid
def is_valid_uuid(value):
    """Return True when `value` parses as a UUID (any version)."""
    try:
        uuid.UUID(str(value))
    except ValueError:
        return False
    return True
def isValid_link(username: str, address: str, port: int) -> bool:
    """A link is valid when its credential is a well-formed UUID.

    `address` and `port` are accepted for interface stability but are not
    currently checked.
    """
    return is_valid_uuid(username)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/ClashMetaUrlDecoder.py | xray/modules/clash_meta_url_decoder/ClashMetaUrlDecoder.py | import ipaddress
import json
import base64
import re
import uuid
from collections import namedtuple
from urllib.parse import parse_qs, ParseResult, urlencode, urlparse, urlunparse
from clash_meta_url_decoder.IsValid import isValid_link
from clash_meta_url_decoder.VBase import VBase
from clash_meta_url_decoder.VNetwork import VNetwork, GrpcOpts, WsOpts, RealityOpts
from clash_meta_url_decoder.trojan import Trojan
from clash_meta_url_decoder.vless import Vless
from clash_meta_url_decoder.vmess import Vmess
def camel_to_kebab(obj):
    """Recursively convert camelCase dict keys to kebab-case.

    Walks dicts and lists; scalar leaves are returned unchanged. Fix: the
    original only recursed into dict values that were themselves dicts, so
    lists nested inside dicts kept their camelCase keys — values are now
    converted unconditionally for consistency with top-level lists.
    """
    if isinstance(obj, dict):
        converted = {}
        for key, value in obj.items():
            # "camelCase" -> "camel-case": dash before each upper-case letter
            # that follows a lower-case letter or digit, then lower-case all.
            kebab_key = re.sub(r'([a-z0-9])([A-Z])', r'\1-\2', key).lower()
            converted[kebab_key] = camel_to_kebab(value)
        return converted
    elif isinstance(obj, list):
        return [camel_to_kebab(item) for item in obj]
    else:
        return obj
def is_ipv6_address(hostname):
    """Return True when `hostname` is a literal IPv6 address."""
    try:
        ipaddress.IPv6Address(hostname)
    except ipaddress.AddressValueError:
        return False
    return True
def convertVmessLinkToStandardLink(link):
    """Rewrite a base64-encoded vmess:// link into standard URL form.

    The payload after "vmess://" is base64 JSON; its fields are normalised
    (net -> type, tls -> security, path/aid defaulted to None) and re-emitted
    as query parameters of a vmess://user@host:port#name URL.
    """
    payload: dict = json.loads(base64.b64decode(link[8:]).decode('utf-8'))
    # Normalise field names so the query string matches the standard scheme.
    payload['type'] = payload['net']
    payload['path'] = payload.get('path', None)
    payload['aid'] = payload.get('aid', None)
    payload['security'] = payload.get('tls', None)
    # Literal IPv6 hosts must be bracketed inside a netloc.
    if is_ipv6_address(payload["add"]):
        payload["add"] = "[{}]".format(payload["add"])
    netloc = '{username}@{hostname}:{port}'.format(
        username=payload['id'], hostname=payload["add"], port=payload["port"])
    # urlunparse takes (scheme, netloc, path, params, query, fragment).
    return urlunparse(('vmess', netloc, '', '', urlencode(payload), payload['ps']))
def class_to_json_str_kabab(obj):
    """Serialise an object tree to JSON text with kebab-case keys."""
    top = camel_to_kebab(vars(obj))
    # Nested objects serialise through the same vars() + key-conversion path.
    return json.dumps(top, default=lambda o: camel_to_kebab(vars(o)), ensure_ascii=False)
class ClashMetaDecoder:
    """Decode a proxy share link (vless/vmess/trojan URL) into Clash.Meta objects.

    vmess links carrying base64 JSON are first converted to standard URL form;
    the link is parsed once and translated on demand into the matching proxy
    class.
    """
    url: ParseResult        # parsed form of `link`
    queries: dict           # flattened query string: first value per key
    link: str
    name: str               # "<tagUUID>_@_<fragment>"
    isSupported: bool       # cleared when an unknown scheme/transport is seen
    isValid: bool           # cleared when credential validation fails
    type: str
    security: str

    def __init__(self, link, tagUUID=None):
        # vmess links embed base64 JSON; normalise them to URL form first.
        match link[:5]:
            case "vmess":
                link = convertVmessLinkToStandardLink(link)
        if tagUUID is None:
            tagUUID = uuid.uuid4().hex
        self.link = link
        self.url = urlparse(self.link)
        self.name = tagUUID + "_@_" + (self.url.fragment if len(self.url.fragment) > 0 else "")
        q = parse_qs(self.url.query)
        # parse_qs yields lists; keep only the first value for each key.
        self.queries = {key: value[0] for key, value in q.items()}
        self.isSupported = True
        self.isValid = True
        self.type = self.getQuery("type")
        self.security = self.getQuery("security")
        if not isValid_link(self.url.username, self.url.hostname, self.url.port):
            self.isValid = False

    def setIsValid(self, status: bool):
        # Latch: once invalid, the flag never flips back to True.
        if not status:
            self.isValid = status

    def getQuery(self, key) -> str | None:
        """Return the first value for a query parameter, or None if absent."""
        try:
            return self.queries[key]
        except KeyError:
            return None

    def generate_obj_str(self) -> str:
        """Serialise the proxy as a kebab-case JSON string; None if unsupported."""
        match self.url.scheme:
            case "vless":
                return class_to_json_str_kabab(self.vless())
            case "vmess":
                # Clash.Meta expects the literal key "alterId", not "alter-id".
                return class_to_json_str_kabab(self.vmess()).replace("alter-id", "alterId")
            case "trojan":
                return class_to_json_str_kabab(self.trojan())
            case _:
                self.isSupported = False
                print("schema {} is not supported yet".format(self.url.scheme))

    def vNetwork_obj(self) -> VNetwork | None:
        """Build the transport/security settings shared by all proxy types."""
        wsOpts = None
        grpcOpts = None
        realityOpts = None
        isTLS = False
        match self.type:
            case "grpc":
                if self.getQuery("serviceName") is not None:
                    grpcOpts = GrpcOpts(self.getQuery("serviceName"))
            case "ws":
                wsOpts = WsOpts(self.getQuery("sni"), self.getQuery("path"))
            case _:
                # Anything unrecognised is treated as plain tcp.
                self.type = "tcp"
        match self.security:
            case "tls":
                isTLS = True
            case "reality":
                # REALITY rides on top of TLS from Clash.Meta's point of view.
                isTLS = True
                realityOpts = RealityOpts(self.getQuery("pbk"), self.getQuery("sid"))
            case "none":
                isTLS = False
            case _:
                isTLS = False
        # NOTE(review): fingerprint is passed as "" (not None), so the
        # attribute is always set — presumably intentional; confirm.
        vNetwork = VNetwork(self.type, isTLS, self.getQuery("sni"), self.getQuery("flow"), False, self.getQuery("fp"),
                            "", wsOpts, grpcOpts, realityOpts)
        return vNetwork

    def vless(self) -> Vless:
        """Build a Clash.Meta vless proxy entry from the parsed link."""
        vBase = VBase("vless", self.name, self.url.hostname, self.url.port)
        vNetwork = self.vNetwork_obj()
        vless = Vless(self.url.username, vBase, vNetwork)
        return vless

    def vmess(self) -> Vmess:
        """Build a Clash.Meta vmess proxy entry from the parsed link."""
        vBase = VBase("vmess", self.name, self.url.hostname, self.url.port)
        vNetwork = self.vNetwork_obj()
        vmess = Vmess(self.url.username, vBase, vNetwork, alterId=self.getQuery("aid"), cipher=self.getQuery("scy"))
        return vmess

    def trojan(self) -> Trojan:
        """Build a Clash.Meta trojan proxy entry from the parsed link."""
        vBase = VBase("trojan", self.name, self.url.hostname, self.url.port)
        trojan = Trojan(self.url.username, vBase)
        return trojan
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/trojan.py | xray/modules/clash_meta_url_decoder/trojan.py | from clash_meta_url_decoder.VBase import VBase
class Trojan(VBase):
    """Clash.Meta trojan proxy entry."""
    password: str
    clientFingerprint: str
    fingerprint: str
    udp: bool
    sni: str
    alpn: list[str]
    skipCertVerify: bool

    def __init__(self, password: str, vBase: VBase, clientFingerprint: str = None, fingerprint: str = None,
                 udp: bool = None, sni: str = None, alpn: list[str] = None, skipCertVerify: bool = None) -> None:
        VBase.__init__(self, vBase.type, vBase.name, vBase.server, vBase.port)
        self.password = password
        self.clientFingerprint = "chrome" if clientFingerprint is None else clientFingerprint
        # Optional fields are set only when supplied so serialisation skips them.
        optional = (
            ("fingerprint", fingerprint),
            ("udp", udp),
            ("sni", sni),
            ("alpn", alpn),
            ("skipCertVerify", skipCertVerify),
        )
        for attr, value in optional:
            if value is not None:
                setattr(self, attr, value)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/vless.py | xray/modules/clash_meta_url_decoder/vless.py | from clash_meta_url_decoder.VBase import VBase
from clash_meta_url_decoder.VNetwork import VNetwork
class Vless(VBase, VNetwork):
    """Clash.Meta vless proxy entry: base fields plus transport settings."""
    uuid: str

    def __init__(self, uuid: str, vBase: VBase, bNetwork: VNetwork) -> None:
        self.uuid = uuid
        VBase.__init__(self, vBase.type, vBase.name, vBase.server, vBase.port)
        # Forward only the attributes bNetwork actually carries; VNetwork in
        # turn skips anything that arrives as None.
        VNetwork.__init__(self,
                          getattr(bNetwork, "network", None),
                          getattr(bNetwork, "tls", None),
                          getattr(bNetwork, "servername", None),
                          getattr(bNetwork, "flow", None),
                          getattr(bNetwork, "udp", None),
                          getattr(bNetwork, "clientFingerprint", None),
                          getattr(bNetwork, "fingerprint", None),
                          getattr(bNetwork, "wsOpts", None),
                          getattr(bNetwork, "grpcOpts", None),
                          getattr(bNetwork, "realityOpts", None))
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/VBase.py | xray/modules/clash_meta_url_decoder/VBase.py | import re
from random import randint
def generateName(name):
    """Build a unique-ish proxy tag: a random numeric id plus a sanitised name.

    The name is truncated to 120 chars and stripped of '/', ':' and '+'.
    """
    prefix = "proxy_" + str(randint(1111, 9999999)) + "_"
    cleaned = re.sub(r'([/:+])+', '', name[:120])
    return prefix + cleaned
class VBase:
    """Fields common to every Clash.Meta proxy entry."""
    name: str
    type: str
    port: int
    server: str

    def __init__(self, type: str, name: str, server: str, port: int) -> None:
        self.type = type
        # The name is used verbatim (random tag generation was abandoned).
        self.name = name
        self.server = server
        self.port = port
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/xray/modules/clash_meta_url_decoder/vmess.py | xray/modules/clash_meta_url_decoder/vmess.py | from clash_meta_url_decoder.VBase import VBase
from clash_meta_url_decoder.VNetwork import VNetwork
class Vmess(VBase, VNetwork):
    """Clash.Meta vmess proxy entry: base fields plus transport settings."""
    uuid: str
    alterId: int
    cipher: str

    def __init__(self, uuid: str, vBase: VBase, bNetwork: VNetwork, alterId: int = None, cipher: str = None) -> None:
        self.uuid = uuid
        self.cipher = cipher if cipher is not None else "auto"
        # alterId arrives as a query-string value (str) or None; coerce to
        # int, defaulting to 0 on missing/malformed input. Fix: the bare
        # `except:` swallowed every exception; catch only conversion errors.
        try:
            self.alterId = int(alterId)
        except (TypeError, ValueError):
            self.alterId = 0
        VBase.__init__(self, vBase.type, vBase.name, vBase.server, vBase.port)
        # Forward only the attributes bNetwork actually carries; VNetwork
        # itself skips anything that arrives as None.
        VNetwork.__init__(self,
                          getattr(bNetwork, "network", None),
                          getattr(bNetwork, "tls", None),
                          getattr(bNetwork, "servername", None),
                          getattr(bNetwork, "flow", None),
                          getattr(bNetwork, "udp", None),
                          getattr(bNetwork, "clientFingerprint", None),
                          getattr(bNetwork, "fingerprint", None),
                          getattr(bNetwork, "wsOpts", None),
                          getattr(bNetwork, "grpcOpts", None),
                          getattr(bNetwork, "realityOpts", None))
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
Surfboardv2ray/Proxy-sorter | https://github.com/Surfboardv2ray/Proxy-sorter/blob/c3826a3f304c57855729c678ccb66b0957052705/selector/random_lines.py | selector/random_lines.py | import random
# File names: input is the converted proxy list, output the random sample.
input_file = 'output/converted.txt'
output_file = 'selector/random'

# Number of lines to select (clamped to the file length at runtime).
num_lines_to_select = 100
def select_random_lines(input_file, output_file, num_lines):
    """Write `num_lines` randomly chosen lines of input_file to output_file.

    Sampling is without replacement; when the file has fewer lines than
    requested, all of them become eligible.
    """
    with open(input_file, 'r') as src:
        all_lines = src.readlines()

    # Clamp so random.sample never raises on short files.
    chosen = random.sample(all_lines, min(num_lines, len(all_lines)))

    with open(output_file, 'w') as dst:
        dst.writelines(chosen)
if __name__ == "__main__":
    # Run with the module-level defaults when invoked as a script.
    select_random_lines(input_file, output_file, num_lines_to_select)
| python | MIT | c3826a3f304c57855729c678ccb66b0957052705 | 2026-01-05T06:58:19.259148Z | false |
tobilg/duckdb-nodejs-layer | https://github.com/tobilg/duckdb-nodejs-layer/blob/caa10aa16f944aee73e66a4f7c0c9dd68208bb40/src/configure.py | src/configure.py | import os
import sys
import json
import pickle
# list of extensions to bundle into the static build
extensions = ['parquet', 'icu', 'json', 'httpfs', 'fts']

# path to target: everything is resolved relative to the package root
basedir = os.getcwd()
target_dir = os.path.join(basedir, 'src', 'duckdb')
gyp_in = os.path.join(basedir, 'binding.gyp.in')
gyp_out = os.path.join(basedir, 'binding.gyp')
cache_file = os.path.join(basedir, 'filelist.cache')

# path to package_build.py: lives two directories up, in the duckdb scripts dir
os.chdir(os.path.join('..', '..'))
scripts_dir = 'scripts'
sys.path.append(scripts_dir)
import package_build

# Preprocessor defines marking each extension as statically linked.
defines = ['DUCKDB_EXTENSION_{}_LINKED'.format(ext.upper()) for ext in extensions]
# Three build modes: reuse a cached file list, link against an existing
# duckdb build directory, or copy the sources for a fresh amalgamated build.
if os.environ.get('DUCKDB_NODE_BUILD_CACHE') == '1' and os.path.isfile(cache_file):
    # Fast path: reuse the lists computed by a previous configure run.
    with open(cache_file, 'rb') as f:
        cache = pickle.load(f)
    source_list = cache['source_list']
    include_list = cache['include_list']
    libraries = cache['libraries']
    windows_options = cache['windows_options']
    cflags = cache['cflags']
elif 'DUCKDB_NODE_BINDIR' in os.environ:
    def find_library_path(libdir, libname):
        # Locate the built library file for `libname` inside `libdir`.
        flist = os.listdir(libdir)
        for fname in flist:
            fpath = os.path.join(libdir, fname)
            if os.path.isfile(fpath) and package_build.file_is_lib(fname, libname):
                return fpath
        raise Exception(f"Failed to find library {libname} in {libdir}")

    # existing build: pick flags and prebuilt libraries from the environment
    existing_duckdb_dir = os.environ['DUCKDB_NODE_BINDIR']
    cflags = os.environ['DUCKDB_NODE_CFLAGS']
    libraries = os.environ['DUCKDB_NODE_LIBS'].split(' ')
    include_directories = [os.path.join('..', '..', include) for include in package_build.third_party_includes()]
    include_list = package_build.includes(extensions)
    result_libraries = package_build.get_libraries(existing_duckdb_dir, libraries, extensions)
    # Resolve each (dir, name) pair to an actual library file path.
    libraries = []
    for libdir, libname in result_libraries:
        if libdir is None:
            continue
        libraries.append(find_library_path(libdir, libname))
    source_list = []
    cflags = []
    windows_options = []
    if os.name == 'nt':
        # MSVC options are the slash-prefixed tokens of the C flags.
        windows_options = [x for x in os.environ['DUCKDB_NODE_CFLAGS'].split(' ') if x.startswith('/')]
    else:
        # Carry over debug/optimisation flags from the existing build.
        if '-g' in os.environ['DUCKDB_NODE_CFLAGS']:
            cflags += ['-g']
        if '-O0' in os.environ['DUCKDB_NODE_CFLAGS']:
            cflags += ['-O0']
    if '-DNDEBUG' in os.environ['DUCKDB_NODE_CFLAGS']:
        defines += ['NDEBUG']
    if 'DUCKDB_NODE_BUILD_CACHE' in os.environ:
        # Persist the computed lists so the next run can take the fast path.
        cache = {
            'source_list': source_list,
            'include_list': include_list,
            'libraries': libraries,
            'cflags': cflags,
            'windows_options': windows_options,
        }
        with open(cache_file, 'wb+') as f:
            pickle.dump(cache, f)
else:
    # fresh build - copy over all of the files
    (source_list, include_list, original_sources) = package_build.build_package(target_dir, extensions, False)
    # # the list of all source files (.cpp files) that have been copied into the `duckdb_source_copy` directory
    # print(source_list)
    # # the list of all include files
    # print(include_list)
    # Rebase paths so they are relative to the package directory.
    source_list = [os.path.relpath(x, basedir) if os.path.isabs(x) else os.path.join('src', x) for x in source_list]
    include_list = [os.path.join('src', 'duckdb', x) for x in include_list]
    libraries = []
    # RTTI must be enabled for the bundled duckdb sources.
    windows_options = ['/GR']
    cflags = ['-frtti']
def sanitize_path(x):
    """Normalise path separators so gyp always sees forward slashes."""
    return x.replace('\\', '/')


source_list = [sanitize_path(p) for p in source_list]
include_list = [sanitize_path(p) for p in include_list]
libraries = [sanitize_path(p) for p in libraries]
# Load the gyp template; ${...} placeholders are substituted below.
with open(gyp_in, 'r') as f:
    input_json = json.load(f)
def replace_entries(node, replacement_map):
    """Recursively substitute placeholder strings in a parsed gyp JSON tree.

    Any list element matching a key of `replacement_map` is removed and the
    mapped list of values is appended; dicts and lists are walked
    recursively. Mutates `node` in place. (Fix: use isinstance instead of
    the `type(x) == type([])` anti-pattern.)
    """
    if isinstance(node, list):
        for key in replacement_map.keys():
            if key in node:
                node.remove(key)
                node += replacement_map[key]
        for entry in node:
            if isinstance(entry, (list, dict)):
                replace_entries(entry, replacement_map)
    if isinstance(node, dict):
        for key in node.keys():
            replace_entries(node[key], replacement_map)
# Wire each template placeholder to the list computed above.
replacement_map = {}
replacement_map['${SOURCE_FILES}'] = source_list
replacement_map['${INCLUDE_FILES}'] = include_list
replacement_map['${DEFINES}'] = defines
replacement_map['${LIBRARY_FILES}'] = libraries
replacement_map['${CFLAGS}'] = cflags
replacement_map['${WINDOWS_OPTIONS}'] = windows_options
replace_entries(input_json, replacement_map)

# Emit the final binding.gyp.
with open(gyp_out, 'w+') as f:
    json.dump(input_json, f, indent=4, separators=(", ", ": "))
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/QnQSec/2025/web/QnQsec_portal/app.py | ctfs/QnQSec/2025/web/QnQsec_portal/app.py | import os
import sqlite3
import secrets
import hashlib
from hashlib import md5
from datetime import datetime, timedelta, timezone
import jwt
from flask import (
Flask, request, render_template, redirect, session,
flash, url_for, g, abort, make_response
)
from admin_routes import admin_bp,generate_jwt
# Filesystem layout: the flag lives under ./secret/flag.txt next to this file.
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
SECRET_DIR = os.path.join(BASE_DIR, 'secret')
FLAG_PATH = os.path.join(SECRET_DIR, 'flag.txt')
FLAG_PREFIX = 'QnQsec'
def ensure_flag():
    """Create the flag file with a fresh random flag if it does not exist yet."""
    os.makedirs(SECRET_DIR, exist_ok=True)
    if os.path.exists(FLAG_PATH):
        return
    flag = f"{FLAG_PREFIX}{{{secrets.token_hex(16)}}}"
    with open(FLAG_PATH, 'w') as fh:
        fh.write(flag)
ensure_flag()

app = Flask(__name__)
# Both signing secrets derive from one environment value, so a single
# Q_SECRET configures the whole app.
base = os.environ.get("Q_SECRET", "qnqsec-default")
app.config['SECRET_KEY'] = hashlib.sha1(("pepper:" + base).encode()).hexdigest()
app.config['JWT_SECRET'] = hashlib.sha256(("jwtpepper:" + base).encode()).hexdigest()
app.config['JWT_EXPIRES_MIN'] = 60
app.register_blueprint(admin_bp)

DB_PATH = os.path.join(BASE_DIR, 'users.db')
def get_db():
    """Return the per-request SQLite connection, opening it lazily."""
    if 'db' not in g:
        conn = sqlite3.connect(DB_PATH, timeout=10)
        # Row factory gives dict-style access to result columns.
        conn.row_factory = sqlite3.Row
        g.db = conn
    return g.db
@app.teardown_appcontext
def close_db(_exc):
    """Close the request-scoped DB handle at the end of the app context."""
    conn = g.pop('db', None)
    if conn is not None:
        conn.close()
def init_db():
    """(Re)create the users table and seed the target account."""
    with sqlite3.connect(DB_PATH, timeout=10) as conn:
        # WAL lets concurrent request handlers read while one writes.
        conn.execute('PRAGMA journal_mode=WAL')
        conn.execute('drop table if exists users')
        conn.execute('create table users(username text primary key, password text not null)')
        # NOTE(review): passwords are unsalted MD5 digests — presumably
        # intentional for this CTF challenge.
        conn.execute('insert into users values("flag", "401b0e20e4ccf7a8df254eac81e269a0")')
        conn.commit()
# Seed the database on first run only; an existing file is left untouched.
if not os.path.exists(DB_PATH):
    init_db()
@app.route('/')
def index():
    """Root just forwards to the login page."""
    return redirect(url_for('login'))
@app.route('/sign_up', methods=['GET', 'POST'])
def sign_up():
    """Registration form: GET renders it, POST creates the account."""
    if request.method == 'GET':
        return render_template('sign_up.html')
    username = (request.form.get('username') or '').strip()
    password = request.form.get('password') or ''
    if not username or not password:
        flash('Missing username or password', 'error')
        return render_template('sign_up.html')
    try:
        db = get_db()
        # Usernames are stored lower-cased; passwords as unsalted MD5
        # (NOTE(review): weak by design — this is a CTF target).
        db.execute(
            'insert into users values(lower(?), ?)',
            (username, md5(password.encode()).hexdigest())
        )
        db.commit()
        flash(f'User {username} created', 'message')
        return redirect(url_for('login'))
    except sqlite3.IntegrityError:
        # Primary-key collision: the (lower-cased) username already exists.
        flash('Username is already registered', 'error')
        return render_template('sign_up.html')
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate a user; on success set both a session entry and a JWT cookie."""
    if request.method == 'GET':
        return render_template('login.html')
    username = (request.form.get('username') or '').strip()
    password = request.form.get('password') or ''
    if not username or not password:
        flash('Missing username or password', 'error')
        return render_template('login.html')
    db = get_db()
    # Credentials are compared against the stored unsalted MD5 digest
    # (NOTE(review): weak by design — CTF target).
    row = db.execute(
        'select username, password from users where username = lower(?) and password = ?',
        (username, md5(password.encode()).hexdigest())
    ).fetchone()
    if row:
        # Session stores the title-cased name; the admin check in account()
        # compares against 'Flag'.
        session['user'] = username.title()
        role = "admin" if username.lower() == "flag" else "user"
        token = generate_jwt(session['user'], role, app.config['JWT_EXPIRES_MIN'], app.config['JWT_SECRET'])
        resp = make_response(redirect(url_for('account')))
        # NOTE(review): httponly=False exposes the JWT to client-side
        # scripts — presumably part of the challenge; confirm before reuse.
        resp.set_cookie("admin_jwt", token, httponly=False, samesite="Lax")
        return resp
    flash('Invalid username or password', 'error')
    return render_template('login.html')
@app.route('/logout')
def logout():
    """Drop the session user and clear the JWT cookie."""
    session.pop('user', None)
    response = make_response(redirect(url_for('login')))
    response.delete_cookie("admin_jwt")
    return response
@app.route('/account')
def account():
    """Render the account page; only the seeded 'flag' user counts as admin."""
    user = session.get('user')
    if not user:
        return redirect(url_for('login'))
    # login() stores the title-cased username, hence the 'Flag' comparison.
    return render_template('account.html', user=user, is_admin=(user == 'Flag'))
if __name__ == '__main__':
    # Bind on all interfaces for the containerised challenge deployment.
    app.run(host='0.0.0.0', port=5000, debug=False, use_reloader=False)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/VuwCTF/2025/misc/not_turing_complete/parser.py | ctfs/VuwCTF/2025/misc/not_turing_complete/parser.py | # parser.py
VARIABLES = [ "a", "b", "c" ]
def trim_line(line):
    """Strip leading and trailing whitespace from a source line."""
    return line.strip()
class LeftExpr:
    """Assignment target: must name one of the known variables."""

    def __init__(self, var_name):
        self.var_name = var_name
        # Reuse validate() so construction and re-checking share one rule.
        self.validate()

    def validate(self):
        """Raise ValueError unless var_name is a known variable."""
        if self.var_name not in VARIABLES:
            raise ValueError()

    def __repr__(self):
        return f"LeftExpr({self.var_name})"
# Node kinds a right-hand side may take.
RIGHT_EXPR_TYPES = [ "literal", "variable", "arithmetic" ]
# Supported binary operators ('^' is XOR and '/' floor division in the interpreter).
ARITHMETIC_OPERATORS = [ "+", "-", "*", "/", "^", "&", "|" ]
class RightExpr:
    """Right-hand side node: a literal, a variable reference, or a binary op."""

    def __init__(self, type: str, data: list):
        if type not in RIGHT_EXPR_TYPES:
            raise ValueError()
        self.type = type
        self.data = data

    def validate(self):
        """Raise ValueError unless `data` is well-formed for this node type."""
        if self.type == "literal":
            # data must be exactly [int]
            if len(self.data) != 1 or not isinstance(self.data[0], int):
                raise ValueError()
        elif self.type == "variable":
            # data must be exactly [known variable name]
            if len(self.data) != 1 or self.data[0] not in VARIABLES:
                raise ValueError()
        elif self.type == "arithmetic":
            # data must be [op, RightExpr, RightExpr]; operands validate recursively.
            if len(self.data) != 3:
                raise ValueError()
            op, lhs, rhs = self.data
            well_formed = (op in ARITHMETIC_OPERATORS
                           and isinstance(lhs, RightExpr)
                           and isinstance(rhs, RightExpr))
            if not well_formed:
                raise ValueError()
            lhs.validate()
            rhs.validate()

    def __repr__(self):
        return f"RightExpr({self.type}, {self.data})"
class Parser:
    """Line-oriented parser for the toy assignment language.

    Each non-empty line must be "<var> = <expr>" where <expr> is a literal,
    a variable, or a single binary operation between two primitives (no
    nesting, no parentheses). Parsed statements accumulate in self.code.
    """

    def __init__(self):
        # List of (LeftExpr, RightExpr) pairs in source order.
        self.code = []

    def parse_line(self, line: str):
        """Parse one source line and append it to the program; blanks are skipped."""
        line = trim_line(line)
        if len(line) == 0:
            return
        if "=" not in line:
            raise ValueError()
        # Split on the first '=' only; later '='s would end up in the RHS.
        lhs, rhs = line.split("=", 1)
        lhs = lhs.strip()
        rhs = rhs.strip()
        left_expr = LeftExpr(lhs)
        right_expr = self.parse_right_expr(rhs)
        self.code.append((left_expr, right_expr))

    def parse_primitive(self, token: str):
        """Return a RightExpr for a variable or integer literal, else None."""
        if token in VARIABLES:
            return RightExpr("variable", [token])
        try:
            # Base 0 accepts 0x../0o../0b.. prefixes as well as plain decimal.
            value = int(token, 0)
            return RightExpr("literal", [value])
        except ValueError:
            pass
        return None

    def parse_right_expr(self, expr: str) -> RightExpr:
        """Parse a full right-hand side: a primitive or one binary operation.

        NOTE(review): operators are tried in ARITHMETIC_OPERATORS order and
        the expression splits on the first occurrence, so operands that
        themselves contain an operator character (e.g. a leading '-') are
        rejected — confirm this restriction is intended.
        """
        primitive = self.parse_primitive(expr)
        if primitive is not None:
            return primitive
        for op in ARITHMETIC_OPERATORS:
            if op in expr:
                left, right = expr.split(op, 1)
                left = left.strip()
                right = right.strip()
                left_primitive = self.parse_primitive(left)
                right_primitive = self.parse_primitive(right)
                if left_primitive is None or right_primitive is None:
                    raise ValueError()
                return RightExpr("arithmetic", [op, left_primitive, right_primitive])
        else:
            # for/else: no operator matched anywhere in the expression.
            raise ValueError()

    def validate(self):
        """Validate every parsed statement; raises ValueError on a malformed AST."""
        for left, right in self.code:
            if not isinstance(left, LeftExpr) or not isinstance(right, RightExpr):
                raise ValueError()
            left.validate()
            right.validate()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/VuwCTF/2025/misc/not_turing_complete/interpreter.py | ctfs/VuwCTF/2025/misc/not_turing_complete/interpreter.py | # interpreter.py
from parser import VARIABLES, ARITHMETIC_OPERATORS, LeftExpr, RightExpr
class Interpreter:
    """Executes a parsed program: a list of (LeftExpr, RightExpr) pairs.

    State is a flat mapping of the registers declared in VARIABLES, all
    starting at zero.
    """

    # Integer semantics for each operator symbol; "/" is floor division.
    _OPERATIONS = {
        "+": lambda a, b: a + b,
        "-": lambda a, b: a - b,
        "*": lambda a, b: a * b,
        "/": lambda a, b: a // b,
        "^": lambda a, b: a ^ b,
        "&": lambda a, b: a & b,
        "|": lambda a, b: a | b,
    }

    def __init__(self, code: list):
        self.code = code
        self.variables = dict.fromkeys(VARIABLES, 0)

    def initialize(self, a_value: int = 0, b_value: int = 0, c_value: int = 0):
        """Seed the three registers before a run."""
        self.variables['a'] = a_value
        self.variables['b'] = b_value
        self.variables['c'] = c_value

    def interpret(self):
        """Run every statement in order, assigning each RHS to its LHS variable."""
        for target, expression in self.code:
            self.variables[target.var_name] = self.evaluate_right_expr(expression)

    def evaluate_right_expr(self, expr: RightExpr) -> int:
        """Recursively evaluate *expr* to an int; ValueError on unknown nodes."""
        if expr.type == "literal":
            return expr.data[0]
        if expr.type == "variable":
            return self.variables[expr.data[0]]
        if expr.type == "arithmetic":
            op, left_node, right_node = expr.data
            # Evaluate operands first (matches original evaluation order),
            # then dispatch on the operator symbol.
            left_value = self.evaluate_right_expr(left_node)
            right_value = self.evaluate_right_expr(right_node)
            if op not in self._OPERATIONS:
                raise ValueError()
            return self._OPERATIONS[op](left_value, right_value)
        raise ValueError()
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/VuwCTF/2025/misc/not_turing_complete/server.py | ctfs/VuwCTF/2025/misc/not_turing_complete/server.py | from parser import Parser
from interpreter import Interpreter
import sys
import os
import secrets
import xxhash
FLAG = os.getenv('FLAG', 'VuwCTF{ntc_test_flag}')
BANNER = """
.-----------------. .----------------. .----------------.
| .--------------. || .--------------. || .--------------. |
| | ____ _____ | || | _________ | || | ______ | |
| ||_ \\|_ _| | || | | _ _ | | || | .' ___ | | |
| | | \\ | | | || | |_/ | | \\_| | || | / .' \\_| | |
| | | |\\ \\| | | || | | | | || | | | | |
| | _| |_\\ |_ | || | _| |_ | || | \\ `.___.'\\ | |
| ||_____|\\____| | || | |_____| | || | `._____.' | |
| | | || | | || | | |
| '--------------' || '--------------' || '--------------' |
'----------------' '----------------' '----------------'
"""
print(BANNER)
print("Welcome to the Not Turing-Complete Interpreter!")
print("Here at VuW, we're proud to offer state-of-the-art")
print("virtual machine capabilities for your code.")
print()
print("To celebrate the launch of our new product, we're offering")
print("a flag to anyone who can solve this programming challenge")
print("using our programming language.")
print()
print("In order to streamline the programming experience,")
print("we've simplified our language to include only the following features:")
print("- Three integer variables: a, b, c")
print("- Advanced integer arithmetic operations: +, -, *, /, ^, &, |")
print("- No control flow (if, loops, etc.)")
print("- No function calls or recursion")
print("- No additional memory or I/O")
print("VuW is proud to be pushing the bounds of computer science")
print("by offering Deterministic Finite Automaton-based computation.")
print()
print("Enter your lines of code, ending with an 'EOF' line:")
# Read the player's program from stdin until an "EOF" sentinel line.
parser = Parser()
while True:
    try:
        line = input()
        if line.strip() == "EOF":
            break
        parser.parse_line(line)
    # Any parse error (or stdin closing early) aborts without leaking details.
    except Exception as e:
        print("Error accepting code")
        sys.exit(1)
NUM_TRIALS = 10
print(f"Running {NUM_TRIALS} trials to validate your code...")
interpreter = Interpreter(parser.code)
def run_trial():
    """One validation round against a fresh random 256-bit input.

    Seeds register ``a`` with the input bytes (little-endian), runs the
    submitted program, and checks that ``a`` ends up equal to the
    big-endian integer value of the xxh32 digest of the same bytes.
    Any runtime error counts as a failed trial.
    """
    try:
        seed = secrets.token_bytes(32)  # 256-bit seed!
        interpreter.initialize(a_value=int.from_bytes(seed, byteorder='little'))
        interpreter.interpret()
        wanted = int.from_bytes(xxhash.xxh32(seed).digest(), byteorder='big')
        return interpreter.variables['a'] == wanted
    except Exception:
        return False
# All trials must pass. The for/else sets `accepted` only when the loop
# finishes without hitting the break (i.e. no trial failed).
accepted = False
for trial in range(NUM_TRIALS):
    if not run_trial():
        break
else:
    accepted = True
if accepted:
    print("Code accepted")
    print(f"Flag: {FLAG}")
else:
    print("Code rejected")
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/VuwCTF/2025/crypto/totally_random_art/randart.py | ctfs/VuwCTF/2025/crypto/totally_random_art/randart.py | import numpy as np
import random
# Generates random ascii art of totally-not-sensitive data!
WIDTH = 10
HEIGHT = 5
PALETTE = ".:-=+*#%@oT0w&8R"

def generate_random_art(data: bytes, width=WIDTH, height=HEIGHT) -> str:
    """Render *data* as a framed ASCII-art panel via a seeded random walk.

    The first four bytes seed the PRNG; each remaining byte contributes one
    walk segment (step count) and one stroke value painted at the segment's
    final cell. Identical input always produces identical art.
    """
    rng = random.Random(data[0:4])  # deterministic: seeded by the data itself
    payload = data[4:]

    canvas = np.zeros((height, width), dtype=int)
    canvas.fill(len(PALETTE))  # an out-of-range value marks a cell as unset

    moves = [(1, 0), (-1, 0), (0, 1), (0, -1),
             (1, 1), (1, -1), (-1, 1), (-1, -1)]
    pos = np.array([height // 2, width // 2])
    visited = {tuple(pos)}

    for byte in payload:
        steps, stroke = divmod(byte, len(PALETTE))
        for _ in range(steps):
            pos += np.array(rng.choice(moves))
            if tuple(pos) in visited:
                # Reroll once to improve the odds of reaching a fresh cell.
                pos += np.array(rng.choice(moves))
            visited.add(tuple(pos))
            # Keep the walker on the canvas (toroidal wrap-around).
            pos[0] %= height
            pos[1] %= width
        canvas[pos[0], pos[1]] = (canvas[pos[0], pos[1]] + stroke) % len(PALETTE)

    border = "+-" + str(len(data)).center(width, "-") + "-+"
    rows = [border]
    for row in canvas:
        rows.append(
            "| "
            + "".join(PALETTE[v] if v < len(PALETTE) else " " for v in row)
            + " |"
        )
    rows.append(border)
    return "\n".join(rows)
if __name__ == "__main__":
    import sys
    if len(sys.argv) != 3:
        print("Expected args: <input_file> <output_file>")
        sys.exit(1)
    input_file = sys.argv[1]
    output_file = sys.argv[2]
    # Read raw bytes so arbitrary (non-text) files can be rendered too.
    with open(input_file, "rb") as f:
        data = f.read()
    art = generate_random_art(data)
    print("Random art generated:")
    print(art)
    # The art is pure ASCII, so writing in text mode is safe.
    with open(output_file, "w") as f:
        f.write(art)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/VuwCTF/2025/web/fishsite/fishsite.py | ctfs/VuwCTF/2025/web/fishsite/fishsite.py | import os
import sqlite3
import flask
app = flask.Flask(__name__)
app.secret_key = os.urandom(32)
@app.route('/')
def index():
    """Public landing page with the login form."""
    return flask.render_template("index.html")
@app.post('/login')
def login():
    """Check credentials against the read-only SQLite DB and start a session.

    NOTE(review): the query is built by string concatenation from raw form
    input -- classic SQL injection. This appears to be the intended CTF
    vulnerability, so it is documented here rather than fixed.
    """
    username = flask.request.form.get('username')
    password = flask.request.form.get('password')
    db = sqlite3.connect("file:db.db?mode=ro", uri=True)
    cur = db.cursor()
    cur.execute("SELECT COUNT(*) FROM fish WHERE username = '" + username + "' AND password ='" + password +"';")
    try:
        count = cur.fetchone()[0]
        if count > 0:
            flask.session["username"] = username
            cur.close()
            db.close()
            return flask.redirect('/admarine')
        else:
            cur.close()
            db.close()
            return flask.render_template("index.html", error="Incorrect password")
    # fetchone() returning None (e.g. after an injected statement that yields
    # no row) makes the [0] subscript raise TypeError.
    except TypeError:
        cur.close()
        db.close()
        return flask.render_template("index.html", error="No user found")
@app.route('/admarine')
def admin():
    """Admin console; requires any logged-in session."""
    if 'username' not in flask.session:
        return flask.redirect('/')
    return flask.render_template("admin.html")
# Case-insensitive substring blocklist applied to /monitor queries.
DISALLOWED_WORDS = ["insert", "create", "alter", "drop", "delete", "backup", "transaction", "commit", "rollback", "replace", "update", "pragma", "attach", "load", "vacuum"]
@app.post('/monitor')
def monitor():
    """Execute a user-supplied SQL query against the read-only database.

    NOTE(review): user SQL is executed directly; the word blocklist and the
    mode=ro connection are the only guards. This looks like the intended CTF
    challenge surface, so it is documented here rather than fixed.
    """
    if 'username' not in flask.session:
        return flask.redirect('/')
    query = flask.request.form.get('query')
    for word in DISALLOWED_WORDS:
        if word in query.lower():
            return flask.redirect('/admarine')
    db = sqlite3.connect("file:db.db?mode=ro", uri=True)
    cur = db.cursor()
    try:
        cur.execute(query)
    # NOTE(review): bare except swallows every error class; kept as-is.
    except:
        cur.close()
        db.close()
        return flask.render_template('/admin.html', error="Invalid query")
    cur.close()
    db.close()
    return flask.render_template("/admin.html", error="Successful process")
if __name__ == '__main__':
    app.run(host="0.0.0.0", port=9995)
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/UNbreakableInternational/2024/crypto/krotate/chall.py | ctfs/UNbreakableInternational/2024/crypto/krotate/chall.py | from Crypto.Random import get_random_bytes
# Length of one keystream block; the plaintext is processed in chunks of
# this many bytes.
KEY_LEN = 100
# One-time random session key; every ciphertext block is XORed against an
# evolving derivative of it (see next_key / encrypt below).
key = get_random_bytes(KEY_LEN)
# 8-bit LFSR-style state for the keystream generator.
R = 0x01

def RGEN():
    """Advance the global 8-bit state ``R`` and return its new value.

    Shift left by one; when the old high bit was set, fold in the 0x71
    feedback constant. The sequence is fully deterministic from R's
    starting value.
    """
    global R
    feedback = 0x71 if R & 0x80 else 0
    R = ((R << 1) ^ feedback) & 0xFF
    return R
def xor_text(text, key):
    """XOR each byte of *text* with the *key* byte at the same index.

    *key* must be at least as long as *text* (extra key bytes are ignored).
    """
    return bytes(byte ^ key[index] for index, byte in enumerate(text))
def next_key(key):
    """Derive the next round key: XOR every key byte with a fresh RGEN output.

    RGEN is called once per key byte, left to right, so the derivation is
    deterministic given the generator's current state.
    """
    return bytes(byte ^ RGEN() for byte in key)
def encrypt(text, key):
    """Encrypt *text* in KEY_LEN-sized blocks, rotating the key per block.

    Each block is XORed with the current key; the key is then advanced via
    next_key (also after the final block, mutating the generator state).
    """
    pieces = []
    for start in range(0, len(text), KEY_LEN):
        pieces.append(xor_text(text[start:start + KEY_LEN], key))
        key = next_key(key)
    return b"".join(pieces)
# --- driver: encrypt text.txt and emit the challenge ciphertext ---
text = b""
with open("text.txt", "rb") as f:
    text = f.read()
ciphertext = encrypt(text, key)
with open("../res/ciphertext.txt", "wb") as g:
    g.write(ciphertext)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/UNbreakableInternational/2024/crypto/sat1sf1/sah652.py | ctfs/UNbreakableInternational/2024/crypto/sat1sf1/sah652.py | from functools import reduce
import sys
def KekF1601onLanes(lanes):
    """24-round Keccak-flavoured permutation over ``lanes``.

    ``lanes`` is a list of lanes, each a list of 8 ints (bytes). Each round:
    a column-parity mix, a one-byte left rotation inside every lane, a
    cascading inter-lane XOR, then an LFSR-derived round constant folded
    into lanes[0][0]. Returns a new list of lanes.
    """
    R = 1  # 8-bit LFSR state driving the per-round constant
    for _ in range(24):
        # Parity of every lane, then mix parities of lanes at offsets +4
        # and +1 (mod lane count).
        C = [reduce(lambda a, b: a ^ b, lanes[x]) for x in range(len(lanes))]
        D = [
            C[(x + 4) % len(lanes)] ^ C[(x + 1) % len(lanes)] for x in range(len(lanes))
        ]
        # NOTE: D is indexed by the *byte* position y, so only D[0..7] are
        # used and every lane is XORed with the same 8 values.
        lanes = [[lanes[x][y] ^ D[y] for y in range(8)] for x in range(len(lanes))]
        # Rotate every lane left by one byte.
        for i in range(len(lanes)):
            aux = lanes[i][0]
            for j in range(len(lanes[0]) - 1):
                lanes[i][j] = lanes[i][j + 1]
            lanes[i][-1] = aux
        # Cascading inter-lane XOR. NOTE(review): ``aux`` aliases lanes[0]
        # (no copy), and lanes[0] is updated first in the inner loop, so the
        # last lane XORs the *already updated* first lane. Deliberate or
        # not, this is the behaviour the challenge digest depends on.
        aux = lanes[0]
        for j in range(len(aux)):
            for i in range(len(lanes) - 1):
                lanes[i][j] = lanes[i][j] ^ lanes[i + 1][j]
            lanes[-1][j] = lanes[-1][j] ^ aux[j]
        # Advance the LFSR; only bit 1 of R is injected as the round constant.
        R = ((R << 1) ^ ((R >> 7) * 0x71)) % 256
        lanes[0][0] ^= R & 2
    return lanes
def KekF1601(state):
    """Split a flat byte-list state into 8-byte lanes, permute, re-flatten.

    Trailing bytes beyond a multiple of 8 are dropped, matching the
    original slicing.
    """
    lane_count = len(state) // 8
    lanes = KekF1601onLanes([state[8 * i:8 * i + 8] for i in range(lane_count)])
    return [byte for lane in lanes for byte in lane]
# Number of output bytes the hash squeezes out.
OUT_LEN = 69

def Kek(rate, state, delimitedSuffix):
    """Inject the padding markers into *state*, permute, squeeze OUT_LEN bytes.

    Mutates *state* in place at the two padding positions before permuting:
    the domain-separation suffix right after the 69-byte message block, and
    the final padding bit at the end of the rate portion.
    """
    rate_bytes = rate // 8
    block_size = 69
    state[block_size] ^= delimitedSuffix
    state[rate_bytes - 1] ^= 0x80
    return KekF1601(state)[:OUT_LEN]
def SAH3_652(state_arr):
    """Hash entry point: rate-1088 sponge with the SHA3-style 0x06 suffix."""
    rate_bits = 1088
    domain_suffix = 0x06
    return Kek(rate_bits, state_arr, domain_suffix)
# Build the 200-byte initial state: the flag bytes followed by zero padding,
# then print the challenge digest.
state0 = list(sys.argv[1].encode("utf-8"))  # Flag
state0.extend([0] * (200 - len(state0)))
out = SAH3_652(state0)
print(bytes(out).hex())
# 2033251f4b3161e4455a4c261e3f631e18653c3a6c136e30304037373e6e1f6c6f6448673e686b1e18603d10306d323f3a4b626eee636c3c3c62483592123e6d6c6c3a49ca
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/manage.py | ctfs/RACTF/2021/web/Secret_Store/src/manage.py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Point Django at this project's settings before anything imports them.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'secretstore.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/views.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/views.py | from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.shortcuts import render
from django.views.generic import CreateView
from rest_framework import viewsets, filters
from rest_framework.permissions import IsAuthenticated
from secret.models import Secret
from secret.permissions import IsSecretOwnerOrReadOnly
from secret.serializers import SecretSerializer
class SecretViewSet(viewsets.ModelViewSet):
    """CRUD API over *all* secrets for any authenticated user.

    NOTE(review): ``ordering_fields = "__all__"`` lets clients sort the
    unfiltered queryset by any column -- including the write-only ``value``
    field -- which leaks information about other users' secrets one
    comparison at a time. This looks like the intended CTF bug, so it is
    documented rather than fixed.
    """
    queryset = Secret.objects.all()
    serializer_class = SecretSerializer
    permission_classes = (IsAuthenticated & IsSecretOwnerOrReadOnly,)
    filter_backends = [filters.OrderingFilter]
    ordering_fields = "__all__"
class RegisterFormView(CreateView):
    """Self-service user registration using Django's stock UserCreationForm."""
    template_name = "registration/register.html"
    form_class = UserCreationForm
    model = User
    success_url = "/"
def home(request):
    """Render the dashboard, including the user's stored secret when present.

    Anonymous users and users without a secret get the plain template.
    """
    if request.user.is_authenticated:
        owned = Secret.objects.filter(owner=request.user)
        if owned:
            return render(request, "home.html", context={"secret": owned[0].value})
    return render(request, "home.html")
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/permissions.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/permissions.py | from rest_framework import permissions
class IsSecretOwnerOrReadOnly(permissions.BasePermission):
    """Object-level permission: owner only, and only via safe methods.

    Despite the name, this never grants write access to anyone: the check
    requires the method to be safe (GET/HEAD/OPTIONS) *and* the requester
    to own the object. Creation is list-level and never reaches this check.
    """
    def has_object_permission(self, request, view, obj):
        return request.method in permissions.SAFE_METHODS and obj.owner == request.user
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/admin.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/admin.py | from django.contrib import admin
# Register your models here.
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/models.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/models.py | from django.contrib.auth.models import User
from django.db import models
from django.db.models import CASCADE
class Secret(models.Model):
    """A single per-user secret string."""
    # The secret text itself.
    value = models.CharField(max_length=255)
    # One-to-one: each user has at most one secret; deleted with the user.
    owner = models.OneToOneField(User, on_delete=CASCADE)
    last_updated = models.DateTimeField(auto_now=True)
    created = models.DateTimeField(auto_now_add=True)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/serializers.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/serializers.py | from rest_framework import serializers
from secret.models import Secret
class SecretSerializer(serializers.ModelSerializer):
    """Serializes Secret rows; ``value`` is write-only, metadata read-only."""
    class Meta:
        model = Secret
        fields = ["id", "value", "owner", "last_updated", "created"]
        read_only_fields = ["owner", "last_updated", "created"]
        extra_kwargs = {"value": {"write_only": True}}
    def create(self, validated_data):
        """Create the caller's secret, or overwrite it if one already exists.

        Ownership is forced to the requesting user, so a POST effectively
        acts as an upsert keyed on the one-to-one owner field.
        """
        validated_data["owner"] = self.context["request"].user
        if Secret.objects.filter(owner=self.context["request"].user):
            return super(SecretSerializer, self).update(Secret.objects.get(owner=self.context['request'].user), validated_data)
        return super(SecretSerializer, self).create(validated_data)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/__init__.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/__init__.py | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false | |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/tests.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/tests.py | from django.test import TestCase
# Create your tests here.
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/apps.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/apps.py | from django.apps import AppConfig
class SecretConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'secret'
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/forms.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/forms.py | from django.forms import ModelForm
from secret.models import Secret
class SecretForm(ModelForm):
    """Form for creating/editing a Secret.

    Fixed: Meta.fields listed "secret", but the Secret model's only editable
    field is named "value" (see secret/models.py). Django raises FieldError
    when a ModelForm references an unknown field, so this form was unusable
    as written.
    """
    class Meta:
        model = Secret
        fields = ["value"]
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/urls.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/urls.py | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false | |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/migrations/0001_initial.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/migrations/0001_initial.py | # Generated by Django 3.2.6 on 2021-08-03 02:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Secret',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.CharField(max_length=255)),
('last_updated', models.DateTimeField(auto_now=True)),
('created', models.DateTimeField(auto_now_add=True)),
('owner', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secret/migrations/__init__.py | ctfs/RACTF/2021/web/Secret_Store/src/secret/migrations/__init__.py | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false | |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secretstore/asgi.py | ctfs/RACTF/2021/web/Secret_Store/src/secretstore/asgi.py | """
ASGI config for secretstore project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'secretstore.settings')
application = get_asgi_application()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secretstore/settings.py | ctfs/RACTF/2021/web/Secret_Store/src/secretstore/settings.py | """
Django settings for secretstore project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'secret.apps.SecretConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'secretstore.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [str(BASE_DIR.joinpath('templates'))],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'secretstore.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secretstore/__init__.py | ctfs/RACTF/2021/web/Secret_Store/src/secretstore/__init__.py | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false | |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secretstore/wsgi.py | ctfs/RACTF/2021/web/Secret_Store/src/secretstore/wsgi.py | """
WSGI config for secretstore project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'secretstore.settings')
application = get_wsgi_application()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Secret_Store/src/secretstore/urls.py | ctfs/RACTF/2021/web/Secret_Store/src/secretstore/urls.py | """secretstore URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.views.generic import TemplateView
from rest_framework.routers import DefaultRouter
from secret import views
router = DefaultRouter()
router.register('secret', views.SecretViewSet, basename='secret')
urlpatterns = [
path('admin/', admin.site.urls),
path('auth/register', views.RegisterFormView.as_view(), name='register'),
path('auth/', include('django.contrib.auth.urls')),
path('', views.home, name='home'),
path('api/', include(router.urls))
]
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/manage.py | ctfs/RACTF/2021/web/Emojibook/manage.py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'notebook.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notebook/asgi.py | ctfs/RACTF/2021/web/Emojibook/notebook/asgi.py | """
ASGI config for notebook project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'notebook.settings')
application = get_asgi_application()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notebook/settings.py | ctfs/RACTF/2021/web/Emojibook/notebook/settings.py | """
Django settings for notebook project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-ccl^w$g=w#j_6gsiy^921q#eotiyd+o9xqni1cndz=k^a@pm+8'
SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies"
SESSION_SERIALIZER = "django.contrib.sessions.serializers.PickleSerializer"
LOGIN_REDIRECT_URL = "/"
LOGOUT_REDIRECT_URL = "/"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'notes.apps.NotesConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'notebook.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'notebook.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notebook/__init__.py | ctfs/RACTF/2021/web/Emojibook/notebook/__init__.py | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false | |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notebook/wsgi.py | ctfs/RACTF/2021/web/Emojibook/notebook/wsgi.py | """
WSGI config for notebook project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'notebook.settings')
application = get_wsgi_application()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notebook/urls.py | ctfs/RACTF/2021/web/Emojibook/notebook/urls.py | """notebook URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.views.generic import TemplateView
from notes import views
urlpatterns = [
path('admin/', admin.site.urls),
path('auth/register', views.RegisterFormView.as_view(), name="register"),
path('auth/', include('django.contrib.auth.urls')),
path('new/', views.create_note, name="new"),
path('<int:pk>/', views.view_note, name="note"),
path('', views.home, name="home")
]
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/views.py | ctfs/RACTF/2021/web/Emojibook/notes/views.py | import base64
import os
import re
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from django.views.generic import CreateView
from notes.forms import NoteCreateForm
from notes.models import Note
class RegisterFormView(CreateView):
template_name = "registration/register.html"
form_class = UserCreationForm
model = User
success_url = "/"
def home(request: HttpRequest) -> HttpResponse:
if request.user.is_authenticated:
notes = Note.objects.filter(author=request.user)
return render(request, "index.html", {"user": request.user, "notes": notes})
return render(request, "index.html", {"user": request.user})
def create_note(request: HttpRequest) -> HttpResponse:
if request.method == "POST":
form = NoteCreateForm(request.POST, user=request.user)
if form.is_valid():
instance = form.save()
return HttpResponseRedirect(redirect_to=reverse("note", kwargs={"pk": instance.pk}))
else:
form = NoteCreateForm(user=request.user)
return render(request, "create.html", {"form": form})
def view_note(request: HttpRequest, pk: int) -> HttpResponse:
note = get_object_or_404(Note, pk=pk)
text = note.body
for include in re.findall("({{.*?}})", text):
print(include)
file_name = os.path.join("emoji", re.sub("[{}]", "", include))
with open(file_name, "rb") as file:
text = text.replace(include, f"<img src=\"data:image/png;base64,{base64.b64encode(file.read()).decode('latin1')}\" width=\"25\" height=\"25\" />")
return render(request, "note.html", {"note": note, "text": text})
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/admin.py | ctfs/RACTF/2021/web/Emojibook/notes/admin.py | from django.contrib import admin
# Register your models here.
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/models.py | ctfs/RACTF/2021/web/Emojibook/notes/models.py | from django.contrib.auth.models import User
from django.db import models
from django.db.models import CASCADE
class Note(models.Model):
name = models.CharField(max_length=255)
body = models.TextField()
author = models.ForeignKey(to=User, on_delete=CASCADE)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/__init__.py | ctfs/RACTF/2021/web/Emojibook/notes/__init__.py | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false | |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/tests.py | ctfs/RACTF/2021/web/Emojibook/notes/tests.py | from django.test import TestCase
# Create your tests here.
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/apps.py | ctfs/RACTF/2021/web/Emojibook/notes/apps.py | from django.apps import AppConfig
class NotesConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'notes'
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/RACTF/2021/web/Emojibook/notes/forms.py | ctfs/RACTF/2021/web/Emojibook/notes/forms.py | import json
import re
from django import forms
from django.forms import Textarea
from notes.models import Note
class NoteCreateForm(forms.ModelForm):
class Meta:
model = Note
fields = ["name", "body"]
widgets = {
"body": Textarea(attrs={"cols": 60, "rows": 20}),
}
def __init__(self, *args, **kwargs):
self.user = kwargs.pop("user")
super(NoteCreateForm, self).__init__(*args, **kwargs)
def save(self, commit=True):
instance = super(NoteCreateForm, self).save(commit=False)
instance.author = self.user
instance.body = instance.body.replace("{{", "").replace("}}", "").replace("..", "")
with open("emoji.json") as emoji_file:
emojis = json.load(emoji_file)
for emoji in re.findall("(:[a-z_]*?:)", instance.body):
instance.body = instance.body.replace(emoji, "{{" + emojis[emoji.replace(":", "")] + ".png}}")
if commit:
instance.save()
self._save_m2m()
return instance
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.