index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
36,627
|
edose/photrix
|
refs/heads/master
|
/test/test_util.py
|
from datetime import datetime, timezone, timedelta
from math import isnan, sqrt
import pandas as pd
import pytest
from photrix import util # call: util.ra_as_degrees()
__author__ = "Eric Dose :: Bois d'Arc Observatory, Kansas"
def test_ra_as_degrees():
    """Verify util.ra_as_degrees() for plain-degree, sexagesimal, and out-of-range inputs."""
    # Plain numeric strings within [0, 360]:
    for text, degrees in [("180", 180.0), ("0", 0.0), ("360", 360.0)]:
        assert util.ra_as_degrees(text) == degrees
    # Out-of-range numeric strings yield None:
    for text in ["-0.1", "360.1"]:
        assert util.ra_as_degrees(text) is None
    # Sexagesimal strings: colon- and space-delimited forms must agree:
    for colon_form, space_form, degrees in [("12:00", "12 00", 180.0),
                                            ("12:00:00", "12 00 00", 180.0),
                                            ("0:00:00", "0 00 00", 0.0),
                                            ("11:16:30", "11 16 30", 169.125)]:
        assert util.ra_as_degrees(colon_form) == util.ra_as_degrees(space_form) == degrees
    # Sexagesimal strings beyond 24 hours yield None:
    assert util.ra_as_degrees("24:00:01") is None
    assert util.ra_as_degrees("24 00 01") is None
def test_hex_degrees_as_degrees():
    """Verify util.hex_degrees_as_degrees() over plain and sexagesimal inputs, both signs."""
    # Plain numeric strings (values beyond +/-90 are allowed here):
    for text, degrees in [("12", 12.0), ("-12", -12.0), ("0", 0.0), ("-0", 0.0),
                          ("90", 90), ("-90", -90),
                          ("90.125", 90.125), ("-90.125", -90.125)]:
        assert util.hex_degrees_as_degrees(text) == degrees
    # Sexagesimal strings: colon- and space-delimited forms must agree;
    # unlike declinations, values beyond +/-90 degrees are still converted:
    for colon_form, space_form, degrees in [("88:45", "88 45", 88.75),
                                            ("-88:45", "-88 45", -88.75),
                                            ("12:34:30", "12 34 30", 12.575),
                                            ("-12:34:30", "-12 34 30", -12.575),
                                            ("91:34:30", "91 34 30", 91.575),
                                            ("-91:34:30", "-91 34 30", -91.575),
                                            ("91:45", "91 45", 91.75),
                                            ("-91:45", "-91 45", -91.75)]:
        assert util.hex_degrees_as_degrees(colon_form) == \
            util.hex_degrees_as_degrees(space_form) == degrees
def test_dec_as_degrees():
    """Verify util.dec_as_degrees(); declinations outside [-90, +90] degrees yield None."""
    # Plain numeric strings within range:
    for text, degrees in [("12", 12.0), ("-12", -12.0), ("0", 0.0), ("-0", 0.0),
                          ("90", 90), ("-90", -90)]:
        assert util.dec_as_degrees(text) == degrees
    # Plain numeric strings out of range:
    assert util.dec_as_degrees("90.1") is None
    assert util.dec_as_degrees("-90.1") is None
    # In-range sexagesimal strings; colon- and space-delimited forms must agree:
    for colon_form, space_form, degrees in [("88:45", "88 45", 88.75),
                                            ("-88:45", "-88 45", -88.75),
                                            ("12:34:30", "12 34 30", 12.575),
                                            ("-12:34:30", "-12 34 30", -12.575)]:
        assert util.dec_as_degrees(colon_form) == util.dec_as_degrees(space_form) == degrees
    # Out-of-range sexagesimal strings (both delimiters, both signs) yield None:
    for text in ["91:34:30", "91 34 30", "-91:34:30", "-91 34 30",
                 "91:45", "91 45", "-91:45", "-91 45"]:
        assert util.dec_as_degrees(text) is None
def test_ra_as_hours():
    """Verify util.ra_as_hours(): 'HH:MM:SS.sss' string for RA in [0, 360] degrees."""
    # Exact conversions, including rounding/wrap behavior just below 360 degrees:
    cases = [(0.0, "00:00:00.000"),
             (20.0, "01:20:00.000"),
             (169.125, "11:16:30.000"),
             (180.0, "12:00:00.000"),
             (360.0, "00:00:00.000"),
             (359.99, "23:59:57.600"),
             (359.999, "23:59:59.760"),
             (359.9999, "23:59:59.976"),
             (359.99999, "23:59:59.998"),
             (359.999999, "00:00:00.000"),    # rounds up, wraps to zero.
             (359.9999999, "00:00:00.000"),
             (359.99999999, "00:00:00.000")]
    for degrees, expected in cases:
        assert util.ra_as_hours(degrees) == expected
    # Out-of-range inputs yield None:
    for degrees in [-0.01, 360.01, -44, 654]:
        assert util.ra_as_hours(degrees) is None
def test_dec_as_hex():
    """Verify util.dec_as_hex(): signed '±DD:MM:SS.ss' string for dec in [-90, +90]."""
    cases = [(0.0, "+00:00:00.00"),
             (+90.0, "+90:00:00.00"),
             (-90.0, "-90:00:00.00"),
             (0.001, "+00:00:03.60"),
             (-0.001, "-00:00:03.60"),
             (-69.125, "-69:07:30.00"),
             (69.125, "+69:07:30.00")]
    for degrees, expected in cases:
        assert util.dec_as_hex(degrees) == expected
    # Out-of-range declinations yield None:
    for degrees in [90.001, -90.001, 255, -255]:
        assert util.dec_as_hex(degrees) is None
def test_degrees_as_hex():
    """Verify util.degrees_as_hex(): like dec_as_hex() but unbounded in magnitude,
    with an optional seconds-decimal-places argument (default 2)."""
    assert util.degrees_as_hex(0.0) == "+00:00:00.00"
    assert util.degrees_as_hex(0.0) == util.degrees_as_hex(0.0, 2)  # 2 places is the default.
    cases = [(+90.0, "+90:00:00.00"),
             (-90.0, "-90:00:00.00"),
             (0.001, "+00:00:03.60"),
             (-0.001, "-00:00:03.60"),
             (-69.125, "-69:07:30.00"),
             (69.125, "+69:07:30.00"),
             (90.001, "+90:00:03.60"),    # beyond +/-90 is valid here.
             (255, "+255:00:00.00")]
    for degrees, expected in cases:
        assert util.degrees_as_hex(degrees) == expected
    # Explicit decimal-places argument:
    assert util.degrees_as_hex(-90.001, 4) == "-90:00:03.6000"
    assert util.degrees_as_hex(-255, 6) == "-255:00:00.000000"
def test_weighted_mean():
    """Verify util.weighted_mean(): returns a 3-tuple (weighted mean, weighted
    stdev of values, stdev of the mean); raises ValueError on empty/mismatched
    inputs or non-positive weight sum; accepts lists or pandas Series
    (Series indexes are ignored -- values are used positionally)."""
    # Error case: zero-length inputs.
    with pytest.raises(ValueError) as e:
        util.weighted_mean([], [])
    # Check the message on e.value (the raised exception itself); str(e) is the
    # ExceptionInfo representation, whose format is not part of the contract.
    assert 'lengths of values & weights must be equal & non-zero' in str(e.value)
    # Error case: unequal lengths.
    with pytest.raises(ValueError) as e:
        util.weighted_mean([2, 3], [4, 5, 3])
    assert 'lengths of values & weights must be equal & non-zero' in str(e.value)
    # Error case: weights sum to zero.
    with pytest.raises(ValueError) as e:
        util.weighted_mean([2, 3, 4], [1, 4, -5])
    assert 'sum of weights must be positive' in str(e.value)
    # Normal case; a zero weight must not affect the result:
    assert util.weighted_mean([1, 3, 8], [0, 3, 9]) == (81/12, pytest.approx(3.535533),
                                                        pytest.approx(2.795085))
    assert util.weighted_mean([1, 3, 8], [0, 3, 9]) == util.weighted_mean([3, 8], [3, 9])
    # pandas Series inputs: indexes (even mismatched types) are ignored:
    value_series = pd.Series([1, 3, 8], index=[4, 2, 999])
    weights_series = pd.Series([0, 3, 9], index=['e', 'XXX', '0-&&'])
    assert util.weighted_mean(value_series, weights_series) == \
        util.weighted_mean([1, 3, 8], [0, 3, 9])
    # Symmetric values with equal weights -> mean of ~zero:
    assert util.weighted_mean([-2, -1, 0, 1, 2], [1, 1, 1, 1, 1]) == \
        (pytest.approx(0, abs=0.000001),
         pytest.approx(sqrt(2.5)), pytest.approx(sqrt(0.5)))
    # Edge case: only one nonzero weight -> zero dispersion:
    assert util.weighted_mean([1, 2, 5, 11], [0, 0, 3, 0]) == (5, 0, 0)
def test_ladder_round():
    """Verify util.ladder_round() against representative values; rounding is to
    a preferred-value 'ladder' within the input's decade, preserving sign."""
    # Positive (and zero) inputs:
    for value, expected in [(0, 0), (0.12, 0.125), (45, 50), (10, 10),
                            (100, 100), (64, 64), (99.7, 100)]:
        assert util.ladder_round(value) == expected
    # Negative inputs mirror positive ones:
    for value, expected in [(-99.7, -100), (-45, -50), (-64, -64)]:
        assert util.ladder_round(value) == expected
def test_Timespan():
    """Exercise the util.Timespan class: construction, fields, copy, equality,
    arithmetic (.delay_seconds/.expand_seconds/.intersect/.subtract),
    containment tests, str(), degenerate (zero-length and inverted) inputs,
    and the .longer() static method with its on_tie options."""
    # Set up tests.
    dt1 = datetime(2016, 9, 10, 0, 0, 0, tzinfo=timezone.utc)
    dt2 = dt1 + timedelta(hours=1.5)
    ts1 = util.Timespan(dt1, dt2)
    # Test fields:
    assert ts1.start == dt1
    assert ts1.end == dt2
    assert ts1.seconds == 1.5 * 3600
    assert ts1.midpoint == dt1 + (dt2-dt1) / 2
    # Test copy():
    tscopy = ts1.copy()
    assert tscopy.start == ts1.start
    assert tscopy.end == ts1.end
    # Test equality (both endpoints must match):
    ts1eq = util.Timespan(dt1,dt2)
    assert ts1eq == ts1
    ts1neq1 = util.Timespan(dt1, dt2+timedelta(hours=1))
    assert ts1neq1 != ts1
    ts1neq2 = util.Timespan(dt1+timedelta(hours=1), dt2)
    assert ts1neq2 != ts1
    ts1neq3 = util.Timespan(dt1+timedelta(hours=1), dt2+timedelta(hours=1))
    assert ts1neq3 != ts1
    # Test .delay_seconds() (shifts both endpoints):
    ts1delay = ts1.delay_seconds(120)
    assert ts1delay.start == ts1.start + timedelta(seconds=120)
    assert ts1delay.end == ts1.end + timedelta(seconds=120)
    # Test .expand_seconds() (moves both endpoints outward):
    ts1expand = ts1.expand_seconds(27)
    assert ts1expand.start == ts1.start - timedelta(seconds=27)
    assert ts1expand.end == ts1.end + timedelta(seconds=27)
    # Negative expansion contracts; -0.6 * length collapses to the midpoint.
    ts1contract = ts1.expand_seconds(-0.6 * ts1.seconds)
    assert ts1contract.start == ts1.midpoint
    assert ts1contract.end == ts1.midpoint
    # Test .intersect():
    ts1a = util.Timespan(dt1 + timedelta(hours=0.5), dt2 + timedelta(hours=3))
    ts1a_int1 = ts1.intersect(ts1a)
    assert ts1a_int1.start == ts1a.start
    assert ts1a_int1.end == ts1.end
    # .intersect() is commutative:
    ts1a_int2 = ts1a.intersect(ts1)
    assert ts1a_int1.start == ts1a_int2.start
    assert ts1a_int1.end == ts1a_int2.end
    # Test .subtract():
    ts1_sub = ts1.subtract(ts1.delay_seconds(+10000))  # no overlap
    assert ts1_sub == ts1
    ts1_sub = ts1.subtract(ts1.delay_seconds(-10000))  # also no overlap
    assert ts1_sub == ts1
    ts1_sub = ts1.subtract(ts1.delay_seconds(+1800))  # partial overlap of 1 hour
    assert ts1_sub.start == ts1.start
    assert ts1_sub.end == ts1.start + timedelta(seconds=+1800)
    ts1_sub = ts1.subtract(ts1.delay_seconds(-1800))  # partial overlap of 1 hour
    assert ts1_sub.start == ts1.delay_seconds(-1800).end
    assert ts1_sub.end == ts1.end
    ts1_other = util.Timespan(ts1.start+timedelta(seconds=100),
                              ts1.end+timedelta(seconds=-150))  # ts1_other within ts1 (early side).
    ts1_sub = ts1.subtract(ts1_other)
    assert ts1_sub.start == ts1_other.end
    assert ts1_sub.end == ts1.end
    ts1_other = util.Timespan(ts1.start+timedelta(seconds=160),
                              ts1.end+timedelta(seconds=-70))  # ts1_other within ts1 (late side).
    ts1_sub = ts1.subtract(ts1_other)
    assert ts1_sub.start == ts1.start
    assert ts1_sub.end == ts1_other.start
    ts1_other = util.Timespan(ts1.start+timedelta(seconds=-160),
                              ts1.end+timedelta(seconds=+7000))  # ts1 contained in ts1_other.
    # Full containment subtracts everything -> zero-length result:
    ts1_sub = ts1.subtract(ts1_other)
    assert ts1_sub.start == ts1.start
    assert ts1_sub.end == ts1.start
    assert ts1_sub.seconds == 0
    # Test .contains_time() (both endpoints are inclusive):
    assert ts1.contains_time(dt1)
    assert ts1.contains_time(dt2)
    assert ts1.contains_time(dt1 + timedelta(hours=0.5))
    assert not ts1.contains_time(dt1 - timedelta(hours=0.5))
    assert not ts1.contains_time(dt2 + timedelta(hours=0.5))
    # Test .contains_timespan():
    ts_contained = util.Timespan(dt1+timedelta(hours=0.1), dt2+timedelta(hours=-0.1))
    assert ts1.contains_timespan(ts_contained)
    assert not ts_contained.contains_timespan(ts1)
    # An identical timespan is contained in both directions:
    ts_identical = util.Timespan(dt1+timedelta(hours=0), dt2+timedelta(hours=0))
    assert ts1.contains_timespan(ts_identical)
    assert ts_identical.contains_timespan(ts1)
    ts_shift_later = util.Timespan(dt1+timedelta(hours=0.5), dt2+timedelta(hours=0.5))
    assert not ts1.contains_timespan(ts_shift_later)
    assert not ts_shift_later.contains_timespan(ts1)
    # Test str():
    s = str(ts1)
    assert s.startswith("Timespan ") and s.endswith(" = 5400 seconds.")
    # Case: input times equal -> zero-length timespan at that instant.
    dt3 = dt1
    ts2 = util.Timespan(dt1, dt3)
    assert ts2.start == dt1
    assert ts2.end == dt3
    assert ts2.seconds == 0
    assert ts2.midpoint == dt1 == dt3
    assert ts2.contains_time(dt1)
    assert ts2.contains_time(dt3)
    assert not ts2.contains_time(dt1 + timedelta(hours=0.5))
    assert not ts2.contains_time(dt1 - timedelta(hours=0.5))
    assert not ts2.contains_time(dt3 + timedelta(hours=0.5))
    # Case: input times inverted -> both endpoints snap to the later time.
    ts3 = util.Timespan(dt2, dt1)
    assert ts3.start == dt2
    assert ts3.end == dt2
    assert ts3.seconds == 0
    assert ts3.midpoint == dt2
    assert ts3.contains_time(dt2)  # but not necessarily dt1 which is ~ invalid
    assert not ts3.contains_time(dt2 + timedelta(hours=0.5))
    assert not ts3.contains_time(dt2 - timedelta(hours=0.5))
    assert not ts3.contains_time(dt1 + timedelta(hours=0.5))
    # Test .longer():
    dt1 = datetime(2016, 9, 10, 0, 0, 0, tzinfo=timezone.utc)
    dt2 = dt1 + timedelta(hours=1.5)
    dt3 = dt2 + timedelta(hours=1)
    tsa = util.Timespan(dt1, dt2)
    tsb = util.Timespan(dt1, dt3)
    # simple case: equal start, unequal end: returns longer Timespan.
    assert util.Timespan.longer(tsa, tsb) == tsb
    assert util.Timespan.longer(tsb, tsa) == tsb
    # edge case: identical inputs: returns first input.
    ts_copy = tsa.copy()
    assert util.Timespan.longer(tsa, ts_copy) == tsa
    assert util.Timespan.longer(ts_copy, tsa) == ts_copy
    # edge case: one zero-length input: returns the non-zero-length Timespan.
    ts_zero = util.Timespan(dt1, dt1)
    assert util.Timespan.longer(tsa, ts_zero) == tsa
    # edge case: both zero-length inputs: returns earlier Timespan.
    ts_zero_2 = util.Timespan(dt2, dt2)
    assert util.Timespan.longer(ts_zero, ts_zero_2) == ts_zero
    assert util.Timespan.longer(ts_zero_2, ts_zero) == ts_zero
    # case if_tie=="earlier": returns earlier Timespan.
    tsc = tsa.delay_seconds(7200)
    assert util.Timespan.longer(tsa, tsc, on_tie="earlier") == tsa
    assert util.Timespan.longer(tsc, tsa, on_tie="earlier") == tsa
    # case if_tie=="first": returns first input.
    assert util.Timespan.longer(tsa, tsc, on_tie="first") == tsa
    assert util.Timespan.longer(tsc, tsa, on_tie="first") == tsc
    # case if_tie==some other string: falls back to returning first input.
    assert util.Timespan.longer(tsa, tsc, on_tie="whatever") == tsa
    assert util.Timespan.longer(tsc, tsa, on_tie="whatever") == tsc
def test_RaDec():
    """Exercise the util.RaDec class: construction from degrees and/or
    sexagesimal strings, attribute access, equality, angular separation,
    and str()/repr() round-trip via eval()."""
    # Set up tests: mixed numeric and string constructor arguments.
    rd1 = util.RaDec(60, 70)
    rd2 = util.RaDec(70, +80)
    rd3 = util.RaDec("10:30:00", "-45:15:00")
    rd4 = util.RaDec("10:30:00", -45.25)
    rd5 = util.RaDec(157.5, "-45:15:00")
    # Test fields and __eq__().
    assert rd1.as_degrees == (60, +70)
    assert rd1.as_degrees != (60, -70)
    assert rd1.as_hex == ("04:00:00.000", "+70:00:00.00")
    assert rd3.as_hex == ("10:30:00.000", "-45:15:00.00")
    assert rd3.ra == 10.5*15  # .ra is in degrees (RA hours * 15).
    assert rd3.dec == -45.25
    assert rd1 != rd2
    assert rd3 == rd4 == rd5  # same position however constructed.
    # Test .degrees_from() and .farther_from() (angular separation in degrees).
    assert rd1.degrees_from(rd2) == pytest.approx(10.293451406994343)
    assert util.RaDec(0, 60).degrees_from(util.RaDec(180, 60)) == pytest.approx(60.0)
    assert util.RaDec(0, 0).degrees_from(util.RaDec(180, 0)) == pytest.approx(180.0)
    assert rd1.farther_from(rd2, 10)
    assert not rd1.farther_from(rd2, 11)
    assert rd1.farther_from(rd3, 130)
    assert not rd1.farther_from(rd3, 140)
    # Test __str__() and __repr__().
    assert str(rd1) == "RaDec object: 04:00:00.000 +70:00:00.00"
    assert str(rd5) == "RaDec object: 10:30:00.000 -45:15:00.00"
    assert repr(rd2) == "RaDec('04:40:00.000', '+80:00:00.00')"
    assert repr(rd5) == "RaDec('10:30:00.000', '-45:15:00.00')"
    # repr() must eval back to an equal object:
    assert eval('util.' + repr(rd1)) == rd1
    assert eval('util.' + repr(rd5)) == rd5
    assert eval('util.' + repr(rd1)) != rd5
def test_get_phase():
    """Verify util.get_phase(jd, jd_epoch, period): phase in [0, 1), periodic in jd."""
    # Each of these times lies 0.3 period past some cycle of the epoch:
    for jd in [5, -5, -335, 335]:
        assert util.get_phase(jd, 2, 10) == pytest.approx(0.3)
    # Exactly on a cycle boundary -> phase 0:
    for jd in [3352, 2, 12]:
        assert util.get_phase(jd, 2, 10) == 0
    assert util.get_phase(1.99, 2, 10) == pytest.approx(0.999)  # just before the epoch.
    assert util.get_phase(7, 2, 10) == 0.5  # half a period past the epoch.
def test_jd_from_datetime_utc():
    """Verify util.jd_from_datetime_utc() against known Julian Dates to within
    one second; a None (or absent) argument yields None."""
    one_second = 1.0 / (24.0 * 3600.0)  # tolerance of 1 second, in days (for JD)
    # J2000.0 reference epoch:
    datetime_j2000 = datetime(2000, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc)
    assert util.jd_from_datetime_utc(datetime_j2000) == pytest.approx(2451544.5, abs=one_second)
    assert util.jd_from_datetime_utc(None) is None
    assert util.jd_from_datetime_utc() is None  # default argument behaves as None.
    # Assorted modern and pre-2000 dates:
    datetime_1 = datetime(2017, 1, 9, 15, 23, 53).replace(tzinfo=timezone.utc)
    assert util.jd_from_datetime_utc(datetime_1) == pytest.approx(2457763.14158398, abs=one_second)
    datetime_2 = datetime(2020, 7, 9, 6, 23, 53).replace(tzinfo=timezone.utc)
    assert util.jd_from_datetime_utc(datetime_2) == pytest.approx(2459039.76658403, abs=one_second)
    datetime_3 = datetime(1986, 10, 11, 3, 12, 7).replace(tzinfo=timezone.utc)
    assert util.jd_from_datetime_utc(datetime_3) == pytest.approx(2446714.63341273, abs=one_second)
def test_datetime_utc_from_jd():
    """Verify util.datetime_utc_from_jd(): inverse of jd_from_datetime_utc(),
    accurate to within one second for known JD/datetime pairs."""
    # J2000.0 reference epoch:
    jd_j2000 = 2451544.5
    datetime_j2000 = datetime(2000, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc)
    assert (util.datetime_utc_from_jd(jd_j2000) - datetime_j2000).total_seconds() == \
        pytest.approx(0, abs=1.0)
    # Round-trip through jd_from_datetime_utc() (tested just above):
    datetime_now = datetime.now(timezone.utc)
    jd_now = util.jd_from_datetime_utc(datetime_now)
    assert (util.datetime_utc_from_jd(jd_now) - datetime_now).total_seconds() == \
        pytest.approx(0, abs=1.0)
    # Assorted modern and pre-2000 dates:
    datetime_1 = datetime(2017, 1, 9, 15, 23, 53).replace(tzinfo=timezone.utc)
    jd_1 = 2457763.14158398
    assert (util.datetime_utc_from_jd(jd_1) - datetime_1).total_seconds() == \
        pytest.approx(0, abs=1.0)
    datetime_2 = datetime(2020, 7, 9, 6, 23, 53).replace(tzinfo=timezone.utc)
    jd_2 = 2459039.76658403
    assert (util.datetime_utc_from_jd(jd_2) - datetime_2).total_seconds() == \
        pytest.approx(0, abs=1.0)
    datetime_3 = datetime(1986, 10, 11, 3, 12, 7).replace(tzinfo=timezone.utc)
    jd_3 = 2446714.63341273
    assert (util.datetime_utc_from_jd(jd_3) - datetime_3).total_seconds() == \
        pytest.approx(0, abs=1.0)
def test_time_hhmm():
    """Verify util.hhmm_from_datetime_utc() ('HHMM' string, rounded to the
    nearest minute) and util.az_alt_at_datetime_utc() (azimuth/altitude in
    degrees of a sky position from an earth location at a UTC time)."""
    dt = datetime(2016, 1, 1, 23, 34, 45, 454545).replace(tzinfo=timezone.utc)
    assert util.hhmm_from_datetime_utc(dt) == '2335'  # 45s rounds up to next minute.
    dt = datetime(2016, 1, 1, 23, 34, 29, 999999).replace(tzinfo=timezone.utc)
    assert util.hhmm_from_datetime_utc(dt) == '2334'  # just under 30s rounds down.
    dt = datetime(2016, 1, 1, 23, 59, 31, 454545).replace(tzinfo=timezone.utc)
    assert util.hhmm_from_datetime_utc(dt) == '0000'  # rounding wraps past midnight.
    dt = datetime(2016, 1, 31, 0, 0, 0, 0).replace(tzinfo=timezone.utc)
    assert util.hhmm_from_datetime_utc(dt) == '0000'
    dt = datetime(2016, 1, 31, 0, 0, 30, 0).replace(tzinfo=timezone.utc)  # banker's rounding.
    assert util.hhmm_from_datetime_utc(dt) == '0000'
    dt = datetime(2016, 1, 31, 0, 1, 30, 0).replace(tzinfo=timezone.utc)  # banker's rounding.
    assert util.hhmm_from_datetime_utc(dt) == '0002'
    dt = datetime(2016, 1, 31, 0, 0, 30, 1).replace(tzinfo=timezone.utc)
    assert util.hhmm_from_datetime_utc(dt) == '0001'  # just past the half-minute rounds up.
    # Test .az_alt_at_datetime_utc():
    # NOTE(review): reference az/alt values below presumably come from an
    # external ephemeris -- confirm source; tolerance is 0.5 degree.
    aldebaran = util.RaDec('4:35:55.31', '+16:30:30.249')
    mira = util.RaDec('02:19:20.804', '-2:58:43.606')
    hip_113116 = util.RaDec('22:54:25.073', '84:20:46.620')
    long, lat = '-95:53:18', '+38:55:29'  # Bois d'Arc Obs, Kansas
    dt = datetime(2016, 1, 31, 0, 9, 30, 780000).replace(tzinfo=timezone.utc)
    az, alt = util.az_alt_at_datetime_utc(long, lat, aldebaran, dt)  # in degrees
    assert az == pytest.approx(util.hex_degrees_as_degrees('118:27:06'), abs=0.5)
    assert alt== pytest.approx(util.hex_degrees_as_degrees('+53:30:20'), abs=0.5)
    az, alt = util.az_alt_at_datetime_utc(long, lat, mira, dt)  # in degrees
    assert az == pytest.approx(util.hex_degrees_as_degrees('181:40:22'), abs=0.5)
    assert alt == pytest.approx(util.hex_degrees_as_degrees('+48:09:18'), abs=0.5)
    az, alt = util.az_alt_at_datetime_utc(long, lat, hip_113116, dt)  # in degrees
    assert az == pytest.approx(util.hex_degrees_as_degrees('354:02:03'), abs=0.5)
    assert alt == pytest.approx(util.hex_degrees_as_degrees('+42:09:15'), abs=0.5)
    # Same three targets about 4.5 months later (some now below the horizon):
    dt = datetime(2016, 6, 13, 3, 17, 53, 780000).replace(tzinfo=timezone.utc)
    az, alt = util.az_alt_at_datetime_utc(long, lat, aldebaran, dt)  # in degrees
    assert az == pytest.approx(util.hex_degrees_as_degrees('323:35:35'), abs=0.5)
    assert alt== pytest.approx(util.hex_degrees_as_degrees('-26:10:56'), abs=0.5)
    az, alt = util.az_alt_at_datetime_utc(long, lat, mira, dt)  # in degrees
    assert az == pytest.approx(util.hex_degrees_as_degrees('0:42:33'), abs=0.5)
    assert alt == pytest.approx(util.hex_degrees_as_degrees('-53:58:40'), abs=0.5)
    az, alt = util.az_alt_at_datetime_utc(long, lat, hip_113116, dt)  # in degrees
    assert az == pytest.approx(util.hex_degrees_as_degrees('5:22:23'), abs=0.5)
    assert alt == pytest.approx(util.hex_degrees_as_degrees('+35:21:32'), abs=0.5)
def test_isfloat():
    """Verify util.isfloat(): True iff the input is convertible to float."""
    # Inputs that float() accepts (including bools and special values):
    for value in ['-25', '+34.4', ' 34.4 ', '-.6', '-123.E6', '-inf', 'NaN', True, False]:
        assert util.isfloat(value) is True
    # Inputs that float() rejects:
    for value in ['', '12.2.2', '-', '+', '(3)']:
        assert util.isfloat(value) is False
def test_float_or_none():
    """Verify util.float_or_none(): float(value) where convertible, else None."""
    # Convertible strings:
    for text, expected in [('-25', -25), ('+34.4', 34.4), (' 34.4 ', 34.4),
                           ('-.6', -0.6), ('-123.E6', -123000000.0)]:
        assert util.float_or_none(text) == pytest.approx(expected)
    # Special float values:
    assert util.float_or_none('-inf') == float('-inf')
    assert isnan(util.float_or_none('NaN'))
    # Bools are valid floats:
    assert util.float_or_none(True) == 1.0
    assert util.float_or_none(False) == 0.0
    # Unconvertible inputs yield None:
    for text in ['', '12.2.2', '-', '+', '(3)']:
        assert util.float_or_none(text) is None
def test_event_utcs_in_timespan():
    """Verify util.event_utcs_in_timespan(jd_epoch, period, timespan): lists
    the UTC datetimes within timespan of the periodic event defined by an
    epoch (as JD) and a period (days); returns None if epoch or period is None."""
    # Test normal case: a 9-hour night containing 1 primary & 2 secondary minima.
    ts1 = util.Timespan(datetime(2017, 2, 10, 1, 30, 0).replace(tzinfo=timezone.utc),
                        datetime(2017, 2, 10, 10, 30, 0).replace(tzinfo=timezone.utc))
    min1_utc_list = util.event_utcs_in_timespan(2455336.44, 0.3641393156, ts1)  # DE CVn 1'min
    assert len(min1_utc_list) == 1
    assert util.jd_from_datetime_utc(min1_utc_list[0]) == pytest.approx(2457794.74452, abs=0.0001)
    min2_utc_list = util.event_utcs_in_timespan(2455336.26, 0.3641393156, ts1)  # DE CVn 2'min
    assert len(min2_utc_list) == 2
    assert util.jd_from_datetime_utc(min2_utc_list[0]) == pytest.approx(2457794.56452, abs=0.0001)
    assert util.jd_from_datetime_utc(min2_utc_list[1]) == pytest.approx(2457794.92866, abs=0.0001)
    # Case: no mins in timespan (a short window between events -> empty list):
    ts1 = util.Timespan(datetime(2017, 2, 10, 7, 0, 0).replace(tzinfo=timezone.utc),
                        datetime(2017, 2, 10, 8, 30, 0).replace(tzinfo=timezone.utc))
    min1_utc_list = util.event_utcs_in_timespan(2455336.44, 0.3641393156, ts1)  # DE CVn 1'min
    assert len(min1_utc_list) == 0
    min2_utc_list = util.event_utcs_in_timespan(2455336.26, 0.3641393156, ts1)  # DE CVn 2'min
    assert len(min2_utc_list) == 0
    # Case: missing data (None epoch or None period) -> None, not empty list:
    assert util.event_utcs_in_timespan(None, 0.3641393156, ts1) is None
    assert util.event_utcs_in_timespan(2455336.44, None, ts1) is None
def test_mixed_model_fit_class():
    """Exercise util.MixedModelFit on synthetic data with known coefficients:
    scalar fit attributes, the fixed-effect / random-effect / observation
    dataframes, and predictions with and without the random (group) effect.
    All expected values depend on the fixed RNG seed below."""
    import numpy as np
    import pandas as pd
    # First, construct test data frame (fixed seed -> reproducible expected values):
    points = 80
    np.random.seed(1234)
    d = {'A': np.random.randn(points),
         'B': np.random.randn(points),
         'C': np.random.randn(points),
         'Ran': np.random.randint(0, 3, points),
         'Dep': 0}
    df = pd.DataFrame(d)
    # True model: intercept 17; slopes 1, 2, 4; group effect 5*(Ran-1); noise sigma 1.
    df['Dep'] = 17 + 1*df.A + 2*df.B + 4*df.C + 5*(df.Ran-1) + 1*np.random.randn(len(df))
    categories = ['X', 'Y', 'Z']
    df['Ran'] = [categories[r] for r in df['Ran']]
    df.index = df.index + 200  # non-default index; must not affect the fit.
    # Split test data into model and test blocks:
    df_model = df[0:int(3*points/4)]
    df_test = df[len(df_model):]  # (df_test not used below.)
    # Construct fit object:
    fit = util.MixedModelFit(df_model, dep_var='Dep', fixed_vars=['A', 'B', 'C'], group_var='Ran')
    # Test object and scalar attributes:
    assert isinstance(fit, util.MixedModelFit)
    assert fit.converged is True
    assert fit.nobs == len(df_model)
    assert fit.likelihood == pytest.approx(-95.7673)
    assert fit.dep_var == 'Dep'
    assert fit.fixed_vars == ['A', 'B', 'C']
    assert fit.group_var == 'Ran'
    assert fit.sigma == pytest.approx(1.030723)
    # Test fixed-effect results dataframe (estimates near the true coefficients):
    assert list(fit.df_fixed_effects.index) == ['Intercept', 'A', 'B', 'C']
    assert list(fit.df_fixed_effects['Name']) == list(fit.df_fixed_effects.index)
    assert list(fit.df_fixed_effects['Value']) == pytest.approx([16.648186, 0.946692,
                                                                 1.959923, 4.069383], abs=0.00001)
    assert list(fit.df_fixed_effects['Stdev']) == pytest.approx([2.844632, 0.142185,
                                                                 0.134386, 0.145358], abs=0.00001)
    assert list(fit.df_fixed_effects['Tvalue']) == pytest.approx([5.8525, 6.65818,
                                                                  14.58429, 27.99568], abs=0.0001)
    assert list(fit.df_fixed_effects['Pvalue'] * 10**9) == pytest.approx([4.8426, 0.027724,
                                                                          0, 0], abs=0.0001)
    # Test random-effect (group) results dataframe:
    assert list(fit.df_random_effects.index) == ['X', 'Y', 'Z']
    assert list(fit.df_random_effects['GroupName']) == list(fit.df_random_effects.index)
    assert list(fit.df_random_effects['GroupValue']) == pytest.approx([-5.164649, 0.543793,
                                                                       4.620857], abs=0.00001)
    # Test observation results dataframe (first 4 rows only):
    assert list(fit.df_observations['FittedValue'])[0:4] == pytest.approx([24.809899, 10.130408,
                                                                           19.834543, 7.758331],
                                                                          abs=0.00001)
    assert list(fit.df_observations['Residual'])[0:4] == pytest.approx([0.490179, -0.786949,
                                                                        0.58315, -1.23926],
                                                                       abs=0.00001)
    # Verify predictions on model data:
    # Case 1: INCLUDING random effects:
    predictions_with_1 = fit.predict(df_model[0:4], include_random_effect=True)
    predictions_with_2 = fit.predict(df_model[0:4])
    assert list(predictions_with_1) == list(predictions_with_2)  # verify default is inclusion.
    assert list(predictions_with_1) == pytest.approx(list(fit.df_observations['FittedValue'])[0:4])
    # Case 2: OMITTING random effects:
    predictions_without = fit.predict(df_model[0:4], include_random_effect=False)
    # With-effect prediction minus each row's group effect must equal the
    # without-effect prediction:
    random_effect_contributions = pd.Series([fit.df_random_effects.loc[group, 'GroupValue']
                                             for group in df_model.iloc[0:4]['Ran']],
                                            index=predictions_without.index)
    expected_predictions = predictions_with_1 - random_effect_contributions
    assert list(expected_predictions) == pytest.approx(list(predictions_without))
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,628
|
edose/photrix
|
refs/heads/master
|
/test/test_planning.py
|
import os
import pandas as pd
import pytest
from collections import namedtuple
import random
from photrix import planning
# from photrix import util
from photrix.fov import Fov
from photrix.user import Astronight
# from photrix.web import get_aavso_webobs_raw_table
__author__ = "Eric Dose :: Bois d'Arc Observatory, Kansas"
# Directories of test fixtures, located relative to the photrix project root
# (this file lives in <root>/test/, so root is two dirname() calls up).
PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEST_FOV_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "test", "$fovs_for_test")
TEST_DATA_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "test", "$data_for_test")
def test_make_df_fov():
    """Verify planning.make_df_fov(): builds one row per FOV in the given test
    directory, with the expected columns in order, sorted by fov_name."""
    df_fov = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    required_columns = ["fov_name", "fov", "main_target", "fov_priority",
                        "obs_style", "ra", "dec", 'period',
                        'target_type', 'max_exposure', 'radec']
    assert list(df_fov.columns) == required_columns  # in order
    required_fov_names = set(['AU Aur multi-exposure', 'AU Aur',
                              'NSV 14581', 'ST Tri', 'Std_SA100'])
    assert set(df_fov.fov_name) == required_fov_names  # not necessarily in order
    assert df_fov.shape == (len(required_fov_names), len(required_columns))
    assert df_fov['fov_name'].is_monotonic_increasing  # rows sorted by fov_name.
def test_filter_df_fov_by_obs_styles():
    """Verify planning.filter_df_fov_by_obs_styles(): keeps only rows whose
    obs_style is in the given list (or single string); empty/absent argument
    returns the input unchanged."""
    df_all = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    # Test normal case (as a list).
    df = planning.filter_df_fov_by_obs_styles(df_all, ["Standard", "LPV"])
    assert list(df.columns) == list(df_all.columns)
    assert set(df["fov_name"]) == set(["AU Aur multi-exposure", "Std_SA100", "AU Aur"])
    assert df.shape == (3, len(df_all.columns))
    # Test normal case (as a single string, not as list):
    df = planning.filter_df_fov_by_obs_styles(df_all, "LPV")
    assert list(df.columns) == list(df_all.columns)
    assert set(df["fov_name"]) == set(["AU Aur multi-exposure", "AU Aur"])
    assert df.shape == (2, len(df_all.columns))
    # Test empty obs_style list, returns input df_fov:
    df = planning.filter_df_fov_by_obs_styles(df_all, [])
    assert list(df.columns) == list(df_all.columns)
    assert set(df['fov_name']) == set(df_all['fov_name'])
    assert df.shape == (len(df_all), len(df_all.columns))
    # Test default (no action, returns input df_fov):
    df = planning.filter_df_fov_by_obs_styles(df_all)
    assert list(df.columns) == list(df_all.columns)
    assert set(df["fov_name"]) == set(df_all["fov_name"])  # returns original df
    assert df.shape == (len(df_all), len(df_all.columns))
def test_filter_df_fov_by_fov_priority():
    """Verify planning.filter_df_fov_by_fov_priority(): keeps rows with
    fov_priority >= the minimum; Standard-style FOVs are kept regardless
    unless include_std_fovs=False; absent minimum returns all rows."""
    df_all = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    # Test normal case (including standard fovs).
    min_priority = 3
    df = planning.filter_df_fov_by_fov_priority(df_all, min_priority)
    assert df.shape == (5, len(df_all.columns))
    assert list(df.columns) == list(df_all.columns)
    assert set(df["fov_name"]) == \
        {'AU Aur multi-exposure', 'AU Aur', 'NSV 14581', 'ST Tri', 'Std_SA100'}
    # Every kept row either meets the priority bar or is a Standard FOV:
    priority_ok = df["fov_priority"] >= min_priority
    is_standard_fov = (df["obs_style"].str.lower() == "Standard".lower())
    assert all(priority_ok | is_standard_fov)
    # Test exclude standard fovs.
    min_priority = 4
    df = planning.filter_df_fov_by_fov_priority(df_all, min_priority, include_std_fovs=False)
    assert df.shape == (3, len(df_all.columns))
    assert list(df.columns) == list(df_all.columns)
    assert set(df["fov_name"]) == {'AU Aur multi-exposure', 'AU Aur', 'ST Tri'}
    # Now every kept row must meet the bar AND no Standard FOVs remain:
    priority_ok = df["fov_priority"] >= min_priority
    is_standard_fov = (df["obs_style"].str.lower() == "Standard".lower())
    assert all(priority_ok)
    assert all(~ is_standard_fov)
    # Test default min_priority (which should return all fovs).
    df = planning.filter_df_fov_by_fov_priority(df_all)  # absent min_fov_priority
    assert df.shape == df_all.shape
    assert list(df.columns) == list(df_all.columns)
    assert set(df["fov_name"]) == set(df_all["fov_name"])  # returns original df
def test_filter_df_fov_available():
    """Verify planning.complete_df_fov_an(): augments df_fov with per-night
    availability for a given astronight/site -- moon distance ('moon_deg') and
    observable time ('seconds') -- and optionally removes unobservable rows.
    Expected values use ~1 degree / ~60 second tolerances."""
    df_all = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    # Test normal case, no moon avoidance (min_moon_degrees=0); no removals,
    # so all five FOVs are returned:
    df = planning.complete_df_fov_an(df_all, an_string='20160919', site_name='DSW',
                                     min_moon_degrees=0, remove_zero_an_priority=False,
                                     remove_unobservables=False)  # no removes, so test all FOVs
    assert list(df['fov_name']) == \
        ['NSV 14581', 'ST Tri', 'Std_SA100', 'AU Aur', 'AU Aur multi-exposure']
    moon_deg_expected = [74, 24.5, 88, 45, 45]
    seconds_expected = [37415, 26957, 0, 20989, 20989]
    assert list(df['moon_deg']) == pytest.approx(moon_deg_expected, abs=1)
    assert list(df['seconds']) == pytest.approx(seconds_expected, abs=60)
    # Test normal case, wide moon avoidance: FOVs within 70 deg of the moon
    # get zero available seconds:
    df = planning.complete_df_fov_an(df_all, an_string='20160919', site_name='DSW',
                                     min_moon_degrees=70, remove_zero_an_priority=False,
                                     remove_unobservables=False)
    assert list(df['fov_name']) == \
        ['ST Tri', 'AU Aur', 'AU Aur multi-exposure', 'NSV 14581', 'Std_SA100']
    moon_deg_expected = [24.5, 45, 45, 74, 88]
    seconds_expected = [0, 0, 0, 37415, 0]
    assert list(df['moon_deg']) == pytest.approx(moon_deg_expected, abs=1)
    assert list(df['seconds']) == pytest.approx(seconds_expected, abs=60)
    # Test case: 6 months after previous case (for opposite fov availabilities):
    df_all = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    df = planning.complete_df_fov_an(df_all, an_string='20170319', site_name='DSW',
                                     min_moon_degrees=65, remove_zero_an_priority=False,
                                     remove_unobservables=False)
    assert list(df['fov_name']) == \
        ['ST Tri', 'AU Aur', 'AU Aur multi-exposure', 'Std_SA100', 'NSV 14581']
    moon_deg_expected = [136.4, 147.5, 147.5, 130, 108]
    seconds_expected = [4080, 13968, 13968, 20471, 2908]
    assert list(df['moon_deg']) == pytest.approx(moon_deg_expected, abs=1)
    assert list(df['seconds']) == pytest.approx(seconds_expected, abs=60)
    # Test case: near-new moon (moon 3% phase, no factor at all whatever targets' sky position):
    df_all = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    df = planning.complete_df_fov_an(df_all, an_string='20161226', site_name='DSW',
                                     min_moon_degrees=120, remove_zero_an_priority=False,
                                     remove_unobservables=False)
    assert list(df['fov_name']) == \
        ['NSV 14581', 'ST Tri', 'AU Aur', 'AU Aur multi-exposure', 'Std_SA100']
    moon_deg_expected = [109, 146, 148, 148, 118]
    seconds_expected = [23005, 28004, 37892, 37892, 25350]
    assert list(df['moon_deg']) == pytest.approx(moon_deg_expected, abs=1)
    assert list(df['seconds']) == pytest.approx(seconds_expected, abs=60)
    # Test case: moon very near at least one FOV:
    df_all = planning.make_df_fov(fov_directory=TEST_FOV_DIRECTORY)
    df = planning.complete_df_fov_an(df_all, an_string='20170113', site_name='DSW',
                                     min_moon_degrees=70, remove_zero_an_priority=False,
                                     remove_unobservables=False)
    assert list(df['fov_name']) == \
        ['AU Aur', 'AU Aur multi-exposure', 'NSV 14581', 'ST Tri', 'Std_SA100']
    moon_deg_expected = [65, 65, 83, 90, 16]
    seconds_expected = [3422, 3422, 17920, 22922, 0]
    assert list(df['moon_deg']) == pytest.approx(moon_deg_expected, abs=1)
    assert list(df['seconds']) == pytest.approx(seconds_expected, abs=60)
    # Test removing unobservables: only the FOV with nonzero seconds survives:
    df = planning.complete_df_fov_an(df_all, an_string='20160919', site_name='DSW',
                                     min_moon_degrees=70, remove_zero_an_priority=False,
                                     remove_unobservables=True)  # NB: remove-unobs. == True here.
    assert list(df['fov_name']) == ['NSV 14581']  # from wide moon avoidance case, above.
def test_reorder_actions():
    """Verify that planning.reorder_actions() restores randomly shuffled plans to their
    canonical action order, keeping the observation-type actions (autofocus/fov/Stare/Burn/
    Comment etc.) in their original relative order."""
    Plan = namedtuple('Plan', ['plan_id', 'action_list'])
    Action = namedtuple('Action', ['action_type', 'action_data'])
    action_list_1_ordered = [Action(action_type='Plan', action_data=3),
                             Action(action_type='Chill', action_data=2),
                             Action(action_type='Waituntil', action_data='x'),
                             Action(action_type='Quitat', action_data=14.3),
                             Action(action_type='AFINTERVAL', action_data=90),
                             Action(action_type='autofocus', action_data=None),
                             Action(action_type='fov', action_data='AU Aur'),
                             Action(action_type='Stare', action_data='ST Tri'),
                             Action(action_type='Burn', action_data='Burner'),
                             Action(action_type='Comment', action_data='this is a comment'),
                             Action(action_type='Chain', action_data='AN_B.txt')]
    plan_1_ordered = Plan(plan_id='111', action_list=action_list_1_ordered)
    action_list_2_ordered = [Action(action_type='Plan', action_data=3),
                             Action(action_type='Quitat', action_data=14.3),
                             Action(action_type='AFINTERVAL', action_data=90),
                             Action(action_type='Comment', action_data='this is a comment'),
                             Action(action_type='fov', action_data='AU Aur'),
                             Action(action_type='flats', action_data=None),
                             Action(action_type='SHUTDOWN', action_data=None)]
    plan_2_ordered = Plan(plan_id='22222', action_list=action_list_2_ordered)
    plan_list_ordered = [plan_1_ordered, plan_2_ordered]
    n_trials = 20
    for i_trial in range(n_trials):
        # Can't use random.sample directly: keep all fov, stare, and burn in original order.
        indices_ordered = [0, 1, 2, 3, 4, 'observations', 10]
        indices_disordered = random.sample(indices_ordered, len(indices_ordered))
        index_obs = indices_disordered.index('observations')
        indices_disordered[index_obs:index_obs+1] = [5, 6, 7, 8, 9]
        list_1_disordered = [action_list_1_ordered[i] for i in indices_disordered]
        indices_ordered = [0, 1, 2, 'observations', 6]
        indices_disordered = random.sample(indices_ordered, len(indices_ordered))
        index_obs = indices_disordered.index('observations')
        indices_disordered[index_obs:index_obs+1] = [3, 4, 5]
        list_2_disordered = [action_list_2_ordered[i] for i in indices_disordered]
        plan_list_disordered = [Plan(plan_id=plan_list_ordered[0].plan_id,
                                     action_list=list_1_disordered),
                                Plan(plan_id=plan_list_ordered[1].plan_id,
                                     action_list=list_2_disordered)]
        plan_list_reordered = planning.reorder_actions(plan_list_disordered)
        assert plan_list_reordered[0].plan_id == plan_list_ordered[0].plan_id
        # Bug fix: compare against the ORIGINAL ordered lists. The previous assertions
        # compared the reordered lists to themselves and thus could never fail.
        assert plan_list_reordered[0].action_list == plan_list_ordered[0].action_list
        assert plan_list_reordered[1].plan_id == plan_list_ordered[1].plan_id
        assert plan_list_reordered[1].action_list == plan_list_ordered[1].action_list
        # print('loop ' + str(i_trial) + ' ok.')
def test_class_aavso_webobs():
    """Exercise class AavsoWebobs alone; assumes photrix.web.get_aavso_webobs_raw_table()
    already works. Reads a cached .csv dataframe rather than hitting the web now."""
    csv_fullpath = os.path.join(TEST_DATA_DIRECTORY, "ST_Tri_192.csv")
    df_cached = pd.read_csv(csv_fullpath, index_col=0)
    webobs = planning.AavsoWebobs(dataframe=df_cached)
    assert len(webobs.table) == len(df_cached)
    assert webobs.star_id == df_cached['target_name'].iloc[0]
def test_extract_ra_dec():
    """Exercise planning.extract_ra_dec() on colon-delimited and h/m/s-style coordinates."""
    colon_form = 'ARFARF 12:34:56 -31:44:52'
    assert planning.extract_ra_dec(colon_form) == ('ARFARF', '12:34:56', '-31:44:52')
    assert planning.extract_ra_dec(colon_form) != ('ARFARF', '12:34:56', '-31:44:32')
    assert planning.extract_ra_dec('ARFARF 12:34:56.23 -31:44:52.042') == \
        ('ARFARF', '12:34:56.23', '-31:44:52.042')
    # Sexagesimal h/m/s with degree marks should be normalized to colon form:
    assert planning.extract_ra_dec('MEOW 06h 49m 40.531s +63° 00\' 06.920\" ') == \
        ('MEOW', '06:49:40.531', '+63:00:06.920')
    # Missing seconds field in RA -> not parseable:
    assert planning.extract_ra_dec('INVALID 06h 49m +63° 00\' 06.920\" ') is None
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,629
|
edose/photrix
|
refs/heads/master
|
/photrix/__init__.py
|
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,630
|
edose/photrix
|
refs/heads/master
|
/photrix/image.py
|
import os
import re
from datetime import datetime, timezone, timedelta
from math import floor, ceil, cos, sin, pi, sqrt, log
import numpy as np
import pandas as pd
from scipy.stats import trim_mean
import astropy.io.fits
from dateutil.parser import parse
from photrix.util import RaDec, ra_as_degrees, dec_as_degrees
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
TOP_DIRECTORY = 'C:/Astro/Images/Borea Photrix'
# Raw string: '\.' is an invalid escape sequence in a normal string literal
# (DeprecationWarning, and SyntaxWarning in newer Pythons); the runtime regex
# value is unchanged. Matches 'name.ext' where ext is 'f' + 2-3 letters.
FITS_REGEX_PATTERN = r'^(.+)\.(f[A-Za-z]{2,3})$'
FITS_EXTENSIONS = ['fts', 'fit', 'fits']  # allowed filename extensions
ISO_8601_FORMAT = '%Y-%m-%dT%H:%M:%S'
# ISO_8601_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
FWHM_PER_SIGMA = 2.0 * sqrt(2.0 * log(2))  # ~2.3548; converts gaussian sigma to FWHM.
SUBIMAGE_MARGIN = 1.5  # subimage pixels around outer annulus, for safety
RADIANS_PER_DEGREE = pi / 180.0
# R_DISC altered 10 -> 9 Aug 16 2019 for new L-500 mount.
# R_DISC and other radii increased to ca. mp2021 trial values, using nominal FWHM = 6.5 pixels.
R_DISC = 11  # for aperture photometry, likely to be adaptive (per image) later.
R_INNER = 18  # "
R_OUTER = 26  # "
class Image:
    """
    Holds an astronomical image and apertures for photometric processing.
    Contains a FITS object, but doesn't know of its implementation and doesn't change it.
    """
    def __init__(self, fits_object):
        """
        Main constructor when FITS object is already available.
        :param fits_object: an object of the FITS class (this module).
        """
        self.fits = fits_object
        self.top_directory = fits_object.top_directory
        self.rel_directory = fits_object.rel_directory
        self.plate_solution = fits_object.plate_solution
        self.image = self.fits.image
        # Convention in this module is (x, y) axis order (per FITS.image_xy), not FITS order.
        self.xsize = self.image.shape[0]
        self.ysize = self.image.shape[1]
        self.apertures = dict()  # initially empty dictionary of Aperture objects
        self.df_punches = pd.DataFrame()  # empty until add_punches() is called.

    @classmethod
    def from_fits_path(cls, top_directory=TOP_DIRECTORY, rel_directory=None, filename=None):
        """
        Alternate constructor that starts by fetching the FITS object via its given filename.
        :return: new Image object.
        """
        this_fits = FITS(top_directory, rel_directory, filename)
        return cls(this_fits)  # 'cls' (not 'Image') so that subclasses construct themselves.

    def add_aperture(self, star_id, x0, y0):
        """
        Make one aperture from position (x,y) in image, refine its sub-pixel position by
        computing its bkgd-adjusted flux centroid, and append the aperture to self.apertures
        Will replace if aperture already exists for this starID.
        :param star_id: this aperture's name, e.g., '114_1', 'ST Tri'. Unique to this Image [string]
        :param x0: initial x position of aperture center (will be refined) [float]
        :param y0: initial y position of aperture center (will be refined) [float]
        :return: [None]
        """
        if len(self.df_punches) >= 1:
            df_ap_punches = self.df_punches.loc[self.df_punches['StarID'] == star_id, :]
        else:
            df_ap_punches = None
        self.apertures[star_id] = Aperture(self, star_id, x0, y0, df_ap_punches)
        self._recenter_aperture(star_id)

    def add_punches(self, df_punches):
        """
        Add all punches to this Image's dataframe of punches, then update all affected apertures.
        :param df_punches: new punches, columns=[StarID, dNorth, dEast] [pandas DataFrame]
        :return: [None]
        """
        # pd.concat() replaces DataFrame.append(), which was removed in pandas 2.0.
        # Simple concatenation; duplicate punch rows are acceptable here.
        self.df_punches = pd.concat([self.df_punches, df_punches])
        if len(self.df_punches) >= 1:
            ap_names_affected = set(df_punches['StarID'])
        else:
            ap_names_affected = []
        # Replace all affected apertures (incl. updating centroid and results):
        for ap_name in ap_names_affected:
            if ap_name in self.apertures:
                ap_previous = self.apertures[ap_name]
                self.add_aperture(ap_name, ap_previous.xcenter, ap_previous.ycenter)  # replace it.
            else:
                print('>>>>> Warning: df_punch StarID \'' + ap_name +
                      '\' is not a valid aperture name in ' + self.fits.filename + '.')

    def results_from_aperture(self, star_id):
        """
        Return tuple of best position, fluxes etc for this aperture.
        :param star_id: which aperture [string]
        :return: Series of results [indexed pandas Series of floats]
        """
        ap = self.apertures[star_id]
        return pd.Series({'r_disc': ap.r_disc,
                          'r_inner': ap.r_inner,
                          'r_outer': ap.r_outer,
                          'n_disc_pixels': ap.n_disc_pixels,
                          'n_annulus_pixels': ap.n_annulus_pixels,
                          'annulus_flux': ap.annulus_flux,
                          'annulus_flux_sigma': ap.annulus_flux_sigma,
                          'net_flux': ap.net_flux,
                          'net_flux_sigma': ap.net_flux_sigma,
                          'x_centroid': ap.x_centroid,
                          'y_centroid': ap.y_centroid,
                          'fwhm': ap.fwhm,
                          'x1024': ap.x1024,
                          'y1024': ap.y1024,
                          'vignette': ap.vignette,
                          'sky_bias': ap.sky_bias,
                          'max_adu': ap.max_adu})

    def _recenter_aperture(self, ap_name, max_cycles=2, pixels_convergence=0.05):
        """
        For one Aperture object, reset center to previously calculated centroid, update entry in
        Image's dict of Aperture objects.
        :param ap_name: name of the aperture to refine [string]
        :param max_cycles: max number of recentering cycles [int]
        :param pixels_convergence: movement by fewer pixels than this stops the refinement. [float]
        :return: [None]
        """
        for i_cycle in range(max_cycles):
            ap = self.apertures[ap_name]
            x_previous, y_previous = ap.xcenter, ap.ycenter
            x_new, y_new = ap.x_centroid, ap.y_centroid
            distance_to_move = sqrt((x_new - x_previous) ** 2 + (y_new - y_previous) ** 2)
            if distance_to_move <= pixels_convergence:
                break
            # Here, the movement distance warrants a new Aperture object:
            self.apertures[ap_name] = ap.yield_recentered()

    def _recenter_all_apertures(self):
        # Refine every aperture's center in turn.
        for ap_name in self.apertures.keys():
            self._recenter_aperture(ap_name)
class Aperture:
    """
    Used directly only by class Image. Contains everything about one aperture:
    disc/annulus masks, sky (annulus) background, net flux and its uncertainty,
    flux centroid, FWHM estimate, and related per-aperture statistics.
    """
    def __init__(self, image_obj, star_id, x0, y0, df_punches=None):
        """
        :param image_obj: Image to which this Aperture applies [Image class object]
        :param star_id: name of this aperture [string]
        :param x0: initial x center, in pixels [float]
        :param y0: initial y center, in pixels [float]
        :param df_punches: one row for each punch columns=[StarID, dNorth, dEast].
            Only rows with StarID matching star_id will be applied. [pandas DataFrame]
        """
        self.image_obj = image_obj  # reference to Image class object for this aperture.
        self.image = image_obj.image  # reference to (x,y) array holding the image data.
        self.star_id = star_id
        self.xcenter = float(x0)
        self.ycenter = float(y0)
        self.df_punches = None  # default if no punch lines available for this aperture.
        if df_punches is not None:
            if len(df_punches) >= 1:
                # Keep only the punch rows that apply to this star's aperture:
                self.df_punches = df_punches.loc[df_punches['StarID'] == star_id, :]
        self.r_disc = R_DISC
        self.r_inner = R_INNER
        self.r_outer = R_OUTER
        # Aperture evaluation fields, with default (no-flux) values:
        self.n_disc_pixels, self.n_annulus_pixels = 0, 0
        self.net_flux = 0.0
        self.net_flux_sigma = 0.0
        self.annulus_flux = 0.0
        self.annulus_flux_sigma = 0.0
        self.sn = 0.0
        self.x_centroid = self.xcenter
        self.y_centroid = self.ycenter
        self.fwhm = 0.0
        self.sky_bias = 0.0
        self.max_adu = 0.0
        # Compute needed boundaries of subimage around this aperture:
        image_xsize, image_ysize = self.image.shape
        test_radius = self.r_outer + SUBIMAGE_MARGIN
        xlow = int(floor(self.xcenter - test_radius))
        xhigh = int(ceil(self.xcenter + test_radius))
        ylow = int(floor(self.ycenter - test_radius))
        yhigh = int(ceil(self.ycenter + test_radius))
        # Compute whether needed subimage will fall entirely within image, or not:
        subimage_within_image = (xlow >= 0) & (xhigh <= image_xsize - 1) & \
                                (ylow >= 0) & (yhigh <= image_ysize - 1)
        # Compute values only if subimage entirely contained in current image:
        if subimage_within_image:
            self.subimage = self.image[xlow:xhigh + 1, ylow:yhigh + 1].copy()
            # Construct mask arrays to represent disc and annulus (both same shape as subimage):
            nx = xhigh - xlow + 1  # number of columns in subimage.
            ny = yhigh - ylow + 1  # number of rows in subimage.
            self.ygrid, self.xgrid = np.meshgrid(ylow + np.arange(ny), xlow + np.arange(nx))
            dx = self.xgrid - self.xcenter
            dy = self.ygrid - self.ycenter
            dist2 = dx**2 + dy**2
            # sign() then clip() gives a mask of 1.0 inside the radius, 0.0 outside:
            self.disc_mask = np.clip(np.sign(self.r_disc**2 - dist2), 0.0, 1.0)
            inside_outer_edge = np.clip(np.sign(self.r_outer**2 - dist2), 0.0, 1.0)
            outside_inner_edge = np.clip(np.sign(dist2 - self.r_inner**2), 0.0, 1.0)
            self.annulus_mask = inside_outer_edge * outside_inner_edge
            # Apply punches:
            if df_punches is not None:
                if len(df_punches) >= 1:
                    self._apply_punches(image_obj.plate_solution)  # only punches for this aperture.
            # Evaluate and store several new fields:
            self.evaluate()
            # Free the large working arrays; only scalar results are retained afterwards.
            del self.subimage, self.xgrid, self.ygrid, self.disc_mask, self.annulus_mask
        # Add other fields useful to calling code:
        image_center_x = self.image.shape[0] / 2.0
        image_center_y = self.image.shape[1] / 2.0
        self.x1024 = (self.xcenter - image_center_x) / 1024.0
        self.y1024 = (self.ycenter - image_center_y) / 1024.0
        self.vignette = self.x1024**2 + self.y1024**2  # no sqrt...meant to be parabolic term

    def evaluate(self):
        """
        Compute and store several fields in this Aperture object. Put them in Aperture object.
        :return: [None]
        """
        self.n_disc_pixels = np.sum(self.disc_mask)
        self.n_annulus_pixels = np.sum(self.annulus_mask)
        self.annulus_flux = self._eval_sky_005()  # average adus / pixel, sky background
        estimated_background = self.n_disc_pixels * self.annulus_flux
        disc_values = np.ravel(self.subimage[self.disc_mask > 0])  # only values in mask.
        self.max_adu = np.max(disc_values)
        this_net_flux = np.sum(disc_values) - estimated_background
        # All of the following requires positive net flux (else all fields keep their
        # no-flux defaults; note sky_bias below divides by net_flux):
        if this_net_flux > 0:
            self.net_flux = this_net_flux
            gain = 1.57  # TODO: this should come from Instrument object.
            annulus_values = np.ravel(self.subimage[self.annulus_mask > 0])
            self.annulus_flux_sigma = np.std(annulus_values)
            # net_flux_sigma equation after APT paper, PASP 124, 737 (2012), but pi/2 in 3rd term
            # set to 1 as pi/2 seems hard to justify, and as 1 gives S/N closer to VPhot's values.
            self.net_flux_sigma = sqrt((self.net_flux / gain) +
                                       (self.n_disc_pixels * self.annulus_flux_sigma**2) +
                                       1.0 * ((self.n_disc_pixels*self.annulus_flux_sigma)**2 /
                                              self.n_annulus_pixels))
            self.sn = self.net_flux / self.net_flux_sigma
            # Compute centroid (x,y) of net flux:
            net_flux_grid = self.disc_mask * (self.subimage - self.annulus_flux)
            normalizor = np.sum(net_flux_grid)
            if (self.x_centroid is not None) and (self.y_centroid is not None):
                self.xcenter = self.x_centroid  # new subimage center
                self.ycenter = self.y_centroid  # "
            self.x_centroid = np.sum(net_flux_grid * self.xgrid) / normalizor
            self.y_centroid = np.sum(net_flux_grid * self.ygrid) / normalizor
            # Other evaluation results:
            self.fwhm = self._eval_fwhm()
            sky_flux_bias = self.n_disc_pixels * self.annulus_flux_sigma
            self.sky_bias = abs(-2.5 * (sky_flux_bias / self.net_flux) / log(10.0))

    def yield_recentered(self):
        # Return a NEW Aperture object centered on this one's most recent flux centroid.
        x_new, y_new = self.x_centroid, self.y_centroid
        return Aperture(self.image_obj, self.star_id, x_new, y_new, self.df_punches)

    def _apply_punches(self, plate_solution):
        """
        Apply punches to (remove appropriate pixels from) this Aperture's annulus mask.
        :param plate_solution: WCS terms (CD matrix etc.) for this image [pandas Series]
        :return: [None]
        """
        dnorth_dx = 3600.0 * plate_solution['CD2_1']  # in arcseconds northward /pixel
        dnorth_dy = 3600.0 * plate_solution['CD2_2']  # "
        deast_dx = 3600.0 * plate_solution['CD1_1']  # in arcseconds eastward (not RA) /pixel
        deast_dy = 3600.0 * plate_solution['CD1_2']  # "
        ann_mask_new = self.annulus_mask.copy()  # to begin.
        for dnorth, deast in zip(self.df_punches['dNorth'], self.df_punches['dEast']):
            # Solve the 2x2 linear system mapping sky offsets (N, E) to pixel offsets (dx, dy):
            coefficients = np.array([[dnorth_dx, dnorth_dy], [deast_dx, deast_dy]])
            dep_vars = np.array([dnorth, deast])
            solution = np.linalg.solve(coefficients, dep_vars)
            dx_punch, dy_punch = solution[0], solution[1]
            x_punch = self.xcenter + dx_punch
            y_punch = self.ycenter + dy_punch
            x_dist = self.xgrid - x_punch  # x distance from center
            y_dist = self.ygrid - y_punch  # y distance from center
            dist2 = x_dist**2 + y_dist**2
            punch_mask = np.clip(np.sign(dist2 - self.r_disc**2), 0.0, 1.0)
            ann_mask_new = ann_mask_new * punch_mask  # do the punch (pixels within punch set to 0).
        self.annulus_mask = ann_mask_new

    def _eval_sky_005(self):
        """
        Winning sky-background measurement strategy of 2015 tournament of strategies.
        Insensitive to cosmic rays and background stars in or near the annulus.
        :return: robust estimate of sky background in adu/pixel [float]
        """
        slice_list = self._make_sky_slices(n_slices=12, method='trimmed_mean')
        sky_adu = trim_mean(slice_list, proportiontocut=0.3)  # robust center of the slice means.
        return sky_adu

    def _make_sky_slices(self, n_slices=12, method='trimmed_mean'):
        # Split the annulus into n_slices azimuthal slices; return one trimmed-mean value
        # per adequately populated slice.  'method' parameter is currently unused (reserved).
        radians_per_slice = (2.0 * pi) / n_slices
        # Skip slices holding fewer than half the average pixel count (e.g., heavily punched):
        min_pixels_per_slice = 0.5 * (self.n_annulus_pixels / n_slices)
        angle_grid = np.arctan2(self.ygrid-self.ycenter, self.xgrid-self.xcenter)  # -pi to +pi
        slice_list = []
        for i_slice in range(n_slices):
            # Radians delimiting this slice:
            angle_min = i_slice * radians_per_slice - pi
            angle_max = (i_slice + 1) * radians_per_slice - pi
            above_min = np.clip(np.sign(angle_grid - angle_min), 0.0, 1.0)
            below_max = np.clip(np.sign(angle_max - angle_grid), 0.0, 1.0)
            slice_mask = above_min * below_max * self.annulus_mask
            n_slice_pixels = np.sum(slice_mask)
            if n_slice_pixels >= min_pixels_per_slice:
                slice_values = np.ravel(self.subimage[slice_mask > 0])  # only values in mask.
                slice_mean = trim_mean(slice_values, 0.4)
                slice_list.append(slice_mean)
        return slice_list

    def _eval_fwhm(self):
        # TODO: Probably need a better FWHM algorithm.
        """
        Returns estimate of Full Width at Half-Maximum from mean dist2 (=2*sigma^2) of net flux.
        This algorithm may be replaced later:
            overestimates FWHM compared to MaxIm, PinPoint, and sometimes to even visual inspection.
        :return: estimate of FWHM in pixels.
        """
        dx = self.xgrid - self.x_centroid
        dy = self.ygrid - self.y_centroid
        dist2 = dx ** 2 + dy ** 2
        net_flux_xy = (self.disc_mask * (self.subimage - self.annulus_flux))
        # max() guards against a (noise-driven) negative flux-weighted mean distance:
        mean_dist2 = max(0.0, np.sum(net_flux_xy * dist2) / np.sum(net_flux_xy))
        sigma = sqrt(mean_dist2 / 2.0)
        # this math is verified 20170723, but yields larger FWHM than does MaxIm.
        return FWHM_PER_SIGMA * sigma
class FITS:
    """
    Holds data from a FITS file. Immutable. Used mostly by an Image object (class Image).
    Isolates details of FITS implementation from calling code.
    Always check .is_valid after construction before using any other attribute.
    """
    def __init__(self, top_directory, rel_directory, filename):
        """
        :param top_directory: top directory of the FITS file tree [string]
        :param rel_directory: subdirectory relative to top_directory [string]
        :param filename: FITS filename, with or without its extension [string]
        """
        # If filename has FITS extension already, use it:
        actual_fits_fullpath = None
        test_fullpath = os.path.join(top_directory, rel_directory, filename)
        if os.path.exists(test_fullpath):
            actual_fits_fullpath = test_fullpath
        # If no file with FITS extension, try to find a matching FITS filename (w/extension):
        if actual_fits_fullpath is None:
            for fits_ext in FITS_EXTENSIONS:
                test_fullpath = os.path.join(top_directory, rel_directory,
                                             filename + '.' + fits_ext)
                if os.path.exists(test_fullpath):
                    actual_fits_fullpath = test_fullpath
                    break
        if actual_fits_fullpath is None:
            print("Not a valid file name: '" + filename + "'")
            self.is_valid = False
            return
        self.fullpath = actual_fits_fullpath
        try:
            hdulist = astropy.io.fits.open(self.fullpath)
        except IOError:
            self.is_valid = False
            return
        self.header = hdulist[0].header
        # FITS convention = (vert/Y, horiz/X), pixel (1,1) at bottom left -- NOT USED by photrix.
        # MaxIm/Astrometrica convention = (horiz/X, vert/Y) pixel (0,0 at top left). USE THIS.
        # NB: self.image_fits, self.image_xy, and self.image are different views of the SAME array.
        #     They are meant to be read-only--changing any one of them *will* change the others.
        self.image_fits = hdulist[0].data.astype(np.float64)
        self.image_xy = np.transpose(self.image_fits)  # x and y axes as expected (not like FITS).
        self.image = self.image_xy  # alias
        hdulist.close()
        self.top_directory = top_directory
        self.rel_directory = rel_directory
        self.filename = filename
        self.all_header_keys = self.header.keys()
        self.object = self.header_value('OBJECT')
        self.is_calibrated = self._is_calibrated()
        self.focal_length = self._get_focal_length()
        self.exposure = self.header_value(['EXPTIME', 'EXPOSURE'])  # seconds
        self.temperature = self.header_value(['SET-TEMP', 'CCD-TEMP'])  # deg C
        self.utc_start = self._get_utc_start()
        self.utc_mid = self.utc_start + timedelta(seconds=self.exposure / 2.0)
        self.filter = self.header_value('FILTER')
        self.airmass = self.header_value('AIRMASS')
        self.guide_exposure = self.header_value('TRAKTIME')  # seconds
        self.fwhm = self.header_value('FWHM')  # pixels
        self.plate_solution = self._get_plate_solution()  # a pd.Series
        self.is_plate_solved = not any(self.plate_solution.isnull())
        self.ra = ra_as_degrees(self.header_value(['RA', 'OBJCTRA']))
        self.dec = dec_as_degrees(self.header_value(['DEC', 'OBJCTDEC']))
        self.is_valid = True  # if it got through all that initialization.
        # self.is_valid = all(x is not None
        #                     for x in [self.object, self.exposure, self.filter,
        #                               self.airmass, self.utc_start, self.focal_length])

    def header_value(self, key):
        """
        :param key: FITS header key [string] or list of keys to try [list of strings]
        :return: value of FITS header entry, typically [float] if possible, else [string];
            None if no given key is present in the header.
        """
        if isinstance(key, str):
            return self.header.get(key, None)
        # List of keys: return the first key's value that is present in the header.
        for k in key:
            value = self.header.get(k, None)
            if value is not None:
                return value
        return None

    def header_has_key(self, key):
        # True iff this single key is present in the FITS header.
        return key in self.header

    def xy_from_radec(self, radec):
        """
        Computes zero-based pixel x and y for a given RA and Dec sky coordinate.
        May be outside image's actual boundaries.
        Assumes flat image (no distortion, i.e., pure Tan projection).
        :param radec: sky coordinates [RaDec class object]
        :return: x and y pixel position, zero-based, in this FITS image [2-tuple of floats]
        """
        cd11 = self.plate_solution['CD1_1']  # CD matrix: degrees of RA/Dec per pixel step.
        cd12 = self.plate_solution['CD1_2']
        cd21 = self.plate_solution['CD2_1']
        cd22 = self.plate_solution['CD2_2']
        crval1 = self.plate_solution['CRVAL1']  # reference-point sky coordinates (degrees).
        crval2 = self.plate_solution['CRVAL2']
        crpix1 = self.plate_solution['CRPIX1']  # 1 at edge (FITS convention)
        crpix2 = self.plate_solution['CRPIX2']  # "
        d_ra = radec.ra - crval1
        d_dec = radec.dec - crval2
        deg_ew = d_ra * cos((pi / 180.0) * radec.dec)  # true angular E-W distance on sky.
        deg_ns = d_dec
        # Invert the 2x2 CD matrix algebraically to get pixel offsets from sky offsets:
        a = cd22 / cd12
        dx = (deg_ns - deg_ew * a) / (cd21 - cd11 * a)
        dy = (deg_ew - cd11 * dx) / cd12
        x = crpix1 + dx
        y = crpix2 + dy
        return x - 1, y - 1  # FITS image origin=(1,1), but our (MaxIm/python) convention=(0,0)

    def radec_from_xy(self, x, y):
        # Not yet implemented; intended as the inverse of xy_from_radec().
        pass
        # """
        # Computes RA and Dec for a give x and y pixel count. Assumes flat image (no distortion,
        #     i.e., pure Tan projection).
        # :param x: pixel position in x [float]
        # :param y: pixel position in y [float]
        # :return: RA and Dec [RaDec object]
        # """
        # cd11 = self.plate_solution['CD1_1']
        # cd12 = self.plate_solution['CD1_2']
        # cd21 = self.plate_solution['CD2_1']
        # cd22 = self.plate_solution['CD2_2']
        # crval1 = self.plate_solution['CRVAL1']
        # crval2 = self.plate_solution['CRVAL2']
        # crpix1 = self.plate_solution['CRPIX1']
        # crpix2 = self.plate_solution['CRPIX2']
        # # Do the calculation (inverse of self.xy_from_radec(self, radec)):
        # return RaDec(0, 0)

    def _is_calibrated(self):
        # True iff any known calibration test passes for this FITS file.
        calib_fn_list = [self._is_calibrated_by_maxim_5_6()]  # may add more fns when available.
        return any([is_c for is_c in calib_fn_list])

    def _is_calibrated_by_maxim_5_6(self):
        # MaxIm DL v5/v6 writes CALSTAT='BDF' (Bias, Dark, Flat) when fully calibrated.
        hval = self.header_value('CALSTAT')
        if hval is not None:
            if hval.strip().upper() == 'BDF':  # calib. by MaxIm DL v. 5 or 6
                return True
        return False

    def _get_focal_length(self):
        # If FL available, return it. Else, compute FL from plate solution.
        value = self.header_value('FOCALLEN')
        if value is not None:
            return value  # mm
        x_pixel = self.header_value('XPIXSZ')
        y_pixel = self.header_value('YPIXSZ')
        x_scale = self.header_value('CDELT1')
        y_scale = self.header_value('CDELT2')
        if any([val is None for val in [x_pixel, y_pixel, x_scale, y_scale]]):
            return None
        # NOTE(review): 206265/(3600*1800) presumably converts pixel size (um) and plate
        # scale (deg/pixel) to focal length in mm -- confirm units against original source.
        fl_x = x_pixel / abs(x_scale) * (206265.0 / (3600 * 1800))
        fl_y = y_pixel / abs(y_scale) * (206265.0 / (3600 * 1800))
        return (fl_x + fl_y) / 2.0

    def _get_utc_start(self):
        """Return exposure start time from DATE-OBS, as a UTC-aware datetime."""
        utc_string = self.header_value('DATE-OBS')
        # dateutil.parse.parse handles MaxIm 6.21 inconsistent format; datetime.strptime() can fail.
        utc_dt = parse(utc_string).replace(tzinfo=timezone.utc)
        return utc_dt

    def _get_plate_solution(self):
        """Return the 8 WCS plate-solution terms as a pandas Series (NaN where absent)."""
        plate_solution_index = ['CD1_1', 'CD1_2', 'CD2_1', 'CD2_2',
                                'CRVAL1', 'CRVAL2', 'CRPIX1', 'CRPIX2']
        plate_solution_values = [np.float64(self.header_value(key))
                                 for key in plate_solution_index]
        solution = pd.Series(plate_solution_values, index=plate_solution_index)
        # If CD terms are absent, and if plate solution resides in other terms
        # (e.g., in WCS from Astrometrica), then try to generate CD terms from other plate solution terms:
        if np.isnan(solution['CD1_1']):
            if self.header_value('CDELT1') is not None and self.header_value('CROTA2') is not None:
                solution['CD1_1'] = self.header_value('CDELT1') * \
                                    cos(self.header_value('CROTA2') * RADIANS_PER_DEGREE)
        if np.isnan(solution['CD1_2']):
            if self.header_value('CDELT2') is not None and self.header_value('CROTA2') is not None:
                solution['CD1_2'] = - self.header_value('CDELT2') * \
                                    sin(self.header_value('CROTA2') * RADIANS_PER_DEGREE)
        if np.isnan(solution['CD2_1']):
            if self.header_value('CDELT1') is not None and self.header_value('CROTA2') is not None:
                solution['CD2_1'] = self.header_value('CDELT1') * \
                                    sin(self.header_value('CROTA2') * RADIANS_PER_DEGREE)
        if np.isnan(solution['CD2_2']):
            if self.header_value('CDELT2') is not None and self.header_value('CROTA2') is not None:
                solution['CD2_2'] = self.header_value('CDELT2') * \
                                    cos(self.header_value('CROTA2') * RADIANS_PER_DEGREE)
        return solution
def all_fits_files(top_directory, rel_directory, validate_fits=False):
    """
    Return list of all FITS files in given directory_path.
    Previously a stub (pass); implemented per its docstring.
    :param top_directory: top directory of FITS file tree [string]
    :param rel_directory: subdirectory, relative to top_directory [string]
    :param validate_fits: If True, open FITS files and include only if valid.
        If False, include filename if it appears valid without opening the FITS file.
    :return: List of all FITS files in given directory_path [list of strings]
    """
    dir_fullpath = os.path.join(top_directory, rel_directory)
    if not os.path.isdir(dir_fullpath):
        return []  # robust to a missing or wrong directory.
    fits_pattern = re.compile(FITS_REGEX_PATTERN)  # hoisted out of the loop.
    filenames = [fn for fn in sorted(os.listdir(dir_fullpath))
                 if os.path.isfile(os.path.join(dir_fullpath, fn))
                 and fits_pattern.match(fn) is not None]
    if validate_fits:
        # Open each candidate as a FITS object; keep only those that parse as valid.
        filenames = [fn for fn in filenames
                     if FITS(top_directory, rel_directory, fn).is_valid]
    return filenames
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,631
|
edose/photrix
|
refs/heads/master
|
/photrix/web.py
|
__author__ = "Eric Dose, Albuquerque"
""" This module: web.py
Various web-page access routines.
"""
# Python core:
import os
# External packages:
import pandas as pd
import requests
from bs4 import BeautifulSoup
# Author's packages:
from .util import float_or_none
THIS_PACKAGE_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
HTTP_OK_CODE = 200 # "OK. The request has succeeded."
MAX_WEBOBS_LINES = 200 # for safety (& as dictated in any case by webobs API (as of Jan 2017)).
__________AAVSO_ACCESS______________________________________________________ = 0
def get_aavso_webobs_raw_table(star_id, num_obs=100, jd_start=None, jd_end=None):
    # simply returns a pandas dataframe containing data,
    # no parsing or cacheing. If star ID not in webobs, this returns a dataframe with no rows.
    """
    Downloads observations from AAVSO's webobs for ONE star (not fov), returns pandas dataframe.
    If star not in AAVSO's webobs site, return a dataframe with no rows.
    Columns: target_name, date_string, filter, observer, jd, mag, error.
    :param star_id: the STAR id (not the fov's name).
    :param num_obs: number of observations to get.
    :param jd_start: optional Julian date.
    :param jd_end: optional JD.
    :return: simple pandas dataframe containing data for 1 star, 1 row per observation downloaded.
    """
    star_safe_name = star_id.replace("+", "%2B").replace(" ", "+")  # URL-encode the star name.
    num_obs = min(num_obs, MAX_WEBOBS_LINES)  # clamp to the webobs API maximum.
    url = "https://www.aavso.org/apps/webobs/results/?star=" + star_safe_name + \
          "&num_results=" + str(num_obs) + \
          "&obs_types=vis+ccd"
    if jd_start is not None:
        url += "&start=" + str(jd_start)
    if jd_end is not None:
        url += "&end=" + str(jd_end)
    # TODO: Try to use requests Session objects for performance.
    # print('get_aavso_webobs_raw_table() >' + url + '<')
    r = requests.get(url)
    obs_list = []
    if r.status_code == HTTP_OK_CODE:
        soup = BeautifulSoup(r.text, 'html.parser')
        obs_lines = soup.find_all('tr', class_='obs')  # NB: "class_" not "class" (reserved).
        for line in obs_lines:
            cells = line.find_all('td')
            cell_strings = [cell.text for cell in cells]
            obs_list.append(cell_strings)
    # Build the dataframe even if the request failed (obs_list then empty -> zero rows):
    df = pd.DataFrame(obs_list, columns=['X0', 'target_name', 'jd_str', 'date_string',
                                         'mag_str', 'error_str',
                                         'filter', 'observer', 'X8'])
    # Convert numeric-string columns to floats (None where unparseable):
    df = df.assign(jd=[float_or_none(xx) for xx in df['jd_str']],
                   mag=[float_or_none(xx) for xx in df['mag_str']],
                   error=[float_or_none(xx) for xx in df['error_str']])
    df = df.drop(['X0', 'X8', 'jd_str', 'mag_str', 'error_str'], axis=1)
    return df
def get_aavso_vsp_chart(chart_id=None):
    """
    Gets AAVSO VSP chart as a JSON text string.
    :param chart_id: AAVSO chart identifier, or None.
    :return: chart JSON text, or empty string when no chart_id is given
        or the HTTP request did not succeed. [string]
    """
    if chart_id is None:
        return ""
    url = "https://www.aavso.org/apps/vsp/api/chart/" + chart_id.strip() + "/?format=json"
    # print('get_aavso_vsp_chart() >' + url + '<')
    response = requests.get(url)
    return response.text if response.status_code == HTTP_OK_CODE else ""
def go(starname):
    """Open the AAVSO light-curve generator page for the given star in the default browser."""
    import webbrowser
    safe_name = starname.replace("+", "%2B").replace(" ", "+")  # URL-encode the star name.
    url = ('http://www.aavso.org/cgi-bin/lcg.pl?name=' + safe_name +
           '&auid=' + safe_name +
           '&lastdays=200&v=on&iband=on&r=on&visual=on&grid=on&width=900&height=750')
    print('go() >' + url + '<')
    webbrowser.open(url)
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,632
|
edose/photrix
|
refs/heads/master
|
/test/test_image.py
|
import os
import numpy as np
import pandas as pd
from datetime import datetime, timezone, timedelta
import pytest
from photrix import image
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEST_TOP_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "test")
def test_class_fits():
    """Exercise image.FITS: invalid files, header/keyword access, timing, plate solution,
    pixel access, extension discovery, and uncalibrated/unsolved files.
    Relies on fixture FITS files in test/$data_for_test/."""
    test_rel_directory = '$data_for_test'
    # Test failure on missing file:
    fits = image.FITS(TEST_TOP_DIRECTORY, rel_directory=test_rel_directory, filename='$no_file.txt')
    assert fits.is_valid is False
    # Test exception handling for non-FITS file format:
    fits = image.FITS(TEST_TOP_DIRECTORY, rel_directory=test_rel_directory, filename='dummy.txt')
    assert fits.is_valid is False
    # Open FITS file with known extension:
    given_filename = 'CE Aur-0001-V.fts'
    fits = image.FITS(TEST_TOP_DIRECTORY, rel_directory=test_rel_directory,
                      filename=given_filename)
    assert fits.is_valid
    assert fits.fullpath == os.path.join(TEST_TOP_DIRECTORY, test_rel_directory, given_filename)
    assert fits.header_has_key('NAXIS')
    assert not fits.header_has_key('XXX')
    assert fits.object == 'CE Aur'
    assert fits.is_calibrated
    assert fits.is_plate_solved
    assert fits.focal_length == pytest.approx(2702, abs=1)
    assert fits.exposure == pytest.approx(587, abs=1)
    assert fits.temperature == pytest.approx(-35, abs=0.1)
    # utc_start and utc_mid should agree (within 1 s) with the header start + half exposure:
    target_start_utc = datetime(2017, 4, 24, 4, 0, 31).replace(tzinfo=timezone.utc)
    diff_seconds = (fits.utc_start - target_start_utc).total_seconds()
    assert abs(diff_seconds) < 1
    target_mid_utc = target_start_utc + timedelta(seconds=fits.exposure / 2.0)
    diff_seconds = (fits.utc_mid - target_mid_utc).total_seconds()
    assert abs(diff_seconds) < 1
    assert fits.filter == 'V'
    assert fits.airmass == pytest.approx(1.5263, abs=0.0001)
    assert fits.guide_exposure == pytest.approx(1.4, abs=0.001)
    assert fits.fwhm == pytest.approx(5.01, abs=0.01)
    # WCS plate-solution terms, as read from the header:
    assert fits.plate_solution['CD1_1'] == pytest.approx(-1.92303985969E-006)
    assert fits.plate_solution['CD2_1'] == pytest.approx(1.90588522664E-004)
    assert fits.plate_solution['CRVAL1'] == pytest.approx(1.03834010522E+002)
    assert fits.ra == pytest.approx(103.83791666666667)
    assert fits.dec == pytest.approx(46.28638888888889)
    # Test .image (== .image_xy):
    assert fits.image.shape == (3072, 2047)  # *image* (x,y), which is *array* (n_rows, n_columns)
    assert fits.image[0, 0] == 275  # upper-left corner
    assert fits.image[0, 2046] == 180  # lower-left corner
    assert fits.image[3071, 2046] == 265  # lower-right corner
    assert fits.image[3071, 0] == 285  # upper-right corner
    # Open FITS file without known FITS extension (which FITS constructor must first find):
    given_filename = 'CE Aur-0001-V'
    fits = image.FITS(TEST_TOP_DIRECTORY, rel_directory='$data_for_test',
                      filename=given_filename)
    assert fits.is_valid
    assert fits.fullpath == os.path.join(TEST_TOP_DIRECTORY, test_rel_directory,
                                         given_filename + '.fts')
    assert fits.object == 'CE Aur'
    assert fits.airmass == pytest.approx(1.5263, abs=0.0001)
    # Open FITS file with no calibration (at least not by MaxIm 5/6) and no plate solution:
    given_filename = 'AD Dra-S001-R001-C001-I.fts'
    fits = image.FITS(TEST_TOP_DIRECTORY, rel_directory='$data_for_test', filename=given_filename)
    assert fits.is_valid
    assert fits.is_calibrated is False
    assert fits.is_plate_solved is False
def test_class_image():
    """Exercise image.Image: construction from FITS, pixel access, aperture placement,
    punches (pixel exclusions offset in dNorth/dEast arcsec), and photometry results.
    Relies on fixture FITS files in test/$data_for_test/."""
    test_rel_directory = '$data_for_test'
    # Open FITS file with known extension:
    given_filename = 'CE Aur-0001-V.fts'
    fits_obj = image.FITS(TEST_TOP_DIRECTORY, rel_directory=test_rel_directory,
                          filename=given_filename)
    im = image.Image(fits_obj)
    assert im.fits.object == 'CE Aur'
    assert im.top_directory == TEST_TOP_DIRECTORY
    assert im.rel_directory == test_rel_directory
    # NOTE(review): the comma makes this 'assert im.xsize' with the comparison as the
    # assert *message* -- the shape equality is never actually checked. Confirm & fix.
    assert im.xsize, im.ysize == fits_obj.image_xy.shape  # .shape is in nrows, ncols
    # Image dimensions are x,y == *image* cols,rows, the reverse of numpy storage.
    # Images are zero based; [0,0] -> upper-left, [n, 0] is on top edge of *image* (not of storage).
    assert im.image.shape == (3072, 2047)
    assert im.image[0, 0] == 275  # upper-left corner
    assert im.image[0, 2046] == 180  # lower-left corner
    assert im.image[3071, 2046] == 265  # lower-right corner
    assert im.image[3071, 0] == 285  # upper-right corner
    # Aperture very simple case: near image center, no punches or interfering signals:
    im.add_aperture('dummy_1', 1523, 1011)  # star near image center, no punches.
    assert len(im.apertures) == 1
    this_ap = im.apertures['dummy_1']
    assert this_ap.x_centroid == pytest.approx(1524.784, abs=0.005)
    results = im.results_from_aperture('dummy_1')
    assert results['x_centroid'] == this_ap.x_centroid
    assert results['fwhm'] == pytest.approx(6.42, abs=0.02)
    # Results must expose exactly this set of photometry fields:
    assert set(results.index) == set(['r_disc', 'r_inner', 'r_outer', 'n_disc_pixels',
                                      'n_annulus_pixels', 'net_flux', 'net_flux_sigma',
                                      'annulus_flux', 'annulus_flux_sigma',
                                      'x_centroid', 'y_centroid', 'fwhm',
                                      'x1024', 'y1024', 'vignette', 'sky_bias', 'max_adu'])
    # Aperture case: near image center, two punches:
    im.add_aperture('dummy_2', 1535, 979)
    df_punches = pd.DataFrame({'StarID': 'dummy_2',
                               'dNorth': [-11.1, +9.6],
                               'dEast': [0.0, +3.4]})
    im.add_punches(df_punches)
    assert len(im.apertures) == 2
    this_ap = im.apertures['dummy_2']
    assert [this_ap.x_centroid, this_ap.y_centroid] == pytest.approx([1534.456, 978.697], abs=0.005)
    results = im.results_from_aperture('dummy_2')
    assert results['x_centroid'] == this_ap.x_centroid
    assert results['fwhm'] == pytest.approx(6.15, abs=0.02)
    # Aperture case: far from image center, one punch:
    im.add_aperture('dummy_3', 510, 483)
    df_punches = pd.DataFrame({'StarID': ['dummy_2', 'trash', 'dummy_3'],
                               'dNorth': [-11.1, -99.9, +8.9],
                               'dEast': [0.0, +99.9, 0.0]})  # verify safety of non-relevant rows.
    im.add_punches(df_punches)
    assert len(im.apertures) == 3
    this_ap = im.apertures['dummy_3']
    assert [this_ap.x_centroid, this_ap.y_centroid] == pytest.approx([505.53, 481.35], abs=0.005)
    results = im.results_from_aperture('dummy_3')
    # Spot-check every photometry field for this off-center, punched aperture:
    assert results['annulus_flux'] == pytest.approx(252.7, abs=1)
    assert results['annulus_flux_sigma'] == pytest.approx(15.8, abs=0.5)
    assert results['fwhm'] == pytest.approx(7.05, abs=0.1)
    assert results['max_adu'] == 441
    assert results['n_annulus_pixels'] == pytest.approx(448, abs=1)
    assert results['n_disc_pixels'] == pytest.approx(315, abs=1)
    assert results['net_flux'] == pytest.approx(7193, abs=1)
    assert results['net_flux_sigma'] == pytest.approx(372.4, abs=1)
    assert results['r_disc'] == 10
    assert results['r_inner'] == 15
    assert results['r_outer'] == 20
    assert results['sky_bias'] == pytest.approx(0.75, abs=0.1)
    # vignette is the squared radial distance from image center, in 1024-pixel units:
    assert results['vignette'] == pytest.approx(results['x1024']**2 + results['y1024']**2, abs=0.01)
    assert results['x1024'] == pytest.approx(-1.006, abs=0.001)
    assert results['x_centroid'] == pytest.approx(505.5, abs=0.1)
    assert results['y1024'] == pytest.approx(-0.529, abs=0.001)
    assert results['y_centroid'] == pytest.approx(481.4, abs=0.1)
    assert results['y_centroid'] == this_ap.y_centroid  # verify equivalence
def test_fits__xy_from_radec():
    """Check FITS.xy_from_radec() pixel positions near image center and at four corners,
    using the WCS terms stored in the fixture file's header."""
    from photrix.util import RaDec
    fits = image.FITS(TEST_TOP_DIRECTORY, rel_directory='$data_for_test',
                      filename='CE Aur-0001-V.fts')
    # All tests lack distortion corrections (as none available in FITS header),
    # and so in real images calculated (x,y) values at edges will not quite line up with stars.
    radec_near_center = RaDec('06:55:21.25', '+46:17:33.0')
    x, y = fits.xy_from_radec(radec_near_center)
    assert list((x, y)) == pytest.approx([1557.6, 1005.8], abs=0.25)
    radec_upper_left = RaDec('06:56:10.6', '+46:02:27.1')
    x, y = fits.xy_from_radec(radec_upper_left)
    assert list((x, y)) == pytest.approx([229.8, 270.3], abs=0.25)
    radec_upper_right = RaDec('06:56:14.3', '+46:29:11.6')
    x, y = fits.xy_from_radec(radec_upper_right)
    assert list((x, y)) == pytest.approx([2567.6, 197.2], abs=0.25)
    radec_lower_left = RaDec('06:54:26.0', '+46:02:44.9')
    x, y = fits.xy_from_radec(radec_lower_left)
    assert list((x, y)) == pytest.approx([271.8, 1857.0], abs=0.25)
    radec_lower_right = RaDec('06:54:18.0', '+46:30:02.0')
    x, y = fits.xy_from_radec(radec_lower_right)
    assert list((x, y)) == pytest.approx([2658.7, 1946.6], abs=0.25)
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,633
|
edose/photrix
|
refs/heads/master
|
/photrix/user.py
|
from math import pi
from datetime import datetime, timedelta, timezone
import os
import json
import ephem
from copy import copy
from .util import Timespan, RaDec, hex_degrees_as_degrees
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SITE_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "site")
MOON_PHASE_NO_FACTOR = 0.05
MIN_MOON_DIST = 45 # in degrees
MAX_ABS_HOUR_ANGLE = 7 # hours from meridian limiting target availability (move to Site?)
class Site:
    """
    Object: holds site info (no instrument or AN info), read from a json site file.
    Usage: site = Site("DSW")
    Attributes (string unless otherwise noted):
        .name : site's name
        .filename : json file name
        .description : site description
        .longitude, .latitude : of site, in degrees (float); West and South are negative.
        .elevation : of site, in meters (float)
        .min_altitude : in degrees (float)
        .twilight_sun_alt : in degrees (float)
        .extinction: nominal / typical extinction, per filter band [dict of floats]
        .is_valid : True if attribute values appear valid (boolean)
    """
    def __init__(self, site_name, site_directory=SITE_DIRECTORY):
        """Read site JSON file and populate attributes, applying defaults where absent.
        :param site_name: base name of the site's JSON file, e.g. 'DSW' [string].
        :param site_directory: directory holding site JSON files [string].
        """
        site_fullpath = os.path.join(site_directory, site_name + ".json")
        with open(site_fullpath) as data_file:
            data = json.load(data_file)
        self.name = data.get("name", site_name)
        self.filename = site_name + ".json"
        self.description = data.get("description", "")
        self.longitude = data.get("longitude", None)  # West longitude is negative.
        if self.longitude is not None:
            # Accept either decimal degrees or hex strings like '-105:32:45'.
            self.longitude = hex_degrees_as_degrees(str(self.longitude))
        self.latitude = data.get("latitude", None)  # South latitude is negative.
        if self.latitude is not None:
            self.latitude = hex_degrees_as_degrees(str(self.latitude))
        self.elevation = data.get("elevation", 500)  # in meters
        self.min_altitude = data.get("min_altitude", 0)
        self.twilight_sun_alt = data.get("twilight_sun_alt", -10)
        self.extinction = data.get("extinction")  # dict of floats, key = filter name
        # Valid only if both coordinates are present and within their physical ranges.
        is_valid = (self.longitude is not None) and (self.latitude is not None)
        if is_valid:
            if not (-180 <= self.longitude <= +180):
                is_valid = False
            # Bug fix: original code tested self.longitude (not self.latitude) <= +90 here,
            # so an invalid latitude > +90 was never detected.
            if not (-90 <= self.latitude <= +90):
                is_valid = False
        self.is_valid = is_valid

    def __repr__(self):
        return "site('" + self.filename + "')"

    def __str__(self):
        return self.__repr__() + " valid=" + str(self.is_valid)
class Instrument:
    """
    Object: holds instrument info (no site or AN info), read from a json instrument file.
    Usage: inst = Instrument("Borea")
    Attributes (string unless otherwise noted):
        .name :
        .filename :
        .description :
        .min_distance_full_moon : in degrees (float)
        .mount["model"] :
        .mount["slew_rate_ra"] : in deg/sec (float)
        .mount["slew_rate_dec"] : in deg/sec (float)
        .mount["sec_to_speed_ra"] : in seconds (float)
        .mount["sec_to_speed_dec"] : in seconds (float)
        .ota["model"] :
        .ota["focal_length_mm"] : in millimeters (float)
        .camera["model"] :
        .camera["pixels_x"] : (int)
        .camera["pixels_y"] : (int)
        .camera["microns_per_pixel"] : (float)
        .camera["shortest_exposure"] : in seconds (float)
        .camera["saturation_adu"] : maximum ADUs while linear (float)
        .filters[filter(string)]["reference_exposure_mag10"] : possibly several (float)
        [for transforms use either self.transform(filter, ci_type) or self.transforms(filter)]
        .filter_list: list of filters for this instrument [list of strings]
        .filter_data: complex data structure of filter data, best avoided.
        .is_valid : True if attribute values appear valid (boolean)
    """
    def __init__(self, instrument_name):
        """Read instrument JSON file from the package's 'instrument' directory and
        populate attributes, applying defaults for any missing fields.
        :param instrument_name: base name of JSON file, e.g. 'Borea' [string].
        """
        photrix_root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        instrument_fullpath = os.path.join(photrix_root_dir, "instrument",
                                           (instrument_name + ".json"))
        with open(instrument_fullpath) as data_file:
            data = json.load(data_file)
        self.name = data.get("name", instrument_name)
        self.filename = instrument_name + ".json"
        self.description = data.get("description", "")
        self.min_distance_full_moon = data.get("min_distance_full_moon", 60)  # degrees
        mount = data.get("mount")
        mount["model"] = mount.get("model", "")
        mount["slew_rate_ra"] = mount.get("slew_rate_ra", 4)
        mount["slew_rate_dec"] = mount.get("slew_rate_dec", 4)
        mount["sec_to_speed_ra"] = mount.get("sec_to_speed_ra", 1)
        # Bug fix: lookup key was misspelled 'secto_speed_dec', so any JSON value given
        # for 'sec_to_speed_dec' was silently ignored and the default (1) always used.
        mount["sec_to_speed_dec"] = mount.get("sec_to_speed_dec", 1)
        self.mount = mount
        ota = data.get("ota")
        ota["model"] = ota.get("model", "")
        ota["focal_length_mm"] = ota.get("focal_length_mm", 0)
        self.ota = ota
        camera = data.get("camera")
        camera["model"] = camera.get("model", "")
        camera["pixels_x"] = camera.get("pixels_x", 0)
        camera["pixels_y"] = camera.get("pixels_y", 0)
        camera["microns_per_pixel"] = camera.get("microns_per_pixel", 0)
        camera["shortest_exposure"] = camera.get("shortest_exposure", 0)
        camera["saturation_adu"] = camera.get("saturation_adu", 64000)
        self.camera = camera
        self.filter_data = data.get('filters')
        self.filter_list = list(self.filter_data.keys())
        # Valid only if at least one filter is defined.
        self.is_valid = len(self.filter_list) > 0

    def transforms(self, filter):
        """
        Returns all transform info for one given filter name, e.g., 'V'.
        :param filter: as 'V' [string]. (Name kept for caller compatibility,
            though it shadows the builtin.)
        :return: list of 2-tuples (CI_type, CI_value), most preferred CI_type first [list].
        """
        if filter not in self.filter_list:
            return []
        transform_list = list(self.filter_data[filter]['transform'])
        keys = [list(t.keys())[0] for t in transform_list]
        vals = [list(t.values())[0] for t in transform_list]
        # Omit entries whose value is null in the JSON (no transform defined).
        return [(k, v) for (k, v) in zip(keys, vals) if v is not None]

    def transform(self, filter, ci_type):
        """
        Returns ONE transform for one given filter and Color Index type.
        :param filter: as 'V' [string]
        :param ci_type: as 'V-I' [string]
        :return: transform value [float] if it exists for this instrument, else None.
        """
        if filter not in self.filter_list:
            return None
        for key, value in self.transforms(filter):
            if key == ci_type:
                return value
        return None

    def __repr__(self):
        return "Instrument('" + self.filename + "'), \'" + self.description + '\''

    def __str__(self):
        return self.__repr__() + " valid=" + str(self.is_valid)
class Astronight:
    # One observing night at one site: dark / no-moon timespans, mid-dark times, moon data.
    def __init__(self, an_date_string, site_name):
        """
        Object: relevant info specific to one observing night at one site.
        Usage: an = Astronight("20160102", "DSW")
        Attributes (string unless otherwise noted):
            .an_date_string : as "20161011"
            .site_name : as "DSW"
            .site : this site (Site object)
            .ts_nosun : Timespan(sunset, sunrise) for this astronight.
            .ts_dark : Timespan(twilight_dusk, twilight_dawn) for this astronight.
            .local_middark_utc : local mid-dark time, this astronight, in UTC (datetime object)
            .local_middark_jd : local mid-dark time, this astronight, in Julian date (float)
            .local_middark_lst : local mid-dark time, this astronight, LST in degrees (float)
            .moon_radec : moon location at middark (RaDec object)
            .moon_phase : moon phase at middark (float, range 0-1)
            .moon_transit : moon transit closest to middark, in UTC (datetime object)
            .ts_dark_no_moon : Timespan both dark and moonless (or just dark if moon phase < MIN)
        """
        self.an_date_string = an_date_string
        self.site_name = site_name
        self.site = Site(site_name)
        site_obs = ephem.Observer()  # for local use (within __init__()).
        site_obs.lat, site_obs.lon = str(self.site.latitude), str(self.site.longitude)
        site_obs.elevation = self.site.elevation
        # get local middark times for requested Astronight.
        an_year = int(an_date_string[0:4])
        an_month = int(an_date_string[4:6])
        an_day = int(an_date_string[6:8])
        # Approximate local midnight in UTC: shift by longitude (15 deg per hour), plus one
        # day so the astronight date names the *evening* that begins the night.
        approx_midnight_utc = \
            datetime(an_year, an_month, an_day, 0, 0, 0).replace(tzinfo=timezone.utc) + \
            timedelta(hours=-self.site.longitude / 15.0) + \
            timedelta(hours=+24)
        sun = ephem.Sun()
        site_obs.horizon = '0'  # true horizon, for sunset/sunrise.
        sunset_utc = site_obs.previous_setting(
            sun, start=approx_midnight_utc).datetime().replace(tzinfo=timezone.utc)
        sunrise_utc = site_obs.next_rising(
            sun, start=approx_midnight_utc).datetime().replace(tzinfo=timezone.utc)
        self.ts_nosun = Timespan(sunset_utc, sunrise_utc)
        # Dark period: sun below the site's twilight altitude (e.g., -10 degrees).
        site_obs.horizon = str(self.site.twilight_sun_alt)
        sun.compute(site_obs)
        twilight_dusk = site_obs.previous_setting(
            sun, start=approx_midnight_utc).datetime().replace(tzinfo=timezone.utc)
        twilight_dawn = site_obs.next_rising(
            sun, start=approx_midnight_utc).datetime().replace(tzinfo=timezone.utc)
        self.ts_dark = Timespan(twilight_dusk, twilight_dawn)
        self.local_middark_utc = self.ts_dark.midpoint
        self.local_middark_jd = ephem.julian_date(self.local_middark_utc)
        site_obs.date = self.local_middark_utc
        # sidereal_time() returns radians; convert to degrees.
        self.local_middark_lst = site_obs.sidereal_time() * 180. / pi  # in degrees
        # Prep moon, using RaDec at local mid-dark.
        moon = ephem.Moon()
        site_obs.date = self.local_middark_utc
        site_obs.horizon = "0"  # when moon is at all visible
        site_obs.epoch = '2000'
        moon.compute(site_obs)
        self.moon_radec = RaDec(str(moon.ra), str(moon.dec))
        self.moon_phase = moon.moon_phase
        # get .ts_dark_no_moon (Timespan object) for this Astronight.
        if self.moon_phase <= MOON_PHASE_NO_FACTOR:
            # Moon too thin to matter: entire dark period counts as moonless.
            self.ts_dark_no_moon = self.ts_dark
        else:
            # Subtract both candidate moon-up spans (bracketing midnight either way).
            moonrise_1 = site_obs.previous_rising(
                moon, start=approx_midnight_utc).datetime().replace(tzinfo=timezone.utc)
            moonset_1 = site_obs.next_setting(
                moon, start=moonrise_1).datetime().replace(tzinfo=timezone.utc)
            ts_moon_up_1 = Timespan(moonrise_1, moonset_1)
            moonset_2 = site_obs.next_setting(
                moon, start=approx_midnight_utc).datetime().replace(tzinfo=timezone.utc)
            moonrise_2 = site_obs.previous_rising(
                moon, start=moonset_2).datetime().replace(tzinfo=timezone.utc)
            ts_moon_up_2 = Timespan(moonrise_2, moonset_2)
            self.ts_dark_no_moon = self.ts_dark.subtract(ts_moon_up_1).subtract(ts_moon_up_2)
        moon_transit_before = site_obs.previous_transit(moon, start=self.local_middark_utc)
        moon_transit_after = site_obs.next_transit(moon, start=self.local_middark_utc)
        seconds_before = abs((moon_transit_before.datetime().replace(tzinfo=timezone.utc) -
                              self.local_middark_utc).total_seconds())
        seconds_after = abs((moon_transit_after.datetime().replace(tzinfo=timezone.utc) -
                             self.local_middark_utc).total_seconds())
        # Store the moon transit closer to middark time.
        if seconds_before < seconds_after:
            self.moon_transit = moon_transit_before.datetime().replace(tzinfo=timezone.utc)
        else:
            self.moon_transit = moon_transit_after.datetime().replace(tzinfo=timezone.utc)
        # TODO: Prep all other solar-system bodies, using their RaDecs at local mid-dark (low priority).
        # jupiter = ephem.Jupiter(obs)
        # self.jupiter_radec = RaDec(str(jupiter.ra), str(jupiter.dec))
        # saturn = ephem.Saturn(obs)
        # self.saturn_radec = RaDec(str(saturn.ra), str(saturn.dec))
        # venus = ephem.Venus(obs)
        # self.venus_radec = RaDec(str(venus.ra), str(venus.dec))

    def ts_observable(self, target_radec=RaDec('0', '+0'), min_alt=None, min_moon_dist=None):
        """
        Returns Timespan object defining when this RA,Dec may be observed during this astronight.
        Usage: ts = an.observable(util.RaDec(fov.ra, fov.dec), +28, 45)
        :param target_radec: required, a RaDec position for the object to be observed.
            NOTE(review): the default RaDec('0', '+0') is evaluated once at def time and
            shared across calls -- harmless only if RaDec is immutable; confirm.
        :param min_alt: min object altitude to observe, in degrees; default->use Site min alt.
        :param min_moon_dist: min distance from moon to observe, enforced only when moon is up,
            in degrees; default->use Site min moon distance.
            Set to 0 to ignore moon (i.e., moon makes no difference to observable times).
            Set to >=180 to ignore moon phase (i.e., moon must be down to observe at all).
            [User may want to set this value by using a Lorentzian fn, as RTML does,
            using (possibly auto-computed) distance and days-from-full-moon values.]
        :return: Timespan object of start and end times (UTC) that observing is allowed.
        """
        if min_alt is None:
            min_alt = self.site.min_altitude
        if min_moon_dist is None:
            min_moon_dist = MIN_MOON_DIST
        obs = ephem.Observer()  # for local use.
        obs.lat, obs.lon = str(self.site.latitude), str(self.site.longitude)
        obs.elevation = self.site.elevation
        obs.horizon = str(min_alt)  # rise/set computed at this altitude, not true horizon.
        obs.date = self.local_middark_utc
        target_ephem = ephem.FixedBody()  # so named to suggest restricting its use to ephem.
        target_ephem._epoch = '2000'
        target_ephem._ra, target_ephem._dec = target_radec.as_hex  # text: RA in hours, Dec in deg
        target_ephem.compute(obs)
        # Compute object-up Timespan, watching for exceptions (i.e., obj Never up or Always up).
        try:
            obj_rise_1 = obs.previous_rising(
                target_ephem, start=self.local_middark_utc).datetime().replace(tzinfo=timezone.utc)
        except ephem.NeverUpError:
            # return zero-duration Timespans.
            obj_ts_1 = Timespan(self.local_middark_utc, self.local_middark_utc)
            obj_ts_2 = obj_ts_1
        except ephem.AlwaysUpError:
            # return Timespans of astronight's entire dark period.
            obj_ts_1 = self.ts_dark
            obj_ts_2 = obj_ts_1
        else:
            # get remaining rise and set times, return the 2 candidate Timespans.
            obj_set_1 = obs.next_setting(
                target_ephem, start=obj_rise_1).datetime().replace(tzinfo=timezone.utc)
            obj_set_2 = obs.next_setting(
                target_ephem, start=self.local_middark_utc).datetime().replace(tzinfo=timezone.utc)
            obj_rise_2 = obs.previous_rising(
                target_ephem, start=obj_set_2).datetime().replace(tzinfo=timezone.utc)
            obj_ts_1 = Timespan(obj_rise_1, obj_set_1)
            obj_ts_2 = Timespan(obj_rise_2, obj_set_2)
        moon_dist_deg = RaDec(self.moon_radec.ra, self.moon_radec.dec).degrees_from(target_radec)
        # print("moon: ", RaDec(self.moon_radec.ra, self.moon_radec.dec))
        # print("target: ", RaDec(target_ephem.ra, target_ephem.dec))
        # print("moon dist deg:", moon_dist_deg, "min_moon_dist:", min_moon_dist)
        if moon_dist_deg > min_moon_dist:
            # Far enough from moon: moon may be up while observing.
            obj_avail_1 = obj_ts_1.intersect(self.ts_dark)
            obj_avail_2 = obj_ts_2.intersect(self.ts_dark)
        else:
            # Too close to moon: restrict to time that is both dark and moonless.
            obj_avail_1 = obj_ts_1.intersect(self.ts_dark_no_moon)
            obj_avail_2 = obj_ts_2.intersect(self.ts_dark_no_moon)
        # print("obj_avail_1", obj_avail_1)
        # print("obj_avail_2", obj_avail_2)
        ts_obj_avail = Timespan.longer(obj_avail_1, obj_avail_2, on_tie="earlier")
        # Limit by Hour Angle (HA; important only for object near celestial pole).
        dt_transit = self.transit(target_radec)
        timedelta_ha = timedelta(hours=MAX_ABS_HOUR_ANGLE)
        ts_ha = Timespan(start_utc=dt_transit-timedelta_ha, end_utc=dt_transit+timedelta_ha)
        ts_obj_avail = ts_obj_avail.intersect(ts_ha)
        return ts_obj_avail

    def ts_fov_observable(self, fov, min_alt=None, min_moon_dist=None):
        """ Convenience function.
        Returns Timespan object containing when FOV (or rather, its center RaDec) is observable
        during this Astronight.
        Usage: ts = an.when_FOV_observable(FOV, min_alt, min_moon_dist)
        """
        return self.ts_observable(RaDec(fov.ra, fov.dec), min_alt, min_moon_dist)

    def transit(self, target_radec):
        """
        Returns datetime object containing RaDec's transit closest to this Astronight's mid-dark.
        :param target_radec: sky location (RaDec object)
        :return: closest transit of this RA/Dec (datetime UTC)
        """
        obs = ephem.Observer()  # for local use.
        obs.lat, obs.lon = str(self.site.latitude), str(self.site.longitude)
        obs.elevation = self.site.elevation
        obs.horizon = '0'  # not needed
        obs.date = self.local_middark_utc
        target_ephem = ephem.FixedBody()  # so named to suggest restricting its use to ephem.
        target_ephem._epoch = '2000'
        target_ephem._ra, target_ephem._dec = target_radec.as_hex  # text: RA in hours, Dec in deg
        target_ephem.compute(obs)
        prev_transit = obs.previous_transit(
            target_ephem, start=self.local_middark_utc).datetime().replace(tzinfo=timezone.utc)
        next_transit = obs.next_transit(
            target_ephem, start=self.local_middark_utc).datetime().replace(tzinfo=timezone.utc)
        # Keep whichever transit is closer in time to local mid-dark.
        prev_delta = abs((prev_transit - self.local_middark_utc).total_seconds())
        next_delta = abs((next_transit - self.local_middark_utc).total_seconds())
        if prev_delta < next_delta:
            best_transit = prev_transit
        else:
            best_transit = next_transit
        return best_transit

    def datetime_utc_from_hhmm(self, hhmm_string):
        """Return the UTC datetime nearest this astronight's mid-dark whose time-of-day
        equals hhmm_string.
        :param hhmm_string: UTC time of day as 'hhmm', e.g. '0352' [string].
        :return: matching datetime closest to local mid-dark [datetime, UTC].
        """
        mid_dark = self.local_middark_utc
        hour_hhmm = int(hhmm_string[0:2])
        if hour_hhmm < 0 or hour_hhmm > 23:
            # NOTE(review): dead assignment -- looks like a leftover debugger breakpoint
            # hook; out-of-range hours are NOT actually handled. Confirm and fix.
            iii = 4
        minute_hhmm = int(hhmm_string[2:4])
        test_dt = mid_dark.replace(hour=hour_hhmm, minute=minute_hhmm, second=0, microsecond=0)
        # Shift by whole days so the result lands within ~12 hours of mid-dark.
        delta_days = round((test_dt - mid_dark).total_seconds() / (24 * 3600))  # adjust if needed.
        best_dt = test_dt - timedelta(days=delta_days)
        return best_dt

    def __repr__(self):
        return "Astronight '" + self.an_date_string + "' at site '" + self.site_name + "'."

    def acp_header_string(self):
        """ Returns an info string to include atop an ACP plan file, as:
            ; sunset-rise 0033-1145 UTC , -11 deg alt @ 0119-1055 UTC // LST = cdt + 1128.
            ; moon 15% @ (~22h04,-10) rise 0936 UTC
        Usage: s = an.acp_header_string()
        """
        site_obs = ephem.Observer()
        site_obs.lat, site_obs.lon = str(self.site.latitude), str(self.site.longitude)
        site_obs.elevation = self.site.elevation
        # sun = ephem.Sun(site_obs)
        # moon = ephem.Moon(site_obs)
        # Handle sun data:
        sunset_utc = self.ts_nosun.start
        sunset_utc_string = sunset_utc.strftime('%H%M')
        dark_start_utc_string = self.ts_dark.start.strftime('%H%M')
        site_obs.date = self.ts_dark.start
        dark_start_lst = site_obs.sidereal_time()
        # sidereal_time() is radians; convert to total minutes of LST (15 deg per hour).
        dark_start_lst_minutes = round(60.0 * ((dark_start_lst * 180.0 / pi) / 15.0))
        dark_start_lst_string = '{0:02d}'.format(int(dark_start_lst_minutes / 60)) + \
                                '{0:02d}'.format(dark_start_lst_minutes % 60)
        sunrise_utc = self.ts_nosun.end
        sunrise_utc_string = sunrise_utc.strftime('%H%M')
        dark_end_utc_string = self.ts_dark.end.strftime('%H%M')
        site_obs.date = self.ts_dark.end
        dark_end_lst = site_obs.sidereal_time()
        dark_end_lst_minutes = round(60.0 * ((dark_end_lst * 180.0 / pi) / 15.0))
        dark_end_lst_string = '{0:02d}'.format(int(dark_end_lst_minutes / 60)) + \
                              '{0:02d}'.format(dark_end_lst_minutes % 60)
        # site_obs.date = sunrise_utc
        # sunrise_lst = site_obs.sidereal_time()
        #
        # sunrise_lst_minutes = round(60.0 * ((sunrise_lst * 180.0 / pi) / 15.0))
        # sunrise_lst_string = '{0:02d}'.format(int(sunrise_lst_minutes / 60)) + \
        #                      '{0:02d}'.format(sunrise_lst_minutes % 60)
        # Handle moon data:
        moon_phase_string = '{0:d}%'.format(round(100 * self.moon_phase))
        moon_ra = round(self.moon_radec.ra / 15, 1)  # degrees -> hours of RA.
        moon_dec = round(self.moon_radec.dec)
        moon_radec_string = ('({0:.1f}h,{1:+d}' + u'\N{DEGREE SIGN}' + ')').\
            format(moon_ra, moon_dec)
        # need moon logic here [ rise vs set; also add "no factor" if phase < threshold ]
        if self.ts_dark_no_moon.seconds <= 0:
            dark_no_moon_string = 'MOON UP all night.'
        elif self.ts_dark_no_moon == self.ts_dark:
            dark_no_moon_string = 'MOON DOWN all night.'
        else:
            dark_no_moon_string = 'no moon: ' + \
                                  self.ts_dark_no_moon.start.strftime('%H%M') + '-' + \
                                  self.ts_dark_no_moon.end.strftime('%H%M') + ' UTC'
        moon_transit_string = 'transit: ' + self.moon_transit.strftime('%H%M')
        # Handle LST vs UTC: offsets rendered as 'hhmm' strings, both directions.
        lst_middark_seconds = self.local_middark_lst / 15 * 3600
        utc_middark_seconds = self.local_middark_utc.hour * 3600 + \
            self.local_middark_utc.minute * 60 + \
            self.local_middark_utc.second + \
            self.local_middark_utc.microsecond/1000000.0
        diff_seconds = (lst_middark_seconds - utc_middark_seconds) % (24 * 3600)  # to make > 0.
        diff_hour = int(diff_seconds/3600)
        diff_minute = round((diff_seconds - (diff_hour*3600)) / 60)
        lst_vs_utc_string = '{0:02d}'.format(diff_hour) + '{0:02d}'.format(diff_minute)
        diff_seconds = (24 * 3600) - diff_seconds
        diff_hour = int(diff_seconds/3600)
        diff_minute = round((diff_seconds - (diff_hour*3600)) / 60)
        utc_vs_lst_string = '{0:02d}'.format(diff_hour) + '{0:02d}'.format(diff_minute)
        # Construct ACP header string:
        header_string = '; sun --- down: ' + \
            sunset_utc_string + '-' + sunrise_utc_string + ' UTC, ' + \
            'dark(' + '{0:+2d}'.format(round(self.site.twilight_sun_alt)) + \
            u'\N{DEGREE SIGN}' + '): ' + \
            dark_start_utc_string + '-' + dark_end_utc_string + ' UTC = ' + \
            dark_start_lst_string + '-' + dark_end_lst_string + ' LST\n'
        header_string += '; moon -- ' + moon_phase_string + ' ' + moon_radec_string + \
            ' ' + dark_no_moon_string + ' ' + moon_transit_string + '\n'
        header_string += '; LST = UTC + ' + lst_vs_utc_string + 6 * ' '
        header_string += ' UTC = LST + ' + utc_vs_lst_string + ' @ middark = ' + \
            self.local_middark_utc.strftime('%H%M') + ' UTC'
        return header_string
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,634
|
edose/photrix
|
refs/heads/master
|
/photrix/process.py
|
# Python system imports:
import os
from math import floor, sqrt, log10, log
from datetime import datetime, timezone
import shutil
# External library imports:
import numpy as np
import pandas as pd
from scipy.interpolate import UnivariateSpline
import statsmodels.formula.api as smf
import matplotlib.pyplot as plt
# Internal (photrix) imports:
from .image import FITS, Image, R_DISC, Aperture
from .user import Instrument, Site
from .util import MixedModelFit, weighted_mean, jd_from_datetime_utc, RaDec
from .fov import Fov, FOV_DIRECTORY
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"

THIS_SOFTWARE_VERSION = '2.0.1'  # as of 20101210
# AN_TOP_DIRECTORY = 'C:/Astro/Images/Borea Photrix'
AN_TOP_DIRECTORY = 'C:/Astro/Borea Photrix'  # root of all Astronight directories.
DF_MASTER_FILENAME = 'df_master.csv'  # written into each AN's /Photometry subdirectory.
# Raw string: '\.' in a non-raw literal is an invalid escape sequence
# (DeprecationWarning now, SyntaxError in future Python versions).
FITS_REGEX_PATTERN = r'^(.+)\.(f[A-Za-z]{2,3})$'
MIN_FWHM = 1.0  # in pixels; SkyModel excludes observations with smaller FWHM.
AAVSO_REPORT_DELIMITER = ','
START_PROCESSING_HERE___________ = ''
#######
#
# photrix.process workflow:
# import photrix.planning as pl, photrix.process as pr
# pr.start(an_rel_directory='20170509')
# [do MaxIm DL calibration]
# pr.assess(an_rel_directory='20170509')
# pr.make_df_master(an_rel_directory='20170509', instrument_name='Borea', ask_user=False)
# V = pr.SkyModel(an_rel_directory='20170509', instrument_name='Borea', filter='V')
# R = pr.SkyModel(an_rel_directory='20170509', instrument_name='Borea', filter='R')
# I = pr.SkyModel(an_rel_directory='20170509', instrument_name='Borea', filter='I')
#    ... [while editing omit.txt until all V, R, and I models are right]
# ps = pr.PredictionSet(an_rel_directory='20170816', skymodel_list=[V,R,I])
# IF STARES exist in this AN:
# ps.stare_comps(fov='ST Tri', star_id='ST Tri', this_filter='V')
# ps.stare_plot(star_id='ST Tri')
# ps.markup_report()
# ps.aavso_report()
# --> [upload to AAVSO]
# --> [LCG review; edit FOV files if needed]
# --> [check guiding exp times for possible FOV #CENTER (RaDec) adjustment]
#
#######
# Regex pattern to match Ur FITS filenames (from ACP):
# r'^(.{3,}?)-S\d{3}-R\d{3}-C\d{3}-([a-zA-Z]{1,2}?)(_dupe-\d{1,4})?.f\w{1,2}'
# Regex pattern to match photrix FITS filenames
# r'(.{3,}?)-\d{4}-([a-zA-Z]{1,2}?).f\w{1,2}'
#######
def start(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """First step in photrix processing pipeline.
    Always starts with raw directory_path exactly as downloaded from telescope PC.
    Typical usage: pr.start(an_rel_directory='20180804')
    :param an_top_directory: top directory holding all Astronight directories [string]
    :param an_rel_directory: this Astronight's subdirectory, e.g. '20180804' [string]
    :return: None
    """
    # Copy original files to \Ur as backup (immutable):
    # (os.path.isdir() is False for nonexistent paths, so a bare exists() test suffices.)
    target_subdir = os.path.join(an_top_directory, an_rel_directory, 'Ur')
    if not os.path.exists(target_subdir):
        fits_subdir = os.path.join(an_top_directory, an_rel_directory)
        shutil.copytree(fits_subdir, target_subdir)
    else:
        print('>>>>> Could not create \'' + target_subdir + '\' as backup. (Already exists?)')
    # Move FITS to \Uncalibrated (not a full backup: some of these files may get Excluded):
    target_subdir = os.path.join(an_top_directory, an_rel_directory, 'Uncalibrated')
    if not os.path.exists(target_subdir):
        os.mkdir(target_subdir)
    else:
        print('>>>>> Could not create \'' + target_subdir + '\'. (Already exists?)')
    fits_subdir = os.path.join(an_top_directory, an_rel_directory)
    n_moved = 0
    for fits_entry in os.scandir(fits_subdir):
        if fits_entry.is_file():
            # (was shutil.move(os.path.join(fits_entry.path), ...);
            #  a single-argument os.path.join() is a no-op.)
            shutil.move(fits_entry.path, os.path.join(target_subdir, fits_entry.name))
            n_moved += 1
    # Make remaining needed subdirectories:
    needed_subdirectories = ['Calibrated', 'Exclude', 'Photometry', 'FOV']
    for subdir_name in needed_subdirectories:
        new_subdir_path = os.path.join(an_top_directory, an_rel_directory, subdir_name)
        if not os.path.exists(new_subdir_path):
            os.mkdir(new_subdir_path)
        else:
            print('>>>>> Could not create \'' + new_subdir_path + '\'. (Already exists?)')
    # Arrange any calibration (incl. autoflat) files, write advisory if present:
    autoflat_path = os.path.join(an_top_directory, an_rel_directory, 'AutoFlat')
    if os.path.isdir(autoflat_path):
        # Make calibration subdirectory if it doesn't already exist:
        calibration_path = os.path.join(an_top_directory, an_rel_directory, 'Calibration')
        if not os.path.exists(calibration_path):
            os.mkdir(calibration_path)
        else:
            print('>>>>> Could not create \'' + calibration_path + '\'. (Already exists?)')
        # Move all autoflat FITS files to /Calibration, then remove now-empty /AutoFlat:
        if os.path.isdir(calibration_path):
            for autoflat_entry in os.scandir(autoflat_path):
                if autoflat_entry.is_file():
                    shutil.move(autoflat_entry.path, calibration_path)
            os.rmdir(autoflat_path)
    # Rename FITS files to photrix convention
    # (within this rel_directory's /Uncalibrated subdirectory):
    _rename_to_photrix(an_top_directory=an_top_directory,
                       an_rel_directory=an_rel_directory)
    print('.start() has moved', str(n_moved), 'FITS files to /Uncalibrated & has renamed them.')
    print('\n >>>>> Next:')
    print(' 1. Calibrate with MaxIm now (File > Batch Save and Convert,')
    print(' from /Uncalibrated to /Calibrated.')
    print(' 2. Visually inspect all FITS, e.g., with MaxIm')
    print(' 3. Run assess().')
def assess(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None, auto_delete_src_files=True,
           warn_each_missing_fov_file=False, warn_any_missing_fov_file=False):
    """
    Rigorously assess FITS files and directory_path structure for readiness to construct df_master.
    Collect and print all warnings and summary stats. Makes no changes to data.
    May be run as many times as needed, after start() and before make_df_master().
    Typical usage: pr.assess(an_rel_directory='20180811')
    :param an_top_directory: [string]
    :param an_rel_directory: [string]
    :param auto_delete_src_files: True iff automatically delete .src (plate-solution) files [boolean]
    :param warn_each_missing_fov_file: True to give separate warning for each missing FOV file
        (e.g., variable stars), False to suppress. [boolean]
    :param warn_any_missing_fov_file: True to give warning for any missing FOV file and total count
        (e.g., variable stars), False to suppress. [boolean]
    :return: [None]
    """
    # TODO: Add checks for guide exposure time. (?)
    # TODO: Add checks for binning=(1,1), when binning fields become available in FITS objects.
    # Make DataFrame of all files (& dirs) in directory_path, add some per-file info, and sort:
    filenames, isdir, extensions = [], [], []
    fits_path = os.path.join(an_top_directory, an_rel_directory, 'Calibrated')
    for entry in os.scandir(fits_path):
        filenames.append(entry.name)
        isdir.append(entry.is_dir())
        extensions.append(os.path.splitext(entry.name)[-1].lower())  # file extensions
    # Index df by filename so per-file flags can be set with df.loc[filename, col] below:
    df = pd.DataFrame({'Filename': filenames, 'IsDir': isdir, 'Extensions': extensions},
                      index=filenames).sort_values(by=['Filename'])
    # Offer to delete any .src source files found (by-product of TheSkyX plate solutions).
    src_files = df.loc[df['Extensions'] == '.src', 'Filename']
    if len(src_files) >= 1:
        if auto_delete_src_files is False:
            answer = input(' ..... ' + str(len(src_files)) +
                           ' .src files found. Delete them? (y/n, recommend y):')
        else:
            answer = 'yes'
        # NOTE(review): [0] raises IndexError if the user enters an empty answer — TODO guard.
        if answer.strip().lower()[0] == 'y':
            for filename in src_files:
                fullpath = os.path.join(an_top_directory, an_rel_directory, "Calibrated", filename)
                os.remove(fullpath)
            if auto_delete_src_files is True:
                print(str(len(src_files)), '.src files deleted.')
            df = df.loc[~(df['Extensions'] == '.src'), :]  # remove df rows for files just deleted.
    # Directories: should be none; report and remove them from df:
    dirs = df.loc[df['IsDir'], 'Filename']
    # dirs = [dir for dir in dirs if not dir.lower() in ('exclude', 'excluded')]
    if len(dirs) >= 1:
        print('Subdirectories found within /Calibrated (please remove them, except for /Exclude):')
        for this_dir in dirs:
            print(' ' + this_dir)
        df = df.loc[~df['IsDir'], :]  # remove rows referring to directories.
    del df['IsDir']  # as all rows in df refer to files and not directories.
    # Add empty columns to df (all filled in, file by file, in the loop below):
    df['Valid'] = True
    df['PlateSolved'] = False
    df['Calibrated'] = False
    df['Object'] = ''
    df['ObjectMatchesName'] = False
    df['FovFileReady'] = False
    df['FWHM'] = np.nan
    df['FocalLength'] = np.nan
    # Try to open all filenames as FITS, collect info relevant to errors and warnings:
    fits_subdir = os.path.join(an_rel_directory, 'Calibrated')
    fov_name_cache = []  # FOV names already proven valid, so each Fov file is read only once.
    for filename in df['Filename']:
        fits = FITS(an_top_directory, fits_subdir, filename)
        df.loc[filename, 'Valid'] = fits.is_valid
        if fits.is_valid:
            df.loc[filename, 'PlateSolved'] = fits.is_plate_solved
            df.loc[filename, 'Calibrated'] = fits.is_calibrated
            df.loc[filename, 'Object'] = fits.object
            # Photrix filenames are expected to begin with '<Object>-':
            df.loc[filename, 'ObjectMatchesName'] = filename.startswith(fits.object + '-')
            fov_proven_ready = False
            if fits.object in fov_name_cache:
                fov_proven_ready = True
            else:
                fov = Fov(fits.object, warn_on_no_fov_file=False)
                if fov.is_valid:
                    if fov.fov_name == fits.object:
                        fov_proven_ready = True
                        fov_name_cache.append(fits.object)
            df.loc[filename, 'FovFileReady'] = fov_proven_ready
            df.loc[filename, 'FWHM'] = fits.fwhm
            df.loc[filename, 'FocalLength'] = fits.focal_length
    # Non-FITS files: should be none; report and REMOVE THEM from df:
    invalid_fits = df.loc[~ df['Valid'], 'Filename']
    if len(invalid_fits) >= 1:
        print('\nINVALID FITS files:')
        for f in invalid_fits:
            print(' ' + f)
        print('\n')
        df = df.loc[df['Valid'], :]  # keep only rows for valid FITS files.
        del df['Valid']  # as all rows in df now refer to valid FITS files.
    else:
        print('All ' + str(len(df)) + ' files can be read as FITS files.')
    # Now assess all FITS, and report errors & warnings:
    not_calibrated = df.loc[~ df['Calibrated'], 'Filename']
    if len(not_calibrated) >= 1:
        print('\nNOT CALIBRATED:')
        for f in not_calibrated:
            print(' ' + f)
        print('\n')
    else:
        print('All calibrated.')
    not_platesolved = df.loc[~ df['PlateSolved'], 'Filename']
    if len(not_platesolved) >= 1:
        print('\nNO PLATE SOLUTION:')
        for f in not_platesolved:
            print(' ' + f)
        print('\n')
    else:
        print('All platesolved.')
    object_nonmatch = df.loc[~ df['ObjectMatchesName'], 'Filename']
    if len(object_nonmatch) >= 1:
        print('\nFITS Object does not match filename:')
        for f in object_nonmatch:
            # fits_object = FITS(an_top_directory, fits_subdir, f).object
            fits_object = df.loc[f, 'Object']
            print(' ' + f + ' has FITS Object = \'' + fits_object + '\'.')
        print('\n')
    else:
        print('All FITS objects match their filenames.')
    n_missing_fov_files = 0
    if warn_any_missing_fov_file:
        fov_file_not_ready = df.loc[~ df['FovFileReady'], 'Filename']
        n_missing_fov_files = len(fov_file_not_ready)
        if n_missing_fov_files >= 1:
            if warn_each_missing_fov_file:
                print('\nFOV files ABSENT:')
                for f in fov_file_not_ready:
                    fits_object = df.loc[f, 'Object']
                    # fits_object = FITS(an_top_directory, fits_subdir, f).object
                    print(' ' + f + ' is missing FOV file \'' + fits_object + '\'.')
        else:
            print('All FOV files are ready.')
        print('\n' + str(n_missing_fov_files), 'of', str(len(df)), 'FOV files missing.')
    else:
        print('(FOV files excluded from assessment.)')
    odd_fwhm_list = []
    # NOTE(review): NaN FWHM values fail both comparisons and are silently NOT flagged here;
    # also, the lower bound 1.5 is hard-coded rather than using module constant MIN_FWHM (1.0).
    for f in df['Filename']:
        fwhm = df.loc[f, 'FWHM']
        if fwhm < 1.5 or fwhm > R_DISC:  # too small, or larger than half the aperture diameter:
            odd_fwhm_list.append((f, fwhm))
    if len(odd_fwhm_list) >= 1:
        print('\nUnusual FWHM (in pixels):')
        for f, fwhm in odd_fwhm_list:
            print(' ' + f + ' has unusual FWHM of ' + '{0:.2f}'.format(fwhm) + ' pixels.')
        print('\n')
    else:
        print('All FWHM values seem OK.')
    odd_fl_list = []
    mean_fl = df['FocalLength'].mean()
    # Flag any focal length more than 3% away from the night's mean:
    for f in df['Filename']:
        fl = df.loc[f, 'FocalLength']
        if abs((fl - mean_fl)) / mean_fl > 0.03:
            odd_fl_list.append((f, fl))
    if len(odd_fl_list) >= 1:
        print('\nUnusual FocalLength (vs mean of ' + '{0:.1f}'.format(mean_fl) + ' mm:')
        for f, fl in odd_fl_list:
            print(' ' + f + ' has unusual Focal length of ' + str(fl))
        print('\n')
    else:
        print('All Focal Lengths seem OK.')
    # Set all FITS file extensions to '.fts' (MaxIm calibration sets it to '.fit' for some reason):
    _set_fits_extensions(an_top_directory=an_top_directory, fits_subdir=fits_subdir,
                         fits_filenames=df['Filename'])
    print('All FITS extensions are OK (=\'.fts\').')
    # Summarize and write instructions for next steps:
    n_warnings = len(not_calibrated) + len(not_platesolved) + len(object_nonmatch) +\
        len(odd_fwhm_list) + len(odd_fl_list) + len(invalid_fits)
    if warn_any_missing_fov_file:
        n_warnings += n_missing_fov_files
    if n_warnings == 0:
        print('\n >>>>> ALL ' + str(len(df)) + ' FITS FILES APPEAR OK.')
        print('Now... 1. Visually inspect all FITS files, if not already done.')
        print(' 2. Run make_df_master().')
    else:
        print('\n >>>>> ' + str(n_warnings) + ' warnings (see listing above).')
        print(' Correct errors and rerun assess() until no errors remain.')
def make_df_master(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None,
                   instrument_name='Borea', ask_user=True):
    """Make the master DataFrame of all required information for downstream photometric processing.
    One output row per (FITS image, FOV star) aperture measurement; aggregates aperture
    photometry, catalog magnitudes, color index, and per-image metadata, then writes the
    result to Photometry/df_master.csv (delimited by ';').
    Typical usage: pr.make_df_master(an_rel_directory='20180811', ask_user=False)
    :param an_top_directory:
    :param an_rel_directory:
    :param instrument_name: name of Instrument (object) that took data [string]
    :param ask_user: True to ask user before building df_master; False to proceed directly.
    :return: [None] df_master is written as csv file to Photometry/df_master.txt.
    """
    # Build cross-reference DataFrame fits_fov_list:
    fits_fov_list = []
    fov_dict = {}  # cache: FOV name -> Fov object, so each FOV file is read only once.
    an_directory = os.path.join(an_top_directory, an_rel_directory)
    fits_directory = os.path.join(an_directory, 'Calibrated')
    for entry in os.scandir(fits_directory):
        this_fits = FITS(an_top_directory, os.path.join(an_rel_directory, 'Calibrated'), entry.name)
        fov_name = this_fits.object
        fits_fov_list.append((entry.name, fov_name))  # list of 2-tuples
        if fov_name not in fov_dict.keys():
            fov = Fov(fov_name)
            fov_dict[fov_name] = fov  # caching Fov objects
    fits_list, fov_list = zip(*fits_fov_list)  # unzip 2-tuples to 2 parallel lists
    df_fits_fov = pd.DataFrame({'FITSname': fits_list, 'FOVname': fov_list}, index=fits_list)
    # Build display DataFrame one row per FOV file:
    df_ask_user = df_fits_fov.groupby(['FOVname']).count().sort_index().copy()
    df_ask_user.rename(columns={'FITSname': 'N_FITS'}, inplace=True)
    df_ask_user['FOV_file_exists'] = False  # default
    df_ask_user['FOV_file_exists_text'] = '*NA*'  # default
    df_ask_user['N_CheckStars'] = 0  # default
    df_ask_user['CheckMsg'] = ''  # default
    for fov_name in df_ask_user.index:
        this_fov = fov_dict[fov_name]  # these were cached above.
        df_ask_user.loc[fov_name, 'FOV_file_exists'] = this_fov.is_valid
        df_ask_user.loc[fov_name, 'N_CheckStars'] = sum([aa.star_type.lower() == 'check'
                                                         for aa in this_fov.aavso_stars])
    df_ask_user['FOV_file_exists_text'] = ['OK' if ex else 'MISSING'
                                           for ex in df_ask_user['FOV_file_exists']]
    df_ask_user['CheckMsg'] = ['OK' if n == 1 else 'WARNING: Target FOVs must have one Check Star.'
                               for n in df_ask_user['N_CheckStars']]
    # List FOVs and FITS file counts & FOV status, ask user to continue:
    # df_ask_user.drop(['FOV_file_exists', 'N_CheckStars'], axis=1, inplace=True)  # drop columns
    df_ask_user.rename(columns={'FOV_file_exists': 'FOV_exists', 'CheckMsg': 'CheckStar'},
                       inplace=True)
    df_ask_user['FOV'] = df_ask_user.index
    len_fov = max(len(s) for s in df_ask_user.index) + 1  # column width for FOV-name display.
    print('\n' + ' FOV'.ljust(len_fov), 'N FITS', 'FOV exists', 'Check Star')
    for ind in df_ask_user.index:
        n_fits = df_ask_user.loc[ind, 'N_FITS']
        singleton_mark = '*' if n_fits == 1 else ' '  # '*' calls out FOVs with only one image.
        print(df_ask_user.loc[ind, 'FOV'].ljust(len_fov),
              (str(df_ask_user.loc[ind, 'N_FITS']) + ' ' + singleton_mark).rjust(len('N FITS')),
              ' ' + df_ask_user.loc[ind, 'FOV_file_exists_text'].ljust(6),
              df_ask_user.loc[ind, 'CheckStar'])
    # TODO: Also verify that all charts exist, stop if not.
    all_fovs_exist = all(df_ask_user['FOV_exists'])
    if not all_fovs_exist:
        print(' >>>>> STOPPING: at least one FOV file is missing.')
        return None
    print(' ------------------------------\n ' + str(len(df_ask_user)) + ' FOVS.')
    if ask_user is True:
        answer = input(' .....Proceed? (y/n): ')
        # NOTE(review): [0] raises IndexError if the user enters an empty answer — TODO guard.
        if answer.strip().lower()[0] != 'y':
            print(' >>>>> STOPPING at user request.')
            return None
    else:
        print('ask_user = False, so make_df_master() continues...')
        print()
    instrument = Instrument(instrument_name)
    # File-renaming.txt (written earlier in the pipeline) maps photrix names -> original Ur names:
    df_ur = pd.read_csv(os.path.join(an_top_directory, an_rel_directory,
                                     'Photometry', 'File-renaming.txt'),
                        sep=';', index_col='PhotrixName')
    fov_names = df_fits_fov['FOVname'].drop_duplicates().sort_values()
    df_master_list = []
    n_rows = 0
    for fov_name in fov_names:
        # Prepare data for this FOV:
        print('FOV >', fov_name)
        fov = fov_dict[fov_name]
        star_data = []
        for star in fov.aavso_stars:
            star_data.append((star.star_id, star.ra, star.dec, star.star_type, star.mags))
        star_id_list, ra_list, dec_list, star_type_list, mags_list = zip(*star_data)
        # 'Number' preserves FOV-file star order for sorting later:
        df_star_data_numbered = pd.DataFrame({'Number': range(len(star_data)),
                                              'StarID': star_id_list,
                                              'degRA': ra_list, 'degDec': dec_list,
                                              'StarType': [s.title() for s in star_type_list],
                                              'Mags': mags_list})
        df_star_data_numbered.index = range(len(df_star_data_numbered))
        if len(fov.punches) >= 1:
            punch_id_list, d_north_list, d_east_list = zip(*fov.punches)
            df_punches = pd.DataFrame({'StarID': punch_id_list,
                                       'dNorth': d_north_list, 'dEast': d_east_list})
        else:
            df_punches = pd.DataFrame()
        df_fov_list = []
        fits_names = df_fits_fov.loc[df_fits_fov['FOVname'] == fov_name, 'FITSname']
        for fits_name in fits_names:
            # Construct Image object for this FITS file:
            image = Image.from_fits_path(an_top_directory,
                                         os.path.join(an_rel_directory, 'Calibrated'), fits_name)
            # Place one measuring aperture at each FOV star's plate-solved (x,y) position:
            for star_id, ra, dec in zip(df_star_data_numbered['StarID'],
                                        df_star_data_numbered['degRA'],
                                        df_star_data_numbered['degDec']):
                x0, y0 = image.fits.xy_from_radec(RaDec(ra, dec))
                image.add_aperture(star_id, x0, y0)
            image.add_punches(df_punches=df_punches)
            # Build df_apertures:
            ap_list = []
            ap_names = [k for k in image.apertures.keys()]
            for ap_name in ap_names:
                ap_list.append(dict(image.results_from_aperture(ap_name)))
            df_apertures = pd.DataFrame(ap_list, index=ap_names)  # constructor: list of dicts
            df_apertures['StarID'] = df_apertures.index
            df_apertures.rename(columns={'r_disc': 'DiscRadius',
                                         'r_inner': 'SkyRadiusInner',
                                         'r_outer': 'SkyRadiusOuter',
                                         'x_centroid': 'Xcentroid',
                                         'y_centroid': 'Ycentroid',
                                         'annulus_flux': 'SkyADU',
                                         'annulus_flux_sigma': 'SkySigma',
                                         'fwhm': 'FWHM',
                                         'x1024': 'X1024',
                                         'y1024': 'Y1024',
                                         'vignette': 'Vignette',
                                         'sky_bias': 'SkyBias'},
                                inplace=True)
            # Negative/zero net flux has no defined magnitude; drop such rows:
            df_apertures = df_apertures.loc[df_apertures['net_flux'] > 0.0, :]
            df_apertures['InstMag'] = -2.5 * np.log10(df_apertures['net_flux']) + \
                2.5 * log10(image.fits.exposure)
            df_apertures['InstMagSigma'] = (2.5 / log(10)) * \
                (df_apertures['net_flux_sigma'] /
                 df_apertures['net_flux'])  # math verified 20170726.
            df_apertures['ModelStarID'] = image.fits.object + '_' + df_apertures['StarID']
            df_apertures.drop(['n_disc_pixels', 'n_annulus_pixels', 'max_adu',
                               'net_flux', 'net_flux_sigma'],
                              axis=1, inplace=True)  # delete columns
            # For each aperture, add its max ADU from the original ("Ur", uncalibrated) FITS file:
            ur_filename = df_ur.loc[fits_name, 'UrName']
            df_apertures['UrFITSfile'] = ur_filename
            ur_image = Image.from_fits_path(an_top_directory, os.path.join(an_rel_directory, 'Ur'),
                                            ur_filename)
            df_apertures['MaxADU_Ur'] = np.nan
            df_apertures['LogADU'] = np.nan
            for star_id in df_apertures.index:
                ap = Aperture(ur_image, star_id,
                              df_apertures.loc[star_id, 'Xcentroid'],
                              df_apertures.loc[star_id, 'Ycentroid'],
                              df_punches=None)
                df_apertures.loc[star_id, 'MaxADU_Ur'] = ap.max_adu
                if ap.max_adu > 0.0:
                    df_apertures.loc[star_id, 'LogADU'] = log10(ap.max_adu)
            # Add FOV star data to each aperture row:
            df = pd.merge(df_apertures, df_star_data_numbered, how='left', on='StarID')
            df.sort_values(by='Number', inplace=True)  # stars in FOV order (not strictly needed).
            df.index = df['StarID']
            # Add catalog mag, CatMagError, and color index from FOV stars and Image's filter:
            df['CatMag'] = np.nan
            df['CatMagError'] = np.nan
            df['CI'] = np.nan  # old-school V-I color index
            for star_id in df['StarID']:
                mags = df.loc[star_id, 'Mags']
                # We extract from mags (dict) with .get() in case this filter is missing.
                mag_and_error = mags.get(image.fits.filter, (np.nan, np.nan))
                df.loc[star_id, 'CatMag'] = mag_and_error[0]
                df.loc[star_id, 'CatMagError'] = mag_and_error[1]
                # TODO: Make choice of color index passbands more flexible.
                ci_mag_1 = mags.get('V', (np.nan, np.nan))[0]  # old-school V-I color index
                ci_mag_2 = mags.get('I', (np.nan, np.nan))[0]  # old-school V-I color index
                df.loc[star_id, 'CI'] = ci_mag_1 - ci_mag_2  # old-school V-I color index
            df.drop('Mags', axis=1, inplace=True)
            # Add FITS data to all rows:
            df['FITSfile'] = image.fits.filename
            df['Object'] = image.fits.object
            df['JD_start'] = jd_from_datetime_utc(image.fits.utc_start)
            df['UTC_start'] = image.fits.utc_start
            df['Exposure'] = image.fits.exposure
            df['JD_mid'] = jd_from_datetime_utc(image.fits.utc_mid)
            df['JD_fract'] = np.nan  # placeholder value (replaced with real values, below)
            df['JD_fract2'] = np.nan  # placeholder value (replaced with real values, below)
            df['Filter'] = image.fits.filter
            df['Airmass'] = image.fits.airmass
            # Add FOV data to all rows:
            df['FOV'] = fov.fov_name
            df['FOV_date'] = fov.fov_date
            df['Chart'] = fov.chart
            # Append this image's dataframe to list, write image line to console:
            df_fov_list.append(df)
            n_rows += len(df)
            if len(df) >= 1:
                print(fits_name, '=', len(df), 'rows',
                      '--> df_master_now_has', n_rows, 'rows.')
            else:
                print(fits_name, '=', len(df), 'rows',
                      '--> NO ROWS from this file ... df_master_now_has', n_rows, 'rows.')
        # Make df_fov by concatenating all and complete df_fov
        df_fov = pd.DataFrame(pd.concat(df_fov_list, ignore_index=True, sort=True))
        df_fov = _add_ci_values(df_fov, df_star_data_numbered, instrument)
        df_master_list.append(df_fov)
    # Make df_master by concatenating all fov dataframes:
    df_master = pd.DataFrame(pd.concat(df_master_list, ignore_index=True, sort=True))
    df_master.sort_values(['JD_mid', 'StarType', 'Number'], inplace=True)
    df_master.drop(['Number', 'Object'], axis=1, inplace=True)
    df_master.insert(0, 'Serial', range(1, 1 + len(df_master)))  # inserts in place
    df_master.index = list(df_master['Serial'])
    # Fill in the JD_fract and JD_fract2 columns:
    JD_floor = floor(df_master['JD_mid'].min())  # requires that all JD_mid values be known.
    df_master['JD_fract'] = df_master['JD_mid'] - JD_floor
    df_master['JD_fract2'] = df_master['JD_fract']**2
    # For comp stars w/ CatMagError==NA, overwrite w/ largest CatMagError for same FOV and filter:
    # TODO: revisit using largest CatMagError for all CatMagError values. Seems too harsh.
    # TODO: move this loop to CatMagError computation (just above). Avoids .groupby().
    df_groupby = df_master[df_master['StarType'] == 'Comp'].groupby(['FOV', 'Filter'])
    for group_name, df_group in df_groupby:
        serials_nan_error = df_group.loc[np.isnan(df_group['CatMagError']), 'Serial'].tolist()
        serials_zero_error = df_group.loc[df_group['CatMagError'] <= 0.0, 'Serial'].tolist()
        serials_to_update = serials_nan_error + serials_zero_error
        # NOTE(review): sum() is non-emptiness test here only because Serials are positive
        # integers; len(serials_to_update) >= 1 appears to be the intent — TODO confirm.
        if sum(serials_to_update) >= 1:
            max_catmagerror = df_group['CatMagError'].max()
            df_master.loc[serials_to_update, 'CatMagError'] = max_catmagerror
    # Write df_master to file:
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', DF_MASTER_FILENAME)
    df_master.to_csv(fullpath, sep=';', quotechar='"',
                     quoting=2, index=False)  # quoting=2-->quotes around non-numerics.
    # Finish & exit:
    _archive_fov_files(an_top_directory, an_rel_directory, fov_names)
    _write_omit_txt_stub(an_top_directory, an_rel_directory)
    print()
    # return df_master
class SkyModel:
    def __init__(self, an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None, filter=None,
                 instrument_name='Borea', site_name='DSW',
                 max_cat_mag_error=0.01, max_inst_mag_sigma=0.03, max_color_vi=+2.5,
                 saturation_adu=None,
                 fit_sky_bias=True, fit_vignette=True, fit_xy=False,
                 fit_transform=False, fit_extinction=True, fit_log_adu=True,
                 fit_jd=False, fit_jd2=False,
                 do_plots=True):
        """Constructs a sky model using mixed-model regression on df_master.
        Normally used by make_model()
        Typical usage: V = pr.SkyModel(an_rel_directory='20180804', filter='V', fit_extinction=True)
        :param an_top_directory: e.g., 'C:\Astro\Images\Borea Photrix' [string]
        :param an_rel_directory: e.g., '20170504'. The dir 'Photometry' is subdir of this. [string]
        :param filter: name of filter to which this model applies [string, e.g., 'V' or 'R']
            (NOTE: parameter name shadows the Python builtin; kept for API compatibility.)
        :param instrument_name: name of Instrument, e.g., 'Borea' [string; name of Instrument obj]
        :param site_name: name of observing site, e.g., 'DSW' [string; name of Site object]
        :param max_cat_mag_error: maximum catalog error allowed to stars in model [float]
        :param max_inst_mag_sigma: max instrument magnitude error allowed star observations [float]
        :param max_color_vi: maximum V-I color allowed to stars in model [float]
        :param saturation_adu: ccd ADUs that constitute saturation [float; from Instrument if None]
        :param fit_sky_bias: True to fit sky bias term [bool]
        :param fit_vignette: True to fit vignette term (dist^2 from ccd center) [bool]
        :param fit_xy: True to fit X and Y gradient terms [bool]
        :param fit_transform: True to fit transform terms; else use values from Instument obj [bool]
        :param fit_extinction: True to fit extinction terms; else use values from Site obj [bool]
        :param fit_log_adu: True to fit log(MaxADU_Ur) as CCD nonlinearity measure [bool]
        :param fit_jd: True to fit jd_fract, to capture linearly incr or decr cirrus [bool]
        :param fit_jd2: True to fit jd_fract^2, to capture quadratically incr or decr cirrus [bool]
        :param do_plots: True to draw diagnostic plots after the fit [bool]
        Parameter 'fit_star_id' is not included in this version (has never been used in R, and
        would lead to crossed RE vars).
        """
        # Store all constructor options (these are read later by the regression/output steps):
        self.an_top_directory = an_top_directory
        self.an_rel_directory = an_rel_directory
        self.filter = filter
        self.instrument_name = instrument_name
        self.site_name = site_name
        self.max_cat_mag_error = max_cat_mag_error
        self.max_inst_mag_sigma = max_inst_mag_sigma
        self.max_color_vi = max_color_vi
        if saturation_adu is not None:
            self.saturation_adu = saturation_adu
        else:
            # Default saturation level comes from the named Instrument's camera data:
            instrument = Instrument(self.instrument_name)
            self.saturation_adu = instrument.camera['saturation_adu']
        self.fit_sky_bias = fit_sky_bias
        self.fit_vignette = fit_vignette
        self.fit_xy = fit_xy
        self.fit_transform = fit_transform
        self.fit_extinction = fit_extinction
        self.fit_log_adu = fit_log_adu
        self.fit_jd = fit_jd
        self.fit_jd2 = fit_jd2
        self.dep_var_name = 'InstMag_with_offsets'
        self.df_model = None    # data to/from regression, one row per input pt [pandas DataFrame]
        self.mm_fit = None      # placeholder [MixedModelFit object]
        self.df_star = None     # one row per unique model star [pandas DataFrame]
        self.extinction = None  # scalar result, placeholder
        self.transform = None   # "
        self.vignette = None    # "
        self.x = None           # "
        self.y = None           # "
        self.sky_bias = None    # "
        self.log_adu = None     # "
        self.converged = False  # "
        self.n_obs = None       # "
        self.n_images = None    # "
        self.sigma = None       # "
        self.df_image = None    # one row per image, placeholder
        # Rows from df_master, as curated by user in file omit.txt:
        df, warning_lines = _apply_omit_txt(self.an_top_directory, self.an_rel_directory)
        # Remove rows for several causes of ineligibility to help form a sky model
        # (each filter below also drops rows with missing/NaN values in the tested column):
        df = df[df['Filter'] == self.filter]
        df = df[df['StarType'] == 'Comp']
        df = df[df['CatMag'].notnull()]
        df = df[df['Airmass'].notnull()]
        df = df[df['InstMagSigma'] <= self.max_inst_mag_sigma]
        df = df[df['MaxADU_Ur'].notnull()]
        df = df[df['MaxADU_Ur'] <= self.saturation_adu]
        df = df[df['CI'].notnull()]
        df = df[df['CI'] <= self.max_color_vi]
        df = df[df['CatMagError'].notnull()]
        df = df[df['CatMagError'] <= self.max_cat_mag_error]
        df = df[df['FWHM'] >= MIN_FWHM]
        self.df_model = df
        # Run the regression and populate the scalar/DataFrame result attributes:
        self._prep_and_do_regression()
        self._build_output()
        if do_plots:
            self.plots()
            self.print_high_cirrus()
        # self.to_json_file()  # GIVE UP on JSON -- it can't handle DataFrames and Series.
        _write_stare_comps_txt_stub(self.an_top_directory, self.an_rel_directory)
# @classmethod
# def from_json(cls, an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None, filter=None):
# """
# Alternate constructor, reads from JSON file previously written.
# Normally used by _predict_fixed_only(), which requires final (immutable) Models.
# :param an_top_directory: path to an_rel_folder [str]
# :param an_rel_directory: folder for this instrument on this Astronight [string]
# :param filter: the filter to which this model applies [string, e.g., 'V' or 'R']
# :return: newly constructed Model object [class Model]
# """
# json_fullpath = os.path.join(an_top_directory, an_rel_directory, "model-" +
# filter + ".txt")
# with open(json_fullpath, 'r') as f:
# d = json.load(f)
# # Series and DataFrames (pandas) were stored as dictionaries, so convert them back:
#
#
#
# # Distribute dictionary elements to their original object attributes:
# return cls(an_top_directory=an_top_directory, an_rel_directory=an_rel_directory,
# filter=filter,
# instrument_name=d['instrument_name'],
# max_inst_mag_sigma=d['max_inst_mag_sigma'],
# max_cat_mag_error=d['max_cat_mag_error'],
# max_color_vi=+d['max_color_vi'], saturated_adu=d['saturation_adu'],
# fit_skyBias=d['fit_sky_bias'],
# fit_vignette=d['fit_vignette'], fit_xy=d['fit_xy'],
# fit_transform=d['fit_transform'], fit_extinction=d['fit_extinction'])
#
# def to_json_file(self):
# """
# Writes (most of) the current object to a JSON file.
# Does NOT include df_master (which is huge, but can be found in the same directory_path).
# Also does not include the statsmodels::MixedModelLm which cannot be serialized.
# :return: True if file successfully written, else False.
# """
# json_fullpath = os.path.join(self.an_top_directory, self.an_rel_directory, 'Photometry',
# "model-" + self.filter + ".json")
# json_dict = vars(self).copy()
#
# # Convert pandas DataFrames to dictionaries without json-illegal int64, etc.
# json_dict['df_model'] = convert_pandas_to_json_compatible_dict(json_dict['df_model'])
# json_dict['df_star'] = convert_pandas_to_json_compatible_dict(json_dict['df_star'])
# # Convert pandas Series to dictionaries without json-illegal int64, etc.
# json_dict['mm_fit'].fitted_values = \
# convert_pandas_to_json_compatible_dict(json_dict['mm_fit'].fitted_values)
# json_dict['mm_fit'].group_values = \
# convert_pandas_to_json_compatible_dict(json_dict['mm_fit'].group_values)
# json_dict['mm_fit'].residuals = \
# convert_pandas_to_json_compatible_dict(json_dict['mm_fit'].residuals)
#
# with open(json_fullpath, 'w') as f:
# json.dump(json_dict, f, indent=4)
    def _prep_and_do_regression(self):
        """Assemble the fixed-effect variable list from the fit_* flags, fold any
        non-fitted (fixed-value) transform and extinction terms into a dependent-variable
        offset, then run the mixed-model regression with one random effect per image
        ('FITSfile', the cirrus effect). Sets self.mm_fit and adds the dependent-variable
        column (self.dep_var_name) to self.df_model.
        """
        # Initiate dependent-variable offset, which will aggregate all such offset terms:
        dep_var_offset = self.df_model['CatMag'].copy()  # *copy* CatMag, or it will be damaged
        # Build fixed-effect (x) variable list and construct dep-var offset:
        fixed_effect_var_list = []
        if self.fit_transform:
            fixed_effect_var_list.append('CI')
        else:
            # Not fitting transform: apply the Instrument's fixed V-I transform as an offset.
            instrument = Instrument(self.instrument_name)
            transform_vi = instrument.transform(self.filter, 'V-I')
            dep_var_offset += transform_vi * self.df_model['CI']
            print(' >>>>> Transform (Color Index V-I) not fit: value fixed at',
                  '{0:.3f}'.format(transform_vi))
        if self.fit_extinction:
            fixed_effect_var_list.append('Airmass')
        else:
            # Not fitting extinction: apply the Site's fixed extinction value as an offset.
            site = Site(self.site_name)
            extinction = site.extinction[self.filter]
            dep_var_offset += extinction * self.df_model['Airmass']
            print(' >>>>> Extinction (Airmass) not fit: value fixed at',
                  '{0:.3f}'.format(extinction))
        if self.fit_sky_bias:
            # Include SkyBias only if more than half the rows have a nonzero value:
            if sum([x != 0 for x in self.df_model['SkyBias']]) > int(len(self.df_model) / 2):
                fixed_effect_var_list.append('SkyBias')
        if self.fit_log_adu:
            fixed_effect_var_list.append('LogADU')
        if self.fit_vignette:
            fixed_effect_var_list.append('Vignette')
        if self.fit_xy:
            fixed_effect_var_list.extend(['X1024', 'Y1024'])
        if self.fit_jd:
            fixed_effect_var_list.extend(['JD_fract'])
        if self.fit_jd2:
            fixed_effect_var_list.extend(['JD_fract2'])
        # Build 'random-effect' variable:
        random_effect_var_name = 'FITSfile'  # cirrus effect is per-image
        # Build dependent (y) variable:
        self.df_model[self.dep_var_name] = self.df_model['InstMag'] - dep_var_offset
        # Execute regression:
        self.mm_fit = MixedModelFit(data=self.df_model, dep_var=self.dep_var_name,
                                    fixed_vars=fixed_effect_var_list,
                                    group_var=random_effect_var_name)
        # Print the statsmodels summary only for an apparently sound fit
        # (nonzero residual scale, and all observations actually used):
        if self.mm_fit.statsmodels_object.scale != 0.0 and \
                self.mm_fit.statsmodels_object.nobs == len(self.df_model):
            print(self.mm_fit.statsmodels_object.summary())
def _build_output(self):
"""
Builds appropriate output attributes for external use.
:return: None
"""
# Add 1/obs regression data as new df_model columns:
self.df_model['FittedValue'] = self.mm_fit.df_observations['FittedValue']
self.df_model['Residual'] = self.mm_fit.df_observations['Residual']
dep_var_offset = self.df_model['InstMag'] - self.df_model[self.dep_var_name]
self.df_model['FittedInstMag'] = self.df_model['FittedValue'] + dep_var_offset
# Build df_star (star ID and count only):
self.df_star = self.df_model[['Serial', 'ModelStarID']].groupby('ModelStarID').count()
self.df_star['ModelStarID'] = self.df_star.index
# Build df_image (from Mixed Model random effect), 1 row per FITS file,
# index = FITSfile, columns = FITSfile, JD_mid, Value:
df = self.mm_fit.df_random_effects.copy()
# df = df.rename(columns={'GroupName': 'FITSfile', 'GroupValue': 'Value'}) # old statsmodel ver.
# Next line accommodates statsmodels package's STUPID BREAKING CHANGE of/before their version 0.10.
df = df.rename(columns={'GroupName': 'FITSfile', 'Group': 'Value'}) # for new statsmodels ver 0.10.
df_xref = self.df_model[['FITSfile', 'JD_mid']].drop_duplicates()
df = pd.merge(df, df_xref, on='FITSfile', how='left', sort=False).sort_values(by='JD_mid')
self.df_image = df.copy()
self.df_image.index = self.df_image['FITSfile']
# Extract and store scalar results:
if self.fit_transform:
self.transform = self.mm_fit.df_fixed_effects.loc['CI', 'Value'] # .loc(row, col)
else:
instrument = Instrument(self.instrument_name)
self.transform = instrument.transform(self.filter, 'V-I')
if self.fit_extinction:
self.extinction = self.mm_fit.df_fixed_effects.loc['Airmass', 'Value']
else:
site = Site(self.site_name)
self.extinction = site.extinction[self.filter]
self.vignette = self.mm_fit.df_fixed_effects.loc['Vignette', 'Value'] \
if self.fit_vignette is True else 0.0
self.x = self.mm_fit.df_fixed_effects.loc['X1024', 'Value'] if self.fit_xy is True else 0.0
self.y = self.mm_fit.df_fixed_effects['Y1024', 'Value'] if self.fit_xy is True else 0.0
self.sky_bias = self.mm_fit.df_fixed_effects.loc['SkyBias', 'Value'] \
if self.fit_sky_bias is True else 0.0
self.log_adu = self.mm_fit.df_fixed_effects.loc['LogADU', 'Value'] \
if self.fit_log_adu is True else 0.0
self.jd = self.mm_fit.df_fixed_effects.loc['JD_fract', 'Value'] \
if self.fit_jd is True else 0.0
self.jd2 = self.mm_fit.df_fixed_effects.loc['JD_fract2', 'Value'] \
if self.fit_jd2 is True else 0.0
self.converged = self.mm_fit.converged
self.n_obs = len(self.df_model)
self.n_images = len(self.df_model['FITSfile'].drop_duplicates())
self.sigma = self.mm_fit.sigma
print('\n', len(self.df_model), ' observations --> sigma=',
round((1000.0 * self.sigma), 1), ' mMag')
def _predict_fixed_only(self, df_predict_input):
"""
Uses current model to predict best star magnitudes for *observed* InstMag and other inputs.
FIXED-EFFECTS ONLY: does not include random effect.
:param df_predict_input: data, all needed input columns for skymodel [pandas DataFrame]
will NOT include or use random effects.
:return: a dependent variable prediction for each input row [pandas Series of floats,
with index = index of predict_input]
"""
# First, verify that input is a DataFrame and that all needed columns are present.
# Names must be same as in model.
if not isinstance(df_predict_input, pd.DataFrame):
print('>>>>> SkyModel._predict_fixed_only(): predict_input is not a pandas DataFrame.')
return None
required_input_columns = ['Serial', 'FITSfile', 'InstMag', 'CI', 'Airmass']
if self.fit_sky_bias:
required_input_columns.append('SkyBias')
if self.fit_log_adu:
required_input_columns.append('LogADU')
if self.fit_vignette:
required_input_columns.append('Vignette')
if self.fit_xy:
required_input_columns.extend(['X1024', 'Y1024'])
if self.fit_jd:
required_input_columns.append('JD_fract')
if self.fit_jd2:
required_input_columns.append('JD_fract2')
all_present = all([name in df_predict_input.columns for name in required_input_columns])
if not all_present:
print('>>>>> SkyModel._predict_fixed_only(): at least one column missing.')
print(' Current model requires these columns:')
print(' ' + ', '.join(required_input_columns))
return None
# Make parsimonious copy of dataframe; add bogus CatMag column (required by model):
df_for_mm_predict = (df_predict_input.copy())[required_input_columns]
bogus_cat_mag = 0.0
df_for_mm_predict['CatMag'] = bogus_cat_mag # totally bogus local value, reversed later
# Execute MixedModelFit.predict(), giving Intercept + bogus CatMag + FEs + REs (pd.Series)
# DOES NOT INCLUDE RANDOM EFFECTS (these will be added back as Cirrus Effect terms):
raw_predictions = self.mm_fit.predict(df_for_mm_predict, include_random_effect=False)
# Compute dependent-variable offsets for unknown stars:
dep_var_offsets = pd.Series(len(df_for_mm_predict) * [0.0], index=raw_predictions.index)
if self.fit_transform is False:
dep_var_offsets += self.transform * df_for_mm_predict['CI']
if self.fit_extinction is False:
dep_var_offsets += self.extinction * df_for_mm_predict['Airmass']
# Extract best CatMag d'un seul coup, per (eq B - eq A), above:
predicted_star_mags = \
df_for_mm_predict['InstMag'] - dep_var_offsets - raw_predictions + bogus_cat_mag
return predicted_star_mags
    def plots(self):
        """Display diagnostic plots for this SkyModel fit.
        Figure 1 is a Q-Q plot of residuals (potential outliers labeled by Serial);
        Figure 2 is a 3x4 grid of per-image and per-observation diagnostic scatter plots.
        Points from standard FOVs (FOV name starting 'Std_') plot dark green, others black.
        :return: None (figures shown via plt.show()).
        """
        # Setup for all Figures.
        obs_is_std = [name.startswith('Std_') for name in self.df_model['FOV']]
        obs_point_colors = ['darkgreen' if obs_is_std[i] is True else 'black'
                            for i, x in enumerate(obs_is_std)]
        image_is_std = [name.startswith('Std_') for name in self.df_image['FITSfile']]
        image_point_colors = ['darkgreen' if image_is_std[i] is True else 'black'
                              for i, x in enumerate(image_is_std)]
        # All JD x-axes are plotted relative to this night's floor(JD):
        jd_floor = floor(min(self.df_model['JD_mid']))
        obs_jd_fract = self.df_model['JD_mid']-jd_floor
        xlabel_jd = 'JD(mid)-' + str(jd_floor)
        obs_residuals_mmag = self.df_model['Residual'] * 1000.0  # residuals plotted in mMag
        # FIGURE 1 (Q-Q plot):
        from scipy.stats import norm
        df_y = self.df_model.copy()[['Residual', 'Serial']]
        df_y['Residual'] *= 1000.0
        df_y['Colors'] = obs_point_colors  # keep colors attached to correct residuals when sorted
        df_y = df_y.sort_values(by='Residual')
        n = len(df_y)
        # Theoretical normal quantiles, one per sorted residual:
        t_values = [norm.ppf((k-0.5)/n) for k in range(1, n+1)]
        # Construct Q-Q plot:
        fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(12, 8))  # (width, height) in "inches"
        ax.grid(True, color='lightgray', zorder=-1000)
        ax.set_title('Q-Q plot of Residuals: ' +
                     self.an_rel_directory + ' ' + self.filter +
                     ' filter', color='darkblue', fontsize=20, weight='bold')
        ax.set_xlabel('t (sigma.residuals = ' + str(round(1000.0 * self.sigma, 1)) + ' mMag)')
        ax.set_ylabel('Residual (mMag)')
        ax.scatter(x=t_values, y=df_y['Residual'], alpha=0.6, color=df_y['Colors'], zorder=+1000)
        # Label potential outliers (residuals more than 2 std devs from the mean):
        mean_y = df_y['Residual'].mean()
        std_y = df_y['Residual'].std()
        z_score_y = (df_y['Residual'] - mean_y) / std_y
        df_y['T'] = t_values
        df_to_label = df_y[abs(z_score_y) >= 2.0]
        for x, y, label in zip(df_to_label['T'], df_to_label['Residual'], df_to_label['Serial']):
            ax.annotate(label, xy=(x, y), xytext=(4, -4),
                        textcoords='offset points', ha='left', va='top', rotation=-40)
        # Add reference line (slope = sample std dev, i.e., perfect-normal locus):
        x_low = 1.10 * min(df_y['T'])
        x_high = 1.10 * max(df_y['T'])
        y_low = x_low * std_y
        y_high = x_high * std_y
        ax.plot([x_low, x_high], [y_low, y_high], color='gray', zorder=-100, linewidth=1)
        # Add annotation: number of observations:
        fig.text(x=0.5, y=0.87,
                 s=str(len(self.df_model)) + ' observations in model.',
                 verticalalignment='top', horizontalalignment='center',
                 fontsize=12)
        # NOTE(review): fig.canvas.set_window_title was removed in newer matplotlib
        # (replacement: fig.canvas.manager.set_window_title) -- confirm version in use.
        fig.canvas.set_window_title(self.filter + ': Q-Q')
        plt.show()
        # FIGURE 2 (multiplot): Set up plot grid and style parameters:
        fig, axes = plt.subplots(ncols=4, nrows=3, figsize=(16, 10))  # (width, height) in "inches"
        def make_labels(ax, title, xlabel, ylabel, zero_line=True):
            # Helper: apply common title/axis labels, plus optional y=0 reference line.
            ax.set_title(title, y=0.89)
            ax.set_xlabel(xlabel, labelpad=-27)
            ax.set_ylabel(ylabel, labelpad=-8)
            if zero_line is True:
                ax.axhline(y=0, color='lightgray', linewidth=1, zorder=-100)
        # Cirrus Plot (one point per image):
        # TODO: Modify this somehow to also account for JD_fract and JD_fract2.
        ax = axes[0, 0]
        make_labels(ax, 'Image Cirrus Plot', xlabel_jd, 'mMag')
        ax.scatter(x=self.df_image['JD_mid']-jd_floor, y=self.df_image['Value'] * 1000.0,
                   alpha=0.6, color=image_point_colors)
        ax.invert_yaxis()  # per custom of plotting magnitudes brighter=upward
        # Sky background vs JD_mid:
        ax = axes[0, 1]
        make_labels(ax, 'Sky background vs JD_mid', xlabel_jd, 'Sky ADU',
                    zero_line=False)
        ax.scatter(x=obs_jd_fract, y=self.df_model['SkyADU'],
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Instrument Magnitude:
        ax = axes[0, 2]
        make_labels(ax, 'Residuals vs Instrument Mag', 'Instrument Mag', 'mMag')
        ax.scatter(x=self.df_model['InstMag'], y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Max ADU (log x-scale, floor of 1000 ADU on the axis minimum):
        ax = axes[0, 3]
        xlabel_text = 'Max ADU, uncalibrated [log scale]'
        make_labels(ax, 'Residuals vs Max ADU', xlabel_text, 'mMag')
        ax.set_xlabel(xlabel_text, labelpad=-30)
        ax.set_xscale('log')
        x_scale_min = min(1000.0, 0.9 * min(self.df_model['MaxADU_Ur']))
        x_scale_max = 1.1 * max(self.df_model['MaxADU_Ur'])
        ax.set_xlim(x_scale_min, x_scale_max)
        # ax.xaxis.set_major_locator(ticker.LogLocator(base=10.0, numticks=20))
        ax.scatter(x=self.df_model['MaxADU_Ur'], y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Sky background:
        ax = axes[1, 0]
        make_labels(ax, 'Residuals vs Sky background', 'Sky ADUs', 'mMag')
        ax.scatter(x=self.df_model['SkyADU'], y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs JD:
        ax = axes[1, 1]
        make_labels(ax, 'Residuals vs JD', xlabel_jd, 'mMag')
        ax.scatter(x=obs_jd_fract, y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Color Index:
        ax = axes[1, 2]
        make_labels(ax, 'Residuals vs Color Index', 'Color Index (V-I)', 'mMag')
        ax.scatter(x=self.df_model['CI'], y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Airmass:
        ax = axes[1, 3]
        make_labels(ax, 'Residuals vs Airmass', 'Airmass', 'mMag')
        ax.scatter(x=self.df_model['Airmass'], y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Exposure Time:
        ax = axes[2, 0]
        make_labels(ax, 'Residuals vs Exposure Time', 'seconds', 'mMag')
        ax.scatter(x=self.df_model['Exposure'], y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs Vignette (x converted back to pixels from CCD center):
        ax = axes[2, 1]
        make_labels(ax, 'Residuals vs Vignette', 'pixels from CCD center', 'mMag')
        ax.scatter(x=1024*np.sqrt(self.df_model['Vignette']), y=obs_residuals_mmag,
                   alpha=0.6, color=obs_point_colors)
        # Residuals vs X:
        ax = axes[2, 2]
        make_labels(ax, 'Residuals vs X', 'X pixels from CCD center', 'mMag')
        ax.scatter(x=1024*self.df_model['X1024'], y=self.df_model['Residual'] * 1000.0, alpha=0.6,
                   color=obs_point_colors)
        # Residuals vs Y:
        ax = axes[2, 3]
        make_labels(ax, 'Residuals vs Y', 'Y pixels from CCD center', 'mMag')
        ax.scatter(x=1024*self.df_model['Y1024'], y=self.df_model['Residual'] * 1000.0, alpha=0.6,
                   color=obs_point_colors)
        # Finish the figure, and show the entire plot:
        fig.tight_layout(rect=(0, 0, 1, 0.925))
        fig.subplots_adjust(left=0.06, bottom=0.06, right=0.94, top=0.85, wspace=0.25, hspace=0.25)
        fig.suptitle(self.an_rel_directory +
                     ' ' + self.filter + ' filter ' +
                     '{:%Y-%m-%d %H:%M utc}'.format(datetime.now(timezone.utc)),
                     color='darkblue', fontsize=20, weight='bold')
        fig.canvas.set_window_title(self.filter + ': 12 plots')
        plt.show()
def print_high_cirrus(self):
df_cirrus = self.df_image.copy()
df_cirrus['AbsValue'] = abs(df_cirrus['Value'])
lines_to_print = max(6, int(0.05 * len(df_cirrus)))
df_cirrus = df_cirrus.sort_values(by='AbsValue', ascending=False).head(lines_to_print)
df_cirrus['mmag Cirrus'] = 1000.0 * df_cirrus['Value']
print('--------------------------------------------')
print(pd.DataFrame(df_cirrus['mmag Cirrus']))
class PredictionSet:
    def __init__(self, an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None,
                 instrument_name='Borea', site_name='DSW',
                 max_inst_mag_sigma=0.05, skymodel_list=None):
        """ Constructs a prediction set, i.e., a set of best estimates of comp, check, and target star
            magnitudes, ready for marking up (curating) and reporting to the AAVSO (for example).
            NOTE: construction runs the entire prediction workflow (comp mags, cirrus effects,
            transformed mags, error estimates) and writes the AAVSO report-map stub file.
        Usages:
            ps = pr.PredictionSet(an_rel_directory='20170816', skymodel_list=[V,R,I])
            ps.markup_report()
            ps.aavso_report()
        :param an_top_directory: e.g., 'C:\\Astro\\Images\\Borea Photrix' [string]
        :param an_rel_directory: e.g., '20170504'. The dir 'Photometry' is subdir of this. [string]
        :param instrument_name: name of Instrument, e.g., 'Borea' [string; name of Instrument obj]
        :param site_name: name of observing site, e.g., 'DSW' [string; name of Site object]
        :param max_inst_mag_sigma: max instrument magnitude error allowed star observations [float]
        :param skymodel_list: SkyModel objects ready to be used [list of SkyModel objs]
        """
        self.an_top_directory = an_top_directory
        self.an_rel_directory = an_rel_directory
        self.instrument_name = instrument_name
        self.instrument = Instrument(self.instrument_name)
        self.site_name = site_name
        self.site = Site(self.site_name)
        self.max_inst_mag_sigma = max_inst_mag_sigma
        # ---- we'll deal with transform preferences in a later version.
        # for now, we'll stick with V-I for all filters
        # if transform_preferences is None:
        #     transform_preferences = self.get_transform_preferences(self.instrument)
        # else:
        #     self.transform_preferences = transform_preferences
        self.saturation_adu = self.instrument.camera['saturation_adu']
        # One SkyModel per filter, keyed by filter name:
        self.skymodels = dict((skymodel.filter, skymodel) for skymodel in skymodel_list)
        self.df_all_eligible_obs = None
        self.df_all_curated_obs = None
        # df_comps_mags: redefined 5/2017 to include all comps (not only from images w/targets)
        self.df_comp_mags = None
        self.df_cirrus_effect = None
        self.df_transformed = None
        # TODO: make the next two attributes either: both lists or both Serials.
        self.images_with_eligible_comps = None  # pd.Series of strings
        self.images_with_targets_and_comps = None  # list of strings
        # Do workflow steps (order matters: each step consumes the previous step's output):
        self.df_all_eligible_obs, warning_lines = _apply_omit_txt(self.an_top_directory,
                                                                  self.an_rel_directory)
        self.df_all_curated_obs, warning_lines = _curate_stare_comps(self.an_top_directory,
                                                                     self.an_rel_directory,
                                                                     self.df_all_eligible_obs)
        self.df_comp_mags = self.compute_comp_mags()
        self.df_cirrus_effect = self._compute_cirrus_effect(self.df_comp_mags)
        df_transformed_without_errors = self._compute_transformed_mags()
        print('\nWait for it...\n')
        self.df_transformed = self._compute_all_errors(df_transformed_without_errors)
        _write_aavso_report_map_stub(self.an_top_directory, self.an_rel_directory)
        self._write_summary_to_console()
    def compute_comp_mags(self):
        """
        Get raw, predicted mag estimates for ALL eligible comp-star observations, in all filters.
        This means for ALL images with or without targets, and with eligible comp observations.
        5/26/2017: Includes standards and other comp-containing images without targets.
        Side effects: sets self.images_with_eligible_comps (pd.Series of FITSfile names)
        and self.images_with_targets_and_comps (list of FITSfile names).
        :return: one row per eligible comp observation, indexed by Serial, including column
            'EstimatedMag' (fixed-effects-only prediction, no cirrus term) and
            'UseInEnsemble' defaulted to True [pandas DataFrame]
        """
        df = self.df_all_curated_obs.copy()
        # Select rows, then remove rows that suffer various causes of ineligibility:
        df = df[df['StarType'] == 'Comp']
        df = df[df['MaxADU_Ur'].notnull()]
        df = df[df['MaxADU_Ur'] <= self.saturation_adu]  # reject saturated comps
        df = df[df['InstMagSigma'] <= self.max_inst_mag_sigma]  # reject too-noisy comps
        df = df[df['CatMag'].notnull()]
        df = df[df['CI'].notnull()]
        df = df[df['Airmass'].notnull()]
        df = df[df['FWHM'] >= MIN_FWHM]  # reject profiles narrower than MIN_FWHM
        df_comp_obs_with_estimates = df
        # Make lists of images with comps, and of images with comps&targets:
        self.images_with_eligible_comps = df_comp_obs_with_estimates['FITSfile'].drop_duplicates()
        images_with_targets = \
            (self.df_all_curated_obs[self.df_all_curated_obs['StarType'].str.lower() ==
                                     'target'])['FITSfile'].drop_duplicates()  # remove std FOVs etc
        self.images_with_targets_and_comps = [im for im in images_with_targets
                                              if im in self.images_with_eligible_comps.values]
        # Get best magnitude estimates for all eligible comps (one skymodel per filter):
        df_list = []
        for filter_name, skymodel in self.skymodels.items():
            comp_obs_to_include = (df_comp_obs_with_estimates['Filter'] == filter_name) & \
                                  (df_comp_obs_with_estimates['FITSfile']
                                   .isin(self.images_with_eligible_comps))
            df_predict_input = df_comp_obs_with_estimates[comp_obs_to_include]
            df_estimates_this_skymodel = \
                df_predict_input[['Serial', 'ModelStarID', 'FITSfile', 'StarID', 'Chart',
                                  'Xcentroid', 'Ycentroid', 'InstMag', 'InstMagSigma', 'StarType',
                                  'CatMag', 'CatMagError', 'Exposure',
                                  'JD_mid', 'Filter', 'Airmass', 'CI', 'SkyBias', 'LogADU',
                                  'Vignette', 'JD_fract', 'JD_fract2']]
            # column 'EstimatedMag' does NOT include image/cirrus effect!
            df_estimates_this_skymodel.loc[:, 'EstimatedMag'] = \
                skymodel._predict_fixed_only(df_predict_input)
            df_list.append(df_estimates_this_skymodel)  # list.append() performs in-place
        # Collect and return the dataframe:
        df_comp_mags = pd.concat(df_list, ignore_index=True)
        df_comp_mags['UseInEnsemble'] = True  # default, may be reset later
        df_comp_mags.index = df_comp_mags['Serial']
        return df_comp_mags
    def _compute_cirrus_effect(self, df_comp_mags):
        """
        Generates per-image cirrus effect with more careful inclusion/exclusion criteria than
        were used in the MixedModelFit, esp. in rejecting outlier comp observations.
        As of 5/26/2017: now includes images without targets, rendering resulting df
        a SUPERSET of R::df_cirrus_effect.
        Side effect: sets UseInEnsemble=False in df_comp_mags for any comp rejected here.
        :param: df_comp_mags: comprehensive comp-magnitude data [pandas DataFrame]. As of
            5/2017 includes all eligible comps from images with or without targets.
        :return: data for best per-image cirrus-effects, in magnitudes [pandas DataFrame],
            indexed by image name, one row per image.
        """
        df_row_list = []
        for image in self.images_with_eligible_comps:
            df_estimates_comps_this_image = \
                df_comp_mags[df_comp_mags['FITSfile'] == image].copy()
            # Per-comp cirrus effect = (predicted mag) - (catalog mag):
            cirrus_effect_from_comps = \
                df_estimates_comps_this_image['EstimatedMag'] - \
                df_estimates_comps_this_image['CatMag']
            # Per-comp variance = catalog error^2 + instrument error^2:
            sigma2 = df_estimates_comps_this_image['CatMagError']**2 + \
                df_estimates_comps_this_image['InstMagSigma']**2
            least_allowed_sigma = 0.01
            # raw_weights ~ 1/s^2 where s cannot be less than least_allowed_sigma:
            raw_weights = pd.Series([1.0 / max(s2, least_allowed_sigma**2) for s2 in sigma2],
                                    index=sigma2.index)
            normalized_weights = raw_weights / sum(raw_weights)
            # Compute cirrus effect of mean value & sigma of mean (not sigma of indiv comp values):
            cirrus_effect_this_image, _, cirrus_sigma_this_image = \
                weighted_mean(cirrus_effect_from_comps, normalized_weights)
            # A single comp gives no spread; fall back to its catalog error:
            if len(df_estimates_comps_this_image) == 1:
                cirrus_sigma_this_image = df_estimates_comps_this_image['CatMagError'].iloc[0]
            comp_ids_used = df_estimates_comps_this_image['StarID']  # starting point = use all
            num_comps_used = len(df_estimates_comps_this_image)  # "
            num_comps_removed = 0  # "
            # Reject this image's worst comp stars and recalculate, in the case
            # (we start with at least 4 comp stars) AND (criterion1 >= 16 OR criterion2 >= 20).
            resid2 = (cirrus_effect_from_comps - cirrus_effect_this_image)**2  # pd.Series
            criterion1 = 0  # how many times worse is the worst comp vs avg of other comps
            criterion2 = 0  # square of worst comp's effective t-value, relative to CatMagError
            if len(df_estimates_comps_this_image) >= 4:
                x = normalized_weights * resid2  # pd.Series; will be >= 0
                c1 = x / ((sum(x) - x) / (len(x) - 1))  # pd.Series
                criterion1 = max(c1)
                c2 = raw_weights * resid2  # a pd.Series; will be >= 0
                criterion2 = max(c2)
                if (criterion1 >= 16) or (criterion2 >= 20):
                    # score > 1 for each row (comp) that may be removed:
                    score = pd.Series([max(c_1/16.0, c_2/20.0) for (c_1, c_2) in zip(c1, c2)],
                                      index=c1.index)
                    # Never remove more than 1/4 of the image's comps:
                    max_to_remove = floor(len(score) / 4)
                    to_remove = (score >= 1) & \
                                (score.rank(ascending=True, method='first') >
                                 (len(score) - max_to_remove))
                    # Now, set weights to zero for the worst comps, recalc cirrus effect & sigmas:
                    raw_weights[to_remove] = 0.0
                    normalized_weights = raw_weights / sum(raw_weights)
                    cirrus_effect_this_image, _, cirrus_sigma_this_image = \
                        weighted_mean(cirrus_effect_from_comps, normalized_weights)
                    n_nonzero_weights = sum([w != 0 for w in normalized_weights])
                    # If exactly one comp survives, its catalog error stands in for the sigma:
                    if n_nonzero_weights == 1:
                        cirrus_sigma_this_image = \
                            sum([sigma for (nwt, sigma)
                                 in zip(normalized_weights,
                                        df_estimates_comps_this_image['CatMagError'])
                                 if nwt != 0])
                    comp_ids_used = (df_estimates_comps_this_image['StarID'])[~ to_remove]
                    num_comps_used = len(comp_ids_used)
                    num_comps_removed = len(df_estimates_comps_this_image) - num_comps_used
                    # Record these omitted comp stars in the master comp-star dataframe.
                    removed_serials = (df_estimates_comps_this_image['Serial'])[to_remove]
                    df_comp_mags.loc[(df_comp_mags['Serial'].isin(removed_serials)),
                                     'UseInEnsemble'] = False
            # Insert results into this image's row in df_cirrus_effect:
            df_row_this_image = {'Image': image,
                                 'CirrusEffect': cirrus_effect_this_image,
                                 'CirrusSigma': cirrus_sigma_this_image,
                                 'Criterion1': criterion1,
                                 'Criterion2': criterion2,
                                 'NumCompsUsed': num_comps_used,
                                 'CompIDsUsed': ','.join(comp_ids_used),
                                 'NumCompsRemoved': num_comps_removed}
            df_row_list.append(df_row_this_image)
        df_cirrus_effect = pd.DataFrame(df_row_list)
        df_cirrus_effect.index = df_cirrus_effect['Image']
        return df_cirrus_effect
    def _compute_transformed_mags(self):
        """
        Return best mag estimates for all target and check stars, all observations, all filters.
        Pipeline, in order: per-filter fixed-effects prediction, per-image cirrus correction,
        then color-index (transform) correction using imputed CI values.
        :return: transformed magnitudes without error estimates [pandas DataFrame]
        """
        # Construct df_input_checks_targets:
        df = self.df_all_eligible_obs.copy()
        df = df[[(st in ['Check', 'Target']) for st in df['StarType']]]
        df = df[df['MaxADU_Ur'] <= self.saturation_adu]
        df = df[df['InstMagSigma'] <= self.max_inst_mag_sigma]
        df['CI'] = 0.0  # because they are presumed unknown; populated later
        df['CatMagSaved'] = df['CatMag'].copy()
        df['CatMag'] = 0.0  # estimated below by imputation from predicted magnitudes
        df_input_checks_targets = df
        # Make df_estimates_checks_targets (which will account for Airmass(extinction),
        # but not (yet) for Color Index (transforms) which will be handled below:
        df_filter_list = []
        for this_filter, skymodel in self.skymodels.items():
            rows_to_select = (df_input_checks_targets['Filter'] == this_filter) & \
                             (df_input_checks_targets['FITSfile']\
                              .isin(self.images_with_targets_and_comps))
            df_input_this_skymodel = (df_input_checks_targets.copy())[rows_to_select]
            predict_output = skymodel._predict_fixed_only(df_input_this_skymodel)
            df_estimates_this_filter = df_input_this_skymodel.copy()
            df_estimates_this_filter['PredictedMag'] = predict_output
            df_filter_list.append(df_estimates_this_filter)
        df_estimates_checks_targets = pd.concat(df_filter_list, ignore_index=True)
        df_estimates_checks_targets.index = df_estimates_checks_targets['Serial']  # to ensure
        columns_post_predict = ["Serial", "ModelStarID", "FITSfile", "StarID", "Chart",
                                "Xcentroid", "Ycentroid", "InstMag", "InstMagSigma", "StarType",
                                "CatMag", "CatMagSaved", "CatMagError", "Exposure", "JD_mid",
                                "Filter", "Airmass", "CI", "SkyBias", "Vignette",
                                "JD_fract", "JD_fract2", "LogADU", "PredictedMag"]
        df_estimates_checks_targets = df_estimates_checks_targets[columns_post_predict]
        # Restore the real catalog mags (bogus zeros were needed only for prediction):
        df_estimates_checks_targets['CatMag'] = df_estimates_checks_targets['CatMagSaved']
        df_estimates_checks_targets = df_estimates_checks_targets.drop(['CatMagSaved'], axis=1)
        df_estimates_checks_targets['UseInEnsemble'] = None
        # CIRRUS CORRECTION: Apply per-image cirrus-effect to checks and targets (for all filters):
        # df_predictions_checks_targets = pd.merge(left=df_estimates_checks_targets,
        #                                          right=self.df_cirrus_effect,
        #                                          how='left', left_on='FITSfile', right_on='Image')
        # Next line accommodates pandas package's breaking change in versions 0.23-0.24:
        df_predictions_checks_targets = pd.merge(left=df_estimates_checks_targets,  # ok per pandas 0.25.
                                                 right=self.df_cirrus_effect,
                                                 how='left', left_on='FITSfile', right_index=True)
        # df_predictions_checks_targets.sort_values(by=['FOV', 'ModelStarID', 'Serial'],
        #                                           inplace=True)
        df_predictions_checks_targets.index = df_predictions_checks_targets['Serial']
        df_predictions_checks_targets['UntransformedMag'] = \
            df_predictions_checks_targets['PredictedMag'] - \
            df_predictions_checks_targets['CirrusEffect']
        # COLOR CORRECTION: interpolate Color Index values, then apply them (transform):
        transforms = {k: v.transform for (k, v) in self.skymodels.items()}  # a dict of transforms
        df_predictions_checks_targets = _impute_target_ci(df_predictions_checks_targets,
                                                          ci_filters=['V', 'I'],
                                                          transforms=transforms)
        df_transforms = pd.DataFrame([transforms], index=['Transform']).transpose()  # lookup table
        df = pd.merge(df_predictions_checks_targets, df_transforms,
                      how='left', left_on='Filter', right_index=True)
        df['TransformedMag'] = df['UntransformedMag'] - df['Transform'] * df['CI']
        # Drop rows where the transformed mag could not be computed (e.g., missing CI):
        df_transformed_without_errors = df.loc[~np.isnan(df['TransformedMag']), :]
        return df_transformed_without_errors
    def _compute_all_errors(self, df_transformed_without_errors):
        """
        Compute 3 error contributors and total sigma for each target and check star in each image.
           model_sigma: from mixed-model regression (same for all observations in this filter).
           cirrus_sigma: from variance in ensemble comp stars (same for all obs in this image).
           inst_mag_sigma: from shot noise & background noise in specific obs (unique to each obs).
        :param: df_transformed_without_errors: output of _compute_transformed_mags() [pandas DataFrame]
        :return: df_transformed, including errors [pandas DataFrame]
        """
        df_transformed = df_transformed_without_errors.copy()
        # Make new empty columns in df_transformed, to be populated later:
        # (note: column InstMagSigma already exists from photometry)
        df_transformed['ModelSigma'] = np.float64(np.NaN)
        df_transformed['CirrusSigma'] = np.float64(np.NaN)
        df_transformed['TotalSigma'] = np.float64(np.NaN)
        for this_filter, skymodel in self.skymodels.items():
            images_this_filter = (self.df_comp_mags[self.df_comp_mags['Filter'] ==
                                                    this_filter])['FITSfile'].drop_duplicates()
            for image in images_this_filter:
                # Model sigma shrinks with the number of ensemble comps actually used:
                n = max(1, len(self.df_comp_mags[(self.df_comp_mags['FITSfile'] == image) &
                                                 (self.df_comp_mags['UseInEnsemble'] == True)]))
                model_sigma = skymodel.sigma / sqrt(n)
                # NOTE(review): float(single-row Series) is deprecated in recent pandas
                # (use .iloc[0] / .item()) -- confirm the pandas version in use.
                cirrus_sigma = \
                    float(self.df_cirrus_effect.loc[self.df_cirrus_effect['Image'] == image,
                                                    'CirrusSigma'])
                # df_targets_checks = (df_transformed[df_transformed['FITSfile'] == image])\
                #     [['Serial', 'InstMagSigma']]  # USE NEXT LINE...(uses .loc[]
                df_targets_checks = df_transformed.loc[df_transformed['FITSfile'] == image,
                                                       ['Serial', 'InstMagSigma']]
                for serial in df_targets_checks['Serial']:
                    inst_mag_sigma = \
                        float(df_targets_checks.loc[df_targets_checks['Serial'] == serial,
                                                    'InstMagSigma'])
                    # Add up total error in quadrature:
                    total_sigma = sqrt(model_sigma**2 + cirrus_sigma**2 + inst_mag_sigma**2)
                    # Write new data into correct cells in df_transformed:
                    this_row = (df_transformed['Serial'] == serial)
                    df_transformed.loc[this_row, 'InstMagSigma'] = inst_mag_sigma
                    df_transformed.loc[this_row, 'ModelSigma'] = model_sigma
                    df_transformed.loc[this_row, 'CirrusSigma'] = cirrus_sigma
                    df_transformed.loc[this_row, 'TotalSigma'] = total_sigma
        # Finish forming df_transformed (return object):
        df_transformed = df_transformed.drop(['PredictedMag', 'Criterion1',
                                              'Criterion2', 'UntransformedMag', 'Transform'],
                                             axis=1)  # remove columns as a bit of cleanup
        df_columns_to_join = self.df_all_curated_obs[['Serial', 'FOV', 'MaxADU_Ur', 'FWHM',
                                                      'SkyADU', 'SkySigma']]  # for markup report
        # df_transformed = pd.merge(left=df_transformed, right=df_columns_to_join, on='Serial')
        # Next line accommodates pandas package's breaking merge change of their version 0.21:
        df_transformed = pd.merge(left=df_transformed, right=df_columns_to_join,
                                  left_index=True, right_index=True,
                                  suffixes=('', 'dupe'))  # suffixes retain exactly 1 column 'Serial'.
        df_transformed.index = df_transformed['Serial']  # explicitly ensure 2018 behavior is retained.
        df_transformed = df_transformed.sort_values(by=['ModelStarID', 'JD_mid'])
        return df_transformed
def _write_summary_to_console(self):
counts_by_star_type = self.df_transformed[['ModelStarID', 'StarType']]\
.groupby(['StarType']).count()['ModelStarID']
n_target_stars = len(self.df_transformed.loc[self.df_transformed['StarType'] == 'Target',
'ModelStarID'].unique())
print('\nThis PredictionSet yields',
str(counts_by_star_type['Target']), 'raw Target obs for',
str(n_target_stars), 'targets and',
str(counts_by_star_type['Check']), 'Check observations.\n')
print('Now you are ready to:\n',
' 1. (Optional) run '
'df_master=pr.get_df_master(an_rel_directory=\'' + self.an_rel_directory +
'\').sort_values([\'ModelStarID\', \'Filter\'])\n',
' 2. (If stares) run ps.stare_comps(fov=\'XX Xxx\', star_id=\'\', '
'this_filter=\'\')\n',
' 3. run ps.markup_report(), then combine/reject target obs in'
' report_map.txt\n',
' 4. run ps.aavso_report() and submit it to AAVSO.\n')
def stare_comps(self, fov, star_id, this_filter):
lines = get_stare_comps(self.df_transformed, fov, star_id, this_filter)
print('\n'.join(lines))
    def stare_plot(self, star_id):
        """Plot predicted magnitudes vs JD for one target star, all filters on one axis
        (filter indicated by point color). Prints a message and returns if the star
        has no data in this PredictionSet.
        :param star_id: target star to plot, matched against column 'StarID' [string]
        :return: None (plot shown via plt.show()).
        """
        # TODO: add this_filter parm and facility, default this-filter=None to mean "all filters".
        # Setup data:
        df = (self.df_transformed.copy())
        df = df.loc[df['StarID'] == star_id, :]
        if len(df) == 0:
            print('This PredictionSet has no data for star_id \'' + star_id + '\'.')
            return
        floor_x = floor(df['JD_mid'].min())  # JDs plotted relative to this floor value
        x = df['JD_mid'] - floor_x
        y = df['TransformedMag']
        # NOTE(review): filters other than V/R/I/B would raise KeyError here -- confirm intended.
        color_dict = {'V': 'green', 'R': 'orange', 'I': 'red', 'B': 'blue'}
        colors = [color_dict[f] for f in df['Filter']]
        # Construct & draw stare plot:
        fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(12, 8))  # (width, height) in "inches"
        ax.grid(True, color='lightgray', zorder=-1000)
        ax.set_title(star_id + ' ' + self.an_rel_directory,
                     color='darkblue', fontsize=20, weight='bold')
        ax.set_xlabel('JD(mid) - ' + str(floor_x))
        ax.set_ylabel('Best Mag')
        ax.scatter(x=x, y=y, color=colors, alpha=0.8, zorder=+1000)
        plt.gca().invert_yaxis()  # per custom of plotting magnitudes brighter=upward
        fig.canvas.set_window_title(star_id)
        plt.show()
def markup_report(self):
    """
    Makes markup report from current PredictionSet object.
    Builds one fixed-width text row per Target observation, merged (on FITS file)
    with that image's check-star data; the four sigma columns are rendered as
    integer millimagnitudes. Writes the table to 'markup_report.txt' in this
    Astronight's Photometry subdirectory.
    :return: text string ready for printing (e.g., by copy/paste into Microsoft Word).
    """
    print('\nWriting \'markup_report.txt\'...', end='', flush=True)
    # First, make check-star dataframe (one row per image that contains a Check star):
    df = self.df_transformed.copy()
    df_check_stars = df.loc[df['StarType'] == 'Check',
                            ['FITSfile', 'StarID', 'TransformedMag', 'CatMag']]
    df_check_stars = df_check_stars.rename(columns={
        'StarID': 'Check', 'TransformedMag': 'CkMag', 'CatMag': 'CkCat'})
    # Make df for markup report (Target rows only; just the columns to be printed):
    df = self.df_transformed.copy()
    df = df[df['StarType'] == 'Target']
    df = df[['Serial', 'FITSfile', 'StarID', 'Filter', 'Exposure', 'TransformedMag',
             'InstMagSigma', 'ModelSigma', 'CirrusSigma', 'TotalSigma', 'MaxADU_Ur',
             'FWHM', 'JD_num', 'FOV']]
    df = df.rename(columns={'StarID': 'Target', 'Exposure': 'Exp', 'TransformedMag': 'Mag',
                            'MaxADU_Ur': 'MaxADU'})
    # Left merge keeps Target rows even when their image has no check star
    # (the resulting NaN cells are rendered as nan_text below):
    df = pd.merge(df, df_check_stars, how='left', on='FITSfile')
    df.index = df['Serial']
    # df = df.sort_values(by=['Target', 'FITSfile', 'Filter', 'Exp'])
    # df = df.sort_values(by=['Target', 'FOV', 'JD_num'])
    # TODO: verify same order as for aavso_report.
    df = df.sort_values(by=['Target', 'JD_num'])  # same as for aavso_report (verify)

    # A nested helper function: render an iterable as right-justified,
    # equal-width strings (fixed-point when decimal_pts is given):
    def format_column(iterable, decimal_pts=None, min_width=0, left_pad=1):
        if decimal_pts is not None:
            this_list = [('{0:.' + str(decimal_pts) + 'f}').format(x) for x in iterable]
        else:
            this_list = [str(x) for x in iterable]
        # Column width = widest entry (but at least min_width), plus left padding:
        n_chars = max(min_width, max([len(x) for x in this_list])) + left_pad
        return pd.Series([x.rjust(n_chars) for x in this_list])

    # Make dataframe df_text of text columns ~ ready to print (column FOV omitted from text):
    df_text = pd.DataFrame()
    df_text['Serial'] = format_column(df['Serial'], min_width=4)
    df_text['Target'] = format_column(df['Target'], min_width=6)
    df_text['FITSfile'] = format_column(df['FITSfile'], min_width=8)
    df_text['FITSfile'] = [f.split('.fts')[0]
                           for f in df_text['FITSfile']]  # remove '.fts'
    df_text['Filt'] = format_column(df['Filter'], min_width=4)
    df_text['Exp'] = format_column(df['Exp'], decimal_pts=1, min_width=5)
    df_text['Exp'] = [s[:-2] + ' ' if s.endswith('.0') else s
                      for s in df_text['Exp']]  # remove any trailing decimal point and zero
    df_text['Mag'] = format_column(df['Mag'], decimal_pts=3)
    df_text['MaxADU'] = format_column(round(df['MaxADU'].astype(int)), min_width=6)
    df_text['FWHM'] = format_column(df['FWHM'], decimal_pts=2, min_width=4)
    df_text['JD_fract'] = format_column(df['JD_num'], decimal_pts=4, min_width=6)
    df_text['Check'] = format_column(df['Check'], min_width=3)
    df_text['CkMag'] = format_column(df['CkMag'], decimal_pts=3, min_width=5)
    df_text['CkCat'] = format_column(df['CkCat'], decimal_pts=3, min_width=5)
    # Sigma components reported as integer millimagnitudes:
    df_text['Inst'] = format_column(round(df['InstMagSigma']*1000.0).astype(int), min_width=3)
    df_text['Model'] = format_column(round(df['ModelSigma']*1000.0).astype(int), min_width=3)
    df_text['Cirr'] = format_column(round(df['CirrusSigma']*1000.0).astype(int), min_width=3)
    df_text['Sigma'] = format_column(round(df['TotalSigma']*1000.0).astype(int), min_width=3)
    # Make dict of just-sufficient column widths:
    column_list = ['Serial', 'Target', 'FITSfile', 'Filt', 'Exp', 'Mag', 'MaxADU',
                   'FWHM', 'JD_fract', 'Check', 'CkMag', 'CkCat',
                   'Inst', 'Model', 'Cirr', 'Sigma']
    dict_widths = {col: max(len(col), max([len(ss) for ss in df_text[col]]))
                   for col in column_list}
    # Make text lines of report:
    lines = ['MARKUP REPORT for ' + self.an_rel_directory +
             ' generated by photrix ' + THIS_SOFTWARE_VERSION +
             ' at ' + '{:%Y-%m-%d %H:%M UTC}'.format(datetime.now(timezone.utc)) +
             ' ' + str(len(df)) + ' raw observations.']
    left_spacer = 2 * ' '
    nan_text = ' - '  # printed in place of any 'nan' text cell (e.g., missing check star).
    header_line = left_spacer + ' '.join([col.rjust(dict_widths[col]) for col in column_list])
    line_length = len(header_line)
    lines.extend(['', line_length * '_', '', header_line, ''])
    for i in range(len(df)):
        this_row = pd.Series([s if s.strip().lower() != 'nan' else nan_text
                              for s in df_text.iloc[i]], index=column_list)
        line = left_spacer + ' '.join(this_row[col].rjust(dict_widths[col])
                                      for col in column_list)
        lines.append(line)
        # Add blank line between targets:
        if i < len(df) - 1:
            this_target = df_text.iloc[i]['Target']
            next_target = df_text.iloc[i+1]['Target']
            if next_target != this_target:
                lines.append('')
    lines.extend(['', line_length * '_'])
    lines = [line + '\n' for line in lines]
    output_fullpath = os.path.join(self.an_top_directory, self.an_rel_directory,
                                   'Photometry', 'markup_report.txt')
    with open(output_fullpath, 'w') as this_file:
        this_file.write(''.join(lines))
    print('Done.\n' + 'Written to file \'' + output_fullpath + '\'.', flush=True)
def aavso_report(self, write_file=True, return_df=False):
    """
    Construct AAVSO report (Enhanced Format) from this PredictionSet object.
    Writes this report as a text file in current (PredictionSet's) directory_path.
    :param write_file: True to write text file to current dir, False to not write.
    :param return_df: True to return a DataFrame of results, False to return None.
    :return: table of results if requested [DataFrame], else None.
    """
    # Apply user directives from report_map.txt (omissions, combines, etc.) first:
    df_report = self._apply_report_map_txt()
    df_report = df_report.sort_values(by=['TargetName', 'JD'])  # same as for markup_report (verify)
    # Construct text header lines (AAVSO Extended Format directives):
    header = ["#TYPE=Extended",
              "#OBSCODE=DERA",  # DERA = Eric Dose's observer code @ AAVSO
              "#SOFTWARE=custom python scripts available at"
              " https://github.com/edose/photrix, tag/version=" +
              THIS_SOFTWARE_VERSION,
              "#DELIM=" + AAVSO_REPORT_DELIMITER,
              "#DATE=JD",
              "#OBSTYPE=CCD",
              "#This report of " + str(len(df_report)) + " observations was generated " +
              '{:%Y-%m-%d %H:%M:%S UTC}'.format(datetime.now(timezone.utc)) +
              " from raw data in directory_path " + self.an_rel_directory + ".",
              "#Eric Dose, New Mexico Mira Project, ABQ, NM",
              "#",
              "#NAME,DATE,MAG,MERR,FILT,TRANS,MTYPE,CNAME,CMAG,KNAME,KMAG,AMASS," +
              "GROUP,CHART,NOTES"
              ]
    # Format observation report text fields, building df_formatted by columns left to right:
    df_formatted = pd.DataFrame()  # empty
    if len(df_report) == 0:
        obs_lines = ['\n\n\n >>>>>>>>> NO OBSERVATIONS TO PRINT\n']
    else:
        df_formatted['TargetName'] = df_report['TargetName'].str.strip().str.upper()
        df_formatted['JD'] = df_report['JD'].astype(np.float64).map('{:.5f}'.format)
        df_formatted['Mag'] = df_report['Mag'].map('{:.3f}'.format)
        df_formatted['MagErr'] = df_report['TotalSigma'].map('{:.3f}'.format)
        df_formatted['Filter'] = df_report['Filter'].str.strip().str.upper()
        df_formatted['Transformed'] = 'YES'  # we always transform our reported data
        df_formatted['MType'] = 'STD'  # we use std comp stars, not "differential mode"
        df_formatted['CompName'] = df_report['CompName'].str.strip().str.upper()
        # Missing comp/check magnitudes (NaN) become the AAVSO 'na' token:
        df_formatted['CompMag'] = ['{:.3f}'.format(mag) if not np.isnan(mag) else 'na'
                                   for mag in df_report['CompMag']]
        df_formatted['CheckName'] = [name.strip().upper() if isinstance(name, str) else 'na'
                                     for name in df_report['CheckName']]
        df_formatted['CheckMag'] = ['{:.3f}'.format(mag) if not np.isnan(mag) else 'na'
                                    for mag in df_report['CheckMag']]
        df_formatted['Airmass'] = df_report['Airmass'].map('{:.4f}'.format)
        df_formatted['Group'] = 'na'  # we don't use the observation grouping facility
        df_formatted['Chart'] = df_report['Chart'].str.upper()
        df_formatted['Notes'] = [note.strip() if note != '' else 'na'
                                 for note in df_report['Notes']]
        # Make all observation text lines (one delimited line per observation):
        obs_column_list = ['TargetName', 'JD', 'Mag', 'MagErr', 'Filter', 'Transformed',
                           'MType', 'CompName', 'CompMag', 'CheckName', 'CheckMag',
                           'Airmass', 'Group', 'Chart', 'Notes']
        obs_lines = df_formatted.loc[:, obs_column_list].\
            apply(lambda x: AAVSO_REPORT_DELIMITER.join(x), axis=1).tolist()
    # Write file if requested:
    if write_file:
        lines_to_write = [line + '\n' for line in (header + obs_lines)]
        filename = 'AAVSOreport-' + self.an_rel_directory + '.txt'
        fullpath = os.path.join(self.an_top_directory, self.an_rel_directory,
                                'Photometry', filename)
        with open(fullpath, 'w') as f:
            f.writelines(lines_to_write)
        print('AAVSO report for AN ' + self.an_rel_directory + ' written to: ' +
              fullpath + '\n = ' + str(len(df_formatted)) + ' reportable observations.')
    # Return DataFrame if requested (the pre-formatting data, not the text fields):
    if return_df:
        return df_report
    return None
def _apply_report_map_txt(self):
    """
    [Called only by other PredictionSet method.]
    Reads 'report_map.txt' (writing a stub first if absent) and applies its
    user directives to this PredictionSet's Target observations:
        #TARGET  : omit all observations of one target (case-insensitive name);
        #SERIAL  : omit observations by serial number;
        #JD      : omit observations within a fractional-JD window;
        #COMBINE : merge several observations of one target into one mean observation.
    Also attaches check-star and comp-star names/magnitudes to each observation.
    :return: df_report: all observation data to construct AAVSO photometry report [DataFrame].
    """
    # Start from the transformed-observation table; keep only report-relevant columns:
    df_report = self.df_transformed.copy()
    df_report = df_report[['Serial', 'StarType', 'StarID', 'JD_mid',
                           'TransformedMag', 'TotalSigma', 'InstMagSigma',
                           'ModelSigma', 'CirrusSigma', 'Filter']]
    df_report.rename(columns={'StarID': 'TargetName', 'JD_mid': 'JD', 'TransformedMag': 'Mag'},
                     inplace=True)
    # Placeholder comp-star columns, filled in below from self.df_comp_mags:
    df_report['CompName'] = ''
    df_report['CompMag'] = np.float64(np.nan)
    df_report['NComps'] = 0
    # df_report['CheckName'] = ''
    # df_report['CheckMag'] = np.float64(np.nan)
    df_report['Airmass'] = self.df_transformed['Airmass']  # to get column order
    df_report['Chart'] = self.df_transformed['Chart']  # "
    df_report['Notes'] = ['obs#' + str(s) for s in df_report['Serial']]
    df_report = df_report[df_report['StarType'] == 'Target']
    df_report.drop('StarType', axis=1, inplace=True)  # remove column no longer needed
    # Get report_map.txt (write a stub first if the user has not yet created one):
    fullpath = os.path.join(self.an_top_directory, self.an_rel_directory, 'Photometry',
                            'report_map.txt')
    if not os.path.exists(fullpath):
        _write_report_map_stub(self.an_top_directory, self.an_rel_directory)
    with open(fullpath) as f:
        lines = f.readlines()
    lines = [line for line in lines if line is not None]  # remove empty list elements
    lines = [line.split(";")[0] for line in lines]  # remove all comments
    lines = [line.strip() for line in lines]  # remove lead/trail blanks
    lines = [line for line in lines if line != '']  # remove empty lines
    lines = [line for line in lines if line.startswith('#')]  # keep only directive lines
    # Apply #TARGET omissions:
    omit_lines = [line for line in lines if line.startswith('#TARGET')]
    for this_line in omit_lines:
        parms, warning_lines = _get_line_parms(this_line, '#TARGET', False, 1, 1)
        if warning_lines is not None:
            print('>>>>> Can\'t parse line:', warning_lines)
        else:
            target_to_omit = parms[0]
            if target_to_omit is not None:
                # Case-insensitive match on target name:
                rows_to_keep = [t.lower() != target_to_omit.lower()
                                for t in df_report['TargetName']]
                df_report = df_report[rows_to_keep]
                print('Target removed:', target_to_omit, '.')
    # Apply #SERIAL omissions:
    rows_before = len(df_report)
    omit_lines = [line for line in lines if line.startswith('#SERIAL')]
    for this_line in omit_lines:
        parms, warning_lines = _get_line_parms(this_line, '#SERIAL', True, 1, None)
        if warning_lines is not None:
            print('>>>>> Can\'t parse line:', warning_lines)
        else:
            if parms is not None:
                serials_to_omit = [int(p) for p in parms]
                rows_to_keep = ~ df_report['Serial'].isin(serials_to_omit)
                df_report = df_report[rows_to_keep]
    rows_after = len(df_report)
    print(str(rows_before - rows_after), 'serials removed altogether.')
    # Apply #JD directives (omit observations whose fractional JD lies in [min, max]):
    omit_lines = [line for line in lines if line.startswith('#JD')]
    for this_line in omit_lines:
        raw_parms, warning_lines = _get_line_parms(this_line, '#JD', True, 2, 2)
        if warning_lines is not None:
            print('>>>>> Can\'t parse line:', warning_lines)
        else:
            min_jd_fract, max_jd_fract = np.float64(raw_parms)
            if (min_jd_fract >= 0.0) and (max_jd_fract < 2.0):
                # Fractions are relative to the floor of the night's earliest JD:
                floor_jd = floor(min(df_report['JD']))
                rows_to_keep = ((df_report['JD'] < floor_jd + min_jd_fract)|\
                                (df_report['JD'] > floor_jd + max_jd_fract))
                rows_before_omit = len(df_report)
                df_report = df_report[rows_to_keep]
                rows_after_omit = len(df_report)
                print('Omitted fractional JD range:', '{:.4f}'.format(min_jd_fract), 'to',
                      '{:.4f}'.format(max_jd_fract), '=',
                      str(rows_before_omit - rows_after_omit), 'observations.')
    # Add check-star names and mags as new columns (look up from self.df_transformed):
    df_checks = ((self.df_transformed.copy())[self.df_transformed['StarType'] == 'Check'])\
        [['JD_mid', 'StarID', 'TransformedMag']]
    df_checks.rename(columns={'JD_mid': 'JD', 'StarID': 'CheckName',
                              'TransformedMag': 'CheckMag'}, inplace=True)
    # Left merge on exact JD; targets in images lacking a check star get missing values:
    df_report = pd.merge(left=df_report, right=df_checks, how='left', on='JD')
    df_report.index = df_report['Serial']
    # Apply comp-star names and mags (grouped by image, i.e., by JD_mid):
    df_comps = self.df_comp_mags[['Serial', 'JD_mid', 'StarID', 'Filter']].copy()
    df_comps['ObsMag'] = self.df_comp_mags['EstimatedMag']
    report_jds = df_report['JD'].drop_duplicates().sort_values()
    for this_jd in report_jds:
        df_comps_this_jd = df_comps[df_comps['JD_mid'] == this_jd]
        n_comps_this_jd = len(df_comps_this_jd)
        rows_to_update = (df_report['JD'] == this_jd)
        df_report.loc[rows_to_update, 'NComps'] = n_comps_this_jd
        if n_comps_this_jd <= 0:
            # Should not happen for a valid model; abort report construction.
            print('>>>>> No comp stars in model for JD = ', this_jd)
            return None
        if n_comps_this_jd > 1:
            # 'Ensemble' photometry case:
            df_report.loc[rows_to_update, 'CompName'] = 'ENSEMBLE'  # 'CompMag' remains NA.
            df_report.loc[rows_to_update, 'Notes'] += ' / ' + str(n_comps_this_jd) + ' comps'
        else:
            # Single comp-star case:
            df_report.loc[rows_to_update, 'CompName'] = (df_comps_this_jd['StarID']).iloc[0]
            df_report.loc[rows_to_update, 'CompMag'] = (df_comps_this_jd['ObsMag']).iloc[0]

    # Nested function for convenience: True iff every element equals the first.
    def all_same(list_or_series):
        this_list = list(list_or_series)
        return this_list.count(this_list[0]) == len(this_list)
    # Apply #COMBINE directives last (& verify check and comp stars are in fact associated):
    combine_lines = [line for line in lines if line.startswith('#COMBINE')]
    for this_line in combine_lines:
        raw_parms, warning_lines = _get_line_parms(this_line, '#COMBINE', True, 1, None)
        if warning_lines is not None:
            print('>>>>> Can\'t parse line:', warning_lines)
            continue
        else:
            serials_to_combine = [int(rp) for rp in raw_parms if int(rp) >= 1]
        df_combine = df_report[df_report['Serial'].isin(serials_to_combine)]
        # Verify that user-selected combine obs are in fact eligible to be combined:
        if len(df_combine) <= 1:
            print('>>>>> Fewer than 2 obs to combine for line: \'' + this_line + '\'')
            continue  # skip this #COMBINE line.
        if not all_same(df_combine['TargetName']):
            print('>>>>> Non-uniform target names for line: \'' + this_line +
                  '\'...Combine is skipped.')
            continue
        if not all_same(df_combine['Filter']):
            print('>>>>> Non-uniform Filter for line: \'' + this_line +
                  '\'...Combine is skipped.')
            continue
        if not all_same(df_combine['CompName']):
            print('>>>>> Non-uniform Comp Names for line: \'' + this_line +
                  '\'...Combine is skipped.')
            for i in range(len(df_combine)):
                print(df_combine['Serial'].iloc[i],
                      df_combine['NComps'].iloc[i],
                      df_combine['CompName'].iloc[i])
            continue
        # NOTE(review): after the left merge above, missing check names are float NaN,
        #   not None -- confirm this filter actually excludes images without check stars.
        real_check_names = [cn for cn in df_combine['CheckName'] if cn is not None]
        if len(real_check_names) >= 2:
            if not all_same(real_check_names):
                print('>>>>> Non-uniform Check Stars for line: \'' + this_line +
                      '\'...Combine is skipped.')
                continue
        if not all_same(df_combine['Chart']):
            print('>>>>> Non-uniform Chart IDs for line: \'' + this_line +
                  '\'...Combine is skipped.')
            continue
        jd = df_combine['JD'].astype(np.float64)
        if max(jd) - min(jd) > 1.0 / 24.0:  # one hour
            print('>>>>> Range of JD times is too large for line: \'' + this_line +
                  '\'...Combine is skipped.')
            continue
        airmass = df_combine['Airmass'].astype(np.float64)
        if max(airmass) - min(airmass) > 0.400:
            print('>>>>> Range of Airmasses is too large to combine for line: \'' +
                  this_line + '\'...Combine is skipped.')
            continue
        # This #COMBINE line has passed all the tests, now execute it:
        df_new = df_combine.iloc[0:1].copy()  # a 1-row df; will combine rows into this.
        serial_to_replace = df_new['Serial'].iloc[0]  # scalar
        serials_to_delete = df_combine.loc[df_combine['Serial'] != serial_to_replace, 'Serial']
        # TODO: Consider weighted mean for combinations (w/limits on differences in weights?).
        df_new['JD'] = df_combine['JD'].astype(np.float64).mean()
        df_new['Mag'] = df_combine['Mag'].mean()
        n_combine = len(df_combine)
        # Instrument Mag sigmas are independent.
        # inst_mag_sigma = sqrt(df_combine['InstMagSigma'].
        #                       clip_lower(0.001).pow(2).mean()) / sqrt(n_combine)
        inst_mag_sigma = sqrt(df_combine['InstMagSigma'].
                              clip(lower=0.001).pow(2).mean()) / sqrt(n_combine)
        # Model sigma is uniform, but not independent and so not decreased by multiple images.
        model_sigma = df_combine['ModelSigma'].iloc[0]  # uniform across images to combine.
        # Cirrus_sigma ~ independent if no extreme outlier comp stars (which would correlate).
        # cirrus_sigma = sqrt(df_combine['CirrusSigma'].
        #                     clip_lower(0.001).pow(2).mean()) / sqrt(n_combine)
        cirrus_sigma = sqrt(df_combine['CirrusSigma'].
                            clip(lower=0.001).pow(2).mean()) / sqrt(n_combine)
        df_new['InstMagSigma'] = inst_mag_sigma
        df_new['ModelSigma'] = model_sigma
        df_new['CirrusSigma'] = cirrus_sigma
        # Total sigma = quadrature sum of the three (partially) independent components:
        df_new['TotalSigma'] = sqrt(model_sigma**2 + cirrus_sigma**2 + inst_mag_sigma**2)
        df_new['CheckMag'] = df_combine['CheckMag'].mean()
        df_new['NComps'] = df_combine['NComps'].min()
        df_new['Airmass'] = df_combine['Airmass'].mean()
        df_new['Notes'] = str(n_combine) + ' obs >= ' + \
                          str(int(df_new['NComps'].iloc[0])) + ' comps'
        # Overwrite the retained row in place (matched on Serial index), then
        # drop the other combined rows:
        df_report.update(df_new)
        df_report.drop(serials_to_delete, inplace=True)  # drop rows by index
        print('Combination of Serials ' +
              ' '.join(df_combine['Serial'].astype(int).astype(str)) + ': done.')
    return df_report
def delete_uncalibrated(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Remove the 'Uncalibrated' subdirectory (and its entire contents) for the
    given Astronight directory, if that subdirectory exists.
    :param an_top_directory: top directory path [string]
    :param an_rel_directory: Astronight subdirectory, e.g., '20170504' [string]
    :return: None. Side effect: directory tree deleted; 'Done.' printed.
    """
    import shutil
    target_dir = os.path.join(an_top_directory, an_rel_directory, 'Uncalibrated')
    # os.path.isdir() is already False for non-existent paths, so one test suffices:
    if os.path.isdir(target_dir):
        shutil.rmtree(target_dir)
    print('Done.')
END_PROCESSING_HERE____________ = ''
class TransformModel:
    """
    Determines a photometric transform (the fitted coefficient of color index)
    for one filter, by regressing Comp/Check-star observations from one
    Astronight's df_master against their catalog magnitudes.
    One image -> ordinary least squares; two or more images -> mixed model with
    a per-image random ("cirrus") effect via photrix.util.MixedModelFit.
    Results are exposed as attributes: .transform_value, .transform_sigma,
    .sigma, .residuals, .is_valid, etc. (see _build_output()).
    """
    def __init__(self, an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None,
                 filter=None, ci_type=None, fovs_to_include="All",
                 instrument_name='Borea', site_name='DSW',
                 max_cat_mag_error=0.03, max_inst_mag_sigma=0.03, max_ci=+2.5,
                 saturation_adu=None, fit_sky_bias=True,
                 fit_extinction=True, fit_log_adu=True):
        r"""
        Constructs a transform on filter against ci filter description ci_type,
        given a df_master.csv in the given directory_path.
        :param an_top_directory: e.g., 'C:\Astro\Images\Borea Photrix' [string]
        :param an_rel_directory: e.g., '20170504'. The dir 'Photometry' is subdir of this. [string]
        :param filter: name of filter to which this model applies [string, e.g., 'V' or 'R']
        :param ci_type: two color-index filter names separated by minus sign,
            e.g., 'V-I' for V-I index [string]
        :param fovs_to_include: defines which eligible rows (by FOV) will be included:
            Choices:
                "All" will use all eligible rows in df_master;
                "Standards" will use all eligible rows from all Standard FOVs in df_master;
                One FOV name as a string will use that one FOV only;
                A list of strings will use those FOV(s) only.
        :param instrument_name: name of Instrument, e.g., 'Borea' [string; name of Instrument obj]
        :param site_name: name of observing site, e.g., 'DSW' [string; name of Site object]
        :param max_cat_mag_error: maximum catalog error allowed to stars in model [float]
        :param max_inst_mag_sigma: max instrument magnitude error allowed star observations [float]
        :param max_ci: maximum color index allowed to stars in model [float]
        :param saturation_adu: ccd ADUs that constitute saturation [float; None if from Instrument]
        :param fit_sky_bias: True to fit sky bias term [bool]
        :param fit_log_adu: True to fit log(MaxADU_Ur) as CCD nonlinearity measure [bool]
        :param fit_extinction: True to fit extinction terms; else use values from Site obj.
            Used only if image_name==False and more than all images are used [bool]
        """
        self.an_top_directory = an_top_directory
        self.an_rel_directory = an_rel_directory
        self.filter = filter
        # Parse ci_type, e.g., 'V-I' -> ['V', 'I'] (uppercased):
        self.ci_filters = [s.strip().upper() for s in (ci_type.strip().split('-'))]
        if len(self.ci_filters) != 2:
            print(' >>>>> Invalid ci_type \'' + ci_type + '\'')
            self.is_valid = False
            return
        self.fovs_to_include = fovs_to_include
        self.instrument_name = instrument_name
        self.site_name = site_name
        self.max_cat_mag_error = max_cat_mag_error
        self.max_inst_mag_sigma = max_inst_mag_sigma
        self.max_ci = max_ci
        if saturation_adu is not None:
            self.saturation_adu = saturation_adu
        else:
            # Default saturation level comes from the named Instrument's camera data:
            instrument = Instrument(self.instrument_name)
            self.saturation_adu = instrument.camera['saturation_adu']
        self.fit_sky_bias = fit_sky_bias
        self.fit_extinction = fit_extinction
        self.fit_log_adu = fit_log_adu
        self.dep_var_name = 'InstMag_with_offsets'
        # Result attributes, populated by the steps below:
        self.df_model = None
        self.fov_list = None
        self.image_list = None
        self.statsmodels_object = None
        self.mm_fit_object = None  # populated only if >=2 images and mixed model used.
        self.fitted_values = None
        self.param_values = None
        self.residuals = None
        self.sigma = None
        self.image_effect = None
        self.is_valid = False  # default until object constructed.
        # Execute steps:
        self._make_and_curate_model_dataframe(fovs_to_include=self.fovs_to_include)
        self._add_ci_column()  # to df_model, add new column CI (color index) from catalog mags.
        self._prep_and_do_regression()
        self._build_output()

    def _make_and_curate_model_dataframe(self, fovs_to_include):
        """
        Build self.df_model from df_master: apply user's omit.txt, select FOVs
        per fovs_to_include, keep only this filter's Comp/Check stars, then
        curate rows against the quality limits given at construction.
        """
        df, warning_lines = _apply_omit_txt(self.an_top_directory, self.an_rel_directory)
        master_fov_list = df['FOV'].drop_duplicates().tolist()
        # Interpret fovs_to_include, ultimately yielding fov_list:
        if isinstance(fovs_to_include, list):
            fov_list = [fn for fn in master_fov_list if fn in fovs_to_include]
        elif isinstance(fovs_to_include, str):
            if fovs_to_include.lower() == 'all':
                fov_list = master_fov_list.copy()
            elif fovs_to_include.lower() in ['standard', 'standards']:
                fov_list = [fn for fn in master_fov_list
                            if Fov(fn).target_type.lower() == 'standard']
            else:
                # Single FOV name given as a plain string:
                fov_list = [fn for fn in master_fov_list if fn in [fovs_to_include]]
        else:
            print("Couldn't interpret input parm 'fovs_to_include'.")
            self.is_valid = False
            return
        if len(fov_list) <= 0:
            print("Input parm 'fovs_to_include' yielded no fovs for folder '",
                  self.an_rel_directory, "'.")
            self.is_valid = False
            return
        # Retain only the appropriate rows of raw dataframe:
        df = df[df['FOV'].isin(fov_list)]
        df = df[df['Filter'] == self.filter]
        df = df[df['StarType'].isin(['Comp', 'Check'])]
        # Retain only the needed columns of dataframe:
        df = df[['Serial', 'StarID', 'FITSfile', 'InstMagSigma', 'FWHM', 'MaxADU_Ur', 'StarType',
                 'JD_mid', 'Filter', 'Airmass', 'FOV', 'CatMag', 'CatMagError', 'InstMag',
                 'SkyBias', 'LogADU']]
        # Curate rows for various quality measures:
        # NOTE(review): self.max_ci is stored at construction but never applied here
        #   (nor in _add_ci_column) -- confirm whether a CI limit was intended.
        df = df[df['CatMag'].notnull()]
        df = df[df['Airmass'].notnull()]
        df = df[df['InstMagSigma'] <= self.max_inst_mag_sigma]
        df = df[df['MaxADU_Ur'].notnull()]
        df = df[df['MaxADU_Ur'] <= self.saturation_adu]
        df = df[df['CatMagError'].notnull()]
        df = df[df['CatMagError'] <= self.max_cat_mag_error]
        df = df[df['FWHM'] >= MIN_FWHM]
        self.df_model = df

    def _add_ci_column(self):
        """
        To df_model, add column 'CI' (catalog color index = first minus second
        ci filter magnitude, looked up per star in its FOV's AAVSO sequence);
        then drop rows for which no CI could be computed. Also sets
        self.fov_list and self.image_list from the surviving rows.
        """
        df = self.df_model.copy()  # local working copy
        df['CI'] = None  # new blank column
        fov_names = df['FOV'].drop_duplicates()
        for fov_name in fov_names:
            df_fov = (df.copy())[df['FOV'] == fov_name]
            this_fov = Fov(fov_name)
            fov_stars = this_fov.aavso_stars  # a list of AavsoSequenceStar_WithMagError objects
            star_ids = df_fov['StarID']
            for star_id in star_ids:
                # Extract proper AavsoSS_WME object & mags for this star (a dict):
                star_list = [fs for fs in fov_stars if (fs.star_id == star_id and fs.is_valid)]
                if len(star_list) == 1:
                    fov_star = star_list[0]  # object found.
                    # fov_star.mags is tuple (mag, mag_err)
                    absent = (None, None)
                    ci_mag_first = fov_star.mags.get(self.ci_filters[0], absent)[0]
                    ci_mag_second = fov_star.mags.get(self.ci_filters[1], absent)[0]
                else:
                    # Star missing from sequence (or ambiguous/invalid): no CI possible.
                    ci_mag_first, ci_mag_second = (None, None)
                if (ci_mag_first is not None) and (ci_mag_second is not None):
                    this_ci = ci_mag_first - ci_mag_second
                else:
                    this_ci = None
                if this_ci is not None:
                    rows_to_update = (df['FOV'] == fov_name) & (df['StarID'] == star_id)
                    df.loc[rows_to_update, 'CI'] = this_ci
        # Keep only rows for which a color index was found:
        rows_with_ci = [(ci is not None) for ci in df['CI']]
        self.df_model = df[rows_with_ci]
        self.fov_list = self.df_model['FOV'].drop_duplicates().tolist()
        self.image_list = self.df_model['FITSfile'].drop_duplicates().tolist()

    def _prep_and_do_regression(self):
        """
        Assemble the fixed-effect variable list and dependent-variable offsets,
        then fit: OLS for a single image, else a mixed model with per-image
        random effect. Sets self.statsmodels_object (and self.mm_fit_object).
        """
        if len(self.image_list) <= 0:
            print("No images in image list.")
            self.is_valid = False
            return
        # Build variable list & dep-var offset:
        x_var_list = []  # fixed-effects only
        dep_var_offset = self.df_model['CatMag'].copy()  # *copy* CatMag, or df_model risks damage.
        if len(self.image_list) >= 2:  # if one image, do nothing (fold extinction into zero-point).
            if self.fit_extinction:
                x_var_list.append('Airmass')
            else:
                # Fixed extinction: move the airmass term into the dep-var offset.
                site = Site(self.site_name)
                extinction = site.extinction[self.filter]
                dep_var_offset += extinction * self.df_model['Airmass']
        # Fit SkyBias only when a majority of rows have a nonzero value:
        if self.fit_sky_bias:
            if sum([x != 0 for x in self.df_model['SkyBias']]) > int(len(self.df_model) / 2):
                x_var_list.append('SkyBias')
        if self.fit_log_adu:
            x_var_list.append('LogADU')
        x_var_list.append('CI')  # to get transform value (the point of this class).
        # NOTE(review): df_model derives from chained filtering; these in-place column
        #   assignments may trigger pandas SettingWithCopyWarning -- confirm.
        for var in x_var_list:
            self.df_model[var] = np.float64(self.df_model[var])  # ensure float64 for statsmodels.
        # Build the regression model (ordinary least-squares or mixed-model):
        self.df_model['DepVar'] = self.df_model['InstMag'] - dep_var_offset  # dependent variable.
        if len(self.image_list) == 1:
            # Model with ordinary least squares (OLS, as no group/image effects possible):
            formula = 'DepVar ~ ' + ' + '.join(x_var_list)
            self.statsmodels_object = smf.ols(formula, data=self.df_model).fit()
        else:
            # Model with photrix.util.MixedModelFit (random effect is per-image, "cirrus effect"):
            random_effect_var_name = 'FITSfile'  # cirrus effect is per-image
            self.mm_fit_object = MixedModelFit(data=self.df_model, dep_var='DepVar',
                                               fixed_vars=x_var_list,
                                               group_var=random_effect_var_name)
            self.statsmodels_object = self.mm_fit_object.statsmodels_object
        # Sanity check on the fit before declaring this object valid:
        if self.statsmodels_object.scale != 0.0 and \
                self.statsmodels_object.nobs == len(self.df_model):
            print(self.statsmodels_object.summary())
            self.is_valid = True

    def _build_output(self):
        """Populate result attributes from whichever fit object was used."""
        if len(self.image_list) == 1:
            # Ordinary least squares case...image/cirrus effect folded into zero-point term:
            so = self.statsmodels_object
            self.fitted_values = so.fittedvalues
            self.param_values = so.params
            self.residuals = so.resid
            self.sigma = so.mse_resid ** 0.5
            self.transform_value = so.params['CI']
            self.transform_sigma = so.bse['CI']
            self.image_effect = None
        else:
            # Mixed-model case, data from util.MixedModelFit object:
            so = self.mm_fit_object
            self.fitted_values = so.df_observations['FittedValue']
            self.param_values = so.df_fixed_effects['Value']
            self.residuals = so.df_observations['Residual']
            self.sigma = so.sigma
            self.transform_value = self.param_values['CI']
            self.transform_sigma = so.df_fixed_effects.loc['CI', 'Stdev']
            self.image_effect = so.df_random_effects['GroupValue']
def get_df_master(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Simple utility: read this Astronight's df_master.csv into a pandas DataFrame.
    :param an_top_directory: path containing an_rel_directory [str]
    :param an_rel_directory: directory_path for this instrument on this Astronight [string]
    :return: DataFrame of all comp, check, and target star raw photometric data
        (generated and csv-written by R, as of May 2017), indexed by its own
        'Serial' column; None if the directory or file is absent.
    """
    if an_rel_directory is None:
        return None
    csv_fullpath = os.path.join(an_top_directory, an_rel_directory,
                                'Photometry', DF_MASTER_FILENAME)
    if os.path.exists(csv_fullpath):
        # File is semicolon-delimited; re-index on the Serial column:
        df_master = pd.read_csv(csv_fullpath, sep=';')
        df_master.index = df_master['Serial']
        return df_master
    return None
def _add_ci_values(df_fov, df_star_data_numbered, instrument):
    """
    [DISABLED -- IN DEVELOPMENT.] Intended to add color-index columns
    ('CI_type', 'CI_value') to df_fov, choosing a CI type per filter from the
    instrument's transforms and computing each star's CI from the catalog mags
    in df_star_data_numbered.
    :return: df_fov, currently returned unchanged (see killswitch just below).
    """
    # Killswitch is next line:
    return df_fov
    # ---------- Unreachable draft code below; kept for future development. ----------
    df_fov['CI_type'] = ''
    df_fov['CI_value'] = np.nan
    df_by_filter = df_fov.groupby('Filter')
    # NOTE(review): iterating .groups yields group keys only (it is a mapping);
    #   unpacking to (this_filter, df) looks wrong -- probably should iterate
    #   df_by_filter itself. Fix before removing the killswitch.
    for this_filter, df in df_by_filter.groups:
        transforms = instrument.transforms(this_filter)
        # Decide best CI type for this filter:
        if len(instrument.transforms(this_filter)) == 1:
            # Easy case: only one transform for this filter, so use it:
            ci_type = instrument.transforms(this_filter)[0]
            ci_filters = [s.strip().upper() for s in (ci_type.strip().split('-'))]
            ci_filter_1 = ci_filters[0]
            ci_filter_2 = ci_filters[1]
        else:
            # Hard case: have to decide which transform to use for this FOV and filter:
            # NOTE(review): scoring never implemented, so ci_type/ci_filter_1/_2 are
            #   left undefined on this path -- complete before enabling.
            for this_transform in instrument.transforms(this_filter):
                this_ci_type, this_ci_value = this_transform  # unpack tuple
                these_ci_filters = [s.strip().upper() for s in (this_ci_type.strip().split('-'))]
                this_ci_filter_1 = these_ci_filters[0]
                this_ci_filter_2 = these_ci_filters[1]
                # Score for this filter: how many target obs satisfy these two criteria:
                # Criterion 1:
                # Criterion 2:
        # Fill in CI_type and CI_value (CI_type decided above):
        # CI type described by ci_filter_1 and ci_filter_2
        star_ids = df['StarID'].unique()
        for star_id in star_ids:
            mags = df_star_data_numbered.loc[df_star_data_numbered['StarID'] == star_id, 'Mags']
            ci_mag_1 = mags[ci_filter_1][0]
            ci_mag_2 = mags[ci_filter_2][0]
            df.loc[df_star_data_numbered['StarID'] == star_id, 'CI_type'] = ci_type
            df.loc[df_star_data_numbered['StarID'] == star_id, 'CI_value'] = ci_mag_1 - ci_mag_2
    return df_fov
def get_stare_comps(df_transformed, fov=None, star_id=None, this_filter=None):
    """
    Suggest comp-star subsets for stare reduction of one target in one filter.
    Greedily includes one comp star per pass (the candidate appearing in the
    most images), emitting after each pass a ready-to-paste '#COMPS' line for
    the user's 'pre-predict' file.
    :param df_transformed: transformed-observation data [pandas DataFrame].
    :param fov: FOV name to select [string].
    :param star_id: target star ID to select [string].
    :param this_filter: filter name to select [string].
    :return: list of text lines for the user [list of strings].
    """
    selection = df_transformed.copy()
    selection = selection[selection['FOV'] == fov]
    selection = selection[selection['StarID'] == star_id]
    selection = selection[selection['Filter'] == this_filter]
    # All distinct comp IDs used anywhere in the selected images, in first-seen order:
    all_comp_ids = pd.Series(','.join(selection['CompIDsUsed']).split(',')).drop_duplicates()
    df_comps = pd.DataFrame({'CompID': all_comp_ids, 'IsIncluded': False})
    result_lines = ["EDIT file 'pre-predict' with one of the following lines:"]
    if len(selection) <= 1:
        return result_lines + \
               [' >>> One or zero qualifying images in dataframe.']
    for _ in range(len(df_comps)):
        candidates = list(df_comps.loc[~ df_comps['IsIncluded'], 'CompID'])
        # NOTE(review): each candidate is scored alone (count of images containing
        #   it), not jointly with comps already included -- confirm intent.
        best_count = 0
        best_candidate = candidates[0]  # default, so comparisons can't fail.
        for candidate in candidates:
            images_with_candidate = 0
            for i_row in range(len(selection)):
                comps_this_image = (selection.iloc[i_row])['CompIDsUsed'].split(',')
                if candidate in comps_this_image:
                    images_with_candidate += 1
            if images_with_candidate > best_count:
                # TODO: break ties by choosing lower catmagerror (or initially sorting on them)?
                best_candidate = candidate
                best_count = images_with_candidate
        df_comps.loc[(df_comps['CompID'] == best_candidate), 'IsIncluded'] = True
        result_lines.append(' ' + str(sum(df_comps['IsIncluded'])) + ' comps -> ' +
                            str(best_count) + ' images qualify --> #COMPS ' + fov +
                            ', ' + this_filter + ', ' +
                            ', '.join(df_comps.loc[df_comps['IsIncluded'], 'CompID']))
    return result_lines
def _rename_to_photrix(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Rename every FITS file in this Astronight's 'Uncalibrated' subdirectory to the
    photrix convention 'Object-SSSS-Filter.fts', where SSSS is a 4-digit serial
    number restarting at 0001 for each Object (files ordered by mid-exposure JD).
    Also writes the old-name -> new-name table to Photometry/File-renaming.txt.
    """
    # Construct DataFrame of files to rename:
    fits_path = os.path.join(an_top_directory, an_rel_directory, 'Uncalibrated')
    ur_names, object_list, jd_mid_list, filter_list, photrix_names = [], [], [], [], []
    for entry in os.scandir(fits_path):
        if entry.is_file():
            ur_names.append(entry.name)
            # Read object name, mid-exposure time, and filter from the FITS header:
            this_fits = FITS(an_top_directory, os.path.join(an_rel_directory, 'Uncalibrated'),
                             entry.name)
            object_list.append(this_fits.object)
            jd_mid_list.append(jd_from_datetime_utc(this_fits.utc_mid))
            filter_list.append(this_fits.filter)
    df = pd.DataFrame({'UrName': ur_names, 'Object': object_list,
                       'JD_mid': jd_mid_list, 'Filter': filter_list})
    # Sort so that serial numbers below run in time order within each object:
    df = df.sort_values(by=['Object', 'JD_mid'])
    # Construct new photrix names and add them to DataFrame:
    serial_number = 1
    for i in range(len(df)):
        this_object = df['Object'].iloc[i]
        this_filter = df['Filter'].iloc[i]
        if i >= 1:
            # Serial number restarts at 1 whenever the Object changes:
            if this_object != df['Object'].iloc[i-1]:
                serial_number = 1
            else:
                serial_number += 1
        photrix_name = '-'.join([this_object, '{:04d}'.format(serial_number), this_filter]) + '.fts'
        photrix_names.append(photrix_name)
    df['PhotrixName'] = photrix_names
    df.index = photrix_names
    # Rename all the FITS files:
    for old_name, new_name in zip(df['UrName'], df['PhotrixName']):
        old_path = os.path.join(an_top_directory, an_rel_directory, 'Uncalibrated', old_name)
        new_path = os.path.join(an_top_directory, an_rel_directory, 'Uncalibrated', new_name)
        os.rename(old_path, new_path)
    # Write renaming table to Photometry subdirectory as csv file:
    renaming_fullpath = os.path.join(an_top_directory, an_rel_directory,
                                     'Photometry', 'File-renaming.txt')
    df.to_csv(renaming_fullpath, sep=';')
def _set_fits_extensions(an_top_directory, fits_subdir, fits_filenames=None):
    """
    Rename FITS files so that each carries the standard '.fts' extension.
    Only files whose extension is a recognized FITS extension (other than
    '.fts' itself) are renamed; a rename is skipped when the target name
    already exists.
    :param an_top_directory: top directory path [string]
    :param fits_subdir: subdirectory (relative to an_top_directory) holding the files [string]
    :param fits_filenames: filenames to examine [iterable of strings]
    :return: None. Side effect: files renamed on disk.
    """
    from photrix.image import FITS_EXTENSIONS
    standard_ext = '.fts'
    dir_path = os.path.join(an_top_directory, fits_subdir)
    for this_name in fits_filenames:
        stem, this_ext = os.path.splitext(this_name)
        is_fits_ext = this_ext.replace('.', '') in FITS_EXTENSIONS
        if is_fits_ext and this_ext != standard_ext:
            renamed_fullpath = os.path.join(dir_path, stem + standard_ext)
            if not os.path.exists(renamed_fullpath):
                os.rename(os.path.join(dir_path, this_name), renamed_fullpath)
def _archive_fov_files(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None, fov_names=None):
    """
    Copy each named FOV definition file (via copy2, preserving metadata) from
    the master FOV_DIRECTORY into this Astronight's own 'FOV' subdirectory,
    archiving the FOV files as used for this night's reduction.
    NOTE(review): uses 'shutil' unqualified -- relies on a module-level
    'import shutil' not visible in this chunk; confirm it exists.
    """
    from_dir = FOV_DIRECTORY
    to_dir = os.path.join(an_top_directory, an_rel_directory, 'FOV')
    for fov_name in fov_names:
        from_fullpath = os.path.join(from_dir, fov_name + '.txt')
        to_fullpath = os.path.join(to_dir, fov_name + '.txt')
        shutil.copy2(from_fullpath, to_fullpath)
def _apply_omit_txt(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Gets df_master and omit.txt, and returns observations NOT omitted by the user.
    Recognized directives: #OBS, #STAR, #SERIAL, #IMAGE, #JD (see stub file for syntax).
    :param an_top_directory: top directory holding all astronight directories [string]
    :param an_rel_directory: this astronight's subdirectory name [string]
    :return: 2-tuple: rows of df_master whose omission is NOT requested by user
        (in omit.txt) [DataFrame], and warning messages [list of strings].
        NOTE: returns bare None (not a tuple) if an_rel_directory is None
        or if df_master cannot be read.
    """
    if an_rel_directory is None:
        return None
    df_master = get_df_master(an_top_directory=an_top_directory, an_rel_directory=an_rel_directory)
    if df_master is None:
        print('>>>>> Could not get df_master.')
        return None
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit.txt')
    if not os.path.exists(fullpath):
        # First run for this astronight: leave a commented stub for the user to edit.
        _write_omit_txt_stub(an_top_directory, an_rel_directory)
        return df_master.copy(), []  # no change or warnings, since omit.txt absent
    with open(fullpath) as f:
        lines = f.readlines()
    lines = [line.split(";")[0] for line in lines]  # remove all comments
    lines = [line.strip() for line in lines]  # remove lead/trail blanks
    lines = [line for line in lines if line != '']  # remove empty lines
    df_filtered = df_master.copy()  # start with a copy, omit lines per user requests.
    warning_lines = []
    for line in lines:
        warning_line = None
        rows_to_omit = len(df_filtered) * [False]  # default to be overwritten
        # #OBS: omit one star from one specific FITS image.
        if line.startswith('#OBS'):
            parms, warning_line = _get_line_parms(line, "#OBS", False, 2, 2)
            if parms is not None:
                fits_file_name = parms[0] + '.fts'
                star_id = parms[1]
                rows_to_omit = (df_filtered['FITSfile'] == fits_file_name) & \
                               (df_filtered['StarID'] == star_id)  # a pandas Series
            else:
                warning_lines.append(warning_line)
        # #STAR: omit one star from all images of an FOV, optionally in one filter only.
        elif line.startswith('#STAR'):
            parms, warning_line = _get_line_parms(line, "#STAR", False, 2, 3)
            if parms is not None:
                fov = parms[0]
                star_id = parms[1]
                rows_to_omit = (df_filtered['FOV'] == fov) & (df_filtered['StarID'] == star_id)
                if len(parms) == 3:
                    # further restrict omission request
                    filter = parms[2]  # NOTE: shadows builtin 'filter' (local scope only).
                    rows_to_omit = rows_to_omit & (df_filtered['Filter'] == filter)
            else:
                warning_lines.append(warning_line)
        # #SERIAL: omit observations by Serial number (any count, comma/space-separated).
        elif line.startswith('#SERIAL'):
            parms, warning_line = _get_line_parms(line, "#SERIAL", True, 1, None)
            if parms is not None:
                serials_to_omit = [int(p) for p in parms]
                rows_to_omit = df_filtered['Serial'].isin(serials_to_omit)
            else:
                warning_lines.append(warning_line)
        # #IMAGE: omit one whole FITS image.
        elif line.startswith('#IMAGE'):
            parms, warning_line = _get_line_parms(line, "#IMAGE", False, 1, 1)
            if parms is not None:
                image_to_omit = parms[0] + '.fts'
                rows_to_omit = df_filtered['FITSfile'] == image_to_omit
            else:
                warning_lines.append(warning_line)
        # #JD: omit a fractional-JD range (relative to floor of the night's earliest JD).
        elif line.startswith('#JD'):
            parms, warning_line = _get_line_parms(line, "#JD", True, 2, 2)
            if parms is not None:
                jd_floor = floor(min(df_filtered['JD_mid']))
                jd_start = float(parms[0]) + jd_floor
                jd_end = float(parms[1]) + jd_floor
                rows_to_omit = (df_filtered['JD_mid'] >= jd_start) & \
                               (df_filtered['JD_mid'] <= jd_end)
            else:
                warning_lines.append(warning_line)
        else:
            warning_line = 'Directive not understood: \'' + line + '\'.'
            warning_lines.append(warning_line)
        if sum(rows_to_omit) >= 1:
            df_filtered = df_filtered[~ rows_to_omit]  # remove rows as user requested.
        else:
            # A directive that parsed but matched nothing deserves its own warning.
            if warning_line is None:
                warning_lines.append('No rows omitted: \'' + line + '\'.')
    for warning_line in warning_lines:
        print(warning_line)
    print(str(len(df_master) - len(df_filtered)) + ' rows removed via omit.txt. ' +
          str(len(df_filtered)) + ' rows remain.')
    return df_filtered, warning_lines
def _write_omit_txt_stub(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Write a commented stub omit.txt into this astronight's Photometry subdirectory.
    Will NOT overwrite existing omit.txt.
    :return: number of lines written, or 0 if omit.txt already exists [int]
    """
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit.txt')
    if os.path.exists(fullpath):
        return 0  # never overwrite a user's existing omit.txt.
    stub_lines = [
        ';----- This is omit.txt for AN directory_path ' + an_rel_directory,
        ';----- Use this file to omit observations from input to SkyModel (all filters).',
        ';----- Example directive lines:',
        ';',
        ';#OBS Obj-0000-V, 132 ; to omit star 132 from FITS image Obj-0000-V.fts',
        ';#STAR FOV, 132, V ; to omit star 132 from all FITS with FOV and filter V',
        ';#STAR FOV, 132 ; to omit star 132 from all FITS with FOV and ALL filters',
        ';#IMAGE Obj-0000-V ; to omit FITS image Obj-0000-V.fts specifically',
        ';#JD 0.72, 1 ; to omit fractional JD from 0.72 through 1',
        ';#SERIAL 123,77 54 6 ; to omit observations by Serial number (many per line OK)',
        ';',
        ';----- Add your directive lines:',
        ';']
    with open(fullpath, 'w') as f:
        f.write('\n'.join(stub_lines) + '\n')
    return len(stub_lines)
def _curate_stare_comps(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None, df_in=None):
    """
    Using user's stare_comps.txt in this an_rel_directory,
    remove unwanted (stare) comp observations from further use.
    :param an_top_directory: top directory holding all astronight directories [string]
    :param an_rel_directory: this astronight's subdirectory name [string]
    :param df_in: observation data to curate [pandas DataFrame]
    :return: data for all observations remaining eligible after this curation [DataFrame],
        and warning messages [list of strings]; (None, None) if df_in is None.
    """
    if df_in is None:
        return None, None
    # Read & parse control file stare_comps.txt:
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'stare_comps.txt')
    if not os.path.exists(fullpath):
        _write_stare_comps_txt_stub(an_top_directory, an_rel_directory)
        # BUG FIX: previously returned bare None, breaking callers that unpack the
        # documented 2-tuple. Now matches _apply_omit_txt()'s absent-file behavior.
        return df_in.copy(), []  # no change or warnings, since stare_comps.txt absent
    with open(fullpath) as f:
        lines = f.readlines()
    lines = [line.split(";")[0] for line in lines]  # remove all comments
    lines = [line.strip() for line in lines]  # remove lead/trail blanks
    lines = [line for line in lines if line != '']  # remove empty lines
    # Apply directive lines from stare_comps.txt:
    df = df_in.copy()  # starting point
    warning_lines = []  # "
    for line in lines:
        warning_line = None
        if line.startswith("#COMPS"):
            # Do not split on spaces, because FOV name could contain spaces.
            parms, warning_line = _get_line_parms(line, "#COMPS", False, 3, None)
            if parms is not None:
                fov_name = parms[0]
                filter_name = parms[1]
                comp_ids = ' '.join(parms[2:]).split()  # comma- or space-separated
                # Remove comps NOT in the keep-list (this FOV & filter only);
                # target and check stars are never removed here.
                rows_to_remove = (df['StarType'] == 'Comp') & \
                                 (df['FOV'] == fov_name) & \
                                 (df['Filter'] == filter_name) & \
                                 (~ df['StarID'].isin(comp_ids))
                df = df[~ rows_to_remove]
            else:
                warning_lines.append(warning_line)
        else:
            warning_line = 'Directive not understood: \'' + line + '\'.'
            warning_lines.append(warning_line)
    return df, warning_lines
def _write_stare_comps_txt_stub(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Write a commented stub stare_comps.txt into this astronight's Photometry subdirectory.
    Will NOT overwrite existing stare_comps.txt file.
    :return: number of lines written, or 0 if the file already exists [int]
    """
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'stare_comps.txt')
    if os.path.exists(fullpath):
        return 0  # never overwrite a user's existing stare_comps.txt.
    stub_lines = [
        ';----- This is stare_comps.txt for AN directory_path ' + an_rel_directory,
        ';----- Select comp stars (by FOV, filter, StarID) from input to rerun of PredictionSet() ',
        ';----- Example directive line:',
        ';',
        ';#COMPS Obj, V, 132, 133 144 ; to KEEP from FOV \'Obj\': comp stars \'132\' \'133\' and \'144\' in filter \'V\'',
        ';',
        ';----- Add your directive lines:',
        ';']
    with open(fullpath, 'w') as f:
        f.write('\n'.join(stub_lines) + '\n')
    return len(stub_lines)
def _write_report_map_stub(an_top_directory=AN_TOP_DIRECTORY, an_rel_directory=None):
    """
    Write a commented stub report_map.txt into this astronight's Photometry subdirectory,
    ending with 20 blank '#COMBINE' lines for the user to fill in.
    Will NOT overwrite existing report.map file.
    :return: number of lines written, or 0 if the file already exists [int]
    """
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'report_map.txt')
    if os.path.exists(fullpath):
        return 0  # never overwrite a user's existing report_map.txt.
    stub_lines = [
        ';----- This is report_map.txt for AN directory_path ' + an_rel_directory,
        ';----- Use this file to omit and/or combine target observations, for AAVSO report',
        ';----- Example directive line:',
        ';',
        ';#TARGET GD Cyg ; to omit this target star altogether from AAVSO report',
        ';#JD 0.65 0.67 ; to omit this JD (fractional) range from AAVSO report',
        ';#SERIAL 34 44,129 32 1202 ; to omit these 5 Serial numbers from AAVSO report',
        ';#COMBINE 80 128 ; to combine (average) these 2 Serial numbers in AAVSO report',
        ';',
        ';----- Add your directive lines:',
        ';']
    stub_lines.extend(20 * ['#COMBINE '])
    with open(fullpath, 'w') as f:
        f.write('\n'.join(stub_lines) + '\n')
    return len(stub_lines)
def _get_line_parms(line, directive, sep_by_spaces=False, nparms_min=None, nparms_max=None):
"""
Take directive line and return parms and possible warning text lines.
:param line: the line to parse [string]
:param directive: the directive [string, e.g., '#SERIAL']
:param sep_by_spaces: False if parms may include spaces [e.g., '#IMAGE' and '#TARGET',
which may include spaces, e.g., 'ST Tri';
True if spaces may separate parms [e.g., '#SERIAL' and '#COMBINE' directives] [bool]
:param nparms_min: min number of parms to accept, or None to ignore [int]
:param nparms_max: max number of parms to accept, or None to ignore [int]
:return: 2-tuple of parm list [list of strings] and warning lines [list of strings]
"""
directive_text = line.split(';')[0].strip() # remove any comment text
if directive_text.startswith(directive) is False:
return None, 'Line does not begin with correct directive: \'' + line + '\'.'
line_parms = [p.strip() for p in directive_text[(len(directive)):].split(',')]
if sep_by_spaces is True:
line_parms = ' '.join(line_parms).split()
valid_num_line_parms = True # until falsified
if nparms_min is not None: # ignore nparms_min if None.
valid_num_line_parms = valid_num_line_parms & (len(line_parms) >= nparms_min)
if nparms_max is not None: # ignore nparms_max if None.
valid_num_line_parms = valid_num_line_parms & (len(line_parms) <= nparms_max)
if valid_num_line_parms:
return line_parms, None
else:
return None, 'Line has wrong number of parameters: \'' + line + '\'.'
def _impute_target_ci(df_predictions_checks_targets, ci_filters, transforms):
    """
    Impute Color Index value for each target and check star, by time-interpolation from known
    (comp) Color Index values. This will REPLACE CI values for Target and Check stars
    (which probably had been set to zero for targets but catalog CI for checks).
    CALLS _extract_ci_points() to get list of ci_filter observations to interpolate.
    NOTE: also mutates the passed DataFrame (adds 'JD_num' column, rewrites 'CI').
    :param df_predictions_checks_targets: [pandas DataFrame]
    :param ci_filters: ['V', 'I'] for the time being (May 2017).
    :param transforms: transforms for all skymodels [dict filter:transform(V-I)]
    :return: updated_df_predictions_checks_targets: updated with CI color index for targets
    """
    # 'JD_num' = fractional JD relative to the floor of this night's earliest mid-JD.
    jd_floor = floor(min(df_predictions_checks_targets['JD_mid']))
    df_predictions_checks_targets['JD_num'] = df_predictions_checks_targets['JD_mid'] - jd_floor
    # Only target & check stars:
    target_and_check_rows = df_predictions_checks_targets['StarType'].isin(['Target', 'Check'])
    star_ids_targets_checks = \
        ((df_predictions_checks_targets.copy())[target_and_check_rows])['ModelStarID']
    # Sorted for convenience in testing:
    star_ids_targets_checks = star_ids_targets_checks.drop_duplicates().sort_values()
    # Replace CI color index values for one target star at a time:
    for this_star_id in star_ids_targets_checks:
        rows_this_star_id = df_predictions_checks_targets['ModelStarID'] == this_star_id
        df_star_id = (df_predictions_checks_targets[rows_this_star_id])\
            [['Serial', 'ModelStarID', 'Filter', 'JD_num', 'CI', 'UntransformedMag']]\
            .sort_values(by='JD_num')
        df_ci_points = _extract_ci_points(df_star_id, ci_filters, transforms)
        if len(df_ci_points) <= 0:
            # No usable CI observation pairs: mark CI unknown and move on.
            df_predictions_checks_targets.loc[rows_this_star_id, 'CI'] = None
            print(">>>>> ModelStarID=", this_star_id,
                  " no CI points returned by input_target_ci()")
            continue
        df_ci_points = df_ci_points.sort_values(by='JD_num')
        # Interpolate CI, put values into df_star_id:
        # (Interpolation method depends on number of interpolation points available.)
        if len(df_ci_points) == 1:  # normal case for LPVs
            df_star_id['CI'] = df_ci_points.loc[0, 'CI']  # 1 point --> all CIs set to single value
        elif len(df_ci_points) in [2, 3]:  # 2 or 3 points --> linear fit of CI vs time
            x = df_ci_points['JD_num']
            y = df_ci_points['CI']
            this_linear_fit = np.polyfit(x, y, 1)  # (x,y,deg=1 thus linear)
            this_fit_function = np.poly1d(this_linear_fit)
            df_star_id['CI'] = this_fit_function(df_star_id['JD_num'])  # does the linear interpol
            # Enforce no extrapolation: clamp to the fit's value at the JD range edges.
            stars_before_jd_range = df_star_id['JD_num'] < min(x)
            stars_after_jd_range = df_star_id['JD_num'] > max(x)
            df_star_id.loc[stars_before_jd_range, 'CI'] = this_fit_function(min(x))
            df_star_id.loc[stars_after_jd_range, 'CI'] = this_fit_function(max(x))
        else:  # here, 4 or more CI points to use in interpolation (prob a stare)
            # TODO: should this be interpolation in V mag rather than (or as alt to) interp in time?
            x = df_ci_points['JD_num']
            y = df_ci_points['CI']
            weights = len(x) * [1.0]
            smoothness = len(x) * 0.03**2  # i.e, N * (sigma_CI)**2
            # Construct spline; ext=3 -> no extrapolation, rather fixed at boundary values:
            spline = UnivariateSpline(x=x, y=y, w=weights, s=smoothness, ext=3)
            df_star_id['CI'] = spline(df_star_id['JD_num'])
        # Insert CI values for this target star:
        # NOTE(review): 'x in df[...]' on a pandas Series tests the Series *index*,
        # not its values — this presumably relies on Serial numbers matching the
        # DataFrame index; the .loc assignment then aligns on index. Verify.
        indices_to_update = [x
                             if x in df_predictions_checks_targets['Serial'] else None
                             for x in df_star_id['Serial']]
        df_predictions_checks_targets.loc[indices_to_update, 'CI'] = df_star_id['CI']
    return df_predictions_checks_targets
def _extract_ci_points(df_star_id, ci_filters, transforms):
    """
    Derive credible raw Color Index points from contiguous observations taken
    in the two Color Index filters.
    :param df_star_id: rows from df_predictions holding one ModelStarID [pandas DataFrame]
    :param ci_filters: exactly two filters, in order, defining Color Index [e.g., ['V','I']]
    :param transforms: transforms in the ci_filters base, from which the relevant transform is
        extracted [dict, e.g., {'V': 0.025, 'I': -0.044} ]
    :return: very small dataframe of JD_num and CI for this star [pandas DataFrame]
    """
    max_diff_jd = 60.0 / (24 * 60)  # 60 minutes in days; max gap between paired observations.
    in_ci_filters = df_star_id['Filter'].isin(ci_filters)
    df = (df_star_id.copy())[in_ci_filters].sort_values(by='JD_num')
    if len(df) <= 1:
        return pd.DataFrame()  # a color requires at least two observations.
    # A color point arises whenever two time-adjacent observations of this star
    # were taken in different filters, close enough together in time:
    filters = list(df['Filter'])
    jds = list(df['JD_num'])
    mags = list(df['UntransformedMag'])
    points = []
    for i in range(len(df) - 1):
        differ_in_filter = filters[i + 1] != filters[i]
        close_in_time = (jds[i + 1] - jds[i]) < max_diff_jd
        if differ_in_filter and close_in_time:
            ci_value = _solve_for_real_ci(
                untransformed_mags={filters[i]: mags[i], filters[i + 1]: mags[i + 1]},
                ci_filters=ci_filters,
                transforms=transforms)
            points.append({'JD_num': (jds[i + 1] + jds[i]) / 2.0, 'CI': ci_value})
    return pd.DataFrame(points)
def _solve_for_real_ci(untransformed_mags, ci_filters, transforms):
"""
Solves for best estimate of real (transformed) Color Index.
Dicts are passed (unordered); this function safely deduces the sign of the Color Index.
:param untransformed_mags: exactly two raw magnitudes, by filter
[dict, e.g., {'V': 12.6, 'I': 8.7} ]
:param ci_filters: exactly two filters, in order, defining Color Index [e.g., ['V','I']]
:param transforms: transforms in the ci_filters base, from which the relevant transform is
extracted [dict, e.g., {'V': 0.025, 'I': -0.044} ]
:return: best estimate of real, transformed Color Index [float]
"""
mags_ordered = [untransformed_mags[f] for f in ci_filters]
transforms_ordered = [transforms[f] for f in ci_filters]
real_ci = (mags_ordered[0] - mags_ordered[1]) / \
(1.0 + transforms_ordered[0] - transforms_ordered[1])
return real_ci
def _write_aavso_report_map_stub(an_top_directory, an_rel_directory):
lines = [";----- This is report_map.txt for AN folder " + an_rel_directory,
";----- Use this file to omit and/or combine target observations from AAVSO report.",
";----- Example directive lines:",
";",
";#TARGET GD Cyg ; to omit this target star altogether from AAVSO report.",
";#JD 0.233 0.311 ; to omit this JD (fractional) range from AAVSO report.",
";#SERIAL 34 44,129 32 1202 ; to omit these 5 Serial numbers from AAVSO report.",
";#COMBINE 80,128 ; to combine (average) these 2 Serial numbers within AAVSO report.",
";----- Add your directive lines:",
";"]
lines.extend(24 * ['#COMBINE '])
lines = [line + '\n' for line in lines]
fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry',
'report_map.txt')
if not os.path.exists(fullpath):
with open(fullpath, 'w') as f:
f.writelines(lines)
lines_written = len(lines)
else:
lines_written = 0
return lines_written
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,635
|
edose/photrix
|
refs/heads/master
|
/photrix/roof_nms_html.py
|
__author__ = "Eric Dose, Albuquerque"
""" This module:
"""
# Python core:
import os
# Author's packages:
from roof_nms import monitor_nms_status_via_html
THIS_PACKAGE_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
INI_DIRECTORY = os.path.join(THIS_PACKAGE_ROOT_DIRECTORY, 'ini')
if __name__ == '__main__':
    # Script entry point: run the HTML-based NMS status monitor (from roof_nms).
    # Alternative monitors are kept here commented out for manual selection.
    # monitor_nms_status_via_e_mail()
    monitor_nms_status_via_html()
    # record_nms_status_image_name()
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,636
|
edose/photrix
|
refs/heads/master
|
/test/test_user.py
|
from datetime import datetime, timezone, timedelta
from pytest import approx
from photrix import user # call: user.fn() & user.Class()
from photrix.util import hex_degrees_as_degrees, ra_as_hours, dec_as_hex, RaDec
from photrix.fov import Fov
__author__ = "Eric Dose :: Bois d'Arc Observatory, Kansas"
def test_Site():
    """Load Site 'Site_test1' from its JSON file and verify every exposed field."""
    site_name = "Site_test1"
    site = user.Site(site_name)
    assert site.is_valid
    # Identity:
    assert site.name == site_name
    assert site.filename == site.name + ".json"
    assert site.description.startswith("Bois d'Arc Obs")
    # Geographic data:
    assert site.longitude == hex_degrees_as_degrees("-95:53:18")
    assert site.latitude == hex_degrees_as_degrees("+38:55:29")
    assert site.elevation == 350
    # Observing limits:
    assert site.min_altitude == 25
    assert site.twilight_sun_alt == -9
    # Extinction: one entry per supported filter.
    assert site.extinction is not None
    assert len(site.extinction) == 3
    assert site.extinction['V'] == 0.167
    assert site.extinction['R'] == 0.128
    assert site.extinction['I'] == 0.103
def test_Instrument():
    """Verify Instrument JSON loading: a fully-specified file, then a sparse file
    whose missing entries must fall back to defaults."""
    # Case 1: fully-specified instrument file.
    instrument_name = "Instrument_test1"
    i = user.Instrument(instrument_name)
    assert i.is_valid
    assert i.name == instrument_name
    assert i.filename == i.name + ".json"
    assert i.min_distance_full_moon == 50
    # Mount section:
    assert i.mount["model"].startswith("Paramount MX")
    assert i.mount["slew_rate_ra"] == 4
    assert i.mount["slew_rate_dec"] == 4
    assert i.mount["sec_to_speed_ra"] == 1
    assert i.mount["sec_to_speed_dec"] == 1
    # OTA section:
    assert i.ota["model"].startswith("Celestron C14 ")
    assert i.ota["focal_length_mm"] == 2710
    # Camera section:
    assert i.camera["model"].startswith("SBIG STXL 6303")
    assert i.camera["pixels_x"] == 3072
    assert i.camera["pixels_y"] == 2048
    assert i.camera["microns_per_pixel"] == 9
    assert i.camera["shortest_exposure"] == 0.6
    assert i.camera["saturation_adu"] == 54000
    # Filter data and transforms:
    assert i.filter_data["V"]["reference_exposure_mag10"] == 22
    assert i.filter_data["R"]["reference_exposure_mag10"] == 30
    assert len(i.transforms('V')) == 1
    assert len(i.transforms('R')) == 3
    assert len(i.transforms('I')) == 2  # doesn't include null/None transforms.
    assert len(i.transforms('XXX')) == 0  # filter absent from instrument file
    assert set(i.filter_list) == {"V", "R", "I"}  # set
    assert i.transforms('V') == [('V-I', 0.02)]
    assert i.transforms('R') == [('V-I', 0.03), ('B-V', -0.04), ('V-R', 0)]
    assert i.transforms('I') == [('V-I', 0.025), ('V-R', 0)]
    assert i.transform('R', 'B-V') == -0.04
    assert i.transform('X', 'V-I') is None  # unknown filter
    assert i.transform('V', 'A-B') is None  # unknown color index
    # Case 2: sparse instrument file; absent entries must take defaults.
    instrument_name = "Instrument_test2"
    i = user.Instrument(instrument_name)
    assert i.is_valid
    assert i.name == "XXX"
    assert i.filename == instrument_name + ".json"
    assert i.min_distance_full_moon == 60  # absent -> default
    assert i.mount["model"] == ""
    assert i.mount["slew_rate_ra"] == 7
    assert i.mount["slew_rate_dec"] == 4
    assert i.mount["sec_to_speed_ra"] == 1
    assert i.mount["sec_to_speed_dec"] == 1
    assert i.ota["model"] == ""
    assert i.ota["focal_length_mm"] == 0
    assert i.camera["model"] == ""
    assert i.camera["pixels_x"] == 0
    assert i.camera["pixels_y"] == 0
    assert i.camera["microns_per_pixel"] == 0
    assert i.camera["shortest_exposure"] == 0
    assert i.camera["saturation_adu"] == 64000
    assert i.filter_data["V"]["reference_exposure_mag10"] == 22
    assert i.filter_list == ['V']
    assert i.transforms('V') == [('V-I', 0.02)]
    assert i.transform('V', 'V-I') == 0.02
def test_Astronight():
print()
# Test constructor, case = moon up at midnight. ----------------------------------------------
an_date_string = "20160910"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
assert an.an_date_string == an_date_string
assert an.site_name == site_string # the name
assert abs((an.ts_dark.start -
datetime(2016, 9, 11, 1, 33, 30, 944563, tzinfo=timezone.utc)).total_seconds()) < 1
assert abs((an.ts_dark.end -
datetime(2016, 9, 11, 11, 7, 11, 687656, tzinfo=timezone.utc)).total_seconds()) < 1
target_local_middark_utc = datetime(2016, 9, 11, 6, 20, 21, tzinfo=timezone.utc)
assert abs((an.local_middark_utc - target_local_middark_utc).total_seconds()) <= 1
assert an.local_middark_jd == approx(2457642.764131, 1/(24*3600)) # one sec tolerance
target_lst_seconds = 23*3600 + 19*60 + 37
an_lst_seconds = an.local_middark_lst * 240.0
assert target_lst_seconds == approx(an_lst_seconds, abs=1)
target_moon_ra = (15/3600) * (18*3600+35*60+7.2) # degrees
target_moon_dec = -(19+1/60+24/3600) # degrees
assert an.moon_radec.ra == approx(target_moon_ra, abs=1/3600) # tol = 1 arcsecond
assert an.moon_radec.dec == approx(target_moon_dec, abs=1/3600) # tol = 1 arcsecond
assert an.moon_phase == approx(0.6722, abs=0.005)
assert abs((an.ts_dark_no_moon.start -
datetime(2016, 9, 11, 6, 36, 39, 829350, tzinfo=timezone.utc)).total_seconds()) < 1
assert abs((an.ts_dark_no_moon.end -
datetime(2016, 9, 11, 11, 7, 11, 687656, tzinfo=timezone.utc)).total_seconds()) < 1
# Test constructor, case = full moon, mid-winter. --------------------------------------------
an_date_string = "20161213"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
assert an.an_date_string == an_date_string
assert an.site_name == site_string # the name
assert abs((an.ts_dark.start -
datetime(2016, 12, 14, 0, 1, 23, 877599,
tzinfo=timezone.utc)).total_seconds()) < 1
assert abs((an.ts_dark.end -
datetime(2016, 12, 14, 12, 35, 17, 958638,
tzinfo=timezone.utc)).total_seconds()) < 1
target_local_middark_utc = datetime(2016, 12, 14, 6, 18, 20, 877599, tzinfo=timezone.utc)
assert abs((an.local_middark_utc - target_local_middark_utc).total_seconds()) <= 1
assert an.local_middark_jd == approx(2457736.762742, 1/(24*3600)) # one sec tolerance
target_lst_seconds = 5*3600 + 28*60 + 13
an_lst_seconds = an.local_middark_lst * 240.0
assert target_lst_seconds == approx(an_lst_seconds, abs=1)
target_moon_ra = (15/3600) * (5*3600+44*60+50) # degrees
target_moon_dec = +(18+18/60+39/3600) # degrees
assert an.moon_radec.ra == approx(target_moon_ra, abs=1/3600) # tol = 1 arcsecond
assert an.moon_radec.dec == approx(target_moon_dec, abs=1/3600) # tol = 1 arcsecond
assert an.moon_phase == approx(0.9973, abs=0.005)
assert an.ts_dark_no_moon.seconds == 0
# Test constructor, case = full moon, mid-summer ---------------------------------------------
an_date_string = "20160619"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
assert an.an_date_string == an_date_string
assert an.site_name == site_string # the name
assert abs((an.ts_dark.start -
datetime(2016, 6, 20, 2, 59, 26, 172446,
tzinfo=timezone.utc)).total_seconds()) < 1
assert abs((an.ts_dark.end -
datetime(2016, 6, 20, 9, 50, 52, 262724,
tzinfo=timezone.utc)).total_seconds()) < 1
target_local_middark_utc = datetime(2016, 6, 20, 6, 25, 9, 172446, tzinfo=timezone.utc)
assert abs((an.local_middark_utc - target_local_middark_utc).total_seconds()) <= 1
assert an.local_middark_jd == approx(2457559.767467, 1/(24*3600)) # one sec tolerance
target_lst_seconds = 17*3600 + 57*60 + 12
an_lst_seconds = an.local_middark_lst * 240.0
assert target_lst_seconds == approx(an_lst_seconds, abs=1)
target_moon_ra = (15/3600) * (17*3600+47*60+48) # degrees
target_moon_dec = -(19+16/60+5.50/3600) # degrees
assert an.moon_radec.ra == approx(target_moon_ra, abs=1/3600) # tol = 1 arcsecond
assert an.moon_radec.dec == approx(target_moon_dec, abs=1/3600) # tol = 1 arcsecond
assert an.moon_phase == approx(0.9978, abs=0.005)
assert an.ts_dark_no_moon.seconds == 0
# Test constructor, case = new moon. ---------------------------------------------------------
an_date_string = "20160930"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
assert an.an_date_string == an_date_string
assert an.site_name == site_string # the name
assert abs((an.ts_dark.start -
datetime(2016, 10, 1, 1, 0, 32, 860858,
tzinfo=timezone.utc)).total_seconds()) < 1
assert abs((an.ts_dark.end -
datetime(2016, 10, 1, 11, 26, 15, 525660,
tzinfo=timezone.utc)).total_seconds()) < 1
target_local_middark_utc = datetime(2016, 10, 1, 6, 13, 23, 860858, tzinfo=timezone.utc)
assert abs((an.local_middark_utc - target_local_middark_utc).total_seconds()) <= 1
assert an.local_middark_jd == approx(2457662.759304, 1/(24*3600)) # one sec tolerance
target_lst_seconds = 0*3600 + 31*60 + 30
an_lst_seconds = an.local_middark_lst * 240.0
assert target_lst_seconds == approx(an_lst_seconds, abs=1)
target_moon_ra = (15/3600) * (12*3600+45*60+16.08) # degrees
target_moon_dec = -(2+41/60+18.0/3600) # degrees
assert an.moon_radec.ra == approx(target_moon_ra, abs=1/3600) # tol = 1 arcsecond
assert an.moon_radec.dec == approx(target_moon_dec, abs=1/3600) # tol = 1 arcsecond
assert an.moon_phase == approx(0.0011, abs=0.005)
assert an.ts_dark_no_moon == an.ts_dark
# Test ts_observable(), set up. =============================================================
an_date_string = "20160919"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
hip_116928 = RaDec('23:42:02.662', '+01:46:45.557')
# Test ts_observable(), case = object farther than min dist from moon (common case). --------
ts_obs = an.ts_observable(hip_116928, min_moon_dist=45)
assert abs((ts_obs.start - datetime(2016, 9, 20, 2, 13, 12, 660671,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 10, 3, 0, 540779,
tzinfo=timezone.utc)).total_seconds()) <= 60
# Test ts_observable(), case = object closer than min dist from moon. ------------------------
ts_obs = an.ts_observable(hip_116928, min_moon_dist=90)
# print("b", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 2, 13, 12, 660671,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 2, 38, 2, 264284,
tzinfo=timezone.utc)).total_seconds()) <= 60
# Test ts_observable(), case = ignore moon altogether (set min_moon_dist to 0). --------------
ts_obs = an.ts_observable(hip_116928, min_moon_dist=0)
# print("c", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 2, 13, 12, 660671,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 10, 3, 11, 540779,
tzinfo=timezone.utc)).total_seconds()) <= 60
# Test ts_observable(), case = disable observing any time moon is up at all. -----------------
ts_obs = an.ts_observable(hip_116928, min_moon_dist=200)
# print("d", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 2, 13, 12, 660671,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 2, 38, 2, 264284,
tzinfo=timezone.utc)).total_seconds()) <= 60
# Test ts_observable(), case = object farther than min dist from moon, higher min_alt. ------
ts_obs = an.ts_observable(hip_116928, min_moon_dist=45, min_alt=35)
# print("e", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 3, 9, 53, 907299,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 9, 6, 30, 294148,
tzinfo=timezone.utc)).total_seconds()) <= 60
# Test ts_observable(), case = object closer than min dist from moon, higher min_alt. --------
ts_obs = an.ts_observable(hip_116928, min_moon_dist=90, min_alt=35)
# print("f", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0 # start and end times are unimportant (indeed they are undefined).
# For remaining tests, assume Astronight object's returned values are ok (as were tested above).
# Wide range of sky positions, to exercise all functions and exception handling.
altais = RaDec('19:12:33.405', '+67:39:43.092') # in NW sky (from North America)
hip_22783 = RaDec('04:54:03.012', '+66:20:33.763') # in NE sky
mira = RaDec('02:19:20.804', '-02:58:43.518') # in SE sky
algedi = RaDec('20:18:03.324', '-12:32:41.419') # in SW sky
ankaa = RaDec('00:26:17.310', '-42:18:27.446') # too far south to observe
polaris = RaDec('02:31:49.133', '+89:15:50.598') # circumpolar north
# All targets, allow any moon.
ts_obs = an.ts_observable(altais, min_moon_dist=0, min_alt=25)
# print("altais / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - an.ts_dark.start).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 8, 38, 36, 48834,
tzinfo=timezone.utc)).total_seconds()) <= 60 # @ HA=7
ts_obs = an.ts_observable(hip_22783, min_moon_dist=0, min_alt=25)
# print("hip_22783 / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 4, 20, 9, 834898,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(mira, min_moon_dist=0, min_alt=25)
# print("mira / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 5, 8, 37, 879958,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(algedi, min_moon_dist=0, min_alt=25)
# print("algedi / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - an.ts_dark.start).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 9, 20, 5, 35, 16, 643651,
tzinfo=timezone.utc)).total_seconds()) <= 60
ts_obs = an.ts_observable(ankaa, min_moon_dist=0, min_alt=25)
# print("ankaa / any moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(polaris, min_moon_dist=0, min_alt=25)
# print("polaris / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 2, 18, 56, 904009,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
# assert ts_obs == an.ts_dark
# All targets, allow NO moon.
ts_obs = an.ts_observable(altais, min_moon_dist=220, min_alt=25)
# print("altais / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs == an.ts_dark_no_moon
ts_obs = an.ts_observable(hip_22783, min_moon_dist=220, min_alt=25)
# print("hip_22783 / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(mira, min_moon_dist=220, min_alt=25)
# print("mira / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(algedi, min_moon_dist=220, min_alt=25)
# print("algedi / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs == an.ts_dark_no_moon
ts_obs = an.ts_observable(ankaa, min_moon_dist=220, min_alt=25)
# print("ankaa / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(polaris, min_moon_dist=220, min_alt=25)
# print("polaris / NO moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 9, 20, 2, 18, 56, 904009,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark_no_moon.end).total_seconds()) <= 60
# assert ts_obs == an.ts_dark_no_moon
# Continue testing .ts_observable(): --------------------------------------------------------
# Same targets, new astronight with ~ opposite moon phase to previous astronight.
an_date_string = "20161008"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
# Case: allow any moon.
ts_obs = an.ts_observable(altais, min_moon_dist=0, min_alt=25)
# print("altais / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - an.ts_dark.start).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 10, 9, 7, 23, 52, 684662,
tzinfo=timezone.utc)).total_seconds()) <= 60
ts_obs = an.ts_observable(hip_22783, min_moon_dist=0, min_alt=25)
# print("hip_22783 / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 10, 9, 3, 5, 28, 819820,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(mira, min_moon_dist=0, min_alt=25)
# print("mira / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 10, 9, 3, 53, 55, 971837,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 10, 9, 11, 6, 47, 395358,
tzinfo=timezone.utc)).total_seconds()) <= 60
ts_obs = an.ts_observable(algedi, min_moon_dist=0, min_alt=25)
# print("algedi / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - an.ts_dark.start).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 10, 9, 4, 20, 34, 71309,
tzinfo=timezone.utc)).total_seconds()) <= 60
ts_obs = an.ts_observable(ankaa, min_moon_dist=0, min_alt=25)
# print("ankaa / any moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(polaris, min_moon_dist=0, min_alt=25)
# print("polaris / any moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - datetime(2016, 10, 9, 1, 4, 39, 692713,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
# assert ts_obs == an.ts_dark
# Case: allow NO moon:
ts_obs = an.ts_observable(altais, min_moon_dist=220, min_alt=25)
# print("altais / NO moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - an.ts_dark_no_moon.start).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 10, 9, 7, 23, 52, 684662,
tzinfo=timezone.utc)).total_seconds()) <= 60
ts_obs = an.ts_observable(hip_22783, min_moon_dist=220, min_alt=25)
# print("hip_22783 / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs == an.ts_dark_no_moon
ts_obs = an.ts_observable(mira, min_moon_dist=220, min_alt=25)
# print("mira / NO moon >> ", ts_obs, "\n\n\n")
assert abs((ts_obs.start - an.ts_dark_no_moon.start).total_seconds()) <= 60
assert abs((ts_obs.end - datetime(2016, 10, 9, 11, 6, 47, 395358,
tzinfo=timezone.utc)).total_seconds()) <= 60
ts_obs = an.ts_observable(algedi, min_moon_dist=220, min_alt=25)
# print("algedi / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(ankaa, min_moon_dist=220, min_alt=25)
# print("ankaa / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(polaris, min_moon_dist=220, min_alt=25)
# print("polaris / NO moon >> ", ts_obs, "\n\n\n")
assert ts_obs == an.ts_dark_no_moon
# Case: object near sun (wholly unobservable) and during new moon (moon matters little):
an_date_string = "20160930"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
porrima = RaDec('12:41:38.954', '-01:26:56.733')
ts_obs = an.ts_observable(porrima, min_moon_dist=0, min_alt=25)
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(porrima, min_moon_dist=220, min_alt=25)
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(porrima, min_moon_dist=0, min_alt=2)
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(porrima, min_moon_dist=220, min_alt=2)
assert ts_obs.seconds == 0
# Case: object circumpolar and during new moon:
polaris = RaDec('02:31:49.133', '+89:15:50.598') # circumpolar north
ts_obs = an.ts_observable(polaris, min_moon_dist=0, min_alt=25)
assert abs((ts_obs.start - datetime(2016, 10, 1, 1, 35, 56, 993007,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(polaris, min_moon_dist=220, min_alt=25)
assert abs((ts_obs.start - datetime(2016, 10, 1, 1, 35, 56, 993007,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(polaris, min_moon_dist=0, min_alt=2)
assert abs((ts_obs.start - datetime(2016, 10, 1, 1, 35, 56, 993007,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(polaris, min_moon_dist=220, min_alt=2)
assert abs((ts_obs.start - datetime(2016, 10, 1, 1, 35, 56, 993007,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((ts_obs.end - an.ts_dark.end).total_seconds()) <= 60
ts_obs = an.ts_observable(polaris, min_moon_dist=0, min_alt=60) # it's never this high.
assert ts_obs.seconds == 0
ts_obs = an.ts_observable(polaris, min_moon_dist=220, min_alt=60)
assert ts_obs.seconds == 0
# Test ts_fov_observable (based on ts_observable() having been tested above):
an_date_string = "20160930"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
fov = Fov('ST Tri')
ts_from_radec = an.ts_observable(RaDec(fov.ra, fov.dec), min_moon_dist=50, min_alt=40)
ts_from_fov = an.ts_fov_observable(fov, min_moon_dist=50, min_alt=40)
assert ts_from_fov == ts_from_radec
# Test .datetime_utc_from_hhmm():
an_date_string = "20160930"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
assert datetime_utc_from_hhmm_OK('2323', an)
assert datetime_utc_from_hhmm_OK('2020', an)
assert datetime_utc_from_hhmm_OK('2000', an)
assert datetime_utc_from_hhmm_OK('0000', an)
assert datetime_utc_from_hhmm_OK('0600', an)
assert datetime_utc_from_hhmm_OK('0900', an)
an_date_string = "20170101"
site_string = "BDO_Kansas"
an = user.Astronight(an_date_string, site_string)
assert datetime_utc_from_hhmm_OK('2323', an)
assert datetime_utc_from_hhmm_OK('2020', an)
assert datetime_utc_from_hhmm_OK('2000', an)
assert datetime_utc_from_hhmm_OK('0000', an)
assert datetime_utc_from_hhmm_OK('0600', an)
assert datetime_utc_from_hhmm_OK('0900', an)
# Test transit():
site_string = "DSW"
an1 = user.Astronight("20170101", site_string)
an2 = user.Astronight("20170601", site_string)
porrima = RaDec('12:41:38.954', '-01:26:56.733')
assert abs((an1.transit(porrima) - datetime(2017, 1, 2, 12, 55, 43, 509000,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((an2.transit(porrima) - datetime(2017, 6, 2, 3, 2, 3, 209000,
tzinfo=timezone.utc)).total_seconds()) <= 60
st_tri = RaDec('2:41:33', '35:43:31')
assert abs((an1.transit(st_tri) - datetime(2017, 1, 2, 2, 57, 27, 210000,
tzinfo=timezone.utc)).total_seconds()) <= 60
assert abs((an2.transit(st_tri) - datetime(2017, 6, 2, 16, 59, 48, 210000,
tzinfo=timezone.utc)).total_seconds()) <= 60
# ----- HELPER FUNCTIONS. --------------------------------------------------------------------
def datetime_utc_from_hhmm_OK(hhmm_string, an):
    """Return True iff an.datetime_utc_from_hhmm() honors the given 'hhmm' string.

    The resolved datetime must carry the requested hour and minute, and must fall
    within half a day of the astronight's mid-dark instant.
    """
    resolved = an.datetime_utc_from_hhmm(hhmm_string)
    wanted_hour = int(hhmm_string[:2])
    wanted_minute = int(hhmm_string[2:4])
    offset_days = abs((resolved - an.local_middark_utc).total_seconds()) / 86400.0
    if resolved.hour != wanted_hour or resolved.minute != wanted_minute:
        return False
    return offset_days <= 0.5
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,637
|
edose/photrix
|
refs/heads/master
|
/photrix/planning.py
|
"""photrix.planning.py"""
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
# Python system imports:
import os
import os.path
from collections import OrderedDict
from datetime import datetime, timezone, timedelta
from math import floor, sqrt, ceil, cos, sin, pi
from random import seed, uniform, shuffle
# External library imports:
import ephem
import numpy as np
import pandas as pd
from astropy.coordinates import SkyCoord, EarthLocation, AltAz, Angle
from astropy.stats import circmean
from astropy import units as u
from astropy.time import Time
# Internal (photrix) imports:
from .fov import make_fov_dict, FovError, Fov
from .user import Astronight, Instrument, MOON_PHASE_NO_FACTOR
from .util import RaDec, datetime_utc_from_jd, hhmm_from_datetime_utc, \
ra_as_hours, dec_as_hex, az_alt_at_datetime_utc, \
degrees_as_hex, jd_from_datetime_utc, Timespan, event_utcs_in_timespan
from .web import get_aavso_webobs_raw_table
DEGREES_PER_RADIAN = 180.0 / pi
# USAGE: *******************************************************************
# pl.make_an_roster('20170525', 'c:/Astro/ACP/AN20170525', user_update_tolerance_days=0.1,
# exp_time_factor=0.75)
# pl.make_an_plan('c:/Astro/ACP/AN20170525/planning.xlsx', exp_time_factor=0.75)
# ROSTER Target Statement types:
# AZ Her ; standard FOV target
# STARE 6 ST Tri ; standard stare FOV target (6 reps)
# BURN AA Aur 11:00:00 +34:00:00 ; Burn target (240 sec in V and I)
# IMAGE target_name V=12 B=12.5(2) 12:00:00 +23:34:45 ; arbitrary image, exp time from magnitude
# IMAGE target_name Clear=240sec(5) 12:00:00 +23:34:45 ; arbitrary image, exp time requested directly
FOV_DIRECTORY = "C:/Dev/Photometry/FOV/"
STARE_EVENT_TYPES = {"eclipser": "minima", "exoplanet": "minima",
"delta scuti": "maxima", 'rr lyrae': 'maxima'}
MIN_AVAILABLE_SECONDS_DEFAULT = 900
MIN_AVAILABLE_SECONDS_STARE = 5400
MIN_MOON_DEGREES_DEFAULT = 45
MIN_MOON_DEGREES_STARE = 60
STARE_AN_PRIORITY_DIVIDER = 7.5 # >= this goes into the normal Roster list; < goes to low-pri list.
FITS_DIRECTORY = "C:/Astro/Images"
# DEFAULT_PLAN_DIRECTORY = 'C:/Astro/Plans'
DT_FMT = '%Y-%m-%d %H:%M:%S.%f%z' # kludge around py inconsistency in python's datetime formats
PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOCAL_OBS_CACHE_FULLPATH = os.path.join(PHOTRIX_ROOT_DIRECTORY, "local_obs_cache.csv")
EARLIEST_AN_DATE = '20170101'
LATEST_AN_DATE = '20241231' # Update this later, I suppose.
AN_START_REL_UTC_0000 = 19 # timedelta(UTC of actual AN start - nominal AN @ 0000 hours UTC)
# (19 is good for North America)
NO_COMMENT_STRING = '[no plan comment given]'
# ********** Roster & cache parameters:
AAVSO_WEBOBS_ROWS_TO_GET = 100
MIN_ROWS_ONE_STARE = 10
MAX_DAYS_ONE_STARE = 0.5
DEFAULT_UPDATE_TOLERANCE_DAYS = 0.0416667 # 1 hour
FORCE_AUTOGUIDE_TOKEN = 'AG+'
# ********** ACP Timing:
CHILL_DURATION = 240 # seconds.
PLAN_START_DURATION = 30 # seconds
AUTOFOCUS_DURATION = 260 # seconds, includes slew & filter wheel changes
CHAIN_DURATION = 3 # seconds; a guess
QUITAT_DURATION = 3 # seconds
SHUTDOWN_DURATION = 480 # seconds; a guess
# ********** Mount (L-500) Timing:
NEW_TARGET_DURATION = 34.3 # seconds; slew + settle + ACP processing (no guider start etc)
# ********** Dome (generic azimuth-tracking) Timing:
DOME_IS_ACTIVE = True
DOME_AZ_SLEW_SPEED = 3 # degrees/second.
DOME_OPEN_TIME = 270 # seconds.
DOME_CLOSE_TIME = 270 # seconds.
# ********** Camera & filter wheel (STXL-6303E) Timing:
MAX_AGGREGATE_EXPOSURE_NO_GUIDING = 119 # seconds;
GUIDE_STAR_ACQUISITION = 17.2 # seconds (if needed) (was 14.2)
GUIDER_CHECK_DURATION = 7 # seconds (if needed) (was 4)
NEW_FILTER_DURATION = 5 # seconds; filter change and focuser change
NEW_EXPOSURE_DURATION_EX_GUIDER_CHECK = 19.3 # seconds; image download, plate solving (excl exposure)
# ********** EVD Preferences:
BURN_EXPOSURE = 240 # seconds per exposure
V_MAG_WARNING = 16.5 # a predicted V magnitude > this will trigger a warning line in Summary file.
ABSOLUTE_MAX_EXPOSURE_TIME = 900 # seconds
ABSOLUTE_MIN_EXPOSURE_TIME = 2.5 # seconds [20190318, was 3 seconds]
MIN_TOTAL_EXP_TIME_PER_FILTER = 9 # seconds, thus 4 [was 3] exposures max per filter for LPVs
# ********** MP parameters:
# defining MP color sequence as tuple of tuples: (filter, seconds exposure, repeats).
# Sloan-filter exposures at V mag = 14, intended to give S/N ~ 200.
# Estimated from first Sloan images taken AN20220408, NMS.
COLOR_SEQUENCE_AT_V14 = (('SR', 90, 1), ('SG', 200, 1), ('SI', 180, 1),
('SR', 90, 1), ('SI', 180, 1), ('SG', 200, 1), ('SR', 90, 1))
COLOR_FORCE_AUTOGUIDE = True # rather than cluttering Excel file with 'AG+'.
def make_df_fov(fov_directory=FOV_DIRECTORY, fov_names_selected=None):
    """
    Returns new, basic fov data frame, by reading FOV files (all or selected)
        in given directory_path.
    :param fov_directory: the directory_path from which to read FOV files [string].
    :param fov_names_selected: FOV names to include; default of None means all FOV files
        within given directory_path [list of strings, or None].
    :return: data frame with one row per FOV and columns: fov_name, fov, main_target,
        fov_priority, obs_style, ra, dec, period, target_type, max_exposure, radec.
        Rows sorted by fov_name; index == fov_name [pandas DataFrame].
    """
    fov_dict = make_fov_dict(fov_directory, fov_names_selected)
    fov_names = list(fov_dict.keys())
    fovs = [fov_dict[name] for name in fov_names]
    df_fov = pd.DataFrame({'fov_name': fov_names})  # 1 column ('fov_name') only.
    # Add column of Fov objects, matched to column fov_names:
    df_fov['fov'] = fovs
    # Add other columns (directly from fov) for later convenience:
    df_fov['main_target'] = [fov.main_target for fov in fovs]
    df_fov['fov_priority'] = [fov.priority for fov in fovs]
    df_fov['obs_style'] = [fov.observing_style for fov in fovs]
    df_fov['ra'] = [fov.ra for fov in fovs]
    df_fov['dec'] = [fov.dec for fov in fovs]
    df_fov['period'] = [fov.period for fov in fovs]
    df_fov['target_type'] = [fov.target_type for fov in fovs]
    df_fov['max_exposure'] = [fov.max_exposure for fov in fovs]
    # Construct column 'radec' in one pass (replaces the original's dummy-scalar
    # assignment followed by a per-row .loc write loop -- simpler and faster):
    df_fov['radec'] = [RaDec(fov.ra, fov.dec) for fov in fovs]
    # Sort by fov name, set index to fov name.
    df_fov.sort_values(by='fov_name', inplace=True)
    df_fov.index = df_fov['fov_name']
    return df_fov
def filter_df_fov_by_obs_styles(df_fov, obs_style_list=None):
    """
    Returns df_fov restricted to rows whose observing style appears in obs_style_list.
    Matching is case-insensitive.
    :param df_fov: input fov dataframe.
    :param obs_style_list: observing styles to keep; a single style may be passed as a
        bare string. None or an empty list simply returns the input df_fov.
    :return: filtered df_fov.
    """
    if obs_style_list is None:
        return df_fov
    if isinstance(obs_style_list, str):
        obs_style_list = [obs_style_list]
    if not obs_style_list:
        return df_fov
    wanted_styles = {style.lower() for style in obs_style_list}
    row_mask = [style.lower() in wanted_styles for style in df_fov.obs_style]
    return df_fov[row_mask]
def filter_df_fov_by_fov_priority(df_fov, min_fov_priority=None, include_std_fovs=True):
    """
    Returns df_fov restricted to fovs at or above a minimum fov_priority.
    Standard FOVs (which carry no meaningful fov_priority) may optionally be kept
    regardless of priority (default = keep them).
    :param df_fov: input fov dataframe.
    :param min_fov_priority: lowest fov priority to keep; None simply returns
        the input df_fov.
    :param include_std_fovs: True to keep standard FOVs irrespective of priority.
    :return: filtered df_fov.
    """
    if min_fov_priority is None:
        return df_fov
    priority_ok = df_fov["fov_priority"] >= min_fov_priority
    if not include_std_fovs:
        return df_fov[priority_ok]
    is_standard = df_fov["obs_style"].str.lower() == "standard"
    return df_fov[priority_ok | is_standard]
def complete_df_fov_an(df_fov, user_update_tolerance_days=DEFAULT_UPDATE_TOLERANCE_DAYS,
                       an_string=None, site_name="NMS_Dome",
                       min_available_seconds=MIN_AVAILABLE_SECONDS_DEFAULT,
                       min_moon_degrees=MIN_MOON_DEGREES_DEFAULT,
                       remove_zero_an_priority=True,
                       remove_unobservables=True):
    """
    Completes df_fov with observability and priority data for one astronight at one site.
    Adds columns: moon_deg, start, end, mid, seconds, available, an_priority,
    an_priority_bars. Optionally removes unobservable and zero-priority fovs.
    Queries AAVSO (via LocalObsCache) so that an_priority reflects obs age.
    :param df_fov: fov dataframe, as from make_df_fov() [pandas DataFrame].
    :param user_update_tolerance_days: cache staleness tolerance, passed through to
        LocalObsCache [float].
    :param an_string: astronight date string, e.g. '20170525'; None returns df_fov
        unchanged [string or None].
    :param site_name: site name known to Astronight; empty string returns df_fov
        unchanged [string].
    :param min_available_seconds: least observable time for a fov to be kept [float].
    :param min_moon_degrees: least distance from moon for a fov to be kept [float].
    :param remove_zero_an_priority: True to drop fovs with zero an_priority [boolean].
    :param remove_unobservables: True to drop unobservable fovs *before* the (slow)
        AAVSO queries [boolean].
    :return: completed df_fov, sorted by mid-observable time then descending priority
        [pandas DataFrame].
    """
    if an_string is None or site_name == "":
        return df_fov
    an = Astronight(an_string, site_name)
    # Construct columns (specific to night and site) for available obs time, this astronight.
    df_fov = df_fov.assign(moon_deg=0.0) \
        .assign(start=an.local_middark_utc) \
        .assign(end=an.local_middark_utc) \
        .assign(mid=an.local_middark_utc) \
        .assign(seconds=0.0) \
        .assign(available=' - '.join(2 * [4 * ' '])) \
        .assign(an_priority=0.0) \
        .assign(an_priority_bars='')  # all dummy values to be overwritten later.
    # Fill in most columns.
    for ind in df_fov.index:
        ts_obs = an.ts_observable(df_fov.loc[ind, 'radec'], min_alt=an.site.min_altitude,
                                  min_moon_dist=min_moon_degrees)
        df_fov.loc[ind, 'moon_deg'] = df_fov.loc[ind, 'radec'].degrees_from(an.moon_radec)
        df_fov.loc[ind, 'start'] = ts_obs.start
        df_fov.loc[ind, 'end'] = ts_obs.end
        df_fov.loc[ind, 'mid'] = ts_obs.midpoint
        df_fov.loc[ind, 'seconds'] = ts_obs.seconds
        if ts_obs.seconds > 0:
            # 'available' is a human-readable 'hhmm - hhmm' UTC window.
            df_fov.loc[ind, 'available'] = ' - '.join([hhmm_from_datetime_utc(ts_obs.start),
                                                       hhmm_from_datetime_utc(ts_obs.end)])
    # Remove targets that can't be observed this astronight, *before* getting data from AAVSO:
    if remove_unobservables:
        enough_dark_time = df_fov['seconds'] >= min_available_seconds
        moon_dist_ok = df_fov['moon_deg'] >= min_moon_degrees
        is_observable = enough_dark_time & moon_dist_ok
        # print('Querying AAVSO for', str(sum(is_observable)), 'of', str(len(df_fov)), 'targets.')
        df_fov = df_fov[is_observable]
    # Update observations cache from AAVSO:
    loc = LocalObsCache()
    loc.update_fov_entries(df_fov, user_update_tolerance_days=user_update_tolerance_days)
    # Compute each target's priority for this astronight:
    for ind in df_fov.index:
        this_fov = df_fov.loc[ind, 'fov']
        df_fov.loc[ind, 'an_priority'] = loc.calc_an_priority(this_fov, an,
                                                              user_update_tolerance_days)
        max_bars = 16
        # Bar-graph string: first 8 ticks render as '.', the rest as '#', left-justified
        # to a constant width for column alignment in the roster output.
        int_an_priority = int(round(df_fov.loc[ind, 'an_priority']))
        df_fov.loc[ind, 'an_priority_bars'] = \
            (8 * '.' + (max_bars - 8) * '#')[0: min(max_bars, int_an_priority)].ljust(max_bars)
    if remove_zero_an_priority:
        df_fov = df_fov[df_fov['an_priority'] > 0.0]
    return df_fov.sort_values(by=['mid', 'an_priority'], ascending=[True, False])
class LocalObsCache:
"""
Holds a cache dataframe of most recent relevant observations for ~all FOVs.
Can hold only one dataframe row per fov (however many filters constitute a previous obs).
Will query AAVSO webobs site to refresh a database row if fov's main target looks too old.
Cache dataframe columns are:
fov_name [string]
main_target [string]
obs_style [string]
cache_datetime: datetime this row was updated [datetime.datetime UTC]
obs_datetime: datetime of most recent known observation [datetime.datetime UTC]
obs_mag: magnitude of most recent observation [float]
obs_mag_filter: filter in which obs_mag was measured [string]
Typical usage: pl.make_an_plan('c:/Astro/ACP/AN20170525/planning.xlsx', exp_time_factor=0.75)
"""
    def __init__(self):
        """Open the local observation cache: read the csv file if present and valid,
        else create a new *empty* cache dataframe and write it to the csv file."""
        # Read in local cache if it exists.
        if os.path.isfile(LOCAL_OBS_CACHE_FULLPATH):
            self.df_cache = self._read_cache_from_csv()  # returns None if file invalid/empty.
            need_to_create_empty_cache = self.df_cache is None
        else:
            need_to_create_empty_cache = True
        if need_to_create_empty_cache:
            # Create *empty* dataframe with dtypes (incl. utc datetimes), write to cache file:
            # the one 'dummy' row fixes each column's dtype; trailing [:0] drops the row
            # while keeping columns and dtypes.
            self.df_cache = pd.DataFrame.from_dict(OrderedDict([
                ('fov_name', ['dummy']),
                ('main_target', ['dummy']),
                ('obs_style', ['dummy']),
                ('cache_datetime', [datetime.now(timezone.utc)]),
                ('obs_datetime', [datetime.now(timezone.utc)]),
                ('obs_mag', [0.0]),
                ('obs_mag_filter', ['dummy'])]))[:0]
            self.df_cache.index.name = 'row_index'
            csv_fullpath = self._write_cache_to_csv()  # empty cache to csv
            print('LocalObsCache: wrote new, empty cache file to ' + csv_fullpath)
        print('LocalObsCache opened; ' + str(len(self.df_cache)) + ' fovs.')
def update_fov_entries(self, df_fov,
user_update_tolerance_days=DEFAULT_UPDATE_TOLERANCE_DAYS,
max_fovs_since_write=6):
"""
For each fov available this night (in df_fov_list), update the cache.
:param df_fov: df_fov (typically of fovs available this night) [pandas DataFrame].
:param user_update_tolerance_days: pass-through parm [float].
:param max_fovs_since_write: controls frequence of writes to cache.
:return: number of fovs updated (fn effect is to update this class's cache dataframe.
"""
fovs_since_write = 0
for fov in df_fov['fov']:
need_to_write_csv = (fovs_since_write >= max_fovs_since_write - 1)
self.update_one_fov_entry(fov, user_update_tolerance_days, write_csv=need_to_write_csv)
if need_to_write_csv:
fovs_since_write = 0
else:
fovs_since_write += 1
self._write_cache_to_csv() # ensure cache written at end.
def update_one_fov_entry(self, fov, user_update_tolerance_days=DEFAULT_UPDATE_TOLERANCE_DAYS,
write_csv=False):
"""
This class's engine. Updates cache's entry for one fov, if entry is too aged.
:param fov: fov to update in cache now [Fov object]
:param user_update_tolerance_days: pass-through parm [float]
:param write_csv:
:return: if cache was updated, datetime (UTC) of new obs; else None.
"""
# TODO: If query to AAVSO yields no latest obs, put some placeholder with cache_dt at least.
if fov is None:
raise FovError
main_target = fov.main_target
# self._curate_df_cache(fov_name, main_target)
# Determine whether update is needed, return if not.
cache_row_pre_exists = fov.fov_name.lower() in list(self.df_cache['fov_name'].str.lower())
if cache_row_pre_exists:
now = datetime.now(timezone.utc)
current_cache_datetime = self.df_cache.loc[fov.fov_name, 'cache_datetime']
update_age = (now - current_cache_datetime).total_seconds() / (24 * 3600)
if user_update_tolerance_days is None:
update_tolerance_days = DEFAULT_UPDATE_TOLERANCE_DAYS
else:
update_tolerance_days = user_update_tolerance_days
entry_fresh_enough = update_age <= update_tolerance_days
if entry_fresh_enough:
return self.df_cache.loc[fov.fov_name, 'obs_datetime'] # skip updating
# Update fov's cache entry, from AAVSO webobs.
obs_style = fov.observing_style
obs_style_lower = obs_style.lower()
target_type_lower = fov.target_type.lower()
if target_type_lower == 'standard':
return None
if obs_style_lower == 'stare':
num_obs = 200
else:
num_obs = 100
print('AAVSO webobs query ' + fov.target_type +
' \'' + main_target + '\'...', end='', flush=True)
recent_observations = AavsoWebobs(star_id=main_target, num_obs=num_obs) # from AAVSO
print('ok.', end='', flush=True)
latest_obs_df = None # default if no matches.
if (obs_style_lower, target_type_lower) == ('lpv', 'mira'):
latest_obs_df = self._latest_single_obs(fov, obs_style, recent_observations,
allow_filters=['V'])
elif (obs_style_lower, target_type_lower) == ('lpv', 'lpv'):
latest_obs_df = self._latest_single_obs(fov, obs_style, recent_observations,
allow_filters=['V', 'R'])
elif obs_style_lower == 'monitor' and target_type_lower != 'astrometric':
latest_obs_df = self._latest_single_obs(fov, obs_style, recent_observations,
allow_filters=['V', 'R'])
elif obs_style_lower == 'stare':
latest_obs_df = self._latest_stare_obs(fov, recent_observations,
allow_filters=['V', 'R'])
else:
print('\n*** WARNING: for fov \'' + fov.fov_name + '(obs_style, target_type) = (' +
obs_style + ', ' + fov.target_type + ') not understood.', end='', flush=True)
if cache_row_pre_exists:
self.df_cache = latest_obs_df.combine_first(self.df_cache) # overwrites.
else:
# This else-block is kludge for pandas' mis-handling of append to empty DataFrame.
if len(self.df_cache) >= 1:
self.df_cache = self.df_cache.append(latest_obs_df, sort=True)
else:
self.df_cache = latest_obs_df.copy()
if write_csv:
self._write_cache_to_csv()
print('..csv written.', end='', flush=True)
print('')
if latest_obs_df is None:
return None
return latest_obs_df.iloc[0].loc['obs_datetime'] # obs datetime, to signal OK.
    def _latest_single_obs(self, fov, obs_style, recent_observations, allow_filters):
        """
        Takes a AavsoWebObs object and returns a pandas dataframe ready for inclusion
        in LocalCacheObs dataframe df_cache.
        Single-observation case (not stare).
        :param fov: fov to investigate for recent single observations [Fov object]
        :param obs_style: observing style [string] ('Monitor' or 'LPV')
        :param recent_observations: recent observations for fov_name [AavsoWebObs object].
        :param allow_filters: list of filters [string] to include in finding latest observation.
        :return: 1-row dataframe of relevant data about latest observation for this fov_name;
            if no qualifying observation is found, a 1-row placeholder with None obs fields
            (so the AAVSO query is not needlessly repeated) [pandas DataFrame].
        """
        # Keep only observations whose filter is in allow_filters (case-insensitive).
        allow_filters_lower = [f.lower() for f in allow_filters]
        table_filters_lower = recent_observations.table['filter'].str.lower()
        rows_to_keep = [f.lower() in allow_filters_lower for f in table_filters_lower]
        if sum(rows_to_keep) <= 0:
            latest_obs = None
        else:
            # Largest JD among kept rows == most recent observation.
            latest_obs = recent_observations.table[rows_to_keep].nlargest(1, 'jd').iloc[0]
        if latest_obs is None:
            # If no qualified observation found within webobs query,
            # construct placeholder row in df_cache, to prevent repeating query needlessly.
            latest_obs_df = pd.DataFrame.from_dict(OrderedDict([
                ('fov_name', fov.fov_name),
                ('main_target', fov.main_target),
                ('obs_style', fov.observing_style),
                ('cache_datetime', [datetime.now(timezone.utc)]),
                ('obs_datetime', [None]),
                ('obs_mag', [None]),
                ('obs_mag_filter', [None])]))
            # Convert pandas Timestamps to plain python datetimes (matches csv round-trip).
            for column_name in ['cache_datetime']:
                latest_obs_df[column_name] = [x.to_pydatetime()
                                              for x in latest_obs_df[column_name]]
        else:
            latest_obs_df = pd.DataFrame.from_dict(OrderedDict([
                ('fov_name', fov.fov_name),
                ('main_target', fov.main_target),
                ('obs_style', fov.observing_style),
                ('cache_datetime', [datetime.now(timezone.utc)]),
                ('obs_datetime', [datetime_utc_from_jd(latest_obs.jd)]),
                ('obs_mag', [latest_obs.mag]),
                ('obs_mag_filter', [latest_obs.loc['filter']])]))  # .loc: 'filter' shadows Series.filter.
            for column_name in ['cache_datetime', 'obs_datetime']:
                latest_obs_df[column_name] = [x.to_pydatetime()
                                              for x in latest_obs_df[column_name]]
        # Index the single row by fov_name, as used throughout df_cache.
        latest_obs_df.index = latest_obs_df['fov_name'].copy()
        latest_obs_df.index.name = 'row_index'
        return latest_obs_df
    def _latest_stare_obs(self, fov, recent_observations, allow_filters):
        """
        Takes a AavsoWebObs object and returns a 1-row pandas dataframe ready for inclusion
        in LocalCacheObs dataframe df_cache.
        Stare case (multiple observations in one night), typically for eclipsers.
        A 'stare' is detected as MIN_ROWS_ONE_STARE observations (same filter) whose JDs
        span no more than MAX_DAYS_ONE_STARE days.
        :param fov: fov to investigate for recent stare observations [Fov object]
        :param recent_observations: recent observations for fov_name [AavsoWebObs object].
        :param allow_filters: list of filters [string] to include in finding latest observation.
        :return: 1-row dataframe of relevant data about latest stare observation for this
            fov_name; a 1-row placeholder (None obs fields) if no qualifying stare is found;
            None if too few rows were returned to hold even one stare
            [pandas DataFrame or None].
        """
        if len(recent_observations.table) <= MIN_ROWS_ONE_STARE:
            return None
        # Find latest qualifying stare in each filter, return latest observation of latest stare.
        latest_stare_obs_df = None
        for this_filter in allow_filters:
            stare_already_found_this_filter = False
            this_filter_lower = this_filter.lower()
            table_filters_lower = recent_observations.table['filter'].str.lower()
            rows_to_keep = [f.lower() == this_filter_lower for f in table_filters_lower]
            # Rows for this filter only, most recent (largest JD) first.
            table_this_filter = recent_observations.table[rows_to_keep].sort_values(by='jd',
                                                                                    ascending=False)
            num_tests = len(table_this_filter) - MIN_ROWS_ONE_STARE + 1
            if num_tests >= 1:
                # Slide a MIN_ROWS_ONE_STARE-row window from most recent backward;
                # the first window dense enough in time is this filter's latest stare.
                for first_test_irow in range(0, num_tests):
                    if not stare_already_found_this_filter:
                        test_latest_jd = table_this_filter['jd'] \
                            .iloc[first_test_irow]
                        test_earliest_jd = table_this_filter['jd'] \
                            .iloc[first_test_irow + MIN_ROWS_ONE_STARE - 1]
                        if test_latest_jd - test_earliest_jd <= MAX_DAYS_ONE_STARE:
                            stare_already_found_this_filter = True
                            # Keep this stare only if it is later than any stare already
                            # found in a previously-examined filter.
                            if latest_stare_obs_df is None:
                                need_to_replace = True
                            else:
                                candidate_datetime = datetime_utc_from_jd(test_latest_jd)
                                existing_datetime = latest_stare_obs_df.iloc[0].loc['obs_datetime']
                                need_to_replace = (candidate_datetime > existing_datetime)
                            if need_to_replace:
                                latest_stare_obs = table_this_filter.iloc[first_test_irow]
                                latest_stare_obs_df = pd.DataFrame.from_dict(OrderedDict([
                                    ('fov_name', fov.fov_name),
                                    ('main_target', fov.main_target),
                                    ('obs_style', fov.observing_style),
                                    ('cache_datetime', [datetime.now(timezone.utc)]),
                                    ('obs_datetime', [datetime_utc_from_jd(latest_stare_obs.jd)]),
                                    ('obs_mag', [latest_stare_obs.mag]),
                                    ('obs_mag_filter', [latest_stare_obs.loc['filter']])]))
                                # Convert pandas Timestamps to plain python datetimes.
                                for column_name in ['cache_datetime', 'obs_datetime']:
                                    latest_stare_obs_df[column_name] = \
                                        [x.to_pydatetime()
                                         for x in latest_stare_obs_df[column_name]]
                                latest_stare_obs_df.index = latest_stare_obs_df['fov_name'].copy()
                                latest_stare_obs_df.index.name = 'row_index'
        if latest_stare_obs_df is None:
            # If no qualified stare observation found within webobs query,
            # construct placeholder row in df_cache, to prevent repeating query needlessly.
            latest_stare_obs_df = pd.DataFrame.from_dict(OrderedDict([
                ('fov_name', fov.fov_name),
                ('main_target', fov.main_target),
                ('obs_style', fov.observing_style),
                ('cache_datetime', [datetime.now(timezone.utc)]),
                ('obs_datetime', [None]),
                ('obs_mag', [None]),
                ('obs_mag_filter', [None])]))
            for column_name in ['cache_datetime']:
                latest_stare_obs_df[column_name] = [x.to_pydatetime()
                                                    for x in latest_stare_obs_df[column_name]]
            latest_stare_obs_df.index = latest_stare_obs_df['fov_name'].copy()
            latest_stare_obs_df.index.name = 'row_index'
        return latest_stare_obs_df
@staticmethod
def _read_cache_from_csv():
cache = pd.read_csv(LOCAL_OBS_CACHE_FULLPATH, index_col=0)
if len(cache) <= 0:
return None
for column_name in ['cache_datetime', 'obs_datetime']:
if column_name not in cache.columns:
return None
# Parse cache_datetime column.
cache['cache_datetime'] = [datetime.strptime(s, DT_FMT) for s in cache['cache_datetime']]
# Parse obs_datetime column.
for row_index in cache.index:
if str(cache.loc[row_index, 'obs_datetime']).lower() != 'none':
cache.loc[row_index, 'obs_datetime'] = \
datetime.strptime(cache.loc[row_index, 'obs_datetime'], DT_FMT)
cache.loc[row_index, 'obs_mag'] = float(cache.loc[row_index, 'obs_mag'])
else:
cache.loc[row_index, 'obs_datetime'] = None
cache.loc[row_index, 'obs_mag'] = None
cache.loc[row_index, 'obs_mag_filter'] = None
return cache
def _write_cache_to_csv(self):
    """Write self.df_cache to its CSV file, one line per cached observation.
    Very specifically writes datetimes in format: '2017-02-07 03:34:45.786374+0000'.
    Rows lacking an observation (None or NaT obs_datetime) are written with the
    literal text 'None' in the three observation fields.
    :return: full path of the CSV file written [string].
    """
    dt_format = '{:' + DT_FMT + '}'
    header_line = ','.join(['row_index', 'fov_name', 'main_target', 'obs_style',
                            'cache_datetime', 'obs_datetime', 'obs_mag', 'obs_mag_filter']) + '\n'
    lines = [header_line]
    for row_index in self.df_cache.index:
        row = self.df_cache.loc[row_index]
        # Fields common to both row flavors:
        fields = [row_index, row['fov_name'], row['main_target'], row['obs_style'],
                  dt_format.format(row['cache_datetime'])]
        obs_dt = row['obs_datetime']
        if obs_dt is None or isinstance(obs_dt, type(pd.NaT)):
            # Placeholder row: no observation available for this fov.
            fields.extend(['None', 'None', 'None'])
        else:
            fields.extend([dt_format.format(obs_dt),
                           '{:.4f}'.format(row['obs_mag']),
                           row['obs_mag_filter']])
        lines.append(','.join(fields) + '\n')
    with open(LOCAL_OBS_CACHE_FULLPATH, 'w') as f:
        f.writelines(lines)
    # print("Cache written: " + str(len(self.df_cache)) + ' fovs.')
    return LOCAL_OBS_CACHE_FULLPATH
def calc_an_priority(self, fov, an, user_update_tolerance_days=DEFAULT_UPDATE_TOLERANCE_DAYS):
    """
    Calculates astronight priority for one fov.
    :param fov: the Fov object to score; may be None if lookup failed upstream [Fov or None].
    :param an: Astronight object for the night in question.
    :param user_update_tolerance_days: pass-through parm [float].
    :return: an_priority, from fov_priority and age of most recent obs [float];
        None if fov is None; 0 for standard/unprioritized fovs.
    """
    if fov is None:
        # Bug fix: original concatenated fov (None) into the message string,
        # which raised TypeError rather than printing and returning None.
        print('LOC.calc_an_priority: fov is None (not found in fov_dict).')
        return None
    # Standard fields and unprioritized fovs never contribute priority:
    if (fov.priority is None) or (fov.target_type.lower() == 'standard'):
        return 0
    if fov.priority <= 0:
        return 0
    # self.update_one_fov_entry(fov, user_update_tolerance_days, write_csv=True)
    if fov.fov_name not in self.df_cache.index:
        return 2 * fov.priority  # the maximum, since no latest obs was accessible.
    latest_obs = self.df_cache.loc[fov.fov_name]
    if latest_obs.obs_datetime is None:
        return 2 * fov.priority  # the maximum, since no latest obs was accessible.
    # Score rises with the age (in days) of the most recent observation:
    jd_latest_obs = jd_from_datetime_utc(latest_obs.obs_datetime)
    age_days = an.local_middark_jd - jd_latest_obs
    return fov.calc_priority_score(age_days)
def _curate_df_cache(self, fov_name, main_target):
    """
    Cull damaged records from self.df_cache.
    *** Deactivated 2017-02-07 pending manual debugging. ***
    :param fov_name: fov whose cache rows are to be checked [string].
    :param main_target: the fov's main target name, or None if corrupt [string or None].
    :return: [nothing]
    """
    # Curation: If main_target is corrupt, delete all cache lines for that fov.
    if main_target is None:
        rows_to_delete = self.df_cache['fov_name'].str.lower() == fov_name.lower()
        # Idiom fix: boolean-Series negation via '~' (was 'rows_to_delete == False').
        self.df_cache = self.df_cache[~rows_to_delete]
        return
    # Curation: If fov and target names don't match, delete all such fov and target lines.
    rows_with_wrong_target = \
        (self.df_cache['fov_name'].str.lower() == fov_name.lower()) & \
        (self.df_cache['main_target'].str.lower() != main_target.lower())
    # Idiom fix: '~series' replaces the per-element list comprehension of 'not row'.
    self.df_cache = self.df_cache[~rows_with_wrong_target]
    rows_with_wrong_fov = \
        (self.df_cache['main_target'].str.lower() == main_target.lower()) & \
        (self.df_cache['fov_name'].str.lower() != fov_name.lower())
    self.df_cache = self.df_cache[~rows_with_wrong_fov]
def __str__(self):
    """Return a one-line description including the cache's observation count."""
    observation_count = len(self.df_cache)
    return 'LocalObsCache object with {} observations.'.format(observation_count)
def __repr__(self):
    """Return the canonical constructor expression for this object."""
    return 'planning.LocalObsCache()'
class AavsoWebobs:
    """
    Simple class: one object per star. Holds a dataframe of observations for one star,
    as drawn from AAVSO's webobs database.
    Also updates the local cache file (to avoid unneeded future calls to webobs).
    For Observation Styles: LPV, Monitor, and Stare; no need for Standard or Burn.
    Usage: table = AavsoWebobs("AU Aur") [for one obs/night], or
           table = AavsoWebobs("ST Tri", stare=True) [for at least 10 obs/night in filter].
    """
    def __init__(self, star_id=None, num_obs=AAVSO_WEBOBS_ROWS_TO_GET, dataframe=None):
        if dataframe is None:
            # Normal case: download the most recent observations from AAVSO webobs.
            self.table = get_aavso_webobs_raw_table(star_id, num_obs=num_obs)
            self.star_id = star_id
        else:
            # Caller supplied a pre-built dataframe, typically for testing only.
            self.table = dataframe
            self.star_id = self.table['target_name'].iloc[0]
# def get_local_aavso_reports(report_dir=None, earliest_an=None):
# pass
# report_dict = {}
# for root, dirs, files in os.walk('C:/Astro/Images/Borea Photrix/'):
# if root.endswith("Photometry"):
# report = [file for file in files if file.startswith("AAVSO")]
# if len(report) >= 1:
# report_fullpath = os.path.join(root, report[0])
# with open(report_fullpath) as report_file:
# lines = report_file.readlines()
#
#
# Report = namedtuple('Report', ['JD', 'lines'])
#
#
#
#
# def get_local_obs_age_dict(fov_dict=None, report_dir=None, target_an=None, limit_days=366):
# # TODO: finish writing get_local_obs_age_dict()
# """
# report_dir: directory_path in which all relevant AAVSO reports reside, as
# "C:/Astro/2016/Photometry".
# target_an: target astronight from which to count days, as "20151216".
# limit_days: days into the past to look up old AAVSO reports.
# Returns dict of (fov_name, days_since_last_local_obs).
# """
# pass
# if report_dir is not None and limit_days >= 1:
# fov_age_dict = {name: None for name in fov_dict.keys()} # empty dict to start
# # TODO: get report_list <- [report_text] for every eligible AAVSO report,
# latest to earliest.
#
# for report_text in report_list:
# # TODO: get jd_dict
# i.e., {fov_name: latest jd_obs} for each main target in AAVSO report.
#
# for an_dir in dir_list:
# an_dict = defaultdict(lambda: None)
# # read AAVSO report, fill an_dict with target: latest JD
# for fov_name, fov in fov_dict.items():
# an_age = an_dict[fov.main_target]
# if an_age is not None:
# dict_age = fov_age_dict[fov_name]
# if dict_age is not None:
# if an_age < dict_age:
# fov_age_dict[fov_name] = an_age
# else:
# fov_age_dict[fov_name] = an_age
# return fov_age_dict
#
# ---------------------------------------------
def make_an_roster(an_date_string, output_directory, site_name='NMS_Dome', instrument_name='Borea',
                   user_update_tolerance_days=DEFAULT_UPDATE_TOLERANCE_DAYS,
                   exp_time_factor=1, min_an_priority=6):
    # TODO: recode download loop to only download those FOVs visible tonight & for which priority might
    # be high enough (read from csv: some might already be known to have been recently observed).
    """
    Generates new .csv file containing info on each fov available this astronight.
    Typical usage: pl.make_an_roster("20170127", "C:/Astro/ACP/AN20170127/",
        user_update_tolerance_days=0.1, exp_time_factor=0.8)
    :param an_date_string: as '20170127'. Date of the evening to plan for [string]
    :param output_directory: directory_path in which to write Roster csv file [string]
    :param site_name: [string]
    :param instrument_name: [string]
    :param user_update_tolerance_days: esp for user to force update [float]
    :param exp_time_factor: multiply *raw* exp times by this; typically 0.6-0.9 [float]
    :param min_an_priority: hide Monitor and LPV targets with an_priority < this [float]
    :return: None. (Writes the roster file as a side effect; also returns early
        with None if exposure data could not be made for any fov.)
    """
    an = Astronight(an_date_string=an_date_string, site_name=site_name)
    df_fov = make_df_fov(fov_directory=FOV_DIRECTORY, fov_names_selected=None)
    print(str(len(df_fov)), 'FOVs read.')
    instrument = Instrument(instrument_name)
    # Derive a day-of-week label for the roster header line:
    an_year = int(an_date_string[0:4])
    an_month = int(an_date_string[4:6])
    an_day = int(an_date_string[6:8])
    day_of_week = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] \
        [datetime(an_year, an_month, an_day).weekday()]
    lines_header = ['ROSTER file for ' + an_date_string + ' ' + day_of_week,
                    ' as generated by photrix ' +
                    '{:%Y-%m-%d %H:%M UTC}'.format(datetime.now(timezone.utc)),
                    ' using exposure time factor = ' + '{:5.3f}'.format(exp_time_factor),
                    an.acp_header_string().replace(',', ' '),
                    '; Site=' + site_name + ' Instrument=' + instrument_name +
                    ' min.alt = ' + '{:.1f}'.format(an.site.min_altitude) + u'\N{DEGREE SIGN}']
    # Handle obs_style = 'Standard':
    lines_std = ['\n\n\nSTANDARD roster for ' + an_date_string + ': ' + 50 * '-',
                 ',fov,fov, avail_utc,transit,minutes, stars']
    df_fov_std = filter_df_fov_by_obs_styles(df_fov, obs_style_list=['Standard'])
    df_fov_std = complete_df_fov_an(df_fov_std, user_update_tolerance_days,
                                    an_string=an_date_string, site_name=site_name,
                                    min_available_seconds=MIN_AVAILABLE_SECONDS_DEFAULT,
                                    min_moon_degrees=MIN_MOON_DEGREES_DEFAULT,
                                    remove_zero_an_priority=False, remove_unobservables=True)
    for fov_index in df_fov_std.index:
        fov_name = df_fov_std.loc[fov_index, 'fov_name']
        available = df_fov_std.loc[fov_index, 'available']
        this_fov = Fov(fov_name)
        transit_hhmm = hhmm_from_datetime_utc(an.transit(RaDec(this_fov.ra, this_fov.dec)))
        exp_data = make_fov_exposure_data(fov_name, an, fov_dict=None, instrument=instrument,
                                          exp_time_factor=exp_time_factor,
                                          force_autoguide=False)  # default=autoguide iff exp-times warrant.
        if exp_data is None:
            return  # fail
        _, _, _, target_overhead, repeat_duration = exp_data
        minutes = (target_overhead + repeat_duration) / 60.0
        n_stars = len(this_fov.aavso_stars)
        this_fov_line = ',' + fov_name + ',' + fov_name + ', ' + available + ',' + \
                        "=\"" + transit_hhmm + "\"" + ',' + str(int(minutes)) + \
                        ',' + '{:3d}'.format(n_stars)  # formatting to placate Excel csv weirdness.
        lines_std.append(this_fov_line)
    # Handle obs_style = 'Stare':
    lines_stare_high_priority = \
        ['\n\n\nSTARE roster for ' + an_date_string + ': ' + 50 * '-',
         ',fov,fov, avail_utc,transit,min/rpt, an_priority,,period, events']
    lines_stare_low_priority = \
        ['\n\n\nSTARE roster (alternate; low-priority) for ' + an_date_string + ': ' + 50 * '-',
         ',fov,fov, avail_utc,transit,min/rpt, an_priority,,period, events']
    df_fov_stare = filter_df_fov_by_obs_styles(df_fov, obs_style_list=['Stare'])
    # Process each fov equally through most of the code,
    # then only in the last if block, write to one list or the other.
    df_fov_stare = complete_df_fov_an(df_fov_stare, user_update_tolerance_days,
                                      an_string=an_date_string, site_name=site_name,
                                      min_available_seconds=MIN_AVAILABLE_SECONDS_STARE,
                                      min_moon_degrees=MIN_MOON_DEGREES_STARE,
                                      remove_zero_an_priority=False, remove_unobservables=True)
    for fov_index in df_fov_stare.index:
        row = df_fov_stare.loc[fov_index]
        fov_name = row.loc['fov_name']
        available = row.loc['available']
        this_fov = Fov(fov_name)
        transit_hhmm = hhmm_from_datetime_utc(an.transit(RaDec(this_fov.ra, this_fov.dec)))
        exp_data = make_fov_exposure_data(fov_name, an, fov_dict=None, instrument=instrument,
                                          exp_time_factor=exp_time_factor,
                                          force_autoguide=False)  # default=autoguide iff exp-times warrant.
        if exp_data is None:
            return  # fail
        _, _, _, target_overhead, repeat_duration = exp_data
        minutes = (target_overhead + repeat_duration) / 60.0
        an_priority = row.loc['an_priority']
        an_priority_bars = row.loc['an_priority_bars']
        period = row.loc['period']
        row_ts = Timespan(row.loc['start'], row.loc['end'])  # Timespan object for this row.
        # For now, we will consider that each Stare FOV wants either minima or maxima but not both.
        event_type_string = STARE_EVENT_TYPES.get(this_fov.target_type.lower(), None)
        if event_type_string is None:
            print(this_fov.fov_name + ': probable bad target_type in FOV.')
        do_minima = event_type_string.lower().startswith("min")
        do_maxima = event_type_string.lower().startswith("max")
        # Start with an *empty* dataframe of events, with correct dtypes:
        df_events = pd.DataFrame.from_dict(OrderedDict(
            [('event_type', 'dummy_type'),
             ('utc', [datetime.now(timezone.utc)])]))[:0]
        if do_minima:
            # Collect any primary minima falling within tonight's available timespan:
            list_primary_mins = event_utcs_in_timespan(this_fov.JD_faint, this_fov.period, row_ts)
            if list_primary_mins is None:
                primaries_exist = False
            else:
                primaries_exist = len(list_primary_mins) >= 1
            if primaries_exist:
                df_primary_mins = pd.DataFrame.from_dict(dict([('utc', list_primary_mins,)]))
                df_primary_mins['event_type'] = "1'"
                df_events = df_events.append(df_primary_mins, sort=True)
            # Likewise for secondary minima:
            list_secondary_mins = event_utcs_in_timespan(this_fov.JD_second, this_fov.period,
                                                         row_ts)
            if list_secondary_mins is None:
                secondaries_exist = False
            else:
                secondaries_exist = len(list_secondary_mins) >= 1
            if secondaries_exist:
                df_secondary_mins = pd.DataFrame.from_dict(dict([('utc', list_secondary_mins,)]))
                df_secondary_mins['event_type'] = "2'"
                df_events = df_events.append(df_secondary_mins, sort=True)
        if do_maxima:
            list_maxima = event_utcs_in_timespan(this_fov.JD_bright, this_fov.period, row_ts)
            if list_maxima is None:
                maxima_exist = False
            else:
                maxima_exist = len(list_maxima) >= 1
            if maxima_exist:
                df_maxima = pd.DataFrame.from_dict(dict([('utc', list_maxima,)]))
                df_maxima['event_type'] = "max"
                df_events = df_events.append(df_maxima, sort=True)
        # Only fovs with at least one event tonight earn a roster line:
        if len(df_events) >= 1:
            motive = this_fov.motive
            df_events.sort_values(by='utc', inplace=True)
            events_string = ' '
            # NOTE(review): this loop variable 'row' shadows the outer per-fov 'row';
            # harmless here since the outer 'row' is not used again this iteration.
            for row in df_events.itertuples():
                events_string += str(row.event_type) + "=" + hhmm_from_datetime_utc(row.utc) + ' '
            this_fov_line = ',' + fov_name + ',' + fov_name + ',' + available + ',' + \
                            "=\"" + transit_hhmm + "\"" + ',' + str(int(minutes)) + ',' + \
                            str(int(round(an_priority))) + ' ,' + an_priority_bars + ',' + \
                            '{:7.3f}'.format(period) + ' ,' + events_string + ',' + \
                            "\" " + motive + "\""  # formatting to placate Excel csv weirdness.
            if an_priority >= STARE_AN_PRIORITY_DIVIDER:
                lines_stare_high_priority.append(this_fov_line)
            else:
                lines_stare_low_priority.append(this_fov_line)
    # Handle obs_style = 'Monitor' or 'LPV':
    lines_mon_lpv = ['\n\n\nMONITOR / LPV roster for ' + an_date_string + ': ' + 50 * '-',
                     ',fov,fov,avail_utc,transit,minutes, an_priority']
    df_fov_mon_lpv = filter_df_fov_by_obs_styles(df_fov, obs_style_list=['Monitor', 'LPV'])
    df_fov_mon_lpv = filter_df_fov_by_fov_priority(df_fov_mon_lpv,
                                                   min_fov_priority=0.5, include_std_fovs=False)
    df_fov_mon_lpv = complete_df_fov_an(df_fov_mon_lpv, user_update_tolerance_days,
                                        an_string=an_date_string,
                                        site_name=site_name,
                                        min_available_seconds=MIN_AVAILABLE_SECONDS_DEFAULT,
                                        min_moon_degrees=MIN_MOON_DEGREES_DEFAULT,
                                        remove_zero_an_priority=True, remove_unobservables=True)
    for fov_index in df_fov_mon_lpv.index:
        an_priority = df_fov_mon_lpv.loc[fov_index, 'an_priority']
        # Hide low-priority targets (user-settable threshold):
        if an_priority >= min_an_priority:
            fov_name = df_fov_mon_lpv.loc[fov_index, 'fov_name']
            available = df_fov_mon_lpv.loc[fov_index, 'available']
            this_fov = Fov(fov_name)
            transit_hhmm = hhmm_from_datetime_utc(an.transit(RaDec(this_fov.ra, this_fov.dec)))
            exp_data = make_fov_exposure_data(fov_name, an, fov_dict=None, instrument=instrument,
                                              exp_time_factor=exp_time_factor,
                                              force_autoguide=False)  # so autoguide iff exp-times warrant.
            if exp_data is None:
                return  # fail
            _, _, _, target_overhead, repeat_duration = exp_data
            minutes = (target_overhead + repeat_duration) / 60.0
            an_priority_bars = df_fov_mon_lpv.loc[fov_index, 'an_priority_bars']
            motive = Fov(fov_name).motive
            this_fov_line = ',' + fov_name + ',' + fov_name + ', ' + available + ',' + \
                            "=\"" + transit_hhmm + "\"" + ',' + str(int(minutes)) + ',' + \
                            str(int(round(an_priority))) + ' ,' + an_priority_bars + ',' + \
                            "\" " + motive + "\""  # formatting to placate Excel csv weirdness.
            lines_mon_lpv.append(this_fov_line)
    # Assemble all output lines:
    lines_all = lines_header + \
                lines_std + \
                lines_stare_high_priority + lines_stare_low_priority + \
                lines_mon_lpv
    # Write all lines to file; retry until user closes any lock (e.g., file open in Excel):
    os.makedirs(output_directory, exist_ok=True)
    output_fullpath = os.path.join(output_directory, 'Roster_' + an.an_date_string + '.csv')
    csv_written = False
    while not csv_written:
        try:
            with open(output_fullpath, 'w') as this_file:
                this_file.write('\n'.join(lines_all))
            csv_written = True
        except PermissionError:
            input('***** CLOSE file \'' + output_fullpath + '\' and hit Enter.')
    print('Done.')
class Plan:
    """ Container for all data belonging to one ACP plan.
    :param plan_id: minimalist name of this plan, as 'C' [string]
    :param plan_comment: free-text comment; surrounding whitespace is stripped [string]
    """
    def __init__(self, plan_id=None, plan_comment=None):
        self.plan_id = plan_id
        self.plan_comment = plan_comment.strip()
        self.directives = []  # as parsed from user input in parse_excel().
        # Populated later by make_events():
        self.utc_quitat = None  # forced stop time (at end of last event).
        self.afinterval = None  # in minutes; None if no afinterval requested for this plan.
        self.sets_requested = 1  # default SETS count.
        self.chain_destination = None  # next plan filename; None if no chaining requested.
        self.events = []  # exactly ONE element per intended event, regardless of SETS count.
        # Populated later by make_timeline():
        self.utc_start = None  # actual start time (computed later).
        self.utc_end = None  # actual end time (computed later).
        self.sets_completed = 0  # actual number of set cycles completed (integer).
        self.afinterval_autofocus_count = 0  # autofocuses caused by AFINTERVAL, this set.
        # Text lines bracketing the main body (filled in by make_events()):
        self.summary_pre_lines = []
        self.summary_post_lines = []
        self.acp_pre_lines = []
        self.end_warning_lines = []
        self.acp_post_lines = []

    def quitat_reached_at(self, utc):
        """Return True iff utc is at or after this plan's QUITAT time (False if none set)."""
        quitat = self.utc_quitat
        return False if quitat is None else utc >= quitat

    def __str__(self):
        return 'Plan object: {}'.format(self.plan_id)
class Directive:
    """ Holds all initial data for one user-given directive (e.g., one cell in Excel spreadsheet).
    :param type: directive type from the approved list, e.g., 'CHILL' or 'fov' [string, case-insens.]
    :param spec_dict: specification data for this directive; contents depend on type [dict].
    """
    def __init__(self, type, spec_dict):
        # Type is stored lowercase so later comparisons are case-insensitive.
        self.type = type.lower()
        self.spec = spec_dict
        self.comment = ''

    def __str__(self):
        return 'Directive object: {}'.format(self.type)
class Event:
    """ Holds all data for one event to be executed (per set).
        Each Event object will result in at least one line in summary doc and in ACP plan file.
    """
    def __init__(self, event_type, summary_text, acp_lines, duration_total=0, duration_dict=None,
                 target_name=None, ra=None, dec=None):
        self.type = event_type.lower()
        self.summary_text = summary_text  # starting text for summary document
        self.summary_lines = []  # final lines for summary document
        self.acp_lines = acp_lines  # list (always) of lines for ACP plan file [list of strings]
        self.duration_total = duration_total  # total for event; 0 for waituntil, quitat, etc.
        # .duration_dict exists only for exposure-event types: burn, stare, fov, and image,
        #    as: {'target_overhead': in sec, 'repeat_count': n, 'counts': [n], 'exp_times': [in sec]} :
        self.duration_dict = duration_dict  # dict describing durations of indiv exposures, incl overheads.
        self.utc_end = None  # for waituntil.
        self.target_name = target_name  # for any exposure event type (= fov name for most)
        self.ra = ra  # string, for exposure-event types
        self.dec = dec  # "
        # For this event's summary line. Values later populated by make_timeline():
        self.status = None  # any of: 'ok', 'chain', 'quitat', 'wait'
        self.utc_summary_display = None  # stored only for first SET.
        self.min_altitude = None  # in degrees, for ALL sets in this plan.

    def calc_actual_duration(self, utc_start, utc_quitat, afinterval, utc_most_recent_autofocus):
        """Simulate this event's exposure clock from utc_start, honoring QUITAT and AFINTERVAL.
        :param utc_start: moment the event begins [datetime].
        :param utc_quitat: plan's QUITAT cutoff, or None for no cutoff [datetime or None].
        :param afinterval: autofocus interval in minutes, or None for no AFINTERVAL [float or None].
        :param utc_most_recent_autofocus: time of last autofocus before this event [datetime].
        :return: 3-tuple (actual duration in seconds, count of AFINTERVAL autofocuses triggered,
            utc of most recent autofocus); or None if no duration_dict is available.
        """
        # No QUITAT: event runs to its full planned duration.
        if utc_quitat is None:
            return self.duration_total, 0, utc_most_recent_autofocus
        # QUITAT already passed: event does not run at all.
        if utc_start >= utc_quitat:
            return 0, 0, utc_most_recent_autofocus
        if self.duration_dict is None:
            return None
        # Total pure-exposure seconds over all repeats:
        total_exp_time = self.duration_dict['repeat_count'] * sum([c * e for (c, e) in
                                                                   zip(self.duration_dict['counts'],
                                                                       self.duration_dict['exp_times'])])
        exposure_count = self.duration_dict['repeat_count'] * sum(self.duration_dict['counts'])
        # Spread non-target, non-exposure overhead evenly across all exposures:
        overhead_per_exposure = (self.duration_total - total_exp_time -
                                 self.duration_dict['target_overhead']) / exposure_count
        utc_running = utc_start + timedelta(seconds=self.duration_dict['target_overhead'])
        event_autofocus_count = 0  # accumulator for this event.
        # Walk exposure by exposure, advancing the simulated clock:
        for i_repeat in range(self.duration_dict['repeat_count']):
            for c, e in zip(self.duration_dict['counts'], self.duration_dict['exp_times']):
                for i_exp in range(c):
                    # Update clock for any AFINTERVAL-triggered autofocus:
                    if afinterval is not None:
                        minutes_since_last_autofocus = \
                            (utc_running - utc_most_recent_autofocus).total_seconds() / 60.0
                        if minutes_since_last_autofocus > afinterval:
                            utc_running += timedelta(seconds=AUTOFOCUS_DURATION)
                            utc_most_recent_autofocus = utc_running
                            event_autofocus_count += 1
                    # Update clock for exposure itself:
                    utc_running += timedelta(seconds=overhead_per_exposure + e)
                    # Terminate event if QUITAT time has passed:
                    if utc_running >= utc_quitat:
                        return (utc_running - utc_start).total_seconds(), \
                               event_autofocus_count, utc_most_recent_autofocus
        return (utc_running - utc_start).total_seconds(), event_autofocus_count, utc_most_recent_autofocus

    def calc_lower_altitude(self, an, utc1, utc2):
        """Return the lower of this target's altitudes (in degrees) at utc1 and utc2."""
        longitude, latitude = an.site.longitude, an.site.latitude
        longitude_hex, latitude_hex = degrees_as_hex(longitude), degrees_as_hex(latitude)
        target_radec = RaDec(self.ra, self.dec)
        _, alt_deg_utc1 = az_alt_at_datetime_utc(longitude_hex, latitude_hex, target_radec, utc1)
        _, alt_deg_utc2 = az_alt_at_datetime_utc(longitude_hex, latitude_hex, target_radec, utc2)
        return min(alt_deg_utc1, alt_deg_utc2)

    def __str__(self):
        return 'Event object: ' + self.summary_text
def make_an_plan(plan_excel_path='c:/24hrs/Planning.xlsx', site_name='NMS_Dome', instrument_name='Borea',
                 fov_dict=None, earliest_an_start_hhmm=None, exp_time_factor=1):
    """ Main user fn to take sketch Excel file and generate Summary and ACP Plan files.
    Typical usage: pl.make_an_plan('c:/Astro/ACP/AN20170525/planning.xlsx', exp_time_factor=0.7)
    :param plan_excel_path: full path to Excel file holding all info for one night's observations.
    :param site_name: a Site object for location of observations.
    :param instrument_name: an Instrument object for scope to be used.
    :param fov_dict: fov_dict if available, default=None to generate new fov_dict (normal case).
    :param earliest_an_start_hhmm: 'hhmm' time to start plan, default=None for 'earliest possible'
        (normal case).
    :param exp_time_factor: multiply *raw* exp times by this; typically 0.6-0.9 [float]
    :return: Writes out Summary file with dateline, and one or more ACP plan files.
    """
    # TODO: LocalObsCache updates only for fovs actually used, not including Burns. ???meant for roster???
    # Output goes alongside the planning Excel file:
    output_directory = os.path.split(plan_excel_path)[0]
    plan_list, an = parse_excel(plan_excel_path, site_name)
    reorder_directives(plan_list)
    fov_dict = make_fov_dict() if fov_dict is None else fov_dict
    this_instrument = Instrument(instrument_name)
    make_events(plan_list, this_instrument, fov_dict, an=an, exp_time_factor=exp_time_factor)
    make_timeline(plan_list, an=an, earliest_hhmm=earliest_an_start_hhmm)
    make_acp_plan_files(plan_list, an, output_directory, exp_time_factor)
    make_summary_file(plan_list, fov_dict, an, output_directory, exp_time_factor)
def parse_excel(excel_path, site_name='NMS_Dome'):
    """
    Parses sketch Excel file, returns a list of Plan objects containing all directives and the
        relevant Astronight object.
    :param excel_path: full path to Excel file holding all info for one night's observations [str].
    :param site_name: a Site object for location of observations [string]
    :return: list of Plan objects, astronight object (2-tuple)
    ----- Target types & their syntax:
    FOV_name :: for LPV, standards, and other once-per-night targets having FOV files,
        e.g., "FF Lyr" and "Std_SA32".
    STARE nnn FOV_name :: for stare targets; nnn=number of obs cycles,
        e.g., "STARE 100 ST Tri" (typically time-limited by QUITAT, not by number of obs cycles).
    BURN FOV_name RA Dec :: for 240-second images in V and I only; no FOV file necessary,
        e.g., "BURN FF Lyr 12:00:00 +23:34:45".
    IMAGE target_name filter_mag_or_sec_string RA Dec :: arbitrary imaging,
        either in magnitudes for the current instrument, or in explicity exposure times (may be
        mixed on a single line):
        *** Magnitudes syntax: "IMAGE New target V=12 B=12.5(2) 12:00:00 +23:34:45" to image
            New target in V filter (once) at targeted mag 12, and B filter twice at targeted
            mag 12.5.
        *** Exposure syntax: "IMAGE New target V=120s B=240s(2) 12:00:00 +23:34:45" to image
            New target in V filter (once) at 120 seconds, and B filter twice at 240 seconds
            (exposure times are NOT limited, so be careful!)
        All text between "IMAGE" and first word including a "=" character will make up the
        target name.
    COLOR target_name multiplier RA Dec :: for MP color imaging (mp_color.py),
        e.g., "COLOR MP_1626 1.1x 21:55:08 +24:24:45". 'x' in multipler is optional but
        recommended.
    ----- Legal directives:
    PLAN plan_id :: starts a plan section and names it.
    ; comment_text :: semicolon at beginning of cell makes cell a comment only.
    AFINTERVAL nnn :: autofocus interval in minutes
    SETS nn :: number of times to repeat all targets, autofocuses, chills, etc
    AUTOFOCUS :: force autofocus
    CHILL -nn :: chill the cooler to -nn deg C
    QUITAT nn:nn :: quit plan at nn:nn UTC
    WAITUNTIL nn:nn :: wait to start plan (first Set) until nn:nn UTC
    SKIPFILTER filter_name :: skip filter for following targets; omit filter_name to restore
        all filters.
    DOMEOPEN :: open dome (or roll-off roof) now.
    DOMECLOSE :: close dome (or roll-off roof) now.
    SHUTDOWN :: perform ACP shutdown of camera and park scope
    CHAIN plan_id :: chain to next plan
    BURN target_id RA Dec :: shorthand for IMAGE target_id V=240sec(1) I=240sec(1) RA Dec
    IMAGE target_id exp_specs RA Dec :: take images of target at RA, Dec; exp_specs define the
        filters and exposures, e.g., V=12.8 R=120sec(2) I=11(3) where 12.8 is a magnitude, 120sec
        is 120 seconds, and (2) specifies 2 exposures (and resulting images).
    COLOR target_id multipler RA Dec :: take MP color sequence defined by COLOR_SEQUENCE_AT_V14.
    fov_name :: if line begins with none of the above, it's a FOV name and takes filters,
        exposures, RA, and Dec from its FOV file.
    """
    df = pd.read_excel(excel_path, header=None).dropna(axis=0, how='all').dropna(axis=1, how='all')
    nrow = len(df)
    ncol = len(df.columns)
    this_plan_id = ''
    # Top-left cell must hold the astronight date string, e.g. '20170127'.
    an_date_string = str(df.iloc[0, 0]).strip()
    if int(EARLIEST_AN_DATE) < int(an_date_string) < int(LATEST_AN_DATE):
        an = Astronight(an_date_string, site_name)
    else:
        # Bug fix: quoting here was mangled, so the message printed the literal text
        # " + an_date_string + " rather than the actual date value.
        print('>>>>> STOPPING: an_date_string \'' + an_date_string +
              '\' SEEMS UNREASONABLE (update LATEST_AN_DATE?).')
        return
    plan_list = []
    this_plan = None
    macro_dict = dict()
    macro_field_keys = ['^' + str(i + 1) for i in range(9)]  # macro placeholders ^1..^9.
    for irow in range(1, nrow):
        for icol in range(ncol):
            cell = df.iloc[irow, icol]
            if isinstance(cell, str):
                do_this_cell = True
            else:
                do_this_cell = ~np.isnan(cell)  # skip empty (NaN) cells.
            if do_this_cell:
                # Extract and process substrings from this cell:
                cell_str_as_read = str(cell).strip()
                cell_str_lower = cell_str_as_read.lower()
                # Add MACRO directive that stores text in a dict for later use; then continue loop:
                if cell_str_lower.startswith('macro '):
                    _, macro_key, macro_command = tuple(cell_str_as_read.split(maxsplit=2))
                    macro_dict[macro_key] = macro_command
                    continue
                # If first word of command is in macro dict, substitute expanded macro for command.
                words = cell_str_as_read.split()
                macro_command = macro_dict.get(words[0], None)
                if macro_command is not None:
                    macro_misused = False
                    insert_strings = words[1:]
                    for i_key, macro_field_key in enumerate(macro_field_keys):
                        iloc = macro_command.find(macro_field_key)
                        if iloc >= 0:
                            if i_key < len(insert_strings):
                                insert_string = insert_strings[i_key]
                            else:
                                macro_misused = True
                                insert_string = '???'
                            macro_command = macro_command.replace(macro_field_key, insert_string)
                    if macro_misused:
                        print(' >>>>> ERROR: Macro misused in cell \'' + cell_str_as_read + '\'')
                    cell_str_as_read = macro_command
                    cell_str_lower = cell_str_as_read.lower()  # refresh this variable.
                # Handle any comment after first semi-colon:
                split_str = cell_str_as_read.split(';', maxsplit=1)
                command = split_str[0].strip()
                if len(split_str) > 1:
                    comment = split_str[1].rstrip()
                else:
                    comment = NO_COMMENT_STRING
                # Determine action type and add action to directive_list:
                if cell_str_lower.startswith('plan'):
                    if this_plan is not None:
                        plan_list.append(this_plan)  # save previous plan, if any
                    this_plan_id = an_date_string + '_' + command[len('plan'):].strip()
                    this_plan = Plan(this_plan_id, comment)
                elif cell_str_lower.startswith('sets'):
                    set_count = command[len('sets'):].strip()
                    this_plan.directives.append(Directive('sets', {'count': int(set_count)}))
                elif cell_str_as_read.startswith(';'):
                    this_plan.directives.append(Directive('comment', {'text': comment}))
                elif cell_str_lower.startswith('afinterval'):
                    minutes = command[len('afinterval'):].strip()
                    this_plan.directives.append(Directive('afinterval', {'minutes': int(minutes)}))
                elif cell_str_lower.startswith('autofocus'):
                    this_plan.directives.append(Directive('autofocus', {}))
                elif cell_str_lower.startswith('chill'):
                    tempC = command[len('chill'):].strip()
                    this_plan.directives.append(Directive('chill', {'tempC': float(tempC)}))
                elif cell_str_lower.startswith('quitat'):
                    hhmm_utc = command[len('quitat'):].strip().replace(':', '')
                    this_plan.directives.append(Directive('quitat', {'utc': hhmm_utc}))
                elif cell_str_lower.startswith('waituntil'):
                    value = command[len('waituntil'):].strip().replace(':', '')
                    # Negative value = sun altitude in degrees; else an hhmm UTC time.
                    spec_dict = {'sun_degrees': None, 'utc': None}  # overwrite one of these, just below.
                    if float(value) < 0:
                        spec_dict['sun_degrees'] = float(value)
                    else:
                        spec_dict['utc'] = value
                    this_plan.directives.append(Directive('waituntil', spec_dict))
                elif cell_str_lower.startswith('skipfilter'):
                    value = command[len('skipfilter'):].strip()
                    if cell_str_lower.startswith('skipfilters'):
                        value = command[len('skipfilters'):].strip()  # deprecated SKIPFILTERS (plural) case
                    skipfilter_list = [item.strip() for item in value.split()]
                    this_plan.directives.append(Directive('skipfilter', {'filters': skipfilter_list}))
                elif cell_str_lower.startswith('domeopen'):
                    this_plan.directives.append(Directive('domeopen', {}))
                elif cell_str_lower.startswith('domeclose'):
                    this_plan.directives.append(Directive('domeclose', {}))
                elif cell_str_lower.startswith('shutdown'):
                    this_plan.directives.append(Directive('shutdown', {}))
                elif cell_str_lower.startswith('chain'):
                    next_plan_filename = 'plan_' + an_date_string + '_' + \
                                         command[len('chain'):].strip().upper()
                    if not next_plan_filename.endswith('.txt'):
                        next_plan_filename += '.txt'
                    this_plan.directives.append(Directive('chain', {'filename': next_plan_filename}))
                elif cell_str_lower.startswith('burn'):
                    value = command[len('burn'):].strip()
                    this_fov_name, ra_string, dec_string = extract_ra_dec(value)
                    # this_fov_name, ra_string, dec_string = tuple(value.rsplit(maxsplit=2))
                    this_plan.directives.append(Directive('burn', {'fov_name': this_fov_name.strip(),
                                                                   'ra': ra_string.strip(),
                                                                   'dec': dec_string.strip(),
                                                                   'force_autoguide': True}))
                elif cell_str_lower.startswith('stare'):
                    value = command[len('stare'):].strip()
                    repeats, this_fov_name = tuple(value.split(maxsplit=1))
                    this_plan.directives.append(Directive('stare', {'fov_name': this_fov_name.strip(),
                                                                    'repeat_count': int(repeats),
                                                                    'force_autoguide': True}))
                elif cell_str_lower.startswith('image'):
                    value = command[len('image'):].strip()
                    force_autoguide, value = get_and_remove_option(value, FORCE_AUTOGUIDE_TOKEN)
                    subvalue, ra_string, dec_string = extract_ra_dec(value)
                    # subvalue, ra_string, dec_string = tuple(value.rsplit(maxsplit=2))
                    filter_entries = []
                    target_name = "WARNING: NO TARGET NAME"
                    # Peel 'filter=exposure' entries off the right end until only the
                    # (possibly multi-word) target name remains:
                    while True:
                        if len(subvalue) <= 0:
                            print(">>>>> WARNING: No target name for command '" +
                                  cell_str_as_read + "'.")
                            break
                        if len(subvalue.split()) == 1:
                            target_name = subvalue
                            break
                        subsubvalue, item = subvalue.rsplit(maxsplit=1)
                        is_filter_entry = '=' in item
                        if is_filter_entry:
                            filter_entries.append(item.strip())
                        else:
                            target_name = subvalue
                            break
                        subvalue = subsubvalue
                    filter_entries.reverse()  # restore left-to-right (user) order.
                    if len(filter_entries) >= 1:
                        this_plan.directives.append(Directive('image',
                                                              {'target_name': target_name,
                                                               'filter_entries': filter_entries,
                                                               'ra': ra_string,
                                                               'dec': dec_string,
                                                               'force_autoguide': force_autoguide}))
                elif cell_str_lower.startswith('color'):
                    value = command[len('color'):].strip()
                    force_autoguide, value = get_and_remove_option(value, FORCE_AUTOGUIDE_TOKEN)  # ignored.
                    subvalue, ra_string, dec_string = extract_ra_dec(value)
                    target_name, multiplier_string = tuple(subvalue.rsplit(maxsplit=1))
                    multiplier_string = multiplier_string.lower().split('x')[0]  # trailing 'x' optional.
                    multiplier = float(multiplier_string)
                    entries = tuple([(filt, multiplier * exp14, repeats)
                                     for (filt, exp14, repeats) in COLOR_SEQUENCE_AT_V14])
                    this_plan.directives.append(Directive('color',
                                                          {'target_name': target_name,
                                                           'entries': entries,
                                                           'multiplier_string': multiplier_string,
                                                           'ra': ra_string,
                                                           'dec': dec_string,
                                                           'force_autoguide': COLOR_FORCE_AUTOGUIDE,
                                                           'comment': comment}  # store comment for color.
                                                          ))
                else:
                    # Anything else we treat as a fov_name:
                    value = command  # use the whole command string (before comment); no directive string.
                    force_autoguide, value = get_and_remove_option(value, FORCE_AUTOGUIDE_TOKEN)
                    fov_name = value.strip()
                    if len(fov_name) >= 2:
                        this_plan.directives.append(Directive('fov', {'fov_name': fov_name,
                                                                      'force_autoguide': force_autoguide}))
    plan_list.append(this_plan)  # Ensure we save the last plan.
    return plan_list, an
def get_and_remove_option(string, option):
    """ Report whether a space-delimited option token (e.g., 'AG+') occurs in a directive's
    value string, and give back that string with every occurrence of the token removed.
    Matching is case-insensitive; the original case of the surviving text is preserved.
    Used in parse_excel().
    :param string: value string of directive
        (e.g., 'IMAGE MP_191 AG+ Clear=200sec(1) 12:34:45 -06:34:21'). [string]
    :param option: option string to locate (e.g., 'AG+'). [string]
    :return: (flag: True iff option was present, value string with all option tokens removed).
        [2-tuple of (boolean, string)]
    """
    padded = ' ' + string + ' '  # sentinel spaces so the token can match at either end.
    token = ' ' + option.upper() + ' '
    flag = token in padded.upper()
    # Excise every space-bounded occurrence, keeping one separating space each time:
    while True:
        i = padded.upper().find(token)
        if i < 0:
            break
        padded = padded[:i + 1] + padded[i + len(token):]
    return flag, padded.strip()
def reorder_directives(plan_list):
    """
    Puts directives within each Plan object in the desired order, returns the updated plan list.
    Directives whose type appears in no ordering sublist are silently dropped from the plan,
    with a warning printed.
    :param plan_list: the plan list whose directives are to be reordered [list of Plan objects].
    :return: the plan list with reordered directives [list of Plan objects].
        (Plans are also modified in place.)
    """
    # Directives within each sublist retain user's given order.
    ideal_directive_ordering = [['quitat'],
                                ['afinterval'],
                                ['sets'],
                                ['waituntil', 'chill', 'stare', 'fov', 'burn',
                                 'image', 'color', 'autofocus', 'comment', 'skipfilter',
                                 'domeopen', 'domeclose'],
                                ['shutdown'],
                                ['chain']]
    for plan in plan_list:
        reordered_directive_list = []
        for directive_order_sublist in ideal_directive_ordering:
            for this_directive in plan.directives:
                if this_directive.type.lower() in directive_order_sublist:
                    reordered_directive_list.append(this_directive)
        # BUG FIX: count omissions BEFORE overwriting plan.directives; the original
        # computed this after the overwrite, so num_omitted was always zero and the
        # warning could never fire.
        num_omitted = len(plan.directives) - len(reordered_directive_list)
        plan.directives = reordered_directive_list
        if num_omitted > 0:
            print('>>>>> WARNING: ' + str(num_omitted) + ' actions in plan ' + plan.plan_id +
                  ' were omitted during ordering.')
    return plan_list  # restored (was commented out) to match the documented contract.
def make_events(plan_list, instrument, fov_dict, an, exp_time_factor):
    """ Translate user's directives into executable events (to be repeated if more than one set).
    For simplicity, handle all directives, even if not enough plan time to complete them all (common).
    Compute event durations here, but postpone creation of full plan timeline to later function.
    :param plan_list: plans whose directives are to be translated into events;
        modified in place [list of Plan objects].
    :param instrument: instrument for which exposures are computed [Instrument object].
    :param fov_dict: FOV definitions, keyed by fov name [dict of Fov objects].
    :param an: the astronight being planned [Astronight object].
    :param exp_time_factor: user multiplier on nominal exposure times [float].
    :return: [nothing--it modifies plan_list in place].
    """
    for plan in plan_list:
        skipfilter_list = []  # default; updated by any 'skipfilter' directive within this plan.
        # For each directive: make event and add it to plan's event list:
        for directive in plan.directives:
            if directive.type == 'waituntil':  # NB there may be >1 waituntil, but only 1 active quitat.
                if directive.spec['sun_degrees'] is not None:
                    # Sun-altitude form: find utc at which the sun last set through given altitude.
                    sun_degrees = directive.spec['sun_degrees']
                    site_obs = ephem.Observer()
                    site_obs.lat, site_obs.lon = str(an.site.latitude), str(an.site.longitude)
                    site_obs.elevation = an.site.elevation
                    sun = ephem.Sun(site_obs)
                    site_obs.horizon = str(sun_degrees)
                    utc_end = site_obs.previous_setting(sun, an.local_middark_utc) \
                        .datetime().replace(tzinfo=timezone.utc)
                    this_summary_text = 'WAITUNTIL sun reaches ' + \
                                        '{0:g}'.format(sun_degrees) + u'\N{DEGREE SIGN}' + ' alt'
                    this_acp_entry = ['#WAITUNTIL 1, ' + '{0:g}'.format(sun_degrees) + ' ; deg sun alt']
                else:
                    # Explicit-time form: left-pad user's time to 4 digits, then to a utc datetime.
                    hhmm = ('0' + directive.spec['utc'])[-4:]
                    utc_end = an.datetime_utc_from_hhmm(hhmm)
                    formatted_time = '{:%m/%d/%Y %H:%M}'.format(utc_end)
                    this_summary_text = 'WAITUNTIL ' + hhmm + ' utc'
                    this_acp_entry = ['#WAITUNTIL 1, ' + formatted_time + ' ; utc']
                this_event = Event('waituntil', this_summary_text, this_acp_entry)
                this_event.utc_end = utc_end
                plan.events.append(this_event)
            elif directive.type == 'chill':
                # Camera cooling; duration is a fixed constant.
                this_summary_text = 'CHILL ' + '{0:g}'.format(directive.spec['tempC'])
                this_acp_entry = ['#CHILL ' + '{0:g}'.format(directive.spec['tempC'])]
                this_event = Event('chill', this_summary_text, this_acp_entry, CHILL_DURATION)
                this_event.setpoint = directive.spec['tempC']
                plan.events.append(this_event)
            elif directive.type == 'stare':
                # Repeated observation of one FOV (e.g., for eclipsers).
                n_repeats = directive.spec['repeat_count']
                fov_name = directive.spec['fov_name']
                this_summary_text = 'Stare ' + str(n_repeats) + ' repeats at ' + fov_name
                if directive.spec['force_autoguide'] is True:
                    this_summary_text += ' AG+'
                exp_data = make_fov_exposure_data(fov_name, an, fov_dict, instrument,
                                                  exp_time_factor=exp_time_factor,
                                                  skipfilter_list=skipfilter_list,
                                                  force_autoguide=directive.spec['force_autoguide'])
                if exp_data is None:
                    return  # fail
                filters, counts, exp_times, target_overhead, repeat_duration = exp_data
                event_duration = target_overhead + n_repeats * repeat_duration
                duration_comment = str(round(repeat_duration / 60.0, 1)) + ' min/repeat --> ' + \
                                   str(round(event_duration / 60.0, 1)) + ' min (nominal)'
                this_fov = fov_dict[fov_name]
                this_acp_entry = [';', '#REPEAT ' + str(n_repeats) + ';',
                                  '#DITHER 0 ;',
                                  '#FILTER ' + ','.join(filters) + ' ;',
                                  '#BINNING ' + ','.join(len(filters) * ['1']) + ' ;',
                                  '#COUNT ' + ','.join([str(c) for c in counts]) + ' ;',
                                  '#INTERVAL ' + ','.join([str(e).split('.0')[0]
                                                           for e in exp_times]) +
                                  ' ; ' + duration_comment,
                                  ';----' + this_fov.acp_comments, fov_name + '\t' +
                                  ra_as_hours(this_fov.ra) + '\t' + dec_as_hex(this_fov.dec)]
                if directive.spec['force_autoguide'] is True:
                    this_acp_entry.insert(1, '#AUTOGUIDE ; Automatic for stare target')
                duration_dict = {'target_overhead': target_overhead,
                                 'repeat_count': n_repeats,
                                 'counts': counts,
                                 'exp_times': exp_times}
                this_event = Event('stare', this_summary_text, this_acp_entry,
                                   event_duration, duration_dict,
                                   ra=ra_as_hours(this_fov.ra), dec=dec_as_hex(this_fov.dec))
                this_event.target_name = fov_name
                plan.events.append(this_event)
            elif directive.type == 'fov':
                # Single observation of one FOV (one repeat).
                fov_name = directive.spec['fov_name']
                this_summary_text = fov_name
                if directive.spec['force_autoguide'] is True:
                    this_summary_text += ' AG+'
                exp_data = make_fov_exposure_data(fov_name, an, fov_dict, instrument,
                                                  exp_time_factor=exp_time_factor,
                                                  skipfilter_list=skipfilter_list,
                                                  force_autoguide=directive.spec['force_autoguide'])
                if exp_data is None:
                    return  # fail
                filters, counts, exp_times, target_overhead, repeat_duration = exp_data
                event_duration = target_overhead + 1 * repeat_duration
                duration_comment = ' --> ' + str(round(event_duration / 60.0, 1)) + ' min'
                this_fov = fov_dict[fov_name]
                this_acp_entry = [';', '#DITHER 0 ;',
                                  '#FILTER ' + ','.join(filters) + ' ;',
                                  '#BINNING ' + ','.join(len(filters) * ['1']) + ' ;',
                                  '#COUNT ' + ','.join([str(c) for c in counts]) + ' ;',
                                  '#INTERVAL ' + ','.join([str(e).split('.0')[0]
                                                           for e in exp_times]) +
                                  ' ; ' + duration_comment,
                                  ';----' + this_fov.acp_comments, fov_name + '\t' +
                                  ra_as_hours(this_fov.ra) + '\t' + dec_as_hex(this_fov.dec)]
                if directive.spec['force_autoguide'] is True:
                    this_acp_entry.insert(1, '#AUTOGUIDE ; Forced')
                duration_dict = {'target_overhead': target_overhead,
                                 'repeat_count': 1,
                                 'counts': counts,
                                 'exp_times': exp_times}
                this_event = Event('fov', this_summary_text, this_acp_entry,
                                   event_duration, duration_dict,
                                   ra=ra_as_hours(this_fov.ra), dec=dec_as_hex(this_fov.dec))
                this_event.target_name = fov_name
                plan.events.append(this_event)
            elif directive.type == 'burn':
                # Short V+I exposures at given coordinates, to make a new FOV file later.
                future_fov_name = directive.spec['fov_name']
                ra = directive.spec['ra']
                dec = directive.spec['dec']
                this_summary_text = 'BURN ' + future_fov_name + ' ' + ra + ' ' + dec
                if directive.spec['force_autoguide'] is True:
                    this_summary_text += ' AG+'
                this_acp_entry = [';', '#DITHER 0 ;', '#FILTER V,I ;', '#BINNING 1,1 ;',
                                  '#COUNT 1,1 ;', '#INTERVAL ' +
                                  str(BURN_EXPOSURE) + ',' + str(BURN_EXPOSURE) +
                                  ' ;----> BURN for new FOV file.',
                                  future_fov_name + '\t' + ra + '\t' + dec + ' ;']
                if directive.spec['force_autoguide'] is True:
                    this_acp_entry.insert(1, '#AUTOGUIDE ; Automatic for burn target')
                target_overhead, repeat_duration = tabulate_target_durations(
                    filters=['V', 'I'], counts=[1, 1],
                    exp_times=[BURN_EXPOSURE, BURN_EXPOSURE], force_autoguide=True)
                event_duration = target_overhead + 1 * repeat_duration
                duration_dict = {'target_overhead': event_duration - 2 * BURN_EXPOSURE,
                                 'repeat_count': 1,
                                 'counts': [1, 1],
                                 'exp_times': 2 * [BURN_EXPOSURE]}
                this_event = Event('burn', this_summary_text, this_acp_entry,
                                   event_duration, duration_dict,
                                   ra=ra, dec=dec)
                this_event.target_name = future_fov_name
                plan.events.append(this_event)
            elif directive.type == 'image':
                # User-defined target with explicit filter/magnitude/count entries.
                target_name = directive.spec['target_name']
                filter_entries = directive.spec['filter_entries']
                ra = directive.spec['ra']
                dec = directive.spec['dec']
                filters, counts, exp_times, target_overhead, repeat_duration = \
                    make_image_exposure_data(filter_entries, instrument, exp_time_factor=exp_time_factor,
                                             force_autoguide=directive.spec['force_autoguide'])
                event_duration = target_overhead + 1 * repeat_duration
                this_summary_text = 'Image ' + target_name +\
                    ' ' + ' '.join([f + '=' + '{0:g}'.format(e) + 's(' + str(c) + ')'
                                    for (f, e, c) in zip(filters, exp_times, counts)]) +\
                    ' ' + ra + ' ' + dec
                if directive.spec['force_autoguide'] is True:
                    this_summary_text += ' AG+'
                duration_comment = ' --> ' + str(round(event_duration / 60.0, 1)) + ' min'
                this_acp_entry = [';', '#DITHER 0 ;',
                                  '#FILTER ' + ','.join(filters) + ' ;',
                                  '#BINNING ' + ','.join(len(filters) * ['1']) + ' ;',
                                  '#COUNT ' + ','.join([str(c) for c in counts]) + ' ;',
                                  '#INTERVAL ' +
                                  ','.join([str(round(e, 1)).split('.0')[0]
                                            for e in exp_times]) +
                                  ' ; ' + duration_comment,
                                  ';---- from IMAGE directive -----',
                                  target_name + '\t' + ra + '\t' + dec]
                if directive.spec['force_autoguide'] is True:
                    this_acp_entry.insert(1, '#AUTOGUIDE ; Forced')
                duration_dict = {'target_overhead': target_overhead,
                                 'repeat_count': 1,
                                 'counts': counts,
                                 'exp_times': exp_times}
                this_event = Event('image', this_summary_text, this_acp_entry,
                                   event_duration, duration_dict,
                                   ra=ra, dec=dec)
                this_event.target_name = target_name
                plan.events.append(this_event)
            elif directive.type == 'color':
                # Fixed color-sequence target; entries were precomputed in parse_excel().
                target_name = directive.spec['target_name']
                entries = directive.spec['entries']
                ra = directive.spec['ra']
                dec = directive.spec['dec']
                filters, counts, exp_times, target_overhead, repeat_duration = \
                    make_color_exposure_data(entries, force_autoguide=True)
                event_duration = target_overhead + 1 * repeat_duration
                this_summary_text = 'Color ' + target_name + \
                    ' ' + directive.spec['multiplier_string'] + \
                    'x ' + '{0:.1f}'.format(event_duration / 60.0) + ' min.' + \
                    ' ' + ra + ' ' + dec
                if directive.spec['force_autoguide'] is True:
                    this_summary_text += ' AG+'
                if directive.spec['comment'] != NO_COMMENT_STRING:
                    this_summary_text += ' ; ' + directive.spec['comment']
                duration_comment = ' --> ' + str(round(event_duration / 60.0, 1)) + ' min'
                this_acp_entry = [';', '#DITHER 0 ;',
                                  '#FILTER ' + ','.join(filters) + ' ;',
                                  '#BINNING ' + ','.join(len(filters) * ['1']) + ' ;',
                                  '#COUNT ' + ','.join([str(c) for c in counts]) + ' ;',
                                  '#INTERVAL ' +
                                  ','.join([str(round(e, 1)).split('.0')[0]
                                            for e in exp_times]) +
                                  ' ; ' + duration_comment,
                                  ';---- from COLOR directive -----',
                                  target_name + '\t' + ra + '\t' + dec]
                if directive.spec['force_autoguide'] is True:
                    this_acp_entry.insert(1, '#AUTOGUIDE ; Forced')
                duration_dict = {'target_overhead': target_overhead,
                                 'repeat_count': 1,
                                 'counts': counts,
                                 'exp_times': exp_times}
                this_event = Event('color', this_summary_text, this_acp_entry,
                                   event_duration, duration_dict,
                                   ra=ra, dec=dec)
                this_event.target_name = target_name
                plan.events.append(this_event)
            elif directive.type == 'autofocus':
                this_summary_text = 'AUTOFOCUS'
                this_acp_entry = [';', '#AUTOFOCUS']
                event_duration = AUTOFOCUS_DURATION
                this_event = Event('autofocus', this_summary_text, this_acp_entry, event_duration)
                plan.events.append(this_event)
            elif directive.type == 'comment':
                # Zero-duration; simply echoed into plan and summary files.
                comment_text = directive.spec['text']
                this_summary_text = ';' + comment_text
                this_acp_entry = [';' + comment_text]
                event_duration = 0
                this_event = Event('comment', this_summary_text, this_acp_entry, event_duration)
                plan.events.append(this_event)
            elif directive.type == 'skipfilter':
                # Zero-duration state change: affects exposure data of LATER fov/stare directives.
                new_skipfilter_list = directive.spec['filters']
                if len(new_skipfilter_list) == 0:
                    skipfilter_list_text = 'none'
                else:
                    skipfilter_list_text = ' '.join(new_skipfilter_list)
                this_summary_text = 'SKIPFILTER ' + skipfilter_list_text
                this_acp_entry = [';', '; (skipfilter: ' + skipfilter_list_text + ')']
                skipfilter_list = new_skipfilter_list  # changing this state variable
                event_duration = 0
                this_event = Event('skipfilter', this_summary_text, this_acp_entry, event_duration)
                plan.events.append(this_event)
            elif directive.type == 'domeopen':
                this_summary_text = 'DOMEOPEN'
                this_acp_entry = [';', '#DOMEOPEN']
                event_duration = DOME_OPEN_TIME
                this_event = Event('domeopen', this_summary_text, this_acp_entry, event_duration)
                plan.events.append(this_event)
            elif directive.type == 'domeclose':
                this_summary_text = 'DOMECLOSE'
                this_acp_entry = [';', '#DOMECLOSE']
                event_duration = DOME_CLOSE_TIME
                this_event = Event('domeclose', this_summary_text, this_acp_entry, event_duration)
                plan.events.append(this_event)
            elif directive.type == 'shutdown':
                this_summary_text = 'SHUTDOWN'
                this_acp_entry = [';', '#SHUTDOWN']
                event_duration = SHUTDOWN_DURATION
                this_event = Event('shutdown', this_summary_text, this_acp_entry, event_duration)
                plan.events.append(this_event)
            # The remaining directive types set plan-level attributes; they produce NO events:
            elif directive.type == 'quitat':
                plan.utc_quitat = an.datetime_utc_from_hhmm(directive.spec['utc'])
            elif directive.type == 'afinterval':
                plan.afinterval = float(directive.spec['minutes'])
            elif directive.type == 'sets':
                plan.sets_requested = int(directive.spec['count'])
            elif directive.type == 'chain':
                plan.chain_destination = directive.spec['filename']
            else:
                print(">>>>> ERROR: in plan", plan.plan_id,
                      ', directive', directive.type, 'not understood.')
def make_timeline(plan_list, an, earliest_hhmm):
    """ Walk all plans and their events in order, assigning start/end times, set counts,
    per-event statuses, and AFINTERVAL autofocuses, honoring WAITUNTIL, QUITAT, SHUTDOWN,
    and dark-time limits. Modifies plan_list in place.
    :param plan_list: plans with events already built by make_events() [list of Plan objects].
    :param an: the astronight being planned [Astronight object].
    :param earliest_hhmm: earliest allowed start time as 'hhmm' utc, or None for default
        (AN_START_REL_UTC_0000 hours relative to 0000 utc). [string or None]
    :return: [nothing--it modifies plan_list in place].
    """
    # TODO: SHUTDOWN needs repair, to make it function & stop (1) in mid-plan, (2) even with SETS.
    # For now, put #SHUTDOWN in its own (last) plan.
    # Initialize times & intervals to state before first plan:
    if earliest_hhmm is not None:
        utc_running = an.datetime_utc_from_hhmm(earliest_hhmm)
    else:
        utc_running = an.datetime_utc_from_hhmm('0000') + timedelta(hours=AN_START_REL_UTC_0000)
        if utc_running > an.ts_dark.start:
            utc_running -= timedelta(hours=24)
    utc_most_recent_autofocus = utc_running - timedelta(days=1000)  # keep python happy with a prev value.
    shutdown_performed = False
    current_chill_setpoint = None
    for plan in plan_list:
        plan.utc_start = utc_running
        no_plan_exposures_yet_encountered = True
        for i_set in range(1, plan.sets_requested + 1):  # i_set = 1 to sets_requested, inclusive.
            # skipfilter_list = []  # reset at beginning of set execution.
            for event in plan.events:
                # First, do autofocus if AFINTERVAL since latest autofocus has passed or at plan startup:
                # TODO: rewrite (move?) this, so that long Stare & Image events can have > 1 autofocus.
                if plan.afinterval is not None:
                    minutes_since_last_autofocus = \
                        (utc_running - utc_most_recent_autofocus).total_seconds() / 60.0
                    if event.type in ['burn', 'stare', 'fov', 'image', 'color']:
                        if minutes_since_last_autofocus > plan.afinterval or \
                                no_plan_exposures_yet_encountered:
                            # Perform AFINTERVAL autofocus:
                            utc_running += timedelta(seconds=AUTOFOCUS_DURATION)
                            utc_most_recent_autofocus = utc_running
                            plan.afinterval_autofocus_count += 1
                            if plan.sets_requested == 1:
                                event.summary_text += ' (af)'
                            if plan.quitat_reached_at(utc_running):
                                break  # if quitat time reached during afinterval autofocus, do not run event.
                utc_start_event = utc_running
                # Store event's actual end time (incl quitat if active):
                if event.type == 'waituntil':
                    # WAITUNTIL only works in first set (set 1):
                    if i_set == 1:
                        if plan.utc_quitat is not None:
                            utc_end_event = min(event.utc_end, plan.utc_quitat)  # not later than QUITAT.
                        else:
                            utc_end_event = event.utc_end
                        # But definitely not before utc_running (time goes not backward):
                        utc_end_event_actual = max(utc_end_event, utc_running)
                elif event.type in ['comment', 'skipfilter']:
                    utc_end_event_actual = utc_start_event  # zero duration
                elif event.type in ['shutdown', 'autofocus', 'domeopen', 'domeclose']:
                    utc_end_event_actual = utc_start_event + timedelta(seconds=event.duration_total)
                elif event.type == 'chill':
                    if event.setpoint != current_chill_setpoint:
                        utc_end_event_actual = utc_start_event + timedelta(seconds=event.duration_total)
                        current_chill_setpoint = event.setpoint
                    else:
                        # BUG FIX: setpoint unchanged -> chill is a no-op taking no time.
                        # (Previously utc_end_event_actual was left stale, or unbound if
                        # this was the first event processed.)
                        utc_end_event_actual = utc_start_event
                elif event.type in ['burn', 'stare', 'fov', 'image', 'color']:
                    actual_duration, event_autofocus_count, utc_most_recent_autofocus = \
                        event.calc_actual_duration(utc_start_event, plan.utc_quitat,
                                                   plan.afinterval, utc_most_recent_autofocus)
                    if event_autofocus_count >= 1:
                        event.summary_text += ' (' + str(event_autofocus_count) + ' af)'
                        plan.afinterval_autofocus_count += event_autofocus_count
                    utc_end_event_actual = utc_start_event + timedelta(seconds=actual_duration)
                    no_plan_exposures_yet_encountered = False
                else:
                    # BUG FIX: message previously contained a stray '""'; also give the
                    # unknown event zero duration rather than a stale end time.
                    print('make_timeline() doesn\'t recognize event type: ' + event.type)
                    utc_end_event_actual = utc_start_event
                # Store event's summary display time (hhmm on summary line, usually for set 1):
                if i_set == 1:
                    event.utc_summary_display = utc_start_event
                # Update event's minimum altitude (all sets, target event types only):
                if event.type in ['burn', 'stare', 'fov', 'image', 'color']:
                    this_lower_alt = event.calc_lower_altitude(an, utc_start_event, utc_end_event_actual)
                    if event.min_altitude is None:
                        event.min_altitude = this_lower_alt
                    else:
                        event.min_altitude = min(event.min_altitude, this_lower_alt)
                # Store event's status:
                if event.type == 'chain':
                    event.status = 'CHAIN'
                elif plan.quitat_reached_at(utc_running):
                    event.status = 'QUITAT'
                elif utc_start_event < an.ts_dark.start or utc_end_event_actual > an.ts_dark.end:
                    event.status = 'LIGHT'
                elif event.type in ['burn', 'stare', 'fov', 'image', 'color', 'autofocus', 'chill']:
                    event.status = str(i_set)  # default
                elif event.type in ['shutdown', 'waituntil', 'domeopen', 'domeclose']:
                    event.status = 'ok'
                else:
                    event.status = ''
                # Finally, update master clock at end of this event:
                utc_running = utc_end_event_actual
                # For SHUTDOWN, signal end of entire run:
                if event.type == 'shutdown':
                    shutdown_performed = True
                    break  # out of event loop (to next set).
                # Stop events if shutdown run or quitat reached:
                if plan.quitat_reached_at(utc_running) or shutdown_performed:
                    break  # out of event loop (to next set, which will also stop)
            # Quit set if shutdown run or quitat reached:
            if plan.quitat_reached_at(utc_running) or shutdown_performed:
                break  # out of set loop (to next plan)
        plan.sets_completed = i_set
        # Finish any end-of-plan business (incl saving statistics):
        plan.utc_end = utc_running
        # Quit plan if shutdown run or quitat reached:
        if shutdown_performed:
            break  # out of plan loop to end of timeline.
    # Finish end-of-night business (or could go outside this function, instead):
    pass
def make_acp_plan_files(plan_list, an, output_directory, exp_time_factor):
    """ Write one ACP-format plan file ('plan_<id>.txt') per Plan object into
    output_directory, first deleting any plan files left from a previous run.
    :param plan_list: plans to write, with events already built [list of Plan objects].
    :param an: the astronight being planned (supplies the header string) [Astronight object].
    :param output_directory: directory into which plan files are written [string].
    :param exp_time_factor: exposure time factor, echoed in each file header [float].
    :return: [nothing--it writes the plan files].
    """
    # Remove stale ACP plan files from any previous run:
    for old_name in os.listdir(output_directory):
        if old_name.startswith("plan_") and old_name.endswith(".txt"):
            os.remove(os.path.join(output_directory, old_name))
    # Write one ACP-format plan file per plan:
    for plan in plan_list:
        comment_suffix = '' if plan.plan_comment is None else '; ' + plan.plan_comment
        acp_lines = ['; ACP PLAN ' + plan.plan_id + comment_suffix,
                     '; as generated by photrix at ' +
                     '{:%Y-%m-%d %H:%M UTC}'.format(datetime.now(timezone.utc)),
                     '; using exposure time factor = ' + '{:5.3f}'.format(exp_time_factor),
                     an.acp_header_string()]
        # Plan-level ACP directives (SETS, QUITAT, AFINTERVAL), where present:
        if plan.sets_requested > 1:
            acp_lines.extend([';', '#SETS ' + str(int(plan.sets_requested))])
        if plan.utc_quitat is not None:
            acp_lines.extend([';', '#QUITAT ' +
                              '{:%m/%d/%Y %H:%M}'.format(plan.utc_quitat) + ' ; utc'])
        if plan.afinterval is not None and plan.afinterval > 0:
            acp_lines.append('#AFINTERVAL ' + '{0:g}'.format(plan.afinterval))
        if plan.utc_quitat is not None or plan.afinterval is not None:
            acp_lines.append(';')
        # Event lines, in order:
        for event in plan.events:
            acp_lines.extend(event.acp_lines)
        # CHAIN directive last, if present:
        if plan.chain_destination is not None:
            acp_lines.extend([';', '#CHAIN ' + plan.chain_destination])
        # Write this plan's ACP file:
        print('PRINT plan ' + plan.plan_id)
        target_fullpath = os.path.join(output_directory, 'plan_' + plan.plan_id + '.txt')
        with open(target_fullpath, 'w') as this_file:
            this_file.write('\n'.join(acp_lines))
def make_summary_file(plan_list, fov_dict, an, output_directory, exp_time_factor):
    """ Write the human-readable summary file ('Summary_<an_date>.txt') for this astronight:
    one line per event, plus per-plan header and footer lines and all applicable warnings
    (moon distance, faint LPV targets, chaining errors, autofocus/afinterval problems, etc.).
    :param plan_list: plans with events and timeline already computed [list of Plan objects].
    :param fov_dict: FOV definitions, keyed by fov name [dict of Fov objects].
    :param an: the astronight being planned [Astronight object].
    :param output_directory: directory into which the summary file is written [string].
    :param exp_time_factor: exposure time factor, echoed in the header [float].
    :return: [nothing--it writes the summary file].
    """
    # First, delete old summary files:
    filenames = os.listdir(output_directory)
    for filename in filenames:
        if filename.startswith("Summary_") and filename.endswith(".txt"):
            fullpath = os.path.join(output_directory, filename)
            os.remove(fullpath)
    # Unpack summary_lines:
    an_year = int(an.an_date_string[0:4])
    an_month = int(an.an_date_string[4:6])
    an_day = int(an.an_date_string[6:8])
    day_of_week = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']\
        [datetime(an_year, an_month, an_day).weekday()]
    header_lines = ['SUMMARY for AN' + an.an_date_string + ' ' + day_of_week.upper() +
                    ' (site = ' + an.site.name + ')',
                    '     as generated by photrix at ' +
                    '{:%Y-%m-%d %H:%M  UTC}'.format(datetime.now(timezone.utc)),
                    '     using exposure time factor = ' + '{:5.3f}'.format(exp_time_factor) +
                    '   min.alt = ' + '{:.1f}'.format(an.site.min_altitude) + u'\N{DEGREE SIGN}',
                    an.acp_header_string(), '\n']
    moon_is_a_factor = an.moon_phase > MOON_PHASE_NO_FACTOR  # for this astronight
    radec_dict = dict()  # collector to find and warn against Image events of same target w/ diff RA,Dec.
    # Local function:
    def make_summary_line(status_text, hhmm_text, utc_day_indicator, min_altitude, summary_text):
        # Format one right-aligned summary line; None arguments become blank columns.
        if status_text is None:
            status_text = ''
        if hhmm_text is None:
            hhmm_text = 4 * ' '
        if utc_day_indicator is None:
            utc_day_indicator = ' '
        if min_altitude is not None:
            altitude_text = str(int(round(min_altitude)))
        else:
            altitude_text = '  '
        if status_text == '1':  # status '1' (first set completed) displays as 'ok'.
            status_text = 'ok'
        return ' '.join([status_text.rjust(8), hhmm_text + utc_day_indicator,
                         altitude_text, summary_text])
    # END local function.
    # Construct summary_lines for every event:
    for i_plan, plan in enumerate(plan_list):
        # Add lines to top of plan summary:
        plan.summary_pre_lines.append(
            make_summary_line(None, None, None, None, 60 * '-'))
        hhmm_start = hhmm_from_datetime_utc(plan.utc_start)
        hhmm_end = hhmm_from_datetime_utc(plan.utc_end)
        if i_plan == 0:
            display_start = 'dusk to '
        else:
            display_start = hhmm_start + '-'
        plan.summary_pre_lines.append(
            make_summary_line(None, None, None, None,
                              'Begin Plan ' + plan.plan_id + ' :: ' + display_start + hhmm_end + ' utc'))
        if plan.plan_comment is not None:
            if len(plan.plan_comment.strip()) > 0:
                plan.summary_pre_lines.append(
                    make_summary_line(None, None, None, None, plan.plan_comment))
        if i_plan > 0:
            plan.summary_pre_lines.append(
                make_summary_line(None, hhmm_start, None, None, 'Plan entered.'))
        if plan.sets_requested > 1:
            plan.summary_pre_lines.append(
                make_summary_line(None, None, None, None,
                                  'SETS ' + '{0:g}'.format(plan.sets_requested)))
        if plan.utc_quitat is not None:
            plan.summary_pre_lines.append(
                make_summary_line(None, None, None, None,
                                  'QUITAT ' + hhmm_from_datetime_utc(plan.utc_quitat) + ' utc'))
        if plan.afinterval is not None:
            plan.summary_pre_lines.append(
                make_summary_line(None, None, None, None,
                                  'AFINTERVAL ' + '{0:g}'.format(plan.afinterval)))
        # Add lines to end of plan summary:
        if plan.chain_destination is not None:
            plan.summary_post_lines.append(
                make_summary_line('CHAIN', hhmm_end, ' ', None,
                                  'Chain to \'' + plan.chain_destination + '\''))
        if plan.afinterval is not None:
            plan.summary_post_lines.append(
                make_summary_line(None, None, None, None,
                                  str(plan.afinterval_autofocus_count) + ' AFINTERVAL autofocuses done.'))
        plan.summary_post_lines.append('\n')
        for event in plan.events:
            # Add warning line if time wasted by waiting to start this plan (prev plan ended early).
            if event.type == 'waituntil':
                if i_plan > 0:
                    gap_minutes = (event.utc_end - plan.utc_start).total_seconds() / 60.0
                    if gap_minutes > 1.0:
                        plan.summary_pre_lines.append(
                            '     >>>>>>>>>> WARNING: WAITUNTIL gap = ' +
                            str(int(gap_minutes)) + ' minutes.')
            # Construct main summary text line for this event, write into its Event object:
            if event.type in ['waituntil', 'comment', 'skipfilter']:
                hhmm_text, utc_day_indicator = '    ', ' '
            else:
                if event.utc_summary_display is not None:
                    hhmm_text = hhmm_from_datetime_utc(event.utc_summary_display)
                    # Mark the hhmm with '-'/'+' when it falls before/after the 24h utc day:
                    if event.utc_summary_display < an.datetime_utc_from_hhmm('0000'):
                        utc_day_indicator = '-'
                    elif event.utc_summary_display > an.datetime_utc_from_hhmm('0000') + timedelta(days=1):
                        utc_day_indicator = '+'
                    else:
                        utc_day_indicator = ' '
                else:
                    utc_day_indicator = ' '
            if event.type in ['fov', 'stare', 'image', 'color', 'burn', 'autofocus', 'chill']:
                if event.status is None:
                    # Event never got a status from make_timeline(): it was never reached.
                    event.status = 'SKIPPED'
                    hhmm_text = None
                    utc_day_indicator = None
                    event.min_altitude = None
            summary_text_line = make_summary_line(event.status, hhmm_text, utc_day_indicator,
                                                  event.min_altitude, event.summary_text)
            event.summary_lines = [summary_text_line]
            # Add warning line if Image event whose RA,Dec differs from previous Image event of same target.
            if event.type == 'image':
                previous_list = radec_dict.get(event.target_name, None)
                this_dict_value = (event.ra, event.dec)
                if previous_list is None:
                    radec_dict[event.target_name] = [this_dict_value]  # start list & skip warning.
                else:
                    if any([v != this_dict_value for v in previous_list]):
                        event.summary_lines.append(
                            '     >>>>>>>>>> WARNING: ' +
                            ' Previous Image entry for ' + event.target_name + ' has different RA, Dec.')
                    radec_dict[event.target_name].append(this_dict_value)  # add value to list.
            # Add warning line if moon is too close to this object and moon is up:
            if moon_is_a_factor:
                if event.type in ['burn', 'image', 'color', 'fov', 'stare']:
                    moon_dist = an.moon_radec.degrees_from(RaDec(event.ra, event.dec))  # in degrees
                    if moon_dist < MIN_MOON_DEGREES_DEFAULT:
                        if event.utc_summary_display is not None:
                            # Warn only if the event runs outside the dark-no-moon window:
                            if not (an.ts_dark_no_moon.start <= event.utc_summary_display
                                    <= an.ts_dark_no_moon.end):
                                event.summary_lines.append(
                                    '     >>>>>>>>>> WARNING: ' + event.target_name +
                                    ' MOON DISTANCE = ' +
                                    str(int(round(moon_dist))) +
                                    u'\N{DEGREE SIGN}' + ', vs. min ' +
                                    str(MIN_MOON_DEGREES_DEFAULT) +
                                    u'\N{DEGREE SIGN}')
            # Add warning line if fov target is estimated too faint in V:
            if event.type == 'fov':
                this_fov = fov_dict[event.target_name]
                if this_fov.observing_style.lower() == 'lpv':
                    mags = this_fov.estimate_lpv_mags(an.local_middark_jd)
                    v_mag = mags.get('V', None)
                    if v_mag is not None:
                        if v_mag >= V_MAG_WARNING:
                            event.summary_lines.append(
                                '     >>>>>>>>>> WARNING: above target estim. V Mag ~ ' +
                                '{:.2f}'.format(v_mag) +
                                ' very faint (>=' + '{0:g}'.format(V_MAG_WARNING) + ').')
            # Add warning line if autofocus and more than one sets requested (causing too many autofocuses):
            if event.type == 'autofocus' and plan.sets_requested > 1:
                event.summary_lines.append(
                    '     >>>>>>>>>> WARNING: autofocus not recommended when sets > 1.')
        if plan.chain_destination is not None:
            # Add plan warning line if plan chains to itself:
            if plan.chain_destination.lower() == 'plan_' + plan.plan_id.lower() + '.txt':
                plan.end_warning_lines.append(
                    '     >>>>>>>>>> ERROR: this plan attempts to chain to itself.')
            # Add plan warning line if chained-to plan does not exist:
            elif i_plan != len(plan_list) - 1:
                if plan.chain_destination.lower() != \
                        ('plan_' + plan_list[i_plan + 1].plan_id + '.txt').lower():
                    plan.end_warning_lines.append(
                        '     >>>>>>>>>> ERROR: this plan attempts to chain,'
                        ' but not to next plan.')
        # Add plan warning if no autofocus (or afinterval) given:
        if plan.afinterval is None and all([e.type != 'autofocus' for e in plan.events]):
            if any([e.type in ['burn', 'image', 'fov', 'stare', 'color'] for e in plan.events]):
                plan.end_warning_lines.append(
                    '     >>>>>>>>>> WARNING: this plan has no autofocus or afinterval.')
        # Add plan warning if autofocus and afinterval) both in same plan:
        if plan.afinterval is not None and any([e.type == 'autofocus' for e in plan.events]):
            if any([e.type in ['burn', 'image', 'fov', 'stare', 'color'] for e in plan.events]):
                plan.end_warning_lines.append(
                    '     >>>>>>>>>> WARNING: this plan has both autofocus and afinterval.')
    # Construct file contents by appending all required text lines:
    all_summary_lines = header_lines  # NOTE(review): aliases (does not copy) header_lines.
    for plan in plan_list:
        all_summary_lines.extend(plan.summary_pre_lines)
        for event in plan.events:
            all_summary_lines.extend(event.summary_lines)
        all_summary_lines.extend(plan.end_warning_lines)
        all_summary_lines.extend(plan.summary_post_lines)
    # Write Summary file:
    output_fullpath = os.path.join(output_directory, 'Summary_' + an.an_date_string + '.txt')
    print('PRINT summary to ', output_fullpath)
    with open(output_fullpath, 'w') as this_file:
        this_file.write('\n'.join(all_summary_lines))
def make_fov_exposure_data(fov_name, an, fov_dict=None, instrument=None, exp_time_factor=1,
                           skipfilter_list=None, force_autoguide=None):
    """
    Calculates exposure data for ONE REPEAT of one given fov.
    :param fov_name: name of the FOV to observe. [string]
    :param an: the astronight being planned (for LPV magnitude estimation). [Astronight object]
    :param fov_dict: FOV definitions keyed by fov name; if None, the FOV file is read directly.
        [dict of Fov objects, or None]
    :param instrument: instrument data [Instrument object]
    :param exp_time_factor: user multiplier on nominal exposure times. [float]
    :param skipfilter_list: list of filter names to omit from this FOV observation;
        None is treated as an empty list. [list of strings, or None]
    :param force_autoguide: True iff autoguiding is to be forced for this target;
        must be explicitly True or False (None is an error). [boolean]
    :return: tuple: (filters [list of str], counts [list of int], exp_times [list of float],
        target_overhead [float], repeat_duration [float]), or None on any error.
    """
    # BUG FIX: default was a mutable list literal ([]), shared across all calls;
    # None-then-replace avoids the shared-mutable-default pitfall.
    if skipfilter_list is None:
        skipfilter_list = []
    if fov_dict is not None:
        this_fov = fov_dict.get(fov_name, None)
        if this_fov is None:
            print(' >>>>> ERROR: FOV file not found for \'' + fov_name + '\'')
            return None
    else:
        this_fov = Fov(fov_name)
    if not isinstance(instrument, Instrument):
        print(" >>>>> ERROR: make_fov_exposure_data() parm 'instrument' must be " +
              "a valid Instrument object")
        return None
    if force_autoguide is None:
        print(" >>>>> ERROR in make_fov_exposure_data(): force_autoguide is None but must be boolean.")
        return None
    obs_style = this_fov.observing_style
    filters = []
    counts = []
    exp_times = []
    mags = dict()
    omit_list = [f.lower() for f in skipfilter_list]
    for obs in this_fov.observing_list:
        filt, mag, count = obs  # renamed from 'filter' to avoid shadowing the builtin.
        if filt.lower().strip() not in omit_list:
            filters.append(filt)
            counts.append(count)
            if obs_style.lower() in ['standard', 'monitor', 'stare']:
                exp_time = calc_exp_time(mag, filt, instrument, this_fov.max_exposure,
                                         exp_time_factor=exp_time_factor)
            elif obs_style.lower() == 'lpv':
                if len(mags) == 0:
                    mags = this_fov.estimate_lpv_mags(an.local_middark_jd)  # dict (get on 1st obs only)
                exp_time = calc_exp_time(mags[filt], filt, instrument, this_fov.max_exposure,
                                         exp_time_factor=exp_time_factor)
            else:
                print('****** WARNING: fov \'' + fov_name +
                      '\' has unrecognized observing style \'' + obs_style + '\'.')
                return None
            exp_times.append(exp_time)
    if obs_style.lower() != 'stare':
        # Repeat very short exposures so guiding/tracking errors average out:
        counts, exp_times = repeat_short_exp_times(counts, exp_times)
    target_overhead, repeat_duration = tabulate_target_durations(filters, counts, exp_times,
                                                                 force_autoguide=force_autoguide)
    # return types (3 lists, two floats): [str], [int], [float], float, float
    return filters, counts, exp_times, target_overhead, repeat_duration
def make_image_exposure_data(filter_entries, instrument, exp_time_factor=1, force_autoguide=None):
    """
    Calculates exposure data for given user-defined target ("IMAGE" directive).
    :param filter_entries: list of exposure-defining strings, as ['I=12', 'V=13(3)', 'V=120s(2)'],
        where I and V are filter names, 12 and 13 are target magnitudes, 120s is an explicit
        exposure time in seconds, and (3) is an image count (=1 if absent). [list of strings]
    :param instrument: the instrument for which these exposures are wanted. [Instrument object]
    :param exp_time_factor: user-supplied multiplier of exp time from nominal, usually 0.5-1. [float]
    :param force_autoguide: True iff user wants to force autoguiding for this target. [boolean]
    :return: tuple: (filters [list of str], counts [list of int], exp_times [list of float],
        target_overhead [float], repeat_duration [float]), or None on error.
        (Previously documented as a 3-tuple; the function has always returned 5 values.)
    :raises ValueError: if an entry's count cannot be parsed as an integer, or if an entry
        contains more than one '(' (previously such entries silently produced a None count).
    """
    if force_autoguide is None:
        print(" >>>>> ERROR in make_image_exposure_data(): force_autoguide is None but must be boolean.")
        return None
    filters = []
    counts = []
    exp_times = []
    for entry in filter_entries:
        raw_filter, mag_string = entry.split("=", maxsplit=1)
        this_filter = raw_filter.strip()
        bits = mag_string.split("(")
        if len(bits) == 1:  # case e.g. "V=13.2" (no explicit count -> one image).
            this_count = 1
        elif len(bits) == 2:  # case e.g. "V=13.2(1)".
            try:
                this_count = int(bits[1].replace(")", ""))
            except ValueError:
                raise ValueError(' >>>> PARSING ERROR (probably the number of repeats): ' + entry)
        else:  # more than one '(' cannot be a valid entry.
            raise ValueError(' >>>> PARSING ERROR (probably the number of repeats): ' + entry)
        # An 's' marks an explicit exposure time (e.g., "V=120s"); otherwise value is a magnitude.
        # TODO: I'm not crazy about the next if-statement's condition.
        if 's' in bits[0].lower():
            this_exp_time = float(bits[0].lower().split('s')[0])
        else:
            this_mag = float(bits[0])
            this_exp_time = calc_exp_time(this_mag, this_filter, instrument, max_exp_time=None,
                                          exp_time_factor=exp_time_factor)
        filters.append(this_filter)
        counts.append(this_count)
        exp_times.append(this_exp_time)
    # Raise counts where needed so each filter meets the minimum total exposure:
    counts, exp_times = repeat_short_exp_times(counts, exp_times)
    target_overhead, repeat_duration = tabulate_target_durations(filters, counts, exp_times,
                                                                 force_autoguide=force_autoguide)
    # return types (3 lists, two floats): [str], [int], [float], float, float
    return filters, counts, exp_times, target_overhead, repeat_duration
def make_color_exposure_data(entries, force_autoguide=True):
    """Build exposure data for a COLOR target from pre-specified entries.
    :param entries: sequence of (filter_name, exp_time, count) triples (indexable items).
    :param force_autoguide: True iff autoguiding is to be forced for this target. [boolean]
    :return: tuple: (filters [list of str], counts [list of int], exp_times [list of float],
        target_overhead [float], repeat_duration [float])
    """
    filters, exp_times, counts = [], [], []
    for entry in entries:
        filters.append(entry[0])
        exp_times.append(entry[1])
        counts.append(entry[2])
    target_overhead, repeat_duration = tabulate_target_durations(filters, counts, exp_times,
                                                                 force_autoguide=force_autoguide)
    return filters, counts, exp_times, target_overhead, repeat_duration
def tabulate_target_durations(filters, counts, exp_times, force_autoguide):
    """Return (target_overhead, repeat_duration) for one target's exposure plan.
    Guiding is active when forced by the caller or when total exposure exceeds the
    module's no-guiding maximum; guiding adds acquisition and per-exposure check time.
    :param filters: filter name per exposure set. [list of str]
    :param counts: image count per exposure set. [list of int]
    :param exp_times: exposure time per image, in seconds. [list of float]
    :param force_autoguide: True iff autoguiding must be on regardless of total exposure. [boolean]
    :return: 2-tuple (target_overhead, repeat_duration), both in seconds. [floats]
    """
    aggregate_exposure = sum(count * exp_time for count, exp_time in zip(counts, exp_times))
    guiding_is_active = force_autoguide or aggregate_exposure > MAX_AGGREGATE_EXPOSURE_NO_GUIDING
    # TODO: get some of the next base values from instrument object.
    n_exposures = sum(counts)
    target_overhead = NEW_TARGET_DURATION
    if guiding_is_active:
        target_overhead += GUIDE_STAR_ACQUISITION
    repeat_duration = aggregate_exposure + \
        len(filters) * NEW_FILTER_DURATION + \
        n_exposures * NEW_EXPOSURE_DURATION_EX_GUIDER_CHECK
    if guiding_is_active:
        repeat_duration += n_exposures * GUIDER_CHECK_DURATION
    return target_overhead, repeat_duration
def repeat_short_exp_times(counts, exp_times):
    """Raise image counts (in place) so each filter's total exposure meets the module minimum.
    Exposure times are never changed; only counts are increased where count * exp_time
    falls below MIN_TOTAL_EXP_TIME_PER_FILTER.
    :param counts: image count per filter; may be modified in place. [list of int]
    :param exp_times: exposure time per image, in seconds. [list of float]
    :return: 2-tuple (counts, exp_times), same list objects passed in.
    """
    for i, (count, exp_time) in enumerate(zip(counts, exp_times)):
        if count * exp_time < MIN_TOTAL_EXP_TIME_PER_FILTER:
            counts[i] = ceil(MIN_TOTAL_EXP_TIME_PER_FILTER / exp_time)
    return counts, exp_times
def calc_exp_time(mag, filter, instrument, max_exp_time, exp_time_factor=1):
    """Return the planned exposure time in seconds for one image.
    Scales the instrument's mag-10 reference exposure by target flux, applies the user's
    nightly factor, soft asymptotes at the absolute max and min, rounds to >= 2 significant
    digits, then enforces the FOV's hard maximum if given.
    :param mag: target magnitude in this filter. [float]
    :param filter: filter name, a key of instrument.filter_data. [str]
    :param instrument: instrument data. [Instrument object]
    :param max_exp_time: hard cap in seconds from the FOV, or None for no cap. [float or None]
    :param exp_time_factor: user multiplier on nominal exposure time, usually 0.5-1. [float]
    :return: exposure time in seconds. [float]
    """
    # Raw exposure time from mag + properties of instrument (camera & filters):
    reference_exp_time = instrument.filter_data[filter]['reference_exposure_mag10']
    exp_time = exp_time_factor * (reference_exp_time * 10.0 ** ((mag - 10.0) / 2.5))
    # Soft asymptote toward the absolute maximum exposure time:
    exp_time = sqrt(1.0 / (1.0 / exp_time ** 2 + 1.0 / ABSOLUTE_MAX_EXPOSURE_TIME ** 2))
    # Soft asymptote away from the effective minimum -- the more stringent of the module
    # minimum and the camera's shortest exposure (per note of 20170406):
    effective_minimum = max(ABSOLUTE_MIN_EXPOSURE_TIME, instrument.camera['shortest_exposure'])
    exp_time = sqrt(exp_time ** 2 + effective_minimum ** 2)
    # Round: nearest second at >= 10 s, else nearest 0.1 second:
    exp_time = round(exp_time, 0) if exp_time >= 10.0 else round(exp_time, 1)
    # Apply fov's hard maximum, if any:
    return exp_time if max_exp_time is None else min(max_exp_time, exp_time)
def extract_ra_dec(value_string):
    """ Split value string into subvalue, ra, dec, whether ra and dec are in standard hex format
    (e.g., 12:34:56 -11:33:42) or in TheSkyX format (e.g., 06h 49m 40.531s +63° 00' 06.920").
    :param value_string: input string from parse_excel(), as above.
    :return: 3-tuple: subvalue (= everything but RA and Dec), ra, dec. [3-tuple of strings].
    :raises SyntaxError: if value_string has too few fields, or a malformed TheSkyX RA-Dec.
        (Previously, too-few-fields raised IndexError, and the hex-format error path raised
        TypeError from concatenating a str with a tuple.)
    """
    split_string = tuple(value_string.rsplit(maxsplit=2))
    if len(split_string) < 3:
        # Guard: fewer than 3 whitespace-separated fields cannot hold subvalue + RA + Dec.
        raise SyntaxError('Cannot parse RA-Dec string (too few fields): ' + value_string)
    if split_string[1].endswith('\'') and split_string[2].endswith('\"'):
        # RA and Dec are in TheSkyX format, e.g., 06h 49m 40.531s +63° 00' 06.920":
        split_string = tuple(value_string.rsplit(maxsplit=6))
        if len(split_string) != 7:
            raise SyntaxError('Cannot parse apparent TheSkyX-format RA-Dec string: ' + value_string)
        subvalue = split_string[0]
        ra_items = [s.replace('h', '').replace('m', '').replace('s', '').strip()
                    for s in split_string[1:4]]
        ra = ':'.join(ra_items)
        dec_items = [s.replace('°', '').replace('\'', '').replace('"', '').strip()
                     for s in split_string[4:7]]
        dec = ':'.join(dec_items)
    else:
        # RA and Dec are given directly in std hex (exactly 3 fields, guaranteed by guard above):
        subvalue, ra, dec = split_string
    return subvalue, ra, dec
__________PLANEWAVE_MOUNT_POINT_LIST______________________________________________ = 0
def make_planewave_point_list(n_pts, summer_winter, latitude=32.9, longitude=-105.5,
                              fraction_extra_near_ecliptic=0.25):
    """Make mount model point list for Planewave L-series mount, points being in rational order.
    Minimizes dome movement. Begins near zenith, then makes one circuit North, East, South, West, North.
    Each line is one point in degrees: az, alt.
    May well return 10-12 points more than requested in n_pts.
    :param n_pts: [int]
    :param summer_winter: 'summer' or 'winter', to locate ecliptic. [string]
    :param latitude: scope latitude in degrees. [float]
    :param longitude: scope longitude in degrees, negative West. [float]
    :param fraction_extra_near_ecliptic (limited to [0 to 0.5]). [float]
    :return: None. Prints list of (az, alt) tuples, in degrees
    :raises ValueError: if n_pts < 70, or summer_winter is not 'summer'/'winter'.
    Side effect: writes the list to point_list_<season>.txt under PHOTRIX_ROOT_DIRECTORY,
    but only when the final point count stays under 4 * n_pts (guard at bottom).
    """
    min_pts = 70
    if n_pts < min_pts:
        raise ValueError('At least ' + str(min_pts) + ' points needed for proper mount model.')
    if summer_winter.lower() not in ['winter', 'summer']:
        raise ValueError('Parameter summer_winter must be \'summer\' or \'winter\'.')
    # Fixed RNG seed makes the generated point list reproducible between runs.
    seed(2022)
    # Block_a: points around zenith, for stable start in case az polar alignment is off.
    # Block_b1: points ringing 30-35 degrees alt.
    # Block_b2: points between 35 and 70 degrees.
    # Block_c: extra points around ecliptic (nb: winter vs summer cases).
    n_pts_block_a = 10
    n_pts_block_c = round(max(0.0, min(0.5, fraction_extra_near_ecliptic)) * n_pts)
    n_pts_blocks_b = n_pts - n_pts_block_a - n_pts_block_c
    n_pts_block_b1 = round(0.4 * n_pts_blocks_b)
    n_pts_block_b2 = n_pts - n_pts_block_a - n_pts_block_b1 - n_pts_block_c
    print('\nblock points =', str(n_pts_block_a),
          '(' + str(n_pts_block_b1) + ', ' + str(n_pts_block_b2) + ')',
          str(n_pts_block_c))
    # Make block a (around zenith):
    # Azimuths spread evenly with +/-5 deg jitter; altitudes jittered around 75 deg.
    az_spacing = 360.0 / n_pts_block_a
    block_az = [(az_spacing * (i + 0.5) + uniform(-5, 5)) % 360.0 for i in range(n_pts_block_a)]
    block_alt = [75.0 + uniform(-3, 3) for i in range(n_pts_block_a)]
    block_a_points = list(zip(block_az, block_alt))
    block_a_points.sort(key=lambda x: x[0]) # sort by increasing azimuth (N,E,S,W).
    # Make block b1 (30-35 alt):
    az_spacing_b1 = 360.0 / n_pts_block_b1
    az_b1 = [(az_spacing_b1 * (i + 0.5) + uniform(-5, 5)) % 360.0 for i in range(n_pts_block_b1)]
    alt_b1 = [30.0 + uniform(0, 5) for i in range(n_pts_block_b1)]
    block_b1_points = [(az, alt) for (az, alt) in zip(az_b1, alt_b1)]
    # Make block b2 (35-70 alt):
    az_spacing_b2 = 360.0 / n_pts_block_b2
    az_b2 = [(az_spacing_b2 * (i + 0.5) + uniform(-5, 5)) % 360.0 for i in range(n_pts_block_b2)]
    alt_b2 = [35.0 + uniform(0, 35.0) for i in range(n_pts_block_b2)]
    block_b2_points = [(az, alt) for (az, alt) in zip(az_b2, alt_b2)]
    # Block C: band around ecliptic:
    if summer_winter.lower() == 'summer':
        dec_center, dec_width = -10, 20 # center declination of ecliptic, total width of band to sample.
    else:
        dec_center, dec_width = 20, 30
    location = EarthLocation(lon=longitude * u.degree, lat=latitude * u.degree)
    time = Time.now() # actual time is irrelevant so long as it's constant, as we search all RA values.
    block_c_points = []
    ra_deg = 0.0
    # Walk RA around the sphere in ~12-18 deg steps; keep only points above 30 deg altitude.
    while len(block_c_points) < n_pts_block_c:
        # Space points in RA more or less evenly:
        ra_deg = (ra_deg + uniform(12.0, 18.0)) % 360.0
        dec_deg = dec_center + uniform(-dec_width / 2.0, dec_width / 2.0)
        sc = SkyCoord(ra=ra_deg * u.degree, dec=dec_deg * u.degree, frame='icrs')
        altaz = sc.transform_to(AltAz(obstime=time, location=location))
        az, alt = altaz.az.degree % 360.0, altaz.alt.degree
        if alt >= 30.0:
            block_c_points.append(tuple([az, alt]))
    # Remove block b and c points too low around celestial pole:
    # TODO: move this process to astropy2022.
    max_abs_hourangle_degrees = 6 * 15 # i.e., RA hours either side of meridian.
    min_abs_cos_hourangle = cos(max_abs_hourangle_degrees / DEGREES_PER_RADIAN)
    block_bc_points = block_b1_points + block_b2_points + block_c_points
    # Formulae (1) and (2) from http://star-www.st-and.ac.uk/~fv/webnotes/chapter7.htm:
    # phi=latitude, delta=declination, H=hourangle, A=target azimuth, a=target altitude.
    cos_phi, sin_phi = cos(latitude / DEGREES_PER_RADIAN), sin(latitude / DEGREES_PER_RADIAN)
    block_bc_points_to_keep = []
    for (az, alt) in block_bc_points:
        cos_a, sin_a = cos(alt / DEGREES_PER_RADIAN), sin(alt / DEGREES_PER_RADIAN)
        cos_A = cos(az / DEGREES_PER_RADIAN)
        # (1) sin(δ) = sin(a) sin(φ) + cos(a) cos(φ) cos(A)
        sin_delta = (sin_a * sin_phi) + (cos_a * cos_phi * cos_A)
        cos_delta = sqrt(1.0 - sin_delta ** 2) # happily, cosine of declination is always non-negative.
        # (2) cos(H) = { sin(a) - sin(δ) sin(φ) } / { cos(δ) cos(φ) }
        cos_H = (sin_a - sin_delta * sin_phi) / (cos_delta * cos_phi)
        if cos_H > min_abs_cos_hourangle:
            block_bc_points_to_keep.append((az, alt))
            print('point', '{0:.2f}, {1:.2f}'.format(az, alt), 'kept,'
                  ' cos(hourangle) =', '{0:.2f}'.format(cos_H))
        else:
            print('point', '{0:.2f}, {1:.2f}'.format(az, alt), 'removed for hourangle.')
    block_bc_points = block_bc_points_to_keep
    # Sample block b and c points into two equal groups, sort each to make CW then CCW az circuits:
    # NOTE: seed is reset here so the shuffle is reproducible independently of the draws above.
    seed(2022)
    shuffle(block_bc_points) # in-place.
    n_1 = floor(len(block_bc_points) / 2.0)
    block_bc_1, block_bc_2 = block_bc_points[:n_1], block_bc_points[n_1:]
    block_bc_1.sort(key=lambda x: +x[0]) # sort by increasing azimuth (N,E,S,W).
    block_bc_2.sort(key=lambda x: -x[0]) # sort by decreasing azimuth (N,W,S,E).
    # Construct all_points:
    all_points = block_a_points + block_bc_1 + block_bc_2
    # Last: add extra points whenever az slews are too long and camera might not keep up.
    max_az_change_for_sync = 20 # degrees; the maximum dome slew for which dome can keep up.
    az_change_per_extra_point = 36 # degrees; the dome slew expected for a wasted mount slew/image cycle.
    alt_dither_per_extra_point = 5 # degrees  # NOTE(review): currently unused in this function.
    new_all_points = [all_points[0]]
    for i in range(len(all_points) - 1):
        # d_az is the shorter way around the circle between consecutive azimuths.
        d_az_raw = abs(all_points[i+1][0] - all_points[i][0])
        d_az = min(d_az_raw, 360.0 - d_az_raw)
        n_extra_points = ceil((d_az - max_az_change_for_sync) / az_change_per_extra_point)
        extra_points = []
        for i_pt in range(n_extra_points):
            # Extra points are spaced along the second half of the az gap (fraction 0.5..1.0).
            fraction = 0.5 + 0.5 * (float(i_pt) / float(n_extra_points))
            # Circular mean handles the 360->0 az wraparound that plain interpolation would not.
            az_extra = Angle(circmean(data=np.array([all_points[i][0], all_points[i + 1][0]]) * u.deg,
                                      weights = np.array([1.0 - fraction, fraction]))).\
                wrap_at(360 * u.deg).degree
            # az_extra = (1.0 - fraction) * all_points[i][0] + fraction * all_points[i + 1][0]
            alt_extra = (1.0 - fraction) * all_points[i][1] + fraction * all_points[i + 1][1]
            extra_point = (az_extra, alt_extra)
            print('\n adding extra point', '{0:.2f}, {1:.2f}'.format(extra_point[0], extra_point[1]))
            print(' between', '{0:.2f}, {1:.2f}'.format(all_points[i][0], all_points[i][1]),
                  'and', '{0:.2f}, {1:.2f}'.format(all_points[i + 1][0], all_points[i + 1][1]))
            extra_points.append(extra_point)
        new_all_points.extend(extra_points)
        new_all_points.append(all_points[i+1])
    # Write points to file:
    point_lines = ['{0:.2f}, {1:.2f}'.format(az, alt) for az, alt in new_all_points]
    fullpath = os.path.join(PHOTRIX_ROOT_DIRECTORY, 'point_list_' + summer_winter.lower() + '.txt')
    print(fullpath)
    print(str(len(new_all_points)), 'points.')
    # NOTE(review): file written only if extra-point insertion did not balloon the list
    # past 4 * n_pts -- presumably a sanity guard; confirm intent.
    if len(new_all_points) < 4 * n_pts:
        with open(fullpath, 'w') as this_file:
            this_file.write('\n'.join(point_lines))
def test_make_pw_point_list():
    """Smoke test: run make_planewave_point_list() with typical summer settings.
    NOTE(review): asserts nothing; it only verifies the function runs (and writes its file)."""
    n_pts = 70
    summer_winter = 'summer'
    make_planewave_point_list(n_pts, summer_winter, latitude=32.9, longitude=-105.5,
                              fraction_extra_near_ecliptic=0.25)
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,638
|
edose/photrix
|
refs/heads/master
|
/test/test_fov.py
|
import os
import pytest
from photrix import fov
from photrix import util
__author__ = "Eric Dose :: Bois d'Arc Observatory, Kansas"
PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEST_FOV_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "test", "$fovs_for_test")
CURRENT_SCHEMA_VERSION = "1.5" # For Schema 1.5 of April 2017.
def test_fov_stare_and_star_list():
    """
    This is for observing style = "Stare".
    This test function includes all general-case tests
    (which are not repeated in test functions for other observing styles).
    Covers: header fields, observing list, punches, AAVSO sequence stars
    (IDs, positions, magnitudes), and the gap-score / priority-score curves.
    """
    fovname1 = "ST Tri"
    fov1 = fov.Fov(fovname1, fov_directory=TEST_FOV_DIRECTORY)
    # Test fields.
    assert fov1.fov_name == fovname1
    assert fov1.format_version == CURRENT_SCHEMA_VERSION
    assert fov1.ra == 2*15 + 42*15/60
    assert fov1.dec == 35 + 43/60 + 31/3600
    assert fov1.chart == "X15646NP"
    assert fov1.fov_date == "12/21/2015"
    assert fov1.main_target == "ST Tri"
    assert fov1.target_type == "Eclipser"
    assert fov1.period == 0.47905145
    assert fov1.motive == 'PET. Adopted star. Get 2\'min.'
    assert fov1.acp_comments == "Eclipser EB V=14-14.77 NO R COMPS"
    assert fov1.JD_bright == 2457360.57
    assert fov1.JD_faint == 2457360.69
    assert fov1.JD_second == 2457360.45
    assert fov1.mag_V_bright == 14
    assert fov1.mag_V_faint == 14.77
    assert fov1.mag_V_second == 14
    assert fov1.color_VI_bright == 0.5
    assert fov1.color_VI_faint == 0.55
    assert fov1.color_VI_second == 0.5
    assert fov1.observing_style == "Stare"
    assert len(fov1.observing_list) == 2
    assert fov1.observing_list == [("I", 12, 1), ("V", 13, 6)]
    assert fov1.alert is None
    assert fov1.max_exposure == 240
    assert fov1.priority == 8
    assert fov1.gap_score_days == [60, 90, 150]
    assert fov1.punches == [("143", -7.04, 11.94)]
    assert str(fov1) == "FOV 'ST Tri' with 11 sequence stars."
    assert len(fov1.aavso_stars) == 11
    assert [star.star_id for star in fov1.aavso_stars] == \
        ['156', '127', '137', '147', '101', '143', '131', '139', '151', 'ST Tri', 'V0680 Per']
    assert all([star.is_valid for star in fov1.aavso_stars])
    # Spot-check one comp star's fields and magnitudes:
    s = fov1.aavso_stars[0]
    assert (s.star_id, s.ra, s.dec, s.star_type) == ("156", 40.60386, 35.67683, "comp")
    m = s.mags
    assert len(m) == 2 # since filters with (mag,error) = (0,0) are missing & not included in obj.
    assert m['B'] == (16.310, 0)
    assert m['V'] == (15.608, 0)
    # Spot-check the check star:
    s = fov1.aavso_stars[2]
    assert (s.star_id, s.ra, s.dec, s.star_type) == ("137", 40.55655, 35.71275, "check")
    m = s.mags
    assert len(m) == 6
    assert m['B'] == (14.255, 0.021)
    assert m['K'] == (12.164, 0.018)
    assert m['I'] == (13.113, 0.055)
    assert m['H'] == (12.295, 0.025)
    assert m['J'] == (12.527, 0.019)
    assert m['V'] == (13.676, 0.014)
    # Target stars carry no magnitudes:
    s = fov1.aavso_stars[9]
    assert (s.star_id, s.ra, s.dec, s.star_type) == ("ST Tri", 40.38631, 35.72553, "target")
    assert len(s.mags) == 0
    s = fov1.aavso_stars[10]
    assert (s.star_id, s.ra, s.dec, s.star_type) == ("V0680 Per", 40.42055, 35.71548, "target")
    assert len(s.mags) == 0
    # Test calc_gap_score(): zero through 60 days, ramps to 1 at 90 d, plateaus at 2 from 150 d.
    assert fov1.calc_gap_score(0) == 0
    assert fov1.calc_gap_score(60) == 0
    assert fov1.calc_gap_score(75) == 0.5
    assert fov1.calc_gap_score(90) == 1
    assert fov1.calc_gap_score(120) == 1.5
    assert fov1.calc_gap_score(150) == 2
    assert fov1.calc_gap_score(365) == 2
    assert fov1.calc_gap_score(-1) == 0
    # Test calc_priority_score(): gap score scaled by the FOV's priority.
    ps = fov1.priority
    assert fov1.calc_priority_score(0) == 0
    assert fov1.calc_priority_score(60) == 0
    assert fov1.calc_priority_score(75) == 0.5 * ps
    assert fov1.calc_priority_score(90) == 1 * ps
    assert fov1.calc_priority_score(120) == 1.5 * ps
    assert fov1.calc_priority_score(150) == 2 * ps
    assert fov1.calc_priority_score(365) == 2 * ps
    assert fov1.calc_priority_score(-1) == 0
def test_fov_lpv():
    """ NB: For tests on #STARS section, see test_fov_stare_and_star_list() above.
    Exercises an LPV-style FOV ('AU Aur'): header fields, observing list, and
    estimate_lpv_mags() at the bright/faint extremes, at mid-phase, at whole-period
    offsets forward and backward, and at two intermediate phases."""
    fovname2 = "AU Aur"
    fov2 = fov.Fov(fovname2, fov_directory=TEST_FOV_DIRECTORY)
    # Test fields.
    assert fov2.fov_name == fovname2
    assert fov2.format_version == CURRENT_SCHEMA_VERSION
    assert fov2.main_target == "AU Aur"
    assert fov2.target_type == "Mira"
    assert fov2.period == 400
    assert fov2.motive == ''
    assert fov2.acp_comments == "Mira CARBON C6-7(N) V~10-12.5 NO R COMPS"
    assert (fov2.JD_bright, fov2.JD_faint, fov2.JD_second) == (2456520, 2456720, None)
    assert (fov2.mag_V_bright, fov2.mag_V_faint, fov2.mag_V_second) == (10, 12.5, None)
    assert (fov2.color_VI_bright, fov2.color_VI_faint, fov2.color_VI_second) == (3.3, 4.8, None)
    assert fov2.observing_style == "LPV"
    assert len(fov2.observing_list) == 2
    assert fov2.observing_list[0] == ("V", None, 1)
    assert fov2.observing_list[1] == ("I", None, 1)
    assert fov2.alert is None
    assert fov2.priority == 8
    assert fov2.gap_score_days == [4, 8, 20]
    assert fov2.punches == [("AU Aur", 9.33, 2.71)]
    assert str(fov2) == "FOV 'AU Aur' with 11 sequence stars."
    assert len(fov2.aavso_stars) == 11
    assert all([star.is_valid for star in fov2.aavso_stars])
    assert [star.star_id for star in fov2.aavso_stars] == \
        ['117', 'AU Aur', '141', '154', '132', '146', '124', '118', '155', '107', '112']
    # Test .estimate_lpv_mags().
    # At JD_bright / JD_faint: V is as given; R and I derived from the V-I color.
    jd = fov2.JD_bright
    mags_bright = fov2.estimate_lpv_mags(jd)
    assert mags_bright['V'] == fov2.mag_V_bright
    assert mags_bright['R'] == fov2.mag_V_bright - 0.5*fov2.color_VI_bright
    assert mags_bright['I'] == fov2.mag_V_bright - fov2.color_VI_bright
    jd = fov2.JD_faint
    mags_faint = fov2.estimate_lpv_mags(jd)
    assert mags_faint['V'] == fov2.mag_V_faint
    assert mags_faint['R'] == fov2.mag_V_faint - 0.5*fov2.color_VI_faint
    assert mags_faint['I'] == fov2.mag_V_faint - fov2.color_VI_faint
    # At mid-phase, magnitudes are the midpoint of the bright and faint estimates.
    jd = fov2.JD_bright + 0.5 * (fov2.JD_faint - fov2.JD_bright) # mid-dimming
    mags_jd = fov2.estimate_lpv_mags(jd)
    assert mags_jd['V'] == mags_bright['V'] + 0.5*(mags_faint['V']-mags_bright['V'])
    assert mags_jd['R'] == mags_bright['R'] + 0.5*(mags_faint['R']-mags_bright['R'])
    assert mags_jd['I'] == mags_bright['I'] + 0.5*(mags_faint['I']-mags_bright['I'])
    jd = fov2.JD_faint + 0.5 * (fov2.JD_bright+fov2.period - fov2.JD_faint) # mid-brightening
    mags_jd = fov2.estimate_lpv_mags(jd)
    assert mags_jd['V'] == mags_faint['V'] + 0.5*(mags_bright['V']-mags_faint['V'])
    assert mags_jd['R'] == mags_faint['R'] + 0.5*(mags_bright['R']-mags_faint['R'])
    assert mags_jd['I'] == mags_faint['I'] + 0.5*(mags_bright['I']-mags_faint['I'])
    # Test much later dates than JD_bright etc, but same phase.
    jd = fov2.JD_bright + 11*fov2.period
    mags_bright = fov2.estimate_lpv_mags(jd)
    assert mags_bright['V'] == fov2.mag_V_bright
    assert mags_bright['R'] == fov2.mag_V_bright - 0.5*fov2.color_VI_bright
    assert mags_bright['I'] == fov2.mag_V_bright - fov2.color_VI_bright
    jd = fov2.JD_faint + 11*fov2.period
    mags_faint = fov2.estimate_lpv_mags(jd)
    assert mags_faint['V'] == fov2.mag_V_faint
    assert mags_faint['R'] == fov2.mag_V_faint - 0.5*fov2.color_VI_faint
    assert mags_faint['I'] == fov2.mag_V_faint - fov2.color_VI_faint
    jd = fov2.JD_bright + 0.5 * (fov2.JD_faint - fov2.JD_bright) + 11*fov2.period # mid-dimming
    mags_jd = fov2.estimate_lpv_mags(jd)
    assert mags_jd['V'] == mags_bright['V'] + 0.5*(mags_faint['V']-mags_bright['V'])
    assert mags_jd['R'] == mags_bright['R'] + 0.5*(mags_faint['R']-mags_bright['R'])
    assert mags_jd['I'] == mags_bright['I'] + 0.5*(mags_faint['I']-mags_bright['I'])
    jd = fov2.JD_faint + 0.5 * (fov2.JD_bright+fov2.period - fov2.JD_faint) + \
        11*fov2.period # mid-brightening
    mags_jd = fov2.estimate_lpv_mags(jd)
    assert mags_jd['V'] == mags_faint['V'] + 0.5*(mags_bright['V']-mags_faint['V'])
    assert mags_jd['R'] == mags_faint['R'] + 0.5*(mags_bright['R']-mags_faint['R'])
    assert mags_jd['I'] == mags_faint['I'] + 0.5*(mags_bright['I']-mags_faint['I'])
    # Test much earlier dates than JD_bright etc, but same phase.
    jd = fov2.JD_bright - 23*fov2.period
    mags_bright = fov2.estimate_lpv_mags(jd)
    assert mags_bright['V'] == fov2.mag_V_bright
    assert mags_bright['R'] == fov2.mag_V_bright - 0.5*fov2.color_VI_bright
    assert mags_bright['I'] == fov2.mag_V_bright - fov2.color_VI_bright
    jd = fov2.JD_faint - 23*fov2.period
    mags_faint = fov2.estimate_lpv_mags(jd)
    assert mags_faint['V'] == fov2.mag_V_faint
    assert mags_faint['R'] == fov2.mag_V_faint - 0.5*fov2.color_VI_faint
    assert mags_faint['I'] == fov2.mag_V_faint - fov2.color_VI_faint
    jd = fov2.JD_bright + 0.5 * (fov2.JD_faint - fov2.JD_bright) - 23*fov2.period # mid-dimming
    mags_jd = fov2.estimate_lpv_mags(jd)
    assert mags_jd['V'] == mags_bright['V'] + 0.5*(mags_faint['V']-mags_bright['V'])
    assert mags_jd['R'] == mags_bright['R'] + 0.5*(mags_faint['R']-mags_bright['R'])
    assert mags_jd['I'] == mags_bright['I'] + 0.5*(mags_faint['I']-mags_bright['I'])
    jd = fov2.JD_faint + 0.5 * (fov2.JD_bright+fov2.period - fov2.JD_faint) - \
        23*fov2.period # mid-brightening
    mags_jd = fov2.estimate_lpv_mags(jd)
    assert mags_jd['V'] == mags_faint['V'] + 0.5*(mags_bright['V']-mags_faint['V'])
    assert mags_jd['R'] == mags_faint['R'] + 0.5*(mags_bright['R']-mags_faint['R'])
    assert mags_jd['I'] == mags_faint['I'] + 0.5*(mags_bright['I']-mags_faint['I'])
    # Test more phases.
    # Expected interpolation blends a linear ramp and a half-sine, weighted 50/50
    # (per the mag_fract formula below, which mirrors the implementation).
    from math import sin, pi
    fract = 0.15
    jd = fov2.JD_bright + fract * (fov2.JD_faint - fov2.JD_bright)
    mags_jd = fov2.estimate_lpv_mags(jd)
    linear_part = fract
    sine_part = (1 + sin((fract-0.5) * pi)) / 2
    mag_fract = (0.5 * linear_part) + (0.5 * sine_part)
    assert mags_jd['V'] == pytest.approx(mags_bright['V'] +
                                         mag_fract*(mags_faint['V']-mags_bright['V']))
    assert mags_jd['R'] == pytest.approx(mags_bright['R'] +
                                         mag_fract*(mags_faint['R']-mags_bright['R']))
    assert mags_jd['I'] == pytest.approx(mags_bright['I'] +
                                         mag_fract*(mags_faint['I']-mags_bright['I']))
    fract = 0.66
    jd = fov2.JD_faint + fract * (fov2.JD_bright - fov2.JD_faint)
    mags_jd = fov2.estimate_lpv_mags(jd)
    linear_part = fract
    sine_part = (1 + sin((fract-0.5) * pi)) / 2
    mag_fract = (0.5 * linear_part) + (0.5 * sine_part)
    assert mags_jd['V'] == pytest.approx(mags_faint['V'] + mag_fract*(mags_bright['V']-mags_faint['V']))
    assert mags_jd['R'] == pytest.approx(mags_faint['R'] + mag_fract*(mags_bright['R']-mags_faint['R']))
    assert mags_jd['I'] == pytest.approx(mags_faint['I'] + mag_fract*(mags_bright['I']-mags_faint['I']))
def test_fov_lpv_multi_exposure():
    """ NB: For tests on #STARS section, see test_fov_stare_and_star_list() above.
    Verifies that an LPV FOV file with multi-image filter entries parses into
    an observing list carrying the per-filter image counts."""
    fov_name = "AU Aur multi-exposure"
    f = fov.Fov(fov_name, TEST_FOV_DIRECTORY)
    assert f.fov_name == fov_name
    assert f.format_version == CURRENT_SCHEMA_VERSION
    assert f.observing_style == "LPV"
    assert f.alert is None
    assert len(f.observing_list) == 2
    assert f.observing_list == [("V", None, 5), ("I", None, 7)]
def test_fov_monitor():
    """ NB: For tests on #STARS section, see test_fov_stare_and_star_list() above.
    Exercises a Monitor-style FOV ('NSV 14581'): header fields, alert level,
    observing list, and sequence-star IDs."""
    fov_name = "NSV 14581"
    f = fov.Fov("NSV 14581", TEST_FOV_DIRECTORY)
    assert f.fov_name == fov_name
    assert f.format_version == CURRENT_SCHEMA_VERSION
    assert f.ra == util.ra_as_degrees("23:26:50.3")
    assert f.dec == util.dec_as_degrees("+82:22:11")
    assert f.chart == "X16224AYI"
    assert f.fov_date == "9/2/2016"
    assert f.main_target == "NSV 14581"
    assert f.target_type == "Z Cam"
    assert f.period == float("0.194334535")
    assert f.motive == 'Gap-fill monitor, ~5 days.'
    assert f.JD_bright == float("2452888.328")
    assert f.JD_faint == float("2452888.4255")
    assert f.JD_second is None
    assert f.mag_V_bright == 14
    assert f.mag_V_faint == 17.2
    assert f.mag_V_second is None
    assert f.color_VI_bright == 0.7
    assert f.color_VI_faint == 0.7
    assert f.color_VI_second is None
    assert f.observing_style == "Monitor"
    assert f.alert == 2
    assert len(f.observing_list) == 2
    assert f.observing_list == [("I", 13, 1), ("V", 14, 1)]
    assert f.max_exposure == 240
    assert f.priority == 3
    assert f.gap_score_days == [5, 10, 20]
    assert f.acp_comments == 'Z Cam: gap-fill=5 days V~15??'
    assert f.punches == []
    # Sequence stars: count, validity, and exact ID ordering.
    assert len(f.aavso_stars) == 13
    assert all([star.is_valid for star in f.aavso_stars])
    assert [star.star_id for star in f.aavso_stars] == \
        ['157', 'NSV 14581', '136', '148', '145', '151', '110',
         '142', '155', '134', '140', '153', '112']
def test_fov_standard():
    """ NB: For tests on #STARS section, see test_fov_stare_and_star_list() above.
    Exercises a Standard-field FOV ('Std_SA100'): the variable-star header fields
    (period, JDs, magnitudes, colors, priority, gap scores) are expected to be
    None or empty for a standards field."""
    fov_name = "Std_SA100"
    f = fov.Fov("Std_SA100", TEST_FOV_DIRECTORY)
    assert f.fov_name == fov_name
    assert f.format_version == CURRENT_SCHEMA_VERSION
    assert f.ra == util.ra_as_degrees("08:53:14.3")
    assert f.dec == util.dec_as_degrees("-00:37:56")
    assert f.chart == "X15687X"
    assert f.fov_date == "12/20/2015"
    assert f.period is None
    assert f.motive == ''
    assert f.acp_comments == ''
    assert (f.JD_bright, f.JD_faint, f.JD_second) == (None, None, None)
    assert (f.mag_V_bright, f.mag_V_faint, f.mag_V_second) == (None, None, None)
    assert (f.color_VI_bright, f.color_VI_faint, f.color_VI_second) == (None, None, None)
    assert f.main_target == "Standard"
    assert f.target_type == "Standard"
    assert f.observing_style == "Standard"
    assert f.alert is None
    assert len(f.observing_list) == 3
    assert f.observing_list == [("V", 11.5, 1), ("R", 11.2, 1), ("I", 10.2, 1)]
    assert f.max_exposure is None
    assert f.priority is None
    assert f.gap_score_days is None
    assert f.punches == []
    assert len(f.aavso_stars) == 6
    assert all([star.is_valid for star in f.aavso_stars])
    assert [star.star_id for star in f.aavso_stars] == \
        ['118', '130', '101', '114', '124', '92']
def test_all_fov_names():
    """Verify all_fov_names() returns a list of FOV-name strings for the test directory."""
    all_names = fov.all_fov_names(fov_directory=TEST_FOV_DIRECTORY)
    assert isinstance(all_names, list)
    assert all(isinstance(name, str) for name in all_names)
    assert len(all_names) == 5
    assert "ST Tri" in all_names
def test_make_fov_dict(fov_directory=TEST_FOV_DIRECTORY):
    """Verify make_fov_dict() builds both full and name-selected Fov dictionaries,
    and that a duplicated requested name collapses to a single entry."""
    # Full dictionary:
    fov_dict = fov.make_fov_dict(fov_directory)
    assert isinstance(fov_dict, dict)
    assert all(isinstance(f, fov.Fov) for f in fov_dict.values())
    assert len(fov_dict) == 5
    assert "ST Tri" in fov_dict.keys()
    # Partial dictionary ('ST Tri' requested twice, kept once):
    fov_dict = fov.make_fov_dict(fov_directory,
                                 fov_names_selected=["ST Tri", "AU Aur", "ST Tri"])
    assert isinstance(fov_dict, dict)
    assert all(isinstance(f, fov.Fov) for f in fov_dict.values())
    assert len(fov_dict) == 2
    assert "ST Tri" in fov_dict.keys()
    assert 'Std_SA100' not in fov_dict.keys()
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,639
|
edose/photrix
|
refs/heads/master
|
/photrix/roof_dsw.py
|
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
import winsound
from time import sleep
from datetime import datetime, timezone
import requests
# from bs4 import BeautifulSoup
from util import hhmm_from_datetime_utc
STATUS_URL = 'http://deepskyportal.com/weather/BetaRoofStatusFile.txt'
STATUS_PATH_FOR_TEST = 'C:/24hrs/RoofTest.txt'
HTTP_OK_CODE = 200 # "OK. The request has succeeded."
MAX_CONSECUTIVE_TIMEOUTS = 10
MAX_CONSECUTIVE_ERRORS = 10
SECONDS_BETWEEN_CONSECUTIVE_TIMEOUTS = 10 # retry cadence.
SECONDS_BETWEEN_STARTUP_QUERIES = 5 # initial polling cadence.
SECONDS_BETWEEN_ONGOING_QUERIES = 60 # normal polling cadence.
SECONDS_WHEN_CHANGE_SENSED = 5 # fast cadence when roof status change has been sensed.
SECONDS_WHEN_STATUS_BLANK = 5
CONSISTENT_CHANGE_QUERIES_REQUIRED = 5 # number of uniform query results to confirm status change.
TAG_TO_TEST = 'Roof Status:'
SOUND_HAS_OPENED = 'SystemAsterisk' # = Win 10 Asterisk; use Sonata/Windows Error.wav
SOUND_HAS_CLOSED = 'SystemHand' # = Win 10 Critical Stop; use Sonata/Windows Critical Stop.wav
SOUND_REPETITIONS = 40
def monitor_roof(url=STATUS_URL):
    """ Make noise if roof opens or closes, according to very small roof status file from Deep Sky West.
    Polls the status URL forever (never returns normally); exits the process on repeated
    errors or blank statuses. Polling cadence adapts: fast at startup and when a change
    is suspected, slow once the status is stable.
    :param url: roof-status URL, or 'test' to read a local file (see get_status_text()). [string]
    """
    # Play each alert sound twice up front so the user learns which sound means which event.
    print('Playing OPENED sound twice, then CLOSED sound twice...')
    winsound.PlaySound(SOUND_HAS_OPENED, winsound.SND_ALIAS)
    winsound.PlaySound(SOUND_HAS_OPENED, winsound.SND_ALIAS)
    winsound.PlaySound(SOUND_HAS_CLOSED, winsound.SND_ALIAS)
    winsound.PlaySound(SOUND_HAS_CLOSED, winsound.SND_ALIAS)
    status_list = (CONSISTENT_CHANGE_QUERIES_REQUIRED + 1) * ['-'] # start without info (as dashes).
    last_event_string = ''
    n_consecutive_status_blank = 0
    while True:
        status_text = get_status_text(url)
        if not status_text.startswith('OK'):
            print(' >>>>> ERROR=', status_text, '--> STOPPING.')
            exit(0)
        immed_status = parse_immed_status(status_text)
        # BUG FIX: parse_immed_status() returns lowercase 'error'; the previous
        # comparison against 'ERROR' could never match, so parse errors were ignored.
        if immed_status == 'error':
            print(' >>>>> ERROR=', immed_status, '--> STOPPING.')
            exit(0)
        if immed_status == '':
            n_consecutive_status_blank += 1
            if n_consecutive_status_blank > MAX_CONSECUTIVE_TIMEOUTS:
                print(' >>>>> ERROR: too many consecutive blank status returns.')
                exit(0)
            else:
                print('(status blank #' + str(n_consecutive_status_blank) + ')')
                sleep(SECONDS_WHEN_STATUS_BLANK)
                continue
        else:
            n_consecutive_status_blank = 0
        status_list.append(immed_status) # so that earliest status is first list item (time reads L->R).
        status_list = status_list[1:] # pop earliest status off front of list.
        hhmm = hhmm_from_datetime_utc(datetime.now(timezone.utc))
        print(hhmm + ': is', status_list[-1] + last_event_string)
        if roof_has_opened(status_list):
            print(32 * '*', '\n >>>>> OPENED at', hhmm)
            last_event_string = ' (since ' + hhmm + ')'
            for i in range(SOUND_REPETITIONS):
                winsound.PlaySound(SOUND_HAS_OPENED, winsound.SND_ALIAS)
        elif roof_has_closed(status_list):
            print(32 * '*', '\n >>>>> CLOSED at', hhmm)
            last_event_string = ' (since ' + hhmm + ')'
            for i in range(SOUND_REPETITIONS):
                winsound.PlaySound(SOUND_HAS_CLOSED, winsound.SND_ALIAS)
        else:
            # Choose next polling delay: fast if mixed statuses or still starting up.
            if change_is_sensed(status_list):
                sleep(SECONDS_WHEN_CHANGE_SENSED)
            elif any([status == '-' for status in status_list]):
                sleep(SECONDS_BETWEEN_STARTUP_QUERIES)
            else:
                sleep(SECONDS_BETWEEN_ONGOING_QUERIES)
def get_status_text(url, request_timeout=30):
    """ Get and return status text from roof status url.
    :param url: URL holding updated roof status in Deep Sky West format,
        or 'test' to read a small local file instead. [string]
    :param request_timeout: max seconds to wait for each HTTP response;
        new parameter with default, backward-compatible. [float]
    :return status_text = status text prepended by 'OK', 'ERROR', or 'TIMEOUT' (only). [string]
    """
    if url.lower() == 'test':
        # Local test mode: the status is the first line of the test file.
        with open(STATUS_PATH_FOR_TEST) as f:
            return 'OK' + f.readlines()[0]
    for n_timeouts in range(MAX_CONSECUTIVE_TIMEOUTS):
        try:
            # A timeout= argument is required for requests.exceptions.Timeout
            # ever to be raised; without it a hung server blocks forever.
            r = requests.get(url, timeout=request_timeout)
        except requests.exceptions.Timeout:
            # n_timeouts is zero-based; report the human count.
            print(' >>>>> Warning:', str(n_timeouts + 1), 'consecutive timeouts.')
            sleep(SECONDS_BETWEEN_CONSECUTIVE_TIMEOUTS)
            continue
        except requests.exceptions.RequestException as e:
            print(e)
            return 'ERROR'
        if r.status_code != HTTP_OK_CODE:
            # Report the URL actually queried (was the global STATUS_URL).
            print(' >>>>> Could not get', url)
            return 'ERROR'
        return 'OK' + r.text
    print(' >>>>> ERROR:', str(MAX_CONSECUTIVE_TIMEOUTS), 'consecutive timeouts.')
    return 'TIMEOUT'
def parse_immed_status(status_text):
    """ Parses immediate status from text; returns (only) 'open', 'closed',
    '' (no status yet), or 'error'. [string] """
    pieces = status_text.split(TAG_TO_TEST)
    if len(pieces) < 2:
        # Tag not found: alert audibly and report a parse failure.
        print(' >>>>> ERROR: cannot parse status text >' + status_text + '<')
        winsound.PlaySound(SOUND_HAS_OPENED, winsound.SND_ALIAS)
        winsound.PlaySound(SOUND_HAS_CLOSED, winsound.SND_ALIAS)
        return 'error'
    core_text = pieces[1].strip().upper()
    if core_text == '':
        return ''
    if core_text.startswith('OPEN'):
        return 'open'
    if core_text.startswith('CLOSE'):
        return 'closed'
    print(' >>>>> ERROR: cannot parse status text >' + status_text + '<')
    return 'error'
def roof_has_opened(status_list):
    """True iff the two earliest statuses are 'closed' and every later status is 'open'."""
    earliest = status_list[:2]
    later = status_list[2:]
    return all(s == 'closed' for s in earliest) and all(s == 'open' for s in later)
def roof_has_closed(status_list):
    """True iff the two earliest statuses are 'open' and every later status is 'closed'."""
    earliest = status_list[:2]
    later = status_list[2:]
    return all(s == 'open' for s in earliest) and all(s == 'closed' for s in later)
def change_is_sensed(status_list):
    """True iff both 'open' and 'closed' appear somewhere in the status history."""
    return {'open', 'closed'} <= set(status_list)
# Script entry point: run the roof monitor loop when executed directly.
if __name__ == '__main__':
    monitor_roof()
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,640
|
edose/photrix
|
refs/heads/master
|
/test/test_process.py
|
import os
import shutil
import numpy as np
import pandas as pd
import pytest
from photrix import process
from photrix.user import Instrument, Site
from photrix.util import MixedModelFit
# Absolute path to the photrix repository root (parent of this test module's directory).
PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# All test fixture data lives under the repository's /test directory.
TEST_TOP_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "test")
def test__get_line_parms():
    """Exercise process._get_line_parms() on valid and invalid directive lines."""
    # Multi-value directive: commas and whitespace both separate parameters.
    parms, error = process._get_line_parms('#SERIAL 12, 34 44 , 42 ; this is a comment',
                                           '#SERIAL', True, 1, None)
    assert (parms, error) == (['12', '34', '44', '42'], None)
    # Single-value directive: whole remainder before the comment is one parameter.
    parms, error = process._get_line_parms('#SERIAL ST Tri-0000-V ; this is a comment',
                                           '#SERIAL', False, 1, None)
    assert (parms, error) == (['ST Tri-0000-V'], None)
    # Exact required parameter count satisfied:
    parms, error = process._get_line_parms('#JD 0.25 0.5 ; this is a comment',
                                           '#JD', True, 2, 2)
    assert (parms, error) == (['0.25', '0.5'], None)
    # Too few parameters -> no parms, explanatory error message:
    parms, error = process._get_line_parms('#JD 0.3 ; this is a comment', '#JD', True, 2, 2)
    assert parms is None
    assert 'wrong number of parameters: ' in error
    # Wrong directive name -> no parms, explanatory error message:
    parms, error = process._get_line_parms('#JX 0.3 ; this is a comment', '#JD', True, 2, 2)
    assert parms is None
    assert 'does not begin with correct directive: ' in error
def test__write_omit_txt_stub():
    """Test process._write_omit_txt_stub() for both the absent-file and the
    already-present-file cases, restoring the fixture omit.txt afterward."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit.txt')
    # Case: omit.txt does not already exist:
    savepath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit-SAVE.txt')
    shutil.copy2(fullpath, savepath)  # make a copy to restore later.
    if os.path.exists(fullpath):
        os.remove(fullpath)
    assert os.path.exists(fullpath) is False
    lines_written = process._write_omit_txt_stub(an_top_directory=TEST_TOP_DIRECTORY,
                                                 an_rel_directory=an_rel_directory)
    assert os.path.exists(fullpath) is True
    with open(fullpath, 'r') as f:
        lines = f.readlines()
    # The freshly-written stub is exactly 13 comment-only (';') lines.
    assert len(lines) == lines_written == 13
    assert all([line.startswith(';') for line in lines])
    if os.path.exists(fullpath):
        os.remove(fullpath)
    shutil.move(savepath, fullpath)  # restore saved copy.
    # Case: omit.txt does already exist (written just above):
    lines_written = process._write_omit_txt_stub(an_top_directory=TEST_TOP_DIRECTORY,
                                                 an_rel_directory=an_rel_directory)
    assert os.path.exists(fullpath) is True
    assert lines_written == 0  # an existing file must not be overwritten.
    with open(fullpath, 'r') as f:
        lines = f.readlines()
    assert all([line.startswith(';') for line in lines[0:13]])  # in case addl lines have been added
def test__write_stare_comps_txt_stub():
    """Test process._write_stare_comps_txt_stub() for both the absent-file and
    the already-present-file cases, restoring the fixture file afterward."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'stare_comps.txt')
    # Case: stare_comps.txt does not already exist:
    savepath = os.path.join(an_top_directory, an_rel_directory,
                            'Photometry', 'stare_comps-SAVE.txt')
    shutil.copy2(fullpath, savepath)  # make a copy to restore later.
    if os.path.exists(fullpath):
        os.remove(fullpath)
    assert os.path.exists(fullpath) is False
    lines_written = process._write_stare_comps_txt_stub(an_top_directory=TEST_TOP_DIRECTORY,
                                                        an_rel_directory=an_rel_directory)
    assert os.path.exists(fullpath) is True
    # The freshly-written stub is exactly 8 comment-only (';') lines.
    assert lines_written == 8
    with open(fullpath, 'r') as f:
        lines = f.readlines()
    assert all([line.startswith(';') for line in lines])
    if os.path.exists(fullpath):
        os.remove(fullpath)
    shutil.move(savepath, fullpath)  # restore saved copy.
    # Case: stare_comps.txt does already exist (as written just above):
    lines_written = process._write_stare_comps_txt_stub(an_top_directory=TEST_TOP_DIRECTORY,
                                                        an_rel_directory=an_rel_directory)
    assert os.path.exists(fullpath) is True
    assert lines_written == 0  # an existing file must not be overwritten.
    with open(fullpath, 'r') as f:
        lines = f.readlines()
    # Check all 8 stub lines (was lines[0:7], an off-by-one vs. lines_written == 8
    # above; the parallel omit.txt test checks lines[0:13] for its 13-line stub);
    # slice used in case additional lines have been added after the stub.
    assert all([line.startswith(';') for line in lines[0:8]])
def test_get_df_master():
    """Verify shape, key columns, and Serial-valued index of the master dataframe."""
    df_master = process.get_df_master(TEST_TOP_DIRECTORY, '$an_for_test')
    # Known fixture size: 2285 observation rows x 36 columns.
    assert df_master.shape == (2285, 36)
    assert all(col in df_master.columns for col in ['Serial', 'FITSfile', 'Filter'])
    # Index must mirror the Serial column exactly.
    assert list(df_master.index) == list(df_master['Serial'])
def test_apply_omit_txt():
    """Test process._apply_omit_txt() against each omit.txt directive type
    (#OBS, #STAR, #IMAGE, #JD, #SERIAL), verifying exactly which Serial numbers
    are removed from df_master and which warning lines are produced."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'

    # Nested function
    def do_apply_omit_lines(directive_lines):
        # Overwrite omit.txt with the given directives, then apply it to df_master.
        # NOTE(review): _overwrite_omit_txt appears to be a module-level helper
        # defined elsewhere in this file -- confirm against the full module.
        fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit.txt')
        # Write new omit.txt with test directive lines:
        _overwrite_omit_txt(an_top_directory=TEST_TOP_DIRECTORY,
                            an_rel_directory=an_rel_directory,
                            directive_lines=directive_lines)
        # Make & return output data:
        df_filtered, warning_lines = process._apply_omit_txt(an_top_directory=TEST_TOP_DIRECTORY,
                                                             an_rel_directory=an_rel_directory)
        return df_filtered, warning_lines

    # Case: #OBS directives (remove one star observation from one image):
    df_master = process.get_df_master(an_top_directory=TEST_TOP_DIRECTORY,
                                      an_rel_directory=an_rel_directory)  # fresh copy
    df_filtered, warning_lines = \
        do_apply_omit_lines(['#OBS Std_SA107-0001-V, 123 ; one existing star obs',
                             '#OBS RT Oph-0002-R, 152 ; another existing star obs',
                             '#OBS RZ Mon-0001-V, 16 ; one non-existant star obs (warning)',
                             '#CRAZY_DIRECTIVE XXX,999 ; raise warning'
                             ])
    assert set(df_filtered.columns) == set(df_master.columns)
    assert set(df_master.Serial) - set(df_filtered.Serial) == set([671, 1797])
    assert len(warning_lines) == 2
    assert warning_lines[0].find('#OBS RZ Mon-0001-V') != -1  # -1 is returned if a is not in b
    assert warning_lines[1].find('#CRAZY_DIRECTIVE ') != -1  # -1 is returned if a is not in b
    # Case: #STAR directives (remove a star in one filter, or in all filters):
    df_master = process.get_df_master(an_top_directory=TEST_TOP_DIRECTORY,
                                      an_rel_directory=an_rel_directory)  # fresh copy
    df_filtered, warning_lines = \
        do_apply_omit_lines(['#STAR Std_SA107, 123, V ; one existing star, V-filter only',
                             '#STAR RU Lyn, 136 ; one existing star, all filters',
                             '#STAR RZ Mon, 16 ; one non-existant star (warning)'
                             ])
    assert set(df_filtered.columns) == set(df_master.columns)
    assert set(df_master.Serial) - set(df_filtered.Serial) == set([671] +
                                                                  [709, 722, 735, 747] + [])
    assert len(warning_lines) == 1
    assert warning_lines[0].find('#STAR RZ Mon, 16') != -1  # -1 is returned if a is not in b
    # Case: #IMAGE directives (remove every observation in one image):
    df_master = process.get_df_master(an_top_directory=TEST_TOP_DIRECTORY,
                                      an_rel_directory=an_rel_directory)  # fresh copy
    df_filtered, warning_lines = \
        do_apply_omit_lines(['#IMAGE Std_SA107-0002-R ; one existing image',
                             '#IMAGE RU Lyn-0999-X ; non-existent image (warning)'
                             ])
    assert set(df_filtered.columns) == set(df_master.columns)
    assert set(df_master.Serial) - set(df_filtered.Serial) == set(list(range(683, 696)) + [])
    assert len(warning_lines) == 1
    assert warning_lines[0].find('#IMAGE RU Lyn-0999-X') != -1  # -1 is returned if a is not in b
    # Case: #JD directives (remove all images within a fractional-JD range):
    df_master = process.get_df_master(an_top_directory=TEST_TOP_DIRECTORY,
                                      an_rel_directory=an_rel_directory)  # fresh copy
    df_filtered, warning_lines = \
        do_apply_omit_lines(['#JD 0.668, 0.6695 ; two images',
                             '#JD 0.2, 0.3 ; no images (warning)'])
    assert set(df_filtered.columns) == set(df_master.columns)
    assert set(df_master.Serial) - set(df_filtered.Serial) == set(list(range(489, 512)) + [])
    assert len(warning_lines) == 1
    assert warning_lines[0].find('#JD 0.2, 0.3') != -1  # -1 is returned if a is not in b
    # Case: #SERIAL directives (remove observations by Serial number;
    # separators may be commas and/or whitespace, duplicates tolerated):
    df_master = process.get_df_master(an_top_directory=TEST_TOP_DIRECTORY,
                                      an_rel_directory=an_rel_directory)  # fresh copy
    df_filtered, warning_lines = \
        do_apply_omit_lines(['#SERIAL 99999 ; no images (warning)',
                             '#SERIAL 12, 14,15,4444 677,, 5 777 765,2000, 14 ; all ex 4444 OK'])
    assert set(df_filtered.columns) == set(df_master.columns)
    assert set(df_master.Serial) - set(df_filtered.Serial) == \
        set([12, 14, 15, 677, 5, 777, 765, 2000, 14])  # 4444 excluded; not an actual Serial #
    assert len(warning_lines) == 1
    assert warning_lines[0].find('#SERIAL 99999') != -1  # -1 is returned if a is not in b
def test_class_skymodel():
    """Construct process.SkyModel for filter V, both without and with the
    log_adu (CCD nonlinearity) fixed-effect term, then verify the input
    attributes, the mixed-model fit results, and _predict_fixed_only()."""
    # TODO: add tests for fixed-effect log_adu (parallel to FE sky_bias).
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'
    test_filter = 'V'
    directive_lines = ['#SERIAL 348 203 1884 678 182 177 1653 1880 ; V outliers',
                       '#IMAGE QZ Aql-0001-V ; crazy cirrus term',
                       '#SERIAL 352 690 ; R outliers',
                       '#SERIAL 703 875 193 ; I outliers']  # in actual R processing, for comp.
    fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit.txt')
    # Write new omit.txt with test directive lines:
    process._write_omit_txt_stub(an_top_directory=TEST_TOP_DIRECTORY,
                                 an_rel_directory=an_rel_directory)
    with open(fullpath) as f:
        lines = f.readlines()
    directive_lines = [line + '\n' for line in directive_lines]
    lines.extend(directive_lines)
    with open(fullpath, 'w') as f:
        f.writelines(lines)
    # Case 1: Model WITHOUT log_adu (CCD nonlinearity) term.
    modelV = process.SkyModel(an_top_directory=TEST_TOP_DIRECTORY,
                              an_rel_directory=an_rel_directory, filter=test_filter,
                              fit_extinction=False, fit_log_adu=False, do_plots=False)
    # Test attributes from inputs:
    assert modelV.an_top_directory == TEST_TOP_DIRECTORY
    assert modelV.an_rel_directory == an_rel_directory
    assert modelV.filter == test_filter
    assert modelV.instrument_name == 'Borea'  # default
    assert modelV.site_name == 'DSW'  # default
    assert modelV.max_cat_mag_error == 0.01  # default
    assert modelV.max_inst_mag_sigma == 0.03  # default
    assert modelV.max_color_vi == +2.5  # default
    assert modelV.saturation_adu == Instrument(modelV.instrument_name).camera['saturation_adu']
    assert modelV.fit_sky_bias is True
    assert modelV.fit_log_adu is False
    assert modelV.fit_vignette is True
    assert modelV.fit_xy is False
    assert modelV.fit_transform is False
    assert modelV.fit_extinction is False
    # Test results attributes:
    assert modelV.converged is True
    assert modelV.n_obs == 96
    assert len(modelV.df_model) == modelV.n_obs
    assert modelV.n_images == 18
    assert len(modelV.df_model['FITSfile'].drop_duplicates()) == modelV.n_images
    assert isinstance(modelV.mm_fit, MixedModelFit)
    assert len(modelV.mm_fit.df_fixed_effects) == 3
    assert modelV.transform ==\
        (Instrument(modelV.instrument_name)).transform(modelV.filter, 'V-I')
    assert modelV.extinction == Site(modelV.site_name).extinction[modelV.filter]
    assert modelV.vignette == pytest.approx(-0.00603, abs=0.0001)  # changed
    assert modelV.x == 0
    assert modelV.y == 0
    assert modelV.sky_bias == pytest.approx(0.6671, abs=0.001)
    assert modelV.log_adu == 0
    assert modelV.sigma == pytest.approx(0.0143, abs=0.001)
    # Test SkyModel._predict_fixed_only():
    df_input = pd.DataFrame({'Serial': [9997, 9998, 9999],
                             'SkyBias': [0.55, 0.9, 0.5454],
                             'Vignette': [0.322, 0, 1],
                             'CI': [0.577, 2.2, 0.12],
                             'Airmass': [1.57, 1.0, 2.1],
                             'FITSfile': ['BG Gem-0001-V.fts', 'BG Gem-0001-V.fts',
                                          'Std_SA35-0001-V.fts'],
                             'InstMag': [-7.68043698, -10.7139893, -6.500945076]},
                            index=['9997a', '9998a', '9999a'])
    expected_star_mags = [12.3553, 9.2302, 13.2887]  # ideal CatMags
    mag_predictions_fixed_only = modelV._predict_fixed_only(df_input)
    random_effect_values = modelV.df_image.loc[df_input['FITSfile'], 'Value']
    # Remember: we SUBTRACT random effects (because original fit was
    # InstMag ~ CatMag + Random Effects + offsets + fixed effects:
    mag_predictions = mag_predictions_fixed_only.values - random_effect_values.values
    assert list(mag_predictions) == pytest.approx(expected_star_mags, abs=0.0005)
    # Case 2: Model WITH log_adu (CCD nonlinearity) term.
    modelV = process.SkyModel(an_top_directory=TEST_TOP_DIRECTORY,
                              an_rel_directory=an_rel_directory, filter=test_filter,
                              fit_extinction=False, fit_log_adu=True, do_plots=False)
    # Test attributes from inputs:
    assert modelV.an_top_directory == TEST_TOP_DIRECTORY
    assert modelV.an_rel_directory == an_rel_directory
    assert modelV.filter == test_filter
    assert modelV.instrument_name == 'Borea'  # default
    assert modelV.site_name == 'DSW'  # default
    assert modelV.max_cat_mag_error == 0.01  # default
    assert modelV.max_inst_mag_sigma == 0.03  # default
    assert modelV.max_color_vi == +2.5  # default
    assert modelV.saturation_adu == Instrument(modelV.instrument_name).camera['saturation_adu']
    assert modelV.fit_sky_bias is True
    assert modelV.fit_log_adu is True
    assert modelV.fit_vignette is True
    assert modelV.fit_xy is False
    assert modelV.fit_transform is False
    assert modelV.fit_extinction is False
    # Test results attributes:
    assert modelV.converged is True
    assert modelV.n_obs == 96
    assert len(modelV.df_model) == modelV.n_obs
    assert modelV.n_images == 18
    assert len(modelV.df_model['FITSfile'].drop_duplicates()) == modelV.n_images
    assert isinstance(modelV.mm_fit, MixedModelFit)
    # One additional fixed effect vs. Case 1 (LogADU):
    assert len(modelV.mm_fit.df_fixed_effects) == 4
    assert modelV.transform ==\
        (Instrument(modelV.instrument_name)).transform(modelV.filter, 'V-I')
    assert modelV.extinction == Site(modelV.site_name).extinction[modelV.filter]
    assert modelV.vignette == pytest.approx(-0.00050, abs=0.0001)
    assert modelV.x == 0
    assert modelV.y == 0
    assert modelV.sky_bias == pytest.approx(0.5500, abs=0.001)
    assert modelV.log_adu == pytest.approx(-0.0284, abs=0.001)
    assert modelV.sigma == pytest.approx(0.0135, abs=0.001)
    # Test SkyModel._predict_fixed_only():
    df_input = pd.DataFrame({'Serial': [9997, 9998, 9999],
                             'SkyBias': [0.55, 0.9, 0.5454],
                             'LogADU': [3.1, 3.5, 3.22],
                             'Vignette': [0.322, 0, 1],
                             'CI': [0.577, 2.2, 0.12],
                             'Airmass': [1.57, 1.0, 2.1],
                             'FITSfile': ['BG Gem-0001-V.fts', 'BG Gem-0001-V.fts',
                                          'Std_SA35-0001-V.fts'],
                             'InstMag': [-7.68043698, -10.7139893, -6.500945076]},
                            index=['9997a', '9998a', '9999a'])
    expected_star_mags = [12.3921, 9.3211, 13.3284]  # ideal CatMags
    mag_predictions_fixed_only = modelV._predict_fixed_only(df_input)
    random_effect_values = modelV.df_image.loc[df_input['FITSfile'], 'Value']
    # Remember: we SUBTRACT random effects (because original fit was
    # InstMag ~ CatMag + Random Effects + offsets + fixed effects:
    mag_predictions = mag_predictions_fixed_only.values - random_effect_values.values
    assert list(mag_predictions) == pytest.approx(expected_star_mags, abs=0.001)
def test_curate_stare_comps():
    """Test process._curate_stare_comps(): a #COMPS directive keeps only the
    listed comp stars (in the named FOV and filter) and removes all others."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'

    # Nested function for testing: ----------------------------------------------
    def _do_apply_stare_comps_lines(directive_lines):
        # Rewrite stare_comps.txt with the given directives, then run the
        # omit + curate pipeline and return inputs, outputs, and warnings.
        filename = 'stare_comps.txt'
        fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', filename)
        # Write new stare_comps.txt with test directive lines:
        os.remove(fullpath)
        process._write_stare_comps_txt_stub(an_top_directory=TEST_TOP_DIRECTORY,
                                            an_rel_directory=an_rel_directory)
        with open(fullpath) as f:
            lines = f.readlines()
        directive_lines = [line + '\n' for line in directive_lines]
        lines.extend(directive_lines)
        with open(fullpath, 'w') as f:
            f.writelines(lines)
        # Make output dataframe (_apply_omit_txt():
        df_eligible_obs, _ = process._apply_omit_txt(an_top_directory=TEST_TOP_DIRECTORY,
                                                     an_rel_directory=an_rel_directory)
        df_curated_obs, warning_lines = process._curate_stare_comps(
            an_top_directory=TEST_TOP_DIRECTORY,
            an_rel_directory=an_rel_directory,
            df_in=df_eligible_obs)
        return df_eligible_obs, df_curated_obs, warning_lines
    # -------------------------------------------------------------------------------

    # Case: #COMPS directive:
    # (These are NOT the comps originally removed in processing this AN;
    #    they are set to make a better test.)
    df_eligible_obs, df_curated_obs, warning_lines = _do_apply_stare_comps_lines(
        ['#COMPS V1023 Her , V , 117,120,123 ; one FOV, keep 3 comps',
         '#CRAZY_DIRECTIVE XXX,999 ; raise warning'
         ])
    # Rows that should be removed: V1023 Her comps in V not among the 3 kept.
    rows_expected_removed = (df_eligible_obs['FOV'] == 'V1023 Her') &\
                            (df_eligible_obs['StarType'] == 'Comp') &\
                            (df_eligible_obs['Filter'] == 'V') &\
                            (~ df_eligible_obs['StarID'].isin(['117', '120', '123']))
    assert sum(rows_expected_removed) == 90  # a check before proceeding
    serials_expected_removed = set((df_eligible_obs[rows_expected_removed])['Serial'])
    serials_actually_removed = set(df_eligible_obs['Serial']) - set(df_curated_obs['Serial'])
    assert serials_actually_removed == serials_expected_removed
    # In this fixture, the only other V1023 Her V-filter comp is StarID '111',
    # so every removed observation belongs to comp 111.
    starids_removed = df_eligible_obs.loc[list(serials_actually_removed), 'StarID']
    assert all(starids_removed.isin(['111']))
    assert len(warning_lines) == 1
    assert warning_lines[0].find('Directive not understood') != -1  # -1 returned if a is not in b
    assert warning_lines[0].find('#CRAZY_DIRECTIVE ') != -1  # -1 returned if a is not in b
    # end of test_curate_stare_comps().
def test_solve_for_real_ci():
    """Round-trip test: construct observed (untransformed) magnitudes from ideal
    magnitudes, then verify the solver recovers the ideal Color Index."""
    ideal_mags = {'V': 12.5, 'I': 8.8}
    ci_filters = ['V', 'I']
    transforms = {'I': -0.044, 'V': 0.025}
    ideal_ci = ideal_mags['V'] - ideal_mags['I']
    # Apply each filter's transform to synthesize the "observed" magnitudes.
    untransformed_mags = dict()
    for f in ci_filters:
        untransformed_mags[f] = ideal_mags[f] + transforms[f] * ideal_ci
    recovered_ci = process._solve_for_real_ci(untransformed_mags, ci_filters, transforms)
    assert recovered_ci == pytest.approx(ideal_ci, abs=0.000001)
def test_extract_ci_points():
    """Test process._extract_ci_points(): pairing of adjacent observations in the
    two color-index filters into CI points, with the real CI solved from the
    untransformed magnitude difference."""
    # Case 1: only one adjacent V/I pair exists (rows 0 and 2):
    df_star_id = pd.DataFrame({'Serial': [2, 4, 6, 8],
                               'ModelStarID': 'AU XXX_111',
                               'Filter': ['V', 'R', 'I', 'I'],
                               'JD_num': [0.66, 0.67, 0.68, 0.69],
                               'CI': 0.0,
                               'UntransformedMag': [12.8, 10.2, 8.3, 8.2]})
    ci_filters = ['V', 'I']
    transforms = {'V': 0.025, 'R': -0.08, 'I': 0.052, 'XX': 1.0}
    df_result = process._extract_ci_points(df_star_id=df_star_id, ci_filters=ci_filters,
                                           transforms=transforms)
    assert len(df_result) == 1
    # CI point's JD is the midpoint of the paired observations' JDs:
    assert df_result.loc[0, 'JD_num'] == \
        (df_star_id.loc[0, 'JD_num'] + df_star_id.loc[2, 'JD_num']) / 2.0
    # Real CI = (V-I untransformed difference) / (1 + transform_V - transform_I):
    assert df_result.loc[0, 'CI'] == pytest.approx((12.8-8.3) / (1.0 + 0.025 - 0.052), abs=0.000001)
    # Case 2: interleaved V/R/I sequence yields four V/I pairings:
    df_star_id = pd.DataFrame({'Serial': np.arange(1, 9),
                               'ModelStarID': 'AU XXX_111',
                               'Filter': ['V', 'R', 'I', 'I', 'V', 'I', 'V', 'R'],
                               'JD_num': 0.66 + np.arange(8)*0.01,
                               'CI': 0.0,
                               'UntransformedMag': [12.8, 10.2, 8.3, 8.2, 12.6, 8.1, 12.5, 10.11]})
    ci_filters = ['V', 'I']
    transforms = {'V': 0.025, 'R': -0.08, 'I': 0.052, 'XX': 1.0}
    df_result = process._extract_ci_points(df_star_id=df_star_id, ci_filters=ci_filters,
                                           transforms=transforms)
    assert len(df_result) == 4
    assert list(df_result['JD_num']) == pytest.approx([0.67, 0.695, 0.705, 0.715], abs=0.00001)
    untransformed_ci = [12.8-8.3, 12.6-8.2, 12.6-8.1, 12.5-8.1]
    real_ci = [uci / (1.0 + 0.025 - 0.052) for uci in untransformed_ci]
    assert list(df_result['CI']) == pytest.approx(real_ci, abs=0.000001)
def test_impute_target_ci():
    """Test process._impute_target_ci(): color-index values imputed onto every
    observation of each star, covering 1, 2, 3, 4, 7, and zero available CI
    points (constant, linear interpolation/extrapolation, spline, and NaN)."""
    # Make a df for each interpolation case, concatenate them into one df, and test function:
    # One V/I pair only -> a single constant CI for all observations.
    df_1_ci_point = pd.DataFrame({'Serial': 1 + np.arange(4),
                                  'ModelStarID': 'AU_1_PT',
                                  'StarType': 'Target',
                                  'Filter': ['V', 'R', 'I', 'I'],
                                  'JD_mid': 2457800.35 + 0.01*np.arange(4),
                                  'CI': 0.0,
                                  'UntransformedMag': [12.8, 10.2, 8.3, 8.2]})
    # Two CI points -> linear interpolation between, constant outside.
    df_2_ci_points = pd.DataFrame({'Serial': 10 + np.arange(6),
                                   'ModelStarID': 'AU_2_PTS',
                                   'StarType': 'Target',
                                   'Filter': ['V', 'R', 'I', 'I', 'V', 'V'],
                                   'JD_mid': 2457800.45 + 0.01*np.arange(6),
                                   'CI': 0.0,
                                   'UntransformedMag': [12.9, 10.5, 8.7, 8.9, 12.6, 12.7]})
    df_3_ci_points = pd.DataFrame({'Serial': 20 + np.arange(8),
                                   'ModelStarID': 'AU_3_PTS',
                                   'StarType': 'Check',
                                   'Filter': ['V', 'R', 'I', 'I', 'V', 'V', 'R', 'I'],
                                   'JD_mid': 2457800.45 + 0.01*np.arange(8),
                                   'CI': 0.0,
                                   'UntransformedMag': [12.5, 10.5, 8.5, 8.4, 12.4, 12.3, 10, 8.8]})
    df_4_ci_points = pd.DataFrame({'Serial': 40 + np.arange(10),
                                   'ModelStarID': 'AU_4_PTS',
                                   'StarType': 'Check',
                                   'Filter': ['V', 'R', 'I', 'I', 'V', 'V', 'R', 'I', 'R', 'V'],
                                   'JD_mid': 2457800.55 + 0.005*np.arange(10),
                                   'CI': 0.0,
                                   'UntransformedMag': [12.8, 10.2, 8.3, 8.2, 12.2, 12.3, 10.2,
                                                        8.8, 10.2, 12.5]})
    df_7_ci_points = pd.DataFrame({'Serial': 60 + np.arange(11),
                                   'ModelStarID': 'AU_7_PTS',
                                   'StarType': 'Check',
                                   'Filter': ['V', 'I', 'V', 'V', 'I', 'V', 'V', 'I',
                                              'V', 'V', 'I'],
                                   'JD_mid': 2457800.65 + 0.005*np.arange(11),
                                   'CI': 0.0,
                                   'UntransformedMag': [12.6, 8.3, 12.2, 12.8, 8.3, 12.2,
                                                        12.8, 8.3, 12.2, 12.8, 8.4]})
    # No V/I pair at all -> CI cannot be imputed (expect NaN).
    df_no_ci_points = pd.DataFrame({'Serial': 80 + np.arange(4),
                                    'ModelStarID': 'AU_NO_PTS',
                                    'StarType': 'Target',
                                    'Filter': ['V', 'R', 'V', 'V'],
                                    'JD_mid': 2457800.75 + 0.02*np.arange(4),
                                    'CI': 0.0,
                                    'UntransformedMag': [12.8, 10.2, 8.3, 8.2]})
    df_predictions_checks_targets = pd.concat([df_1_ci_point, df_2_ci_points, df_3_ci_points,
                                               df_4_ci_points, df_7_ci_points, df_no_ci_points])
    df_predictions_checks_targets.index = df_predictions_checks_targets['Serial']
    ci_filters = ['V', 'I']
    transforms = {'V': 0.025, 'R': -0.08, 'I': 0.052, 'XX': 1.0}
    df_updated = process._impute_target_ci(df_predictions_checks_targets, ci_filters, transforms)
    factor = 1 / (1 + transforms['V'] - transforms['I'])  # for solving for real CI from obs CI
    # Test case: 1 CI point:
    df_1 = df_updated[df_updated['ModelStarID'] == 'AU_1_PT']  # fn output for this star
    # Verify no change
    assert list(df_1['Serial']) == list(1 + np.arange(4))
    assert list(df_1['StarType']) == len(df_1) * ['Target']
    assert list(df_1['JD_mid']) == list(df_1_ci_point['JD_mid'])
    assert list(df_1['UntransformedMag']) == list(df_1_ci_point['UntransformedMag'])
    ci_points = factor * (df_1_ci_point.iloc[0]['UntransformedMag'] -
                          df_1_ci_point.iloc[2]['UntransformedMag'])
    # Verify Color Index values:
    assert list(df_1['CI']) == len(df_1) * [pytest.approx(ci_points, abs=0.0001)]
    # Test case: 2 CI points:
    df_2 = df_updated[df_updated['ModelStarID'] == 'AU_2_PTS']  # fn output for this star
    assert list(df_2['Serial']) == list(10 + np.arange(6))
    assert list(df_2['StarType']) == len(df_2) * ['Target']
    assert list(df_2['JD_mid']) == list(df_2_ci_points['JD_mid'])
    assert list(df_2['UntransformedMag']) == list(df_2_ci_points['UntransformedMag'])
    ci_points = [factor * (df_2_ci_points.iloc[i]['UntransformedMag'] -
                           df_2_ci_points.iloc[j]['UntransformedMag'])
                 for (i, j) in [(0, 2), (4, 3)]]
    # Rows before/after the two CI points take the nearer point's value;
    # rows between them are linearly interpolated:
    assert df_2.iloc[0]['CI'] == pytest.approx(ci_points[0], abs=0.0001)
    assert df_2.iloc[1]['CI'] == pytest.approx(ci_points[0], abs=0.0001)
    assert df_2.iloc[2]['CI'] == pytest.approx(4.1110, abs=0.0001)
    assert df_2.iloc[3]['CI'] == pytest.approx(3.9054, abs=0.0001)
    assert df_2.iloc[4]['CI'] == pytest.approx(ci_points[1], abs=0.0001)
    assert df_2.iloc[5]['CI'] == pytest.approx(ci_points[1], abs=0.0001)
    # Test case: 3 CI points:
    df_3 = df_updated[df_updated['ModelStarID'] == 'AU_3_PTS']  # fn output for this star
    assert list(df_3['Serial']) == list(20 + np.arange(8))
    assert list(df_3['StarType']) == len(df_3) * ['Check']
    assert list(df_3['JD_mid']) == list(df_3_ci_points['JD_mid'])
    assert list(df_3['UntransformedMag']) == list(df_3_ci_points['UntransformedMag'])
    assert df_3.iloc[0]['CI'] == df_3.iloc[1]['CI'] == pytest.approx(4.19664, abs=0.0001)
    assert df_3.iloc[2]['CI'] == pytest.approx(4.09387, abs=0.0001)
    assert df_3.iloc[3]['CI'] == pytest.approx(3.9911, abs=0.0001)
    assert df_3.iloc[4]['CI'] == pytest.approx(3.8883, abs=0.0001)
    assert df_3.iloc[5]['CI'] == pytest.approx(3.7855, abs=0.0001)
    assert df_3.iloc[6]['CI'] == df_3.iloc[7]['CI'] == pytest.approx(3.68277, abs=0.0001)
    # Test case: 4 CI points (make a nice spline):
    df_4 = df_updated[df_updated['ModelStarID'] == 'AU_4_PTS']  # fn output for this star
    assert list(df_4['Serial']) == list(40 + np.arange(10))
    assert list(df_4['StarType']) == len(df_4) * ['Check']
    assert list(df_4['JD_mid']) == list(df_4_ci_points['JD_mid'])
    assert list(df_4['UntransformedMag']) == list(df_4_ci_points['UntransformedMag'])
    assert df_4.iloc[0]['CI'] == df_4.iloc[1]['CI'] == pytest.approx(4.62487, abs=0.0001)
    assert df_4.iloc[2]['CI'] == pytest.approx(4.47805, abs=0.0001)
    assert df_4.iloc[3]['CI'] == pytest.approx(4.24314, abs=0.0001)
    assert df_4.iloc[4]['CI'] == pytest.approx(3.97886, abs=0.0001)
    assert df_4.iloc[5]['CI'] == pytest.approx(3.74394, abs=0.0001)
    assert df_4.iloc[6]['CI'] == pytest.approx(3.59712, abs=0.0001)
    assert df_4.iloc[7]['CI'] == pytest.approx(3.59712, abs=0.0001)
    assert df_4.iloc[8]['CI'] == df_4.iloc[9]['CI'] == pytest.approx(3.80267, abs=0.0001)
    # Test case: 7 CI points:
    df_7 = df_updated[df_updated['ModelStarID'] == 'AU_7_PTS']  # fn output for this star
    assert list(df_7['Serial']) == list(60 + np.arange(11))
    assert list(df_7['StarType']) == len(df_7) * ['Check']
    assert list(df_7['JD_mid']) == list(df_7_ci_points['JD_mid'])
    assert list(df_7['UntransformedMag']) == list(df_7_ci_points['UntransformedMag'])
    assert df_7.iloc[0]['CI'] == pytest.approx(4.41340, abs=0.0001)
    assert df_7.iloc[2]['CI'] == pytest.approx(4.19340, abs=0.0001)
    assert df_7.iloc[5]['CI'] == pytest.approx(4.08470, abs=0.0001)
    assert df_7.iloc[8]['CI'] == pytest.approx(3.68003, abs=0.0001)
    assert df_7.iloc[10]['CI'] == pytest.approx(4.52080, abs=0.0001)
    # Test case: NO CI points at all:
    df_no = df_updated[df_updated['ModelStarID'] == 'AU_NO_PTS']  # fn output for this star
    assert list(df_no['Serial']) == list(80 + np.arange(4))
    assert list(df_no['StarType']) == len(df_no) * ['Target']
    assert list(df_no['JD_mid']) == list(df_no_ci_points['JD_mid'])
    assert list(df_no['UntransformedMag']) == list(df_no_ci_points['UntransformedMag'])
    assert all(np.isnan(df_no['CI']))
def test_class_predictionset():
    """Construct process.PredictionSet from three SkyModels (V, R, I) and verify
    its attributes and intermediate dataframes against values from the original
    R-language processing of this astronight's fixture data."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'
    # Ensure omit.txt and stare_comps.txt are set up before we start (no backups)
    # with directives used in original R processing of 20170504:
    # NOTE(review): _overwrite_omit_txt / _overwrite_stare_comps_txt appear to be
    # module-level helpers defined elsewhere in this file -- confirm.
    _overwrite_omit_txt(an_top_directory=an_top_directory, an_rel_directory=an_rel_directory)
    _overwrite_stare_comps_txt(an_top_directory=an_top_directory, an_rel_directory=an_rel_directory)
    # Construct skymodel objects:
    skymodel_list = []
    for test_filter in ['V', 'R', 'I']:
        skymodel_this_filter = process.SkyModel(an_top_directory=an_top_directory,
                                                an_rel_directory=an_rel_directory,
                                                filter=test_filter,
                                                fit_extinction=False, do_plots=False)
        skymodel_list.append(skymodel_this_filter)
    ps = process.PredictionSet(an_top_directory=an_top_directory,
                               an_rel_directory='$an_for_test',
                               instrument_name='Borea',
                               site_name='DSW',
                               max_inst_mag_sigma=0.05,
                               skymodel_list=skymodel_list)
    # Test basic attributes:
    assert ps.an_top_directory == an_top_directory
    assert ps.an_rel_directory == an_rel_directory
    assert ps.instrument_name == 'Borea'
    assert ps.site_name == 'DSW'
    assert ps.max_inst_mag_sigma == 0.05
    assert ps.saturation_adu == \
        Instrument(instrument_name=ps.instrument_name).camera['saturation_adu']
    assert len(ps.images_with_targets_and_comps) == 191  # matches R::images_with_comps
    # Test df_all_eligible_obs (added col LogADU in R ver 1.2.1):
    assert ps.df_all_eligible_obs.shape == (2259, 36)
    assert len(ps.df_all_eligible_obs['ModelStarID'].drop_duplicates()) == 464
    assert len(ps.df_all_eligible_obs['StarID'].drop_duplicates()) == 154
    assert len(ps.df_all_eligible_obs['FITSfile'].drop_duplicates()) == 217
    assert (ps.df_all_eligible_obs['JD_mid'] - 2457878.0).mean() == \
        pytest.approx(0.7671787, 0.000001)
    # Test df_all_curated_obs (match R::df_filtered of line 40/41, version 1.1.4,
    #    these will not have changed from df_all_eligible_obs, as stare_comps removed no comps):
    assert ps.df_all_curated_obs.shape == ps.df_all_eligible_obs.shape
    assert len(ps.df_all_curated_obs['ModelStarID'].drop_duplicates()) ==\
        len(ps.df_all_eligible_obs['ModelStarID'].drop_duplicates())
    assert len(ps.df_all_curated_obs['StarID'].drop_duplicates()) ==\
        len(ps.df_all_eligible_obs['StarID'].drop_duplicates())
    assert len(ps.df_all_curated_obs['FITSfile'].drop_duplicates()) == \
        len(ps.df_all_eligible_obs['FITSfile'].drop_duplicates())
    assert (ps.df_all_curated_obs['JD_mid'] - 2457878.0).mean() == \
        (ps.df_all_eligible_obs['JD_mid'] - 2457878.0).mean()
    # Test df_comp_mags (match R::df_estimates_comps of line 80/81, version 1.1.4),
    #    added column LogADU in R ver 1.2.1.
    # In R: df_estimates_comps includes only images with targets.
    # In py/photrix: ps.df_comp_mags includes ALL images with eligible comps incl Std FOVs etc.
    # Test R-equivalent dataframe r_df_estimates_comps:
    rows_to_keep = [ff in ps.images_with_targets_and_comps
                    for ff in ps.df_comp_mags['FITSfile'].values]
    r_df_estimates_comps = (ps.df_comp_mags.copy()).loc[rows_to_keep, :]
    assert r_df_estimates_comps.shape == (885, 22)
    assert len(r_df_estimates_comps['ModelStarID'].drop_duplicates()) == 271
    assert len(r_df_estimates_comps['StarID'].drop_duplicates()) == 79
    assert all(r_df_estimates_comps['StarType'] == 'Comp')
    assert any(np.isnan(r_df_estimates_comps['CatMag'])) is False
    assert r_df_estimates_comps['CatMag'].mean() == pytest.approx(11.74600113, abs=0.000001)
    assert (r_df_estimates_comps['JD_mid'] - 2457878.0).mean() == \
        pytest.approx(0.7606676, 0.000001)
    # Test actual dataframe ps.df_comp_mags (nb: added column LogADU in R ver 1.2.1):
    assert ps.df_comp_mags.shape == (1126, 22)
    assert len(ps.df_comp_mags['ModelStarID'].drop_duplicates()) == 310
    assert len(ps.df_comp_mags['StarID'].drop_duplicates()) == 101
    assert any(np.isnan(ps.df_comp_mags['CatMag'])) is False
    assert ps.df_comp_mags['CatMag'].mean() == pytest.approx(11.795893, abs=0.000001)
    assert (ps.df_comp_mags['JD_mid'] - 2457878.0).mean() == \
        pytest.approx(0.755980, 0.000001)
    images_r_df_estimates_comps = set(r_df_estimates_comps['FITSfile'])
    images_df_comp_mags = set(ps.df_comp_mags['FITSfile'])
    assert images_r_df_estimates_comps == set(ps.images_with_targets_and_comps)
    assert images_df_comp_mags == set(ps.images_with_eligible_comps)
    # Rigorous test of df_comp_mags via calculation from scratch:
    df = ps.df_comp_mags
    df_comp = df.loc[df['ModelStarID'] == 'RZ Mon_145']
    this_skymodel = [s_l for s_l in skymodel_list if s_l.filter == 'V'][0]
    fe = this_skymodel.mm_fit.df_fixed_effects.Value
    inst_mag = df_comp['InstMag'].iloc[0]
    # Dependent-variable offsets: transform*CI + extinction*airmass.
    dep_var_offsets = this_skymodel.transform * df_comp['CI'].iloc[0] +\
        this_skymodel.extinction * df_comp['Airmass'].iloc[0]
    raw_mm_prediction = fe.Intercept + \
        fe.SkyBias * df_comp['SkyBias'].iloc[0] +\
        fe.Vignette * df_comp['Vignette'].iloc[0] +\
        fe.LogADU * df_comp['LogADU'].iloc[0]
    estimated_mag_predicted = inst_mag - dep_var_offsets - raw_mm_prediction
    # The next line does NOT include cirrus/image effect:
    assert estimated_mag_predicted == pytest.approx(df_comp['EstimatedMag'].iloc[0], abs=0.000001)
    # Test df_cirrus_effect (SUPERSET of (not =) R::df_cirrus_effect line 164/165, version 1.2.0),
    #    that is, now includes images without targets (e.g., Std FOVs):
    assert ps.df_cirrus_effect.shape == (212, 8)
    assert set(ps.df_cirrus_effect.columns) == set(['Image', 'CirrusEffect', 'CirrusSigma',
                                                    'Criterion1', 'Criterion2', 'NumCompsUsed',
                                                    'CompIDsUsed', 'NumCompsRemoved'])
    assert ps.df_cirrus_effect['NumCompsRemoved'].sum() == 13
    assert ps.df_cirrus_effect.loc['SS Gem-0003-I.fts', 'CompIDsUsed'] == '104,110,113,95'
    assert ps.df_cirrus_effect.loc['Std_SA32-0009-I.fts', 'NumCompsUsed'] == 14
    assert ps.df_cirrus_effect.loc['Std_SA32-0009-I.fts', 'NumCompsRemoved'] == 1
    # Test the SUBSET of df_cirrus_effect with only images w/ targets (matching R:df_cirrus_effect):
    rows_with_targets = [(im in ps.images_with_targets_and_comps)
                         for im in ps.df_cirrus_effect['Image']]
    df_cirrus_effect_with_targets = ps.df_cirrus_effect[rows_with_targets]
    assert df_cirrus_effect_with_targets.shape == (191, 8)
    assert set(df_cirrus_effect_with_targets.columns) == \
        set(['Image', 'CirrusEffect', 'CirrusSigma',
             'Criterion1', 'Criterion2', 'NumCompsUsed',
             'CompIDsUsed', 'NumCompsRemoved'])
    assert df_cirrus_effect_with_targets['NumCompsRemoved'].sum() == 11
    assert df_cirrus_effect_with_targets.loc['SS Gem-0003-I.fts', 'CompIDsUsed'] == '104,110,113,95'
    assert 'Std...' not in df_cirrus_effect_with_targets['Image']
    # Test result df_transformed from _compute_transformed_mags():
    expected_columns = set(['Serial', 'ModelStarID', 'FITSfile', 'StarID', 'Chart',
                            'Xcentroid', 'Ycentroid', 'InstMag', 'InstMagSigma', 'StarType',
                            'CatMag', 'CatMagError', 'Exposure', 'JD_mid', 'Filter',
                            'Airmass', 'CI', 'SkyBias', 'Vignette', 'LogADU',
                            'UseInEnsemble', 'CirrusEffect', 'CirrusSigma', 'CompIDsUsed',
                            'Image', 'NumCompsRemoved', 'NumCompsUsed', 'JD_num',
                            'TransformedMag', 'ModelSigma', 'TotalSigma', 'FOV',
                            'MaxADU_Ur', 'FWHM', 'SkyADU', 'SkySigma'])
    assert set(ps.df_transformed.columns) == expected_columns
    assert ps.df_transformed.shape == (532, 36)
    assert list(ps.df_transformed['Serial'].iloc[[0, 10, 500]]) == [441, 332, 1589]
    assert ps.df_transformed['TransformedMag'].sum() == pytest.approx(6408.9, abs=1)  # changed
    assert ps.df_transformed['TotalSigma'].sum() == pytest.approx(10.103, abs=0.01)
def test_stare_comps():
    """Exercise process.get_stare_comps() on a small hand-built observations
    dataframe, checking its report lines for several FOV/star/filter cases."""
    fov_col = ['A', 'B', 'A', 'A', 'B', 'A', 'B', 'A', 'B', 'A']
    star_col = ['Star1', 'Star1', 'Star1', 'Star2', 'Star2', 'Star2', 'Ono',
                'Star1', 'Star2', 'Star2']
    filter_col = ['V', 'X', 'V', 'V', 'V', 'X', 'V', 'V', 'V', 'V']
    comps_col = ['12,23,34,45', '12,23,34', '12,34,45', '12,23,34,45',
                 '12,23,45', '12,23,34', '12,45', '12,23,34',
                 '12,23,34,45', '34']
    df_test = pd.DataFrame({'Serial': range(10), 'FOV': fov_col, 'StarID': star_col,
                            'Filter': filter_col, 'CompIDsUsed': comps_col})
    df_test.index = df_test['Serial']
    # FOV 'A', Star1, filter V: four comps shared by two qualifying images.
    result_A1 = process.get_stare_comps(df_test, fov='A', star_id='Star1', this_filter='V')
    assert len(result_A1) == 5
    assert result_A1[0].startswith('EDIT file ')
    assert result_A1[4].strip() == '4 comps -> 2 images qualify --> #COMPS A, V, 12, 23, 34, 45'
    # FOV 'A', Star2, filter V: qualifying image count shrinks as comps required grows.
    result_A2 = process.get_stare_comps(df_test, fov='A', star_id='Star2', this_filter='V')
    assert len(result_A2) == 5
    assert result_A2[1].strip() == '1 comps -> 2 images qualify --> #COMPS A, V, 34'
    assert result_A2[3].strip() == '3 comps -> 1 images qualify --> #COMPS A, V, 12, 23, 34'
    # Edge case: only one image (won't happen for real stares):
    result_B1 = process.get_stare_comps(df_test, fov='B', star_id='Star1', this_filter='V')
    assert len(result_B1) == 2
    assert result_B1[1].strip() == '>>> One or zero qualifying images in dataframe.'
def test_transform_model():
    """Integration test of process.TransformModel on the canned test night
    '$an_for_test', covering several fovs_to_include selection modes; numeric
    expectations were pinned from earlier verified runs."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'
    df_master = process.get_df_master(an_top_directory=an_top_directory,
                                      an_rel_directory=an_rel_directory)
    # Case: one FOV with one image:
    tm = process.TransformModel(an_top_directory=an_top_directory,
                                an_rel_directory=an_rel_directory,
                                filter='V', ci_type='V-I', fovs_to_include='RZ Mon',
                                instrument_name='Borea', site_name='DSW')
    assert tm.is_valid
    assert len(tm.image_list) == 1
    assert len(tm.fitted_values) == 12
    assert list(tm.fitted_values[:2]) == pytest.approx([-20.2926, -20.2896], abs=0.0001)
    assert tm.param_values['Intercept'] == pytest.approx(-20.1782, abs=0.0001)
    assert set(tm.param_values.index) == set(['Intercept', 'SkyBias', 'LogADU', 'CI'])
    assert len(tm.residuals) == 12
    assert list(tm.residuals[:2]) == pytest.approx([-0.00568, -0.00614], abs=0.00001)
    assert tm.transform_value == pytest.approx(-0.09770, abs=0.0001)
    assert tm.sigma == pytest.approx(0.02151, abs=0.0001)
    # Only one image, so no per-image (random) effect can be fitted:
    assert tm.image_effect is None
    # Case: one FOV with multiple images:
    tm = process.TransformModel(an_top_directory=an_top_directory,
                                an_rel_directory=an_rel_directory,
                                filter='V', ci_type='V-I', fovs_to_include='Std_SA32',
                                instrument_name='Borea', site_name='DSW')
    assert tm.is_valid
    assert len(tm.image_list) == 4
    assert all([n.startswith('Std_SA32-00') for n in tm.image_list])
    assert all([n.endswith('-V.fts') for n in tm.image_list])
    assert tm.param_values['Intercept'] == pytest.approx(-20.488572, abs=0.0001)
    # Case: list of FOVs:
    tm = process.TransformModel(an_top_directory=an_top_directory,
                                an_rel_directory=an_rel_directory,
                                filter='V', ci_type='V-I',
                                fovs_to_include=['Std_SA32', 'RZ Mon'],
                                instrument_name='Borea', site_name='DSW')
    assert tm.is_valid
    assert len(tm.image_list) == 5
    assert tm.param_values['CI'] == pytest.approx(-0.027827, abs=0.0001)
    # Case: fovs_to_include = "Standards" (test data selection only):
    tm = process.TransformModel(an_top_directory=an_top_directory,
                                an_rel_directory=an_rel_directory,
                                filter='V', ci_type='V-I', fovs_to_include='Standards',
                                instrument_name='Borea', site_name='DSW')
    assert tm.is_valid
    assert len(tm.image_list) == 7
    assert all([n.startswith('Std_') for n in tm.image_list])
    assert all([n.endswith('-V.fts') for n in tm.image_list])
    assert tm.param_values['CI'] == pytest.approx(-0.029374, abs=0.0001)
    # Case: fovs_to_include = "All" (test data selection only):
    tm = process.TransformModel(an_top_directory=an_top_directory,
                                an_rel_directory=an_rel_directory,
                                filter='V', ci_type='V-I', fovs_to_include='All',
                                instrument_name='Borea', site_name='DSW')
    assert tm.is_valid
    # Image and FOV lists must be subsets of everything present in df_master:
    all_images = df_master['FITSfile'].drop_duplicates().tolist()
    all_fovs = df_master['FOV'].drop_duplicates().tolist()
    assert len(tm.image_list) == 119
    assert set(tm.image_list) <= set(all_images)
    assert len(tm.fov_list) == 26
    assert set(tm.fov_list) <= set(all_fovs)
    assert tm.param_values['CI'] == pytest.approx(-0.047128, abs=0.0001)
    assert tm.transform_value == tm.param_values['CI']
    assert tm.transform_sigma == pytest.approx(0.00606, abs=0.0001)
    assert tm.sigma == pytest.approx(0.0215, abs=0.0001)
    # Case: fovs_to_include = "All" w/ different color index (test data selection only):
    tm = process.TransformModel(an_top_directory=an_top_directory,
                                an_rel_directory=an_rel_directory,
                                filter='R', ci_type='R-I', fovs_to_include='All',
                                instrument_name='Borea', site_name='DSW')
    assert tm.is_valid
    assert len(tm.image_list) == 18
    assert tm.transform_value == pytest.approx(+0.077440, abs=0.0001)
def test_predictionset_aavso_report():
    """Integration test: rebuild omit.txt and stare_comps.txt, fit SkyModels for
    V/R/I, construct a PredictionSet, then write the AAVSO report and check the
    returned dataframe's shape."""
    an_top_directory = TEST_TOP_DIRECTORY
    an_rel_directory = '$an_for_test'
    # Ensure omit.txt and stare_comps.txt are set up before we start (no backups)
    # with directives used in original R processing of 20170504:
    _overwrite_omit_txt(an_top_directory=an_top_directory, an_rel_directory=an_rel_directory)
    _overwrite_stare_comps_txt(an_top_directory=an_top_directory, an_rel_directory=an_rel_directory)
    # Construct skymodel objects:
    skymodel_list = []
    for test_filter in ['V', 'R', 'I']:
        skymodel_this_filter = process.SkyModel(an_top_directory=an_top_directory,
                                                an_rel_directory=an_rel_directory,
                                                filter=test_filter,
                                                fit_extinction=False, do_plots=False)
        skymodel_list.append(skymodel_this_filter)
    ps = process.PredictionSet(an_top_directory=an_top_directory,
                               an_rel_directory='$an_for_test',
                               instrument_name='Borea',
                               site_name='DSW',
                               max_inst_mag_sigma=0.05,
                               skymodel_list=skymodel_list)
    # write_file=True also exercises the on-disk report writer:
    df_report = ps.aavso_report(write_file=True, return_df=True)
    assert df_report.shape == (291, 17)
# --------------- INTERNAL TEST-HELPER FUNCTIONS ----------------------------------------------
def _overwrite_omit_txt(an_top_directory, an_rel_directory, directive_lines=None):
# No backup. Just write it:
header = [';----- This is omit.txt for AN directory_path ' + an_rel_directory,
';----- Use this file to omit observations from input to SkyModel (all filters).',
';----- Example directive lines:',
';',
';#OBS Obj-0000-V, 132 ; to omit star 132 from FITS image Obj-0000-V.fts',
';#STAR FOV, 132, V ; to omit star 132 from all FITS with FOV '
'and filter V',
';#STAR FOV, 132 ; to omit star 132 from all FITS with FOV '
'and ALL filters',
';#IMAGE Obj-0000-V ; to omit FITS image Obj-0000-V.fts specifically',
';#JD 0.72, 1 ; to omit fractional JD from 0.72 through 1',
';#SERIAL 123,77 54 6 ; to omit observations by Serial number (many per line OK)',
';',
';----- Add your directive lines:',
';']
if directive_lines is None:
# The original 20170504 lines:
directive_lines_to_write = ['#SERIAL 348 203 1884 678 182 177 1653 1880 ; V outliers',
'#IMAGE QZ Aql-0001-V ; crazy cirrus term',
'#SERIAL 352 690 ; R outliers',
'#SERIAL 703 875 193 ; I outliers']
else:
directive_lines_to_write = directive_lines
fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'omit.txt')
all_text = [line + '\n' for line in (header + directive_lines_to_write)]
with open(fullpath, 'w') as f:
f.writelines(all_text)
def _overwrite_stare_comps_txt(an_top_directory, an_rel_directory, directive_lines=None):
# No backup. Just write it:
# (These ARE the comps originally kept in processing this AN. I think there are no
# comps removed (kept them all), but that's OK since a restrictive set was tested
# in test_stare_comps() above.)
header = [';----- This is stare_comps.txt for AN directory_path ' + an_rel_directory,
';----- Select comp stars (by FOV, filter, & StarID) from input to _predict_fixed_only().',
';----- Example directive line:',
';',
';#COMPS Obj, V, 132, 133 144 ; to KEEP from FOV \'Obj\': '
'comp stars \'132\' \'133\' and \'144\' in filter \'V\'',
';',
';----- Add your directive lines:',
';']
if directive_lines is None:
# The original directive lines in AN20170504.
directive_lines_to_write = ['#COMPS V1023 Her , V , 117,120,111',
'#COMPS V1023 Her , I , 117,120,111']
else:
directive_lines_to_write = directive_lines
fullpath = os.path.join(an_top_directory, an_rel_directory, 'Photometry', 'stare_comps.txt')
all_text = [line + '\n' for line in (header + directive_lines_to_write)]
with open(fullpath, 'w') as f:
f.writelines(all_text)
def test_make_df_master():
    """Compare the df_master.csv written by the python pipeline against the one
    written by the legacy R software (2015-7) for AN 20170710, column by column:
    exact equality for object/int columns, tolerance checks for float columns.
    NOTE(review): reads hard-coded external 'C:/Astro/...' paths -- this test can
    only run on a machine with those fixture directories present."""
    # For now, do this in external directories. Later, set it up in photrix test subdirectories.
    # For now, we do not construct df_master.csv files here, we construct them separately,
    # then only compare them here.
    # Read in dataframe as constructed by R software (2015-7):
    df_r = pd.read_csv('C:/Astro/Images/Borea Photrix/20170710-R/Photometry/df_master.csv', sep=';',)
    df_r.index = df_r['Serial']
    # Execute test function (photrix, python), and read dataframe back in:
    # process.make_df_master('C:/Astro/Images/Borea Photrix', '20170710-py', ask_user=False)
    df_py = pd.read_csv('C:/Astro/Images/Borea Photrix/20170710-py/Photometry/df_master.csv', sep=';')
    df_py.index = df_py['Serial']
    assert set(df_py.columns) == set(df_r.columns)  # columns must be same, differing order OK.
    assert len(df_py) == len(df_r)
    assert df_py['Serial'].tolist() == list(range(1, 1 + len(df_py)))
    # Check column dtypes are OK (py float is acceptable where R had int):
    for col in df_py.columns:
        col_types_same = (df_py[col].dtype == df_r[col].dtype)
        col_type_ok = col_types_same or\
                      (str(df_py[col].dtype).startswith('float') and
                       str(df_r[col].dtype).startswith('int'))
        if not col_type_ok:
            print('py column', col, 'is', str(df_py[col].dtype), 'but r has', df_r[col].dtype)
        assert col_type_ok
    # Check that object and integer column contents are exactly equal,
    # and that float column contents are approximately equal.
    print()
    # NOTE(review): the next assert tests a non-empty list, which is always True;
    # all(...) was probably intended -- confirm and fix.
    assert [col[:3] in ['obj', 'str', 'int', 'flo'] for col in df_py.columns]
    py_cols = [col for col in df_py.columns
               if str(df_py[col].dtype).lower()[:3] in ['obj', 'str', 'int']] +\
              [col for col in df_py.columns
               if str(df_py[col].dtype).lower()[:3] == 'flo']  # exact cols first, then floats.
    for col in py_cols:
        print('starting col', col)
        py_type = str(df_py[col].dtype).lower()[:3]
        py_values = list(df_py[col])
        r_values = list(df_r[col])
        if col == 'UTC_start':
            assert all([py.split('+')[0] == r.replace('T', ' ')
                        for (py, r) in zip(py_values, r_values)])  # py is ISO 8601, R ~ different.
        elif col == 'SkyADU':
            # Constant offset between pipelines is tolerated:
            mean_shift = sum(py_values)/len(py_values) - sum(r_values)/len(r_values)
            assert mean_shift == pytest.approx(0, abs=2)  # seems to be the shift.
        elif col in ['SkySigma', 'FWHM']:
            # Compare as mean absolute percent difference (positive values only):
            pct_abs_diff = [abs(py - r) / ((py + r) / 2) * 100.0
                            for (py, r) in zip(py_values, r_values)
                            if py > 0 and r > 0]
            mean_pct_abs_diff = sum(pct_abs_diff) / len(pct_abs_diff)
            assert mean_pct_abs_diff < 5.0
        elif col in ['Vignette', 'X1024', 'Y1024']:
            abs_diff = [abs(py-r) for (py, r) in zip(py_values, r_values)]
            mean_abs_diff = sum(abs_diff)/len(abs_diff)
            assert mean_abs_diff < 0.004
        elif col in ['Xcentroid', 'Ycentroid']:
            abs_diff = [abs(py - r) for (py, r) in zip(py_values, r_values)]
            mean_abs_diff = sum(abs_diff) / len(abs_diff)
            assert mean_abs_diff < 0.2
        elif col == 'SkyBias':
            pass  # probably going to delete this term before too long, so don't bother testing.
        elif col == 'InstMag':
            # Only bright stars (InstMag < -6) compared; faint-star scatter expected.
            stdev_diff_bright = (pd.Series([py - r
                                            for (py, r) in zip(py_values, r_values)
                                            if py < -6])).std()
            assert stdev_diff_bright < 0.015
        elif col == 'InstMagSigma':
            max_diff = (pd.Series([abs(py - r)
                                   for (py, r) in zip(py_values, r_values)
                                   if (py > 0) and (py < 0.03)])).max()
            assert max_diff < 0.015
        elif col == 'CatMag':
            # Values must match exactly, or both be NaN:
            both_nan = [np.isnan(py) and np.isnan(r) for (py, r) in zip(py_values, r_values)]
            equal = [py == r for (py, r) in zip(py_values, r_values)]
            cat_mag_match = [b or e for (b,e) in zip(both_nan, equal)]
            assert all(cat_mag_match)
        elif col == 'CatMagError':
            # Match exactly, both NaN, or (R NaN while py filled in the per-star max):
            both_nan = [np.isnan(py) and np.isnan(r) for (py, r) in zip(py_values, r_values)]
            equal = [py == r for (py, r) in zip(py_values, r_values)]
            r_nan_py_max = []
            for (py, r, f, star) in zip(py_values, r_values, df_py['Filter'], df_py['ModelStarID']):
                this_tf = False
                if np.isnan(r) and not np.isnan(py):
                    # NOTE(review): `df_py['CatMagError'] is not None` compares the
                    # Series object to None (always True); .notnull() was probably
                    # intended -- confirm.
                    this_max = max(df_py.loc[(df_py['Filter'] == f)
                                             & (df_py['ModelStarID'] == star)
                                             & (df_py['CatMagError'] is not None),
                                             'CatMagError'])
                    this_tf = (py == this_max)
                r_nan_py_max.append(this_tf)
            cat_mag_match = [(b or e or rp) for (b, e, rp) in zip(both_nan, equal, r_nan_py_max)]
            assert all(cat_mag_match)
        elif col == 'CI':
            # Same NaN pattern, and non-NaN values within 0.001:
            py_nan = set(np.isnan(py_values))
            r_nan = set(np.isnan(r_values))
            assert py_nan == r_nan
            max_diff = pd.Series([abs(py - r) for (py, r) in zip(py_values, r_values)]).max()
            assert max_diff < 0.001
        elif py_type in ['obj', 'str', 'int']:
            if not py_values == r_values:
                print('col', col, 'differ')
            assert py_values == r_values
        else:
            # Here, both are presumed to be floats:
            py_values = [float(v) for v in py_values]
            r_values = [float(v) for v in r_values]
            if not py_values == pytest.approx(r_values):
                print('col', col, 'differ')
            assert py_values == pytest.approx(r_values)
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,641
|
edose/photrix
|
refs/heads/master
|
/photrix/util.py
|
from datetime import datetime, timedelta, timezone
import math
from math import floor, sqrt
import numpy as np
import pandas as pd
import statsmodels.regression.mixed_linear_model as sm # statsmodels version >= 0.8 !
import ephem
__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"
class Timespan:
    """ Holds one (start, end) span of time. Immutable.
    Input: 2 python datetimes (in UTC), defining start and end of timespan.
    If end precedes start, the span is clamped to zero duration at start.
    Attributes:
        start, end : the (possibly clamped) endpoints [datetime].
        seconds    : total duration in seconds [float].
        midpoint   : datetime halfway between start and end.
    methods:
        ts2 = ts.copy()
        ts2 == ts                    # only if both start and end are equal
        ts2 = ts.delay_seconds(120)  # returns new Timespan, offset in both start and end
        ts.intersect(other)          # returns overlap Timespan (zero-length if none)
        ts2 = ts.subtract(other)     # returns new Timespan; longer of 2 possible spans if ambiguous.
        ts.contains_time(t)          # returns True iff ts.start <= t <= ts.end
        ts.contains_timespan(other)  # returns True iff ts wholly contains other
        str(ts)                      # string describing start, end, and duration in seconds.
    """
    def __init__(self, start_utc, end_utc):
        self.start = start_utc
        self.end = max(start_utc, end_utc)  # clamp: duration never negative.
        # BUG FIX: previously used timedelta.seconds, which ignores the .days
        # component and so under-reported any span of one day or longer.
        self.seconds = (self.end - self.start).total_seconds()
        self.midpoint = self.start + timedelta(seconds=self.seconds / 2)

    def copy(self):
        """Return a new, equal Timespan."""
        return Timespan(self.start, self.end)

    def __eq__(self, other):
        return self.start == other.start and self.end == other.end

    def delay_seconds(self, seconds):
        """Return new Timespan, both endpoints shifted later by `seconds`."""
        delay = timedelta(seconds=seconds)
        return Timespan(self.start+delay, self.end+delay)

    def expand_seconds(self, seconds):
        """Return new Timespan expanded by `seconds` at each end.
        Use negative seconds to contract; result never shrinks past midpoint."""
        expansion = timedelta(seconds=seconds)
        new_start = min(self.start - expansion, self.midpoint)
        new_end = max(self.end + expansion, self.midpoint)
        return Timespan(new_start, new_end)

    def intersect(self, other):
        """Return overlap of the two Timespans (zero-duration if disjoint)."""
        new_start = max(self.start, other.start)
        new_end = min(self.end, other.end)
        return Timespan(new_start, new_end)

    def subtract(self, other):
        """Return self minus other; if removal splits self in two, keep the longer piece."""
        if self.intersect(other).seconds == 0:  # case: no overlap/intersection.
            return self
        if other.contains_timespan(self):  # case: self entirely subtracted away.
            return Timespan(self.start, self.start)
        if self.contains_timespan(other):  # case: 2 timespans -> take the longer.
            diff_early = Timespan(self.start, other.start)
            diff_late = Timespan(other.end, self.end)
            if diff_early.seconds >= diff_late.seconds:
                return diff_early
            else:
                return diff_late
        if self.start < other.start:  # remaining case: partial overlap.
            return Timespan(self.start, other.start)
        else:
            return Timespan(other.end, self.end)

    def contains_time(self, time_utc):
        """True iff time_utc falls within this Timespan (inclusive)."""
        return self.start <= time_utc <= self.end

    def contains_timespan(self, other):
        """True iff this Timespan wholly contains other."""
        return (self.start <= other.start) & (self.end >= other.end)

    @staticmethod
    def longer(ts1, ts2, on_tie="earlier"):
        """
        Returns Timespan with longer duration (larger .seconds).
        If equal duration:
            if_tie=="earlier", return earlier (by midpoint).
            if_tie=="first", return ts1.
        [TODO: add "random" option later to return randomly chosen ts1 or ts2.]
        :param ts1: input Timespan object.
        :param ts2: input Timespan object.
        :param on_tie: "earlier" or "first". Any other string behaves as "first".
        :return: the Timespan object with longer duration.
        """
        if ts1.seconds > ts2.seconds:
            return ts1
        if ts2.seconds > ts1.seconds:
            return ts2
        # here: equal length cases. First, try to break duration tie with earlier midpoint.
        if on_tie.lower() == "earlier" and ts1.midpoint != ts2.midpoint:
            if ts1.midpoint < ts2.midpoint:
                return ts1
            return ts2
        # here, tie-breaking has failed. So simply return first of 2 input Timespans.
        return ts1

    def __str__(self):
        return "Timespan '" + str(self.start) + "' to '" + str(self.end) + "' = " + \
               str(self.seconds) + " seconds."
class RaDec:
    """
    One sky position (Right Ascension, Declination), held internally as degrees.
    Parameters:
       ra : RA as hex-hours string, or as degrees (float)
       dec : Dec as hex-degrees string, or as degrees (float)
    """
    def __init__(self, ra, dec):
        self.ra = ra_as_degrees(ra) if isinstance(ra, str) else ra
        self.dec = dec_as_degrees(dec) if isinstance(dec, str) else dec
        self.as_degrees = self.ra, self.dec  # stored internally as degrees
        self.as_hex = ra_as_hours(self.ra), dec_as_hex(self.dec)

    def degrees_from(self, other):
        """Angular separation from `other` position, in degrees."""
        deg_per_radian = 180.0 / math.pi
        d_ra = abs(self.ra - other.ra) / deg_per_radian
        d_dec = abs(self.dec - other.dec) / deg_per_radian
        cos_dec_1 = math.cos(self.dec / deg_per_radian)
        cos_dec_2 = math.cos(other.dec / deg_per_radian)
        half_chord = math.sqrt(math.sin(d_dec / 2.0) ** 2 +
                               cos_dec_1 * cos_dec_2 * math.sin(d_ra / 2.0) ** 2)
        if half_chord > 0.001:
            # haversine formula
            return deg_per_radian * (2.0 * math.asin(half_chord))
        # spherical law of cosines (tiny-separation branch)
        sin_dec_1 = math.sin(self.dec / deg_per_radian)
        sin_dec_2 = math.sin(other.dec / deg_per_radian)
        cos_angle = sin_dec_1 * sin_dec_2 + cos_dec_1 * cos_dec_2 * math.cos(d_ra)
        return deg_per_radian * math.acos(cos_angle)

    def farther_from(self, other_ra_dec, degrees_limit):
        """True iff separation from other_ra_dec exceeds degrees_limit."""
        return self.degrees_from(other_ra_dec) > degrees_limit

    def __eq__(self, other):
        return (self.ra, self.dec) == (other.ra, other.dec)

    def __str__(self):
        ra_hex, dec_hex = self.as_hex
        return "RaDec object: " + ra_hex + " " + dec_hex

    def __repr__(self):
        ra_hex, dec_hex = self.as_hex
        return "RaDec('" + ra_hex + "', '" + dec_hex + "')"
def parse_hex(hex_string):
    """
    Split a sexagesimal string into its fields, for RA and Dec parsing.
    :param hex_string: either colon-delimited ("12:34:56.7777"),
        space-delimited ("12 34 56.7777"), or a bare number ("234.55").
    :return: list of strings representing floats (hours:min:sec or deg:arcmin:arcsec).
    """
    by_colon = [field.strip() for field in hex_string.split(':')]
    by_space = hex_string.split()  # multiple spaces act as one delimiter
    # Prefer whichever delimiter produced more fields (ties go to colon form):
    return by_space if len(by_space) > len(by_colon) else by_colon
def ra_as_degrees(ra_string):
    """
    Parse a Right Ascension string and return degrees.
    :param ra_string: hex hours ("12:34:56.7777" or "12 34 56.7777"),
        or bare degrees ("234.55").
    :return: RA in degrees [float] if within [0, 360], else None.
    """
    fields = parse_hex(ra_string)
    if len(fields) == 1:
        degrees = float(fields[0])  # bare number: already degrees.
    elif len(fields) == 2:
        degrees = 15 * (float(fields[0]) + float(fields[1]) / 60.0)  # hex hours+min.
    else:
        degrees = 15 * (float(fields[0]) + float(fields[1]) / 60.0 +
                        float(fields[2]) / 3600.0)  # hex hours+min+sec.
    if (degrees < 0) | (degrees > 360):
        return None
    return degrees
def hex_degrees_as_degrees(hex_degrees_string):
    """
    Parse a signed degrees string and return a float (not range-limited).
    :param hex_degrees_string: hex ("-12:34:56.7777" or "-12 34 56.7777"),
        or bare degrees ("-24.55").
    :return: degrees [float], unbounded.
    """
    fields = parse_hex(hex_degrees_string)
    # Sign is taken from the leading field only (handles "-0:30" correctly):
    sign = -1 if fields[0].startswith("-") else 1
    if len(fields) == 1:
        return float(fields[0])  # bare degrees; sign already included.
    magnitude = abs(float(fields[0])) + float(fields[1]) / 60.0
    if len(fields) > 2:
        magnitude = magnitude + float(fields[2]) / 3600.0
    return sign * magnitude
def dec_as_degrees(dec_string):
    """Parse a Declination string (full hex "-12:34:56.7777" or degrees "-24.55").
    :return: Dec in degrees [float] if within [-90, +90] inclusive, else None.
    """
    degrees = hex_degrees_as_degrees(dec_string)
    if (degrees < -90) | (degrees > +90):
        return None
    return degrees
def ra_as_hours(ra_degrees):
    """Format Right Ascension given in degrees as an 'hh:mm:ss.sss' hours string,
    to the nearest 0.001 RA second; return None if outside [0, 360].
    """
    if (ra_degrees < 0) | (ra_degrees > 360):
        return None
    # Work in integer milliseconds-of-RA to avoid float carry problems:
    total_ms = round((ra_degrees * 3600 * 1000) / 15)
    hours, rest = divmod(total_ms, 3600 * 1000)
    minutes, rest = divmod(rest, 60 * 1000)
    seconds = round(rest / 1000, 3)
    template = "{0:02d}:{1:02d}:{2:06.3f}"
    text = template.format(int(hours), int(minutes), seconds)
    if text[:3] == "24:":  # exactly 360 degrees wraps to zero.
        text = template.format(0, 0, 0)
    return text
def dec_as_hex(dec_degrees):
    """Format Declination given in degrees as a signed hex string
    ('+dd:mm:ss.ss', nearest 0.01 arcsecond); None if outside [-90, +90].
    """
    if (dec_degrees < -90) | (dec_degrees > +90):
        return None
    return degrees_as_hex(dec_degrees, seconds_decimal_places=2)
def degrees_as_hex(angle_degrees, seconds_decimal_places=2):
    """
    Format any angle as a signed sexagesimal string, e.g. '+12:34:30.00'.
    :param angle_degrees: any angle as degrees (unbounded).
    :param seconds_decimal_places: digits after the decimal point in seconds.
        NOTE(review): internal math rounds to milliseconds and the seconds value
        to 2 places, so requesting >2 places only pads zeros -- confirm intent.
    :return: same angle in hex notation [string].
    """
    sign = "-" if angle_degrees < 0 else "+"
    total_ms = round(abs(angle_degrees) * 3600 * 1000)  # integer milliarcseconds
    degrees, rest = divmod(total_ms, 3600 * 1000)
    minutes, rest = divmod(rest, 60 * 1000)
    seconds = round(rest / 1000, 2)
    field_width = int(seconds_decimal_places) + 3  # 2 integer digits + decimal point
    template = '{0}{1:02d}:{2:02d}:{3:0' + str(field_width) + \
               '.0' + str(int(seconds_decimal_places)) + 'f}'
    return template.format(sign, int(degrees), int(minutes), seconds)
def weighted_mean(values, weights):
    """
    Compute the weighted mean, weighted std deviation of the values, and
    weighted std deviation of the mean.
    :param values: iterable of values to be averaged.
    :param weights: iterable of weights; length must equal length of values.
    :return: 3-tuple (weighted mean, weighted std dev (population),
        weighted std dev of mean).
    :raises ValueError: on mismatched/empty inputs or non-positive weight sum.
    """
    if (len(values) != len(weights)) or (len(values) == 0) or (len(weights) == 0):
        raise ValueError('lengths of values & weights must be equal & non-zero.')
    total_weight = sum(weights)
    if total_weight <= 0:
        raise ValueError('sum of weights must be positive.')
    # Plain lists: list comprehensions can misread pandas Series indices.
    vals = list(values)
    wts = list(weights)
    norm_wts = [w / total_weight for w in wts]
    mean = sum([nw * v for (nw, v) in zip(norm_wts, vals)])
    if sum([w != 0 for w in wts]) == 1:
        # Only one effective data point: no spread is measurable.
        return mean, 0, 0
    sq_resid = [(v - mean) ** 2 for v in vals]
    sum_sq_norm_wts = sum([nw ** 2 for nw in norm_wts])
    reliability = 1.0 / (1.0 - sum_sq_norm_wts)  # reliability factor (better than N'/(N'-1))
    stdev_pop = sqrt(reliability * sum([nw * r2 for (nw, r2) in zip(norm_wts, sq_resid)]))
    stdev_of_mean = sqrt(sum_sq_norm_wts) * stdev_pop
    return mean, stdev_pop, stdev_of_mean
DEFAULT_LADDER = [1.0, 1.25, 1.6, 2.0, 2.5, 3.2, 4.0, 5.0, 6.4, 8.0, 10.0]


def ladder_round(raw_value, ladder=DEFAULT_LADDER, direction="nearest"):
    """
    Round to a near-log-scale 'ladder' value (useful for familiar exposure times).
    Handles negative numbers; zero returns zero.
    :param raw_value: the value we want to round.
    :param ladder: ascending list of values spanning exactly [1, 10] to round onto.
    :param direction: "nearest" or "down" or "up".
    :return: raw_value rounded to a ladder value scaled by a power of 10,
        e.g., 32.5 -> 32, 111 -> 100, 6321 -> 6400, -126 -> -125.
    """
    if raw_value == 0:
        return 0
    # Signed power-of-ten 'decade' containing raw_value:
    base = math.copysign(10 ** (math.floor(math.log10(math.fabs(raw_value)))), raw_value)
    target = math.fabs(raw_value / base)  # normalized into [1, 10)
    if target in ladder:
        return raw_value  # already exactly on a rung.
    for idx, upper in enumerate(ladder[1:]):
        if target < upper:
            lower = ladder[idx]  # rung just below target
            if direction == "down":
                return base * lower
            if direction == "up":
                return base * upper
            # default "nearest": compare geometric ratios to neighboring rungs.
            if target / lower <= upper / target:
                return base * lower  # round downward
            return base * upper      # round upward
def get_phase(jd, jd_epoch, period):
    """Return the phase in [0, 1) of Julian Date jd, relative to epoch jd_epoch
    and period (days)."""
    fractional = math.modf((jd - jd_epoch) / period)[0]
    # modf() keeps the sign of its argument; shift negatives into [0, 1):
    return fractional + 1 if fractional < 0 else fractional
def jd_from_datetime_utc(datetime_utc=None):
    """Convert a timezone-aware UTC datetime to Julian Date; None -> None."""
    if datetime_utc is None:
        return None
    epoch_j2000 = datetime(2000, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc)  # JD 2451544.5
    elapsed_seconds = (datetime_utc - epoch_j2000).total_seconds()
    return 2451544.5 + elapsed_seconds / (24 * 3600)
def datetime_utc_from_jd(jd=None):
    """Convert a Julian Date to a timezone-aware UTC datetime;
    None -> the current UTC time."""
    if jd is None:
        return datetime.now(timezone.utc)
    epoch_j2000 = datetime(2000, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc)  # JD 2451544.5
    return epoch_j2000 + timedelta(seconds=24 * 3600 * (jd - 2451544.5))
def hhmm_from_datetime_utc(datetime_utc):
    """Render a datetime's time-of-day as a 4-char 'hhmm' string, rounded to the
    nearest minute (NB: banker's rounding, nearest even; 23:59:30+ wraps to '0000')."""
    exact_minutes = (datetime_utc.hour * 60
                     + datetime_utc.minute
                     + datetime_utc.second / 60
                     + datetime_utc.microsecond / (60 * 1000000))
    total = round(exact_minutes) % 1440  # wrap at midnight
    return '{0:0>4d}'.format(100 * (total // 60) + total % 60)
def az_alt_at_datetime_utc(longitude, latitude, target_radec, datetime_utc):
    """Return (azimuth, altitude), both in degrees, of target_radec as seen
    from the given site at datetime_utc.
    :param longitude: site longitude: hex string (degrees) or number (degrees).
    :param latitude: site latitude: hex string (degrees) or number (degrees).
    :param target_radec: sky position [RaDec object].
    :param datetime_utc: observation time (UTC) [datetime].
    """
    obs = ephem.Observer()  # for local use.
    # BUG FIX: pyephem parses *string* assignments to .lon/.lat as degrees
    # ('dd:mm:ss.s'), but treats *float* assignments as radians. The old code
    # converted numeric degrees to radians and then passed str(...) -- which
    # ephem re-parsed as DEGREES, silently mangling any numeric input (the
    # original author flagged this with "next line wrong?" comments). Assign
    # the radians value as a float instead.
    if isinstance(longitude, str):
        obs.lon = longitude  # hex string, parsed by ephem as degrees.
    else:
        obs.lon = longitude * math.pi / 180  # float -> radians.
    if isinstance(latitude, str):
        obs.lat = latitude  # hex string, parsed by ephem as degrees.
    else:
        obs.lat = latitude * math.pi / 180  # float -> radians.
    obs.date = datetime_utc
    target_ephem = ephem.FixedBody()  # so named to suggest restricting its use to ephem.
    target_ephem._epoch = '2000'
    target_ephem._ra, target_ephem._dec = target_radec.as_hex  # text: RA in hours, Dec in deg
    target_ephem.compute(obs)
    return target_ephem.az * 180 / math.pi, target_ephem.alt * 180 / math.pi
def isfloat(string):
    """Return True iff the argument parses as a float.
    (Inputs that raise TypeError in float(), e.g. None, still raise here,
    matching the original behavior.)"""
    try:
        float(string)
    except ValueError:
        return False
    return True
def float_or_none(string):
    """Parse the argument as a float; return None (rather than raising
    ValueError) when it cannot be parsed."""
    try:
        result = float(string)
    except ValueError:
        result = None
    return result
def event_utcs_in_timespan(jd_reference, period, timespan):
    """
    Return UTC times of periodic events falling within a Timespan (max 10).
    A generalization of (and replacing) fn find_minima_in_timespan().
    :param jd_reference: Julian Date of any one occurrence of the event
        (e.g., Mira max) [float]
    :param period: event period in days [float]
    :param timespan: target timespan (start and end datetimes) [Timespan object]
    :return: list of up to 10 event UTCs within the timespan [list of datetimes];
        empty list if none; None if jd_reference or period is invalid.
    """
    if jd_reference is None or period is None:
        return None
    jd_span_start = jd_from_datetime_utc(timespan.start)
    jd_span_end = jd_from_datetime_utc(timespan.end)
    # Latest event occurring at or before the timespan's start:
    n_whole_periods = floor((jd_span_start - jd_reference) / period)
    jd_first_candidate = jd_reference + n_whole_periods * period
    event_utcs = []
    for k in range(10):  # documented cap of 10 events.
        jd_candidate = jd_first_candidate + k * period
        if jd_candidate > jd_span_end:
            break
        if jd_candidate >= jd_span_start:
            event_utcs.append(datetime_utc_from_jd(jd_candidate))
    return event_utcs
class MixedModelFit:
"""
Object: holds info for one mixed-model (py::statsmodel) fit.
Generic in nature--NOT tied to astronomical usage.
Uses formula form, i.e., statsmodel::sm.MixedLM.from_formula()
"""
def __init__(self, data, dep_var=None, fixed_vars=None, group_var=None):
"""
Executes mixed-model fit & makes data available.
:param data: input data, one variable per column, one point per row [pandas Dataframe]
:param dep_var: one column name as dependent 'Y' variable [string]
:param fixed_vars: one or more column names as independent 'X' variable [string or
list of strings]
:param group_var: one column name as group (category; random-effect) variable [string]
Usage: fit = MixedModel(df_input, 'Y', ['X1', 'X2'], 'a_group_type']
fit = MixedModel(df_input, 'Y', 'X1', 'a_group_type'] (OK if only one indep var)
"""
if not isinstance(data, pd.DataFrame):
print('Parameter \'data\' must be a pandas Dataframe of input data.')
return
if dep_var is None or fixed_vars is None or group_var is None:
print('Provide all parameters: dep_var, fixed_vars, and group_var.')
return
if not isinstance(dep_var, str) or not isinstance(group_var, str):
print('Parameters \'dep_var\' and \'group_var\' must both be strings.')
return
fixed_vars_valid = False # default if not validated
if isinstance(fixed_vars, str):
fixed_vars = list(fixed_vars)
fixed_vars_valid = True
if isinstance(fixed_vars, list):
if len(fixed_vars) >= 1:
if all([isinstance(var, str) for var in fixed_vars]):
fixed_vars_valid = True
if not fixed_vars_valid:
print('Parameter \'fixed_vars\' must be a string or a list of strings.')
return
formula = dep_var + ' ~ ' + ' + '.join(fixed_vars)
model = sm.MixedLM.from_formula(formula, groups=data[group_var], data=data)
fit = model.fit()
self.statsmodels_object = fit # instance of class MixedLMResults (py pkg statsmodels)
# Scalar and naming attributes:
self.converged = fit.converged # bool
self.nobs = fit.nobs # number of observations used in fit
self.likelihood = fit.llf
self.dep_var = dep_var
self.fixed_vars = fixed_vars
self.group_var = group_var
self.sigma = sqrt(sum(fit.resid**2)/(fit.nobs-len(fixed_vars)-2))
# Fixed-effects dataframe (joins so we don't count on consistent input ordering):
df = pd.DataFrame({'Value': fit.fe_params})
df = df.join(pd.DataFrame({'Stdev': fit.bse_fe})) # join on index (enforce consistency)
df = df.join(pd.DataFrame({'Tvalue': fit.tvalues})) # " & any random effect discarded
df = df.join(pd.DataFrame({'Pvalue': fit.pvalues})) # " & "
df['Name'] = df.index
self.df_fixed_effects = df.copy()
# Random-effect dataframe, index=GroupName, cols=GroupName, GroupValue:
df = pd.DataFrame(fit.random_effects).transpose() # DataFrame, 1 row/group
df = df.rename(columns={'groups': 'Group'}) # was 'GroupValue'
df['GroupName'] = df.index
self.df_random_effects = df.copy()
# Observation dataframe (safe to count on consistent input ordering -> easier construction):
df = pd.DataFrame({'FittedValue': fit.fittedvalues})
df['Residual'] = fit.resid
self.df_observations = df.copy()
def predict(self, df_predict_input, include_random_effect=True):
    """
    Takes new input data and renders predicted dependent-variable values.
    Optionally includes effect of groups (random effects), unlike py::statsmodels.
    :param df_predict_input: new input data used to render predictions.
        Extra (unused) columns OK; model selects only needed columns. [pandas DataFrame]
    :param include_random_effect: True to include them, False to omit/ignore [bool]
    :return: predictions of dependent-variable values matching rows of new data (pandas Series)
    """
    # Get predicted values on fixed effects only (per statsmodels' weird def. of 'predicted'):
    fixed_effect_inputs = df_predict_input[self.fixed_vars]  # 1 col per fixed effect variable
    predicted_on_fixed_only = self.statsmodels_object.predict(exog=fixed_effect_inputs)
    # If requested, add RE contribs (that were not included in MixedModels object 'fit'):
    if include_random_effect:
        df_random_effect_inputs = pd.DataFrame(df_predict_input[self.group_var])
        df_random_effect_values = self.df_random_effects[['Group']]  # was ['GroupValue']
        # Left-join on group id so rows with unseen groups get NaN rather than dropping.
        predicted_on_random_only = pd.merge(df_random_effect_inputs, df_random_effect_values,
                                            left_on=self.group_var,
                                            right_index=True, how='left',
                                            sort=False)['Group']  # was 'GroupValue'
        total_prediction = predicted_on_fixed_only + predicted_on_random_only
    else:
        total_prediction = predicted_on_fixed_only
    return total_prediction
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,642
|
edose/photrix
|
refs/heads/master
|
/test/test_acps.py
|
from photrix import acps # don't include "acps." in calls to functions and classes.
__author__ = "Eric Dose :: Bois d'Arc Observatory, Kansas"
def test_ACPS_observation():
    """Smoke-test: build an observation with two image sets and render its RTML."""
    observation = acps.ACPS_observation("ST Tri", 34.555, 21.334)
    observation.add_imageset("ST Tri", 3, 120, 'V')
    observation.add_imageset("ST Tri I filter", 3, 30, 'I')
    rtml_text = observation.rtml()
    print("\n" + rtml_text + "\n")
def test_run():
    """Smoke-test: assemble a project/plan/observation hierarchy and render RTML."""
    project = acps.ACPS_project("AN20160630-BOREA")  # for the whole night on one instrument.
    plan = project.make_plan("first plan")
    plan.horizon = 30  # override class defaults
    plan.priority = 4  # "
    observation = acps.ACPS_observation('obs_id', 37, -1)
    observation.add_imageset("", 5, 60, 'V')
    observation.add_imageset("name2", 2, 120, 'I')
    plan.add_observation(observation)
    project.add_plan(plan)
    print('\n'*2 + project.rtml())
|
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
|
36,660
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/run_analysis.py
|
import json
import os
import sys
sys.path.append(os.path.abspath('.'))
import time
import uuid
from itertools import islice
import cv2
from instaloader import Profile, Instaloader
from apis.config import DATA_FOLDER, MAX_POSTS_TO_ANALYSE, DEEPFASHION_API_KEY, IMAGE_TYPES, MS_COGNITIVE_FACE_KEY, \
MS_COGNITIVE_VISION_KEY, TEMP_FOLDER
from apis.data_groomer import DataGroomer
from apis.dl_image_analyzer import DlImageAnalyzer
from apis.photo import Photo
from apis.utils import print_report, create_dir_if_not_exists, draw_bbox_on_image
def run_analysis(instagram_user):
    """Download an Instagram profile's most engaging posts and analyze every photo.

    Fills in the user's profile fields, creates one Photo per selected post,
    runs the vision/face/apparel/pose analyzers over each photo, then grooms
    the results and prints a console report.

    :param instagram_user: InstagramUser to populate (mutated in place).
    :return: the same InstagramUser with photos and analysis attached.
    """
    create_dir_if_not_exists(DATA_FOLDER)
    create_dir_if_not_exists(TEMP_FOLDER)
    L = Instaloader()
    profile = Profile.from_username(L.context, instagram_user.instagram_id)
    posts = profile.get_posts()
    if profile.mediacount > MAX_POSTS_TO_ANALYSE:
        print("Sorting according to likes+comments ...")
        # BUG FIX: sort descending so the MOST engaged posts are kept; the
        # original ascending sort selected the least popular ones.
        posts_sorted_by_likes = sorted(profile.get_posts(),
                                       key=lambda p: p.likes + p.comments,
                                       reverse=True)
        posts = []
        for post in islice(posts_sorted_by_likes, MAX_POSTS_TO_ANALYSE):
            print("Adding post...")
            posts.append(post)
    instagram_user.name = profile.full_name
    instagram_user.followers = profile.followers
    instagram_user.profile_picture_url = profile.get_profile_pic_url()
    instagram_user.photos = []
    for post in posts:
        photo = Photo(shortcode=post.shortcode, public_url=post.url,
                      likes=post.likes, comments=post.comments, caption=post.caption)
        instagram_user.photos.append(photo)
    print("Total Photos {} by {}".format(len(instagram_user.photos), instagram_user.instagram_id))
    # enumerate avoids the original O(n^2) list.index() call per iteration
    for position, photo in enumerate(instagram_user.photos):
        print("Analysing {} of {}...".format(position, len(instagram_user.photos)))
        analyzer = DlImageAnalyzer(subscription_key=MS_COGNITIVE_VISION_KEY,
                                   face_subscription_key=MS_COGNITIVE_FACE_KEY,
                                   public_image_url=photo.public_url,
                                   deepfashion_api_key=DEEPFASHION_API_KEY)
        analyzer.analyze_image()
        photo.description = analyzer.image_caption
        photo.racy_score = analyzer.racy_score
        photo.adult_score = analyzer.adult_score
        photo.analyze_response = json.dumps(analyzer.analyze_response)
        photo.apparel_response = json.dumps(analyzer.apparel_response)
        if IMAGE_TYPES[photo.type] in ['selfie', 'people']:
            try:
                photo.face_response = json.dumps(analyzer.face_response)
                photo.emotions_json = json.dumps(analyzer.emotions_json)
            except AttributeError:
                pass  # Face API produced no response for this photo
            try:
                filename = str(uuid.uuid1()) + '.jpg'
                cv2.imwrite(os.path.join(DATA_FOLDER, filename), analyzer.pose_image)
                photo.pose_image_filename = filename
            except Exception:
                # BUG FIX: bare `except:` also swallowed KeyboardInterrupt/SystemExit.
                # The pose image is optional; skip it when missing or unwritable.
                pass
        photo.bboxed_filename = save_bboxed_objs_from_image(photo)
        time.sleep(4)  # throttle to stay under external API rate limits
    data_groomer = DataGroomer(instagram_user=instagram_user)
    data_groomer.start()
    print("Analyzed {}'s Profile Sucessfully!".format(instagram_user.instagram_id))
    print_report(data_groomer=data_groomer, instagram_user=instagram_user)
    return instagram_user
def save_bboxed_objs_from_image(photo):
    """Draw each detected object's box and label onto the photo's image.

    The annotated image is written to DATA_FOLDER under a fresh UUID name.
    Returns the generated filename.
    """
    print("Saving bboxed images...")
    annotated = photo.cv2_image
    for detection in photo.objects:
        rect = detection.rectangle
        label = detection.object + " " + str(detection.confidence)
        annotated = draw_bbox_on_image(annotated, rect.x, rect.y,
                                       rect.x + rect.w,
                                       rect.y + rect.h,
                                       label,
                                       color=(0, 255, 0),
                                       thickness=3)
    out_name = str(uuid.uuid1()) + '.jpg'
    cv2.imwrite(os.path.join(DATA_FOLDER, out_name), annotated)
    return out_name
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,661
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/data_groomer.py
|
import os
import uuid
import matplotlib.pyplot as plt
from apis.config import IMAGE_TYPES, TEMP_FOLDER
class DataGroomer:
    """Aggregates an InstagramUser's analyzed photos into stats and chart files."""

    def __init__(self, instagram_user):
        self.instagram_user = instagram_user
        # Photos bucketed by detected image type (see apis.config.IMAGE_TYPES),
        # each bucket sorted by engagement (likes + comments), highest first.
        self.portrait_photos = self.sort_photos('selfie')
        self.social_photos = self.sort_photos('people')
        self.scenery_photos = self.sort_photos('scenery')
        self.other_photos = self.sort_photos('unclassified')
        self.all_photos = self.sort_photos()
        self.most_liked_post = None
        self.most_commented_post = None
        self.most_liked_emotion = None

    def start(self):
        """Kick off report generation (currently only the racy-vs-engagement chart)."""
        self.racy_vs_engagement_chart()

    def sort_photos(self, photo_type=None):
        """Return the user's photos, optionally filtered by type name, sorted by
        engagement (likes + comments) in descending order."""
        if photo_type:
            photos = [photo for photo in self.instagram_user.photos
                      if IMAGE_TYPES.get(photo.type) == photo_type]
        else:
            # BUG FIX: copy before sorting so we don't silently reorder the
            # caller's instagram_user.photos list in place.
            photos = list(self.instagram_user.photos)
        photos.sort(key=lambda x: x.likes + x.comments, reverse=True)
        return photos

    @property
    def total_likes(self):
        """Total likes across every analyzed photo (delegates to count_likes)."""
        return self.count_likes(self.instagram_user.photos)

    @property
    def total_comments(self):
        """Total comments across every analyzed photo (delegates to count_comments)."""
        return self.count_comments(self.instagram_user.photos)

    @property
    def engagement_rate(self):
        """Engagement metric: (likes + comments) / (followers * 100), rounded to 2 places.

        NOTE(review): the conventional engagement rate is total / followers * 100;
        the original divides by followers * 100 — kept as-is pending confirmation.
        """
        followers = self.instagram_user.followers
        if not followers:
            return 0.0  # robustness: avoid ZeroDivisionError for empty/unknown profiles
        rate = (self.total_likes + self.total_comments) / (followers * 100)
        return round(rate, 2)

    def count_likes(self, photos):
        """Sum likes over *photos*, skipping items without a likes attribute."""
        total = 0
        for photo in photos:
            try:
                total += int(photo.likes)
            except AttributeError:
                pass
        return total

    def count_comments(self, photos):
        """Sum comments over *photos*, skipping items without a comments attribute."""
        total = 0
        for photo in photos:
            try:
                total += int(photo.comments)
            except AttributeError:
                pass
        return total

    def frequency_emotions(self, photos):
        """Count, per emotion name, how many photos express it with score > 0.4."""
        freq_emotion = {}
        for photo in photos:
            if photo.emotion:
                for emotion_value in photo.emotion.values():
                    if emotion_value > 0.4:
                        names = [name for name, value in photo.emotion.items()
                                 if value == emotion_value]
                        freq_emotion[names[0]] = freq_emotion.get(names[0], 0) + 1
        return freq_emotion

    def get_racy_vs_enagement_scores(self):
        """Return parallel lists (racy_scores, likes) for people photos,
        ordered by ascending racy score."""
        photos = self.social_photos
        photos.sort(key=lambda x: x.racy_score, reverse=False)
        racy_scores = []
        engagements = []
        for photo in photos:
            racy_scores.append(photo.racy_score)
            engagements.append(photo.likes)  # NOTE: 'engagement' here is likes only
        return racy_scores, engagements

    def racy_vs_engagement_chart(self):
        """Plot raciness vs likes for people photos; return the saved chart filename."""
        racy_scores, engagements = self.get_racy_vs_enagement_scores()
        plt.plot(racy_scores, engagements)
        filename = self.save_plot_to_file()
        return filename

    def emotions_distribution_chart(self):
        """Bar chart of dominant-emotion frequencies; return the saved filename."""
        dictionary = self.frequency_emotions(self.all_photos)
        plt.bar(range(len(dictionary)), list(dictionary.values()), align='center')
        plt.xticks(range(len(dictionary)), list(dictionary.keys()))
        filename = self.save_plot_to_file()
        return filename

    def emotion_vs_enagagement_chart(self):
        """Bar chart of total engagement per prominent emotion; return the filename."""
        emotion_engagement = {}
        for photo in self.all_photos:
            if photo.top_emotions:
                for emotion, value in photo.top_emotions.items():
                    emotion_engagement[emotion] = emotion_engagement.get(emotion, 0) + photo.engagement
        plt.bar(range(len(emotion_engagement)), list(emotion_engagement.values()), align='center')
        plt.xticks(range(len(emotion_engagement)), list(emotion_engagement.keys()))
        filename = self.save_plot_to_file()
        return filename

    def photo_type_vs_enagagement_chart(self):
        """Bar chart of engagement per photo-type bucket; return the filename."""
        photo_type_engage = {
            'portrait': self.count_likes(self.portrait_photos) + self.count_comments(self.portrait_photos),
            'pose': self.count_likes(self.social_photos) + self.count_comments(self.social_photos),
            'outdoor': self.count_likes(self.scenery_photos) + self.count_comments(self.scenery_photos),
            'other': self.count_likes(self.other_photos) + self.count_comments(self.other_photos),
        }
        plt.bar(range(len(photo_type_engage)), list(photo_type_engage.values()), align='center')
        plt.xticks(range(len(photo_type_engage)), list(photo_type_engage.keys()))
        filename = self.save_plot_to_file()
        return filename

    def save_plot_to_file(self):
        """Save the current matplotlib figure to TEMP_FOLDER under a fresh UUID name."""
        filename = str(uuid.uuid1()) + '.png'
        plt.savefig(os.path.join(TEMP_FOLDER, filename), bbox_inches='tight')
        plt.close()
        return filename

    @property
    def top_pose_images(self):
        """Up to 25 most-engaging people photos that have a saved pose image."""
        res = []
        for photo in self.social_photos[:25]:
            if photo.pose_image_filename:
                res.append(photo)
        return res
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,662
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/utils.py
|
import os
import cv2
from numpy import unicode
def url_to_image(url):
    """Download *url* and decode it into a BGR cv2 image (numpy array).

    :param url: publicly reachable image URL.
    :return: decoded image array, or None if the download or decode fails.
    """
    import numpy as np
    import cv2
    from six.moves.urllib.request import urlopen
    try:
        resp = urlopen(url)
        image = np.asarray(bytearray(resp.read()), dtype="uint8")
        image = cv2.imdecode(image, cv2.IMREAD_COLOR)
        return image
    except Exception as e:
        print(e)
        # BUG FIX: logging via flask's current_app raised RuntimeError when
        # called outside an application context (e.g. from the CLI runner).
        # Log best-effort instead of letting the error handler itself crash.
        try:
            from flask import current_app
            current_app.logger.error(str(e))
        except Exception:
            pass
def create_dir_if_not_exists(output_dir):
    """Create *output_dir* (including parent directories) if it does not exist.

    :param output_dir: path of the directory to ensure.
    :return: output_dir whenever the directory exists afterwards.
    :raises OSError: if the directory could not be created.
    """
    try:
        os.makedirs(output_dir)
    except OSError:
        # makedirs raises when the directory already exists; only re-raise
        # when the path is genuinely not a directory afterwards.
        if not os.path.isdir(output_dir):
            raise OSError("Failed to create " + output_dir)
    # BUG FIX: the original returned None when the directory already existed,
    # so callers could not rely on the return value.
    return output_dir
def draw_bbox_on_image(frame, x1, y1, x2, y2, text, color=(0, 0, 0), thickness=3):
    """Draw a labelled rectangle on *frame* and return the modified frame.

    Accepts the colour either as a BGR tuple or a hex string such as "#00ff00".
    """
    if type(color) in (str, unicode):
        hex_digits = color.lstrip('#')
        channel_pairs = (hex_digits[:2], hex_digits[2:4], hex_digits[4:])
        color = tuple(int(pair, 16) for pair in channel_pairs)
    cv2.rectangle(frame, (x1, y1), (x2, y2), color=color, thickness=thickness)
    # Label is drawn at the box's top-left corner.
    cv2.putText(frame, text, (x1, y1), cv2.FONT_HERSHEY_SIMPLEX, 1, color, thickness)
    return frame
def print_report(data_groomer, instagram_user):
    """Print a console summary of the analyzed profile and its generated charts."""
    groomer, user = data_groomer, instagram_user  # short local aliases
    print(user.name, "-", user.instagram_id)
    print("Profile Picture", user.profile_picture_url)
    print("Followers:", user.followers)
    print("Engagement Rate", groomer.engagement_rate)
    print("What does your post distribution look like?")
    print("Total Number of Photos: ", len(user.photos))
    print("Portraits: ", len(groomer.portrait_photos))
    print("Pose Photos: ", len(groomer.social_photos))
    print("Scenery: ", len(groomer.scenery_photos))
    print("Other Photos: ", len(groomer.other_photos))
    print("Emotions")
    print("Emotional Distribution", groomer.emotions_distribution_chart())
    print("Emotions V/s Engagement", groomer.emotion_vs_enagagement_chart())
    print("Photo Type V/s Engagement", groomer.photo_type_vs_enagagement_chart())
    print("Does racy-ness drive engagement", groomer.racy_vs_engagement_chart())
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,663
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/instagram_user.py
|
class InstagramUser:
    """Mutable container for an Instagram account's profile data and analyzed photos."""

    def __init__(self, instagram_id):
        # Handle/username used to look the profile up.
        self.instagram_id = instagram_id
        # Remaining fields start empty and are populated during analysis.
        self.photos = []
        self.name = ""
        self.profile_picture_url = ""
        self.followers = 0
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,664
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/config.py
|
# Output directories (relative to the working directory).
DATA_FOLDER = "data"  # persisted analysis artifacts (pose and bboxed images)
TEMP_FOLDER = "temp"  # generated chart images
MAX_POSTS_TO_ANALYSE = 25  # cap on posts pulled per profile
ENABLE_DEEP_FASHION = True
# API credentials — fill these in locally; keep real keys out of version control.
MS_COGNITIVE_VISION_KEY = ""
MS_COGNITIVE_FACE_KEY = ""
INSTAGRAM_USERNAME = ""
INSTAGRAM_PASSWORD = ""
DEEPFASHION_API_KEY = ""
# Numeric photo-type codes produced by photo.get_photo_type_from_categories.
IMAGE_TYPES = {
    1: 'selfie',
    2: 'people',
    3: 'scenery',
    4: 'unclassified'
}
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,665
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/photo.py
|
import json
import os
from dotmap import DotMap
from apis.utils import url_to_image
def get_photo_type_from_categories(categories):
    """Map MS Vision category scores to a numeric photo type.

    Returns 1 (portrait/selfie), 2 (people), 3 (scenery) or 4 (unclassified),
    preferring portraits over people over scenery when several match.
    Only categories with score > 0.3 are considered.
    """
    PORTRAIT, PEOPLE, SCENERY, UNCLASSIFIED = 1, 2, 3, 4
    hits = set()
    for category in categories:
        name = category['name']
        score = category['score']
        if score <= 0.3:
            continue
        if 'people_portrait' in name:
            hits.add(PORTRAIT)
        elif 'people_' in name:
            hits.add(PEOPLE)
        elif 'outdoor_' in name or 'building_' in name or 'sky_' in name:
            hits.add(SCENERY)
    for preferred in (PORTRAIT, PEOPLE, SCENERY):
        if preferred in hits:
            return preferred
    return UNCLASSIFIED
class Photo:
    """A single analyzed Instagram photo plus the raw API responses about it."""

    # Set by the serving layer so bboxed_filepath can resolve; empty by default.
    DATA_FOLDER = ""

    def __init__(self, shortcode, public_url, likes, comments, caption):
        self.filename = None
        self.bboxed_filename = None
        self.shortcode = shortcode
        self.public_url = public_url
        self.instagram_user_id = None
        self.racy_score = 0
        self.adult_score = 0
        self.golden_ratio = 0
        self.description = None
        self.analyze_response = None   # JSON string from MS Vision 'analyze'
        self.face_response = None      # JSON string from MS Face detect
        self.emotions_json = None      # JSON string: emotion name -> score
        self.apparel_response = None   # JSON string from DeepFashion
        self.pose_image_filename = None
        self.likes = likes
        self.comments = comments
        self.caption = caption

    @property
    def type(self):
        """Numeric photo type derived from Vision categories; None when unavailable."""
        try:
            categories = json.loads(self.analyze_response)['categories']
            return get_photo_type_from_categories(categories)
        except Exception:
            return None  # missing or malformed analyze_response

    @property
    def cv2_image(self):
        """Freshly downloaded cv2 image for this photo's public URL."""
        return url_to_image(self.public_url)

    @property
    def bboxed_filepath(self):
        """Filesystem path of the bboxed image, or None if not yet generated."""
        if self.bboxed_filename:
            return os.path.join(Photo.DATA_FOLDER, self.bboxed_filename)

    @property
    def emotion(self):
        """Emotion-name -> score dict; empty dict when no face analysis exists."""
        if self.emotions_json:
            return json.loads(self.emotions_json)
        else:
            return {}

    @property
    def top_emotions(self):
        """Emotions scoring above 0.3, or None when no face analysis exists."""
        if self.emotions_json:
            emotion_resp = {}
            emotions = json.loads(self.emotions_json)
            for emotion, value in emotions.items():
                if value > 0.3:
                    emotion_resp[emotion] = value
            return emotion_resp

    @property
    def objects(self):
        """Detected objects as DotMaps; {} when nothing was detected."""
        if self.analyze_response:
            detected = json.loads(self.analyze_response).get('objects')
            if detected:
                # 'obj' avoids shadowing the builtin name 'object'
                return [DotMap(obj) for obj in detected]
        return {}  # kept as {} (not []) for backward compatibility

    @property
    def engagement(self):
        """Simple engagement metric: likes + comments."""
        return self.likes + self.comments

    @property
    def ai_caption(self):
        """First Vision caption text, or None when unavailable."""
        try:
            return json.loads(self.analyze_response)['description']['captions'][0]['text']
        except Exception:
            # BUG FIX: narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            pass

    @property
    def pose_image_url(self):
        """Flask URL for the saved pose image (requires an app context)."""
        from flask import url_for
        return url_for('main.data_file', filename=self.pose_image_filename)

    @property
    def apparels(self):
        """DeepFashion 'articles' list, or None when unavailable."""
        try:
            return json.loads(self.apparel_response)['articles']
        except Exception:
            # BUG FIX: narrowed from a bare `except:` (see ai_caption).
            pass

    def __repr__(self):
        return "<Photo> {} {}".format(self.type, self.shortcode)
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,666
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/run_rumaro.py
|
import click
from apis.instagram_user import InstagramUser
from apis.run_analysis import run_analysis
@click.command()
@click.option('--instagram_id', help="Who's instagram would you like to analyze?")
def cli(instagram_id):
    """CLI entry point: run the full analysis pipeline for one Instagram account."""
    insta_user = InstagramUser(instagram_id=instagram_id)
    run_analysis(insta_user)
if __name__ == '__main__':
    cli()
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,667
|
bigdatasciencegroup/Rumaro
|
refs/heads/master
|
/apis/dl_image_analyzer.py
|
import Algorithmia
import requests
from apis.photo import get_photo_type_from_categories
from apis.pose_detection.open_pose_manager import OpenPoseManager
from apis.utils import url_to_image
class DlImageAnalyzer:
    """Runs one public image URL through MS Vision, MS Face, DeepFashion and OpenPose.

    Results are stored on the instance: analyze_response, image_caption,
    racy_score, adult_score, and — when applicable — face_response,
    emotions_json, apparel_response and pose_image.
    """

    def __init__(self, subscription_key, face_subscription_key,
                 public_image_url, deepfashion_api_key=None):
        self.deepfashion_api_key = deepfashion_api_key
        self.subscription_key = subscription_key            # MS Cognitive Vision key
        self.face_subscription_key = face_subscription_key  # MS Cognitive Face key
        self.public_image_url = public_image_url
        self.apparel_response = {}

    def analyze_image(self):
        """Run the applicable analyzers for this image; returns self for chaining."""
        self._basic_analyze()
        # Derive the photo type once; the original recomputed it for every check.
        photo_type = get_photo_type_from_categories(self.analyze_response['categories'])
        if photo_type in (1, 2):  # portrait or people -> face analysis
            self._analyze_faces()
        if self.deepfashion_api_key and photo_type == 2:
            self._analyze_apparel()
        if photo_type == 2:  # people photos also get pose estimation
            self._analyze_pose()
        return self

    def _basic_analyze(self):
        """Call the MS Vision 'analyze' endpoint; sets analyze_response, caption and scores.

        Raises an HTTPError (via raise_for_status) on a non-2xx response.
        """
        print("Sending image to Vision API...")
        vision_base_url = "https://eastus.api.cognitive.microsoft.com/vision/v2.0/"
        analyze_url = vision_base_url + "analyze"
        headers = {'Ocp-Apim-Subscription-Key': self.subscription_key}
        params = {'visualFeatures': 'Categories,Description,Color,Brands,Adult,Tags,Faces,Objects',
                  'details': 'Celebrities,Landmarks'}
        data = {'url': self.public_image_url}
        response = requests.post(analyze_url, headers=headers, params=params, json=data)
        response.raise_for_status()
        analysis = response.json()  # parse the body once; original parsed it twice
        self.analyze_response = analysis
        try:
            self.image_caption = analysis["description"]["captions"][0]["text"].capitalize()
        except Exception:
            self.image_caption = ""  # no caption returned for this image
        self.racy_score = float(analysis['adult']['racyScore'])
        self.adult_score = float(analysis['adult']['adultScore'])

    def _analyze_faces(self):
        """Call the MS Face detect endpoint; sets face_response and emotions_json.

        Leaves both unset when the response contains no detected faces.
        """
        print("Sending image to Face API...")
        face_api_url = 'https://eastus.api.cognitive.microsoft.com/face/v1.0/detect'
        headers = {'Ocp-Apim-Subscription-Key': self.face_subscription_key}
        params = {
            'returnFaceId': 'true',
            'returnFaceLandmarks': 'false',
            'returnFaceAttributes': 'age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,'
                                    'accessories,blur,exposure,noise',
        }
        response = requests.post(face_api_url, params=params, headers=headers,
                                 json={"url": self.public_image_url})
        analysis = response.json()
        try:
            self.face_response = analysis[0]  # attributes of the first detected face
            self.emotions_json = analysis[0]['faceAttributes']['emotion']
        except IndexError:
            pass  # no faces detected

    def _analyze_apparel(self):
        """Tag clothing via the Algorithmia DeepFashion model; sets apparel_response."""
        print("Sending image to DeepFashion")
        payload = {  # renamed from 'input' to avoid shadowing the builtin
            "image": self.public_image_url,
            "model": "small",
            "threshold": 0.3,
            "tags_only": True
        }
        client = Algorithmia.client(self.deepfashion_api_key)
        algo = client.algo('algorithmiahq/DeepFashion/1.3.0')
        algo.set_options(timeout=300)  # optional
        self.apparel_response = algo.pipe(payload).result

    def _analyze_pose(self):
        """Run OpenPose over the image; sets and returns pose_image."""
        print("Analysing Pose...")
        pose = OpenPoseManager(cv_image=url_to_image(self.public_image_url))
        self.pose_image = pose.run()
        return self.pose_image
|
{"/apis/run_analysis.py": ["/apis/config.py", "/apis/data_groomer.py", "/apis/dl_image_analyzer.py", "/apis/photo.py", "/apis/utils.py"], "/apis/data_groomer.py": ["/apis/config.py"], "/apis/photo.py": ["/apis/utils.py"], "/run_rumaro.py": ["/apis/instagram_user.py", "/apis/run_analysis.py"], "/apis/dl_image_analyzer.py": ["/apis/photo.py", "/apis/utils.py"]}
|
36,668
|
ChihweiLHBird/pyrfume
|
refs/heads/master
|
/pyrfume/unit_test/test_config_data.py
|
import unittest
class DataAndConfigTestCase(unittest.TestCase):
    """Unit tests for pyrfume's config handling and data load/save helpers."""

    def test_init_reset_config(self):
        """init_config/reset_config should run cleanly in any order."""
        from pyrfume import init_config, reset_config
        init_config(False)
        reset_config()
        init_config(True)

    def test_read_write_config(self):
        """A value written to the config must read back unchanged."""
        from pyrfume import read_config, write_config
        write_config("PATHS", "a", "b")
        self.assertEqual(read_config("PATHS", "a"), "b")

    def test_data_path(self):
        """set_data_path/get_data_path round-trip and reject nonexistent paths."""
        from pyrfume import set_data_path, get_data_path
        from pyrfume.base import PACKAGE_DIR, DEFAULT_DATA_PATH
        import os
        path_not_exists = PACKAGE_DIR / "THIS_IS_AN_INVALID_PATH"
        self.assertRaises(Exception, set_data_path, path_not_exists)
        self.assertRaises(Exception, get_data_path, path_not_exists)
        path1 = PACKAGE_DIR / "unit_test"
        set_data_path(path1)
        path2 = get_data_path()
        self.assertEqual(path1, path2)
        # NOTE(review): os.mkdir raises if DEFAULT_DATA_PATH already exists,
        # making this test order/state dependent — consider exist_ok handling.
        os.mkdir(DEFAULT_DATA_PATH)
        set_data_path(DEFAULT_DATA_PATH)
        path3 = get_data_path()
        self.assertEqual(path3, DEFAULT_DATA_PATH)

    def test_load_data(self):
        """save_data/load_data round-trip for pickle and CSV files."""
        import pickle, os
        from pyrfume.base import DEFAULT_DATA_PATH
        from pyrfume import load_data, save_data
        import pandas as pd
        data = {'col1': [1, 2], 'col2': [3, 4]}
        file_path = DEFAULT_DATA_PATH / "data.pkl"
        path_not_exists = DEFAULT_DATA_PATH / "THIS_IS_AN_INVALID_PATH"
        self.assertRaises(Exception, save_data, data, path_not_exists)
        save_data(data, file_path)
        data_gain = load_data(file_path)
        self.assertEqual(data_gain, data)
        os.remove(file_path)
        file_path = DEFAULT_DATA_PATH / "data.csv"
        df = pd.DataFrame(data)
        save_data(df, file_path)
        #with open(file_path, "w") as f:
        #    f.write("0,1,2,3\n0,1,2,3")
        data_gain = load_data(file_path)
        # Compare cell-by-cell; DataFrame equality is elementwise, not scalar.
        for index1 in range(len(data_gain.values)):
            for index2 in range(len(data_gain.values[index1])):
                self.assertEqual(data_gain.values[index1][index2], df.values[index1][index2])
        os.remove(file_path)

    def test_save_data(self):
        """Stub — TODO: add assertions covering save_data error cases."""
        from pyrfume.base import DEFAULT_DATA_PATH
if __name__ == '__main__':
    unittest.main()
|
{"/pyrfume/unit_test/test_config_data.py": ["/pyrfume/__init__.py", "/pyrfume/base.py"], "/pyrfume/__init__.py": ["/pyrfume/base.py"]}
|
36,669
|
ChihweiLHBird/pyrfume
|
refs/heads/master
|
/pyrfume/base.py
|
from pathlib import Path
PACKAGE_DIR = Path(__file__).resolve().parent  # directory containing the pyrfume package
CONFIG_PATH = PACKAGE_DIR / "config.ini"  # config file read/written by pyrfume.__init__
DEFAULT_DATA_PATH = PACKAGE_DIR.parent / "data"  # fallback pyrfume-data location
|
{"/pyrfume/unit_test/test_config_data.py": ["/pyrfume/__init__.py", "/pyrfume/base.py"], "/pyrfume/__init__.py": ["/pyrfume/base.py"]}
|
36,670
|
ChihweiLHBird/pyrfume
|
refs/heads/master
|
/pyrfume/pubchem.py
|
import json
import requests
def get_summary(cid):
    """Fetch the PubChem description record for compound id *cid*.

    Returns the parsed JSON response as a dict.
    """
    url = "https://pubchem.ncbi.nlm.nih.gov/rest/pug/compound/cid/%d/description/json" % cid
    response = requests.get(url)
    return json.loads(response.content)
def parse_summary_for_odor(summary):
    """Extract sentences mentioning odor/smell/aroma/fragrance from a PubChem summary."""
    statements = []
    # 'aroma ' / 'aroma,' / 'aroma.' (not bare 'aroma') so 'aromatic' — a term
    # with a special meaning in chemistry — is excluded.
    keywords = ("odor", "odour", "smell", "aroma ", "aroma,", "aroma.", "fragrance")
    if "InformationList" not in summary:
        return statements
    for item in summary["InformationList"]["Information"]:
        if "Description" not in item:
            continue
        for sentence in item["Description"].split("."):
            if any(kw in sentence.lower() for kw in keywords):
                statements.append(sentence.strip())
    return statements
def get_physical_description(cid):
    """Fetch PubChem's 'Physical Description' section for compound *cid*.

    Returns {} when the response body cannot be decoded.
    """
    url = (
        "https://pubchem.ncbi.nlm.nih.gov/rest/pug_view/data/compound/%d/JSON?heading="
        "Physical+Description"
        % cid
    )
    result = requests.get(url)
    try:
        return json.loads(result.content)
    except UnicodeDecodeError:
        return {}
def parse_physical_description_for_odor(physical_description):
    """Pull odor-related sentences out of a PubChem physical-description record."""
    statements = []
    try:
        info = physical_description["Record"]["Section"][0]["Section"][0]["Section"][0][
            "Information"
        ]
        strings = [entry["Value"]["StringWithMarkup"][0]["String"] for entry in info]
    except KeyError:
        return statements
    # 'aroma ' etc. (not bare 'aroma') so the chemistry term 'aromatic' is excluded.
    keywords = ("odor", "odour", "smell", "aroma ", "aroma,", "aroma.", "fragrance")
    for string in strings:
        for sentence in string.split("."):
            if any(kw in sentence.lower() for kw in keywords):
                statements.append(sentence.strip())
    return statements
def get_ghs_classification(cid):
    """Fetch PubChem's 'GHS Classification' section for compound *cid*.

    Returns {} when the response body cannot be decoded.
    """
    url = (
        "https://pubchem.ncbi.nlm.nih.gov/rest/pug_view/data/compound/%d/JSON?heading=GHS"
        "+Classification"
        % cid
    )
    result = requests.get(url)
    try:
        return json.loads(result.content)
    except UnicodeDecodeError:
        return {}
# Mapping of GHS hazard-statement H-codes to their standard phrases.
GHS_CODES = {
    "H300": "Fatal if swallowed.",
    "H301": "Toxic if swallowed",
    "H302": "Harmful if swallowed",
    "H303": "May be harmful if swallowed",
    "H304": "May be fatal if swallowed and enters airways",
    "H305": "May be harmful if swallowed and enters airways",
    "H310": "Fatal in contact with skin",
    "H311": "Toxic in contact with skin",
    "H312": "Harmful in contact with skin",
    "H313": "May be harmful in contact with skin",
    "H314": "Causes severe skin burns and eye damage",
    "H315": "Causes skin irritation",
    "H316": "Causes mild skin irritation",
    "H317": "May cause an allergic skin reaction",
    "H318": "Causes serious eye damage",
    "H319": "Causes serious eye irritation",
    "H320": "Causes eye irritation",
    "H330": "Fatal if inhaled",
    "H331": "Toxic if inhaled",
    "H332": "Harmful if inhaled",
    "H333": "May be harmful if inhaled",
    "H334": "May cause allergy or asthma symptoms or breathing difficulties if inhaled",
    "H335": "May cause respiratory irritation",
    "H336": "May cause drowsiness or dizziness",
    "H340": "May cause genetic defects",
    "H341": "Suspected of causing genetic defects",
    "H350": "May cause cancer",
    "H351": "Suspected of causing cancer",
    "H360": "May damage fertility or the unborn child",
    "H361": "Suspected of damaging fertility or the unborn child",
    "H361d": "Suspected of damaging the unborn child",
    "H361e": "May damage the unborn child",
    "H361f": "Suspected of damaging fertility",
    "H361g": "may damage fertility",
    "H362": "May cause harm to breast-fed children",
    "H370": "Causes damage to organs",
    "H371": "May cause damage to organs",
    "H372": "Causes damage to organs through prolonged or repeated exposure",
    "H373": "May cause damage to organs through prolonged or repeated exposure",
}
def parse_ghs_classification_for_odor(
    ghs_info,
    codes=("H330", "H331", "H334", "H340", "H350", "H350i", "H351", "H36", "H37"),
    only_percent=True,
    code_only=True,
):
    """Collect GHS hazard statements matching the given inhalation/toxicity codes.

    Args:
        ghs_info: PubChem GHS record (see get_ghs_classification).
        codes: H-code prefixes to look for.
        only_percent: require the code to be followed by ' (' (a percentage form).
        code_only: truncate each match at the first ':' (keeping just the code part).
    """
    strings = []
    if "Record" not in ghs_info:
        return strings
    blocks = ghs_info["Record"]["Section"][0]["Section"][0]["Section"][0]["Information"]
    for block in blocks:
        if block["Name"] != "GHS Hazard Statements":
            continue
        for entry in block["Value"]["StringWithMarkup"]:
            statement = entry["String"]
            for code in codes:
                needle = (code + " (") if only_percent else code
                if needle in statement:
                    if code_only:
                        statement = statement.split(":")[0]
                    strings.append(statement)
    return strings
|
{"/pyrfume/unit_test/test_config_data.py": ["/pyrfume/__init__.py", "/pyrfume/base.py"], "/pyrfume/__init__.py": ["/pyrfume/base.py"]}
|
36,671
|
ChihweiLHBird/pyrfume
|
refs/heads/master
|
/pyrfume/embedding.py
|
from scipy.spatial import distance_matrix
from scipy.stats import pearsonr, spearmanr
def embedding_distance_correlation(original_distances, embedding, quiet=False):
    """Correlate pre-embedding distances with distances in the embedded space.

    Args:
        original_distances: pandas DataFrame of pairwise distances between objects.
        embedding: fitted embedding (e.g. a TSNE object) exposing `embedding_`.
        quiet: suppress the printed summary when True.

    Returns:
        (pearson_r, spearman_rho) between the two flattened distance matrices.
    """
    coords = embedding.embedding_
    flat_transformed = distance_matrix(coords, coords).ravel()
    flat_original = original_distances.values.ravel()
    r, _ = pearsonr(flat_original, flat_transformed)
    rho, _ = spearmanr(flat_original, flat_transformed)
    if not quiet:
        print("R = %.3f; rho = %.3f" % (r, rho))
    return r, rho
|
{"/pyrfume/unit_test/test_config_data.py": ["/pyrfume/__init__.py", "/pyrfume/base.py"], "/pyrfume/__init__.py": ["/pyrfume/base.py"]}
|
36,672
|
ChihweiLHBird/pyrfume
|
refs/heads/master
|
/pyrfume/__init__.py
|
import configparser
import json
import logging
import pickle
import urllib
from pathlib import Path
import numpy as np
import pandas as pd
from tqdm.auto import tqdm, trange
from .base import CONFIG_PATH, DEFAULT_DATA_PATH
from typing import Any
logger = logging.getLogger("pyrfume")
def init_config(overwrite=False):
    """Create the config file with default sections unless it already exists."""
    if not overwrite and CONFIG_PATH.exists():
        return  # keep the user's existing configuration
    config = configparser.ConfigParser()
    config["PATHS"] = {"pyrfume-data": str(DEFAULT_DATA_PATH)}
    config["DATABASE"] = {"schema_name": "UNDEFINED"}
    with open(CONFIG_PATH, "w") as f:
        config.write(f)
def reset_config():
    """Overwrite the config file with package defaults, discarding any edits."""
    init_config(overwrite=True)
def read_config(header, key):
    """Return the config value at [header] key, creating the file if needed."""
    init_config()  # ensure the file exists before reading
    config = configparser.ConfigParser()
    config.read(CONFIG_PATH)
    return config[header][key]
def write_config(header, key, value):
    """Set section `header`, option `key` to `value` and save the config file."""
    init_config()  # ensure the config file exists before editing it
    parser = configparser.ConfigParser()
    parser.read(CONFIG_PATH)
    parser[header][key] = value
    with open(CONFIG_PATH, "w") as fp:
        parser.write(fp)
def set_data_path(path):
    """Record `path` as the pyrfume data directory.

    Raises:
        Exception: if the resolved path does not exist on disk.
    """
    resolved = Path(path).resolve()
    if not resolved.exists():
        raise Exception("Could not find path %s" % resolved)
    write_config("PATHS", "pyrfume-data", str(resolved))
def get_data_path():
    """Return the configured pyrfume data directory as a resolved Path.

    Raises:
        Exception: if the configured directory does not exist.
    """
    resolved = Path(read_config("PATHS", "pyrfume-data")).resolve()
    if resolved.exists():
        return resolved
    raise Exception("Could not find data path %s" % resolved)
def load_data(rel_path, **kwargs):
    """Load a data file from the pyrfume data directory.

    Dispatch is by extension: pickle (.pkl/.pickle/.p), Excel (.xls/.xlsx),
    anything else is read as CSV with index_col=0 unless overridden.

    Args:
        rel_path: path relative to the configured data directory.
        **kwargs: forwarded to the pandas reader (ignored for pickles).

    Returns:
        The unpickled object, or a pandas DataFrame.
    """
    full_path = get_data_path() / rel_path
    name = str(full_path)
    # str.endswith accepts a tuple, so the any([...]) wrappers are unnecessary.
    if name.endswith((".pkl", ".pickle", ".p")):
        # NOTE: pickle.load executes arbitrary code; only load trusted files.
        with open(full_path, "rb") as f:
            data = pickle.load(f)
    elif name.endswith((".xls", ".xlsx")):
        data = pd.read_excel(full_path, **kwargs)
    else:
        kwargs.setdefault("index_col", 0)
        data = pd.read_csv(full_path, **kwargs)
    return data
def save_data(data, rel_path, **kwargs):
    """Save data into the pyrfume data directory.

    Pickle extensions (.pkl/.pickle/.p) are pickled; .csv files are written
    via the object's `to_csv` (assumed pandas-like — TODO confirm callers);
    any other extension raises.

    Args:
        data: the object to save.
        rel_path: path relative to the configured data directory.
        **kwargs: forwarded to `to_csv` for CSV output.

    Raises:
        Exception: if the extension is not a supported pickle/CSV type.
    """
    full_path = get_data_path() / rel_path
    name = str(full_path)
    is_pickle = name.endswith((".pkl", ".pickle", ".p"))
    # BUG FIX: the old code iterated `for x in (".csv")`, i.e. over the
    # CHARACTERS ".", "c", "s", "v", so e.g. "foo.tsv" was misrouted to to_csv.
    is_csv = name.endswith(".csv")
    if is_pickle:
        with open(full_path, "wb") as f:
            pickle.dump(data, f)
    elif is_csv:
        data.to_csv(full_path, **kwargs)
    else:
        raise Exception("Unsupported extension in file name %s" % full_path.name)
class Mixture(object):
    """
    A mixture of molecules, defined by the presence of absence of the
    candidate molecules in the mixture.
    """

    def __init__(self, C: int, components: list=None):
        """
        Builds odorant from a list of components.

        Args:
            C: number of candidate components from which the mixture is drawn.
            components: the Component objects actually present in the mixture.
        """
        self.C = C
        self.components = components if components else []

    # Class-level defaults; `C` is shadowed by the instance attribute set in
    # __init__, and `name` is never assigned anywhere in this class.
    name = None  # Name of odorant, built from a hash of component names.
    C = None  # Number of components from which to choose.

    def components_vector(self, all_components: list=None, normalize: float=0):
        # Binary indicator vector of length C over `all_components`;
        # when `normalize` is nonzero, divided by sum(|v| ** normalize).
        vector = np.zeros(self.C)
        for component in self.components:
            vector[all_components.index(component)] = 1
        if normalize:
            denom = (np.abs(vector) ** normalize).sum()
            vector /= denom
        return vector

    @property
    def N(self):
        """
        Number of components in this odorant.
        """
        return len(self.components)

    def r(self, other):
        """
        Number of replacements (swaps) to get from self to another odorant.
        Returns None when the two odorants have different sizes.
        """
        if len(self.components) == len(other.components):
            return self.hamming(other) / 2
        else:
            return None

    def overlap(self, other, percent=False):
        """
        Overlap between self and another odorant. Complement of r.
        Optionally report result as percent relative to number of components.
        """
        overlap = self.N - self.r(other)
        if percent:
            overlap = overlap * 100.0 / self.N
        return overlap

    def hamming(self, other):
        """
        Hamming distance between self and another odorant.
        Synonymous with number of d, the number of total 'moves' to go from
        one odorant to another.
        """
        x = set(self.components)
        y = set(other.components)
        diff = len(x) + len(y) - 2 * len(x.intersection(y))
        return diff

    def add_component(self, component):
        """
        Adds one component to an odorant.
        """
        self.components.append(component)

    def remove_component(self, component):
        """
        Removes one component to an odorant.
        """
        self.components.remove(component)

    def descriptor_list(self, source):
        """
        Given a data source, returns a list of descriptors about this odorant.
        """
        descriptors = []
        for component in self.components:
            if source in component.descriptors:
                desc = component.descriptors[source]
                if type(desc) == list:
                    descriptors += desc
                if type(desc) == dict:
                    # Dict-valued descriptors keep only positively-weighted keys.
                    descriptors += [key for key, value in list(desc.items()) if value > 0.0]
        return list(set(descriptors))  # Remove duplicates.

    def descriptor_vector(self, source, all_descriptors):
        """
        Given a data source, returns a vector of descriptors about this
        odorant. The vector will contain positive floats.
        """
        vector = np.zeros(len(all_descriptors[source]))
        for component in self.components:
            if source in component.descriptors:
                desc = component.descriptors[source]
                if type(desc) == list:
                    for descriptor in desc:
                        index = all_descriptors[source].index(descriptor)
                        assert index >= 0
                        vector[index] += 1
                if type(desc) == dict:
                    # assumes sorted(desc) keys align with all_descriptors[source]
                    # ordering — TODO confirm with the data loaders.
                    this_vector = np.array([value for key, value in sorted(desc.items())])
                    vector += this_vector
        return vector

    def descriptor_vector2(self, all_descriptors):
        """
        Returns a vector of descriptors about this odorant, combining multiple
        data sources.
        """
        n_descriptors_dravnieks = len(all_descriptors["dravnieks"])
        n_descriptors_sigma_ff = len(all_descriptors["sigma_ff"])
        # Layout of the combined vector: [dravnieks block | sigma_ff block].
        vector = np.zeros(n_descriptors_dravnieks + n_descriptors_sigma_ff)
        for component in self.components:
            if "dravnieks" in component.descriptors:
                desc = component.descriptors["dravnieks"]
                this_vector = np.array([value for key, value in sorted(desc.items())])
                vector[0:n_descriptors_dravnieks] += this_vector
            elif "sigma_ff" in component.descriptors:
                desc = component.descriptors["sigma_ff"]
                for descriptor in desc:
                    index = all_descriptors["sigma_ff"].index(descriptor)
                    assert index >= 0
                    vector[n_descriptors_dravnieks + index] += 1
        return vector

    def described_components(self, source):
        """
        Given a data source, returns a list of the components which are
        described by that source, i.e. those that have descriptors.
        """
        return [component for component in self.components if source in component.descriptors]

    def n_described_components(self, source):
        """
        Given a data source, returns the number of components that are
        described by that data source.
        """
        return len(self.described_components(source))

    def fraction_components_described(self, source):
        """
        Given a data source, returns the fraction of components that are
        described by that data source.
        """
        return self.n_described_components(source) / self.N

    def matrix(self, features, weights=None):
        # Stack per-component feature vectors (one row per component whose
        # CID appears in `features`).
        matrix = np.vstack(
            [
                component.vector(features, weights=weights)
                for component in self.components
                if component.cid in features
            ]
        )
        if 0:  # matrix.shape[0] != self.N:  — disabled consistency check
            print(
                ("Mixture has %d components but only " "%d vectors were computed")
                % (self.N, matrix.shape[0])
            )
        return matrix

    def vector(self, features, weights=None, method="sum"):
        # Aggregate the component matrix into a single vector. Only "sum"
        # is implemented; any other method yields None.
        matrix = self.matrix(features, weights=weights)
        if method == "sum":
            vector = matrix.sum(axis=0)
        else:
            vector = None
        return vector

    def __str__(self):
        """
        String representation of the odorant.
        """
        return ",".join([str(x) for x in self.components])
class Component(object):
    """
    A single molecule, which may or may not be present in an odorant.
    """

    def __init__(self, component_id: int, name: str, cas: str, percent: float, solvent: "Compound"):
        """
        Components are defined by a component_id from the Bushdid et al
        supplemental material, a name, a CAS number, a percent dilution,
        and a solvent.
        """
        self.id = component_id
        self.name = name
        self.cas = cas
        self.cid_ = None  # Cached PubChem CID; filled lazily by the `cid` property.
        self.percent = percent
        self.solvent = solvent
        self.descriptors = {}  # An empty dictionary.

    @property
    def cid(self):
        # PubChem CID for this molecule. Resolved over the network on first
        # access (queried by CAS first, then by name) and cached in `cid_`.
        cid = None
        if self.cid_:
            cid = self.cid_
        else:
            url_template = "https://pubchem.ncbi.nlm.nih.gov/" "rest/pug/compound/name/%s/cids/JSON"
            for query in self.cas, self.name:
                try:
                    url = url_template % query
                    page = urllib.request.urlopen(url)
                    string = page.read().decode("utf-8")
                    json_data = json.loads(string)
                    cid = json_data["IdentifierList"]["CID"][0]
                except urllib.error.HTTPError:
                    # Lookup failed for this query string; try the next one.
                    print(query)
                else:
                    # try/else: first successful lookup wins.
                    break
            self.cid_ = cid
        return cid

    def set_descriptors(self, source: str, cas_descriptors) -> None:
        """
        Given a data source, sets descriptors for this odorant using
        a dictionary where CAS numbers are keys, and descriptors are values.
        """
        assert type(source) == str and len(source)
        if self.cas in cas_descriptors:
            self.descriptors[source] = cas_descriptors[self.cas]
            # For sigma_ff this will be a list.
            # For dravnieks this will be a dict.

    def vector(self, features, weights=None):
        # Feature vector for this molecule, or None if its CID is absent from
        # `features`. Weights default to all ones.
        if self.cid in features:
            feature_values = np.array(list(features[self.cid].values()))
            if weights is None:
                weights = np.ones(feature_values.shape)
            result = feature_values * weights
        else:
            result = None
        return result

    def __str__(self):
        return self.name
class TriangleTest(object):
    """
    One kind of experimental test, as performed by e.g. Bushdid et al.
    A 'triangle test' has three stimuli, two of which are the same two
    odorants, and is defined by those odorants.
    """

    def __init__(self, test_uid: int, odorants: list, dilution: float, correct: bool):
        """
        Tests are defined by their universal identifier (UID), the 3
        odorants used (2 should be identical), the dilution, and the
        identity of the correct response, which should be the odd-ball.
        """
        self.id = test_uid
        self.odorants = odorants
        self.dilution = dilution
        self.correct = correct

    def add_odorant(self, odorant):
        """
        Adds one odorant to this test.
        """
        self.odorants.append(odorant)

    def add_odorants(self, odorants: list):
        """
        Adds more than one odorants to this test.
        """
        self.odorants.extend(odorants)

    @property
    def double(self):
        """
        Returns the odorant present twice in this test.
        """
        for odorant in self.odorants:
            if self.odorants.count(odorant) == 2:
                return odorant
        return None

    @property
    def single(self):
        """
        Returns the odorant present once in this test.
        """
        for odorant in self.odorants:
            if self.odorants.count(odorant) == 1:
                return odorant
        return None

    @property
    def pair(self):
        """
        Returns the odorant pair in this test, with the odorant present
        twice listed first.
        """
        return (self.double, self.single)

    @property
    def N(self):
        """
        Returns the number of components in each of the odorants.
        This a single value since they should all have the same number
        of components.
        """
        return self.double.N

    @property
    def r(self):
        """
        Returns the number of component replacements (swaps) separating one of
        the odorants from the other.
        """
        return self.double.r(self.single)

    def overlap(self, percent=False):
        """
        Returns the overlap (complement of r) between the two odorants.
        Optionally returns this as a percentage of N.
        """
        return self.double.overlap(self.single, percent=percent)

    @property
    def common_components(self):
        """
        Returns a list of components common to the two odorants.
        """
        d = set(self.double.components)
        s = set(self.single.components)
        return list(s.intersection(d))

    @property
    def unique_components(self):
        """
        Returns a list of components that exactly one of the two odorants has.
        """
        d = set(self.double.components)
        s = set(self.single.components)
        return list(s.symmetric_difference(d))

    def unique_descriptors(self, source):
        """
        Given a data source, returns a list of descriptors that
        exactly one of the two odorants has.
        """
        sl = self.single.descriptor_list(source)
        dl = self.double.descriptor_list(source)
        unique = set(dl).symmetric_difference(set(sl))
        return list(unique)

    def common_descriptors(self, source):
        """
        Given a data source, returns a list of descriptors that
        are common to the two odorants.
        """
        sl = self.single.descriptor_list(source)
        dl = self.double.descriptor_list(source)
        unique = set(dl).intersection(set(sl))
        return list(unique)

    def descriptors_correlation(self, source, all_descriptors):
        """
        Given a data source, returns the correlation between the descriptors
        of the two odorants.
        """
        sv = self.single.descriptor_vector(source, all_descriptors)
        dv = self.double.descriptor_vector(source, all_descriptors)
        # Off-diagonal element of the 2x2 correlation matrix.
        return np.corrcoef(sv, dv)[1][0]

    def descriptors_correlation2(self, all_descriptors):
        """
        Returns the correlation between the descriptors
        of the two odorants, combining multiple data sources.
        """
        sv = self.single.descriptor_vector2(all_descriptors)
        dv = self.double.descriptor_vector2(all_descriptors)
        return np.corrcoef(sv, dv)[1][0]

    def descriptors_difference(self, source, all_descriptors):
        """
        Given a data source, returns the absolute difference between the
        descriptors of the two odorants.
        """
        sv = self.single.descriptor_vector(source, all_descriptors)
        dv = self.double.descriptor_vector(source, all_descriptors)
        return np.abs(sv - dv)

    def n_undescribed(self, source):
        """
        Given a data source, returns the number of components from among the
        two odorants that are not described by that source,
        as a tuple (undescribed in double, undescribed in single).
        """
        d = self.double.n_described_components(source)
        s = self.single.n_described_components(source)
        return (self.N - d, self.N - s)

    @classmethod
    def length(cls, v):
        # Euclidean length of vector v.
        return np.sqrt(np.dot(v, v))

    @classmethod
    def find_angle(cls, v1, v2):
        # Angle (radians) between two vectors via the normalized dot product.
        return np.arccos(np.dot(v1, v2) / (cls.length(v1) * cls.length(v2)))

    @classmethod
    def circmean(cls, angles):
        # Circular mean of a sequence of angles.
        return np.arctan2(np.mean(np.sin(angles)), np.mean(np.cos(angles)))

    def angle(self, features, weights=None, method="sum", method_param=1.0):
        """Angle between the two odorants' feature representations.

        method="sum" compares summed component vectors; method="nn" averages
        geometrically-weighted sorted nearest-neighbor angles between the
        component matrices (method_param is the geometric pmf parameter).
        Returns None for any other method.
        """
        angle = None
        if method == "sum":
            v1 = self.single.vector(features, weights=weights, method=method)
            v2 = self.double.vector(features, weights=weights, method=method)
            angle = self.find_angle(v1, v2)
        elif method == "nn":  # Nearest-Neighbor.
            m1 = self.single.matrix(features, weights=weights)
            m2 = self.double.matrix(features, weights=weights)
            angles = []
            for i in range(m1.shape[0]):
                angles_i = []
                for j in range(m2.shape[0]):
                    one_angle = self.find_angle(m1[i, :], m2[j, :])
                    if np.isnan(one_angle):
                        # Degenerate (zero-length) vectors yield NaN; clamp to 1.0.
                        one_angle = 1.0
                    angles_i.append(one_angle)
                angles_i = np.array(sorted(angles_i))
                # NOTE(review): import inside the loop; could be hoisted.
                from scipy.stats import geom
                weights_i = geom.pmf(range(1, len(angles_i) + 1), method_param)
                angles.append(np.dot(angles_i, weights_i))
            angle = np.abs(angles).mean()  # circmean(angles)
        return angle

    def norm(self, features, order=1, weights=None, method="sum"):
        # Sum of |v1 - v2| ** order between the two odorants' vectors.
        v1 = self.single.vector(features, weights=weights, method=method)
        v2 = self.double.vector(features, weights=weights, method=method)
        dv = v1 - v2
        dv = np.abs(dv) ** order
        return np.sum(dv)

    def distance(self, features, weights=None, method="sum"):
        # Euclidean distance between the two odorants' vectors.
        v1 = self.single.vector(features, weights=weights, method=method)
        v2 = self.double.vector(features, weights=weights, method=method)
        return np.sqrt(((v1 - v2) ** 2).sum())

    def fraction_correct(self, results: list):
        # Fraction of the given results that belong to THIS test (matched by
        # id) and were answered correctly.
        num, denom = 0.0, 0.0
        for result in results:
            if result.test.id == self.id:
                num += result.correct
                denom += 1
        return num / denom
class Result(object):
    """
    A test result, corresponding to one test given to one subject.
    """

    def __init__(self, test: TriangleTest, subject_id: int, correct: bool):
        """
        Record which test was taken, which subject took it, and whether
        the subject answered correctly.
        """
        self.test, self.subject_id, self.correct = test, subject_id, correct
class Distance(object):
    """
    An odorant distance, corresponding to distance between two odorants.
    No particular implementation for computing distance is mandated.
    """

    def __init__(self, odorant_i: Any, odorant_j: Any, distance: float):
        """Store the two odorants and the distance between them."""
        self.odorant_i, self.odorant_j = odorant_i, odorant_j
        self.distance = distance
def odorant_distances(results, subject_id=None):
    """
    Given the test results, returns a dictionary whose keys are odorant pairs
    and whose values are psychometric distances between those pairs,
    defined as the fraction of discriminations that were incorrect.
    This can be limited to one subject indicated by subject_id, or else
    by default it pools across all subjects.
    """
    distances = {}
    distance_n_subjects = {}
    for result in results:
        # BUG FIX: test against None explicitly; the old truthiness check
        # silently pooled ALL subjects when subject_id == 0.
        if subject_id is not None and result.subject_id != subject_id:
            continue
        pair = result.test.pair
        if pair not in distances:
            distances[pair] = 0
            distance_n_subjects[pair] = 0
        # Incorrect discriminations contribute 1; correct ones contribute 0.
        distances[pair] += 0.0 if result.correct else 1.0
        distance_n_subjects[pair] += 1
    for pair in distances:
        # Normalize by the number of results observed for this pair.
        distances[pair] /= distance_n_subjects[pair]
    return distances
def ROC(results, N):
    """
    Given test results and a number of components N, returns a distribution
    of the number of distinct components 'r' for correct trials (right) and
    incorrect trials (wrong), in tests using odorants with N total components.
    These can later be plotted or used to generated an ROC curve.
    """
    right, wrong = [], []
    for res in results:
        if res.test.N != N:
            continue
        # Route r into the correct-trial or incorrect-trial distribution.
        (right if res.correct else wrong).append(res.test.r)
    return (np.array(right), np.array(wrong))
def correct_matrix(results, N, overlap):
    """
    Given test results, a number of components N, and a level of overlap
    between odorants, returns a num_subjects by num_test matrix of booleans
    corresponding to the correctness of that subject's response on that test.
    Either filter may be disabled by passing None.
    """
    kept = [
        res
        for res in results
        if (N is None or res.test.N == N) and (overlap is None or res.test.overlap() == overlap)
    ]
    subjects = list({res.subject_id for res in kept})
    tests = list({res.test for res in kept})
    # Start at -1 so any cell left unset is detectable later.
    correct = np.full((len(subjects), len(tests)), -1.0)
    for res in kept:
        row = subjects.index(res.subject_id)
        col = tests.index(res.test)
        correct[row, col] = res.correct
    return correct, subjects, tests
|
{"/pyrfume/unit_test/test_config_data.py": ["/pyrfume/__init__.py", "/pyrfume/base.py"], "/pyrfume/__init__.py": ["/pyrfume/base.py"]}
|
36,694
|
AllenKd/job_automation
|
refs/heads/develop
|
/video_rearranger/video_rearranger.py
|
import os
import shutil
from hurry.filesize import size
from config.logger import get_logger
from util.util import Util
class VideoRearranger(object):
    """Flatten a video directory: move the largest file out of each
    sub-folder, then delete the emptied sub-folders.

    Sub-folders whose largest file has an unexpected extension are recorded
    in `ignore_folder` and preserved by clean_folder().
    """

    def __init__(self, path):
        self.logger = get_logger(self.__class__.__name__)
        self.config = Util().get_config()
        self.video_folder_path = path
        # Names of sub-folders to skip during cleanup.
        self.ignore_folder = []
        self.logger.info('video rearranger initialized, video path: {}'.format(self.video_folder_path))

    def start(self):
        """Run one rearrange pass over every sub-folder, then clean up."""
        self.logger.info('start rearrange')
        for folder_name in os.listdir(self.video_folder_path):
            if not os.path.isdir('{}/{}'.format(self.video_folder_path, folder_name)):
                self.logger.debug('{} is not folder, skip'.format(folder_name))
                continue
            largest_file = self.get_largest_file(folder_name)
            if not self.is_allowed_extension(largest_file):
                # BUG FIX: the message was logged without .format(), and the
                # FILE PATH (not the folder name) was appended, so the
                # membership test in clean_folder() never matched and
                # "ignored" folders were deleted anyway.
                self.logger.warning('{} not in expected file extension, add to ignore folder'.format(folder_name))
                self.ignore_folder.append(folder_name)
                continue
            # Reuse the file found above instead of scanning the folder again.
            self.move_out(largest_file)
        self.clean_folder()
        self.logger.info('finished video rearrange, ignore folder: {}'.format(self.ignore_folder))

    def get_largest_file(self, folder_name):
        """Return the path of the largest file inside the given sub-folder."""
        self.logger.info('start get largest file')
        folder_path = '{}/{}'.format(self.video_folder_path, folder_name)
        largest_file = ""
        largest_size = 0
        for entry in os.listdir(folder_path):
            file_abs_path = '{}/{}'.format(folder_path, entry)
            entry_size = os.path.getsize(file_abs_path)
            if entry_size > largest_size:
                largest_file = file_abs_path
                largest_size = entry_size
        self.logger.info('largest file: {}, size: {}'.format(os.path.basename(largest_file), size(largest_size)))
        return largest_file

    def is_allowed_extension(self, file_abs_path):
        """True if the file's extension is listed in the configured expected_extension."""
        return os.path.splitext(file_abs_path)[-1] in self.config['video_rearranger']['expected_extension']

    def move_out(self, file_path):
        """Move the file up into the top-level video folder."""
        self.logger.info('start move file out: {}'.format(file_path))
        os.rename(file_path, '{}/{}'.format(self.video_folder_path, os.path.basename(file_path)))

    def clean_folder(self):
        """Delete every sub-folder except those named in ignore_folder."""
        self.logger.info('start clean folder')
        for folder_name in os.listdir(self.video_folder_path):
            if os.path.isdir(
                    '{}/{}'.format(self.video_folder_path, folder_name)) and folder_name not in self.ignore_folder:
                self.logger.debug('remove folder: {}'.format(folder_name))
                shutil.rmtree('{}/{}'.format(self.video_folder_path, folder_name))
|
{"/video_rearranger/video_rearranger.py": ["/config/logger.py", "/util/util.py"], "/main.py": ["/util/util.py", "/video_rearranger/video_rearranger.py"], "/util/util.py": ["/config/logger.py"]}
|
36,695
|
AllenKd/job_automation
|
refs/heads/develop
|
/main.py
|
import os
import click
from util.util import Util
from video_rearranger.video_rearranger import VideoRearranger
# Root click command group; subcommands are registered under the main guard.
# (No docstring: click would surface it as help text, changing CLI output.)
@click.group()
def cli():
    pass
@click.command('video_rearrange', help='Move out the largest file from each sub-folder on given folder.')
@click.option('--path', '-p',
              # Default resolution order: VIDEO_PATH env var, else the
              # configured target path from configuration.yaml.
              default=os.environ['VIDEO_PATH'] if 'VIDEO_PATH' in os.environ else
              Util().get_config()['video_rearranger']['target_path'],
              help='Specify video folder path', show_default=True)
def video_rearrange(path):
    # Sync the config file with environment variables before running.
    Util().load_environment_variable()
    VideoRearranger(path).start()
if __name__ == '__main__':
    # Register the subcommand and dispatch to click.
    cli.add_command(video_rearrange)
    cli()
|
{"/video_rearranger/video_rearranger.py": ["/config/logger.py", "/util/util.py"], "/main.py": ["/util/util.py", "/video_rearranger/video_rearranger.py"], "/util/util.py": ["/config/logger.py"]}
|
36,696
|
AllenKd/job_automation
|
refs/heads/develop
|
/config/logger.py
|
import datetime
import logging
import os
import yaml
# Module-level flag: logging is configured at most once per process.
init = False


def get_logger(name):
    """Return a named logger, configuring logging on the first call.

    First call creates the log/ directory if needed, reads the log level from
    config/configuration.yaml, installs a date-stamped file handler via
    basicConfig, and adds an INFO-level console handler to the root logger.
    """
    global init
    if not init:
        if not os.path.exists('log'):
            os.mkdir('log')
        with open('config/configuration.yaml', 'r') as config:
            level = logging.getLevelName(yaml.load(config, Loader=yaml.FullLoader)['logging']['level'])
        logging.basicConfig(level=level,
                            format='%(asctime)s %(filename)s %(lineno)d: %(levelname)s %(message)s',
                            datefmt='%y-%m-%d %H:%M:%S',
                            filename='log/{:%Y-%m-%d}.log'.format(datetime.datetime.now()))
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s %(filename)s %(lineno)d: %(levelname)s %(message)s')
        console.setFormatter(formatter)
        logging.getLogger().addHandler(console)
        init = True
    logger = logging.getLogger(name)
    return logger
|
{"/video_rearranger/video_rearranger.py": ["/config/logger.py", "/util/util.py"], "/main.py": ["/util/util.py", "/video_rearranger/video_rearranger.py"], "/util/util.py": ["/config/logger.py"]}
|
36,697
|
AllenKd/job_automation
|
refs/heads/develop
|
/util/util.py
|
import os
import yaml
from config.logger import get_logger
class Util(object):
    """Helper around the project's YAML configuration file."""

    def __init__(self):
        self.logger = get_logger(self.__class__.__name__)
        with open('config/configuration.yaml') as config:
            self.config = yaml.load(config, Loader=yaml.FullLoader)

    def load_environment_variable(self):
        """Overwrite config-file values from environment variables.

        Currently only TARGET_PATH is honored; when set it replaces
        video_rearranger.target_path and the file is rewritten.
        """
        self.logger.info('start load environment variables and overwrite config file')
        with open('config/configuration.yaml') as config:
            config = yaml.load(config, Loader=yaml.FullLoader)
        if os.environ.get('TARGET_PATH'):
            self.logger.debug(
                'overwrite target path from {} to {}'.format(config['video_rearranger']['target_path'],
                                                             os.environ.get('TARGET_PATH')))
            config['video_rearranger']['target_path'] = os.environ.get('TARGET_PATH')
        # overwrite config by environment variable
        # BUG FIX: previously wrote to 'configuration.yml' (wrong extension),
        # so the updated values were never read back by any consumer of
        # 'configuration.yaml'. Write back to the same file that was read.
        with open('config/configuration.yaml', 'w') as new_config:
            yaml.dump(config, new_config)
        self.logger.debug('finish update config file')

    def get_config(self):
        """Return the configuration dict loaded at construction time."""
        self.logger.info('getting config')
        return self.config
|
{"/video_rearranger/video_rearranger.py": ["/config/logger.py", "/util/util.py"], "/main.py": ["/util/util.py", "/video_rearranger/video_rearranger.py"], "/util/util.py": ["/config/logger.py"]}
|
36,698
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/idiomatic/__main__.py
|
import argparse
from os import system
from os import close
from sys import stdout
from bloom_core import fullparse
if __name__ == "__main__":
    # CLI entry point: parse a Bloom DSL spec and emit Fluent C++ code.
    parser = argparse.ArgumentParser("Generate Fluent C++ code from Bloom DSL spec.")
    parser.add_argument('spec',
                        help='path to the Bloom DSL spec file')
    parser.add_argument('-o', '--out',
                        help='output C++ file')
    args = parser.parse_args()
    result = fullparse(args.spec)
    # BUG FIX: use `is None` for the None comparison, and never close
    # sys.stdout (the old code called .close() on whichever stream it used).
    if args.out is None:
        stdout.write(result)
    else:
        with open(args.out, "w") as code_fd:
            code_fd.write(result)
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,699
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/tests/test_chat.py
|
from context import idiomatic
def test_chat_nocrash():
    # Smoke test: both chat example specs must parse without raising.
    for spec_path in ("examples/chat/client.bl", "examples/chat/server.bl"):
        result = idiomatic.fullparse(spec_path)
    assert True
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,700
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/idiomatic/bloom_core.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Fluent code generator
XXX THIS CODE IS CURRENTLY UNSTABLE.
This module takes in Bloom-like DSL files and generates
fluent-compatible C++ implementations as header files to
be included in a C++ driver program.
The Bloom DSL file has the format:
<... FIX ME ...>
Example:
In this example, the target we build is a `.dylib`
file suitable for linking into Python. It contains code
for both a server and a client.
$ python fluentgen.py client.yml -o client.h
$ python fluentgen.py server.yml -o server.h
$ g++ -std=c++14 -Wall -c -g -I../fluent/src -I../fluent/build/Debug/vendor/googletest/src/googletest/googletest/include -I../fluent/build/Debug/vendor/range-v3/range-v3-prefix/src/range-v3/include -Wall -Wextra -Werror -pedantic -c fluentchat.cc
$ g++ -Wall -g -dynamiclib fluentchat.o -L../fluent/build/Debug/fluent -lfluent -lzmq -lglog -lfmt -o fluentchat.dylib
the context of fluentchat.cc is
Attributes:
This module has no module-level variables
Todo:
* test on more examples
* think about modularity (e.g. the `import` feature of Bud)
"""
from collections import defaultdict,OrderedDict
import pprint
from bloom import BloomParser
class BloomSemantics(object):
"""Tatsu semantics file for handling different tokens in the grammar
"""
boot_state = False
schema = {}
boot_rules = {}
rules = {}
tups = {}
tupbuf = []
varnum = 0
def start(self, ast):
"""start of the parser semantics
Args:
ast (str): Tatsu AST
Returns:
str: parsed output, namely C++ code
"""
# prologue
retval = self.fluent_prologue(ast.name, ast.args)
# schema
retval += '''
///////////////
// Bloom Schema
'''
retval += '\n'.join((' .' + l) for l in self.translate_schema(self.schema))
retval += '''
///////////////
;
'''
# constant tuples
for k in self.tups.keys():
# first the type
retval += ' using ' + k + '_tuple_t = std::tuple<'
retval += ', '.join(v for _,v in self.schema[k]['cols'].items())
retval += '>;\n'
# then the constant collection
retval += ' std::vector<' + k + '_tuple_t> ' + k + '_tuples = {\n'
retval += (';\n'.join(' std::make_tuple(' + ', '.join(a.strip() for a in tup) + ')' for tup in self.tups[k]))
retval += '\n };\n'
# bootstrap logic
retval += self.register_rules(True, ast.boot_logic)
# bloom logic
retval += self.register_rules(False, ast.logic)
# epilogue
retval += self.fluent_epilogue(ast.name)
return retval
def attrs(self, ast):
return ast
def set_boot(self, ast):
self.boot_state = True
return ast
def unset_boot(self, ast):
self.boot_state = False
return ast
def register_rules(self, bootp, rules):
"""generate the C++ to wrap the rule definitions
Args:
bootp (bool): whether these are bootstrap rules
rules (str): C++ translation of rules
Returns:
str: C++ code wrapping all the rules passed in
"""
if rules == None or len(rules) == 0:
return ''
bootstr = ("Bootstrap" if bootp else "")
retval = " auto bloom" + str(self.varnum) + " = std::move(bloom"
retval += str(self.varnum - 1) + ")\n"
self.varnum += 1
retval += " .Register" + bootstr + "Rules([&]("
retval += ", ".join(('auto& ' + k) for k in self.schema.keys())
retval += ") {\n"
retval += "\n".join(' (void)' + l + ';' for l in self.schema.keys())
retval += '''
using namespace fluent::infix;
//////////////
// Bloom ''' + bootstr + ''' Rules
'''
retval += rules
retval += ' return std::make_tuple('
if (bootp):
retval += ", ".join(self.boot_rules.keys()) + ');\n'
else:
retval += ", ".join(self.rules.keys()) + ');\n'
retval += ''' //////////////
})'''
retval += (";\n" if bootp else "\n")
return retval
def logic(self, ast):
"""convert the logic ast to a single string
Args:
ast (AST): Tatsu AST
Returns:
str: the logic ast as a string
"""
return ''.join(ast)
def stmt(self, ast):
"""wrap a statement as a string
Args:
ast (AST): the ast for the statement
Returns:
str: semicolon-terminated C++ statement
"""
if ast != '':
return ' ' + ast + ';\n'
else:
return ast
def ruledef(self, ast):
"""wrap a rule definition in C++ boilerplate
Args:
ast (AST): the ast for the statement
Returns:
str: properly-typed C++ assignment
"""
if (self.boot_state):
self.boot_rules[ast.var] = ast.rule
else:
self.rules[ast.var] = ast.rule
return "auto " + ast.var + " = " + ast.rule
def rule(self, ast):
"""wrap a rule in C++ boilerplate,
if the ast has no rhs, this is a rule injecting constant
tuples (e.g. in a Bootstrap).
Args:
ast (AST): the ast for the statement
Returns:
str: properly-wrapped C++ assignment
"""
if ast.rhs == None:
self.tups[ast.lhs] = self.tupbuf
self.tupbuf = []
rhs = 'lra::make_iterable(&' + ast.lhs + '_tuples)'
else:
rhs = ast.rhs
return ast.lhs + ' ' + ast.mtype + ' ' + rhs
def catalog_entry(self, ast):
"""handle a simple table/channel reference
as a special case, translate stdin to fluin, stdout to fluout
Args:
ast (AST): the ast for the statement
Returns:
str: the catalog entry as a string
"""
retval = (''.join(ast))
if (retval == 'stdin'):
return 'fluin'
elif (retval == 'stdout'):
return 'fluout'
else:
return retval
def rhs(self, ast):
"""wrap the rhs of a rule
Args:
ast (AST): the ast for the statement
Returns:
str: pipe-delimited fluent text
"""
retval = "("
if ast.anchor != None:
retval += ast.anchor
if ast.chain != None:
retval += ' | '
if ast.chain != None:
retval += ' | '.join(ast.chain)
if ast.tups != None:
self.tupbuf = ast.tups
return None
return retval + ")"
def op(self, ast):
"""wrap a fluent operator
Args:
ast (AST): the ast for the operator
Returns:
str: appropriately-wrapped C++
"""
retval = ast.opname
if ast.plist != None:
retval += "<" + ','.join(ast.plist) + ">"
retval += "("
if type(ast.op_args) == list:
retval += ', '.join(ast.op_args)
elif ast.op_args != None:
retval += '[&]'
retval += '(const '
retval += 'auto'
retval += '& ' + ast.op_args.argname + ')'
retval += ast.op_args.code.code
retval += ')'
return(retval)
def opname(self, ast):
"""wrap an opname
Args:
ast (AST): the ast for the statement
Returns:
str: properly wrapped opname
"""
return "lra::" + ast
def rhs_catalog_entry(self, ast):
return self.cwrap + "(&" + ast + ")"
def where(self, ast):
return "filter"
def cross(self, ast):
return "make_cross"
def now(self, ast):
return "<="
def next(self, ast):
return "+="
def async(self, ast):
return "<="
def delete(self, ast):
return "-="
def schemadef(self, ast):
"""wrap a schema definition.
As a special case, translate stdin/stdout to fluin/fluout
Args:
ast (AST): the ast for the statement
Returns:
nothing, but side effect: sets self.schema
"""
if ast.name == 'stdin':
self.schema['fluin'] = None;
elif ast.name == 'stdout':
self.schema['fluout'] = None;
else:
collection_type = ast.type
collection_name = ast.name
cols = { i.attrname: i.type for i in ast.cols}
self.schema[collection_name] = {
'type': collection_type,
'cols': cols
}
return ""
def fluent_prologue(self, name, arguments):
"""Generate C++ file preamble.
Args:
name (str): project name from the Bloom DSL name key
args (list): the Bloom DSL args key, used to pass in runtime configuration arguments
Returns:
str: C++ code for the top of the generated file
"""
retval = '''#ifndef ''' + str.upper(name) + '''_H_
#define ''' + str.upper(name) + '''_H_
#include <vector>
#include "zmq.hpp"
#include "common/status.h"
#include "common/mock_pickler.h"
#include "fluent/fluent_builder.h"
#include "fluent/fluent_executor.h"
#include "fluent/infix.h"
#include "lineagedb/connection_config.h"
#include "lineagedb/noop_client.h"
#include "lineagedb/to_sql.h"
#include "ra/logical/all.h"
#include "common/hash_util.h"
namespace lra = fluent::ra::logical;
struct ''' + name + '''Args {
'''
for i in arguments:
retval += " " + i.type + " " + i.attrname + ";\n"
retval += '''};
int ''' + name + '''Main(const ''' + name + '''Args& args) {
zmq::context_t context(1);
fluent::lineagedb::ConnectionConfig connection_config;
auto bloom''' + str(self.varnum)
self.varnum += 1
retval += '''= fluent::fluent<fluent::lineagedb::NoopClient,fluent::Hash,
fluent::lineagedb::ToSql,fluent::MockPickler,
std::chrono::system_clock>
("'''
retval += name + '''_" + std::to_string(rand()),
args.address, &context,
connection_config)
.ConsumeValueOrDie();
auto bloom''' + str(self.varnum);
retval += ''' = std::move(bloom''' + str(self.varnum - 1)
self.varnum += 1
retval += ")\n"
return retval
def fluent_epilogue(self, name):
    """Generate the C++ file postamble.

    Closes the builder chain, runs the executor, and closes the include
    guard. Reads self.varnum - 1 to reference the last `bloomN` variable
    emitted by fluent_prologue()/the rule translation.

    Args:
        name (str): project name from the Bloom DSL name key
    Returns:
        str: C++ code for the bottom of the generated file
    """
    retval = '''
      .ConsumeValueOrDie();
  fluent::Status status = std::move(bloom'''
    retval += str(self.varnum - 1) + ''').Run();
  CHECK_EQ(fluent::Status::OK, status);
  return 0;
}
#endif // ''' + str.upper(name) + '''_H_
'''
    return retval
def translate_schema(self, sdict):
    """Convert a Bloom DSL schema to a list of Fluent C++ collection definitions.

    Args:
        sdict (dict): Bloom DSL entries for collection definitions
    Returns:
        list of str: entries with one of the following forms:
            "stdin()"
            "stdout()"
            "template collection_type <type1,...>(collection_name, {{name1, ...}})"
    """
    result = []
    for name, defn in sdict.items():
        # we ignore the definition for stdin and stdout
        if name == 'fluin':
            result.append('stdin()')
        elif name == 'fluout':
            result.append('stdout()')
        else:
            cols = defn['cols']
            # FIX: the accumulator used to be named `str`, shadowing the
            # builtin; build the pieces with join instead.
            coltypes = ", ".join(cols.values())
            colnames = ", ".join('"' + col + '"' for col in cols)
            result.append(
                "template " + defn['type'] + "<" + coltypes + ">("
                + '"' + name + '", {{' + colnames + '}})')
    return result
def fullparse(specFile):
    """Convert a Bloom spec to a Fluent C++ header file.

    Args:
        specFile (str): path to the .yml file
    Returns:
        str: text of the generated C++ file
    """
    # FIX: the file handle was opened without ever being closed; the
    # context manager guarantees it is released.
    with open(specFile) as f:
        spec = f.read()
    sem = BloomSemantics()
    # cwrap is read by the semantics when wrapping collection references
    sem.cwrap = "lra::make_collection"
    parser = BloomParser()
    return parser.parse(spec, semantics=sem)
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,701
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/tests/test_sigmod.py
|
from context import idiomatic
def test_sigmod_nocrash():
    """Smoke test: parsing the SIGMOD'17 example specs must not raise."""
    for spec in ("examples/sigmod17/fig1.bl",
                 "examples/sigmod17/fig2.bl",
                 "examples/sigmod17/fig6.bl"):
        idiomatic.fullparse(spec)
    assert True
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,702
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/examples/chat/chat_server.py
|
#!/usr/bin/python
# Thin ctypes launcher: loads the compiled Fluent chat library and starts
# the chat server on the ZeroMQ address given on the command line.
from ctypes import cdll
import argparse
parser = argparse.ArgumentParser("Launch a Fluent chat server.")
parser.add_argument('address',
                    help='ZeroMQ address (e.g. tcp://0.0.0.0:8000)')
args = parser.parse_args()
# NOTE(review): .dylib is a macOS extension and must be on the loader path —
# confirm behaviour on other platforms.
chat_lib = cdll.LoadLibrary("fluentchat.dylib")
# server() presumably blocks while the chat server runs — verify in the C++ side
chatserver = chat_lib.server(args.address.encode("ascii"))
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,703
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/examples/chat/chat_client.py
|
#!/usr/bin/python
# Thin ctypes launcher for the Fluent chat client.
from ctypes import * #cdll
import argparse
parser = argparse.ArgumentParser("Launch a Fluent chat server.")
parser.add_argument('server_address',
                    help='ZeroMQ address of the server (e.g. tcp://0.0.0.0:8000)')
parser.add_argument('your_address',
                    help='ZeroMQ address for this client (e.g. tcp://0.0.0.0:8001)')
parser.add_argument('nickname',
                    help='your nickname')
args = parser.parse_args()
# NOTE(review): .dylib is a macOS extension — confirm on other platforms.
chat_lib = cdll.LoadLibrary("fluentchat.dylib")
# declare the C signature explicitly so ctypes does not guess argument types
chat_lib.client.argtypes = [c_char_p, c_char_p, c_char_p]
# argument order here is (server, nickname, own address) — matches argtypes above
chatclient = chat_lib.client(args.server_address.encode("ascii"),
                             args.nickname.encode("ascii"),
                             args.your_address.encode("ascii"))
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,704
|
arkadiusz-s/idiomatic
|
refs/heads/master
|
/idiomatic/__init__.py
|
from .bloom_core import fullparse
|
{"/idiomatic/__init__.py": ["/idiomatic/bloom_core.py"]}
|
36,705
|
AntoineDidisheim/car_accident
|
refs/heads/main
|
/data.py
|
import pandas as pd
import numpy as np
from parameters import *
import didipack as didi
class Data:
    """Loads the UK road-accident CSVs and prepares the combined data set."""

    def __init__(self, par: Params):
        # Project-wide parameter object (directories, model settings, ...).
        self.par = par

    def load_all(self, reload=False):
        """Return the combined accident DataFrame with a grid-region column 'r'.

        Args:
            reload: if True, rebuild from the yearly CSV files and cache the
                result to <data.dir>/all.p; otherwise read the cached pickle.
        Returns:
            pd.DataFrame: all years concatenated, plus the 'r' region label.
        """
        if not reload:
            return pd.read_pickle(self.par.data.dir + 'all.p')
        df = pd.concat([self.load_year(n) for n in (
            'accidents_2005_to_2007',
            'accidents_2009_to_2011',
            'accidents_2012_to_2014')])
        ##################
        # create regions
        ##################
        # Split the UK bounding box into an nb_square x nb_square grid and
        # label each accident with its cell; the cells act as prediction regions.
        df['r'] = 0
        nb_square = 10
        # FIX: the grid size was hard-coded as the literal 10 here instead of
        # nb_square, so changing nb_square would silently desynchronise.
        q_long = np.linspace(df['longitude'].min(), df['longitude'].max(), nb_square)
        q_lat = np.linspace(df['latitude'].min(), df['latitude'].max(), nb_square)
        long = df['r'].values.copy()
        lat = df['r'].values.copy()
        for i in range(nb_square):
            long[df['longitude'] > q_long[i]] = long[df['longitude'] > q_long[i]] + 1
            lat[df['latitude'] > q_lat[i]] = lat[df['latitude'] > q_lat[i]] + 1
        # unique region name "<longitude bucket>-<latitude bucket>"
        df['r'] = [str(lo) + '-' + str(la) for lo, la in zip(long, lat)]
        df.to_pickle(self.par.data.dir + 'all.p')
        return df

    def load_year(self, name):
        """Load one yearly CSV, normalise column names, add a yyyymm column,
        and drop columns that are never used downstream."""
        df = pd.read_csv(f'{self.par.data.dir}{name}.csv')
        df.columns = [x.lower() for x in df.columns]
        df['date'] = pd.to_datetime(df['date'])
        # yyyymm integer, convenient for grouping
        df['month'] = df['date'].dt.year * 100 + df['date'].dt.month
        col_to_del = ['location_easting_osgr', 'location_northing_osgr',
                      'lsoa_of_accident_location', 'junction_detail',
                      'junction_control', 'carriageway_hazards',
                      'special_conditions_at_site']
        df = df.drop(columns=col_to_del)
        return df
# NOTE(review): module-level leftover from an interactive/debugging session —
# it instantiates Data on import (and needs Params from `parameters`);
# consider removing it.
self = Data(Params())
# lsoa_of_accident_location
|
{"/data_exploration.py": ["/data.py", "/map.py"], "/test_performance.py": ["/data.py", "/map.py"], "/train_models.py": ["/data.py", "/map.py"]}
|
36,706
|
AntoineDidisheim/car_accident
|
refs/heads/main
|
/data_exploration.py
|
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import didipack as didi
import geopandas
from parameters import *
from data import *
from map import *
import os
##################
# set parameters
##################
par = Params()  # project-wide configuration (paths, model settings)
# Single switch point: show figures interactively, or just close them
# (controlled by Constant.SHOW_PLOT).
def plt_show():
    """Show or close the current matplotlib figure per Constant.SHOW_PLOT."""
    (plt.show if Constant.SHOW_PLOT else plt.close)()
##################
# create saving directory
##################
save_dir = par.model.res_dir + '/exploration/'
# exist_ok=True replaces the check-then-create pattern (race-free)
os.makedirs(save_dir, exist_ok=True)
##################
# load data
##################
# The cached pickle does not exist on the first run; rebuild from the raw
# CSVs in that case. FIX: this was a bare `except:`, which also swallowed
# genuine errors (KeyboardInterrupt included).
try:
    df = Data(par).load_all()
except FileNotFoundError:
    df = Data(par).load_all(True)
##################
# View map dispersion
##################
# NOTE(review): `map` shadows the builtin of the same name for the rest of
# the script — consider renaming.
map = MapPlotter(par)
# There are many accidents each day, so we arbitrarily pick one day to plot.
rnd_date = np.random.choice(df['date'])
t = df.loc[df['date'] == rnd_date, :]
map.plot(t)
# str(numpy datetime64) looks like 'YYYY-MM-DDTHH:MM:SS'; keep the date part
plt.title(f'Accident on {str(rnd_date).split("T")[0]}')
plt.tight_layout()
plt.savefig(save_dir + 'rnd_date_map.png')
plt_show()
##################
# number of casualities and severity
##################
# histogram of the number of casualties per accident
plt.hist(df['number_of_casualties'], bins=100)
plt_show()
# extreme accidents make the full histogram hard to read, so we also look at
# the distribution conditional on <= 10 casualties
plt.hist(df.loc[(df['number_of_casualties'] <= 10), 'number_of_casualties'], bins=12)
plt.savefig(save_dir + 'nb_casuality_hist.png')
plt_show()
# same on accident severity
plt.hist(df['accident_severity'], bins=10)
# FIX: this previously saved to 'nb_casuality_hist.png' as well, silently
# overwriting the casualty histogram above.
plt.savefig(save_dir + 'accident_severity_hist.png')
plt_show()
##################
# nb accident depending on conditions
##################
save_dir_cond = save_dir + 'cond/'
os.makedirs(save_dir_cond, exist_ok=True)
df['month_nb'] = df['date'].dt.month
df.head()  # NOTE(review): no-op leftover — result is discarded
# categorical / cyclical columns to plot accident counts against
col = ['day_of_week', 'road_type', 'pedestrian_crossing-human_control', 'pedestrian_crossing-physical_facilities',
       'light_conditions', 'weather_conditions', 'road_surface_conditions', 'did_police_officer_attend_scene_of_accident', 'month_nb',
       'speed_limit', 'urban_or_rural_area']
for c in col:
    t = df.groupby(c)['accident_index'].count()
    print(t)
    plt.bar(t.index, t.values)
    # Rotate long categorical labels. FIX: the bare `except:` is narrowed to
    # TypeError, which is what len() raises on a numeric index value.
    try:
        ml = np.max([len(x) for x in t.index])
    except TypeError:
        ml = 1
    if ml > 4:
        plt.xticks(rotation=90)
    plt.title(c)
    plt.tight_layout()
    plt.savefig(save_dir_cond + c + '.png')
    plt_show()
t = df.groupby('time')['accident_index'].count().plot()
plt.savefig(save_dir_cond + 'time.png')
plt_show()
##################
# time series
##################
save_dir_ts = save_dir + 'ts/'
if not os.path.exists(save_dir_ts):
    os.makedirs(save_dir_ts)
# 252-day rolling mean of the daily accident count
# NOTE(review): 252 is a trading-year window; for calendar data 365 may have
# been intended — confirm.
df.groupby('date')['accident_index'].count().rolling(252).mean().dropna().plot()
plt.savefig(save_dir_ts+'nb_accident.png')
plt_show()
# same rolling series, one figure per accident severity level
for s in df['accident_severity'].unique():
    df.loc[df['accident_severity']==s,:].groupby('date')['accident_index'].count().rolling(252).mean().dropna().plot()
    plt.savefig(save_dir_ts+f'nb_accident_severity{s}.png')
    plt_show()
##################
# illustrate the region
##################
# scatter a sample of points from each grid region (auto-coloured) on one map
r = df['r'].unique()[0]
ax = map.plot(gdf=df.loc[df['r']==r,:].head(100),color_=None,legend_=r)
for r in df['r'].unique()[1:]:
    ax = map.plot(gdf=df.loc[df['r'] == r, :].head(10000),color_=None, legend_=r,ax=ax)
plt.savefig(save_dir+f'region_illustration.png')
plt_show()
|
{"/data_exploration.py": ["/data.py", "/map.py"], "/test_performance.py": ["/data.py", "/map.py"], "/train_models.py": ["/data.py", "/map.py"]}
|
36,707
|
AntoineDidisheim/car_accident
|
refs/heads/main
|
/test_performance.py
|
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import didipack as didi
import geopandas
from parameters import *
from data import *
from map import *
import os
from sklearn.ensemble import RandomForestRegressor as RF
from sklearn.linear_model import LinearRegression as OLS
from sklearn.linear_model import Lasso
from sklearn.metrics import r2_score
##################
# set parameters
##################
par = Params()
# build the run-specific model name used for the results directory
par.update_model_name()
##################
# create save directory
##################
save_dir = par.model.res_dir + f'/{par.name}/'
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
# Single switch point: show figures interactively, or just close them
# (controlled by Constant.SHOW_PLOT).
def plt_show():
    """Show or close the current matplotlib figure per Constant.SHOW_PLOT."""
    (plt.show if Constant.SHOW_PLOT else plt.close)()
# load the cached per-(region, day) predictions — presumably written by
# train_models.py's final to_pickle(save_dir + 'all.p'); verify the model
# name in `par` matches the trained run
df = pd.read_pickle(save_dir + '/all.p')
def r2(c, df_):
    """R^2 of prediction column *c* against the target column 'y'."""
    resid = df_['y'] - df_[c]
    centered = df_['y'] - df_['y'].mean()
    return 1 - (resid ** 2).sum() / (centered ** 2).sum()

def r2_all(df_):
    """R^2 of every model's prediction column, as a labelled Series."""
    columns = {'RF': 'pred_rf', 'OLS': 'pred_ols',
               'Lasso': 'pred_lasso', 'Benchmark': 'bench'}
    return pd.Series({label: r2(col, df_) for label, col in columns.items()})
print('Overall R^2')
print(r2_all(df))
r2_all(df).to_csv(save_dir+'overall_r2.csv')
# We see that: a) OLS overfits, b) the Lasso does not even match the simple
# benchmark's performance, c) the random forest outperforms the benchmark out of sample.
##################
# Computing performance through time
##################
res = []
# rolling 12-month windows: one R^2 per window, indexed by the window's end date
for date in df[['date']].drop_duplicates().iloc[:,0]:
    date_end = date +pd.DateOffset(months=12)
    ind = (df['date']>=date) & (df['date']<date_end)
    # we don't compute the R^2 if there are not enough dates, as data is missing in 2008
    if (date_end <= df['date'].max()) & (len(df.loc[ind,'date'].unique())>=250):
        r=r2_all(df.loc[ind,:])
        r.name = date_end
        res.append(r)
res=pd.DataFrame(res)
res.drop(columns='OLS').plot()
plt.ylabel(r'out-of-sample $R^2$')
plt.savefig(save_dir+'r2_across_time.png')
plt_show()
# The performance of all models takes a hit in 2013. Exploratory analysis showed
# that in 2013 the trend changed and, for the first time in years, the number of
# accidents increased by a lot. To see whether the RF was more surprised than
# the benchmark, we plot the relative performance.
res['d'] = res['RF']-res['Benchmark']
res['d'].plot()
plt.ylabel(r'out-of-sample $R^2_{RF}-R^2_{Benchmark}$')
plt.savefig(save_dir+'r2_diff_across_time.png')
plt_show()
# While the model still outperforms the benchmark in 2013, the relative
# performance dropped: unsurprisingly, this "black swan" affected the more
# complex model more than the simple historical mean.
##################
# computing the performance per region
##################
t=df.groupby('r').apply(lambda x: r2_all(x)).dropna().sort_values('RF').reset_index(drop=True)
plt.scatter(t['Benchmark'],t['RF'],marker='+',color='k')
plt.hlines(0.0, t['Benchmark'].min(),t['Benchmark'].max(),colors='r')
plt.xlabel(r'Benchmark $R^2$')
plt.ylabel(r'Random Forest $R^2$')
plt.savefig(save_dir+'r2_regions_benchmark_v_RF.png')
plt_show()
# This plot shows the RF has some blind spots: for a few specific regions its
# performance is much worse than the benchmark's.
##################
# understanding the poor region performance
##################
# we merge the per-region performance with the average number of accidents per region
t=df.groupby('r').apply(lambda x: r2_all(x)).dropna().sort_values('RF').reset_index()
t=t.merge(df.groupby('r')['y'].mean().reset_index())
plt.scatter(t['y'],t['RF'],marker='+',color='k')
plt.hlines(0.0, t['y'].min(),t['y'].max(),colors='r')
plt.xlabel(r'average #accident per region')
plt.ylabel(r'Random Forest $R^2$')
plt.savefig(save_dir+'r2_region_per_accident_size.png')
plt_show()
# The "poor" RF performance is entirely concentrated in regions with a small
# number of accidents.
|
{"/data_exploration.py": ["/data.py", "/map.py"], "/test_performance.py": ["/data.py", "/map.py"], "/train_models.py": ["/data.py", "/map.py"]}
|
36,708
|
AntoineDidisheim/car_accident
|
refs/heads/main
|
/map.py
|
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import didipack as didi
import geopandas
from parameters import *
class MapPlotter:
    """Plots accident points on an outline map of the United Kingdom."""

    def __init__(self, par: Params):
        # NOTE(review): geopandas.datasets is deprecated/removed in recent
        # geopandas releases — confirm the installed version still ships
        # 'naturalearth_lowres'.
        self.world = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'))
        self.world = self.world[self.world['name'] == 'United Kingdom']
        self.par = par

    def plot(self, gdf, color_='blue', marker_='+', legend_='', ax=None):
        """Scatter *gdf* on the UK map and return the matplotlib axis.

        Args:
            gdf: GeoDataFrame, or a plain DataFrame with longitude/latitude
                columns (converted automatically).
            color_: point colour; None lets matplotlib cycle colours.
            marker_: matplotlib marker.
            legend_: label for the plotted layer.
            ax: existing axis to draw onto; a fresh UK outline when None.
        """
        # FIX: was `not type(gdf) == geopandas.geodataframe.GeoDataFrame`;
        # isinstance is the idiomatic (and subclass-safe) check.
        if not isinstance(gdf, geopandas.GeoDataFrame):
            gdf = self.to_geopandas(gdf)
        if ax is None:
            ax = self.world.plot(color='white', edgecolor='black')
        if color_ is not None:
            gdf.plot(ax=ax, color=color_, marker=marker_, label=legend_)
        else:
            gdf.plot(ax=ax, marker=marker_, label=legend_)
        return ax

    def to_geopandas(self, df_):
        """Convert a DataFrame with longitude/latitude columns to a GeoDataFrame."""
        return geopandas.GeoDataFrame(
            df_, geometry=geopandas.points_from_xy(df_['longitude'], df_['latitude']))
|
{"/data_exploration.py": ["/data.py", "/map.py"], "/test_performance.py": ["/data.py", "/map.py"], "/train_models.py": ["/data.py", "/map.py"]}
|
36,709
|
AntoineDidisheim/car_accident
|
refs/heads/main
|
/train_models.py
|
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import didipack as didi
import geopandas
from parameters import *
from data import *
from map import *
import os
from sklearn.ensemble import RandomForestRegressor as RF
from sklearn.linear_model import LinearRegression as OLS
from sklearn.linear_model import Lasso
from sklearn.metrics import r2_score
##################
# set parameters
##################
par = Params()
# build the run-specific model name used for the results directory
par.update_model_name()
# Single switch point: show figures interactively, or just close them
# (controlled by Constant.SHOW_PLOT).
def plt_show():
    """Show or close the current matplotlib figure per Constant.SHOW_PLOT."""
    (plt.show if Constant.SHOW_PLOT else plt.close)()
##################
# create saving directory
##################
save_dir = par.model.res_dir + f'/{par.name}/'
# exist_ok=True replaces the check-then-create pattern (race-free)
os.makedirs(save_dir, exist_ok=True)
##################
# load data
##################
# The cached pickle does not exist on the first run; rebuild from the raw
# CSVs in that case. FIX: this was a bare `except:`, which also swallowed
# genuine errors.
try:
    df = Data(par).load_all()
except FileNotFoundError:
    df = Data(par).load_all(True)
##################
# create a matrix of predictors and target
##################
# One-hot encode weather, road surface, month, day of week and region, then
# average per (region, day) so each row is a region-day with dummy shares.
# NOTE(review): this assumes the category labels are distinct across these
# source columns — a label shared by two columns would make their dummy
# columns collide; confirm against the data.
final = df.loc[:, ['r', 'date']]
for c in ['weather_conditions','road_surface_conditions','month','day_of_week','r']:
    for u in df[c].unique():
        final[u] = (df[c] == u) * 1
final=final.groupby(['r', 'date']).mean().reset_index()
# target variable y: total number of accidents per region per day
final=final.merge(df.groupby(['r', 'date'])['accident_index'].count().reset_index().rename(columns={'accident_index': 'y'}))
##################
# expanding window cross-validation procedure
##################
# create time chunks of 6 months, encoded as year*10 + half-year flag
final['T']= (final['date'].dt.month<=6)*1+final['date'].dt.year*10
T = final['T'].sort_values().unique()
rf_models = []  # NOTE(review): never used below — leftover?
dash = '-' * 150
# header / row format strings: one 10-char label + 8 right-aligned columns
ft = '{:<10s}{:>14s}{:>14s}{:>14s}{:>14s}{:>14s}{:>14s}{:>14s}{:>14s}'
f = '{:<10s}{:>14f}{:>14f}{:>14f}{:>14f}{:>14f}{:>14f}{:>14f}{:>14f}'
print(dash)
# FIX: 'Time\R^2' contained the invalid escape sequence \R (SyntaxWarning on
# modern Python); '\\R' produces the identical printed text.
print(ft.format('Time\\R^2', 'RF is', 'RF oos', 'OLS is', 'OLS oos', 'LASSO is', 'LASSO oos',
                'Bench is', 'Bench oos'))
print(dash)
res = []
# Expanding-window walk-forward validation: for each 6-month chunk T[t],
# train on every chunk strictly before it and predict on T[t].
for t in range(1,len(T)):
    # create train and test sample (and split it in x,y)
    train=final.loc[final['T']<=T[t-1],:]
    test=final.loc[final['T']==T[t],:]
    train_x = train.drop(columns=['y','date','r','T']).values
    train_y = train[['y']].values.flatten()
    test_res = test[['date','r','y','T']].copy()
    test_x = test.drop(columns=['y','date','r','T']).values
    test_y = test[['y']].values.flatten()
    # --- random forest ---
    m=RF(max_depth=par.model.rf_max_depth,n_jobs=-1)
    m.fit(train_x,train_y)
    test_res['pred_rf']=m.predict(test_x)
    is_rf = m.score(train_x,train_y) # we compute both the in- and out-of-sample R^2 of each model to get a feel for the performance during training
    oos_rf = np.clip(m.score(test_x,test_y),-1,1) # we clip the R^2 at -1 to avoid printing very large negative numbers
    # --- ordinary least squares ---
    m=OLS(fit_intercept=True)
    m.fit(train_x,train_y)
    test_res['pred_ols']=m.predict(test_x)
    is_ols = m.score(train_x,train_y)
    oos_ols = np.clip(m.score(test_x,test_y),-1,1)
    # --- Lasso ---
    m=Lasso(fit_intercept=True,alpha=par.model.lasso_penalization)
    m.fit(train_x,train_y)
    test_res['pred_lasso']=m.predict(test_x)
    is_lasso = m.score(train_x,train_y)
    oos_lasso = np.clip(m.score(test_x,test_y),-1,1)
    ##################
    # compute benchmark performance
    ##################
    # we predict the per-region mean of the training sample and use it as a
    # prediction in both the training and the test sample
    temp = train.groupby('r')['y'].mean().reset_index().rename(columns={'y':'bench'})
    test_res = test_res.merge(temp)
    temp = train.merge(temp)
    is_benchmark = r2_score(temp['y'],temp['bench'])
    oos_benchmark = np.clip(r2_score(test_res['y'],test_res['bench']), -1, 1)
    print(f.format(str(T[t]), is_rf, oos_rf, is_ols, oos_ols, is_lasso, oos_lasso, is_benchmark,oos_benchmark))
    # save the results
    res.append(test_res)
# save the vector containing all predictions (consumed by test_performance.py)
pd.concat(res).to_pickle(save_dir+'all.p')
|
{"/data_exploration.py": ["/data.py", "/map.py"], "/test_performance.py": ["/data.py", "/map.py"], "/train_models.py": ["/data.py", "/map.py"]}
|
36,710
|
shamicker/Movie_trailer_website
|
refs/heads/master
|
/fresh_tomatoes.py
|
import webbrowser
import os
import re
# Styles and scripting for the page
main_page_head = '''
<head>
<meta charset="utf-8">
<title>Shauna's Faves</title>
<!-- Bootstrap 3 -->
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap-theme.min.css">
<script src="https://code.jquery.com/jquery-3.1.1.slim.min.js" integrity="sha256-/SIrNqv8h6QGKDuNoLGA4iret+kyesCkHGzVUUV0shc=" crossorigin="anonymous"></script>
<script src="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/js/bootstrap.min.js"></script>
<script src="https://use.fontawesome.com/35542eb49d.js"></script>
<style type="text/css" media="screen">
body {
padding-top: 80px;
}
#trailer .modal-dialog {
margin-top: 200px;
width: 640px;
height: 480px;
}
.hanging-close {
position: absolute;
top: -12px;
right: -12px;
z-index: 9001;
color: ghostwhite;
width: 24px;
height: 24px;
}
.fa {
font: normal normal normal 24px/1 FontAwesome;
font-size: none;
width: 100%;
height: 100%;
}
.hanging-close:hover,
.fa:hover {
color: ghostwhite;
}
#trailer-video {
width: 100%;
height: 100%;
text-align: center;
}
.video-tile {
margin-bottom: 20px;
padding-top: 20px;
display: block !important;
}
.video-tile:hover {
background-color: #EEE;
cursor: pointer;
}
.scale-media {
padding-bottom: 56.25%;
position: relative;
}
.scale-media iframe {
border: none;
height: 100%;
position: absolute;
width: 100%;
left: 0;
top: 0;
background-color: white;
}
.details {
height: 342px;
width: 220px;
position: absolute;
top: 20px;
left: 0;
right: 0;
margin: auto;
opacity: 0;
text-align: center;
}
.video-tile:hover .details {
opacity: 1;
}
.video-tile:hover .image {
opacity: .1;
}
.info {
left: 0;
right: 0;
margin: 5px;
}
.genres {
position: absolute;
top: 10px;
}
.synopsis {
position: absolute;
top: 120px;
}
.rating {
position: absolute;
top: 300px;
}
.seasons {
position: absolute;
top: 280px;
}
.nav-tabs {
margin-left: 10%;
width: 80%;
border-bottom: 1px solid orange;
padding: 5px 10px 0;
}
.nav-tabs > li > a {
color: orange;
font-size: 18px;
font-weight: bold;
border-radius: 10px 10px 0 0;
}
.nav-tabs > li.active > a,
.nav-tabs > li.active > a:hover,
.nav-tabs > li.active > a:focus {
color: orange;
background: transparent;
border-top: 1px solid orange;
border-left: 1px solid orange;
border-right: 1px solid orange;
border-bottom: 1px solid black;
}
a:focus {
outline: none;
}
</style>
<script type="text/javascript" charset="utf-8">
// Pause the video when the modal is closed
$(document).on('click', '.hanging-close, .modal-backdrop, .modal', function (event) {
// Remove the src so the player itself gets removed, as this is the only
// reliable way to ensure the video stops playing in IE
$("#trailer-video-container").empty();
});
// Start playing the video whenever the trailer modal is opened
$(document).on('click', '.video-tile', function (event) {
var trailerYouTubeId = $(this).attr('data-trailer-youtube-id');
var sourceUrl = 'https://www.youtube.com/embed/' + trailerYouTubeId + '?autoplay=1&html5=1';
$("#trailer-video-container").empty().append($("<iframe></iframe>", {
'id': 'trailer-video',
'type': 'text-html',
'src': sourceUrl,
'frameborder': 0
}));
});
// Animate in the videos when the page loads
$(document).ready(function () {
$('.video-tile').hide().first().show("fast", function showNext() {
$(this).next("div").show("fast", showNext);
});
});
</script>
</head>
'''
# The main page layout and title bar
main_page_content = '''
<!DOCTYPE html>
<html lang="en">
<body>
<!-- Trailer Video Modal -->
<div class="modal" id="trailer">
<div class="modal-dialog">
<div class="modal-content">
<a href="#" class="hanging-close" data-dismiss="modal" aria-hidden="true">
<i class="fa fa-times-circle" aria-hidden="true"></i>
</a>
<div class="scale-media" id="trailer-video-container">
</div>
</div>
</div>
</div>
<!-- Main Page Content -->
<div class="container tab-content">
<!-- Nav Tabs -->
<div class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<ul class="navbar-header nav nav-tabs">
<li class="navbar-right">
<a class="navbar-brand" href="#">Fresh Tomatoes Movie Trailers</a>
</li>
<li class="active">
<a href="#movies" data-toggle="tab">Some Fave Movies</a>
</li>
<li class="">
<a href="#tvshows" data-toggle="tab">Some Fave TV</a>
</li>
</ul>
</div>
<!-- Tab Contents -->
<div id="movies" class="container active tab-pane">
{movie_tiles}
</div>
<div id="tvshows" class="container tab-pane">
{tv_tiles}
</div>
</div>
</body>
</html>
'''
# A single movie entry html template
movie_tile_content = '''
<div class="col-md-6 col-lg-4 video-tile text-center" data-trailer-youtube-id="{trailer_youtube_id}" data-toggle="modal" data-target="#trailer">
<img class="tile image" src="{poster_image_url}" width="220" height="342">
<div class="tile details">
<h4 class="genres info">Genre:<br>{genre}</h4>
<h4 class="info synopsis">{general_synopsis}</h4>
<h4 class="info rating">MPAA Rating: {mpaa_rating}</h4>
</div>
<h2>{video_title}</h2>
</div>
'''
# A single tv-show entry html template
tv_tile_content = '''
<div class="col-md-6 col-lg-4 video-tile text-center" data-trailer-youtube-id="{trailer_youtube_id}" data-toggle="modal" data-target="#trailer">
<img class="tile image" src="{poster_image_url}" width="220" height="342">
<div class="tile details">
<h4 class="genres info">Labels:<br>{genre}</h4>
<h4 class="info synopsis">{general_synopsis}</h4>
<h4 class="info seasons">{seasons_and_episodes}</h4>
</div>
<h2>{video_title}</h2>
</div>
'''
def create_video_tiles_content(movie_and_tv_list):
    """Return the concatenated HTML tiles for a list of movies and TV shows.

    TV objects are recognised by having a `season_episode` attribute; every
    other object is rendered with the movie template (and must expose
    `mpaa_rating`).
    """
    content = ''
    for video in movie_and_tv_list:
        # Extract the YouTube ID from either a "watch?v=" or a "youtu.be/" URL.
        youtube_id_match = (re.search(r'(?<=v=)[^&#]+', video.trailer_youtube_url)
                            or re.search(r'(?<=be/)[^&#]+', video.trailer_youtube_url))
        trailer_youtube_id = youtube_id_match.group(0) if youtube_id_match else None
        # FIX: was a try/except AttributeError with an unused binding (`tv`);
        # hasattr() expresses the type dispatch directly.
        if hasattr(video, 'season_episode'):
            content += tv_tile_content.format(
                trailer_youtube_id=trailer_youtube_id,
                video_title=video.title,
                general_synopsis=video.general_synopsis,
                poster_image_url=video.poster_image_url,
                genre=video.genre,
                seasons_and_episodes=video.season_episode
            )
        else:
            content += movie_tile_content.format(
                trailer_youtube_id=trailer_youtube_id,
                video_title=video.title,
                general_synopsis=video.general_synopsis,
                poster_image_url=video.poster_image_url,
                genre=video.genre,
                mpaa_rating=video.mpaa_rating
            )
    return content
def open_videos_page(movies, tv_shows):
    """Render the page to index.html and open it in the default browser.

    Args:
        movies: iterable of Movie-like objects.
        tv_shows: iterable of TV-like objects.
    """
    # Replace the tile placeholders with the dynamically generated content.
    rendered_content = main_page_content.format(
        movie_tiles=create_video_tiles_content(movies),
        tv_tiles=create_video_tiles_content(tv_shows))
    # FIX: use a context manager so the file is closed even if the write
    # raises (it was opened/closed manually before).
    with open('index.html', 'w') as output_file:
        output_file.write(main_page_head + rendered_content)
    # Open the output file in the browser; new=2 requests a new tab if possible.
    url = os.path.abspath(output_file.name)
    webbrowser.open('file://' + url, new=2)
|
{"/entertainment_center.py": ["/fresh_tomatoes.py", "/media.py"]}
|
36,711
|
shamicker/Movie_trailer_website
|
refs/heads/master
|
/media.py
|
import webbrowser
class Video:
    """Base record for a single video entry.

    Attributes (all strings):
        title: the video's title
        general_synopsis: a one-line synopsis of the video
        poster_image_url: the url of a poster image
        trailer_youtube_url: the url of a trailer
        genre: the genre description(s)
    """

    def __init__(self, title, general_synopsis, general_image_url, general_trailer_url, genre):
        self.title = title
        self.genre = genre
        self.general_synopsis = general_synopsis
        # stored under different names than the constructor parameters
        self.poster_image_url = general_image_url
        self.trailer_youtube_url = general_trailer_url

    def show_trailer(self):
        """Open the trailer URL in a new browser tab."""
        webbrowser.open(self.trailer_youtube_url)
class Movie(Video):
    """A movie: everything in Video plus an MPAA rating.

    Additional attributes:
        mpaa_rating: MPAA rating string (e.g. "PG-13")
    """

    def __init__(self, title, general_synopsis, general_image_url, general_trailer_url, genre, mpaa_rating):
        super().__init__(title, general_synopsis, general_image_url, general_trailer_url, genre)
        self.mpaa_rating = mpaa_rating
class TV(Video):
    """A TV series: everything in Video plus a seasons/episodes summary.

    Additional attributes:
        season_episode: total number of episodes in total number of seasons.
    """

    def __init__(self, title, general_synopsis, general_image_url, general_trailer_url, genre, seasons_and_episodes):
        super().__init__(title, general_synopsis, general_image_url, general_trailer_url, genre)
        self.season_episode = seasons_and_episodes
|
{"/entertainment_center.py": ["/fresh_tomatoes.py", "/media.py"]}
|
36,712
|
shamicker/Movie_trailer_website
|
refs/heads/master
|
/entertainment_center.py
|
import fresh_tomatoes
import media
''' Order of video info:
Title,
A very brief synopsis,
Poster image,
Trailer url,
Genre(s),
MPAA rating if a movie, or number of episodes/seasons if a tv show
'''
# TODO: optimize images
# --- movies ---------------------------------------------------------------
lola = media.Movie(
    "Run Lola Run",
    "Lola tries to beat the clock to help her boyfriend, while chance and choices affect the outcome.",
    "images/run_lola_run.jpg",
    "https://youtu.be/3ea0mG4ahRk",
    "action",
    "R"
)
galaxy_quest = media.Movie(
    "Galaxy Quest",
    "The alumni cast of a TV series have to play their roles as the real thing when an alien race needs their help.",
    "images/galaxy_quest.jpg",
    "https://www.youtube.com/watch?v=B34jbC43XzA",
    "comedy, spoof",
    "PG"
)
miss_granny = media.Movie(
    "Miss Granny",
    "A woman in her 70s who magically finds herself in the body of her 20-year-old self.",
    "images/miss_granny.jpg",
    "https://www.youtube.com/watch?v=FkWhntKKrIU",
    "comedy, family",
    "PG"
)
alien = media.Movie(
    "Alien",
    "A commercial crew aboard the deep space towing vessel, Nostromo is on its way home when they pick up an SOS warning from a distant planet.",
    "images/alien_movie_poster.jpg",
    "https://www.youtube.com/watch?v=e0DFvWLXv9U",
    "sci-fi, suspense",
    "R"
)
true_romance = media.Movie(
    "True Romance",
    "Clarence marries Alabama, steals cocaine from her pimp, and tries to sell it in Hollywood, while the owners of the coke try to reclaim it.",
    "images/true_romance.jpeg",
    "https://www.youtube.com/watch?v=_wNYNDzKpuQ",
    "black comedy, thriller, violent",
    "R"
)
french_kiss = media.Movie(
    "French Kiss",
    "A woman flies to France to confront her straying fiance, but gets into trouble when the charming crook seated next to her uses her for smuggling.",
    "images/french_kiss.jpg",
    "https://www.youtube.com/watch?v=eWCFoPUfm1Y",
    "comedy, romance",
    "PG-13"
)
lego_movie = media.Movie(
    "The Lego Movie",
    "An ordinary Lego construction worker is thought to be the prophesied 'Special'.",
    "images/lego_movie.jpeg",
    "https://www.youtube.com/watch?v=fZ_JOBCLF-I",
    "comedy, family, fast-paced",
    "PG"
)
time_bandits = media.Movie(
    "Time Bandits",
    "A boy falls in with a gang of time-travelling thieves; a giddy fairy tale, a revisionist history lesson, and a satire of technology gone awry.",
    "images/time_bandits.jpg",
    "https://www.youtube.com/watch?v=NNO9apbGvzo",
    "dark comedy",
    "PG"
)
# --- tv shows -------------------------------------------------------------
flight_conchords = media.TV(
    "Flight of the Conchords",
    "Two shepherds-turned-musicians from New Zealand try to make it big as a folk duo in New York City.",
    "images/conchords.jpeg",
    "https://www.youtube.com/watch?v=srcc64JZmPw",
    "comedy, musical",
    "22 episodes in 2 seasons"
)
firefly = media.TV(
    "Firefly",
    "Five hundred years in the future, a renegade crew aboard a small spacecraft tries to survive as they travel the fringes of the galaxy.",
    "images/firefly.jpg",
    "https://www.youtube.com/watch?v=g0O29rZiIRA",
    "action, sci-fi",
    "14 episodes in 1 season"
)
movies_list = [time_bandits, lola, galaxy_quest, lego_movie, true_romance, alien, miss_granny, french_kiss]
tv_list = [flight_conchords, firefly]
# Use movies_list and tv_list to generate an HTML file and open it in
# a new browser tab
fresh_tomatoes.open_videos_page(movies_list, tv_list)
|
{"/entertainment_center.py": ["/fresh_tomatoes.py", "/media.py"]}
|
36,713
|
zuma-rus/cnc2
|
refs/heads/master
|
/pazpr.py
|
from template import Template
class PazPr(Template):
    """Straight-groove ("paz") program generator; specialises Template.

    Produces CNC programs for panels with 2, 3 or 5 straight grooves,
    depending on the `pazov` constructor argument.
    """

    # Machining constants shared by all layouts (previously duplicated as
    # locals in every create*p method).
    KOLEVKA = 8        # edge clearance, mm
    DIAMETR_FREZI = 9  # cutter diameter, mm

    def __init__(self, name, path_to_template, path_to_progs,
                 prfx, ot_left, ot_right, pazov):
        super().__init__(name, path_to_template, path_to_progs, 42, 42)
        self.mask_name_prog = '[prfx][nul][Y][X].[ext]'
        self.prfx = prfx
        self.pazov = pazov      # number of grooves: 2, 3 or 5
        self.ot_left = ot_left    # offset from the left edge
        self.ot_right = ot_right  # offset from the right edge

    def createProgram(self, x, y):
        """Dispatch to the generator matching self.pazov (default: 2 grooves).

        Returns:
            tuple: (program body, x, y)
        """
        if self.pazov == 5:
            return self.create5p(x, y)
        elif self.pazov == 3:
            return self.create3p(x, y)
        return self.create2p(x, y)

    def create2p(self, x, y):
        """Compute the coordinates and build the 2-groove program."""
        self.subfolder = 'пазы прямые 2п\\'
        kolevka = self.KOLEVKA
        diametrFrezi = self.DIAMETR_FREZI
        # spacing between grooves, rounded to one decimal
        meja = round((y - kolevka * 2 - diametrFrezi * 2) / 3, 1)
        k = {}
        k['[y2]'] = kolevka + meja + diametrFrezi / 2
        k['[y1]'] = y - k['[y2]']
        k['[x1]'] = self.ot_left
        k['[x1e]'] = x - self.ot_right
        body = self.fillingTemplate(self.readTemplate(self.name), k)
        return body, x, y

    def create3p(self, x, y):
        """Compute the coordinates and build the 3-groove program."""
        self.subfolder = 'пазы прямые 3п\\'
        seredY = y / 2
        kolevka = self.KOLEVKA
        diametrFrezi = self.DIAMETR_FREZI
        # rounded to one decimal, e.g. (75 - 16 - 9*3) / 4 - 1 = 7
        meja = round((y - kolevka * 2 - diametrFrezi * 3) / 4, 1) - 1
        mejdu = diametrFrezi + meja  # centre-to-centre distance
        k = {}
        k['[y1]'] = seredY - mejdu
        k['[y2]'] = seredY + mejdu
        k['[x1]'] = self.ot_left
        k['[x1e]'] = x - self.ot_right
        k['[srY]'] = seredY
        body = self.fillingTemplate(self.readTemplate(self.name), k)
        return body, x, y

    def create5p(self, x, y):
        """Compute the coordinates and build the 5-groove program."""
        self.subfolder = 'пазы прямые 5п\\'
        seredY = y / 2
        kolevka = self.KOLEVKA
        diametrFrezi = self.DIAMETR_FREZI
        kolvoPolos = 5  # reserved for the future, in case more strips are needed
        # rounded to one decimal, e.g. (75 - 16 - 9*3) / 4 - 1 = 7
        meja = round((y - kolevka * 2 - diametrFrezi * kolvoPolos) / (kolvoPolos + 1), 1) - 1
        mejdu = diametrFrezi + meja  # centre-to-centre distance
        k = {}
        k['[y1]'] = seredY - mejdu * 2
        k['[y2]'] = seredY - mejdu
        k['[y3]'] = seredY + mejdu
        k['[y4]'] = seredY + mejdu * 2
        k['[x1]'] = self.ot_left
        k['[x1e]'] = x - self.ot_right
        k['[srY]'] = seredY
        body = self.fillingTemplate(self.readTemplate(self.name), k)
        return body, x, y
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,714
|
zuma-rus/cnc2
|
refs/heads/master
|
/vik.py
|
from template import Template
class Vik(Template):
    """Generator for "Victoria" panel programs, derived from Template."""

    def __init__(self, name_template, path_to_template, path_to_progs, ot_x, ot_y):
        super().__init__(name_template, path_to_template, path_to_progs, ot_x, ot_y)
        self.pref_name_folder = 'вик'
        self.postfix()

    # Fill the template with the four edge offsets and return the program text.
    def createProgram(self, x, y):
        keys = {
            '[x1]': self.ot_x,
            '[x2]': x - self.ot_x,
            '[y1]': self.ot_y,
            '[y2]': y - self.ot_y,
        }
        template_text = self.readTemplate(self.name)
        return self.fillingTemplate(template_text, keys), x, y
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,715
|
zuma-rus/cnc2
|
refs/heads/master
|
/messages.py
|
import sys
from PyQt5.QtWidgets import QApplication, QWidget, QPushButton, QDesktopWidget, QMessageBox
# from PyQt5.QtGui import QIcon
from PyQt5.QtCore import pyqtSlot
class Mess(QWidget):
    """Small helper widget for showing centered message boxes."""

    def __init__(self):
        super().__init__()
        # initUI() reads self.title; previously the assignment was commented
        # out, so calling initUI() raised AttributeError. Give it a default.
        self.title = 'PyQt5 messagebox'

    def MesProgComplete(self, title, message):
        """Show an informational message box (information icon, OK button)."""
        self.center()
        buttonReply = QMessageBox.information(self, title, message, QMessageBox.Ok)
        if buttonReply == QMessageBox.Ok:
            print('Ok')
        self.show()

    def center(self):
        """Move the window to the center of the available screen area."""
        qr = self.frameGeometry()
        cp = QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())

    def initUI(self):
        """Demo UI: ask a yes/no question and print which button was clicked."""
        self.resize(320, 200)
        self.center()
        self.setWindowTitle(self.title)
        buttonReply = QMessageBox.question(self, 'PyQt5 message', "Do you like PyQt5?", QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if buttonReply == QMessageBox.Yes:
            print('Yes clicked.')
        else:
            print('No clicked.')
        self.show()
# Manual smoke-test entry point: start a Qt event loop with a bare Mess widget.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    ex = Mess()  # NOTE(review): created but never shown or exercised — confirm intent
    sys.exit(app.exec_())
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,716
|
zuma-rus/cnc2
|
refs/heads/master
|
/my_special.py
|
class My_special():
    """Miscellaneous helper utilities."""

    def sorting(self, list):
        """Print each item of *list* prefixed by its index, one per line.

        NOTE: despite the name, nothing is sorted — this only enumerates.
        The parameter name shadows the builtin ``list``; it is kept for
        backward compatibility with keyword-argument callers.
        """
        # enumerate avoids the range(len(...)) indexing anti-pattern.
        for i, item in enumerate(list):
            print(i, ":", item)
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,717
|
zuma-rus/cnc2
|
refs/heads/master
|
/shield.py
|
from template import Template
class Shield(Template):
    """Generator for shield panel programs, derived from Template.

    (Previous docstring was copied from Vik and wrongly said "Victoria
    panels".)
    """

    def __init__(self, name_template, path_to_template, path_to_progs, ot_x, ot_y, prfx):
        super().__init__(name_template, path_to_template, path_to_progs, ot_x, ot_y)
        self.subfolder = 'щиты\\'
        self.mask_name_prog = '[prfx][X][nul][Y].[ext]'
        self.prfx = prfx

    def _build_body(self, x, y):
        """Shared computation for createProgram / create_program_long.

        Side effects: switches the prefix to 'sb' for wide x-offsets and
        recomputes self.ot_y for narrow panels (y < 140).
        """
        if self.ot_x > 42:
            self.prfx = "sb"
        # otstup_y breakdown:
        #   0.5  -> so the division rounds up
        #   -16  -> two 8 mm edge margins taken by the profile cutter
        #   -44  -> thickness of two 22 mm cutter grooves
        #   /3   -> make the three stripes equally wide
        #   +8   -> give back one edge margin taken by the profile cutter
        #   +11  -> give back half of the cutter thickness
        self.ot_y = (int(0.5 + (y - 16 - 44) / 3) + 8 + 11) if y < 140 else self.ot_y
        k = {}
        k['[x1]'] = self.ot_x
        k['[x2]'] = x - self.ot_x
        k['[y1]'] = self.ot_y
        k['[y2]'] = y - self.ot_y
        k['[y3]'] = y + 150  # seems to be legacy; kept for template compatibility
        body = self.readTemplate(self.name)
        body = self.fillingTemplate(body, k)
        return body, x, y

    # Main routine: compute placeholder values and return the filled program.
    def createProgram(self, x, y):
        return self._build_body(x, y)

    # Extra routine to create templates for another machine (for long shields).
    # UNFINISHED — was an exact line-for-line duplicate of createProgram,
    # now delegates to the shared helper until the long variant is implemented.
    def create_program_long(self, x, y):
        return self._build_body(x, y)
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,718
|
zuma-rus/cnc2
|
refs/heads/master
|
/template.py
|
import os
import re
class Template(object):
    """Base class for CNC program generators built from text templates.

    Holds the template/output paths, the program file-name mask and the
    edge offsets; subclasses implement createProgram().
    """

    def __init__(self, name_template, path_to_template,
                 path_to_progs, ot_x, ot_y):
        self.name = name_template
        self.path_to_template = self.correctPath(path_to_template)
        self.path_to_progs = self.correctPath(path_to_progs)
        self.mask_name_prog = '[X][prfx][nul][Y][pstfx].[ext]'
        self.file_extension_prog = 'tap'
        self.ot_x = int(ot_x)
        self.ot_y = int(ot_y)
        self.prfx = 'f'
        self.pstfx = ''
        self.subfolder = ''
        self.pref_name_folder = 'в'

    # Postfix letter and inner folder name, derived from the offsets.
    def postfix(self):
        x = self.ot_x
        y = self.ot_y
        # Offset -> postfix letter map (42 is the "standard" offset: no letter).
        pstfx = {48: 'v', 47: 's', 46: 's', 45: 'p', 44: 'c', 43: 't', 42: '', 41: 'e'}
        if x == y:
            # NOTE(review): equal offsets above 48 would raise KeyError here —
            # presumably such offsets never occur; confirm the valid range.
            self.pstfx = 'm' if x < 41 else pstfx[x]
            self.subfolder = self.pref_name_folder + '(' + str(x) + ')\\'
        else:
            self.pstfx = 'r'
            self.subfolder = self.pref_name_folder + '(' + str(x) + '-' + str(y) + ')\\'

    # Normalize a path to Windows-style backslashes so reads/writes work.
    def correctPath(self, name):
        return name.replace('/', '\\')

    # Fill the template: replace every placeholder key with its value.
    def fillingTemplate(self, body, kdict):
        for key in kdict:
            body = body.replace(key, str(kdict[key]))
        return body

    # Build the output program file name from the mask.
    def createNameProg(self, x, y):
        prfx = self.prfx
        pstfx = self.pstfx
        # Conditional zero padding (adjusted when Syntec templates were added):
        # for tall programs pad X, otherwise pad Y.
        if (y > 600):
            nul = '0' if x < 100 else ''
        else:
            nul = '0' if y < 100 else ''
        name_prog = self.mask_name_prog
        name_prog = name_prog.replace('[X]', str(x))
        name_prog = name_prog.replace('[Y]', str(y))
        name_prog = name_prog.replace('[prfx]', str(prfx))
        name_prog = name_prog.replace('[pstfx]', str(pstfx))
        name_prog = name_prog.replace('[nul]', str(nul))
        name_prog = name_prog.replace('[ext]', self.file_extension_prog)
        return name_prog

    def readTemplate(self, name):
        """Read and return the whole template file as one string."""
        # Context manager guarantees the file is closed even on errors
        # (was: manual open/readlines/close loop).
        with open(self.path_to_template + name) as myfile:
            return myfile.read()

    # G-code wants decimal points, never commas.
    def onlyPoint(self, txt):
        return txt.replace(',', '.')

    # Remove parenthesized comments (and everything inside), including nested.
    def removeBrackets(self, txt):
        n = 1  # run at least once
        while n:
            txt, n = re.subn(r'\([^()]*\)', '', txt)
        return txt

    # Collapse any run of spaces into a single space.
    def removeDoubleSpace(self, txt):
        # One pass with ' {2,}' is equivalent to repeatedly replacing pairs.
        return re.sub(r' {2,}', ' ', txt)

    # Generate and save one program per (x, y) pair in the list.
    def createProgs(self, list_table):
        for xy in list_table:
            x = xy[0]
            y = xy[1]
            body, x, y = self.createProgram(x, y)
            body = self.onlyPoint(body)
            body = self.removeBrackets(body)
            body = self.removeDoubleSpace(body)
            name_prog = self.createNameProg(x, y)
            self.saveProgram(name_prog, body)

    # Main computation hook; implemented by subclasses.
    def createProgram(self, x, y):
        pass

    def saveProgram(self, name_prog, body):
        """Write the finished program into the (possibly new) output folder."""
        self.findFolder()
        full_path = self.path_to_progs + self.subfolder + name_prog
        with open(full_path, 'w') as f:
            f.write(body)

    # Create the output folder if it does not exist yet.
    def findFolder(self):
        full_path = self.path_to_progs + self.subfolder
        os.makedirs(full_path, exist_ok=True)
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,719
|
zuma-rus/cnc2
|
refs/heads/master
|
/cnc_ui.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'D:\_MyDocuments\__YandexDisk\Документы\___Python\Проекты\ЧПУ\cnc_ui.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(750, 550)
MainWindow.setMinimumSize(QtCore.QSize(750, 550))
MainWindow.setMaximumSize(QtCore.QSize(750, 562))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("ico/cnc_machine.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
self.centralwidget = QtWidgets.QWidget(MainWindow)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
self.centralwidget.setSizePolicy(sizePolicy)
self.centralwidget.setMaximumSize(QtCore.QSize(750, 550))
self.centralwidget.setBaseSize(QtCore.QSize(750, 500))
self.centralwidget.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.centralwidget.setLayoutDirection(QtCore.Qt.LeftToRight)
self.centralwidget.setObjectName("centralwidget")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.centralwidget)
self.horizontalLayout_2.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.horizontalLayout_2.setContentsMargins(-1, 9, -1, -1)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
self.tabWidget.setSizePolicy(sizePolicy)
self.tabWidget.setMinimumSize(QtCore.QSize(0, 0))
self.tabWidget.setMaximumSize(QtCore.QSize(750, 550))
self.tabWidget.setBaseSize(QtCore.QSize(700, 500))
font = QtGui.QFont()
font.setPointSize(8)
self.tabWidget.setFont(font)
self.tabWidget.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.tabWidget.setWhatsThis("")
self.tabWidget.setStyleSheet("")
self.tabWidget.setTabPosition(QtWidgets.QTabWidget.North)
self.tabWidget.setTabShape(QtWidgets.QTabWidget.Rounded)
self.tabWidget.setIconSize(QtCore.QSize(16, 16))
self.tabWidget.setElideMode(QtCore.Qt.ElideLeft)
self.tabWidget.setUsesScrollButtons(False)
self.tabWidget.setObjectName("tabWidget")
self.tab_vik = QtWidgets.QWidget()
self.tab_vik.setMinimumSize(QtCore.QSize(0, 0))
self.tab_vik.setAccessibleName("")
self.tab_vik.setAccessibleDescription("")
self.tab_vik.setAutoFillBackground(False)
self.tab_vik.setObjectName("tab_vik")
self.label = QtWidgets.QLabel(self.tab_vik)
self.label.setGeometry(QtCore.QRect(360, 10, 141, 41))
font = QtGui.QFont()
font.setPointSize(24)
self.label.setFont(font)
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.tab_vik)
self.label_2.setGeometry(QtCore.QRect(500, 10, 211, 41))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setStyleSheet("background-color: rgb(255, 0, 4);\n"
"color: rgb(255, 255, 127);")
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.groupBox_ots_vik = QtWidgets.QGroupBox(self.tab_vik)
self.groupBox_ots_vik.setGeometry(QtCore.QRect(370, 70, 321, 161))
font = QtGui.QFont()
font.setPointSize(13)
self.groupBox_ots_vik.setFont(font)
self.groupBox_ots_vik.setObjectName("groupBox_ots_vik")
self.spinBox_ots_vik_X = QtWidgets.QSpinBox(self.groupBox_ots_vik)
self.spinBox_ots_vik_X.setGeometry(QtCore.QRect(20, 60, 51, 31))
self.spinBox_ots_vik_X.setMinimum(25)
self.spinBox_ots_vik_X.setMaximum(50)
self.spinBox_ots_vik_X.setProperty("value", 42)
self.spinBox_ots_vik_X.setObjectName("spinBox_ots_vik_X")
self.spinBox_ots_vik_Y = QtWidgets.QSpinBox(self.groupBox_ots_vik)
self.spinBox_ots_vik_Y.setGeometry(QtCore.QRect(80, 60, 51, 31))
self.spinBox_ots_vik_Y.setMinimum(25)
self.spinBox_ots_vik_Y.setMaximum(50)
self.spinBox_ots_vik_Y.setProperty("value", 42)
self.spinBox_ots_vik_Y.setObjectName("spinBox_ots_vik_Y")
self.label_ots_vik_X = QtWidgets.QLabel(self.groupBox_ots_vik)
self.label_ots_vik_X.setGeometry(QtCore.QRect(40, 40, 21, 16))
self.label_ots_vik_X.setObjectName("label_ots_vik_X")
self.label_ots_vik_Y = QtWidgets.QLabel(self.groupBox_ots_vik)
self.label_ots_vik_Y.setGeometry(QtCore.QRect(100, 40, 21, 16))
self.label_ots_vik_Y.setObjectName("label_ots_vik_Y")
self.btn_ots_vik_40 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_40.setGeometry(QtCore.QRect(150, 30, 75, 23))
self.btn_ots_vik_40.setObjectName("btn_ots_vik_40")
self.btn_ots_vik_41 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_41.setGeometry(QtCore.QRect(150, 60, 75, 23))
self.btn_ots_vik_41.setObjectName("btn_ots_vik_41")
self.btn_ots_vik_42 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_42.setGeometry(QtCore.QRect(150, 90, 75, 23))
self.btn_ots_vik_42.setObjectName("btn_ots_vik_42")
self.btn_ots_vik_43 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_43.setGeometry(QtCore.QRect(230, 30, 75, 23))
self.btn_ots_vik_43.setObjectName("btn_ots_vik_43")
self.btn_ots_vik_44 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_44.setGeometry(QtCore.QRect(230, 60, 75, 23))
self.btn_ots_vik_44.setObjectName("btn_ots_vik_44")
self.btn_ots_vik_45 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_45.setGeometry(QtCore.QRect(230, 90, 75, 23))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_ots_vik_45.setFont(font)
self.btn_ots_vik_45.setObjectName("btn_ots_vik_45")
self.btn_ots_vik_46 = QtWidgets.QPushButton(self.groupBox_ots_vik)
self.btn_ots_vik_46.setGeometry(QtCore.QRect(190, 120, 75, 23))
self.btn_ots_vik_46.setObjectName("btn_ots_vik_46")
self.btn_create_vik = QtWidgets.QPushButton(self.tab_vik)
self.btn_create_vik.setGeometry(QtCore.QRect(370, 260, 241, 61))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.btn_create_vik.setFont(font)
self.btn_create_vik.setToolTip("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap("ico/Hydrattz-Multipurpose-Alphabet-Letter-V-orange.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_create_vik.setIcon(icon1)
self.btn_create_vik.setIconSize(QtCore.QSize(32, 32))
self.btn_create_vik.setAutoDefault(False)
self.btn_create_vik.setDefault(False)
self.btn_create_vik.setFlat(False)
self.btn_create_vik.setObjectName("btn_create_vik")
self.tableWidget_vik = QtWidgets.QTableWidget(self.tab_vik)
self.tableWidget_vik.setGeometry(QtCore.QRect(70, 30, 191, 421))
font = QtGui.QFont()
font.setPointSize(14)
self.tableWidget_vik.setFont(font)
self.tableWidget_vik.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.tableWidget_vik.setRowCount(12)
self.tableWidget_vik.setColumnCount(2)
self.tableWidget_vik.setObjectName("tableWidget_vik")
item = QtWidgets.QTableWidgetItem()
self.tableWidget_vik.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_vik.setHorizontalHeaderItem(1, item)
self.tableWidget_vik.horizontalHeader().setDefaultSectionSize(71)
self.tableWidget_vik.horizontalHeader().setMinimumSectionSize(30)
self.tableWidget_vik.verticalHeader().setMinimumSectionSize(33)
self.tabWidget.addTab(self.tab_vik, icon1, "")
self.tab_afin = QtWidgets.QWidget()
self.tab_afin.setObjectName("tab_afin")
self.label_3 = QtWidgets.QLabel(self.tab_afin)
self.label_3.setGeometry(QtCore.QRect(560, 10, 151, 41))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.PreferDefault)
self.label_3.setFont(font)
self.label_3.setStyleSheet("background-color:rgb(255, 255, 127); \n"
"color: rgb(255, 0, 4);")
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.tab_afin)
self.label_4.setGeometry(QtCore.QRect(420, 10, 141, 41))
font = QtGui.QFont()
font.setPointSize(24)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.groupBox_ots_afin = QtWidgets.QGroupBox(self.tab_afin)
self.groupBox_ots_afin.setGeometry(QtCore.QRect(370, 70, 321, 161))
font = QtGui.QFont()
font.setPointSize(13)
self.groupBox_ots_afin.setFont(font)
self.groupBox_ots_afin.setObjectName("groupBox_ots_afin")
self.spinBox_ots_afin_X = QtWidgets.QSpinBox(self.groupBox_ots_afin)
self.spinBox_ots_afin_X.setGeometry(QtCore.QRect(20, 60, 51, 31))
self.spinBox_ots_afin_X.setMinimum(25)
self.spinBox_ots_afin_X.setMaximum(50)
self.spinBox_ots_afin_X.setProperty("value", 42)
self.spinBox_ots_afin_X.setObjectName("spinBox_ots_afin_X")
self.spinBox_ots_afin_Y = QtWidgets.QSpinBox(self.groupBox_ots_afin)
self.spinBox_ots_afin_Y.setGeometry(QtCore.QRect(80, 60, 51, 31))
self.spinBox_ots_afin_Y.setMinimum(25)
self.spinBox_ots_afin_Y.setMaximum(50)
self.spinBox_ots_afin_Y.setProperty("value", 42)
self.spinBox_ots_afin_Y.setObjectName("spinBox_ots_afin_Y")
self.label_ots_afin_X = QtWidgets.QLabel(self.groupBox_ots_afin)
self.label_ots_afin_X.setGeometry(QtCore.QRect(40, 40, 21, 16))
self.label_ots_afin_X.setObjectName("label_ots_afin_X")
self.label_ots_afin_Y = QtWidgets.QLabel(self.groupBox_ots_afin)
self.label_ots_afin_Y.setGeometry(QtCore.QRect(100, 40, 21, 16))
self.label_ots_afin_Y.setObjectName("label_ots_afin_Y")
self.btn_ots_afin_40 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_40.setGeometry(QtCore.QRect(150, 30, 75, 23))
self.btn_ots_afin_40.setObjectName("btn_ots_afin_40")
self.btn_ots_afin_41 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_41.setGeometry(QtCore.QRect(150, 60, 75, 23))
self.btn_ots_afin_41.setObjectName("btn_ots_afin_41")
self.btn_ots_afin_42 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_42.setGeometry(QtCore.QRect(150, 90, 75, 23))
self.btn_ots_afin_42.setObjectName("btn_ots_afin_42")
self.btn_ots_afin_43 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_43.setGeometry(QtCore.QRect(230, 30, 75, 23))
self.btn_ots_afin_43.setObjectName("btn_ots_afin_43")
self.btn_ots_afin_44 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_44.setGeometry(QtCore.QRect(230, 60, 75, 23))
self.btn_ots_afin_44.setObjectName("btn_ots_afin_44")
self.btn_ots_afin_45 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_45.setGeometry(QtCore.QRect(230, 90, 75, 23))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_ots_afin_45.setFont(font)
self.btn_ots_afin_45.setObjectName("btn_ots_afin_45")
self.btn_ots_afin_46 = QtWidgets.QPushButton(self.groupBox_ots_afin)
self.btn_ots_afin_46.setGeometry(QtCore.QRect(190, 120, 75, 23))
self.btn_ots_afin_46.setObjectName("btn_ots_afin_46")
self.btn_create_afin = QtWidgets.QPushButton(self.tab_afin)
self.btn_create_afin.setGeometry(QtCore.QRect(370, 390, 241, 61))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.btn_create_afin.setFont(font)
self.btn_create_afin.setToolTip("")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap("ico/Icondesigner.net-Hyperion-Sidebar-Library.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_create_afin.setIcon(icon2)
self.btn_create_afin.setIconSize(QtCore.QSize(32, 32))
self.btn_create_afin.setAutoDefault(False)
self.btn_create_afin.setDefault(False)
self.btn_create_afin.setFlat(False)
self.btn_create_afin.setObjectName("btn_create_afin")
self.groupBox_6 = QtWidgets.QGroupBox(self.tab_afin)
self.groupBox_6.setGeometry(QtCore.QRect(370, 250, 161, 111))
font = QtGui.QFont()
font.setPointSize(13)
self.groupBox_6.setFont(font)
self.groupBox_6.setObjectName("groupBox_6")
self.rBtn_afin_arc = QtWidgets.QRadioButton(self.groupBox_6)
self.rBtn_afin_arc.setGeometry(QtCore.QRect(30, 40, 101, 17))
self.rBtn_afin_arc.setChecked(True)
self.rBtn_afin_arc.setObjectName("rBtn_afin_arc")
self.rBtn_afin_pr = QtWidgets.QRadioButton(self.groupBox_6)
self.rBtn_afin_pr.setGeometry(QtCore.QRect(30, 70, 91, 17))
self.rBtn_afin_pr.setObjectName("rBtn_afin_pr")
self.label_pix_afin = QtWidgets.QLabel(self.tab_afin)
self.label_pix_afin.setGeometry(QtCore.QRect(550, 260, 140, 100))
self.label_pix_afin.setText("")
self.label_pix_afin.setPixmap(QtGui.QPixmap("pix/afin_arc.png"))
self.label_pix_afin.setObjectName("label_pix_afin")
self.tableWidget_afin = QtWidgets.QTableWidget(self.tab_afin)
self.tableWidget_afin.setGeometry(QtCore.QRect(70, 30, 191, 421))
font = QtGui.QFont()
font.setPointSize(14)
self.tableWidget_afin.setFont(font)
self.tableWidget_afin.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.tableWidget_afin.setRowCount(12)
self.tableWidget_afin.setColumnCount(2)
self.tableWidget_afin.setObjectName("tableWidget_afin")
item = QtWidgets.QTableWidgetItem()
self.tableWidget_afin.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_afin.setHorizontalHeaderItem(1, item)
self.tableWidget_afin.horizontalHeader().setDefaultSectionSize(71)
self.tableWidget_afin.horizontalHeader().setMinimumSectionSize(30)
self.tableWidget_afin.verticalHeader().setMinimumSectionSize(33)
self.tabWidget.addTab(self.tab_afin, icon2, "")
self.tab_shit = QtWidgets.QWidget()
self.tab_shit.setObjectName("tab_shit")
self.label_shit = QtWidgets.QLabel(self.tab_shit)
self.label_shit.setGeometry(QtCore.QRect(560, 10, 151, 41))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.PreferDefault)
self.label_shit.setFont(font)
self.label_shit.setStyleSheet("color: rgb(170, 85, 0);")
self.label_shit.setAlignment(QtCore.Qt.AlignCenter)
self.label_shit.setObjectName("label_shit")
self.tableWidget_shit = QtWidgets.QTableWidget(self.tab_shit)
self.tableWidget_shit.setGeometry(QtCore.QRect(70, 30, 191, 421))
font = QtGui.QFont()
font.setPointSize(14)
self.tableWidget_shit.setFont(font)
self.tableWidget_shit.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.tableWidget_shit.setRowCount(12)
self.tableWidget_shit.setColumnCount(2)
self.tableWidget_shit.setObjectName("tableWidget_shit")
item = QtWidgets.QTableWidgetItem()
self.tableWidget_shit.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_shit.setHorizontalHeaderItem(1, item)
self.tableWidget_shit.horizontalHeader().setDefaultSectionSize(71)
self.tableWidget_shit.horizontalHeader().setMinimumSectionSize(30)
self.tableWidget_shit.verticalHeader().setMinimumSectionSize(33)
self.groupBox_ots_shit = QtWidgets.QGroupBox(self.tab_shit)
self.groupBox_ots_shit.setGeometry(QtCore.QRect(370, 70, 321, 151))
font = QtGui.QFont()
font.setPointSize(13)
self.groupBox_ots_shit.setFont(font)
self.groupBox_ots_shit.setObjectName("groupBox_ots_shit")
self.spinBox_ots_shit_X = QtWidgets.QSpinBox(self.groupBox_ots_shit)
self.spinBox_ots_shit_X.setGeometry(QtCore.QRect(30, 60, 51, 31))
self.spinBox_ots_shit_X.setMinimum(25)
self.spinBox_ots_shit_X.setMaximum(50)
self.spinBox_ots_shit_X.setProperty("value", 42)
self.spinBox_ots_shit_X.setObjectName("spinBox_ots_shit_X")
self.spinBox_ots_shit_Y = QtWidgets.QSpinBox(self.groupBox_ots_shit)
self.spinBox_ots_shit_Y.setGeometry(QtCore.QRect(90, 60, 51, 31))
self.spinBox_ots_shit_Y.setMinimum(25)
self.spinBox_ots_shit_Y.setMaximum(50)
self.spinBox_ots_shit_Y.setProperty("value", 42)
self.spinBox_ots_shit_Y.setObjectName("spinBox_ots_shit_Y")
self.label_ots_shit_X = QtWidgets.QLabel(self.groupBox_ots_shit)
self.label_ots_shit_X.setGeometry(QtCore.QRect(50, 40, 21, 16))
self.label_ots_shit_X.setObjectName("label_ots_shit_X")
self.label_ots_shit_Y = QtWidgets.QLabel(self.groupBox_ots_shit)
self.label_ots_shit_Y.setGeometry(QtCore.QRect(110, 40, 21, 16))
self.label_ots_shit_Y.setObjectName("label_ots_shit_Y")
self.btn_ots_shit_42 = QtWidgets.QPushButton(self.groupBox_ots_shit)
self.btn_ots_shit_42.setGeometry(QtCore.QRect(174, 40, 111, 81))
self.btn_ots_shit_42.setObjectName("btn_ots_shit_42")
self.btn_create_shit = QtWidgets.QPushButton(self.tab_shit)
self.btn_create_shit.setGeometry(QtCore.QRect(370, 250, 241, 61))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.btn_create_shit.setFont(font)
self.btn_create_shit.setToolTip("")
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap("ico/Paomedia-Small-N-Flat-Shield.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_create_shit.setIcon(icon3)
self.btn_create_shit.setIconSize(QtCore.QSize(32, 32))
self.btn_create_shit.setAutoDefault(False)
self.btn_create_shit.setDefault(False)
self.btn_create_shit.setFlat(False)
self.btn_create_shit.setObjectName("btn_create_shit")
self.tabWidget.addTab(self.tab_shit, icon3, "")
self.tab_paz_kol = QtWidgets.QWidget()
self.tab_paz_kol.setObjectName("tab_paz_kol")
self.groupBox_paz = QtWidgets.QGroupBox(self.tab_paz_kol)
self.groupBox_paz.setGeometry(QtCore.QRect(370, 70, 321, 201))
font = QtGui.QFont()
font.setPointSize(13)
font.setBold(True)
font.setWeight(75)
self.groupBox_paz.setFont(font)
self.groupBox_paz.setObjectName("groupBox_paz")
self.label_33 = QtWidgets.QLabel(self.groupBox_paz)
self.label_33.setGeometry(QtCore.QRect(20, 30, 286, 153))
font = QtGui.QFont()
font.setPointSize(13)
font.setBold(False)
font.setWeight(50)
self.label_33.setFont(font)
self.label_33.setObjectName("label_33")
self.btn_create_paz = QtWidgets.QPushButton(self.tab_paz_kol)
self.btn_create_paz.setGeometry(QtCore.QRect(370, 310, 241, 61))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.btn_create_paz.setFont(font)
self.btn_create_paz.setToolTip("")
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap("ico/Iconsmind-Outline-Column.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_create_paz.setIcon(icon4)
self.btn_create_paz.setIconSize(QtCore.QSize(32, 32))
self.btn_create_paz.setAutoDefault(False)
self.btn_create_paz.setDefault(False)
self.btn_create_paz.setFlat(False)
self.btn_create_paz.setObjectName("btn_create_paz")
self.tableWidget_paz = QtWidgets.QTableWidget(self.tab_paz_kol)
self.tableWidget_paz.setGeometry(QtCore.QRect(70, 30, 191, 421))
font = QtGui.QFont()
font.setPointSize(14)
self.tableWidget_paz.setFont(font)
self.tableWidget_paz.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.tableWidget_paz.setRowCount(12)
self.tableWidget_paz.setColumnCount(2)
self.tableWidget_paz.setObjectName("tableWidget_paz")
item = QtWidgets.QTableWidgetItem()
self.tableWidget_paz.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_paz.setHorizontalHeaderItem(1, item)
self.tableWidget_paz.horizontalHeader().setDefaultSectionSize(71)
self.tableWidget_paz.horizontalHeader().setMinimumSectionSize(30)
self.tableWidget_paz.verticalHeader().setMinimumSectionSize(33)
self.label_paz = QtWidgets.QLabel(self.tab_paz_kol)
self.label_paz.setGeometry(QtCore.QRect(370, 10, 331, 41))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.PreferDefault)
self.label_paz.setFont(font)
self.label_paz.setStyleSheet("color: rgb(85, 85, 255);")
self.label_paz.setAlignment(QtCore.Qt.AlignCenter)
self.label_paz.setObjectName("label_paz")
self.tabWidget.addTab(self.tab_paz_kol, icon4, "")
self.tab_paz_pr = QtWidgets.QWidget()
self.tab_paz_pr.setObjectName("tab_paz_pr")
self.btn_create_pazpr = QtWidgets.QPushButton(self.tab_paz_pr)
self.btn_create_pazpr.setGeometry(QtCore.QRect(370, 260, 241, 61))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.btn_create_pazpr.setFont(font)
self.btn_create_pazpr.setToolTip("")
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap("ico/Icons8-Ios7-Editing-Line-Width.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_create_pazpr.setIcon(icon5)
self.btn_create_pazpr.setIconSize(QtCore.QSize(32, 32))
self.btn_create_pazpr.setAutoDefault(False)
self.btn_create_pazpr.setDefault(False)
self.btn_create_pazpr.setFlat(False)
self.btn_create_pazpr.setObjectName("btn_create_pazpr")
self.groupBox_ots_pazpr = QtWidgets.QGroupBox(self.tab_paz_pr)
self.groupBox_ots_pazpr.setGeometry(QtCore.QRect(370, 70, 321, 161))
font = QtGui.QFont()
font.setPointSize(13)
self.groupBox_ots_pazpr.setFont(font)
self.groupBox_ots_pazpr.setObjectName("groupBox_ots_pazpr")
self.spinBox_ots_pazpr_X_left = QtWidgets.QSpinBox(self.groupBox_ots_pazpr)
self.spinBox_ots_pazpr_X_left.setGeometry(QtCore.QRect(40, 60, 51, 31))
self.spinBox_ots_pazpr_X_left.setMinimum(0)
self.spinBox_ots_pazpr_X_left.setMaximum(500)
self.spinBox_ots_pazpr_X_left.setProperty("value", 30)
self.spinBox_ots_pazpr_X_left.setObjectName("spinBox_ots_pazpr_X_left")
self.spinBox_ots_pazpr_X_right = QtWidgets.QSpinBox(self.groupBox_ots_pazpr)
self.spinBox_ots_pazpr_X_right.setGeometry(QtCore.QRect(110, 60, 51, 31))
self.spinBox_ots_pazpr_X_right.setMinimum(0)
self.spinBox_ots_pazpr_X_right.setMaximum(500)
self.spinBox_ots_pazpr_X_right.setProperty("value", 20)
self.spinBox_ots_pazpr_X_right.setObjectName("spinBox_ots_pazpr_X_right")
self.label_ots_papr_X_left = QtWidgets.QLabel(self.groupBox_ots_pazpr)
self.label_ots_papr_X_left.setGeometry(QtCore.QRect(20, 40, 71, 20))
font = QtGui.QFont()
font.setPointSize(10)
self.label_ots_papr_X_left.setFont(font)
self.label_ots_papr_X_left.setObjectName("label_ots_papr_X_left")
self.btn_ots_papr_2030 = QtWidgets.QPushButton(self.groupBox_ots_pazpr)
self.btn_ots_papr_2030.setGeometry(QtCore.QRect(40, 110, 121, 31))
self.btn_ots_papr_2030.setObjectName("btn_ots_papr_2030")
self.label_ots_pazpr_X_right = QtWidgets.QLabel(self.groupBox_ots_pazpr)
self.label_ots_pazpr_X_right.setGeometry(QtCore.QRect(110, 40, 81, 20))
font = QtGui.QFont()
font.setPointSize(10)
self.label_ots_pazpr_X_right.setFont(font)
self.label_ots_pazpr_X_right.setObjectName("label_ots_pazpr_X_right")
self.rBtn_pazpr_3paz = QtWidgets.QRadioButton(self.groupBox_ots_pazpr)
self.rBtn_pazpr_3paz.setGeometry(QtCore.QRect(210, 80, 91, 17))
self.rBtn_pazpr_3paz.setObjectName("rBtn_pazpr_3paz")
self.rBtn_pazpr_2paz = QtWidgets.QRadioButton(self.groupBox_ots_pazpr)
self.rBtn_pazpr_2paz.setGeometry(QtCore.QRect(210, 50, 101, 17))
self.rBtn_pazpr_2paz.setChecked(True)
self.rBtn_pazpr_2paz.setObjectName("rBtn_pazpr_2paz")
self.rBtn_pazpr_5paz = QtWidgets.QRadioButton(self.groupBox_ots_pazpr)
self.rBtn_pazpr_5paz.setGeometry(QtCore.QRect(210, 110, 91, 17))
self.rBtn_pazpr_5paz.setObjectName("rBtn_pazpr_5paz")
self.tableWidget_pazpr = QtWidgets.QTableWidget(self.tab_paz_pr)
self.tableWidget_pazpr.setGeometry(QtCore.QRect(70, 30, 191, 421))
font = QtGui.QFont()
font.setPointSize(14)
self.tableWidget_pazpr.setFont(font)
self.tableWidget_pazpr.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.tableWidget_pazpr.setRowCount(12)
self.tableWidget_pazpr.setColumnCount(2)
self.tableWidget_pazpr.setObjectName("tableWidget_pazpr")
item = QtWidgets.QTableWidgetItem()
self.tableWidget_pazpr.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_pazpr.setHorizontalHeaderItem(1, item)
self.tableWidget_pazpr.horizontalHeader().setDefaultSectionSize(71)
self.tableWidget_pazpr.horizontalHeader().setMinimumSectionSize(30)
self.tableWidget_pazpr.verticalHeader().setMinimumSectionSize(33)
self.label_pazpr = QtWidgets.QLabel(self.tab_paz_pr)
self.label_pazpr.setGeometry(QtCore.QRect(400, 10, 331, 41))
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.PreferDefault)
self.label_pazpr.setFont(font)
self.label_pazpr.setStyleSheet("color: rgb(0, 0, 127);")
self.label_pazpr.setAlignment(QtCore.Qt.AlignCenter)
self.label_pazpr.setObjectName("label_pazpr")
self.tabWidget.addTab(self.tab_paz_pr, icon5, "")
self.tab_prop = QtWidgets.QWidget()
self.tab_prop.setObjectName("tab_prop")
self.label_9 = QtWidgets.QLabel(self.tab_prop)
self.label_9.setGeometry(QtCore.QRect(130, 20, 451, 41))
font = QtGui.QFont()
font.setPointSize(24)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.layoutWidget = QtWidgets.QWidget(self.tab_prop)
self.layoutWidget.setGeometry(QtCore.QRect(20, 90, 691, 211))
self.layoutWidget.setObjectName("layoutWidget")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.layoutWidget)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.groupBox_3 = QtWidgets.QGroupBox(self.layoutWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_3.sizePolicy().hasHeightForWidth())
self.groupBox_3.setSizePolicy(sizePolicy)
self.groupBox_3.setObjectName("groupBox_3")
self.spinBox_prop_is_X = QtWidgets.QSpinBox(self.groupBox_3)
self.spinBox_prop_is_X.setGeometry(QtCore.QRect(90, 30, 61, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.spinBox_prop_is_X.setFont(font)
self.spinBox_prop_is_X.setMaximum(2000)
self.spinBox_prop_is_X.setObjectName("spinBox_prop_is_X")
self.label_24 = QtWidgets.QLabel(self.groupBox_3)
self.label_24.setGeometry(QtCore.QRect(60, 110, 16, 20))
font = QtGui.QFont()
font.setPointSize(17)
self.label_24.setFont(font)
self.label_24.setObjectName("label_24")
self.spinBox_prop_is_Z = QtWidgets.QSpinBox(self.groupBox_3)
self.spinBox_prop_is_Z.setGeometry(QtCore.QRect(90, 110, 61, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.spinBox_prop_is_Z.setFont(font)
self.spinBox_prop_is_Z.setObjectName("spinBox_prop_is_Z")
self.label_25 = QtWidgets.QLabel(self.groupBox_3)
self.label_25.setGeometry(QtCore.QRect(60, 70, 16, 20))
font = QtGui.QFont()
font.setPointSize(17)
self.label_25.setFont(font)
self.label_25.setObjectName("label_25")
self.label_26 = QtWidgets.QLabel(self.groupBox_3)
self.label_26.setGeometry(QtCore.QRect(60, 30, 16, 20))
font = QtGui.QFont()
font.setPointSize(17)
self.label_26.setFont(font)
self.label_26.setObjectName("label_26")
self.spinBox_prop_is_Y = QtWidgets.QSpinBox(self.groupBox_3)
self.spinBox_prop_is_Y.setGeometry(QtCore.QRect(90, 70, 61, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.spinBox_prop_is_Y.setFont(font)
self.spinBox_prop_is_Y.setMaximum(2000)
self.spinBox_prop_is_Y.setObjectName("spinBox_prop_is_Y")
self.btn_prop = QtWidgets.QPushButton(self.groupBox_3)
self.btn_prop.setGeometry(QtCore.QRect(60, 160, 111, 31))
self.btn_prop.setObjectName("btn_prop")
self.horizontalLayout.addWidget(self.groupBox_3)
self.groupBox_4 = QtWidgets.QGroupBox(self.layoutWidget)
self.groupBox_4.setObjectName("groupBox_4")
self.spinBox_prop_ras_X = QtWidgets.QSpinBox(self.groupBox_4)
self.spinBox_prop_ras_X.setGeometry(QtCore.QRect(90, 30, 61, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.spinBox_prop_ras_X.setFont(font)
self.spinBox_prop_ras_X.setMaximum(2000)
self.spinBox_prop_ras_X.setObjectName("spinBox_prop_ras_X")
self.label_21 = QtWidgets.QLabel(self.groupBox_4)
self.label_21.setGeometry(QtCore.QRect(60, 70, 16, 20))
font = QtGui.QFont()
font.setPointSize(17)
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.label_22 = QtWidgets.QLabel(self.groupBox_4)
self.label_22.setGeometry(QtCore.QRect(60, 30, 16, 20))
font = QtGui.QFont()
font.setPointSize(17)
self.label_22.setFont(font)
self.label_22.setObjectName("label_22")
self.label_23 = QtWidgets.QLabel(self.groupBox_4)
self.label_23.setGeometry(QtCore.QRect(60, 110, 16, 20))
font = QtGui.QFont()
font.setPointSize(17)
self.label_23.setFont(font)
self.label_23.setObjectName("label_23")
self.spinBox_prop_ras_Y = QtWidgets.QSpinBox(self.groupBox_4)
self.spinBox_prop_ras_Y.setGeometry(QtCore.QRect(90, 70, 61, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.spinBox_prop_ras_Y.setFont(font)
self.spinBox_prop_ras_Y.setMaximum(2000)
self.spinBox_prop_ras_Y.setObjectName("spinBox_prop_ras_Y")
self.spinBox_prop_ras_Z = QtWidgets.QSpinBox(self.groupBox_4)
self.spinBox_prop_ras_Z.setGeometry(QtCore.QRect(90, 110, 61, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.spinBox_prop_ras_Z.setFont(font)
self.spinBox_prop_ras_Z.setObjectName("spinBox_prop_ras_Z")
self.horizontalLayout.addWidget(self.groupBox_4)
self.groupBox_5 = QtWidgets.QGroupBox(self.layoutWidget)
self.groupBox_5.setObjectName("groupBox_5")
self.label_10 = QtWidgets.QLabel(self.groupBox_5)
self.label_10.setGeometry(QtCore.QRect(80, 20, 81, 20))
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.groupBox_5)
self.label_11.setGeometry(QtCore.QRect(30, 40, 61, 20))
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(self.groupBox_5)
self.label_12.setGeometry(QtCore.QRect(140, 40, 61, 20))
self.label_12.setObjectName("label_12")
self.spinBox_prop_zag_X = QtWidgets.QSpinBox(self.groupBox_5)
self.spinBox_prop_zag_X.setGeometry(QtCore.QRect(30, 60, 42, 22))
self.spinBox_prop_zag_X.setObjectName("spinBox_prop_zag_X")
self.spinBox_prop_zag_Y = QtWidgets.QSpinBox(self.groupBox_5)
self.spinBox_prop_zag_Y.setGeometry(QtCore.QRect(140, 60, 42, 22))
self.spinBox_prop_zag_Y.setObjectName("spinBox_prop_zag_Y")
self.label_13 = QtWidgets.QLabel(self.groupBox_5)
self.label_13.setGeometry(QtCore.QRect(80, 110, 91, 20))
self.label_13.setObjectName("label_13")
self.label_14 = QtWidgets.QLabel(self.groupBox_5)
self.label_14.setGeometry(QtCore.QRect(50, 130, 16, 20))
self.label_14.setObjectName("label_14")
self.label_15 = QtWidgets.QLabel(self.groupBox_5)
self.label_15.setGeometry(QtCore.QRect(100, 130, 16, 20))
self.label_15.setObjectName("label_15")
self.label_16 = QtWidgets.QLabel(self.groupBox_5)
self.label_16.setGeometry(QtCore.QRect(170, 130, 20, 20))
self.label_16.setObjectName("label_16")
self.lineEdit_zag_Y = QtWidgets.QLineEdit(self.groupBox_5)
self.lineEdit_zag_Y.setGeometry(QtCore.QRect(90, 150, 51, 20))
self.lineEdit_zag_Y.setObjectName("lineEdit_zag_Y")
self.lineEdit_zag_Z = QtWidgets.QLineEdit(self.groupBox_5)
self.lineEdit_zag_Z.setGeometry(QtCore.QRect(160, 150, 51, 20))
self.lineEdit_zag_Z.setObjectName("lineEdit_zag_Z")
self.lineEdit_zag_X = QtWidgets.QLineEdit(self.groupBox_5)
self.lineEdit_zag_X.setGeometry(QtCore.QRect(20, 150, 51, 20))
self.lineEdit_zag_X.setObjectName("lineEdit_zag_X")
self.label_17 = QtWidgets.QLabel(self.groupBox_5)
self.label_17.setGeometry(QtCore.QRect(80, 150, 16, 20))
self.label_17.setObjectName("label_17")
self.horizontalLayout.addWidget(self.groupBox_5)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap("ico/Icons8-Windows-8-Science-Geometry.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.tab_prop, icon6, "")
self.tab_setting = QtWidgets.QWidget()
self.tab_setting.setObjectName("tab_setting")
self.toolBox_setting = QtWidgets.QToolBox(self.tab_setting)
self.toolBox_setting.setGeometry(QtCore.QRect(18, 72, 691, 401))
self.toolBox_setting.setObjectName("toolBox_setting")
self.page_shields = QtWidgets.QWidget()
self.page_shields.setGeometry(QtCore.QRect(0, 0, 691, 281))
self.page_shields.setObjectName("page_shields")
self.groupBox_7 = QtWidgets.QGroupBox(self.page_shields)
self.groupBox_7.setGeometry(QtCore.QRect(20, 100, 291, 91))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_7.setFont(font)
self.groupBox_7.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_7.setObjectName("groupBox_7")
self.lineEdit_shield_template = QtWidgets.QLineEdit(self.groupBox_7)
self.lineEdit_shield_template.setGeometry(QtCore.QRect(80, 40, 181, 20))
self.lineEdit_shield_template.setObjectName("lineEdit_shield_template")
self.toolButton_select_shield_template = QtWidgets.QToolButton(self.groupBox_7)
self.toolButton_select_shield_template.setGeometry(QtCore.QRect(30, 40, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_shield_template.setFont(font)
self.toolButton_select_shield_template.setObjectName("toolButton_select_shield_template")
self.groupBox_8 = QtWidgets.QGroupBox(self.page_shields)
self.groupBox_8.setGeometry(QtCore.QRect(380, 100, 291, 91))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_8.setFont(font)
self.groupBox_8.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_8.setObjectName("groupBox_8")
self.lineEdit_shield_prefix = QtWidgets.QLineEdit(self.groupBox_8)
self.lineEdit_shield_prefix.setGeometry(QtCore.QRect(110, 30, 71, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.lineEdit_shield_prefix.setFont(font)
self.lineEdit_shield_prefix.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_shield_prefix.setObjectName("lineEdit_shield_prefix")
self.toolBox_setting.addItem(self.page_shields, icon3, "")
self.page_paz = QtWidgets.QWidget()
self.page_paz.setGeometry(QtCore.QRect(0, 0, 691, 281))
self.page_paz.setObjectName("page_paz")
self.groupBox_9 = QtWidgets.QGroupBox(self.page_paz)
self.groupBox_9.setGeometry(QtCore.QRect(20, 30, 651, 101))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_9.setFont(font)
self.groupBox_9.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_9.setObjectName("groupBox_9")
self.lineEdit_paz2_template = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEdit_paz2_template.setGeometry(QtCore.QRect(230, 60, 131, 20))
self.lineEdit_paz2_template.setObjectName("lineEdit_paz2_template")
self.toolButton_select_paz2_template = QtWidgets.QToolButton(self.groupBox_9)
self.toolButton_select_paz2_template.setGeometry(QtCore.QRect(180, 60, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_paz2_template.setFont(font)
self.toolButton_select_paz2_template.setObjectName("toolButton_select_paz2_template")
self.lineEdit_paz2_prefix = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEdit_paz2_prefix.setGeometry(QtCore.QRect(490, 50, 71, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.lineEdit_paz2_prefix.setFont(font)
self.lineEdit_paz2_prefix.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_paz2_prefix.setObjectName("lineEdit_paz2_prefix")
self.label_5 = QtWidgets.QLabel(self.groupBox_9)
self.label_5.setGeometry(QtCore.QRect(230, 40, 121, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.label_18 = QtWidgets.QLabel(self.groupBox_9)
self.label_18.setGeometry(QtCore.QRect(500, 30, 61, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_18.setFont(font)
self.label_18.setObjectName("label_18")
self.line = QtWidgets.QFrame(self.groupBox_9)
self.line.setGeometry(QtCore.QRect(20, 30, 81, 16))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.line_2 = QtWidgets.QFrame(self.groupBox_9)
self.line_2.setGeometry(QtCore.QRect(20, 60, 81, 16))
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.groupBox_10 = QtWidgets.QGroupBox(self.page_paz)
self.groupBox_10.setGeometry(QtCore.QRect(20, 150, 651, 101))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_10.setFont(font)
self.groupBox_10.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_10.setObjectName("groupBox_10")
self.lineEdit_paz3_template = QtWidgets.QLineEdit(self.groupBox_10)
self.lineEdit_paz3_template.setGeometry(QtCore.QRect(230, 60, 131, 20))
self.lineEdit_paz3_template.setObjectName("lineEdit_paz3_template")
self.toolButton_select_paz3_template = QtWidgets.QToolButton(self.groupBox_10)
self.toolButton_select_paz3_template.setGeometry(QtCore.QRect(180, 60, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_paz3_template.setFont(font)
self.toolButton_select_paz3_template.setObjectName("toolButton_select_paz3_template")
self.lineEdit_paz3_prefix = QtWidgets.QLineEdit(self.groupBox_10)
self.lineEdit_paz3_prefix.setGeometry(QtCore.QRect(490, 50, 71, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.lineEdit_paz3_prefix.setFont(font)
self.lineEdit_paz3_prefix.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_paz3_prefix.setObjectName("lineEdit_paz3_prefix")
self.label_19 = QtWidgets.QLabel(self.groupBox_10)
self.label_19.setGeometry(QtCore.QRect(230, 40, 121, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_19.setFont(font)
self.label_19.setObjectName("label_19")
self.label_20 = QtWidgets.QLabel(self.groupBox_10)
self.label_20.setGeometry(QtCore.QRect(500, 30, 61, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_20.setFont(font)
self.label_20.setObjectName("label_20")
self.line_3 = QtWidgets.QFrame(self.groupBox_10)
self.line_3.setGeometry(QtCore.QRect(20, 30, 81, 16))
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.line_4 = QtWidgets.QFrame(self.groupBox_10)
self.line_4.setGeometry(QtCore.QRect(30, 48, 61, 20))
self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.line_5 = QtWidgets.QFrame(self.groupBox_10)
self.line_5.setGeometry(QtCore.QRect(20, 70, 81, 16))
self.line_5.setFrameShape(QtWidgets.QFrame.HLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.toolBox_setting.addItem(self.page_paz, icon4, "")
self.page_pazpr = QtWidgets.QWidget()
self.page_pazpr.setGeometry(QtCore.QRect(0, 0, 691, 281))
self.page_pazpr.setObjectName("page_pazpr")
self.groupBox_11 = QtWidgets.QGroupBox(self.page_pazpr)
self.groupBox_11.setGeometry(QtCore.QRect(20, 10, 651, 81))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_11.setFont(font)
self.groupBox_11.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_11.setObjectName("groupBox_11")
self.lineEdit_pazpr2_template = QtWidgets.QLineEdit(self.groupBox_11)
self.lineEdit_pazpr2_template.setGeometry(QtCore.QRect(230, 40, 131, 20))
self.lineEdit_pazpr2_template.setObjectName("lineEdit_pazpr2_template")
self.toolButton_select_pazpr2_template = QtWidgets.QToolButton(self.groupBox_11)
self.toolButton_select_pazpr2_template.setGeometry(QtCore.QRect(180, 40, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_pazpr2_template.setFont(font)
self.toolButton_select_pazpr2_template.setObjectName("toolButton_select_pazpr2_template")
self.lineEdit_pazpr2_prefix = QtWidgets.QLineEdit(self.groupBox_11)
self.lineEdit_pazpr2_prefix.setGeometry(QtCore.QRect(490, 30, 71, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.lineEdit_pazpr2_prefix.setFont(font)
self.lineEdit_pazpr2_prefix.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_pazpr2_prefix.setObjectName("lineEdit_pazpr2_prefix")
self.label_27 = QtWidgets.QLabel(self.groupBox_11)
self.label_27.setGeometry(QtCore.QRect(230, 20, 121, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_27.setFont(font)
self.label_27.setObjectName("label_27")
self.label_28 = QtWidgets.QLabel(self.groupBox_11)
self.label_28.setGeometry(QtCore.QRect(500, 10, 61, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_28.setFont(font)
self.label_28.setObjectName("label_28")
self.line_6 = QtWidgets.QFrame(self.groupBox_11)
self.line_6.setGeometry(QtCore.QRect(20, 30, 81, 16))
self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.line_7 = QtWidgets.QFrame(self.groupBox_11)
self.line_7.setGeometry(QtCore.QRect(20, 50, 81, 16))
self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.groupBox_12 = QtWidgets.QGroupBox(self.page_pazpr)
self.groupBox_12.setGeometry(QtCore.QRect(20, 100, 651, 81))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_12.setFont(font)
self.groupBox_12.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_12.setObjectName("groupBox_12")
self.lineEdit_pazpr3_template = QtWidgets.QLineEdit(self.groupBox_12)
self.lineEdit_pazpr3_template.setGeometry(QtCore.QRect(230, 40, 131, 20))
self.lineEdit_pazpr3_template.setObjectName("lineEdit_pazpr3_template")
self.toolButton_select_pazpr3_template = QtWidgets.QToolButton(self.groupBox_12)
self.toolButton_select_pazpr3_template.setGeometry(QtCore.QRect(180, 40, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_pazpr3_template.setFont(font)
self.toolButton_select_pazpr3_template.setObjectName("toolButton_select_pazpr3_template")
self.lineEdit_pazpr3_prefix = QtWidgets.QLineEdit(self.groupBox_12)
self.lineEdit_pazpr3_prefix.setGeometry(QtCore.QRect(490, 30, 71, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.lineEdit_pazpr3_prefix.setFont(font)
self.lineEdit_pazpr3_prefix.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_pazpr3_prefix.setObjectName("lineEdit_pazpr3_prefix")
self.label_29 = QtWidgets.QLabel(self.groupBox_12)
self.label_29.setGeometry(QtCore.QRect(230, 20, 121, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_29.setFont(font)
self.label_29.setObjectName("label_29")
self.label_30 = QtWidgets.QLabel(self.groupBox_12)
self.label_30.setGeometry(QtCore.QRect(500, 10, 61, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_30.setFont(font)
self.label_30.setObjectName("label_30")
self.line_8 = QtWidgets.QFrame(self.groupBox_12)
self.line_8.setGeometry(QtCore.QRect(20, 30, 81, 16))
self.line_8.setFrameShape(QtWidgets.QFrame.HLine)
self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_8.setObjectName("line_8")
self.line_9 = QtWidgets.QFrame(self.groupBox_12)
self.line_9.setGeometry(QtCore.QRect(20, 50, 81, 16))
self.line_9.setFrameShape(QtWidgets.QFrame.HLine)
self.line_9.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_9.setObjectName("line_9")
self.line_12 = QtWidgets.QFrame(self.groupBox_12)
self.line_12.setGeometry(QtCore.QRect(20, 40, 81, 16))
self.line_12.setFrameShape(QtWidgets.QFrame.HLine)
self.line_12.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_12.setObjectName("line_12")
self.groupBox_13 = QtWidgets.QGroupBox(self.page_pazpr)
self.groupBox_13.setGeometry(QtCore.QRect(20, 190, 651, 81))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_13.setFont(font)
self.groupBox_13.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_13.setObjectName("groupBox_13")
self.lineEdit_pazpr5_template = QtWidgets.QLineEdit(self.groupBox_13)
self.lineEdit_pazpr5_template.setGeometry(QtCore.QRect(230, 40, 131, 20))
self.lineEdit_pazpr5_template.setObjectName("lineEdit_pazpr5_template")
self.toolButton_select_pazpr5_template = QtWidgets.QToolButton(self.groupBox_13)
self.toolButton_select_pazpr5_template.setGeometry(QtCore.QRect(180, 40, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_pazpr5_template.setFont(font)
self.toolButton_select_pazpr5_template.setObjectName("toolButton_select_pazpr5_template")
self.lineEdit_pazpr5_prefix = QtWidgets.QLineEdit(self.groupBox_13)
self.lineEdit_pazpr5_prefix.setGeometry(QtCore.QRect(490, 30, 71, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.lineEdit_pazpr5_prefix.setFont(font)
self.lineEdit_pazpr5_prefix.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_pazpr5_prefix.setObjectName("lineEdit_pazpr5_prefix")
self.label_31 = QtWidgets.QLabel(self.groupBox_13)
self.label_31.setGeometry(QtCore.QRect(230, 20, 121, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_31.setFont(font)
self.label_31.setObjectName("label_31")
self.label_32 = QtWidgets.QLabel(self.groupBox_13)
self.label_32.setGeometry(QtCore.QRect(500, 10, 61, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_32.setFont(font)
self.label_32.setObjectName("label_32")
self.line_10 = QtWidgets.QFrame(self.groupBox_13)
self.line_10.setGeometry(QtCore.QRect(20, 50, 81, 16))
self.line_10.setFrameShape(QtWidgets.QFrame.HLine)
self.line_10.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_10.setObjectName("line_10")
self.line_13 = QtWidgets.QFrame(self.groupBox_13)
self.line_13.setGeometry(QtCore.QRect(20, 40, 81, 16))
self.line_13.setFrameShape(QtWidgets.QFrame.HLine)
self.line_13.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_13.setObjectName("line_13")
self.line_11 = QtWidgets.QFrame(self.groupBox_13)
self.line_11.setGeometry(QtCore.QRect(20, 30, 81, 16))
self.line_11.setFrameShape(QtWidgets.QFrame.HLine)
self.line_11.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_11.setObjectName("line_11")
self.line_14 = QtWidgets.QFrame(self.groupBox_13)
self.line_14.setGeometry(QtCore.QRect(20, 20, 81, 16))
self.line_14.setFrameShape(QtWidgets.QFrame.HLine)
self.line_14.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_14.setObjectName("line_14")
self.line_15 = QtWidgets.QFrame(self.groupBox_13)
self.line_15.setGeometry(QtCore.QRect(20, 60, 81, 16))
self.line_15.setFrameShape(QtWidgets.QFrame.HLine)
self.line_15.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_15.setObjectName("line_15")
self.toolBox_setting.addItem(self.page_pazpr, icon5, "")
self.page_main_settings = QtWidgets.QWidget()
self.page_main_settings.setGeometry(QtCore.QRect(0, 0, 691, 281))
self.page_main_settings.setObjectName("page_main_settings")
self.groupBox_2 = QtWidgets.QGroupBox(self.page_main_settings)
self.groupBox_2.setGeometry(QtCore.QRect(20, 10, 651, 111))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox_2.setFont(font)
self.groupBox_2.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_2.setObjectName("groupBox_2")
self.lineEdit_setting_mainfolder = QtWidgets.QLineEdit(self.groupBox_2)
self.lineEdit_setting_mainfolder.setGeometry(QtCore.QRect(90, 30, 531, 20))
self.lineEdit_setting_mainfolder.setObjectName("lineEdit_setting_mainfolder")
self.toolButton_select_mainfolder = QtWidgets.QToolButton(self.groupBox_2)
self.toolButton_select_mainfolder.setGeometry(QtCore.QRect(30, 30, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_mainfolder.setFont(font)
self.toolButton_select_mainfolder.setObjectName("toolButton_select_mainfolder")
self.label_7 = QtWidgets.QLabel(self.groupBox_2)
self.label_7.setGeometry(QtCore.QRect(20, 60, 601, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.label_8 = QtWidgets.QLabel(self.groupBox_2)
self.label_8.setGeometry(QtCore.QRect(20, 80, 601, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_8.setFont(font)
self.label_8.setObjectName("label_8")
self.groupBox = QtWidgets.QGroupBox(self.page_main_settings)
self.groupBox.setGeometry(QtCore.QRect(20, 140, 651, 91))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox.setFont(font)
self.groupBox.setToolTip("")
self.groupBox.setStatusTip("")
self.groupBox.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox.setFlat(False)
self.groupBox.setObjectName("groupBox")
self.lineEdit_setting_templatefolder = QtWidgets.QLineEdit(self.groupBox)
self.lineEdit_setting_templatefolder.setGeometry(QtCore.QRect(90, 30, 531, 20))
self.lineEdit_setting_templatefolder.setObjectName("lineEdit_setting_templatefolder")
self.toolButton_select_templatefolder = QtWidgets.QToolButton(self.groupBox)
self.toolButton_select_templatefolder.setGeometry(QtCore.QRect(30, 30, 31, 19))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.toolButton_select_templatefolder.setFont(font)
self.toolButton_select_templatefolder.setObjectName("toolButton_select_templatefolder")
self.label_6 = QtWidgets.QLabel(self.groupBox)
self.label_6.setGeometry(QtCore.QRect(70, 60, 601, 16))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.checkBox_setting_cleancomments = QtWidgets.QCheckBox(self.page_main_settings)
self.checkBox_setting_cleancomments.setGeometry(QtCore.QRect(70, 240, 541, 41))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(True)
font.setWeight(75)
self.checkBox_setting_cleancomments.setFont(font)
self.checkBox_setting_cleancomments.setIconSize(QtCore.QSize(16, 16))
self.checkBox_setting_cleancomments.setCheckable(True)
self.checkBox_setting_cleancomments.setChecked(True)
self.checkBox_setting_cleancomments.setObjectName("checkBox_setting_cleancomments")
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap("ico/application_x_desktop.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolBox_setting.addItem(self.page_main_settings, icon7, "")
self.frame = QtWidgets.QFrame(self.tab_setting)
self.frame.setGeometry(QtCore.QRect(0, 60, 731, 431))
self.frame.setStyleSheet("background-color: rgb(240, 240, 240);")
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.btn_save_setting = QtWidgets.QPushButton(self.tab_setting)
self.btn_save_setting.setGeometry(QtCore.QRect(510, 10, 201, 41))
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap("ico/Hopstarter-Rounded-Square-Button-Ok.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_save_setting.setIcon(icon8)
self.btn_save_setting.setIconSize(QtCore.QSize(25, 25))
self.btn_save_setting.setAutoDefault(False)
self.btn_save_setting.setDefault(False)
self.btn_save_setting.setFlat(False)
self.btn_save_setting.setObjectName("btn_save_setting")
self.label_34 = QtWidgets.QLabel(self.tab_setting)
self.label_34.setGeometry(QtCore.QRect(20, 20, 21, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_34.setFont(font)
self.label_34.setObjectName("label_34")
self.label_version = QtWidgets.QLabel(self.tab_setting)
self.label_version.setGeometry(QtCore.QRect(34, 18, 71, 16))
font = QtGui.QFont()
font.setFamily("Comic Sans MS")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.label_version.setFont(font)
self.label_version.setStyleSheet("color: rgb(170, 0, 0);")
self.label_version.setObjectName("label_version")
self.frame.raise_()
self.toolBox_setting.raise_()
self.btn_save_setting.raise_()
self.label_34.raise_()
self.label_version.raise_()
self.tabWidget.addTab(self.tab_setting, icon7, "")
self.horizontalLayout_2.addWidget(self.tabWidget)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(6)
self.toolBox_setting.setCurrentIndex(3)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all user-visible (Russian) strings on the UI widgets.

    NOTE(review): this looks like pyuic-generated code (Qt Designer) — confirm
    before hand-editing; regeneration from the .ui file would overwrite changes.
    All string literals are runtime UI text and must not be altered here.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "Создатель ЧПУ программ (АртФасадЮг)"))
    # --- Victoria panels tab ---
    self.label.setText(_translate("MainWindow", "филёнки"))
    self.label_2.setText(_translate("MainWindow", "ВИКТОРИЯ"))
    self.groupBox_ots_vik.setTitle(_translate("MainWindow", "Отступ"))
    self.label_ots_vik_X.setText(_translate("MainWindow", "X"))
    self.label_ots_vik_Y.setText(_translate("MainWindow", "Y"))
    self.btn_ots_vik_40.setText(_translate("MainWindow", "40m"))
    self.btn_ots_vik_41.setText(_translate("MainWindow", "41e"))
    self.btn_ots_vik_42.setText(_translate("MainWindow", "42"))
    self.btn_ots_vik_43.setText(_translate("MainWindow", "43t"))
    self.btn_ots_vik_44.setText(_translate("MainWindow", "44c"))
    self.btn_ots_vik_45.setText(_translate("MainWindow", "45p"))
    self.btn_ots_vik_46.setText(_translate("MainWindow", "46s"))
    self.btn_create_vik.setText(_translate("MainWindow", "Создать программы"))
    item = self.tableWidget_vik.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "X"))
    item = self.tableWidget_vik.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Y"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_vik), _translate("MainWindow", "Виктория (ф)"))
    # --- Afina panels tab ---
    self.label_3.setText(_translate("MainWindow", "АФИНЫ"))
    self.label_4.setText(_translate("MainWindow", "филёнки"))
    self.groupBox_ots_afin.setTitle(_translate("MainWindow", "Отступ"))
    self.label_ots_afin_X.setText(_translate("MainWindow", "X"))
    self.label_ots_afin_Y.setText(_translate("MainWindow", "Y"))
    self.btn_ots_afin_40.setText(_translate("MainWindow", "40m"))
    self.btn_ots_afin_41.setText(_translate("MainWindow", "41e"))
    self.btn_ots_afin_42.setText(_translate("MainWindow", "42"))
    self.btn_ots_afin_43.setText(_translate("MainWindow", "43t"))
    self.btn_ots_afin_44.setText(_translate("MainWindow", "44c"))
    self.btn_ots_afin_45.setText(_translate("MainWindow", "45p"))
    self.btn_ots_afin_46.setText(_translate("MainWindow", "46s"))
    self.btn_create_afin.setText(_translate("MainWindow", "Создать программы"))
    self.groupBox_6.setTitle(_translate("MainWindow", "Стиль"))
    self.rBtn_afin_arc.setText(_translate("MainWindow", "с Арками"))
    self.rBtn_afin_pr.setText(_translate("MainWindow", "прямые"))
    item = self.tableWidget_afin.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "X"))
    item = self.tableWidget_afin.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Y"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_afin), _translate("MainWindow", "Афины (ф)"))
    # --- Shields tab ---
    self.label_shit.setText(_translate("MainWindow", "Щиты"))
    item = self.tableWidget_shit.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "X"))
    item = self.tableWidget_shit.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Y"))
    self.groupBox_ots_shit.setTitle(_translate("MainWindow", "Отступ"))
    self.label_ots_shit_X.setText(_translate("MainWindow", "X"))
    self.label_ots_shit_Y.setText(_translate("MainWindow", "Y"))
    self.btn_ots_shit_42.setText(_translate("MainWindow", "Стандарт\n"
"42"))
    self.btn_create_shit.setText(_translate("MainWindow", "Создать программы"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_shit), _translate("MainWindow", "Щиты"))
    # --- Column grooves tab ---
    self.groupBox_paz.setTitle(_translate("MainWindow", "Примечание"))
    self.label_33.setText(_translate("MainWindow", "<html><head/><body><p>Если размер Y-ка меньше или равен</p><p>62мм, то делаются программы на</p><p>2 паза (под узкие колонны).</p><p>Во всех остальных случаях</p><p>делаются программы на 3 паза </p></body></html>"))
    self.btn_create_paz.setText(_translate("MainWindow", "Создать программы"))
    item = self.tableWidget_paz.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "X"))
    item = self.tableWidget_paz.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Y"))
    self.label_paz.setText(_translate("MainWindow", "Пазы (для колонн)"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_paz_kol), _translate("MainWindow", "Пазы (колонны)"))
    # --- Straight grooves tab ---
    self.btn_create_pazpr.setText(_translate("MainWindow", "Создать программы"))
    self.groupBox_ots_pazpr.setTitle(_translate("MainWindow", "Отступ"))
    self.label_ots_papr_X_left.setText(_translate("MainWindow", "<-- X слева"))
    self.btn_ots_papr_2030.setToolTip(_translate("MainWindow", "<html><head/><body><p>Идеально для колонн у которых с одной стороны колёвка, а с другой стороны просто обрезано прямо.</p></body></html>"))
    self.btn_ots_papr_2030.setText(_translate("MainWindow", "30 / 20"))
    self.label_ots_pazpr_X_right.setText(_translate("MainWindow", "X справа -->"))
    self.rBtn_pazpr_3paz.setText(_translate("MainWindow", "3 паза"))
    self.rBtn_pazpr_2paz.setText(_translate("MainWindow", "2 паза"))
    self.rBtn_pazpr_5paz.setText(_translate("MainWindow", "5 пазов"))
    item = self.tableWidget_pazpr.horizontalHeaderItem(0)
    item.setText(_translate("MainWindow", "X"))
    item = self.tableWidget_pazpr.horizontalHeaderItem(1)
    item.setText(_translate("MainWindow", "Y"))
    self.label_pazpr.setText(_translate("MainWindow", "Пазы (прямые)"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_paz_pr), _translate("MainWindow", "Пазы прямые"))
    # --- Proportions tab ---
    self.label_9.setText(_translate("MainWindow", "Быстрый просчёт пропорций"))
    self.groupBox_3.setTitle(_translate("MainWindow", "Исходные размеры заготовки"))
    self.label_24.setText(_translate("MainWindow", "Z"))
    self.label_25.setText(_translate("MainWindow", "Y"))
    self.label_26.setText(_translate("MainWindow", "X "))
    self.btn_prop.setText(_translate("MainWindow", "Рассчитать"))
    self.groupBox_4.setTitle(_translate("MainWindow", "Расчитанные пропорции"))
    self.label_21.setText(_translate("MainWindow", "Y"))
    self.label_22.setText(_translate("MainWindow", "X "))
    self.label_23.setText(_translate("MainWindow", "Z"))
    self.groupBox_5.setTitle(_translate("MainWindow", "Размеры заготовки"))
    self.label_10.setText(_translate("MainWindow", "Добавление"))
    self.label_11.setText(_translate("MainWindow", "по X (в мм)"))
    self.label_12.setText(_translate("MainWindow", "по Y (в мм)"))
    self.spinBox_prop_zag_X.setSpecialValueText(_translate("MainWindow", "20"))
    self.spinBox_prop_zag_Y.setSpecialValueText(_translate("MainWindow", "20"))
    self.label_13.setText(_translate("MainWindow", "Заготовка (в мм)"))
    self.label_14.setText(_translate("MainWindow", "X "))
    self.label_15.setText(_translate("MainWindow", "Y"))
    self.label_16.setText(_translate("MainWindow", "Z"))
    self.label_17.setText(_translate("MainWindow", "x"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_prop), _translate("MainWindow", "Пропорции"))
    # --- Settings tab: shields page ---
    self.groupBox_7.setTitle(_translate("MainWindow", "Шаблон для создания программ Щитов"))
    self.lineEdit_shield_template.setText(_translate("MainWindow", "shitok-chis"))
    self.toolButton_select_shield_template.setText(_translate("MainWindow", "..."))
    self.groupBox_8.setTitle(_translate("MainWindow", "Префикс для названий программ Щитов"))
    self.lineEdit_shield_prefix.setText(_translate("MainWindow", "sh"))
    self.toolBox_setting.setItemText(self.toolBox_setting.indexOf(self.page_shields), _translate("MainWindow", "Настройки для вкладки Щиты"))
    # --- Settings tab: column grooves page ---
    self.groupBox_9.setTitle(_translate("MainWindow", "Под 2 полосы"))
    self.lineEdit_paz2_template.setText(_translate("MainWindow", "paz2c"))
    self.toolButton_select_paz2_template.setText(_translate("MainWindow", "..."))
    self.lineEdit_paz2_prefix.setText(_translate("MainWindow", "2p"))
    self.label_5.setText(_translate("MainWindow", "Название шаблона"))
    self.label_18.setText(_translate("MainWindow", "Префикс"))
    self.groupBox_10.setTitle(_translate("MainWindow", "Под 3 полосы"))
    self.lineEdit_paz3_template.setText(_translate("MainWindow", "paz3c"))
    self.toolButton_select_paz3_template.setText(_translate("MainWindow", "..."))
    self.lineEdit_paz3_prefix.setText(_translate("MainWindow", "3p"))
    self.label_19.setText(_translate("MainWindow", "Название шаблона"))
    self.label_20.setText(_translate("MainWindow", "Префикс"))
    self.toolBox_setting.setItemText(self.toolBox_setting.indexOf(self.page_paz), _translate("MainWindow", "Настройки для вкладки Пазы (колонны)"))
    # --- Settings tab: straight grooves page ---
    self.groupBox_11.setTitle(_translate("MainWindow", "Под 2 полосы (прямые)"))
    self.lineEdit_pazpr2_template.setText(_translate("MainWindow", "paz2s"))
    self.toolButton_select_pazpr2_template.setText(_translate("MainWindow", "..."))
    self.lineEdit_pazpr2_prefix.setText(_translate("MainWindow", "2s"))
    self.label_27.setText(_translate("MainWindow", "Название шаблона"))
    self.label_28.setText(_translate("MainWindow", "Префикс"))
    self.groupBox_12.setTitle(_translate("MainWindow", "Под 3 полосы (прямые)"))
    self.lineEdit_pazpr3_template.setText(_translate("MainWindow", "paz3s"))
    self.toolButton_select_pazpr3_template.setText(_translate("MainWindow", "..."))
    self.lineEdit_pazpr3_prefix.setText(_translate("MainWindow", "3s"))
    self.label_29.setText(_translate("MainWindow", "Название шаблона"))
    self.label_30.setText(_translate("MainWindow", "Префикс"))
    self.groupBox_13.setTitle(_translate("MainWindow", "Под 5 полос (прямые)"))
    self.lineEdit_pazpr5_template.setText(_translate("MainWindow", "paz5s"))
    self.toolButton_select_pazpr5_template.setText(_translate("MainWindow", "..."))
    self.lineEdit_pazpr5_prefix.setText(_translate("MainWindow", "5s"))
    self.label_31.setText(_translate("MainWindow", "Название шаблона"))
    self.label_32.setText(_translate("MainWindow", "Префикс"))
    self.toolBox_setting.setItemText(self.toolBox_setting.indexOf(self.page_pazpr), _translate("MainWindow", "Настройки для вкладки Пазы прямые"))
    # --- Settings tab: main settings page ---
    self.groupBox_2.setTitle(_translate("MainWindow", "Выходная КОРНЕВАЯ папка с готовыми программами"))
    self.toolButton_select_mainfolder.setToolTip(_translate("MainWindow", "Этой кнопкой можно выбрать необходимую папку"))
    self.toolButton_select_mainfolder.setText(_translate("MainWindow", "..."))
    self.label_7.setText(_translate("MainWindow", " Папка, в которой находятся другие подпапки (типа \'на 41 Виктория\' или \'на 43 Афина\') и уже в этих подпапках"))
    self.label_8.setText(_translate("MainWindow", "будут лежать готовые программы."))
    self.groupBox.setTitle(_translate("MainWindow", "Папка с шаблонами (для выбора впишите в поле путь к папке или нажмите кнопку выбора)"))
    self.toolButton_select_templatefolder.setToolTip(_translate("MainWindow", "Этой кнопкой можно выбрать необходимую папку"))
    self.toolButton_select_templatefolder.setText(_translate("MainWindow", "..."))
    self.label_6.setText(_translate("MainWindow", "Это папка, где лежат файлы шаблонов с расширением shb."))
    self.checkBox_setting_cleancomments.setText(_translate("MainWindow", "Очищать готовые программы от комментариев в скобках, если такие комментарии есть в шаблонах"))
    self.toolBox_setting.setItemText(self.toolBox_setting.indexOf(self.page_main_settings), _translate("MainWindow", "Гланые Настройки"))
    self.btn_save_setting.setToolTip(_translate("MainWindow", "Обязательно нажимайте эту кнопку после внесения изменений в настройки"))
    self.btn_save_setting.setText(_translate("MainWindow", "Сохранить изменения"))
    self.label_34.setText(_translate("MainWindow", "v."))
    self.label_version.setText(_translate("MainWindow", "TextLabel"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_setting), _translate("MainWindow", "Настройки"))
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,720
|
zuma-rus/cnc2
|
refs/heads/master
|
/paz.py
|
from template import Template
class Paz(Template):
    """Generator of groove ("paz") CNC programs for columns; extends Template.

    Two grooves are milled on narrow blanks and three on wide ones; the
    ``*_syn`` variants target the Syntec controller (``.nc`` output and a
    different file-name mask).
    """

    def __init__(self, name_template2, name_template3, path_to_template, path_to_progs,
                 prfx2, prfx3):
        """Remember both templates/prefixes; offsets are fixed at 42 mm."""
        super().__init__(name_template2, path_to_template, path_to_progs, 42, 42)
        self.mask_name_prog = '[prfx][nul][Y][X].[ext]'
        self.prfx2 = prfx2
        self.prfx3 = prfx3
        self.name_template2 = name_template2
        self.name_template3 = name_template3

    # program-variant selection
    def createProgram(self, x, y):
        """Pick the groove variant matching the blank size and build its program.

        Returns the rendered program body plus the (possibly unchanged) x, y.
        """
        if y > 500:
            # long blanks use the Syntec variants; note the 2/3-groove split
            # is on x here, not y (the blank is oriented differently)
            if x <= 62:
                body, x, y = self.create_2p_syn(x, y)
            else:
                body, x, y = self.create_3p_syn(x, y)
        else:
            if y <= 62:
                body, x, y = self.create_2p(x, y)
            else:
                body, x, y = self.create_3p(x, y)
        return body, x, y

    def _render_2p(self, x, y):
        """Shared math and template rendering for both 2-groove variants.

        Factored out of create_2p / create_2p_syn, which previously duplicated
        this computation line for line.
        """
        kolevka = 8        # moulding width, mm
        diametrFrezi = 9   # cutter diameter, mm
        # rounded to one decimal place
        meja = round((y - kolevka * 2 - diametrFrezi * 2) / 3, 1)
        k = {}
        k['[y2]'] = kolevka + meja + diametrFrezi / 2
        k['[y1]'] = y - k['[y2]']
        k['[x1]'] = y + 15
        k['[x1e]'] = x - k['[x1]']
        body = self.readTemplate(self.name)
        body = self.fillingTemplate(body, k)
        return body, x, y

    # 2-groove program (Syntec controller)
    def create_2p_syn(self, x, y):
        """Compute and render the 2-groove program with a .nc extension."""
        self.subfolder = 'пазы 2п\\'
        self.file_extension_prog = 'nc'
        self.name = self.name_template2
        self.prfx = self.prfx2
        return self._render_2p(x, y)

    # 3-groove program (Syntec controller)
    def create_3p_syn(self, x, y):
        """Compute and render the 3-groove Syntec program (hard-coded template)."""
        self.subfolder = 'пазы 3п\\'
        self.file_extension_prog = 'nc'
        self.mask_name_prog = '[prfx][nul][X]x[Y].[ext]'
        self.name = "paz3p_syn.shb"
        self.prfx = "3p_syn_"
        seredX = x / 2
        kolevka = 8
        diametrFrezi = 9
        meja = round((x - kolevka * 2 - diametrFrezi * 3) / 4, 1) - 1
        # rounded to one decimal, e.g. (75 - 16 - 9*3) / 4 - 1 = 7
        mejdu = diametrFrezi + meja
        k = {}
        # compute plain numbers first, then format: the placeholder dict must
        # carry values with exactly 3 decimal places for Syntec
        x1 = seredX - mejdu
        x2 = seredX + mejdu
        y1 = x + 25  # found empirically (originally fixed at 98)
        y1e = y - y1
        y2 = x + 37  # found empirically (originally fixed at 110)
        y2e = y - y2
        k['[x1]'] = "{0:.3f}".format(x1)
        k['[x2]'] = "{0:.3f}".format(x2)
        k['[y1]'] = "{0:.3f}".format(y1)
        k['[y1e]'] = "{0:.3f}".format(y1e)
        k['[y2]'] = "{0:.3f}".format(y2)
        k['[y2e]'] = "{0:.3f}".format(y2e)
        k['[srX]'] = "{0:.3f}".format(seredX)
        body = self.readTemplate(self.name)
        body = self.fillingTemplate(body, k)
        return body, x, y

    # standard 2-groove program
    def create_2p(self, x, y):
        """Compute and render the standard 2-groove program."""
        self.subfolder = 'пазы 2п\\'
        self.name = self.name_template2
        self.prfx = self.prfx2
        return self._render_2p(x, y)

    # standard 3-groove program
    def create_3p(self, x, y):
        """Compute and render the standard 3-groove program."""
        self.subfolder = 'пазы 3п\\'
        self.name = self.name_template3
        self.prfx = self.prfx3
        seredY = y / 2
        kolevka = 8
        diametrFrezi = 9
        meja = round((y - kolevka * 2 - diametrFrezi * 3) / 4, 1) - 1
        # rounded to one decimal, e.g. (75 - 16 - 9*3) / 4 - 1 = 7
        mejdu = diametrFrezi + meja
        k = {}
        k['[y1]'] = seredY - mejdu
        k['[y2]'] = seredY + mejdu
        k['[x1]'] = y + 25  # found empirically (originally fixed at 98)
        k['[x1e]'] = x - k['[x1]']
        k['[x2]'] = y + 37  # found empirically (originally fixed at 110)
        k['[x2e]'] = x - k['[x2]']
        k['[srY]'] = seredY
        body = self.readTemplate(self.name)
        body = self.fillingTemplate(body, k)
        return body, x, y
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,721
|
zuma-rus/cnc2
|
refs/heads/master
|
/main.py
|
# Импорт необходимых библиотек (классов)
import sys
import configparser
import os
# Импортируем наш интерфейс и подключаем интерфейсные файлы
from cnc_ui import *
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QFileDialog, QTableWidgetItem
# интерфейсные файлы
from my_tables import My_tables
from messages import Mess
from vik import Vik
from afin import Afin
from shield import Shield
from paz import Paz
from pazpr import PazPr
# Main window class wiring the Designer-built UI to the program generators.
class MyWin(QtWidgets.QMainWindow):
    """Main application window for the CNC-program builder (PyQt5 Designer UI)."""

    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        # size tables, one per tab: key -> [table widget, rows, columns]
        self.tb = My_tables({
            'vik': [self.ui.tableWidget_vik, 12, 2],
            'afin': [self.ui.tableWidget_afin, 12, 2],
            'shit': [self.ui.tableWidget_shit, 12, 2],
            'paz': [self.ui.tableWidget_paz, 12, 2],
            'pazpr': [self.ui.tableWidget_pazpr, 12, 2]
        })
        # restore persisted settings and the previous session
        initSettings(self.ui)
        self.tf = self.ui.lineEdit_setting_templatefolder.text()  # template folder
        self.mf = self.ui.lineEdit_setting_mainfolder.text()      # output root folder
        # "create programs" buttons, one per tab
        self.ui.btn_create_vik.clicked.connect(self.createVictories)
        self.ui.btn_create_afin.clicked.connect(self.createAfines)
        self.ui.btn_create_shit.clicked.connect(self.createShields)
        self.ui.btn_create_paz.clicked.connect(self.createPaz)
        self.ui.btn_create_pazpr.clicked.connect(self.createPazPr)
        # save-settings button
        self.ui.btn_save_setting.clicked.connect(self.savSet)
        # quick-offset buttons for straight grooves and shields
        self.ui.btn_ots_papr_2030.clicked.connect(self.BtnOtsPazpr)
        self.ui.btn_ots_shit_42.clicked.connect(self.BtnOtsShit)
        # quick-offset buttons for Victoria and Afina tabs; the widgets are
        # named btn_ots_vik_40..46 / btn_ots_afin_40..46, so wire them in a
        # loop (the default argument pins each value at definition time)
        for val in range(40, 47):
            getattr(self.ui, 'btn_ots_vik_%d' % val).clicked.connect(
                lambda checked=False, v=val: self.BtnOtsVik(v))
            getattr(self.ui, 'btn_ots_afin_%d' % val).clicked.connect(
                lambda checked=False, v=val: self.BtnOtsAfin(v))
        # Afina template style toggles (with arcs / straight)
        self.ui.rBtn_afin_arc.clicked.connect(lambda: self.chekerAfin(1))
        self.ui.rBtn_afin_pr.clicked.connect(lambda: self.chekerAfin(0))
        # folder pickers
        self.ui.toolButton_select_mainfolder.clicked.connect(
            lambda: self.selectFileFolder('folder', self.ui.lineEdit_setting_mainfolder))
        self.ui.toolButton_select_templatefolder.clicked.connect(lambda: self.selectFileFolder(
            'folder', self.ui.lineEdit_setting_templatefolder))
        # template file pickers
        self.ui.toolButton_select_shield_template.clicked.connect(lambda: self.selectFileFolder(
            'file', self.ui.lineEdit_shield_template))
        self.ui.toolButton_select_paz2_template.clicked.connect(lambda: self.selectFileFolder(
            'file', self.ui.lineEdit_paz2_template))
        self.ui.toolButton_select_paz3_template.clicked.connect(lambda: self.selectFileFolder(
            'file', self.ui.lineEdit_paz3_template))
        self.ui.toolButton_select_pazpr2_template.clicked.connect(lambda: self.selectFileFolder(
            'file', self.ui.lineEdit_pazpr2_template))
        self.ui.toolButton_select_pazpr3_template.clicked.connect(lambda: self.selectFileFolder(
            'file', self.ui.lineEdit_pazpr3_template))
        self.ui.toolButton_select_pazpr5_template.clicked.connect(lambda: self.selectFileFolder(
            'file', self.ui.lineEdit_pazpr5_template))
        # start with empty size tables
        self.tb.cleanAll()

    def createVictories(self):
        """Generate Victoria panel programs from the 'vik' size table."""
        self.tb.sorting('vik')
        name = 'vik.shb'
        ot_x = self.ui.spinBox_ots_vik_X.value()
        ot_y = self.ui.spinBox_ots_vik_Y.value()
        sizes = self.tb.getTable('vik')  # renamed from 'list': don't shadow the builtin
        vik = Vik(name, self.tf, self.mf, ot_x, ot_y)
        vik.createProgs(sizes)
        self.statusBar().showMessage('Программы филёнок Виктории созданы!')
        mes = Mess()
        mes.MesProgComplete('Программы готовы', 'Программы филёнок Виктории созданы!')

    def createAfines(self):
        """Generate Afina panel programs from the 'afin' size table."""
        self.tb.sorting('afin')
        name = 'afina.shb'
        ot_x = self.ui.spinBox_ots_afin_X.value()
        ot_y = self.ui.spinBox_ots_afin_Y.value()
        sizes = self.tb.getTable('afin')
        pr = self.ui.rBtn_afin_pr.isChecked()  # straight (no-arc) template selected?
        afin = Afin(name, self.tf, self.mf, ot_x, ot_y, pr)
        afin.createProgs(sizes)
        self.statusBar().showMessage('Программы филёнок Афин созданы!')
        mes = Mess()
        mes.MesProgComplete('Программы готовы', 'Программы филёнок Афин созданы!')

    def createShields(self):
        """Generate shield programs from the 'shit' size table."""
        self.tb.sorting('shit')
        name = self.ui.lineEdit_shield_template.text()
        prfx = self.ui.lineEdit_shield_prefix.text()
        ot_x = self.ui.spinBox_ots_shit_X.value()
        ot_y = self.ui.spinBox_ots_shit_Y.value()
        sizes = self.tb.getTable('shit')
        shit = Shield(name, self.tf, self.mf, ot_x, ot_y, prfx)
        shit.createProgs(sizes)
        self.statusBar().showMessage('Программы Щитов созданы!')
        mes = Mess()
        mes.MesProgComplete('Программы готовы', 'Программы Щитов созданы!')

    def createPaz(self):
        """Generate column-groove programs from the 'paz' size table."""
        self.tb.sorting('paz')
        name2 = self.ui.lineEdit_paz2_template.text()
        prfx2 = self.ui.lineEdit_paz2_prefix.text()
        name3 = self.ui.lineEdit_paz3_template.text()
        prfx3 = self.ui.lineEdit_paz3_prefix.text()
        sizes = self.tb.getTable('paz')
        paz = Paz(name2, name3, self.tf, self.mf, prfx2, prfx3)
        paz.createProgs(sizes)
        self.statusBar().showMessage('Программы Пазов созданы!')
        mes = Mess()
        mes.MesProgComplete('Программы готовы', 'Программы Пазов созданы!')

    def createPazPr(self):
        """Generate straight-groove programs from the 'pazpr' size table."""
        self.tb.sorting('pazpr')
        ot_left = self.ui.spinBox_ots_pazpr_X_left.value()
        ot_right = self.ui.spinBox_ots_pazpr_X_right.value()
        name2 = self.ui.lineEdit_pazpr2_template.text()
        prfx2 = self.ui.lineEdit_pazpr2_prefix.text()
        name3 = self.ui.lineEdit_pazpr3_template.text()
        prfx3 = self.ui.lineEdit_pazpr3_prefix.text()
        name5 = self.ui.lineEdit_pazpr5_template.text()
        prfx5 = self.ui.lineEdit_pazpr5_prefix.text()
        sizes = self.tb.getTable('pazpr')
        # how many grooves to mill, per the selected radio button
        if self.ui.rBtn_pazpr_2paz.isChecked():
            pazpr = PazPr(name2, self.tf, self.mf, prfx2, ot_left, ot_right, 2)
        elif self.ui.rBtn_pazpr_3paz.isChecked():
            pazpr = PazPr(name3, self.tf, self.mf, prfx3, ot_left, ot_right, 3)
        else:
            pazpr = PazPr(name5, self.tf, self.mf, prfx5, ot_left, ot_right, 5)
        pazpr.createProgs(sizes)
        self.statusBar().showMessage('Программы Прямых Пазов созданы!')
        mes = Mess()
        mes.MesProgComplete('Программы готовы', 'Программы Прямых Пазов созданы!')

    def selectFileFolder(self, param, lineEdit):
        """Show a file ('file') or directory ('folder') dialog and put the choice in *lineEdit*."""
        if param == 'file':
            # template files live in the configured template folder
            startFolder = self.ui.lineEdit_setting_templatefolder.text()
            fname = QFileDialog.getOpenFileName(
                self, 'Выберите файл', startFolder, '*.shb')[0]
            fname = os.path.basename(fname)
        else:
            fname = QFileDialog.getExistingDirectory(self, 'Выберите папку')
            if fname:
                fname += '/'
        # empty string means the dialog was cancelled — leave the widget alone
        if fname:
            lineEdit.setText(fname)

    def savSet(self):
        """Persist the settings tab via the module-level saveSettings()."""
        saveSettings(self.ui)
        self.statusBar().showMessage('Настройки сохранены!')

    def chekerAfin(self, value):
        """Swap the Afina preview picture: 1 = arc style, 0 = straight style."""
        if value == 1:
            self.ui.label_pix_afin.setPixmap(QtGui.QPixmap("pix/afin_arc.png"))
        else:
            self.ui.label_pix_afin.setPixmap(QtGui.QPixmap("pix/afin_pr.png"))

    def BtnOtsPazpr(self):
        """Quick preset: 30 mm left / 20 mm right offsets for straight grooves."""
        self.ui.spinBox_ots_pazpr_X_left.setValue(30)
        self.ui.spinBox_ots_pazpr_X_right.setValue(20)

    def BtnOtsShit(self):
        """Quick preset: standard 42 mm offsets for shields."""
        self.ui.spinBox_ots_shit_X.setValue(42)
        self.ui.spinBox_ots_shit_Y.setValue(42)

    def BtnOtsVik(self, value):
        """Set both Victoria offsets to *value* (quick-offset buttons)."""
        self.ui.spinBox_ots_vik_X.setValue(value)
        self.ui.spinBox_ots_vik_Y.setValue(value)

    def BtnOtsAfin(self, value):
        """Set both Afina offsets to *value* (quick-offset buttons)."""
        self.ui.spinBox_ots_afin_X.setValue(value)
        self.ui.spinBox_ots_afin_Y.setValue(value)

    def closeEvent(self, event):
        """Persist session and settings when the window is closed."""
        saveSession(self.ui)
        saveSettings(self.ui)
        print("Выход")
# Load and initialize program settings plus the previous session state.
def initSettings(ui):
    """Populate *ui* from settings.ini and work.ini (module-level parsers)."""
    loadSettings(ui)
    loadSession(ui)
# Apply the values stored in settings.ini to the settings-tab widgets.
def loadSettings(ui):
    """Populate the settings widgets from the module-level ``conf`` parser."""
    # application version (assigned near the bottom of this file)
    ui.label_version.setText(str(versionApp))
    # whether finished programs get stripped of bracketed comments
    ui.checkBox_setting_cleancomments.setChecked(
        str(conf.get("main", "clean_comments")) == 'True')
    # every remaining setting is a plain text field: map each line-edit
    # widget to the (section, option) pair it mirrors, in display order
    text_fields = (
        (ui.lineEdit_setting_templatefolder, "main", "templates_folder"),
        (ui.lineEdit_setting_mainfolder, "main", "main_prog_folder"),
        (ui.lineEdit_shield_template, "shields", "shield_template"),
        (ui.lineEdit_shield_prefix, "shields", "shield_prefix"),
        (ui.lineEdit_paz2_template, "paz", "paz2_template"),
        (ui.lineEdit_paz2_prefix, "paz", "paz2_prefix"),
        (ui.lineEdit_paz3_template, "paz", "paz3_template"),
        (ui.lineEdit_paz3_prefix, "paz", "paz3_prefix"),
        (ui.lineEdit_pazpr2_template, "pazpr", "pazpr2_template"),
        (ui.lineEdit_pazpr2_prefix, "pazpr", "pazpr2_prefix"),
        (ui.lineEdit_pazpr3_template, "pazpr", "pazpr3_template"),
        (ui.lineEdit_pazpr3_prefix, "pazpr", "pazpr3_prefix"),
        (ui.lineEdit_pazpr5_template, "pazpr", "pazpr5_template"),
        (ui.lineEdit_pazpr5_prefix, "pazpr", "pazpr5_prefix"),
    )
    for widget, section, option in text_fields:
        widget.setText(str(conf.get(section, option)))
# Write the settings-tab widget values back to settings.ini.
def saveSettings(ui):
    """Store the settings widgets into the module-level ``conf`` parser and save it."""
    # checkbox: clean bracketed comments out of finished programs
    conf.set("main", "clean_comments",
             str(ui.checkBox_setting_cleancomments.isChecked()))
    # every remaining setting is a plain text field: (section, option, widget)
    text_fields = (
        ("main", "main_prog_folder", ui.lineEdit_setting_mainfolder),
        ("main", "templates_folder", ui.lineEdit_setting_templatefolder),
        ("shields", "shield_template", ui.lineEdit_shield_template),
        ("shields", "shield_prefix", ui.lineEdit_shield_prefix),
        ("paz", "paz2_template", ui.lineEdit_paz2_template),
        ("paz", "paz2_prefix", ui.lineEdit_paz2_prefix),
        ("paz", "paz3_template", ui.lineEdit_paz3_template),
        ("paz", "paz3_prefix", ui.lineEdit_paz3_prefix),
        ("pazpr", "pazpr2_template", ui.lineEdit_pazpr2_template),
        ("pazpr", "pazpr2_prefix", ui.lineEdit_pazpr2_prefix),
        ("pazpr", "pazpr3_template", ui.lineEdit_pazpr3_template),
        ("pazpr", "pazpr3_prefix", ui.lineEdit_pazpr3_prefix),
        ("pazpr", "pazpr5_template", ui.lineEdit_pazpr5_template),
        ("pazpr", "pazpr5_prefix", ui.lineEdit_pazpr5_prefix),
    )
    for section, option, widget in text_fields:
        conf.set(section, option, widget.text())
    # persist to disk — required for the changes to survive a restart
    with open("settings.ini", "w") as config:
        conf.write(config)
# Restore the last working session (widget state) from work.ini.
def loadSession(ui):
    """Apply the state stored in the module-level ``work`` parser to the UI."""
    # last active tab
    ui.tabWidget.setCurrentIndex(int(work.get("main", "tab")))
    # Victoria offsets
    ui.spinBox_ots_vik_X.setValue(int(work.get("viktory", "otstup_x")))
    ui.spinBox_ots_vik_Y.setValue(int(work.get("viktory", "otstup_y")))
    # Afina offsets and template style (arc vs straight)
    ui.spinBox_ots_afin_X.setValue(int(work.get("afina", "otstup_x")))
    ui.spinBox_ots_afin_Y.setValue(int(work.get("afina", "otstup_y")))
    arc_style = str(work.get("afina", "style")) == 'arc'
    if arc_style:
        ui.rBtn_afin_arc.setChecked(True)
        ui.label_pix_afin.setPixmap(QtGui.QPixmap("pix/afin_arc.png"))
    else:
        ui.rBtn_afin_pr.setChecked(True)
        ui.label_pix_afin.setPixmap(QtGui.QPixmap("pix/afin_pr.png"))
    # shield offsets
    ui.spinBox_ots_shit_X.setValue(int(work.get("shields", "otstup_x")))
    ui.spinBox_ots_shit_Y.setValue(int(work.get("shields", "otstup_y")))
    # (no session state is stored for the column-grooves tab)
    # straight-groove offsets and groove count
    ui.spinBox_ots_pazpr_X_left.setValue(
        int(work.get("pazpr", "otstup_x_left")))
    ui.spinBox_ots_pazpr_X_right.setValue(
        int(work.get("pazpr", "otstup_x_right")))
    kolich = int(work.get("pazpr", "kolich"))
    if kolich == 2:
        ui.rBtn_pazpr_2paz.setChecked(True)
    elif kolich == 3:
        ui.rBtn_pazpr_3paz.setChecked(True)
    else:
        ui.rBtn_pazpr_5paz.setChecked(True)
    # proportions tab: blank-size additions
    ui.spinBox_prop_zag_X.setValue(int(work.get("prop", "otstup_x_zag")))
    ui.spinBox_prop_zag_Y.setValue(int(work.get("prop", "otstup_y_zag")))
# Persist the current session (widget state) to work.ini.
def saveSession(ui):
    """Store the current widget state into the module-level ``work`` parser and save it.

    All values are converted to str() explicitly: configparser option values
    are specified to be strings, and the original code passed ints/bools,
    which only happens to work because RawConfigParser skips validation.
    """
    work.set("main", "tab", str(ui.tabWidget.currentIndex()))  # active tab
    # Victoria offsets
    work.set("viktory", "otstup_x", str(ui.spinBox_ots_vik_X.value()))
    work.set("viktory", "otstup_y", str(ui.spinBox_ots_vik_Y.value()))
    # Afina offsets and template style
    work.set("afina", "otstup_x", str(ui.spinBox_ots_afin_X.value()))
    work.set("afina", "otstup_y", str(ui.spinBox_ots_afin_Y.value()))
    if ui.rBtn_afin_arc.isChecked():
        work.set("afina", "style", "arc")
    else:
        work.set("afina", "style", "pr")
    # shield offsets
    work.set("shields", "otstup_x", str(ui.spinBox_ots_shit_X.value()))
    work.set("shields", "otstup_y", str(ui.spinBox_ots_shit_Y.value()))
    # (no session state is stored for the column-grooves tab)
    # straight-groove offsets and groove count
    work.set("pazpr", "otstup_x_left", str(ui.spinBox_ots_pazpr_X_left.value()))
    work.set("pazpr", "otstup_x_right", str(ui.spinBox_ots_pazpr_X_right.value()))
    if ui.rBtn_pazpr_2paz.isChecked():
        work.set("pazpr", "kolich", "2")
    elif ui.rBtn_pazpr_3paz.isChecked():
        work.set("pazpr", "kolich", "3")
    else:
        work.set("pazpr", "kolich", "5")
    # proportions tab: blank-size additions
    work.set("prop", "otstup_x_zag", str(ui.spinBox_prop_zag_X.value()))
    work.set("prop", "otstup_y_zag", str(ui.spinBox_prop_zag_Y.value()))
    # persist to disk — required for the changes to survive a restart
    with open("work.ini", "w") as config:
        work.write(config)
# Script entry point: build the two INI parsers (read as module-level globals
# by loadSettings/saveSettings/loadSession/saveSession above), then start Qt.
if __name__ == "__main__":
    versionApp = "2.1"  # shown on the settings tab (see loadSettings)
    conf = configparser.RawConfigParser()  # application settings
    conf.read("settings.ini")
    work = configparser.RawConfigParser()  # last-session state
    work.read("work.ini")
    app = QtWidgets.QApplication(sys.argv)
    myapp = MyWin()
    myapp.show()
    sys.exit(app.exec_())
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,722
|
zuma-rus/cnc2
|
refs/heads/master
|
/afin.py
|
from template import Template
class Afin(Template):
    """Special class for the "Afina" door panels; inherits from Template.

    Picks one of several .shb CNC templates (oval, straight, half,
    rotated, bottom-only) depending on the panel dimensions, computes the
    placeholder coordinates and returns the filled-in program body.
    """

    def __init__(self, name_template, path_to_template, path_to_progs, ot_x, ot_y, pr):
        super().__init__(name_template, path_to_template, path_to_progs, ot_x, ot_y)
        self.pref_name_folder = 'афин'  # output folder prefix (runtime value — kept as-is)
        self.pr = pr  # True forces the straight ("pr") template regardless of size
        self.postfix()
        self.prfx = 'a'  # program-name prefix; re-selected in createProgram()

    # Main routine where the placeholder values are computed.
    def createProgram(self, x, y):
        self.prfx = 'a'
        self.name = 'afina.shb'
        k = {}  # placeholder -> value map substituted into the template
        k['[x1]'] = self.ot_x
        k['[x2]'] = x - self.ot_x
        k['[y1]'] = self.ot_y
        k['[y2]'] = y - self.ot_y
        # Found by trial: our semicircle is 35 mm along x and 80 mm along y.
        k['[yk1]'] = (y - 80) / 2  # start of the semicircle
        k['[yk2]'] = y - k['[yk1]']  # end of the semicircle
        k['[yk3]'] = k['[yk1]'] + 40  # center of the semicircle
        k['[xk1]'] = k['[x1]'] + 35  # x midpoint of the first semicircle
        k['[xk2]'] = k['[x2]'] - 35  # x midpoint of the second semicircle
        # -------------- forced switch to the straight template --------------
        if self.pr:
            self.name = "afina-pr.shb"  # any panel size, but without the oval (as for plain panels)
            self.prfx = "p"
        else:
            # -------------- conditional switch to the straight template ---------------
            if y < 184 or x < 191:
                self.prfx = "p"
                self.name = "afina-pr.shb"  # any panel size, but without the oval (as for plain panels)
            # -------------- switch to the half template ----------------
            if y > 900:
                self.prfx = "h"
                self.name = "afina-polov.shb"
            # -------------- switch to the rotated template ----------------
            if y > 550 and y <= 900:
                self.prfx = "y"
                self.name = "afina-per.shb"
                # Rotate 90 degrees: swap the axes.
                x, y = y, x
                k['[x1]'] = self.ot_y
                k['[x2]'] = x - self.ot_y  # note: x is already rotated here and is the former y
                k['[y1]'] = self.ot_x
                k['[y2]'] = y - self.ot_x
                k['[xk1]'] = (x - 80) / 2  # start of the semicircle
                k['[xk2]'] = x - k['[xk1]']  # end of the semicircle
                k['[xk3]'] = k['[xk1]'] + 40  # center of the semicircle
                k['[yk1]'] = k['[y1]'] + 35  # y midpoint of the lower semicircle
                k['[yk2]'] = k['[y2]'] - 35  # y midpoint of the upper semicircle
            # -------------- switch to the bottom template -------------------------
            if x > 900:
                self.prfx = "y"
                self.name = "afina-niz.shb"
                # Rotate.
                x = y
                y = 999  # sentinel so rotated programs are easy to spot on the control panel
                k['[x1]'] = self.ot_y
                k['[x2]'] = x - self.ot_y  # note: x is already rotated here and is the former y
                k['[y1]'] = self.ot_x
                k['[xk1]'] = (x - 80) / 2  # start of the bottom semicircle
                k['[xk2]'] = x - k['[xk1]']  # end of the bottom semicircle
                k['[xk3]'] = k['[xk1]'] + 40  # center of the bottom semicircle
                k['[yk1]'] = k['[y1]'] + 35  # y midpoint of the bottom semicircle
        body = self.readTemplate(self.name)
        body = self.fillingTemplate(body, k)
        return body, x, y
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,723
|
zuma-rus/cnc2
|
refs/heads/master
|
/my_tables.py
|
from PyQt5.QtWidgets import QTableWidgetItem
class My_tables():
    """Helper for convenient work with QTableWidget widgets.

    list_tables maps a table name to a tuple
    (QTableWidget, row_count, column_count) — see how tc[0]/tc[1]/tc[2]
    are indexed below.
    """

    def __init__(self, list_tables):
        # super(My_tables, self).__init__()
        self.list_tables = list_tables

    # Return a list of [int, int] rows read from the table; stops at the
    # first row whose first cell is empty.
    def getTable(self, table_name):
        tc = self.list_tables.get(table_name)
        lines = tc[1]
        table = []
        for i in range(0, lines):
            if tc[0].item(i, 0).text() != '':
                table.append([int(tc[0].item(i, 0).text()), int(tc[0].item(i, 1).text())])
            else:
                break
        return table

    def cleanAll(self):
        """Clear (prepare) all tables using cleanTable."""
        for value in self.list_tables:
            self.cleanTable(value)

    def cleanTable(self, table_name):
        # Overwrite every cell of the named table with an empty item.
        tc = self.list_tables.get(table_name)
        for i in range(0, tc[1]):
            for j in range(0, tc[2]):
                tc[0].setItem(i, j, QTableWidgetItem(""))

    # Debug helper: print the coordinates and contents of every cell.
    def printing(self, table_name):
        tc = self.list_tables.get(table_name)
        for i in range(0, tc[1]):
            for j in range(0, tc[2]):
                item = tc[0].item(i, j)
                if item is None:
                    valu = 'None'
                else:
                    valu = item.text()
                print('(' + str(i) + ',' + str(j) + ') = ' + valu)

    # Clear one row (used when a cell holds garbage or one of the two
    # values is missing).
    def cleanLine(self, table_name, num):
        table = self.list_tables.get(table_name)[0]
        table.setItem(num, 0, QTableWidgetItem(""))
        table.setItem(num, 1, QTableWidgetItem(""))

    # Swap two rows.
    def changeLines(self, table_name, one, two):
        tc = self.list_tables.get(table_name)
        table = tc[0]
        cell_one = [table.item(one, 0).text(), table.item(one, 1).text()]
        cell_two = [table.item(two, 0).text(), table.item(two, 1).text()]
        cell_one, cell_two = cell_two, cell_one
        table.setItem(one, 0, QTableWidgetItem(cell_one[0]))
        table.setItem(one, 1, QTableWidgetItem(cell_one[1]))
        table.setItem(two, 0, QTableWidgetItem(cell_two[0]))
        table.setItem(two, 1, QTableWidgetItem(cell_two[1]))

    # Bubble-sort rows in descending order; while sorting it also scrubs
    # malformed rows (fool-proofing against bad input).
    def sorting(self, table_name):
        tc = self.list_tables.get(table_name)
        table = tc[0]
        for i in range(0, tc[1]):
            for j in range(0, tc[1] - 1):
                item00 = table.item(j, 0)
                item10 = table.item(j + 1, 0)
                item01 = table.item(j, 1)
                item11 = table.item(j + 1, 1)
                # Row j must hold two integers; otherwise clear it and push
                # it downwards.
                if item00.text().isdigit() and item01.text().isdigit():
                    num00 = int(table.item(j, 0).text())
                else:
                    self.cleanLine(table_name, j)
                    self.changeLines(table_name, j, j + 1)
                    continue
                # Row j+1 must hold two integers; otherwise clear it.
                if item10.text().isdigit() and item11.text().isdigit():
                    num10 = int(table.item(j + 1, 0).text())
                else:
                    self.cleanLine(table_name, j + 1)
                    continue
                if num10 > num00:
                    self.changeLines(table_name, j, j + 1)
                elif num10 == num00:
                    # Tie on column 0: compare column 1.
                    if item01.text().isdigit():
                        num01 = int(table.item(j, 1).text())
                    else:
                        self.cleanLine(table_name, j)
                        self.changeLines(table_name, j, j + 1)
                        continue
                    if item11.text().isdigit():
                        num11 = int(table.item(j + 1, 1).text())
                    else:
                        self.cleanLine(table_name, j + 1)
                        continue
                    if num11 > num01:
                        self.changeLines(table_name, j, j + 1)
|
{"/pazpr.py": ["/template.py"], "/vik.py": ["/template.py"], "/shield.py": ["/template.py"], "/paz.py": ["/template.py"], "/main.py": ["/cnc_ui.py", "/my_tables.py", "/messages.py", "/vik.py", "/afin.py", "/shield.py", "/paz.py", "/pazpr.py"], "/afin.py": ["/template.py"]}
|
36,724
|
jkrause1s/Project-Movie-Trailer-Website
|
refs/heads/master
|
/favorites.py
|
import fresh_tomatoes
import media
# Titles shown on the page, written as TMDb search queries
# ('+' stands in for spaces in the query string).
movie_list = [
    "Catch+Me+If+you+can",
    "Raiders+of+the+Lost+Ark",
    "Da+Vinci+Code",
    "The+Social+Network",
    "The+Return+of+the+Jedi",
    "This+is+Spinal+Tap",
    "dirty+harry",
    "anchorman:",
    "fellowship+of+the+ring",
]
# One media.Movie instance per title, in the listed order.
movies = list(map(media.Movie, movie_list))
# Hand the movies to the page generator as its argument.
fresh_tomatoes.open_movies_page(movies)
|
{"/favorites.py": ["/media.py"]}
|
36,725
|
jkrause1s/Project-Movie-Trailer-Website
|
refs/heads/master
|
/media.py
|
import json
import urllib3
import webbrowser
import requests
#Class that will pull all movie details based on title
#Uses The Movie Database(TMDb) API to pull movie details found at https://themoviedb.org
class Movie():
    """Movie details fetched from The Movie Database (TMDb) API.

    Given only a title, looks up the TMDb id and then pulls the full detail
    record, trailer URL, poster URL, storyline and release date.
    API documented at https://themoviedb.org.
    """

    api_key = "?api_key=739e431e725c6c73ac6705ca9764a173"  # TMDb API key query fragment
    url = "https://api.themoviedb.org/3/"  # base URL shared by every API call
    poster_link = "https://image.tmdb.org/t/p/original/"  # base URL for poster images

    def __init__(self, movie_title):
        # A title is the only required input; everything else comes from the API.
        self.movie_title = movie_title
        self.movie_id = self.get_movie_id()            # resolve the TMDb id first
        self.movie_details = self.load_movie_info()    # full detail dictionary
        self.title = self.movie_details['title']
        self.trailer_url = self.get_trailer()
        self.poster_url = Movie.poster_link + self.movie_details['poster_path']
        self.storyline = self.movie_details['overview']
        self.release_date = self.fix_date(self.movie_details['release_date'])

    def get_movie_id(self):
        """Search TMDb by title; return the first result's id as a string."""
        search_url = Movie.url + "search/movie" + Movie.api_key + "&query=" + self.movie_title
        payload = requests.get(search_url).json()
        return str(payload['results'][0]['id'])

    def load_movie_info(self):
        """Fetch and return the full detail dictionary for this movie id."""
        detail_url = Movie.url + "movie/" + self.movie_id + Movie.api_key
        return requests.get(detail_url).json()

    def get_trailer(self):
        """Return the YouTube URL of the first trailer listed for this movie."""
        videos_url = Movie.url + "movie/" + self.movie_id + "/videos" + Movie.api_key
        payload = requests.get(videos_url).json()
        return "https://www.youtube.com/watch?v=" + str(payload['results'][0]['key'])

    def fix_date(self, date):
        """Convert a 'year-month-day' date string into 'Month Year'."""
        months = ["January", "February", "March", "April", "May", "June",
                  "July", "August", "September", "October", "November",
                  "December"]
        text = str(date)
        dash = text.find('-')
        next_dash = text[dash + 1:].find('-')
        month_index = int(text[dash + 1: dash + 1 + next_dash])
        return months[month_index - 1] + " " + text[:dash]

    def shorten_story(self, storyline):
        """Trim the storyline to 124 chars plus an ellipsis so the card fits."""
        return storyline[:124] + "..."

    def show_trailer(self):
        """Open the trailer in the default web browser."""
        webbrowser.open(self.get_trailer())
|
{"/favorites.py": ["/media.py"]}
|
36,726
|
bart314/dreadnought
|
refs/heads/master
|
/server.py
|
# Start with
# gunicorn --bind 0.0.0.0:5000 wsgi:app --error-logfile gunicorn.error.log --access-logfile gunicorn.log --capture-output
from flask import Flask, render_template, jsonify, request, Response
from flask_jwt_extended import (
    JWTManager, jwt_required, create_access_token, create_refresh_token,
    get_jwt_identity, set_access_cookies, set_refresh_cookies,
    get_raw_jwt
)
from datetime import timedelta
import json
import database

app = Flask(__name__)
# JWT is carried in cookies; revoked tokens are tracked in a blacklist
# table (see database.blacklist_token / get_blacklist_token).
# NOTE(review): secret key is hard-coded in source — consider an env var.
app.config['JWT_SECRET_KEY'] = 'DreadNoughtPassWord'
app.config['JWT_TOKEN_LOCATION'] = ['cookies']
app.config['JWT_ACCESS_COOKIE_PATH'] = '/'
app.config['JWT_COOKIE_SECURE'] = False  # allows plain HTTP; enable for production TLS
app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=3600)
app.config['JWT_BLACKLIST_ENABLED'] = True
app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['access']
jwt = JWTManager(app)

# https://github.com/vimalloc/flask-jwt-extended/blob/bf1a521b444536a5baea086899636406122acbc5/examples/redis_blacklist.py#L59
@jwt.token_in_blacklist_loader
def check_if_token_is_revoked(decrypted_token):
    """Return True when the token's jti appears in the blacklist table."""
    jti = decrypted_token['jti']
    entry = database.get_blacklist_token(jti)
    return entry['tot'] > 0
@app.route("/", methods=['get'])
def index():
    """Render the landing page."""
    return render_template("index.html")


@app.route("/<int:id>", methods=['get'])
def page_index(id):
    """Render the page view for one chapter."""
    return render_template("pages.html", chapter=id)


@app.route('/login', methods=['get'])
def login_form():
    """Render the login form."""
    return render_template('login_form.html')


@app.route('/login', methods=['post'])
def login():
    """Check posted credentials; on success set the JWT access cookie.

    NOTE(review): credentials are hard-coded in source — confirm intent.
    """
    username = request.json.get('usr', None)
    password = request.json.get('pwd', None)
    if (username=='bart' and password=='dreadnought'):
        access_token = create_access_token(identity=username)
        resp = jsonify({'login':True})
        set_access_cookies(resp, access_token)
        return resp, 200
    else:
        return jsonify({'error':'incorrect credentials'}), 401


@app.route('/logout', methods=['get'])
@jwt_required
def logout():
    """Blacklist the current access token, effectively logging out."""
    jwt = get_raw_jwt()['jti']
    database.blacklist_token(jwt)
    return jsonify({"msg": "Access token revoked"}), 200


@app.route('/test', methods=['get'])
@jwt_required
def test():
    """Auth smoke test: echo the identity embedded in the JWT."""
    current_user = get_jwt_identity()
    return jsonify(logged_in_as=current_user), 200


@app.route('/chapters', methods=['get'])
def chapters():
    """Return all chapters as JSON."""
    data = database.get_chapters();
    return jsonify(data)


@app.route('/chapters-parts', methods=['get'])
def chapters_parts():
    """Return all parts with their chapters as JSON."""
    data = database.get_chapter_and_parts()
    return jsonify(data)


@app.route('/chapter/<id>', methods=['get'])
def get_chapter(id):
    """Return the detail record for one chapter."""
    data = database.get_chapter_info(id)
    return jsonify(data)


@app.route("/timeline/<chapter>", methods=['get'])
def timeline(chapter):
    """Return a chapter's timeline items, ordered."""
    data = database.get_timeline(chapter)
    return jsonify(data)


@app.route('/edit/<id>', methods=['get'])
@jwt_required
def edit(id):
    """Render the edit form for one timeline item."""
    data = database.get_time_item(id)
    return render_template('form.html', result=data)


@app.route('/edit/<id>', methods=['put'])
#@jwt_required
def update(id):
    """Persist edits to a timeline item; returns the item's id as text."""
    data = request.json
    n_id = database.update(data)
    return f"{n_id}", 200


@app.route('/next_item/<chapter>/<item>', methods=['get'])
#@jwt_required
def next_item(chapter, item):
    """Return the item after *item* in the chapter, or 204 at the end."""
    items = database.next_item(chapter, item)
    if (len(items)==1):
        return '',204
    else:
        return jsonify(items[1]), 200


@app.route('/testput', methods=['put'])
#@jwt_required
def testput():
    """Debug endpoint for PUT requests."""
    print (get_jwt_identity());
    return ' *** testput ***'
@app.route('/position/<id>', methods=['put'])
@jwt_required
def update_position(id):
    """Move timeline item *id* to a new position.

    Expects JSON {'old': <int>, 'new': <int>} and delegates the renumbering
    to database.update_position. Returns an empty 204 response — a Flask
    view must return something; the previous version returned None, which
    makes Flask raise "view function did not return a response".
    """
    database.update_position(id, request.json)
    return '', 204
@app.route('/insert/', methods=['post'])
@jwt_required
def insert():
    """Create a new timeline item from the request JSON.

    Returns 201 with a Location header pointing at the edit page of the
    newly created item. (Fixed the malformed scheme 'http:/' -> 'http://'.)
    """
    id = database.insert(request.json)
    resp = Response()
    resp.headers['Location'] = f"http://localhost/dreadnought/edit/{id}"
    return resp, 201
@app.route('/del/<id>', methods=['delete'])
@jwt_required
def delete(id):
    """Delete the timeline item with the given id."""
    database.delete(id)
    return "delete"


# Dev entry point; production uses gunicorn (see the header comment).
if __name__=='__main__':
    app.run(debug=True)
|
{"/server.py": ["/database.py"]}
|
36,727
|
bart314/dreadnought
|
refs/heads/master
|
/database.py
|
import mysql.connector
import json
def connection():
    """Open and return a new MySQL connection to the dreadnought database."""
    params = {
        'host': 'localhost',
        'user': 'dreadnought',
        'passwd': 'HMSDreadnought',
        'database': 'dreadnought',
        'charset': 'utf8',
    }
    return mysql.connector.connect(**params)
def get_chapters():
    """Return every chapter as a list of {'nr': ..., 'titel': ...} dicts.

    Returns an empty list when the query fails — the previous version fell
    through to `list(cursor)` with `cursor` possibly undefined, raising
    NameError instead of reporting the database error.
    """
    conn = connection()
    try:
        cursor = conn.cursor(dictionary=True)
        cursor.execute('select nr,titel from chapters')
    except mysql.connector.Error as err:
        print(err)
        return []
    return list(cursor)
def get_chapter_and_parts():
    """Return all parts, each annotated with its list of chapters.

    Fixes the per-part chapter query, which had a stray trailing quote
    (``...part={nr}'``) that made the SQL invalid; it is now a
    parameterized query. Returns None when a query fails (unchanged
    error behavior: the error is printed).
    """
    conn = connection()
    try:
        cursor = conn.cursor(dictionary=True)
        cursor.execute('select * from parts order by nr')
        parts = list(cursor)
        rv = []
        # NOTE(review): this selects 'title' while get_chapters selects
        # 'titel' — confirm the actual column name in the chapters table.
        sql = "select nr,title from chapters where part=%s"
        for part in parts:
            cursor.execute(sql, (part['nr'],))
            part['chapters'] = list(cursor)
            rv.append(part)
        return rv
    except mysql.connector.Error as err:
        print(err)
def get_chapter_info(id):
    """Return the chapter row(s) with nr == *id* as a list of dicts."""
    conn = connection()
    try:
        cursor = conn.cursor(dictionary=True)
        sql = "select * from chapters where nr=%s"
        cursor.execute(sql, (id,))
    except mysql.connector.Error as err:
        print (err)
    # NOTE(review): if cursor creation itself fails, `cursor` is undefined
    # here and this raises NameError — same pattern in the functions below.
    return list(cursor)


def get_timeline(hoofdstuk):
    """Return a chapter's timeline rows, ordered by volgnummer."""
    conn = connection()
    try:
        cursor = conn.cursor(dictionary=True)
        sql = "select * from timeline where hoofdstuk=%s order by volgnummer"
        cursor.execute(sql, (hoofdstuk,))
    except mysql.connector.Error as error:
        print (error)
    return list(cursor)


def get_time_item(id):
    """Return the single timeline row with the given id."""
    conn = connection()
    try:
        cursor = conn.cursor(dictionary=True)
        sql = "select * from timeline where id=%s"
        cursor.execute(sql, (id,))
    except mysql.connector.Error as error:
        print (error)
    return list(cursor)[0]


def next_item(chapter, item):
    """Return up to two ids: *item* itself and the one following it."""
    conn = connection()
    try:
        # Not quite right: ids are not guaranteed to keep increasing.
        # Good enough for now...
        cursor = conn.cursor(dictionary=True)
        sql = "select id from timeline where hoofdstuk=%s and id>=%s order by volgnummer limit 2;"
        cursor.execute(sql, (chapter,item))
    except mysql.connector.Error as error:
        print (error)
    return list(cursor)
def update(data):
    """Update one timeline row from a dict of form values; returns its id."""
    foo = (int(data['jaar']), data['datum'], int(data['paginanummer']), data['koptekst'], data['broodtekst'], int(data['id']))
    conn = connection()
    sql = "update timeline set jaar=%s, datum=%s, paginanummer=%s, koptekst=%s, broodtekst=%s where id=%s"
    cursor = conn.cursor()
    cursor.execute(sql, foo)
    conn.commit()
    return data['id']


def update_position(id, data):
    """Move item *id* from position data['old'] to data['new'].

    Shifts the volgnummer of every item between the two positions within
    the item's chapter, then assigns the new position to the moved item.
    """
    hoofdstuk = get_time_item(id)['hoofdstuk']
    conn = connection()
    cursor = conn.cursor()
    old, new = data['old'], data['new']
    if (old>new):
        # Moving up: push the displaced items one position down.
        sql = """update timeline set volgnummer=volgnummer+1
        where volgnummer between %s and %s-1
        and hoofdstuk=%s"""
        cursor.execute(sql, (new,old,hoofdstuk))
    else:
        # Moving down: pull the displaced items one position up.
        sql = """update timeline set volgnummer=volgnummer-1
        where volgnummer between %s and %s
        and hoofdstuk=%s"""
        cursor.execute(sql, (old,new,hoofdstuk))
    sql = "update timeline set volgnummer=%s where id=%s"
    cursor.execute(sql, (new,id))
    conn.commit()


def insert(data):
    """Insert a new timeline row; returns the new id."""
    foo = (int(data['jaar']), data['datum'], int(data['paginanummer']), data['koptekst'], data['broodtekst'])
    conn = connection()
    sql = "insert into timeline(jaar, datum, paginanummer, koptekst, broodtekst) values (%s,%s,%s,%s,%s)"
    cursor = conn.cursor()
    cursor.execute(sql, foo)
    conn.commit()
    id = cursor.lastrowid
    # Default volgnummer to the id; adjusting is done through the GUI.
    sql = "update timeline set volgnummer=%s where id=%s"
    cursor.execute(sql, (id, id))
    conn.commit()
    return id
def delete(id):
    """Delete the timeline row with the given id."""
    conn = connection()
    sql = "delete from timeline where id=%s"
    conn.cursor().execute(sql, (id,))
    conn.commit()


def blacklist_token(jwt):
    """Record a revoked JWT jti in the tokens table."""
    conn = connection()
    sql = "insert into tokens values(%s)"
    conn.cursor().execute(sql, (jwt, ))
    conn.commit()


def get_blacklist_token(jwt):
    """Return {'tot': n}: how many times this jti appears in the blacklist."""
    conn = connection()
    sql = "select count(*) as tot from tokens where jwt=%s";
    cursor = conn.cursor(dictionary=True )
    cursor.execute(sql, (jwt,));
    return list(cursor)[0]


# Leftover manual-test data and calls, kept for reference:
#{"id":"1","jaar":"1910","datum":"12-01","paginanummer":"720","koptext":"Germany will defend the interests of German merchants in Morocco.","broodtekst":"The south of Morocco is thought to be exceedingly fertile. German merchants want to settle there, but the road is blocked by the French as a result of the Act of Algeciras. "}
#data = {"jaar":"1921","datum":"12-05","paginanummer":"123","koptekst":"dit is de awesome koptekst","broodtekst":"En hier allemaal mooie <a href='hallo daar'>dingen</a>...", "id":28}
#foo = (int(data['jaar']), data['datum'], int(data['paginanummer']), data['koptekst'], data['broodtekst'], int(data['id']))
#update(foo)
#print (get_timeline(39)[0])
#print (get_time_item(13))
#delete(28)
#insert (data)d
#data = {'old':2, 'new':23}
# NOTE(review): this call runs a query at import time — looks like
# leftover debugging; confirm before removing.
get_chapter_and_parts()
|
{"/server.py": ["/database.py"]}
|
36,739
|
cpodieu1/Belt_Exam
|
refs/heads/master
|
/exam_app/views.py
|
from django.shortcuts import render, HttpResponse, redirect
from django.contrib import messages
from .models import User, Wish, Granted_wish
import bcrypt
def index(request):
    """Render the landing page (login/registration)."""
    return render(request, 'index.html')


def register(request):
    """Validate and create a new user, then send them back to log in."""
    errors = User.objects.register_validator(request.POST)
    if errors:
        for k, v in errors.items():
            messages.error(request, v)
        return redirect('/')
    print('okie-dokie, doing good so far')  # debug leftover
    # Hash the password before storing it.
    pw_hash = bcrypt.hashpw(request.POST['password'].encode(), bcrypt.gensalt()).decode()
    User.objects.create(
        first_name = request.POST['first_name'],
        last_name = request.POST['last_name'],
        email = request.POST['email'],
        password = pw_hash
    )
    messages.info(request, 'You are in shape, go ahead and log in.')
    return redirect('/')


def login(request):
    """Check credentials; on success stash the user's details in the session."""
    try:
        user = User.objects.get(email = request.POST['email'])
    except:
        messages.error(request, 'You are trying wrong email or password.')
        return redirect('/')
    if bcrypt.checkpw(request.POST['password'].encode(), user.password.encode()):
        print('yay! password matches!')  # debug leftover
    else:
        messages.error(request, 'Be informed that you are still using incorrect email or password.')
        return redirect('/')
    request.session['user_id'] = user.id
    request.session['first_name'] = user.first_name
    request.session['last_name'] = user.last_name
    request.session['email'] = user.email
    return redirect('/wishes')
def logout(request):
    """Log the user out by clearing every session key set at login.

    Fixes two defects: 'email' was never removed (session-keyed views such
    as wishes() continued to work after logout), and `del` raised KeyError
    on a second logout — pop(..., None) is tolerant of missing keys.
    """
    request.session.pop('user_id', None)
    request.session.pop('first_name', None)
    request.session.pop('last_name', None)
    request.session.pop('email', None)
    messages.info(request, 'You logged out! Come back soon!')
    return redirect('/')
def wishes(request):
    """Render the dashboard: the user's wishes plus all granted wishes."""
    context = {
        'user': User.objects.get(email=request.session['email']),
        'wishes': User.objects.get(email=request.session['email']).wishes.all(),
        'granted_wishes': Granted_wish.objects.all()
    }
    return render(request, 'wishes.html', context)


def hacker(request):
    """Render the hacker page.

    NOTE(review): likely shown to unauthorized visitors — confirm against
    the template/urls usage.
    """
    return render(request, 'hacker.html')


def new(request):
    """Render the new-wish form."""
    context = {
        'user': User.objects.get(email=request.session['email'])
    }
    return render(request, 'new.html', context)


def edit(request, id):
    """Render the edit form for wish *id*."""
    context = {
        'user': User.objects.get(email=request.session['email']),
        'wish': Wish.objects.get(id=id)
    }
    return render(request, 'edit.html', context)


def stats(request):
    """Render counts of granted (global and per-user) and pending wishes."""
    context = {
        'user': User.objects.get(email=request.session['email']),
        'granted_wishes': Granted_wish.objects.count(),
        'user_granted_wishes': User.objects.get(email=request.session['email']).granted_wishes.count(),
        'user_pending_wishes': User.objects.get(email=request.session['email']).wishes.count()
    }
    return render(request, 'stats.html', context)
def new_wish(request):
    """Validate and create a wish for the posted user; POST only."""
    if request.method == 'POST':
        errors = Wish.objects.basic_validator(request.POST)
        if len(errors):
            for key, value in errors.items():
                messages.error(request, value)
            return redirect('/new')
        else:
            Wish.objects.create(item=request.POST['item'], desc=request.POST['desc'], user=User.objects.get(
                id=request.POST['user_id']))
            return redirect('/wishes')
    else:
        return redirect('/')


def grant(request):
    """Convert a pending wish into a Granted_wish, deleting the original."""
    if request.method == 'POST':
        Granted_wish.objects.create(item=request.POST['wish_item'], user=User.objects.get(
            id=request.POST['user_id']), date_added=request.POST['wish_created'])
        wish = Wish.objects.get(id=request.POST['wish_id'])
        wish.delete()
        return redirect('/wishes')
    else:
        return redirect('/')
def update(request, id):
    """Validate and save edits to wish *id*; re-show the edit page on errors.

    The URL pattern (`update/<int:id>`) passes *id* as an int, so the
    previous redirect target `'/edit/'+id` raised TypeError (str + int);
    the target is now built with an f-string.
    """
    if request.method == 'POST':
        errors = Wish.objects.basic_validator(request.POST)
        if len(errors):
            for key, value in errors.items():
                messages.error(request, value)
            return redirect(f'/edit/{id}')
        else:
            wish = Wish.objects.get(id=id)
            wish.item = request.POST['item']
            wish.desc = request.POST['desc']
            wish.save()
            return redirect('/wishes')
    else:
        return redirect('/')
def like(request):
    """Add the posting user to a granted wish's likes.

    Guards against liking your own wish and liking the same wish twice.
    """
    if request.method == 'POST':
        granted = Granted_wish.objects.get(id=request.POST['grant_id'])
        user = User.objects.get(id=request.POST['user_id'])
        if granted.user_id == user.id:
            messages.error(request, "Users may not like their own wishes.")
            return redirect('/wishes')
        if len(granted.likes.filter(id=request.POST['user_id'])) > 0:
            messages.error(request, "You have already liked this wish.")
            return redirect('/wishes')
        else:
            granted.likes.add(user)
    return redirect('/wishes')


def delete(request):
    """Delete a wish identified by the posted wish_id; POST only."""
    if request.method == 'POST':
        wish = Wish.objects.get(id=request.POST['wish_id'])
        wish.delete()
        return redirect('/wishes')
    else:
        return redirect('/')


# def likes(request):
#     if('count' not in request.session):
#         request.session['likes'] = 'likes'
#     if (request.POST['likes'] == 'like'):
#         request.session['likes'] += 1
#     return redirect('/wishes')
|
{"/exam_app/views.py": ["/exam_app/models.py"]}
|
36,740
|
cpodieu1/Belt_Exam
|
refs/heads/master
|
/exam_app/urls.py
|
from django.urls import path
from . import views
# URL routes for the wishes app: page views first, then form/action endpoints.
urlpatterns = [
    path('', views.index),
    path('wishes', views.wishes),
    path('hacker', views.hacker),
    path('new', views.new),
    path('edit/<int:id>', views.edit),
    path('stats', views.stats),
    path('register', views.register),
    path('login', views.login),
    path('logout', views.logout),
    path('new_wish', views.new_wish),
    path('grant', views.grant),
    path('update/<int:id>', views.update),
    path('delete', views.delete),
    # path('process_likes', views.likes)
]
|
{"/exam_app/views.py": ["/exam_app/models.py"]}
|
36,741
|
cpodieu1/Belt_Exam
|
refs/heads/master
|
/exam_app/models.py
|
from __future__ import unicode_literals
from django.db import models
import re
import bcrypt
class UserManager(models.Manager):
    """Custom manager holding form-validation helpers for User."""

    def register_validator(self, post_data):
        """Validate a registration form; return {field: message} for failures.

        Fixes the email regex: the original character classes
        '[a-zA-Z0-9.+-_]' and '[a-zA-Z0-9.-_]' contained accidental
        '-'-ranges ('+' to '_' and '.' to '_'), silently accepting
        characters like ',', '/', ';' and '@' inside the local part.
        The dash is now placed last so it matches a literal '-'.
        """
        errors = {}
        email_regex = re.compile(r'^[a-zA-Z0-9._+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
        if not email_regex.match(post_data['email']):
            errors['email'] = 'Invalid email address.'
        if len(post_data['first_name']) < 2:
            errors['first_name'] = 'First name should be 2 or more characters.'
        if len(post_data['last_name']) < 2:
            errors['last_name'] = 'Last name should be 2 or more characters.'
        if len(post_data['password']) < 8:
            errors['password'] = 'Password should be 8 or more characters.'
        if post_data['password'] != post_data['confirm_pw']:
            errors['password_match'] = 'Password and confirmed password do not match.'
        return errors

    def login_validator(self, post_data):
        """Return an error when the email is already registered.

        NOTE(review): despite the name, this performs duplicate-email
        checking (registration logic); no visible view calls it — confirm.
        """
        errors = {}
        try:
            User.objects.get(email = post_data['email'])
            errors['email'] = 'This email is already a member.'
        except:
            # NOTE(review): bare except also hides MultipleObjectsReturned —
            # consider catching User.DoesNotExist explicitly.
            pass
        return errors
class User(models.Model):
    # NOTE(review): dead code — shadowed by the second `class User` defined
    # later in this module, so this definition is never the one bound to
    # the name `User`. Registering two models with the same name may also
    # trigger Django's duplicate-model error — confirm and remove one.
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    email = models.EmailField(max_length=50)
    password = models.CharField(max_length=60)
    createed_at = models.DateTimeField(auto_now_add=True)  # typo: 'createed_at'
    updated_at = models.DateTimeField(auto_now=True)
    objects = UserManager()
class WishManager(models.Manager):
    """Custom manager holding form validation for Wish."""

    def basic_validator(self, postData):
        """Validate a wish form; return {field: message} for failures.

        Fixes the key for the description error — it was stored under
        'first_name' (copy/paste from the user validator); it is now
        keyed 'desc' to match the field being validated.
        """
        errors = {}
        if len(postData['item']) < 3:
            errors['item'] = "Item must be no fewer than 3 characters."
        if len(postData['desc']) < 3:
            errors['desc'] = "Description must be no fewer than 3 characters."
        return errors
class User(models.Model):
    # The effective User model (this definition shadows the earlier one
    # higher in this module).
    first_name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255)
    email = models.CharField(max_length=255)
    email_hash = models.CharField(max_length=255)  # not referenced by the visible views
    password = models.CharField(max_length=255)  # stores the bcrypt hash (see views.register)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = UserManager()
class Wish(models.Model):
    # A pending wish belonging to one user.
    item = models.CharField(max_length=255)
    desc = models.CharField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(User, related_name="wishes", on_delete=models.CASCADE)
    objects = WishManager()
class Granted_wish(models.Model):
    # A wish that has been granted (created from a Wish in views.grant).
    item = models.CharField(max_length=255)
    # NOTE(review): auto_now resets this field on every save, so the
    # explicit date_added that views.grant passes appears to be
    # overwritten — confirm intent.
    date_added = models.DateTimeField(auto_now=True)
    granted_at = models.DateTimeField(auto_now_add=True)
    likes = models.ManyToManyField(User, related_name='likes')
    user = models.ForeignKey(User, related_name="granted_wishes", on_delete=models.CASCADE)

    # @property
    def num_likes(self):
        # Number of users who liked this granted wish.
        return self.likes.all().count()
|
{"/exam_app/views.py": ["/exam_app/models.py"]}
|
36,742
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/models/user.py
|
from db.database import Database
from bson.objectid import ObjectId # convert string id to BSON object
class UserModel:
    """A user document stored in the 'users' Mongo collection."""

    def __init__(self, _id, username, password):
        # Mongo ObjectIds are kept as strings on the model; None stays None.
        self.id = _id if _id is None else str(_id)
        self.username = username
        self.password = password

    @classmethod
    def find_by_username(cls, username):
        """Return the user with *username*, or None when absent."""
        document = Database.find_one('users', {'username': username})
        if document is None:
            return None
        user = cls(**document)
        user.id = str(user.id)  # normalise the raw _id to its string form
        return user

    @classmethod
    def find_by_id(cls, _id):
        """Return the user with the given string id, or None when absent."""
        # The string id must be converted back to a BSON ObjectId to query.
        document = Database.find_one('users', {'_id': ObjectId(_id)})
        return cls(**document) if document is not None else None

    def save_to_db(self):
        """Insert this user's credentials into the 'users' collection."""
        Database.insert(
            "users", {'username': self.username, 'password': self.password})
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,743
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/resources/item.py
|
from flask import Flask, request, jsonify
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required # import JWT
from db.database import Database
from models.item import ItemModel
class Item(Resource):
    """REST resource for a single item, addressed by name."""

    parser = reqparse.RequestParser()
    parser.add_argument('price', type=float, required=True,
                        help="This field cannot be left blank")
    parser.add_argument('store_id', type=str, required=True,
                        help="Every item needs a store id")

    @jwt_required()
    def get(self, name):
        """Return the item as JSON, or 404 when it does not exist."""
        item = ItemModel.find_by_name(name)
        if item:
            return item.json()
        return {'message': 'Item not found'}, 404

    # NOTE(review): post is not JWT-protected while get/delete are — confirm.
    def post(self, name):
        """Create a new item; 400 when the name already exists."""
        if ItemModel.find_by_name(name):
            return {'message': 'An item with name {} already exists'.format(name)}, 400
        data = Item.parser.parse_args()
        item = ItemModel(None, name, data['price'], data['store_id'])
        try:
            item.save_to_db()
        except:
            # Fixed copy/paste in the error text: this inserts an item,
            # not a client.
            return {"message": "An error occurred while inserting the item."}, 500
        return item.json(), 201  # 201: created

    @jwt_required()
    def delete(self, name):
        """Delete the item when present; always returns a message."""
        item = ItemModel.find_by_name(name)
        if item:
            item.destroy()
            return {'message': 'Item deleted'}
        return {'message': 'Item could not be found'}

    def put(self, name):
        """Create the item when missing, otherwise update its price."""
        data = Item.parser.parse_args()
        item = ItemModel.find_by_name(name)
        if item is None:
            item = ItemModel(None, name, data['price'], data['store_id'])
            item.save_to_db()
        else:
            item.price = data['price']
            item.update()
        return item.json()
class ItemList(Resource):
    """REST resource listing every stored item."""

    def get(self):
        """Return every item wrapped as {'items': [...]}."""
        return {'items': ItemModel.find_all()}
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,744
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/models/item.py
|
from db.database import Database
from bson.objectid import ObjectId # convert string id to BSON object
class ItemModel:
    """An item document in the 'items' Mongo collection."""

    COLLECTION = "items"

    def __init__(self, _id, name, price, store_id):
        # ids are kept as strings on the model; None (or falsy) stays None.
        self.id = str(_id) if _id is not None else _id
        self.name = name
        self.price = price
        self.store_id = str(store_id) if store_id else None

    def json(self):
        """Mongo-facing representation (store_id as a BSON ObjectId).

        NOTE(review): the ObjectId makes this dict non-JSON-serializable,
        yet /resources/item.py returns it from HTTP handlers — confirm
        whether those should use load_json() instead.
        """
        return {"name": self.name, "price": self.price, "store_id": ObjectId(self.store_id)}

    def load_json(self):
        """JSON-serializable representation (store_id as a string)."""
        return {"name": self.name, "price": self.price, "store_id": self.store_id}

    @classmethod
    def find_by_name(cls, name):
        """Return the item with *name*, or None when absent."""
        row = Database.find_one(ItemModel.COLLECTION, {"name": name})
        if row:
            return cls(**row)

    @staticmethod
    def find_all(query=None):
        """Return all items matching *query* as plain dicts ([] when none).

        The mutable default argument ({}) was replaced by None to avoid
        the shared-default pitfall; behavior is unchanged for callers.
        """
        if query is None:
            query = {}
        items = Database.find(ItemModel.COLLECTION, query)
        if items:
            return [ItemModel(**item).load_json() for item in items]
        return []

    def save_to_db(self):
        """Insert this item into the collection."""
        Database.insert(ItemModel.COLLECTION, self.json())

    def update(self):
        """Persist this item's current fields.

        The filter now converts the string id back to ObjectId — Mongo
        stores _id as an ObjectId (see UserModel.find_by_id, which makes
        the same conversion), so the original string filter matched no
        document and updates were silently lost.
        """
        Database.update(ItemModel.COLLECTION, {"_id": ObjectId(self.id)}, self.json())

    def destroy(self):
        """Remove this item from the collection by name."""
        Database.remove(ItemModel.COLLECTION, {"name": self.name})
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,745
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/db/initialize.py
|
import pymongo
from bson.objectid import ObjectId

# One-shot setup script: seeds the users collection with a default admin
# and prints the current state for manual verification.
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb = myclient["mydatabase"]
mycol = None  # NOTE(review): never used

""" Create users collection """
collections = mydb.list_collection_names()  # NOTE(review): never used
users_collection = mydb["users"]

""" Insert default user """
user = {"username": "admin", "password": "password"}
# add admin user if the user hasn't been created already
# (the `user` dict doubles as a Mongo projection here — both fields truthy)
if user['username'] not in [u['username'] for u in users_collection.find({}, user)]:
    users_collection.insert_one(user)

# show all users in user collection
print([u for u in users_collection.find()])
print(myclient.list_database_names())  # show all databases

# mydb["items"].update_many({}, {'$set': {"store_id": 1}}, upsert=True)
# mydb["items"].update_many({"store_id": 1}, {'$set': {"store_id": ObjectId('5bf1f60a9c31ecbbb89d0d66')}}, upsert=True)
# mydb["stores"].delete_many({})
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,746
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/resources/user.py
|
import pymongo
from db.database import Database
from models.user import UserModel
from flask_restful import Resource, reqparse
class UserRegister(Resource):
    """REST endpoint that registers a new user from posted credentials."""

    parser = reqparse.RequestParser()
    parser.add_argument('username', type=str, required=True,
                        help="This field cannot be blank.")
    parser.add_argument('password', type=str, required=True,
                        help="This field cannot be blank.")

    def post(self):
        """Create the user unless the username is already taken."""
        args = UserRegister.parser.parse_args()
        # Reject duplicates before writing anything.
        if UserModel.find_by_username(args['username']):
            return {"message": "A user with that username already exists"}, 400
        UserModel(_id=None, username=args['username'], password=args['password']).save_to_db()
        return {"message": "User created successfully."}, 201
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,747
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/models/store.py
|
from db.database import Database
from models.item import ItemModel
from bson.objectid import ObjectId
class StoreModel:
    """Mongo-backed store record; items reference stores via 'store_id'."""

    COLLECTION = "stores"

    def __init__(self, _id, name):
        # stringified ObjectId (or None for a not-yet-saved store)
        self.id = str(_id) if _id is not None else _id
        self.name = name

    def json(self):
        """Serializable representation without the item list."""
        return {"name": self.name}

    def load_json(self):
        """Representation including the store's items."""
        return {"name": self.name, "items": self.items}

    @property
    def items(self):
        """All items whose store_id points at this store ([] when unsaved)."""
        items = []
        if self.id:
            items = ItemModel.find_all({"store_id": ObjectId(self.id)})
        return items

    @classmethod
    def find_by_name(cls, name):  # get method for this resource
        """Return the store named *name*, or None when absent."""
        row = Database.find_one(StoreModel.COLLECTION, {"name": name})
        if row:
            return cls(**row)

    @staticmethod
    def find_all():
        """All stores, each serialized with its item list."""
        stores = Database.find(StoreModel.COLLECTION, {})
        return [StoreModel(**store).load_json() for store in stores]

    def save_to_db(self):
        """Insert this store as a new document."""
        Database.insert(StoreModel.COLLECTION, self.json())

    def update(self):
        """Persist the current name under this store's _id.

        BUG FIX: self.id is a *string*, but Mongo stores _id as an ObjectId
        (see the `items` property), so the original string filter could
        never match a stored document.
        """
        Database.update(StoreModel.COLLECTION,
                        {"_id": ObjectId(self.id) if self.id else self.id},
                        self.json())

    def destroy(self):
        """Delete every store document with this store's name."""
        Database.remove(StoreModel.COLLECTION, {"name": self.name})
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,748
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/security.py
|
from werkzeug.security import safe_str_cmp # for comparing strings
from models.user import UserModel
# authenticate method to be run by JWT in authenticating a user
def authenticate(username, password):
    """JWT auth callback: return the matching UserModel, else None (implicit).

    NOTE(review): werkzeug.security.safe_str_cmp was removed in Werkzeug 2.1 --
    confirm the pinned Werkzeug version, or migrate to hmac.compare_digest.
    NOTE(review): the comparison is against user.password as stored; no hashing
    is visible in this file -- confirm whether plaintext storage is intentional.
    """
    user = UserModel.find_by_username(username)
    # constant-time string comparison (timing-attack resistant)
    if user and safe_str_cmp(user.password, password):
        return user
# this is the function to identify user from the token
def identity(payload):
    """JWT identity callback: resolve the decoded token payload to its user."""
    return UserModel.find_by_id(payload['identity'])
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,749
|
IfeOlogbese/python-stores-api
|
refs/heads/master
|
/app.py
|
from flask import Flask
from flask_restful import Api
from flask_jwt import JWT # import JWT
from security import authenticate, identity
from db.database import Database
from resources.user import UserRegister
from resources.item import Item, ItemList
from resources.store import Store, StoreList
app = Flask(__name__)
app.secret_key = "some secret key"  # NOTE(review): hard-coded secret -- move to config/env for production
api = Api(app)  # Allow us to very easily add http resources
""" Api works with resources and every resource has to be a class"""
# creates new endpoint /auth, we send username and password, JWT gets it and sends to the authenticate method
jwt = JWT(app, authenticate, identity)


@app.before_first_request  # initialize database connection
def init_db():
    """Open the MongoDB connection once, before the first request is served."""
    Database.initialize()


# REST routes: each resource class is bound to its URL pattern.
api.add_resource(Item, '/item/<string:name>')  # http://localhost:5000/item/ife
api.add_resource(ItemList, '/items')
api.add_resource(Store, '/store/<string:name>')
api.add_resource(StoreList, '/stores')
api.add_resource(UserRegister, '/register')

if __name__ == '__main__':
    app.run(port=5000, debug=True)  # debug to true for proper error messages
|
{"/resources/item.py": ["/models/item.py"], "/resources/user.py": ["/models/user.py"], "/models/store.py": ["/models/item.py"], "/security.py": ["/models/user.py"], "/app.py": ["/security.py", "/resources/user.py", "/resources/item.py"]}
|
36,761
|
EmbeddedSystemClass/ADE9000_monitor
|
refs/heads/master
|
/form/ADE9000_form.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ADE9000_form.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """Auto-generated PyQt5 UI for the ADE9000 monitor window.

    Generated by pyuic5 from 'ADE9000_form.ui' -- regenerate from the .ui
    file instead of editing this class by hand (see the file header warning).
    """

    def setupUi(self, Form):
        """Create and lay out every widget on *Form* (fixed 700x600 window)."""
        Form.setObjectName("Form")
        Form.setWindowModality(QtCore.Qt.NonModal)
        Form.setEnabled(True)
        Form.resize(700, 600)
        # fixed-size window: min == max == 700x600
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())
        Form.setSizePolicy(sizePolicy)
        Form.setMinimumSize(QtCore.QSize(700, 600))
        Form.setMaximumSize(QtCore.QSize(700, 600))
        # LCD readouts and unit/caption labels (voltage / current / power)
        self.lcd_Voltage = QtWidgets.QLCDNumber(Form)
        self.lcd_Voltage.setGeometry(QtCore.QRect(50, 50, 121, 41))
        self.lcd_Voltage.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_Voltage.setObjectName("lcd_Voltage")
        self.label_Vrms = QtWidgets.QLabel(Form)
        self.label_Vrms.setGeometry(QtCore.QRect(178, 56, 61, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Vrms.setFont(font)
        self.label_Vrms.setObjectName("label_Vrms")
        self.label_Arms = QtWidgets.QLabel(Form)
        self.label_Arms.setGeometry(QtCore.QRect(180, 106, 61, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Arms.setFont(font)
        self.label_Arms.setObjectName("label_Arms")
        self.lcd_Current = QtWidgets.QLCDNumber(Form)
        self.lcd_Current.setGeometry(QtCore.QRect(50, 100, 121, 41))
        self.lcd_Current.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_Current.setObjectName("lcd_Current")
        self.label_Voltage = QtWidgets.QLabel(Form)
        self.label_Voltage.setGeometry(QtCore.QRect(10, 50, 41, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Voltage.setFont(font)
        self.label_Voltage.setObjectName("label_Voltage")
        self.label_ADE9000 = QtWidgets.QLabel(Form)
        self.label_ADE9000.setGeometry(QtCore.QRect(10, 10, 681, 21))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.label_ADE9000.setFont(font)
        self.label_ADE9000.setObjectName("label_ADE9000")
        self.lcd_Power = QtWidgets.QLCDNumber(Form)
        self.lcd_Power.setGeometry(QtCore.QRect(50, 150, 121, 41))
        self.lcd_Power.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_Power.setObjectName("lcd_Power")
        self.label_W = QtWidgets.QLabel(Form)
        self.label_W.setGeometry(QtCore.QRect(180, 156, 61, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_W.setFont(font)
        self.label_W.setObjectName("label_W")
        self.label_Power = QtWidgets.QLabel(Form)
        self.label_Power.setGeometry(QtCore.QRect(10, 150, 41, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Power.setFont(font)
        self.label_Power.setTextFormat(QtCore.Qt.AutoText)
        self.label_Power.setObjectName("label_Power")
        # frequency / THD readouts (middle column)
        self.label_Hz = QtWidgets.QLabel(Form)
        self.label_Hz.setGeometry(QtCore.QRect(448, 56, 31, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Hz.setFont(font)
        self.label_Hz.setObjectName("label_Hz")
        self.label_Freq = QtWidgets.QLabel(Form)
        self.label_Freq.setGeometry(QtCore.QRect(260, 50, 51, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Freq.setFont(font)
        self.label_Freq.setTextFormat(QtCore.Qt.AutoText)
        self.label_Freq.setObjectName("label_Freq")
        self.lcd_VTHD = QtWidgets.QLCDNumber(Form)
        self.lcd_VTHD.setGeometry(QtCore.QRect(320, 100, 121, 41))
        self.lcd_VTHD.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_VTHD.setObjectName("lcd_VTHD")
        self.lcd_ITHD = QtWidgets.QLCDNumber(Form)
        self.lcd_ITHD.setGeometry(QtCore.QRect(320, 150, 121, 41))
        self.lcd_ITHD.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_ITHD.setObjectName("lcd_ITHD")
        self.lcd_Freq = QtWidgets.QLCDNumber(Form)
        self.lcd_Freq.setGeometry(QtCore.QRect(320, 50, 121, 41))
        self.lcd_Freq.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_Freq.setObjectName("lcd_Freq")
        self.label_ITHD = QtWidgets.QLabel(Form)
        self.label_ITHD.setGeometry(QtCore.QRect(250, 150, 71, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_ITHD.setFont(font)
        self.label_ITHD.setObjectName("label_ITHD")
        self.label_Percent1 = QtWidgets.QLabel(Form)
        self.label_Percent1.setGeometry(QtCore.QRect(450, 106, 21, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Percent1.setFont(font)
        self.label_Percent1.setObjectName("label_Percent1")
        self.label_VHD = QtWidgets.QLabel(Form)
        self.label_VHD.setGeometry(QtCore.QRect(250, 100, 71, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_VHD.setFont(font)
        self.label_VHD.setObjectName("label_VHD")
        self.label_Percent2 = QtWidgets.QLabel(Form)
        self.label_Percent2.setGeometry(QtCore.QRect(450, 156, 21, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Percent2.setFont(font)
        self.label_Percent2.setObjectName("label_Percent2")
        # power-factor / temperature readouts (right column)
        self.label_degC = QtWidgets.QLabel(Form)
        self.label_degC.setGeometry(QtCore.QRect(670, 106, 21, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_degC.setFont(font)
        self.label_degC.setObjectName("label_degC")
        self.label_Percent3 = QtWidgets.QLabel(Form)
        self.label_Percent3.setGeometry(QtCore.QRect(668, 56, 31, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Percent3.setFont(font)
        self.label_Percent3.setObjectName("label_Percent3")
        self.lcd_PF = QtWidgets.QLCDNumber(Form)
        self.lcd_PF.setGeometry(QtCore.QRect(540, 50, 121, 41))
        self.lcd_PF.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_PF.setObjectName("lcd_PF")
        self.label_Temp = QtWidgets.QLabel(Form)
        self.label_Temp.setGeometry(QtCore.QRect(500, 100, 41, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Temp.setFont(font)
        self.label_Temp.setObjectName("label_Temp")
        self.label_PF = QtWidgets.QLabel(Form)
        self.label_PF.setGeometry(QtCore.QRect(500, 50, 41, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_PF.setFont(font)
        self.label_PF.setObjectName("label_PF")
        self.lcd_Temp = QtWidgets.QLCDNumber(Form)
        self.lcd_Temp.setGeometry(QtCore.QRect(540, 100, 121, 41))
        self.lcd_Temp.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
        self.lcd_Temp.setObjectName("lcd_Temp")
        self.label_Voltage_2 = QtWidgets.QLabel(Form)
        self.label_Voltage_2.setGeometry(QtCore.QRect(10, 100, 41, 41))
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(12)
        self.label_Voltage_2.setFont(font)
        self.label_Voltage_2.setObjectName("label_Voltage_2")
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Install all user-visible strings (captions are Japanese)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "ADE9000 Monitor"))
        self.label_Vrms.setText(_translate("Form", "Vrms"))
        self.label_Arms.setText(_translate("Form", "Arms"))
        self.label_Voltage.setText(_translate("Form", "電圧"))
        self.label_ADE9000.setText(_translate("Form", "ADE9000 High Performance, Multiphase Energy, and Power Quality Monitoring IC"))
        self.label_W.setText(_translate("Form", "W"))
        self.label_Power.setText(_translate("Form", "電力"))
        self.label_Hz.setText(_translate("Form", "Hz"))
        self.label_Freq.setText(_translate("Form", "周波数"))
        self.label_ITHD.setText(_translate("Form", "電流THD"))
        self.label_Percent1.setText(_translate("Form", "%"))
        self.label_VHD.setText(_translate("Form", "電圧THD"))
        self.label_Percent2.setText(_translate("Form", "%"))
        self.label_degC.setText(_translate("Form", "℃"))
        self.label_Percent3.setText(_translate("Form", "%"))
        self.label_Temp.setText(_translate("Form", "温度"))
        self.label_PF.setText(_translate("Form", "力率"))
        self.label_Voltage_2.setText(_translate("Form", "電流"))
|
{"/ADE9000_monitor.py": ["/form/ADE9000_form.py"]}
|
36,762
|
EmbeddedSystemClass/ADE9000_monitor
|
refs/heads/master
|
/ADE9000_monitor.py
|
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtCore import QTimer
from PyQt5.QtWidgets import QApplication, QWidget, QSizePolicy
from form.ADE9000_form import Ui_Form
import serial
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
class ADE9000Monitor(QWidget):
    """Main window: polls the ADE9000 over serial once per second and updates the LCDs."""

    # last line read from the serial port (becomes a list of fields after split())
    arduinoData: str
    Vrms: float
    Arms: float
    Watt: float
    Freq: float
    VTHD: float
    ITHD: float
    PF: float
    AngVAIA: float
    Temp: float
    # full-scale calibration constants: raw ADC code <-> physical unit
    Full_scale_Vrms = 566.4
    Full_scale_Vrms_code = 52702092
    Full_scale_Arms = 208.0
    Full_scale_Arms_code = 52702092
    Full_scale_Watt = Full_scale_Vrms * Full_scale_Arms
    Full_scale_Watt_code = 20694066

    def __init__(self, parent=None):
        """Build the generated UI, embed the waveform canvas and start a 1 s poll timer."""
        super(ADE9000Monitor, self).__init__(parent)
        self.ui = Ui_Form()
        self.ui.setupUi(self)
        self.canvas = PlotCanvas(self, width=6.8, height=3.7)
        self.canvas.move(10, 220)
        self.timer = QTimer(self)
        self.timer.start(1000)
        self.timer.timeout.connect(self.get_value)

    def get_value(self):
        """Request one sample frame ('r') and refresh every display.

        Reads one whitespace-separated ASCII line from the module-level `ser`
        port opened in __main__.  Field order assumed: Vrms, Arms, Watt, Freq,
        VTHD, ITHD, PF, (AngVAIA, unused), Temp, then interleaved V/I samples
        -- NOTE(review): confirm against the Arduino firmware.
        """
        ser.write(b'r')
        self.arduinoData = ser.readline().decode('ascii')
        self.arduinoData = self.arduinoData.split()
        self.Vrms = self.arduinoData[0]
        # raw code -> volts via full-scale calibration
        self.Vrms = int(self.Vrms) * self.Full_scale_Vrms / self.Full_scale_Vrms_code
        self.ui.lcd_Voltage.display(self.Vrms)
        self.Arms = self.arduinoData[1]
        self.Arms = int(self.Arms) * self.Full_scale_Arms / self.Full_scale_Arms_code
        self.ui.lcd_Current.display(self.Arms)
        self.Watt = self.arduinoData[2]
        self.Watt = int(self.Watt) * self.Full_scale_Watt / self.Full_scale_Watt_code
        self.Watt = abs(self.Watt)
        self.ui.lcd_Power.display(self.Watt)
        self.Freq = self.arduinoData[3]
        self.ui.lcd_Freq.display(self.Freq)
        self.VTHD = self.arduinoData[4]
        # print(self.VTHD)
        self.VTHD = float(self.VTHD)
        self.ui.lcd_VTHD.display(self.VTHD)
        self.ITHD = self.arduinoData[5]
        # print(self.ITHD)
        self.ITHD = float(self.ITHD)
        self.ui.lcd_ITHD.display(self.ITHD)
        self.PF = self.arduinoData[6]
        self.PF = float(self.PF) * 100   # power factor shown as a percentage
        self.ui.lcd_PF.display(self.PF)
        # self.AngVAIA = self.arduinoData[7]
        # self.AngVAIA = float(self.AngVAIA) * 0.02109375
        # print(self.AngVAIA)
        # self.ui.lcd_AngVAIA.display(self.AngVAIA)
        self.Temp = self.arduinoData[8]
        self.ui.lcd_Temp.display(self.Temp)
        # remaining fields are the sampled waveform, consumed by the canvas
        self.canvas.plot(*self.arduinoData)
class PlotCanvas(FigureCanvas):
    """Matplotlib canvas with two side-by-side axes: voltage and current waveforms."""

    arduinoData: str
    # full-scale calibration for the resampled waveform codes
    Full_scale_Vresampled = 801.0
    Full_scale_Vresampled_code = 18196
    Full_scale_Iresampled = 294.1
    Full_scale_Iresampled_code = 18196

    def __init__(self, parent=None, width=5, height=4, dpi=100):
        """Create the figure and the two subplot axes, embedded in *parent*."""
        self.fig = Figure(figsize=(width, height), dpi=dpi)
        self.axes = self.fig.add_subplot(1, 2, 1)
        FigureCanvas.__init__(self, self.fig)
        self.setParent(parent)
        FigureCanvas.setSizePolicy(self,
                                   QSizePolicy.Expanding,
                                   QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
        # NOTE(review): add_subplot(1, 2, 1) is called again here with the
        # same spec as self.axes above -- presumably they refer to the same
        # axes; confirm before removing either.
        self.ax1 = self.figure.add_subplot(1, 2, 1)
        self.ax2 = self.figure.add_subplot(1, 2, 2)

    def plot(self, *args):
        """Redraw both waveforms from one serial frame.

        V samples are args[9::2] and I samples args[10::2] (fields 0-8 are the
        scalar readings consumed by ADE9000Monitor.get_value).
        """
        self.V = args[9::2]
        self.V = [int(s) / self.Full_scale_Vresampled_code * self.Full_scale_Vresampled for s in self.V]
        self.I = args[10::2]
        self.I = [int(s) / self.Full_scale_Iresampled_code * self.Full_scale_Iresampled for s in self.I]
        self.ax1.clear()
        self.ax1.set_title('Voltage')
        self.ax1.set_xlabel("Sample")
        self.ax1.set_ylabel("Volt")
        self.ax1.plot(self.V)
        self.ax2.clear()
        self.ax2.set_title('Current')
        self.ax2.set_xlabel("Sample")
        self.ax2.set_ylabel("Ampere")
        self.ax2.plot(self.I)
        self.fig.subplots_adjust(top=0.9, bottom=0.2, wspace=0.5)
        self.draw()
if __name__ == '__main__':
    # Open the Arduino serial link first: ADE9000Monitor.get_value reads from
    # this module-level `ser` handle.
    ser = serial.Serial('COM9', baudrate=115200, timeout=1)
    app = QApplication(sys.argv)
    window = ADE9000Monitor()
    window.show()
    sys.exit(app.exec_())
|
{"/ADE9000_monitor.py": ["/form/ADE9000_form.py"]}
|
36,763
|
EmbeddedSystemClass/ADE9000_monitor
|
refs/heads/master
|
/form/ui_convert_qt.py
|
# -*- coding: utf-8 -*-
from PyQt5 import uic

# Regenerate ADE9000_form.py from the Qt Designer file.
# BUG FIX (robustness): context managers close both files even when
# uic.compileUi raises; the original leaked the handles on error.
with open('ADE9000_form.ui', 'r', encoding='utf8') as fin, \
        open('ADE9000_form.py', 'w', encoding='utf8') as fout:
    uic.compileUi(fin, fout, execute=False)
|
{"/ADE9000_monitor.py": ["/form/ADE9000_form.py"]}
|
36,768
|
bslsbdd/FIT9133_Assignment_2
|
refs/heads/master
|
/checkFile.py
|
def check_file(file1, file2):
    """Compare *file1* and *file2* line by line, printing mismatched pairs.

    For every line of *file1*, the next unread line of *file2* is consumed
    and compared; differing pairs are printed with the 1-based line number of
    *file1*.  Lines of *file1* beyond the end of *file2* are not reported
    (original behaviour, kept).
    """
    # BUG FIX (robustness): the original only closed the files on the happy
    # path; 'with' guarantees closure even if iteration raises.
    with open(file1, "r", encoding='utf-8') as f1, \
            open(file2, "r", encoding='utf-8') as f2:
        n = 0
        for line1 in f1:
            n += 1
            # the inner loop advances f2 by exactly one line per outer line
            for line2 in f2:
                if line1 == line2:
                    # print("SAME\n")
                    break
                else:
                    print(line1 + line2, n)
                    break
#print(filecmp.cmp('pg16328_clean.txt','test.txt'))
#check_file('11-0_clean.txt','11-0.txttest.txt')
#check_file('84-0_clean.txt','84-0.txttest.txt')
#check_file('1342-0_clean.txt','1342-0.txttest.txt')
#check_file('1661-0_clean.txt','1661-0.txttest.txt')
#check_file('1952-0_clean.txt','1952-0.txttest.txt')
# Entry point: diff the cleaned text against the reference output for pg16328.
check_file('pg16328_clean.txt','pg16328.txttest.txt')
|
{"/assignment_testcase.py": ["/idf_27133087.py"]}
|
36,769
|
bslsbdd/FIT9133_Assignment_2
|
refs/heads/master
|
/A2_27133087/word_27133087.py
|
############################################
# The solution is developed by Chengyao Xu #
# Student ID: 27133087 #
############################################
class WordAnalyser:
    """Count word occurrences in a cleaned text and derive per-word frequencies."""

    def __init__(self):
        """Start with an empty word->count table and a zero word total."""
        self.word_counts = {}   # word -> number of occurrences
        self.word_sum = 0       # total number of words seen
        return

    def __str__(self):
        """Render one 'word:count' pair per line."""
        info = ''
        for key, value in self.word_counts.items():
            info = info + key + ':' + str(value) + '\n'
        return info

    def analyse_words(self, book_text):
        """Tokenise *book_text* on whitespace and accumulate per-word counts."""
        for word in book_text.split():
            self.word_sum += 1
            # dict.get replaces the original `in self.word_counts.keys()`
            # membership test + two-branch update (same behaviour, one lookup)
            self.word_counts[word] = self.word_counts.get(word, 0) + 1
        return

    def get_word_frequency(self):
        """Return {word: count / total_words}; empty dict if nothing analysed."""
        return {word: count / self.word_sum
                for word, count in self.word_counts.items()}
|
{"/assignment_testcase.py": ["/idf_27133087.py"]}
|
36,770
|
bslsbdd/FIT9133_Assignment_2
|
refs/heads/master
|
/idf_27133087.py
|
############################################
# The solution is developed by Chengyao Xu #
# Student ID: 27133087 #
############################################
import pandas as pd
import math

# allow it to print more data in data frame (wide, unabridged console dumps)
pd.set_option('display.max_rows', 500)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000000)
class IDFAnalyser:
    """Accumulate per-document word frequencies and compute smoothed IDF scores."""

    def __init__(self):
        # one row per document (index = book title), one column per word
        self.data = pd.DataFrame()

    def load_frequency(self, book_frequency, book_title):
        """Append *book_frequency* (word -> frequency dict) as row *book_title*."""
        new_row = pd.DataFrame([book_frequency], index=[book_title])
        # BUG FIX: DataFrame.append was deprecated in pandas 1.4 and removed
        # in 2.0; pd.concat is the supported equivalent (sort=False kept).
        self.data = pd.concat([self.data, new_row], sort=False)
        return

    def get_IDF(self, term):
        """Return the smoothed inverse document frequency 1 + ln(d / (1 + n)).

        d = number of loaded documents; n = number of documents containing
        *term* (0 when the term appears in no document).
        """
        if term not in self.data.keys():
            n = 0
        else:
            n = self.data.count()[term]   # non-NaN rows = docs containing term
        d = len(self.data)
        idf = 1 + math.log(d / (1 + n))
        return idf

    def get_data(self):
        """Return the underlying frequency DataFrame."""
        return self.data
|
{"/assignment_testcase.py": ["/idf_27133087.py"]}
|
36,771
|
bslsbdd/FIT9133_Assignment_2
|
refs/heads/master
|
/assignment_testcase.py
|
import preprocessor_27133087 as processor
import word_27133087 as word
import idf_27133087 as idf
import task4_27133087 as task4
def get_document_word_frequency(book_name):
    """Clean the text of *book_name* and return its word -> frequency dict."""
    preprocessor = processor.Preprocessor()
    preprocessor.read_text(book_name)
    preprocessor.clean()
    analyser = word.WordAnalyser()
    analyser.analyse_words(preprocessor.get_content())
    return analyser.get_word_frequency()
if __name__ == "__main__":
    idf_analyser = idf.IDFAnalyser()
    # word-frequency dicts for the six Project Gutenberg texts
    book_11_0_info = get_document_word_frequency('11-0.txt')
    book_84_0_info = get_document_word_frequency('84-0.txt')
    book_1342_0_info = get_document_word_frequency('1342-0.txt')
    book_1661_0_info = get_document_word_frequency('1661-0.txt')
    book_1952_0_info = get_document_word_frequency('1952-0.txt')
    book_pg16328_info = get_document_word_frequency('pg16328.txt')
    # load each frequency table under its book title
    idf_analyser.load_frequency(book_11_0_info,'11-0')
    idf_analyser.load_frequency(book_84_0_info,'84-0')
    idf_analyser.load_frequency(book_1342_0_info,'1342-0')
    idf_analyser.load_frequency(book_1661_0_info,'1661-0')
    idf_analyser.load_frequency(book_1952_0_info,'1952-0')
    idf_analyser.load_frequency(book_pg16328_info,'pg16328')
    # print the highest-TF-IDF document for each probe term
    print(task4.choice('frankenstein',idf_analyser))
    print(task4.choice('sherlock',idf_analyser))
    print(task4.choice('the',idf_analyser))
    print(task4.choice('bird',idf_analyser))
    print(task4.choice('wallpaper',idf_analyser))
    print(task4.choice('\n',idf_analyser))
|
{"/assignment_testcase.py": ["/idf_27133087.py"]}
|
36,772
|
bslsbdd/FIT9133_Assignment_2
|
refs/heads/master
|
/A2_27133087/preprocessor_27133087.py
|
############################################
# The solution is developed by Chengyao Xu #
# Student ID: 27133087 #
############################################
import string
class Preprocessor:
    """Load a book text and strip it down to lower-case words and newlines."""

    def __init__(self):
        self.book_content = ''   # raw text; becomes cleaned text after clean()

    def __str__(self):
        """Printing the object prints the current book content."""
        return self.book_content

    def clean(self):
        """Keep only ASCII letters, digits and newlines, lower-cased.

        '-', '_' and spaces are normalised to a single space; every other
        character is dropped.  Returns 1 (original sentinel, kept) when there
        is no content to clean.
        """
        if len(self.book_content) == 0:
            return 1
        cleaned_chars = []
        for character in self.book_content:
            # keep english characters, digits and the allowed separators
            if character in string.ascii_letters or \
                    character.isnumeric() or \
                    character in ['-','_',' ',' '] or \
                    character == '\n':
                if character in ['-','_',' ']:
                    cleaned_chars.append(' ')
                else:
                    cleaned_chars.append(character.lower())
        # ''.join avoids the quadratic '+=' string build of the original
        self.book_content = ''.join(cleaned_chars)
        return

    def read_text(self, text_name):
        """Read the whole file *text_name* (UTF-8) into book_content.

        BUG FIX: the original never closed the file handle; 'with'
        guarantees it is released even if reading fails.
        """
        with open(text_name, 'r', encoding='utf-8') as file:
            self.book_content += file.read()
        return

    def get_content(self):
        """Return the (possibly cleaned) book content."""
        return self.book_content
|
{"/assignment_testcase.py": ["/idf_27133087.py"]}
|
36,773
|
bslsbdd/FIT9133_Assignment_2
|
refs/heads/master
|
/A2_27133087/task4_27133087.py
|
############################################
# The solution is developed by Chengyao Xu #
# Student ID: 27133087 #
############################################
# define choice function where term is a string indicating what term is being used to
# search and documents is an idf object outlined in Task 3
def choice(term, documents):
    """Pick the loaded document with the highest TF-IDF score for *term*.

    *documents* is an IDFAnalyser-like object exposing .data, .get_IDF() and
    .get_data().  Raises AssertionError when *term* occurs in no document.
    """
    # check is term in the document or not
    assert term in documents.data.keys(), term + "Term is not in documents"
    idf = documents.get_IDF(term)
    # per-document term frequency; NaN rows (documents lacking the term) removed
    tf_by_book = documents.get_data()[term].dropna().to_dict()
    tfidf_by_book = {book: tf * idf for book, tf in tf_by_book.items()}
    # ties resolve to the first-inserted book, matching the original list scan
    return max(tfidf_by_book, key=tfidf_by_book.get)
|
{"/assignment_testcase.py": ["/idf_27133087.py"]}
|
36,788
|
andrster/c4cs-w18-rpn
|
refs/heads/master
|
/test_rpn.py
|
import unittest
import rpn
class TestBasics(unittest.TestCase):
    """Unit tests for rpn.calculate: arithmetic, unary ops and error cases."""

    def test_add(self):
        result = rpn.calculate('1 1 +')
        self.assertEqual(2,result)

    def test_adds(self):
        result = rpn.calculate(' 1 1 + 2 +')
        self.assertEqual(4, result)

    def test_subtract(self):
        result = rpn.calculate('5 2 -')
        self.assertEqual(3,result)

    # a leftover operand must raise TypeError (name kept: 'tomany' = too many)
    def test_tomany(self):
        with self.assertRaises(TypeError):
            Eresult = rpn.calculate('1 2 3 +')

    def test_mult(self):
        result = rpn.calculate('3 2 *')
        self.assertEqual(6, result)

    def test_div(self):
        result = rpn.calculate('10 2 /')
        self.assertEqual(5, result)

    def test_all(self):
        result = rpn.calculate('1 1 + 2 *')
        self.assertEqual(4, result)

    # division by zero returns a sentinel string rather than raising
    def test_div_zero(self):
        result = rpn.calculate('3 4 0 /')
        self.assertEqual('Error div by zero', result)

    def test_factorial(self):
        result = rpn.calculate('4 ! 3 +')
        self.assertEqual(27,result)

    # 'F' converts the top of the stack to an exact Fraction
    def test_factor(self):
        result = rpn.calculate('.25 F')
        self.assertEqual('1/4', str(result))

    # 'R' repeats the previously applied operator
    def test_repeat(self):
        result = rpn.calculate('2 3 2 + R')
        self.assertEqual(7,result)

    def test_carret(self):
        result = rpn.calculate('3 2 ^')
        self.assertEqual(9,result)
|
{"/test_rpn.py": ["/rpn.py"]}
|
36,789
|
andrster/c4cs-w18-rpn
|
refs/heads/master
|
/rpn.py
|
#!/usr/bin/env python 3
import operator
import sys
import logging
import math
from fractions import Fraction
# debug logging of the evaluation stack goes to stdout
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
sh = logging.StreamHandler(sys.stdout)
logger.addHandler(sh)

# Token -> callable dispatch table.
# NOTE(review): this rebinding shadows the imported `operator` module for the
# rest of the file -- consider renaming (e.g. OPERATIONS) in a follow-up.
operator = {
    '+':operator.add,
    '-':operator.sub,
    '*':operator.mul,
    '/':operator.truediv,
    # '^':operator.pow,
    '!':math.factorial,   # unary
    'F':Fraction,         # unary: convert to an exact fraction
    'R':operator.add,     # 'repeat': placeholder, replaced by previous operator
    '^':operator.pow
}
def calculate(arg):
    """Evaluate the whitespace-separated RPN expression *arg*.

    Numbers are rounded to 2 decimals on push.  Binary ops pop two operands;
    '!' and 'F' are unary; 'R' re-applies the previous operator.  Returns the
    single remaining stack value, the string 'Error div by zero' on division
    by zero, and raises TypeError when operands are left over.
    """
    stack = list()
    oldtoken = ''   # previous token, consulted by the 'R' (repeat) operator
    for token in arg.split():
        try:
            value = float(token)
            value = round(value,2)
            stack.append(value)
        except ValueError:
            # not a number: dispatch through the module-level operator table
            function = operator[token]
            arg2 = stack.pop()
            # binary operators need a second operand popped up front
            if token != '!' and token != 'F' and token != 'R':
                arg1 = stack.pop()
            if token == '/' and arg2 == 0:
                return 'Error div by zero'
            elif token == '!' or token == 'F':
                result = function(arg2)
                stack.append(result)
                print(result)
            elif token == 'R':
                # repeat: look up the operator applied on the previous step
                function = operator[oldtoken]
                if oldtoken == '!' or oldtoken == 'F':
                    result = function(arg2)
                    stack.append(result)
                    print(result)
                else:
                    arg1 = stack.pop()
                    result = function(arg1,arg2)
                    stack.append(result)
            else:
                result = function(arg1,arg2)
                stack.append(result)
            logger.debug(stack)
        oldtoken = token
    # a well-formed expression reduces to exactly one value
    if len(stack) != 1:
        raise TypeError
    return stack.pop();
def main():
    """Interactive read-eval-print loop (terminate with Ctrl-C / EOF)."""
    while True:
        print(calculate(input('rpn calc>')))

if __name__ == '__main__':
    main()
|
{"/test_rpn.py": ["/rpn.py"]}
|
36,805
|
IVC-Projects/PMVE
|
refs/heads/master
|
/UTILS_MF_ra_single.py
|
import numpy as np
import tensorflow as tf
import math, os, random, re
from PIL import Image
from VDSRUNK_MF_ra_single import BATCH_SIZE
from VDSRUNK_MF_ra_single import PATCH_SIZE
# due to a batch trainingSet come from one picture. I design a algorithm to make the TrainingSet more diversity.
def normalize(x):
    """Scale 8-bit pixel values into the [0, 1] range, clamped."""
    scaled = x / 255.
    return truncate(scaled, 0., 1.)
def denormalize(x):
    """Map [0, 1] values back to 8-bit pixel range, clamped to [0, 255]."""
    scaled = x * 255.
    return truncate(scaled, 0., 255.)
def truncate(input, min, max):
    """Clamp every element of *input* into the closed range [min, max].

    NOTE: the parameter names shadow the ``min``/``max`` builtins; they are
    kept so existing keyword callers keep working.
    """
    # np.clip does both bounds in one vectorized pass, replacing the original
    # pair of chained np.where calls (lower clamp, then upper clamp).
    return np.clip(input, min, max)
def remap(input):
    """Map full-range [0, 255] luma to TV range, clamped to [16, 235]."""
    tv_range = 16+219/255*input
    return truncate(tv_range, 16.0, 235.0)
def deremap(input):
    """Map TV-range [16, 235] luma back to full range, clamped to [0, 255]."""
    full_range = (input-16)*255/219
    return truncate(full_range, 0.0, 255.0)
# return the whole absolute path.
def load_file_list(directory):
    """Return the paths (directory-joined) of the plain files in *directory*."""
    candidates = (os.path.join(directory, name) for name in os.listdir(directory))
    #list.sort()
    return [path for path in candidates if os.path.isfile(path)]
def searchHighData(currentLowDataIndex, highDataList, highIndexList):
    """Find the two high-quality frames bracketing a low-quality frame.

    Scans indices within +/-3 of *currentLowDataIndex*, keeps the two that
    appear in *highIndexList*, then returns the entries of *highDataList*
    whose trailing ``_<index>`` matches either one (input order preserved).
    """
    searchOffset = 3
    searchedHighDataIndexList = [
        i for i in range(currentLowDataIndex - searchOffset,
                         currentLowDataIndex + searchOffset + 1)
        if i in highIndexList]
    assert len(searchedHighDataIndexList) == 2, 'search method have error!'
    searchedHighData = []
    for tempData in highDataList:
        frame_index = int(os.path.basename(tempData).split('.')[0].split('_')[-1])
        # BUG FIX: the original chained comparison
        # `idx == list[0] == list[1]` can never hold for two distinct
        # bracketing indices, so the function always returned [].
        if frame_index in searchedHighDataIndexList:
            searchedHighData.append(tempData)
    return searchedHighData
# return like this"[[[high1Data, lowData], label], [[2, 7, 8], 22], [[3, 8, 9], 33]]" with the whole path.
def get_test_list2(highDataList, lowDataList, labelList):
    """Build test tuples [[high1, low, high2], label] for each low frame.

    Assumes every fourth frame index (0, 4, 8, ... 48) is a high-quality
    frame, and that searchHighData returns exactly the two bracketing high
    frames for each low frame -- NOTE(review): confirm for other GOP sizes.
    """
    assert len(lowDataList) == len(labelList), "low:%d, label:%d,"%(len(lowDataList) , len(labelList))
    # [0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48]
    highIndexList = [q for q in range(49) if q % 4 == 0]
    test_list = []
    for tempDataPath in lowDataList:
        tempData = []
        temp = []
        # this place should changed on the different situation.
        # frame index parsed from the trailing "_<n>" of the file name
        currentLowDataIndex = int(os.path.basename(tempDataPath).split('.')[0].split('_')[-1])
        searchedHighData = searchHighData(currentLowDataIndex, highDataList, highIndexList)
        tempData.append(searchedHighData[0])
        tempData.append(tempDataPath)
        tempData.append(searchedHighData[1])
        i = list(lowDataList).index(tempDataPath)
        temp.append(tempData)
        temp.append(labelList[i])
        test_list.append(temp)
    return test_list
def get_temptest_list(high1DataList, lowDataList, high2DataList, labelList):
    """Zip (high1, low, high2) triples with their labels.

    Returns ``[[[high1, low, high2], label], ...]``, one entry per frame.
    """
    test_list = []
    for i in range(len(lowDataList)):
        # BUG FIX: tempData/temp were created once OUTSIDE the loop, so every
        # entry aliased the same ever-growing lists; they must be fresh per
        # iteration to form independent [triple, label] pairs.
        tempData = [high1DataList[i], lowDataList[i], high2DataList[i]]
        test_list.append([tempData, labelList[i]])
    return test_list
# [[high, low1, label1], [[h21,h22], low2, label2]]
def get_test_list( lowDataList, labelList):
    """Split low frames into double-reference and single (key-frame) test lists.

    Frames whose index is NOT a multiple of 4 go to doubleTest_list; multiples
    of 4 (key frames) go to singleTest_list.  Each entry is [lowData, label];
    for key frames the label is only attached when its frame index matches the
    low frame's -- NOTE(review): a mismatch silently yields a 1-element entry.
    """
    doubleTest_list = []
    singleTest_list = []
    for lowdata in lowDataList:
        tempData = []
        # frame index parsed from the trailing "_<n>" of the file name
        lowdataIndex = int(os.path.basename(lowdata).split('.')[0].split('_')[-1])
        if lowdataIndex % 4 != 0:
            #ASCNN_Frames_index = lowdataIndex - (lowdataIndex//4) - 1
            #tempData.append(ASCNN_Frames_List[ASCNN_Frames_index])
            tempData.append(lowdata)
            labelIndex = list(lowDataList).index(lowdata)
            tempData.append(labelList[labelIndex])
            doubleTest_list.append(tempData)
            # [[ASCNN_Frame, lowData, LabelData],[ASCNN_Frame2, lowData2, LabelData2], []]
        else:
            tempData.append(lowdata)
            labelIndex = list(lowDataList).index(lowdata)
            if int(os.path.basename(lowdata).split('.')[0].split('_')[-1]) == \
                    int(os.path.basename(labelList[labelIndex]).split('.')[0].split('_')[-1]):
                tempData.append(labelList[labelIndex])
            singleTest_list.append(tempData)
    return doubleTest_list, singleTest_list
# return like this"[[[high1Data, lowData, high2Data], label], [[2, 7, 8], 22], [[3, 8, 9], 33]]" with the whole path.
def get_train_list(lowDataList, labelList):
    """Pair each label with the same-named file in the low-data directory.

    Returns ``[[[low_path], label_path], ...]``, one entry per label; the low
    path is rebuilt as <dir of lowDataList[0]>/<basename of label>.
    """
    # BUG FIX: the original assert message had three %d placeholders but only
    # two arguments, so a failing assertion raised TypeError instead of the
    # intended AssertionError message.
    assert len(lowDataList) == len(labelList), \
        "low:%d, label:%d" % (len(lowDataList), len(labelList))
    lowPath = os.path.split(lowDataList[0])[0]
    train_list = []
    for label in labelList:
        low_file = os.path.join(lowPath, os.path.basename(label))
        train_list.append([[low_file], label])
    return train_list
def prepare_nn_data(train_list):
    """Assemble one training batch of (low-quality, ground-truth) patch pairs.

    Picks 8 random entries from *train_list*, crops 8 random patches from each
    (8 * 8 = 64 patches; presumably equal to BATCH_SIZE -- TODO confirm),
    normalizes them to [0, 1] and reshapes to (BATCH_SIZE, H, W, 1).
    NOTE(review): `crop` is defined elsewhere in this module (not visible here).
    """
    batchSizeRandomList = random.sample(range(0,len(train_list)), 8)
    # print(batchSizeRandomList)
    gt_list = []
    #high1Data_list = []
    lowData_list = []
    #high2Data_list = []
    for i in batchSizeRandomList:
        #high1Data_image = c_getYdata(train_list[i][0][0])
        # l = high1Data_image.astype('uint8')
        # l = Image.fromarray(l)
        # l.show()
        lowData_image = c_getYdata(train_list[i][0][0])
        gt_image = c_getYdata(train_list[i][1])
        # print('high1Data_image-->' + train_list[i][0][0])
        # print('lowData_image:-->'+train_list[i][0][1])
        # print('high2Data_image:-->' + train_list[i][0][2])
        # loss analyze!!!
        # print('GT--->' + train_list[i][1])
        for j in range(0, 8):
            #crop images to the disired size.
            lowData_imgY, gt_imgY = \
                crop(lowData_image, gt_image, PATCH_SIZE[0], PATCH_SIZE[1], "ndarray")
            # l = high1Data_imgY.astype('uint8')
            # l = Image.fromarray(l)
            # l.show()
            # l = lowData_imgY.astype('uint8')
            # l = Image.fromarray(l)
            # l.show()
            # l = high2Data_imgY.astype('uint8')
            # l = Image.fromarray(l)
            # l.show()
            #normalize
            #high1Data_imgY = normalize(high1Data_imgY)
            lowData_imgY = normalize(lowData_imgY)
            gt_imgY = normalize(gt_imgY)
            #high1Data_list.append(high1Data_imgY)
            lowData_list.append(lowData_imgY)
            gt_list.append(gt_imgY)
            #inputcbcr_list.append(input_imgCbCr)
    #high1Data_list = np.resize(high1Data_list, (BATCH_SIZE, PATCH_SIZE[0], PATCH_SIZE[1], 1))
    lowData_list = np.resize(lowData_list, (BATCH_SIZE, PATCH_SIZE[0], PATCH_SIZE[1], 1))
    gt_list = np.resize(gt_list, (BATCH_SIZE, PATCH_SIZE[0], PATCH_SIZE[1], 1))
    #inputcbcr_list = np.resize(inputcbcr_list, (BATCH_SIZE, PATCH_SIZE[0], PATCH_SIZE[1], 2))
    return lowData_list, gt_list
def getWH(yuvfileName):
    """Parse the frame width and height out of a YUV file name.

    The name is expected to contain a "<w>x<h>" token as either the
    second-to-last or the second underscore-separated field, e.g.
    "seq_416x240_q37.yuv". Returns (width, height) as ints; raises if no
    such token is found.
    """
    # keep only the part before the first '.' (drops the .yuv suffix)
    deyuv = re.compile(r'(.+?)\.')
    stem = deyuv.findall(yuvfileName)[0]
    fields = os.path.basename(stem).split('_')
    if 'x' in fields[-2]:
        wxh = fields[-2]
    elif 'x' in fields[1]:
        wxh = fields[1]
    else:
        # print(yuvfileName)
        raise Exception('do not find wxh')
    w, h = wxh.split('x')
    return int(w), int(h)
def getYdata(path, size):
    """Read the Y (luma) plane of a raw YUV file as a float32 ndarray.

    size is (width, height). The file is read whole; PIL interprets the
    first w*h bytes as an 8-bit grayscale frame.
    """
    w = size[0]
    h = size[1]
    with open(path, 'rb') as fp:
        fp.seek(0, 0)
        raw = fp.read()
    frame = Image.frombytes('L', [w, h], raw)
    return np.asarray(frame, dtype='float32')
def c_getYdata(path):
    """Read the luma plane, with the frame size parsed from the file name."""
    size = getWH(path)
    return getYdata(path, size)
def c_getCbCr(path):
    """Read the Cb and Cr planes of a 4:2:0 YUV file as two int lists.

    Returns [cb_list, cr_list]; returns '' when the file is empty
    (preserved quirk of the original implementation).
    """
    w, h = getWH(path)
    with open(path, 'rb+') as fh:
        luma = fh.read(h * w)          # skip the Y plane
        if luma == b'':
            return ''
        # 4:2:0 subsampling: each chroma plane is a quarter of the luma size
        cb = fh.read(h * w // 4)
        cr = fh.read(h * w // 4)
    return [list(map(int, cb)), list(map(int, cr))]
def img2y(input_img):
    """Split a 3-channel PIL image into its Y plane and stacked CbCr planes.

    Returns (Y, CbCr) as float32 arrays; CbCr is (h, w, 2). For a declared
    1-channel image the input is returned unchanged with CbCr = None.
    Exits the process for any other channel count.
    """
    n_channels = np.asarray(input_img).shape[2]
    if n_channels == 3:
        ycbcr = input_img.convert('YCbCr')
        y_plane = ycbcr.split()[0]
        cb_plane, cr_plane = ycbcr.split()[1:3]
        input_imgY = np.asarray(y_plane, dtype='float32')
        # Cb and Cr are always used in pair, so stack them into one array
        cb = np.expand_dims(np.asarray(cb_plane, dtype='float32'), 2)
        cr = np.expand_dims(np.asarray(cr_plane, dtype='float32'), 2)
        input_imgCbCr = np.concatenate((cb, cr), axis=2)
    elif n_channels == 1:
        print("This image has one channal only.")
        input_imgY = input_img
        input_imgCbCr = None
    else:
        print("The num of channal is neither 3 nor 1.")
        exit()
    return input_imgY, input_imgCbCr
# def crop(input_image, gt_image, patch_width, patch_height, img_type):
def crop(lowData_image, gt_image, patch_width, patch_height, img_type):
    """Cut one random co-located patch out of a frame and its ground truth.

    Only the "ndarray" path is implemented; the "Image" path is a stub and
    returns the empty initial lists.
    """
    assert type(gt_image) == type(lowData_image), "types are different."
    lowData_cropped = []
    gt_cropped = []
    if img_type == "ndarray":
        # random top-left corner that keeps the patch fully inside the frame
        row0 = random.randint(0, lowData_image.shape[0] - patch_width)
        col0 = random.randint(0, lowData_image.shape[1] - patch_height)
        row1 = row0 + patch_width
        col1 = col0 + patch_height
        lowData_cropped = lowData_image[row0:row1, col0:col1]
        gt_cropped = gt_image[row0:row1, col0:col1]
    elif img_type == "Image":
        # PIL-based cropping was never implemented
        pass
    return lowData_cropped, gt_cropped
def save_images(inputY, inputCbCr, size, image_path):
    """Tile a batch of YCbCr patches into one grid image and save it as RGB.

    inputY, inputCbCr : numpy arrays shaped [batch, h, w, c]
    size : [rows, cols] of the grid; batch must not exceed rows * cols
    image_path : destination file path
    """
    def merge(images, size):
        # paste each patch into its (row, col) cell of one big canvas
        h, w = images.shape[1], images.shape[2]
        canvas = np.zeros((h * size[0], w * size[1], 3))
        for idx, image in enumerate(images):
            col = idx % size[1]
            row = idx // size[1]
            canvas[row * h:row * h + h, col * w:col * w + w, :] = image
        return canvas

    luma = inputY.astype('uint8')
    chroma = inputCbCr.astype('uint8')
    output_concat = np.concatenate((luma, chroma), axis=3)
    assert len(output_concat) <= size[0] * size[1], "number of images should be equal or less than size[0] * size[1] {}".format(len(output_concat))
    grid = merge(output_concat, size).astype('uint8')
    img = Image.fromarray(grid, mode='YCbCr')
    img = img.convert('RGB')
    img.save(image_path)
def get_image_batch(train_list, offset, batch_size):
    """Build a batch of (input Y, ground-truth Y, input CbCr) from image files.

    Takes batch_size (input, gt) path pairs starting at *offset*, loads them
    with PIL, and returns arrays shaped (batch_size, PATCH_SIZE[0],
    PATCH_SIZE[1], 1) plus the list of CbCr arrays.

    NOTE(review): crop() with img_type="Image" is an unimplemented stub that
    returns two empty lists, so input_img/gt_img become [] here and the
    img2y() calls below would fail — this function looks dead/broken as
    written; confirm before use.
    """
    target_list = train_list[offset:offset + batch_size]
    input_list = []
    gt_list = []
    inputcbcr_list = []
    for pair in target_list:
        input_img = Image.open(pair[0])
        gt_img = Image.open(pair[1])
        # crop images to the desired size (stub for PIL images — see NOTE above)
        input_img, gt_img = crop(input_img, gt_img, PATCH_SIZE[0], PATCH_SIZE[1], "Image")
        # focus on Y channel only
        input_imgY, input_imgCbCr = img2y(input_img)
        gt_imgY, gt_imgCbCr = img2y(gt_img)
        # input_imgY = normalize(input_imgY)
        # gt_imgY = normalize(gt_imgY)
        input_list.append(input_imgY)
        gt_list.append(gt_imgY)
        inputcbcr_list.append(input_imgCbCr)
    input_list = np.resize(input_list, (batch_size, PATCH_SIZE[0], PATCH_SIZE[1], 1))
    gt_list = np.resize(gt_list, (batch_size, PATCH_SIZE[0], PATCH_SIZE[1], 1))
    return input_list, gt_list, inputcbcr_list
def save_test_img(inputY, inputCbCr, path):
    """Save a single network output frame (batch of 1) as an RGB image.

    inputY : (1, h, w, 1) luma tensor; inputCbCr : matching chroma planes.
    """
    assert len(inputY.shape) == 4, "the tensor Y's shape is %s"%inputY.shape
    assert inputY.shape[0] == 1, "the fitst component must be 1, has not been completed otherwise.{}".format(inputY.shape)
    luma = np.squeeze(inputY, axis=0).astype('uint8')
    chroma = inputCbCr.astype('uint8')
    ycbcr = np.concatenate((luma, chroma), axis=2)
    img = Image.fromarray(ycbcr, mode='YCbCr').convert('RGB')
    img.save(path)
def psnr(hr_image, sr_image, max_value=255.0):
    """Peak signal-to-noise ratio between a reference and a reconstruction.

    Numpy arrays / lists return a Python float; anything else falls through
    to the TensorFlow graph-mode branch, which expects two 4-D tensors and
    returns a tensor op. eps guards log(0) for identical images.
    """
    eps = 1e-10
    # IDIOM FIX: use isinstance instead of exact type() comparison
    if isinstance(hr_image, (np.ndarray, list)):
        hr = np.asarray(hr_image, 'float32')
        sr = np.asarray(sr_image, 'float32')
        diff = sr - hr
        mse = max(eps, float(np.mean(diff * diff)))
        return float(10 * math.log10(max_value * max_value / mse))
    # TensorFlow branch: both inputs must be [batch, h, w, c] tensors.
    assert len(hr_image.shape) == 4 and len(sr_image.shape) == 4
    diff = hr_image - sr_image
    mse = tf.maximum(tf.reduce_mean(tf.square(diff)), eps)
    return 10 * tf.log(max_value * max_value / mse) / math.log(10)
def getBeforeNNBlockDict(img, w, h):
    """Split a frame into <=1000x1000 blocks with a 32-pixel overlap border.

    img : 2-D ndarray indexed [y, x]; w, h : frame width and height.
    Returns {block_index: ndarray view} with blocks numbered row-major.
    The overlap lets the CNN process each block independently and discard
    the border afterwards, avoiding seams when a frame is too large for
    GPU memory.

    BUG FIX: the original incremented the index with `i += i`, which keeps
    i at 0 forever, so every block overwrote key 0. It also only displayed
    the (single) block for debugging and returned nothing; the dict is now
    returned, matching the commented-out caller in the test script.
    """
    blockSize = 1000
    padding = 32
    yBlockNum = -(-h // blockSize)  # ceil division
    xBlockNum = -(-w // blockSize)
    blocks = {}
    i = 0
    for yBlock in range(yBlockNum):
        # edge blocks are clamped to the frame; interior blocks carry the
        # overlap border on both sides
        y0 = 0 if yBlock == 0 else blockSize * yBlock - padding
        y1 = h if yBlock == yBlockNum - 1 else blockSize * (yBlock + 1) + padding
        for xBlock in range(xBlockNum):
            x0 = 0 if xBlock == 0 else blockSize * xBlock - padding
            x1 = w if xBlock == xBlockNum - 1 else blockSize * (xBlock + 1) + padding
            blocks[i] = img[y0:y1, x0:x1]
            i += 1
    return blocks
|
{"/PSVE_test.py": ["/PSVE_model.py"], "/PSVE_train.py": ["/PSVE_model.py", "/UTILS_MF_ra_single.py"], "/PMVE_test.py": ["/PMVE_model.py", "/PSVE_model.py"]}
|
36,806
|
IVC-Projects/PMVE
|
refs/heads/master
|
/PMVE_model.py
|
import tensorflow as tf
import tensorflow.contrib.slim as slim
import tflearn
# Layer 1 has three convs (one per input frame); layers 2 and 3 have two each
# (as a transition); then the residual units begin (3 * 6 = 18 layers).
def network(frame1, frame2, frame3, reuse=False, scope='netflow'):
    """Three-frame quality-enhancement CNN (PMVE).

    frame1 / frame3: neighboring (ASCNN-predicted) frames; frame2: the frame
    being enhanced. Returns frame2 plus a learned residual of the same shape.
    Conv scope names are checkpoint-bound — do not rename them.
    NOTE(review): the file-header comment mentions 6 residual units but the
    loop below builds 5 — confirm which is intended.
    """
    with tf.variable_scope(scope, reuse=reuse):
        with slim.arg_scope([slim.conv2d], activation_fn=tflearn.activations.relu,
                            weights_initializer=tf.contrib.layers.xavier_initializer(uniform=True),
                            biases_initializer=tf.constant_initializer(0.0)), \
             slim.arg_scope([slim.conv2d_transpose], activation_fn=tflearn.activations.relu,
                            weights_initializer=tf.contrib.layers.xavier_initializer(uniform=True),
                            biases_initializer=tf.constant_initializer(0.0)):
            # feature extration
            c11 = slim.conv2d(frame1, 64, [3, 3], scope='conv1_1')
            c12 = slim.conv2d(frame2, 64, [3, 3], scope='conv1_2')
            c13 = slim.conv2d(frame3, 64, [3, 3], scope='conv1_3')
            # pair each neighboring frame's features with the center frame's
            concat1_12 = tf.concat([c11, c12], 3, name='concat1_12')
            concat1_23 = tf.concat([c12, c13], 3, name='concat1_23')
            # feature merging (1x1 conv reduces each 128-ch concat back to 64)
            concat12_1x1 = slim.conv2d(concat1_12, 64, [1, 1], scope='concat12_1x1')
            c21 = slim.conv2d(concat12_1x1, 64, [3, 3], scope='conv2_1')
            concat23_1x1 = slim.conv2d(concat1_23, 64, [1, 1], scope='concat23_1x1')
            c22 = slim.conv2d(concat23_1x1, 64, [3, 3], scope='conv2_2')
            # complex feature extration
            c31 = slim.conv2d(c21, 64, [3, 3], scope='conv3_1')
            c32 = slim.conv2d(c22, 64, [3, 3], scope='conv3_2')
            concat3_12 = tf.concat([c31, c32], 3, name='concat3_12')
            # rename!
            conv = concat3_12
            # non-linear mapping
            # residual reconstruction: bottleneck units 1x1 -> 3x3 -> 1x1(128)
            # with a skip connection around each (conv stays 128-channel)
            for i in range(5):
                c1 = slim.conv2d(conv, 64, [1, 1], scope='convB_%02d' % (i))
                convC = slim.conv2d(c1, 64, [3, 3], scope='convC_%02d' % (i))
                c2 = slim.conv2d(convC, 128, [1, 1], scope='convA_%02d' % (i))
                conv = tf.add(conv, c2)
            c5 = slim.conv2d(conv, 1, [5, 5], activation_fn=None, scope='conv5')
            # enhanced frame reconstruction
            output = tf.add(c5, frame2)
    return output
|
{"/PSVE_test.py": ["/PSVE_model.py"], "/PSVE_train.py": ["/PSVE_model.py", "/UTILS_MF_ra_single.py"], "/PMVE_test.py": ["/PMVE_model.py", "/PSVE_model.py"]}
|
36,807
|
IVC-Projects/PMVE
|
refs/heads/master
|
/PSVE_test.py
|
import time
from PSVE_model import model_single
from UTILS_MF_ra_ALL_Single import *
import itertools
tf.logging.set_verbosity(tf.logging.WARN)

# Experiment tag; output/checkpoint directories are derived from it.
EXP_DATA = "qp37_ra_PSVE_999"
TESTOUT_PATH = "./testout/%s/"%(EXP_DATA)
MODEL_PATH = "./checkpoints/%s/"%(EXP_DATA)
#ORIGINAL_PATH = "./data/test/mix_noSAO/test_D/q22"
#GT_PATH = "./data/test/mix/test_D"
# Reconstructed (compressed) test sequences and the matching originals.
QP_LOWDATA_PATH = r'I:\TJC\HEVC_TestSequenct\rec\qp37\ra'
GT_PATH = r"I:\TJC\HEVC_TestSequenct\org"
# Enhanced YUV frames are written below this directory.
DL_path = r'D:\gaoxiang\DL_path\abc'
OUT_DATA_PATH = "./outdata/%s/"%(EXP_DATA)
# Baseline HEVC PSNR (no filtering) per QP, used for deltas.
NOFILTER = {'q22':42.2758, 'q27':38.9788, 'qp32':35.8667, 'q37':32.8257,'qp37':32.8257}
def prepare_test_data(fileOrDir):
    """Load the single-frame test set.

    fileOrDir must be [low_dir, gt_dir]. Returns
    (singleData_ycbcr, singleGT_y, fileName_list) where each entry of
    singleData_ycbcr is [[lowY(1,h,w,1), CbCr]] and each GT is (1,h,w,1).
    Only the Y plane is normalized/fed to the net; CbCr is passed through.
    """
    if not os.path.exists(TESTOUT_PATH):
        os.mkdir(TESTOUT_PATH)
    doubleData_ycbcr = []
    doubleGT_y = []
    singleData_ycbcr = []
    singleGT_y = []
    fileName_list = []
    # The input is a [low, gt] directory pair.
    if len(fileOrDir) == 2:
        # return the whole absolute path.
        fileName_list = load_file_list(fileOrDir[0])
        # single_list # [[low1, lable1], [2,2] ....]
        single_list = get_test_list(load_file_list(fileOrDir[0]), load_file_list(fileOrDir[1]))
        for pair in single_list:
            lowData_list = []
            lowData_imgY = c_getYdata(pair[0])
            CbCr = c_getCbCr(pair[0])
            gt_imgY = c_getYdata(pair[1])
            # normalize, then add batch and channel dims: (1, h, w, 1)
            lowData_imgY = normalize(lowData_imgY)
            lowData_imgY = np.resize(lowData_imgY, (1, lowData_imgY.shape[0], lowData_imgY.shape[1], 1))
            gt_imgY = np.resize(gt_imgY, (1, gt_imgY.shape[0], gt_imgY.shape[1], 1))
            lowData_list.append([lowData_imgY, CbCr])
            singleData_ycbcr.append(lowData_list)
            singleGT_y.append(gt_imgY)
    else:
        print("Invalid Inputs...!tjc!")
        exit(0)
    return singleData_ycbcr, singleGT_y, fileName_list
def test_all_ckpt(modelPath, fileOrDir):
    """Run the single-frame model over the test set for checkpoint epoch 999.

    For every frame: enhance the Y plane, compare PSNR against the plain
    HEVC reconstruction, and write the enhanced YUV (with the PSNR delta in
    its file name) under DL_path. Prints per-frame and average statistics.
    """
    max = [0, 0]  # best (avg_psnr, epoch) pair; shadows the builtin 'max'
    tem = [f for f in os.listdir(modelPath) if 'data' in f]
    ckptFiles = sorted([r.split('.data')[0] for r in tem])
    re_psnr = tf.placeholder('float32')
    tf.summary.scalar('re_psnr', re_psnr)
    with tf.Session() as sess:
        # batch of 1, dynamic height/width, single (luma) channel
        lowData_tensor = tf.placeholder(tf.float32, shape=(1, None, None, 1))
        shared_model = tf.make_template('shared_model', model_single)
        output_tensor = shared_model(lowData_tensor)
        #output_tensor = shared_model(input_tensor)
        # network works in [0,1]; rescale to 8-bit range for PSNR/saving
        output_tensor = tf.clip_by_value(output_tensor, 0., 1.)
        output_tensor = output_tensor * 255
        merged = tf.summary.merge_all()
        file_writer = tf.summary.FileWriter(OUT_DATA_PATH, sess.graph)
        #weights = tf.get_collection(tf.GraphKeys.WEIGHTS)
        saver = tf.train.Saver(tf.global_variables())
        sess.run(tf.global_variables_initializer())
        singleData_ycbcr, singleGT_y, fileName_list = prepare_test_data(fileOrDir)
        for ckpt in ckptFiles:
            epoch = int(ckpt.split('_')[-1].split('.')[0])
            if epoch != 999:  # only the final checkpoint is evaluated
                continue
            saver.restore(sess, os.path.join(modelPath, ckpt))
            total_time, total_psnr = 0, 0
            total_imgs = len(fileName_list)
            count = 0
            total_hevc_psnr = 0
            for i in range(total_imgs):
                # print(fileName_list[i])
                count += 1
                # data layout: singleData_ycbcr[i] == [[lowY, CbCr]]
                lowDataY = singleData_ycbcr[i][0][0]
                imgLowCbCr = singleData_ycbcr[i][0][1]
                #imgCbCr = original_ycbcr[i][1]
                gtY = singleGT_y[i] if singleGT_y else 0
                #### adopt the split frame method to deal with the out of memory situation. ####
                # out = getBeforeNNBlockDict(imgHigh1DataY, imgHigh1DataY.shape[1], imgHigh1DataY.shape[0])
                start_t = time.time()
                out = sess.run(output_tensor, feed_dict={lowData_tensor: lowDataY})
                out = np.around(out)
                out = out.astype('int')
                out = np.reshape(out, [1, out.shape[1], out.shape[2], 1])
                # baseline: PSNR of the un-enhanced HEVC frame (Y in [0,1])
                hevc = psnr(lowDataY * 255.0, gtY)
                total_hevc_psnr += hevc
                duration_t = time.time() - start_t
                total_time += duration_t
                # flatten enhanced luma, then reattach the untouched chroma
                Y = np.reshape(out, [out.shape[1], out.shape[2]])
                Y = np.array(list(itertools.chain.from_iterable(Y)))
                U = imgLowCbCr[0]
                V = imgLowCbCr[1]
                creatPath = os.path.join(DL_path, fileName_list[i].split('\\')[-2])
                if not os.path.exists(creatPath):
                    os.mkdir(creatPath)
                # print(np.shape(gtY))
                if singleGT_y:
                    p = psnr(out, gtY)
                    # output file name carries the PSNR gain over plain HEVC
                    path = os.path.join(DL_path,
                                        fileName_list[i].split('\\')[-2],
                                        fileName_list[i].split('\\')[-1].split('.')[0]) + '_%.4f' % (p - hevc) + '.yuv'
                    YUV = np.concatenate((Y, U, V))
                    YUV = YUV.astype('uint8')
                    YUV.tofile(path)
                    total_psnr += p
                    print("qp??\tepoch:%d\t%s\t%.4f\n" % (epoch, fileName_list[i], p))
            #print("took:%.2fs\t psnr:%.2f name:%s"%(duration_t, p, save_path))
            print("AVG_DURATION:%.2f\tAVG_PSNR:%.4f"%(total_time/total_imgs, total_psnr / count))
            print('count:', count)
            print('total_hevc_psnr:', total_hevc_psnr / count)
            # avg_psnr = total_psnr/total_imgs
            avg_psnr = total_psnr / count
            avg_duration = (total_time/total_imgs)
            if avg_psnr > max[0]:
                max[0] = avg_psnr
                max[1] = epoch
    # QP = os.path.basename(HIGHDATA_PATH)
    # tf.logging.warning("QP:%s\tepoch: %d\tavg_max:%.4f\tdelta:%.4f"%(QP, max[1], max[0], max[0]-NOFILTER[QP]))
if __name__ == '__main__':
    # Evaluate on the (reconstructed, original) directory pair.
    test_all_ckpt(MODEL_PATH, [QP_LOWDATA_PATH, GT_PATH])
    # test_all_ckpt(MODEL_PATH, [r'D:\PycharmProjects\data_tjc\hm_test_noFilter\qp37\data', r'D:\PycharmProjects\data_tjc\hm_test_origin\org'])
|
{"/PSVE_test.py": ["/PSVE_model.py"], "/PSVE_train.py": ["/PSVE_model.py", "/UTILS_MF_ra_single.py"], "/PMVE_test.py": ["/PMVE_model.py", "/PSVE_model.py"]}
|
36,808
|
IVC-Projects/PMVE
|
refs/heads/master
|
/PSVE_train.py
|
import argparse, time
# from yangNet import network
from PSVE_model import network
from UTILS_MF_ra_single import *
tf.logging.set_verbosity(tf.logging.WARN)
#os.environ["CUDA_VISIBLE_DEVICES"] = "1"

# Experiment tag used to name log/checkpoint/sample directories.
EXP_DATA = 'qp37_ra_101701_PSVE'
LOW_DATA_PATH = r"D:\TJC\trainSet\ASCNNx2\qp37\low_data"
# the frames are predicted by ASCNN.
#HIGH1_DATA_PATH = r"/media/chenjs/Elements/TJC/ASCNN/Training/qp22/ASCNN_prediction_l1"
# HIGH2_DATA_PATH = r"E:\MF\trainSet\qp37\high2_wraped_Y"
LABEL_PATH = r"D:\TJC\trainSet\ASCNNx2\qp37\RA\label_s"
LOG_PATH = "./logs/%s/"%(EXP_DATA)
CKPT_PATH = "./checkpoints/%s/"%(EXP_DATA)
SAMPLE_PATH = "./samples/%s/"%(EXP_DATA)
# Training hyper-parameters.
PATCH_SIZE = (64, 64)
BATCH_SIZE = 64
BASE_LR = 3e-4
LR_DECAY_RATE = 0.5
LR_DECAY_STEP = 30
MAX_EPOCH = 2000

# Optional checkpoint to resume from: --model_path <ckpt>
parser = argparse.ArgumentParser()
parser.add_argument("--model_path")
args = parser.parse_args()
model_path = args.model_path
if __name__ == '__main__':
    # Each entry pairs a low-quality frame path with its label: [[low], label].
    train_list = get_train_list(load_file_list(LOW_DATA_PATH), load_file_list(LABEL_PATH))

    with tf.name_scope('input_scope'):
        train_lowData = tf.placeholder('float32', shape=(BATCH_SIZE, PATCH_SIZE[0], PATCH_SIZE[1], 1))
        train_gt = tf.placeholder('float32', shape=(BATCH_SIZE, PATCH_SIZE[0], PATCH_SIZE[1], 1))
        shared_model = tf.make_template('shared_model', network)
        train_output = shared_model(train_lowData)
        train_output = tf.clip_by_value(train_output, 0., 1.)

    with tf.name_scope('loss_scope'):
        # L2 reconstruction loss (optimized) and L1 loss (computed, unused).
        loss2 = tf.reduce_sum(tf.square(tf.subtract(train_output, train_gt)))
        loss1 = tf.reduce_sum(tf.abs(tf.subtract(train_output, train_gt)))
        # weight decay over everything registered in the WEIGHTS collection
        W = tf.get_collection(tf.GraphKeys.WEIGHTS)
        for w in W:
            loss2 += tf.nn.l2_loss(w)*1e-4
        avg_loss = tf.placeholder('float32')
        tf.summary.scalar("avg_loss", avg_loss)

    global_step = tf.Variable(0, trainable=False)
    learning_rate = tf.train.exponential_decay(BASE_LR, global_step, LR_DECAY_STEP*1000, LR_DECAY_RATE, staircase=True)
    tf.summary.scalar("learning rate", learning_rate)

    optimizer = tf.train.AdamOptimizer(learning_rate, 0.9)
    opt = optimizer.minimize(loss2, global_step=global_step)
    saver = tf.train.Saver(max_to_keep=0)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    #config.gpu_options.per_process_gpu_memory_fraction = 0.8
    with tf.Session(config=config) as sess:
        if not os.path.exists(LOG_PATH):
            os.makedirs(LOG_PATH)
        if not os.path.exists(os.path.dirname(CKPT_PATH)):
            os.makedirs(os.path.dirname(CKPT_PATH))
        if not os.path.exists(SAMPLE_PATH):
            os.makedirs(SAMPLE_PATH)
        merged = tf.summary.merge_all()
        file_writer = tf.summary.FileWriter(LOG_PATH, sess.graph)
        sess.run(tf.global_variables_initializer())
        if model_path:
            print("restore model...")
            saver.restore(sess, model_path)
            print("Done")
        for epoch in range(MAX_EPOCH):
            total_g_loss, n_iter = 0, 0
            epoch_time = time.time()
            # 1000 random batches per epoch (prepare_nn_data samples randomly)
            for idx in range(1000):
                input_lowData, gt_data = prepare_nn_data(train_list)
                feed_dict = {train_lowData: input_lowData, train_gt: gt_data}
                _, l, output, g_step = sess.run([opt, loss2, train_output, global_step], feed_dict=feed_dict)
                total_g_loss += l
                # BUG FIX: n_iter was never incremented, so the epoch-average
                # below always raised ZeroDivisionError.
                n_iter += 1
                del input_lowData, gt_data, output
            lr, summary = sess.run([learning_rate, merged], {avg_loss: total_g_loss/n_iter})
            file_writer.add_summary(summary, epoch)
            tf.logging.warning("Epoch: [%4d/%4d] time: %4.4f\tloss: %.8f\tlr: %.8f"%(epoch, MAX_EPOCH, time.time()-epoch_time, total_g_loss/n_iter, lr))
            print("Epoch: [%4d/%4d] time: %4.4f\tloss: %.8f\tlr: %.8f"%(epoch, MAX_EPOCH, time.time()-epoch_time, total_g_loss/n_iter, lr))
            saver.save(sess, os.path.join(CKPT_PATH, "%s_%03d.ckpt"%(EXP_DATA, epoch)))
|
{"/PSVE_test.py": ["/PSVE_model.py"], "/PSVE_train.py": ["/PSVE_model.py", "/UTILS_MF_ra_single.py"], "/PMVE_test.py": ["/PMVE_model.py", "/PSVE_model.py"]}
|
36,809
|
IVC-Projects/PMVE
|
refs/heads/master
|
/PMVE_test.py
|
import time
import itertools
from PMVE_model import network
from PSVE_model import model_single
# ASCNN(0,4)(1,3)
from UTILS_MF_ra import *
tf.logging.set_verbosity(tf.logging.WARN)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

# Experiment tags for the two networks.
EXP_DATA1 = "MF_qp37_PMVE_999" # double (three-frame) model
EXP_DATA2 = "qp37_ra_PSVE_999" # single-frame model; not used for now
MODEL_DOUBLE_PATH1 = "./checkpoints/%s/"%(EXP_DATA1)
MODEL_DOUBLE_PATH2 = "./checkpoints/%s/"%(EXP_DATA2)
#ORIGINAL_PATH = "./data/test/mix_noSAO/test_D/q22"
#GT_PATH = "./data/test/mix/test_D"
# path 1 : ASCNN predictions from frames (0,4)
# path 2 : ASCNN predictions from frames (1,3)
# QP_LOWDATA_PATH : HEVC "rec" dir; GT_PATH : originals ("org")
ASCNN_Frames_PATH1 = r"I:\TJC\ldp_new\ASCNN_out\yuv_04\D\RaceHorses_qp22_ai_416x240"
ASCNN_Frames_PATH2 = r"I:\TJC\ldp_new\ASCNN_out\yuv_13\D\RaceHorses_qp22_ai_416x240"
QP_LOWDATA_PATH = r'I:\TJC\ldp_new\REC\D\RaceHorses_qp37_ldp_416x240'
GT_PATH = r"I:\TJC\HEVC_TestSequenct\org\D\RaceHorses_416x240"
DL_path = r'I:\TJC\HEVC_TestSequenct\DL_rec\abc'
OUT_DATA_PATH = "./outdata/%s/"%(EXP_DATA1)
# Baseline HEVC PSNR (no filtering) per QP.
NOFILTER = {'q22':42.2758, 'q27':38.9788, 'qp32':35.8667, 'q37':32.8257,'qp37':32.8257}
# Ground truth images dir should be the 2nd component of 'fileOrDir' if 2 components are given.
## cb, cr components are not implemented
# [ASCNN_Frames_PATH, QP_LOWDATA_PATH, GT_PATH]
def prepare_test_data(fileOrDir):
    """Load test data for the two-branch (PMVE/PSVE) evaluation.

    fileOrDir must be [ASCNN_prev_dir, low_dir, ASCNN_next_dir, gt_dir].
    Returns (doubleData_ycbcr, doubleGT_y, singleData_ycbcr, singleGT_y,
    fileName_list): "double" entries feed the three-frame network, "single"
    entries the single-frame network. Only Y planes are normalized; CbCr
    is carried through unchanged.
    """
    doubleData_ycbcr = []
    doubleGT_y = []
    singleData_ycbcr = []
    singleGT_y = []
    fileName_list = []
    # The input is the 4-directory list described above.
    if len(fileOrDir) == 4:
        # return the whole absolute path.
        fileName_list = load_file_list(fileOrDir[1])
        # [[ASCNN_Frame, lowData, LabelData],[ASCNN_Frame2, lowData2, LabelData2], []]
        # single_list # [[low1, lable1], [2,2] ....]
        double_list, single_list = get_test_list(load_file_list(fileOrDir[0]), load_file_list(fileOrDir[1]),
                                                 load_file_list(fileOrDir[2]), load_file_list(fileOrDir[3]))
        for pair in double_list:
            ASCNN_Data_List1 = []
            lowData_List = []
            ASCNN_Data_List2 = []
            ASCNN_Data_imgY1 = c_getYdata(pair[0])
            lowData_imgY = c_getYdata(pair[1])
            ASCNN_Data_imgY2 = c_getYdata(pair[2])
            CbCr = c_getCbCr(pair[1])
            gt_imgY = c_getYdata(pair[3])
            #normalize
            ASCNN_Data_imgY1 = normalize(ASCNN_Data_imgY1)
            lowData_imgY = normalize(lowData_imgY)
            ASCNN_Data_imgY2 = normalize(ASCNN_Data_imgY2)
            # add batch and channel dims: (1, h, w, 1)
            ASCNN_Data_imgY1 = np.resize(ASCNN_Data_imgY1, (1, ASCNN_Data_imgY1.shape[0], ASCNN_Data_imgY1.shape[1], 1))
            lowData_imgY = np.resize(lowData_imgY, (1, lowData_imgY.shape[0], lowData_imgY.shape[1], 1))
            ASCNN_Data_imgY2 = np.resize(ASCNN_Data_imgY2, (1, ASCNN_Data_imgY2.shape[0], ASCNN_Data_imgY2.shape[1], 1))
            gt_imgY = np.resize(gt_imgY, (1, gt_imgY.shape[0], gt_imgY.shape[1], 1))
            ## the 0 acts as a placeholder where no CbCr is needed
            ASCNN_Data_List1.append([ASCNN_Data_imgY1, 0])
            lowData_List.append([lowData_imgY, CbCr])
            ASCNN_Data_List2.append([ASCNN_Data_imgY2, 0])
            doubleData_ycbcr.append([ASCNN_Data_List1, lowData_List, ASCNN_Data_List2])
            # [ASCNN_Data_imgY, 0]
            doubleGT_y.append(gt_imgY)
        # single_list # [[low1, lable1], [2,2] ....]
        for pair in single_list:
            lowData_list = []
            lowData_imgY = c_getYdata(pair[0])
            CbCr = c_getCbCr(pair[0])
            gt_imgY = c_getYdata(pair[1])
            # normalize
            lowData_imgY = normalize(lowData_imgY)
            lowData_imgY = np.resize(lowData_imgY, (1, lowData_imgY.shape[0], lowData_imgY.shape[1], 1))
            gt_imgY = np.resize(gt_imgY, (1, gt_imgY.shape[0], gt_imgY.shape[1], 1))
            lowData_list.append([lowData_imgY, CbCr])
            singleData_ycbcr.append(lowData_list)
            singleGT_y.append(gt_imgY)
    else:
        print("Invalid Inputs...!tjc!")
        exit(0)
    return doubleData_ycbcr, doubleGT_y, singleData_ycbcr, singleGT_y, fileName_list
def abc(modelPath1, modelPath2, fileOrDir):
    """Evaluate the PMVE (three-frame) network over a test sequence.

    Frames with index i % 4 != 0 are enhanced with the three-frame model
    from modelPath1 (checkpoint epoch 999 only); the i % 4 == 0 branch for
    the single-frame model from modelPath2 is currently disabled by a
    `continue`. Enhanced YUV frames are written under DL_path with the PSNR
    gain over plain HEVC embedded in the file name; averages are printed.
    """
    max = [0, 0]  # best (avg_psnr, epoch); shadows the builtin 'max'
    tem1 = [f for f in os.listdir(modelPath1) if 'data' in f]
    ckptFiles1 = sorted([r.split('.data')[0] for r in tem1])
    tem2 = [f for f in os.listdir(modelPath2) if 'data' in f]
    ckptFiles2 = sorted([r.split('.data')[0] for r in tem2])
    re_psnr = tf.placeholder('float32')
    tf.summary.scalar('re_psnr', re_psnr)
    doubleData_ycbcr, doubleGT_y, singleData_ycbcr, singleGT_y, fileName_list = prepare_test_data(fileOrDir)
    total_time, total_psnr = 0, 0
    total_psnr_36 = 0
    total_imgs = len(fileName_list)
    total_hevc_psnr = 0
    total_hevc_psnr_36 = 0
    count = 0
    for i in range(total_imgs):
        # print(fileName_list[i])
        if i % 4 != 0:
            count += 1
            # data layout: [[[h1,0]],[[low,CbCr]],[[h2,0]]] per double entry
            j = i - (i//4) - 1  # index into the "double" lists (skips every 4th frame)
            ASCNNDataY1 = doubleData_ycbcr[j][0][0][0]
            # print(np.shape(ASCNNDataY1))
            imgLowDataY = doubleData_ycbcr[j][1][0][0]
            imgLowCbCr = doubleData_ycbcr[j][1][0][1]
            ASCNNDataY2 = doubleData_ycbcr[j][2][0][0]
            #imgCbCr = original_ycbcr[i][1]
            gtY = doubleGT_y[j] if doubleGT_y else 0
            for ckpt1 in ckptFiles1:
                epoch = int(ckpt1.split('_')[-1].split('.')[0])
                if epoch != 999:  # only the final checkpoint is evaluated
                    continue
                # the graph is rebuilt per frame, so it must be reset first
                tf.reset_default_graph()
                # Double (three-frame) section
                ASCNN_Data_tensor1 = tf.placeholder(tf.float32, shape=(1, None, None, 1))
                lowData_tensor = tf.placeholder(tf.float32, shape=(1, None, None, 1))
                ASCNN_Data_tensor2 = tf.placeholder(tf.float32, shape=(1, None, None, 1))
                shared_model1 = tf.make_template('shared_model', network)
                output_tensor1 = shared_model1(ASCNN_Data_tensor1, lowData_tensor, ASCNN_Data_tensor2)
                output_tensor1 = tf.clip_by_value(output_tensor1, 0., 1.)
                output_tensor1 = output_tensor1 * 255
                with tf.Session() as sess:
                    # saver = tf.train.Saver(tf.global_variables())
                    saver = tf.train.Saver()
                    sess.run(tf.global_variables_initializer())
                    saver.restore(sess, os.path.join(modelPath1, ckpt1))
                    ###tjc###
                    start_t = time.time()
                    out = sess.run(output_tensor1, feed_dict={
                        ASCNN_Data_tensor1: ASCNNDataY1, lowData_tensor: imgLowDataY, ASCNN_Data_tensor2: ASCNNDataY2})
                    duration_t = time.time() - start_t
                    total_time += duration_t
                    # baseline PSNR of the un-enhanced HEVC frame (Y in [0,1])
                    hevc = psnr(imgLowDataY * 255.0, gtY)
                    print("hevc:", hevc)
                    total_hevc_psnr += hevc
                    total_hevc_psnr_36 += hevc
                    out = np.around(out)
                    out = out.astype('int')
                    out = np.reshape(out, [1, out.shape[1], out.shape[2], 1])
                    # flatten enhanced luma; chroma is passed through untouched
                    Y = np.reshape(out, [out.shape[1], out.shape[2]])
                    Y = np.array(list(itertools.chain.from_iterable(Y)))
                    U = imgLowCbCr[0]
                    V = imgLowCbCr[1]
                    creatPath = os.path.join(DL_path, fileName_list[i].split('\\')[-2])
                    if not os.path.exists(creatPath):
                        os.mkdir(creatPath)
                    # print(np.shape(gtY))
                    if doubleGT_y:
                        p = psnr(out, gtY)
                        print("p:", p)
                        # output file name carries the PSNR gain over HEVC
                        path = os.path.join(DL_path,
                                            fileName_list[i].split('\\')[-2],
                                            fileName_list[i].split('\\')[-1].split('.')[0]) + '_%.4f' % (p-hevc) + '.yuv'
                        # print(path)
                        YUV = np.concatenate((Y, U, V))
                        print(np.shape(Y))
                        print(np.shape(U))
                        print(np.shape(V))
                        YUV = YUV.astype('uint8')
                        YUV.tofile(path)
                        total_psnr += p
                        total_psnr_36 += p
                        print("qp37\tepoch:%d\t%s\t%.4f\n" % (epoch, fileName_list[i], p))
        else: ##single frame
            # NOTE(review): this `continue` makes everything below in this
            # branch dead code — the single-frame model is never run here.
            continue
            count += 1
            j = i // 4
            ## ???
            lowDataY = singleData_ycbcr[j][0][0]
            imgLowCbCr = singleData_ycbcr[j][0][1]
            # imgCbCr = original_ycbcr[i][1]
            gtY = singleGT_y[j] if singleGT_y else 0
            hevc = psnr(lowDataY * 255.0, gtY)
            total_hevc_psnr += hevc
            print(hevc)
            #start_t = time.time()
            for ckpt2 in ckptFiles2:
                epoch = int(ckpt2.split('_')[-1].split('.')[0])
                if epoch != 999:
                    continue
                tf.reset_default_graph()
                # Single-frame section
                lowSingleData_tensor = tf.placeholder(tf.float32, shape=(1, None, None, 1))
                shared_model2 = tf.make_template('shared_model', model_single)
                output_tensor2 = shared_model2(lowSingleData_tensor)
                # output_tensor = shared_model(input_tensor)
                output_tensor2 = tf.clip_by_value(output_tensor2, 0., 1.)
                output_tensor2 = output_tensor2 * 255
                with tf.Session() as sess:
                    saver = tf.train.Saver(tf.global_variables())
                    sess.run(tf.global_variables_initializer())
                    saver.restore(sess, os.path.join(modelPath2, ckpt2))
                    out = sess.run(output_tensor2, feed_dict={lowSingleData_tensor: lowDataY})
                    out = np.around(out)
                    out = out.astype('int')
                    out = np.reshape(out, [1, out.shape[1], out.shape[2], 1])
                    Y = np.reshape(out, [out.shape[1], out.shape[2]])
                    Y = np.array(list(itertools.chain.from_iterable(Y)))
                    U = imgLowCbCr[0]
                    V = imgLowCbCr[1]
                    creatPath = os.path.join(DL_path, fileName_list[i].split('\\')[-2])
                    if not os.path.exists(creatPath):
                        os.mkdir(creatPath)
                    if singleGT_y:
                        p = psnr(out, gtY)
                        path = os.path.join(DL_path, fileName_list[i].split('\\')[-2],
                                            fileName_list[i].split('\\')[-1].split('.')[0]) + '_%.4f' % (p - hevc) + '.yuv'
                        YUV = np.concatenate((Y, U, V))
                        YUV = YUV.astype('uint8')
                        YUV.tofile(path)
                        total_psnr += p
                        print("qp37\tepoch:%d\t%s\t%.4f\n" % (epoch, fileName_list[i], p))
    #duration_t = time.time() - start_t
    #total_time += duration_t
    print("AVG_DURATION:%.2f\tAVG_PSNR:%.4f"%(total_time/total_imgs, total_psnr / count))
    print('Total_time:', total_time)
    print('count:', count)
    print("%.4f"%(total_hevc_psnr/count))
    # the "_36" totals assume 36 enhanced frames — TODO confirm
    print("%.4f" % (total_hevc_psnr_36 / 36))
    print("%.4f" % (total_psnr_36 / 36))
    # avg_psnr = total_psnr/total_imgs
    avg_psnr = total_psnr / count
    avg_duration = (total_time/total_imgs)
    if avg_psnr > max[0]:
        max[0] = avg_psnr
        max[1] = epoch
    # summary = sess.run(merged, {re_psnr:avg_psnr})
    # file_writer.add_summary(summary, epoch)
    # tf.logging.warning("AVG_DURATION:%.2f\tAVG_PSNR:%.2f\tepoch:%d"%(avg_duration, avg_psnr, epoch))
    # QP = os.path.basename(HIGHDATA_PATH)
    # tf.logging.warning("QP:%s\tepoch: %d\tavg_max:%.4f\tdelta:%.4f"%(QP, max[1], max[0], max[0]-NOFILTER[QP]))
if __name__ == '__main__':
    # argument order: [prev ASCNN frames, reconstructed frames, next ASCNN frames, originals]
    abc(MODEL_DOUBLE_PATH1, MODEL_DOUBLE_PATH2, [ASCNN_Frames_PATH1, QP_LOWDATA_PATH, ASCNN_Frames_PATH2, GT_PATH])
    # test_all_ckpt(MODEL_PATH, [r'D:\PycharmProjects\data_tjc\hm_test_noFilter\qp37\data', r'D:\PycharmProjects\data_tjc\hm_test_origin\org'])
    print('tjc')
|
{"/PSVE_test.py": ["/PSVE_model.py"], "/PSVE_train.py": ["/PSVE_model.py", "/UTILS_MF_ra_single.py"], "/PMVE_test.py": ["/PMVE_model.py", "/PSVE_model.py"]}
|
36,810
|
IVC-Projects/PMVE
|
refs/heads/master
|
/PSVE_model.py
|
import tensorflow as tf
import tensorflow.contrib.slim as slim
import tflearn
import numpy as np
from PIL import Image
# Layer 1 has three convs (one per input frame); layers 2 and 3 have two each
# (as a transition); then the residual units begin (3 * 6 = 18 layers).
def model_single(frame2, reuse=False, scope='netflow'):
    """Single-frame quality-enhancement CNN (PSVE).

    frame2: (batch, h, w, 1) luma tensor in [0, 1]. Returns frame2 plus a
    learned residual of the same shape. Conv scope names are
    checkpoint-bound — do not rename them.
    """
    with tf.variable_scope(scope, reuse=reuse):
        with slim.arg_scope([slim.conv2d], activation_fn=tflearn.activations.relu,
                            weights_initializer=tf.contrib.layers.xavier_initializer(uniform=True),
                            biases_initializer=tf.constant_initializer(0.0)), \
             slim.arg_scope([slim.conv2d_transpose], activation_fn=tflearn.activations.relu,
                            weights_initializer=tf.contrib.layers.xavier_initializer(uniform=True),
                            biases_initializer=tf.constant_initializer(0.0)):
            # feature extration
            c12 = slim.conv2d(frame2, 64, [3, 3], scope='conv1_2')
            conv = c12
            # 10 bottleneck residual units: 1x1 -> 3x3 -> 1x1, each with a skip add
            for i in range(10):
                c1 = slim.conv2d(conv, 64, [1, 1], scope='convB_%02d' % (i))
                convC = slim.conv2d(c1, 64, [3, 3], scope='convC_%02d' % (i))
                c2 = slim.conv2d(convC, 64, [1, 1], scope='convA_%02d' % (i))
                conv = tf.add(conv, c2)
            c5 = slim.conv2d(conv, 1, [5, 5], activation_fn=None, scope='conv5')
            # enhanced frame reconstruction
            output = tf.add(c5, frame2)
    return output
|
{"/PSVE_test.py": ["/PSVE_model.py"], "/PSVE_train.py": ["/PSVE_model.py", "/UTILS_MF_ra_single.py"], "/PMVE_test.py": ["/PMVE_model.py", "/PSVE_model.py"]}
|
36,812
|
egalistmir/pyfvm
|
refs/heads/master
|
/pyfvm/nonlinear_methods.py
|
# -*- coding: utf-8 -*-
#
import numpy
def newton(f, jacobian_solver, u0, tol=1.0e-10, max_iter=20, verbose=True):
    """Solve F(u) = 0 by Newton's method.

    f : callable returning the residual F(u) as an array
    jacobian_solver : callable (u, rhs) solving J(u) du = rhs
    u0 : initial guess (copied, not mutated)
    Converges when ||F(u)|| < tol; asserts convergence within max_iter steps.
    """
    u = u0.copy()
    residual = f(u)
    res_norm = numpy.linalg.norm(residual)
    if verbose:
        print("||F(u)|| = %e" % res_norm)
    step = 0
    is_converged = False
    while step < max_iter:
        if res_norm < tol:
            is_converged = True
            break
        # Newton update: solve J du = -F(u), then u <- u + du
        u += jacobian_solver(u, -residual)
        residual = f(u)
        res_norm = numpy.linalg.norm(residual)
        step += 1
        if verbose:
            print("||F(u)|| = %e" % res_norm)
    assert is_converged
    return u
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,813
|
egalistmir/pyfvm
|
refs/heads/master
|
/examples/bratu_example_test.py
|
# -*- coding: utf-8 -*-
import meshzoo
import pyfvm
from pyfvm.form_language import integrate, n_dot_grad, dS, dV, Boundary
import numpy
from sympy import exp
import meshplex
def test():
    # Bratu problem: -div(grad u) = 2 exp(u) with u = 0 on the boundary.
    class Bratu(object):
        def apply(self, u):
            diffusion = integrate(lambda x: -n_dot_grad(u(x)), dS)
            reaction = integrate(lambda x: 2.0 * exp(u(x)), dV)
            return diffusion - reaction

        def dirichlet(self, u):
            return [(u, Boundary())]

    vertices, cells = meshzoo.rectangle(0.0, 2.0, 0.0, 1.0, 101, 51)
    mesh = meshplex.MeshTri(vertices, cells)

    f, jac_u = pyfvm.discretize(Bratu(), mesh)

    def jacobian_solver(u0, rhs):
        from scipy.sparse import linalg

        jacobian = jac_u.get_linear_operator(u0)
        return linalg.spsolve(jacobian, rhs)

    # Solve the nonlinear system with Newton, starting from zero.
    initial_guess = numpy.zeros(len(vertices))
    u = pyfvm.newton(lambda u: f.eval(u), jacobian_solver, initial_guess)

    mesh.write("out.vtk", point_data={"u": u})
    return


if __name__ == "__main__":
    test()
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,814
|
egalistmir/pyfvm
|
refs/heads/master
|
/pyfvm/linear_fvm_problem.py
|
# -*- coding: utf-8 -*-
#
import numpy
from scipy import sparse
def get_linear_fvm_problem(
    mesh, edge_kernels, vertex_kernels, face_kernels, dirichlets
):
    """Assemble the sparse system matrix and right-hand side of a linear
    FVM problem, with the Dirichlet conditions applied row-wise.
    """
    values, rows, cols, rhs = _get_VIJ(
        mesh, edge_kernels, vertex_kernels, face_kernels
    )

    # One unknown per mesh vertex. Assemble in COO, then convert to CSR
    # for efficient row access below.
    n = len(mesh.node_coords)
    matrix = sparse.coo_matrix((values, (rows, cols)), shape=(n, n)).tocsr()

    # Dirichlet conditions: wipe the affected rows, then overwrite the
    # diagonal entries and the corresponding right-hand-side values.
    diagonal = matrix.diagonal()
    for dirichlet in dirichlets:
        vertex_mask = mesh.get_vertex_mask(dirichlet.subdomain)
        for i in numpy.where(vertex_mask)[0]:
            # Zero out row i in place via the CSR index pointers.
            matrix.data[matrix.indptr[i] : matrix.indptr[i + 1]] = 0.0
        coeff, rhs_vals = dirichlet.eval(vertex_mask)
        diagonal[vertex_mask] = coeff
        rhs[vertex_mask] = rhs_vals
    matrix.setdiag(diagonal)

    return matrix, rhs
def _get_VIJ(mesh, edge_kernels, vertex_kernels, face_kernels):
    """Collect the COO triplet (V, I, J) and the right-hand side for the
    linear FVM operator.

    Returns ``(V, I_, J, rhs)`` where the first three are 1-D arrays
    suitable for ``scipy.sparse.coo_matrix`` and ``rhs`` has one entry per
    mesh vertex.
    """
    V = []
    I_ = []
    J = []
    n = len(mesh.node_coords)
    # Treating the diagonal explicitly makes tocsr() faster at the cost of a
    # bunch of numpy.add.at().
    diag = numpy.zeros(n)
    #
    rhs = numpy.zeros(n)
    for edge_kernel in edge_kernels:
        for subdomain in edge_kernel.subdomains:
            cell_mask = mesh.get_cell_mask(subdomain)
            # NOTE(review): `nec` appears to hold two index arrays (the two
            # vertex indices of each edge) -- confirm against the
            # edge_kernel.eval contract.
            v_mtx, v_rhs, nec = edge_kernel.eval(mesh, cell_mask)
            # Diagonal entries.
            # Manually sum up the entries corresponding to the same i, j first.
            for c, i in zip(mesh.cells["nodes"].T, mesh.local_idx_inv):
                numpy.add.at(diag, c, sum([v_mtx[t[0]][t[0]][t[1:]] for t in i]))
            # offdiagonal entries
            V.append(v_mtx[0][1])
            I_.append(nec[0])
            J.append(nec[1])
            #
            V.append(v_mtx[1][0])
            I_.append(nec[1])
            J.append(nec[0])
            # Right-hand side.
            try:
                for c, i in zip(mesh.cells["nodes"].T, mesh.local_idx_inv):
                    numpy.subtract.at(rhs, c, sum([v_rhs[t[0]][t[1:]] for t in i]))
            except TypeError:
                # v_rhs probably integers/floats
                if v_rhs[0] != 0:
                    # FIXME these at operations seem really slow with v_rhs
                    # not being of type numpy.ndarray
                    numpy.subtract.at(rhs, nec[0], v_rhs[0])
                if v_rhs[1] != 0:
                    numpy.subtract.at(rhs, nec[1], v_rhs[1])
            # if dot() is used in the expression, the shape of v_matrix will
            # be (2, 2, 1, k) instead of (2, 2, 871, k).
            # if len(v_matrix.shape) == 5:
            #     assert v_matrix.shape[2] == 1
            #     V.append(v_matrix[0, 0, 0])
            #     V.append(v_matrix[0, 1, 0])
            #     V.append(v_matrix[1, 0, 0])
            #     V.append(v_matrix[1, 1, 0])
            # else:
    for vertex_kernel in vertex_kernels:
        for subdomain in vertex_kernel.subdomains:
            vertex_mask = mesh.get_vertex_mask(subdomain)
            vals_matrix, vals_rhs = vertex_kernel.eval(vertex_mask)
            # numpy.add.at(diag, verts, vals_matrix)
            # numpy.subtract.at(rhs, verts, vals_rhs)
            # NOTE(review): comparing against the full slice only works if
            # get_vertex_mask returns the slice object itself; truth-testing
            # an element-wise comparison with a boolean *array* mask would
            # raise -- confirm get_vertex_mask's return type here.
            if vertex_mask == numpy.s_[:]:
                diag += vals_matrix
                rhs -= vals_rhs
            else:
                diag[vertex_mask] += vals_matrix
                rhs[vertex_mask] -= vals_rhs
    for face_kernel in face_kernels:
        for subdomain in face_kernel.subdomains:
            face_mask = mesh.get_face_mask(subdomain)
            vals_matrix, vals_rhs = face_kernel.eval(face_mask)
            # Face contributions go onto the diagonal positions of the
            # selected vertex ids.
            ids = mesh.idx_hierarchy[..., face_mask]
            V.append(vals_matrix)
            I_.append(ids)
            J.append(ids)
            numpy.subtract.at(rhs, ids, vals_rhs)
    # add diagonal
    I_.append(numpy.arange(n))
    J.append(numpy.arange(n))
    V.append(diag)
    # Finally, make V, I, J into 1D-arrays.
    V = numpy.concatenate([v.flat for v in V])
    I_ = numpy.concatenate([i.flat for i in I_])
    J = numpy.concatenate([j.flat for j in J])
    return V, I_, J, rhs
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,815
|
egalistmir/pyfvm
|
refs/heads/master
|
/examples/complex_energy_test.py
|
# -*- coding: utf-8 -*-
import pyfvm
import meshzoo
import pyamg
import numpy
import meshplex
def test():
    # Complex-valued "energy" operator: every edge contributes a Hermitian
    # 2x2 block weighted by its covolume/edge-length ratio.
    class EnergyEdgeKernel(object):
        def __init__(self):
            self.subdomains = [None]
            return

        def eval(self, mesh, cell_mask):
            edge_ce_ratio = mesh.ce_ratios[..., cell_mask]
            beta = 1.0
            off_upper = -edge_ce_ratio * numpy.exp(1j * beta)
            off_lower = -edge_ce_ratio * numpy.exp(-1j * beta)
            return numpy.array(
                [[edge_ce_ratio, off_upper], [off_lower, edge_ce_ratio]]
            )

    vertices, cells = meshzoo.rectangle(0.0, 2.0, 0.0, 1.0, 101, 51)
    mesh = meshplex.MeshTri(vertices, cells)

    matrix = pyfvm.get_fvm_matrix(mesh, [EnergyEdgeKernel()], [], [], [])
    rhs = mesh.control_volumes.copy()

    # Solve with energy-smoothed algebraic multigrid.
    solver = pyamg.smoothed_aggregation_solver(matrix, smooth="energy")
    u = solver.solve(rhs, tol=1e-10)

    # VTU cannot store complex data; view as interleaved (real, imag) floats.
    # <http://stackoverflow.com/a/38902227/353337>
    mesh.write("out.vtk", point_data={"u": u.view("(2,)float")})
    return


if __name__ == "__main__":
    test()
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,816
|
egalistmir/pyfvm
|
refs/heads/master
|
/pyfvm/form_language.py
|
# -*- coding: utf-8 -*-
#
import numpy
import sympy
class Subdomain(object):
    """Base class for user-defined subdomains (marker type)."""
    pass
class Boundary(Subdomain):
    """Subdomain consisting of the entire boundary of the mesh."""
    # Restrict candidate points to boundary entities.
    is_boundary_only = True
    def is_inside(self, x):
        # x has shape (dim, n); every candidate point belongs to this subdomain.
        return numpy.ones(x.shape[1], dtype=bool)
class FvmProblem(object):
    """Marker base class for finite-volume problem definitions."""
    pass
class Measure(object):
    """Base class for integration measures."""
    pass
class ControlVolume(Measure):
    """Integration over control volumes (``dV``)."""
    def __repr__(self):
        return "dV"
# Singleton measure instances used in form expressions.
dV = ControlVolume()
class ControlVolumeSurface(Measure):
    """Integration over control-volume surfaces (``dS``)."""
    def __repr__(self):
        return "dS"
dS = ControlVolumeSurface()
class CellSurface(Measure):
    """Integration over cell surfaces (``dGamma``)."""
    pass
class EdgeKernel(object):
    """Marker base class for edge kernels."""
    pass
class n_dot_grad(sympy.Function):
    """Symbolic placeholder for ``n . grad(u)`` in form expressions."""
    pass
class n_dot(sympy.Function):
    """Symbolic placeholder for ``n . (vector expression)``."""
    pass
dGamma = CellSurface()
def integrate(integrand, measure, subdomains=None):
    """Convenience function for IntegralSum. Just syntactic sugar.

    Parameters:
        integrand: callable mapping a symbolic point ``x`` to the integrand.
        measure: one of the Measure singletons (``dV``, ``dS``, ``dGamma``).
        subdomains: optional restriction; ``None``, a single subdomain, or
            an iterable of subdomains.
    """
    return IntegralSum(integrand, measure, subdomains)
class Integral(object):
    """A single integral: an integrand together with its measure and the
    subdomains it is restricted to.
    """

    def __init__(self, integrand, measure, subdomains):
        assert isinstance(measure, Measure)
        # Normalize `subdomains` to a set: None becomes the empty set, a
        # single (non-iterable) subdomain becomes a singleton set.
        if subdomains is None:
            subdomains = set()
        elif not isinstance(subdomains, set):
            try:
                subdomains = set(subdomains)
            except TypeError:  # e.g. a single Subdomain instance
                subdomains = {subdomains}
        assert isinstance(
            measure, (ControlVolumeSurface, ControlVolume, CellSurface)
        )
        self.integrand = integrand
        self.measure = measure
        self.subdomains = subdomains
        return

    def __repr__(self):
        x = sympy.Symbol("x")
        return "pyfvm.Integral({}, {}, {})".format(
            self.integrand(x), self.measure, self.subdomains
        )
class IntegralSum(object):
    """A sum of Integrals, as produced by ``integrate(...) +/- integrate(...)``.

    Note: following the original design, the arithmetic operators mutate
    ``self`` in place and return it; only the closure handling is fixed here.
    """

    def __init__(self, integrand, measure, subdomains):
        self.integrals = [Integral(integrand, measure, subdomains)]
        return

    def __add__(self, other):
        self.integrals.extend(other.integrals)
        return self

    def __sub__(self, other):
        # Flip the sign on the integrand of all 'other' integrands.
        # Bug fix: bind each integrand as a default argument. A plain
        # closure over the comprehension variable would late-bind, making
        # every lambda refer to the *last* integral of `other.integrals`.
        self.integrals += [
            Integral(
                lambda x, integrand=integral.integrand: -integrand(x),
                integral.measure,
                integral.subdomains,
            )
            for integral in other.integrals
        ]
        return self

    def __pos__(self):
        return self

    def __neg__(self):
        # Flip the sign on the integrand of all 'self' integrands.
        # (Early-bound via default argument; see __sub__.)
        self.integrals = [
            Integral(
                lambda x, integrand=integral.integrand: -integrand(x),
                integral.measure,
                integral.subdomains,
            )
            for integral in self.integrals
        ]
        return self

    def __mul__(self, a):
        assert isinstance(a, float) or isinstance(a, int)
        # Scale every integrand; the integrand is early-bound (see __sub__).
        # `a` is shared deliberately -- it is the same factor for all.
        self.integrals = [
            Integral(
                lambda x, integrand=integral.integrand: a * integrand(x),
                integral.measure,
                integral.subdomains,
            )
            for integral in self.integrals
        ]
        return self

    __rmul__ = __mul__

    def __repr__(self):
        msg = ["pyfvm.IntegralSum("]
        msg += [" " + integral.__repr__() for integral in self.integrals]
        msg += [")"]
        return "\n".join(msg)
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,817
|
egalistmir/pyfvm
|
refs/heads/master
|
/examples/neumann_example_test.py
|
# -*- coding: utf-8 -*-
import pyfvm
from pyfvm.form_language import Subdomain, integrate, n_dot_grad, dS, dV, dGamma
import meshzoo
from scipy.sparse import linalg
import meshplex
def test():
    # Poisson problem with mixed boundary conditions: Dirichlet on the
    # lower part of the boundary (D1), Neumann flux 3.0 elsewhere, and a
    # constant volume source of 1.0.
    class D1(Subdomain):
        is_boundary_only = True

        def is_inside(self, x):
            return x[1] < 0.5

    class Poisson(object):
        def apply(self, u):
            diffusion = integrate(lambda x: -n_dot_grad(u(x)), dS)
            flux = integrate(lambda x: 3.0, dGamma)
            source = integrate(lambda x: 1.0, dV)
            return diffusion + flux - source

        def dirichlet(self, u):
            return [(u, D1())]

    vertices, cells = meshzoo.rectangle(0.0, 1.0, 0.0, 1.0, 51, 51)
    mesh = meshplex.MeshTri(vertices, cells)

    matrix, rhs = pyfvm.discretize_linear(Poisson(), mesh)
    u = linalg.spsolve(matrix, rhs)

    mesh.write("out.vtk", point_data={"u": u})
    return


if __name__ == "__main__":
    test()
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,818
|
egalistmir/pyfvm
|
refs/heads/master
|
/pyfvm/jacobian.py
|
# -*- coding: utf-8 -*-
#
import numpy
from scipy import sparse
class Jacobian(object):
    """Jacobian of an FVM problem, assembled as a sparse matrix at a state u.

    Stores the mesh and the kernels; ``get_linear_operator(u)`` assembles
    the Jacobian at ``u`` and applies the Dirichlet conditions row-wise
    (mirroring get_linear_fvm_problem in linear_fvm_problem).
    """

    def __init__(self, mesh, edge_kernels, vertex_kernels, face_kernels, dirichlets):
        self.mesh = mesh
        self.edge_kernels = edge_kernels
        self.vertex_kernels = vertex_kernels
        self.face_kernels = face_kernels
        self.dirichlets = dirichlets
        return

    def get_linear_operator(self, u):
        """Assemble and return the Jacobian matrix (CSR) at the state ``u``."""
        V, I, J = _get_VIJ(
            self.mesh, u, self.edge_kernels, self.vertex_kernels, self.face_kernels
        )
        # One unknown per vertex
        n = len(self.mesh.node_coords)
        matrix = sparse.coo_matrix((V, (I, J)), shape=(n, n))
        # Transform to CSR format for efficiency
        matrix = matrix.tocsr()
        # Apply Dirichlet conditions.
        d = matrix.diagonal()
        for dirichlet in self.dirichlets:
            vertex_mask = self.mesh.get_vertex_mask(dirichlet.subdomain)
            # Set all Dirichlet rows to 0.
            for i in numpy.where(vertex_mask)[0]:
                matrix.data[matrix.indptr[i] : matrix.indptr[i + 1]] = 0.0
            # Set the diagonal.
            d[vertex_mask] = dirichlet.eval(u[vertex_mask], self.mesh, vertex_mask)
        matrix.setdiag(d)
        return matrix
def _get_VIJ(mesh, u, edge_kernels, vertex_kernels, face_kernels):
    """Collect the COO triplet (V, I, J) of the Jacobian at the state ``u``."""
    V = []
    I_ = []
    J = []
    for edge_kernel in edge_kernels:
        for subdomain in edge_kernel.subdomains:
            cell_mask = mesh.get_cell_mask(subdomain)
            # 2x2 block of values per edge: [[d00, d01], [d10, d11]].
            v_matrix = edge_kernel.eval(u, mesh, cell_mask)
            V.append(v_matrix[0, 0].flatten())
            V.append(v_matrix[0, 1].flatten())
            V.append(v_matrix[1, 0].flatten())
            V.append(v_matrix[1, 1].flatten())
            I_.append(mesh.idx_hierarchy[0].flatten())
            I_.append(mesh.idx_hierarchy[0].flatten())
            I_.append(mesh.idx_hierarchy[1].flatten())
            I_.append(mesh.idx_hierarchy[1].flatten())
            J.append(mesh.idx_hierarchy[0].flatten())
            J.append(mesh.idx_hierarchy[1].flatten())
            J.append(mesh.idx_hierarchy[0].flatten())
            J.append(mesh.idx_hierarchy[1].flatten())
    for vertex_kernel in vertex_kernels:
        for subdomain in vertex_kernel.subdomains:
            vertex_mask = mesh.get_vertex_mask(subdomain)
            vals_matrix = vertex_kernel.eval(u, mesh, vertex_mask)
            # NOTE(review): `verts` is only assigned in the full-slice
            # branch; any other vertex_mask would raise NameError below.
            # Presumably only subdomain=None (full slice) reaches here --
            # confirm against get_vertex_mask, and compare the else-handling
            # in linear_fvm_problem._get_VIJ.
            if vertex_mask == numpy.s_[:]:
                verts = numpy.arange(len(vals_matrix))
            V.append(vals_matrix)
            I_.append(verts)
            J.append(verts)
    for face_kernel in face_kernels:
        for subdomain in face_kernel.subdomains:
            face_mask = mesh.get_face_mask(subdomain)
            vals_matrix = face_kernel.eval(u, mesh, face_mask)
            # NOTE(review): linear_fvm_problem indexes idx_hierarchy with
            # [..., face_mask]; here the mask is applied to the first axis.
            # Confirm which axis is intended.
            faces = mesh.idx_hierarchy[face_mask]
            V.append(vals_matrix)
            I_.append(faces)
            J.append(faces)
    # Finally, make V, I, J into 1D-arrays.
    V = numpy.concatenate(V)
    I_ = numpy.concatenate(I_)
    J = numpy.concatenate(J)
    return V, I_, J
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,819
|
egalistmir/pyfvm
|
refs/heads/master
|
/pyfvm/__init__.py
|
# -*- coding: utf-8 -*-
#
from __future__ import print_function

from . import fvm_problem
from . import linear_fvm_problem
from .discretize_linear import discretize_linear, split
from .discretize import discretize
from .nonlinear_methods import newton
from .fvm_matrix import get_fvm_matrix
from pyfvm.__about__ import __version__, __author__, __author_email__

# Public API of the package.
# Bug fix: "EdgeMatrixKernel" was removed from this list -- the name is
# never imported or defined in this module, so `from pyfvm import *`
# raised AttributeError on it.
__all__ = [
    "__version__",
    "__author__",
    "__author_email__",
    "discretize",
    "discretize_linear",
    "split",
    "newton",
    "fvm_problem",
    "linear_fvm_problem",
    "get_fvm_matrix",
]

# Optional update notification; silently skipped when pipdate isn't installed.
try:
    import pipdate
except ImportError:
    pass
else:
    if pipdate.needs_checking(__name__):
        print(pipdate.check(__name__, __version__), end="")
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
36,820
|
egalistmir/pyfvm
|
refs/heads/master
|
/pyfvm/fvm_problem.py
|
# -*- coding: utf-8 -*-
#
import numpy
from . import fvm_matrix
class FvmProblem(object):
    """A (possibly nonlinear) finite-volume problem.

    Wraps a mesh together with edge/vertex/face kernels and Dirichlet
    conditions. Purely linear contributions (the *matrix kernels*) are
    assembled once into a sparse matrix at construction time; the
    remaining kernels are evaluated on every call to ``eval``.
    """

    def __init__(
        self,
        mesh,
        edge_kernels,
        vertex_kernels,
        face_kernels,
        dirichlets,
        edge_matrix_kernels,
        vertex_matrix_kernels,
        face_matrix_kernels,
    ):
        self.mesh = mesh
        self.edge_kernels = edge_kernels
        self.vertex_kernels = vertex_kernels
        self.face_kernels = face_kernels
        self.dirichlets = dirichlets
        # Assemble the linear part once, if there is any.
        if edge_matrix_kernels or vertex_matrix_kernels or face_matrix_kernels:
            self.matrix = fvm_matrix.get_fvm_matrix(
                mesh,
                edge_matrix_kernels,
                vertex_matrix_kernels,
                face_matrix_kernels,
                [],  # dirichlets
            )
        else:
            self.matrix = None
        return

    def eval(self, u):
        """Evaluate the residual F(u) at the state ``u``."""
        # Start from the precomputed linear contribution (or zero).
        if self.matrix is None:
            out = numpy.zeros_like(u)
        else:
            out = self.matrix.dot(u)
        for edge_kernel in self.edge_kernels:
            for subdomain in edge_kernel.subdomains:
                cell_mask = self.mesh.get_cell_mask(subdomain)
                # NOTE(review): idx_hierarchy is used unmasked here while
                # the kernel is evaluated with cell_mask -- confirm the
                # kernel returns values for all edges in idx_hierarchy.
                numpy.add.at(
                    out,
                    self.mesh.idx_hierarchy,
                    edge_kernel.eval(u, self.mesh, cell_mask),
                )
        for vertex_kernel in self.vertex_kernels:
            for subdomain in vertex_kernel.subdomains:
                vertex_mask = self.mesh.get_vertex_mask(subdomain)
                out[vertex_mask] += vertex_kernel.eval(u, self.mesh, vertex_mask)
        for face_kernel in self.face_kernels:
            for subdomain in face_kernel.subdomains:
                face_mask = self.mesh.get_face_mask(subdomain)
                # Bug fix: this used to call numpy.add(out, face_mask, vals),
                # which treats the third argument as the ufunc's *output*
                # array (writing out + face_mask into it) and never updates
                # `out`. Accumulate the kernel values into `out` at the
                # masked positions, matching the edge-kernel path above.
                numpy.add.at(
                    out, face_mask, face_kernel.eval(u, self.mesh, face_mask)
                )
        # Dirichlet conditions override the residual at their vertices.
        for dirichlet in self.dirichlets:
            vertex_mask = self.mesh.get_vertex_mask(dirichlet.subdomain)
            out[vertex_mask] = dirichlet.eval(u[vertex_mask], self.mesh, vertex_mask)
        return out
|
{"/examples/bratu_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/examples/complex_energy_test.py": ["/pyfvm/__init__.py"], "/examples/neumann_example_test.py": ["/pyfvm/__init__.py", "/pyfvm/form_language.py"], "/pyfvm/__init__.py": ["/pyfvm/discretize.py", "/pyfvm/nonlinear_methods.py", "/pyfvm/fvm_matrix.py"], "/pyfvm/fvm_problem.py": ["/pyfvm/__init__.py"], "/pyfvm/discretize.py": ["/pyfvm/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.