From 0a312be5e94e400df027ffdc476073aa31fc3fb6 Mon Sep 17 00:00:00 2001
From: keskitalo
Date: Mon, 18 Aug 2025 21:08:31 -0700
Subject: [PATCH 1/5] Fix that works for both intervals and spt3g

---
 src/toast/intervals.py          | 17 +++++++--
 src/toast/observation_dist.py   | 64 +++++++++------------------------
 src/toast/spt3g/spt3g_export.py | 17 +++++++--
 3 files changed, 46 insertions(+), 52 deletions(-)

diff --git a/src/toast/intervals.py b/src/toast/intervals.py
index 0f199193b..b475e2585 100644
--- a/src/toast/intervals.py
+++ b/src/toast/intervals.py
@@ -94,8 +94,16 @@ def __init__(self, timestamps, intervals=None, timespans=None, samplespans=None)
             if np.isclose(timespans[i][1], timespans[i + 1][0], rtol=1e-12):
                 # Force nearly equal timestamps to match
                 timespans[i][1] = timespans[i + 1][0]
+            # Check that the intervals are sorted and disjoint
             if timespans[i][1] > timespans[i + 1][0]:
-                raise RuntimeError("Timespans must be sorted and disjoint")
+                t1 = timespans[i][1]
+                t2 = timespans[i + 1][0]
+                dt = t1 - t2
+                ts = np.median(np.diff(timestamps))
+                msg = "Timespans must be sorted and disjoint"
+                msg += f" but {t1} - {t2} = {dt} s ({dt / ts} samples)"
+                raise RuntimeError(msg)
+        # Map interval times into sample indices
         indices, times = self._find_indices(timespans)
         self.data = np.array(
             [
@@ -186,7 +194,12 @@ def __len__(self):
         return len(self.data)
 
     def __repr__(self):
-        return self.data.__repr__()
+        s = "= ival.start:
+                overlap.append((
+                    ival.start,
+                    ival.stop,
+                    max(frame.first, ival.first),
+                    min(frame.last, ival.last),
+                ))
+            overlap = IntervalList(
+                iframe.timestamps,
+                intervals=np.array(overlap, dtype=interval_dtype).view(np.recarray),
+            )
             out = None
             try:

From 6f40ec065910c0efdac58423e7629eff112fd5af Mon Sep 17 00:00:00 2001
From: keskitalo
Date: Mon, 18 Aug 2025 22:45:08 -0700
Subject: [PATCH 2/5] Fix another corner case in WCS auto_bounds

---
 src/toast/ops/pixels_wcs.py | 47 ++++++++++++++++++++++++-------------
 1 file changed, 31 insertions(+), 16 deletions(-)

diff --git a/src/toast/ops/pixels_wcs.py b/src/toast/ops/pixels_wcs.py
index 1a505fe3f..58ca0a64e 100644
--- a/src/toast/ops/pixels_wcs.py
+++ b/src/toast/ops/pixels_wcs.py
@@ -23,6 +23,18 @@ from .operator import Operator
 
 
+def unwrap_together(x, y, period=2 * np.pi * u.rad):
+    """ Unwrap x but apply the same branch corrections to y """
+    for i in range(1, len(x)):
+        while np.abs(x[i] - x[i - 1]) > np.abs(x[i] + period - x[i - 1]):
+            x[i] += period
+            y[i] += period
+        while np.abs(x[i] - x[i - 1]) > np.abs(x[i] - period - x[i - 1]):
+            x[i] -= period
+            y[i] -= period
+    return
+
+
 @trait_docs
 class PixelsWCS(Operator):
     """Operator which generates detector pixel indices defined on a flat projection.
@@ -437,28 +449,31 @@ def _exec(self, data, detectors=None, **kwargs):
                 is_azimuth=is_azimuth,
                 center_offset=self.center_offset,
             )
+            rank = data.comm.comm_world.rank  # DEBUG
             minmax = minmax.T
             # Compact observations on both sides of the zero meridian
-            # can confuse this calculation. Use np.unwrap() to find
-            # the most compact longitude range.
-            lonmin = np.amin(np.unwrap(minmax[0]))
-            lonmax = np.amax(np.unwrap(minmax[1]))
-            if lonmax < lonmin:
-                lonmax += 2 * np.pi
+            # can confuse this calculation. We must unwrap the per-observation
+            # limits for the most compact longitude range.
+            unwrap_together(minmax[0], minmax[1])
+            lonmin = np.amin(minmax[0])
+            lonmax = np.amin(minmax[1])
             latmin = np.amin(minmax[2])
             latmax = np.amax(minmax[3])
             if data.comm.comm_world is not None:
                 # Zero meridian concern applies across processes
-                all_lonmin = data.comm.comm_world.allgather(lonmin.to_value(u.radian))
-                all_lonmax = data.comm.comm_world.allgather(lonmax.to_value(u.radian))
-                all_latmin = data.comm.comm_world.allgather(latmin.to_value(u.radian))
-                all_latmax = data.comm.comm_world.allgather(latmax.to_value(u.radian))
-                lonmin = np.amin(np.unwrap(all_lonmin)) * u.radian
-                lonmax = np.amax(np.unwrap(all_lonmax)) * u.radian
-                if lonmax < lonmin:
-                    lonmax += 2 * np.pi
-                latmin = np.amin(all_latmin) * u.radian
-                latmax = np.amax(all_latmax) * u.radian
+                def gather(x):
+                    return data.comm.comm_world.allgather(
+                        x.to_value(u.radian)
+                    ) * u.radian
+                all_lonmin = gather(lonmin)
+                all_lonmax = gather(lonmax)
+                all_latmin = gather(latmin)
+                all_latmax = gather(latmax)
+                unwrap_together(all_lonmin, all_lonmax)
+                lonmin = np.amin(all_lonmin)
+                lonmax = np.amin(all_lonmax)
+                latmin = np.amin(all_latmin)
+                latmax = np.amax(all_latmax)
             self.bounds = (
                 lonmin.to(u.degree),
                 lonmax.to(u.degree),

From 55385843b3795eb7b4a91252c9cad955c1c75f88 Mon Sep 17 00:00:00 2001
From: Theodore Kisner
Date: Tue, 19 Aug 2025 15:37:16 -0700
Subject: [PATCH 3/5] Change default ground simulation timespan to 2 hours in the unit test helpers

---
 src/toast/tests/helpers/ground.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/toast/tests/helpers/ground.py b/src/toast/tests/helpers/ground.py
index 209029889..6e42e06ed 100644
--- a/src/toast/tests/helpers/ground.py
+++ b/src/toast/tests/helpers/ground.py
@@ -93,6 +93,7 @@ def create_ground_data(
     turnarounds_invalid=False,
     single_group=False,
     flagged_pixels=True,
+    schedule_hours=2,
 ):
     """Create a data object with a simple ground sim.
 
@@ -137,6 +138,7 @@ def create_ground_data(
     if tdir is None:
         tdir = tempfile.mkdtemp()
 
+    sch_hours = f"{int(schedule_hours):02d}"
     sch_file = os.path.join(tdir, "ground_schedule.txt")
     run_scheduler(
         opts=[
@@ -155,7 +157,7 @@ def create_ground_data(
             "--start",
             "2020-01-01 00:00:00",
             "--stop",
-            "2020-01-01 06:00:00",
+            f"2020-01-01 {sch_hours}:00:00",
             "--out",
             sch_file,
         ]
@@ -218,6 +220,7 @@ def create_overdistributed_data(
     freqs=None,
     turnarounds_invalid=False,
     single_group=False,
+    schedule_hours=2,
 ):
     """Create a data object with more detectors than processes.
@@ -264,6 +267,7 @@ def create_overdistributed_data(
     if tdir is None:
         tdir = tempfile.mkdtemp()
 
+    sch_hours = f"{int(schedule_hours):02d}"
     sch_file = os.path.join(tdir, "ground_schedule.txt")
     run_scheduler(
         opts=[
@@ -282,7 +286,7 @@ def create_overdistributed_data(
             "--start",
             "2020-01-01 00:00:00",
            "--stop",
-            "2020-01-01 06:00:00",
+            f"2020-01-01 {sch_hours}:00:00",
             "--out",
             sch_file,
         ]

From 5177fd6f0a5b08bb29428693d87e700bf248c67e Mon Sep 17 00:00:00 2001
From: Theodore Kisner
Date: Tue, 19 Aug 2025 20:22:43 -0700
Subject: [PATCH 4/5] Small fixes to hwpss model operator

---
 src/toast/ops/hwpss_model.py       | 7 +++++--
 src/toast/tests/ops_hwpss_model.py | 8 ++++----
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/src/toast/ops/hwpss_model.py b/src/toast/ops/hwpss_model.py
index 1bcbfa3c8..992001e02 100644
--- a/src/toast/ops/hwpss_model.py
+++ b/src/toast/ops/hwpss_model.py
@@ -690,8 +690,11 @@ def _cut_outliers(self, obs, det_mag):
             all_dets = dets
             all_mag = mag
         else:
-            all_dets = flatten(obs.comm_col.gather(dets, root=0))
-            all_mag = np.array(flatten(obs.comm_col.gather(mag, root=0)))
+            all_dets = obs.comm_col.gather(dets, root=0)
+            all_mag = obs.comm_col.gather(mag, root=0)
+            if obs.comm_col.rank == 0:
+                all_dets = list(flatten(all_dets))
+                all_mag = np.array(list(flatten(all_mag)))
 
         # One process does the trivial calculation
         all_flags = None
diff --git a/src/toast/tests/ops_hwpss_model.py b/src/toast/tests/ops_hwpss_model.py
index f811fae88..5d6bc69c1 100644
--- a/src/toast/tests/ops_hwpss_model.py
+++ b/src/toast/tests/ops_hwpss_model.py
@@ -106,7 +106,7 @@ def create_test_data(self, testdir):
             weights,
             skyfile,
             "input_sky_dist",
-            map_key="input_sky",
+            map_key=map_key,
             fwhm=30.0 * u.arcmin,
             lmax=3 * pixels.nside,
             I_scale=0.001,
@@ -274,7 +274,7 @@ def test_hwpss_basic(self):
            os.makedirs(testdir)
 
         data, tod_rms, coeff = self.create_test_data(testdir)
-        n_harmonics = len(coeff) // 4
+        n_harmonics = len(coeff[data.obs[0].name]) // 4
 
         # Add random DC level
         for ob in data.obs:
@@ -331,7 +331,7 @@ def test_hwpss_relcal_fixed(self):
            os.makedirs(testdir)
 
         data, tod_rms, coeff = self.create_test_data(testdir)
-        n_harmonics = len(coeff) // 4
+        n_harmonics = len(coeff[data.obs[0].name]) // 4
 
         # Apply a random inverse relative calibration
         np.random.seed(123456)
@@ -400,7 +400,7 @@ def test_hwpss_relcal_continuous(self):
            os.makedirs(testdir)
 
         data, tod_rms, coeff = self.create_test_data(testdir)
-        n_harmonics = len(coeff) // 4
+        n_harmonics = len(coeff[data.obs[0].name]) // 4
 
         # Apply a random inverse relative calibration that is time-varying
         np.random.seed(123456)

From d1f74a416256867766c327850de27ecebeae9ae9 Mon Sep 17 00:00:00 2001
From: Theodore Kisner
Date: Wed, 20 Aug 2025 05:59:45 -0700
Subject: [PATCH 5/5] Remove debug statement that breaks serial operation

---
 src/toast/ops/pixels_wcs.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/toast/ops/pixels_wcs.py b/src/toast/ops/pixels_wcs.py
index 58ca0a64e..b225a7314 100644
--- a/src/toast/ops/pixels_wcs.py
+++ b/src/toast/ops/pixels_wcs.py
@@ -24,7 +24,7 @@ def unwrap_together(x, y, period=2 * np.pi * u.rad):
-    """ Unwrap x but apply the same branch corrections to y """
+    """Unwrap x but apply the same branch corrections to y"""
     for i in range(1, len(x)):
         while np.abs(x[i] - x[i - 1]) > np.abs(x[i] + period - x[i - 1]):
             x[i] += period
@@ -449,7 +449,6 @@ def _exec(self, data, detectors=None, **kwargs):
                 is_azimuth=is_azimuth,
                 center_offset=self.center_offset,
             )
-            rank = data.comm.comm_world.rank  # DEBUG
             minmax = minmax.T
             # Compact observations on both sides of the zero meridian
             # can confuse this calculation. We must unwrap the per-observation
@@ -462,9 +461,10 @@ def _exec(self, data, detectors=None, **kwargs):
             if data.comm.comm_world is not None:
                 # Zero meridian concern applies across processes
                 def gather(x):
-                    return data.comm.comm_world.allgather(
-                        x.to_value(u.radian)
-                    ) * u.radian
+                    return (
+                        data.comm.comm_world.allgather(x.to_value(u.radian)) * u.radian
+                    )
+
                 all_lonmin = gather(lonmin)
                 all_lonmax = gather(lonmax)
                 all_latmin = gather(latmin)
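
Not part of the patch series above: a minimal standalone sketch of the branch-correction idea behind the unwrap_together helper added in PATCH 2/5, assuming plain radian floats instead of the astropy u.rad quantities used in pixels_wcs.py, and with invented per-observation longitude limits:

import numpy as np


def unwrap_together(x, y, period=2.0 * np.pi):
    """Unwrap x into a compact branch, applying the same shifts to y."""
    for i in range(1, len(x)):
        # Shift element i in both arrays by whole periods until x[i]
        # is as close as possible to x[i - 1].
        while abs(x[i] - x[i - 1]) > abs(x[i] + period - x[i - 1]):
            x[i] += period
            y[i] += period
        while abs(x[i] - x[i - 1]) > abs(x[i] - period - x[i - 1]):
            x[i] -= period
            y[i] -= period


# Two observations straddling the zero meridian: one just below 2*pi,
# the next just above 0.  Without unwrapping, the combined longitude
# range would span nearly the whole circle.
lonmin = np.array([6.1, 0.1])  # hypothetical lower limits [rad]
lonmax = np.array([6.2, 0.2])  # hypothetical upper limits [rad]
unwrap_together(lonmin, lonmax)
print(lonmin, lonmax)  # second pair shifted up by 2*pi, giving a compact range

Shifting both limits of a pair by the same whole period keeps each (lonmin, lonmax) interval intact, so extrema taken over the unwrapped arrays describe a compact longitude range even when observations sit on both sides of the zero meridian.

Likewise, the PATCH 4/5 change to _cut_outliers relies on the fact that a gather returns data only on the root rank, so flattening must be deferred to that rank. A small mpi4py sketch of the same pattern, with invented detector names (run under mpirun with several ranks):

from mpi4py import MPI

comm = MPI.COMM_WORLD
# Hypothetical per-rank detector names, standing in for the per-process
# lists gathered over obs.comm_col in hwpss_model.py.
dets = [f"det_{comm.rank}_{i}" for i in range(2)]
all_dets = comm.gather(dets, root=0)  # list of lists on rank 0, None elsewhere
if comm.rank == 0:
    # Flatten only where the gathered data actually exists
    all_dets = [d for sub in all_dets for d in sub]
    print(all_dets)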