Improvements to AzimuthIntervals #800

Open · wants to merge 6 commits into base: toast3
21 changes: 12 additions & 9 deletions src/toast/intervals.py
@@ -69,15 +69,18 @@ def __init__(self, timestamps, intervals=None, timespans=None, samplespans=None)
                 raise RuntimeError(
                     "If constructing from intervals, other spans should be None"
                 )
-            timespans = [(x.start, x.stop) for x in intervals]
-            indices = self._find_indices(timespans)
-            self.data = np.array(
-                [
-                    (self.timestamps[x[0]], self.timestamps[x[1]], x[0], x[1])
-                    for x in indices
-                ],
-                dtype=interval_dtype,
-            ).view(np.recarray)
+            if len(intervals) == 0:
+                self.data = np.zeros(0, dtype=interval_dtype).view(np.recarray)
+            else:
+                timespans = [(x.start, x.stop) for x in intervals]
+                indices = self._find_indices(timespans)
+                self.data = np.array(
+                    [
+                        (self.timestamps[x[0]], self.timestamps[x[1]], x[0], x[1])
+                        for x in indices
+                    ],
+                    dtype=interval_dtype,
+                ).view(np.recarray)
         elif timespans is not None:
             if samplespans is not None:
                 raise RuntimeError("Cannot construct from both time and sample spans")
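The new branch simply short-circuits when the input list of intervals is empty. A minimal sketch of the resulting behavior, assuming the class whose `__init__` is shown above is `toast.intervals.IntervalList`:

```python
# Sketch only: illustrates the empty-intervals branch added in this hunk.
import numpy as np
from toast.intervals import IntervalList  # assumed import path

timestamps = np.arange(100, dtype=np.float64)

# With the change, constructing from an empty list of intervals skips
# _find_indices() entirely and yields a zero-length recarray.
empty = IntervalList(timestamps, intervals=[])
print(len(empty.data))  # 0
```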
597 changes: 413 additions & 184 deletions src/toast/ops/azimuth_intervals.py

Large diffs are not rendered by default.
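The azimuth_intervals.py rewrite itself is not rendered here, but the new unit test at the bottom of this diff exercises it end to end. A minimal usage sketch, with the traits (`window_seconds`, `debug_root`) and the resulting scanning intervals taken from that test and everything else assumed:

```python
# Sketch based on the new ops_azimuth_intervals.py test below, not on the
# operator implementation itself (which is not rendered in this diff).
from toast import ops
from toast.observation import default_values as defaults

azint = ops.AzimuthIntervals(
    window_seconds=5.0,  # smoothing / classification window (assumed meaning)
    debug_root="az_intervals",  # optional prefix for diagnostic output (assumed)
)
azint.apply(data)  # `data` is a toast.Data container with ground observations

# Each observation then carries per-scan intervals, e.g. the scanning spans:
for ob in data.obs:
    print(ob.name, len(ob.intervals[defaults.scanning_interval]))
```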

11 changes: 3 additions & 8 deletions src/toast/ops/demodulation.py
@@ -309,18 +309,13 @@ def _exec(self, data, detectors=None, **kwargs):
             self.demod_data.obs.append(demod_obs)

             if self.purge:
-                if self.shared_flags is not None:
-                    del obs.shared[self.shared_flags]
-                for det_data in self.det_data.split(";"):
-                    del obs.detdata[det_data]
-                if self.det_flags is not None:
-                    del obs.detdata[self.det_flags]
-                if self.noise_model is not None:
-                    del obs[self.noise_model]
+                obs.clear()

         log.debug_rank(
             "Demodulated observation in", comm=data.comm.comm_group, timer=timer
         )
+        if self.purge:
+            data.clear()

         return
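In effect, `purge=True` now wipes each input observation with `obs.clear()` and finally empties the whole `Data` container, instead of deleting only the selected shared and detector objects. A hedged sketch of the new semantics, using the `Demodulate` call signature from the tests; `weights` and `data` are assumed to exist as in the demodulation test further down:

```python
# Sketch of the stricter purge behavior; not the operator implementation.
demod = ops.Demodulate(stokes_weights=weights, nskip=3, purge=True)
demod_data = demod.apply(data)
# With purge=True the original observations are cleared wholesale and
# data.clear() is called at the end, so only demod_data remains usable.
# Pass purge=False to keep the input data intact.
```

This is presumably why the `ops_demodulate.py` test below switches to `purge=False`: it maps the original data again after demodulation.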
13 changes: 9 additions & 4 deletions src/toast/ops/flag_intervals.py
@@ -105,10 +105,15 @@ def _exec(self, data, detectors=None, **kwargs):
             if ob.comm_col_rank == 0:
                 new_flags = np.array(ob.shared[self.shared_flags])
                 for vname, vmask in self.view_mask:
-                    views = ob.view[vname]
-                    for vw in views:
-                        # Note that a View acts like a slice
-                        new_flags[vw] |= vmask
+                    if vname in ob.intervals:
+                        views = ob.view[vname]
+                        for vw in views:
+                            # Note that a View acts like a slice
+                            new_flags[vw] |= vmask
+                    else:
+                        msg = f"Intervals '{vname}' does not exist in {ob.name}"
+                        msg += " skipping flagging"
+                        log.warning(msg)
             ob.shared[self.shared_flags].set(new_flags, offset=(0,), fromrank=0)

     def _finalize(self, data, **kwargs):
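A short sketch of the guarded behavior, assuming the operator in this file is `ops.FlagIntervals` with the `view_mask` trait as a list of `(interval_name, mask)` pairs shown above; the interval name used here is hypothetical:

```python
# Sketch only; "turnaround" is a hypothetical interval name.
flag_int = ops.FlagIntervals(
    shared_flags=defaults.shared_flags,
    view_mask=[("turnaround", defaults.shared_mask_invalid)],
)
flag_int.apply(data)
# Observations that do not contain "turnaround" intervals are now skipped
# with a logged warning instead of failing when ob.view[vname] is accessed.
```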
1 change: 1 addition & 0 deletions src/toast/tests/CMakeLists.txt
@@ -76,6 +76,7 @@ install(FILES
   ops_yield_cut.py
   ops_elevation_noise.py
   ops_signal_diff_noise.py
+  ops_azimuth_intervals.py
   ops_loader.py
   accelerator.py
   ops_example_ground.py
172 changes: 172 additions & 0 deletions src/toast/tests/ops_azimuth_intervals.py
@@ -0,0 +1,172 @@
# Copyright (c) 2024-2024 by the parties listed in the AUTHORS file.
# All rights reserved.  Use of this source code is governed by
# a BSD-style license that can be found in the LICENSE file.

import os
from datetime import datetime

import numpy as np
from astropy import units as u

from .. import ops as ops
from ..data import Data
from ..instrument import Focalplane, GroundSite, Telescope
from ..instrument_sim import fake_hexagon_focalplane
from ..mpi import MPI, Comm
from ..observation import Observation
from ..observation import default_values as defaults
from ..pixels_io_healpix import write_healpix_fits
from ..schedule import GroundSchedule
from ..schedule_sim_ground import run_scheduler
from ..vis import set_matplotlib_backend
from ..ops.sim_ground_utils import scan_between

from ._helpers import close_data, create_comm, create_outdir, create_ground_telescope
from .mpi import MPITestCase


class AzimuthIntervalsTest(MPITestCase):
    def setUp(self):
        fixture_name = os.path.splitext(os.path.basename(__file__))[0]
        self.outdir = create_outdir(self.comm, fixture_name)

    def create_fake_data(self):
        # Just one group with all processes
        toastcomm = create_comm(self.comm, single_group=True)

        rate = 100.0 * u.Hz

        telescope = create_ground_telescope(
            toastcomm.group_size,
            sample_rate=rate,
            pixel_per_process=1,
            fknee=None,
            freqs=None,
            width=5.0 * u.degree,
        )

        data = Data(toastcomm)

        # 8 minutes
        n_samp = int(8 * 60 * rate.to_value(u.Hz))
        n_parked = int(0.1 * n_samp)
        n_scan = int(0.12 * n_samp)

        ob = Observation(toastcomm, telescope, n_samples=n_samp, name="aztest")
        # Create shared objects for timestamps, common flags, boresight, position,
        # and velocity.
        ob.shared.create_column(
            defaults.times,
            shape=(ob.n_local_samples,),
            dtype=np.float64,
        )
        ob.shared.create_column(
            defaults.shared_flags,
            shape=(ob.n_local_samples,),
            dtype=np.uint8,
        )
        ob.shared.create_column(
            defaults.azimuth,
            shape=(ob.n_local_samples,),
            dtype=np.float64,
        )
        ob.shared.create_column(
            defaults.elevation,
            shape=(ob.n_local_samples,),
            dtype=np.float64,
        )

        # Rank zero of each grid column creates the data
        stamps = None
        azimuth = None
        elevation = None
        flags = None
        scans = None
        if ob.comm_col_rank == 0:
            start_time = 0.0 + float(ob.local_index_offset) / rate.to_value(u.Hz)
            stop_time = start_time + float(ob.n_local_samples - 1) / rate.to_value(u.Hz)
            stamps = np.linspace(
                start_time,
                stop_time,
                num=ob.n_local_samples,
                endpoint=True,
                dtype=np.float64,
            )

            scans = (n_samp - n_parked) // n_scan
            sim_scans = scans + 1

            azimuth = np.zeros(ob.n_local_samples, dtype=np.float64)
            elevation = np.radians(45.0) * np.ones(ob.n_local_samples, dtype=np.float64)

            azimuth[:n_parked] = np.pi / 4

            for iscan in range(sim_scans):
                first_samp = iscan * n_scan + n_parked
                if iscan % 2 == 0:
                    azstart = np.pi / 4
                    azstop = 3 * np.pi / 4
                else:
                    azstart = 3 * np.pi / 4
                    azstop = np.pi / 4
                _, az, el = scan_between(
                    stamps[first_samp],
                    azstart,
                    np.pi / 4,
                    azstop,
                    np.pi / 4,
                    np.radians(1.0),  # rad / s
                    np.radians(0.25),  # rad / s^2
                    np.radians(1.0),  # rad / s
                    np.radians(0.25),  # rad / s^2
                    nstep=n_scan,
                )
                if iscan == scans:
                    azimuth[first_samp:] = az[: n_samp - first_samp]
                    elevation[first_samp:] = el[: n_samp - first_samp]
                else:
                    azimuth[first_samp : first_samp + n_scan] = az
                    elevation[first_samp : first_samp + n_scan] = el

            # Add some noise
            scale = 0.0005
            azimuth[:] += np.random.normal(loc=0, scale=scale, size=ob.n_local_samples)
            elevation[:] += np.random.normal(
                loc=0, scale=scale, size=ob.n_local_samples
            )

            # Periodic flagged samples.  Add garbage spikes there.
            flags = np.zeros(ob.n_local_samples, dtype=np.uint8)
            for fspan in range(5):
                flags[:: 1000 + fspan] = defaults.shared_mask_invalid
            bad_samps = flags != 0
            azimuth[bad_samps] = 10.0
            elevation[bad_samps] = -10.0

        if ob.comm_col is not None:
            scans = ob.comm_col.bcast(scans, root=0)

        ob.shared[defaults.times].set(stamps, offset=(0,), fromrank=0)
        ob.shared[defaults.azimuth].set(azimuth, offset=(0,), fromrank=0)
        ob.shared[defaults.elevation].set(elevation, offset=(0,), fromrank=0)
        ob.shared[defaults.shared_flags].set(flags, offset=(0,), fromrank=0)
        data.obs.append(ob)
        return data, scans

    def test_exec(self):
        data, num_scans = self.create_fake_data()

        azint = ops.AzimuthIntervals(
            debug_root=os.path.join(self.outdir, "az_intervals"),
            window_seconds=5.0,
        )
        azint.apply(data)

        for ob in data.obs:
            n_scans = len(ob.intervals[defaults.scanning_interval])
            if n_scans != num_scans + 1:
                msg = f"Found {n_scans} scanning intervals instead of {num_scans}"
                print(msg, flush=True)
                self.assertTrue(False)

        close_data(data)
2 changes: 1 addition & 1 deletion src/toast/tests/ops_demodulate.py
@@ -113,7 +113,7 @@ def test_demodulate(self):
         # Demodulate

         downsample = 3
-        demod = ops.Demodulate(stokes_weights=weights, nskip=downsample, purge=True)
+        demod = ops.Demodulate(stokes_weights=weights, nskip=downsample, purge=False)
         demod_data = demod.apply(data)

         # Map again
2 changes: 2 additions & 0 deletions src/toast/tests/runner.py
@@ -26,6 +26,7 @@
 from . import math_misc as test_math_misc
 from . import noise as test_noise
 from . import observation as test_observation
+from . import ops_azimuth_intervals as test_ops_azimuth_intervals
 from . import ops_cadence_map as test_ops_cadence_map
 from . import ops_common_mode_noise as test_ops_common_mode_noise
 from . import ops_crosslinking as test_ops_crosslinking
@@ -183,6 +184,7 @@ def test(name=None, verbosity=2):
     suite.addTest(loader.loadTestsFromModule(test_dist))
     suite.addTest(loader.loadTestsFromModule(test_config))

+    suite.addTest(loader.loadTestsFromModule(test_ops_azimuth_intervals))
     suite.addTest(loader.loadTestsFromModule(test_ops_sim_satellite))
     suite.addTest(loader.loadTestsFromModule(test_ops_sim_ground))
     suite.addTest(loader.loadTestsFromModule(test_ops_memory_counter))
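With the registration above, the new tests run as part of the full suite. A hedged sketch of invoking them directly through the `test()` entry point visible in this file (whether `name` filters to a single module is an assumption):

```python
# Sketch only: calls the TOAST test entry point defined in runner.py.
from toast.tests import runner

runner.test(name="ops_azimuth_intervals")  # name-based filtering assumed
```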