16 changes: 13 additions & 3 deletions README.md
@@ -202,7 +202,7 @@ earlier.
--cov artifacts/cov_delta_full.npy \
--out-delta artifacts/delta_ell_full.npy \
--out-summary artifacts/summary_full.json
```
The resulting JSON contains the Δ vector length and the stabilized
χ ("z") statistic for the null test. When reusing an older
covariance that has one or two extra high-ℓ bins, add
@@ -216,12 +216,22 @@ earlier.
--order-a artifacts/order_A_to_B_full.npz \
--order-b artifacts/order_B_to_A_full.npz \
--theory data/theory/tk_planck2018.npz \
--lmin 30 \
--nlb 30 \
--cov artifacts/cov_delta_full.npy \
--out artifacts/cross_tk_full.npz \
--summary artifacts/cross_summary_full.json
```
The explicit `--lmin`/`--nlb` values ensure the theory is binned with
the same geometry as the commutator runs when the preregistration
metadata diverges. Check `artifacts/cross_summary_full.json` to confirm
the mean and maximum |z| are consistent with a null detection.
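For a quick check without opening the file by hand, a short Python snippet can print the relevant fields. This is a minimal sketch only: the key names shown (`mean_abs_z`, `max_abs_z`) are assumptions for illustration, so inspect the actual keys in your artifact and adjust accordingly.

```python
import json
from pathlib import Path

summary = json.loads(Path("artifacts/cross_summary_full.json").read_text())
# NOTE: key names are hypothetical; list summary.keys() to see what the
# script actually wrote before relying on specific fields.
print("mean |z|:", summary.get("mean_abs_z"))
print("max  |z|:", summary.get("max_abs_z"))
```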

The ordering scripts now record the binning limits alongside the
spectra, so the cross-spectrum CLI can infer `lmin`/`nlb` directly from
those artifacts. Passing the explicit values remains recommended for a
reproducible command log, especially when exchanging files generated by
older commits that predate the embedded metadata.
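To see what the ordering scripts embed, you can list the binning metadata directly from an order artifact. The keys below (`lmin`, `lmax`, `nlb`, `ell_effective`, `ell_left_edges`, `ell_right_edges`) are the ones the updated scripts write; older artifacts may contain only a subset.

```python
import numpy as np

with np.load("artifacts/order_A_to_B_full.npz") as order:
    for key in ("lmin", "lmax", "nlb", "ell_effective",
                "ell_left_edges", "ell_right_edges"):
        if key in order:
            print(key, order[key])
```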

7. **Generate publication figures and a textual digest.**
```bash
149 changes: 139 additions & 10 deletions scripts/compute_cross_spectrum.py
@@ -2,7 +2,7 @@

import argparse
import json
from collections.abc import Iterable, Mapping, Sequence
from dataclasses import dataclass
from pathlib import Path

@@ -18,6 +18,58 @@ class _BinInfo:
ell_lists: list[np.ndarray]


def _bin_info_from_orders(
orders: Sequence[np.lib.npyio.NpzFile],
nbins: int,
) -> _BinInfo | None:
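    """Reconstruct the bin geometry from metadata embedded in order artifacts.

    Each order file may carry an ``ell_effective`` vector and/or
    ``ell_left_edges``/``ell_right_edges`` arrays (right edges exclusive).
    Effective ells are taken from the stored vector when its length matches
    ``nbins``; otherwise they are derived as bin midpoints from the edge
    arrays. Returns ``None`` when no order carries usable metadata.
    """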
for order in orders:
ell_eff_raw = order.get("ell_effective")
ell_left = order.get("ell_left_edges")
ell_right = order.get("ell_right_edges")

ell_lists: list[np.ndarray] = []
lists_valid = True
if ell_left is not None and ell_right is not None:
left = np.asarray(ell_left, dtype=int)
right = np.asarray(ell_right, dtype=int)
if left.shape == right.shape and left.shape[0] == nbins:
for lo, hi in zip(left, right):
lo_i = int(lo)
hi_i = int(hi)
if hi_i > lo_i >= 0:
ell_lists.append(np.arange(lo_i, hi_i))
else:
lists_valid = False
break
else:
lists_valid = False
else:
lists_valid = False

if not lists_valid or len(ell_lists) != nbins:
ell_lists = []

ell_eff = None
if ell_eff_raw is not None:
ell_eff = np.asarray(ell_eff_raw, dtype=float)
if ell_eff.size != nbins:
ell_eff = None

if ell_eff is None and ell_lists and len(ell_lists) == nbins:
ell_eff = np.asarray(
[
0.5 * (float(group[0]) + float(group[-1])) if group.size else 0.0
for group in ell_lists
],
dtype=float,
)

if ell_eff is not None and ell_eff.size == nbins:
return _BinInfo(ell_effective=ell_eff, ell_lists=ell_lists)

return None


def _load_bin_info(
prereg: Path,
*,
@@ -27,7 +79,7 @@ def _load_bin_info(
fallback_nlb: int | None,
) -> _BinInfo:
try:
        bins, bins_meta = load_bins_from_prereg(prereg, nside=nside)
except FileNotFoundError:
bins = None
except Exception as exc: # pragma: no cover - defensive fallback for CLI use
@@ -43,7 +95,13 @@ def _load_bin_info(
ell_lists.append(np.asarray(group, dtype=int))
except Exception: # pragma: no cover - guard against API mismatches
ell_lists = []
if ell_eff.size == nbins:
return _BinInfo(ell_effective=ell_eff, ell_lists=ell_lists)

summary_line(
"prereg bin definition does not match input spectra; "
"falling back to CLI-provided binning"
)

if fallback_nlb is None:
raise ValueError("Prereg bins unavailable; please provide --nlb for fallback binning")
@@ -109,6 +167,49 @@ def _bin_theory(
return np.zeros(0, dtype=float)


def _extract_scalar(value: object) -> int | None:
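    """Best-effort coercion of a scalar-like object to ``int``.

    Accepts Python and NumPy numbers as well as arrays (the first element of
    the flattened array is used). Returns ``None`` for missing values, NaN,
    empty arrays, or anything that cannot be converted.
    """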
if value is None:
return None
if isinstance(value, (int, np.integer)):
return int(value)
if isinstance(value, (float, np.floating)):
if np.isnan(value):
return None
return int(value)
try:
arr = np.asarray(value)
except Exception:
return None
if arr.size == 0:
return None
try:
return int(arr.reshape(-1)[0])
except Exception:
return None


def _load_order_metadata(order_path: Path) -> Mapping[str, object]:
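    """Load the optional JSON sidecar saved alongside an order ``.npz`` file.

    Returns an empty mapping when the sidecar is absent or unparsable.
    """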
meta_path = order_path.with_suffix(".json")
if not meta_path.exists():
return {}
try:
return json.loads(meta_path.read_text())
except Exception as exc: # pragma: no cover - metadata is optional for CLI use
summary_line(f"failed to parse {meta_path.name}: {exc}; ignoring metadata")
return {}


def _extract_from_metadata(meta: Mapping[str, object], key: str) -> object | None:
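    """Look up ``key`` at the top level of ``meta`` or within its ``bins`` block."""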
if not isinstance(meta, Mapping):
return None
if key in meta:
return meta[key]
bins = meta.get("bins")
if isinstance(bins, Mapping) and key in bins:
return bins[key]
return None


def main(argv: Iterable[str] | None = None) -> int:
ap = argparse.ArgumentParser(description="Build the T×κ cross-spectrum science product")
ap.add_argument("--order-a", default="artifacts/order_A_to_B.npz")
@@ -137,13 +238,41 @@ def main(argv: Iterable[str] | None = None) -> int:
nbins = cl_a.size
nside = int(order_a.get("nside", order_b.get("nside", 0)))

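    # Binning resolution order: bin arrays embedded in the order artifacts
    # first; failing that, gather lmin/nlb from CLI flags, npz scalars, or the
    # JSON sidecars, and pass them to _load_bin_info as the fallback behind
    # the prereg definition.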
info = _bin_info_from_orders((order_a, order_b), nbins)
if info is None:
fallback_nlb = args.nlb
if fallback_nlb is None:
fallback_nlb = _extract_scalar(order_a.get("nlb")) or _extract_scalar(
order_b.get("nlb")
)
meta_a: Mapping[str, object] | None = None
meta_b: Mapping[str, object] | None = None
if fallback_nlb is None or args.lmin is None:
meta_a = _load_order_metadata(Path(args.order_a))
meta_b = _load_order_metadata(Path(args.order_b))

if fallback_nlb is None:
fallback_nlb = _extract_scalar(
_extract_from_metadata(meta_a or {}, "nlb")
) or _extract_scalar(_extract_from_metadata(meta_b or {}, "nlb"))

        fallback_lmin = args.lmin
        if fallback_lmin is None:
            # Use explicit None checks rather than `or`: a stored lmin of 0 is
            # a legitimate value (NaMaster's implicit origin) and must not be
            # treated as missing.
            fallback_lmin = _extract_scalar(order_a.get("lmin"))
            if fallback_lmin is None:
                fallback_lmin = _extract_scalar(order_b.get("lmin"))
        if fallback_lmin is None and meta_a is not None and meta_b is not None:
            fallback_lmin = _extract_scalar(_extract_from_metadata(meta_a, "lmin"))
            if fallback_lmin is None:
                fallback_lmin = _extract_scalar(_extract_from_metadata(meta_b, "lmin"))

info = _load_bin_info(
args.prereg,
nside=nside if nside > 0 else 256,
nbins=nbins,
fallback_lmin=fallback_lmin,
fallback_nlb=fallback_nlb,
)

ell_eff = info.ell_effective
cl_data = 0.5 * (cl_a + cl_b)
105 changes: 103 additions & 2 deletions scripts/run_order_A_to_B.py
@@ -2,6 +2,7 @@
from __future__ import annotations

import argparse
from collections.abc import Mapping
from pathlib import Path

import healpy as hp
@@ -68,6 +69,8 @@ def main():

bins = None
bins_meta = None
bins_ell_groups: list[np.ndarray] = []
ell_effective: np.ndarray | None = None
windows_cfg: WindowConfig | None = None
if not args.disable_prereg:
try:
@@ -85,6 +88,36 @@
nlb=args.nlb,
)

if hasattr(bins, "get_effective_ells"):
try:
ell_effective = np.asarray(bins.get_effective_ells(), dtype=float)
except Exception:
ell_effective = None

if hasattr(bins, "get_ell_list"):
try:
bins_ell_groups = [np.asarray(group, dtype=int) for group in bins.get_ell_list()]
except Exception:
bins_ell_groups = []

def _infer_bin_limits() -> tuple[int | None, int | None]:
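        """Infer ``(lmin, lmax)`` for the run, preferring the explicit ell
        lists resolved from the binning and falling back to ``lmin``/``lmax``
        attributes on the bins object."""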
lmin_val = getattr(bins, "lmin", None)
lmax_val = getattr(bins, "lmax", None)
for group in bins_ell_groups:
arr = np.asarray(group, dtype=int)
if arr.size:
lmin_val = int(arr[0])
break
for group in reversed(bins_ell_groups):
arr = np.asarray(group, dtype=int)
if arr.size:
lmax_val = int(arr[-1])
break
return (
None if lmin_val is None else int(lmin_val),
None if lmax_val is None else int(lmax_val),
)

if not args.disable_prereg:
try:
windows_cfg = load_windows_from_prereg(args.prereg)
@@ -113,7 +146,75 @@ def main():
field_names=("cmb", "phi"),
)

lmin_used, lmax_used = _infer_bin_limits()
if lmin_used is None:
if args.lmin is not None:
lmin_used = int(args.lmin)
elif isinstance(bins_meta, Mapping) and "lmin" in bins_meta:
try:
lmin_used = int(bins_meta["lmin"])
except Exception:
lmin_used = None
if lmin_used is None:
# NaMaster's linear bin helper defaults to starting at ℓ=0 when no
# explicit lower bound is supplied. Persist that implicit origin so
# downstream stages can always reconstruct the band edges when
# preregistered metadata is unavailable.
lmin_used = 0
bin_left_edges: np.ndarray | None = None
bin_right_edges: np.ndarray | None = None
if bins_ell_groups and len(bins_ell_groups) == cl.size:
bin_left_edges = np.full(cl.size, -1, dtype=int)
bin_right_edges = np.full(cl.size, -1, dtype=int)
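        # Right edges are stored exclusive (last ℓ in the bin plus one) so
        # downstream consumers can rebuild each bin with np.arange(lo, hi).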
for idx, group in enumerate(bins_ell_groups):
if group.size:
bin_left_edges[idx] = int(group[0])
bin_right_edges[idx] = int(group[-1]) + 1

nlb_value = int(args.nlb)
if isinstance(bins_meta, Mapping) and "nlb" in bins_meta:
try:
nlb_value = int(bins_meta["nlb"])
except Exception:
nlb_value = int(args.nlb)

payload: dict[str, object] = {"cl": cl, "nside": nside, "nlb": np.array(nlb_value, dtype=int)}
if lmin_used is not None:
payload["lmin"] = np.array(lmin_used, dtype=int)
if lmax_used is not None:
payload["lmax"] = np.array(lmax_used, dtype=int)
if ell_effective is not None and ell_effective.size == cl.size:
payload["ell_effective"] = ell_effective.astype(float)
if bin_left_edges is not None and bin_right_edges is not None:
payload["ell_left_edges"] = bin_left_edges
payload["ell_right_edges"] = bin_right_edges
np.savez(Path(args.out), **payload)

bins_summary: dict[str, object] = {}
if isinstance(bins_meta, Mapping):
bins_summary.update({k: v for k, v in bins_meta.items()})
if lmin_used is not None and "lmin" not in bins_summary:
bins_summary["lmin"] = int(lmin_used)
if lmax_used is not None and "lmax" not in bins_summary:
bins_summary["lmax"] = int(lmax_used)
if "nlb" not in bins_summary:
bins_summary["nlb"] = int(args.nlb)
bins_summary["nbins"] = int(cl.size)
if (
ell_effective is not None
and ell_effective.size == cl.size
and "ell_effective" not in bins_summary
):
bins_summary["ell_effective"] = ell_effective.astype(float).tolist()
if (
bin_left_edges is not None
and bin_right_edges is not None
and "ell_left_edges" not in bins_summary
and "ell_right_edges" not in bins_summary
):
bins_summary["ell_left_edges"] = bin_left_edges.astype(int).tolist()
bins_summary["ell_right_edges"] = bin_right_edges.astype(int).tolist()

save_json(
{
"order": "A_to_B",
@@ -122,7 +223,7 @@ def main():
"mask_threshold_sigma": args.threshold_sigma,
"mask_apod_arcmin": args.apod_arcmin,
"bins_source": "prereg" if bins_meta is not None else "cli",
"bins": bins_meta,
"bins": bins_summary,
"windows": windows_cfg.to_metadata() if windows_cfg is not None else None,
},
Path(args.out).with_suffix(".json"),