Merge pull request #56 from HERA-Team/xtalk_error_cov
Add xtalk error calculation
jsdillon authored Nov 6, 2024
2 parents 8c2a233 + a65fc03 commit 3b60745
Showing 1 changed file with 47 additions and 15 deletions.
62 changes: 47 additions & 15 deletions notebooks/single_baseline_postprocessing_and_pspec.ipynb
@@ -7,7 +7,7 @@
"source": [
"# Single Baseline Filtering and Power Spectrum Estimation\n",
"\n",
"**by Josh Dillon, Bobby Pascua, and Mike Wilensky**, last updated October 28, 2024\n",
"**by Josh Dillon, Bobby Pascua, and Mike Wilensky**, last updated November 5, 2024\n",
"\n",
"This notebook is designed to take a single redundantly-averaged unique baseline (typically after LST-binning) and push it through all the way to the power spectrum. It operates on single files that contain a single baseline for all LSTs and both `'ee'` and `'nn'` polarizations. It then can:\n",
"* Throw out highly flagged times and/or channels\n",
@@ -151,6 +151,8 @@
"FR_EIGENVAL_CUTOFF = float(os.environ.get(\"FR_EIGENVAL_CUTOFF\", 1e-12))\n",
"TARGET_AVERAGING_TIME = float(os.environ.get(\"TARGET_AVERAGING_TIME\", 300.0)) # coherent integration time in seconds. Actual time might be less to so that all interleaves have the same number of samples averaged\n",
"USE_CORR_MATRIX = os.environ.get(\"USE_CORR_MATRIX\", \"TRUE\").upper() == \"TRUE\" # Whether or not to use the correlation matrix to do the noise statistics correction factor\n",
"CORR_MATRIX_FREQ_DECIMATION = int(os.environ.get(\"CORR_MATRIX_FREQ_DECIMATION\", 10)) # How much to decimate the frequency axis when getting the noise statistic correction factor\n",
"CORR_MATRIX_NOTCH_CUTOFF = float(os.environ.get(\"CORR_MATRIX_NOTCH_CUTOFF\", 30.)) # Maximum EW-projection in meters for including the notch filter in error covariance calculation\n",
"\n",
"# Power spectrum settings\n",
"EFIELD_HEALPIX_BEAM_FILE = os.environ.get(\"EFIELD_HEALPIX_BEAM_FILE\", \"/lustre/aoc/projects/hera/h6c-analysis/IDR2/beams/NF_HERA_Vivaldi_efield_beam_healpix.fits\")\n",
@@ -1417,21 +1419,26 @@
"outputs": [],
"source": [
"def get_frop_wrapper(mode=\"main_lobe\", pol=\"nn\", stream_ind=0, band_ind=0, t_avg=AVERAGING_TIME, \n",
" rephase=True, wgt_tavg_by_nsample=True, nsamples=None, bl_vec=None,\n",
" dlst=None, coherent_avg=True, times=None):\n",
" rephase=True, wgt_tavg_by_nsample=True, bl_vec=None,\n",
" dlst=None, coherent_avg=True, times=None,\n",
" freq_decimation=CORR_MATRIX_FREQ_DECIMATION):\n",
" \"\"\"\n",
" This wraps hera_cal.frf.get_frop_for_noise using specific information from this notebook so that we can do\n",
" post FRF time-time visibility covariance calculation. It returns an operator that can be used on a dynamic \n",
" spectrum to filter it down the time axis.\n",
" \"\"\"\n",
" bl=(ANTPAIR[0], ANTPAIR[1], pol)\n",
" \n",
" band = bands[band_ind]\n",
" band_slice = band_slices[band_ind]\n",
" Nfreqs = band_slice.stop - band_slice.start\n",
" if freq_decimation > 1:\n",
" band_slice = slice(band_slice.start, band_slice.stop, freq_decimation)\n",
"\n",
" weights=deint_wgts[stream_ind][bl][:, band_slice]\n",
" nsamples = deint_nsamples[stream_ind][bl][:, band_slice]\n",
" \n",
" if times is None:\n",
" times = np.modf(single_bl_times[tslice][stream_ind::4])[0] * 24 * 3600\n",
" times = np.modf(single_bl_times[tslice][stream_ind::NINTERLEAVE])[0] * 24 * 3600\n",
" times = times - times[0]\n",
"\n",
" if mode == \"main_lobe\":\n",
@@ -1461,27 +1468,49 @@
"metadata": {},
"outputs": [],
"source": [
"\n",
"coherent_avg_correction_factors = []\n",
"for spw, band in enumerate(bands):\n",
" freq_slice = band_slices[spw]\n",
" decimated_freq_slice = slice(freq_slice.start, freq_slice.stop, CORR_MATRIX_FREQ_DECIMATION)\n",
" \n",
" if USE_CORR_MATRIX:\n",
" corr_factors = []\n",
" for stream_ind in range(NINTERLEAVE):\n",
" times=deint_filt_data[stream_ind].times * 24 * 3600 \n",
" cov_here = 0\n",
" for pol in (\"ee\", \"nn\"):\n",
" cross_antpairpol = ANTPAIR + (pol,)\n",
" freq_slice = band_slices[spw]\n",
"\n",
" var = frf.prep_var_for_frop(deint_filt_data[stream_ind],\n",
" deint_nsamples[stream_ind],\n",
" deint_wgts[stream_ind],\n",
" cross_antpairpol,\n",
" freq_slice,\n",
" decimated_freq_slice,\n",
" auto_ant=0)\n",
" frop = get_frop_wrapper(pol=pol, stream_ind=stream_ind, band_ind=spw,\n",
" nsamples=deint_nsamples[stream_ind][cross_antpairpol][:, freq_slice],\n",
" dlst=dlst, bl_vec=bl_vec[cross_antpairpol],\n",
" times=times)\n",
"\n",
" main_lobe_frop = get_frop_wrapper(pol=pol, stream_ind=stream_ind, band_ind=spw,\n",
" dlst=dlst, bl_vec=bl_vec[cross_antpairpol],\n",
" times=times)\n",
" \n",
" # Calculate whether the EW-baseline projection is short enough to care about the notch\n",
" EW_proj = data.antpos[ANTPAIR[0]][0] - data.antpos[ANTPAIR[1]][0]\n",
" if np.abs(EW_proj) < CORR_MATRIX_NOTCH_CUTOFF:\n",
" xtalk_frop = get_frop_wrapper(pol=pol, stream_ind=stream_ind, band_ind=spw,\n",
" dlst=dlst, bl_vec=bl_vec[cross_antpairpol],\n",
" times=times, rephase=False, mode=\"xtalk\", coherent_avg=True,\n",
" t_avg=times[1] - times[0])\n",
"\n",
"\n",
" frop = np.zeros_like(main_lobe_frop)\n",
" Nfreqs_band = freq_slice.stop - freq_slice.start\n",
" Nfreqs_calc = int(np.ceil(Nfreqs_band / CORR_MATRIX_FREQ_DECIMATION))\n",
"\n",
" for freq_ind in range(Nfreqs_calc):\n",
" frop[:, :, freq_ind] = np.tensordot(main_lobe_frop[:, :, freq_ind],\n",
" xtalk_frop[:, :, freq_ind],\n",
" axes=1)\n",
" else:\n",
" frop = main_lobe_frop\n",
" cov_here += frf.get_FRF_cov(frop, var) # computes covariance for pI by adding ee and nn\n",
" if hd.pol_convention == \"avg\":\n",
" cov_here /= 4\n",
@@ -1494,7 +1523,8 @@
"\n",
" else:\n",
" # use the mode-counting method, which is simpler but less accurate\n",
" coherent_avg_correction_factors.append(dpss_coherent_avg_correction(spw))"
" coherent_avg_correction_factors.append(dpss_coherent_avg_correction(spw))\n",
" "
]
},
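The per-channel tensordot in the cell above is an ordinary matrix product: applying the cross-talk notch first and then the main-lobe fringe-rate filter to the data is equivalent to applying their composed operator, which is what gets passed on for the covariance calculation. A minimal sketch of that identity with stand-in matrices (not the notebook's actual filters):

import numpy as np

# Stand-ins for one frequency channel's filter matrices; shapes only, not real filters.
Ntimes = 50
rng = np.random.default_rng(0)
main_lobe = rng.normal(size=(Ntimes, Ntimes))   # main-lobe fringe-rate filter for one channel
xtalk = rng.normal(size=(Ntimes, Ntimes))       # cross-talk notch filter for one channel
data_column = rng.normal(size=Ntimes)           # one channel of a dynamic spectrum

# Notch first, then main lobe, equals the composed operator built by the tensordot loop.
composed = main_lobe @ xtalk
assert np.allclose(composed @ data_column, main_lobe @ (xtalk @ data_column))

Under the same linearity, noise with a given per-sample variance propagates through the composed operator to a filtered covariance of the familiar F Sigma F^H form, which appears to be what frf.get_FRF_cov evaluates from the operator and variance passed to it; the exact convention lives in hera_cal.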
{
@@ -2068,7 +2098,9 @@
"cell_type": "code",
"execution_count": null,
"id": "93240fe7",
"metadata": {},
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"plot_Pk_SNR_vs_LST(func=np.real)\n",
@@ -2222,7 +2254,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
"version": "3.9.6"
},
"toc": {
"base_numbering": 1,
