From 5c30094400000918a7a5c2404580b47dbdd8f679 Mon Sep 17 00:00:00 2001 From: Petia Yanchulova Merica-Jones Date: Fri, 26 May 2023 17:54:05 +0300 Subject: [PATCH 1/7] added option to calculate a map of median values --- megabeast/tools/make_naive_maps.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index 4c0ef53..b0bedf1 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -2,6 +2,7 @@ import argparse import numpy as np +import scipy.stats.median_abs_deviation as mad import h5py import itertools as it from astropy.io import fits @@ -17,7 +18,7 @@ __all__ = ["create_naive_maps"] -def create_naive_maps(stats_filename, pix_size=10.0, verbose=False): +def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False): """ Make the naive maps by directly averaging the BEAST results for all the stars in each pixel. Does not account for completeness, hence naive maps! 
@@ -87,10 +88,14 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False): for k, cur_stat in enumerate(sum_stats): values = cat[cur_stat + "_" + stat_type][tindxs] values_foreach_pixel[cur_stat][i, j] = values - summary_stats[j, i, k] = np.average(values) - summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt( - len(values) - ) + if median: + summary_stats[j, i, k] = np.median(values) + summary_sigmas[j, i, k] = mad(values) + else: + summary_stats[j, i, k] = np.average(values) + summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt( + len(values) + ) master_header = w.to_header() # Now, write the maps to disk @@ -136,6 +141,9 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False): parser.add_argument( "--verbose", default=False, type=bool, help="print pixel indices as a check" ) + parser.add_argument( + "--median", default=False, type=bool, help="find the median of the values" + ) args = parser.parse_args() # call the function From 6058af45b093c608673a59ccf84f48a35de9e4bc Mon Sep 17 00:00:00 2001 From: Petia Yanchulova Merica-Jones Date: Fri, 26 May 2023 17:56:18 +0300 Subject: [PATCH 2/7] added 3 stellar parameters to naive maps --- megabeast/tools/make_naive_maps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index b0bedf1..7ce3772 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -67,7 +67,7 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False y = np.floor(pix_y) # setup arrary to store summary stats per pixel - sum_stats = ["Av", "Rv", "f_A"] + sum_stats = ["Av", "Rv", "f_A", "logT", "M_act", "logA"] n_sum = len(sum_stats) summary_stats = np.zeros((n_y + 1, n_x + 1, n_sum + 1), dtype=float) summary_sigmas = np.zeros((n_y + 1, n_x + 1, n_sum), dtype=float) From 0fde9f6e8070a5db9ba0a9e534ecbd8869afb242 Mon Sep 17 00:00:00 2001 From: Petia Yanchulova 
Merica-Jones Date: Sat, 27 May 2023 10:58:37 +0300 Subject: [PATCH 3/7] add qual cut option, update map naming convention --- megabeast/tools/make_naive_maps.py | 33 ++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index 7ce3772..476d0bb 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -2,7 +2,7 @@ import argparse import numpy as np -import scipy.stats.median_abs_deviation as mad +from scipy.stats import median_abs_deviation import h5py import itertools as it from astropy.io import fits @@ -18,7 +18,11 @@ __all__ = ["create_naive_maps"] -def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False): +def create_naive_maps(stats_filename, + pix_size=10.0, + verbose=False, + median=False, + chi2mincut=None): """ Make the naive maps by directly averaging the BEAST results for all the stars in each pixel. Does not account for completeness, hence naive maps! 
@@ -30,6 +34,12 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False pix_size : float (default=10) size of pixels/regions in arcsec + + median : bool (default=False) + calculate the median of the BEAST results (instead of the mean) + + chi2mincut : int (default=None) + place a chi2min cut on the BEAST results """ # type of statistic (make a commandline parameter later) @@ -80,7 +90,8 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False for i in range(n_x + 1): for j in range(n_y + 1): (tindxs,) = np.where((x == i) & (y == j)) - # tindxs, = np.where((x == i) & (y == j) & (cat['chi2min'] < 10.)) + if chi2mincut: + (tindxs,) = np.where((x == i) & (y == j) & (cat['chi2min'] < chi2mincut)) if len(tindxs) > 0: summary_stats[j, i, n_sum] = len(tindxs) if verbose: @@ -88,19 +99,19 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False for k, cur_stat in enumerate(sum_stats): values = cat[cur_stat + "_" + stat_type][tindxs] values_foreach_pixel[cur_stat][i, j] = values + summary_stats[j, i, k] = np.average(values) + summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) if median: summary_stats[j, i, k] = np.median(values) - summary_sigmas[j, i, k] = mad(values) - else: - summary_stats[j, i, k] = np.average(values) - summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt( - len(values) - ) + summary_sigmas[j, i, k] = median_abs_deviation(values) master_header = w.to_header() # Now, write the maps to disk for k, cur_stat in enumerate(sum_stats): - map_name = stats_filename[0].replace("stats", "map" + cur_stat) + map_name = stats_filename[0].replace("stats.fits", "map_" + \ + cur_stat + "_" + \ + str(pix_size) + \ + "arcsec.fits") hdu = fits.PrimaryHDU(summary_stats[:, :, k], header=master_header) hdu.writeto(map_name, overwrite=True) @@ -109,7 +120,7 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False 
hdu_sigma.writeto(sigma_name, overwrite=True) hdu = fits.PrimaryHDU(summary_stats[:, :, n_sum], header=master_header) - hdu.writeto(stats_filename[0].replace("stats", "npts"), overwrite=True) + hdu.writeto(stats_filename[0].replace("stats.fits", "npts.fits"), overwrite=True) # And store all the values in HDF5 format values_name = stats_filename[0].replace("stats.fits", "values_per_pixel.hd5") From 899398ef984dffaaa2751eeac4252a844028fe67 Mon Sep 17 00:00:00 2001 From: Petia Yanchulova Merica-Jones Date: Fri, 8 Dec 2023 17:03:28 -0500 Subject: [PATCH 4/7] add av weighting of rv and f_a --- megabeast/tools/make_naive_maps.py | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index 476d0bb..5c346e6 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -22,7 +22,8 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False, - chi2mincut=None): + chi2mincut=None, + weigh_by_av=False): """ Make the naive maps by directly averaging the BEAST results for all the stars in each pixel. Does not account for completeness, hence naive maps! 
@@ -40,6 +41,10 @@ def create_naive_maps(stats_filename, chi2mincut : int (default=None) place a chi2min cut on the BEAST results + + weigh_by_av : bool (default=False) + weigh R(V) and f_A by A(V) to determining R(V) and f_A of the total column + of dust in a pixel (as opposed to finding a simple average across a pixel) """ # type of statistic (make a commandline parameter later) @@ -60,6 +65,9 @@ def create_naive_maps(stats_filename, dec = cat["DEC"] pixsize_degrees = pix_size / 3600 n_x, n_y, ra_delt, dec_delt = calc_nx_ny_from_pixsize(cat, pixsize_degrees) + print("pix_size", pix_size) + print("n_x", n_x) + print("ra_delt", ra_delt) # the ra spacing needs to be larger, as 1 degree of RA == # cos(DEC) degrees on the great circle ra_grid = ra.min() + ra_delt * np.arange(0, n_x + 1, dtype=float) @@ -78,6 +86,7 @@ def create_naive_maps(stats_filename, # setup arrary to store summary stats per pixel sum_stats = ["Av", "Rv", "f_A", "logT", "M_act", "logA"] + print("sum_stats", sum_stats) n_sum = len(sum_stats) summary_stats = np.zeros((n_y + 1, n_x + 1, n_sum + 1), dtype=float) summary_sigmas = np.zeros((n_y + 1, n_x + 1, n_sum), dtype=float) @@ -99,8 +108,18 @@ def create_naive_maps(stats_filename, for k, cur_stat in enumerate(sum_stats): values = cat[cur_stat + "_" + stat_type][tindxs] values_foreach_pixel[cur_stat][i, j] = values - summary_stats[j, i, k] = np.average(values) - summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) + + # weigh R(V) and f_A by A(V) + if weigh_by_av and ("Rv" in cur_stat or "f_A" in cur_stat): + # get Av values + av_values = cat["Av" + "_" + stat_type][tindxs] + values_foreach_pixel[cur_stat][i, j] = values / av_values + + summary_stats[j, i, k] = np.average(values / av_values) + summary_sigmas[j, i, k] = np.std(values / av_values, ddof=1) / math.sqrt(len(values)) + else: + summary_stats[j, i, k] = np.average(values) + summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) if median: 
summary_stats[j, i, k] = np.median(values) summary_sigmas[j, i, k] = median_abs_deviation(values) @@ -112,6 +131,7 @@ def create_naive_maps(stats_filename, cur_stat + "_" + \ str(pix_size) + \ "arcsec.fits") + print("writing naive maps to disk:", map_name) hdu = fits.PrimaryHDU(summary_stats[:, :, k], header=master_header) hdu.writeto(map_name, overwrite=True) @@ -120,10 +140,10 @@ def create_naive_maps(stats_filename, hdu_sigma.writeto(sigma_name, overwrite=True) hdu = fits.PrimaryHDU(summary_stats[:, :, n_sum], header=master_header) - hdu.writeto(stats_filename[0].replace("stats.fits", "npts.fits"), overwrite=True) + hdu.writeto(stats_filename[0].replace("stats_HRC.fits", "npts.fits"), overwrite=True) # And store all the values in HDF5 format - values_name = stats_filename[0].replace("stats.fits", "values_per_pixel.hd5") + values_name = stats_filename[0].replace("stats_HRC.fits", "values_per_pixel.hd5") f = h5py.File(values_name, "w") dt = h5py.special_dtype(vlen=np.dtype(np.float)) for cur_stat in sum_stats: From c40cb96650b9dd3beba01b367e8eee747d720cec Mon Sep 17 00:00:00 2001 From: Petia Yanchulova Merica-Jones Date: Fri, 8 Dec 2023 17:32:56 -0500 Subject: [PATCH 5/7] correct weighting in np.average for a weighted ave --- megabeast/tools/make_naive_maps.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index 5c346e6..0924afb 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -115,8 +115,8 @@ def create_naive_maps(stats_filename, av_values = cat["Av" + "_" + stat_type][tindxs] values_foreach_pixel[cur_stat][i, j] = values / av_values - summary_stats[j, i, k] = np.average(values / av_values) - summary_sigmas[j, i, k] = np.std(values / av_values, ddof=1) / math.sqrt(len(values)) + summary_stats[j, i, k] = np.average(values, weights=av_values) + summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) else: 
summary_stats[j, i, k] = np.average(values) summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) From a63ed5ce7bad4801566b8736a73af4308815e484 Mon Sep 17 00:00:00 2001 From: Petia Yanchulova Merica-Jones Date: Tue, 30 Jul 2024 14:24:25 -0400 Subject: [PATCH 6/7] update weigh by A(V) --- megabeast/tools/make_naive_maps.py | 49 +++++++++++++++--------------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index 0924afb..c60ca22 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -2,7 +2,6 @@ import argparse import numpy as np -from scipy.stats import median_abs_deviation import h5py import itertools as it from astropy.io import fits @@ -40,10 +39,10 @@ def create_naive_maps(stats_filename, calculate the median of the BEAST results (instead of the mean) chi2mincut : int (default=None) - place a chi2min cut on the BEAST results + place a chi2min cut on the BEAST results. Gives the max threshold. 
weigh_by_av : bool (default=False) - weigh R(V) and f_A by A(V) to determining R(V) and f_A of the total column + weigh R(V) and f_A by A(V) to determine R(V) and f_A of the total column of dust in a pixel (as opposed to finding a simple average across a pixel) """ @@ -52,7 +51,7 @@ def create_naive_maps(stats_filename, stat_type = "Exp" # read in the full brick catalog - if type(stats_filename) == str: + if isinstance(stats_filename, str): stats_filename = [stats_filename] cat = Table.read(stats_filename[0]) if len(stats_filename) > 1: @@ -60,14 +59,14 @@ tcat = Table.read(fname) cat = vstack([cat, tcat]) + if chi2mincut: + cat = cat[cat['chi2min'] < chi2mincut] + # make RA/Dec grid ra = cat["RA"] dec = cat["DEC"] pixsize_degrees = pix_size / 3600 n_x, n_y, ra_delt, dec_delt = calc_nx_ny_from_pixsize(cat, pixsize_degrees) - print("pix_size", pix_size) - print("n_x", n_x) - print("ra_delt", ra_delt) # the ra spacing needs to be larger, as 1 degree of RA == # cos(DEC) degrees on the great circle ra_grid = ra.min() + ra_delt * np.arange(0, n_x + 1, dtype=float) @@ -99,39 +98,35 @@ for i in range(n_x + 1): for j in range(n_y + 1): (tindxs,) = np.where((x == i) & (y == j)) - if chi2mincut: - (tindxs,) = np.where((x == i) & (y == j) & (cat['chi2min'] < chi2mincut)) if len(tindxs) > 0: summary_stats[j, i, n_sum] = len(tindxs) if verbose: print(i, j, len(tindxs)) for k, cur_stat in enumerate(sum_stats): values = cat[cur_stat + "_" + stat_type][tindxs] - values_foreach_pixel[cur_stat][i, j] = values # weigh R(V) and f_A by A(V) if weigh_by_av and ("Rv" in cur_stat or "f_A" in cur_stat): # get Av values av_values = cat["Av" + "_" + stat_type][tindxs] values_foreach_pixel[cur_stat][i, j] = values / av_values - - summary_stats[j, i, k] = np.average(values, weights=av_values) - summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) + weights = av_values else: - summary_stats[j, i,
k] = np.average(values) - summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) + values_foreach_pixel[cur_stat][i, j] = values + weights = None + if median: summary_stats[j, i, k] = np.median(values) - summary_sigmas[j, i, k] = median_abs_deviation(values) + xabs = abs(values - np.median(values)) ** 2. + summary_sigmas[j, i, k] = np.sqrt(np.median(xabs)) / math.sqrt(len(values)) + else: + summary_stats[j, i, k] = np.average(values, weights=weights) + summary_sigmas[j, i, k] = np.std(values, ddof=1) / math.sqrt(len(values)) master_header = w.to_header() # Now, write the maps to disk for k, cur_stat in enumerate(sum_stats): - map_name = stats_filename[0].replace("stats.fits", "map_" + \ - cur_stat + "_" + \ - str(pix_size) + \ - "arcsec.fits") - print("writing naive maps to disk:", map_name) + map_name = stats_filename[0].replace("stats.fits", "map_" + cur_stat + "_" + str(pix_size) + "arcsec.fits") hdu = fits.PrimaryHDU(summary_stats[:, :, k], header=master_header) hdu.writeto(map_name, overwrite=True) @@ -140,12 +135,12 @@ def create_naive_maps(stats_filename, hdu_sigma.writeto(sigma_name, overwrite=True) hdu = fits.PrimaryHDU(summary_stats[:, :, n_sum], header=master_header) - hdu.writeto(stats_filename[0].replace("stats_HRC.fits", "npts.fits"), overwrite=True) + hdu.writeto(stats_filename[0].replace("stats.fits", "npts.fits"), overwrite=True) # And store all the values in HDF5 format - values_name = stats_filename[0].replace("stats_HRC.fits", "values_per_pixel.hd5") + values_name = stats_filename[0].replace("stats.fits", "values_per_pixel.hd5") f = h5py.File(values_name, "w") - dt = h5py.special_dtype(vlen=np.dtype(np.float)) + dt = h5py.special_dtype(vlen=float) for cur_stat in sum_stats: dset = f.create_dataset(cur_stat, (n_x, n_y), dtype=dt) for i, j in it.product(range(n_x), range(n_y)): @@ -175,6 +170,12 @@ def create_naive_maps(stats_filename, parser.add_argument( "--median", default=False, type=bool, help="find the median of the 
values" ) + parser.add_argument( + "--chi2mincut", default=False, type=int, help="max chi2min threshold to place on results" + ) + parser.add_argument( + "--weigh_by_av", default=False, type=bool, help="weigh R(V) or f_A by A(V)" + ) args = parser.parse_args() # call the function From 4c26930b2fa0eb8e1881322377539067219bd5f0 Mon Sep 17 00:00:00 2001 From: Petia Yanchulova Merica-Jones Date: Tue, 30 Jul 2024 14:35:29 -0400 Subject: [PATCH 7/7] small fix to resolve conflict --- megabeast/tools/make_naive_maps.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/megabeast/tools/make_naive_maps.py b/megabeast/tools/make_naive_maps.py index c60ca22..efea5e6 100644 --- a/megabeast/tools/make_naive_maps.py +++ b/megabeast/tools/make_naive_maps.py @@ -21,7 +21,7 @@ def create_naive_maps(stats_filename, pix_size=10.0, verbose=False, median=False, - chi2mincut=None, + chi2mincut=False, weigh_by_av=False): """ Make the naive maps by directly averaging the BEAST results for all the @@ -85,7 +85,7 @@ def create_naive_maps(stats_filename, # setup arrary to store summary stats per pixel sum_stats = ["Av", "Rv", "f_A", "logT", "M_act", "logA"] - print("sum_stats", sum_stats) + print("summary stats", sum_stats) n_sum = len(sum_stats) summary_stats = np.zeros((n_y + 1, n_x + 1, n_sum + 1), dtype=float) summary_sigmas = np.zeros((n_y + 1, n_x + 1, n_sum), dtype=float)