From df89c5a73afd978fdfe3f8c95ef725ad4f4a325b Mon Sep 17 00:00:00 2001
From: Saurav Maheshkar
Date: Tue, 31 Dec 2024 07:17:43 +0000
Subject: [PATCH] feat: switch to logging

---
 .../association_catalog/partition_join_info.py |  6 ++++--
 src/hats/catalog/partition_info.py             |  7 +++++--
 src/hats/inspection/almanac.py                 | 17 +++++++++--------
 src/hats/inspection/visualize_catalog.py       | 11 +++++------
 src/hats/io/validation.py                      |  6 ++++--
 src/hats/loaders/read_hats.py                  |  6 ++++--
 6 files changed, 31 insertions(+), 22 deletions(-)

diff --git a/src/hats/catalog/association_catalog/partition_join_info.py b/src/hats/catalog/association_catalog/partition_join_info.py
index 03dc2c2f..d01f0ece 100644
--- a/src/hats/catalog/association_catalog/partition_join_info.py
+++ b/src/hats/catalog/association_catalog/partition_join_info.py
@@ -2,7 +2,7 @@

 from __future__ import annotations

-import warnings
+import logging
 from pathlib import Path

 import numpy as np
@@ -19,6 +19,8 @@
 )
 from hats.pixel_math.healpix_pixel import HealpixPixel

+logger = logging.getLogger(__name__)
+

 class PartitionJoinInfo:
     """Association catalog metadata with which partitions matches occur in the join"""
@@ -158,7 +160,7 @@ def read_from_dir(cls, catalog_base_dir: str | Path | UPath | None = None) -> Pa
         if file_io.does_file_or_directory_exist(partition_join_info_file):
             pixel_frame = PartitionJoinInfo._read_from_csv(partition_join_info_file)
         elif file_io.does_file_or_directory_exist(metadata_file):
-            warnings.warn("Reading partitions from parquet metadata. This is typically slow.")
+            logger.info("Reading partitions from parquet metadata. This is typically slow.")
             pixel_frame = PartitionJoinInfo._read_from_metadata_file(metadata_file)
         else:
             raise FileNotFoundError(
diff --git a/src/hats/catalog/partition_info.py b/src/hats/catalog/partition_info.py
index 4f9c4601..d529ac3f 100644
--- a/src/hats/catalog/partition_info.py
+++ b/src/hats/catalog/partition_info.py
@@ -2,7 +2,7 @@

 from __future__ import annotations

-import warnings
+import logging
 from pathlib import Path

 import numpy as np
@@ -19,6 +19,8 @@
 )
 from hats.pixel_math import HealpixPixel

+logger = logging.getLogger(__name__)
+

 class PartitionInfo:
     """Container class for per-partition info."""
@@ -132,7 +134,7 @@ def read_from_dir(cls, catalog_base_dir: str | Path | UPath | None) -> Partition
         if file_io.does_file_or_directory_exist(partition_info_file):
             pixel_list = PartitionInfo._read_from_csv(partition_info_file)
         elif file_io.does_file_or_directory_exist(metadata_file):
-            warnings.warn("Reading partitions from parquet metadata. This is typically slow.")
+            logger.info("Reading partitions from parquet metadata. This is typically slow.")
             pixel_list = PartitionInfo._read_from_metadata_file(metadata_file)
         else:
             raise FileNotFoundError(
@@ -240,6 +242,7 @@ def _read_from_csv(cls, partition_info_file: str | Path | UPath) -> PartitionInf
             for order, pixel in zip(
                 data_frame[cls.METADATA_ORDER_COLUMN_NAME],
                 data_frame[cls.METADATA_PIXEL_COLUMN_NAME],
+                strict=False,
             )
         ]

diff --git a/src/hats/inspection/almanac.py b/src/hats/inspection/almanac.py
index c57c927c..b4e1b88e 100644
--- a/src/hats/inspection/almanac.py
+++ b/src/hats/inspection/almanac.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

+import logging
 import os
-import warnings

 import pandas as pd
@@ -37,6 +37,7 @@ def __init__(self, include_default_dir=True, dirs=None):
         self.files = {}
         self.entries = {}
         self.dir_to_catalog_name = {}
+        self.logger = logging.getLogger(__name__)
         self._init_files(include_default_dir=include_default_dir, dirs=dirs)
         self._init_catalog_objects()
         self._init_catalog_links()
@@ -102,7 +103,7 @@ def _init_catalog_objects(self):
             else:
                 full_name = catalog_info.catalog_name
             if full_name in self.entries:
-                warnings.warn(f"Duplicate catalog name ({full_name}). Try using namespaces.")
+                self.logger.warning(f"Duplicate catalog name ({full_name}). Try using namespaces.")
             else:
                 self.entries[full_name] = catalog_info
             self.dir_to_catalog_name[catalog_info.catalog_path] = full_name
@@ -124,7 +125,7 @@ def _init_catalog_links(self):
                 if catalog_entry.primary:
                     object_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                     if not object_catalog:
-                        warnings.warn(
+                        self.logger.warning(
                             f"source catalog {catalog_entry.catalog_name} missing "
                             f"object catalog {catalog_entry.primary}"
                         )
@@ -136,7 +137,7 @@
                 ## Association table MUST have a primary and join catalog
                 primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not primary_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"association table {catalog_entry.catalog_name} missing "
                         f"primary catalog {catalog_entry.primary}"
                     )
@@ -149,7 +150,7 @@
                 join_catalog = self._get_linked_catalog(
                     catalog_entry.join,
                     catalog_entry.namespace,
                 )
                 if not join_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"association table {catalog_entry.catalog_name} missing "
                         f"join catalog {catalog_entry.join}"
                     )
@@ -160,7 +161,7 @@
                 ## Margin catalogs MUST have a primary catalog
                 primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not primary_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"margin table {catalog_entry.catalog_name} missing "
                         f"primary catalog {catalog_entry.primary}"
                     )
@@ -171,7 +172,7 @@
                 ## Index tables MUST have a primary catalog
                 primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not primary_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"index table {catalog_entry.catalog_name} missing "
                         f"primary catalog {catalog_entry.primary}"
                     )
@@ -179,7 +180,7 @@
                 catalog_entry.primary_link = primary_catalog
                 primary_catalog.indexes.append(catalog_entry)
             else:  # pragma: no cover
-                warnings.warn(f"Unknown catalog type {catalog_entry.catalog_type}")
+                self.logger.warning(f"Unknown catalog type {catalog_entry.catalog_type}")

     def _get_linked_catalog(self, linked_text, namespace) -> AlmanacInfo | None:
         """Find a catalog to be used for linking catalogs within the almanac.
diff --git a/src/hats/inspection/visualize_catalog.py b/src/hats/inspection/visualize_catalog.py
index 04d08efe..aae7c7e4 100644
--- a/src/hats/inspection/visualize_catalog.py
+++ b/src/hats/inspection/visualize_catalog.py
@@ -5,7 +5,7 @@

 from __future__ import annotations

-import warnings
+import logging
 from typing import TYPE_CHECKING, Type

 import astropy.units as u
@@ -36,6 +36,8 @@
     from hats.catalog import Catalog
     from hats.catalog.healpix_dataset.healpix_dataset import HealpixDataset

+logger = logging.getLogger(__name__)
+

 def _read_point_map(catalog_base_dir):
     """Read the object spatial distribution information from a healpix FITS file.
@@ -235,13 +237,10 @@ def get_fov_moc_from_wcs(wcs: WCS) -> MOC | None:
     y_px = np.append(y_px, y[-1, :-1])
     y_px = np.append(y_px, y[1:, 0][::-1])

-    # Disable the output of warnings when encoutering NaNs.
-    warnings.filterwarnings("ignore")
     # Inverse projection from pixel coordinate space to the world coordinate space
     viewport = pixel_to_skycoord(x_px, y_px, wcs)
     # If one coordinate is a NaN we exit the function and do not go further
     ra_deg, dec_deg = viewport.icrs.ra.deg, viewport.icrs.dec.deg
-    warnings.filterwarnings("default")

     if np.isnan(ra_deg).any() or np.isnan(dec_deg).any():
         return None
@@ -306,7 +305,7 @@ def _merge_too_small_pixels(depth_ipix_d: dict[int, tuple[np.ndarray, np.ndarray

     # Combine healpix pixels smaller than 1px in the plot
     if max_depth > depth_res:
-        warnings.warn(
+        logger.info(
             "This plot contains HEALPix pixels smaller than a pixel of the plot. Some values may be lost"
         )
         new_ipix_d = {}
@@ -592,7 +591,7 @@ def initialize_wcs_axes(
             wcs = ax.wcs
             return fig, ax, wcs
     # Plot onto new axes on new figure if current axes is not correct type
-    warnings.warn("Current axes is not of correct type WCSAxes. A new figure and axes will be used.")
+    logger.warning("Current axes is not of correct type WCSAxes. A new figure and axes will be used.")
     fig = plt.figure(**kwargs)
     if wcs is None:
         # Initialize wcs with params if no WCS provided
diff --git a/src/hats/io/validation.py b/src/hats/io/validation.py
index 3717de3c..e8b4432f 100644
--- a/src/hats/io/validation.py
+++ b/src/hats/io/validation.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-import warnings
+import logging
 from pathlib import Path

 import numpy as np
@@ -18,6 +18,8 @@
 from hats.pixel_math.healpix_pixel import INVALID_PIXEL
 from hats.pixel_math.healpix_pixel_function import sort_pixels

+logger = logging.getLogger(__name__)
+

 # pylint: disable=too-many-statements,too-many-locals
 def is_valid_catalog(
@@ -63,7 +65,7 @@ def handle_error(msg):
         if verbose:
             print(msg)
         else:
-            warnings.warn(msg)
+            logger.warning(msg)
         is_valid = False

     if not is_catalog_info_valid(pointer):
diff --git a/src/hats/loaders/read_hats.py b/src/hats/loaders/read_hats.py
index 4fad8e5a..f798c8d5 100644
--- a/src/hats/loaders/read_hats.py
+++ b/src/hats/loaders/read_hats.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-import warnings
+import logging
 from pathlib import Path

 import numpy as np
@@ -26,6 +26,8 @@
     CatalogType.MAP: MapCatalog,
 }

+logger = logging.getLogger(__name__)
+

 def read_hats(catalog_path: str | Path | UPath) -> Dataset:
     """Reads a HATS Catalog from a HATS directory
@@ -88,7 +90,7 @@ def _read_schema_from_metadata(catalog_base_dir: str | Path | UPath) -> pa.Schem
     metadata_file = paths.get_parquet_metadata_pointer(catalog_base_dir)
     metadata_exists = file_io.does_file_or_directory_exist(metadata_file)
     if not (common_metadata_exists or metadata_exists):
-        warnings.warn(
+        logger.warning(
             "_common_metadata or _metadata files not found for this catalog."
             "The arrow schema will not be set."
         )
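
Note for downstream users: warnings.warn output is shown by default, but records routed
through logging are dropped unless the application installs a handler. A minimal sketch of
opting in to the new messages (the logger names come from each module's __name__, so they
all live under the "hats" hierarchy; the format string is an illustrative choice, not
something this patch defines):

    import logging

    # basicConfig attaches a stderr StreamHandler to the root logger.
    # The handler accepts all levels, so the per-logger level below
    # decides what is actually printed.
    logging.basicConfig(format="%(asctime)s %(name)s %(levelname)s: %(message)s")

    # Surface INFO-level notices (e.g. the slow parquet-metadata read)
    # from hats modules without raising verbosity for other libraries.
    logging.getLogger("hats").setLevel(logging.INFO)

Code that previously suppressed or asserted on these messages (warnings.filterwarnings,
pytest.warns) would switch to logging filters or a log-capture fixture such as pytest's
caplog.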