feat: switch to logging #448

Open · wants to merge 1 commit into main
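This PR swaps warnings.warn for standard-library logging across six modules: each module gets a logger via logging.getLogger(__name__), progress notices become logger.info, and genuine problems become logger.warning. One practical consequence is that the info-level messages are invisible until an application opts in, since Python's logging shows only WARNING and above by default. A minimal sketch of how a downstream script could surface them; the "hats" logger-name prefix is inferred from the module paths in this diff:

import logging

# Option 1: show INFO-level records from every library.
logging.basicConfig(level=logging.INFO)

# Option 2 (pick one): attach a root handler, then raise verbosity
# for hats alone. Child loggers such as "hats.catalog.partition_info"
# inherit the level from the "hats" parent.
logging.basicConfig()
logging.getLogger("hats").setLevel(logging.INFO)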
6 changes: 4 additions & 2 deletions src/hats/catalog/association_catalog/partition_join_info.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-import warnings
+import logging
 from pathlib import Path
 
 import numpy as np
@@ -19,6 +19,8 @@
 )
 from hats.pixel_math.healpix_pixel import HealpixPixel
 
+logger = logging.getLogger(__name__)
+
 
 class PartitionJoinInfo:
     """Association catalog metadata with which partitions matches occur in the join"""
@@ -158,7 +160,7 @@ def read_from_dir(cls, catalog_base_dir: str | Path | UPath | None = None) -> PartitionJoinInfo:
         if file_io.does_file_or_directory_exist(partition_join_info_file):
             pixel_frame = PartitionJoinInfo._read_from_csv(partition_join_info_file)
         elif file_io.does_file_or_directory_exist(metadata_file):
-            warnings.warn("Reading partitions from parquet metadata. This is typically slow.")
+            logger.info("Reading partitions from parquet metadata. This is typically slow.")
             pixel_frame = PartitionJoinInfo._read_from_metadata_file(metadata_file)
         else:
             raise FileNotFoundError(
7 changes: 5 additions & 2 deletions src/hats/catalog/partition_info.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-import warnings
+import logging
 from pathlib import Path
 
 import numpy as np
@@ -19,6 +19,8 @@
 )
 from hats.pixel_math import HealpixPixel
 
+logger = logging.getLogger(__name__)
+
 
 class PartitionInfo:
     """Container class for per-partition info."""
@@ -132,7 +134,7 @@ def read_from_dir(cls, catalog_base_dir: str | Path | UPath | None) -> PartitionInfo:
         if file_io.does_file_or_directory_exist(partition_info_file):
             pixel_list = PartitionInfo._read_from_csv(partition_info_file)
         elif file_io.does_file_or_directory_exist(metadata_file):
-            warnings.warn("Reading partitions from parquet metadata. This is typically slow.")
+            logger.info("Reading partitions from parquet metadata. This is typically slow.")
             pixel_list = PartitionInfo._read_from_metadata_file(metadata_file)
         else:
             raise FileNotFoundError(
@@ -240,6 +242,7 @@ def _read_from_csv(cls, partition_info_file: str | Path | UPath) -> PartitionInfo:
             for order, pixel in zip(
                 data_frame[cls.METADATA_ORDER_COLUMN_NAME],
                 data_frame[cls.METADATA_PIXEL_COLUMN_NAME],
+                strict=False,
             )
         ]
 
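The strict=False added to the zip call above makes the long-standing truncation behavior explicit: since Python 3.10, zip accepts a strict keyword, and spelling out strict=False documents that mismatched column lengths are deliberately tolerated (it also satisfies linters that flag bare zip calls, such as ruff's B905 rule). A small sketch of the distinction:

orders = [0, 1, 2]
pixels = [10, 11]  # shorter on purpose

# strict=False (the default): silently truncates to the shorter input.
assert list(zip(orders, pixels, strict=False)) == [(0, 10), (1, 11)]

# strict=True would raise ValueError on the length mismatch instead.
try:
    list(zip(orders, pixels, strict=True))
except ValueError:
    print("lengths differ")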
17 changes: 9 additions & 8 deletions src/hats/inspection/almanac.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
+import logging
 import os
-import warnings
 
 import pandas as pd
 
@@ -37,6 +37,7 @@ def __init__(self, include_default_dir=True, dirs=None):
         self.files = {}
         self.entries = {}
         self.dir_to_catalog_name = {}
+        self.logger = logging.getLogger(__name__)
         self._init_files(include_default_dir=include_default_dir, dirs=dirs)
         self._init_catalog_objects()
         self._init_catalog_links()
@@ -102,7 +103,7 @@ def _init_catalog_objects(self):
             else:
                 full_name = catalog_info.catalog_name
             if full_name in self.entries:
-                warnings.warn(f"Duplicate catalog name ({full_name}). Try using namespaces.")
+                self.logger.warning(f"Duplicate catalog name ({full_name}). Try using namespaces.")
             else:
                 self.entries[full_name] = catalog_info
             self.dir_to_catalog_name[catalog_info.catalog_path] = full_name
@@ -124,7 +125,7 @@ def _init_catalog_links(self):
             if catalog_entry.primary:
                 object_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not object_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"source catalog {catalog_entry.catalog_name} missing "
                         f"object catalog {catalog_entry.primary}"
                     )
@@ -136,7 +137,7 @@ def _init_catalog_links(self):
                 ## Association table MUST have a primary and join catalog
                 primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not primary_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"association table {catalog_entry.catalog_name} missing "
                         f"primary catalog {catalog_entry.primary}"
                     )
@@ -149,7 +150,7 @@ def _init_catalog_links(self):
                     catalog_entry.namespace,
                 )
                 if not join_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"association table {catalog_entry.catalog_name} missing "
                         f"join catalog {catalog_entry.join}"
                     )
@@ -160,7 +161,7 @@ def _init_catalog_links(self):
                 ## Margin catalogs MUST have a primary catalog
                 primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not primary_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"margin table {catalog_entry.catalog_name} missing "
                         f"primary catalog {catalog_entry.primary}"
                     )
@@ -171,15 +172,15 @@ def _init_catalog_links(self):
                 ## Index tables MUST have a primary catalog
                 primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace)
                 if not primary_catalog:
-                    warnings.warn(
+                    self.logger.warning(
                         f"index table {catalog_entry.catalog_name} missing "
                         f"primary catalog {catalog_entry.primary}"
                     )
                 else:
                     catalog_entry.primary_link = primary_catalog
                     primary_catalog.indexes.append(catalog_entry)
             else:  # pragma: no cover
-                warnings.warn(f"Unknown catalog type {catalog_entry.catalog_type}")
+                self.logger.warning(f"Unknown catalog type {catalog_entry.catalog_type}")
 
     def _get_linked_catalog(self, linked_text, namespace) -> AlmanacInfo | None:
         """Find a catalog to be used for linking catalogs within the almanac.
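Unlike the other modules in this diff, which create a module-level logger, the almanac stores its logger on the instance as self.logger. Since logging.getLogger returns one cached logger object per name, both spellings reach the same logger, so the difference is stylistic. A sketch, assuming the logger name follows this module's path:

import logging

a = logging.getLogger("hats.inspection.almanac")
b = logging.getLogger("hats.inspection.almanac")
assert a is b  # getLogger caches by name: one logger per module

# Levels set on a parent logger apply to its children, so an
# application can quiet every hats.* warning in one place.
logging.getLogger("hats").setLevel(logging.ERROR)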
11 changes: 5 additions & 6 deletions src/hats/inspection/visualize_catalog.py
@@ -5,7 +5,7 @@
 
 from __future__ import annotations
 
-import warnings
+import logging
 from typing import TYPE_CHECKING, Type
 
 import astropy.units as u
@@ -36,6 +36,8 @@
 from hats.catalog import Catalog
 from hats.catalog.healpix_dataset.healpix_dataset import HealpixDataset
 
+logger = logging.getLogger(__name__)
+
 
 def _read_point_map(catalog_base_dir):
     """Read the object spatial distribution information from a healpix FITS file.
@@ -235,13 +237,10 @@ def get_fov_moc_from_wcs(wcs: WCS) -> MOC | None:
     y_px = np.append(y_px, y[-1, :-1])
     y_px = np.append(y_px, y[1:, 0][::-1])
 
-    # Disable the output of warnings when encoutering NaNs.
-    warnings.filterwarnings("ignore")
     # Inverse projection from pixel coordinate space to the world coordinate space
     viewport = pixel_to_skycoord(x_px, y_px, wcs)
     # If one coordinate is a NaN we exit the function and do not go further
     ra_deg, dec_deg = viewport.icrs.ra.deg, viewport.icrs.dec.deg
-    warnings.filterwarnings("default")
 
     if np.isnan(ra_deg).any() or np.isnan(dec_deg).any():
         return None
@@ -306,7 +305,7 @@ def _merge_too_small_pixels(depth_ipix_d: dict[int, tuple[np.ndarray, np.ndarray]]
 
     # Combine healpix pixels smaller than 1px in the plot
     if max_depth > depth_res:
-        warnings.warn(
+        logger.info(
             "This plot contains HEALPix pixels smaller than a pixel of the plot. Some values may be lost"
         )
         new_ipix_d = {}
@@ -592,7 +591,7 @@ def initialize_wcs_axes(
             wcs = ax.wcs
             return fig, ax, wcs
     # Plot onto new axes on new figure if current axes is not correct type
-    warnings.warn("Current axes is not of correct type WCSAxes. A new figure and axes will be used.")
+    logger.warning("Current axes is not of correct type WCSAxes. A new figure and axes will be used.")
     fig = plt.figure(**kwargs)
     if wcs is None:
         # Initialize wcs with params if no WCS provided
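Besides rerouting the warnings, this file drops the warnings.filterwarnings("ignore") / filterwarnings("default") pair that bracketed the inverse projection. Those calls mutated process-wide warning state, and "default" resets the filter list rather than restoring whatever configuration the caller had. If suppressing the NaN warnings ever proves necessary again, a scoped alternative would be the warnings.catch_warnings context manager; a sketch, not part of this PR, with np.arcsin standing in for the projection call:

import warnings

import numpy as np

with warnings.catch_warnings():
    # Filter changes made here are undone when the block exits.
    warnings.simplefilter("ignore")
    result = np.arcsin(2.0)  # out of domain: returns NaN with a RuntimeWarning

assert np.isnan(result)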
6 changes: 4 additions & 2 deletions src/hats/io/validation.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-import warnings
+import logging
 from pathlib import Path
 
 import numpy as np
@@ -18,6 +18,8 @@
 from hats.pixel_math.healpix_pixel import INVALID_PIXEL
 from hats.pixel_math.healpix_pixel_function import sort_pixels
 
+logger = logging.getLogger(__name__)
+
 
 # pylint: disable=too-many-statements,too-many-locals
 def is_valid_catalog(
@@ -63,7 +65,7 @@ def handle_error(msg):
         if verbose:
             print(msg)
         else:
-            warnings.warn(msg)
+            logger.warning(msg)
         is_valid = False
 
     if not is_catalog_info_valid(pointer):
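One knock-on effect of this change: tests that assert on these messages with pytest.warns will no longer match, since nothing is emitted through the warnings system anymore. The logging equivalent is pytest's built-in caplog fixture. A hypothetical sketch; the verbose=False argument and the single-argument call are assumptions based on the handle_error helper above, which prints instead of logging when verbose is set:

import logging

from hats.io.validation import is_valid_catalog

def test_invalid_catalog_logs_reasons(tmp_path, caplog):
    # An empty directory is not a valid catalog, so validation should
    # fail and report why through the "hats" logger hierarchy.
    with caplog.at_level(logging.WARNING, logger="hats"):
        assert not is_valid_catalog(tmp_path, verbose=False)
    assert caplog.messages  # failure reasons arrive as log records now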
6 changes: 4 additions & 2 deletions src/hats/loaders/read_hats.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-import warnings
+import logging
 from pathlib import Path
 
 import numpy as np
@@ -26,6 +26,8 @@
     CatalogType.MAP: MapCatalog,
 }
 
+logger = logging.getLogger(__name__)
+
 
 def read_hats(catalog_path: str | Path | UPath) -> Dataset:
     """Reads a HATS Catalog from a HATS directory
@@ -88,7 +90,7 @@ def _read_schema_from_metadata(catalog_base_dir: str | Path | UPath) -> pa.Schema
     metadata_file = paths.get_parquet_metadata_pointer(catalog_base_dir)
     metadata_exists = file_io.does_file_or_directory_exist(metadata_file)
     if not (common_metadata_exists or metadata_exists):
-        warnings.warn(
+        logger.warning(
             "_common_metadata or _metadata files not found for this catalog."
             "The arrow schema will not be set."
         )
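A final note on default visibility: even with no logging configuration at all, records at WARNING and above are still printed to stderr by logging's lastResort handler, so messages like the one above remain visible by default, much as the old warnings were. The logger.info calls introduced in the partition readers, by contrast, stay silent until a handler is configured. A quick demonstration, again assuming logger names follow the module paths:

import logging

log = logging.getLogger("hats.loaders.read_hats")
log.warning("shown: the lastResort handler emits WARNING and above")
log.info("hidden: below WARNING, and no handler has been configured")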