From 031202cb019d3fd81519ddb1db70bb501579abb6 Mon Sep 17 00:00:00 2001
From: kkappler
Date: Thu, 2 Jan 2025 12:23:05 -0800
Subject: [PATCH 1/9] towards mth5 spectrograms (partial factoring)

---
 aurora/pipelines/fourier_coefficients.py     | 60 +++++++++++++++++++-
 tests/synthetic/test_fourier_coefficients.py |  4 +-
 2 files changed, 60 insertions(+), 4 deletions(-)

diff --git a/aurora/pipelines/fourier_coefficients.py b/aurora/pipelines/fourier_coefficients.py
index 2b3b5839..cd97ba84 100644
--- a/aurora/pipelines/fourier_coefficients.py
+++ b/aurora/pipelines/fourier_coefficients.py
@@ -63,6 +63,7 @@
 import mth5.mth5
 import pathlib
+import xarray as xr

 from aurora.pipelines.time_series_helpers import calibrate_stft_obj
 from aurora.pipelines.time_series_helpers import prototype_decimate
@@ -232,9 +233,12 @@ def add_fcs_to_mth5(m: MTH5, fc_decimations: Optional[Union[str, list]] = None)
             # If timing corrections were needed they could go here, right before STFT

             for i_dec_level, fc_decimation in enumerate(fc_decimations):
-                if i_dec_level != 0:
+                if (
+                    i_dec_level != 0
+                ):  # TODO: take this number from fc_decimation.time_series_decimation.level
                     # Apply decimation
-                    run_xrds = prototype_decimate(fc_decimation, run_xrds)
+                    ts_decimation = fc_decimation.time_series_decimation
+                    run_xrds = prototype_decimate(ts_decimation, run_xrds)

                 # check if this decimation level yields a valid spectrogram
                 if not fc_decimation.is_valid_for_time_series_length(
@@ -253,13 +257,62 @@ def add_fcs_to_mth5(m: MTH5, fc_decimations: Optional[Union[str, list]] = None)
                     f"{i_dec_level}", decimation_level_metadata=fc_decimation
                 )
                 fc_decimation_group.from_xarray(
-                    stft_obj, fc_decimation_group.metadata.sample_rate_decimation
+                    stft_obj,
+                    fc_decimation_group.metadata.time_series_decimation.sample_rate,
                 )
                 fc_decimation_group.update_metadata()
             fc_group.update_metadata()

     return


+def _add_spectrogram_to_mth5(
+    fc_decimation: FCDecimation,
+    run_obj: mth5.groups.RunGroup,
+    run_xrds: xr.Dataset,
+    fc_group: mth5.groups.FCGroup,
+) -> None:
+    """
+
+    This function has been factored out of add_fcs_to_mth5.
+    This is the most atomic level of adding FCs and will be useful as standalone method.
+
+    Parameters
+    ----------
+    fc_decimation : FCDecimation
+        Metadata about how the decimation level is to be processed
+
+    run_xrds : xarray.core.dataset.Dataset
+        Time series to be converted to a spectrogram and stored in MTH5.
+
+    Returns
+    -------
+    run_xrds : xarray.core.dataset.Dataset
+        pre-whitened time series
+
+    """
+
+    # check if this decimation level yields a valid spectrogram
+    if not fc_decimation.is_valid_for_time_series_length(run_xrds.time.shape[0]):
+        logger.info(
+            f"Decimation Level {fc_decimation.time_series_decimation.level} invalid, TS of {run_xrds.time.shape[0]} samples too short"
+        )
+        return
+
+    stft_obj = run_ts_to_stft_scipy(fc_decimation, run_xrds)
+    stft_obj = calibrate_stft_obj(stft_obj, run_obj)
+
+    # Pack FCs into h5 and update metadata
+    fc_decimation_group: FCDecimationGroup = fc_group.add_decimation_level(
+        f"{fc_decimation.time_series_decimation.level}",
+        decimation_level_metadata=fc_decimation,
+    )
+    fc_decimation_group.from_xarray(
+        stft_obj, fc_decimation_group.metadata.time_series_decimation.sample_rate
+    )
+    fc_decimation_group.update_metadata()
+    fc_group.update_metadata()
+
+
 def get_degenerate_fc_decimation(sample_rate: float) -> list:
     """

@@ -289,6 +342,7 @@ def get_degenerate_fc_decimation(sample_rate: float) -> list:
     return output


+# TODO: Delete after mth5 issue #271 is closed and merged.
 @path_or_mth5_object
 def read_back_fcs(m: Union[MTH5, pathlib.Path, str], mode: str = "r") -> None:
     """
diff --git a/tests/synthetic/test_fourier_coefficients.py b/tests/synthetic/test_fourier_coefficients.py
index 8b89ff4d..573ba916 100644
--- a/tests/synthetic/test_fourier_coefficients.py
+++ b/tests/synthetic/test_fourier_coefficients.py
@@ -3,7 +3,8 @@
 from aurora.config.config_creator import ConfigCreator
 from aurora.pipelines.fourier_coefficients import add_fcs_to_mth5
 from aurora.pipelines.fourier_coefficients import fc_decimations_creator
-from aurora.pipelines.fourier_coefficients import read_back_fcs
+
+# from aurora.pipelines.fourier_coefficients import read_back_fcs
 from aurora.pipelines.process_mth5 import process_mth5
 from aurora.test_utils.synthetic.make_processing_configs import (
     create_test_run_config,
@@ -13,6 +14,7 @@
 from mth5.data.make_mth5_from_asc import create_test2_h5
 from mth5.data.make_mth5_from_asc import create_test3_h5
 from mth5.data.make_mth5_from_asc import create_test12rr_h5
+from mth5.timeseries.spectre.helpers import read_back_fcs

 # from mtpy-v2
 from mtpy.processing import RunSummary, KernelDataset

From 14c650745ec25e8b0bc6d30b6abd75217acb8632 Mon Sep 17 00:00:00 2001
From: "Karl N. Kappler"
Date: Thu, 2 Jan 2025 15:39:47 -0800
Subject: [PATCH 2/9] add TODO and clarify argument

---
 tests/synthetic/test_fourier_coefficients.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/tests/synthetic/test_fourier_coefficients.py b/tests/synthetic/test_fourier_coefficients.py
index 573ba916..ee26ce82 100644
--- a/tests/synthetic/test_fourier_coefficients.py
+++ b/tests/synthetic/test_fourier_coefficients.py
@@ -130,8 +130,13 @@ def test_123(self):
         return tfc

     def test_fc_decimations_creator(self):
-        """"""
-        cfgs = fc_decimations_creator(1.0)
+        """
+        # TODO: Move this into mt_metadata
+        Returns
+        -------
+
+        """
+        cfgs = fc_decimations_creator(initial_sample_rate=1.0)

         # test time period must of of type
         with self.assertRaises(NotImplementedError):

From f290c615a9cb5b663ace58b703cd7bd5573ca293 Mon Sep 17 00:00:00 2001
From: "Karl N. Kappler"
Date: Thu, 2 Jan 2025 15:41:16 -0800
Subject: [PATCH 3/9] add some TODOs

---
 aurora/pipelines/time_series_helpers.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/aurora/pipelines/time_series_helpers.py b/aurora/pipelines/time_series_helpers.py
index 90b71933..a81108da 100644
--- a/aurora/pipelines/time_series_helpers.py
+++ b/aurora/pipelines/time_series_helpers.py
@@ -157,6 +157,7 @@ def run_ts_to_stft_scipy(
     run_xrds_orig: xr.Dataset,
 ) -> xr.Dataset:
     """
+    TODO: Replace with mth5 run_ts_to_stft_scipy method
     Converts a runts object into a time series of Fourier coefficients.
     This method uses scipy.signal.spectrogram.

@@ -177,7 +178,9 @@
         Time series of Fourier coefficients
     """
     run_xrds = apply_prewhitening(decimation_obj, run_xrds_orig)
-    windowing_scheme = window_scheme_from_decimation(decimation_obj)
+    windowing_scheme = window_scheme_from_decimation(
+        decimation_obj
+    )  # TODO: deprecate in favor of stft.window.taper

     stft_obj = xr.Dataset()
     for channel_id in run_xrds.data_vars:

From 5dab8b92a05ee2db16d4b274e9d1cca6f5853368 Mon Sep 17 00:00:00 2001
From: "Karl N. Kappler"
Kappler" Date: Thu, 2 Jan 2025 15:42:14 -0800 Subject: [PATCH 4/9] factor add_spectrogram_to_mth5 from add_fcs_to_mth5 --- aurora/pipelines/fourier_coefficients.py | 40 ++++++++++-------------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/aurora/pipelines/fourier_coefficients.py b/aurora/pipelines/fourier_coefficients.py index cd97ba84..b22e9b7f 100644 --- a/aurora/pipelines/fourier_coefficients.py +++ b/aurora/pipelines/fourier_coefficients.py @@ -89,6 +89,7 @@ def fc_decimations_creator( time_period: Optional[TimePeriod] = None, ) -> List[FCDecimation]: """ + TODO: move this to mt_metadata / replace with mt_metadata method once moved. Creates mt_metadata FCDecimation objects that parameterize Fourier coefficient decimation levels. @@ -233,35 +234,28 @@ def add_fcs_to_mth5(m: MTH5, fc_decimations: Optional[Union[str, list]] = None) # If timing corrections were needed they could go here, right before STFT for i_dec_level, fc_decimation in enumerate(fc_decimations): + try: + assert i_dec_level == fc_decimation.time_series_decimation.level + except: + msg = "decimation level has unexpected value" + logger.warning(msg) + if ( i_dec_level != 0 ): # TODO: take this number from fc_decimation.time_series_decimation.level # Apply decimation ts_decimation = fc_decimation.time_series_decimation - run_xrds = prototype_decimate(ts_decimation, run_xrds) - - # check if this decimation level yields a valid spectrogram - if not fc_decimation.is_valid_for_time_series_length( - run_xrds.time.shape[0] - ): - logger.info( - f"Decimation Level {i_dec_level} invalid, TS of {run_xrds.time.shape[0]} samples too short" - ) - continue - - stft_obj = run_ts_to_stft_scipy(fc_decimation, run_xrds) - stft_obj = calibrate_stft_obj(stft_obj, run_obj) - - # Pack FCs into h5 and update metadata - fc_decimation_group: FCDecimationGroup = fc_group.add_decimation_level( - f"{i_dec_level}", decimation_level_metadata=fc_decimation - ) - fc_decimation_group.from_xarray( - stft_obj, - fc_decimation_group.metadata.time_series_decimation.sample_rate, + run_xrds = prototype_decimate( + ts_decimation, run_xrds + ) # TODO: replace this with mth5 decimation + + _add_spectrogram_to_mth5( + fc_decimation=fc_decimation, + run_obj=run_obj, + run_xrds=run_xrds, + fc_group=fc_group, ) - fc_decimation_group.update_metadata() - fc_group.update_metadata() + return From e0601d7e002a53ea241c8fa9f410699c6ccf63d2 Mon Sep 17 00:00:00 2001 From: "Karl N. Kappler" Date: Fri, 3 Jan 2025 15:51:34 -0800 Subject: [PATCH 5/9] try another way to make tests pass --- .github/workflows/tests.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 11955b94..09c8324d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -52,8 +52,10 @@ jobs: pip uninstall mth5 -y echo "Uninstalling mt_metadata" pip uninstall mt_metadata -y - pip install git+https://github.com/kujaku11/mt_metadata.git@fix_issue_235 - pip install git+https://github.com/kujaku11/mth5.git@fix_issue_271 + pip install git+https://github.com/kujaku11/mth5.git@features + echo "Uninstalling mt_metadata (again)" + pip uninstall mt_metadata -y + pip install git+https://github.com/kujaku11/mt_metadata.git@features conda list pip freeze From 91fbc7b80724fc665211a4b633dbd8e8286834b3 Mon Sep 17 00:00:00 2001 From: "Karl N. 
Kappler" Date: Fri, 3 Jan 2025 15:51:45 -0800 Subject: [PATCH 6/9] remane xrts to xrds --- tests/synthetic/test_decimation_methods.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/synthetic/test_decimation_methods.py b/tests/synthetic/test_decimation_methods.py index 64e5e5e8..919e7e84 100644 --- a/tests/synthetic/test_decimation_methods.py +++ b/tests/synthetic/test_decimation_methods.py @@ -49,16 +49,16 @@ def test_decimation_methods_agree(): if dec_level_id == 0: run_obj = mth5_obj.get_run(station_id, run_id, survey=None) run_ts = run_obj.to_runts(start=None, end=None) - run_xrts = run_ts.dataset - decimated_ts[dec_level_id]["run_xrts"] = run_xrts + run_xrds = run_ts.dataset + decimated_ts[dec_level_id]["run_xrds"] = run_xrds current_sample_rate = run_obj.metadata.sample_rate if dec_level_id > 0: - run_xrts = decimated_ts[dec_level_id - 1]["run_xrts"] + run_xrds = decimated_ts[dec_level_id - 1]["run_xrds"] target_sample_rate = current_sample_rate / (dec_config.decimation.factor) - decimated_1 = prototype_decimate(dec_config.decimation, run_xrts) - decimated_2 = run_xrts.sps_filters.decimate( + decimated_1 = prototype_decimate(dec_config.decimation, run_xrds) + decimated_2 = run_xrds.sps_filters.decimate( target_sample_rate=target_sample_rate ) @@ -67,7 +67,7 @@ def test_decimation_methods_agree(): assert np.isclose(difference.to_array(), 0).all() logger.info("prototype decimate aurora method agrees with mth5 decimate") - decimated_ts[dec_level_id]["run_xrts"] = decimated_1 + decimated_ts[dec_level_id]["run_xrds"] = decimated_1 current_sample_rate = target_sample_rate return From 96b3844905348b31183293098c3dd6f8abfacd57 Mon Sep 17 00:00:00 2001 From: "Karl N. Kappler" Date: Fri, 3 Jan 2025 15:56:28 -0800 Subject: [PATCH 7/9] modify tests to preinstall branches of mth5, mt_metadata --- .github/workflows/tests.yml | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 09c8324d..9d7512fb 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -37,9 +37,8 @@ jobs: echo $CONDA_PREFIX conda install -c conda-forge pytest pytest-cov certifi">=2017.4.17" pandoc pip install -r requirements-dev.txt - # pip install git+https://github.com/kujaku11/mt_metadata.git@main - # pip install git+https://github.com/kujaku11/mth5.git@master - # pip install git+https://github.com/MTgeophysics/mtpy-v2.git@main + pip install git+https://github.com/kujaku11/mt_metadata.git@features + pip install git+https://github.com/kujaku11/mth5.git@features pip install git+https://github.com/MTgeophysics/mtpy-v2.git@main pip uninstall aurora -y @@ -48,14 +47,14 @@ jobs: echo $CONDA_PREFIX pip install -e . 
echo "Install complete" - echo "Uninstalling mth5" - pip uninstall mth5 -y - echo "Uninstalling mt_metadata" - pip uninstall mt_metadata -y - pip install git+https://github.com/kujaku11/mth5.git@features - echo "Uninstalling mt_metadata (again)" - pip uninstall mt_metadata -y - pip install git+https://github.com/kujaku11/mt_metadata.git@features +# echo "Uninstalling mth5" +# pip uninstall mth5 -y +# echo "Uninstalling mt_metadata" +# pip uninstall mt_metadata -y +# pip install git+https://github.com/kujaku11/mth5.git@features +# echo "Uninstalling mt_metadata (again)" +# pip uninstall mt_metadata -y +# pip install git+https://github.com/kujaku11/mt_metadata.git@features conda list pip freeze From 33e88c43549f220270b209d1175e1933cef0fdee Mon Sep 17 00:00:00 2001 From: "Karl N. Kappler" Date: Fri, 3 Jan 2025 15:58:33 -0800 Subject: [PATCH 8/9] cleanup yaml --- .github/workflows/tests.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9d7512fb..c8ee6691 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -47,14 +47,6 @@ jobs: echo $CONDA_PREFIX pip install -e . echo "Install complete" -# echo "Uninstalling mth5" -# pip uninstall mth5 -y -# echo "Uninstalling mt_metadata" -# pip uninstall mt_metadata -y -# pip install git+https://github.com/kujaku11/mth5.git@features -# echo "Uninstalling mt_metadata (again)" -# pip uninstall mt_metadata -y -# pip install git+https://github.com/kujaku11/mt_metadata.git@features conda list pip freeze From 914998a81aa015dab966ed4bcf30a986c7539f6e Mon Sep 17 00:00:00 2001 From: "Karl N. Kappler" Date: Sat, 4 Jan 2025 10:58:59 -0800 Subject: [PATCH 9/9] uncommnet ipynb tests --- .github/workflows/tests.yml | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c8ee6691..f0fe8a47 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -50,23 +50,23 @@ jobs: conda list pip freeze -# - name: Install Jupyter and dependencies -# run: | -# pip install jupyter -# pip install ipykernel -# python -m ipykernel install --user --name aurora-test -# # Install any other dependencies you need -# -# - name: Execute Jupyter Notebooks -# run: | -# jupyter nbconvert --to notebook --execute docs/examples/dataset_definition.ipynb -# jupyter nbconvert --to notebook --execute docs/examples/operate_aurora.ipynb -# jupyter nbconvert --to notebook --execute docs/tutorials/pkd_units_check.ipynb -# jupyter nbconvert --to notebook --execute docs/tutorials/pole_zero_fitting/lemi_pole_zero_fitting_example.ipynb -# jupyter nbconvert --to notebook --execute docs/tutorials/processing_configuration.ipynb -# jupyter nbconvert --to notebook --execute docs/tutorials/process_cas04_multiple_station.ipynb -# jupyter nbconvert --to notebook --execute docs/tutorials/synthetic_data_processing.ipynb -# # Replace "notebook.ipynb" with your notebook's filename + - name: Install Jupyter and dependencies + run: | + pip install jupyter + pip install ipykernel + python -m ipykernel install --user --name aurora-test + # Install any other dependencies you need + + - name: Execute Jupyter Notebooks + run: | + jupyter nbconvert --to notebook --execute docs/examples/dataset_definition.ipynb + jupyter nbconvert --to notebook --execute docs/examples/operate_aurora.ipynb + jupyter nbconvert --to notebook --execute docs/tutorials/pkd_units_check.ipynb + jupyter nbconvert --to notebook 
+          jupyter nbconvert --to notebook --execute docs/tutorials/processing_configuration.ipynb
+          jupyter nbconvert --to notebook --execute docs/tutorials/process_cas04_multiple_station.ipynb
+          jupyter nbconvert --to notebook --execute docs/tutorials/synthetic_data_processing.ipynb
+          # Replace "notebook.ipynb" with your notebook's filename

 #      - name: Commit changes (if any)
 #        run: |