diff --git a/intro.md b/intro.md index ecdc192..8c3ddb4 100644 --- a/intro.md +++ b/intro.md @@ -58,6 +58,10 @@ Available notebooks: (Kimberly Siletti)** ## Release Notes +* **[abc_atlas_access (v0.1.1)]** + * Fixed compatibility issue with read only local caches specifically for + fuse style file mounts such as those used on CodeOcean. + * Added local cache example to notebooks. * **[abc_atlas_access (v0.1.0)]** * Fixed issue with loading 10X mouse data with the `get_gene_data` function. * **[Spring 2024 Public Beta (version 20240330)]** diff --git a/notebooks/10x_snRNASeq_tutorial_part_1.ipynb b/notebooks/10x_snRNASeq_tutorial_part_1.ipynb index 1c3c972..5e0e226 100644 --- a/notebooks/10x_snRNASeq_tutorial_part_1.ipynb +++ b/notebooks/10x_snRNASeq_tutorial_part_1.ipynb @@ -36,6 +36,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -58,6 +60,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -2421,7 +2427,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/10x_snRNASeq_tutorial_part_2a.ipynb b/notebooks/10x_snRNASeq_tutorial_part_2a.ipynb index bace66c..c758757 100644 --- a/notebooks/10x_snRNASeq_tutorial_part_2a.ipynb +++ b/notebooks/10x_snRNASeq_tutorial_part_2a.ipynb @@ -35,6 +35,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -57,6 +59,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -554,7 +560,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/10x_snRNASeq_tutorial_part_2b.ipynb b/notebooks/10x_snRNASeq_tutorial_part_2b.ipynb index b819c4d..ee7d47e 100644 --- a/notebooks/10x_snRNASeq_tutorial_part_2b.ipynb +++ b/notebooks/10x_snRNASeq_tutorial_part_2b.ipynb @@ -34,6 +34,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -56,6 +58,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -590,7 +596,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/WHB-10x_snRNASeq_tutorial_part_1.ipynb b/notebooks/WHB-10x_snRNASeq_tutorial_part_1.ipynb index 249e922..8be99c1 100644 --- a/notebooks/WHB-10x_snRNASeq_tutorial_part_1.ipynb +++ b/notebooks/WHB-10x_snRNASeq_tutorial_part_1.ipynb @@ -36,7 +36,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe from the stored csv file. See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", + "We will interact with the data using the **AbcProjectCache**. 
This cache object tracks which data has been downloaded and serves the path to the requested data on disk. For metadata, the cache can also directly serve up a Pandas Dataframe. See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] @@ -60,6 +62,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -1346,7 +1352,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/WHB-10x_snRNASeq_tutorial_part_2.ipynb b/notebooks/WHB-10x_snRNASeq_tutorial_part_2.ipynb index f14919b..9bb2bfc 100644 --- a/notebooks/WHB-10x_snRNASeq_tutorial_part_2.ipynb +++ b/notebooks/WHB-10x_snRNASeq_tutorial_part_2.ipynb @@ -34,6 +34,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -56,6 +58,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -1061,7 +1067,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/WHB_cluster_annotation_tutorial.ipynb b/notebooks/WHB_cluster_annotation_tutorial.ipynb index 7e98dc6..ba1f204 100644 --- a/notebooks/WHB_cluster_annotation_tutorial.ipynb +++ b/notebooks/WHB_cluster_annotation_tutorial.ipynb @@ -34,6 +34,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -56,6 +58,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -1605,7 +1611,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/ccf_and_parcellation_annotation_tutorial.ipynb b/notebooks/ccf_and_parcellation_annotation_tutorial.ipynb index 622db51..a16e2f4 100644 --- a/notebooks/ccf_and_parcellation_annotation_tutorial.ipynb +++ b/notebooks/ccf_and_parcellation_annotation_tutorial.ipynb @@ -38,6 +38,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -58,8 +60,12 @@ } ], "source": [ - "download_base = pathlib.Path('../../abc_download_root')\n", + "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -2659,7 +2665,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/cluster_annotation_tutorial.ipynb b/notebooks/cluster_annotation_tutorial.ipynb index 0690e5b..94b6931 100644 --- a/notebooks/cluster_annotation_tutorial.ipynb +++ b/notebooks/cluster_annotation_tutorial.ipynb @@ -36,6 +36,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -56,8 +58,12 @@ } ], "source": [ - "download_base = pathlib.Path('../../abc_download_root')\n", + "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -1917,7 +1923,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/cluster_groups_and_embeddings_tutorial.ipynb b/notebooks/cluster_groups_and_embeddings_tutorial.ipynb index 63e165c..7bcf953 100644 --- a/notebooks/cluster_groups_and_embeddings_tutorial.ipynb +++ b/notebooks/cluster_groups_and_embeddings_tutorial.ipynb @@ -31,6 +31,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -51,8 +53,12 @@ } ], "source": [ - "download_base = pathlib.Path('../../abc_download_root')\n", + "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -1379,7 +1385,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/cluster_neighborhood_gallery.ipynb b/notebooks/cluster_neighborhood_gallery.ipynb index 3e3d606..6d6d21d 100644 --- a/notebooks/cluster_neighborhood_gallery.ipynb +++ b/notebooks/cluster_neighborhood_gallery.ipynb @@ -31,6 +31,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -51,8 +53,12 @@ } ], "source": [ - "download_base = pathlib.Path('../../abc_download_root')\n", + "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -878,7 +884,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/general_accessing_10x_snRNASeq_tutorial.ipynb b/notebooks/general_accessing_10x_snRNASeq_tutorial.ipynb index d4df1d5..6502b28 100644 --- a/notebooks/general_accessing_10x_snRNASeq_tutorial.ipynb +++ b/notebooks/general_accessing_10x_snRNASeq_tutorial.ipynb @@ -33,7 +33,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve up a Pandas Dataframe. See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", + "We will interact with the data using the **AbcProjectCache**. 
This cache object tracks which data has been downloaded and serves the path to the requested data on disk. For metadata, the cache can also directly serve up a Pandas Dataframe. See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] @@ -57,6 +59,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, diff --git a/notebooks/getting_started.ipynb b/notebooks/getting_started.ipynb index efb2930..9e090d2 100644 --- a/notebooks/getting_started.ipynb +++ b/notebooks/getting_started.ipynb @@ -144,7 +144,9 @@ "\n", "Below we show how to setup up the cache to download from S3, how to list and switch to a different data release, and additionally how to list the directories available, their size, and the files in that directory.\n", "\n", - "Setup the **AbcProjectCache** object by specifying a directory and calling ``from_s3_cache`` as shown below. We also print what version of the manifest is being currently loaded by the cache." + "Setup the **AbcProjectCache** object by specifying a directory and calling ``from_s3_cache`` as shown below. 
We also print what version of the manifest is being currently loaded by the cache.\n", + "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar. Using the cache from a s3fs mount means that all files will be streamed over the internet instead of saved to the local disk." ] }, { @@ -166,6 +168,10 @@ "source": [ "download_base = Path('../../abc_download_root') # Path to where you would like to write the downloaded data.\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, diff --git a/notebooks/merfish_ccf_registration_tutorial.ipynb b/notebooks/merfish_ccf_registration_tutorial.ipynb index 7d52a56..72ebd2c 100644 --- a/notebooks/merfish_ccf_registration_tutorial.ipynb +++ b/notebooks/merfish_ccf_registration_tutorial.ipynb @@ -37,6 +37,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -57,8 +59,12 @@ } ], "source": [ - "download_base = pathlib.Path('../../abc_download_root')\n", + "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -2926,7 +2932,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/merfish_tutorial_part_1.ipynb b/notebooks/merfish_tutorial_part_1.ipynb index 4bfdc40..4824d6e 100644 --- a/notebooks/merfish_tutorial_part_1.ipynb +++ b/notebooks/merfish_tutorial_part_1.ipynb @@ -38,6 +38,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -60,6 +62,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -1729,7 +1735,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/merfish_tutorial_part_2a.ipynb b/notebooks/merfish_tutorial_part_2a.ipynb index 04dda9c..4e24131 100644 --- a/notebooks/merfish_tutorial_part_2a.ipynb +++ b/notebooks/merfish_tutorial_part_2a.ipynb @@ -35,6 +35,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -57,6 +59,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -302,7 +308,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/merfish_tutorial_part_2b.ipynb b/notebooks/merfish_tutorial_part_2b.ipynb index a994479..b957d9c 100644 --- a/notebooks/merfish_tutorial_part_2b.ipynb +++ b/notebooks/merfish_tutorial_part_2b.ipynb @@ -36,6 +36,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -58,6 +60,10 @@ "source": [ "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -722,7 +728,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/notebooks/zhuang_merfish_tutorial.ipynb b/notebooks/zhuang_merfish_tutorial.ipynb index 5e212cc..9b4284b 100644 --- a/notebooks/zhuang_merfish_tutorial.ipynb +++ b/notebooks/zhuang_merfish_tutorial.ipynb @@ -32,6 +32,8 @@ "source": [ "We will interact with the data using the **AbcProjectCache**. This cache object tracks which data has been downloaded and serves the path to the requsted data on disk. For metadata, the cache can also directly serve a up a Pandas Dataframe. 
See the ``getting_started`` notebook for more details on using the cache including installing it if it has not already been.\n", "\n", + "The commented section that calls ``from_local_cache`` can be used instead when a download of the data already exists on disk or running on CodeOcean with the attached Allen Brain Cell Atlas or similar mounting of the AWS s3 bucket as a directory through [s3fs-fuse](https://github.com/s3fs-fuse/s3fs-fuse) or similar.\n", + "\n", "**Change the download_base variable to where you have downloaded the data in your system.**" ] }, @@ -52,8 +54,12 @@ } ], "source": [ - "download_base = pathlib.Path('../../abc_download_root')\n", + "download_base = Path('../../abc_download_root')\n", "abc_cache = AbcProjectCache.from_s3_cache(download_base)\n", + "\n", + "# download_base = Path('../../data/abc_atlas') # Path to the already downloaded data or s3fs-fuse mount.\n", + "# abc_cache = AbcProjectCache.from_local_cache(download_base)\n", + "\n", "abc_cache.current_manifest" ] }, @@ -2342,7 +2348,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/pyproject.toml b/pyproject.toml index 3a1bd68..a2c97f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "abc_atlas_access" -version = "0.1.0" +version = "0.1.1" description = "A package for accessing/processing data from the ABC Atlas" dependencies = [ "anndata", diff --git a/src/abc_atlas_access/abc_atlas_cache/cloud_cache.py b/src/abc_atlas_access/abc_atlas_cache/cloud_cache.py index b1a32f4..dd77cc8 100644 --- a/src/abc_atlas_access/abc_atlas_cache/cloud_cache.py +++ b/src/abc_atlas_access/abc_atlas_cache/cloud_cache.py @@ -23,6 +23,10 @@ class MissingLocalManifestWarning(UserWarning): pass +class ReadOnlyLocalCacheWarning(UserWarning): + pass + + class BasicLocalCache(ABC): """ A class to handle the loading and accessing a 
project's data and @@ -719,11 +723,17 @@ def load_manifest(self, manifest_name: str): self._manifest = self._load_manifest(manifest_name) # Keep track of the newly loaded manifest - with open(self._manifest_last_used, 'w') as out_file: - out_file.write(manifest_name) + self._save_last_used_manifest(manifest_name) self._manifest_name = manifest_name + def _save_last_used_manifest(self, manifest_name: str): + """ + Save the name of the last manifest used in this cache. + """ + with open(self._manifest_last_used, 'w') as out_file: + out_file.write(manifest_name) + def _file_exists(self, file_attributes: CacheFileAttributes) -> bool: """ Given a CacheFileAttributes describing a file, assess whether that @@ -1347,3 +1357,17 @@ def _download_file(self, force_download: bool = False, skip_hash_check: bool = False) -> bool: raise NotImplementedError() + + def _save_last_used_manifest(self, manifest_name: str): + """Save the name of the last manifest used, warning instead of + raising when the local cache directory is read only.""" + try: + with open(self._manifest_last_used, 'w') as out_file: + out_file.write(manifest_name) + except OSError: + warnings.warn( + f"""LocalCache is a read only directory and cannot + save the last used manifest. + Current Manifest: {manifest_name}""", + ReadOnlyLocalCacheWarning + ) \ No newline at end of file