From aec986bf38b73263ad99e26eb6255c816db846ac Mon Sep 17 00:00:00 2001
From: Chris Holdgraf
Date: Wed, 8 Apr 2020 15:51:41 -0700
Subject: [PATCH] serializing nested ntbk metadata items

---
 myst_nb/parser.py | 18 ++++--------------
 1 file changed, 4 insertions(+), 14 deletions(-)

diff --git a/myst_nb/parser.py b/myst_nb/parser.py
index 403dd0ae..7e8fa172 100644
--- a/myst_nb/parser.py
+++ b/myst_nb/parser.py
@@ -117,7 +117,7 @@ def parse_block(src, start_line):
 
     for cell_index, nb_cell in enumerate(ntbk.cells):
 
-        # if the the source_map ahs been stored (for text-based notebooks),
+        # if the source_map has been stored (for text-based notebooks),
         # we use that do define the starting line for each cell
         # otherwise, we set a pseudo base that represents the cell index
         start_line = source_map[cell_index] if source_map else (cell_index + 1) * 10000
@@ -162,21 +162,11 @@ def parse_block(src, start_line):
     md.core.process(state)
 
     # Add the front matter.
-    # Note that myst_parser now serialises dict/list like keys, when rendering to
-    # docutils docinfo,
-    # so to stay consistent with the previous code (for now) we strip this data
+    # Note that myst_parser serialises dict/list like keys, when rendering to
+    # docutils docinfo. These could be read back with `json.loads`.
     state.tokens = [
         Token(
-            "front_matter",
-            "",
-            0,
-            content=(
-                {
-                    k: v
-                    for k, v in ntbk.metadata.items()
-                    if isinstance(v, (str, int, float))
-                }
-            ),
+            "front_matter", "", 0, content=({k: v for k, v in ntbk.metadata.items()}),
         )
     ] + state.tokens
 
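Note for reviewers: a minimal sketch of the round-trip the updated comment refers
to, assuming myst_parser JSON-serialises dict/list metadata values when writing
docutils docinfo (the `metadata` and `docinfo` names below are illustrative, not
the library's API).

    import json

    # Illustrative notebook metadata with a nested (dict-valued) key, similar
    # in shape to ntbk.metadata for a Jupyter notebook.
    metadata = {
        "kernelspec": {"display_name": "Python 3", "name": "python3"},
        "author": "Jane Doe",
    }

    # Assumed behaviour: dict/list values are JSON-serialised when rendered to
    # docutils docinfo, while scalar values pass through unchanged.
    docinfo = {
        key: json.dumps(value) if isinstance(value, (dict, list)) else value
        for key, value in metadata.items()
    }

    # Nested values can then be recovered downstream with json.loads.
    kernelspec = json.loads(docinfo["kernelspec"])
    assert kernelspec["name"] == "python3"
    assert docinfo["author"] == "Jane Doe"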