
serializing nested ntbk metadata items
choldgraf committed Apr 8, 2020
1 parent e77e33c commit aec986b
Showing 1 changed file with 4 additions and 14 deletions.
myst_nb/parser.py: 18 changes (4 additions & 14 deletions)
@@ -117,7 +117,7 @@ def parse_block(src, start_line):

     for cell_index, nb_cell in enumerate(ntbk.cells):

-        # if the the source_map ahs been stored (for text-based notebooks),
+        # if the source_map has been stored (for text-based notebooks),
         # we use that to define the starting line for each cell
         # otherwise, we set a pseudo base that represents the cell index
         start_line = source_map[cell_index] if source_map else (cell_index + 1) * 10000
@@ -162,21 +162,11 @@ def parse_block(src, start_line):
     md.core.process(state)

     # Add the front matter.
-    # Note that myst_parser now serialises dict/list like keys, when rendering to
-    # docutils docinfo,
-    # so to stay consistent with the previous code (for now) we strip this data
+    # Note that myst_parser serialises dict/list like keys, when rendering to
+    # docutils docinfo. These could be read back with `json.loads`.
     state.tokens = [
         Token(
-            "front_matter",
-            "",
-            0,
-            content=(
-                {
-                    k: v
-                    for k, v in ntbk.metadata.items()
-                    if isinstance(v, (str, int, float))
-                }
-            ),
+            "front_matter", "", 0, content=({k: v for k, v in ntbk.metadata.items()}),
         )
     ] + state.tokens

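To illustrate the effect of the change (this sketch is not part of the commit): the front_matter token now carries the full ntbk.metadata mapping, nested dict/list values included, instead of only flat str/int/float values. The notebook path, the "kernelspec" key, and the json round trip below are illustrative assumptions, not taken from the diff.

    import json

    import nbformat
    from markdown_it.token import Token

    # Load a notebook; "example.ipynb" is a placeholder path.
    ntbk = nbformat.read("example.ipynb", as_version=4)

    # Mirror the new behaviour: keep every metadata item, nested ones included,
    # rather than filtering down to str/int/float values as before.
    front_matter = Token(
        "front_matter", "", 0, content={k: v for k, v in ntbk.metadata.items()}
    )

    # Nested items such as "kernelspec" are now present on the token.
    kernelspec = front_matter.content.get("kernelspec", {})

    # As the new comment notes, a value that later gets serialised to a string
    # (e.g. when rendered into docutils docinfo) could be read back with json.loads.
    assert json.loads(json.dumps(kernelspec)) == kernelspec

The isinstance filter removed here is what previously dropped nested metadata such as kernelspec from the front matter.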

0 comments on commit aec986b
