Skip to content

Commit

Permalink
More work on System class
Browse files Browse the repository at this point in the history
Added tests for each function
  • Loading branch information
aaschwanden committed Nov 7, 2023
1 parent 8cd8626 commit b5f2bd7
Show file tree
Hide file tree
Showing 7 changed files with 215 additions and 63 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: Python package
name: pism-ragis

on:
push:
Expand Down
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,10 @@ repos:
"-sn", # Don't display the score
]

- repo: https://github.com/srstevenson/nb-clean
rev: 3.1.0
hooks:
- id: nb-clean
# - repo: https://github.com/srstevenson/nb-clean
# rev: 3.1.0
# hooks:
# - id: nb-clean

- repo: local
hooks:
Expand Down
41 changes: 9 additions & 32 deletions notebooks/analyze_scalars.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"id": "de938c1e",
"metadata": {},
"outputs": [],
Expand All @@ -34,7 +34,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"id": "a4d3493a",
"metadata": {},
"outputs": [],
Expand Down Expand Up @@ -77,7 +77,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"id": "1408e106",
"metadata": {},
"outputs": [],
Expand All @@ -99,7 +99,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"id": "33c60b76",
"metadata": {},
"outputs": [],
Expand Down Expand Up @@ -268,42 +268,20 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"id": "d512825d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"138.88888888888889"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"0.2e12 / 1000 / 1200**2"
]
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": null,
"id": "44dc6bbe",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"-720500"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"-720000 - 500"
]
Expand Down Expand Up @@ -332,8 +310,7 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
"pygments_lexer": "ipython3"
}
},
"nbformat": 4,
Expand Down
79 changes: 71 additions & 8 deletions pism_ragis/systems.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
Module provides System class
"""

import math
from pathlib import Path
from typing import Union

Expand All @@ -28,7 +29,7 @@

class System:
"""
Class fo a system
Class for a system
"""

def __init__(self, d: Union[dict, Path, str]):
Expand All @@ -43,6 +44,69 @@ def __init__(self, d: Union[dict, Path, str]):
else:
print(f"{d} not recognized")

def make_batch_header(
    self,
    partition: str = "chinook_new",
    queue: str = "t2standard",
    walltime: str = "8:00:00",
    n_cores: int = 40,
    gid: Union[None, str] = None,
):
    """
    Create a batch (scheduler) job header from the system description.

    Parameters
    ----------
    partition : str
        Partition name; must appear in ``list_partitions()``.
        NOTE(review): the table key is recovered from the text after the
        last underscore, so partition names must follow a
        ``<prefix>_<key>`` pattern — confirm against the .toml files.
    queue : str
        Queue name; must appear in ``list_queues(partition)``.
    walltime : str
        Wall-clock limit, e.g. ``"8:00:00"``.
    n_cores : int
        Total number of tasks requested; must be positive.
    gid : str or None
        Group id substituted into headers containing ``{gid}``.
        NOTE(review): if None, the literal string "None" is inserted —
        confirm that is intended.

    Returns
    -------
    str
        The formatted job header.
    """
    assert n_cores > 0

    assert partition in self.list_partitions()
    assert queue in self.list_queues(partition)

    # The partition table key is the part after the last "_".
    partition = partition.split("_")[-1]
    ppn = self.partitions[partition]["cores_per_node"]  # type: ignore[attr-defined] # pylint: disable=E1101
    nodes = int(math.ceil(float(n_cores) / ppn))

    if nodes * ppn != n_cores:
        print(
            f"Warning! Running {n_cores} tasks on {nodes} {ppn}-processor nodes, wasting {ppn * nodes - n_cores} processors!"
        )

    # Bug fix: pass "nodes" — PBS-style headers (e.g. pleiades.toml) use
    # "#PBS -lselect={nodes}:..."; without this kwarg str.format raises
    # KeyError. Headers that do not reference {nodes} simply ignore it,
    # since str.format ignores unused keyword arguments.
    # (The previous split("\n")/"\n".join round trip was a no-op and has
    # been removed.)
    return self.job["header"].format(  # type: ignore[attr-defined] # pylint: disable=E1101
        queue=queue,
        walltime=walltime,
        cores=n_cores,
        ppn=ppn,
        nodes=nodes,
        partition=partition,
        gid=gid,
    )

def list_partitions(self):
    """
    Return the ``name`` of every configured partition.

    The ``default`` entry is an alias (it stores a key, not a partition
    table), so it is skipped.
    """
    names = []
    for key, cfg in self.partitions.items():  # type: ignore[attr-defined] # pylint: disable=E1101
        if key == "default":
            continue
        names.append(cfg["name"])
    return names

def list_queues(self, partition: Union[None, str] = None):
    """
    Return the queues configured for *partition*.

    Falls back to the system's default partition when *partition* is
    falsy. Only the text after the last underscore is used as the key
    into the partition table.
    """
    chosen = partition if partition else self.partitions["default"]  # type: ignore[attr-defined] # pylint: disable=E1101
    key = chosen.split("_")[-1]
    return self.partitions[key]["queues"]  # type: ignore[attr-defined] # pylint: disable=E1101

def to_dict(self):
"""
Returns self as dictionary
Expand All @@ -69,8 +133,8 @@ class Systems:
"""

def __init__(self):
    """
    Initialize the registry and load all system descriptions found in
    the default path.
    """
    # NOTE(review): the diff view interleaved the removed lines
    # (Path("data/") / self.default_path) with the added ones; this is
    # the post-commit version, which reads the TOML files shipped with
    # the tests.
    self._default_path: Path = Path("tests/data")
    self.add_systems_from_path(self._default_path)

@property
def default_path(self):
Expand Down Expand Up @@ -128,21 +192,20 @@ def add_systems_from_path(self, path):
for p_ in p:
s = toml.load(p_)
machine = s["machine"]
sys[machine] = s
sys[machine] = System(s)
self._values = sys

def __len__(self):
    """Return the number of registered systems."""
    # Removed the commented-out superseded implementation
    # (return len(self.systems)) — dead code.
    return len(self.values())

def dump(self):
    """
    Return a printable TOML representation of all known systems.

    Each system contributes its machine name, a separator rule, and the
    TOML serialization of its dictionary form.
    """
    # NOTE(review): the diff view showed both the removed line
    # (toml.dumps(s)) and the added one (toml.dumps(s.to_dict()));
    # only the added line is kept here, matching the change that stores
    # System objects (not plain dicts) in the registry.
    repr_str = ""
    for s in self.values():
        repr_str += s["machine"]
        repr_str += "\n------------\n\n"
        repr_str += toml.dumps(s.to_dict())
        repr_str += "\n"
    return repr_str

Expand Down
23 changes: 15 additions & 8 deletions tests/data/chinook.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,21 @@ job_id = "SLURM_JOBID"

work_dir = "SLURM_SUBMIT_DIR"

[queues]

t1standard = 24
t1small = 24
t2standard = 24
t2small = 24
debug = 24
analysis = 24
[partitions]

default = "new"

[partitions.old]

name = "old-chinook"
cores_per_node = 24
queues = ["t1standard", "t1small", "t2standard", "t2small"]

[partitions.new]

name = "new-chinook"
cores_per_node = 40
queues = ["t1standard", "t1small", "t2standard", "t2small"]

[job]

Expand Down
59 changes: 59 additions & 0 deletions tests/data/pleiades.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# Machine description for NASA's Pleiades cluster (PBS scheduler).
machine = "pleiades"

[partitions]

default = "sandy_bridge"

# NOTE(review): System.make_batch_header derives the partition table key
# from the text after the last "_" in the partition *name*; these short
# names ("bro", "has", "ivy", "san") do not match their table keys —
# confirm intended usage.
[partitions.broadwell]

name = "bro"
cores_per_node = 28
queues = ["debug", "normal", "long"]

[partitions.haswell]

name = "has"
cores_per_node = 24
queues = ["debug", "normal", "long"]

[partitions.ivy_bridge]

name = "ivy"
cores_per_node = 20
queues = ["debug", "normal", "long"]

[partitions.sandy_bridge]

name = "san"
cores_per_node = 16
queues = ["debug", "normal", "long"]

[MPI]

mpido = "mpiexec -n {cores}"

[scheduler]

name = "QSUB"
# Fixed typo: the PBS submission command is "qsub", not "qusb".
submit = "qsub"
job_id = "PBS_JOBID"

[filesystem]

work_dir = "PBS_O_WORKDIR"

[job]

header = """#PBS -S /bin/bash
#PBS -N cfd
#PBS -l walltime={walltime}
#PBS -m e
#PBS -W group_list={gid}
#PBS -q {queue}
#PBS -lselect={nodes}:ncpus={ppn}:mpiprocs={ppn}:model={partition}
#PBS -j oe
module list
cd $PBS_O_WORKDIR
"""
Loading

0 comments on commit b5f2bd7

Please sign in to comment.