Skip to content

Commit

Permalink
#521: Enrich synthetic data set with shared blocks (#522)
Browse files Browse the repository at this point in the history
* #521: description, constructing script, and dataset files

* #521: Configuration file to run the synthetic+blocks case through PhaseStepper and render it

* #521: modified example and specification

* #521: new test case

* #521: exclude pngs from trailing whitespace check

* #521: fix png whitespace error

* #521: pull in LBDatafile_schema.py with JSON_data_files_validator.py

* #521: finalized example and configuration with baseline load-only setting that must reach 0 imbalance

* #521: generalize whitespace check to ignore all pngs

* #521: remove unused imports

---------

Co-authored-by: Caleb Schilly <[email protected]>
  • Loading branch information
ppebay and cwschilly authored Jul 26, 2024
1 parent c0b88ee commit 5c66bb6
Show file tree
Hide file tree
Showing 9 changed files with 119 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/find-trailing-whitespace.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@ jobs:
- uses: actions/checkout@v4
- uses: DARMA-tasking/find-trailing-whitespace@master
with:
exclude: "doc" # ; separated path to exclude
exclude: "doc;.png$" # ; separated path to exclude
49 changes: 49 additions & 0 deletions config/synthetic-blocks.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# Specify input
from_data:
  data_stem: ../data/synthetic-blocks/synthetic-dataset-blocks
  phase_ids:
    - 0
check_schema: false

# Specify work model
# Load-only baseline (alpha=1, beta=gamma=0): only task load contributes,
# so the balancer is expected to reach zero imbalance on this dataset.
work_model:
  name: AffineCombination
  parameters:
    alpha: 1.0
    beta: 0.0
    gamma: 0.0

# Specify algorithm
algorithm:
  name: InformAndTransfer
  phase_id: 0
  parameters:
    n_iterations: 8
    n_rounds: 2
    fanout: 2
    order_strategy: arbitrary
    transfer_strategy: Recursive
    criterion: Tempered
    max_objects_per_transfer: 8
    # Deterministic transfers keep runs reproducible for testing.
    deterministic_transfer: true

# Specify output
output_dir: ../output
output_file_stem: synthetic-dataset-blocks
visualization:
  # 2 x 2 x 1 rank grid matches the 4 ranks of the synthetic dataset.
  x_ranks: 2
  y_ranks: 2
  z_ranks: 1
  object_jitter: 0.5
  rank_qoi: load
  object_qoi: load
  save_meshes: true
  force_continuous_object_qoi: true
  output_visualization_dir: ../output
  output_visualization_file_stem: output_file

write_JSON:
  compressed: False
  suffix: json
  communications: True
  offline_LB_compatible: True
Binary file added data/synthetic-blocks/conf-blocks.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
1 change: 1 addition & 0 deletions data/synthetic-blocks/synthetic-dataset-blocks.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"metadata":{"type":"LBDatafile","rank":0},"phases":[{"id":0,"tasks":[{"entity":{"home":0,"id":1,"migratable":true,"type":"object"},"node":0,"resource":"cpu","time":0.5,"user_defined":{"shared_id":0,"shared_bytes":9.0,"home_rank":0}},{"entity":{"home":0,"id":3,"migratable":true,"type":"object"},"node":0,"resource":"cpu","time":0.5,"user_defined":{"shared_id":1,"shared_bytes":9.0,"home_rank":0}},{"entity":{"home":0,"id":2,"migratable":true,"type":"object"},"node":0,"resource":"cpu","time":0.5,"user_defined":{"shared_id":1,"shared_bytes":9.0,"home_rank":0}},{"entity":{"home":0,"id":0,"migratable":true,"type":"object"},"node":0,"resource":"cpu","time":1.0,"user_defined":{"shared_id":0,"shared_bytes":9.0,"home_rank":0}}],"communications":[{"type":"SendRecv","to":{"type":"object","id":5},"messages":1,"from":{"type":"object","id":0},"bytes":2.0},{"type":"SendRecv","to":{"type":"object","id":4},"messages":1,"from":{"type":"object","id":1},"bytes":1.0},{"type":"SendRecv","to":{"type":"object","id":2},"messages":1,"from":{"type":"object","id":3},"bytes":1.0},{"type":"SendRecv","to":{"type":"object","id":8},"messages":1,"from":{"type":"object","id":3},"bytes":0.5}]}]}
1 change: 1 addition & 0 deletions data/synthetic-blocks/synthetic-dataset-blocks.1.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"metadata":{"type":"LBDatafile","rank":1},"phases":[{"id":0,"tasks":[{"entity":{"home":1,"id":5,"migratable":true,"type":"object"},"node":1,"resource":"cpu","time":2.0,"user_defined":{"shared_id":2,"shared_bytes":9.0,"home_rank":1}},{"entity":{"home":1,"id":4,"migratable":true,"type":"object"},"node":1,"resource":"cpu","time":0.5,"user_defined":{"shared_id":2,"shared_bytes":9.0,"home_rank":1}},{"entity":{"home":1,"id":7,"migratable":true,"type":"object"},"node":1,"resource":"cpu","time":0.5,"user_defined":{"shared_id":3,"shared_bytes":9.0,"home_rank":1}},{"entity":{"home":1,"id":6,"migratable":true,"type":"object"},"node":1,"resource":"cpu","time":1.0,"user_defined":{"shared_id":3,"shared_bytes":9.0,"home_rank":1}}],"communications":[{"type":"SendRecv","to":{"type":"object","id":1},"messages":1,"from":{"type":"object","id":4},"bytes":2.0},{"type":"SendRecv","to":{"type":"object","id":8},"messages":1,"from":{"type":"object","id":5},"bytes":2.0},{"type":"SendRecv","to":{"type":"object","id":6},"messages":1,"from":{"type":"object","id":7},"bytes":1.0}]}]}
1 change: 1 addition & 0 deletions data/synthetic-blocks/synthetic-dataset-blocks.2.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"metadata":{"type":"LBDatafile","rank":2},"phases":[{"id":0,"tasks":[{"entity":{"home":2,"id":8,"migratable":true,"type":"object"},"node":2,"resource":"cpu","time":1.5,"user_defined":{"shared_id":4,"shared_bytes":9.0,"home_rank":2}}],"communications":[{"type":"SendRecv","to":{"type":"object","id":6},"messages":1,"from":{"type":"object","id":8},"bytes":1.5}]}]}
1 change: 1 addition & 0 deletions data/synthetic-blocks/synthetic-dataset-blocks.3.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"metadata":{"type":"LBDatafile","rank":3},"phases":[{"id":0,"tasks":[]}]}
36 changes: 36 additions & 0 deletions data/synthetic-blocks/synthetic-dataset-blocks.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# config: synthetic data set with blocks
# Task loads, indexed by task id (tasks 0-8).
tasks: [1.0, 0.5, 0.5, 0.5, 0.5, 2.0, 1.0, 0.5, 1.5]
# Point-to-point communications between tasks (size in bytes).
communications:
  - {from: 0, size: 2.0, to: 5}
  - {from: 1, size: 1.0, to: 4}
  - {from: 3, size: 1.0, to: 2}
  - {from: 3, size: 0.5, to: 8}
  - {from: 4, size: 2.0, to: 1}
  - {from: 5, size: 2.0, to: 8}
  - {from: 7, size: 1.0, to: 6}
  - {from: 8, size: 1.5, to: 6}
# Initial task-to-rank assignment (rank 3 is deliberately empty).
ranks:
  0:
    tasks: [0, 1, 2, 3]
  1:
    tasks: [4, 5, 6, 7]
  2:
    tasks: [8]
  3:
    tasks: []
# Shared memory blocks: each block lists the tasks sharing it and its home rank.
shared_blocks:
  - size: 9.0
    tasks: [0, 1]
    home_rank: 0
  - size: 9.0
    tasks: [2, 3]
    home_rank: 0
  - size: 9.0
    tasks: [4, 5]
    home_rank: 1
  - size: 9.0
    tasks: [6, 7]
    home_rank: 1
  - size: 9.0
    tasks: [8]
    home_rank: 2
46 changes: 29 additions & 17 deletions src/lbaf/Utils/lbsJSONDataFilesValidatorLoader.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
# Use lbaf module from source if lbaf package is not installed
if importlib.util.find_spec('lbaf') is None:
sys.path.insert(0, f"{os.sep}".join(os.path.abspath(__file__).split(os.sep)[:-3]))
from lbaf import PROJECT_PATH, __version__
from lbaf.Utils.lbsArgumentParser import PromptArgumentParser
from lbaf.Utils.lbsLogging import Logger, get_logger
from lbaf.Utils.lbsWeb import download
Expand All @@ -17,17 +16,14 @@
IMPORT_DIR = os.path.join(
os.path.dirname(os.path.dirname(CURRENT_PATH)),
"imported")
TARGET_SCRIPT_NAME = "JSON_data_files_validator.py"
SCRIPT_URL = f"https://raw.githubusercontent.com/DARMA-tasking/vt/develop/scripts/{TARGET_SCRIPT_NAME}"
SCRIPT_TITLE = "JSON data files validator"


class JSONDataFilesValidatorLoader:
"""Data Files Validator Loader application class."""

def __init__(self):
    """Initialize the loader with its logger and the list of scripts to fetch."""
    # Parsed CLI arguments; None until populated by __parse_args.
    self.__args: Optional[dict] = None
    # Logger used for progress and warning messages.
    self.__logger: Logger = get_logger()
    # VT scripts downloaded together: the validator and its schema module.
    self.__scripts = ["JSON_data_files_validator.py", "LBDatafile_schema.py"]

def __parse_args(self):
"""Parse arguments."""
Expand All @@ -40,13 +36,12 @@ def __parse_args(self):
default=True)
self.__args = parser.parse_args()

def run(self, overwrite: Optional[bool] = None) -> int:
"""Downloads the VT Data validator script named self.TARGET_SCRIPT_NAME from the VT repository.
def __run(self, script_name, overwrite: Optional[bool] = None) -> int:
script_url = f"https://raw.githubusercontent.com/DARMA-tasking/vt/develop/scripts/{script_name}"
script_title = script_name.replace(".py", "").replace("_"," ")

exists = self.__is_loaded(script_name)

:param overwrite: None to parse arg from cli. True to overwrite the script if exists.
:returns: False if the script cannot be loaded.
"""
exists = self.is_loaded()
# Parse command line arguments
if overwrite is None:
self.__parse_args()
Expand All @@ -56,19 +51,36 @@ def run(self, overwrite: Optional[bool] = None) -> int:
self.__logger.info("Overwrite JSON data files validator")

if overwrite or not exists:
download(SCRIPT_URL, IMPORT_DIR, logger=self.__logger, file_title=SCRIPT_TITLE)
download(script_url, IMPORT_DIR, logger=self.__logger, file_title=script_title)
if not self.is_loaded():
self.__logger.warning("The JSON data files validator cannot be loaded")
self.__logger.warning(f"{script_title} cannot be loaded")
elif exists:
self.__logger.info("The JSON data files is ready to be used")
return 0 if os.path.isfile(os.path.join(IMPORT_DIR, TARGET_SCRIPT_NAME)) else 1
self.__logger.info(f"{script_title} is ready to be used")
return 0 if os.path.isfile(os.path.join(IMPORT_DIR, script_name)) else 1

def __is_loaded(self, script_name) -> bool:
    """Return True when *script_name* already exists in IMPORT_DIR."""
    script_path = os.path.join(IMPORT_DIR, script_name)
    return os.path.isfile(script_path)

def run(self, overwrite: Optional[bool] = None) -> int:
    """Download the VT data validator scripts from the VT repository.

    Stops at the first script that fails to load.

    :param overwrite: None to parse arg from cli. True to overwrite the script if exists.
    :returns: 0 if every script was loaded, 1 if any script cannot be loaded.
    """
    any_failed = any(
        self.__run(script_name, overwrite=overwrite) == 1
        for script_name in self.__scripts
    )
    return 1 if any_failed else 0

def is_loaded(self) -> bool:
    """Verify that all data files validator scripts have been downloaded.

    :returns: True only if every script in self.__scripts is present in IMPORT_DIR
    """
    for script_name in self.__scripts:
        # Fail fast on the first missing script.
        if not self.__is_loaded(script_name):
            return False
    return True


if __name__ == "__main__":
Expand Down

0 comments on commit 5c66bb6

Please sign in to comment.