diff --git a/src/together/resources/finetune.py b/src/together/resources/finetune.py
index 715a8225..da517afa 100644
--- a/src/together/resources/finetune.py
+++ b/src/together/resources/finetune.py
@@ -20,7 +20,7 @@
     TrainingType,
 )
 from together.types.finetune import DownloadCheckpointType
-from together.utils import log_warn, normalize_key
+from together.utils import log_warn_once, normalize_key
 
 
 class FineTuning:
@@ -77,6 +77,12 @@ def create(
             FinetuneResponse: Object containing information about fine-tuning job.
         """
 
+        if batch_size == "max":
+            log_warn_once(
+                "Since 1.3.0, batch size is automatically set to max. "
+                "This behavior can be disabled by setting the `batch_size` parameter to an integer value."
+            )
+
         requestor = api_requestor.APIRequestor(
             client=self._client,
         )
diff --git a/src/together/utils/_log.py b/src/together/utils/_log.py
index 5efe51cf..23abe210 100644
--- a/src/together/utils/_log.py
+++ b/src/together/utils/_log.py
@@ -13,6 +13,8 @@
 
 TOGETHER_LOG = os.environ.get("TOGETHER_LOG")
 
+WARNING_MESSAGES_ONCE = set()
+
 
 def _console_log_level() -> str | None:
     if together.log in ["debug", "info"]:
@@ -59,3 +61,11 @@ def log_warn(message: str | Any, **params: Any) -> None:
     msg = logfmt(dict(message=message, **params))
     print(msg, file=sys.stderr)
     logger.warn(msg)
+
+
+def log_warn_once(message: str | Any, **params: Any) -> None:
+    msg = logfmt(dict(message=message, **params))
+    if msg not in WARNING_MESSAGES_ONCE:
+        print(msg, file=sys.stderr)
+        logger.warn(msg)
+        WARNING_MESSAGES_ONCE.add(msg)