From 5c08a38e2b799b5897169eb18b3c9ba91568f717 Mon Sep 17 00:00:00 2001
From: Nicholas Cilfone
Date: Wed, 3 Mar 2021 13:01:53 -0500
Subject: [PATCH] Transition to v2.0 (#37)

* Don't use unnecessary .keys() calls
* Fixed Tuples to be length-enforced, unlike lists. This allows one to define a set length for an iterative argument
* Removed legacy backend and API (dataclasses and custom typed interface). Updated the markup save call to support advanced types so that saved configurations are now valid spock config input files. Changed tuples to support length restrictions.
* Updated versioneer
* Updated versioneer pt. 2
* Updated the GitPython calls that were causing git errors so they get the correct info and check parent directories
* Added extra info to check for Docker or k8s
* Removed block dump notation
* Added extra info written as comments to TOML. Fall back to no extra info for JSON and warn, as comments are not allowed
---
 README.md                                  |   30 +-
 docs/Quick-Start.md                        |    3 +-
 docs/basic_tutorial/Saving.md              |    3 +-
 setup.cfg                                  |    2 +-
 spock/__init__.py                          |    2 +-
 spock/_version.py                          |   31 +-
 spock/args.py                              |    2 +-
 spock/backend/__init__.py                  |    2 +-
 spock/backend/attr/saver.py                |   79 +-
 spock/backend/attr/utils.py                |    4 +
 spock/backend/base.py                      |   59 +-
 spock/backend/dataclass/__init__.py        |   12 -
 spock/backend/dataclass/_dataclasses.py    | 1270 --------------------
 spock/backend/dataclass/args.py            |  290 -----
 spock/backend/dataclass/builder.py         |  239 ----
 spock/backend/dataclass/config.py          |  192 ---
 spock/backend/dataclass/payload.py         |   67 --
 spock/backend/dataclass/saver.py           |   39 -
 spock/backend/dataclass/utils.py           |   57 -
 spock/builder.py                           |    4 +-
 spock/config.py                            |    6 +-
 spock/handlers.py                          |   51 +-
 spock/utils.py                             |  105 +-
 tests/attr/attr_configs_test.py            |   16 +-
 tests/attr/test_all_attr.py                |   10 +
 tests/conf/legacy/json/test.json           |   20 -
 tests/conf/legacy/json/test_include.json   |    4 -
 tests/conf/legacy/toml/test.toml           |   34 -
 tests/conf/legacy/toml/test_include.toml   |    5 -
 tests/conf/legacy/yaml/choice.yaml         |    2 -
 tests/conf/legacy/yaml/inherited.yaml      |   32 -
 tests/conf/legacy/yaml/test.yaml           |   35 -
 tests/conf/legacy/yaml/test_include.yaml   |    5 -
 tests/conf/legacy/yaml/test_incorrect.yaml |   36 -
 tests/conf/yaml/tuple.yaml                 |    5 +
 tests/dataclass/adapter_configs_test.py    |  150 ---
 tests/dataclass/test_all_adapter.py        |  378 ------
 tests/debug/debug.py                       |   20 +-
 tests/debug/debug.yaml                     |   15 +-
 versioneer.py                              |  169 +--
 40 files changed, 421 insertions(+), 3064 deletions(-)
 delete mode 100644 spock/backend/dataclass/__init__.py
 delete mode 100644 spock/backend/dataclass/_dataclasses.py
 delete mode 100644 spock/backend/dataclass/args.py
 delete mode 100644 spock/backend/dataclass/builder.py
 delete mode 100644 spock/backend/dataclass/config.py
 delete mode 100644 spock/backend/dataclass/payload.py
 delete mode 100644 spock/backend/dataclass/saver.py
 delete mode 100644 spock/backend/dataclass/utils.py
 delete mode 100644 tests/conf/legacy/json/test.json
 delete mode 100644 tests/conf/legacy/json/test_include.json
 delete mode 100644 tests/conf/legacy/toml/test.toml
 delete mode 100644 tests/conf/legacy/toml/test_include.toml
 delete mode 100644 tests/conf/legacy/yaml/choice.yaml
 delete mode 100644 tests/conf/legacy/yaml/inherited.yaml
 delete mode 100644 tests/conf/legacy/yaml/test.yaml
 delete mode 100644 tests/conf/legacy/yaml/test_include.yaml
 delete mode 100644 tests/conf/legacy/yaml/test_incorrect.yaml
 create mode 100644 tests/conf/yaml/tuple.yaml
 delete mode 100644 tests/dataclass/adapter_configs_test.py
 delete mode 100644 tests/dataclass/test_all_adapter.py
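For context on the headline change (a file written by `save()` is now itself a valid `spock` input), here is a minimal sketch of the round trip under the v2.0 attrs-based API. The class, parameter, and path names below are illustrative, not taken from this diff:

```python
from spock.builder import ConfigArgBuilder
from spock.config import spock


@spock
class ModelConfig:
    # Defaults are plain assignments in the new attrs-backed API
    n_layers: int = 2
    dropout: float = 0.1


# Chaining save() before generate() writes a <uuid>.spock.cfg.yaml file
# (see the `name = str(uuid1()) + '.spock.cfg' + file_extension` logic in
# spock/backend/base.py below); after this PR that file can be passed back
# in via --config to reproduce the run
config = ConfigArgBuilder(ModelConfig, desc='round-trip demo') \
    .save(user_specified_path='/tmp/spock-runs') \
    .generate()
print(config.ModelConfig.dropout)
```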
diff --git a/README.md b/README.md
index eef493e2..9a1117b6 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
 ## About
 
 `spock` is a framework that helps manage complex parameter configurations during research and development of Python
-applications. `spock` let's you focus on the code you need to write instead of re-implementing boilerplate code like
+applications. `spock` lets you focus on the code you need to write instead of re-implementing boilerplate code like
 creating ArgParsers, reading configuration files, implementing traceability etc.
 
 In short, `spock` configurations are
 defined by simple and familiar class-based structures. This allows `spock` to
@@ -17,16 +17,33 @@ support inheritance, read from multiple markdown formats, and allow hierarchical
 ## Quick Install
 
-Supports Python 3.6+
+Requires Python 3.6+
 
 ```bash
 pip install spock-config
 ```
 
-## What's New
+## Version(s)
+
+All prior versions are available on PyPI. If legacy API and backend support is needed please install a pre-v2.0.0
+version. We recommend refactoring your code to the new API and backend instead, as legacy versions will be missing
+recent features, bugfixes, and hotfixes.
+
+* v2.0.0+: Dropped support for legacy backend and API semantics
+* v1.1.0-v1.2.1: New API with support for legacy backend and legacy API semantics
+* v1.0.0: Legacy API and backend
+
+## News
 
 See [Releases](https://github.com/fidelity/spock/releases) for more information.
 
+#### March 1st, 2021
+
+* Removed legacy backend and API (dataclasses and custom typed interface)
+* Updated the markup save call to support advanced types so that saved configurations are now valid `spock` config
+  input files
+* Changed tuples to support length restrictions
+
 #### November 25th, 2020
 
 * Addition of [Advanced Types](docs/advanced_features/Advanced-Types.md)
@@ -47,11 +64,14 @@ and automatic defaults.
 * Easily Managed Parameter Groups: Each class automatically generates its own object within a single namespace.
 * Parameter Inheritance: Classes support inheritance allowing for complex configurations derived from a common base
 set of parameters.
+* Complex Types: Nested Lists/Tuples, List/Tuples of Enum of `@spock` classes, List of repeated `@spock` classes
 * Multiple Configuration File Types: Configurations are specified from YAML, TOML, or JSON files.
-* Hierarchical Configuration: Composed from multiple configuration files via simple include statements.
+* Hierarchical Configuration: Compose from multiple configuration files via simple include statements.
 * Command-Line Overrides: Quickly experiment by overriding a value with automatically generated command line arguments.
 * Immutable: All classes are *frozen* preventing any misuse or accidental overwrites.
-* Tractability and Reproducibility: Save currently running parameter configuration with a single chained command.
+* Tractability and Reproducibility: Save runtime parameter configuration to YAML, TOML, or JSON with a single chained
+  command (with extra runtime info such as Git info, Python version, machine FQDN, etc.). The saved markup file can be
+  used as the configuration input to reproduce prior runtime configurations.
 
 #### Main Contributors
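To make the reproducibility bullet concrete, a saved YAML file might look roughly like the sketch below. The exact extra-info labels come from `add_info()` in `spock/utils.py` (not shown in this diff), so the comment keys and values here are illustrative only:

```yaml
# Machine FQDN: some-host.example.com
# Python Executable: /usr/bin/python3
# Python Version: 3.8.5
# Run w/ Docker: False
# Git Branch: master
# Git Commit: 5c08a38
ModelConfig:
  n_layers: 2
  dropout: 0.1
```

Because the extra info lands as comments (YAML/TOML) or is dropped with a warning (JSON), the file still parses as a plain config and can be passed straight back in via `--config`.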
diff --git a/docs/Quick-Start.md b/docs/Quick-Start.md
index 9c874eeb..cf05f4ed 100644
--- a/docs/Quick-Start.md
+++ b/docs/Quick-Start.md
@@ -10,7 +10,8 @@
 This is a quick and dirty guide to getting up and running with `spock`. Read the
 All examples can be found [here](https://github.com/fidelity/spock/blob/master/examples).
 
-Legacy documentation for the old API can be found [here](https://github.com/fidelity/spock/blob/master/docs/legacy)
+Legacy documentation for the old API (pre v2.0) can be
+found [here](https://github.com/fidelity/spock/blob/master/docs/legacy)
 
 ### TL;DR
 1. Import the necessary components from `spock`
diff --git a/docs/basic_tutorial/Saving.md b/docs/basic_tutorial/Saving.md
index 8b2aea17..174e6bf9 100644
--- a/docs/basic_tutorial/Saving.md
+++ b/docs/basic_tutorial/Saving.md
@@ -2,7 +2,8 @@
 The current configuration of running python code can be saved to file by chaining the `save()` method before the
 `generate()` call to the `ConfigArgBuilder` class. `spock` supports two ways to specify the path to write and the
-output file can be either YAML, TOML, or JSON (via the `file_extension` keyword argument).
+output file can be either YAML, TOML, or JSON (via the `file_extension` keyword argument). The saved markup file can
+be used as the configuration input to reproduce prior runtime configurations.
 
 ### Specify spock Special Parameter Type
diff --git a/setup.cfg b/setup.cfg
index 094b6202..2a03ef5a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,4 +3,4 @@ VCS = git
 style = pep440
 versionfile_source = spock/_version.py
 versionfile_build = spock/_version.py
-tag_prefix =
\ No newline at end of file
+tag_prefix=
\ No newline at end of file
diff --git a/spock/__init__.py b/spock/__init__.py
index 78e12c71..0ea44bea 100644
--- a/spock/__init__.py
+++ b/spock/__init__.py
@@ -14,4 +14,4 @@
 __all__ = ["args", "builder", "config"]
 
 __version__ = get_versions()['version']
-del get_versions
+del get_versions
\ No newline at end of file
diff --git a/spock/_version.py b/spock/_version.py
index 93ca0290..127a055d 100644
--- a/spock/_version.py
+++ b/spock/_version.py
@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2019 FMR LLC
-# SPDX-License-Identifier: Apache-2.0
 
 # This file helps to compute a version number in source trees obtained from
 # git-archive tarball (such as those provided by githubs download-from-tag
@@ -10,7 +6,7 @@
 # that just contains the computed version number.
 # This file is released into the public domain.
Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) +# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" @@ -45,7 +41,7 @@ def get_config(): cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" - cfg.tag_prefix = "None" + cfg.tag_prefix = "" cfg.parentdir_prefix = "None" cfg.versionfile_source = "spock/_version.py" cfg.verbose = False @@ -61,7 +57,7 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: @@ -97,9 +93,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() + stdout = p.communicate()[0].strip().decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) @@ -169,6 +163,10 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -304,6 +302,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -342,18 +343,18 @@ def render_pep440(pieces): def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. + """TAG[.post0.devDISTANCE] -- No -dirty. Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0.post0.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + rendered += ".post0.dev%d" % pieces["distance"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered @@ -389,7 +390,7 @@ def render_pep440_old(pieces): The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 
0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
diff --git a/spock/args.py b/spock/args.py
index dbc0e01d..e98032b4 100644
--- a/spock/args.py
+++ b/spock/args.py
@@ -5,5 +5,5 @@
 
 """Handles import aliases to allow backwards compat with backends"""
 
-from spock.backend.dataclass.args import *
+# from spock.backend.dataclass.args import *
 from spock.backend.attr.typed import SavePath
diff --git a/spock/backend/__init__.py b/spock/backend/__init__.py
index 43c41398..d8767c44 100644
--- a/spock/backend/__init__.py
+++ b/spock/backend/__init__.py
@@ -8,4 +8,4 @@
 Please refer to the documentation provided in the README.md
 """
 
-__all__ = ["attr", "base", "dataclass"]
+__all__ = ["attr", "base"]
diff --git a/spock/backend/attr/saver.py b/spock/backend/attr/saver.py
index fe341a63..3df6e73f 100644
--- a/spock/backend/attr/saver.py
+++ b/spock/backend/attr/saver.py
@@ -26,23 +26,68 @@ def __init__(self):
     def __call__(self, *args, **kwargs):
         return AttrSaver()
 
-    def _clean_up_values(self, payload, extra_info, file_extension):
+    def _clean_up_values(self, payload, file_extension):
+        # Dictionary to recursively write to
         out_dict = {}
-        for key, val in vars(payload).items():
-            # Skip comment append in JSON as it doesn't allow comments
-            if file_extension == '.json':
-                if isinstance(val, list):
-                    val = [attr.asdict(inner_val) for inner_val in val]
-                    out_dict.update({(key): val})
-                else:
-                    out_dict.update({key: attr.asdict(val)})
-            # Append comment tag to the base class and convert the spock class to a dict
-            else:
-                if isinstance(val, list):
-                    val = [attr.asdict(inner_val) for inner_val in val]
-                    out_dict.update({('# ' + key): val})
-                else:
-                    out_dict.update({('# ' + key): attr.asdict(val)})
+        # All of the classes are defined at the top level
+        all_spock_cls = set(vars(payload).keys())
+        out_dict = self._recursively_handle_clean(payload, out_dict, all_cls=all_spock_cls)
         # Convert values
-        clean_dict = self._clean_output(out_dict, extra_info)
+        clean_dict = self._clean_output(out_dict)
         return clean_dict
+
+    def _recursively_handle_clean(self, payload, out_dict, parent_name=None, all_cls=None):
+        """Recursively works through spock classes and adds clean data to a dictionary
+
+        Given a payload (Spockspace), work recursively through items that don't have parents to catch all
+        parameter definitions while correctly mapping nested class definitions to their base level class, thus
+        allowing the output markup to be a valid input file
+
+        *Args*:
+
+            payload: current payload (namespace)
+            out_dict: output dictionary
+            parent_name: name of the parent spock class if nested
+            all_cls: all top level spock class definitions
+
+        *Returns*:
+
+            out_dict: modified dictionary with the cleaned data
+
+        """
+        for key, val in vars(payload).items():
+            val_name = type(val).__name__
+            # This catches basic lists and lists of classes
+            if isinstance(val, list):
+                # Check if each entry is a spock class
+                clean_val = []
+                repeat_flag = False
+                for l_val in val:
+                    cls_name = type(l_val).__name__
+                    # For those that are a spock class and are repeated (cls_name == key) simply convert to dict
+                    if (cls_name in all_cls) and (cls_name == key):
+                        clean_val.append(attr.asdict(l_val))
+                    # For those whose cls is different from the key just append the cls name
+                    elif cls_name in all_cls:
+                        # Change the flag as this is a repeated class -- which needs to be compressed into a
+                        # single k:v pair
+                        repeat_flag = True
+                        clean_val.append(cls_name)
+                    # Fall back to the passed in values
+                    else:
+                        clean_val.append(l_val)
+                # Handle repeated classes
+                if repeat_flag:
+                    clean_val = list(set(clean_val))[-1]
+                out_dict.update({key: clean_val})
+            # If it's a spock class but has a parent then just use the class name to reference the values
+            elif (val_name in all_cls) and parent_name is not None:
+                out_dict.update({key: val_name})
+            # Check if it's a spock class without a parent -- iterate the values and recurse to catch more lists
+            elif val_name in all_cls:
+                new_dict = self._recursively_handle_clean(val, {}, parent_name=key, all_cls=all_cls)
+                out_dict.update({key: new_dict})
+            # Either a base type or no nested values that could be spock classes
+            else:
+                out_dict.update({key: val})
+        return out_dict
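To make the recursion concrete, here is roughly the dictionary shape it produces for a nested config. The class and parameter names are hypothetical and the shape is inferred from the logic above:

```python
# Hypothetical Spockspace: a top-level NestedConfig plus a ModelConfig whose
# `nested` parameter holds that NestedConfig instance.
# _recursively_handle_clean() keeps the full child definition at the top
# level and replaces the parent's value with the child's class name:
cleaned = {
    'NestedConfig': {'param': 1},               # full definition via attr.asdict
    'ModelConfig': {'nested': 'NestedConfig'},  # referenced by name, not inlined
}
# Serialized to YAML/TOML/JSON this mirrors spock's input syntax for nested
# classes, which is what makes the saved file loadable as a config again.
```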
diff --git a/spock/backend/attr/utils.py b/spock/backend/attr/utils.py
index aa2ce6d4..51e84e63 100644
--- a/spock/backend/attr/utils.py
+++ b/spock/backend/attr/utils.py
@@ -135,6 +135,10 @@ def _recursive_list_to_tuple(value, typed, class_names):
     # from a composed payload
     if hasattr(typed, '__args__') and not isinstance(value, tuple) and not (isinstance(value, str) and value in class_names):
+        # Force those with origin tuple types to be of the defined length
+        if (typed.__origin__.__name__.lower() == 'tuple') and len(value) != len(typed.__args__):
+            raise ValueError(f'Tuple(s) use a fixed/defined length -- Length of the provided argument ({len(value)}) '
+                             f'does not match the length of the defined argument ({len(typed.__args__)})')
         # need to recurse before casting as we can't set values in a tuple with idx
         # Since it's generic it should be iterable to recurse and check it's children
         for idx, val in enumerate(value):
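This is the length enforcement described in the PR summary: a config value must now match the annotated tuple arity exactly, while lists stay variable-length. A minimal sketch of the new behavior, assuming the v2.0 API (class and parameter names are illustrative):

```python
from typing import Tuple

from spock.builder import ConfigArgBuilder
from spock.config import spock


@spock
class WindowConfig:
    # Annotated as a 2-tuple, so exactly two values are accepted
    bounds: Tuple[int, int]


# Given a YAML config containing three values:
#   WindowConfig:
#     bounds: [0, 10, 20]
# the cast in _recursive_list_to_tuple now raises:
#   ValueError: Tuple(s) use a fixed/defined length -- Length of the provided
#   argument (3) does not match the length of the defined argument (2)
config = ConfigArgBuilder(WindowConfig, desc='tuple length demo').generate()
```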
diff --git a/spock/backend/base.py b/spock/backend/base.py
index 87e9739e..99955ec9 100644
--- a/spock/backend/base.py
+++ b/spock/backend/base.py
@@ -17,7 +17,6 @@
 from spock.handlers import TOMLHandler
 from spock.handlers import YAMLHandler
 from spock.utils import add_info
-from spock.utils import convert_save_dict
 from spock.utils import make_argument
@@ -31,7 +30,7 @@ def __init__(self, **kwargs):
         super(Spockspace, self).__init__(**kwargs)
 
     def __repr__(self):
-        # Remove aliases in YAML dump
+        # Remove aliases in YAML print
         yaml.Dumper.ignore_aliases = lambda *args: True
         return yaml.dump(self.__dict__, default_flow_style=False)
@@ -70,24 +69,26 @@ def save(self, payload, path, create_save_path=False, extra_info=True, file_exte
         """
         supported_extensions = list(self._writers.keys())
-        if file_extension not in list(self._writers.keys()):
+        if file_extension not in self._writers:
             raise ValueError(f'Invalid fileout extension. Expected a fileout from {supported_extensions}')
         # Make the filename
         name = str(uuid1()) + '.spock.cfg' + file_extension
         fid = path / name
-        # Fix up values
-        out_dict = self._clean_up_values(payload, extra_info, file_extension)
+        # Fix up values -- parameters
+        out_dict = self._clean_up_values(payload, file_extension)
+        # Get extra info
+        extra_dict = add_info() if extra_info else None
         try:
             if not os.path.exists(path) and create_save_path:
                 os.makedirs(path)
             with open(fid, 'w') as file_out:
-                self._writers.get(file_extension)().save(out_dict, file_out)
+                self._writers.get(file_extension)().save(out_dict, extra_dict, file_out)
         except OSError as e:
             print(f'Not a valid file path to write to: {fid}')
             raise e
 
     @abstractmethod
-    def _clean_up_values(self, payload, extra_info, file_extension):
+    def _clean_up_values(self, payload, file_extension):
         """Clean up the config payload so it can be written to file
 
         *Args*:
@@ -102,8 +103,7 @@
         """
 
-    @staticmethod
-    def _clean_output(out_dict, extra_info):
+    def _clean_output(self, out_dict):
         """Clean up the dictionary so it can be written to file
 
         *Args*:
@@ -124,26 +124,49 @@
                 for idx, list_val in enumerate(val):
                     tmp_dict = {}
                     for inner_key, inner_val in list_val.items():
-                        tmp_dict = convert_save_dict(tmp_dict, inner_val, inner_key)
+                        tmp_dict = self._convert(tmp_dict, inner_val, inner_key)
                     val[idx] = tmp_dict
                 clean_inner_dict = val
             else:
                 for inner_key, inner_val in val.items():
-                    clean_inner_dict = convert_save_dict(clean_inner_dict, inner_val, inner_key)
+                    clean_inner_dict = self._convert(clean_inner_dict, inner_val, inner_key)
             clean_dict.update({key: clean_inner_dict})
-        if extra_info:
-            clean_dict = add_info(clean_dict)
         return clean_dict
 
-    @staticmethod
-    def _convert(clean_inner_dict, inner_val, inner_key):
+    def _convert(self, clean_inner_dict, inner_val, inner_key):
         # Convert tuples to lists so they get written correctly
         if isinstance(inner_val, tuple):
-            clean_inner_dict.update({inner_key: list(inner_val)})
+            clean_inner_dict.update({inner_key: self._recursive_tuple_to_list(inner_val)})
         elif inner_val is not None:
             clean_inner_dict.update({inner_key: inner_val})
         return clean_inner_dict
 
+    def _recursive_tuple_to_list(self, value):
+        """Recursively turn tuples into lists
+
+        Recursively looks through tuple(s) and converts them to lists
+
+        *Args*:
+
+            value: value to check and convert if necessary
+
+        *Returns*:
+
+            list_v: updated value with any tuples cast to lists
+
+        """
+        # Recurse before casting since values in a tuple can't be set by index
+        list_v = []
+        for v in value:
+            if isinstance(v, tuple):
+                v = self._recursive_tuple_to_list(v)
+                list_v.append(v)
+            else:
+                list_v.append(v)
+        return list_v
+
 
 class BaseBuilder(ABC):  # pylint: disable=too-few-public-methods
     """Base class for building the backend specific builders
@@ -230,7 +253,7 @@ def generate(self, dict_args):
 
     def _auto_generate(self, args, input_class):
         """Builds an instance of a DataClass
 
-        Builds an instance of a dataclass with the necessary field values from the argument
+        Builds an instance with the necessary field values from the argument
         dictionary read from the config file(s)
 
         *Args*:
@@ -411,7 +434,7 @@ def _check_protected_keys(all_attr):
         """
         protected_names = ['config', 'help']
-        if any([val in all_attr.keys() for val in protected_names]):
+        if any([val in all_attr for val in 
protected_names]): raise ValueError(f"Using a protected name from {protected_names} at general class level which prevents " f"command line overrides") diff --git a/spock/backend/dataclass/__init__.py b/spock/backend/dataclass/__init__.py deleted file mode 100644 index 86c78c29..00000000 --- a/spock/backend/dataclass/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -""" -Spock is a framework that helps manage complex parameter configurations for Python applications - -Please refer to the documentation provided in the README.md -""" - -__all__ = ["args", "builder", "config", "payload", "saver", "utils", "_dataclasses"] diff --git a/spock/backend/dataclass/_dataclasses.py b/spock/backend/dataclass/_dataclasses.py deleted file mode 100644 index eb648fbe..00000000 --- a/spock/backend/dataclass/_dataclasses.py +++ /dev/null @@ -1,1270 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Manual fork of Python3.7 dataclass""" - -import re -import sys -import copy -import types -import inspect -import keyword -import builtins -import functools -import _thread - - -# This is a slight fork of the CPython dataclass module. The key difference -# is that the dataclass __init__ only accepts keyword arguments, which -# removes any ambiguity in the resolution order of arguments. This allows -# for non-default attributes to follow default attributes. That is, the following -# is now allowed: -# -# @dataclass -# class Base: -# a: int = 5 -# -# @dataclass -# class Derived(Base): -# b: int - - - -__all__ = ['dataclass', - 'field', - 'Field', - 'FrozenInstanceError', - 'InitVar', - 'MISSING', - - # Helper functions. - 'fields', - 'asdict', - 'astuple', - 'make_dataclass', - 'replace', - 'is_dataclass', - ] - -# Conditions for adding methods. The boxes indicate what action the -# dataclass decorator takes. For all of these tables, when I talk -# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm -# referring to the arguments to the @dataclass decorator. When -# checking if a dunder method already exists, I mean check for an -# entry in the class's __dict__. I never check to see if an attribute -# is defined in a base class. - -# Key: -# +=========+=========================================+ -# + Value | Meaning | -# +=========+=========================================+ -# | | No action: no method is added. | -# +---------+-----------------------------------------+ -# | add | Generated method is added. | -# +---------+-----------------------------------------+ -# | raise | TypeError is raised. | -# +---------+-----------------------------------------+ -# | None | Attribute is set to None. | -# +=========+=========================================+ - -# __init__ -# -# +--- init= parameter -# | -# v | | | -# | no | yes | <--- class has __init__ in __dict__? -# +=======+=======+=======+ -# | False | | | -# +-------+-------+-------+ -# | True | add | | <- the default -# +=======+=======+=======+ - -# __repr__ -# -# +--- repr= parameter -# | -# v | | | -# | no | yes | <--- class has __repr__ in __dict__? -# +=======+=======+=======+ -# | False | | | -# +-------+-------+-------+ -# | True | add | | <- the default -# +=======+=======+=======+ - - -# __setattr__ -# __delattr__ -# -# +--- frozen= parameter -# | -# v | | | -# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__? 
-# +=======+=======+=======+ -# | False | | | <- the default -# +-------+-------+-------+ -# | True | add | raise | -# +=======+=======+=======+ -# Raise because not adding these methods would break the "frozen-ness" -# of the class. - -# __eq__ -# -# +--- eq= parameter -# | -# v | | | -# | no | yes | <--- class has __eq__ in __dict__? -# +=======+=======+=======+ -# | False | | | -# +-------+-------+-------+ -# | True | add | | <- the default -# +=======+=======+=======+ - -# __lt__ -# __le__ -# __gt__ -# __ge__ -# -# +--- order= parameter -# | -# v | | | -# | no | yes | <--- class has any comparison method in __dict__? -# +=======+=======+=======+ -# | False | | | <- the default -# +-------+-------+-------+ -# | True | add | raise | -# +=======+=======+=======+ -# Raise because to allow this case would interfere with using -# functools.total_ordering. - -# __hash__ - -# +------------------- unsafe_hash= parameter -# | +----------- eq= parameter -# | | +--- frozen= parameter -# | | | -# v v v | | | -# | no | yes | <--- class has explicitly defined __hash__ -# +=======+=======+=======+========+========+ -# | False | False | False | | | No __eq__, use the base class __hash__ -# +-------+-------+-------+--------+--------+ -# | False | False | True | | | No __eq__, use the base class __hash__ -# +-------+-------+-------+--------+--------+ -# | False | True | False | None | | <-- the default, not hashable -# +-------+-------+-------+--------+--------+ -# | False | True | True | add | | Frozen, so hashable, allows override -# +-------+-------+-------+--------+--------+ -# | True | False | False | add | raise | Has no __eq__, but hashable -# +-------+-------+-------+--------+--------+ -# | True | False | True | add | raise | Has no __eq__, but hashable -# +-------+-------+-------+--------+--------+ -# | True | True | False | add | raise | Not frozen, but hashable -# +-------+-------+-------+--------+--------+ -# | True | True | True | add | raise | Frozen, so hashable -# +=======+=======+=======+========+========+ -# For boxes that are blank, __hash__ is untouched and therefore -# inherited from the base class. If the base is object, then -# id-based hashing is used. -# -# Note that a class may already have __hash__=None if it specified an -# __eq__ method in the class body (not one that was created by -# @dataclass). -# -# See _hash_action (below) for a coded version of this table. - - -# Raised when an attempt is made to modify a frozen class. -class FrozenInstanceError(AttributeError): pass - -# A sentinel object for default values to signal that a default -# factory will be used. This is given a nice repr() which will appear -# in the function signature of dataclasses' constructors. -class _HAS_DEFAULT_FACTORY_CLASS: - def __repr__(self): - return '' -_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS() - -# A sentinel object to detect if a parameter is supplied or not. Use -# a class to give it a better repr. -class _MISSING_TYPE: - pass -MISSING = _MISSING_TYPE() - -# Since most per-field metadata will be unused, create an empty -# read-only proxy that can be shared among all fields. -_EMPTY_METADATA = types.MappingProxyType({}) - -# Markers for the various kinds of fields and pseudo-fields. 
-class _FIELD_BASE: - def __init__(self, name): - self.name = name - def __repr__(self): - return self.name -_FIELD = _FIELD_BASE('_FIELD') -_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR') -_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR') - -# The name of an attribute on the class where we store the Field -# objects. Also used to check if a class is a Data Class. -_FIELDS = '__dataclass_fields__' - -# The name of an attribute on the class that stores the parameters to -# @dataclass. -_PARAMS = '__dataclass_params__' - -# The name of the function, that if it exists, is called at the end of -# __init__. -_POST_INIT_NAME = '__post_init__' - -# String regex that string annotations for ClassVar or InitVar must match. -# Allows "identifier.identifier[" or "identifier[". -# https://bugs.python.org/issue33453 for details. -_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)') - -class _InitVarMeta(type): - def __getitem__(self, params): - return self - -class InitVar(metaclass=_InitVarMeta): - pass - - -# Instances of Field are only ever created from within this module, -# and only from the field() function, although Field instances are -# exposed externally as (conceptually) read-only objects. -# -# name and type are filled in after the fact, not in __init__. -# They're not known at the time this class is instantiated, but it's -# convenient if they're available later. -# -# When cls._FIELDS is filled in with a list of Field objects, the name -# and type fields will have been populated. -class Field: - __slots__ = ('name', - 'type', - 'default', - 'default_factory', - 'repr', - 'hash', - 'init', - 'compare', - 'metadata', - '_field_type', # Private: not to be used by user code. - ) - - def __init__(self, default, default_factory, init, repr, hash, compare, - metadata): - self.name = None - self.type = None - self.default = default - self.default_factory = default_factory - self.init = init - self.repr = repr - self.hash = hash - self.compare = compare - self.metadata = (_EMPTY_METADATA - if metadata is None else - types.MappingProxyType(metadata)) - self._field_type = None - - def __repr__(self): - return ('Field(' - f'name={self.name!r},' - f'type={self.type!r},' - f'default={self.default!r},' - f'default_factory={self.default_factory!r},' - f'init={self.init!r},' - f'repr={self.repr!r},' - f'hash={self.hash!r},' - f'compare={self.compare!r},' - f'metadata={self.metadata!r},' - f'_field_type={self._field_type}' - ')') - - # This is used to support the PEP 487 __set_name__ protocol in the - # case where we're using a field that contains a descriptor as a - # default value. For details on __set_name__, see - # https://www.python.org/dev/peps/pep-0487/#implementation-details. - # - # Note that in _process_class, this Field object is overwritten - # with the default value, so the end result is a descriptor that - # had __set_name__ called on it at the right time. - def __set_name__(self, owner, name): - func = getattr(type(self.default), '__set_name__', None) - if func: - # There is a __set_name__ method on the descriptor, call - # it. 
- func(self.default, owner, name) - - -class _DataclassParams: - __slots__ = ('init', - 'repr', - 'eq', - 'order', - 'unsafe_hash', - 'frozen', - ) - - def __init__(self, init, repr, eq, order, unsafe_hash, frozen): - self.init = init - self.repr = repr - self.eq = eq - self.order = order - self.unsafe_hash = unsafe_hash - self.frozen = frozen - - def __repr__(self): - return ('_DataclassParams(' - f'init={self.init!r},' - f'repr={self.repr!r},' - f'eq={self.eq!r},' - f'order={self.order!r},' - f'unsafe_hash={self.unsafe_hash!r},' - f'frozen={self.frozen!r}' - ')') - - -# This function is used instead of exposing Field creation directly, -# so that a type checker can be told (via overloads) that this is a -# function whose type depends on its parameters. -def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, - hash=None, compare=True, metadata=None): - """Return an object to identify dataclass fields. - - default is the default value of the field. default_factory is a - 0-argument function called to initialize a field's value. If init - is True, the field will be a parameter to the class's __init__() - function. If repr is True, the field will be included in the - object's repr(). If hash is True, the field will be included in - the object's hash(). If compare is True, the field will be used - in comparison functions. metadata, if specified, must be a - mapping which is stored but not otherwise examined by dataclass. - - It is an error to specify both default and default_factory. - """ - - if default is not MISSING and default_factory is not MISSING: - raise ValueError('cannot specify both default and default_factory') - return Field(default, default_factory, init, repr, hash, compare, - metadata) - - -def _tuple_str(obj_name, fields): - # Return a string representing each field of obj_name as a tuple - # member. So, if fields is ['x', 'y'] and obj_name is "self", - # return "(self.x,self.y)". - - # Special case for the 0-tuple. - if not fields: - return '()' - # Note the trailing comma, needed if this turns out to be a 1-tuple. - return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)' - - -# This function's logic is copied from "recursive_repr" function in -# reprlib module to avoid dependency. -def _recursive_repr(user_function): - # Decorator to make a repr function return "..." for a recursive - # call. - repr_running = set() - - @functools.wraps(user_function) - def wrapper(self): - key = id(self), _thread.get_ident() - if key in repr_running: - return '...' - repr_running.add(key) - try: - result = user_function(self) - finally: - repr_running.discard(key) - return result - return wrapper - - -def _create_fn(name, args, body, *, globals=None, locals=None, - return_type=MISSING): - # Note that we mutate locals when exec() is called. Caller - # beware! The only callers are internal to this module, so no - # worries about external callers. - if locals is None: - locals = {} - # __builtins__ may be the "builtins" module or - # the value of its "__dict__", - # so make sure "__builtins__" is the module. - if globals is not None and '__builtins__' not in globals: - globals['__builtins__'] = builtins - return_annotation = '' - if return_type is not MISSING: - locals['_return_type'] = return_type - return_annotation = '->_return_type' - args = ','.join(args) - body = '\n'.join(f' {b}' for b in body) - - # Compute the text of the entire function. 
- txt = f'def {name}({args}){return_annotation}:\n{body}' - - exec(txt, globals, locals) - return locals[name] - - -def _field_assign(frozen, name, value, self_name): - # If we're a frozen class, then assign to our fields in __init__ - # via object.__setattr__. Otherwise, just use a simple - # assignment. - # - # self_name is what "self" is called in this function: don't - # hard-code "self", since that might be a field name. - if frozen: - return f'__builtins__.object.__setattr__({self_name},{name!r},{value})' - return f'{self_name}.{name}={value}' - - -def _field_init(f, frozen, globals, self_name): - # Return the text of the line in the body of __init__ that will - # initialize this field. - - default_name = f'_dflt_{f.name}' - if f.default_factory is not MISSING: - if f.init: - # This field has a default factory. If a parameter is - # given, use it. If not, call the factory. - globals[default_name] = f.default_factory - value = (f'{default_name}() ' - f'if {f.name} is _HAS_DEFAULT_FACTORY ' - f'else {f.name}') - else: - # This is a field that's not in the __init__ params, but - # has a default factory function. It needs to be - # initialized here by calling the factory function, - # because there's no other way to initialize it. - - # For a field initialized with a default=defaultvalue, the - # class dict just has the default value - # (cls.fieldname=defaultvalue). But that won't work for a - # default factory, the factory must be called in __init__ - # and we must assign that to self.fieldname. We can't - # fall back to the class dict's value, both because it's - # not set, and because it might be different per-class - # (which, after all, is why we have a factory function!). - - globals[default_name] = f.default_factory - value = f'{default_name}()' - else: - # No default factory. - if f.init: - if f.default is MISSING: - # There's no default, just do an assignment. - value = f.name - elif f.default is not MISSING: - globals[default_name] = f.default - value = f.name - else: - # This field does not need initialization. Signify that - # to the caller by returning None. - return None - - # Only test this now, so that we can create variables for the - # default. However, return None to signify that we're not going - # to actually do the assignment statement for InitVars. - if f._field_type is _FIELD_INITVAR: - return None - - # Now, actually generate the field assignment. - return _field_assign(frozen, f.name, value, self_name) - - -def _init_param(f): - # Return the __init__ parameter string for this field. For - # example, the equivalent of 'x:int=3' (except instead of 'int', - # reference a variable set to int, and instead of '3', reference a - # variable set to 3). - if f.default is MISSING and f.default_factory is MISSING: - # There's no default, and no default_factory, just output the - # variable name and type. - default = '' - elif f.default is not MISSING: - # There's a default, this will be the name that's used to look - # it up. - default = f'=_dflt_{f.name}' - elif f.default_factory is not MISSING: - # There's a factory function. Set a marker. - default = '=_HAS_DEFAULT_FACTORY' - return f'{f.name}:_type_{f.name}{default}' - - -def _init_fn(fields, frozen, has_post_init, self_name): - # fields contains both real fields and InitVar pseudo-fields. 
- - globals = {'MISSING': MISSING, - '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY} - - body_lines = [] - for f in fields: - line = _field_init(f, frozen, globals, self_name) - # line is None means that this field doesn't require - # initialization (it's a pseudo-field). Just skip it. - if line: - body_lines.append(line) - - # Does this class have a post-init function? - if has_post_init: - params_str = ','.join(f.name for f in fields - if f._field_type is _FIELD_INITVAR) - body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})') - - # If no body lines, use 'pass'. - if not body_lines: - body_lines = ['pass'] - - locals = {f'_type_{f.name}': f.type for f in fields} - return _create_fn('__init__', - [self_name] + ['*'] + [_init_param(f) for f in fields if f.init], - body_lines, - locals=locals, - globals=globals, - return_type=None) - - -def _repr_fn(fields): - fn = _create_fn('__repr__', - ('self',), - ['return self.__class__.__qualname__ + f"(' + - ', '.join([f"{f.name}={{self.{f.name}!r}}" - for f in fields]) + - ')"']) - return _recursive_repr(fn) - - -def _frozen_get_del_attr(cls, fields): - # XXX: globals is modified on the first call to _create_fn, then - # the modified version is used in the second call. Is this okay? - globals = {'cls': cls, - 'FrozenInstanceError': FrozenInstanceError} - if fields: - fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)' - else: - # Special case for the zero-length tuple. - fields_str = '()' - return (_create_fn('__setattr__', - ('self', 'name', 'value'), - (f'if type(self) is cls or name in {fields_str}:', - ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', - f'super(cls, self).__setattr__(name, value)'), - globals=globals), - _create_fn('__delattr__', - ('self', 'name'), - (f'if type(self) is cls or name in {fields_str}:', - ' raise FrozenInstanceError(f"cannot delete field {name!r}")', - f'super(cls, self).__delattr__(name)'), - globals=globals), - ) - - -def _cmp_fn(name, op, self_tuple, other_tuple): - # Create a comparison function. If the fields in the object are - # named 'x' and 'y', then self_tuple is the string - # '(self.x,self.y)' and other_tuple is the string - # '(other.x,other.y)'. - - return _create_fn(name, - ('self', 'other'), - [ 'if other.__class__ is self.__class__:', - f' return {self_tuple}{op}{other_tuple}', - 'return NotImplemented']) - - -def _hash_fn(fields): - self_tuple = _tuple_str('self', fields) - return _create_fn('__hash__', - ('self',), - [f'return hash({self_tuple})']) - - -def _is_classvar(a_type, typing): - # This test uses a typing internal class, but it's the best way to - # test if this is a ClassVar. - return (a_type is typing.ClassVar - or (type(a_type) is typing._GenericAlias - and a_type.__origin__ is typing.ClassVar)) - - -def _is_initvar(a_type, dataclasses): - # The module we're checking against is the module we're - # currently in (dataclasses.py). - return a_type is dataclasses.InitVar - - -def _is_type(annotation, cls, a_module, a_type, is_type_predicate): - # Given a type annotation string, does it refer to a_type in - # a_module? For example, when checking that annotation denotes a - # ClassVar, then a_module is typing, and a_type is - # typing.ClassVar. - - # It's possible to look up a_module given a_type, but it involves - # looking in sys.modules (again!), and seems like a waste since - # the caller already knows a_module. 
- - # - annotation is a string type annotation - # - cls is the class that this annotation was found in - # - a_module is the module we want to match - # - a_type is the type in that module we want to match - # - is_type_predicate is a function called with (obj, a_module) - # that determines if obj is of the desired type. - - # Since this test does not do a local namespace lookup (and - # instead only a module (global) lookup), there are some things it - # gets wrong. - - # With string annotations, cv0 will be detected as a ClassVar: - # CV = ClassVar - # @dataclass - # class C0: - # cv0: CV - - # But in this example cv1 will not be detected as a ClassVar: - # @dataclass - # class C1: - # CV = ClassVar - # cv1: CV - - # In C1, the code in this function (_is_type) will look up "CV" in - # the module and not find it, so it will not consider cv1 as a - # ClassVar. This is a fairly obscure corner case, and the best - # way to fix it would be to eval() the string "CV" with the - # correct global and local namespaces. However that would involve - # a eval() penalty for every single field of every dataclass - # that's defined. It was judged not worth it. - - match = _MODULE_IDENTIFIER_RE.match(annotation) - if match: - ns = None - module_name = match.group(1) - if not module_name: - # No module name, assume the class's module did - # "from dataclasses import InitVar". - ns = sys.modules.get(cls.__module__).__dict__ - else: - # Look up module_name in the class's module. - module = sys.modules.get(cls.__module__) - if module and module.__dict__.get(module_name) is a_module: - ns = sys.modules.get(a_type.__module__).__dict__ - if ns and is_type_predicate(ns.get(match.group(2)), a_module): - return True - return False - - -def _get_field(cls, a_name, a_type): - # Return a Field object for this field name and type. ClassVars - # and InitVars are also returned, but marked as such (see - # f._field_type). - - # If the default value isn't derived from Field, then it's only a - # normal default value. Convert it to a Field(). - default = getattr(cls, a_name, MISSING) - if isinstance(default, Field): - f = default - else: - if isinstance(default, types.MemberDescriptorType): - # This is a field in __slots__, so it has no default value. - default = MISSING - f = field(default=default) - - # Only at this point do we know the name and the type. Set them. - f.name = a_name - f.type = a_type - - # Assume it's a normal field until proven otherwise. We're next - # going to decide if it's a ClassVar or InitVar, everything else - # is just a normal field. - f._field_type = _FIELD - - # In addition to checking for actual types here, also check for - # string annotations. get_type_hints() won't always work for us - # (see https://github.com/python/typing/issues/508 for example), - # plus it's expensive and would require an eval for every stirng - # annotation. So, make a best effort to see if this is a ClassVar - # or InitVar using regex's and checking that the thing referenced - # is actually of the correct type. - - # For the complete discussion, see https://bugs.python.org/issue33453 - - # If typing has not been imported, then it's impossible for any - # annotation to be a ClassVar. So, only look for ClassVar if - # typing has been imported by any module (not necessarily cls's - # module). 
- typing = sys.modules.get('typing') - if typing: - if (_is_classvar(a_type, typing) - or (isinstance(f.type, str) - and _is_type(f.type, cls, typing, typing.ClassVar, - _is_classvar))): - f._field_type = _FIELD_CLASSVAR - - # If the type is InitVar, or if it's a matching string annotation, - # then it's an InitVar. - if f._field_type is _FIELD: - # The module we're checking against is the module we're - # currently in (dataclasses.py). - dataclasses = sys.modules[__name__] - if (_is_initvar(a_type, dataclasses) - or (isinstance(f.type, str) - and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, - _is_initvar))): - f._field_type = _FIELD_INITVAR - - # Validations for individual fields. This is delayed until now, - # instead of in the Field() constructor, since only here do we - # know the field name, which allows for better error reporting. - - # Special restrictions for ClassVar and InitVar. - if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR): - if f.default_factory is not MISSING: - raise TypeError(f'field {f.name} cannot have a ' - 'default factory') - # Should I check for other field settings? default_factory - # seems the most serious to check for. Maybe add others. For - # example, how about init=False (or really, - # init=)? It makes no sense for - # ClassVar and InitVar to specify init=. - - # For real fields, disallow mutable defaults for known types. - if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): - raise ValueError(f'mutable default {type(f.default)} for field ' - f'{f.name} is not allowed: use default_factory') - - return f - - -def _set_new_attribute(cls, name, value): - # Never overwrites an existing attribute. Returns True if the - # attribute already exists. - if name in cls.__dict__: - return True - setattr(cls, name, value) - return False - - -# Decide if/how we're going to create a hash function. Key is -# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to -# take. The common case is to do nothing, so instead of providing a -# function that is a no-op, use None to signify that. - -def _hash_set_none(cls, fields): - return None - -def _hash_add(cls, fields): - flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] - return _hash_fn(flds) - -def _hash_exception(cls, fields): - # Raise an exception. - raise TypeError(f'Cannot overwrite attribute __hash__ ' - f'in class {cls.__name__}') - -# -# +-------------------------------------- unsafe_hash? -# | +------------------------------- eq? -# | | +------------------------ frozen? -# | | | +---------------- has-explicit-hash? -# | | | | -# | | | | +------- action -# | | | | | -# v v v v v -_hash_action = {(False, False, False, False): None, - (False, False, False, True ): None, - (False, False, True, False): None, - (False, False, True, True ): None, - (False, True, False, False): _hash_set_none, - (False, True, False, True ): None, - (False, True, True, False): _hash_add, - (False, True, True, True ): None, - (True, False, False, False): _hash_add, - (True, False, False, True ): _hash_exception, - (True, False, True, False): _hash_add, - (True, False, True, True ): _hash_exception, - (True, True, False, False): _hash_add, - (True, True, False, True ): _hash_exception, - (True, True, True, False): _hash_add, - (True, True, True, True ): _hash_exception, - } -# See https://bugs.python.org/issue32929#msg312829 for an if-statement -# version of this table. 
- - -def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): - # Now that dicts retain insertion order, there's no reason to use - # an ordered dict. I am leveraging that ordering here, because - # derived class fields overwrite base class fields, but the order - # is defined by the base class, which is found first. - fields = {} - - setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order, - unsafe_hash, frozen)) - - # Find our base classes in reverse MRO order, and exclude - # ourselves. In reversed order so that more derived classes - # override earlier field definitions in base classes. As long as - # we're iterating over them, see if any are frozen. - any_frozen_base = False - has_dataclass_bases = False - for b in cls.__mro__[-1:0:-1]: - # Only process classes that have been processed by our - # decorator. That is, they have a _FIELDS attribute. - base_fields = getattr(b, _FIELDS, None) - if base_fields: - has_dataclass_bases = True - for f in base_fields.values(): - fields[f.name] = f - if getattr(b, _PARAMS).frozen: - any_frozen_base = True - - # Annotations that are defined in this class (not in base - # classes). If __annotations__ isn't present, then this class - # adds no new annotations. We use this to compute fields that are - # added by this class. - # - # Fields are found from cls_annotations, which is guaranteed to be - # ordered. Default values are from class attributes, if a field - # has a default. If the default value is a Field(), then it - # contains additional info beyond (and possibly including) the - # actual default value. Pseudo-fields ClassVars and InitVars are - # included, despite the fact that they're not real fields. That's - # dealt with later. - cls_annotations = cls.__dict__.get('__annotations__', {}) - - # Now find fields in our class. While doing so, validate some - # things, and set the default values (as class attributes) where - # we can. - cls_fields = [_get_field(cls, name, type) - for name, type in cls_annotations.items()] - for f in cls_fields: - fields[f.name] = f - - # If the class attribute (which is the default value for this - # field) exists and is of type 'Field', replace it with the - # real default. This is so that normal class introspection - # sees a real default value, not a Field. - if isinstance(getattr(cls, f.name, None), Field): - if f.default is MISSING: - # If there's no default, delete the class attribute. - # This happens if we specify field(repr=False), for - # example (that is, we specified a field object, but - # no default value). Also if we're using a default - # factory. The class attribute should not be set at - # all in the post-processed class. - delattr(cls, f.name) - else: - setattr(cls, f.name, f.default) - - # Do we have any Field members that don't also have annotations? - for name, value in cls.__dict__.items(): - if isinstance(value, Field) and not name in cls_annotations: - raise TypeError(f'{name!r} is a field but has no type annotation') - - # Check rules that apply if we are derived from any dataclasses. - if has_dataclass_bases: - # Raise an exception if any of our bases are frozen, but we're not. - if any_frozen_base and not frozen: - raise TypeError('cannot inherit non-frozen dataclass from a ' - 'frozen one') - - # Raise an exception if we're frozen, but none of our bases are. - if not any_frozen_base and frozen: - raise TypeError('cannot inherit frozen dataclass from a ' - 'non-frozen one') - - # Remember all of the fields on our class (including bases). 
This - # also marks this class as being a dataclass. - setattr(cls, _FIELDS, fields) - - # Was this class defined with an explicit __hash__? Note that if - # __eq__ is defined in this class, then python will automatically - # set __hash__ to None. This is a heuristic, as it's possible - # that such a __hash__ == None was not auto-generated, but it - # close enough. - class_hash = cls.__dict__.get('__hash__', MISSING) - has_explicit_hash = not (class_hash is MISSING or - (class_hash is None and '__eq__' in cls.__dict__)) - - # If we're generating ordering methods, we must be generating the - # eq methods. - if order and not eq: - raise ValueError('eq must be true if order is true') - - if init: - # Does this class have a post-init function? - has_post_init = hasattr(cls, _POST_INIT_NAME) - - # Include InitVars and regular fields (so, not ClassVars). - flds = [f for f in fields.values() - if f._field_type in (_FIELD, _FIELD_INITVAR)] - _set_new_attribute(cls, '__init__', - _init_fn(flds, - frozen, - has_post_init, - # The name to use for the "self" - # param in __init__. Use "self" - # if possible. - '__dataclass_self__' if 'self' in fields - else 'self', - )) - - # Get the fields as a list, and include only real fields. This is - # used in all of the following methods. - field_list = [f for f in fields.values() if f._field_type is _FIELD] - - if repr: - flds = [f for f in field_list if f.repr] - _set_new_attribute(cls, '__repr__', _repr_fn(flds)) - - if eq: - # Create _eq__ method. There's no need for a __ne__ method, - # since python will call __eq__ and negate it. - flds = [f for f in field_list if f.compare] - self_tuple = _tuple_str('self', flds) - other_tuple = _tuple_str('other', flds) - _set_new_attribute(cls, '__eq__', - _cmp_fn('__eq__', '==', - self_tuple, other_tuple)) - - if order: - # Create and set the ordering methods. - flds = [f for f in field_list if f.compare] - self_tuple = _tuple_str('self', flds) - other_tuple = _tuple_str('other', flds) - for name, op in [('__lt__', '<'), - ('__le__', '<='), - ('__gt__', '>'), - ('__ge__', '>='), - ]: - if _set_new_attribute(cls, name, - _cmp_fn(name, op, self_tuple, other_tuple)): - raise TypeError(f'Cannot overwrite attribute {name} ' - f'in class {cls.__name__}. Consider using ' - 'functools.total_ordering') - - if frozen: - for fn in _frozen_get_del_attr(cls, field_list): - if _set_new_attribute(cls, fn.__name__, fn): - raise TypeError(f'Cannot overwrite attribute {fn.__name__} ' - f'in class {cls.__name__}') - - # Decide if/how we're going to create a hash function. - hash_action = _hash_action[bool(unsafe_hash), - bool(eq), - bool(frozen), - has_explicit_hash] - if hash_action: - # No need to call _set_new_attribute here, since by the time - # we're here the overwriting is unconditional. - cls.__hash__ = hash_action(cls, field_list) - - if not getattr(cls, '__doc__'): - # Create a class doc-string. - cls.__doc__ = (cls.__name__ + - str(inspect.signature(cls)).replace(' -> None', '')) - - return cls - - -# _cls should never be specified by keyword, so start it with an -# underscore. The presence of _cls is used to detect if this -# decorator is being called with parameters or not. -def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False, - unsafe_hash=False, frozen=False): - """Returns the same class as was passed in, with dunder methods - added based on the fields defined in the class. - - Examines PEP 526 __annotations__ to determine fields. - - If init is true, an __init__() method is added to the class. 
If - repr is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method function is added. If frozen is true, fields may - not be assigned to after instance creation. - """ - - def wrap(cls): - return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen) - - # See if we're being called as @dataclass or @dataclass(). - if _cls is None: - # We're called with parens. - return wrap - - # We're called as @dataclass without parens. - return wrap(_cls) - - -def fields(class_or_instance): - """Return a tuple describing the fields of this dataclass. - - Accepts a dataclass or an instance of one. Tuple elements are of - type Field. - """ - - # Might it be worth caching this, per class? - try: - fields = getattr(class_or_instance, _FIELDS) - except AttributeError: - raise TypeError('must be called with a dataclass type or instance') - - # Exclude pseudo-fields. Note that fields is sorted by insertion - # order, so the order of the tuple is as the fields were defined. - return tuple(f for f in fields.values() if f._field_type is _FIELD) - - -def _is_dataclass_instance(obj): - """Returns True if obj is an instance of a dataclass.""" - return hasattr(type(obj), _FIELDS) - - -def is_dataclass(obj): - """Returns True if obj is a dataclass or an instance of a - dataclass.""" - cls = obj if isinstance(obj, type) else type(obj) - return hasattr(cls, _FIELDS) - - -def asdict(obj, *, dict_factory=dict): - """Return the fields of a dataclass instance as a new dictionary mapping - field names to field values. - - Example usage: - - @dataclass - class C: - x: int - y: int - - c = C(1, 2) - assert asdict(c) == {'x': 1, 'y': 2} - - If given, 'dict_factory' will be used instead of built-in dict. - The function applies recursively to field values that are - dataclass instances. This will also look into built-in containers: - tuples, lists, and dicts. - """ - if not _is_dataclass_instance(obj): - raise TypeError("asdict() should be called on dataclass instances") - return _asdict_inner(obj, dict_factory) - - -def _asdict_inner(obj, dict_factory): - if _is_dataclass_instance(obj): - result = [] - for f in fields(obj): - value = _asdict_inner(getattr(obj, f.name), dict_factory) - result.append((f.name, value)) - return dict_factory(result) - elif isinstance(obj, tuple) and hasattr(obj, '_fields'): - # obj is a namedtuple. Recurse into it, but the returned - # object is another namedtuple of the same type. This is - # similar to how other list- or tuple-derived classes are - # treated (see below), but we just need to create them - # differently because a namedtuple's __init__ needs to be - # called differently (see bpo-34363). - - # I'm not using namedtuple's _asdict() - # method, because: - # - it does not recurse in to the namedtuple fields and - # convert them to dicts (using dict_factory). - # - I don't actually want to return a dict here. The the main - # use case here is json.dumps, and it handles converting - # namedtuples to lists. Admittedly we're losing some - # information here when we produce a json list instead of a - # dict. Note that if we returned dicts here instead of - # namedtuples, we could no longer call asdict() on a data - # structure where a namedtuple was used as a dict key. 
- - return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj]) - elif isinstance(obj, (list, tuple)): - # Assume we can create an object of this type by passing in a - # generator (which is not true for namedtuples, handled - # above). - return type(obj)(_asdict_inner(v, dict_factory) for v in obj) - elif isinstance(obj, dict): - return type(obj)((_asdict_inner(k, dict_factory), - _asdict_inner(v, dict_factory)) - for k, v in obj.items()) - else: - return copy.deepcopy(obj) - - -def astuple(obj, *, tuple_factory=tuple): - """Return the fields of a dataclass instance as a new tuple of field values. - - Example usage:: - - @dataclass - class C: - x: int - y: int - - c = C(1, 2) - assert astuple(c) == (1, 2) - - If given, 'tuple_factory' will be used instead of built-in tuple. - The function applies recursively to field values that are - dataclass instances. This will also look into built-in containers: - tuples, lists, and dicts. - """ - - if not _is_dataclass_instance(obj): - raise TypeError("astuple() should be called on dataclass instances") - return _astuple_inner(obj, tuple_factory) - - -def _astuple_inner(obj, tuple_factory): - if _is_dataclass_instance(obj): - result = [] - for f in fields(obj): - value = _astuple_inner(getattr(obj, f.name), tuple_factory) - result.append(value) - return tuple_factory(result) - elif isinstance(obj, tuple) and hasattr(obj, '_fields'): - # obj is a namedtuple. Recurse into it, but the returned - # object is another namedtuple of the same type. This is - # similar to how other list- or tuple-derived classes are - # treated (see below), but we just need to create them - # differently because a namedtuple's __init__ needs to be - # called differently (see bpo-34363). - return type(obj)(*[_astuple_inner(v, tuple_factory) for v in obj]) - elif isinstance(obj, (list, tuple)): - # Assume we can create an object of this type by passing in a - # generator (which is not true for namedtuples, handled - # above). - return type(obj)(_astuple_inner(v, tuple_factory) for v in obj) - elif isinstance(obj, dict): - return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) - for k, v in obj.items()) - else: - return copy.deepcopy(obj) - - -def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, - repr=True, eq=True, order=False, unsafe_hash=False, - frozen=False): - """Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'. - - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ - - if namespace is None: - namespace = {} - else: - # Copy namespace since we're going to mutate it. - namespace = namespace.copy() - - # While we're looking through the field names, validate that they - # are identifiers, are not keywords, and not duplicates. 
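The validation loop that follows enforces exactly what this comment says. With the standard library's `make_dataclass` the same checks look like this (a sketch, not part of the patch):

```python
# Field-spec handling and name validation in make_dataclass
# (standard library; illustrative sketch).
from dataclasses import make_dataclass, field

# Accepted specs: bare name, (name, type), and (name, type, Field)
C = make_dataclass('C', ['x', ('y', int), ('z', int, field(default=0))])
print(C(1, 2))  # C(x=1, y=2, z=0)

try:
    make_dataclass('Bad', [('class', int)])  # keyword as a field name
except TypeError as err:
    print(err)  # Field names must not be keywords: 'class'
```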
- seen = set() - anns = {} - for item in fields: - if isinstance(item, str): - name = item - tp = 'typing.Any' - elif len(item) == 2: - name, tp, = item - elif len(item) == 3: - name, tp, spec = item - namespace[name] = spec - else: - raise TypeError(f'Invalid field: {item!r}') - - if not isinstance(name, str) or not name.isidentifier(): - raise TypeError(f'Field names must be valid identifiers: {name!r}') - if keyword.iskeyword(name): - raise TypeError(f'Field names must not be keywords: {name!r}') - if name in seen: - raise TypeError(f'Field name duplicated: {name!r}') - - seen.add(name) - anns[name] = tp - - namespace['__annotations__'] = anns - # We use `types.new_class()` instead of simply `type()` to allow dynamic creation - # of generic dataclassses. - cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace)) - return dataclass(cls, init=init, repr=repr, eq=eq, order=order, - unsafe_hash=unsafe_hash, frozen=frozen) - - -def replace(*args, **changes): - """Return a new object replacing specified fields with new values. - - This is especially useful for frozen classes. Example usage: - - @dataclass(frozen=True) - class C: - x: int - y: int - - c = C(1, 2) - c1 = replace(c, x=3) - assert c1.x == 3 and c1.y == 2 - """ - if len(args) > 1: - raise TypeError(f'replace() takes 1 positional argument but {len(args)} were given') - if args: - obj, = args - elif 'obj' in changes: - obj = changes.pop('obj') - else: - raise TypeError("replace() missing 1 required positional argument: 'obj'") - - # We're going to mutate 'changes', but that's okay because it's a - # new dict, even if called with 'replace(obj, **my_changes)'. - - if not _is_dataclass_instance(obj): - raise TypeError("replace() should be called on dataclass instances") - - # It's an error to have init=False fields in 'changes'. - # If a field is not in 'changes', read its value from the provided obj. - - for f in getattr(obj, _FIELDS).values(): - # Only consider normal fields or InitVars. - if f._field_type is _FIELD_CLASSVAR: - continue - - if not f.init: - # Error if this field is specified in changes. - if f.name in changes: - raise ValueError(f'field {f.name} is declared with ' - 'init=False, it cannot be specified with ' - 'replace()') - continue - - if f.name not in changes: - if f._field_type is _FIELD_INITVAR: - raise ValueError(f"InitVar {f.name!r} " - 'must be specified with replace()') - changes[f.name] = getattr(obj, f.name) - - # Create the new object, which calls __init__() and - # __post_init__() (if defined), using all of the init fields we've - # added and/or left in 'changes'. If there are values supplied in - # changes that aren't fields, this will correctly raise a - # TypeError. 
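For context, the `replace()` semantics spelled out in the docstring and comments above, including the `init=False` restriction, can be seen with the standard library. A small sketch with illustrative names:

```python
# replace() on a frozen dataclass, mirroring the comments above
# (standard library; illustrative sketch).
from dataclasses import dataclass, field, replace

@dataclass(frozen=True)
class C:
    x: int
    y: int = field(init=False, default=0)

c = C(1)
print(replace(c, x=3))  # C(x=3, y=0) -- a brand new frozen instance

try:
    replace(c, y=5)      # init=False fields cannot be supplied
except ValueError as err:
    print(err)
```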
- return obj.__class__(**changes) diff --git a/spock/backend/dataclass/args.py b/spock/backend/dataclass/args.py deleted file mode 100644 index 4962bdfd..00000000 --- a/spock/backend/dataclass/args.py +++ /dev/null @@ -1,290 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Handles the definitions of arguments types for Spock (backend: dataclass)""" - -from typing import List -from typing import Tuple -from typing import TypeVar - - -class BoolArg(int): - """Spock boolean argument - - Overloads the bool type for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock boolean arg - - *Args*: - - x: boolean value - - *Returns*: - - boolean variable - """ - return super().__new__(cls, bool(x)) - - -class IntArg(int): - """Spock integer argument - - Overloads the integer type for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock integer arg - - *Args*: - - x: integer value - - *Returns*: - - integer variable - """ - return super().__new__(cls, x) - - -class IntOptArg(int): - """Spock integer optional argument - - Overloads the integer type and makes the argument optional for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock optional integer arg - - *Args*: - - x: integer value - - *Returns*: - - integer variable - """ - return super().__new__(cls, x) - - -class FloatArg(float): - """Spock float argument - - Overloads the float type for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock float arg - - *Args*: - - x: float value - - *Returns*: - - float variable - """ - return super().__new__(cls, x) - - -class FloatOptArg(float): - """Spock float optional argument - - Overloads the float type and makes the argument optional for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock float optional arg - - *Args*: - - x: float value - - *Returns*: - - float variable - """ - return super().__new__(cls, x) - - -class StrArg(str): - """Spock string argument - - Overloads the string type for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock string arg - - *Args*: - - x: string value - - *Returns*: - - string variable - """ - return super().__new__(cls, x) - - -class StrOptArg(str): - """Spock string optional argument - - Overloads the string type and makes the argument optional for Spock - - """ - def __new__(cls, x): - """Creates a new instance of a Spock string optional arg - - *Args*: - - x: string value - - *Returns*: - - string variable - """ - return super().__new__(cls, x) - - -# Make a type var -__T = TypeVar('__T') - - -class ListArg(List[__T]): # pylint: disable=too-few-public-methods - """Spock list argument - - Overloads the list type for Spock - - """ - @staticmethod - def defaults(values: List): - """Creates a new instance of a Spock list arg - - *Args*: - - values: list values - - *Returns*: - - list variable - """ - return values - - -class ListOptArg(List[__T]): # pylint: disable=too-few-public-methods - """Spock list optional argument - - Overloads the list type and makes the argument optional for Spock - - """ - @staticmethod - def defaults(values: List): - """Creates a new instance of a Spock list optional arg - - *Args*: - - values: list values - - *Returns*: - - list variable - """ - return values - - -class TupleArg(Tuple[__T]): # pylint: disable=too-few-public-methods - """Spock tuple argument - - Overloads the tuple type for Spock - - """ - @staticmethod - def defaults(values: 
Tuple): - """Creates a new instance of a Spock tuple arg - - *Args*: - - values: tuple values - - *Returns*: - - tuple variable - """ - return values - - -class TupleOptArg(Tuple[__T]): # pylint: disable=too-few-public-methods - """Spock tuple optional argument - - Overloads the tuple type and makes the argument optional for Spock - - """ - @staticmethod - def defaults(values: Tuple): - """Creates a new instance of a Spock tuple optional arg - - *Args*: - - values: tuple values - - *Returns*: - - tuple variable - """ - return values - - -class ChoiceArg: # pylint: disable=too-few-public-methods - """Spock type of a choice set - - Requires default or value to be from a predefined set - - """ - def __init__(self, choice_set: List, default=None): - self.choice_set = choice_set - self.default = default - self.set_type = self._verify() - - def _verify(self): - """Validates the types within a set - - *Returns*: - - str name of type - - """ - type_set = {type(val) for val in self.choice_set} - if len(type_set) > 1: - raise TypeError(f'ChoiceArg must all be of the same type: {type_set}') - return list(type_set)[0] - - -class SavePathOptArg(str): - """Spock special key for saving the Spock config to file - - Defines a special key use to save the current Spock config to file - - """ - def __new__(cls, x): - return super().__new__(cls, x) - - -def boolean_string(bool_string): - """Map boolean string to boolean type - - *Args*: - - s: boolean string - - *Returns*: - - string True/False to actual bool type - - """ - if bool_string not in {'False', 'True'}: - raise ValueError('Not a valid boolean string') - return bool_string == 'True' diff --git a/spock/backend/dataclass/builder.py b/spock/backend/dataclass/builder.py deleted file mode 100644 index 0e94c905..00000000 --- a/spock/backend/dataclass/builder.py +++ /dev/null @@ -1,239 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Handles the building/saving of the configurations from the Spock config classes""" - -import sys -from typing import Generic -from spock.backend.dataclass._dataclasses import is_dataclass -from spock.backend.base import BaseBuilder -from spock.backend.dataclass.utils import cast -minor = sys.version_info.minor -if minor < 7: - from typing import GenericMeta as _GenericAlias -else: - from typing import _GenericAlias - - -class DataClassBuilder(BaseBuilder): - """Dataclass specific builder - - Class that handles building for the dataclass backend - - *Attributes* - - input_classes: list of input classes that link to a backend - _configs: None or List of configs to read from - _create_save_path: boolean to make the path to save to - _desc: description for the arg parser - _no_cmd_line: flag to force no command line reads - save_path: list of path(s) to save the configs to - - """ - def __init__(self, *args, configs=None, create_save_path=False, desc='', no_cmd_line=False, **kwargs): - super().__init__(*args, configs=configs, create_save_path=create_save_path, desc=desc, - no_cmd_line=no_cmd_line, **kwargs) - self._optional_types = {'FloatOptArg', 'IntOptArg', 'StrOptArg', - 'ListOptArg', 'TupleOptArg', 'SavePathOptArg'} - for arg in self.input_classes: - if not is_dataclass(arg): - raise TypeError('*arg inputs to ConfigArgBuilder must all be instances of dataclass') - - def print_usage_and_exit(self, msg=None, sys_exit=True): - print('USAGE:') - print(f' {sys.argv[0]} -c [--config] config1 [config2, config3, ...]') - print('CONFIG:') - for data_class in self.input_classes: - 
print(' ' + data_class.__name__ + ':') - dc_vars = vars(data_class) - for key, val in dc_vars.get('__dataclass_fields__').items(): - if type(val.type).__name__ == 'ChoiceArg': - type_name = type(val.type).__name__ - # Magic again -- check for type == type allows for delineation between basic types and list/tuples - elif type(val.type) == type: - type_name = val.type.__name__ - else: - type_name = val.type.__origin__.__name__ - type_name += '[' + self._extract_base_type(val.type).__name__ + ']' - print(f' {key}: {type_name}') - if msg is not None: - print(msg) - if sys_exit: - sys.exit(1) - - def _handle_arguments(self, args, class_obj): - fields = {} - # Access the vars - dc_vars = vars(class_obj) - # Get the dataclass name - dc_name = class_obj.__name__ - for key, val in dc_vars.get('__dataclass_fields__').items(): - # pure magic -- Lists, Tuples, etc. are not of type type (they are GenericAlias) so one must - # check against this before accessing the __name__ attribute which GenericAlias does not have - if type(val.type) == type: - if val.type.__name__ == 'SavePathOptArg': - self.save_path = args.get(key) - # Check if namespace is named and then check for key -- checking for local def - if dc_name in args and key in args[dc_name]: - fields[key] = self._check_function(args[dc_name][key], val) - # If not named then just check for keys -- checking for global def - elif key in args: - fields[key] = self._check_function(args[key], val) - # If not found then fallback on defaults if defined - else: - default, found_default = self.check_for_defaults(val) - if not found_default: - if type(val.type) == type and val.type.__name__ in self._optional_types: - fields[key] = None - continue - elif type(val.type) != type and val.type.__origin__.__name__ in self._optional_types: - fields[key] = None - continue - elif 'Bool' in val.type.__name__: - fields[key] = False - continue - else: - raise ValueError(f'Required value {dc_name}.{key}: no default set or value defined in file') - fields[key] = self._check_function(default, val) - return fields - - @staticmethod - def _int_to_float(inst, target_type): - """Converts instance int to float - - *Args*: - inst: instance - target_type: target type - - *Returns*: - inst: instance type cast into float - - """ - if target_type == float and type(inst) == int: - inst = float(inst) - return inst - - def _check_function(self, x, val): - """Wrapper around the valid type check with a cast - - *Args*: - - x: instance - val: value - - *Returns*: - - casted value - - """ - return cast(self._check_valid_type(x, val)) - - def _check_valid_type(self, instance, val): - """Checks that the instance is of the correct type - - *Args*: - - instance: object instance - val: value - - *Returns*: - - instance: object instance - - """ - if type(val.type) == type: - # pure magic -- Lists, Tuples, etc. are not of type type (they are GenericAlias) so one must - # check against this before accessing the __name__ attribute which GenericAlias does not have - # Get the base variable type - var_type = val.type.__bases__[0] - instance = self._int_to_float(instance, var_type) - valid = isinstance(instance, var_type) - if not valid: - raise ValueError(f'Wrong type ({type(instance)}) passed to {val.name}. Require {var_type}') - elif type(val.type).__name__ == 'ChoiceArg': - instance = self._check_choice_type(val.type, instance) - var_type = val.type.set_type - valid = isinstance(instance, var_type) - if not valid: - raise ValueError(f'Wrong type ({type(instance)}) passed to {val.name}. 
Require {var_type}') - else: - # It's an iterable - check it's a list (only iterable provided by markup) - var_type = self._extract_base_type(val.type) - iter_name = val.type.__origin__.__name__ - valid = isinstance(instance, (tuple, list)) - if not valid: - raise ValueError( - f'Wrong type ({type(instance).__name__}) passed to {val.name}.\n' - f'Require {iter_name}[{var_type.__name__}]') - instance = tuple((self._int_to_float(i, var_type) for i in instance)) - if len(instance) > 0: - valid = isinstance(instance[0], var_type) - if not valid: - raise ValueError( - f'Wrong type (List[{type(instance[0]).__name__}]) passed to {val.name}.\n' - f'Require {iter_name}[{var_type.__name__}]') - return instance - - @staticmethod - def _extract_base_type(given_type): - """Extracts the type from a _GenericAlias - - *Args*: - - tp: type - - *Returns*: - - tp: type of generic type - """ - if isinstance(given_type, _GenericAlias) and given_type is not Generic: - return given_type.__args__[0] # assume we only have generic types with a single argument - return given_type - - def check_for_defaults(self, val): - """Checks for default values - - *Args*: - - default: default value - default_factory: default factory - - *Returns*: - - default_val: value of default - found_default: boolean if default found - - """ - found_default = False - default_val = None - if type(val.type).__name__ == 'ChoiceArg': - default_val = self._check_choice_type(val.type, val.type.default) - found_default = True - elif type(val.default).__name__ != '_MISSING_TYPE': - default_val = val.default - found_default = True - elif type(val.default_factory).__name__ != '_MISSING_TYPE': - default_val = val.default_factory() - found_default = True - return default_val, found_default - - @staticmethod - def _check_choice_type(choice_set, val): - """Checks the type and set of a ChoiceArg value - - *Args*: - - choice_set: ChoiceArg instance - val: value to set - val_name: name of the parameter - - *Returns*: - - val: value to set - - """ - if val not in choice_set.choice_set: - raise ValueError(f'{val} is not within the set of defined choices {choice_set.choice_set}') - return val diff --git a/spock/backend/dataclass/config.py b/spock/backend/dataclass/config.py deleted file mode 100644 index 1ce54aea..00000000 --- a/spock/backend/dataclass/config.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Creates the spock config decorator that wraps dataclasses - now an adapter to attr""" - -import enum -from inspect import isfunction -from typing import List -from typing import Optional -from typing import Tuple -import attr -from spock.backend.attr.typed import katra -from spock.backend.attr.typed import SavePath -from spock.backend.dataclass._dataclasses import dataclass - - -def _enum_adapter(key, obj): - """Adapter for ChoiceSet to Enum - - *Args*: - - key: name of enum - obj: values in the ChoiceSet - - Returns: - - enum_obj: constructed enum object - - """ - enum_set = {('option_' + str(idx)): val for idx, val in enumerate(obj.choice_set)} - enum_obj = enum.Enum(key, enum_set) - return enum_obj - - -def _list_adapter(_, obj): - """Adapter for List types - - *Args*: - - _: unused - obj: old list type - - Returns: - - List type - - """ - return List[obj.__args__[0]] - - -def _list_optional_adapter(_, obj): - """Adapter for Optional List types - - *Args*: - - _: unused - obj: old list type - - Returns: - - Optional List type - - """ - return 
Optional[List[obj.__args__[0]]] - - -def _tuple_adapter(_, obj): - """Adapter for Tuple types - - *Args*: - - _: unused - obj: old tuple type - - Returns: - - Tuple type - - """ - return Tuple[obj.__args__[0]] - - -def _tuple_optional_adapter(_, obj): - """Adapter for Optional Tuple types - - *Args*: - - _: unused - obj: old tuple type - - Returns: - - Optional Tuple type - - """ - return Optional[Tuple[obj.__args__[0]]] - - -TYPE_MAP = { - 'BoolArg': bool, - 'IntArg': int, - 'IntOptArg': Optional[int], - 'FloatArg': float, - 'FloatOptArg': Optional[float], - 'StrArg': str, - 'StrOptArg': Optional[str], - 'ListArg': _list_adapter, - 'ListOptArg': _list_optional_adapter, - 'TupleArg': _tuple_adapter, - 'TupleOptArg': _tuple_optional_adapter, - 'SavePathOptArg': SavePath, - 'ChoiceArg': _enum_adapter -} - - -def spock_legacy_dataclass(*args, **kwargs): - """Wrapper to dataclass that forms the base of Spock configs - *Args*: - *args: - **kwargs: - *Returns*: - frozen dataclass: frozen version of the dataclass - """ - kwargs['frozen'] = True - return dataclass(*args, **kwargs) - - -def spock_dataclass(*args, **kwargs): - """Wrapper to dataclass that forms the base of Spock configs - - *Args*: - - *args: - **kwargs: - - *Returns*: - - frozen dataclass: frozen version of the dataclass - """ - cls = args[0] - # Use the adaptor to convert into the attr class - attrs_dict, bases = _adapter(cls=cls) - return attr.make_class(name=cls.__name__, bases=bases, attrs=attrs_dict, kw_only=True, frozen=True) - - -def _adapter(cls): - """Takes a class an adapts the dataclass backend to the attr backend - - Maps the old interface and backend of dataclasses to the new interface and backend of attrs. Based on a type map - dictionary and mapping functions it provides the ability to 1:1 map between inferfaces. 
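The `TYPE_MAP` above mixes plain type entries with adapter functions for the generic and choice types; the dispatch is the `isfunction` check seen further down in `_adapter`. A minimal sketch of that pattern, with hypothetical stand-in keys rather than the full legacy mapping:

```python
# Sketch of the TYPE_MAP dispatch used by _adapter: plain entries map
# one-to-one, callable entries rebuild the new type from the old
# annotation (stand-in keys; not the full spock mapping).
from inspect import isfunction
from typing import List, Optional

def _list_adapter(_, obj):
    # Rebuild List[...] from the legacy generic's single type argument
    return List[obj.__args__[0]]

TYPE_MAP = {'IntArg': int, 'IntOptArg': Optional[int], 'ListArg': _list_adapter}

def resolve(key, annotation):
    typed = TYPE_MAP.get(key)
    # Generic adapters need the old annotation to extract its argument
    return typed(key, annotation) if isfunction(typed) else typed

print(resolve('IntOptArg', None))       # typing.Optional[int]
print(resolve('ListArg', List[float]))  # typing.List[float]
```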
- - *Args*: - - cls: input class - - *Returns*: - - attrs_dict: a dictionary of current attributes to make - bases: any base classes to inherit from - - """ - # Make a blank attrs dict for new attrs - attrs_dict = {} - # We are mapping to the attr backend thus we need to get the parent classes for inheritance - # We do this by using the mro and grabbing anything that is not the first and last indices in the list and wrapping - # it into a tuple - if len(cls.__mro__[1:-1]) > 0: - bases = tuple(cls.__mro__[1:-1]) - # if there are not parents pass a blank tuple - else: - bases = () - if hasattr(cls, '__annotations__'): - for k, v in cls.__annotations__.items(): - # If the cls has the attribute then a default was set - if hasattr(cls, k): - default = getattr(cls, k) - elif hasattr(v, 'default'): - default = getattr(v, 'default') - else: - default = None - if hasattr(v, '__name__'): - typed = TYPE_MAP.get(v.__name__) - elif hasattr(v, '__origin__'): - typed = TYPE_MAP.get(v.__origin__.__name__) - else: - typed = TYPE_MAP.get(type(v).__name__) - if isfunction(typed): - typed = typed(k, v) - attrs_dict.update({k: katra(typed=typed, default=default)}) - return attrs_dict, bases diff --git a/spock/backend/dataclass/payload.py b/spock/backend/dataclass/payload.py deleted file mode 100644 index 60a341f2..00000000 --- a/spock/backend/dataclass/payload.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Handles payloads from markup files""" - -from itertools import chain -from spock.backend.base import BasePayload - - -class DataClassPayload(BasePayload): - """Handles building the payload for dataclass backend - - This class builds out the payload from config files of multiple types. 
It handles various - file types and also composition of config files via a recursive calls - - *Attributes*: - - _loaders: maps of each file extension to the loader class - - """ - def __init__(self): - super().__init__() - - def __call__(self, *args, **kwargs): - """Call to allow self chaining - - *Args*: - - *args: - **kwargs: - - *Returns*: - - Payload: instance of self - - """ - return DataClassPayload() - - @staticmethod - def _update_payload(base_payload, input_classes, payload): - # Get basic args - dc_fields = {dc.__name__: list(vars(dc).get('__dataclass_fields__').keys()) for dc in input_classes} - # Get the choice args and insert them - # dc_fields = self._handle_choices(dc_fields, input_classes) - for keys, values in base_payload.items(): - # check if the keys, value pair is expected by a dataclass - if keys != 'config': - # Dict infers that we are overriding a global setting in a specific config - if isinstance(values, dict): - # we're in a namespace - # Check for incorrect specific override of global def - if keys not in dc_fields: - raise TypeError(f'Referring to a class space {keys} that is undefined') - for i_keys in values.keys(): - if i_keys not in dc_fields[keys]: - raise ValueError(f'Provided an unknown argument named {keys}.{i_keys}') - else: - # Chain all the values from multiple spock classes into one list - if keys not in list(chain(*dc_fields.values())): - raise ValueError(f'Provided an unknown argument named {keys}') - if keys in payload and isinstance(values, dict): - payload[keys].update(values) - else: - payload[keys] = values - return payload diff --git a/spock/backend/dataclass/saver.py b/spock/backend/dataclass/saver.py deleted file mode 100644 index d1044e88..00000000 --- a/spock/backend/dataclass/saver.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Handles prepping and saving the Spock config""" - -from spock.backend.base import BaseSaver -from spock.backend.dataclass._dataclasses import asdict - - -class DataClassSaver(BaseSaver): - """Base class for saving configs for the dataclass backend - - Contains methods to build a correct output payload and then writes to file based on the file - extension - - *Attributes*: - - _writers: maps file extension to the correct i/o handler - - """ - def __init__(self): - super().__init__() - - def __call__(self, *args, **kwargs): - return DataClassSaver() - - def _clean_up_values(self, payload, extra_info, file_extension): - out_dict = {} - for key, val in vars(payload).items(): - # Append comment tag to the base class and convert the spock class to a dict - if file_extension == '.json': - out_dict.update({key: asdict(val)}) - else: - out_dict.update({('# ' + key): asdict(val)}) - # Convert values - clean_dict = self._clean_output(out_dict, extra_info) - return clean_dict diff --git a/spock/backend/dataclass/utils.py b/spock/backend/dataclass/utils.py deleted file mode 100644 index cb97ead4..00000000 --- a/spock/backend/dataclass/utils.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -"""Dataclass utility functions for Spock""" - -from copy import copy -from typing import List -from typing import Tuple -from spock.backend.dataclass._dataclasses import field - - -def cast(x): - """Recasts lists as tuples - - *Args*: - - x: object - - *Returns*: - - x: object or object recast as Tuple - """ - if isinstance(x, list): - x = tuple(x) - return x - - -def 
_def_list(values: List): - """Creates a list of default values for List datatype that is mutable - - *Args*: - - values: default list - - Returns: - - list built from default factory - - """ - return field(default_factory=lambda: copy(values)) - - -def _def_tuple(values: Tuple): - """Creates a tuple of default values for Tuple datatype that is mutable - - *Args*: - - values: default tuple - - Returns: - - tuple built from default factory - - """ - return field(default_factory=lambda: copy(values)) diff --git a/spock/builder.py b/spock/builder.py index ca70398c..9a237dd3 100644 --- a/spock/builder.py +++ b/spock/builder.py @@ -39,7 +39,7 @@ def __init__(self, *args, configs=None, create_save_path=False, desc='', no_cmd_ self._builder_obj = backend.get('builder')( *args, configs=configs, create_save_path=create_save_path, desc=desc, no_cmd_line=no_cmd_line, **kwargs) self._payload_obj = backend.get('payload') - self._saver_obj = backend.get('saver') + self._saver_obj = backend.get('saver')() try: self._dict_args = self._get_payload() self._arg_namespace = self._builder_obj.generate(self._dict_args) @@ -160,5 +160,5 @@ def save(self, user_specified_path=None, extra_info=True, file_extension='.yaml' raise ValueError('Save did not receive a valid path from: (1) markup file(s) or (2) ' 'the keyword arg user_specified_path') # Call the saver class and save function - self._saver_obj().save(self._arg_namespace, save_path, self._create_save_path, extra_info, file_extension) + self._saver_obj.save(self._arg_namespace, save_path, self._create_save_path, extra_info, file_extension) return self diff --git a/spock/config.py b/spock/config.py index 5e17d81c..1c91ebf5 100644 --- a/spock/config.py +++ b/spock/config.py @@ -3,13 +3,9 @@ # Copyright 2019 FMR LLC # SPDX-License-Identifier: Apache-2.0 -"""Creates the spock config decorator that wraps dataclasses""" +"""Creates the spock config decorator that wraps attrs""" -from spock.backend.dataclass.config import spock_dataclass from spock.backend.attr.config import spock_attr -# Dataclasses for legacy support -- now wraps attr via an adapter -spock_config = spock_dataclass - # Simplified decorator for attrs spock = spock_attr diff --git a/spock/handlers.py b/spock/handlers.py index b67aca08..a767543c 100644 --- a/spock/handlers.py +++ b/spock/handlers.py @@ -9,7 +9,9 @@ from abc import abstractmethod import json import re +from spock import __version__ import toml +from warnings import warn import yaml @@ -34,12 +36,13 @@ def load(self, path): raise NotImplementedError @abstractmethod - def save(self, out_dict, path): + def save(self, out_dict, info_dict, path): """Write function for file type *Args*: out_dict: payload to write + info_dict: info payload to write path: path to write out *Returns*: @@ -47,6 +50,28 @@ def save(self, out_dict, path): """ raise NotImplementedError + @staticmethod + def write_extra_info(path, info_dict): + """Writes extra info to commented newlines + + *Args*: + + path: path to write out + info_dict: info payload to write + + *Returns*: + + """ + # Write the commented info as new lines + with open(path.name, 'w+') as fid: + # Write a spock header + fid.write(f'# Spock Version: {__version__}\n') + # Write info dict if not None + if info_dict is not None: + for k, v in info_dict.items(): + fid.write(f'{k}: {v}\n') + fid.write('\n') + class YAMLHandler(Handler): """YAML class for loading YAML config files @@ -86,20 +111,24 @@ def load(self, path): base_payload = yaml.safe_load(file_contents) return base_payload - def save(self, 
out_dict, path): + def save(self, out_dict, info_dict, path): """Write function for YAML type *Args*: out_dict: payload to write + info_dict: info payload to write path: path to write out *Returns*: """ + # First write the commented info + self.write_extra_info(path=path, info_dict=info_dict) # Remove aliases in YAML dump yaml.Dumper.ignore_aliases = lambda *args: True - yaml.dump(out_dict, path, default_flow_style=False) + with open(path.name, 'a') as yaml_fid: + yaml.safe_dump(out_dict, yaml_fid, default_flow_style=False) class TOMLHandler(Handler): @@ -123,18 +152,22 @@ def load(self, path): base_payload = toml.load(path) return base_payload - def save(self, out_dict, path): + def save(self, out_dict, info_dict, path): """Write function for TOML type *Args*: out_dict: payload to write + info_dict: info payload to write path: path to write out *Returns*: """ - toml.dump(out_dict, path) + # First write the commented info + self.write_extra_info(path=path, info_dict=info_dict) + with open(path.name, 'a') as toml_fid: + toml.dump(out_dict, toml_fid) class JSONHandler(Handler): @@ -159,16 +192,20 @@ def load(self, path): base_payload = json.load(json_fid) return base_payload - def save(self, out_dict, path): + def save(self, out_dict, info_dict, path): """Write function for JSON type *Args*: out_dict: payload to write + info_dict: info payload to write path: path to write out *Returns*: """ - with open(path.name, 'w') as json_fid: + if info_dict is not None: + warn('JSON does not support comments and thus cannot save extra info to file... removing extra info') + info_dict = None + with open(path.name, 'a') as json_fid: json.dump(out_dict, json_fid, indent=4, separators=(',', ': ')) diff --git a/spock/utils.py b/spock/utils.py index cf7425dc..681ae83f 100644 --- a/spock/utils.py +++ b/spock/utils.py @@ -8,6 +8,7 @@ import ast from enum import EnumMeta import os +import socket import subprocess import sys from time import localtime @@ -21,28 +22,6 @@ from typing import _GenericAlias -def convert_save_dict(clean_inner_dict, inner_val, inner_key): - """Convert tuples in save dictionary - - *Args*: - - clean_inner_dict: inner dictionary that is clean - inner_val: inner value - inner_key: inner key - - *Returns*: - - clean_inner_dict: updated with cleaned values - - """ - # Convert tuples to lists so they get written correctly - if isinstance(inner_val, tuple): - clean_inner_dict.update({inner_key: list(inner_val)}) - elif inner_val is not None: - clean_inner_dict.update({inner_key: inner_val}) - return clean_inner_dict - - def make_argument(arg_name, arg_type, parser): """Make argparser argument based on type @@ -80,18 +59,18 @@ def _handle_generic_type_args(val): return ast.literal_eval(val) -def add_info(out_dict): +def add_info(): """Adds extra information to the output dictionary *Args*: - out_dict: output dictionary *Returns*: out_dict: output dictionary """ - out_dict = add_date_info(out_dict) + out_dict = {} + out_dict = add_generic_info(out_dict) out_dict = add_repo_info(out_dict) return out_dict @@ -126,8 +105,8 @@ def add_repo_info(out_dict): """ try: # Assume we are working out of a repo - repo = git.Repo(os.getcwd()) - # Check if we are really in a detached head state as this will fail + repo = git.Repo(os.getcwd(), search_parent_directories=True) + # Check if we are really in a detached head state as later info will fail if we are if minor < 7: head_result = subprocess.run('git rev-parse --abbrev-ref --symbolic-full-name HEAD', stdout=subprocess.PIPE, shell=True, check=False) @@ 
-137,23 +116,23 @@ def add_repo_info(out_dict): if head_result.stdout.decode().rstrip('\n') == 'HEAD': out_dict = make_blank_git(out_dict) else: - out_dict.update({'# Git BRANCH': repo.active_branch.name}) - out_dict.update({'# Git COMMIT SHA': repo.head.object.hexsha}) - if len(repo.untracked_files) > 0 or len(repo.head.commit.diff(None)) > 0: + out_dict.update({'# Git Branch': repo.active_branch.name}) + out_dict.update({'# Git Commit': repo.active_branch.commit.hexsha}) + out_dict.update({'# Git Date': repo.active_branch.commit.committed_datetime}) + if len(repo.untracked_files) > 0 or len(repo.active_branch.commit.diff(None)) > 0: git_status = 'DIRTY' else: git_status = 'CLEAN' - out_dict.update({'# Git STATUS': git_status}) - out_dict.update({'# Git ORIGIN': repo.remotes.origin.url}) + out_dict.update({'# Git Status': git_status}) + out_dict.update({'# Git Origin': repo.active_branch.commit.repo.remotes.origin.url}) except git.InvalidGitRepositoryError: # But it's okay if we are not out_dict = make_blank_git(out_dict) - return out_dict -def add_date_info(out_dict): - """Adds date information to the output dictionary +def add_generic_info(out_dict): + """Adds date, fqdn information to the output dictionary *Args*: @@ -163,10 +142,62 @@ out_dict: output dictionary """ - out_dict.update({'# Run Date': strftime('%Y_%m_%d_%H_%M_%S', localtime())}) + out_dict.update({'# Machine FQDN': socket.getfqdn()}) + out_dict.update({'# Python Executable': sys.executable}) + out_dict.update({'# Python Version': f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'}) + out_dict.update({'# Python Script': os.path.realpath(sys.argv[0])}) + out_dict.update({'# Run Date': strftime('%Y-%m-%d', localtime())}) + out_dict.update({'# Run Time': strftime('%H:%M:%S', localtime())}) + # Make a best effort to determine if run in a container + out_dict.update({'# Run w/ Docker': str(_maybe_docker())}) + # Make a best effort to determine if run in a container via k8s + out_dict.update({'# Run w/ Kubernetes': str(_maybe_k8s())}) + return out_dict +def _maybe_docker(cgroup_path="/proc/self/cgroup"): + """Make a best effort to determine if run in a docker container + + *Args*: + + cgroup_path: path to cgroup file + + Returns: + + boolean of best effort docker determination + + """ + # A few options seem to be at play here: + # 1. Check for /.dockerenv -- docker should create this in any container + bool_env = os.path.exists('/.dockerenv') + # 2. Check /proc/self/cgroup for "docker" + # https://stackoverflow.com/a/48710609 + bool_cgroup = os.path.isfile(cgroup_path) and any("docker" in line for line in open(cgroup_path)) + return bool_env or bool_cgroup + + +def _maybe_k8s(cgroup_path="/proc/self/cgroup"): + """Make a best effort to determine if run in a container via k8s + + *Args*: + + cgroup_path: path to cgroup file + + Returns: + + boolean of best effort k8s determination + + """ + # A few options seem to be at play here: + # 1. Check for KUBERNETES_SERVICE_HOST -- kubelet should add this to every running pod + bool_env = os.environ.get("KUBERNETES_SERVICE_HOST") is not None + # 2. 
Similar to docker check /proc/self/cgroup for "kubepods" + # https://stackoverflow.com/a/48710609 + bool_cgroup = os.path.isfile(cgroup_path) and any("kubepods" in line for line in open(cgroup_path)) + return bool_env or bool_cgroup + + def deep_payload_update(source, updates): """Deeply updates a dictionary @@ -214,7 +245,7 @@ def check_payload_overwrite(payload, updates, configs, overwrite=''): current_payload = {} if payload.get(k) is None else payload.get(k) check_payload_overwrite(current_payload, v, configs, overwrite=overwrite) else: - if k in payload.keys(): + if k in payload: warn(f'Overriding an already set parameter {overwrite + k} from {configs}\n' f'Be aware that value precedence is set by the order of the config files (last to load)...', SyntaxWarning) diff --git a/tests/attr/attr_configs_test.py b/tests/attr/attr_configs_test.py index ac7eb9ff..79493165 100644 --- a/tests/attr/attr_configs_test.py +++ b/tests/attr/attr_configs_test.py @@ -76,13 +76,13 @@ class TypeConfig: # Required List -- Bool list_p_bool: List[bool] # Required Tuple -- Float - tuple_p_float: Tuple[float] + tuple_p_float: Tuple[float, float] # Required Tuple -- Int - tuple_p_int: Tuple[int] + tuple_p_int: Tuple[int, int] # Required Tuple -- Str - tuple_p_str: Tuple[str] + tuple_p_str: Tuple[str, str] # Required Tuple -- Bool - tuple_p_bool: Tuple[bool] + tuple_p_bool: Tuple[bool, bool] # Required choice -- Str choice_p_str: StrChoice # Required choice -- Int @@ -124,13 +124,13 @@ class TypeOptConfig: # Optional List default not set list_p_opt_no_def_bool: Optional[List[bool]] # Optional Tuple default not set - tuple_p_opt_no_def_float: Optional[Tuple[float]] + tuple_p_opt_no_def_float: Optional[Tuple[float, float]] # Optional Tuple default not set - tuple_p_opt_no_def_int: Optional[Tuple[int]] + tuple_p_opt_no_def_int: Optional[Tuple[int, int]] # Optional Tuple default not set - tuple_p_opt_no_def_str: Optional[Tuple[str]] + tuple_p_opt_no_def_str: Optional[Tuple[str, str]] # Optional Tuple default not set - tuple_p_opt_no_def_bool: Optional[Tuple[bool]] + tuple_p_opt_no_def_bool: Optional[Tuple[bool, bool]] # Required choice -- Str choice_p_opt_no_def_str: Optional[StrChoice] # Required list of choice -- Str diff --git a/tests/attr/test_all_attr.py b/tests/attr/test_all_attr.py index ae786aa0..cde10a6e 100644 --- a/tests/attr/test_all_attr.py +++ b/tests/attr/test_all_attr.py @@ -324,6 +324,16 @@ def test_choice_raise(self, monkeypatch): ConfigArgBuilder(ChoiceFail, desc='Test Builder') +class TestTupleRaises: + """Check that Tuple lengths are being enforced correctly""" + def test_tuple_raise(self, monkeypatch): + with monkeypatch.context() as m: + m.setattr(sys, 'argv', ['', '--config', + './tests/conf/yaml/tuple.yaml']) + with pytest.raises(ValueError): + ConfigArgBuilder(TypeConfig, desc='Test Builder') + + class TestOverrideRaise: """Checks that override of a specific class variable is failing gracefully""" def test_override_raise(self, monkeypatch): diff --git a/tests/conf/legacy/json/test.json b/tests/conf/legacy/json/test.json deleted file mode 100644 index 62b892f3..00000000 --- a/tests/conf/legacy/json/test.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "bool_p_set": true, - "int_p": 10, - "float_p": 1e1, - "string_p": "Spock", - "list_p_float": [10.0, 20.0], - "list_p_int": [10, 20], - "list_p_str": ["Spock", "Package"], - "list_p_bool": [true, false], - "tuple_p_float": [10.0, 20.0], - "tuple_p_int": [10, 20], - "tuple_p_str": ["Spock", "Package"], - "tuple_p_bool": [true, false], - "choice_p_str": 
"option_1", - "choice_p_int": 10, - "choice_p_float": 10.0, - "TypeConfig": { - "float_p": 12.0 - } -} \ No newline at end of file diff --git a/tests/conf/legacy/json/test_include.json b/tests/conf/legacy/json/test_include.json deleted file mode 100644 index c1d77f06..00000000 --- a/tests/conf/legacy/json/test_include.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "config": ["test.json"], - "int_p": 9 -} \ No newline at end of file diff --git a/tests/conf/legacy/toml/test.toml b/tests/conf/legacy/toml/test.toml deleted file mode 100644 index aaddb45e..00000000 --- a/tests/conf/legacy/toml/test.toml +++ /dev/null @@ -1,34 +0,0 @@ -# conf file for all TOML tests -### Required or Boolean Base Types ### -bool_p_set = true -# Required Int -int_p = 10 -# Required Float -float_p = 1e1 -# Required String -string_p = "Spock" -# Required List -- Float -list_p_float = [10.0, 20.0] -# Required List -- Int -list_p_int = [10, 20] -# Required List -- Str -list_p_str = ["Spock", "Package"] -# Required List -- Bool -list_p_bool = [true, false] -# Required Tuple -- Float -tuple_p_float = [10.0, 20.0] -# Required Tuple -- Int -tuple_p_int = [10, 20] -# Required Tuple -- Str -tuple_p_str = ["Spock", "Package"] -# Required Tuple -- Bool -tuple_p_bool = [true, false] -# Required Choice -- Str type -choice_p_str = 'option_1' -# Required Choice -- Int -choice_p_int = 10 -# Required Choice -- Str -choice_p_float = 10.0 -# Overrride general definition -[TypeConfig] - float_p = 12.0 \ No newline at end of file diff --git a/tests/conf/legacy/toml/test_include.toml b/tests/conf/legacy/toml/test_include.toml deleted file mode 100644 index 8e2af2c2..00000000 --- a/tests/conf/legacy/toml/test_include.toml +++ /dev/null @@ -1,5 +0,0 @@ -# additional conf file to test inheritance -# include another file -config = ["test.toml"] -# override a paramter -int_p = 9 \ No newline at end of file diff --git a/tests/conf/legacy/yaml/choice.yaml b/tests/conf/legacy/yaml/choice.yaml deleted file mode 100644 index eb8bbc2e..00000000 --- a/tests/conf/legacy/yaml/choice.yaml +++ /dev/null @@ -1,2 +0,0 @@ -# Required Choice -- Str -choice_p_str: option_3 \ No newline at end of file diff --git a/tests/conf/legacy/yaml/inherited.yaml b/tests/conf/legacy/yaml/inherited.yaml deleted file mode 100644 index 91d9bd9f..00000000 --- a/tests/conf/legacy/yaml/inherited.yaml +++ /dev/null @@ -1,32 +0,0 @@ -# conf file for all YAML tests -### Required or Boolean Base Types ### -# Boolean - Set ---bool_p_set -# Required Int -int_p: 10 -# Required Float -float_p: 1e1 -# Required String -string_p: Spock -# Required List -- Float -list_p_float: [10.0, 20.0] -# Required List -- Int -list_p_int: [10, 20] -# Required List -- Str -list_p_str: [Spock, Package] -# Required List -- Bool -list_p_bool: [True, False] -# Required Tuple -- Float -tuple_p_float: [10.0, 20.0] -# Required Tuple -- Int -tuple_p_int: [10, 20] -# Required Tuple -- Str -tuple_p_str: [Spock, Package] -# Required Tuple -- Bool -tuple_p_bool: [True, False] -# Required Choice -- Str -choice_p_str: option_1 -# Required Choice -- Int -choice_p_int: 10 -# Required Choice -- Str -choice_p_float: 10.0 \ No newline at end of file diff --git a/tests/conf/legacy/yaml/test.yaml b/tests/conf/legacy/yaml/test.yaml deleted file mode 100644 index dbbb239d..00000000 --- a/tests/conf/legacy/yaml/test.yaml +++ /dev/null @@ -1,35 +0,0 @@ -# conf file for all YAML tests -### Required or Boolean Base Types ### -# Boolean - Set -bool_p_set: true -# Required Int -int_p: 10 -# Required Float -float_p: 1e1 -# Required 
String -string_p: Spock -# Required List -- Float -list_p_float: [10.0, 20.0] -# Required List -- Int -list_p_int: [10, 20] -# Required List -- Str -list_p_str: [Spock, Package] -# Required List -- Bool -list_p_bool: [True, False] -# Required Tuple -- Float -tuple_p_float: [10.0, 20.0] -# Required Tuple -- Int -tuple_p_int: [10, 20] -# Required Tuple -- Str -tuple_p_str: [Spock, Package] -# Required Tuple -- Bool -tuple_p_bool: [True, False] -# Required Choice -- Str -choice_p_str: option_1 -# Required Choice -- Int -choice_p_int: 10 -# Required Choice -- Str -choice_p_float: 10.0 -# Override general definition -TypeConfig: - float_p: 12.0 diff --git a/tests/conf/legacy/yaml/test_include.yaml b/tests/conf/legacy/yaml/test_include.yaml deleted file mode 100644 index a633eed1..00000000 --- a/tests/conf/legacy/yaml/test_include.yaml +++ /dev/null @@ -1,5 +0,0 @@ -# additional conf file to test inheritance -# include another file -config: [test.yaml] -# override a paramter -int_p: 9 diff --git a/tests/conf/legacy/yaml/test_incorrect.yaml b/tests/conf/legacy/yaml/test_incorrect.yaml deleted file mode 100644 index 85d72115..00000000 --- a/tests/conf/legacy/yaml/test_incorrect.yaml +++ /dev/null @@ -1,36 +0,0 @@ -# conf file for all YAML tests -### Required or Boolean Base Types ### -# Boolean - Set ---bool_p_set -failure: 10.0 -# Required Int -int_p: 10 -# Required Float -float_p: 1e1 -# Required String -string_p: Spock -# Required List -- Float -list_p_float: [10.0, 20.0] -# Required List -- Int -list_p_int: [10, 20] -# Required List -- Str -list_p_str: [Spock, Package] -# Required List -- Bool -list_p_bool: [True, False] -# Required Tuple -- Float -tuple_p_float: [10.0, 20.0] -# Required Tuple -- Int -tuple_p_int: [10, 20] -# Required Tuple -- Str -tuple_p_str: [Spock, Package] -# Required Tuple -- Bool -tuple_p_bool: [True, False] -# Required Choice -- Str -choice_p_str: option_1 -# Required Choice -- Int -choice_p_int: 10 -# Required Choice -- Str -choice_p_float: 10.0 -# Override general definition -TypeConfig: - float_p: 12.0 diff --git a/tests/conf/yaml/tuple.yaml b/tests/conf/yaml/tuple.yaml new file mode 100644 index 00000000..3874ccd5 --- /dev/null +++ b/tests/conf/yaml/tuple.yaml @@ -0,0 +1,5 @@ +# include another file +config: [test.yaml] +# Override with incorrect tuple length +# Required Tuple -- Int +tuple_p_int: [10] \ No newline at end of file diff --git a/tests/dataclass/adapter_configs_test.py b/tests/dataclass/adapter_configs_test.py deleted file mode 100644 index f82dde4f..00000000 --- a/tests/dataclass/adapter_configs_test.py +++ /dev/null @@ -1,150 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -from spock.args import * -from spock.config import spock_config - - -@spock_config -class ChoiceFail: - """This creates a test config to fail on an out of set choice""" - # Required choice -- Str - choice_p_str: ChoiceArg(choice_set=['option_1', 'option_2']) - - -@spock_config -class TypeConfig: - """This creates a test Spock config of all supported variable types as required parameters""" - # Special Type -- Saves Generated Configs to this path - save_path: SavePathOptArg - # Boolean - Set - bool_p_set: BoolArg - # Boolean - Un Set - bool_p: BoolArg - # Required Int - int_p: IntArg - # Required Float - float_p: FloatArg - # Required String - string_p: StrArg - # Required List -- Float - list_p_float: ListArg[float] - # Required List -- Int - list_p_int: ListArg[int] - # Required List -- Str - list_p_str: ListArg[str] - # 
Required List -- Bool - list_p_bool: ListArg[bool] - # Required Tuple -- Float - tuple_p_float: TupleArg[float] - # Required Tuple -- Int - tuple_p_int: TupleArg[int] - # Required Tuple -- Str - tuple_p_str: TupleArg[str] - # Required Tuple -- Bool - tuple_p_bool: TupleArg[bool] - # Required choice -- Str - choice_p_str: ChoiceArg(choice_set=['option_1', 'option_2']) - # Required choice -- Int - choice_p_int: ChoiceArg(choice_set=[10, 20]) - # Required choice -- Float - choice_p_float: ChoiceArg(choice_set=[10.0, 20.0]) - - -@spock_config -class TypeOptConfig: - """This creates a test Spock config of all supported variable types as optional parameters""" - # DEFAULTS NOT SET # - # Optional Int default not set - int_p_opt_no_def: IntOptArg - # Optional Float default not set - float_p_opt_no_def: FloatOptArg - # Optional String default not set - string_p_opt_no_def: StrOptArg - # Optional List default not set - list_p_opt_no_def_float: ListOptArg[float] - # Optional List default not set - list_p_opt_no_def_int: ListOptArg[int] - # Optional List default not set - list_p_opt_no_def_str: ListOptArg[str] - # Optional List default not set - list_p_opt_no_def_bool: ListOptArg[bool] - # Optional Tuple default not set - tuple_p_opt_no_def_float: TupleOptArg[float] - # Optional Tuple default not set - tuple_p_opt_no_def_int: TupleOptArg[int] - # Optional Tuple default not set - tuple_p_opt_no_def_str: TupleOptArg[str] - # Optional Tuple default not set - tuple_p_opt_no_def_bool: TupleOptArg[bool] - # Additional dummy argument - int_p: IntOptArg - - -@spock_config -class TypeDefaultConfig: - """This creates a test Spock config of all supported variable types as required parameters and falls back - to defaults - """ - # Boolean - Set - bool_p_set_def: BoolArg = True - # Required Int - int_p_def: IntArg = 10 - # Required Float - float_p_def: FloatArg = 10.0 - # Required String - string_p_def: StrArg = 'Spock' - # Required List -- Float - list_p_float_def: ListArg[float] = ListArg.defaults([10.0, 20.0]) - # Required List -- Int - list_p_int_def: ListArg[int] = ListArg.defaults([10, 20]) - # Required List -- Str - list_p_str_def: ListArg[str] = ListArg.defaults(['Spock', 'Package']) - # Required List -- Bool - list_p_bool_def: ListArg[bool] = ListArg.defaults([True, False]) - # Required Tuple -- Float - tuple_p_float_def: TupleArg[float] = TupleArg.defaults((10.0, 20.0)) - # Required Tuple -- Int - tuple_p_int_def: TupleArg[int] = TupleArg.defaults((10, 20)) - # Required Tuple -- Str - tuple_p_str_def: TupleArg[str] = TupleArg.defaults(('Spock', 'Package')) - # Required Tuple -- Bool - tuple_p_bool_def: TupleArg[bool] = TupleArg.defaults((True, False)) - # Required choice - choice_p_str_def: ChoiceArg(choice_set=['option_1', 'option_2'], default='option_2') - - -@spock_config -class TypeDefaultOptConfig: - """This creates a test Spock config of all supported variable types as optional parameters""" - # DEFAULTS SET # - # Optional Int default set - int_p_opt_def: IntOptArg = 10 - # Optional Int default set - float_p_opt_def: FloatOptArg = 10.0 - # Optional String default set - string_p_opt_def: StrOptArg = 'Spock' - # Optional List default set - list_p_opt_def_float: ListOptArg[float] = ListOptArg.defaults([10.0, 20.0]) - # Optional List default set - list_p_opt_def_int: ListOptArg[int] = ListOptArg.defaults([10, 20]) - # Optional List default set - list_p_opt_def_bool: ListOptArg[bool] = ListOptArg.defaults([True, False]) - # Optional List default set - list_p_opt_def_str: ListOptArg[str] = 
ListOptArg.defaults(['Spock', 'Package']) - # Optional Tuple default set - tuple_p_opt_def_float: TupleOptArg[float] = TupleOptArg.defaults((10.0, 20.0)) - # Optional Tuple default set - tuple_p_opt_def_int: TupleOptArg[int] = TupleOptArg.defaults((10, 20)) - # Optional Tuple default set - tuple_p_opt_def_str: TupleOptArg[str] = TupleOptArg.defaults(('Spock', 'Package')) - # Optional Tuple default set - tuple_p_opt_def_bool: TupleOptArg[bool] = TupleOptArg.defaults((True, False)) - - -@spock_config -class TypeInherited(TypeConfig, TypeDefaultOptConfig): - """This tests inheritance with mixed default and non-default arguments""" - ... diff --git a/tests/dataclass/test_all_adapter.py b/tests/dataclass/test_all_adapter.py deleted file mode 100644 index f8cacaec..00000000 --- a/tests/dataclass/test_all_adapter.py +++ /dev/null @@ -1,378 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2019 FMR LLC -# SPDX-License-Identifier: Apache-2.0 - -from attr.exceptions import FrozenInstanceError -import glob -import pytest -from spock.builder import ConfigArgBuilder -from tests.dataclass.adapter_configs_test import * -import sys - - -class AllTypes: - # Required # - def test_all_set(self, arg_builder): - # Required # - assert arg_builder.TypeConfig.bool_p_set is True - assert arg_builder.TypeConfig.bool_p is False - assert arg_builder.TypeConfig.int_p == 10 - assert arg_builder.TypeConfig.float_p == 12.0 - assert arg_builder.TypeConfig.string_p == 'Spock' - assert arg_builder.TypeConfig.list_p_float == [10.0, 20.0] - assert arg_builder.TypeConfig.list_p_int == [10, 20] - assert arg_builder.TypeConfig.list_p_str == ['Spock', 'Package'] - assert arg_builder.TypeConfig.list_p_bool == [True, False] - assert arg_builder.TypeConfig.tuple_p_float == (10.0, 20.0) - assert arg_builder.TypeConfig.tuple_p_int == (10, 20) - assert arg_builder.TypeConfig.tuple_p_str == ('Spock', 'Package') - assert arg_builder.TypeConfig.tuple_p_bool == (True, False) - assert arg_builder.TypeConfig.choice_p_str == 'option_1' - assert arg_builder.TypeConfig.choice_p_int == 10 - assert arg_builder.TypeConfig.choice_p_float == 10.0 - # Optional # - assert arg_builder.TypeOptConfig.int_p_opt_no_def is None - assert arg_builder.TypeOptConfig.float_p_opt_no_def is None - assert arg_builder.TypeOptConfig.string_p_opt_no_def is None - assert arg_builder.TypeOptConfig.list_p_opt_no_def_float is None - assert arg_builder.TypeOptConfig.list_p_opt_no_def_int is None - assert arg_builder.TypeOptConfig.list_p_opt_no_def_str is None - assert arg_builder.TypeOptConfig.list_p_opt_no_def_bool is None - assert arg_builder.TypeOptConfig.tuple_p_opt_no_def_float is None - assert arg_builder.TypeOptConfig.tuple_p_opt_no_def_int is None - assert arg_builder.TypeOptConfig.tuple_p_opt_no_def_str is None - assert arg_builder.TypeOptConfig.tuple_p_opt_no_def_bool is None - - -class AllDefaults: - def test_all_defaults(self, arg_builder): - # Defaults # - assert arg_builder.TypeDefaultConfig.bool_p_set_def is True - assert arg_builder.TypeDefaultConfig.int_p_def == 10 - assert arg_builder.TypeDefaultConfig.float_p_def == 10.0 - assert arg_builder.TypeDefaultConfig.string_p_def == 'Spock' - assert arg_builder.TypeDefaultConfig.list_p_float_def == [10.0, 20.0] - assert arg_builder.TypeDefaultConfig.list_p_int_def == [10, 20] - assert arg_builder.TypeDefaultConfig.list_p_str_def == ['Spock', 'Package'] - assert arg_builder.TypeDefaultConfig.list_p_bool_def == [True, False] - assert arg_builder.TypeDefaultConfig.tuple_p_float_def == (10.0, 20.0) - assert 
arg_builder.TypeDefaultConfig.tuple_p_int_def == (10, 20) - assert arg_builder.TypeDefaultConfig.tuple_p_str_def == ('Spock', 'Package') - assert arg_builder.TypeDefaultConfig.tuple_p_bool_def == (True, False) - assert arg_builder.TypeDefaultConfig.choice_p_str_def == 'option_2' - # Optional w/ Defaults # - assert arg_builder.TypeDefaultOptConfig.int_p_opt_def == 10 - assert arg_builder.TypeDefaultOptConfig.float_p_opt_def == 10.0 - assert arg_builder.TypeDefaultOptConfig.string_p_opt_def == 'Spock' - assert arg_builder.TypeDefaultOptConfig.list_p_opt_def_float == [10.0, 20.0] - assert arg_builder.TypeDefaultOptConfig.list_p_opt_def_int == [10, 20] - assert arg_builder.TypeDefaultOptConfig.list_p_opt_def_str == ['Spock', 'Package'] - assert arg_builder.TypeDefaultOptConfig.list_p_opt_def_bool == [True, False] - assert arg_builder.TypeDefaultOptConfig.tuple_p_opt_def_float == (10.0, 20.0) - assert arg_builder.TypeDefaultOptConfig.tuple_p_opt_def_int == (10, 20) - assert arg_builder.TypeDefaultOptConfig.tuple_p_opt_def_str == ('Spock', 'Package') - assert arg_builder.TypeDefaultOptConfig.tuple_p_opt_def_bool == (True, False) - - -class AllInherited: - def test_all_inherited(self, arg_builder): - # Required # - assert arg_builder.TypeInherited.bool_p_set is True - assert arg_builder.TypeInherited.bool_p is False - assert arg_builder.TypeInherited.int_p == 10 - assert arg_builder.TypeInherited.float_p == 10.0 - assert arg_builder.TypeInherited.string_p == 'Spock' - assert arg_builder.TypeInherited.list_p_float == [10.0, 20.0] - assert arg_builder.TypeInherited.list_p_int == [10, 20] - assert arg_builder.TypeInherited.list_p_str == ['Spock', 'Package'] - assert arg_builder.TypeInherited.list_p_bool == [True, False] - assert arg_builder.TypeInherited.tuple_p_float == (10.0, 20.0) - assert arg_builder.TypeInherited.tuple_p_int == (10, 20) - assert arg_builder.TypeInherited.tuple_p_str == ('Spock', 'Package') - assert arg_builder.TypeInherited.tuple_p_bool == (True, False) - assert arg_builder.TypeInherited.choice_p_str == 'option_1' - assert arg_builder.TypeInherited.choice_p_int == 10 - assert arg_builder.TypeInherited.choice_p_float == 10.0 - # Optional w/ Defaults # - assert arg_builder.TypeInherited.int_p_opt_def == 10 - assert arg_builder.TypeInherited.float_p_opt_def == 10.0 - assert arg_builder.TypeInherited.string_p_opt_def == 'Spock' - assert arg_builder.TypeInherited.list_p_opt_def_float == [10.0, 20.0] - assert arg_builder.TypeInherited.list_p_opt_def_int == [10, 20] - assert arg_builder.TypeInherited.list_p_opt_def_str == ['Spock', 'Package'] - assert arg_builder.TypeInherited.list_p_opt_def_bool == [True, False] - assert arg_builder.TypeInherited.tuple_p_opt_def_float == (10.0, 20.0) - assert arg_builder.TypeInherited.tuple_p_opt_def_int == (10, 20) - assert arg_builder.TypeInherited.tuple_p_opt_def_str == ('Spock', 'Package') - assert arg_builder.TypeInherited.tuple_p_opt_def_bool == (True, False) - - -# TESTS -# BASED ON YAML FILE -class TestAllTypesYAML(AllTypes): - """Check all required types work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - return config.generate() - - -class TestAllDefaultsYAML(AllDefaults): - """Check all required types falling back to default work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - 
with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, TypeDefaultConfig, TypeDefaultOptConfig, - desc='Test Builder') - return config.generate() - - -class TestFrozen: - """Testing the frozen state of the spock config object""" - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - return config.generate() - - # Check frozen state works - def test_frozen_state(self, arg_builder): - with pytest.raises(FrozenInstanceError): - arg_builder.TypeConfig.float_p = 1.0 - with pytest.raises(FrozenInstanceError): - arg_builder.TypeOptConfig.int_p_opt_def = 1 - with pytest.raises(FrozenInstanceError): - arg_builder.TypeConfig.list_p_float = [1.0, 2.0] - with pytest.raises(FrozenInstanceError): - arg_builder.TypeOptConfig.tuple_p_opt_no_def_float = (1.0, 2.0) - - -class TestConfigKwarg(AllTypes): - """Testing to see that the kwarg overload path works""" - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder', - configs=['./tests/conf/legacy/yaml/test.yaml']) - return config.generate() - - -class TestNoCmdLineKwarg(AllTypes): - """Testing to see that the kwarg no cmd line works""" - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, no_cmd_line=True, - configs=['./tests/conf/legacy/yaml/test.yaml']) - return config.generate() - - -class TestNoCmdLineRaise: - """Check raise when no cmd line and no configs works as expected """ - def test_choice_raise(self, monkeypatch): - with monkeypatch.context() as m: - with pytest.raises(ValueError): - ConfigArgBuilder(TypeConfig, TypeOptConfig, no_cmd_line=True) - - -class TestComposition: - """Check all composed files work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test_include.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - return config.generate() - - def test_req_int(self, arg_builder): - assert arg_builder.TypeConfig.int_p == 9 - - -class TestInheritance(AllInherited): - """Check that inheritance between classes works correctly""" - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/inherited.yaml']) - config = ConfigArgBuilder(TypeInherited, desc='Test Builder') - return config.generate() - - -class TestChoiceRaises: - """Check all inherited types work as expected """ - def test_choice_raise(self, monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/choice.yaml']) - with pytest.raises(ValueError): - ConfigArgBuilder(ChoiceFail, desc='Test Builder') - - -class TestOverrideRaise: - """Checks that override of a specific class variable is failing gracefully""" - def test_override_raise(self, monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - with pytest.raises(ValueError): - 
ConfigArgBuilder(TypeInherited, desc='Test Builder') - - -class TestConfigArgType: - """Test functions related to the argument builder""" - def test_type_arg_builder(self, monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/test.yaml']) - with pytest.raises(TypeError): - ConfigArgBuilder(['Names'], desc='Test Builder') - - -class TestUnknownArg: - def test_type_unknown(self, monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test_incorrect.yaml']) - with pytest.raises(ValueError): - ConfigArgBuilder(TypeConfig, desc='Test Builder') - - -class TestDefaultWriter: - def test_default_file_writer(self, monkeypatch, tmp_path): - """Test the default writer works correctly""" - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - # Test the chained version - config.save(user_specified_path=tmp_path).generate() - assert len(list(tmp_path.iterdir())) == 1 - - -class TestYAMLWriter: - def test_yaml_file_writer(self, monkeypatch, tmp_path): - """Test the YAML writer works correctly""" - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - # Test the chained version - config.save(user_specified_path=tmp_path, file_extension='.yaml').generate() - check_path = str(tmp_path) + '/*.yaml' - fname = glob.glob(check_path)[0] - with open(fname, 'r') as fin: - print(fin.read()) - assert len(list(tmp_path.iterdir())) == 1 - - -class TestWritePathRaise: - def test_yaml_file_writer(self, monkeypatch, tmp_path): - """Test the YAML writer works correctly""" - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/yaml/test.yaml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - # Test the chained version - with pytest.raises(FileNotFoundError): - config.save(user_specified_path=str(tmp_path)+'/foo.bar/fizz.buzz/', file_extension='.yaml').generate() - - -# TOML TESTS -class TestAllTypesTOML(AllTypes): - """Check all required types work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/toml/test.toml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - return config.generate() - - -class TestAllDefaultsTOML(AllDefaults): - """Check all required types falling back to default work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/toml/test.toml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, TypeDefaultConfig, TypeDefaultOptConfig, - desc='Test Builder') - return config.generate() - - -class TestTOMLWriter: - def test_toml_file_writer(self, monkeypatch, tmp_path): - """Check the TOML writer works correctly""" - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/toml/test.toml']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - # Test the chained version - config.save(user_specified_path=tmp_path, file_extension='.toml').generate() - check_path = str(tmp_path) + '/*.toml' - 
fname = glob.glob(check_path)[0] - with open(fname, 'r') as fin: - print(fin.read()) - assert len(list(tmp_path.iterdir())) == 1 - - -# JSON TESTS -class TestAllTypesJSON(AllTypes): - """Check all required types work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/json/test.json']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - return config.generate() - - -class TestAllDefaultsJSON(AllDefaults): - """Check all required types falling back to default work as expected """ - @staticmethod - @pytest.fixture - def arg_builder(monkeypatch): - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/json/test.json']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, TypeDefaultConfig, TypeDefaultOptConfig, - desc='Test Builder') - return config.generate() - - -class TestJSONWriter: - def test_json_file_writer(self, monkeypatch, tmp_path): - """Check JSON writer works correctly""" - with monkeypatch.context() as m: - m.setattr(sys, 'argv', ['', '--config', - './tests/conf/legacy/json/test.json']) - config = ConfigArgBuilder(TypeConfig, TypeOptConfig, desc='Test Builder') - # Test the chained version - config.save(user_specified_path=tmp_path, file_extension='.json').generate() - check_path = str(tmp_path) + '/*.json' - fname = glob.glob(check_path)[0] - with open(fname, 'r') as fin: - print(fin.read()) - assert len(list(tmp_path.iterdir())) == 1 diff --git a/tests/debug/debug.py b/tests/debug/debug.py index 5c0eadb6..cbf605b4 100644 --- a/tests/debug/debug.py +++ b/tests/debug/debug.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -from spock.backend.dataclass.args import IntArg, ListArg, IntOptArg, ChoiceArg, SavePathOptArg from spock.config import spock -from spock.config import spock_config from typing import List from typing import Optional from typing import Tuple @@ -39,6 +37,11 @@ class ClassStuff(Enum): stuff = Stuff +@spock +class RepeatStuff: + hi: int + bye: float + @spock class Test: @@ -46,13 +49,13 @@ class Test: # # fix_me: Tuple[Tuple[int]] # new: int = 3 # fail: bool - # fail: List - # test: List[int] = [1, 2] + fail: Tuple[Tuple[int, int], Tuple[int, int]] + test: List[int] = [1, 2] # fail: List[List[int]] = [[1, 2], [1, 2]] # borken: Stuff = Stuff - # borken: List[Stuff] = Stuff + borken: List[RepeatStuff] # more_borken: OtherStuff - most_broken: ClassStuff = Stuff + most_broken: ClassStuff # borken: int # borken: Optional[List[List[Choice]]] = [['pear'], ['banana']] # save_path: SavePath = '/tmp' @@ -74,7 +77,10 @@ class Test: def main(): - attrs_class = ConfigArgBuilder(Test, OtherStuff, Stuff).generate() + attrs_class = ConfigArgBuilder(Test, OtherStuff, Stuff, RepeatStuff).save( + '/tmp', + file_extension='.json' + ).generate() # with open('/tmp/debug.pickle', 'wb') as fid: # pickle.dump(attrs_class, file=fid) diff --git a/tests/debug/debug.yaml b/tests/debug/debug.yaml index adce275b..761e366e 100644 --- a/tests/debug/debug.yaml +++ b/tests/debug/debug.yaml @@ -24,8 +24,19 @@ OtherStuff: ### other: 12 ###Test: ### new: 12 -###fail: false -###ccccombo_breaker: 10 + +RepeatStuff: + - hi: 10 + bye: 30.0 + - hi: 20 + bye: 45.0 + +Test: + fail: [[1, 2], [3, 4]] + most_broken: Stuff + borken: RepeatStuff + +##ccccombo_breaker: 10 #new: 10 ##other: 1 ##Test2: diff --git a/versioneer.py b/versioneer.py index 64fea1c8..1040c218 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,5 
+1,5 @@ -# Version: 0.18 +# Version: 0.19 """The Versioneer - like a rocketeer, but for versions. @@ -7,16 +7,12 @@ ============== * like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer +* https://github.com/python-versioneer/python-versioneer * Brian Warner * License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) +* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update @@ -27,9 +23,10 @@ ## Quick Install -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) +* `pip install versioneer` to somewhere in your $PATH +* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) * run `versioneer install` in your source tree, commit the results +* Verify version information with `python setup.py version` ## Version Identifiers @@ -61,7 +58,7 @@ for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. +uncommitted changes). The version identifier is used for multiple purposes: @@ -166,7 +163,7 @@ Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). +[issues page](https://github.com/python-versioneer/python-versioneer/issues). ### Subprojects @@ -180,7 +177,7 @@ `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. + provide bindings to Python (and perhaps other languages) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs @@ -194,9 +191,9 @@ Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking this issue. The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve @@ -224,22 +221,10 @@ cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. 
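A quick, illustrative check for whether stale egg_info metadata is the culprit (this sketch is not part of Versioneer; it assumes the package is installed under the name "spock-config" and that the interpreter runs from the project root, so the vendored `versioneer` module is importable):

    import pkg_resources  # setuptools runtime API
    import versioneer

    # Version pip recorded at install time, read from the (possibly stale) egg_info:
    print("installed:", pkg_resources.get_distribution("spock-config").version)
    # Version Versioneer computes right now from the git metadata:
    print("computed: ", versioneer.get_version())

If the two strings disagree, the cached egg_info is out of date.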
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. - ## Updating Versioneer @@ -265,6 +250,12 @@ direction and include code from all supported VCS systems, reducing the number of intermediate scripts. +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [miniver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer ## License @@ -274,13 +265,15 @@ Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer + """ -from __future__ import print_function -try: - import configparser -except ImportError: - import ConfigParser as configparser +import configparser import errno import json import os @@ -339,9 +332,9 @@ def get_config_from_root(root): # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() + parser = configparser.ConfigParser() with open(setup_cfg, "r") as f: - parser.readfp(f) + parser.read_file(f) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): @@ -371,7 +364,7 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: @@ -407,9 +400,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() + stdout = p.communicate()[0].strip().decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) @@ -418,7 +409,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, return stdout, p.returncode -LONG_VERSION_PY['git'] = ''' +LONG_VERSION_PY['git'] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -426,7 +417,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, # that just contains the computed version number.
# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) +# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" @@ -477,7 +468,7 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: @@ -513,9 +504,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() + stdout = p.communicate()[0].strip().decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) @@ -585,6 +574,10 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -720,6 +713,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -758,18 +754,18 @@ def render_pep440(pieces): def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. + """TAG[.post0.devDISTANCE] -- No -dirty. Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0.post0.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] + rendered += ".post0.dev%%d" %% pieces["distance"] else: # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] + rendered = "0.post0.dev%%d" %% pieces["distance"] return rendered @@ -805,7 +801,7 @@ def render_pep440_old(pieces): The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -977,6 +973,10 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -1112,6 +1112,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. 
+ date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -1181,7 +1184,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from +# This file was generated by 'versioneer.py' (0.19) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. @@ -1259,18 +1262,18 @@ def render_pep440(pieces): def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. + """TAG[.post0.devDISTANCE] -- No -dirty. Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0.post0.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + rendered += ".post0.dev%d" % pieces["distance"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered @@ -1306,7 +1309,7 @@ def render_pep440_old(pieces): The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -1480,8 +1483,12 @@ def get_version(): return get_versions()["version"] -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" +def get_cmdclass(cmdclass=None): + """Get the custom setuptools/distutils subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provided as an argument. + """ if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and @@ -1495,9 +1502,9 @@ def get_cmdclass(): # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/warner/python-versioneer/issues/52 + # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - cmds = {} + cmds = {} if cmdclass is None else cmdclass.copy() # we add "version" to both distutils and setuptools from distutils.core import Command @@ -1539,7 +1546,9 @@ def run(self): # setup.py egg_info -> ? # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: + if 'build_py' in cmds: + _build_py = cmds['build_py'] + elif "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py @@ -1559,6 +1568,31 @@ def run(self): write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py + if "setuptools" in sys.modules: + from setuptools.command.build_ext import build_ext as _build_ext + else: + from distutils.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one.
+ return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_ext"] = cmd_build_ext + if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # nczeczulin reports that py2exe won't like the pep440-style string @@ -1592,10 +1626,7 @@ def run(self): del cmds["build_py"] if 'py2exe' in sys.modules: # py2exe enabled? - try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 + from py2exe.distutils_buildexe import py2exe as _py2exe class cmd_py2exe(_py2exe): def run(self): @@ -1620,7 +1651,9 @@ def run(self): cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: + if 'sdist' in cmds: + _sdist = cmds['sdist'] + elif "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist @@ -1695,7 +1728,7 @@ def make_release_tree(self, base_dir, files): def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" + """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root)
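As an illustration of the new optional `cmdclass` argument to `get_cmdclass()` shown above, a hypothetical setup.py that already carries its own `build_py` command can hand it to Versioneer instead of having it silently replaced (the `MyBuildPy` class and the metadata below are assumptions for the sketch, not taken from this patch):

    from setuptools import setup
    from setuptools.command.build_py import build_py

    import versioneer

    class MyBuildPy(build_py):
        """A pre-existing custom build step that Versioneer should wrap."""

    setup(
        name="spock-config",
        version=versioneer.get_version(),
        cmdclass=versioneer.get_cmdclass({"build_py": MyBuildPy}),
    )

Because `get_cmdclass()` now copies the mapping it receives and subclasses any `build_py` or `sdist` entries it finds there, the custom command still runs while the regenerated `_version.py` is written into the build tree.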