From 3718ee6faa45437bb642905fb7f8c624a6ad4bb3 Mon Sep 17 00:00:00 2001
From: mario4tier
Date: Fri, 20 Dec 2024 22:14:18 -0500
Subject: [PATCH] Fix

---
 .github/workflows/publish-step-1.yml | 15 ++++-
 CHANGELOG.md                         | 44 ++++++++++++++
 scripts/pre-release-checks.py        | 91 ++++++++++++++++++++++++++++
 scripts/utilities/common.py          | 26 ++++++++
 scripts/utilities/versions.py        | 57 ++++++++++++++---
 5 files changed, 222 insertions(+), 11 deletions(-)
 create mode 100644 CHANGELOG.md
 create mode 100755 scripts/pre-release-checks.py

diff --git a/.github/workflows/publish-step-1.yml b/.github/workflows/publish-step-1.yml
index 591ec48e..3de570eb 100644
--- a/.github/workflows/publish-step-1.yml
+++ b/.github/workflows/publish-step-1.yml
@@ -35,6 +35,11 @@ jobs:
           git config --global user.name "github-actions[bot]"
           git config --global user.email "github-actions[bot]@users.noreply.github.com"
 
+      - name: Pre-release checks
+        shell: bash
+        run: |
+          $PYTHON $GITHUB_WORKSPACE/scripts/pre-release-checks.py
+
       - name: Tagging
         shell: bash
        env:
@@ -57,13 +62,15 @@ jobs:
           git tag "$TAG"
           git push origin "$TAG"
 
-      - name: Ensure Release exists and init release_vars
+      - name: Ensure draft release exists and set upload_url
        id: release_vars
        uses: actions/github-script@v7
        env:
          TAG: ${{ env.TAG }}
        with:
          script: |
+            const fs = require('fs');
+            const path = require('path');
            let tag_name = process.env.TAG;
            console.log(`tag_name: ${tag_name}`);
            let upload_url;
@@ -83,7 +90,6 @@
              core.setFailed(`Release for tag ${tag_name} is already published.`);
              return;
            }
-            // console.log(`Draft release for tag ${tag_name} already exists.`);
 
            // Update the release to point to potentially new commit (noop when no change)
            const release_id = release.id;
@@ -108,12 +114,17 @@
            upload_url = updatedRelease.upload_url;
          } else {
+            // Read release notes from file
+            const release_notes_path = path.join(process.env.GITHUB_WORKSPACE, 'temp', 'DRAFT_RELEASE_NOTES.md');
+            const release_notes = fs.readFileSync(release_notes_path, 'utf8');
+
            // Release does not exists, so create it.
            const response = await github.rest.repos.createRelease({
              owner: context.repo.owner,
              repo: context.repo.repo,
              tag_name: tag_name,
              name: tag_name,
+              body: release_notes,
              draft: true,
              prerelease: false,
            });
 
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..97395171
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,44 @@
+# Changelog
+
+Check https://ta-lib.org/install/ for the latest installation instructions.
+
+Only notable changes are documented here. See GitHub commits for all changes.
+
+## [0.6.0] 2024-12-24
+
+### Added
+
+- Packaging automation for various platforms, notably 64-bit Windows.
+- Fix for very small inputs to TA functions (floating-point epsilon problem).
+
+### Fixed
+
+- Autotools and CMakeLists.txt have been modernized.
+
+### Changed
+
+- Static/shared lib file names use a hyphen instead of an underscore. This was needed for some package naming conventions.
+  In other words, look for "ta-lib" instead of "ta_lib".
+
+  Example: when linking, you now use "-lta-lib" instead of "-lta_lib".
+
+- C/C++ headers are now under a "ta-lib" subdirectory. You may have to change your code accordingly.
+
+  Example: `#include <ta-lib/ta_func.h>` instead of `#include <ta_func.h>`
+
+  This change follows namespace best practice for when TA-Lib is installed at the system level.
+
+- Moving forward, autotools and CMake are the only two supported build systems. Consequently:
+  - Xcode and Visual Studio projects (.sln) are no longer maintained.
+  - There are no "cdd", "cdr", etc. library variants anymore. This was an outdated way of doing things.
+  - The ide/ and make/ directories from 0.4.0 have been removed.
+
+  Recommendation: VSCode+CMake works consistently on most platforms.
+
+- TA_GetVersionBuild() is deprecated. Use TA_GetVersionPatch() instead.
+
+## [0.5.0]
+A tentative release that was tagged, but never widely published or used.
+
+## [0.4.0]
+The major stable release from SourceForge that kept TA-Lib alive for years.
diff --git a/scripts/pre-release-checks.py b/scripts/pre-release-checks.py
new file mode 100755
index 00000000..cf4bbbd6
--- /dev/null
+++ b/scripts/pre-release-checks.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+
+# This is called from GitHub Actions to:
+# - Detect VERSION inconsistencies among the files.
+# - Detect if some release candidate assets are missing from dist/
+# - Detect if a top entry for VERSION is missing in CHANGELOG.md
+#
+# If no problems are found, the script will create temp/DRAFT_RELEASE_NOTES.md,
+# which the CI uses when creating the initial draft release.
+#
+# You can also call this script locally for early detection of problems that
+# would prevent the release from proceeding smoothly.
+
+import argparse
+import os
+import re
+
+from utilities.files import path_join
+from utilities.common import get_release_assets, verify_git_repo
+from utilities.versions import check_sources_digest, check_versions
+
+if __name__ == "__main__":
+
+    root_dir = verify_git_repo()
+    print("Running pre-release checks")
+
+    parser = argparse.ArgumentParser(description='Pre-release checks')
+    args = parser.parse_args()
+
+    version = check_versions(root_dir)
+    if not version:
+        print("Error: Version inconsistencies found. Did you forget to run scripts/sync.py?")
+        exit(1)
+
+    sources_digest = check_sources_digest(root_dir)
+    if not sources_digest:
+        print("Error: Source digest inconsistencies found. Did you forget to run scripts/sync.py?")
+        exit(1)
+
+    expected_assets = get_release_assets(version)
+    missing_assets = [asset for asset in expected_assets if not os.path.exists(path_join(root_dir, 'dist', asset))]
+    if missing_assets:
+        print("Error: Missing assets in dist/:")
+        for asset in missing_assets:
+            print(f"  - {asset}")
+
+        print("Did you forget to wait for all release candidate assets to be auto-generated in the dev branch?")
+        exit(1)
+
+    # Verify that CHANGELOG.md exists and has a top entry for version.
+    # At the same time, accumulate all lines up to the end of that top entry.
+
+    changelog_path = path_join(root_dir, 'CHANGELOG.md')
+    version_pattern = re.compile(r'##\s+\[\d+\.\d+\.\d+\].*')
+    top_version_found = False
+    release_notes = []
+
+    with open(changelog_path, 'r') as f:
+        for line in f:
+            if version_pattern.match(line):
+                if not top_version_found:
+                    # Extract the version (x.y.z) from the heading
+                    changelog_version = line.split('[')[1].split(']')[0]
+                    if changelog_version != version:
+                        print(f"Error: Found top entry for version {changelog_version}, expected {version}.")
+                        print("Did you forget to update CHANGELOG.md?")
+                        exit(1)
+                    top_version_found = True
+                else:
+                    break
+            release_notes.append(line)
+    if not top_version_found:
+        print(f"Error: No entry found in CHANGELOG.md for version {version}.")
+        print("Did you forget to update CHANGELOG.md?")
+        exit(1)
+
+    # Write the release notes to temp/DRAFT_RELEASE_NOTES.md
+    temp_dir = path_join(root_dir, 'temp')
+    os.makedirs(temp_dir, exist_ok=True)
+    release_notes_path = path_join(temp_dir, 'DRAFT_RELEASE_NOTES.md')
+    with open(release_notes_path, 'w') as f:
+        f.write(''.join(release_notes))
+
+    # Verify that the release notes file was indeed written.
+    if not os.path.exists(release_notes_path):
+        print(f"Error: Failed to write draft release notes to {release_notes_path}.")
+        exit(1)
+
+    print(f"Draft release notes written to {release_notes_path}")
+
+    print("Pre-release checks completed successfully.")
diff --git a/scripts/utilities/common.py b/scripts/utilities/common.py
index 287382e8..34ecca8b 100644
--- a/scripts/utilities/common.py
+++ b/scripts/utilities/common.py
@@ -132,6 +132,24 @@ def is_i386_toolchain_installed() -> bool:
     # TODO - Add tool specific detection for Windows/MacOS
     return sys.platform.machine().lower() in ['i386', 'i686']
 
+# Utility function to list all assets expected for a release.
+def get_release_assets(version: str) -> list:
+    """
+    Return the list of assets expected for a release.
+
+    This is used for CI.
+
+    TA-Lib maintainers should update this list every time an asset is added or removed.
+    """
+    return [
+        f'ta-lib-{version}-src.tar.gz',
+        f'ta-lib-{version}-windows-x86_64.msi',
+        f'ta-lib-{version}-windows-x86_64.zip',
+        f'ta-lib_{version}_amd64.deb',
+        f'ta-lib_{version}_arm64.deb',
+        f'ta-lib_{version}_x86.deb',
+    ]
+
 # Utility functions to identify the gen_code generated files.
 def get_src_generated_files() -> list:
     """
@@ -142,6 +160,10 @@ def get_src_generated_files() -> list:
     Everything under a directory ('**') and file glob allowed ('*')
 
     See get_all_generated_files() for more...
+
+    This is used for CI.
+
+    TA-Lib maintainers should update this list every time a generated file is added or removed.
     """
     return [
         'include/ta_func.h',
@@ -158,6 +180,10 @@ def get_all_generated_files() -> list:
     """
     Returns list of all generated files and directories.
     Everything under a directory ('**') and file glob allowed ('*')
+
+    This is used for CI.
+
+    TA-Lib maintainers should update this list every time a generated file is added or removed.
""" return [ 'swig/src/interface/ta_func.swg', diff --git a/scripts/utilities/versions.py b/scripts/utilities/versions.py index dd8b302c..d5aacae8 100644 --- a/scripts/utilities/versions.py +++ b/scripts/utilities/versions.py @@ -4,6 +4,8 @@ import sys from typing import Tuple +from jinja2 import Undefined + from .common import expand_globs from .files import path_join @@ -303,6 +305,22 @@ def sync_versions(root_dir: str) -> Tuple[bool,str]: return is_updated, version_c +def check_versions(root_dir: str) -> str: + # Similar to sync_versions() but only checks if the versions are in sync, do not modify anything. + version_file = get_version_string(root_dir) + version_c = get_version_string_source_code(root_dir) + version_cmake = get_version_string_cmake(root_dir) + + if version_file != version_c: + print(f"Error: VERSION [{version_file}] does not match ta_version.c [{version_c}]") + return None + + if version_file != version_cmake: + print(f"Error: VERSION [{version_file}] does not match CMakeLists.txt [{version_cmake}]") + return None + + return version_file + def write_sources_digest(root_dir: str, new_digest: str) -> bool: """Update the ta_common.h file with the new digest.""" ta_common_h_path = path_join(root_dir, 'include', 'ta_common.h') @@ -350,7 +368,7 @@ def read_sources_digest(root_dir: str) -> str: return None -def sync_sources_digest(root_dir: str) -> Tuple[bool,str]: +def calculate_sources_digest(root_dir: str, silent: bool = False) -> str: # This is for a calculated digest of all source file contant relevant # to packaging. It helps to trig CI repackaging when a change # is detected. @@ -375,6 +393,7 @@ def sync_sources_digest(root_dir: str) -> Tuple[bool,str]: "LICENSE", "VERSION", ] + file_list = expand_globs(root_dir, file_patterns) # Remove potential duplicate entries @@ -394,7 +413,7 @@ def sync_sources_digest(root_dir: str) -> Tuple[bool,str]: for file_path in sorted_files: try: n_files += 1 - full_file_path = os.path.join(root_dir, file_path) + full_file_path = path_join(root_dir, file_path) with open(full_file_path, 'r', encoding='utf-8') as f: n_opens += 1 for line in f: @@ -409,17 +428,37 @@ def sync_sources_digest(root_dir: str) -> Tuple[bool,str]: print(f"Error reading file while updating source digest [{file_path}]: {e}") sys.exit(1) - sources_hash = running_hash.hexdigest() + if not silent: + print(f"Digest input is n_files: {n_files}, n_lines: {n_lines}, n_opens: {n_opens}, n_chars: {n_chars}") + return running_hash.hexdigest() + +def sync_sources_digest(root_dir: str) -> Tuple[bool,str]: + calculated_digest = calculate_sources_digest(root_dir) # Update ta_common.h (touch only if different) current_digest = read_sources_digest(root_dir) - if current_digest == sources_hash: - return False, sources_hash + if current_digest == calculated_digest: + return False, calculated_digest print(f"Difference detected in source digest. Updating ta_common.h") print(f"Old source digest: {current_digest}") - print(f"New source digest: {sources_hash}") - print(f"Calculated using n_files: {n_files}, n_lines: {n_lines}, n_opens: {n_opens}, n_chars: {n_chars}") + print(f"New source digest: {calculated_digest}") + + write_sources_digest(root_dir, calculated_digest) + return True, calculated_digest + + +def check_sources_digest(root_dir: str) -> str: + # Similar to sync_sources_digest() but only checks if the digests are in sync, do not modify anything. 
@@ -409,17 +428,37 @@
             print(f"Error reading file while updating source digest [{file_path}]: {e}")
             sys.exit(1)
 
-    sources_hash = running_hash.hexdigest()
+    if not silent:
+        print(f"Digest input is n_files: {n_files}, n_lines: {n_lines}, n_opens: {n_opens}, n_chars: {n_chars}")
+    return running_hash.hexdigest()
+
+def sync_sources_digest(root_dir: str) -> Tuple[bool,str]:
+    calculated_digest = calculate_sources_digest(root_dir)
 
     # Update ta_common.h (touch only if different)
     current_digest = read_sources_digest(root_dir)
-    if current_digest == sources_hash:
-        return False, sources_hash
+    if current_digest == calculated_digest:
+        return False, calculated_digest
 
     print(f"Difference detected in source digest. Updating ta_common.h")
     print(f"Old source digest: {current_digest}")
-    print(f"New source digest: {sources_hash}")
-    print(f"Calculated using n_files: {n_files}, n_lines: {n_lines}, n_opens: {n_opens}, n_chars: {n_chars}")
+    print(f"New source digest: {calculated_digest}")
+
+    write_sources_digest(root_dir, calculated_digest)
+    return True, calculated_digest
+
+
+def check_sources_digest(root_dir: str) -> str:
+    # Similar to sync_sources_digest(), but only checks that the digests are in sync; does not modify anything.
+    current_digest = read_sources_digest(root_dir)
+    if current_digest is None:
+        print("Error: TA_LIB_SOURCES_DIGEST not found in ta_common.h")
+        return None
+
+    calculated_digest = calculate_sources_digest(root_dir, silent=True)
+    if current_digest != calculated_digest:
+        print(f"Error: TA_LIB_SOURCES_DIGEST [{current_digest}] does not match calculated digest [{calculated_digest}]")
+        return None
+
+    return calculated_digest
 
-    write_sources_digest(root_dir, sources_hash)
-    return True, sources_hash
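For reviewers who want to exercise the CHANGELOG handling above without running the full workflow, here is a minimal standalone sketch of the top-entry extraction that scripts/pre-release-checks.py performs. It is not part of the patch; the helper name extract_top_entry, the hard-coded CHANGELOG.md path, and the '0.6.0' example version are illustrative assumptions only.

#!/usr/bin/env python3
# Standalone sketch: extract the release notes of the top-most "## [x.y.z]"
# entry from a CHANGELOG.md, mirroring the logic added in
# scripts/pre-release-checks.py. The path and version below are examples only.
import re
import sys

def extract_top_entry(changelog_path: str, expected_version: str) -> str:
    version_pattern = re.compile(r'##\s+\[\d+\.\d+\.\d+\].*')
    top_version_found = False
    release_notes = []
    with open(changelog_path, 'r') as f:
        for line in f:
            if version_pattern.match(line):
                if top_version_found:
                    break  # Reached the next (older) entry: stop accumulating.
                changelog_version = line.split('[')[1].split(']')[0]
                if changelog_version != expected_version:
                    sys.exit(f"Top entry is {changelog_version}, expected {expected_version}.")
                top_version_found = True
            release_notes.append(line)
    if not top_version_found:
        sys.exit(f"No entry found for version {expected_version}.")
    return ''.join(release_notes)

if __name__ == "__main__":
    # Example invocation; '0.6.0' matches the top entry added by this patch.
    print(extract_top_entry('CHANGELOG.md', '0.6.0'))

As in the script itself, everything before the first "## [x.y.z]" heading (the file preamble) is included in the generated notes, and the scan stops at the next, older entry.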