From b2549d7810aa2bcd9dd6b47aed3f54f6d80a47d2 Mon Sep 17 00:00:00 2001
From: Bart Skowron
Date: Thu, 2 Jun 2022 23:17:04 +0200
Subject: [PATCH] Add a new option to stop further downloading after a
 duplicate occurs

--exitondup enables faster processing in cases such as syncing all
activities in one directory.
---
 README.md   |  3 ++-
 gcexport.py | 19 +++++++++++++------
 2 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index bf908b5..d049a24 100644
--- a/README.md
+++ b/README.md
@@ -54,7 +54,7 @@ usage: gcexport.py [-h] [--version] [-v] [--username USERNAME]
                    [--password PASSWORD] [-c COUNT] [-e EXTERNAL] [-a ARGS]
                    [-f {gpx,tcx,original,json}] [-d DIRECTORY] [-s SUBDIR]
                    [-u] [-ot] [--desc [DESC]] [-t TEMPLATE] [-fp]
-                   [-sa START_ACTIVITY_NO] [-ex FILE]
+                   [-sa START_ACTIVITY_NO] [-ex FILE] [-x]
 
 Garmin Connect Exporter
 
@@ -87,6 +87,7 @@ optional arguments:
   -ex FILE, --exclude FILE
                         Json file with Array of activity IDs to exclude from
                         download. Format example: {"ids": ["6176888711"]}
+  -x, --exitondup       Stop further downloads after the first duplicate file occurs
 ```
 
 ### Examples
diff --git a/gcexport.py b/gcexport.py
index af4667b..eb6c506 100644
--- a/gcexport.py
+++ b/gcexport.py
@@ -528,6 +528,8 @@ def parse_arguments(argv):
     parser.add_argument('-ex', '--exclude', metavar="FILE",
                         help="Json file with Array of activity IDs to exclude from download. "
                              "Format example: {\"ids\": [\"6176888711\"]}")
+    parser.add_argument('-x', '--exitondup', action='store_true',
+                        help="Stop further downloads after the first duplicate file occurs")
 
     return parser.parse_args(argv[1:])
 
@@ -813,7 +815,7 @@ def export_data_file(activity_id, activity_details, args, file_time, append_desc
     :param file_time: if given the desired time stamp for the activity file (in seconds since 1970-01-01)
     :param append_desc: suffix to the default filename
     :param date_time: datetime in ISO format used for '--fileprefix' and '--subdir' options
-    :return: True if the file was written, False if the file existed already
+    :raise FileExistsError: if the file existed already
     """
     # Time dependent subdirectory for activity files, e.g. '{YYYY}'
     if not args.subdir is None:
@@ -856,14 +858,14 @@ def export_data_file(activity_id, activity_details, args, file_time, append_desc
         logging.debug('Data file for %s already exists', activity_id)
         print('\tData file already exists; skipping...')
         # Inform the main program that the file already exists
-        return False
+        raise FileExistsError
 
     # Regardless of unzip setting, don't redownload if the ZIP or FIT/GPX/TCX original file exists.
     if args.format == 'original' and (os.path.isfile(original_basename + '.fit') or os.path.isfile(original_basename + '.gpx') or os.path.isfile(original_basename + '.tcx')):
         logging.debug('Original data file for %s already exists', activity_id)
         print('\tOriginal data file already exists; skipping...')
         # Inform the main program that the file already exists
-        return False
+        raise FileExistsError
 
     if args.format != 'json':
         # Download the data file from Garmin Connect. If the download fails (e.g., due to timeout),
@@ -926,8 +928,6 @@ def export_data_file(activity_id, activity_details, args, file_time, append_desc
             print('\tSkipping 0Kb zip file.')
             os.remove(data_filename)
 
-    # Inform the main program that the file is new
-    return True
 
 def setup_logging():
     """Setup logging"""
@@ -1322,7 +1322,14 @@ def main(argv):
             extract['hrZones'] = load_zones(str(actvty['activityId']), start_time_seconds, args, http_req_as_string, write_to_file)
 
             # Save the file and inform if it already existed. If the file already existed, do not apped the record to the csv
-            if export_data_file(str(actvty['activityId']), activity_details, args, start_time_seconds, append_desc, actvty['startTimeLocal']):
+            try:
+                export_data_file(str(actvty['activityId']), activity_details, args, start_time_seconds, append_desc, actvty['startTimeLocal'])
+            except FileExistsError:
+                if args.exitondup:
+                    logging.info('--exitondup flag enabled. Skipping the remaining activities.')
+                    print('--exitondup flag enabled. Skipping the remaining activities.')
+                    break
+            else:
                 # Write stats to CSV.
                 csv_write_record(csv_filter, extract, actvty, details, activity_type_name, event_type_name)
 
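
For reviewers, here is a minimal, self-contained sketch of the control flow this patch introduces, assuming a hypothetical `download_one()` helper and a toy activity list in place of `export_data_file()` and the real Garmin Connect data; only the `-x`/`--exitondup` flag and the try/except/else shape are taken from the patch itself:

```python
# Sketch only (not gcexport.py): the exporter raises FileExistsError instead of
# returning False, and the caller breaks out of the loop when --exitondup is set.
import argparse
import logging


def download_one(activity_id, already_downloaded):
    """Hypothetical stand-in for export_data_file(): raise if the file already exists."""
    if activity_id in already_downloaded:
        raise FileExistsError(activity_id)
    already_downloaded.add(activity_id)


def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-x', '--exitondup', action='store_true',
                        help='Stop further downloads after the first duplicate file occurs')
    args = parser.parse_args(argv)

    already_downloaded = {'1002'}          # pretend this one was exported on a previous run
    for activity_id in ['1001', '1002', '1003']:
        try:
            download_one(activity_id, already_downloaded)
        except FileExistsError:
            if args.exitondup:
                logging.info('--exitondup flag enabled. Skipping the remaining activities.')
                break                      # stop the loop at the first duplicate
        else:
            print('exported', activity_id)  # only record activities that were actually written


if __name__ == '__main__':
    main(['--exitondup'])
```

The early `break` relies on the duplicate check hitting already-downloaded activities first, which matches the use case from the commit message (repeatedly syncing all activities into one directory); with other orderings it simply stops at the first file that already exists.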