From fe1fd0f754ccedf8dad8a7f291a670de9e15a90a Mon Sep 17 00:00:00 2001 From: Michal Charemza Date: Mon, 9 Dec 2024 12:56:52 +0000 Subject: [PATCH] fix: carry on in the face of more arrow-exceptions There are more Parquet-related exceptions that prevent the file format conversion from progressing. Instead of continuing to add to the list of exception types, I have found the PyArrow exception base class at https://github.com/apache/arrow/blob/fb8e8122f623f4548b22ece7485c4570d7ece1a6/python/pyarrow/error.pxi#L33C7-L33C21 (This is very similar to previous fixes.) --- app_worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app_worker.py b/app_worker.py index da9faa3..e772ef3 100644 --- a/app_worker.py +++ b/app_worker.py @@ -275,7 +275,7 @@ def sqlite_to_ods(use_zip_64): try: aws_multipart_upload(signed_s3_request, s3_key, stream_write_parquet(cols, rows)) - except (pa.ArrowNotImplementedError, pa.ArrowTypeError): + except pa.ArrowException: logger.exception('Unable to convert to parquet') # And save as a single ODS file