Commit

add extra tags for uploaded Postgres log files
FxKu committed Sep 4, 2024
1 parent 41c888c commit c2d7445
Showing 1 changed file with 16 additions and 3 deletions.
19 changes: 16 additions & 3 deletions postgres-appliance/scripts/upload_pg_log_to_s3.py
@@ -7,6 +7,7 @@
 import subprocess
 import sys
 import time
+from croniter import croniter
 
 from datetime import datetime, timedelta
 
@@ -16,12 +17,21 @@
 logger = logging.getLogger(__name__)
 
 
-def compress_pg_log():
+def generate_file_name():
+    schedule = os.getenv('LOG_SHIP_SCHEDULE')
+    itr = croniter(schedule, datetime.now() - timedelta(minutes=1))
+    prev_log = itr.get_prev(datetime)
     yesterday = datetime.now() - timedelta(days=1)
     yesterday_day_number = yesterday.strftime('%u')
 
     log_file = os.path.join(os.getenv('PGLOG'), 'postgresql-' + yesterday_day_number + '.csv')
-    archived_log_file = os.path.join(os.getenv('LOG_TMPDIR'), yesterday.strftime('%F') + '.csv.gz')
+    archived_log_file = os.path.join(os.getenv('LOG_TMPDIR'), prev_log.strftime('%F') + '.csv.gz')
+
+    return log_file, archived_log_file
+
+
+def compress_pg_log():
+    log_file, archived_log_file = generate_file_name()
 
     if os.path.getsize(log_file) == 0:
         logger.warning("Postgres log from yesterday '%s' is empty.", log_file)
@@ -48,12 +58,15 @@ def upload_to_s3(local_file_path):
     bucket = s3.Bucket(bucket_name)
 
     key_name = os.path.join(os.getenv('LOG_S3_KEY'), os.path.basename(local_file_path))
+    if os.getenv('LOG_GROUP_BY_DATE'):
+        key_name = key_name.format(**{'DATE': os.path.basename(local_file_path).split('.')[0]})
 
     chunk_size = 52428800  # 50 MiB
     config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)
+    tags = {'LogEndpoint': os.getenv('LOG_S3_ENDPOINT'), 'Namespace': os.getenv('POD_NAMESPACE'), 'ClusterName': os.getenv('SCOPE')}
 
     try:
-        bucket.upload_file(local_file_path, key_name, Config=config)
+        bucket.upload_file(local_file_path, key_name, Config=config, ExtraArgs=tags)
     except S3UploadFailedError as e:
         logger.exception('Failed to upload the %s to the bucket %s under the key %s. Exception: %r',
                          local_file_path, bucket_name, key_name, e)
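
On the upload side, boto3 restricts ExtraArgs to a fixed whitelist of keys and expects object tags as a single URL-encoded string under 'Tagging', so a tags dict would typically be serialized before being handed to upload_file. A minimal sketch of attaching the same three tags in that form; the bucket name, object key, and local path below are hypothetical placeholders, not values from this script:

```python
# Sketch only: bucket name, key, and file path are hypothetical placeholders.
import os
from urllib.parse import urlencode

import boto3
from boto3.s3.transfer import TransferConfig

tags = {'LogEndpoint': os.getenv('LOG_S3_ENDPOINT', ''),
        'Namespace': os.getenv('POD_NAMESPACE', ''),
        'ClusterName': os.getenv('SCOPE', '')}

chunk_size = 52428800  # 50 MiB
config = TransferConfig(multipart_threshold=chunk_size, multipart_chunksize=chunk_size)

bucket = boto3.resource('s3').Bucket('example-log-bucket')
bucket.upload_file('/tmp/2024-09-03.csv.gz', 'spilo/example-cluster/2024-09-03.csv.gz',
                   Config=config,
                   ExtraArgs={'Tagging': urlencode(tags)})  # tags as one URL-encoded string
```

urlencode() produces the key1=value1&key2=value2 string that S3's Tagging parameter expects.
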
