
Commit

refactor: python code
kehiy committed Jul 24, 2024
1 parent 1b85025 commit 49ffdd1
Showing 1 changed file with 77 additions and 54 deletions.
131 changes: 77 additions & 54 deletions scripts/snapshot.py
@@ -12,8 +12,8 @@
# - `--data_path`: This argument specifies the path to the Pactus data folder to create snapshots.
# - Windows: `C:\Users\{user}\pactus\data`
# - Linux or Mac: `/home/{user}/pactus/data`
- # - `--compress`: This argument specifies the compression method based on your choice ['zip', 'tar'],
- #   default is zip.
+ # - `--compress`: This argument specifies the compression method based on your choice ['none', 'zip', 'tar'],
+ #   with 'none' being without compression.
# - `--retention`: This argument sets the number of snapshots to keep.
# - `--snapshot_path`: This argument sets a custom path for snapshots, with the default being the current
# working directory of the script.
@@ -25,6 +25,7 @@
# sudo python3 snapshot.py --service_path /etc/systemd/system/pactus.service --data_path /home/{user}/pactus/data
# --compress zip --retention 3


import argparse
import os
import shutil
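
The `--retention` flag documented above caps how many snapshots are kept around. The pruning code itself is not part of the hunks shown in this diff; the sketch below is only an illustration of how such a cap is typically enforced, under the assumption that each snapshot lives in its own timestamp-named folder inside the snapshots directory (the folder layout, names, and helper are assumptions, not taken from the script):

import os
import shutil

def prune_snapshots(snapshots_dir, retention):
    # Illustrative only: one sub-folder per snapshot, named by timestamp,
    # so a plain lexicographic sort orders them oldest-first.
    snapshots = sorted(
        d
        for d in os.listdir(snapshots_dir)
        if os.path.isdir(os.path.join(snapshots_dir, d))
    )
    removed = snapshots[:-retention] if retention > 0 else snapshots
    for name in removed:
        shutil.rmtree(os.path.join(snapshots_dir, name))
    return removed  # e.g. used to update metadata.json afterwards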
@@ -70,7 +71,14 @@ def update_metadata_file(snapshot_path, snapshot_metadata):
logging.info(f"Creating new metadata file '{metadata_file}'")
metadata = []

- metadata.append(snapshot_metadata)
+ formatted_metadata = {
+     "name": snapshot_metadata["name"],
+     "created_at": snapshot_metadata["created_at"],
+     "compress": snapshot_metadata["compress"],
+     "data": snapshot_metadata["data"],
+ }
+
+ metadata.append(formatted_metadata)

with open(metadata_file, "w") as f:
json.dump(metadata, f, indent=4)
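
With that formatting step, each entry written to the metadata file carries exactly the four fields name, created_at, compress and data. For orientation, a hypothetical entry for a zip-mode snapshot (all values invented; the field layout follows the helpers shown in this diff) would serialize roughly like this:

import json

example_entry = {
    "name": "20240724_120000",  # timestamp-style snapshot name (invented)
    "created_at": "2024-07-24T12:00:00+00:00",
    "compress": "zip",
    "data": {  # single archive entry, see create_compressed_snapshot_json below
        "name": "data.zip",
        "path": "20240724_120000/data.zip",
        "sha": "0000000000000000000000000000000000000000000000000000000000000000",
        "size": 123456789,
    },
}
print(json.dumps([example_entry], indent=4))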
@@ -92,6 +100,37 @@ def update_metadata_after_removal(snapshots_dir, removed_snapshots):
with open(metadata_file, "w") as f:
json.dump(updated_metadata, f, indent=4)

+ @staticmethod
+ def create_snapshot_json(data_dir, snapshot_subdir):
+     files = []
+     for root, _, filenames in os.walk(data_dir):
+         for filename in filenames:
+             file_path = os.path.join(root, filename)
+             rel_path = os.path.relpath(file_path, data_dir)
+             snapshot_rel_path = os.path.join(snapshot_subdir, rel_path).replace(
+                 "\\", "/"
+             )
+             file_info = {
+                 "name": filename,
+                 "path": snapshot_rel_path,
+                 "sha": Metadata.sha256(file_path),
+             }
+             files.append(file_info)
+
+     return {"data": files}
+
+ @staticmethod
+ def create_compressed_snapshot_json(compressed_file, rel_path):
+     compressed_file_size = os.path.getsize(compressed_file)
+     file_info = {
+         "name": os.path.basename(compressed_file),
+         "path": rel_path,
+         "sha": Metadata.sha256(compressed_file),
+         "size": compressed_file_size,
+     }
+
+     return {"data": file_info}
+

def run_command(command):
logging.info(f"Running command: {' '.join(command)}")
@@ -170,57 +209,38 @@ def create_snapshot(self):
logging.info(f"Creating snapshot directory '{snapshot_dir}'")
os.makedirs(snapshot_dir, exist_ok=True)

- data_dir = self.args.data_path
- snapshot_metadata = {
-     "name": timestamp_str,
-     "created_at": get_current_time_iso(),
-     "compress": self.args.compress,
-     "total_size": 0,
-     "data": [],
- }
-
- for root, _, files in os.walk(data_dir):
-     for file in files:
-         file_path = os.path.join(root, file)
-         file_name, file_ext = os.path.splitext(file)
-         compressed_file_name = f"{file_name}{file_ext}.{self.args.compress}"
-         compressed_file_path = os.path.join(snapshot_dir, compressed_file_name)
-         rel_path = os.path.relpath(
-             compressed_file_path, self.args.snapshot_path
-         )
-
-         if rel_path.startswith("snapshots" + os.path.sep):
-             rel_path = rel_path[len("snapshots" + os.path.sep) :]
-
-         if self.args.compress == "zip":
-             logging.info(f"Creating ZIP archive '{compressed_file_path}'")
-             with zipfile.ZipFile(
-                 compressed_file_path, "w", zipfile.ZIP_DEFLATED
-             ) as zipf:
-                 zipf.write(file_path, file)
-         elif self.args.compress == "tar":
-             logging.info(f"Creating TAR archive '{compressed_file_path}'")
-             subprocess.run(
-                 [
-                     "tar",
-                     "-cvf",
-                     compressed_file_path,
-                     "-C",
-                     os.path.dirname(file_path),
-                     file,
-                 ]
-             )
-
-         compressed_file_size = os.path.getsize(compressed_file_path)
-         snapshot_metadata["total_size"] += compressed_file_size
+ data_dir = os.path.join(snapshot_dir, "data")
+ if self.args.compress == "none":
+     logging.info(f"Copying data from '{self.args.data_path}' to '{data_dir}'")
+     shutil.copytree(self.args.data_path, data_dir)
+     snapshot_metadata = Metadata.create_snapshot_json(data_dir, timestamp_str)
+ elif self.args.compress == "zip":
+     zip_file = os.path.join(snapshot_dir, "data.zip")
+     rel = os.path.relpath(zip_file, snapshot_dir)
+     meta_path = os.path.join(timestamp_str, rel)
+     logging.info(f"Creating ZIP archive '{zip_file}'")
+     with zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED) as zipf:
+         for root, _, files in os.walk(self.args.data_path):
+             for file in files:
+                 full_path = os.path.join(root, file)
+                 rel_path = os.path.relpath(full_path, self.args.data_path)
+                 zipf.write(full_path, os.path.join("data", rel_path))
+     snapshot_metadata = Metadata.create_compressed_snapshot_json(
+         zip_file, meta_path
+     )
+ elif self.args.compress == "tar":
+     tar_file = os.path.join(snapshot_dir, "data.tar.gz")
+     rel = os.path.relpath(tar_file, snapshot_dir)
+     meta_path = os.path.join(timestamp_str, rel)
+     logging.info(f"Creating TAR.GZ archive '{tar_file}'")
+     subprocess.run(["tar", "-czvf", tar_file, "-C", self.args.data_path, "."])
+     snapshot_metadata = Metadata.create_compressed_snapshot_json(
+         tar_file, meta_path
+     )

-         file_info = {
-             "name": file_name,
-             "path": rel_path,
-             "sha": Metadata.sha256(compressed_file_path),
-             "size": compressed_file_size,
-         }
-         snapshot_metadata["data"].append(file_info)
+ snapshot_metadata["name"] = timestamp_str
+ snapshot_metadata["created_at"] = get_current_time_iso()
+ snapshot_metadata["compress"] = self.args.compress

Metadata.update_metadata_file(self.args.snapshot_path, snapshot_metadata)
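
A note on the tar branch above: the commit shells out to the external tar binary via subprocess. Purely as a design observation (not what this commit does), the same archive — the contents of the data folder rooted at '.' inside a gzip-compressed tarball — can be produced with the standard-library tarfile module, which removes the dependency on tar being installed:

import tarfile

def make_tar_gz(data_path, tar_file):
    # Roughly equivalent to: tar -czvf <tar_file> -C <data_path> .
    with tarfile.open(tar_file, "w:gz") as tar:
        tar.add(data_path, arcname=".")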

@@ -309,7 +329,10 @@ def parse_args():
"--data_path", default=default_data_path, help="Path to data directory"
)
parser.add_argument(
"--compress", choices=["zip", "tar"], default="zip", help="Compression type"
"--compress",
choices=["none", "zip", "tar"],
default="none",
help="Compression type",
)
parser.add_argument(
"--retention", type=int, default=3, help="Number of snapshots to retain"
