
Upload and AssetBlob garbage collection implementation #2087

Merged · 5 commits · Jan 27, 2025
6 changes: 6 additions & 0 deletions dandiapi/api/admin.py
@@ -21,6 +21,7 @@
AssetBlob,
AuditRecord,
Dandiset,
GarbageCollectionEvent,
Upload,
UserMetadata,
Version,
@@ -266,3 +267,8 @@ def has_change_permission(self, request, obj=None):

def has_delete_permission(self, request, obj=None):
return False


@admin.register(GarbageCollectionEvent)
class GarbageCollectionEventAdmin(admin.ModelAdmin):
pass
New file in dandiapi/api/migrations/ (58 additions)
@@ -0,0 +1,58 @@
# Generated by Django 4.2.17 on 2024-12-11 17:17
from __future__ import annotations

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
dependencies = [
('api', '0013_remove_assetpath_consistent_slash_and_more'),
]

operations = [
migrations.CreateModel(
name='GarbageCollectionEvent',
fields=[
(
'id',
models.BigAutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name='ID'
),
),
('timestamp', models.DateTimeField(auto_now_add=True)),
(
'type',
models.CharField(
help_text='The model name of the records that were garbage collected.',
max_length=255,
),
),
],
),
migrations.CreateModel(
name='GarbageCollectionEventRecord',
fields=[
(
'id',
models.BigAutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name='ID'
),
),
(
'record',
models.JSONField(
help_text='JSON serialization of the record that was garbage collected.'
),
),
(
'event',
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name='records',
to='api.garbagecollectionevent',
),
),
],
),
]
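
For local testing, the new migration can be applied programmatically as well as from the command line; a minimal sketch, assuming the app label is api (consistent with the ('api', ...) dependency above):

from django.core.management import call_command

# Equivalent to `python manage.py migrate api` on the command line.
call_command('migrate', 'api')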
3 changes: 3 additions & 0 deletions dandiapi/api/models/__init__.py
@@ -4,6 +4,7 @@
from .asset_paths import AssetPath, AssetPathRelation
from .audit import AuditRecord
from .dandiset import Dandiset
from .garbage_collection import GarbageCollectionEvent, GarbageCollectionEventRecord
from .oauth import StagingApplication
from .upload import Upload
from .user import UserMetadata
@@ -16,6 +17,8 @@
'AssetPathRelation',
'AuditRecord',
'Dandiset',
'GarbageCollectionEvent',
'GarbageCollectionEventRecord',
'StagingApplication',
'Upload',
'UserMetadata',
26 changes: 26 additions & 0 deletions dandiapi/api/models/garbage_collection.py
@@ -0,0 +1,26 @@
from __future__ import annotations

from django.db import models


class GarbageCollectionEvent(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
type = models.CharField(
max_length=255, help_text='The model name of the records that were garbage collected.'
)

def __str__(self) -> str:
return f'{self.type} ({self.timestamp})'


class GarbageCollectionEventRecord(models.Model):
event = models.ForeignKey(
GarbageCollectionEvent, on_delete=models.CASCADE, related_name='records'
)

record = models.JSONField(
help_text='JSON serialization of the record that was garbage collected.'
)

def __str__(self) -> str:
return f'{self.event.type} record'
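
Since record stores a single entry of Django's list-based JSON serialization format (see the service below), a deleted row can in principle be re-created by feeding that entry back through django.core.serializers.deserialize. A minimal sketch, not part of this PR; restore_record is a hypothetical helper:

import json

from django.core import serializers

from dandiapi.api.models import GarbageCollectionEventRecord


def restore_record(gc_record: GarbageCollectionEventRecord) -> None:
    # serializers.deserialize expects the same list-of-objects JSON that
    # serializers.serialize produces, so re-wrap the stored dict in a list.
    for obj in serializers.deserialize('json', json.dumps([gc_record.record])):
        obj.save()  # re-inserts the row with its original primary key

Restoring the database row alone does not bring back a blob already deleted from S3; that relies on the bucket's trailing-delete lifecycle rule referenced in the service below.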
121 changes: 121 additions & 0 deletions dandiapi/api/services/garbage_collection/__init__.py
@@ -0,0 +1,121 @@
from __future__ import annotations

from concurrent.futures import Future, ThreadPoolExecutor, wait
from datetime import timedelta
import json

from celery.utils.log import get_task_logger
from django.core import serializers
from django.db import transaction
from django.utils import timezone
from more_itertools import chunked

from dandiapi.api.models import (
AssetBlob,
GarbageCollectionEvent,
GarbageCollectionEventRecord,
Upload,
)
from dandiapi.api.storage import DandiMultipartMixin

logger = get_task_logger(__name__)

UPLOAD_EXPIRATION_TIME = DandiMultipartMixin._url_expiration # noqa: SLF001
ASSET_BLOB_EXPIRATION_TIME = timedelta(days=7)

GARBAGE_COLLECTION_EVENT_CHUNK_SIZE = 1000

# How long to keep GarbageCollectionEvent records around for after the garbage collection
# is performed to allow for restoration of deleted records.
# This should be equal to the time set in the "trailing delete" lifecycle rule.
RESTORATION_WINDOW = timedelta(
days=30
) # TODO: pick this up from env var set by Terraform to ensure consistency?


def _garbage_collect_uploads() -> int:
qs = Upload.objects.filter(
created__lt=timezone.now() - UPLOAD_EXPIRATION_TIME,
)
if not qs.exists():
return 0

deleted_records = 0
futures: list[Future] = []

with transaction.atomic(), ThreadPoolExecutor() as executor:
event = GarbageCollectionEvent.objects.create(type=Upload.__name__)
for uploads_chunk in chunked(qs.iterator(), GARBAGE_COLLECTION_EVENT_CHUNK_SIZE):
GarbageCollectionEventRecord.objects.bulk_create(
GarbageCollectionEventRecord(
event=event, record=json.loads(serializers.serialize('json', [u]))[0]
)
for u in uploads_chunk
)

# Delete the blobs from S3
futures.append(
executor.submit(
lambda chunk: [u.blob.delete(save=False) for u in chunk],
uploads_chunk,
)
)

deleted_records += Upload.objects.filter(
pk__in=[u.pk for u in uploads_chunk],
).delete()[0]

wait(futures)

return deleted_records


def _garbage_collect_asset_blobs() -> int:
qs = AssetBlob.objects.filter(
assets__isnull=True,
created__lt=timezone.now() - ASSET_BLOB_EXPIRATION_TIME,
)
if not qs.exists():
return 0

deleted_records = 0
futures: list[Future] = []

with transaction.atomic(), ThreadPoolExecutor() as executor:
event = GarbageCollectionEvent.objects.create(type=AssetBlob.__name__)
for asset_blobs_chunk in chunked(qs.iterator(), GARBAGE_COLLECTION_EVENT_CHUNK_SIZE):
GarbageCollectionEventRecord.objects.bulk_create(
GarbageCollectionEventRecord(
event=event, record=json.loads(serializers.serialize('json', [a]))[0]
)
for a in asset_blobs_chunk
)

# Delete the blobs from S3
futures.append(
executor.submit(
lambda chunk: [a.blob.delete(save=False) for a in chunk],
asset_blobs_chunk,
)
)

deleted_records += AssetBlob.objects.filter(
pk__in=[a.pk for a in asset_blobs_chunk],
).delete()[0]

wait(futures)

return deleted_records


def garbage_collect():
with transaction.atomic():
garbage_collected_uploads = _garbage_collect_uploads()
garbage_collected_asset_blobs = _garbage_collect_asset_blobs()

GarbageCollectionEvent.objects.filter(
timestamp__lt=timezone.now() - RESTORATION_WINDOW
).delete()

logger.info('Garbage collected %s Uploads.', garbage_collected_uploads)
logger.info('Garbage collected %s AssetBlobs.', garbage_collected_asset_blobs)
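
Outside the scheduled task, the service can be exercised and audited from a Django shell. A minimal sketch (nothing here is part of the PR itself):

from dandiapi.api.models import GarbageCollectionEvent
from dandiapi.api.services.garbage_collection import garbage_collect

garbage_collect()

# Inspect the most recent Upload collection event and its per-record audit trail.
event = GarbageCollectionEvent.objects.filter(type='Upload').latest('timestamp')
print(event.timestamp, event.records.count())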
10 changes: 10 additions & 0 deletions dandiapi/api/tasks/scheduled.py
@@ -23,6 +23,7 @@
from dandiapi.api.mail import send_pending_users_message
from dandiapi.api.models import UserMetadata, Version
from dandiapi.api.models.asset import Asset
from dandiapi.api.services.garbage_collection import garbage_collect
from dandiapi.api.services.metadata import version_aggregate_assets_summary
from dandiapi.api.services.metadata.exceptions import VersionMetadataConcurrentlyModifiedError
from dandiapi.api.tasks import (
@@ -127,6 +128,11 @@ def refresh_materialized_view_search() -> None:
cursor.execute('REFRESH MATERIALIZED VIEW CONCURRENTLY asset_search;')


@shared_task(soft_time_limit=60)
def garbage_collection() -> None:
garbage_collect()


def register_scheduled_tasks(sender: Celery, **kwargs):
"""Register tasks with a celery beat schedule."""
logger.info(
@@ -153,3 +159,7 @@ def register_scheduled_tasks(sender: Celery, **kwargs):

# Process new S3 logs every hour
sender.add_periodic_task(timedelta(hours=1), collect_s3_log_records_task.s())

# Run garbage collection once a day
# TODO: enable this once we're ready to run garbage collection automatically
# sender.add_periodic_task(timedelta(days=1), garbage_collection.s())
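
Until the periodic schedule is enabled, the task can still be dispatched on demand; a minimal sketch, assuming a running Celery worker:

from dandiapi.api.tasks.scheduled import garbage_collection

# Queue a one-off run; the 60-second soft time limit above still applies.
garbage_collection.delay()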