Include robots.txt in UI and API for handling of web crawlers #2084

Merged
merged 7 commits into from Nov 25, 2024
Changes from all commits
2 changes: 1 addition & 1 deletion .gitignore
@@ -125,5 +125,5 @@ dmypy.json

# Editor settings
.vscode

.idea/
.DS_Store
13 changes: 13 additions & 0 deletions dandiapi/api/tests/test_robots.py
@@ -0,0 +1,13 @@
from __future__ import annotations

from django.urls import reverse
import pytest


@pytest.mark.django_db
def test_robots_txt(api_client):
    response = api_client.get(reverse('robots_txt'))
    assert response.status_code == 200
    assert response['Content-Type'] == 'text/plain'
    expected_content = 'User-agent: *\nDisallow: /'
    assert response.content.decode('utf-8').strip() == expected_content.strip()
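
The api_client fixture is not defined in this diff; it presumably comes from the project's shared test fixtures. A minimal sketch of what it likely provides, assuming Django REST Framework's APIClient (the fixture's name and location here are assumptions, not part of this PR):

# conftest.py (hypothetical location)
import pytest
from rest_framework.test import APIClient


@pytest.fixture
def api_client() -> APIClient:
    # An unauthenticated client suffices: robots.txt is a public endpoint.
    return APIClient()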
2 changes: 2 additions & 0 deletions dandiapi/api/views/__init__.py
@@ -5,6 +5,7 @@
from .dandiset import DandisetViewSet
from .dashboard import DashboardView, mailchimp_csv_view, user_approval_view
from .info import info_view
from .robots import robots_txt_view
from .root import root_content_view
from .stats import stats_view
from .upload import (
@@ -36,4 +37,5 @@
    'stats_view',
    'info_view',
    'root_content_view',
    'robots_txt_view',
]
8 changes: 8 additions & 0 deletions dandiapi/api/views/robots.py
@@ -0,0 +1,8 @@
from __future__ import annotations

from django.http import HttpResponse


def robots_txt_view(request):
    content = 'User-agent: *\nDisallow: /'
    return HttpResponse(content, content_type='text/plain')
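
As a sanity check on the served policy, Python's standard-library robot-exclusion parser confirms that this blanket rule refuses every compliant crawler on every path (the hostname below is a placeholder):

from urllib import robotparser

rp = robotparser.RobotFileParser()
rp.parse('User-agent: *\nDisallow: /'.splitlines())

# Every compliant crawler is refused every path under this policy.
assert not rp.can_fetch('Googlebot', 'https://example.test/api/dandisets/')
assert not rp.can_fetch('SomeOtherBot', 'https://example.test/')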
2 changes: 2 additions & 0 deletions dandiapi/urls.py
@@ -19,6 +19,7 @@
    blob_read_view,
    info_view,
    mailchimp_csv_view,
    robots_txt_view,
    root_content_view,
    stats_view,
    upload_complete_view,
@@ -80,6 +81,7 @@ def to_url(self, value):
register_converter(DandisetIDConverter, 'dandiset_id')
urlpatterns = [
    path('', root_content_view),
    path('robots.txt', robots_txt_view, name='robots_txt'),
    path('api/', include(router.urls)),
    path('api/auth/token/', auth_token_view, name='auth-token'),
    path('api/stats/', stats_view),
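
Because the route is registered at the URL root with name='robots_txt', the reverse() lookup used in the test resolves to the bare top-level path:

from django.urls import reverse

# The named route sits at the site root, so reversal yields the bare path.
assert reverse('robots_txt') == '/robots.txt'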
2 changes: 2 additions & 0 deletions web/public/robots.txt
@@ -0,0 +1,2 @@
User-agent: *
Disallow: /
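
This static copy under web/public/ mirrors the API view byte for byte, so the frontend (whose build conventionally publishes public/ at the site root) and the Django API both answer GET /robots.txt with the same disallow-all policy. A sketch of an end-to-end check, with placeholder hostnames standing in for the real UI and API deployments:

import requests  # assumption: requests is installed in the checking environment

EXPECTED = 'User-agent: *\nDisallow: /'

# Hostnames below are illustrative placeholders, not the project's real hosts.
for host in ('https://ui.example.test', 'https://api.example.test'):
    response = requests.get(f'{host}/robots.txt', timeout=10)
    assert response.text.strip() == EXPECTED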