diff --git a/.app-template/urls.py-tpl b/.app-template/urls.py-tpl index 71d3b4d5..02eab9de 100644 --- a/.app-template/urls.py-tpl +++ b/.app-template/urls.py-tpl @@ -1,8 +1,6 @@ -from django.urls import path - -from . import views +from django.urls import URLPattern app_name = "{{ app_name }}" -urlpatterns = [ +urlpatterns: list[URLPattern] = [ ] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d722332c..e01513fe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -79,12 +79,14 @@ repos: rev: v1.36.1 hooks: - id: djlint-reformat-django + exclude: "ninja/swagger.html" - id: djlint-django - repo: https://github.com/shellcheck-py/shellcheck-py rev: v0.10.0.1 hooks: - id: shellcheck + exclude: ".envrc" args: [-e, SC1091] - repo: https://github.com/thibaudcolas/pre-commit-stylelint diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e081e82..87d9d205 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## 24.11.1 + +- Reduced the allowed times in which daily updates are run. We still support bi-hourly cron runs. +- Display a contact email to all authenticated users. +- Add an API: + - The documentation is available at `/api/docs/`. + - You can manage application tokens in your profile. + - You can get auth tokens from these application tokens to use the API. + ## 24.10.3 - Correct display of titles with HTML entities when adding an article. 
diff --git a/config/api.py b/config/api.py new file mode 100644 index 00000000..4595f8e8 --- /dev/null +++ b/config/api.py @@ -0,0 +1,11 @@ +from ninja import NinjaAPI +from ninja.security import django_auth + +from legadilo.feeds.api import feeds_api_router +from legadilo.reading.api import reading_api_router +from legadilo.users.api import AuthBearer, users_api_router + +api = NinjaAPI(title="Legadilo API", auth=[django_auth, AuthBearer()], docs_url="/docs/") +api.add_router("reading/", reading_api_router) +api.add_router("feeds/", feeds_api_router) +api.add_router("users/", users_api_router) diff --git a/config/settings.py b/config/settings.py index 65306ed4..3690d7af 100644 --- a/config/settings.py +++ b/config/settings.py @@ -2,6 +2,7 @@ import concurrent import warnings +from datetime import timedelta from pathlib import Path import asgiref @@ -113,6 +114,7 @@ "django.forms", ] THIRD_PARTY_APPS = [ + "ninja", "django_version_checks", "extra_checks", "anymail", @@ -607,8 +609,18 @@ def before_send_to_sentry(event, hint): print("Failed to import sentry_sdk") # noqa: T201 print found +# django-ninja +# ------------------------------------------------------------------------------ +# See https://django-ninja.dev/reference/settings/ +NINJA_PAGINATION_MAX_LIMIT = 500 +NINJA_PAGINATION_CLASS = "legadilo.utils.pagination.LimitOffsetPagination" + + # Your stuff... 
# ------------------------------------------------------------------------------ ARTICLE_FETCH_TIMEOUT = env.int("LEGADILO_ARTICLE_FETCH_TIMEOUT", default=50) RSS_FETCH_TIMEOUT = env.int("LEGADILO_RSS_FETCH_TIMEOUT", default=300) CONTACT_EMAIL = env.str("LEGADILO_CONTACT_EMAIL", default=None) +TOKEN_LENGTH = 50 +JWT_ALGORITHM = "HS256" +JWT_MAX_AGE = timedelta(hours=4) diff --git a/config/urls.py b/config/urls.py index 239f7a1d..9dccc3e9 100644 --- a/config/urls.py +++ b/config/urls.py @@ -4,6 +4,8 @@ from django.urls import include, path from django.views import defaults as default_views +from config.api import api + def _correct_admin_url(path: str) -> str: path = path.removeprefix("/") @@ -26,6 +28,7 @@ def _correct_admin_url(path: str) -> str: path("feeds/", include("legadilo.feeds.urls", namespace="feeds")), path("reading/", include("legadilo.reading.urls", namespace="reading")), path("import-export/", include("legadilo.import_export.urls", namespace="import_export")), + path("api/", api.urls), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) diff --git a/docs/adrs/0007-api.md b/docs/adrs/0007-api.md new file mode 100644 index 00000000..feb91991 --- /dev/null +++ b/docs/adrs/0007-api.md @@ -0,0 +1,103 @@ +# 7 - API + +* **Date:** 2024-10-26 +* **Status:** Accepted + +## Context + +I’d like to build a browser extension to save articles and subscribe to feeds more easily directly on the page we are on. +I think it’s a pretty common feature of feed aggregators and link savers (mostly link savers to be honest). +We have two options: +* Build a proper API and make the browser extension use this API: + * We have several possibilities to help us build the backend. + DRF and Django Ninja being the two real contenders. + Since we have only a simple feature set and little time, I think Django Ninja is a better fit here: it’s easier to use and should allow us to develop the API faster. 
+ It’s also async compatible out of the box and based on Pydantic (a package I use at work) which we can reuse for other validation! + * We will have to develop the API and dig a bit into how to do it properly with Django Ninja. + * It will unlock other possibilities in the long run in how to use the app (mobile app, integration with 3rd parties…). + * It should be easier to authenticate to the app: we can manage authentication differently and let the user configure the extension with an application token. + We could let the cookies be transmitted and rely on that (not sure how it will behave though). + And it makes the extension very tied to the connection to the app in the browser. + Whereas normal flow in this case is to never be disconnected. + Handling connection with MFA might be a problem too: we can’t display much in the extension and may have to redirect the user to the app anyway to login. + That would be a very poor UX. + * It should also be easier to post and retrieve data to manipulate it as part of the browser extension. +* Call the standard views (i.e. the views that power the app): + * We will have to adapt how we manage CSRF tokens to be able to supply them to our views. + It’s doable, I’ve done it in the past, but I always disliked it. + * We will have to post data as form data. + Not the cleanest way, but manageable. + Having a view that accepts both JSON and form is too much of a hassle in bare Django for me to do that. + And if I’m not building an API, there isn’t really a point in bringing a package just for that. + * We will manipulate HTML. + It may ease display (but I don’t think we will have anything fancy to display) at the cost of harder manipulations. + And we won’t be able to use the "normal" templates since we won’t have the same display. + This implies making some views or templates even more complex. + +I think it’s worth trying to develop the API and see how it goes. +If it’s not too much work, I think we should commit to it. 
+ +See: +* https://github.com/Jenselme/legadilo/issues/318 +* https://github.com/Jenselme/legadilo/issues/320 +* https://github.com/Jenselme/legadilo/issues/156 + + +## Decisions + +The test is a success and I think I achieved something good. +Let’s commit the API with Ninja! + +### Tokens and API protection + +Auth: Django Ninja doesn’t provide anything out of the box to build an auth with tokens. +It does however allow us to protect endpoints, routers or the full API with authentication. +It also gives us the basic tooling to fetch a token from the `Authorization` header and validate it. +If it’s valid, access is granted to the endpoint, if not the user gets an error message. +Django Ninja also allows us to have a list of authentication methods to use, so we can use token based auth for the extension and cookie auth as usual to try stuff in the browser (and in tests). + +How to create tokens to access the API? +* We could create them from username and password. + But as part of the extension, this would involve storing them as part of the configuration. + I don’t think it’s desirable. + It would also make working with MFA harder. + And if the password needs to be changed, it would impact all consumers of the API. +* I think it’s safer to have the user create application "passwords" like in many other apps and create the access tokens from that. + These application passwords would act as refresh tokens. +* These passwords won’t be transmitted much over the network: we will transmit them only to get an access token. + They can be revoked easily from the user profile without impacting anything else. + We should be able to give them an expiration date just in case. +* They may be transmitted a bit too much and I may be relying too much on my knowledge to build this. + Not a problem right now, but I’ll have to recheck all this when I have more time. + Right now, I think it’s safe enough for myself. + +### Summary + +* We will build the API with Django Ninja. 
+* We will authenticate to the API with application tokens that will be used to create an actual access token usable to access the API. + They will: + * Be a long random string stored in the database generated with the `secrets` module. + * Be visible to the user only at creation to prevent stealing. + * Have an optional expiration date. + If unset, they are valid until manually deleted. +* The access tokens: + * Will be relatively short-lived to prevent attacks if they leak. + * Will be in the JWT format and will contain the name of the application token used to generate it (mostly for debugging purposes) and will contain the id of the user to use. + This will enable us to store and validate the token using a well-known format with well-known libraries. + + +## Consequences + +* Let’s use Pydantic instead of JSON schemas to validate our JSON model fields and the data we read from external sources. + This will help us limit the number of libraries we use. + At this stage, I think Pydantic is a safe choice: it’s well known and maintained. + It’s also used a lot in the community nowadays and has become very popular. +* The API won’t allow all capabilities at first to save time. + We will develop first and foremost what we need for the extension. +* We already unlock some API usage for everybody! + We will improve it later as part of https://github.com/Jenselme/legadilo/issues/320. +* The API will be documented more or less automatically and browsable thanks to Swagger. +* We should dig further to make sure our model is secured. + This is not a problem *right now* since we don’t have users, but can become one in the future. + I’m mostly thinking of [Wallabag](https://doc.wallabag.org/en/developer/api/oauth) which has a different way to handle tokens. 
+ This is logged here: https://github.com/Jenselme/legadilo/issues/325 diff --git a/legadilo/core/forms/widgets.py b/legadilo/core/forms/widgets.py index ac25c072..9f78fb2c 100644 --- a/legadilo/core/forms/widgets.py +++ b/legadilo/core/forms/widgets.py @@ -55,3 +55,7 @@ def format_value(self, value): return value except (JSONDecodeError, ValueError, TypeError): return super().format_value(value) + + +class DateTimeWidget(widgets.DateTimeInput): + input_type = "datetime-local" diff --git a/legadilo/core/models/timezone.py b/legadilo/core/models/timezone.py index 7e554e56..7f9d4e8b 100644 --- a/legadilo/core/models/timezone.py +++ b/legadilo/core/models/timezone.py @@ -15,6 +15,7 @@ # along with this program. If not, see . from typing import TYPE_CHECKING +from zoneinfo import ZoneInfo from django.db import models @@ -45,3 +46,7 @@ def __str__(self): def __repr__(self): return f"Timezone(name={self.name})" + + @property + def zone_info(self) -> ZoneInfo: + return ZoneInfo(self.name) diff --git a/legadilo/feeds/api.py b/legadilo/feeds/api.py new file mode 100644 index 00000000..f085ca98 --- /dev/null +++ b/legadilo/feeds/api.py @@ -0,0 +1,289 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from datetime import datetime +from http import HTTPStatus +from operator import xor +from typing import Annotated, Self + +from asgiref.sync import sync_to_async +from django.shortcuts import aget_object_or_404 +from ninja import ModelSchema, PatchDict, Router, Schema +from ninja.errors import ValidationError as NinjaValidationError +from ninja.pagination import paginate +from pydantic import model_validator + +from legadilo.feeds import constants +from legadilo.feeds.models import Feed, FeedCategory, FeedTag +from legadilo.feeds.services.feed_parsing import ( + FeedFileTooBigError, + MultipleFeedFoundError, + NoFeedUrlFoundError, + get_feed_data, +) +from legadilo.reading.models import Tag +from legadilo.users.models import User +from legadilo.users.user_types import AuthenticatedApiRequest +from legadilo.utils.api import ApiError, update_model_from_patch_dict +from legadilo.utils.http_utils import get_rss_async_client +from legadilo.utils.validators import ( + CleanedString, + FullSanitizeValidator, + ValidUrlValidator, + remove_falsy_items, +) + +feeds_api_router = Router(tags=["feeds"]) + + +class OutFeedCategorySchema(ModelSchema): + class Meta: + model = FeedCategory + exclude = ("user", "created_at", "updated_at") + + +class OutFeedSchema(ModelSchema): + category: OutFeedCategorySchema | None + + class Meta: + model = Feed + exclude = ("user", "created_at", "updated_at", "articles") + + +@feeds_api_router.get( + "", response=list[OutFeedSchema], url_name="list_feeds", summary="List all you feeds" +) +@paginate +async def list_feeds_view(request: AuthenticatedApiRequest): # noqa: RUF029 paginate is async! 
+ return Feed.objects.get_queryset().for_user(request.auth).select_related("category") + + +class FeedSubscription(Schema): + feed_url: Annotated[str, ValidUrlValidator] + refresh_delay: constants.FeedRefreshDelays = constants.FeedRefreshDelays.DAILY_AT_NOON + article_retention_time: int = 0 + category_id: int | None = None + tags: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + open_original_link_by_default: bool = False + + +@feeds_api_router.post( + "", + response={ + HTTPStatus.CREATED: OutFeedSchema, + HTTPStatus.CONFLICT: ApiError, + HTTPStatus.NOT_ACCEPTABLE: ApiError, + }, + url_name="subscribe_to_feed", + summary="Subscribe to feed from its link", +) +async def subscribe_to_feed_view(request: AuthenticatedApiRequest, payload: FeedSubscription): + """Many parameters of the feed can be customized directly at creation.""" + category = await _get_category(request.auth, payload.category_id) + + try: + async with get_rss_async_client() as client: + feed_medata = await get_feed_data(payload.feed_url, client=client) + tags = await sync_to_async(Tag.objects.get_or_create_from_list)(request.auth, payload.tags) + feed, created = await sync_to_async(Feed.objects.create_from_metadata)( + feed_medata, + request.auth, + payload.refresh_delay, + payload.article_retention_time, + tags, + category, + open_original_link_by_default=payload.open_original_link_by_default, + ) + except (NoFeedUrlFoundError, MultipleFeedFoundError): + return HTTPStatus.NOT_ACCEPTABLE, { + "detail": "We failed to find a feed at the supplied URL." + } + except FeedFileTooBigError: + return HTTPStatus.NOT_ACCEPTABLE, {"detail": "The feed is too big."} + except Exception: # noqa: BLE001 Do not catch blind exception: `Exception` + # That's the catch of weird validation, parsing and network errors. + return HTTPStatus.NOT_ACCEPTABLE, { + "detail": "We failed to access or parse the feed you supplied. Please make sure it is " + "accessible and valid." 
+ } + + if not created: + return HTTPStatus.CONFLICT, {"detail": "You are already subscribed to this feed"} + + return HTTPStatus.CREATED, feed + + +async def _get_category(user: User, category_id: int | None) -> FeedCategory | None: + if category_id is None: + return None + + try: + return await FeedCategory.objects.aget(id=category_id, user=user) + except FeedCategory.DoesNotExist as e: + raise NinjaValidationError([ + {"category_id": f"We failed to find the category with id: {category_id}"} + ]) from e + + +@feeds_api_router.get( + "/{int:feed_id}/", + response=OutFeedSchema, + url_name="get_feed", + summary="View the details of a specific feed", +) +async def get_feed_view(request: AuthenticatedApiRequest, feed_id: int): + return await aget_object_or_404( + Feed.objects.get_queryset().select_related("category"), id=feed_id, user=request.auth + ) + + +class FeedUpdate(Schema): + disabled_reason: Annotated[str, FullSanitizeValidator] = "" + disabled_at: datetime | None = None + category_id: int | None = None + tags: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + refresh_delay: constants.FeedRefreshDelays + article_retention_time: int + open_original_link_by_default: bool + + @model_validator(mode="after") + def check_disabled(self) -> Self: + if xor(bool(self.disabled_reason), bool(self.disabled_at)): + raise ValueError( + "You must supply none of disabled_reason and disabled_at or both of them" + ) + + if self.disabled_reason is None: + self.disabled_reason = "" + + return self + + +@feeds_api_router.patch( + "/{int:feed_id}/", response=OutFeedSchema, url_name="update_feed", summary="Update a feed" +) +async def update_feed_view( + request: AuthenticatedApiRequest, + feed_id: int, + payload: PatchDict[FeedUpdate], # type: ignore[type-arg] +): + qs = Feed.objects.get_queryset().select_related("category") + feed = await aget_object_or_404(qs, id=feed_id, user=request.auth) + + if (tags := payload.pop("tags", None)) is not None: + await 
_update_feed_tags(request.auth, feed, tags) + + # We must refresh to update generated fields & tags. + await update_model_from_patch_dict(feed, payload, must_refresh=True, refresh_qs=qs) + + return feed + + +async def _update_feed_tags(user: User, feed: Feed, new_tags: tuple[str, ...]): + tags = await sync_to_async(Tag.objects.get_or_create_from_list)(user, new_tags) + await sync_to_async(FeedTag.objects.associate_feed_with_tag_slugs)( + feed, [tag.slug for tag in tags], clear_existing=True + ) + + +@feeds_api_router.delete( + "/{int:feed_id}/", + response={HTTPStatus.NO_CONTENT: None}, + url_name="delete_feed", + summary="Delete a feed", +) +async def delete_feed_view(request: AuthenticatedApiRequest, feed_id: int): + feed = await aget_object_or_404(Feed, id=feed_id, user=request.auth) + + await feed.adelete() + + return HTTPStatus.NO_CONTENT, None + + +@feeds_api_router.get( + "/categories/", + response=list[OutFeedCategorySchema], + url_name="list_feed_categories", + summary="List all your feed categories", +) +@paginate +async def list_categories_view(request: AuthenticatedApiRequest): # noqa: RUF029 paginate is async! + return FeedCategory.objects.get_queryset().for_user(request.auth) + + +class FeedCategoryPayload(Schema): + title: str + + +@feeds_api_router.post( + "/categories/", + response={HTTPStatus.CREATED: OutFeedCategorySchema, HTTPStatus.CONFLICT: ApiError}, + url_name="create_feed_category", + summary="Create a feed category", +) +async def create_category_view(request: AuthenticatedApiRequest, payload: FeedCategoryPayload): + # For some reason, I always get a 400 error with a useless HTML body if I do a try/catch with + # the IntegrityError. Let's use aget_or_create to prevent this. 
+ category, created = await FeedCategory.objects.aget_or_create( + title=payload.title, user=request.auth + ) + + if not created: + return HTTPStatus.CONFLICT, {"detail": "A category with this title already exists."} + + return HTTPStatus.CREATED, category + + +@feeds_api_router.get( + "/categories/{int:category_id}", + response=OutFeedCategorySchema, + url_name="get_feed_category", + summary="View a specific feed category", +) +async def get_category_view(request: AuthenticatedApiRequest, category_id: int): + return await aget_object_or_404(FeedCategory, id=category_id, user=request.auth) + + +@feeds_api_router.patch( + "/categories/{int:category_id}/", + response=OutFeedCategorySchema, + url_name="update_feed_category", + summary="Update a feed category", +) +async def update_category_view( + request: AuthenticatedApiRequest, + category_id: int, + payload: PatchDict[FeedCategoryPayload], # type: ignore[type-arg] +) -> FeedCategory: + category = await aget_object_or_404(FeedCategory, id=category_id, user=request.auth) + + await update_model_from_patch_dict(category, payload) + + return category + + +@feeds_api_router.delete( + "/categories/{int:category_id}/", + url_name="delete_feed_category", + response={HTTPStatus.NO_CONTENT: None}, + summary="Delete a feed category", +) +async def delete_category_view(request: AuthenticatedApiRequest, category_id: int): + category = await aget_object_or_404(FeedCategory, id=category_id, user=request.auth) + + await category.adelete() + + return HTTPStatus.NO_CONTENT, None diff --git a/legadilo/feeds/migrations/0009_feedupdate_ignored_article_links_and_more.py b/legadilo/feeds/migrations/0009_feedupdate_ignored_article_links_and_more.py index c25de1ca..97d66606 100644 --- a/legadilo/feeds/migrations/0009_feedupdate_ignored_article_links_and_more.py +++ b/legadilo/feeds/migrations/0009_feedupdate_ignored_article_links_and_more.py @@ -35,12 +35,7 @@ class Migration(migrations.Migration): field=models.JSONField( blank=True, 
default=list, - validators=[ - legadilo.utils.validators.JsonSchemaValidator({ - "items": {"type": "string"}, - "type": "array", - }) - ], + validators=[legadilo.utils.validators.list_of_strings_validator], ), ), migrations.AlterField( diff --git a/legadilo/feeds/models/feed.py b/legadilo/feeds/models/feed.py index 473b31b0..82a228af 100644 --- a/legadilo/feeds/models/feed.py +++ b/legadilo/feeds/models/feed.py @@ -208,7 +208,7 @@ def get_by_categories(self, user: User) -> dict[str | None, list[Feed]]: self.get_queryset() .for_user(user) .select_related("category") - .order_by("category__title") + .order_by("category__title", "id") ): category_title = feed.category.title if feed.category else None feeds_by_categories.setdefault(category_title, []).append(feed) diff --git a/legadilo/feeds/models/feed_update.py b/legadilo/feeds/models/feed_update.py index 6ef20a7b..954d5da8 100644 --- a/legadilo/feeds/models/feed_update.py +++ b/legadilo/feeds/models/feed_update.py @@ -22,7 +22,7 @@ from django.db import models from ...utils.time_utils import utcnow -from ...utils.validators import list_of_strings_json_schema_validator +from ...utils.validators import list_of_strings_validator from .. 
import constants if TYPE_CHECKING: @@ -115,7 +115,7 @@ def _get_feed_deactivation_error_time_window(self, feed: Feed) -> relativedelta: class FeedUpdate(models.Model): status = models.CharField(choices=constants.FeedUpdateStatus.choices, max_length=100) ignored_article_links = models.JSONField( - validators=[list_of_strings_json_schema_validator], blank=True, default=list + validators=[list_of_strings_validator], blank=True, default=list ) error_message = models.TextField(blank=True) technical_debug_data = models.JSONField(blank=True, null=True) diff --git a/legadilo/feeds/services/feed_parsing.py b/legadilo/feeds/services/feed_parsing.py index 0fc8a181..12807629 100644 --- a/legadilo/feeds/services/feed_parsing.py +++ b/legadilo/feeds/services/feed_parsing.py @@ -18,66 +18,50 @@ import re import sys import time -from dataclasses import dataclass from datetime import UTC, datetime from html import unescape from itertools import chain +from typing import Annotated from urllib.parse import parse_qs, urlparse import httpx from bs4 import BeautifulSoup from feedparser import FeedParserDict from feedparser import parse as parse_feed +from pydantic import BaseModel as BaseSchema from legadilo.reading.services.article_fetching import ( ArticleData, - build_article_data, parse_tags_list, ) from legadilo.utils.security import full_sanitize from ...utils.time_utils import dt_to_http_date -from ...utils.validators import normalize_url +from ...utils.validators import ( + FullSanitizeValidator, + ValidUrlValidator, + default_frozen_model_config, + normalize_url, + truncate, +) from .. 
import constants logger = logging.getLogger(__name__) -@dataclass(frozen=True) -class FeedData: - feed_url: str - site_url: str - title: str - description: str +class FeedData(BaseSchema): + model_config = default_frozen_model_config + + feed_url: Annotated[str, ValidUrlValidator] + site_url: Annotated[str, ValidUrlValidator] + title: Annotated[str, FullSanitizeValidator, truncate(constants.FEED_TITLE_MAX_LENGTH)] + description: Annotated[str, FullSanitizeValidator] feed_type: constants.SupportedFeedType etag: str last_modified: datetime | None articles: list[ArticleData] -def build_feed_data( # noqa: PLR0913 too many arguments - *, - feed_url: str, - site_url: str, - title: str, - description: str, - feed_type: constants.SupportedFeedType, - etag: str, - last_modified: datetime | None, - articles: list[ArticleData], -) -> FeedData: - return FeedData( - feed_url=feed_url, - site_url=site_url, - title=full_sanitize(title)[: constants.FEED_TITLE_MAX_LENGTH], - description=full_sanitize(description), - feed_type=feed_type, - articles=articles, - etag=etag, - last_modified=last_modified, - ) - - class NoFeedUrlFoundError(Exception): pass @@ -153,8 +137,9 @@ def _find_youtube_rss_feed_link(url: str) -> str: def build_feed_data_from_parsed_feed(parsed_feed: FeedParserDict, resolved_url: str) -> FeedData: - feed_title = full_sanitize(parsed_feed.feed.get("title", "")) - return build_feed_data( + feed_title = parsed_feed.feed.get("title", "") + + return FeedData( feed_url=resolved_url, site_url=_normalize_found_link(parsed_feed.feed.get("link", resolved_url)), title=feed_title, @@ -241,7 +226,7 @@ def _parse_articles_in_feed( article_link = _get_article_link(feed_url, entry) content = _get_article_content(entry) articles_data.append( - build_article_data( + ArticleData( external_article_id=entry.get("id", ""), title=entry.title, summary=_get_summary(article_link, entry), diff --git a/legadilo/feeds/tests/factories.py b/legadilo/feeds/tests/factories.py index 
0cb445e0..9f82cd5b 100644 --- a/legadilo/feeds/tests/factories.py +++ b/legadilo/feeds/tests/factories.py @@ -23,6 +23,7 @@ from .. import constants from ..models import Feed, FeedCategory, FeedDeletedArticle +from ..services.feed_parsing import FeedData class FeedCategoryFactory(DjangoModelFactory): @@ -64,3 +65,17 @@ class FeedDeletedArticleFactory(DjangoModelFactory): class Meta: model = FeedDeletedArticle + + +class FeedDataFactory(factory.DictFactory): + feed_url = factory.Sequence(lambda n: f"https://example.com/feeds-{n}.rss") + site_url = "https://example.com" + title = factory.Sequence(lambda n: f"Feed {n}") + description = "Some feed description" + feed_type = SupportedFeedType.rss + etag = "" + last_modified = None + articles = factory.ListFactory() + + class Meta: + model = FeedData diff --git a/legadilo/feeds/tests/snapshots/test_api/test_get/feed.json b/legadilo/feeds/tests/snapshots/test_api/test_get/feed.json new file mode 100644 index 00000000..8b5c5942 --- /dev/null +++ b/legadilo/feeds/tests/snapshots/test_api/test_get/feed.json @@ -0,0 +1,16 @@ +{ + "article_retention_time": 0, + "category": null, + "description": "", + "disabled_at": null, + "disabled_reason": "", + "enabled": true, + "feed_type": "rss", + "feed_url": "https://example.com/feed.rss", + "id": 1, + "open_original_link_by_default": false, + "refresh_delay": "DAILY_AT_NOON", + "site_url": "https://example.com", + "slug": "feed-slug", + "title": "Feed title" +} \ No newline at end of file diff --git a/legadilo/feeds/tests/snapshots/test_api/test_list/feeds.json b/legadilo/feeds/tests/snapshots/test_api/test_list/feeds.json new file mode 100644 index 00000000..f236fc17 --- /dev/null +++ b/legadilo/feeds/tests/snapshots/test_api/test_list/feeds.json @@ -0,0 +1,21 @@ +{ + "count": 1, + "items": [ + { + "article_retention_time": 0, + "category": null, + "description": "", + "disabled_at": null, + "disabled_reason": "", + "enabled": true, + "feed_type": "rss", + "feed_url": 
"https://example.com/feed.rss", + "id": 1, + "open_original_link_by_default": false, + "refresh_delay": "DAILY_AT_NOON", + "site_url": "https://example.com", + "slug": "feed-slug", + "title": "Feed title" + } + ] +} \ No newline at end of file diff --git a/legadilo/feeds/tests/snapshots/test_api/test_subscribe_to_feed/feed.json b/legadilo/feeds/tests/snapshots/test_api/test_subscribe_to_feed/feed.json new file mode 100644 index 00000000..67b9fdb5 --- /dev/null +++ b/legadilo/feeds/tests/snapshots/test_api/test_subscribe_to_feed/feed.json @@ -0,0 +1,20 @@ +{ + "article_retention_time": 100, + "category": { + "id": 10, + "slug": "category-slug", + "title": "Category title" + }, + "description": "Some feed description", + "disabled_at": null, + "disabled_reason": "", + "enabled": true, + "feed_type": "rss", + "feed_url": "https://example.com/feed.rss", + "id": 1, + "open_original_link_by_default": true, + "refresh_delay": "HOURLY", + "site_url": "https://example.com", + "slug": "feed-slug", + "title": "Feed title" +} \ No newline at end of file diff --git a/legadilo/feeds/tests/snapshots/test_api/test_subscribe_to_feed_with_just_url/feed.json b/legadilo/feeds/tests/snapshots/test_api/test_subscribe_to_feed_with_just_url/feed.json new file mode 100644 index 00000000..7f09ee52 --- /dev/null +++ b/legadilo/feeds/tests/snapshots/test_api/test_subscribe_to_feed_with_just_url/feed.json @@ -0,0 +1,16 @@ +{ + "article_retention_time": 0, + "category": null, + "description": "Some feed description", + "disabled_at": null, + "disabled_reason": "", + "enabled": true, + "feed_type": "rss", + "feed_url": "https://example.com/feed.rss", + "id": 1, + "open_original_link_by_default": false, + "refresh_delay": "DAILY_AT_NOON", + "site_url": "https://example.com", + "slug": "feed-slug", + "title": "Feed title" +} \ No newline at end of file diff --git a/legadilo/feeds/tests/snapshots/test_api/test_update/feed.json b/legadilo/feeds/tests/snapshots/test_api/test_update/feed.json new file 
mode 100644 index 00000000..0aaca195 --- /dev/null +++ b/legadilo/feeds/tests/snapshots/test_api/test_update/feed.json @@ -0,0 +1,20 @@ +{ + "article_retention_time": 600, + "category": { + "id": 10, + "slug": "category-slug", + "title": "Category title" + }, + "description": "", + "disabled_at": null, + "disabled_reason": "", + "enabled": true, + "feed_type": "rss", + "feed_url": "https://example.com/feed.rss", + "id": 1, + "open_original_link_by_default": false, + "refresh_delay": "TWICE_A_WEEK", + "site_url": "https://example.com", + "slug": "feed-slug", + "title": "Feed title" +} \ No newline at end of file diff --git a/legadilo/feeds/tests/snapshots/test_api/test_update_tags/feed.json b/legadilo/feeds/tests/snapshots/test_api/test_update_tags/feed.json new file mode 100644 index 00000000..e8e38208 --- /dev/null +++ b/legadilo/feeds/tests/snapshots/test_api/test_update_tags/feed.json @@ -0,0 +1,20 @@ +{ + "article_retention_time": 0, + "category": { + "id": 10, + "slug": "category-slug", + "title": "Category title" + }, + "description": "", + "disabled_at": null, + "disabled_reason": "", + "enabled": true, + "feed_type": "rss", + "feed_url": "https://example.com/feed.rss", + "id": 1, + "open_original_link_by_default": false, + "refresh_delay": "DAILY_AT_NOON", + "site_url": "https://example.com", + "slug": "feed-slug", + "title": "Feed title" +} \ No newline at end of file diff --git a/legadilo/feeds/tests/test_api.py b/legadilo/feeds/tests/test_api.py new file mode 100644 index 00000000..3eabf749 --- /dev/null +++ b/legadilo/feeds/tests/test_api.py @@ -0,0 +1,572 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from http import HTTPStatus +from typing import Any + +import httpx +import pytest +from django.urls import reverse + +from legadilo.feeds import constants +from legadilo.feeds.models import Feed, FeedCategory +from legadilo.feeds.tests.factories import FeedCategoryFactory, FeedDataFactory, FeedFactory +from legadilo.reading.tests.factories import TagFactory +from legadilo.utils.testing import serialize_for_snapshot +from legadilo.utils.time_utils import utcdt + + +def _prepare_feed_for_snapshot(data: dict[str, Any], feed: Feed) -> dict[str, Any]: + data = data.copy() + assert data["id"] == feed.id + assert data["slug"] == feed.slug + assert data["title"] == feed.title + assert data["feed_url"] == feed.feed_url + assert (feed.category_id is None and data["category"] is None) or ( + feed.category_id == data["category"]["id"] + ) + + data["id"] = 1 + data["slug"] = "feed-slug" + data["title"] = "Feed title" + data["feed_url"] = "https://example.com/feed.rss" + if data.get("category"): + data["category"]["id"] = 10 + data["category"]["title"] = "Category title" + data["category"]["slug"] = "category-slug" + + return data + + +@pytest.mark.django_db +class TestListCategoriesView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.url = reverse("api-1.0.0:list_feed_categories") + self.feed_category = FeedCategoryFactory(user=user) + + def test_not_logged_in(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_list_other_user(self, logged_in_other_user_sync_client): + response = 
logged_in_other_user_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + assert response.json() == {"count": 0, "items": []} + + def test_list(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(7): + response = logged_in_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + assert response.json() == { + "count": 1, + "items": [ + { + "id": self.feed_category.id, + "slug": self.feed_category.slug, + "title": self.feed_category.title, + } + ], + } + + +@pytest.mark.django_db +class TestCreateCategoryView: + @pytest.fixture(autouse=True) + def _setup_data(self): + self.url = reverse("api-1.0.0:create_feed_category") + + def test_not_logged_in(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_create(self, logged_in_sync_client, user, django_assert_num_queries): + with django_assert_num_queries(9): + response = logged_in_sync_client.post( + self.url, {"title": "Test category"}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.CREATED + assert FeedCategory.objects.count() == 1 + feed_category = FeedCategory.objects.get() + assert feed_category.title == "Test category" + assert feed_category.user == user + assert response.json() == { + "id": feed_category.id, + "slug": feed_category.slug, + "title": feed_category.title, + } + + def test_create_duplicate(self, user, logged_in_sync_client): + feed_category = FeedCategoryFactory(user=user) + + response = logged_in_sync_client.post( + self.url, {"title": feed_category.title}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.CONFLICT + assert response.json() == {"detail": "A category with this title already exists."} + assert FeedCategory.objects.count() == 1 + + +@pytest.mark.django_db +class TestGetCategoryView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed_category = 
FeedCategoryFactory(user=user) + self.url = reverse( + "api-1.0.0:get_feed_category", kwargs={"category_id": self.feed_category.id} + ) + + def test_not_logged_in(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_get_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_get(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(6): + response = logged_in_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + assert response.json() == { + "id": self.feed_category.id, + "slug": self.feed_category.slug, + "title": self.feed_category.title, + } + + +@pytest.mark.django_db +class TestUpdateCategoryView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed_category = FeedCategoryFactory(user=user) + self.url = reverse( + "api-1.0.0:update_feed_category", kwargs={"category_id": self.feed_category.id} + ) + + def test_not_logged_in(self, client): + response = client.patch(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_update_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.patch( + self.url, {"title": "New title"}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_update(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(7): + response = logged_in_sync_client.patch( + self.url, {"title": "New title"}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.OK + self.feed_category.refresh_from_db() + assert self.feed_category.title == "New title" + assert response.json() == { + "id": self.feed_category.id, + "slug": self.feed_category.slug, + "title": "New title", + } + + +@pytest.mark.django_db +class 
TestDeleteCategoryView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed_category = FeedCategoryFactory(user=user) + self.url = reverse( + "api-1.0.0:delete_feed_category", kwargs={"category_id": self.feed_category.id} + ) + + def test_not_logged_in(self, client): + response = client.delete(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_delete_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.delete(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_delete(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(8): + response = logged_in_sync_client.delete(self.url) + + assert response.status_code == HTTPStatus.NO_CONTENT + assert FeedCategory.objects.count() == 0 + + +@pytest.mark.django_db +class TestListFeedsView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed = FeedFactory(user=user) + self.url = reverse("api-1.0.0:list_feeds") + + def test_not_logged_in(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_list_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + assert response.json() == {"count": 0, "items": []} + + def test_list(self, logged_in_sync_client, django_assert_num_queries, snapshot): + with django_assert_num_queries(7): + response = logged_in_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + data = response.json() + assert len(data["items"]) == 1 + data["items"][0] = _prepare_feed_for_snapshot(data["items"][0], self.feed) + snapshot.assert_match(serialize_for_snapshot(data), "feeds.json") + + +@pytest.mark.django_db +class TestSubscribeToFeedView: + @pytest.fixture(autouse=True) + def _setup_data(self): + self.url = 
reverse("api-1.0.0:subscribe_to_feed") + + def test_not_logged_in(self, client): + response = client.post(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_subscribe_to_feed_invalid_url(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.post( + self.url, {"feed_url": "toto"}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY + assert response.json() == { + "detail": [ + { + "ctx": {"error": "toto is not a valid url"}, + "loc": ["body", "payload", "feed_url"], + "msg": "Value error, toto is not a valid url", + "type": "value_error", + } + ] + } + + def test_subscribe_to_feed_with_just_url( + self, user, logged_in_sync_client, mocker, django_assert_num_queries, snapshot + ): + feed_url = "https://example.com/feed.rss" + mocker.patch( + "legadilo.feeds.api.get_feed_data", return_value=FeedDataFactory(feed_url=feed_url) + ) + + with django_assert_num_queries(19): + response = logged_in_sync_client.post( + self.url, {"feed_url": feed_url}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.CREATED + assert Feed.objects.count() == 1 + feed = Feed.objects.get() + assert feed.feed_url == feed_url + assert feed.category is None + assert feed.user == user + snapshot.assert_match( + serialize_for_snapshot(_prepare_feed_for_snapshot(response.json(), feed)), "feed.json" + ) + + def test_subscribe_to_feed( + self, user, logged_in_sync_client, mocker, django_assert_num_queries, snapshot + ): + feed_url = "https://example.com/feed.rss" + mocker.patch( + "legadilo.feeds.api.get_feed_data", return_value=FeedDataFactory(feed_url=feed_url) + ) + category = FeedCategoryFactory(user=user) + existing_tag = TagFactory(user=user) + + with django_assert_num_queries(23): + response = logged_in_sync_client.post( + self.url, + { + "feed_url": feed_url, + "refresh_delay": constants.FeedRefreshDelays.HOURLY.value, + "article_retention_time": 100, 
+ "category_id": category.id, + "tags": ["", "

Some tag

", existing_tag.slug], + "open_original_link_by_default": True, + }, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.CREATED + assert Feed.objects.count() == 1 + feed = Feed.objects.get() + assert list(feed.tags.values_list("title", flat=True)) == ["Some tag", existing_tag.title] + assert feed.user == user + snapshot.assert_match( + serialize_for_snapshot(_prepare_feed_for_snapshot(response.json(), feed)), "feed.json" + ) + + def test_subscribe_to_feed_invalid_category(self, logged_in_sync_client): + response = logged_in_sync_client.post( + self.url, + {"feed_url": "https://example.com/feed.rss", "category_id": 0}, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY + assert response.json() == { + "detail": [{"category_id": "We failed to find the category with id: 0"}] + } + assert FeedCategory.objects.count() == 0 + + def test_subscribe_to_already_subscribed_feed(self, user, logged_in_sync_client, mocker): + feed_url = "https://example.com/feed.rss" + mocker.patch( + "legadilo.feeds.api.get_feed_data", return_value=FeedDataFactory(feed_url=feed_url) + ) + FeedFactory(user=user, feed_url=feed_url) + + response = logged_in_sync_client.post( + self.url, {"feed_url": feed_url}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.CONFLICT + assert response.json() == {"detail": "You are already subscribed to this feed"} + assert Feed.objects.count() == 1 + + def test_subscribe_to_feed_but_error_occurred(self, user, logged_in_sync_client, mocker): + mocker.patch("legadilo.feeds.api.get_feed_data", side_effect=httpx.HTTPError("Kaboom!")) + + response = logged_in_sync_client.post( + self.url, {"feed_url": "https://example.com/feed.rss"}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.NOT_ACCEPTABLE + assert response.json() == { + "detail": "We failed to access or parse the feed you supplied. 
Please make " + "sure it is accessible and valid." + } + + +@pytest.mark.django_db +class TestGetFeedView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed = FeedFactory(user=user) + self.url = reverse("api-1.0.0:get_feed", kwargs={"feed_id": self.feed.id}) + + def test_not_logged_in(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_get_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_get(self, logged_in_sync_client, django_assert_num_queries, snapshot): + with django_assert_num_queries(6): + response = logged_in_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + + snapshot.assert_match( + serialize_for_snapshot(_prepare_feed_for_snapshot(response.json(), self.feed)), + "feed.json", + ) + + +@pytest.mark.django_db +class TestUpdateFeedView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed_category = FeedCategoryFactory(user=user) + self.other_feed_category = FeedCategoryFactory(user=user) + self.feed = FeedFactory(user=user, category=self.feed_category) + self.url = reverse("api-1.0.0:update_feed", kwargs={"feed_id": self.feed.id}) + + def test_not_logged_in(self, client): + response = client.patch(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_update_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.patch( + self.url, {"category_id": self.other_feed_category.id}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_update_category(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(8): + response = logged_in_sync_client.patch( + self.url, + {"category_id": self.other_feed_category.id}, + 
content_type="application/json", + ) + + assert response.status_code == HTTPStatus.OK + self.feed.refresh_from_db() + assert self.feed.category_id == self.other_feed_category.id + + def test_unset_category(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(8): + response = logged_in_sync_client.patch( + self.url, + {"category_id": None}, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.OK + self.feed.refresh_from_db() + assert self.feed.category_id is None + + def test_disable_feed_invalid_payload(self, logged_in_sync_client): + response = logged_in_sync_client.patch( + self.url, + {"disabled_at": "2024-11-24 21:00:00Z"}, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY + assert response.json() == { + "detail": [ + { + "ctx": { + "error": "You must supply none of disabled_reason and disabled_at " + "or both of them" + }, + "loc": ["body", "payload"], + "msg": "Value error, You must supply none of disabled_reason and " + "disabled_at or both of them", + "type": "value_error", + } + ] + } + + def test_update(self, logged_in_sync_client, django_assert_num_queries, snapshot): + with django_assert_num_queries(8): + response = logged_in_sync_client.patch( + self.url, + { + "refresh_delay": constants.FeedRefreshDelays.TWICE_A_WEEK, + "article_retention_time": 600, + }, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.OK + self.feed.refresh_from_db() + assert not self.feed.open_original_link_by_default + assert self.feed.enabled + assert self.feed.refresh_delay == constants.FeedRefreshDelays.TWICE_A_WEEK + assert self.feed.article_retention_time == 600 + snapshot.assert_match( + serialize_for_snapshot(_prepare_feed_for_snapshot(response.json(), self.feed)), + "feed.json", + ) + + def test_disable_feed(self, logged_in_sync_client): + response = logged_in_sync_client.patch( + self.url, + {"disabled_at": 
"2024-11-24 21:00:00Z", "disabled_reason": "

Manually disabled

"}, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.OK + self.feed.refresh_from_db() + assert self.feed.disabled_at == utcdt(2024, 11, 24, 21) + assert self.feed.disabled_reason == "Manually disabled" + + def test_reenable_feed(self, logged_in_sync_client): + self.feed.disable("Manually disabled") + self.feed.save() + + response = logged_in_sync_client.patch( + self.url, + {"disabled_at": None, "disabled_reason": None}, + content_type="application/json", + ) + assert response.status_code == HTTPStatus.OK + self.feed.refresh_from_db() + assert self.feed.disabled_at is None + assert not self.feed.disabled_reason + + def test_update_tags(self, logged_in_sync_client, user, django_assert_num_queries, snapshot): + existing_tag = TagFactory(user=user, title="Tag to keep") + tag_to_delete = TagFactory(user=user, title="Tag to delete") + self.feed.tags.add(existing_tag, tag_to_delete) + + with django_assert_num_queries(18): + response = logged_in_sync_client.patch( + self.url, + { + "tags": [existing_tag.slug, "", "

New tag

"], + }, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.OK + self.feed.refresh_from_db() + assert list(self.feed.tags.all().values_list("title", flat=True)) == [ + "New tag", + "Tag to keep", + ] + snapshot.assert_match( + serialize_for_snapshot(_prepare_feed_for_snapshot(response.json(), self.feed)), + "feed.json", + ) + + +@pytest.mark.django_db +class TestDeleteFeedView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.feed = FeedFactory(user=user) + self.url = reverse("api-1.0.0:delete_feed", kwargs={"feed_id": self.feed.id}) + + def test_not_logged_in(self, client): + response = client.delete(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_delete_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.delete(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_delete(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(11): + response = logged_in_sync_client.delete(self.url) + + assert response.status_code == HTTPStatus.NO_CONTENT + assert Feed.objects.count() == 0 diff --git a/legadilo/feeds/tests/test_models/test_feed.py b/legadilo/feeds/tests/test_models/test_feed.py index 778c7d11..2838a852 100644 --- a/legadilo/feeds/tests/test_models/test_feed.py +++ b/legadilo/feeds/tests/test_models/test_feed.py @@ -54,11 +54,11 @@ title="Article 1", summary="Summary 1", content="Description 1", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=datetime.now(tz=UTC), @@ -331,11 +331,11 @@ def test_create_from_feed_data(self, user, django_assert_num_queries): title="Article 1", summary="Summary 
1", content="Description 1", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=datetime.now(tz=UTC), @@ -474,11 +474,11 @@ def test_update_feed(self, django_assert_num_queries): title="Article 1", summary="Summary 1", content="Description 1", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=datetime.now(tz=UTC), @@ -491,10 +491,10 @@ def test_update_feed(self, django_assert_num_queries): title="Article 2", summary="Summary 2", content="Description existing updated", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), link=existing_article.link, preview_picture_url="", preview_picture_alt="", @@ -538,11 +538,11 @@ def test_update_feed_with_deleted_articles(self, django_assert_num_queries): title="Article 1", summary="Summary 1", content="Description 1", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=datetime.now(tz=UTC), @@ -555,10 +555,10 @@ def test_update_feed_with_deleted_articles(self, django_assert_num_queries): title="Article 2", summary="Summary 2", content="Description", - table_of_content=[], - authors=["Author"], 
- contributors=[], - tags=[], + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), link=deleted_link, preview_picture_url="", preview_picture_alt="", diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-atom-feed/feed_data.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-atom-feed/feed_data.json index ae940993..5cb48b6d 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-atom-feed/feed_data.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-atom-feed/feed_data.json @@ -11,14 +11,14 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2005-11-09T00:23:47+00:00", + "published_at": "2005-11-09T00:23:47Z", "read_at": null, "source_title": "Sample Feed", "summary": "Watch out for nasty tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2005-11-09T11:56:34+00:00" + "updated_at": "2005-11-09T11:56:34Z" }, { "annotations": [], diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-rss-feed/feed_data.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-rss-feed/feed_data.json index 9965e9e9..0800c7da 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-rss-feed/feed_data.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_feed_url/sample-rss-feed/feed_data.json @@ -11,14 +11,14 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2002-09-05T00:00:01+00:00", + "published_at": 
"2002-09-05T00:00:01Z", "read_at": null, "source_title": "Sample Feed", "summary": "Watch out for nasty\n tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2002-09-05T00:00:01+00:00" + "updated_at": "2002-09-05T00:00:01Z" } ], "description": "For documentation only", diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_page_url/feed_data.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_page_url/feed_data.json index 9a4a2b6b..2efa72fc 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_page_url/feed_data.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_get_feed_metadata_from_page_url/feed_data.json @@ -11,14 +11,14 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2005-11-09T00:23:47+00:00", + "published_at": "2005-11-09T00:23:47Z", "read_at": null, "source_title": "Sample Feed", "summary": "Watch out for nasty tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2005-11-09T11:56:34+00:00" + "updated_at": "2005-11-09T11:56:34Z" }, { "annotations": [], diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-from-youtube/articles.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-from-youtube/articles.json index b63fbcad..9a2a40f8 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-from-youtube/articles.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-from-youtube/articles.json @@ -12,13 +12,13 @@ "link": "https://www.youtube.com/watch?v=SOME_VIDEO_ID", "preview_picture_alt": "The lengthy description of my video!", "preview_picture_url": 
"https://i2.ytimg.com/vi/SOME_VIDEO_ID/hqdefault.jpg", - "published_at": "2024-03-08T17:00:00+00:00", + "published_at": "2024-03-08T17:00:00Z", "read_at": null, "source_title": "Some feed", "summary": "The lengthy description of my video!", "table_of_content": [], "tags": [], "title": "What a good video!", - "updated_at": "2024-03-14T18:49:15+00:00" + "updated_at": "2024-03-14T18:49:15Z" } ] \ No newline at end of file diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-description/articles.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-description/articles.json index 39ad8556..8d03c98d 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-description/articles.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-description/articles.json @@ -10,14 +10,14 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "My image description My image credit", "preview_picture_url": "https://example.com/my-image.jpg", - "published_at": "2005-11-09T00:23:47+00:00", + "published_at": "2005-11-09T00:23:47Z", "read_at": null, "source_title": "Some feed", "summary": "Watch out for nasty tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2005-11-09T11:56:34+00:00" + "updated_at": "2005-11-09T11:56:34Z" }, { "annotations": [], diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-title/articles.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-title/articles.json index 8272e091..d04461ea 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-title/articles.json +++ 
b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/atom-with-media-title/articles.json @@ -10,14 +10,14 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "My image title \u00a9 Tester", "preview_picture_url": "https://example.com/my-image.jpg", - "published_at": "2005-11-09T00:23:47+00:00", + "published_at": "2005-11-09T00:23:47Z", "read_at": null, "source_title": "Some feed", "summary": "Watch out for nasty tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2005-11-09T11:56:34+00:00" + "updated_at": "2005-11-09T11:56:34Z" }, { "annotations": [], diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-atom-feed/articles.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-atom-feed/articles.json index a1d87724..92708eca 100644 --- a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-atom-feed/articles.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-atom-feed/articles.json @@ -10,14 +10,14 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2005-11-09T00:23:47+00:00", + "published_at": "2005-11-09T00:23:47Z", "read_at": null, "source_title": "Some feed", "summary": "Watch out for nasty tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2005-11-09T11:56:34+00:00" + "updated_at": "2005-11-09T11:56:34Z" }, { "annotations": [], diff --git a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-rss-feed/articles.json b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-rss-feed/articles.json index 3e40c6b0..4deb4fb2 100644 --- 
a/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-rss-feed/articles.json +++ b/legadilo/feeds/tests/test_services/snapshots/test_feed_parsing/test_parse_articles/sample-rss-feed/articles.json @@ -10,13 +10,13 @@ "link": "http://example.org/entry/3", "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2002-09-05T00:00:01+00:00", + "published_at": "2002-09-05T00:00:01Z", "read_at": null, "source_title": "Some feed", "summary": "Watch out for nasty\n tricks", "table_of_content": [], "tags": [], "title": "First entry title", - "updated_at": "2002-09-05T00:00:01+00:00" + "updated_at": "2002-09-05T00:00:01Z" } ] \ No newline at end of file diff --git a/legadilo/feeds/tests/test_views/test_feeds_admin_view.py b/legadilo/feeds/tests/test_views/test_feeds_admin_view.py index a98bd402..78ab6204 100644 --- a/legadilo/feeds/tests/test_views/test_feeds_admin_view.py +++ b/legadilo/feeds/tests/test_views/test_feeds_admin_view.py @@ -21,9 +21,10 @@ from legadilo.conftest import assert_redirected_to_login_page from legadilo.feeds import constants -from legadilo.feeds.models import Feed +from legadilo.feeds.models import Feed, FeedArticle from legadilo.feeds.tests.factories import FeedCategoryFactory, FeedFactory -from legadilo.reading.tests.factories import TagFactory +from legadilo.reading.models import Article +from legadilo.reading.tests.factories import ArticleFactory, TagFactory @pytest.mark.django_db @@ -82,12 +83,16 @@ def test_edit_as_other_user(self, logged_in_other_user_sync_client): assert response.status_code == HTTPStatus.NOT_FOUND - def test_delete_feed(self, logged_in_sync_client): + def test_delete_feed(self, user, logged_in_sync_client): + article = ArticleFactory(user=user) + FeedArticle.objects.create(feed=self.feed, article=article) + response = logged_in_sync_client.post(self.url, data={"delete": ""}) assert response.status_code == HTTPStatus.FOUND assert response["Location"] == 
reverse("feeds:feeds_admin") assert Feed.objects.count() == 0 + assert Article.objects.count() > 0 def test_disable_feed(self, logged_in_sync_client): response = logged_in_sync_client.post(self.url, data={"disable": ""}) diff --git a/legadilo/feeds/views/subscribe_to_feed_view.py b/legadilo/feeds/views/subscribe_to_feed_view.py index fe59d8af..9743cf1d 100644 --- a/legadilo/feeds/views/subscribe_to_feed_view.py +++ b/legadilo/feeds/views/subscribe_to_feed_view.py @@ -28,6 +28,7 @@ from django.utils.html import format_html from django.utils.translation import gettext_lazy as _ from django.views.decorators.http import require_http_methods +from pydantic import ValidationError as PydanticValidationError from legadilo.core.forms import FormChoices from legadilo.core.forms.fields import MultipleTagsField @@ -206,7 +207,7 @@ async def _handle_creation(request: AuthenticatedHttpRequest): # noqa: PLR0911 _("The feed file is too big, we won't parse it. Try to find a more lightweight feed."), ) return HTTPStatus.BAD_REQUEST, form - except (InvalidFeedFileError, ValueError, TypeError): + except (InvalidFeedFileError, PydanticValidationError, ValueError, TypeError): messages.error( request, _( diff --git a/legadilo/import_export/management/commands/import_data.py b/legadilo/import_export/management/commands/import_data.py index 95ee8609..b048d9a9 100644 --- a/legadilo/import_export/management/commands/import_data.py +++ b/legadilo/import_export/management/commands/import_data.py @@ -23,7 +23,7 @@ from django.core.management import BaseCommand from django.core.management.base import CommandError, CommandParser from django.db import transaction -from jsonschema import ValidationError as JsonSchemaValidationError +from pydantic import ValidationError as PydanticValidationError from legadilo.import_export.services.custom_csv import import_custom_csv_file_sync from legadilo.import_export.services.exceptions import DataImportError @@ -72,7 +72,7 @@ def handle(self, *args, 
**options): self._import(options) except User.DoesNotExist as e: raise CommandError(f"No user with id {options['user_id']} was found!") from e - except JsonSchemaValidationError as e: + except PydanticValidationError as e: logger.debug(str(e)) raise CommandError("The file you supplied is not valid") from e except FileNotFoundError as e: diff --git a/legadilo/import_export/services/custom_csv.py b/legadilo/import_export/services/custom_csv.py index d66530cc..0f8a95d9 100644 --- a/legadilo/import_export/services/custom_csv.py +++ b/legadilo/import_export/services/custom_csv.py @@ -30,16 +30,16 @@ from legadilo.feeds import constants as feeds_constants from legadilo.feeds.models import Feed, FeedArticle, FeedCategory from legadilo.feeds.services.feed_parsing import ( + FeedData, FeedFileTooBigError, InvalidFeedFileError, NoFeedUrlFoundError, - build_feed_data, get_feed_data, ) from legadilo.import_export.services.exceptions import DataImportError from legadilo.reading import constants as reading_constants from legadilo.reading.models import Article -from legadilo.reading.services.article_fetching import build_article_data +from legadilo.reading.services.article_fetching import ArticleData from legadilo.users.models import User from legadilo.utils.http_utils import get_rss_async_client from legadilo.utils.security import full_sanitize @@ -143,7 +143,7 @@ async def _import_feed(user, category, row, feed_url_in_file_to_true_feed): logger.error( f"Failed to import feed {row['feed_url']} Created with basic data and disabled." 
) - feed_data = build_feed_data( + feed_data = FeedData( feed_url=row["feed_url"], site_url=row["feed_site_url"], title=row["feed_title"], @@ -173,14 +173,14 @@ async def _import_feed(user, category, row, feed_url_in_file_to_true_feed): async def _import_article(user, feed, row): - article_data = build_article_data( + article_data = ArticleData( external_article_id=f"custom_csv:{row['article_id']}", source_title=feed.title if feed else urlparse(row["article_link"]).netloc, title=row["article_title"], summary="", content=row["article_content"], authors=_safe_json_parse(row["article_authors"], []), - contributors=[], + contributors=(), tags=_safe_json_parse(row["article_tags"], []), link=row["article_link"], preview_picture_url="", diff --git a/legadilo/import_export/services/opml.py b/legadilo/import_export/services/opml.py index 9dc387cc..f093f453 100644 --- a/legadilo/import_export/services/opml.py +++ b/legadilo/import_export/services/opml.py @@ -22,6 +22,7 @@ from asgiref.sync import async_to_sync, sync_to_async from defusedxml.ElementTree import parse from django.db import IntegrityError +from pydantic import ValidationError as PydanticValidationError from legadilo.feeds import constants as feeds_constants from legadilo.feeds.models import Feed, FeedCategory @@ -176,7 +177,7 @@ async def _process_feed(user, client, outline, category=None): nb_imported_feeds += 1 except IntegrityError: logger.info(f"You are already subscribed to {outline.feed_url}") - except (FeedFileTooBigError, InvalidFeedFileError): + except (FeedFileTooBigError, InvalidFeedFileError, PydanticValidationError): logger.exception("Failed to import the feed") return nb_imported_feeds diff --git a/legadilo/import_export/services/wallabag.py b/legadilo/import_export/services/wallabag.py index 07b886de..62359ccc 100644 --- a/legadilo/import_export/services/wallabag.py +++ b/legadilo/import_export/services/wallabag.py @@ -16,23 +16,54 @@ import json import logging +from datetime import datetime from 
pathlib import Path -from typing import Any +from typing import Annotated from django.core.files import File -from jsonschema import validate as validate_json_schema +from pydantic import BaseModel as BaseSchema +from pydantic import ConfigDict, TypeAdapter -from legadilo.import_export.services.exceptions import InvalidEntryError from legadilo.reading import constants as reading_constants from legadilo.reading.models import Article, Tag -from legadilo.reading.services.article_fetching import build_article_data +from legadilo.reading.services.article_fetching import ArticleData, Language, OptionalUrl from legadilo.users.models import User -from legadilo.utils.time_utils import safe_datetime_parse -from legadilo.utils.validators import is_url_valid +from legadilo.utils.validators import ( + CleanedString, + FullSanitizeValidator, + ValidUrlValidator, + remove_falsy_items, + sanitize_keep_safe_tags_validator, +) logger = logging.getLogger(__name__) +class WallabagArticle(BaseSchema): + model_config = ConfigDict( + extra="ignore", frozen=True, validate_default=True, validate_assignment=True + ) + + id: int + is_archived: bool + is_starred: bool + tags: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + title: Annotated[str, FullSanitizeValidator] + url: Annotated[str, ValidUrlValidator] + content: Annotated[str, sanitize_keep_safe_tags_validator()] = "" + created_at: datetime | None = None + updated_at: datetime | None = None + published_by: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + reading_time: int = 0 + domain_name: Annotated[str, FullSanitizeValidator] + preview_picture: OptionalUrl = "" + annotations: tuple[str, ...] 
= () + language: Language = "" + + +ListOfWallabagArticles = TypeAdapter(list[WallabagArticle]) + + def import_wallabag_json_file_path(user: User, path_to_file: str) -> int: with Path(path_to_file).open("rb") as f: data = json.load(f) @@ -45,34 +76,28 @@ def import_wallabag_file(user: User, file: File) -> int: def _import_wallabag_data(user: User, data: list[dict]) -> int: - _validate_data_batch(data) + wallabag_articles = ListOfWallabagArticles.validate_python(data) nb_added_articles = 0 - for raw_article_data in data: - link = raw_article_data["url"] - preview_picture_url = raw_article_data.get("preview_picture", "") - if preview_picture_url and not is_url_valid(preview_picture_url): - logger.debug(f"Some preview url link {preview_picture_url} is not valid") - preview_picture_url = "" - if not is_url_valid(link): - raise InvalidEntryError(f"The article URL ({link}) is not valid") - - tags = Tag.objects.get_or_create_from_list(user, raw_article_data.get("tags", [])) - article_data = build_article_data( - external_article_id=f"wallabag:{raw_article_data['id']}", - source_title=raw_article_data["domain_name"], - title=raw_article_data["title"], + for wallabag_article in wallabag_articles: + link = wallabag_article.url + + tags = Tag.objects.get_or_create_from_list(user, wallabag_article.tags) + article_data = ArticleData( + external_article_id=f"wallabag:{wallabag_article.id}", + source_title=wallabag_article.domain_name, + title=wallabag_article.title, summary="", - content=raw_article_data.get("content", ""), - authors=raw_article_data.get("published_by", []), - contributors=[], - tags=[], + content=wallabag_article.content, + authors=wallabag_article.published_by, + contributors=(), + tags=(), link=link, - annotations=raw_article_data.get("annotations", []), - preview_picture_url=preview_picture_url, + annotations=wallabag_article.annotations, + preview_picture_url=str(wallabag_article.preview_picture), preview_picture_alt="", - 
published_at=safe_datetime_parse(raw_article_data["created_at"]), - updated_at=safe_datetime_parse(raw_article_data["updated_at"]), - language=raw_article_data.get("language", ""), + published_at=wallabag_article.created_at, + updated_at=wallabag_article.updated_at, + language=wallabag_article.language, ) Article.objects.update_or_create_from_articles_list( user=user, @@ -83,43 +108,3 @@ def _import_wallabag_data(user: User, data: list[dict]) -> int: nb_added_articles += 1 return nb_added_articles - - -def _validate_data_batch(article_data: Any): - validate_json_schema( - article_data, - { - "type": "array", - "items": { - "type": "object", - "properties": { - "id": {"type": "number"}, - "is_archived": {"type": "number"}, - "is_starred": {"type": "number"}, - "tags": {"type": "array", "items": {"type": "string"}}, - "title": {"type": "string"}, - "url": {"type": "string"}, - "content": {"type": "string"}, - "created_at": {"type": "string"}, - "updated_at": {"type": "string"}, - "published_by": {"type": "array", "items": {"type": "string"}}, - "reading_time": {"type": "number"}, - "domain_name": {"type": "string"}, - "preview_picture": {"type": "string"}, - "annotations": {"type": "array"}, - "language": {"type": "string"}, - }, - "required": [ - "id", - "is_archived", - "is_starred", - "title", - "url", - "created_at", - "updated_at", - "reading_time", - "domain_name", - ], - }, - }, - ) diff --git a/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/articles.json b/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/articles.json index 2648a744..04e7b9a6 100644 --- a/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/articles.json +++ b/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/articles.json @@ -16,14 +16,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": 
"2024-05-17T13:00:00+00:00", + "published_at": "2024-05-17T13:00:00Z", "read_at": null, "reading_time": 0, "slug": "existing-article", "summary": "", "table_of_content": [], "title": "Existing article", - "updated_at": "2024-05-17T13:00:00+00:00", + "updated_at": "2024-05-17T13:00:00Z", "was_opened": false }, { @@ -43,14 +43,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2002-09-05T00:00:01+00:00", + "published_at": "2002-09-05T00:00:01Z", "read_at": null, "reading_time": 0, "slug": "first-entry-title", "summary": "Watch out for nasty tricks", "table_of_content": [], "title": "First entry title", - "updated_at": "2005-11-09T11:56:34+00:00", + "updated_at": "2005-11-09T11:56:34Z", "was_opened": false }, { @@ -74,14 +74,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2017-05-21T20:46:00+00:00", - "read_at": "2017-09-26T06:53:00.022164+00:00", + "published_at": "2017-05-21T20:46:00Z", + "read_at": "2017-09-26T06:53:00.022Z", "reading_time": 0, "slug": "article-3", "summary": "

Test content

", "table_of_content": [], "title": "Article 3", - "updated_at": "2024-05-03T19:46:15+00:00", + "updated_at": "2024-05-03T19:46:15Z", "was_opened": false }, { @@ -133,14 +133,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2017-05-21T20:46:00+00:00", - "read_at": "2017-09-26T06:53:00.022164+00:00", + "published_at": "2017-05-21T20:46:00Z", + "read_at": "2017-09-26T06:53:00.022Z", "reading_time": 0, "slug": "article-4", "summary": "

Test content

", "table_of_content": [], "title": "Article 4", - "updated_at": "2024-05-03T19:46:15+00:00", + "updated_at": "2024-05-03T19:46:15Z", "was_opened": false }, { @@ -164,14 +164,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2017-05-21T20:46:00+00:00", - "read_at": "2017-09-26T06:53:00.022164+00:00", + "published_at": "2017-05-21T20:46:00Z", + "read_at": "2017-09-26T06:53:00.022Z", "reading_time": 0, "slug": "article-5", "summary": "

Test content

", "table_of_content": [], "title": "Article 5", - "updated_at": "2024-05-03T19:46:15+00:00", + "updated_at": "2024-05-03T19:46:15Z", "was_opened": false }, { @@ -193,14 +193,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2017-05-21T20:46:00+00:00", + "published_at": "2017-05-21T20:46:00Z", "read_at": null, "reading_time": 0, "slug": "article-6", "summary": "

Test content

", "table_of_content": [], "title": "Article 6", - "updated_at": "2024-05-03T19:46:15+00:00", + "updated_at": "2024-05-03T19:46:15Z", "was_opened": false }, { @@ -222,14 +222,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "", - "published_at": "2017-05-21T20:46:00+00:00", - "read_at": "2017-09-26T06:53:00.022164+00:00", + "published_at": "2017-05-21T20:46:00Z", + "read_at": "2017-09-26T06:53:00.022Z", "reading_time": 0, "slug": "article-7", "summary": "

Test content

", "table_of_content": [], "title": "Article 7", - "updated_at": "2020-10-04T19:00:19.463831+00:00", + "updated_at": "2020-10-04T19:00:19.463Z", "was_opened": false } ] \ No newline at end of file diff --git a/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/feeds.json b/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/feeds.json index 3c3f050a..0ab495fc 100644 --- a/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/feeds.json +++ b/legadilo/import_export/tests/services/snapshots/test_custom_csv/test_import_custom_csv/feeds.json @@ -48,7 +48,7 @@ "article_retention_time": 0, "category__title": null, "description": "", - "disabled_at": "2024-05-17T13:00:00+00:00", + "disabled_at": "2024-05-17T13:00:00Z", "disabled_reason": "Failed to reach feed URL while importing from custom CSV.", "enabled": false, "feed_type": "rss", diff --git a/legadilo/import_export/tests/services/test_wallabag.py b/legadilo/import_export/tests/services/test_wallabag.py index bbed58ee..322fbe38 100644 --- a/legadilo/import_export/tests/services/test_wallabag.py +++ b/legadilo/import_export/tests/services/test_wallabag.py @@ -15,7 +15,7 @@ # along with this program. If not, see . 
import pytest -from jsonschema import ValidationError as JsonSchemaValidationError +from pydantic import ValidationError as PydanticValidationError from legadilo.import_export.services.wallabag import _import_wallabag_data from legadilo.reading import constants as reading_constants @@ -24,7 +24,7 @@ def test_import_invalid_data(user): - with pytest.raises(JsonSchemaValidationError): + with pytest.raises(PydanticValidationError): _import_wallabag_data(user, [{"key": "value"}]) diff --git a/legadilo/import_export/tests/views/snapshots/test_import_export_articles_views/test_import_valid_file/walabag_articles.json b/legadilo/import_export/tests/views/snapshots/test_import_export_articles_views/test_import_valid_file/walabag_articles.json index a6ba305d..d442b035 100644 --- a/legadilo/import_export/tests/views/snapshots/test_import_export_articles_views/test_import_valid_file/walabag_articles.json +++ b/legadilo/import_export/tests/views/snapshots/test_import_export_articles_views/test_import_valid_file/walabag_articles.json @@ -16,14 +16,14 @@ "opened_at": null, "preview_picture_alt": "", "preview_picture_url": "https://examplec.com/preview.png", - "published_at": "2024-04-19T17:18:29+00:00", + "published_at": "2024-04-19T17:18:29Z", "read_at": null, "reading_time": 0, "slug": "refactoring-with-ai", "summary": "

Some data

", "table_of_content": [], "title": "Refactoring with AI", - "updated_at": "2024-04-20T17:17:54+00:00", + "updated_at": "2024-04-20T17:17:54Z", "was_opened": false } ] \ No newline at end of file diff --git a/legadilo/import_export/views/import_export_articles_views.py b/legadilo/import_export/views/import_export_articles_views.py index 917764bf..f9df7e97 100644 --- a/legadilo/import_export/views/import_export_articles_views.py +++ b/legadilo/import_export/views/import_export_articles_views.py @@ -26,8 +26,7 @@ from django.template.response import TemplateResponse from django.utils.translation import gettext_lazy as _ from django.views.decorators.http import require_GET, require_http_methods -from jsonschema import ValidationError as JsonSchemaValidationError -from jsonschema.exceptions import ValidationError as JsonValidationError +from pydantic import ValidationError as PydanticValidationError from legadilo.import_export.services.exceptions import DataImportError from legadilo.users.models import User @@ -102,7 +101,7 @@ async def _import_custom_csv(request: AuthenticatedHttpRequest): nb_imported_feeds, nb_imported_categories, ) = await import_custom_csv_file(await request.auser(), file_path) - except (JsonSchemaValidationError, DataImportError, UnicodeDecodeError): + except (DataImportError, UnicodeDecodeError, PydanticValidationError): status = HTTPStatus.BAD_REQUEST messages.error(request, _("The file you supplied is not valid.")) else: @@ -127,7 +126,7 @@ async def _import_wallabag(request: AuthenticatedHttpRequest): nb_imported_articles = await sync_to_async(import_wallabag_file)( await request.auser(), import_wallabag_form.cleaned_data["wallabag_file"] ) - except (JSONDecodeError, UnicodeDecodeError, JsonValidationError): + except (JSONDecodeError, UnicodeDecodeError, PydanticValidationError): status = HTTPStatus.BAD_REQUEST messages.error(request, _("The file you supplied is not valid.")) else: diff --git a/legadilo/reading/api.py b/legadilo/reading/api.py 
new file mode 100644 index 00000000..0859c94f --- /dev/null +++ b/legadilo/reading/api.py @@ -0,0 +1,161 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +from datetime import datetime +from http import HTTPStatus +from operator import xor +from typing import Annotated, Self + +from asgiref.sync import sync_to_async +from django.shortcuts import aget_object_or_404 +from ninja import ModelSchema, PatchDict, Router, Schema +from pydantic import model_validator + +from legadilo.reading import constants +from legadilo.reading.models import Article, ArticleTag, Tag +from legadilo.reading.services.article_fetching import ( + build_article_data_from_content, + get_article_from_url, +) +from legadilo.users.models import User +from legadilo.users.user_types import AuthenticatedApiRequest +from legadilo.utils.api import update_model_from_patch_dict +from legadilo.utils.validators import ( + CleanedString, + FullSanitizeValidator, + ValidUrlValidator, + remove_falsy_items, +) + +reading_api_router = Router(tags=["reading"]) + + +class OutArticleSchema(ModelSchema): + class Meta: + model = Article + exclude = ("user", "obj_created_at", "obj_updated_at") + + +class ArticleCreation(Schema): + link: Annotated[str, ValidUrlValidator] + title: Annotated[str, FullSanitizeValidator] = "" + # We must not sanitize this yet: we need the raw 
content when building the article to fetch some + # data (like authors, canonicals…). It will be sanitized later when we extract the actual + # content of the article. + content: str = "" + tags: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + + @model_validator(mode="after") + def check_title_and_content(self) -> Self: + if xor(len(self.title) > 0, len(self.content) > 0): + raise ValueError("You must supply either both title and content or none of them") + + return self + + @property + def has_data(self) -> bool: + return bool(self.title) and bool(self.content) + + +@reading_api_router.post( + "/articles/", + response={HTTPStatus.CREATED: OutArticleSchema}, + url_name="create_article", + summary="Create a new article", +) +async def create_article_view(request: AuthenticatedApiRequest, payload: ArticleCreation): + """Create an article either just with a link or with a link, a title and some content.""" + if payload.has_data: + article_data = build_article_data_from_content( + url=payload.link, title=payload.title, content=payload.content + ) + else: + article_data = await get_article_from_url(payload.link) + + # Tags specified in article data are the raw tags used in feeds, they are not used to link an + # article to tag objects. 
+ tags = await sync_to_async(Tag.objects.get_or_create_from_list)(request.auth, payload.tags) + article_data = article_data.model_copy(update={"tags": ()}) + + articles = await sync_to_async(Article.objects.update_or_create_from_articles_list)( + request.auth, [article_data], tags, source_type=constants.ArticleSourceType.MANUAL + ) + return HTTPStatus.CREATED, articles[0] + + +@reading_api_router.get( + "/articles/{int:article_id}/", + url_name="get_article", + response=OutArticleSchema, + summary="View the details of a specific article", +) +async def get_article_view(request: AuthenticatedApiRequest, article_id: int) -> Article: + return await aget_object_or_404(Article, id=article_id, user=request.auth) + + +class ArticleUpdate(Schema): + title: Annotated[str, FullSanitizeValidator] + tags: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + read_at: datetime + is_favorite: bool + is_for_later: bool + reading_time: int + + +@reading_api_router.patch( + "/articles/{int:article_id}/", + response=OutArticleSchema, + url_name="update_article", + summary="Update an article", +) +async def update_article_view( + request: AuthenticatedApiRequest, + article_id: int, + payload: PatchDict[ArticleUpdate], # type: ignore[type-arg] +) -> Article: + article = await aget_object_or_404(Article, id=article_id, user=request.auth) + + if (tags := payload.pop("tags", None)) is not None: + await _update_article_tags(request.auth, article, tags) + + # Required to update tags and generated fields + await update_model_from_patch_dict(article, payload, must_refresh=True) + + return article + + +async def _update_article_tags(user: User, article: Article, new_tags: tuple[str, ...]): + tags = await sync_to_async(Tag.objects.get_or_create_from_list)(user, new_tags) + await sync_to_async(ArticleTag.objects.associate_articles_with_tags)( + [article], + tags, + tagging_reason=constants.TaggingReason.ADDED_MANUALLY, + readd_deleted=True, + ) + await 
sync_to_async(ArticleTag.objects.dissociate_article_with_tags_not_in_list)(article, tags) + + +@reading_api_router.delete( + "/articles/{int:article_id}/", + url_name="delete_article", + response={HTTPStatus.NO_CONTENT: None}, + summary="Delete an article", +) +async def delete_article_view(request: AuthenticatedApiRequest, article_id: int): + article = await aget_object_or_404(Article, id=article_id, user=request.auth) + + await article.adelete() + + return HTTPStatus.NO_CONTENT, None diff --git a/legadilo/reading/migrations/0001_initial.py b/legadilo/reading/migrations/0001_initial.py index 1c34548c..659455cf 100644 --- a/legadilo/reading/migrations/0001_initial.py +++ b/legadilo/reading/migrations/0001_initial.py @@ -57,12 +57,7 @@ class Migration(migrations.Migration): models.JSONField( blank=True, default=list, - validators=[ - legadilo.utils.validators.JsonSchemaValidator({ - "items": {"type": "string"}, - "type": "array", - }) - ], + validators=[legadilo.utils.validators.list_of_strings_validator], ), ), ( @@ -70,12 +65,7 @@ class Migration(migrations.Migration): models.JSONField( blank=True, default=list, - validators=[ - legadilo.utils.validators.JsonSchemaValidator({ - "items": {"type": "string"}, - "type": "array", - }) - ], + validators=[legadilo.utils.validators.list_of_strings_validator], ), ), ("link", models.URLField(max_length=1024)), @@ -87,12 +77,7 @@ class Migration(migrations.Migration): blank=True, default=list, help_text="Tags of the article from the its source", - validators=[ - legadilo.utils.validators.JsonSchemaValidator({ - "items": {"type": "string"}, - "type": "array", - }) - ], + validators=[legadilo.utils.validators.list_of_strings_validator], ), ), ( diff --git a/legadilo/reading/migrations/0007_article_table_of_content.py b/legadilo/reading/migrations/0007_article_table_of_content.py index 767ffb8f..4f4d3974 100644 --- a/legadilo/reading/migrations/0007_article_table_of_content.py +++ 
b/legadilo/reading/migrations/0007_article_table_of_content.py @@ -47,34 +47,7 @@ class Migration(migrations.Migration): blank=True, default=list, help_text="The table of content of the article.", - validators=[ - legadilo.utils.validators.JsonSchemaValidator({ - "items": { - "additionalProperties": False, - "properties": { - "children": { - "items": { - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "level": {"type": "integer"}, - "text": {"type": "string"}, - }, - "required": ["id", "text", "level"], - "type": "object", - }, - "type": "array", - }, - "id": {"type": "string"}, - "level": {"type": "integer"}, - "text": {"type": "string"}, - }, - "required": ["id", "text", "level"], - "type": "object", - }, - "type": "array", - }) - ], + validators=[legadilo.utils.validators.table_of_content_validator], ), ), migrations.RunPython(build_toc, reverse_code=migrations.RunPython.noop), diff --git a/legadilo/reading/migrations/0010_alter_article_table_of_content.py b/legadilo/reading/migrations/0010_alter_article_table_of_content.py new file mode 100644 index 00000000..fe9d13f3 --- /dev/null +++ b/legadilo/reading/migrations/0010_alter_article_table_of_content.py @@ -0,0 +1,42 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +# Generated by Django 5.1.3 on 2024-11-23 16:49 + +from django.db import migrations, models + +import legadilo.utils.collections_utils +import legadilo.utils.validators + + +class Migration(migrations.Migration): + dependencies = [ + ("reading", "0009_comment"), + ] + + operations = [ + migrations.AlterField( + model_name="article", + name="table_of_content", + field=models.JSONField( + blank=True, + default=list, + encoder=legadilo.utils.collections_utils.CustomJsonEncoder, + help_text="The table of content of the article.", + validators=[legadilo.utils.validators.table_of_content_validator], + ), + ), + ] diff --git a/legadilo/reading/models/article.py b/legadilo/reading/models/article.py index 3271d163..1081b53c 100644 --- a/legadilo/reading/models/article.py +++ b/legadilo/reading/models/article.py @@ -21,6 +21,7 @@ import math from collections.abc import Iterable from dataclasses import dataclass +from itertools import chain from typing import TYPE_CHECKING, Literal, Self, assert_never from urllib.parse import urlparse @@ -35,14 +36,14 @@ from legadilo.reading import constants from legadilo.reading.models.tag import ArticleTag -from legadilo.utils.collections_utils import max_or_none, min_or_none +from legadilo.utils.collections_utils import CustomJsonEncoder, max_or_none, min_or_none from legadilo.utils.security import full_sanitize from legadilo.utils.text import get_nb_words_from_html from legadilo.utils.time_utils import utcnow from legadilo.utils.validators import ( language_code_validator, - list_of_strings_json_schema_validator, - table_of_content_json_schema_validator, + list_of_strings_validator, + table_of_content_validator, ) from .article_fetch_error import ArticleFetchError @@ -696,17 +697,15 @@ class Article(models.Model): "we will use 0." 
), ) - authors = models.JSONField( - validators=[list_of_strings_json_schema_validator], blank=True, default=list - ) + authors = models.JSONField(validators=[list_of_strings_validator], blank=True, default=list) contributors = models.JSONField( - validators=[list_of_strings_json_schema_validator], blank=True, default=list + validators=[list_of_strings_validator], blank=True, default=list ) link = models.URLField(max_length=1_024) preview_picture_url = models.URLField(blank=True, max_length=1_024) preview_picture_alt = models.TextField(blank=True) external_tags = models.JSONField( - validators=[list_of_strings_json_schema_validator], + validators=[list_of_strings_validator], blank=True, default=list, help_text=_("Tags of the article from the its source"), @@ -733,10 +732,11 @@ class Article(models.Model): validators=[language_code_validator], ) table_of_content = models.JSONField( - validators=[table_of_content_json_schema_validator], + validators=[table_of_content_validator], blank=True, default=list, help_text=_("The table of content of the article."), + encoder=CustomJsonEncoder, ) read_at = models.DateTimeField(null=True, blank=True) @@ -846,9 +846,13 @@ def update_article_from_data( ) or self.reading_time self.preview_picture_url = article_data.preview_picture_url or self.preview_picture_alt self.preview_picture_alt = article_data.preview_picture_alt or self.preview_picture_alt - self.authors = list(dict.fromkeys(self.authors + article_data.authors)) - self.contributors = list(dict.fromkeys(self.contributors + article_data.contributors)) - self.external_tags = list(dict.fromkeys(self.external_tags + article_data.tags)) + # We create the deduplicated list with dict.fromkeys and not sets to preserve the + # initial order. We chain the iterable since they don't have the same type. 
+ self.authors = list(dict.fromkeys(chain(self.authors, article_data.authors))) + self.contributors = list( + dict.fromkeys(chain(self.contributors, article_data.contributors)) + ) + self.external_tags = list(dict.fromkeys(chain(self.external_tags, article_data.tags))) self.updated_at = max_or_none([article_data.updated_at, self.updated_at]) self.published_at = min_or_none([article_data.published_at, self.published_at]) elif has_content_unlike_saved: diff --git a/legadilo/reading/models/tag.py b/legadilo/reading/models/tag.py index e4304041..f9bf69ce 100644 --- a/legadilo/reading/models/tag.py +++ b/legadilo/reading/models/tag.py @@ -153,7 +153,7 @@ def get_slugs_to_ids(self, user: User, slugs: Iterable[str]) -> dict[str, int]: } @transaction.atomic() - def get_or_create_from_list(self, user: User, titles_or_slugs: list[str]) -> list[Tag]: + def get_or_create_from_list(self, user: User, titles_or_slugs: Iterable[str]) -> list[Tag]: existing_tags = list( Tag.objects.get_queryset() .for_user(user) diff --git a/legadilo/reading/services/article_fetching.py b/legadilo/reading/services/article_fetching.py index bfcd163d..f90d7467 100644 --- a/legadilo/reading/services/article_fetching.py +++ b/legadilo/reading/services/article_fetching.py @@ -13,17 +13,18 @@ # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+from __future__ import annotations import logging import sys -from dataclasses import dataclass, field from datetime import datetime -from typing import TypedDict +from typing import Annotated, Any, Literal from urllib.parse import urlparse from bs4 import BeautifulSoup -from django.core.exceptions import ValidationError from django.template.defaultfilters import truncatewords_html +from pydantic import BaseModel as BaseSchema +from pydantic import model_validator from slugify import slugify from legadilo.reading import constants @@ -33,117 +34,100 @@ sanitize_keep_safe_tags, ) from legadilo.utils.time_utils import safe_datetime_parse -from legadilo.utils.validators import is_url_valid, language_code_validator, normalize_url +from legadilo.utils.validators import ( + CleanedString, + FullSanitizeValidator, + LanguageCodeValidatorOrDefault, + TableOfContentItem, + TableOfContentTopItem, + ValidUrlValidator, + default_frozen_model_config, + is_url_valid, + none_to_value, + normalize_url, + remove_falsy_items, + sanitize_keep_safe_tags_validator, + truncate, +) logger = logging.getLogger(__name__) -class TocItem(TypedDict): - id: str - text: str - level: int - - -class TocTopItem(TocItem): - children: list[TocItem] - - -@dataclass(frozen=True) -class ArticleData: - external_article_id: str - source_title: str - title: str - summary: str - content: str - table_of_content: list[TocTopItem] - authors: list[str] - contributors: list[str] - tags: list[str] - link: str - preview_picture_url: str - preview_picture_alt: str - published_at: datetime | None - updated_at: datetime | None - language: str - annotations: list[str] | tuple[str] = field(default_factory=list) +Language = Annotated[ + str, + FullSanitizeValidator, + truncate(constants.LANGUAGE_CODE_MAX_LENGTH), + LanguageCodeValidatorOrDefault, + none_to_value(""), +] +OptionalUrl = Literal[""] | Annotated[str, ValidUrlValidator] + + +class ArticleData(BaseSchema): + model_config = default_frozen_model_config + + 
external_article_id: Annotated[ + str, FullSanitizeValidator, truncate(constants.EXTERNAL_ARTICLE_ID_MAX_LENGTH) + ] + source_title: Annotated[ + str, FullSanitizeValidator, truncate(constants.ARTICLE_SOURCE_TITLE_MAX_LENGTH) + ] + title: Annotated[str, FullSanitizeValidator] + summary: Annotated[ + str, + sanitize_keep_safe_tags_validator(constants.EXTRA_TAGS_TO_REMOVE_FROM_SUMMARY), + ] + content: Annotated[str, sanitize_keep_safe_tags_validator()] + table_of_content: tuple[TableOfContentTopItem, ...] = () + authors: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + contributors: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + tags: Annotated[tuple[CleanedString, ...], remove_falsy_items(tuple)] = () + link: Annotated[str, ValidUrlValidator] + preview_picture_url: OptionalUrl = "" + preview_picture_alt: Annotated[str, FullSanitizeValidator, none_to_value("")] = "" + published_at: datetime | None = None + updated_at: datetime | None = None + language: Language + annotations: tuple[str, ...] 
= () read_at: datetime | None = None is_favorite: bool = False + @model_validator(mode="before") + @staticmethod + def prepare_values( + values: dict[str, Any], + ) -> dict[str, Any]: + summary = values.get("summary", "") + content = values.get("content", "") + title = values.get("title", "") + source_title = values.get("source_title", "") + link = values.get("link") -def build_article_data( # noqa: PLR0913 too many arguments - *, - external_article_id: str, - source_title: str, - title: str, - summary: str, - content: str, - authors: list[str], - contributors: list[str], - tags: list[str], - link: str, - preview_picture_url: str, - preview_picture_alt: str, - published_at: datetime | None, - updated_at: datetime | None, - language: str, - annotations: list[str] | tuple[str] = (), # type: ignore[assignment] - read_at: datetime | None = None, - is_favorite: bool = False, -) -> ArticleData: - summary = _resolve_relative_links(link, summary) - content = _resolve_relative_links(link, content) - content, toc = _build_table_of_content(content) - if not summary and content: - summary = _get_fallback_summary_from_content(content) - - try: - language = full_sanitize(language)[: constants.LANGUAGE_CODE_MAX_LENGTH] - language_code_validator(language) - except (ValidationError, TypeError): - language = "" + # Consider link optional here to please mypy. It's mandatory anyway so validation will fail + # later if needed. 
+ if link: + summary = _resolve_relative_links(link, summary) + content = _resolve_relative_links(link, content) - title = full_sanitize(title)[: constants.ARTICLE_TITLE_MAX_LENGTH] - if not title: - title = urlparse(link).netloc + content, table_of_content = _build_table_of_content(content) - source_title = full_sanitize(source_title)[: constants.ARTICLE_SOURCE_TITLE_MAX_LENGTH] - if not source_title: - source_title = urlparse(link).netloc + if not summary and content: + summary = _get_fallback_summary_from_content(content) - return ArticleData( - external_article_id=full_sanitize(external_article_id)[ - : constants.EXTERNAL_ARTICLE_ID_MAX_LENGTH - ], - source_title=source_title, - title=title, - summary=sanitize_keep_safe_tags( - summary, extra_tags_to_cleanup=constants.EXTRA_TAGS_TO_REMOVE_FROM_SUMMARY - ), - content=sanitize_keep_safe_tags(content), - table_of_content=toc, - authors=_sanitize_lists(authors), - contributors=_sanitize_lists(contributors), - tags=_sanitize_lists(tags), - link=link, - preview_picture_url=preview_picture_url, - preview_picture_alt=full_sanitize(preview_picture_alt), - published_at=published_at, - updated_at=updated_at, - language=language, - annotations=annotations, - read_at=read_at, - is_favorite=is_favorite, - ) + if not title: + title = urlparse(values.get("link")).netloc + if not source_title: + source_title = urlparse(values.get("link")).netloc -def _sanitize_lists(alist: list[str]) -> list[str]: - cleaned_list = [] - for item in alist: - cleaned_item = full_sanitize(item.strip()) - if cleaned_item: - cleaned_list.append(cleaned_item) - - return cleaned_list + return { + **values, + "summary": summary, + "content": content, + "title": title, + "source_title": source_title, + "table_of_content": table_of_content, + } def _resolve_relative_links(article_link: str, content: str) -> str: @@ -181,10 +165,16 @@ async def get_article_from_url(url: str) -> ArticleData: return _build_article_data_from_soup( url, soup, - 
content_language, + content_language=content_language, ) +def build_article_data_from_content(*, url: str, title: str, content: str) -> ArticleData: + soup = BeautifulSoup(content, "html.parser") + + return _build_article_data_from_soup(url, soup, forced_title=title) + + async def _get_page_content(url: str) -> tuple[str, BeautifulSoup, str | None]: async with get_async_client() as client: # We can have HTTP redirect with the meta htt-equiv tag. Let's read them to up to 10 time @@ -226,19 +216,23 @@ def _parse_http_equiv_refresh(value: str) -> str | None: def _build_article_data_from_soup( - fetched_url: str, soup: BeautifulSoup, content_language: str | None + fetched_url: str, + soup: BeautifulSoup, + *, + content_language: str | None = None, + forced_title: str | None = None, ) -> ArticleData: content = _get_content(soup) - return build_article_data( + return ArticleData( external_article_id="", source_title=_get_site_title(fetched_url, soup), - title=_get_title(soup), + title=forced_title or _get_title(soup), summary=_get_summary(soup, content), content=content, - authors=_get_authors(soup), - contributors=[], - tags=_get_tags(soup), + authors=tuple(_get_authors(soup)), + contributors=(), + tags=tuple(_get_tags(soup)), link=_get_link(fetched_url, soup), preview_picture_url=_get_preview_picture_url(fetched_url, soup), preview_picture_alt="", @@ -465,10 +459,10 @@ def _get_lang(soup: BeautifulSoup, content_language: str | None) -> str: return language -def _build_table_of_content(content: str) -> tuple[str, list[TocTopItem]]: +def _build_table_of_content(content: str) -> tuple[str, list[TableOfContentTopItem]]: soup = BeautifulSoup(content, "html.parser") toc = [] - toc_item_top_level: TocTopItem | None = None + toc_item_top_level: TableOfContentTopItem | None = None for header in soup.find_all(["h1", "h2", "h3", "h4", "h5", "h6"]): text = full_sanitize(header.text) @@ -477,11 +471,11 @@ def _build_table_of_content(content: str) -> tuple[str, list[TocTopItem]]: 
level = int(header.name.replace("h", "")) # If the content is well-structured, all top level title will be at the same level. # Since we don't know, we allow for a first h2 to be followed by a h1. - if toc_item_top_level is None or level <= toc_item_top_level["level"]: - toc_item_top_level = TocTopItem(id=id_, text=text, level=level, children=[]) + if toc_item_top_level is None or level <= toc_item_top_level.level: + toc_item_top_level = TableOfContentTopItem(id=id_, text=text, level=level) toc.append(toc_item_top_level) # We only allow one level in the TOC. It's enough. - elif level == toc_item_top_level["level"] + 1: - toc_item_top_level["children"].append(TocItem(id=id_, text=text, level=level)) + elif level == toc_item_top_level.level + 1: + toc_item_top_level.children.append(TableOfContentItem(id=id_, text=text, level=level)) return str(soup), toc diff --git a/legadilo/reading/tests/factories.py b/legadilo/reading/tests/factories.py index e9ba0712..fb02cf3a 100644 --- a/legadilo/reading/tests/factories.py +++ b/legadilo/reading/tests/factories.py @@ -22,6 +22,7 @@ from legadilo.users.tests.factories import UserFactory from ..models import Article, ArticleFetchError, Comment, ReadingList, Tag +from ..services.article_fetching import ArticleData class ArticleFactory(DjangoModelFactory): @@ -75,3 +76,16 @@ class CommentFactory(DjangoModelFactory): class Meta: model = Comment + + +class ArticleDataFactory(factory.DictFactory): + external_article_id = factory.Sequence(lambda n: f"external-id-{n}") + source_title = factory.Sequence(lambda n: f"Source {n}") + title = factory.Sequence(lambda n: f"Article {n}") + summary = "" + content = "" + link = factory.Sequence(lambda n: f"https://example.com/article-{n}.html") + language = "en" + + class Meta: + model = ArticleData diff --git a/legadilo/reading/tests/snapshots/test_api/test_create_article_from_data/article.json b/legadilo/reading/tests/snapshots/test_api/test_create_article_from_data/article.json new file mode 
100644 index 00000000..0f97e3c6 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_create_article_from_data/article.json @@ -0,0 +1,30 @@ +{ + "annotations": [], + "authors": [ + "Alexandre Dumas" + ], + "content": "
\n
\n

\n Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc porttitor dolor in justo pharetra suscipit. Vestibulum hendrerit felis id ex gravida egestas. Sed tempus placerat nibh. Proin faucibus bibendum magna in ultricies. Fusce feugiat sagittis odio a gravida. Pellentesque dignissim lorem quis eros placerat ullamcorper nec ut quam. Curabitur non tortor a justo hendrerit vehicula in a neque. Mauris vitae mi ante. Aenean et efficitur massa. Donec nec scelerisque lectus, eu malesuada urna. Aenean at dignissim purus. Praesent et tellus non ligula mollis commodo id sed felis. Phasellus fringilla non libero vitae efficitur.\n

\n

\n Vivamus eu ornare ligula. Sed ac justo eget metus tempus venenatis. Aenean ante arcu, dignissim sed bibendum nec, commodo ut tellus. Donec rhoncus leo a enim volutpat, ut porttitor risus sodales. Proin sit amet sapien vitae felis mollis luctus. Morbi malesuada nec quam sed facilisis. Vivamus urna quam, sagittis at eros vitae, porta eleifend orci. Aliquam nec velit enim. Suspendisse egestas pulvinar volutpat. Pellentesque nec sem eget nunc facilisis porta. Ut eleifend mi sed laoreet sollicitudin. Sed sagittis nibh eget quam luctus facilisis.\n

\n

\n Vestibulum eu nibh ullamcorper, luctus tortor eget, semper arcu. Curabitur id cursus urna, eu accumsan mi. Curabitur ornare elit vitae quam tempor egestas. Maecenas viverra malesuada sapien non blandit. Sed luctus pellentesque nulla eu pretium. Cras iaculis interdum interdum. Ut in metus purus. Aliquam id pretium velit, eu tempus tellus.\n

\n
\n

Opinion

\n

You may disagree

\n
\n
\n
\n

\nMusketeers\n

\n
\n
", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": false, + "language": "fr", + "link": "https://example.com/articles/article.html", + "main_source_title": "Super blog", + "main_source_type": "MANUAL", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "https://example.com/images/profile.png", + "published_at": "2024-02-26T23:00:00Z", + "read_at": null, + "reading_time": 1, + "slug": "article-slug", + "summary": "I just wrote a new book, I\u2019ll hope you will like it! Here are some thoughts on it.", + "table_of_content": [], + "title": "Article title", + "updated_at": "2024-03-08T23:00:00Z", + "was_opened": false +} \ No newline at end of file diff --git a/legadilo/reading/tests/snapshots/test_api/test_create_article_from_link_only/article.json b/legadilo/reading/tests/snapshots/test_api/test_create_article_from_link_only/article.json new file mode 100644 index 00000000..ac76ca92 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_create_article_from_link_only/article.json @@ -0,0 +1,28 @@ +{ + "annotations": [], + "authors": [], + "content": "", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": false, + "language": "en", + "link": "https://example.com/articles/article.html", + "main_source_title": "Source 0", + "main_source_type": "MANUAL", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "", + "published_at": null, + "read_at": null, + "reading_time": 0, + "slug": "article-slug", + "summary": "", + "table_of_content": [], + "title": "Article title", + "updated_at": null, + "was_opened": false +} \ No newline at end of file diff --git a/legadilo/reading/tests/snapshots/test_api/test_create_article_with_tags/article.json 
b/legadilo/reading/tests/snapshots/test_api/test_create_article_with_tags/article.json new file mode 100644 index 00000000..f079c047 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_create_article_with_tags/article.json @@ -0,0 +1,28 @@ +{ + "annotations": [], + "authors": [], + "content": "", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": false, + "language": "en", + "link": "https://example.com/articles/article.html", + "main_source_title": "Source 1", + "main_source_type": "MANUAL", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "", + "published_at": null, + "read_at": null, + "reading_time": 0, + "slug": "article-slug", + "summary": "", + "table_of_content": [], + "title": "Article title", + "updated_at": null, + "was_opened": false +} \ No newline at end of file diff --git a/legadilo/reading/tests/snapshots/test_api/test_get/article.json b/legadilo/reading/tests/snapshots/test_api/test_get/article.json new file mode 100644 index 00000000..0a51ace4 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_get/article.json @@ -0,0 +1,28 @@ +{ + "annotations": [], + "authors": [], + "content": "", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": false, + "language": "", + "link": "https://example.com/articles/article.html", + "main_source_title": "", + "main_source_type": "FEED", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "", + "published_at": "2024-11-24T17:57:00Z", + "read_at": null, + "reading_time": 0, + "slug": "article-slug", + "summary": "", + "table_of_content": [], + "title": "Article title", + "updated_at": "2024-11-24T17:57:00Z", + "was_opened": false +} \ No newline at end of file diff --git 
a/legadilo/reading/tests/snapshots/test_api/test_no_update/article.json b/legadilo/reading/tests/snapshots/test_api/test_no_update/article.json new file mode 100644 index 00000000..0a51ace4 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_no_update/article.json @@ -0,0 +1,28 @@ +{ + "annotations": [], + "authors": [], + "content": "", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": false, + "language": "", + "link": "https://example.com/articles/article.html", + "main_source_title": "", + "main_source_type": "FEED", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "", + "published_at": "2024-11-24T17:57:00Z", + "read_at": null, + "reading_time": 0, + "slug": "article-slug", + "summary": "", + "table_of_content": [], + "title": "Article title", + "updated_at": "2024-11-24T17:57:00Z", + "was_opened": false +} \ No newline at end of file diff --git a/legadilo/reading/tests/snapshots/test_api/test_update/article.json b/legadilo/reading/tests/snapshots/test_api/test_update/article.json new file mode 100644 index 00000000..c4713520 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_update/article.json @@ -0,0 +1,28 @@ +{ + "annotations": [], + "authors": [], + "content": "", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": true, + "language": "", + "link": "https://example.com/articles/article.html", + "main_source_title": "", + "main_source_type": "FEED", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "", + "published_at": "2024-11-24T17:57:00Z", + "read_at": "2024-11-24T18:00:00Z", + "reading_time": 10, + "slug": "article-slug", + "summary": "", + "table_of_content": [], + "title": "Article title", + "updated_at": "2024-11-24T17:57:00Z", + 
"was_opened": false +} \ No newline at end of file diff --git a/legadilo/reading/tests/snapshots/test_api/test_update_tags/article.json b/legadilo/reading/tests/snapshots/test_api/test_update_tags/article.json new file mode 100644 index 00000000..0a51ace4 --- /dev/null +++ b/legadilo/reading/tests/snapshots/test_api/test_update_tags/article.json @@ -0,0 +1,28 @@ +{ + "annotations": [], + "authors": [], + "content": "", + "contributors": [], + "external_article_id": "external-article-id", + "external_tags": [], + "id": 1, + "is_favorite": false, + "is_for_later": false, + "is_read": false, + "language": "", + "link": "https://example.com/articles/article.html", + "main_source_title": "", + "main_source_type": "FEED", + "opened_at": null, + "preview_picture_alt": "", + "preview_picture_url": "", + "published_at": "2024-11-24T17:57:00Z", + "read_at": null, + "reading_time": 0, + "slug": "article-slug", + "summary": "", + "table_of_content": [], + "title": "Article title", + "updated_at": "2024-11-24T17:57:00Z", + "was_opened": false +} \ No newline at end of file diff --git a/legadilo/reading/tests/test_api.py b/legadilo/reading/tests/test_api.py new file mode 100644 index 00000000..e15a9e56 --- /dev/null +++ b/legadilo/reading/tests/test_api.py @@ -0,0 +1,316 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from http import HTTPStatus +from typing import Any + +import pytest +from django.urls import reverse + +from legadilo.reading import constants +from legadilo.reading.models import Article +from legadilo.reading.tests.factories import ArticleDataFactory, ArticleFactory, TagFactory +from legadilo.reading.tests.fixtures import get_article_fixture_content +from legadilo.utils.testing import serialize_for_snapshot +from legadilo.utils.time_utils import utcdt + + +def _prepare_article_for_serialization(data: dict[str, Any], article: Article) -> dict[str, Any]: + data = data.copy() + assert data["id"] == article.id + assert data["title"] == article.title + assert data["slug"] == article.slug + assert data["external_article_id"] == article.external_article_id + assert data["link"] == article.link + + data["id"] = 1 + data["title"] = "Article title" + data["slug"] = "article-slug" + data["external_article_id"] = "external-article-id" + data["link"] = "https://example.com/articles/article.html" + + return data + + +@pytest.mark.django_db +class TestCreateArticleView: + @pytest.fixture(autouse=True) + def _setup_data(self): + self.url = reverse("api-1.0.0:create_article") + self.article_link = "https://example.com/articles/legadilo.html" + + def test_not_logged_in(self, client): + response = client.post(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_create_article_empty_payload(self, user, logged_in_sync_client): + response = logged_in_sync_client.post(self.url, {}, content_type="application/json") + + assert response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY + assert response.json() == { + "detail": [ + {"type": "missing", "loc": ["body", "payload", "link"], "msg": "Field required"} + ] + } + + def test_create_article_invalid_data(self, user, logged_in_sync_client): + response = logged_in_sync_client.post( + self.url, + {"link": self.article_link, "content": "Some content"}, + content_type="application/json", + ) + + assert 
response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY + assert response.json() == { + "detail": [ + { + "ctx": { + "error": "You must supply either both title and content or none of them" + }, + "loc": ["body", "payload"], + "msg": "Value error, You must supply either both title and content or none of them", # noqa: E501 + "type": "value_error", + } + ] + } + + def test_create_article_from_link_only( + self, django_assert_num_queries, logged_in_sync_client, mocker, snapshot + ): + mocked_get_article_from_url = mocker.patch( + "legadilo.reading.api.get_article_from_url", + return_value=ArticleDataFactory(link=self.article_link), + ) + + with django_assert_num_queries(11): + response = logged_in_sync_client.post( + self.url, {"link": self.article_link}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.CREATED + assert Article.objects.count() == 1 + article = Article.objects.get() + assert article.link == self.article_link + mocked_get_article_from_url.assert_called_once_with(self.article_link) + snapshot.assert_match( + serialize_for_snapshot(_prepare_article_for_serialization(response.json(), article)), + "article.json", + ) + + def test_create_article_with_tags( + self, django_assert_num_queries, logged_in_sync_client, mocker, snapshot + ): + mocked_get_article_from_url = mocker.patch( + "legadilo.reading.api.get_article_from_url", + return_value=ArticleDataFactory(link=self.article_link), + ) + + with django_assert_num_queries(15): + response = logged_in_sync_client.post( + self.url, + {"link": self.article_link, "tags": ["Some tag"]}, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.CREATED + assert Article.objects.count() == 1 + article = Article.objects.get() + assert article.link == self.article_link + assert list(article.tags.all().values_list("title", flat=True)) == ["Some tag"] + mocked_get_article_from_url.assert_called_once_with(self.article_link) + snapshot.assert_match( + 
serialize_for_snapshot(_prepare_article_for_serialization(response.json(), article)), + "article.json", + ) + + def test_create_article_from_data( + self, django_assert_num_queries, logged_in_sync_client, mocker, snapshot + ): + mocked_get_article_from_url = mocker.patch( + "legadilo.reading.api.get_article_from_url", + return_value=ArticleDataFactory(link=self.article_link), + ) + + with django_assert_num_queries(11): + response = logged_in_sync_client.post( + self.url, + { + "link": self.article_link, + "content": get_article_fixture_content("sample_blog_article.html"), + "title": "My article", + }, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.CREATED + assert Article.objects.count() == 1 + article = Article.objects.get() + assert article.link == "https://www.example.com/posts/en/1-super-article/" + assert article.table_of_content == [] + assert not mocked_get_article_from_url.called + snapshot.assert_match( + serialize_for_snapshot(_prepare_article_for_serialization(response.json(), article)), + "article.json", + ) + + +@pytest.mark.django_db +class TestGetArticleView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.article = ArticleFactory( + user=user, + published_at=utcdt(2024, 11, 24, 17, 57, 0), + updated_at=utcdt(2024, 11, 24, 17, 57, 0), + ) + self.url = reverse("api-1.0.0:get_article", kwargs={"article_id": self.article.id}) + + def test_not_logged_in(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_get_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert Article.objects.count() == 1 + + def test_get(self, logged_in_sync_client, snapshot): + response = logged_in_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + snapshot.assert_match( + serialize_for_snapshot( + 
_prepare_article_for_serialization(response.json(), self.article) + ), + "article.json", + ) + + +@pytest.mark.django_db +class TestUpdateArticleView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.article = ArticleFactory( + user=user, + published_at=utcdt(2024, 11, 24, 17, 57, 0), + updated_at=utcdt(2024, 11, 24, 17, 57, 0), + ) + self.url = reverse("api-1.0.0:update_article", kwargs={"article_id": self.article.id}) + + def test_not_logged_in(self, client): + response = client.patch(self.url, {}, content_type="application/json") + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_update_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.patch( + self.url, {}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.NOT_FOUND + + def test_no_update(self, logged_in_sync_client, django_assert_num_queries, snapshot): + with django_assert_num_queries(7): + response = logged_in_sync_client.patch(self.url, {}, content_type="application/json") + + assert response.status_code == HTTPStatus.OK + snapshot.assert_match( + serialize_for_snapshot( + _prepare_article_for_serialization(response.json(), self.article) + ), + "article.json", + ) + + def test_update(self, logged_in_sync_client, django_assert_num_queries, snapshot): + with django_assert_num_queries(8): + response = logged_in_sync_client.patch( + self.url, + { + "title": "

New title

", + "read_at": "2024-11-24 18:00:00Z", + "reading_time": 10, + }, + content_type="application/json", + ) + + self.article.refresh_from_db() + assert self.article.title == "New title" + assert self.article.read_at == utcdt(2024, 11, 24, 18) + assert self.article.reading_time == 10 + snapshot.assert_match( + serialize_for_snapshot( + _prepare_article_for_serialization(response.json(), self.article) + ), + "article.json", + ) + + def test_update_tags(self, logged_in_sync_client, user, django_assert_num_queries, snapshot): + existing_tag = TagFactory(user=user, title="Tag to keep") + tag_to_delete = TagFactory(user=user, title="Tag to delete") + self.article.tags.add(existing_tag, tag_to_delete) + + with django_assert_num_queries(16): + response = logged_in_sync_client.patch( + self.url, + { + "tags": [existing_tag.slug, "", "

New tag

"], + }, + content_type="application/json", + ) + + self.article.refresh_from_db() + assert list( + self.article.article_tags.all().values_list("tag__title", "tagging_reason") + ) == [ + ("New tag", constants.TaggingReason.ADDED_MANUALLY), + ("Tag to delete", constants.TaggingReason.DELETED), + ("Tag to keep", constants.TaggingReason.ADDED_MANUALLY), + ] + snapshot.assert_match( + serialize_for_snapshot( + _prepare_article_for_serialization(response.json(), self.article) + ), + "article.json", + ) + + +@pytest.mark.django_db +class TestDeleteArticleView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.article = ArticleFactory(user=user) + self.url = reverse("api-1.0.0:delete_article", kwargs={"article_id": self.article.id}) + + def test_not_logged_in(self, client): + response = client.delete(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + assert Article.objects.count() == 1 + + def test_delete_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.delete(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert Article.objects.count() == 1 + + def test_delete(self, logged_in_sync_client): + response = logged_in_sync_client.delete(self.url) + + assert response.status_code == HTTPStatus.NO_CONTENT + assert Article.objects.count() == 0 diff --git a/legadilo/reading/tests/test_models/test_article.py b/legadilo/reading/tests/test_models/test_article.py index bef6932e..93ffa13a 100644 --- a/legadilo/reading/tests/test_models/test_article.py +++ b/legadilo/reading/tests/test_models/test_article.py @@ -40,6 +40,7 @@ ) from legadilo.utils.testing import serialize_for_snapshot from legadilo.utils.time_utils import utcdt, utcnow +from legadilo.utils.validators import TableOfContentTopItem @pytest.mark.parametrize( @@ -1013,12 +1014,13 @@ def test_update_and_create_articles(self, user, django_assert_num_queries): external_article_id="some-article-1", title="Article 1", 
summary="Summary 1", - content="Description 1" + " word " * user.settings.default_reading_time * 3, - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + content="""

My title

Sub-section

Description 1""" # noqa: E501 + + " word " * user.settings.default_reading_time * 3, + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=now_dt, @@ -1032,10 +1034,10 @@ def test_update_and_create_articles(self, user, django_assert_num_queries): title="Article updated", summary="Summary updated", content="Description updated", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), preview_picture_url="", preview_picture_alt="", published_at=now_dt, @@ -1049,10 +1051,10 @@ def test_update_and_create_articles(self, user, django_assert_num_queries): title="Updated article", summary="Summary updated", content="Description updated", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), preview_picture_url="", preview_picture_alt="", published_at=utcdt(2024, 4, 19), @@ -1065,11 +1067,11 @@ def test_update_and_create_articles(self, user, django_assert_num_queries): title="Article 3", summary="Summary 3", content="Description 3", - table_of_content=[], - authors=["Author"], - contributors=["Contributor"], - tags=["Some tag"], - link="https//example.com/article/3", + table_of_content=(), + authors=("Author",), + contributors=("Contributor",), + tags=("Some tag",), + link="https://example.com/article/3", preview_picture_url="", preview_picture_alt="", published_at=now_dt, @@ -1098,15 +1100,20 @@ def test_update_and_create_articles(self, user, django_assert_num_queries): assert existing_article_to_keep.updated_at == utcdt(2024, 4, 20) assert existing_article_to_keep.obj_created_at == utcdt(2024, 6, 1, 12, 0) assert existing_article_to_keep.obj_updated_at == utcdt(2024, 6, 2, 12, 0) - other_article = 
Article.objects.exclude( - id__in=[existing_article_to_update.id, existing_article_to_keep.id] - ).first() - assert other_article is not None + other_article = Article.objects.get(external_article_id="some-article-1") assert other_article.title == "Article 1" assert other_article.slug == "article-1" assert other_article.reading_time == 3 assert other_article.obj_created_at == utcdt(2024, 6, 2, 12, 0) assert other_article.obj_updated_at == utcdt(2024, 6, 2, 12, 0) + assert other_article.table_of_content == [ + { + "children": [{"id": "sub-section", "level": 3, "text": "Sub-section"}], + "id": "section-title", + "level": 2, + "text": "My title", + } + ] assert list( Article.objects.annotate(tag_slugs=ArrayAgg("tags__slug")).values_list( "tag_slugs", flat=True @@ -1130,11 +1137,11 @@ def test_same_link_multiple_times(self, user, django_assert_num_queries): title="Article 1", summary="Summary 1", content="Description 1" + " word " * user.settings.default_reading_time * 3, - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=now_dt, @@ -1144,14 +1151,14 @@ def test_same_link_multiple_times(self, user, django_assert_num_queries): ), ArticleData( external_article_id="some-article-1", - link="https//example.com/article/1", + link="https://example.com/article/1", title="Article updated", summary="Summary updated", content="Description updated", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), preview_picture_url="", preview_picture_alt="", published_at=now_dt, @@ -1186,10 +1193,10 @@ def test_manually_readd_read_article(self, user, django_assert_num_queries): title=existing_article.title, 
summary=existing_article.summary, content=existing_article.content, - table_of_content=[], - authors=existing_article.authors, - contributors=existing_article.contributors, - tags=existing_article.external_tags, + table_of_content=(), + authors=tuple(existing_article.authors), + contributors=tuple(existing_article.contributors), + tags=tuple(existing_article.external_tags), published_at=now_dt, updated_at=now_dt, source_title="Some site", @@ -1222,10 +1229,10 @@ def test_readd_read_article_from_a_feed(self, user, django_assert_num_queries): title=existing_article.title, summary=existing_article.summary, content=existing_article.content, - table_of_content=[], - authors=existing_article.authors, - contributors=existing_article.contributors, - tags=existing_article.external_tags, + table_of_content=(), + authors=tuple(existing_article.authors), + contributors=tuple(existing_article.contributors), + tags=tuple(existing_article.external_tags), published_at=now_dt, updated_at=now_dt, source_title="Some site", @@ -1525,7 +1532,7 @@ def test_generated_fields(self): True, { "title": "Updated title", - "content": "Updated content", + "content": """

My title

Updated content""", "updated_at": utcdt(2024, 4, 21), }, True, @@ -1540,7 +1547,7 @@ def test_generated_fields(self): False, { "title": "Initial title", - "content": "Updated content", + "content": """

My title

Updated content""", "updated_at": utcdt(2024, 4, 21), }, True, @@ -1561,10 +1568,10 @@ def test_generated_fields(self): { "title": "Updated title", "summary": "Updated summary", - "content": "Updated content", - "table_of_content": [ - {"id": "header", "text": "My title", "level": 2, "children": []} - ], + "content": """

My title

Updated content""", + "table_of_content": ( + TableOfContentTopItem(id="my-title", text="My title", level=2, children=[]), + ), "updated_at": utcdt(2024, 4, 20), "external_tags": ["Initial tag", "Some tag", "Updated tag"], "authors": ["Author 1", "Author 2", "Author 3"], @@ -1590,12 +1597,14 @@ def test_update_article_from_data( external_article_id="some-article-1", title="Updated title", summary="Updated summary", - content="Updated content", - table_of_content=[{"id": "header", "text": "My title", "level": 2, "children": []}], - authors=["Author 2", "Author 3"], - contributors=["Contributor 2", "Contributor 3"], - tags=["Some tag", "Updated tag"], - link="https//example.com/article/1", + content="

My title

Updated content", + table_of_content=( + TableOfContentTopItem(id="header", text="My title", level=2, children=[]), + ), + authors=("Author 2", "Author 3"), + contributors=("Contributor 2", "Contributor 3"), + tags=("Some tag", "Updated tag"), + link="https://example.com/article/1", preview_picture_url="https://example.com/preview.png", preview_picture_alt="Some image alt", published_at=utcdt(2024, 4, 20), @@ -1633,11 +1642,11 @@ def test_update_article_from_data_article_data_is_missing_some_data(self, user): title="Updated title", summary="", content="", - table_of_content=[], - authors=["Author"], - contributors=[], - tags=[], - link="https//example.com/article/1", + table_of_content=(), + authors=("Author",), + contributors=(), + tags=(), + link="https://example.com/article/1", preview_picture_url="", preview_picture_alt="", published_at=utcdt(2024, 4, 20), diff --git a/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_build_article_data/with-headers/article_data.json b/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_build_article_data/with-headers/article_data.json index dabf711d..06ff7a69 100644 --- a/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_build_article_data/with-headers/article_data.json +++ b/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_build_article_data/with-headers/article_data.json @@ -28,7 +28,7 @@ { "id": "this-one-has-html-in", "level": 2, - "text": "This one has HTML in " + "text": "This one has HTML in" } ], "id": "some-header", diff --git a/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url/article_data.json b/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url/article_data.json index 08bf4eca..f103aa07 100644 --- a/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url/article_data.json +++ 
b/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url/article_data.json @@ -11,7 +11,7 @@ "link": "https://www.example.com/posts/en/1-super-article/", "preview_picture_alt": "", "preview_picture_url": "https://www.example.com/images/profile.png", - "published_at": "2024-02-26T23:00:00+00:00", + "published_at": "2024-02-26T23:00:00Z", "read_at": null, "source_title": "Super blog", "summary": "I just wrote a new book, I\u2019ll hope you will like it! Here are some thoughts on it.", @@ -20,5 +20,5 @@ "Musketeers" ], "title": "On the 3 musketeers", - "updated_at": "2024-03-08T23:00:00+00:00" + "updated_at": "2024-03-08T23:00:00Z" } \ No newline at end of file diff --git a/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url_process_fixture/no-article-tag/article_data.json b/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url_process_fixture/no-article-tag/article_data.json index 08bf4eca..f103aa07 100644 --- a/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url_process_fixture/no-article-tag/article_data.json +++ b/legadilo/reading/tests/test_services/snapshots/test_article_fetching/test_get_article_from_url_process_fixture/no-article-tag/article_data.json @@ -11,7 +11,7 @@ "link": "https://www.example.com/posts/en/1-super-article/", "preview_picture_alt": "", "preview_picture_url": "https://www.example.com/images/profile.png", - "published_at": "2024-02-26T23:00:00+00:00", + "published_at": "2024-02-26T23:00:00Z", "read_at": null, "source_title": "Super blog", "summary": "I just wrote a new book, I\u2019ll hope you will like it! 
Here are some thoughts on it.", @@ -20,5 +20,5 @@ "Musketeers" ], "title": "On the 3 musketeers", - "updated_at": "2024-03-08T23:00:00+00:00" + "updated_at": "2024-03-08T23:00:00Z" } \ No newline at end of file diff --git a/legadilo/reading/tests/test_services/test_article_fetching.py b/legadilo/reading/tests/test_services/test_article_fetching.py index dabb64bd..362fc526 100644 --- a/legadilo/reading/tests/test_services/test_article_fetching.py +++ b/legadilo/reading/tests/test_services/test_article_fetching.py @@ -19,7 +19,7 @@ import pytest -from legadilo.reading.services.article_fetching import build_article_data, get_article_from_url +from legadilo.reading.services.article_fetching import ArticleData, get_article_from_url from legadilo.reading.tests.fixtures import get_article_fixture_content from legadilo.utils.testing import serialize_for_snapshot @@ -216,6 +216,6 @@ async def test_get_article_from_url_process_fixture( ], ) def test_build_article_data(parameters: dict[str, Any], snapshot): - article_data = build_article_data(**parameters) + article_data = ArticleData(**parameters) snapshot.assert_match(serialize_for_snapshot(article_data), "article_data.json") diff --git a/legadilo/reading/views/fetch_article_views.py b/legadilo/reading/views/fetch_article_views.py index bd022f9e..77045923 100644 --- a/legadilo/reading/views/fetch_article_views.py +++ b/legadilo/reading/views/fetch_article_views.py @@ -29,6 +29,7 @@ from django.utils.safestring import mark_safe from django.utils.translation import gettext_lazy as _ from django.views.decorators.http import require_http_methods +from pydantic import ValidationError as PydanticValidationError from legadilo.core.forms.fields import MultipleTagsField from legadilo.reading import constants @@ -154,7 +155,7 @@ async def _handle_save( force_update=force_update, ) )[0] - except (httpx.HTTPError, ArticleTooBigError) as e: + except (httpx.HTTPError, ArticleTooBigError, PydanticValidationError) as e: article, created = 
await sync_to_async(Article.objects.create_invalid_article)( request.user, article_link, diff --git a/legadilo/templates/ninja/swagger.html b/legadilo/templates/ninja/swagger.html new file mode 100644 index 00000000..1a1b7832 --- /dev/null +++ b/legadilo/templates/ninja/swagger.html @@ -0,0 +1,28 @@ +{% load static %} + + + + + + + {{ api.title }} + + + +
+ + + + diff --git a/legadilo/templates/users/manage_tokens.html b/legadilo/templates/users/manage_tokens.html new file mode 100644 index 00000000..9a4a37c3 --- /dev/null +++ b/legadilo/templates/users/manage_tokens.html @@ -0,0 +1,67 @@ +{% extends "base.html" %} + +{% load i18n static crispy_forms_tags %} + +{% block title %} + {% translate "Manage API tokens" %} +{% endblock title %} +{% block page_js %} + +{% endblock page_js %} +{% block content %} +

{% translate "Manage API tokens" %}

+ {% if new_application_token %} +

+ {% blocktranslate with token_title=new_application_token.title %} + Successfully created token {{ token_title }}. Copy the token below, you + won’t be able to get it back. + {% endblocktranslate %} +

{{ new_application_token.token }}
+

+ {% endif %} +

{% translate "List of tokens" %}

+
    + {% for token in tokens %} +
  • + {{ token.title }} + + {% blocktranslate with created_at=token.created_at|date:"SHORT_DATETIME_FORMAT" %} + Created on {{ created_at }} + {% endblocktranslate %} + + {% if token.validity_end %} + + {% blocktranslate with validity_end=token.validity_end|date:"SHORT_DATETIME_FORMAT" %} + Valid until {{ validity_end }} + {% endblocktranslate %} + + {% endif %} + {% if token.last_used_at %} + + {% blocktranslate with last_used_at=token.last_used_at|date:"SHORT_DATETIME_FORMAT" %} + Last used {{ last_used_at }} + {% endblocktranslate %} + + {% endif %} +
    + {% csrf_token %} + +
    +
  • + {% empty %} +
    {% translate "No token found" %}
    + {% endfor %} +
+

{% translate "Create new token" %}

+
+ {% csrf_token %} + {{ form|crispy }} + +
+{% endblock content %} diff --git a/legadilo/templates/users/user_detail.html b/legadilo/templates/users/user_detail.html index 57c89ee8..737624d5 100644 --- a/legadilo/templates/users/user_detail.html +++ b/legadilo/templates/users/user_detail.html @@ -31,6 +31,9 @@

{{ object.email }}

{% translate "Import/Export articles" %} + {% translate "Manage API tokens" %} diff --git a/legadilo/users/admin.py b/legadilo/users/admin.py index 99ba93d2..886b5db8 100644 --- a/legadilo/users/admin.py +++ b/legadilo/users/admin.py @@ -22,7 +22,7 @@ from django.utils.translation import gettext_lazy as _ from legadilo.users.forms import UserAdminChangeForm, UserAdminCreationForm -from legadilo.users.models import Notification, UserSettings +from legadilo.users.models import ApplicationToken, Notification, UserSettings User = get_user_model() @@ -90,3 +90,8 @@ class NotificationAdmin(admin.ModelAdmin): autocomplete_fields = ("user",) list_display = ("title", "created_at", "user", "is_read") list_filter = ("is_read",) + + +@admin.register(ApplicationToken) +class ApplicationTokenAdmin(admin.ModelAdmin): + autocomplete_fields = ("user",) diff --git a/legadilo/users/api.py b/legadilo/users/api.py new file mode 100644 index 00000000..a8d2aa5a --- /dev/null +++ b/legadilo/users/api.py @@ -0,0 +1,124 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from datetime import datetime + +import jwt +from django.http import HttpRequest +from django.shortcuts import aget_object_or_404 +from ninja import ModelSchema, Router, Schema +from ninja.errors import AuthenticationError +from ninja.security import HttpBearer +from pydantic import BaseModel as BaseSchema +from pydantic import ValidationError as PydanticValidationError + +from config import settings +from legadilo.users.models import ApplicationToken +from legadilo.utils.time_utils import utcnow + +from .models import User +from .user_types import AuthenticatedApiRequest + +users_api_router = Router(tags=["auth"]) + + +class AuthBearer(HttpBearer): + async def authenticate(self, request, token) -> User | None: + if not token: + return None + + decoded_jwt = _decode_jwt(token) + return await _get_user_from_jwt(decoded_jwt) + + +class JWT(BaseSchema): + application_token_title: str + user_id: int + exp: datetime + + +def _decode_jwt(token: str) -> JWT: + try: + decoded_token = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]) + return JWT.model_validate(decoded_token) + except jwt.ExpiredSignatureError as e: + raise AuthenticationError("Expired JWT token") from e + except (jwt.PyJWTError, PydanticValidationError) as e: + raise AuthenticationError("Invalid JWT token") from e + + +async def _get_user_from_jwt(decoded_jwt: JWT) -> User | None: + try: + return await User.objects.aget(id=decoded_jwt.user_id) + except User.DoesNotExist: + return None + + +class RefreshTokenPayload(Schema): + application_token: str + + +class Token(Schema): + jwt: str + + +@users_api_router.post( + "/refresh/", + auth=None, + response=Token, + url_name="refresh_token", + summary="Create a new access token from an application token", +) +async def refresh_token_view(request: HttpRequest, payload: RefreshTokenPayload) -> Token: + application_token = await aget_object_or_404( + ApplicationToken.objects.get_queryset().only_valid().defer(None), + 
token=payload.application_token, + ) + application_token.last_used_at = utcnow() + await application_token.asave() + jwt = _create_jwt(application_token.user_id, application_token.title) + + return Token(jwt=jwt) + + +def _create_jwt(user_id: int, application_token: str) -> str: + return jwt.encode( + { + "application_token_title": application_token, + "user_id": user_id, + "exp": utcnow() + settings.JWT_MAX_AGE, + }, + settings.SECRET_KEY, + algorithm=settings.JWT_ALGORITHM, + ) + + +class UserSchema(ModelSchema): + class Meta: + model = User + fields = ("email",) + + +@users_api_router.get( + "", response=UserSchema, url_name="user_info", summary="Get current user info" +) +async def get_user_view(request: AuthenticatedApiRequest) -> User: # noqa: RUF029 auth is async! + """Access information about your user. + + It mostly serves as an endpoint to check that you are correctly authenticated and can use the + API with a token. + """ + return request.auth diff --git a/legadilo/users/migrations/0006_applicationtoken.py b/legadilo/users/migrations/0006_applicationtoken.py new file mode 100644 index 00000000..5f744e26 --- /dev/null +++ b/legadilo/users/migrations/0006_applicationtoken.py @@ -0,0 +1,73 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +# Generated by Django 5.1.3 on 2024-11-23 20:47 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("users", "0005_notification_link_notification_link_text_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="ApplicationToken", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("title", models.CharField(max_length=255)), + ("token", models.CharField(max_length=255)), + ( + "validity_end", + models.DateTimeField( + blank=True, + help_text="Leave empty to have a token that will last until deletion.", + null=True, + verbose_name="Validity end", + ), + ), + ("last_used_at", models.DateTimeField(blank=True, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="application_tokens", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "ordering": ["title"], + "constraints": [ + models.UniqueConstraint( + fields=("token",), name="users_applicationtoken_token_unique" + ), + models.UniqueConstraint( + fields=("title", "user"), name="users_applicationtoken_title_user_unique" + ), + ], + }, + ), + ] diff --git a/legadilo/users/models/__init__.py b/legadilo/users/models/__init__.py index b572628a..ee6c117a 100644 --- a/legadilo/users/models/__init__.py +++ b/legadilo/users/models/__init__.py @@ -14,8 +14,9 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+from .application_token import ApplicationToken from .notification import Notification from .user import User from .user_settings import UserSettings -__all__ = ["Notification", "User", "UserSettings"] +__all__ = ["ApplicationToken", "Notification", "User", "UserSettings"] diff --git a/legadilo/users/models/application_token.py b/legadilo/users/models/application_token.py new file mode 100644 index 00000000..996a376d --- /dev/null +++ b/legadilo/users/models/application_token.py @@ -0,0 +1,90 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from __future__ import annotations + +import secrets +from datetime import datetime +from typing import TYPE_CHECKING + +from django.conf import settings +from django.db import models, transaction +from django.utils.translation import gettext_lazy as _ + +from ...utils.time_utils import utcnow +from .user import User + +if TYPE_CHECKING: + from django_stubs_ext.db.models import TypedModelMeta +else: + TypedModelMeta = object + + +class ApplicationTokenQuerySet(models.QuerySet["ApplicationToken"]): + def only_valid(self): + return self.filter(models.Q(validity_end=None) | models.Q(validity_end__gt=utcnow())) + + +class ApplicationTokenManager(models.Manager["ApplicationToken"]): + _hints: dict + + def get_queryset(self): + return ApplicationTokenQuerySet(model=self.model, using=self._db, hints=self._hints).defer( + "token" + ) + + @transaction.atomic + def create_new_token( + self, user: User, title: str, validity_end: datetime | None = None + ) -> ApplicationToken: + return self.create( + title=title, + token=secrets.token_urlsafe(settings.TOKEN_LENGTH), + validity_end=validity_end, + user=user, + ) + + +class ApplicationToken(models.Model): + title = models.CharField(max_length=255) + token = models.CharField(max_length=255) + validity_end = models.DateTimeField( + verbose_name=_("Validity end"), + help_text=_("Leave empty to have a token that will last until deletion."), + null=True, + blank=True, + ) + last_used_at = models.DateTimeField(null=True, blank=True) + + created_at = models.DateTimeField(auto_now_add=True) + + user = models.ForeignKey( + "users.User", related_name="application_tokens", on_delete=models.CASCADE + ) + + objects = ApplicationTokenManager() + + class Meta(TypedModelMeta): + ordering = ["title"] + constraints = [ + models.UniqueConstraint(fields=["token"], name="%(app_label)s_%(class)s_token_unique"), + models.UniqueConstraint( + fields=["title", "user"], name="%(app_label)s_%(class)s_title_user_unique" + ), + ] + + def __str__(self): + 
return f"ApplicationToken(id={self.id}, user_id={self.user_id}, title={self.title})" diff --git a/legadilo/users/models/user.py b/legadilo/users/models/user.py index 0445c1b1..fc288ea9 100644 --- a/legadilo/users/models/user.py +++ b/legadilo/users/models/user.py @@ -59,4 +59,4 @@ def count_unread_notifications(self) -> int: @cached_property def tzinfo(self) -> ZoneInfo: - return ZoneInfo(self.settings.timezone.name) + return self.settings.timezone.zone_info diff --git a/legadilo/users/tests/factories.py b/legadilo/users/tests/factories.py index c4a65d9a..c2d174f9 100644 --- a/legadilo/users/tests/factories.py +++ b/legadilo/users/tests/factories.py @@ -19,10 +19,11 @@ from django.contrib.auth import get_user_model from factory import Faker, SubFactory, post_generation +from factory import Sequence as FactorySequence from factory.django import DjangoModelFactory from legadilo.core.models import Timezone -from legadilo.users.models import Notification, UserSettings +from legadilo.users.models import ApplicationToken, Notification, UserSettings class UserFactory(DjangoModelFactory): @@ -69,3 +70,12 @@ class NotificationFactory(DjangoModelFactory): class Meta: model = Notification + + +class ApplicationTokenFactory(DjangoModelFactory): + title = FactorySequence(lambda n: f"Token {n}") + token = FactorySequence(lambda n: f"token-{n}") + user = SubFactory(UserFactory) + + class Meta: + model = ApplicationToken diff --git a/legadilo/users/tests/models/test_application_token.py b/legadilo/users/tests/models/test_application_token.py new file mode 100644 index 00000000..41042d54 --- /dev/null +++ b/legadilo/users/tests/models/test_application_token.py @@ -0,0 +1,70 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import pytest +import time_machine +from django.db import IntegrityError + +from legadilo.users.models import ApplicationToken +from legadilo.users.tests.factories import ApplicationTokenFactory +from legadilo.utils.time_utils import utcdt + + +@pytest.mark.django_db +class TestApplicationTokenQuerySet: + def test_only_valid(self, user): + always_valid_token = ApplicationTokenFactory( + user=user, validity_end=None, title="Always valid token" + ) + still_valid_token = ApplicationTokenFactory( + user=user, validity_end=utcdt(2024, 11, 25), title="Still valid token" + ) + ApplicationTokenFactory( + user=user, validity_end=utcdt(2024, 11, 24, 12, 0, 0), title="Expired token" + ) + + with time_machine.travel("2024-11-24 15:00:00"): + tokens = ApplicationToken.objects.get_queryset().only_valid().order_by("id") + + assert list(tokens) == [always_valid_token, still_valid_token] + + +@pytest.mark.django_db +class TestApplicationTokenManager: + def test_create_always_valid_token(self, user): + application_token = ApplicationToken.objects.create_new_token( + user, "My token", validity_end=None + ) + + assert application_token.title == "My token" + assert application_token.user == user + assert application_token.validity_end is None + assert len(application_token.token) == 67 + + def test_create_token_with_validity_end(self, user): + validity_end 
= utcdt(2024, 11, 24, 12, 0, 0) + + token = ApplicationToken.objects.create_new_token(user, "My token", validity_end) + + assert token.validity_end == validity_end + + def test_create_tokens_with_same_name(self, user, other_user): + token_title = "" + ApplicationToken.objects.create_new_token(user, token_title, validity_end=None) + ApplicationToken.objects.create_new_token(other_user, token_title, validity_end=None) + + with pytest.raises(IntegrityError): + ApplicationToken.objects.create_new_token(user, token_title, validity_end=None) diff --git a/legadilo/users/tests/test_api.py b/legadilo/users/tests/test_api.py new file mode 100644 index 00000000..c1fad923 --- /dev/null +++ b/legadilo/users/tests/test_api.py @@ -0,0 +1,124 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from http import HTTPStatus + +import jwt +import pytest +import time_machine +from django.urls import reverse + +from config import settings +from legadilo.users.api import _create_jwt +from legadilo.users.tests.factories import ApplicationTokenFactory +from legadilo.utils.testing import build_bearer_header +from legadilo.utils.time_utils import utcdt + + +@pytest.mark.django_db +class TestGetRefreshTokenView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.url = reverse("api-1.0.0:refresh_token") + self.application_token = ApplicationTokenFactory(user=user) + + def test_get_refresh_token_invalid_payload(self, client): + response = client.post(self.url, {}, content_type="application/json") + + assert response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY + assert response.json() == { + "detail": [ + { + "loc": ["body", "payload", "application_token"], + "msg": "Field required", + "type": "missing", + } + ] + } + + def test_get_refresh_token_invalid_token(self, client): + response = client.post( + self.url, {"application_token": "inexistent"}, content_type="application/json" + ) + + assert response.status_code == HTTPStatus.NOT_FOUND + + @time_machine.travel("2024-11-24 16:30:00", tick=False) + def test_get_refresh_token(self, client, user, django_assert_num_queries): + with django_assert_num_queries(2): + response = client.post( + self.url, + {"application_token": self.application_token.token}, + content_type="application/json", + ) + + assert response.status_code == HTTPStatus.OK + assert list(response.json().keys()) == ["jwt"] + self.application_token.refresh_from_db() + assert self.application_token.last_used_at == utcdt(2024, 11, 24, 16, 30) + decoded_jwt = jwt.decode( + response.json()["jwt"], settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM] + ) + exp = utcdt(2024, 11, 24, 16, 30) + settings.JWT_MAX_AGE + assert decoded_jwt == { + "application_token_title": self.application_token.title, + "exp": exp.timestamp(), + "user_id": 
user.id, + } + + +@pytest.mark.django_db +class TestGetUserView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.url = reverse("api-1.0.0:user_info") + self.application_token = ApplicationTokenFactory(user=user) + + def test_get_user_info_no_token(self, client): + response = client.get(self.url) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_get_user(self, client, user, django_assert_num_queries): + jwt = _create_jwt(user.id, self.application_token.token) + + with django_assert_num_queries(1): + response = client.get( + self.url, + HTTP_AUTHORIZATION=build_bearer_header(jwt), + ) + + assert response.status_code == HTTPStatus.OK + assert response.json() == {"email": user.email} + + def test_get_user_expired_token(self, client, user): + with time_machine.travel("2024-11-20 16:30:00"): + jwt = _create_jwt(user.id, self.application_token.token) + + response = client.get( + self.url, + HTTP_AUTHORIZATION=build_bearer_header(jwt), + ) + + assert response.status_code == HTTPStatus.UNAUTHORIZED + + def test_get_user_invalid_token(self, client, user): + response = client.get( + self.url, + HTTP_AUTHORIZATION=build_bearer_header("toto"), + ) + + assert response.status_code == HTTPStatus.UNAUTHORIZED diff --git a/legadilo/users/tests/views/test_manage_tokens_views.py b/legadilo/users/tests/views/test_manage_tokens_views.py new file mode 100644 index 00000000..248d9c57 --- /dev/null +++ b/legadilo/users/tests/views/test_manage_tokens_views.py @@ -0,0 +1,159 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from datetime import datetime +from http import HTTPStatus + +import pytest +from django.contrib.messages import DEFAULT_LEVELS, get_messages +from django.contrib.messages.storage.base import Message +from django.urls import reverse + +from legadilo.conftest import assert_redirected_to_login_page +from legadilo.core.models import Timezone +from legadilo.users.models import ApplicationToken +from legadilo.users.tests.factories import ApplicationTokenFactory +from legadilo.utils.time_utils import utcdt + + +@pytest.mark.django_db +class TestManageTokensView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.url = reverse("users:manage_tokens") + self.application_token = ApplicationTokenFactory(user=user) + + def test_list_not_logged_in(self, client): + response = client.get(self.url) + + assert_redirected_to_login_page(response) + + def test_list(self, logged_in_sync_client, other_user, django_assert_num_queries): + ApplicationTokenFactory(user=other_user) + + with django_assert_num_queries(8): + response = logged_in_sync_client.get(self.url) + + assert response.status_code == HTTPStatus.OK + assert response.template_name == "users/manage_tokens.html" + assert response.context_data["new_application_token"] is None + assert list(response.context_data["tokens"]) == [self.application_token] + + def test_create_token_invalid_form( + self, user, logged_in_sync_client, django_assert_num_queries + ): + with django_assert_num_queries(8): + response = logged_in_sync_client.post(self.url, data={}) + + assert response.status_code == HTTPStatus.BAD_REQUEST + assert response.template_name 
== "users/manage_tokens.html" + assert response.context_data["new_application_token"] is None + assert response.context_data["form"].errors == {"title": ["This field is required."]} + + def test_create_token(self, user, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(11): + response = logged_in_sync_client.post(self.url, data={"title": "Test token"}) + + assert response.status_code == HTTPStatus.OK + assert ApplicationToken.objects.count() == 2 + new_token = ApplicationToken.objects.exclude(id=self.application_token.id).get() + assert new_token.title == "Test token" + assert new_token.user == user + assert new_token.validity_end is None + assert response.template_name == "users/manage_tokens.html" + assert response.context_data["new_application_token"] is not None + assert list(response.context_data["tokens"]) == [new_token, self.application_token] + + def test_create_duplicated_token(self, logged_in_sync_client, django_assert_num_queries): + with django_assert_num_queries(12): + response = logged_in_sync_client.post( + self.url, data={"title": self.application_token.title} + ) + + assert response.status_code == HTTPStatus.CONFLICT + assert ApplicationToken.objects.count() == 1 + assert response.template_name == "users/manage_tokens.html" + assert response.context_data["new_application_token"] is None + messages = list(get_messages(response.wsgi_request)) + assert messages == [ + Message( + level=DEFAULT_LEVELS["ERROR"], + message="A token already exists with this name.", + ) + ] + + def test_create_token_with_validity_end( + self, user, logged_in_sync_client, django_assert_num_queries + ): + with django_assert_num_queries(11): + response = logged_in_sync_client.post( + self.url, data={"title": "Test token", "validity_end": "2024-11-24 12:00:00Z"} + ) + + assert response.status_code == HTTPStatus.OK + assert ApplicationToken.objects.count() == 2 + new_token = ApplicationToken.objects.exclude(id=self.application_token.id).get() + 
assert new_token.title == "Test token" + assert new_token.user == user + assert new_token.validity_end == utcdt(2024, 11, 24, 12) + + def test_create_token_with_validity_end_in_timezone( + self, user, logged_in_sync_client, django_assert_num_queries + ): + new_york_tz, _created = Timezone.objects.get_or_create(name="America/New_York") + + with django_assert_num_queries(12): + response = logged_in_sync_client.post( + self.url, + data={ + "title": "Test token", + "validity_end": "2024-11-24 12:00:00Z", + "timezone": new_york_tz.id, + }, + ) + + assert response.status_code == HTTPStatus.OK + assert ApplicationToken.objects.count() == 2 + new_token = ApplicationToken.objects.exclude(id=self.application_token.id).get() + assert new_token.title == "Test token" + assert new_token.user == user + assert new_token.validity_end == datetime(2024, 11, 24, 12, tzinfo=new_york_tz.zone_info) + + +@pytest.mark.django_db +class TestDeleteTokenView: + @pytest.fixture(autouse=True) + def _setup_data(self, user): + self.application_token = ApplicationTokenFactory(user=user) + self.url = reverse("users:delete_token", kwargs={"token_id": self.application_token.id}) + + def test_delete_not_logged_in(self, client): + response = client.get(self.url) + + assert_redirected_to_login_page(response) + + def test_delete_other_user(self, logged_in_other_user_sync_client): + response = logged_in_other_user_sync_client.post(self.url) + + assert response.status_code == HTTPStatus.NOT_FOUND + assert ApplicationToken.objects.count() == 1 + + def test_delete(self, logged_in_sync_client, other_user, django_assert_num_queries): + with django_assert_num_queries(7): + response = logged_in_sync_client.post(self.url) + + assert response.status_code == HTTPStatus.OK + assert ApplicationToken.objects.count() == 0 diff --git a/legadilo/users/urls.py b/legadilo/users/urls.py index 79c9c1fa..8438eee6 100644 --- a/legadilo/users/urls.py +++ b/legadilo/users/urls.py @@ -25,4 +25,6 @@ path("~settings/", 
view=views.user_update_settings_view, name="update_settings"), path("/", view=views.user_detail_view, name="detail"), path("notifications/", views.list_notifications_view, name="list_notifications"), + path("tokens/", views.manage_tokens_view, name="manage_tokens"), + path("tokens//delete/", views.delete_token_view, name="delete_token"), ] diff --git a/legadilo/users/user_types.py b/legadilo/users/user_types.py index f8bf7a59..defe16a6 100644 --- a/legadilo/users/user_types.py +++ b/legadilo/users/user_types.py @@ -28,3 +28,10 @@ class AuthenticatedHttpRequest(HttpRequest): @abstractmethod async def auser(self) -> User: pass + + +class AuthenticatedApiRequest(HttpRequest): + # In the API, we cannot use user because it's not defined when using auth tokens. We must rely + # on auth which will always contains the proper user object. + user: None # type: ignore[assignment] + auth: User diff --git a/legadilo/users/views/__init__.py b/legadilo/users/views/__init__.py index c8294e59..83c8c3fc 100644 --- a/legadilo/users/views/__init__.py +++ b/legadilo/users/views/__init__.py @@ -14,6 +14,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +from .manage_tokens_views import delete_token_view, manage_tokens_view from .notifications_views import list_notifications_view from .user_views import ( user_detail_view, @@ -23,7 +24,9 @@ ) __all__ = [ + "delete_token_view", "list_notifications_view", + "manage_tokens_view", "user_detail_view", "user_redirect_view", "user_update_settings_view", diff --git a/legadilo/users/views/manage_tokens_views.py b/legadilo/users/views/manage_tokens_views.py new file mode 100644 index 00000000..08afb11a --- /dev/null +++ b/legadilo/users/views/manage_tokens_views.py @@ -0,0 +1,116 @@ +# Legadilo +# Copyright (C) 2023-2024 by Legadilo contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +from http import HTTPStatus + +from django import forms +from django.contrib import messages +from django.contrib.auth.decorators import login_required +from django.db import IntegrityError +from django.http import HttpResponse +from django.shortcuts import get_object_or_404 +from django.template.response import TemplateResponse +from django.utils.translation import gettext_lazy as _ +from django.views.decorators.http import require_http_methods, require_POST + +from legadilo.core.forms.widgets import AutocompleteSelectWidget, DateTimeWidget +from legadilo.core.models import Timezone +from legadilo.users.models import ApplicationToken +from legadilo.users.user_types import AuthenticatedHttpRequest + + +class CreateTokenForm(forms.ModelForm): + timezone = forms.ModelChoiceField( + Timezone.objects.all(), + required=False, + widget=AutocompleteSelectWidget(), + help_text=_("The timezone in which the validity end date should be understood."), + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["validity_end"].widget = DateTimeWidget() + + class Meta: + model = ApplicationToken + fields = ("title", "validity_end") + + def clean(self): + super().clean() + + if self.cleaned_data["validity_end"] and self.cleaned_data["timezone"]: + self.cleaned_data["validity_end"] = 
self.cleaned_data["validity_end"].replace( + tzinfo=self.cleaned_data["timezone"].zone_info + ) + + +@login_required +@require_http_methods(["GET", "POST"]) +def manage_tokens_view(request: AuthenticatedHttpRequest) -> TemplateResponse: + form = CreateTokenForm(initial={"timezone": request.user.settings.timezone}) + new_application_token = None + status = HTTPStatus.OK + + if request.method == "POST": + form = CreateTokenForm(request.POST) + if form.is_valid(): + new_application_token, form, status = _create_token(request, form) + else: + status = HTTPStatus.BAD_REQUEST + + return TemplateResponse( + request, + "users/manage_tokens.html", + { + "new_application_token": new_application_token, + "tokens": ApplicationToken.objects.filter(user=request.user), + "form": form, + }, + status=status, + ) + + +def _create_token( + request: AuthenticatedHttpRequest, form: CreateTokenForm +) -> tuple[ApplicationToken | None, CreateTokenForm, HTTPStatus]: + status = HTTPStatus.OK + + try: + new_application_token = ApplicationToken.objects.create_new_token( + request.user, form.cleaned_data["title"], form.cleaned_data["validity_end"] + ) + form = CreateTokenForm( + initial={ + "validity_end": form.cleaned_data["validity_end"].replace(tzinfo=None).isoformat() + if form.cleaned_data["validity_end"] + else "", + "timezone": form.cleaned_data["timezone"], + } + ) + except IntegrityError: + new_application_token = None + status = HTTPStatus.CONFLICT + messages.error(request, _("A token already exists with this name.")) + + return new_application_token, form, status + + +@login_required +@require_POST +def delete_token_view(request: AuthenticatedHttpRequest, token_id: int) -> HttpResponse: + token = get_object_or_404(ApplicationToken, id=token_id, user=request.user) + token.delete() + + return HttpResponse() diff --git a/legadilo/utils/api.py b/legadilo/utils/api.py new file mode 100644 index 00000000..0c781351 --- /dev/null +++ b/legadilo/utils/api.py @@ -0,0 +1,41 @@ +# Legadilo +# 
Copyright (C) 2023-2024 by Legadilo contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from typing import Any + +from django.db import models +from django.db.models import Model +from ninja import Schema + + +class ApiError(Schema): + detail: str + + +async def update_model_from_patch_dict( + model: Model, + data: dict[str, Any], + *, + must_refresh: bool = False, + refresh_qs: models.QuerySet | None = None, +): + for attr, value in data.items(): + setattr(model, attr, value) + + await model.asave(update_fields=list(data.keys())) + + if must_refresh: + await model.arefresh_from_db(from_queryset=refresh_qs) diff --git a/legadilo/utils/collections_utils.py b/legadilo/utils/collections_utils.py index 46a31d10..7d999e0f 100644 --- a/legadilo/utils/collections_utils.py +++ b/legadilo/utils/collections_utils.py @@ -17,11 +17,22 @@ from __future__ import annotations from collections.abc import AsyncIterable, Iterable -from typing import TypeVar +from typing import Any, TypeVar + +from django.core.serializers.json import DjangoJSONEncoder +from pydantic import BaseModel as BaseSchema T = TypeVar("T") +class CustomJsonEncoder(DjangoJSONEncoder): + def default(self, o: Any) -> Any: + if isinstance(o, BaseSchema): + return o.model_dump(mode="json") + + return super().default(o) + + def min_or_none(collection: Iterable[T]) -> T | None: return _select_item_from_collection(min, 
collection) @@ -44,11 +55,7 @@ def max_or_none( async def alist(collection: AsyncIterable[T]) -> list[T]: - output = [] - async for item in collection: - output.append(item) - - return output + return [item async for item in collection] async def aset(collection: AsyncIterable[T]) -> set[T]: diff --git a/legadilo/utils/pagination.py b/legadilo/utils/pagination.py index 37dd2f6d..0de30752 100644 --- a/legadilo/utils/pagination.py +++ b/legadilo/utils/pagination.py @@ -13,8 +13,13 @@ # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +from typing import Any from django.core.paginator import Page, Paginator +from django.db.models import QuerySet +from ninja.pagination import LimitOffsetPagination as NinjaLimitOffsetPagination + +from legadilo.utils.collections_utils import alist def get_requested_page(paginator: Paginator, requested_page: int) -> Page: @@ -23,3 +28,22 @@ def get_requested_page(paginator: Paginator, requested_page: int) -> Page: if 1 <= requested_page <= paginator.num_pages else paginator.page(1) ) + + +class LimitOffsetPagination(NinjaLimitOffsetPagination): + """Custom paginator to fix a bug in Ninja pagination. + + There is a bug in Ninja when we try to paginate querysets in async context: we will get a + SynchronousOnlyOperation error. This should be solved "soon" with + https://github.com/vitalik/django-ninja/pull/1340 + """ + + async def apaginate_queryset( + self, + queryset: QuerySet, + pagination: Any, + **params: Any, + ) -> Any: + result = await super().apaginate_queryset(queryset, pagination, **params) + result["items"] = await alist(result["items"]) + return result diff --git a/legadilo/utils/testing.py b/legadilo/utils/testing.py index 3ec42e67..60da20d0 100644 --- a/legadilo/utils/testing.py +++ b/legadilo/utils/testing.py @@ -15,25 +15,20 @@ # along with this program. If not, see . 
import json -from dataclasses import asdict, is_dataclass -from datetime import date, datetime from typing import Any from django.db import models +from pydantic import BaseModel as BaseSchema - -class CustomJsonEncoder(json.JSONEncoder): - def default(self, obj: Any) -> Any: - if isinstance(obj, datetime | date): - return obj.isoformat() - - if is_dataclass(obj): - return asdict(obj) # type: ignore[arg-type] - - return super().default(obj) +from legadilo.utils.collections_utils import CustomJsonEncoder def serialize_for_snapshot(value: Any) -> str: + if isinstance(value, BaseSchema): + value = value.model_dump(mode="json") + elif isinstance(value, list | tuple) and len(value) > 0 and isinstance(value[0], BaseSchema): + value = [item.model_dump(mode="json") for item in value] + value = json.dumps(value, indent=2, sort_keys=True, cls=CustomJsonEncoder) return str(value) @@ -41,3 +36,7 @@ def serialize_for_snapshot(value: Any) -> str: def all_model_fields_except(model: type[models.Model], excluded_fields: set[str]): return [field.name for field in model._meta.fields if field.name not in excluded_fields] + + +def build_bearer_header(jwt: str = ""): + return f"Bearer {jwt}" diff --git a/legadilo/utils/tests/test_validators.py b/legadilo/utils/tests/test_validators.py index c9f59315..ed29add8 100644 --- a/legadilo/utils/tests/test_validators.py +++ b/legadilo/utils/tests/test_validators.py @@ -21,29 +21,10 @@ get_page_number_from_request, is_url_valid, language_code_validator, - list_of_strings_json_schema_validator, normalize_url, ) -class TestListOfStringsJsonSchemaValidator: - def test_list_of_string_json_schema_validator_with_array_of_strings(self): - # Must not raise. 
- list_of_strings_json_schema_validator(["Value1", "2", "Hi!"]) - - @pytest.mark.parametrize( - "value", - [ - ["Test", 1], - {"nota": "a list"}, - "Just a string", - ], - ) - def test_list_of_string_json_schema_validator_with_invalid_data(self, value): - with pytest.raises(ValidationError): - list_of_strings_json_schema_validator(value) - - class TestLanguageCodeValidator: @pytest.mark.parametrize("code", ["", None, 12, "test", "aaaaa"]) def test_invalid_codes(self, code): diff --git a/legadilo/utils/validators.py b/legadilo/utils/validators.py index fcbe41b8..90173db8 100644 --- a/legadilo/utils/validators.py +++ b/legadilo/utils/validators.py @@ -13,65 +13,80 @@ # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . - import re -from typing import Any +from collections.abc import Set +from typing import Annotated, Any from urllib.parse import urljoin, urlparse, urlsplit, urlunsplit from django.core.exceptions import ValidationError from django.core.validators import URLValidator from django.http import HttpRequest -from django.utils.deconstruct import deconstructible -from jsonschema import ValidationError as JsonSchemaValidationError -from jsonschema import validate as validate_json_schema from nh3 import is_html +from pydantic import ( + AfterValidator, + BeforeValidator, + ConfigDict, + Field, + StringConstraints, + TypeAdapter, +) +from pydantic import BaseModel as BaseSchema +from pydantic import ValidationError as PydanticValidationError + +from legadilo.utils.security import full_sanitize, sanitize_keep_safe_tags + +default_frozen_model_config = ConfigDict( + extra="forbid", frozen=True, validate_default=True, validate_assignment=True +) + +FullSanitizeValidator = AfterValidator(full_sanitize) + + +def sanitize_keep_safe_tags_validator(extra_tags: Set[str] = frozenset()) -> AfterValidator: + return AfterValidator( + lambda value: sanitize_keep_safe_tags(value, extra_tags_to_cleanup=extra_tags) 
+ ) + + +def truncate(max_size: int) -> AfterValidator: + # We must use a lambda here: Pydantic cannot recognize the signature of + # operator.itemgetter(slice(max_size)) if we pass it to its validator. + return AfterValidator(lambda value: value[:max_size]) # noqa: FURB118 don't use a lambda + + +def remove_falsy_items(container_type: type): + return AfterValidator(lambda items: container_type(item for item in items if item)) + + +def none_to_value(none_replacer: Any) -> BeforeValidator: + return BeforeValidator(lambda value: none_replacer if value is None else value) + + +def list_of_strings_validator(value: Any): + try: + TypeAdapter(list[str]).validate_python(value) + except PydanticValidationError as e: + raise ValidationError(str(e)) from e + + +CleanedString = Annotated[str, FullSanitizeValidator, StringConstraints(strip_whitespace=True)] -from legadilo.utils.security import full_sanitize - - -@deconstructible -class JsonSchemaValidator: - def __init__(self, schema): - self._schema = schema - - def __call__(self, value): - try: - validate_json_schema(value, self._schema) - except JsonSchemaValidationError as e: - raise ValidationError(str(e)) from e - - -list_of_strings_json_schema_validator = JsonSchemaValidator({ - "type": "array", - "items": {"type": "string"}, -}) - -table_of_content_json_schema_validator = JsonSchemaValidator({ - "type": "array", - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "text": {"type": "string"}, - "level": {"type": "integer"}, - "children": { - "type": "array", - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "text": {"type": "string"}, - "level": {"type": "integer"}, - }, - "additionalProperties": False, - "required": ["id", "text", "level"], - }, - }, - }, - "additionalProperties": False, - "required": ["id", "text", "level"], - }, -}) + +class TableOfContentItem(BaseSchema): + id: CleanedString + text: CleanedString + level: int + + +class 
TableOfContentTopItem(TableOfContentItem): + children: list[TableOfContentItem] = Field(default_factory=list) + + +def table_of_content_validator(value: Any): + try: + TypeAdapter(TableOfContentItem).validate_python(value) + except PydanticValidationError as e: + raise ValidationError(str(e)) from e def language_code_validator(value: Any): @@ -89,6 +104,17 @@ def language_code_validator(value: Any): raise ValidationError("Language code is invalid") +def language_code_validator_or_default(value: Any) -> str: + try: + language_code_validator(value) + return value + except (ValidationError, TypeError): + return "" + + +LanguageCodeValidatorOrDefault = AfterValidator(language_code_validator_or_default) + + def get_page_number_from_request(request: HttpRequest) -> int: raw_page = request.GET.get("page", 1) @@ -117,6 +143,16 @@ def is_url_valid(url: str | None) -> bool: return True +def _is_url_valid_for_pydantic_validator(url: str | None) -> str: + if url is None or not is_url_valid(url): + raise ValueError(f"{url} is not a valid url") + + return url + + +ValidUrlValidator = AfterValidator(_is_url_valid_for_pydantic_validator) + + def normalize_url(base_url: str, url_to_normalize: str) -> str: """Normalize HTTP URLs. 
diff --git a/poetry.lock b/poetry.lock index 5f67010e..361018e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,17 @@ files = [ {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, ] +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anyio" version = "4.6.2.post1" @@ -879,6 +890,26 @@ files = [ asgiref = ">=3.6" django = ">=4.2" +[[package]] +name = "django-ninja" +version = "1.3.0" +description = "Django Ninja - Fast Django REST framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "django_ninja-1.3.0-py3-none-any.whl", hash = "sha256:f58096b6c767d1403dfd6c49743f82d780d7b9688d9302ecab316ac1fa6131bb"}, + {file = "django_ninja-1.3.0.tar.gz", hash = "sha256:5b320e2dc0f41a6032bfa7e1ebc33559ae1e911a426f0c6be6674a50b20819be"}, +] + +[package.dependencies] +Django = ">=3.1" +pydantic = ">=2.0,<3.0.0" + +[package.extras] +dev = ["pre-commit"] +doc = ["markdown-include", "mkdocs", "mkdocs-material", "mkdocstrings"] +test = ["django-stubs", "mypy (==1.7.1)", "psycopg2-binary", "pytest", "pytest-asyncio", "pytest-cov", "pytest-django", "ruff (==0.5.7)"] + [[package]] name = "django-stubs" version = "5.1.1" @@ -1301,41 +1332,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = 
"sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rpds-py = ">=0.7.1" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2024.10.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.9" -files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - [[package]] name = "markdown" version = "3.7" @@ -1986,6 +1982,138 @@ files = [ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +[[package]] +name = "pydantic" +version = "2.10.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.10.0-py3-none-any.whl", hash = "sha256:5e7807ba9201bdf61b1b58aa6eb690916c40a47acfb114b1b4fef3e7fd5b30fc"}, + {file = "pydantic-2.10.0.tar.gz", hash = "sha256:0aca0f045ff6e2f097f1fe89521115335f15049eeb8a7bef3dafe4b19a74e289"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.0" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator 
(>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.0" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2ac6b919f7fed71b17fe0b4603c092a4c9b5bae414817c9c81d3c22d1e1bcc"}, + {file = "pydantic_core-2.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e015833384ca3e1a0565a79f5d953b0629d9138021c27ad37c92a9fa1af7623c"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db72e40628967f6dc572020d04b5f800d71264e0531c6da35097e73bdf38b003"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df45c4073bed486ea2f18757057953afed8dd77add7276ff01bccb79982cf46c"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:836a4bfe0cc6d36dc9a9cc1a7b391265bf6ce9d1eb1eac62ac5139f5d8d9a6fa"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bf1340ae507f6da6360b24179c2083857c8ca7644aab65807023cf35404ea8d"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ab325fc86fbc077284c8d7f996d904d30e97904a87d6fb303dce6b3de7ebba9"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1da0c98a85a6c6ed702d5556db3b09c91f9b0b78de37b7593e2de8d03238807a"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b0202ebf2268954090209a84f9897345719e46a57c5f2c9b7b250ca0a9d3e63"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:35380671c3c921fe8adf31ad349dc6f7588b7e928dbe44e1093789734f607399"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:6b4c19525c3538fbc0bbda6229f9682fb8199ce9ac37395880e6952798e00373"}, + {file = "pydantic_core-2.27.0-cp310-none-win32.whl", hash = "sha256:333c840a1303d1474f491e7be0b718226c730a39ead0f7dab2c7e6a2f3855555"}, + {file = "pydantic_core-2.27.0-cp310-none-win_amd64.whl", hash = "sha256:99b2863c1365f43f74199c980a3d40f18a218fbe683dd64e470199db426c4d6a"}, + {file = "pydantic_core-2.27.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4523c4009c3f39d948e01962223c9f5538602e7087a628479b723c939fab262d"}, + {file = "pydantic_core-2.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84af1cf7bfdcbc6fcf5a5f70cc9896205e0350306e4dd73d54b6a18894f79386"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e65466b31be1070b4a5b7dbfbd14b247884cb8e8b79c64fb0f36b472912dbaea"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a5c022bb0d453192426221605efc865373dde43b17822a264671c53b068ac20c"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bb69bf3b6500f195c3deb69c1205ba8fc3cb21d1915f1f158a10d6b1ef29b6a"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aa4d1b2eba9a325897308b3124014a142cdccb9f3e016f31d3ebee6b5ea5e75"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e96ca781e0c01e32115912ebdf7b3fb0780ce748b80d7d28a0802fa9fbaf44e"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b872c86d8d71827235c7077461c502feb2db3f87d9d6d5a9daa64287d75e4fa0"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:82e1ad4ca170e8af4c928b67cff731b6296e6a0a0981b97b2eb7c275cc4e15bd"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = 
"sha256:eb40f828bc2f73f777d1eb8fee2e86cd9692a4518b63b6b5aa8af915dfd3207b"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9a8fbf506fde1529a1e3698198fe64bfbe2e0c09557bc6a7dcf872e7c01fec40"}, + {file = "pydantic_core-2.27.0-cp311-none-win32.whl", hash = "sha256:24f984fc7762ed5f806d9e8c4c77ea69fdb2afd987b4fd319ef06c87595a8c55"}, + {file = "pydantic_core-2.27.0-cp311-none-win_amd64.whl", hash = "sha256:68950bc08f9735306322bfc16a18391fcaac99ded2509e1cc41d03ccb6013cfe"}, + {file = "pydantic_core-2.27.0-cp311-none-win_arm64.whl", hash = "sha256:3eb8849445c26b41c5a474061032c53e14fe92a11a5db969f722a2716cd12206"}, + {file = "pydantic_core-2.27.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8117839a9bdbba86e7f9df57018fe3b96cec934c3940b591b0fd3fbfb485864a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a291d0b4243a259c8ea7e2b84eb9ccb76370e569298875a7c5e3e71baf49057a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e35afd9e10b2698e6f2f32256678cb23ca6c1568d02628033a837638b3ed12"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58ab0d979c969983cdb97374698d847a4acffb217d543e172838864636ef10d9"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d06b667e53320332be2bf6f9461f4a9b78092a079b8ce8634c9afaa7e10cd9f"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78f841523729e43e3928a364ec46e2e3f80e6625a4f62aca5c345f3f626c6e8a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:400bf470e4327e920883b51e255617dfe4496d4e80c3fea0b5a5d0bf2c404dd4"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:951e71da6c89d354572098bada5ba5b5dc3a9390c933af8a614e37755d3d1840"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a51ce96224eadd1845150b204389623c8e129fde5a67a84b972bd83a85c6c40"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:483c2213a609e7db2c592bbc015da58b6c75af7360ca3c981f178110d9787bcf"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:359e7951f04ad35111b5ddce184db3391442345d0ab073aa63a95eb8af25a5ef"}, + {file = "pydantic_core-2.27.0-cp312-none-win32.whl", hash = "sha256:ee7d9d5537daf6d5c74a83b38a638cc001b648096c1cae8ef695b0c919d9d379"}, + {file = "pydantic_core-2.27.0-cp312-none-win_amd64.whl", hash = "sha256:2be0ad541bb9f059954ccf8877a49ed73877f862529575ff3d54bf4223e4dd61"}, + {file = "pydantic_core-2.27.0-cp312-none-win_arm64.whl", hash = "sha256:6e19401742ed7b69e51d8e4df3c03ad5ec65a83b36244479fd70edde2828a5d9"}, + {file = "pydantic_core-2.27.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5f2b19b8d6fca432cb3acf48cf5243a7bf512988029b6e6fd27e9e8c0a204d85"}, + {file = "pydantic_core-2.27.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c86679f443e7085ea55a7376462553996c688395d18ef3f0d3dbad7838f857a2"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510b11e9c3b1a852876d1ccd8d5903684336d635214148637ceb27366c75a467"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb704155e73b833801c247f39d562229c0303f54770ca14fb1c053acb376cf10"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce048deb1e033e7a865ca384770bccc11d44179cf09e5193a535c4c2f497bdc"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58560828ee0951bb125c6f2862fbc37f039996d19ceb6d8ff1905abf7da0bf3d"}, + {file = 
"pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb4785894936d7682635726613c44578c420a096729f1978cd061a7e72d5275"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2883b260f7a93235488699d39cbbd94fa7b175d3a8063fbfddd3e81ad9988cb2"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6fcb3fa3855d583aa57b94cf146f7781d5d5bc06cb95cb3afece33d31aac39b"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:e851a051f7260e6d688267eb039c81f05f23a19431bd7dfa4bf5e3cb34c108cd"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edb1bfd45227dec8d50bc7c7d86463cd8728bcc574f9b07de7369880de4626a3"}, + {file = "pydantic_core-2.27.0-cp313-none-win32.whl", hash = "sha256:678f66462058dd978702db17eb6a3633d634f7aa0deaea61e0a674152766d3fc"}, + {file = "pydantic_core-2.27.0-cp313-none-win_amd64.whl", hash = "sha256:d28ca7066d6cdd347a50d8b725dc10d9a1d6a1cce09836cf071ea6a2d4908be0"}, + {file = "pydantic_core-2.27.0-cp313-none-win_arm64.whl", hash = "sha256:6f4a53af9e81d757756508b57cae1cf28293f0f31b9fa2bfcb416cc7fb230f9d"}, + {file = "pydantic_core-2.27.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:e9f9feee7f334b72ceae46313333d002b56f325b5f04271b4ae2aadd9e993ae4"}, + {file = "pydantic_core-2.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:225bfff5d425c34e1fd562cef52d673579d59b967d9de06178850c4802af9039"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921ad596ff1a82f9c692b0758c944355abc9f0de97a4c13ca60ffc6d8dc15d4"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6354e18a9be37bfa124d6b288a87fb30c673745806c92956f1a25e3ae6e76b96"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8ee4c2a75af9fe21269a4a0898c5425afb01af1f5d276063f57e2ae1bc64e191"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c91e3c04f5191fd3fb68764bddeaf02025492d5d9f23343b283870f6ace69708"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6ebfac28fd51890a61df36ef202adbd77d00ee5aca4a3dadb3d9ed49cfb929"}, + {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36aa167f69d8807ba7e341d67ea93e50fcaaf6bc433bb04939430fa3dab06f31"}, + {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e8d89c276234579cd3d095d5fa2a44eb10db9a218664a17b56363cddf226ff3"}, + {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:5cc822ab90a70ea3a91e6aed3afac570b276b1278c6909b1d384f745bd09c714"}, + {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e15315691fe2253eb447503153acef4d7223dfe7e7702f9ed66539fcd0c43801"}, + {file = "pydantic_core-2.27.0-cp38-none-win32.whl", hash = "sha256:dfa5f5c0a4c8fced1422dc2ca7eefd872d5d13eb33cf324361dbf1dbfba0a9fe"}, + {file = "pydantic_core-2.27.0-cp38-none-win_amd64.whl", hash = "sha256:513cb14c0cc31a4dfd849a4674b20c46d87b364f997bbcb02282306f5e187abf"}, + {file = "pydantic_core-2.27.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:4148dc9184ab79e356dc00a4199dc0ee8647973332cb385fc29a7cced49b9f9c"}, + {file = "pydantic_core-2.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5fc72fbfebbf42c0856a824b8b0dc2b5cd2e4a896050281a21cfa6fed8879cb1"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:185ef205256cd8b38431205698531026979db89a79587725c1e55c59101d64e9"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:395e3e1148fa7809016231f8065f30bb0dc285a97b4dc4360cd86e17bab58af7"}, + {file = 
"pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33d14369739c5d07e2e7102cdb0081a1fa46ed03215e07f097b34e020b83b1ae"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7820bb0d65e3ce1e3e70b6708c2f66143f55912fa02f4b618d0f08b61575f12"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43b61989068de9ce62296cde02beffabcadb65672207fc51e7af76dca75e6636"}, + {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15e350efb67b855cd014c218716feea4986a149ed1f42a539edd271ee074a196"}, + {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:433689845288f9a1ee5714444e65957be26d30915f7745091ede4a83cfb2d7bb"}, + {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:3fd8bc2690e7c39eecdf9071b6a889ce7b22b72073863940edc2a0a23750ca90"}, + {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:884f1806609c2c66564082540cffc96868c5571c7c3cf3a783f63f2fb49bd3cd"}, + {file = "pydantic_core-2.27.0-cp39-none-win32.whl", hash = "sha256:bf37b72834e7239cf84d4a0b2c050e7f9e48bced97bad9bdf98d26b8eb72e846"}, + {file = "pydantic_core-2.27.0-cp39-none-win_amd64.whl", hash = "sha256:31a2cae5f059329f9cfe3d8d266d3da1543b60b60130d186d9b6a3c20a346361"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4fb49cfdb53af5041aba909be00cccfb2c0d0a2e09281bf542371c5fd36ad04c"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:49633583eb7dc5cba61aaf7cdb2e9e662323ad394e543ee77af265736bcd3eaa"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:153017e3d6cd3ce979de06d84343ca424bb6092727375eba1968c8b4693c6ecb"}, + {file = 
"pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff63a92f6e249514ef35bc795de10745be0226eaea06eb48b4bbeaa0c8850a4a"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5982048129f40b082c2654de10c0f37c67a14f5ff9d37cf35be028ae982f26df"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:91bc66f878557313c2a6bcf396e7befcffe5ab4354cfe4427318968af31143c3"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:68ef5377eb582fa4343c9d0b57a5b094046d447b4c73dd9fbd9ffb216f829e7d"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c5726eec789ee38f2c53b10b1821457b82274f81f4f746bb1e666d8741fcfadb"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0c431e4be5c1a0c6654e0c31c661cd89e0ca956ef65305c3c3fd96f4e72ca39"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8e21d927469d04b39386255bf00d0feedead16f6253dcc85e9e10ddebc334084"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b51f964fcbb02949fc546022e56cdb16cda457af485e9a3e8b78ac2ecf5d77e"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a7fd4de38f7ff99a37e18fa0098c3140286451bc823d1746ba80cec5b433a1"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fda87808429c520a002a85d6e7cdadbf58231d60e96260976c5b8f9a12a8e13"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a150392102c402c538190730fda06f3bce654fc498865579a9f2c1d2b425833"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c9ed88b398ba7e3bad7bd64d66cc01dcde9cfcb7ec629a6fd78a82fa0b559d78"}, + 
{file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:9fe94d9d2a2b4edd7a4b22adcd45814b1b59b03feb00e56deb2e89747aec7bfe"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d8b5ee4ae9170e2775d495b81f414cc20268041c42571530513496ba61e94ba3"}, + {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d29e235ce13c91902ef3efc3d883a677655b3908b1cbc73dee816e5e1f8f7739"}, + {file = "pydantic_core-2.27.0.tar.gz", hash = "sha256:f57783fbaf648205ac50ae7d646f27582fc706be3977e87c3c124e7a92407b10"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pygments" version = "2.18.0" @@ -2000,6 +2128,23 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyjwt" +version = "2.10.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"}, + {file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pyopenssl" version = "24.2.1" @@ -2303,21 +2448,6 @@ all = ["pillow (>=9.1.0)", "pypng"] pil = ["pillow (>=9.1.0)"] png = ["pypng"] -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = 
"referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - [[package]] name = "requests" version = "2.32.3" @@ -2357,114 +2487,15 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] -[[package]] -name = "rpds-py" -version = "0.21.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, - {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, - {file = 
"rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, - {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, - {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, - {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, - {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, - {file = 
"rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, - {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, - {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, - {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, - {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash 
= "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, - {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, - {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, - {file = 
"rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, - {file = 
"rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, - {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, -] - [[package]] name = "sentry-sdk" -version = "2.18.0" +version = "2.19.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" files = [ - {file = "sentry_sdk-2.18.0-py2.py3-none-any.whl", hash = "sha256:ee70e27d1bbe4cd52a38e1bd28a5fadb9b17bc29d91b5f2b97ae29c0a7610442"}, - {file = "sentry_sdk-2.18.0.tar.gz", hash = "sha256:0dc21febd1ab35c648391c664df96f5f79fb0d92d7d4225cd9832e53a617cafd"}, + {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, + {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, ] [package.dependencies] @@ -2491,7 +2522,7 @@ grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] http2 = ["httpcore[http2] (==1.*)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] -huggingface-hub = ["huggingface-hub (>=0.22)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] langchain = ["langchain (>=0.0.210)"] launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] litestar = ["litestar (>=2.0.0)"] @@ -2500,7 
+2531,7 @@ openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] openfeature = ["openfeature-sdk (>=0.7.1)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro"] -pure-eval = ["asttokens", "executing", "pure-eval"] +pure-eval = ["asttokens", "executing", "pure_eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] @@ -3087,13 +3118,13 @@ dev = ["black", "pytest"] [[package]] name = "uvicorn" -version = "0.32.0" +version = "0.32.1" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, - {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, + {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"}, + {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"}, ] [package.dependencies] @@ -3101,7 +3132,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "virtualenv" @@ -3378,4 +3409,4 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "7b8330ef8763b4576a478b0dc88db3a5b7be1f31aeee2bd96003bd9bcfea5f12" +content-hash = "4ef906af892644639bb75613f925273285294d1397130122f85593b93ae45731" diff --git a/pyproject.toml b/pyproject.toml index 
57396c5c..9759437d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -231,6 +231,7 @@ django-csp = "3.8" # https://django-csp.readthedocs.io/en/latest/index.html django-axes = "7.0.0" # https://github.com/jazzband/django-axes django-htmx = "1.21.0" # https://django-htmx.readthedocs.io/en/latest/ django-template-partials = "24.4" # https://github.com/carltongibson/django-template-partials +django-ninja = "1.3.0" # https://django-ninja.dev # Project # ------------------------------------------------------------------------------ @@ -241,10 +242,11 @@ feedparser = {git = "https://github.com/Jenselme/feedparser.git", rev = "correct httpx = "^0.27.2" # https://www.python-httpx.org/ beautifulsoup4 = "^4.12.3" # https://www.crummy.com/software/BeautifulSoup/bs4/doc/ nh3 = "^0.2.17" # https://nh3.readthedocs.io/en/latest/ -jsonschema = "^4.23.0" # https://github.com/python-jsonschema/jsonschema python-dateutil = "^2.9.0.post0" # https://github.com/dateutil/dateutil defusedxml = "0.7.1" # https://github.com/tiran/defusedxml -markdown = "^3.7" +markdown = "^3.7" # https://python-markdown.github.io +pydantic = "2.10.0" # https://docs.pydantic.dev/latest/ +pyjwt = "2.10.0" # https://pyjwt.readthedocs.io/en/stable/ [tool.poetry.group.prod]