From 60356e56774477ef71e35ce9736147369047519a Mon Sep 17 00:00:00 2001 From: zimbatm Date: Fri, 27 Feb 2026 01:10:40 +0100 Subject: [PATCH 1/4] Add Pages CRUD endpoints to v1 API --- apps/api/plane/api/serializers/__init__.py | 5 + apps/api/plane/api/serializers/page.py | 73 +++++ apps/api/plane/api/urls/__init__.py | 2 + apps/api/plane/api/urls/page.py | 23 ++ apps/api/plane/api/views/__init__.py | 5 + apps/api/plane/api/views/page.py | 353 +++++++++++++++++++++ apps/api/plane/settings/openapi.py | 15 + 7 files changed, 476 insertions(+) create mode 100644 apps/api/plane/api/serializers/page.py create mode 100644 apps/api/plane/api/urls/page.py create mode 100644 apps/api/plane/api/views/page.py diff --git a/apps/api/plane/api/serializers/__init__.py b/apps/api/plane/api/serializers/__init__.py index 44e527a2dc5..8af82ce3188 100644 --- a/apps/api/plane/api/serializers/__init__.py +++ b/apps/api/plane/api/serializers/__init__.py @@ -60,3 +60,8 @@ from .invite import WorkspaceInviteSerializer from .member import ProjectMemberSerializer from .sticky import StickySerializer +from .page import ( + PageSerializer, + PageCreateSerializer, + PageUpdateSerializer, +) diff --git a/apps/api/plane/api/serializers/page.py b/apps/api/plane/api/serializers/page.py new file mode 100644 index 00000000000..b5b41a5db60 --- /dev/null +++ b/apps/api/plane/api/serializers/page.py @@ -0,0 +1,73 @@ +# Copyright (c) 2023-present Plane Software, Inc. and contributors +# SPDX-License-Identifier: AGPL-3.0-only +# See the LICENSE file for details. 
+ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from plane.db.models import Page + + +class PageCreateSerializer(BaseSerializer): + """Serializer for creating pages via the v1 API.""" + + class Meta: + model = Page + fields = [ + "name", + "description_html", + "color", + "access", + "parent", + "external_source", + "external_id", + ] + read_only_fields = [ + "id", + "workspace", + "owned_by", + "created_by", + "updated_by", + "created_at", + "updated_at", + "deleted_at", + ] + + +class PageUpdateSerializer(PageCreateSerializer): + """ + Serializer for updating pages via the v1 API. + + Extends PageCreateSerializer for partial update support. + """ + + class Meta(PageCreateSerializer.Meta): + pass + + +class PageSerializer(BaseSerializer): + """ + Full read serializer for pages in the v1 API. + + Returns all page fields including description_html, lock status, + archive state, and associated label/project IDs. + """ + + label_ids = serializers.ListField(child=serializers.UUIDField(), read_only=True) + project_ids = serializers.ListField(child=serializers.UUIDField(), read_only=True) + + class Meta: + model = Page + fields = "__all__" + read_only_fields = [ + "id", + "workspace", + "owned_by", + "created_by", + "updated_by", + "created_at", + "updated_at", + "deleted_at", + ] diff --git a/apps/api/plane/api/urls/__init__.py b/apps/api/plane/api/urls/__init__.py index 4a202431bc7..fc98a9454ab 100644 --- a/apps/api/plane/api/urls/__init__.py +++ b/apps/api/plane/api/urls/__init__.py @@ -14,6 +14,7 @@ from .work_item import urlpatterns as work_item_patterns from .invite import urlpatterns as invite_patterns from .sticky import urlpatterns as sticky_patterns +from .page import urlpatterns as page_patterns urlpatterns = [ *asset_patterns, @@ -22,6 +23,7 @@ *label_patterns, *member_patterns, *module_patterns, + *page_patterns, *project_patterns, *state_patterns, *user_patterns, diff --git 
a/apps/api/plane/api/urls/page.py b/apps/api/plane/api/urls/page.py new file mode 100644 index 00000000000..250d6d94407 --- /dev/null +++ b/apps/api/plane/api/urls/page.py @@ -0,0 +1,23 @@ +# Copyright (c) 2023-present Plane Software, Inc. and contributors +# SPDX-License-Identifier: AGPL-3.0-only +# See the LICENSE file for details. + +from django.urls import path + +from plane.api.views.page import ( + PageListCreateAPIEndpoint, + PageDetailAPIEndpoint, +) + +urlpatterns = [ + path( + "workspaces//projects//pages/", + PageListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), + name="pages", + ), + path( + "workspaces//projects//pages//", + PageDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), + name="pages", + ), +] diff --git a/apps/api/plane/api/views/__init__.py b/apps/api/plane/api/views/__init__.py index 305ebfdb39a..703b69d4633 100644 --- a/apps/api/plane/api/views/__init__.py +++ b/apps/api/plane/api/views/__init__.py @@ -61,3 +61,8 @@ from .invite import WorkspaceInvitationsViewset from .sticky import StickyViewSet + +from .page import ( + PageListCreateAPIEndpoint, + PageDetailAPIEndpoint, +) diff --git a/apps/api/plane/api/views/page.py b/apps/api/plane/api/views/page.py new file mode 100644 index 00000000000..f8662c0c1a4 --- /dev/null +++ b/apps/api/plane/api/views/page.py @@ -0,0 +1,353 @@ +# Copyright (c) 2023-present Plane Software, Inc. and contributors +# SPDX-License-Identifier: AGPL-3.0-only +# See the LICENSE file for details. 
+ +# Django imports +from django.db.models import ( + Q, + Value, + UUIDField, +) +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models.functions import Coalesce + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.api.serializers import ( + PageSerializer, + PageCreateSerializer, + PageUpdateSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + Page, + ProjectPage, + ProjectMember, + UserFavorite, + UserRecentVisit, + Project, +) +from .base import BaseAPIView +from plane.bgtasks.page_transaction_task import page_transaction + + +class PageListCreateAPIEndpoint(BaseAPIView): + """Page List and Create Endpoint""" + + serializer_class = PageSerializer + model = Page + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + def get_queryset(self): + return ( + Page.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter( + projects__id=self.kwargs.get("project_id"), + project_pages__deleted_at__isnull=True, + ) + .filter( + projects__project_projectmember__member=self.request.user, + projects__project_projectmember__is_active=True, + ) + .filter(Q(owned_by=self.request.user) | Q(access=0)) + .filter(archived_at__isnull=True) + .select_related("workspace", "owned_by") + .prefetch_related("projects", "labels") + .annotate( + label_ids=Coalesce( + ArrayAgg( + "page_labels__label_id", + distinct=True, + filter=~Q(page_labels__label_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + project_ids=Coalesce( + ArrayAgg( + "projects__id", + distinct=True, + filter=~Q(projects__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + .order_by("-created_at") + .distinct() + ) + + def get(self, request, slug, project_id): + """List pages + + Retrieve all non-archived pages in 
a project that the user has access to. + """ + return self.paginate( + request=request, + queryset=self.get_queryset(), + on_results=lambda pages: ( + PageSerializer( + pages, + many=True, + fields=self.fields, + expand=self.expand, + ).data + ), + ) + + def post(self, request, slug, project_id): + """Create page + + Create a new page within a project. Creates the Page record and + associates it with the project via ProjectPage. + """ + serializer = PageCreateSerializer(data=request.data) + if serializer.is_valid(): + # Check for duplicate external_id + external_source + if ( + request.data.get("external_id") + and request.data.get("external_source") + and Page.objects.filter( + workspace__slug=slug, + projects__id=project_id, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).exists() + ): + page = Page.objects.filter( + workspace__slug=slug, + projects__id=project_id, + external_source=request.data.get("external_source"), + external_id=request.data.get("external_id"), + ).first() + return Response( + { + "error": "Page with the same external id and external source already exists", + "id": str(page.id), + }, + status=status.HTTP_409_CONFLICT, + ) + + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + page = serializer.save( + workspace_id=project.workspace_id, + owned_by=request.user, + ) + + # Create the project-page association + ProjectPage.objects.create( + workspace_id=project.workspace_id, + project_id=project_id, + page_id=page.id, + ) + + # Fire the page transaction background task + page_transaction.delay( + new_description_html=request.data.get("description_html", "
<p></p>
"), + old_description_html=None, + page_id=page.id, + ) + + # Re-fetch with annotations for the response + page = self.get_queryset().get(pk=page.id) + return Response( + PageSerializer(page).data, + status=status.HTTP_201_CREATED, + ) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class PageDetailAPIEndpoint(BaseAPIView): + """Page Detail Endpoint — retrieve, update, delete""" + + serializer_class = PageSerializer + model = Page + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + def get_queryset(self): + return ( + Page.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter( + projects__id=self.kwargs.get("project_id"), + project_pages__deleted_at__isnull=True, + ) + .filter( + projects__project_projectmember__member=self.request.user, + projects__project_projectmember__is_active=True, + ) + .filter(Q(owned_by=self.request.user) | Q(access=0)) + .select_related("workspace", "owned_by") + .prefetch_related("projects", "labels") + .annotate( + label_ids=Coalesce( + ArrayAgg( + "page_labels__label_id", + distinct=True, + filter=~Q(page_labels__label_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + project_ids=Coalesce( + ArrayAgg( + "projects__id", + distinct=True, + filter=~Q(projects__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + .distinct() + ) + + def get(self, request, slug, project_id, pk): + """Retrieve page + + Get a single page with full details including description_html. + """ + page = self.get_queryset().get(pk=pk) + return Response( + PageSerializer(page, fields=self.fields, expand=self.expand).data, + status=status.HTTP_200_OK, + ) + + def patch(self, request, slug, project_id, pk): + """Update page + + Update page properties. Locked pages cannot be updated. + Only the page owner can change the access level. 
+ """ + page = Page.objects.get( + pk=pk, + workspace__slug=slug, + projects__id=project_id, + project_pages__deleted_at__isnull=True, + ) + + if page.is_locked: + return Response( + {"error": "Page is locked"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Validate parent exists in the same project if provided + parent = request.data.get("parent", None) + if parent: + Page.objects.get( + pk=parent, + workspace__slug=slug, + projects__id=project_id, + project_pages__deleted_at__isnull=True, + ) + + # Only the owner can change access + if page.access != request.data.get("access", page.access) and page.owned_by_id != request.user.id: + return Response( + {"error": "Access cannot be updated since this page is owned by someone else"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + page_description = page.description_html + + serializer = PageUpdateSerializer(page, data=request.data, partial=True) + if serializer.is_valid(): + if ( + request.data.get("external_id") + and (page.external_id != request.data.get("external_id")) + and Page.objects.filter( + workspace__slug=slug, + projects__id=project_id, + external_source=request.data.get("external_source", page.external_source), + external_id=request.data.get("external_id"), + ).exists() + ): + return Response( + { + "error": "Page with the same external id and external source already exists", + "id": str(page.id), + }, + status=status.HTTP_409_CONFLICT, + ) + + serializer.save() + + # Fire page transaction on description change + if request.data.get("description_html"): + page_transaction.delay( + new_description_html=request.data.get("description_html", "
<p></p>
"), + old_description_html=page_description, + page_id=pk, + ) + + # Re-fetch with annotations + page = self.get_queryset().get(pk=pk) + return Response( + PageSerializer(page).data, + status=status.HTTP_200_OK, + ) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, pk): + """Delete page + + Permanently delete a page. The page must be archived first. + Only the owner or a project admin can delete. + """ + page = Page.objects.get( + pk=pk, + workspace__slug=slug, + projects__id=project_id, + project_pages__deleted_at__isnull=True, + ) + + if page.archived_at is None: + return Response( + {"error": "The page should be archived before deleting"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if page.owned_by_id != request.user.id and ( + not ProjectMember.objects.filter( + workspace__slug=slug, + member=request.user, + role=20, + project_id=project_id, + is_active=True, + ).exists() + ): + return Response( + {"error": "Only admin or owner can delete the page"}, + status=status.HTTP_403_FORBIDDEN, + ) + + # Remove parent from all children + Page.objects.filter( + parent_id=pk, + projects__id=project_id, + workspace__slug=slug, + project_pages__deleted_at__isnull=True, + ).update(parent=None) + + page.delete() + + # Clean up favorites + UserFavorite.objects.filter( + project=project_id, + workspace__slug=slug, + entity_identifier=pk, + entity_type="page", + ).delete() + + # Clean up recent visits + UserRecentVisit.objects.filter( + project_id=project_id, + workspace__slug=slug, + entity_identifier=pk, + entity_name="page", + ).delete(soft=False) + + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apps/api/plane/settings/openapi.py b/apps/api/plane/settings/openapi.py index a1961a0c582..58fc567836f 100644 --- a/apps/api/plane/settings/openapi.py +++ b/apps/api/plane/settings/openapi.py @@ -141,6 +141,21 @@ "*Use Cases:* Feature planning, release organization, progress tracking, team 
coordination." ), }, + # Knowledge Management + { + "name": "Pages", + "description": ( + "**Pages & Documentation**\n\n" + "Create and manage pages for project documentation, notes, and knowledge sharing. " + "Organize pages in hierarchies with parent-child relationships.\n\n" + "*Key Features:*\n" + "- Create, update, and delete pages\n" + "- Archive and unarchive pages with descendants\n" + "- Control page access (public or private)\n" + "- Organize pages in parent-child hierarchies\n\n" + "*Use Cases:* Project documentation, meeting notes, knowledge bases, team wikis." + ), + }, # Core Project Management { "name": "Projects", From 5dc4fac9e64cd54b82b787688f6471484277b909 Mon Sep 17 00:00:00 2001 From: zimbatm Date: Fri, 27 Feb 2026 01:10:44 +0100 Subject: [PATCH 2/4] Add contract tests for Pages CRUD endpoints --- .../plane/tests/contract/api/test_pages.py | 464 ++++++++++++++++++ 1 file changed, 464 insertions(+) create mode 100644 apps/api/plane/tests/contract/api/test_pages.py diff --git a/apps/api/plane/tests/contract/api/test_pages.py b/apps/api/plane/tests/contract/api/test_pages.py new file mode 100644 index 00000000000..3a1d7f718e3 --- /dev/null +++ b/apps/api/plane/tests/contract/api/test_pages.py @@ -0,0 +1,464 @@ +# Copyright (c) 2023-present Plane Software, Inc. and contributors +# SPDX-License-Identifier: AGPL-3.0-only +# See the LICENSE file for details. 
+ +import pytest +from unittest.mock import MagicMock, patch +from rest_framework import status +from uuid import uuid4 +from django.utils import timezone + +from plane.db.models import Page, ProjectPage, Project, ProjectMember + + +@pytest.fixture +def project(db, workspace, create_user): + """Create a test project with the user as a member""" + project = Project.objects.create( + name="Test Project", + identifier="TP", + workspace=workspace, + created_by=create_user, + ) + ProjectMember.objects.create( + project=project, + member=create_user, + role=20, # Admin role + is_active=True, + ) + return project + + +@pytest.fixture +def page_data(): + """Sample page data for tests""" + return { + "name": "Test Page", + "description_html": "
<p>Test page content</p>
", + } + + +@pytest.fixture(autouse=True) +def _mock_celery(): + """Prevent all celery tasks from hitting a broker""" + with patch("celery.app.task.Task.delay", return_value=MagicMock(id="mock-task-id")): + yield + + +@pytest.fixture +def create_page(db, project, create_user): + """Create a test page with project association""" + page = Page.objects.create( + name="Existing Page", + description_html="
<p>Existing content</p>
", + workspace=project.workspace, + owned_by=create_user, + ) + ProjectPage.objects.create( + workspace=project.workspace, + project=project, + page=page, + ) + return page + + +@pytest.mark.contract +class TestPageListCreateAPIEndpoint: + """Test Page List and Create API Endpoint""" + + def get_page_url(self, workspace_slug, project_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/pages/" + + @pytest.mark.django_db + def test_create_page_success(self, api_key_client, workspace, project, page_data): + """Test successful page creation""" + url = self.get_page_url(workspace.slug, project.id) + + response = api_key_client.post(url, page_data, format="json") + + assert response.status_code == status.HTTP_201_CREATED + assert Page.objects.count() == 1 + + created_page = Page.objects.first() + assert created_page.name == page_data["name"] + assert created_page.description_html == page_data["description_html"] + assert created_page.owned_by is not None + + # Verify ProjectPage association was created + assert ProjectPage.objects.filter(page=created_page, project=project).exists() + + @pytest.mark.django_db + def test_create_page_with_external_id(self, api_key_client, workspace, project): + """Test creating page with external ID""" + url = self.get_page_url(workspace.slug, project.id) + + data = { + "name": "External Page", + "external_id": "ext-123", + "external_source": "confluence", + } + + response = api_key_client.post(url, data, format="json") + + assert response.status_code == status.HTTP_201_CREATED + created_page = Page.objects.first() + assert created_page.external_id == "ext-123" + assert created_page.external_source == "confluence" + + @pytest.mark.django_db + def test_create_page_duplicate_external_id(self, api_key_client, workspace, project, create_user): + """Test creating page with duplicate external ID returns 409""" + url = self.get_page_url(workspace.slug, project.id) + + # Create first page with external ID + page = 
Page.objects.create( + name="First Page", + workspace=workspace, + owned_by=create_user, + external_id="ext-123", + external_source="confluence", + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # Try to create second page with same external ID + data = { + "name": "Second Page", + "external_id": "ext-123", + "external_source": "confluence", + } + + response = api_key_client.post(url, data, format="json") + + assert response.status_code == status.HTTP_409_CONFLICT + assert "same external id" in response.data["error"] + + @pytest.mark.django_db + def test_list_pages_success(self, api_key_client, workspace, project, create_page, create_user): + """Test successful page listing""" + url = self.get_page_url(workspace.slug, project.id) + + # Create additional pages + for i in range(2): + page = Page.objects.create( + name=f"Page {i + 2}", + workspace=workspace, + owned_by=create_user, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + response = api_key_client.get(url) + + assert response.status_code == status.HTTP_200_OK + assert "results" in response.data + assert len(response.data["results"]) == 3 # Including create_page fixture + + @pytest.mark.django_db + def test_list_pages_excludes_archived(self, api_key_client, workspace, project, create_user): + """Test that archived pages are excluded from listing""" + url = self.get_page_url(workspace.slug, project.id) + + # Create a non-archived page + active_page = Page.objects.create( + name="Active Page", + workspace=workspace, + owned_by=create_user, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=active_page, + ) + + # Create an archived page + archived_page = Page.objects.create( + name="Archived Page", + workspace=workspace, + owned_by=create_user, + archived_at=timezone.now().date(), + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=archived_page, + ) + + 
response = api_key_client.get(url) + + assert response.status_code == status.HTTP_200_OK + page_ids = [str(p["id"]) for p in response.data["results"]] + assert str(active_page.id) in page_ids + assert str(archived_page.id) not in page_ids + + @pytest.mark.django_db + def test_list_pages_excludes_private_pages_of_other_users(self, api_key_client, workspace, project, create_user): + """Test that private pages owned by other users are excluded""" + url = self.get_page_url(workspace.slug, project.id) + + from plane.db.models import User + + other_user = User.objects.create( + email="other@plane.so", + username=f"other_{uuid4().hex[:8]}", + first_name="Other", + last_name="User", + ) + + # Public page by other user -- should be visible + public_page = Page.objects.create( + name="Public Page", + workspace=workspace, + owned_by=other_user, + access=0, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=public_page, + ) + + # Private page by other user -- should be hidden + private_page = Page.objects.create( + name="Private Page", + workspace=workspace, + owned_by=other_user, + access=1, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=private_page, + ) + + response = api_key_client.get(url) + + assert response.status_code == status.HTTP_200_OK + page_ids = [str(p["id"]) for p in response.data["results"]] + assert str(public_page.id) in page_ids + assert str(private_page.id) not in page_ids + + +@pytest.mark.contract +class TestPageDetailAPIEndpoint: + """Test Page Detail API Endpoint""" + + def get_page_detail_url(self, workspace_slug, project_id, page_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/pages/{page_id}/" + + @pytest.mark.django_db + def test_get_page_success(self, api_key_client, workspace, project, create_page): + """Test successful page retrieval""" + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.get(url) + 
+ assert response.status_code == status.HTTP_200_OK + assert str(response.data["id"]) == str(create_page.id) + assert response.data["name"] == create_page.name + + @pytest.mark.django_db + def test_get_page_not_found(self, api_key_client, workspace, project): + """Test getting non-existent page""" + fake_id = uuid4() + url = self.get_page_detail_url(workspace.slug, project.id, fake_id) + + response = api_key_client.get(url) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.django_db + def test_update_page_success(self, api_key_client, workspace, project, create_page): + """Test successful page update""" + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + update_data = { + "name": f"Updated Page {uuid4()}", + } + + response = api_key_client.patch(url, update_data, format="json") + + assert response.status_code == status.HTTP_200_OK + + create_page.refresh_from_db() + assert create_page.name == update_data["name"] + + @pytest.mark.django_db + def test_update_page_description(self, api_key_client, workspace, project, create_page): + """Test updating page description fires page_transaction""" + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + update_data = { + "description_html": "
<p>Updated content</p>
", + } + + response = api_key_client.patch(url, update_data, format="json") + + assert response.status_code == status.HTTP_200_OK + + create_page.refresh_from_db() + assert create_page.description_html == "
<p>Updated content</p>
" + + @pytest.mark.django_db + def test_update_locked_page(self, api_key_client, workspace, project, create_page): + """Test that locked pages cannot be updated""" + create_page.is_locked = True + create_page.save() + + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.patch(url, {"name": "New Name"}, format="json") + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "locked" in response.data["error"].lower() + + @pytest.mark.django_db + def test_update_page_access_by_non_owner(self, api_key_client, workspace, project, create_user): + """Test that non-owners cannot change page access level""" + from plane.db.models import User + + other_user = User.objects.create( + email="owner@plane.so", + username=f"owner_{uuid4().hex[:8]}", + first_name="Owner", + last_name="User", + ) + + page = Page.objects.create( + name="Other's Page", + workspace=workspace, + owned_by=other_user, + access=0, # Public + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + url = self.get_page_detail_url(workspace.slug, project.id, page.id) + + response = api_key_client.patch(url, {"access": 1}, format="json") + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "owned by someone else" in response.data["error"] + + @pytest.mark.django_db + def test_update_page_external_id_conflict(self, api_key_client, workspace, project, create_page, create_user): + """Test updating page with conflicting external ID""" + # Create another page with an external ID + other_page = Page.objects.create( + name="Other Page", + workspace=workspace, + owned_by=create_user, + external_id="ext-456", + external_source="confluence", + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=other_page, + ) + + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.patch( + url, + {"external_id": "ext-456", 
"external_source": "confluence"}, + format="json", + ) + + assert response.status_code == status.HTTP_409_CONFLICT + assert "same external id" in response.data["error"] + + @pytest.mark.django_db + def test_delete_archived_page_success(self, api_key_client, workspace, project, create_page): + """Test successful deletion of an archived page""" + create_page.archived_at = timezone.now().date() + create_page.save() + + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + assert not Page.objects.filter(id=create_page.id).exists() + + @pytest.mark.django_db + def test_delete_non_archived_page(self, api_key_client, workspace, project, create_page): + """Test that non-archived pages cannot be deleted""" + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "archived before deleting" in response.data["error"] + + @pytest.mark.django_db + def test_delete_page_non_owner_non_admin(self, api_key_client, workspace, project, create_user): + """Test that non-owner non-admin cannot delete a page""" + from plane.db.models import User + + other_user = User.objects.create( + email="owner2@plane.so", + username=f"owner2_{uuid4().hex[:8]}", + first_name="Page", + last_name="Owner", + ) + + page = Page.objects.create( + name="Other's Page", + workspace=workspace, + owned_by=other_user, + access=0, + archived_at=timezone.now().date(), + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=page, + ) + + # Downgrade the API user to member (role=15) so they are not admin + ProjectMember.objects.filter(project=project, member=create_user).update(role=15) + + url = self.get_page_detail_url(workspace.slug, project.id, page.id) + + response = api_key_client.delete(url) + + assert response.status_code == 
status.HTTP_403_FORBIDDEN + assert "Only admin or owner" in response.data["error"] + + @pytest.mark.django_db + def test_delete_page_clears_children_parent(self, api_key_client, workspace, project, create_page, create_user): + """Test that deleting a page sets parent=None on its children""" + child_page = Page.objects.create( + name="Child Page", + workspace=workspace, + owned_by=create_user, + parent=create_page, + ) + ProjectPage.objects.create( + workspace=workspace, + project=project, + page=child_page, + ) + + # Archive the parent so it can be deleted + create_page.archived_at = timezone.now().date() + create_page.save() + + url = self.get_page_detail_url(workspace.slug, project.id, create_page.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + + child_page.refresh_from_db() + assert child_page.parent is None + From ad24b5a63897ca633a2f900e3fbc049d6be73adc Mon Sep 17 00:00:00 2001 From: zimbatm Date: Fri, 27 Feb 2026 01:10:51 +0100 Subject: [PATCH 3/4] Add Pages archive/unarchive endpoints to v1 API --- apps/api/plane/api/urls/page.py | 16 +++ apps/api/plane/api/views/__init__.py | 1 + apps/api/plane/api/views/page.py | 145 +++++++++++++++++++++++++++ 3 files changed, 162 insertions(+) diff --git a/apps/api/plane/api/urls/page.py b/apps/api/plane/api/urls/page.py index 250d6d94407..3ef74e569f6 100644 --- a/apps/api/plane/api/urls/page.py +++ b/apps/api/plane/api/urls/page.py @@ -7,6 +7,7 @@ from plane.api.views.page import ( PageListCreateAPIEndpoint, PageDetailAPIEndpoint, + PageArchiveUnarchiveAPIEndpoint, ) urlpatterns = [ @@ -20,4 +21,19 @@ PageDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), name="pages", ), + path( + "workspaces//projects//pages//archive/", + PageArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]), + name="page-archive", + ), + path( + "workspaces//projects//archived-pages/", + PageArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]), 
+ name="page-archive-list", + ), + path( + "workspaces//projects//archived-pages//unarchive/", + PageArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]), + name="page-unarchive", + ), ] diff --git a/apps/api/plane/api/views/__init__.py b/apps/api/plane/api/views/__init__.py index 703b69d4633..7739ebb5bba 100644 --- a/apps/api/plane/api/views/__init__.py +++ b/apps/api/plane/api/views/__init__.py @@ -65,4 +65,5 @@ from .page import ( PageListCreateAPIEndpoint, PageDetailAPIEndpoint, + PageArchiveUnarchiveAPIEndpoint, ) diff --git a/apps/api/plane/api/views/page.py b/apps/api/plane/api/views/page.py index f8662c0c1a4..9b298e8201f 100644 --- a/apps/api/plane/api/views/page.py +++ b/apps/api/plane/api/views/page.py @@ -11,6 +11,7 @@ from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.fields import ArrayField from django.db.models.functions import Coalesce +from django.utils import timezone # Third party imports from rest_framework import status @@ -31,7 +32,9 @@ UserRecentVisit, Project, ) +from plane.db.models.project import ROLE from .base import BaseAPIView +from plane.app.views.page.base import unarchive_archive_page_and_descendants from plane.bgtasks.page_transaction_task import page_transaction @@ -351,3 +354,145 @@ def delete(self, request, slug, project_id, pk): ).delete(soft=False) return Response(status=status.HTTP_204_NO_CONTENT) + + +class PageArchiveUnarchiveAPIEndpoint(BaseAPIView): + """Page Archive and Unarchive Endpoint""" + + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + def get_queryset(self): + return ( + Page.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter( + projects__id=self.kwargs.get("project_id"), + project_pages__deleted_at__isnull=True, + ) + .filter( + projects__project_projectmember__member=self.request.user, + projects__project_projectmember__is_active=True, + ) + .filter(Q(owned_by=self.request.user) | Q(access=0)) + 
.filter(archived_at__isnull=False) + .select_related("workspace", "owned_by") + .prefetch_related("projects", "labels") + .annotate( + label_ids=Coalesce( + ArrayAgg( + "page_labels__label_id", + distinct=True, + filter=~Q(page_labels__label_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + project_ids=Coalesce( + ArrayAgg( + "projects__id", + distinct=True, + filter=~Q(projects__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + .order_by("-created_at") + .distinct() + ) + + def get(self, request, slug, project_id): + """List archived pages + + Retrieve all pages that have been archived in the project. + """ + return self.paginate( + request=request, + queryset=self.get_queryset(), + on_results=lambda pages: ( + PageSerializer( + pages, + many=True, + fields=self.fields, + expand=self.expand, + ).data + ), + ) + + def post(self, request, slug, project_id, page_id): + """Archive page + + Move a page and its descendants to archived status. + Only the page owner or a project admin can archive. 
+ """ + page = Page.objects.get( + pk=page_id, + workspace__slug=slug, + projects__id=project_id, + project_pages__deleted_at__isnull=True, + ) + + # Only the owner or admin can archive + if ( + ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + is_active=True, + role__lte=ROLE.MEMBER.value, + ).exists() + and request.user.id != page.owned_by_id + ): + return Response( + {"error": "Only the owner or admin can archive the page"}, + status=status.HTTP_403_FORBIDDEN, + ) + + UserFavorite.objects.filter( + entity_type="page", + entity_identifier=page_id, + project_id=project_id, + workspace__slug=slug, + ).delete() + + today = timezone.now().date() + unarchive_archive_page_and_descendants(page_id, today) + + return Response( + {"archived_at": str(today)}, + status=status.HTTP_200_OK, + ) + + def delete(self, request, slug, project_id, page_id): + """Unarchive page + + Restore an archived page and its descendants to active status. + Only the page owner or a project admin can unarchive. 
+ """ + page = Page.objects.get( + pk=page_id, + workspace__slug=slug, + projects__id=project_id, + project_pages__deleted_at__isnull=True, + ) + + # Only the owner or admin can unarchive + if ( + ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + is_active=True, + role__lte=ROLE.MEMBER.value, + ).exists() + and request.user.id != page.owned_by_id + ): + return Response( + {"error": "Only the owner or admin can unarchive the page"}, + status=status.HTTP_403_FORBIDDEN, + ) + + # If parent is still archived, break the hierarchy + if page.parent_id and page.parent.archived_at: + page.parent = None + page.save(update_fields=["parent"]) + + unarchive_archive_page_and_descendants(page_id, None) + + return Response(status=status.HTTP_204_NO_CONTENT) From a43cc26ccbc16556f1c42c1622d30ae9a07e820b Mon Sep 17 00:00:00 2001 From: zimbatm Date: Fri, 27 Feb 2026 01:10:56 +0100 Subject: [PATCH 4/4] Add contract tests for Pages archive/unarchive endpoints --- .../plane/tests/contract/api/test_pages.py | 307 ++++++++++++++++++ 1 file changed, 307 insertions(+) diff --git a/apps/api/plane/tests/contract/api/test_pages.py b/apps/api/plane/tests/contract/api/test_pages.py index 3a1d7f718e3..d3c411228e1 100644 --- a/apps/api/plane/tests/contract/api/test_pages.py +++ b/apps/api/plane/tests/contract/api/test_pages.py @@ -462,3 +462,310 @@ def test_delete_page_clears_children_parent(self, api_key_client, workspace, pro child_page.refresh_from_db() assert child_page.parent is None + +@pytest.mark.contract +class TestPageArchiveUnarchiveAPIEndpoint: + """Test Page Archive and Unarchive API Endpoint""" + + def get_archive_url(self, workspace_slug, project_id, page_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/pages/{page_id}/archive/" + + def get_unarchive_url(self, workspace_slug, project_id, page_id): + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/archived-pages/{page_id}/unarchive/" + + def 
    def get_archived_list_url(self, workspace_slug, project_id):
        return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/archived-pages/"

    # NOTE(review): these tests assume `api_key_client` authenticates as
    # `create_user` and that the `project` fixture makes that user an admin
    # (role=20) -- confirm against the shared fixtures.
    @pytest.mark.django_db
    def test_archive_page_success(self, api_key_client, workspace, project, create_page):
        """Test successful page archiving"""
        url = self.get_archive_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.post(url)

        assert response.status_code == status.HTTP_200_OK
        assert "archived_at" in response.data

        # Verify the response returns a date string (YYYY-MM-DD), not a datetime
        import re

        assert re.fullmatch(r"\d{4}-\d{2}-\d{2}", response.data["archived_at"])

        create_page.refresh_from_db()
        assert create_page.archived_at is not None
        # Verify the response matches what's stored in the DB
        assert response.data["archived_at"] == str(create_page.archived_at)

    @pytest.mark.django_db
    def test_archive_page_archives_descendants(self, api_key_client, workspace, project, create_page, create_user):
        """Test that archiving a page also archives its descendants"""
        # Child page linked to the same project so the API can see it.
        child_page = Page.objects.create(
            name="Child Page",
            workspace=workspace,
            owned_by=create_user,
            parent=create_page,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=child_page,
        )

        url = self.get_archive_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.post(url)

        assert response.status_code == status.HTTP_200_OK

        # Archiving the parent must cascade to the child.
        child_page.refresh_from_db()
        assert child_page.archived_at is not None

    @pytest.mark.django_db
    def test_archive_page_non_owner_non_admin(self, api_key_client, workspace, project, create_user):
        """Test that non-owner non-admin cannot archive a page"""
        from plane.db.models import User

        # Page owned by somebody other than the requesting user.
        other_user = User.objects.create(
            email="archowner@plane.so",
            username=f"archowner_{uuid4().hex[:8]}",
            first_name="Arch",
            last_name="Owner",
        )

        page = Page.objects.create(
            name="Other's Page",
            workspace=workspace,
            owned_by=other_user,
            access=0,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=page,
        )

        # Downgrade to member role
        ProjectMember.objects.filter(project=project, member=create_user).update(role=15)

        url = self.get_archive_url(workspace.slug, project.id, page.id)

        response = api_key_client.post(url)

        assert response.status_code == status.HTTP_403_FORBIDDEN
        assert "owner or admin" in response.data["error"].lower()

    @pytest.mark.django_db
    def test_archive_page_non_owner_admin_success(self, api_key_client, workspace, project, create_user):
        """Test that a project admin can archive a page they don't own"""
        from plane.db.models import User

        other_user = User.objects.create(
            email="pageowner@plane.so",
            username=f"pageowner_{uuid4().hex[:8]}",
            first_name="Page",
            last_name="Owner",
        )

        page = Page.objects.create(
            name="Other's Page",
            workspace=workspace,
            owned_by=other_user,
            access=0,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=page,
        )

        # create_user is already admin (role=20) from the project fixture
        url = self.get_archive_url(workspace.slug, project.id, page.id)

        response = api_key_client.post(url)

        assert response.status_code == status.HTTP_200_OK
        page.refresh_from_db()
        assert page.archived_at is not None

    @pytest.mark.django_db
    def test_unarchive_page_non_owner_non_admin(self, api_key_client, workspace, project, create_user):
        """Test that non-owner non-admin cannot unarchive a page"""
        from plane.db.models import User

        other_user = User.objects.create(
            email="unarchowner@plane.so",
            username=f"unarchowner_{uuid4().hex[:8]}",
            first_name="Unarch",
            last_name="Owner",
        )

        # Start from an already-archived page owned by someone else.
        page = Page.objects.create(
            name="Other's Page",
            workspace=workspace,
            owned_by=other_user,
            access=0,
            archived_at=timezone.now().date(),
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=page,
        )

        # Downgrade to member role
        ProjectMember.objects.filter(project=project, member=create_user).update(role=15)

        url = self.get_unarchive_url(workspace.slug, project.id, page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_403_FORBIDDEN
        assert "owner or admin" in response.data["error"].lower()

    @pytest.mark.django_db
    def test_unarchive_page_non_owner_admin_success(self, api_key_client, workspace, project, create_user):
        """Test that a project admin can unarchive a page they don't own"""
        from plane.db.models import User

        other_user = User.objects.create(
            email="unarchowner2@plane.so",
            username=f"unarchowner2_{uuid4().hex[:8]}",
            first_name="Unarch2",
            last_name="Owner",
        )

        page = Page.objects.create(
            name="Other's Page",
            workspace=workspace,
            owned_by=other_user,
            access=0,
            archived_at=timezone.now().date(),
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=page,
        )

        # create_user is already admin (role=20) from the project fixture
        url = self.get_unarchive_url(workspace.slug, project.id, page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_204_NO_CONTENT
        page.refresh_from_db()
        assert page.archived_at is None

    @pytest.mark.django_db
    def test_unarchive_page_success(self, api_key_client, workspace, project, create_page):
        """Test successful page unarchiving"""
        create_page.archived_at = timezone.now().date()
        create_page.save()

        url = self.get_unarchive_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_204_NO_CONTENT

        create_page.refresh_from_db()
        assert create_page.archived_at is None

    @pytest.mark.django_db
    def test_unarchive_page_unarchives_descendants(self, api_key_client, workspace, project, create_page, create_user):
        """Test that unarchiving a page also unarchives its descendants"""
        now = timezone.now().date()
        create_page.archived_at = now
        create_page.save()

        child_page = Page.objects.create(
            name="Child Page",
            workspace=workspace,
            owned_by=create_user,
            parent=create_page,
            archived_at=now,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=child_page,
        )

        url = self.get_unarchive_url(workspace.slug, project.id, create_page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_204_NO_CONTENT

        # Unarchiving the parent must cascade to the child.
        child_page.refresh_from_db()
        assert child_page.archived_at is None

    @pytest.mark.django_db
    def test_unarchive_page_breaks_archived_parent_hierarchy(
        self, api_key_client, workspace, project, create_page, create_user
    ):
        """Test that unarchiving a child with an archived parent sets parent=None"""
        now = timezone.now().date()
        create_page.archived_at = now
        create_page.save()

        child_page = Page.objects.create(
            name="Child Page",
            workspace=workspace,
            owned_by=create_user,
            parent=create_page,
            archived_at=now,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=child_page,
        )

        # Unarchive only the child, not the parent
        url = self.get_unarchive_url(workspace.slug, project.id, child_page.id)

        response = api_key_client.delete(url)

        assert response.status_code == status.HTTP_204_NO_CONTENT

        # The child is restored and detached from its still-archived parent.
        child_page.refresh_from_db()
        assert child_page.archived_at is None
        assert child_page.parent is None

    @pytest.mark.django_db
    def test_list_archived_pages(self, api_key_client, workspace, project, create_user):
        """Test listing archived pages"""
        # Create an active page
        active_page = Page.objects.create(
            name="Active Page",
            workspace=workspace,
            owned_by=create_user,
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=active_page,
        )

        # Create an archived page
        archived_page = Page.objects.create(
            name="Archived Page",
            workspace=workspace,
            owned_by=create_user,
            archived_at=timezone.now().date(),
        )
        ProjectPage.objects.create(
            workspace=workspace,
            project=project,
            page=archived_page,
        )

        url = self.get_archived_list_url(workspace.slug, project.id)

        response = api_key_client.get(url)

        assert response.status_code == status.HTTP_200_OK
        # The archived-pages list must contain only archived pages.
        page_ids = [str(p["id"]) for p in response.data["results"]]
        assert str(archived_page.id) in page_ids
        assert str(active_page.id) not in page_ids