diff --git a/docs/changelog/next_release/41.bugfix.rst b/docs/changelog/next_release/41.bugfix.rst
new file mode 100644
index 00000000..5e4c696e
--- /dev/null
+++ b/docs/changelog/next_release/41.bugfix.rst
@@ -0,0 +1 @@
+Fix 500 error while creating HDFS connection.
diff --git a/syncmaster/backend/api/v1/router.py b/syncmaster/backend/api/v1/router.py
index 28da8cc1..c4cbe0d3 100644
--- a/syncmaster/backend/api/v1/router.py
+++ b/syncmaster/backend/api/v1/router.py
@@ -6,7 +6,7 @@
 from syncmaster.backend.api.v1.connections import router as connection_router
 from syncmaster.backend.api.v1.groups import router as group_router
 from syncmaster.backend.api.v1.queue import router as queue_router
-from syncmaster.backend.api.v1.transfers.router import router as transfer_router
+from syncmaster.backend.api.v1.transfers import router as transfer_router
 from syncmaster.backend.api.v1.users import router as user_router
 
 router = APIRouter(prefix="/v1")
diff --git a/syncmaster/backend/api/v1/transfers/router.py b/syncmaster/backend/api/v1/transfers.py
similarity index 98%
rename from syncmaster/backend/api/v1/transfers/router.py
rename to syncmaster/backend/api/v1/transfers.py
index 4c795d07..3aca7302 100644
--- a/syncmaster/backend/api/v1/transfers/router.py
+++ b/syncmaster/backend/api/v1/transfers.py
@@ -6,9 +6,6 @@
 from kombu.exceptions import KombuError
 
 from syncmaster.backend.api.deps import UnitOfWorkMarker
-from syncmaster.backend.api.v1.transfers.utils import (
-    process_file_transfer_directory_path,
-)
 from syncmaster.backend.services import UnitOfWork, get_user
 from syncmaster.db.models import Status, User
 from syncmaster.db.utils import Permission
@@ -115,8 +112,6 @@ async def create_transfer(
     if transfer_data.group_id != queue.group_id:
         raise DifferentTransferAndQueueGroupError
 
-    transfer_data = process_file_transfer_directory_path(transfer_data)  # type: ignore
-
     async with unit_of_work:
         transfer = await unit_of_work.transfer.create(
             group_id=transfer_data.group_id,
@@ -316,8 +311,6 @@ async def update_transfer(
                 params_type=transfer_data.source_params.type,
             )
 
-    transfer_data = process_file_transfer_directory_path(transfer_data)  # type: ignore
-
     async with unit_of_work:
         transfer = await unit_of_work.transfer.update(
             transfer=transfer,
diff --git a/syncmaster/backend/api/v1/transfers/__init__.py b/syncmaster/backend/api/v1/transfers/__init__.py
deleted file mode 100644
index 104aecaf..00000000
--- a/syncmaster/backend/api/v1/transfers/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems)
-# SPDX-License-Identifier: Apache-2.0
diff --git a/syncmaster/backend/api/v1/transfers/utils.py b/syncmaster/backend/api/v1/transfers/utils.py
deleted file mode 100644
index 22b8232a..00000000
--- a/syncmaster/backend/api/v1/transfers/utils.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# SPDX-FileCopyrightText: 2023-2024 MTS (Mobile Telesystems)
-# SPDX-License-Identifier: Apache-2.0
-from syncmaster.schemas.v1.transfers import CreateTransferSchema, UpdateTransferSchema
-
-
-def process_file_transfer_directory_path(
-    transfer_data: UpdateTransferSchema | CreateTransferSchema,
-) -> UpdateTransferSchema | CreateTransferSchema:
-    if transfer_data.source_params is not None:
-        if hasattr(transfer_data.source_params, "directory_path"):  # s3 or hdfs connection
-            transfer_data.source_params.directory_path = str(transfer_data.source_params.directory_path)
-
-    if transfer_data.target_params is not None:
-        if hasattr(transfer_data.source_params, "directory_path"):  # s3 or hdfs connection
-            transfer_data.target_params.directory_path = str(transfer_data.target_params.directory_path)  # type: ignore
-
-    return transfer_data
diff --git a/syncmaster/schemas/v1/transfers/file/base.py b/syncmaster/schemas/v1/transfers/file/base.py
index d01041c3..f03217e6 100644
--- a/syncmaster/schemas/v1/transfers/file/base.py
+++ b/syncmaster/schemas/v1/transfers/file/base.py
@@ -4,15 +4,11 @@
 
 from pathlib import PurePosixPath
 
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, field_validator
 
 from syncmaster.schemas.v1.transfers.file_format import CSV, JSON, JSONLine
 
 
-def validate_directory_path(path: str) -> PurePosixPath:
-    return PurePosixPath(path)
-
-
 # At the moment the ReadTransferSourceParams and ReadTransferTargetParams
 # classes are identical but may change in the future
 class ReadFileTransferSource(BaseModel):
@@ -28,20 +24,30 @@ class ReadFileTransferTarget(BaseModel):
 # At the moment the CreateTransferSourceParams and CreateTransferTargetParams
 # classes are identical but may change in the future
 class CreateFileTransferSource(BaseModel):
-    directory_path: PurePosixPath
+    directory_path: str
     file_format: CSV | JSONLine | JSON = Field(..., discriminator="type")
 
     class Config:
         arbitrary_types_allowed = True
 
-    _validate_dir_path = validator("directory_path", allow_reuse=True, pre=True)(validate_directory_path)
+    @field_validator("directory_path", mode="before")
+    @classmethod
+    def _directory_path_is_valid_path(cls, value):
+        if not PurePosixPath(value).is_absolute():
+            raise ValueError("Directory path must be absolute")
+        return value
 
 
 class CreateFileTransferTarget(BaseModel):
-    directory_path: PurePosixPath
+    directory_path: str
     file_format: CSV | JSONLine = Field(..., discriminator="type")  # JSON FORMAT IS NOT SUPPORTED AS A TARGET !
 
     class Config:
         arbitrary_types_allowed = True
 
-    _validate_dir_path = validator("directory_path", allow_reuse=True, pre=True)(validate_directory_path)
+    @field_validator("directory_path", mode="before")
+    @classmethod
+    def _directory_path_is_valid_path(cls, value):
+        if not PurePosixPath(value).is_absolute():
+            raise ValueError("Directory path must be absolute")
+        return value
diff --git a/tests/test_unit/test_transfers/test_file_transfers/test_create_transfer.py b/tests/test_unit/test_transfers/test_file_transfers/test_create_transfer.py
index f8762140..27277f60 100644
--- a/tests/test_unit/test_transfers/test_file_transfers/test_create_transfer.py
+++ b/tests/test_unit/test_transfers/test_file_transfers/test_create_transfer.py
@@ -173,3 +173,81 @@ async def test_developer_plus_can_create_hdfs_transfer(
         "strategy_params": transfer.strategy_params,
         "queue_id": transfer.queue_id,
     }
+
+
+@pytest.mark.parametrize(
+    "create_connection_data",
+    [
+        {
+            "type": "s3",
+            "host": "localhost",
+            "port": 443,
+        },
+    ],
+    indirect=True,
+)
+@pytest.mark.parametrize(
+    "target_source_params",
+    [
+        {
+            "type": "s3",
+            "directory_path": "some/path",
+            "file_format": {
+                "type": "csv",
+            },
+        },
+    ],
+)
+async def test_cannot_create_file_transfer_with_relative_path(
+    client: AsyncClient,
+    two_group_connections: tuple[MockConnection, MockConnection],
+    group_queue: Queue,
+    mock_group: MockGroup,
+    target_source_params: dict,
+    create_connection_data: dict,
+):
+    # Arrange
+    first_connection, second_connection = two_group_connections
+    user = mock_group.get_member_of_role(UserTestRoles.Developer)
+
+    # Act
+    result = await client.post(
+        "v1/transfers",
+        headers={"Authorization": f"Bearer {user.token}"},
+        json={
+            "group_id": mock_group.group.id,
+            "name": "new test transfer",
+            "description": "",
+            "is_scheduled": False,
+            "schedule": "",
+            "source_connection_id": first_connection.id,
+            "target_connection_id": second_connection.id,
+            "source_params": target_source_params,
+            "target_params": target_source_params,
+            "strategy_params": {"type": "full"},
+            "queue_id": group_queue.id,
+        },
+    )
+
+    # Assert
+    assert result.status_code == 422
+    assert result.json() == {
+        "detail": [
+            {
+                "ctx": {"error": {}},
+                "input": "some/path",
+                "loc": ["body", "source_params", "s3", "directory_path"],
+                "msg": "Value error, Directory path must be absolute",
+                "type": "value_error",
+                "url": "https://errors.pydantic.dev/2.7/v/value_error",
+            },
+            {
+                "ctx": {"error": {}},
+                "input": "some/path",
+                "loc": ["body", "target_params", "s3", "directory_path"],
+                "msg": "Value error, Directory path must be absolute",
+                "type": "value_error",
+                "url": "https://errors.pydantic.dev/2.7/v/value_error",
+            },
+        ],
+    }
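
Note on the schema change: the diff replaces pydantic v1's reusable `validator` with a v2-style `field_validator`, and stores `directory_path` as a plain `str` rather than a `PurePosixPath`. Keeping the field a `str` is the likely fix for the 500 in the changelog, since the deleted helper that stringified the path checked `transfer_data.source_params` in both branches and so could leave an unserializable `PurePosixPath` in `target_params`. The following is a minimal standalone sketch of the validation pattern; it mirrors the model and validator names from the diff but omits the `file_format` discriminated union, so treat it as an illustration rather than the project's actual schema:

    from pathlib import PurePosixPath

    from pydantic import BaseModel, ValidationError, field_validator


    class CreateFileTransferSource(BaseModel):
        # Kept as str so the stored params stay JSON-serializable.
        directory_path: str

        @field_validator("directory_path", mode="before")
        @classmethod
        def _directory_path_is_valid_path(cls, value):
            # PurePosixPath is used only to inspect the value; it is never stored.
            if not PurePosixPath(value).is_absolute():
                raise ValueError("Directory path must be absolute")
            return value


    CreateFileTransferSource(directory_path="/data/exports")  # accepted

    try:
        CreateFileTransferSource(directory_path="some/path")  # rejected
    except ValidationError as e:
        # FastAPI surfaces this as the 422 response asserted in the new test.
        print(e.errors()[0]["msg"])  # Value error, Directory path must be absolute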