From 0b09bc5eb1e7dec56e3a28872b17a50de93538b3 Mon Sep 17 00:00:00 2001 From: fab-dlock Date: Fri, 21 Jul 2023 14:15:59 +0200 Subject: [PATCH] wip --- .github/workflows/ci.yml | 3 + .gitignore | 1 + Makefile | 10 ++ distributed_lock/__init__.py | 29 ++++ distributed_lock/const.py | 5 + distributed_lock/exception.py | 29 ++++ distributed_lock/sync.py | 240 ++++++++++++++++++++++++++++++++++ poetry.lock | 112 +++++++++++++++- pyproject.toml | 22 +++- tests/test_misc.py | 45 +++++++ tests/test_sync.py | 110 ++++++++++++++++ 11 files changed, 604 insertions(+), 2 deletions(-) create mode 100644 distributed_lock/const.py create mode 100644 distributed_lock/exception.py create mode 100644 distributed_lock/sync.py create mode 100644 tests/test_misc.py create mode 100644 tests/test_sync.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 859c6e3..b25c1b0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,3 +50,6 @@ jobs: - name: Run lint run: | poetry run make LINT_FIX=0 lint + - name: Run test + run: | + poetry run make test diff --git a/.gitignore b/.gitignore index 68bc17f..8e25ea7 100644 --- a/.gitignore +++ b/.gitignore @@ -158,3 +158,4 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
#.idea/ +/.vscode diff --git a/Makefile b/Makefile index 874a2ae..f2b97dd 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,5 @@ LINT_FIX = 1 +COVERAGE = 0 .PHONY: lint_ruff lint_ruff: @@ -22,3 +23,12 @@ lint_mypy: .PHONY: lint lint: lint_ruff lint_black lint_mypy + +.PHONY: test +test: +ifeq ($(COVERAGE),0) + pytest tests +else + pytest --no-cov-on-fail --cov=distributed_lock --cov-report=term --cov-report=html --cov-report=xml tests +endif + diff --git a/distributed_lock/__init__.py b/distributed_lock/__init__.py index e69de29..1671f72 100644 --- a/distributed_lock/__init__.py +++ b/distributed_lock/__init__.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from distributed_lock.const import DEFAULT_CLUSTER, DEFAULT_LIFETIME, DEFAULT_WAIT +from distributed_lock.exception import ( + BadConfigurationError, + DistributedLockError, + DistributedLockException, + NotAcquiredError, + NotAcquiredException, + NotReleasedError, + NotReleasedException, +) +from distributed_lock.sync import AcquiredRessource, DistributedLockClient + +__all__ = [ + "DEFAULT_CLUSTER", + "DEFAULT_LIFETIME", + "DEFAULT_WAIT", + "AcquiredRessource", + "DistributedLockClient", + "DistributedLockException", + "NotAcquiredError", + "NotReleasedException", + "NotReleasedError", + "NotAcquiredException", + "BadConfigurationError", + "DistributedLockError", + "DistributedLockException", +] diff --git a/distributed_lock/const.py b/distributed_lock/const.py new file mode 100644 index 0000000..5d166b2 --- /dev/null +++ b/distributed_lock/const.py @@ -0,0 +1,5 @@ +from __future__ import annotations + +DEFAULT_CLUSTER = "europe-free" +DEFAULT_LIFETIME = 3600 +DEFAULT_WAIT = 10 diff --git a/distributed_lock/exception.py b/distributed_lock/exception.py new file mode 100644 index 0000000..482edaf --- /dev/null +++ b/distributed_lock/exception.py @@ -0,0 +1,29 @@ +from __future__ import annotations + + +class DistributedLockException(Exception): + pass + + +class 
DistributedLockError(DistributedLockException): + pass + + +class BadConfigurationError(DistributedLockError): + pass + + +class NotAcquiredException(DistributedLockException): + pass + + +class NotReleasedException(DistributedLockException): + pass + + +class NotReleasedError(DistributedLockError): + pass + + +class NotAcquiredError(DistributedLockError): + pass diff --git a/distributed_lock/sync.py b/distributed_lock/sync.py new file mode 100644 index 0000000..e55460c --- /dev/null +++ b/distributed_lock/sync.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +import datetime +import logging +import os +import time +from contextlib import contextmanager +from dataclasses import asdict, dataclass, field +from typing import Any + +import httpx + +from distributed_lock.const import DEFAULT_CLUSTER, DEFAULT_LIFETIME, DEFAULT_WAIT +from distributed_lock.exception import ( + BadConfigurationError, + DistributedLockError, + DistributedLockException, + NotAcquiredError, + NotAcquiredException, + NotReleasedError, + NotReleasedException, +) + +logger = logging.getLogger("distributed-lock.sync") + + +def get_cluster() -> str: + if os.environ.get("DLOCK_CLUSTER"): + return os.environ["DLOCK_CLUSTER"].lower().strip() + return DEFAULT_CLUSTER + + +def get_token() -> str: + if os.environ.get("DLOCK_TOKEN"): + return os.environ["DLOCK_TOKEN"].strip() + raise BadConfigurationError("You must provide a token (or set DLOCK_TOKEN env var)") + + +def get_tenant_id() -> str: + if os.environ.get("DLOCK_TENANT_ID"): + return os.environ["DLOCK_TENANT_ID"].lower().strip() + raise BadConfigurationError( + "You must provide a tenant_id (or set DLOCK_TENANT_ID env var)" + ) + + +def make_httpx_client() -> httpx.Client: + timeout = httpx.Timeout(connect=10.0, read=65.0, write=10.0, pool=10.0) + return httpx.Client(timeout=timeout) + + +@dataclass +class AcquiredRessource: + resource: str + lock_id: str + + @classmethod + def from_dict(cls, d: dict) -> AcquiredRessource: + for 
f in ("lock_id", "resource"): + if f not in d: + raise DistributedLockError(f"bad reply from service, missing {f}") + return cls(resource=d["resource"], lock_id=d["lock_id"]) + + def to_dict(self) -> dict: + return asdict(self) + + +@dataclass +class DistributedLockClient: + cluster: str = field(default_factory=get_cluster) + token: str = field(default_factory=get_token) + tenant_id: str = field(default_factory=get_tenant_id) + client: httpx.Client = field(default_factory=make_httpx_client) + user_agent: str | None = None + service_wait: int = DEFAULT_WAIT + + def get_resource_url(self, resource: str) -> str: + return f"https://{self.cluster}.distributed-lock.com/exclusive_locks/{self.tenant_id}/{resource}" + + def get_headers(self) -> dict[str, str]: + return {"Authorization": f"Bearer {self.token}"} + + def __del__(self): + self.client.close() + + def _acquire( + self, + resource: str, + lifetime: int = DEFAULT_LIFETIME, + user_data: str | None = None, + ) -> AcquiredRessource: + body: dict[str, Any] = {"wait": self.service_wait, "lifetime": lifetime} + if self.user_agent: + body["user_agent"] = self.user_agent + if user_data: + body["user_data"] = user_data + url = self.get_resource_url(resource) + logger.debug(f"Try to lock {resource} with url: {url}...") + try: + r = self.client.post(url, json=body, headers=self.get_headers()) + except httpx.ConnectTimeout as e: + logger.warning(f"connect timeout error during POST on {url}") + raise NotAcquiredError("timeout during connect") from e + except httpx.ReadTimeout as e: + logger.warning(f"read timeout error during POST on {url}") + raise NotAcquiredError("timeout during read") from e + except httpx.WriteTimeout as e: + logger.warning(f"write timeout error during POST on {url}") + raise NotAcquiredError("timeout during write") from e + except httpx.PoolTimeout as e: + logger.warning("timeout in connection pool") + raise NotAcquiredError("timeout in connection pool") from e + except httpx.HTTPError as e: + 
logger.warning("generic http error") + raise NotAcquiredError("generic http error") from e + if r.status_code == 409: + logger.info(f"Lock on {resource} NOT acquired") + raise NotAcquiredException() + # FIXME other codes + d = r.json() + logger.info(f"Lock on {resource} acquired") + return AcquiredRessource.from_dict(d) + + def acquire_exclusive_lock( + self, + resource: str, + lifetime: int = DEFAULT_LIFETIME, + wait: int = DEFAULT_WAIT, + user_data: str | None = None, + automatic_retry: bool = True, + sleep_after_unsuccessful: float = 1.0, + ) -> AcquiredRessource: + before = datetime.datetime.utcnow() + while True: + catched_exception: Exception | None = None + try: + return self._acquire( + resource=resource, lifetime=lifetime, user_data=user_data + ) + except DistributedLockError as e: + if not automatic_retry: + raise + catched_exception = e + except DistributedLockException as e: + catched_exception = e + elapsed = (datetime.datetime.utcnow() - before).total_seconds() + if elapsed > wait - sleep_after_unsuccessful: + raise catched_exception + logger.debug(f"wait {sleep_after_unsuccessful}s...") + time.sleep(sleep_after_unsuccessful) + if elapsed + sleep_after_unsuccessful + self.service_wait > wait: + self.service_wait = max( + int(wait - elapsed - sleep_after_unsuccessful), 1 + ) + + def _release(self, resource: str, lock_id: str): + url = self.get_resource_url(resource) + "/" + lock_id + logger.debug(f"Try to unlock {resource} with url: {url}...") + try: + r = self.client.delete(url, headers=self.get_headers()) + except httpx.ConnectTimeout as e: + logger.warning(f"connect timeout error during DELETE on {url}") + raise NotReleasedError("timeout during connect") from e + except httpx.ReadTimeout as e: + logger.warning(f"read timeout error during DELETE on {url}") + raise NotReleasedError("timeout during read") from e + except httpx.WriteTimeout as e: + logger.warning(f"write timeout error during DELETE on {url}") + raise NotReleasedError("timeout during 
write") from e + except httpx.PoolTimeout as e: + logger.warning("timeout in connection pool") + raise NotReleasedError("timeout in connection pool") from e + except httpx.HTTPError as e: + logger.warning("generic http error") + raise NotReleasedError("generic http error") from e + if r.status_code == 409: + logger.warning( + f"Lock on {resource} NOT released (because it's acquired by another lock_id!)" + ) + raise NotReleasedException() + if r.status_code == 204: + return + logger.warning( + f"Lock on {resource} NOT released (because of an unexpected status code: {r.status_code})" + ) + raise NotReleasedError(f"unexpected status code: {r.status_code}") + + def release_exclusive_lock( + self, + resource: str, + lock_id: str, + wait: int = 30, + automatic_retry: bool = True, + sleep_after_unsuccessful: float = 1.0, + ): + before = datetime.datetime.utcnow() + while True: + catched_exception = None + try: + return self._release(resource=resource, lock_id=lock_id) + except DistributedLockError as e: + if not automatic_retry: + raise + catched_exception = e + elapsed = (datetime.datetime.utcnow() - before).total_seconds() + if elapsed > wait - sleep_after_unsuccessful: + raise catched_exception + logger.debug(f"wait {sleep_after_unsuccessful}s...") + time.sleep(sleep_after_unsuccessful) + + @contextmanager + def exclusive_lock( + self, + resource: str, + lifetime: int = DEFAULT_LIFETIME, + wait: int = DEFAULT_WAIT, + user_data: str | None = None, + automatic_retry: bool = True, + sleep_after_unsuccessful: float = 1.0, + ): + ar: AcquiredRessource | None = None + try: + ar = self.acquire_exclusive_lock( + resource=resource, + lifetime=lifetime, + wait=wait, + user_data=user_data, + automatic_retry=automatic_retry, + sleep_after_unsuccessful=sleep_after_unsuccessful, + ) + yield + finally: + if ar is not None: + self.release_exclusive_lock( + resource=resource, + lock_id=ar.lock_id, + wait=wait, + automatic_retry=automatic_retry, + 
sleep_after_unsuccessful=sleep_after_unsuccessful, + ) diff --git a/poetry.lock b/poetry.lock index 7fb0916..66fc204 100644 --- a/poetry.lock +++ b/poetry.lock @@ -108,6 +108,82 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.2.7" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = 
"sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = 
"coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = 
"coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = 
"coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +] + +[package.dependencies] +tomli = {version = 
"*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "exceptiongroup" version = "1.1.2" @@ -343,6 +419,40 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "respx" +version = "0.20.2" +description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"}, + {file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"}, +] + +[package.dependencies] +httpx = ">=0.21.0" + [[package]] name = "ruff" version = "0.0.278" @@ -409,4 +519,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "6db9993ac03215b4ed31ce9c8ad09ee6a5462d4e3c392e8647137bc9d7c275d4" +content-hash = "cb67194898fedf4189a44e4c35c091560b6edb5dd7acb8cad7089b2e17855a43" diff --git a/pyproject.toml b/pyproject.toml index ec15969..90a3e2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "distributed-lock" +name = "distributed-lock-client" version = "0.1.0" description = "" authors = ["fab-dlock "] @@ -17,7 +17,27 @@ black = "^23.7.0" ruff = "^0.0.278" mypy = "^1.4.1" pytest = "^7.4.0" +respx = "^0.20.2" +pytest-cov = "^4.1.0" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + +[tool.ruff] +# Enable Pyflakes `E` and `F` codes by default. 
+select = ["E", "F", "W", "N", "UP", "B", "I", "PL", "RUF"] +ignore = [ + "E501", + "PLR2004", + "PLR0913", + "PLW0603", + "N805", + "N818" +] +line-length = 88 +target-version = "py38" +extend-exclude = [] + +[tool.ruff.isort] +required-imports = ["from __future__ import annotations"] diff --git a/tests/test_misc.py b/tests/test_misc.py new file mode 100644 index 0000000..be20dfb --- /dev/null +++ b/tests/test_misc.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import os +from unittest import mock + +import pytest + +from distributed_lock import DEFAULT_CLUSTER, BadConfigurationError +from distributed_lock.sync import ( + get_cluster, + get_tenant_id, + get_token, +) + + +@mock.patch.dict(os.environ, {"DLOCK_CLUSTER": "foo"}, clear=True) +def test_get_cluster(): + assert get_cluster() == "foo" + + +@mock.patch.dict(os.environ, {}, clear=True) +def test_get_cluster2(): + assert get_cluster() == DEFAULT_CLUSTER + + +@mock.patch.dict(os.environ, {"DLOCK_TOKEN": "foo"}, clear=True) +def test_get_token(): + assert get_token() == "foo" + + +@mock.patch.dict(os.environ, {}, clear=True) +def test_get_token2(): + with pytest.raises(BadConfigurationError): + get_token() + + +@mock.patch.dict(os.environ, {"DLOCK_TENANT_ID": "foo"}, clear=True) +def test_get_tenant_id(): + assert get_tenant_id() == "foo" + + +@mock.patch.dict(os.environ, {}, clear=True) +def test_get_tenant_id2(): + with pytest.raises(BadConfigurationError): + get_tenant_id() diff --git a/tests/test_sync.py b/tests/test_sync.py new file mode 100644 index 0000000..2df47db --- /dev/null +++ b/tests/test_sync.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +import json +import os +from unittest import mock + +import httpx +import pytest + +from distributed_lock import ( + DEFAULT_LIFETIME, + DEFAULT_WAIT, + AcquiredRessource, + DistributedLockClient, + NotAcquiredException, + NotReleasedError, +) + +MOCKED_ENVIRON = { + "DLOCK_CLUSTER": "cluster", + "DLOCK_TENANT_ID": "tenant_id", + 
"DLOCK_TOKEN": "token", +} +AR = AcquiredRessource(lock_id="1234", resource="bar") + + +@mock.patch.dict(os.environ, MOCKED_ENVIRON, clear=True) +def test_resource_url(): + x = DistributedLockClient() + assert ( + x.get_resource_url("bar") + == "https://cluster.distributed-lock.com/exclusive_locks/tenant_id/bar" + ) + + +@mock.patch.dict(os.environ, MOCKED_ENVIRON, clear=True) +@pytest.mark.respx(base_url="https://cluster.distributed-lock.com") +def test_acquire(respx_mock): + respx_mock.post("/exclusive_locks/tenant_id/bar").mock( + return_value=httpx.Response(201, json=AR.to_dict()) + ) + x = DistributedLockClient() + ar = x.acquire_exclusive_lock("bar") + assert len(respx_mock.calls) == 1 + body = json.loads(respx_mock.calls.last.request.content.decode("utf8")) + assert body["wait"] == DEFAULT_WAIT + assert body["lifetime"] == DEFAULT_LIFETIME + headers = respx_mock.calls.last.request.headers + assert headers["host"] == "cluster.distributed-lock.com" + assert headers["authorization"] == "Bearer token" + assert headers["content-type"] == "application/json" + assert ar is not None + assert ar.lock_id == AR.lock_id + assert ar.resource == AR.resource + + +@mock.patch.dict(os.environ, MOCKED_ENVIRON, clear=True) +@pytest.mark.respx(base_url="https://cluster.distributed-lock.com") +def test_not_acquired(respx_mock): + respx_mock.post("/exclusive_locks/tenant_id/bar").mock( + return_value=httpx.Response(409) + ) + x = DistributedLockClient() + with pytest.raises(NotAcquiredException): + x.acquire_exclusive_lock("bar", wait=3) + assert len(respx_mock.calls) > 1 + + +@mock.patch.dict(os.environ, MOCKED_ENVIRON, clear=True) +@pytest.mark.respx(base_url="https://cluster.distributed-lock.com") +def test_release(respx_mock): + respx_mock.delete("/exclusive_locks/tenant_id/bar/1234").mock( + return_value=httpx.Response(204) + ) + x = DistributedLockClient() + x.release_exclusive_lock("bar", "1234") + assert len(respx_mock.calls) == 1 + headers = 
respx_mock.calls.last.request.headers + assert headers["host"] == "cluster.distributed-lock.com" + assert headers["authorization"] == "Bearer token" + + +@mock.patch.dict(os.environ, MOCKED_ENVIRON, clear=True) +@pytest.mark.respx(base_url="https://cluster.distributed-lock.com") +def test_not_released(respx_mock): + respx_mock.delete("/exclusive_locks/tenant_id/bar/1234").mock( + return_value=httpx.Response(500) + ) + x = DistributedLockClient() + with pytest.raises(NotReleasedError): + x.release_exclusive_lock("bar", lock_id="1234", wait=3) + assert len(respx_mock.calls) > 1 + headers = respx_mock.calls.last.request.headers + assert headers["host"] == "cluster.distributed-lock.com" + assert headers["authorization"] == "Bearer token" + + +@mock.patch.dict(os.environ, MOCKED_ENVIRON, clear=True) +@pytest.mark.respx(base_url="https://cluster.distributed-lock.com") +def test_context_manager(respx_mock): + respx_mock.post("/exclusive_locks/tenant_id/bar").mock( + return_value=httpx.Response(201, json=AR.to_dict()) + ) + respx_mock.delete("/exclusive_locks/tenant_id/bar/1234").mock( + return_value=httpx.Response(204) + ) + x = DistributedLockClient() + with x.exclusive_lock("bar"): + pass + assert len(respx_mock.calls) == 2