diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b25c1b0..0a600bd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -9,15 +9,15 @@ env:
   POETRY_CACHE: /opt/poetry_cache
 
 jobs:
-  lint:
+  lint_and_test:
     runs-on: ubuntu-22.04
     steps:
       - name: Check out repository code
         uses: actions/checkout@v3
-      - name: Set up Python 3.9
+      - name: Set up Python 3.8
        uses: actions/setup-python@v4
        with:
-          python-version: 3.9
+          python-version: 3.8
      - name: Cache deps install
        id: cache-deps
        uses: actions/cache@v3
@@ -53,3 +53,36 @@ jobs:
       - name: Run test
        run: |
          poetry run make test
+      - name: Make apidocs
+        run: |
+          poetry run make apidoc
+      - name: Upload API doc as an artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: apidoc
+          path: apihtml/
+
+  githubpages:
+    runs-on: ubuntu-latest
+    needs: lint_and_test
+    #permissions:
+    #  contents: read
+    #  pages: write
+    #  id-token: write
+    concurrency:
+      group: "pages"
+      cancel-in-progress: true
+    if: github.ref == 'refs/heads/main' && github.event_name == 'push'
+    steps:
+      - name: Download API doc artifact
+        uses: actions/download-artifact@v3
+        with:
+          name: apidoc
+      - name: Upload artifact
+        uses: actions/upload-pages-artifact@v1
+        with:
+          # Upload entire repository
+          path: '.'
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v1
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 8e25ea7..a8cfdcf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -159,3 +159,4 @@ cython_debug/
 #  option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
 /.vscode
+/apihtml
diff --git a/Makefile b/Makefile
index f2b97dd..ba4d554 100644
--- a/Makefile
+++ b/Makefile
@@ -32,3 +32,17 @@ else
 	pytest --no-cov-on-fail --cov=distributed_lock --cov-report=term --cov-report=html --cov-report=xml tests
 endif
 
+.PHONY: apidoc
+apidoc:
+	@rm -Rf apihtml
+	pdoc -d google -o apihtml distributed_lock
+
+.PHONY: clean
+clean:
+	rm -Rf apihtml htmlcov
+	rm -Rf .mypy_cache .ruff_cache .pytest_cache
+	find . -type d -name __pycache__ -exec rm -Rf {} \; 2>/dev/null || true
+
+.PHONY: bump_version
+bump_version:
+	python ./bump_version.py
\ No newline at end of file
diff --git a/bump_version.py b/bump_version.py
new file mode 100644
index 0000000..c52950a
--- /dev/null
+++ b/bump_version.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+import os
+
+from dunamai import Style, Version
+
+version = Version.from_git().serialize(style=Style.SemVer)
+
+with open("distributed_lock/__init__.py") as f:
+    c = f.read()
+
+lines = []
+for line in c.splitlines():
+    if line.startswith("VERSION = "):
+        lines.append(f'VERSION = "{version}"')
+    else:
+        lines.append(line)
+
+with open("distributed_lock/__init__.py", "w") as g:
+    g.write("\n".join(lines))
+
+print(f"Setting version={version}")
+os.system(f"poetry version {version}")
diff --git a/distributed_lock/__init__.py b/distributed_lock/__init__.py
index 1671f72..ddca68b 100644
--- a/distributed_lock/__init__.py
+++ b/distributed_lock/__init__.py
@@ -1,29 +1,31 @@
 from __future__ import annotations
 
-from distributed_lock.const import DEFAULT_CLUSTER, DEFAULT_LIFETIME, DEFAULT_WAIT
+from distributed_lock.common import AcquiredRessource
+from distributed_lock.const import (
+    DEFAULT_CLUSTER,
+    DEFAULT_LIFETIME,
+    DEFAULT_SERVER_SIDE_WAIT,
+)
 from distributed_lock.exception import (
     BadConfigurationError,
-    DistributedLockError,
-    DistributedLockException,
     NotAcquiredError,
     NotAcquiredException,
     NotReleasedError,
-    NotReleasedException,
 )
-from distributed_lock.sync import AcquiredRessource, DistributedLockClient
+from distributed_lock.sync import DistributedLockClient
 
 __all__ = [
     "DEFAULT_CLUSTER",
     "DEFAULT_LIFETIME",
-    "DEFAULT_WAIT",
+    "DEFAULT_SERVER_SIDE_WAIT",
     "AcquiredRessource",
     "DistributedLockClient",
-    "DistributedlockException",
     "NotAcquiredError",
-    "NotReleasedException",
     "NotReleasedError",
     "NotAcquiredException",
     "BadConfigurationError",
-    "DistributedLockError",
-    "DistributedLockException",
 ]
+
+__pdoc__ = {"sync": False, "exception": False, "common": False}
+
+VERSION = "v0.0.0"
diff --git a/distributed_lock/common.py b/distributed_lock/common.py
new file mode 100644
index 0000000..5064fd3
--- /dev/null
+++ b/distributed_lock/common.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import datetime
+import os
+from dataclasses import asdict, dataclass, field
+from typing import Any
+
+from distributed_lock.const import DEFAULT_CLUSTER
+from distributed_lock.exception import BadConfigurationError, DistributedLockError
+
+
+@dataclass
+class AcquiredRessource:
+    """This dataclass holds an acquired resource."""
+
+    resource: str
+    """The resource name."""
+
+    lock_id: str
+    """The lock unique identifier (you will need it to unlock)."""
+
+    tenant_id: str
+    """The tenant identifier."""
+
+    created: datetime.datetime = field(default_factory=datetime.datetime.utcnow)
+    """The lock creation datetime."""
+
+    expires: datetime.datetime = field(default_factory=datetime.datetime.utcnow)
+    """The lock expiration datetime."""
+
+    user_agent: str = ""
+    """Your user-agent (warning: not supported in all plans)."""
+
+    user_data: Any = ""
+    """User data stored with the lock (warning: not supported in all plans)."""
+
+    @classmethod
+    def from_dict(cls, d: dict) -> AcquiredRessource:
+        """Create an AcquiredRessource from a dict."""
+        for f in (
+            "lock_id",
+            "resource",
+            "tenant_id",
+            "created",
+            "expires",
+            "user_agent",
+            "user_data",
+        ):
+            if f not in d:
+                raise DistributedLockError(f"bad reply from service, missing {f}")
+        d2 = dict(d)
+        for f in ("created", "expires"):
+            if isinstance(d2[f], str):
+                d2[f] = datetime.datetime.fromisoformat(d2[f])
+        return cls(**d2)
+
+    def to_dict(self) -> dict:
+        """Convert an AcquiredRessource to a dict."""
+        d = asdict(self)
+        for f in ("created", "expires"):
+            d[f] = d[f].isoformat()[0:19] + "Z"
+        return d
+
+
+def get_cluster() -> str:
+    """Get the target cluster from env or from default."""
+    if os.environ.get("DLOCK_CLUSTER"):
+        return os.environ["DLOCK_CLUSTER"].lower().strip()
+    return DEFAULT_CLUSTER
+
+
+def get_token() -> str:
+    """Get the service token from env (raise an exception if not set).
+
+    Raises:
+        BadConfigurationError: if the token is not set.
+    """
+    if os.environ.get("DLOCK_TOKEN"):
+        return os.environ["DLOCK_TOKEN"].lower().strip()
+    raise BadConfigurationError("You must provide a token (or set DLOCK_TOKEN env var)")
+
+
+def get_tenant_id() -> str:
+    """Get the "tenant id" from env (raise an exception if not set).
+
+    Raises:
+        BadConfigurationError: if the "tenant id" is not set.
+    """
+    if os.environ.get("DLOCK_TENANT_ID"):
+        return os.environ["DLOCK_TENANT_ID"].lower().strip()
+    raise BadConfigurationError(
+        "You must provide a tenant_id (or set DLOCK_TENANT_ID env var)"
+    )
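For reference, a minimal round-trip through the new `AcquiredRessource` helpers added above could look like this. It is only a sketch: the payload values are made up, but the `created`/`expires` handling mirrors `from_dict()`/`to_dict()` as implemented in common.py:

```python
import datetime

from distributed_lock import AcquiredRessource

# Hypothetical reply payload, shaped like what the lock service returns.
payload = {
    "resource": "my-resource",
    "lock_id": "0123456789abcdef",
    "tenant_id": "my-tenant",
    "created": "2023-08-01T12:00:00",
    "expires": "2023-08-01T13:00:00",
    "user_agent": "",
    "user_data": "",
}

ar = AcquiredRessource.from_dict(payload)
assert isinstance(ar.created, datetime.datetime)  # ISO strings are parsed back to datetimes

d = ar.to_dict()
assert d["expires"] == "2023-08-01T13:00:00Z"  # re-serialized as a truncated ISO string + "Z"
```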
diff --git a/distributed_lock/const.py b/distributed_lock/const.py
index 5d166b2..1687741 100644
--- a/distributed_lock/const.py
+++ b/distributed_lock/const.py
@@ -1,5 +1,10 @@
 from __future__ import annotations
 
 DEFAULT_CLUSTER = "europe-free"
+"""Default cluster to request."""
+
 DEFAULT_LIFETIME = 3600
-DEFAULT_WAIT = 10
+"""Default lock lifetime (in seconds)."""
+
+DEFAULT_SERVER_SIDE_WAIT = 60
+"""Default server side wait (in seconds)."""
diff --git a/distributed_lock/exception.py b/distributed_lock/exception.py
index 482edaf..3f68c92 100644
--- a/distributed_lock/exception.py
+++ b/distributed_lock/exception.py
@@ -2,28 +2,36 @@
 
 
 class DistributedLockException(Exception):
+    """Base class for lock exceptions."""
+
     pass
 
 
-class DistributedLockError(DistributedLockException):
+class DistributedLockError(Exception):
+    """Base class for lock errors."""
+
     pass
 
 
 class BadConfigurationError(DistributedLockError):
+    """Bad configuration."""
+
     pass
 
 
 class NotAcquiredException(DistributedLockException):
-    pass
+    """Not acquired because the lock is still held by someone else."""
 
-
-class NotReleasedException(DistributedLockException):
     pass
 
 
 class NotReleasedError(DistributedLockError):
+    """Lock not released because errors occurred during the release."""
+
     pass
 
 
 class NotAcquiredError(DistributedLockError):
+    """Not acquired because errors occurred during the lock acquisition."""
+
     pass
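Note that `DistributedLockError` now derives directly from `Exception` instead of `DistributedLockException`, so callers that previously caught the single common base need to catch both branches. A minimal sketch, assuming the client is configured through the `DLOCK_*` environment variables (the resource name and wait value are illustrative):

```python
from distributed_lock import DistributedLockClient
from distributed_lock.exception import DistributedLockError, DistributedLockException

client = DistributedLockClient()  # cluster/token/tenant_id read from DLOCK_* env vars

try:
    ar = client.acquire_exclusive_lock("my-resource", wait=5)
except DistributedLockException:
    # "functional" failures, e.g. NotAcquiredException (lock still held by someone else)
    ...
except DistributedLockError:
    # "technical" failures, e.g. NotAcquiredError (timeouts, HTTP errors, ...)
    ...
```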
diff --git a/distributed_lock/py.typed b/distributed_lock/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/distributed_lock/sync.py b/distributed_lock/sync.py
index e5a8f5e..d162604 100644
--- a/distributed_lock/sync.py
+++ b/distributed_lock/sync.py
@@ -3,17 +3,21 @@
 import datetime
 import functools
 import logging
-import os
 import time
 from contextlib import contextmanager
-from dataclasses import asdict, dataclass, field
+from dataclasses import dataclass, field
 from typing import Any
 
 import httpx
 
-from distributed_lock.const import DEFAULT_CLUSTER, DEFAULT_LIFETIME, DEFAULT_WAIT
+from distributed_lock.common import (
+    AcquiredRessource,
+    get_cluster,
+    get_tenant_id,
+    get_token,
+)
+from distributed_lock.const import DEFAULT_LIFETIME, DEFAULT_SERVER_SIDE_WAIT
 from distributed_lock.exception import (
-    BadConfigurationError,
     DistributedLockError,
     DistributedLockException,
     NotAcquiredError,
@@ -24,45 +28,25 @@
 logger = logging.getLogger("distributed-lock.sync")
 
 
-def get_cluster() -> str:
-    if os.environ.get("DLOCK_CLUSTER"):
-        return os.environ["DLOCK_CLUSTER"].lower().strip()
-    return DEFAULT_CLUSTER
-
-
-def get_token() -> str:
-    if os.environ.get("DLOCK_TOKEN"):
-        return os.environ["DLOCK_TOKEN"].lower().strip()
-    raise BadConfigurationError("You must provide a token (or set DLOCK_TOKEN env var)")
-
-
-def get_tenant_id() -> str:
-    if os.environ.get("DLOCK_TENANT_ID"):
-        return os.environ["DLOCK_TENANT_ID"].lower().strip()
-    raise BadConfigurationError(
-        "You must provide a tenant_id (or set DLOCK_TENANT_ID env var)"
-    )
-
-
 def make_httpx_client() -> httpx.Client:
     timeout = httpx.Timeout(connect=10.0, read=65.0, write=10.0, pool=10.0)
     return httpx.Client(timeout=timeout)
 
 
-def with_retry(service_wait: bool = False):
+def with_retry(server_side_wait: bool = False):
     def decorator(func):
         @functools.wraps(func)
         def wrapper(self, *args, **kwargs):
-            wait = kwargs.get("wait", DEFAULT_WAIT)
+            wait = kwargs.get("wait", DEFAULT_SERVER_SIDE_WAIT)
             automatic_retry = kwargs.get("automatic_retry", True)
             sleep_after_failure = kwargs.get("sleep_after_failure", 1.0)
-            _forced_service_wait: float | None = None
+            _forced_server_side_wait: float | None = None
             before = datetime.datetime.utcnow()
             while True:
                 catched_exception: Exception | None = None
                 try:
-                    if _forced_service_wait is not None:
-                        kwargs["_forced_service_wait"] = _forced_service_wait
+                    if _forced_server_side_wait is not None:
+                        kwargs["_forced_server_side_wait"] = _forced_server_side_wait
                     return func(self, *args, **kwargs)
                 except DistributedLockError as e:
                     if not automatic_retry:
@@ -75,9 +59,9 @@ def wrapper(self, *args, **kwargs):
                     raise catched_exception
                 logger.debug(f"wait {sleep_after_failure}s...")
                 time.sleep(sleep_after_failure)
-                if service_wait:
-                    if elapsed + sleep_after_failure + self.service_wait > wait:
-                        _forced_service_wait = max(
+                if server_side_wait:
+                    if elapsed + sleep_after_failure + self.server_side_wait > wait:
+                        _forced_server_side_wait = max(
                             int(wait - elapsed - sleep_after_failure), 1
                         )
@@ -86,59 +70,55 @@ def wrapper(self, *args, **kwargs):
     return decorator
 
 
-@dataclass
-class AcquiredRessource:
-    resource: str
-    lock_id: str
-    tenant_id: str
-    created: datetime.datetime = field(default_factory=datetime.datetime.utcnow)
-    expires: datetime.datetime = field(default_factory=datetime.datetime.utcnow)
-    user_agent: str = ""
-    user_data: Any = ""
-
-    @classmethod
-    def from_dict(cls, d: dict) -> AcquiredRessource:
-        for f in (
-            "lock_id",
-            "resource",
-            "tenant_id",
-            "created",
-            "expires",
-            "user_agent",
-            "user_data",
-        ):
-            if f not in d:
-                raise DistributedLockError(f"bad reply from service, missing {f}")
-        d2 = dict(d)
-        for f in ("created", "expires"):
-            if isinstance(d2[f], str):
-                d2[f] = datetime.datetime.fromisoformat(d2[f])
-        return cls(**d2)
-
-    def to_dict(self) -> dict:
-        d = asdict(self)
-        for f in ("created", "expires"):
-            d[f] = d[f].isoformat()[0:19] + "Z"
-        return d
-
-
 @dataclass
 class DistributedLockClient:
+    """Client object for https://distributed-lock.com service."""
+
     cluster: str = field(default_factory=get_cluster)
+    """The cluster name to request.
+
+    If not set, we will use the `DLOCK_CLUSTER` env var value (if set),
+    else default value (`DEFAULT_CLUSTER`).
+    """
+
     token: str = field(default_factory=get_token)
+    """Your service token.
+
+    If not set, we will use the `DLOCK_TOKEN` env var value (if set).
+    Else, a `BadConfigurationError` will be raised.
+ """ + tenant_id: str = field(default_factory=get_tenant_id) - client: httpx.Client = field(default_factory=make_httpx_client) + """Your tenant id. + + If not set, we will use the `DLOCK_TENANT_ID` env var value (if set). + Else, a `BadConfigurationError` will be raised. + """ + user_agent: str | None = None - service_wait: int = DEFAULT_WAIT + """Your 'user-agent'. + + Warning: this is not supported in all plans! + """ + + server_side_wait: int = DEFAULT_SERVER_SIDE_WAIT + """Your "server side maximum wait" in seconds. + + The default value `DEFAULT_SERVER_SIDE_WAIT` is supported by all plans. + If you pay for a better service, put your maximum supported value here. + """ + + _client: httpx.Client = field(default_factory=make_httpx_client) def get_resource_url(self, resource: str) -> str: + """Return the full url of the given resource.""" return f"https://{self.cluster}.distributed-lock.com/exclusive_locks/{self.tenant_id}/{resource}" - def get_headers(self) -> dict[str, str]: + def _get_headers(self) -> dict[str, str]: return {"Authorization": f"Bearer {self.token}"} def __del__(self): - self.client.close() + self._client.close() def _request( self, @@ -150,7 +130,7 @@ def _request( exception_class, ): try: - r = self.client.request(method, url, json=body, headers=headers) + r = self._client.request(method, url, json=body, headers=headers) except httpx.ConnectTimeout as e: raise error_class("timeout during connect") from e except httpx.ReadTimeout as e: @@ -172,7 +152,7 @@ def _request( raise error_class("got an HTTP/403 Forbidden with no detail") from None elif r.status_code == 429: try: - logger.warning( + raise error_class( f"got a HTTP/429 Rate limited error with message: {r.json()['message']}" ) except Exception: @@ -185,13 +165,13 @@ def _acquire( self, resource: str, lifetime: int = DEFAULT_LIFETIME, - user_data: str | None = None, - forced_service_wait: float | None = None, + user_data: dict | list | str | float | int | bool | None = None, + server_side_wait: float | None = None, ) -> AcquiredRessource: body: dict[str, Any] = { - "wait": forced_service_wait - if forced_service_wait is not None - else self.service_wait, + "wait": min( + server_side_wait or self.server_side_wait, self.server_side_wait + ), "lifetime": lifetime, } if self.user_agent: @@ -203,7 +183,7 @@ def _acquire( r = self._request( "POST", url, - headers=self.get_headers(), + headers=self._get_headers(), body=body, error_class=NotAcquiredError, exception_class=NotAcquiredException, @@ -212,23 +192,59 @@ def _acquire( logger.info(f"Lock on {resource} acquired") return AcquiredRessource.from_dict(d) - @with_retry(service_wait=True) + @with_retry(server_side_wait=True) def acquire_exclusive_lock( self, resource: str, *, lifetime: int = DEFAULT_LIFETIME, - wait: int = DEFAULT_WAIT, - user_data: str | None = None, + wait: int = DEFAULT_SERVER_SIDE_WAIT, + user_data: dict | list | str | float | int | bool | None = None, automatic_retry: bool = True, sleep_after_failure: float = 1.0, - _forced_service_wait: float | None = None, + _forced_server_side_wait: float | None = None, ) -> AcquiredRessource: + """Acquire an exclusive lock on the given resource. 
+
+        Notes:
+            - the wait parameter is implemented as a mix of:
+                - server side wait (without polling) thanks to the server_side_wait property
+                - client side wait (with multiple calls if automatic_retry=True, the default)
+            - the most performant way to configure this is:
+                - to set server_side_wait (when creating the DistributedLockClient object)
+                  to the highest value allowed by your plan
+                - use the wait parameter here at the value of your choice
+
+        Args:
+            resource: the resource name to acquire.
+            lifetime: the lock max lifetime (in seconds).
+            wait: the maximum wait (in seconds) for acquiring the lock.
+            user_data: user data to store with the lock (warning: not allowed with
+                all plans).
+            automatic_retry: if set to True, retry the operation multiple times (when it
+                fails because of errors or because the lock is already held by someone
+                else) until the maximum wait delay is reached.
+            sleep_after_failure: when doing multiple client side retries, sleep this
+                number of seconds before retrying.
+            _forced_server_side_wait: don't use it (it's for internal use only).
+
+        Returns:
+            An `AcquiredRessource` object. Note: you will need the lock_id field
+            of this object to call `release_exclusive_lock()`.
+
+        Raises:
+            NotAcquiredException: Can't acquire the lock (even after the wait time
+                and after automatic retries) because it's still held by someone else.
+            NotAcquiredError: Can't acquire the lock (even after the wait time
+                and after automatic retries) because some other errors were raised.
+
+        """
         return self._acquire(
             resource=resource,
             lifetime=lifetime,
             user_data=user_data,
-            forced_service_wait=_forced_service_wait,
+            server_side_wait=_forced_server_side_wait,
         )
 
     def _release(self, resource: str, lock_id: str):
@@ -237,22 +253,44 @@ def _release(self, resource: str, lock_id: str):
         self._request(
             "DELETE",
             url,
-            headers=self.get_headers(),
+            headers=self._get_headers(),
             body=None,
             error_class=NotReleasedError,
             exception_class=NotReleasedError,
         )
 
-    @with_retry()
+    @with_retry(server_side_wait=False)
     def release_exclusive_lock(
         self,
         resource: str,
         lock_id: str,
         *,
-        wait: int = 30,
+        wait: int = 10,
         automatic_retry: bool = True,
         sleep_after_failure: float = 1.0,
     ):
+        """Release an exclusive lock on the given resource.
+
+        Notes:
+            - the wait parameter is only a "client side wait"
+              (with multiple calls if automatic_retry=True, the default).
+
+        Args:
+            resource: the resource name to release.
+            lock_id: the lock unique identifier (a field of the `AcquiredRessource`
+                object returned when the lock was acquired).
+            wait: the maximum wait (in seconds) for releasing the lock.
+            automatic_retry: if set to True, retry the operation multiple times (when it
+                fails because of errors) until the maximum wait delay is reached.
+            sleep_after_failure: when doing multiple client side retries, sleep this
+                number of seconds before retrying.
+
+        Raises:
+            NotReleasedError: Can't release the lock (even after the wait time
+                and after automatic retries) because some errors were raised.
+
+        """
         return self._release(resource=resource, lock_id=lock_id)
 
     @contextmanager
@@ -260,7 +298,7 @@ def exclusive_lock(
         self,
         resource: str,
         lifetime: int = DEFAULT_LIFETIME,
-        wait: int = DEFAULT_WAIT,
+        wait: int = DEFAULT_SERVER_SIDE_WAIT,
         user_data: str | None = None,
         automatic_retry: bool = True,
         sleep_after_failure: float = 1.0,
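Putting the reworked client together, typical usage could look like the sketch below. Resource names, lifetimes and waits are illustrative; `DLOCK_CLUSTER`, `DLOCK_TOKEN` and `DLOCK_TENANT_ID` come from the environment, and the context-manager form is assumed to release the lock on exit (the body of `exclusive_lock()` is not shown in this diff):

```python
from distributed_lock import DistributedLockClient
from distributed_lock.exception import NotAcquiredException

# DEFAULT_SERVER_SIDE_WAIT (60 s) is accepted by all plans; raise it only if your plan allows it.
client = DistributedLockClient(server_side_wait=60)

# Context-manager style.
with client.exclusive_lock("reporting-batch", lifetime=300, wait=20):
    ...  # critical section

# Explicit style: keep the lock_id returned by the acquire call for the release call.
try:
    ar = client.acquire_exclusive_lock("reporting-batch", lifetime=300, wait=20)
except NotAcquiredException:
    ...  # still held by someone else after ~20 s
else:
    try:
        ...  # critical section
    finally:
        client.release_exclusive_lock("reporting-batch", ar.lock_id)
```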
+ + """ return self._release(resource=resource, lock_id=lock_id) @contextmanager @@ -260,7 +298,7 @@ def exclusive_lock( self, resource: str, lifetime: int = DEFAULT_LIFETIME, - wait: int = DEFAULT_WAIT, + wait: int = DEFAULT_SERVER_SIDE_WAIT, user_data: str | None = None, automatic_retry: bool = True, sleep_after_failure: float = 1.0, diff --git a/poetry.lock b/poetry.lock index 66fc204..509dc2e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -22,11 +21,25 @@ doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd- test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] +[[package]] +name = "astunparse" +version = "1.6.3" +description = "An AST unparser for Python" +optional = false +python-versions = "*" +files = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] + +[package.dependencies] +six = ">=1.6.1,<2.0" +wheel = ">=0.23.0,<1.0" + [[package]] name = "black" version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -73,7 +86,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -85,7 +97,6 @@ files = [ name = "click" version = "8.1.6" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -100,7 +111,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -112,7 +122,6 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -184,11 +193,24 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "dunamai" +version = "1.18.0" +description = "Dynamic version generation" +optional = false +python-versions = ">=3.5,<4.0" +files = [ + {file = "dunamai-1.18.0-py3-none-any.whl", hash = "sha256:f9284a9f4048f0b809d11539896e78bde94c05b091b966a04a44ab4c48df03ce"}, + {file = "dunamai-1.18.0.tar.gz", hash = "sha256:5200598561ea5ba956a6174c36e402e92206c6a6aa4a93a6c5cb8003ee1e0997"}, +] + +[package.dependencies] +packaging = ">=20.9" + [[package]] name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -203,7 +225,6 @@ test = ["pytest (>=6)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -215,7 +236,6 @@ files = [ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -227,17 +247,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.24.1" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -253,15 +272,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -273,7 +291,6 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -281,11 +298,86 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + [[package]] name = "mypy" version = "1.4.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -332,7 +424,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -344,7 +435,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -356,7 +446,6 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -364,11 +453,30 @@ files = [ {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] +[[package]] +name = "pdoc" +version = "14.0.0" +description = "API Documentation for Python Projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pdoc-14.0.0-py3-none-any.whl", hash = "sha256:4514041ff5da33f1adbc700002a661600fc13a9adadef317bc6ae8be9e61154b"}, + {file = "pdoc-14.0.0.tar.gz", hash = "sha256:ad6c16c949e5dd8b30effc5398aedb5779ffe8ab94be91ce2cddc320e8127900"}, +] + +[package.dependencies] +astunparse = {version = "*", markers = "python_version < \"3.9\""} +Jinja2 = ">=2.11.0" +MarkupSafe = "*" +pygments = ">=2.12.0" + +[package.extras] +dev = ["black", "hypothesis", "mypy", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] + [[package]] name = "platformdirs" version = "3.9.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -384,7 +492,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -396,11 +503,24 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -423,7 +543,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -442,7 +561,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "respx" version = "0.20.2" description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -457,7 +575,6 @@ httpx = ">=0.21.0" name = "ruff" version = "0.0.278" description = "An extremely fast Python linter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -480,11 +597,21 @@ files = [ {file = "ruff-0.0.278.tar.gz", hash = "sha256:1a9f1d925204cfba81b18368b7ac943befcfccc3a41e170c91353b674c6b7a66"}, ] +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -496,7 +623,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -508,7 +634,6 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -516,7 +641,21 @@ files = [ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] +[[package]] +name = "wheel" +version = "0.41.1" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.41.1-py3-none-any.whl", hash = "sha256:473219bd4cbedc62cea0cb309089b593e47c15c4a2531015f94e4e3b9a0f6981"}, + {file = "wheel-0.41.1.tar.gz", hash = "sha256:12b911f083e876e10c595779709f8a88a59f45aacc646492a67fe9ef796c1b47"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "cb67194898fedf4189a44e4c35c091560b6edb5dd7acb8cad7089b2e17855a43" +content-hash = "9e8938954301afab1a7ac183c4b5db42ca34b0411b41058b09f6aa347c1b001f" diff --git a/pyproject.toml b/pyproject.toml index 72119ce..3645211 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,7 @@ packages = [{include = "distributed_lock"}] [tool.poetry.dependencies] python = "^3.8" httpx = 
"^0.24.1" +pdoc = "^14.0.0" [tool.poetry.group.dev.dependencies] @@ -19,6 +20,7 @@ mypy = "^1.4.1" pytest = "^7.4.0" respx = "^0.20.2" pytest-cov = "^4.1.0" +dunamai = "^1.18.0" [build-system] requires = ["poetry-core"] diff --git a/tests/test_misc.py b/tests/test_misc.py index be20dfb..311ded1 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -6,7 +6,7 @@ import pytest from distributed_lock import DEFAULT_CLUSTER, BadConfigurationError -from distributed_lock.sync import ( +from distributed_lock.common import ( get_cluster, get_tenant_id, get_token, diff --git a/tests/test_sync.py b/tests/test_sync.py index ad693b0..a47e47e 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -9,7 +9,7 @@ from distributed_lock import ( DEFAULT_LIFETIME, - DEFAULT_WAIT, + DEFAULT_SERVER_SIDE_WAIT, AcquiredRessource, DistributedLockClient, NotAcquiredException, @@ -49,7 +49,7 @@ def test_acquire(respx_mock): ar = x.acquire_exclusive_lock("bar") assert len(respx_mock.calls) == 1 body = json.loads(respx_mock.calls.last.request.content.decode("utf8")) - assert body["wait"] == DEFAULT_WAIT + assert body["wait"] == DEFAULT_SERVER_SIDE_WAIT assert body["lifetime"] == DEFAULT_LIFETIME headers = respx_mock.calls.last.request.headers assert headers["host"] == "cluster.distributed-lock.com"