Skip to content

Commit

Permalink
Vendor broadcaster package (#579)
Browse files Browse the repository at this point in the history
  • Loading branch information
Mark90 authored Mar 21, 2024
1 parent d3d4574 commit e4b00fa
Show file tree
Hide file tree
Showing 43 changed files with 1,169 additions and 9 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/run-unit-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ jobs:
env:
FLIT_ROOT_INSTALL: 1
- name: Run Unit tests
run: CACHE_URI=redis://redis DATABASE_URI=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST/$POSTGRES_DB pytest --cov-branch --cov=orchestrator --cov-report=xml --ignore=test --ignore=orchestrator/devtools --ignore=examples --ignore=docs
run: CACHE_URI=redis://redis DATABASE_URI=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST/$POSTGRES_DB pytest --cov-branch --cov=orchestrator --cov-report=xml --ignore=test --ignore=orchestrator/devtools --ignore=examples --ignore=docs --ignore=orchestrator/vendor
env:
POSTGRES_DB: orchestrator-core-test
POSTGRES_USER: nwa
Expand Down
3 changes: 1 addition & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
default_language_version:
python: python3
exclude:
^test/unit_tests/cli/data/generate/.*\.py
exclude: ^test/unit_tests/cli/data/generate/.*\.py|orchestrator/vendor.*
repos:
- repo: https://github.com/psf/black
rev: 24.1.1
Expand Down
5 changes: 5 additions & 0 deletions orchestrator/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,11 @@

__version__ = "2.1.2rc3"

import pathlib
import site

# Make the vendored broadcaster package importable as a top-level
# "broadcaster" module by adding orchestrator/vendor/broadcaster to
# sys.path. Done here, before any orchestrator submodule imports, so
# that anything importing `broadcaster` picks up the vendored copy.
site.addsitedir(str(pathlib.Path(__file__).resolve().parent / "vendor/broadcaster"))

from orchestrator.app import OrchestratorCore
from orchestrator.settings import app_settings, oauth2_settings
from orchestrator.workflow import begin, conditional, done, focussteps, inputstep, retrystep, step, steplens, workflow
Expand Down
1 change: 1 addition & 0 deletions orchestrator/vendor/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Libraries temporarily vendored into the codebase
Empty file added orchestrator/vendor/__init__.py
Empty file.
1 change: 1 addition & 0 deletions orchestrator/vendor/broadcaster/.github/FUNDING.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
github: encode
29 changes: 29 additions & 0 deletions orchestrator/vendor/broadcaster/.github/workflows/publish.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
name: Publish

on:
push:
tags:
- "*"

jobs:
publish:
name: "Publish release"
runs-on: "ubuntu-latest"

steps:
- uses: "actions/checkout@v3"
- uses: "actions/setup-python@v4"
with:
python-version: "3.10"

- name: "Install dependencies"
run: "scripts/install"

- name: "Build package & docs"
run: "scripts/build"

- name: "Publish to PyPI & deploy docs"
run: "scripts/publish"
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
69 changes: 69 additions & 0 deletions orchestrator/vendor/broadcaster/.github/workflows/test-suite.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
---
name: Test Suite

on:
push:
branches: ["master"]
pull_request:
branches: ["master"]

jobs:
tests:
name: "Python ${{ matrix.python-version }}"
runs-on: "ubuntu-latest"

strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11-dev"]

services:
zookeeper:
image: confluentinc/cp-zookeeper
ports:
- 32181:32181
env:
ZOOKEEPER_CLIENT_PORT: 32181
ALLOW_ANONYMOUS_LOGIN: yes
options: --hostname zookeeper
kafka:
image: confluentinc/cp-kafka
ports:
- 9092:9092
- 29092:29092
env:
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:32181"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT"
KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT_HOST://localhost:29092,PLAINTEXT://localhost:9092"
KAFKA_BROKER_ID: 1
ALLOW_PLAINTEXT_LISTENER: yes
options: --hostname kafka
redis:
image: redis:alpine
ports:
- 6379:6379
postgres:
image: postgres:12
env:
POSTGRES_DB: broadcaster
POSTGRES_PASSWORD: postgres
POSTGRES_HOST_AUTH_METHOD: trust
POSTGRES_USER: postgres
ports:
- 5432:5432

steps:
- uses: "actions/checkout@v2"
- uses: "actions/setup-python@v2"
with:
python-version: "${{ matrix.python-version }}"
- name: "Install dependencies"
run: "scripts/install"
- name: "Run linting checks"
run: "scripts/check"
- name: "Build package & docs"
run: "scripts/build"
- name: "Run tests"
run: "scripts/test"
- name: "Enforce coverage"
run: "scripts/coverage"
9 changes: 9 additions & 0 deletions orchestrator/vendor/broadcaster/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
*.pyc
test.db
.coverage
.pytest_cache/
.mypy_cache/
*.egg-info/
venv/
build/
dist/
27 changes: 27 additions & 0 deletions orchestrator/vendor/broadcaster/LICENSE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/).
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
118 changes: 118 additions & 0 deletions orchestrator/vendor/broadcaster/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
# READ THIS FIRST

This directory is a clone of https://github.com/encode/broadcaster from commit 377b404, taken at 2024-03-21.

The last release of broadcaster is 0.2.0, dating back to 2020.
Since then the master branch contains a bugfix which we need for the `rediss://` backend.

As PyPI does not allow packages to declare direct GitHub dependencies, we vendor broadcaster's source code into this codebase until broadcaster receives a new release.

https://github.com/encode/broadcaster/issues/103

We do not claim ownership of this code, please refer to the included `LICENSE.md` file.

Original README.md continues below.

---

# Broadcaster

Broadcaster helps you develop realtime streaming functionality by providing
a simple broadcast API onto a number of different backend services.

It currently supports [Redis PUB/SUB](https://redis.io/topics/pubsub), [Apache Kafka](https://kafka.apache.org/), and [Postgres LISTEN/NOTIFY](https://www.postgresql.org/docs/current/sql-notify.html), plus a simple in-memory backend, that you can use for local development or during testing.

<img src="https://raw.githubusercontent.com/encode/broadcaster/master/docs/demo.gif" alt='WebSockets Demo'>

Here's a complete example of the backend code for a simple websocket chat app:

**app.py**

```python
# Requires: `starlette`, `uvicorn`, `jinja2`
# Run with `uvicorn example:app`
import anyio
from broadcaster import Broadcast
from starlette.applications import Starlette
from starlette.routing import Route, WebSocketRoute
from starlette.templating import Jinja2Templates


broadcast = Broadcast("redis://localhost:6379")
templates = Jinja2Templates("templates")


async def homepage(request):
template = "index.html"
context = {"request": request}
return templates.TemplateResponse(template, context)


async def chatroom_ws(websocket):
await websocket.accept()

async with anyio.create_task_group() as task_group:
# run until first is complete
async def run_chatroom_ws_receiver() -> None:
await chatroom_ws_receiver(websocket=websocket)
task_group.cancel_scope.cancel()

task_group.start_soon(run_chatroom_ws_receiver)
await chatroom_ws_sender(websocket)


async def chatroom_ws_receiver(websocket):
async for message in websocket.iter_text():
await broadcast.publish(channel="chatroom", message=message)


async def chatroom_ws_sender(websocket):
async with broadcast.subscribe(channel="chatroom") as subscriber:
async for event in subscriber:
await websocket.send_text(event.message)


routes = [
Route("/", homepage),
WebSocketRoute("/", chatroom_ws, name='chatroom_ws'),
]


app = Starlette(
routes=routes, on_startup=[broadcast.connect], on_shutdown=[broadcast.disconnect],
)
```

The HTML template for the front end [is available here](https://github.com/encode/broadcaster/blob/master/example/templates/index.html), and is adapted from [Pieter Noordhuis's PUB/SUB demo](https://gist.github.com/pietern/348262).

## Requirements

Python 3.8+

## Installation

* `pip install broadcaster`
* `pip install broadcaster[redis]`
* `pip install broadcaster[postgres]`
* `pip install broadcaster[kafka]`

## Available backends

* `Broadcast('memory://')`
* `Broadcast("redis://localhost:6379")`
* `Broadcast("postgres://localhost:5432/broadcaster")`
* `Broadcast("kafka://localhost:9092")`

## Where next?

At the moment `broadcaster` is in Alpha, and should be considered a working design document.

The API should be considered subject to change. If you *do* want to use Broadcaster in its current
state, make sure to strictly pin your requirements to `broadcaster==0.2.0`.

To be more capable we'd really want to add some additional backends, provide API support for reading recent event history from persistent stores, and provide a serialization/deserialization API...

* Serialization / deserialization to support broadcasting structured data.
* Backends for Redis Streams, Apache Kafka, and RabbitMQ.
* Add support for `subscribe('chatroom', history=100)` for backends which provide persistence. (Redis Streams, Apache Kafka) This will allow applications to subscribe to channel updates, while also being given an initial window onto the most recent events. We *might* also want to support some basic paging operations, to allow applications to scan back in the event history.
* Support for pattern subscribes in backends that support it.
Empty file.
4 changes: 4 additions & 0 deletions orchestrator/vendor/broadcaster/broadcaster/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from ._base import Broadcast, Event

__version__ = "0.2.0"
__all__ = ["Broadcast", "Event"]
Empty file.
26 changes: 26 additions & 0 deletions orchestrator/vendor/broadcaster/broadcaster/_backends/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from typing import Any

from .._base import Event


class BroadcastBackend:
    """Abstract interface implemented by every pub/sub backend.

    Concrete backends (memory, Redis, Postgres, Kafka) override all of
    these coroutines; each base implementation raises NotImplementedError.
    """

    def __init__(self, url: str) -> None:
        # url: backend connection string, e.g. "redis://localhost:6379".
        raise NotImplementedError()

    async def connect(self) -> None:
        # Establish any client connections needed by the backend.
        raise NotImplementedError()

    async def disconnect(self) -> None:
        # Tear down connections created by connect().
        raise NotImplementedError()

    # NOTE(review): the parameter is named `group` here but `channel` in
    # every concrete subclass — harmless for positional calls, but
    # confusing for keyword callers; worth aligning upstream.
    async def subscribe(self, group: str) -> None:
        raise NotImplementedError()

    async def unsubscribe(self, group: str) -> None:
        raise NotImplementedError()

    async def publish(self, channel: str, message: Any) -> None:
        # Deliver `message` to all subscribers of `channel`.
        raise NotImplementedError()

    async def next_published(self) -> Event:
        # Block until the next event on any subscribed channel arrives.
        raise NotImplementedError()
37 changes: 37 additions & 0 deletions orchestrator/vendor/broadcaster/broadcaster/_backends/kafka.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import typing
from urllib.parse import urlparse

from aiokafka import AIOKafkaConsumer, AIOKafkaProducer

from .._base import Event
from .base import BroadcastBackend


class KafkaBackend(BroadcastBackend):
    """Kafka pub/sub backend built on aiokafka.

    Each subscribed channel maps to a Kafka topic; `_consumer_channels`
    mirrors the consumer's current topic subscription.
    """

    def __init__(self, url: str):
        # "kafka://host:port" -> bootstrap server list for aiokafka.
        self._servers = [urlparse(url).netloc]
        # Channels currently subscribed; kept in sync with the consumer.
        self._consumer_channels: typing.Set = set()

    async def connect(self) -> None:
        """Create and start the producer/consumer pair."""
        self._producer = AIOKafkaProducer(bootstrap_servers=self._servers)
        self._consumer = AIOKafkaConsumer(bootstrap_servers=self._servers)
        await self._producer.start()
        await self._consumer.start()

    async def disconnect(self) -> None:
        """Stop both Kafka clients."""
        await self._producer.stop()
        await self._consumer.stop()

    async def subscribe(self, channel: str) -> None:
        """Add `channel` to the consumer's topic subscription."""
        self._consumer_channels.add(channel)
        self._consumer.subscribe(topics=self._consumer_channels)

    async def unsubscribe(self, channel: str) -> None:
        """Remove `channel` and resubscribe to the remaining topics.

        Fix: the original called `self._consumer.unsubscribe()`, which
        drops the *entire* subscription, and never removed `channel`
        from `_consumer_channels` — so a later subscribe() would
        silently resubscribe stale channels.
        """
        self._consumer_channels.discard(channel)
        if self._consumer_channels:
            self._consumer.subscribe(topics=self._consumer_channels)
        else:
            self._consumer.unsubscribe()

    async def publish(self, channel: str, message: typing.Any) -> None:
        """Send `message` (a str) to the `channel` topic and await the ack."""
        await self._producer.send_and_wait(channel, message.encode("utf8"))

    async def next_published(self) -> Event:
        """Block until the next message arrives on any subscribed topic."""
        message = await self._consumer.getone()
        return Event(channel=message.topic, message=message.value.decode("utf8"))
32 changes: 32 additions & 0 deletions orchestrator/vendor/broadcaster/broadcaster/_backends/memory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import asyncio
import typing

from .._base import Event
from .base import BroadcastBackend


class MemoryBackend(BroadcastBackend):
    """In-memory pub/sub backend backed by a single asyncio queue.

    Intended for local development and testing. Events whose channel has
    no subscriber are silently dropped when read off the queue.
    """

    def __init__(self, url: str):
        # Channels the caller is currently subscribed to.
        self._subscribed: typing.Set = set()

    async def connect(self) -> None:
        # The queue is created here, not in __init__, so that it is
        # bound to the event loop that is running at connect time.
        self._published: asyncio.Queue = asyncio.Queue()

    async def disconnect(self) -> None:
        """Nothing to tear down for the in-memory backend."""

    async def subscribe(self, channel: str) -> None:
        self._subscribed.add(channel)

    async def unsubscribe(self, channel: str) -> None:
        self._subscribed.remove(channel)

    async def publish(self, channel: str, message: typing.Any) -> None:
        await self._published.put(Event(channel=channel, message=message))

    async def next_published(self) -> Event:
        """Return the next event on a subscribed channel, skipping others."""
        while True:
            candidate = await self._published.get()
            if candidate.channel in self._subscribed:
                return candidate
Loading

0 comments on commit e4b00fa

Please sign in to comment.