
Commit f6d2ca2

improving tests

1 parent ebb0e61 commit f6d2ca2

4 files changed: +65 −24 lines

packages/models-library/src/models_library/clusters.py

Lines changed: 2 additions & 0 deletions
@@ -2,6 +2,7 @@
 from pathlib import Path
 from typing import Any, ClassVar, Final, Literal, TypeAlias
 
+from attr import frozen
 from pydantic import (
     AnyUrl,
     BaseModel,
@@ -46,6 +47,7 @@ class BaseAuthentication(BaseModel):
     type: str
 
     class Config:
+        frozen = True
         extra = Extra.forbid
 

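Note: in pydantic v1, setting frozen = True in Config makes model instances immutable and hashable (it implies allow_mutation = False and generates __hash__). That is presumably what lets a TLSAuthentication instance be passed to the functools.lru_cache-decorated helper added in utils/clusters.py below. A minimal, self-contained sketch (the TLSAuthentication fields here are illustrative, not the real model):

import functools

from pydantic import BaseModel, Extra


class BaseAuthentication(BaseModel):
    type: str

    class Config:
        frozen = True  # immutable instances -> hashable
        extra = Extra.forbid


class TLSAuthentication(BaseAuthentication):
    # illustrative subset; the real model also carries the certificate paths
    type: str = "tls"


@functools.lru_cache
def _cached_commands(auth: TLSAuthentication) -> tuple[str, ...]:
    # lru_cache requires hashable arguments, hence the frozen config
    return (f"auth-type={auth.type}",)


assert _cached_commands(TLSAuthentication()) == ("auth-type=tls",)
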
services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml

Lines changed: 8 additions & 3 deletions
@@ -1,5 +1,8 @@
 version: "3.8"
 x-dask-tls-secrets: &dask_tls_secrets
+  - source: dask_tls_ca
+    target: ${DASK_TLS_KEY}
+    mode: 0444
   - source: dask_tls_key
     target: ${DASK_TLS_KEY}
     mode: 0444
@@ -48,7 +51,7 @@ services:
       DASK_TLS_CERT: ${DASK_TLS_CERT}
       DASK_TLS_KEY: ${DASK_TLS_KEY}
       LOG_LEVEL: ${LOG_LEVEL}
-      SIDECAR_COMP_SERVICES_SHARED_FOLDER: ${SIDECAR_COMP_SERVICES_SHARED_FOLDER:-/home/scu/computational_shared_data}
+      SIDECAR_COMP_SERVICES_SHARED_FOLDER: /home/scu/computational_shared_data
       SIDECAR_COMP_SERVICES_SHARED_VOLUME_NAME: computational_shared_data
     deploy:
       mode: global
@@ -115,7 +118,9 @@ volumes:
   redis-data:
 
 secrets:
+  dask_tls_ca:
+    file: .dask-certificates/tls_dask_ca.pem
   dask_tls_key:
-    file: ./dask-sidecar/.dask-certificates/dask-key.pem
+    file: .dask-certificates/tls_dask_cert.pem
   dask_tls_cert:
-    file: ./dask-sidecar/.dask-certificates/dask-cert.pem
+    file: .dask-certificates/tls_dask_key.pem
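
Note: x-dask-tls-secrets is a YAML anchor, so any service whose secrets section references *dask_tls_secrets presumably now also mounts the new dask_tls_ca secret. A rough sketch of how the anchor expands when the compose file is parsed (requires PyYAML; the dask-sidecar service name below is illustrative, not taken from this diff):

import yaml

snippet = """
x-dask-tls-secrets: &dask_tls_secrets
  - source: dask_tls_ca
    target: ${DASK_TLS_KEY}
    mode: 0444
  - source: dask_tls_key
    target: ${DASK_TLS_KEY}
    mode: 0444
services:
  dask-sidecar:  # illustrative service name
    secrets: *dask_tls_secrets
"""

parsed = yaml.safe_load(snippet)
# the alias expands to the full list defined under the anchor
assert [s["source"] for s in parsed["services"]["dask-sidecar"]["secrets"]] == [
    "dask_tls_ca",
    "dask_tls_key",
]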

services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py

Lines changed: 46 additions & 11 deletions
@@ -2,6 +2,7 @@
 import datetime
 import functools
 import json
+from pathlib import Path
 from typing import Any, Final
 
 from aws_library.ec2.models import EC2InstanceBootSpecific, EC2InstanceData, EC2Tags
@@ -10,7 +11,7 @@
     ClusterState,
     OnDemandCluster,
 )
-from models_library.clusters import NoAuthentication
+from models_library.clusters import NoAuthentication, TLSAuthentication
 from models_library.users import UserID
 from models_library.wallets import WalletID
 from types_aiobotocore_ec2.literals import InstanceStateNameType
@@ -20,14 +21,35 @@
 from .dask import get_scheduler_url
 
 _DOCKER_COMPOSE_FILE_NAME: Final[str] = "docker-compose.yml"
+_HOST_DOCKER_COMPOSE_PATH: Final[Path] = Path(f"/{_DOCKER_COMPOSE_FILE_NAME}")
+_HOST_CERTIFICATES_BASE_PATH: Final[Path] = Path("/.dask-sidecar-certificates")
+_HOST_TLS_CA_FILE_PATH: Final[Path] = _HOST_CERTIFICATES_BASE_PATH / "tls_dask_ca.pem"
+_HOST_TLS_CERT_FILE_PATH: Final[Path] = (
+    _HOST_CERTIFICATES_BASE_PATH / "tls_dask_cert.pem"
+)
+_HOST_TLS_KEY_FILE_PATH: Final[Path] = _HOST_CERTIFICATES_BASE_PATH / "tls_dask_key.pem"
+
+
+def _base_64_encode(file: Path) -> str:
+    assert file.exists() # nosec
+    with file.open("rb") as f:
+        return base64.b64encode(f.read()).decode("utf-8")
 
 
 @functools.lru_cache
 def _docker_compose_yml_base64_encoded() -> str:
     file_path = PACKAGE_DATA_FOLDER / _DOCKER_COMPOSE_FILE_NAME
-    assert file_path.exists() # nosec
-    with file_path.open("rb") as f:
-        return base64.b64encode(f.read()).decode("utf-8")
+    return _base_64_encode(file_path)
+
+
+@functools.lru_cache
+def _write_tls_certificates_commands(auth: TLSAuthentication) -> list[str]:
+    return [
+        f"mkdir --parents {_HOST_CERTIFICATES_BASE_PATH}",
+        f"echo '{_base_64_encode(auth.tls_ca_file)}' > {_HOST_TLS_CA_FILE_PATH}",
+        f"echo '{_base_64_encode(auth.tls_client_cert)}' > {_HOST_TLS_CERT_FILE_PATH}",
+        f"echo '{_base_64_encode(auth.tls_client_key)}' > {_HOST_TLS_KEY_FILE_PATH}",
+    ]
 
 
 def _prepare_environment_variables(
@@ -47,21 +69,24 @@ def _convert_to_env_dict(entries: dict[str, Any]) -> str:
         return f"'{json.dumps(jsonable_encoder(entries))}'"
 
     return [
-        f"DOCKER_IMAGE_TAG={app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG}",
-        f"DASK_NTHREADS={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS or ''}",
        f"CLUSTERS_KEEPER_EC2_ACCESS_KEY_ID={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_ACCESS_KEY_ID}",
         f"CLUSTERS_KEEPER_EC2_ENDPOINT={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_ENDPOINT}",
         f"CLUSTERS_KEEPER_EC2_REGION_NAME={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_REGION_NAME}",
         f"CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_SECRET_ACCESS_KEY}",
+        f"DASK_NTHREADS={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS or ''}",
+        f"DASK_TLS_CA_FILE={_HOST_TLS_CA_FILE_PATH}",
+        f"DASK_TLS_CERT={_HOST_TLS_CERT_FILE_PATH}",
+        f"DASK_TLS_KEY={_HOST_TLS_KEY_FILE_PATH}",
+        f"DOCKER_IMAGE_TAG={app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG}",
+        f"EC2_INSTANCES_NAME_PREFIX={cluster_machines_name_prefix}",
+        f"LOG_LEVEL={app_settings.LOG_LEVEL}",
         f"WORKERS_EC2_INSTANCES_ALLOWED_TYPES={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_ALLOWED_TYPES)}",
+        f"WORKERS_EC2_INSTANCES_CUSTOM_TAGS={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_CUSTOM_TAGS | additional_custom_tags)}", # type: ignore
         f"WORKERS_EC2_INSTANCES_KEY_NAME={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_KEY_NAME}",
         f"WORKERS_EC2_INSTANCES_MAX_INSTANCES={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_MAX_INSTANCES}",
-        f"EC2_INSTANCES_NAME_PREFIX={cluster_machines_name_prefix}",
         f"WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS={_convert_to_env_list(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS)}",
         f"WORKERS_EC2_INSTANCES_SUBNET_ID={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_SUBNET_ID}",
         f"WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION}",
-        f"WORKERS_EC2_INSTANCES_CUSTOM_TAGS={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_CUSTOM_TAGS | additional_custom_tags)}", # type: ignore
-        f"LOG_LEVEL={app_settings.LOG_LEVEL}",
     ]
 
 
@@ -82,13 +107,23 @@ def create_startup_script(
     )
 
     startup_commands = ec2_boot_specific.custom_boot_scripts.copy()
+
+    if isinstance(
+        app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH,
+        TLSAuthentication,
+    ):
+        write_certificates_commands = _write_tls_certificates_commands(
+            app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH
+        )
+        startup_commands.extend(write_certificates_commands)
+
     startup_commands.extend(
         [
            # NOTE: https://stackoverflow.com/questions/41203492/solving-redis-warnings-on-overcommit-memory-and-transparent-huge-pages-for-ubunt
            "sysctl vm.overcommit_memory=1",
-            f"echo '{_docker_compose_yml_base64_encoded()}' | base64 -d > docker-compose.yml",
+            f"echo '{_docker_compose_yml_base64_encoded()}' | base64 -d > {_HOST_DOCKER_COMPOSE_PATH}",
            "docker swarm init",
-            f"{' '.join(environment_variables)} docker stack deploy --with-registry-auth --compose-file=docker-compose.yml dask_stack",
+            f"{' '.join(environment_variables)} docker stack deploy --with-registry-auth --compose-file={_HOST_DOCKER_COMPOSE_PATH} dask_stack",
         ]
     )
     return "\n".join(startup_commands)
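
Note: the startup script ships files to the EC2 host by inlining them as base64 and decoding them on the other side. A standalone sketch of that round trip (not repository code; it assumes a POSIX shell with echo and base64 available, as the generated startup commands do):

import base64
import subprocess
import tempfile
from pathlib import Path


def _base_64_encode(file: Path) -> str:
    # same shape as the helper above
    with file.open("rb") as f:
        return base64.b64encode(f.read()).decode("utf-8")


with tempfile.TemporaryDirectory() as tmp:
    src = Path(tmp) / "docker-compose.yml"
    dst = Path(tmp) / "decoded-docker-compose.yml"
    src.write_text('version: "3.8"\n')

    # same shape as the generated command:
    #   echo '<base64 payload>' | base64 -d > /docker-compose.yml
    subprocess.run(
        f"echo '{_base_64_encode(src)}' | base64 -d > {dst}",
        shell=True,
        check=True,
    )
    assert dst.read_text() == src.read_text()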

services/clusters-keeper/tests/unit/test_utils_clusters.py

Lines changed: 9 additions & 10 deletions
@@ -67,12 +67,12 @@ def test_create_startup_script(
     for boot_script in ec2_boot_specs.custom_boot_scripts:
         assert boot_script in startup_script
     # we have commands to pipe into a docker-compose file
-    assert " | base64 -d > docker-compose.yml" in startup_script
+    assert " | base64 -d > /docker-compose.yml" in startup_script
     # we have commands to init a docker-swarm
     assert "docker swarm init" in startup_script
     # we have commands to deploy a stack
     assert (
-        "docker stack deploy --with-registry-auth --compose-file=docker-compose.yml dask_stack"
+        "docker stack deploy --with-registry-auth --compose-file=/docker-compose.yml dask_stack"
         in startup_script
     )
     # before that we have commands that setup ENV variables, let's check we have all of them as defined in the docker-compose
@@ -87,20 +87,20 @@
     )
     startup_script_env_keys_names = [key for key, _ in startup_script_key_value_pairs]
     # docker-compose expected values
+    docker_compose_expected_environment: dict[str, str] = {}
     assert "services" in clusters_keeper_docker_compose
-    assert "autoscaling" in clusters_keeper_docker_compose["services"]
-    assert "environment" in clusters_keeper_docker_compose["services"]["autoscaling"]
-    docker_compose_expected_environment: dict[
-        str, str
-    ] = clusters_keeper_docker_compose["services"]["autoscaling"]["environment"]
-    assert isinstance(docker_compose_expected_environment, dict)
+    assert isinstance(clusters_keeper_docker_compose["services"], dict)
+    for service_details in clusters_keeper_docker_compose["services"].values():
+        if "environment" in service_details:
+            assert isinstance(service_details["environment"], dict)
+            docker_compose_expected_environment |= service_details["environment"]
 
     # check the expected environment variables are set so the docker-compose will be complete (we define enough)
     expected_env_keys = [
         v[2:-1].split(":")[0]
         for v in docker_compose_expected_environment.values()
         if isinstance(v, str) and v.startswith("${")
-    ] + ["DASK_NTHREADS", "DOCKER_IMAGE_TAG"]
+    ] + ["DOCKER_IMAGE_TAG"]
     for env_key in expected_env_keys:
         assert (
             env_key in startup_script_env_keys_names
@@ -173,7 +173,6 @@ def test_startup_script_defines_all_envs_for_docker_compose(
     _ENV_VARIABLE_NOT_SET_ERROR = "variable is not set"
     assert _ENV_VARIABLE_NOT_SET_ERROR not in process.stderr.decode()
    assert process.stdout
-    assert process.stdout is None
 
 
 @pytest.mark.parametrize(
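
Note: the expected_env_keys comprehension in the test turns docker-compose placeholders into bare variable names. A worked example with made-up values (LOG_LEVEL and DASK_TLS_KEY appear in the compose file; SOME_VAR is hypothetical):

values = ["${LOG_LEVEL}", "${DASK_TLS_KEY}", "${SOME_VAR:-/some/default}"]

expected_env_keys = [
    v[2:-1].split(":")[0]  # "${NAME:-default}" -> "NAME"
    for v in values
    if isinstance(v, str) and v.startswith("${")
]
assert expected_env_keys == ["LOG_LEVEL", "DASK_TLS_KEY", "SOME_VAR"]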

0 commit comments
