diff --git a/README.md b/README.md index 306ce20..9ee4493 100644 --- a/README.md +++ b/README.md @@ -227,7 +227,33 @@ def test_workspace_operations(ws): assert len(clusters) >= 0 ``` -See also [`log_workspace_link`](#log_workspace_link-fixture), [`make_alert_permissions`](#make_alert_permissions-fixture), [`make_authorization_permissions`](#make_authorization_permissions-fixture), [`make_catalog`](#make_catalog-fixture), [`make_cluster`](#make_cluster-fixture), [`make_cluster_permissions`](#make_cluster_permissions-fixture), [`make_cluster_policy`](#make_cluster_policy-fixture), [`make_cluster_policy_permissions`](#make_cluster_policy_permissions-fixture), [`make_dashboard_permissions`](#make_dashboard_permissions-fixture), [`make_directory`](#make_directory-fixture), [`make_directory_permissions`](#make_directory_permissions-fixture), [`make_experiment`](#make_experiment-fixture), [`make_experiment_permissions`](#make_experiment_permissions-fixture), [`make_feature_table_permissions`](#make_feature_table_permissions-fixture), [`make_group`](#make_group-fixture), [`make_instance_pool`](#make_instance_pool-fixture), [`make_instance_pool_permissions`](#make_instance_pool_permissions-fixture), [`make_job`](#make_job-fixture), [`make_job_permissions`](#make_job_permissions-fixture), [`make_lakeview_dashboard_permissions`](#make_lakeview_dashboard_permissions-fixture), [`make_model`](#make_model-fixture), [`make_notebook`](#make_notebook-fixture), [`make_notebook_permissions`](#make_notebook_permissions-fixture), [`make_pipeline`](#make_pipeline-fixture), [`make_pipeline_permissions`](#make_pipeline_permissions-fixture), [`make_query`](#make_query-fixture), [`make_query_permissions`](#make_query_permissions-fixture), [`make_registered_model_permissions`](#make_registered_model_permissions-fixture), [`make_repo`](#make_repo-fixture), [`make_repo_permissions`](#make_repo_permissions-fixture), [`make_secret_scope`](#make_secret_scope-fixture), [`make_secret_scope_acl`](#make_secret_scope_acl-fixture), [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`make_serving_endpoint_permissions`](#make_serving_endpoint_permissions-fixture), [`make_storage_credential`](#make_storage_credential-fixture), [`make_udf`](#make_udf-fixture), [`make_user`](#make_user-fixture), [`make_warehouse`](#make_warehouse-fixture), [`make_warehouse_permissions`](#make_warehouse_permissions-fixture), [`make_workspace_file_path_permissions`](#make_workspace_file_path_permissions-fixture), [`make_workspace_file_permissions`](#make_workspace_file_permissions-fixture), [`sql_backend`](#sql_backend-fixture), [`workspace_library`](#workspace_library-fixture), [`debug_env`](#debug_env-fixture), [`product_info`](#product_info-fixture). 
+See also [`log_workspace_link`](#log_workspace_link-fixture), [`make_alert_permissions`](#make_alert_permissions-fixture), [`make_authorization_permissions`](#make_authorization_permissions-fixture), [`make_catalog`](#make_catalog-fixture), [`make_cluster`](#make_cluster-fixture), [`make_cluster_permissions`](#make_cluster_permissions-fixture), [`make_cluster_policy`](#make_cluster_policy-fixture), [`make_cluster_policy_permissions`](#make_cluster_policy_permissions-fixture), [`make_dashboard_permissions`](#make_dashboard_permissions-fixture), [`make_directory`](#make_directory-fixture), [`make_directory_permissions`](#make_directory_permissions-fixture), [`make_experiment`](#make_experiment-fixture), [`make_experiment_permissions`](#make_experiment_permissions-fixture), [`make_feature_table_permissions`](#make_feature_table_permissions-fixture), [`make_group`](#make_group-fixture), [`make_instance_pool`](#make_instance_pool-fixture), [`make_instance_pool_permissions`](#make_instance_pool_permissions-fixture), [`make_job`](#make_job-fixture), [`make_job_permissions`](#make_job_permissions-fixture), [`make_lakeview_dashboard_permissions`](#make_lakeview_dashboard_permissions-fixture), [`make_model`](#make_model-fixture), [`make_notebook`](#make_notebook-fixture), [`make_notebook_permissions`](#make_notebook_permissions-fixture), [`make_pipeline`](#make_pipeline-fixture), [`make_pipeline_permissions`](#make_pipeline_permissions-fixture), [`make_query`](#make_query-fixture), [`make_query_permissions`](#make_query_permissions-fixture), [`make_registered_model_permissions`](#make_registered_model_permissions-fixture), [`make_repo`](#make_repo-fixture), [`make_repo_permissions`](#make_repo_permissions-fixture), [`make_secret_scope`](#make_secret_scope-fixture), [`make_secret_scope_acl`](#make_secret_scope_acl-fixture), [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`make_serving_endpoint_permissions`](#make_serving_endpoint_permissions-fixture), [`make_storage_credential`](#make_storage_credential-fixture), [`make_udf`](#make_udf-fixture), [`make_user`](#make_user-fixture), [`make_warehouse`](#make_warehouse-fixture), [`make_warehouse_permissions`](#make_warehouse_permissions-fixture), [`make_workspace_file_path_permissions`](#make_workspace_file_path_permissions-fixture), [`make_workspace_file_permissions`](#make_workspace_file_permissions-fixture), [`sql_backend`](#sql_backend-fixture), [`debug_env`](#debug_env-fixture), [`product_info`](#product_info-fixture). + + +[[back to top](#python-testing-for-databricks)] + +### `sql_backend` fixture +Create and provide a SQL backend for executing statements. + +Requires the environment variable `DATABRICKS_WAREHOUSE_ID` to be set. + +See also [`make_catalog`](#make_catalog-fixture), [`make_schema`](#make_schema-fixture), [`make_table`](#make_table-fixture), [`make_udf`](#make_udf-fixture), [`sql_exec`](#sql_exec-fixture), [`sql_fetch_all`](#sql_fetch_all-fixture), [`ws`](#ws-fixture), [`env_or_skip`](#env_or_skip-fixture). + + +[[back to top](#python-testing-for-databricks)] + +### `sql_exec` fixture +Execute SQL statement and don't return any results. + +See also [`sql_backend`](#sql_backend-fixture). + + +[[back to top](#python-testing-for-databricks)] + +### `sql_fetch_all` fixture +Fetch all rows from a SQL statement. + +See also [`sql_backend`](#sql_backend-fixture). 
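The neighbouring fixture sections in this README carry usage snippets, so here is a minimal sketch for the three SQL fixtures together (the schema name is illustrative; `DATABRICKS_WAREHOUSE_ID` must be set as noted above):

```python
def test_sql_fixtures(sql_exec, sql_fetch_all, make_random):
    schema = f"dummy_s{make_random(4)}".lower()
    # sql_exec runs a statement and discards any result
    sql_exec(f"CREATE SCHEMA IF NOT EXISTS hive_metastore.{schema}")
    # sql_fetch_all returns the rows produced by a query
    rows = list(sql_fetch_all(f"SHOW TABLES IN hive_metastore.{schema}"))
    assert not rows  # a freshly created schema has no tables
    sql_exec(f"DROP SCHEMA IF EXISTS hive_metastore.{schema} CASCADE")
```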
[[back to top](#python-testing-for-databricks)] @@ -253,7 +279,7 @@ random_string = make_random(k=8) assert len(random_string) == 8 ``` -See also [`make_catalog`](#make_catalog-fixture), [`make_cluster`](#make_cluster-fixture), [`make_cluster_policy`](#make_cluster_policy-fixture), [`make_directory`](#make_directory-fixture), [`make_experiment`](#make_experiment-fixture), [`make_group`](#make_group-fixture), [`make_instance_pool`](#make_instance_pool-fixture), [`make_job`](#make_job-fixture), [`make_model`](#make_model-fixture), [`make_notebook`](#make_notebook-fixture), [`make_pipeline`](#make_pipeline-fixture), [`make_query`](#make_query-fixture), [`make_repo`](#make_repo-fixture), [`make_schema`](#make_schema-fixture), [`make_secret_scope`](#make_secret_scope-fixture), [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`make_table`](#make_table-fixture), [`make_udf`](#make_udf-fixture), [`make_user`](#make_user-fixture), [`make_warehouse`](#make_warehouse-fixture), [`workspace_library`](#workspace_library-fixture). +See also [`make_catalog`](#make_catalog-fixture), [`make_cluster`](#make_cluster-fixture), [`make_cluster_policy`](#make_cluster_policy-fixture), [`make_directory`](#make_directory-fixture), [`make_experiment`](#make_experiment-fixture), [`make_group`](#make_group-fixture), [`make_instance_pool`](#make_instance_pool-fixture), [`make_job`](#make_job-fixture), [`make_model`](#make_model-fixture), [`make_notebook`](#make_notebook-fixture), [`make_pipeline`](#make_pipeline-fixture), [`make_query`](#make_query-fixture), [`make_repo`](#make_repo-fixture), [`make_schema`](#make_schema-fixture), [`make_secret_scope`](#make_secret_scope-fixture), [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`make_table`](#make_table-fixture), [`make_udf`](#make_udf-fixture), [`make_user`](#make_user-fixture), [`make_warehouse`](#make_warehouse-fixture). [[back to top](#python-testing-for-databricks)] @@ -273,7 +299,7 @@ def test_instance_pool(make_instance_pool): logger.info(f"created {make_instance_pool()}") ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -305,7 +331,7 @@ def test_job(make_job): logger.info(f"created {make_job()}") ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_notebook`](#make_notebook-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_notebook`](#make_notebook-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -334,7 +360,7 @@ def test_cluster(make_cluster): logger.info(f"created {make_cluster(single_node=True)}") ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -360,7 +386,7 @@ def test_cluster_policy(make_cluster_policy): logger.info(f"created {make_cluster_policy()}") ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). 
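Since `make_cluster_policy` now depends on `watchdog_purge_suffix`, a short sketch of the naming convention may help (the explicit name below is illustrative and mirrors the default `dummy-{random}-{suffix}` pattern used in `compute.py` later in this diff):

```python
def test_cluster_policy_custom_name(make_cluster_policy, make_random, watchdog_purge_suffix):
    # embedding the purge suffix in the name lets the workspace watchdog clean up leaked policies
    policy = make_cluster_policy(name=f"dummy-{make_random(4)}-{watchdog_purge_suffix}")
    assert policy.policy_id is not None
```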
[[back to top](#python-testing-for-databricks)] @@ -394,7 +420,7 @@ def test_pipeline(make_pipeline, make_pipeline_permissions, make_group): ) ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_notebook`](#make_notebook-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_notebook`](#make_notebook-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -416,7 +442,7 @@ def test_warehouse_has_remove_after_tag(ws, make_warehouse): assert warehouse_tags["custom_tags"][0]["key"] == "RemoveAfter" ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -445,7 +471,7 @@ def test_new_group(make_group, make_user, ws): assert group.members == loaded.members ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -463,7 +489,7 @@ def test_new_user(make_user, ws): assert home_dir.object_type == ObjectType.DIRECTORY ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -494,7 +520,7 @@ def test_creates_some_notebook(make_notebook): assert "print(1)" in notebook.read_text() ``` -See also [`make_job`](#make_job-fixture), [`make_pipeline`](#make_pipeline-fixture), [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`make_job`](#make_job-fixture), [`make_pipeline`](#make_pipeline-fixture), [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -524,7 +550,7 @@ def test_creates_some_folder_with_a_notebook(make_directory, make_notebook): assert notebook.parent == folder ``` -See also [`make_experiment`](#make_experiment-fixture), [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`make_experiment`](#make_experiment-fixture), [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -552,7 +578,7 @@ def test_repo(make_repo): logger.info(f"created {make_repo()}") ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -696,7 +722,7 @@ def test_catalog_fixture(make_catalog, make_schema, make_table): logger.info(f"Created new schema: {from_table_1}") ``` -See also [`make_table`](#make_table-fixture), [`make_udf`](#make_udf-fixture), [`sql_backend`](#sql_backend-fixture), [`make_random`](#make_random-fixture). +See also [`make_table`](#make_table-fixture), [`make_udf`](#make_udf-fixture), [`sql_backend`](#sql_backend-fixture), [`make_random`](#make_random-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). 
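`make_schema` now takes its `RemoveAfter` value from `watchdog_remove_after`; a sketch of what that value looks like (the format follows the `watchdog.py` fixture introduced later in this diff):

```python
def test_schema_remove_after(make_schema, watchdog_remove_after):
    schema = make_schema()
    # the schema is created WITH DBPROPERTIES (RemoveAfter=<UTC hour stamp>), rounded up to the next hour
    assert len(watchdog_remove_after) == 10  # "%Y%m%d%H", e.g. 2024091313
    assert schema.full_name.startswith("hive_metastore.")
```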
[[back to top](#python-testing-for-databricks)] @@ -728,7 +754,7 @@ def test_catalog_fixture(make_catalog, make_schema, make_table): logger.info(f"Created new schema: {from_table_1}") ``` -See also [`make_query`](#make_query-fixture), [`sql_backend`](#sql_backend-fixture), [`make_schema`](#make_schema-fixture), [`make_random`](#make_random-fixture). +See also [`make_query`](#make_query-fixture), [`sql_backend`](#sql_backend-fixture), [`make_schema`](#make_schema-fixture), [`make_random`](#make_random-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -766,32 +792,6 @@ _No description yet._ See also [`ws`](#ws-fixture). -[[back to top](#python-testing-for-databricks)] - -### `sql_backend` fixture -Create and provide a SQL backend for executing statements. - -Requires the environment variable `DATABRICKS_WAREHOUSE_ID` to be set. - -See also [`make_catalog`](#make_catalog-fixture), [`make_schema`](#make_schema-fixture), [`make_table`](#make_table-fixture), [`make_udf`](#make_udf-fixture), [`sql_exec`](#sql_exec-fixture), [`sql_fetch_all`](#sql_fetch_all-fixture), [`ws`](#ws-fixture), [`env_or_skip`](#env_or_skip-fixture). - - -[[back to top](#python-testing-for-databricks)] - -### `sql_exec` fixture -Execute SQL statement and don't return any results. - -See also [`sql_backend`](#sql_backend-fixture). - - -[[back to top](#python-testing-for-databricks)] - -### `sql_fetch_all` fixture -Fetch all rows from a SQL statement. - -See also [`sql_backend`](#sql_backend-fixture). - - [[back to top](#python-testing-for-databricks)] ### `make_model` fixture @@ -815,7 +815,7 @@ def test_models(make_group, make_model, make_registered_model_permissions): ) ``` -See also [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). +See also [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -842,7 +842,7 @@ def test_experiments(make_group, make_experiment, make_experiment_permissions): ) ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_directory`](#make_directory-fixture). +See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_directory`](#make_directory-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -869,14 +869,6 @@ _No description yet._ See also [`ws`](#ws-fixture). -[[back to top](#python-testing-for-databricks)] - -### `workspace_library` fixture -_No description yet._ - -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture). - - [[back to top](#python-testing-for-databricks)] ### `log_workspace_link` fixture @@ -924,7 +916,7 @@ def test_permissions_for_redash( ) ``` -See also [`ws`](#ws-fixture), [`make_table`](#make_table-fixture), [`make_random`](#make_random-fixture). +See also [`ws`](#ws-fixture), [`make_table`](#make_table-fixture), [`make_random`](#make_random-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). [[back to top](#python-testing-for-databricks)] @@ -963,7 +955,7 @@ def test_endpoints(make_group, make_serving_endpoint, make_serving_endpoint_perm ) ``` -See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_model`](#make_model-fixture). 
+See also [`ws`](#ws-fixture), [`make_random`](#make_random-fixture), [`make_model`](#make_model-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). [[back to top](#python-testing-for-databricks)] @@ -982,6 +974,22 @@ _No description yet._ See also [`ws`](#ws-fixture). +[[back to top](#python-testing-for-databricks)] + +### `watchdog_remove_after` fixture +Purge time for test objects, representing the (UTC-based) hour from which objects may be purged. + +See also [`make_cluster`](#make_cluster-fixture), [`make_instance_pool`](#make_instance_pool-fixture), [`make_job`](#make_job-fixture), [`make_model`](#make_model-fixture), [`make_pipeline`](#make_pipeline-fixture), [`make_schema`](#make_schema-fixture), [`make_serving_endpoint`](#make_serving_endpoint-fixture), [`make_table`](#make_table-fixture), [`make_warehouse`](#make_warehouse-fixture), [`watchdog_purge_suffix`](#watchdog_purge_suffix-fixture). + + +[[back to top](#python-testing-for-databricks)] + +### `watchdog_purge_suffix` fixture +HEX-encoded purge time suffix for test objects. + +See also [`make_cluster_policy`](#make_cluster_policy-fixture), [`make_directory`](#make_directory-fixture), [`make_experiment`](#make_experiment-fixture), [`make_group`](#make_group-fixture), [`make_notebook`](#make_notebook-fixture), [`make_pipeline`](#make_pipeline-fixture), [`make_query`](#make_query-fixture), [`make_repo`](#make_repo-fixture), [`make_user`](#make_user-fixture), [`watchdog_remove_after`](#watchdog_remove_after-fixture). + + [[back to top](#python-testing-for-databricks)] diff --git a/pyproject.toml b/pyproject.toml index 316a8a8..0933f14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -157,6 +157,11 @@ exclude_lines = [ "if __name__ == .__main__.:", "if TYPE_CHECKING:", ] +exclude_also = [ + "import", + "__all__", + "@pytest.fixture" +] [tool.pylint.main] # PyLint configuration is adapted from Google Python Style Guide with modifications. diff --git a/src/databricks/labs/pytester/environment.py b/src/databricks/labs/pytester/environment.py deleted file mode 100644 index dc528e0..0000000 --- a/src/databricks/labs/pytester/environment.py +++ /dev/null @@ -1,58 +0,0 @@ -import logging -import os -import pathlib -import sys -import json - -_LOG = logging.getLogger(__name__) - - -def load_debug_env_if_runs_from_ide(key) -> bool: - """ - Load environment variables from the debug configuration if running from an IDE. - - This function loads environment variables from a debug configuration file if the code is running from an IDE - (such as a local development environment). The debug configuration file is located at ~/.databricks/debug-env.json. - - Parameters: - ----------- - key : str - The key to identify the set of environment variables in the debug configuration. - - Returns: - -------- - bool: - True if the environment variables were loaded, False otherwise. - - Raises: - ------ - KeyError: - If the specified key is not found in the debug configuration. - - Usage Example: - -------------- - To load debug environment variables if running from an IDE: - - .. 
code-block:: python - - if load_debug_env_if_runs_from_ide("my_key"): - print("Debug environment variables loaded.") - """ - if not _is_in_debug(): - return False - conf_file = pathlib.Path.home() / ".databricks/debug-env.json" - with conf_file.open("r") as f: - conf = json.load(f) - if key not in conf: - msg = f"{key} not found in ~/.databricks/debug-env.json" - raise KeyError(msg) - for env_key, value in conf[key].items(): - os.environ[env_key] = value - return True - - -def _is_in_debug() -> bool: - return os.path.basename(sys.argv[0]) in { - "_jb_pytest_runner.py", - "testlauncher.py", - } diff --git a/src/databricks/labs/pytester/fixtures/baseline.py b/src/databricks/labs/pytester/fixtures/baseline.py index c19ed2f..5760514 100644 --- a/src/databricks/labs/pytester/fixtures/baseline.py +++ b/src/databricks/labs/pytester/fixtures/baseline.py @@ -1,22 +1,15 @@ import logging import random import string -from datetime import timedelta, datetime, timezone -from functools import partial from pytest import fixture -from databricks.labs.lsql.backends import StatementExecutionBackend from databricks.sdk import WorkspaceClient from databricks.sdk.errors import DatabricksError _LOG = logging.getLogger(__name__) -"""Preserve resources created during tests for at least this long.""" -TEST_RESOURCE_PURGE_TIMEOUT = timedelta(hours=1) - - @fixture def make_random(): """ @@ -175,41 +168,3 @@ def inner(name: str, path: str, *, anchor: bool = True): _LOG.info(f'Created {name}: {url}') return inner - - -@fixture -def sql_backend(ws, env_or_skip) -> StatementExecutionBackend: - """Create and provide a SQL backend for executing statements. - - Requires the environment variable `DATABRICKS_WAREHOUSE_ID` to be set. - """ - warehouse_id = env_or_skip("DATABRICKS_WAREHOUSE_ID") - return StatementExecutionBackend(ws, warehouse_id) - - -@fixture -def sql_exec(sql_backend): - """Execute SQL statement and don't return any results.""" - return partial(sql_backend.execute) - - -@fixture -def sql_fetch_all(sql_backend): - """Fetch all rows from a SQL statement.""" - return partial(sql_backend.fetch) - - -def get_test_purge_time(timeout: timedelta = TEST_RESOURCE_PURGE_TIMEOUT) -> str: - """Purge time for test objects, representing the (UTC-based) hour from which objects may be purged.""" - # Note: this code is duplicated in the workflow installer (WorkflowsDeployment) so that it can avoid the - # transitive pytest deployment from this module. - now = datetime.now(timezone.utc) - purge_deadline = now + timeout - # Round UP to the next hour boundary: that is when resources will be deleted. 
- purge_hour = purge_deadline + (datetime.min.replace(tzinfo=timezone.utc) - purge_deadline) % timedelta(hours=1) - return purge_hour.strftime("%Y%m%d%H") - - -def get_purge_suffix() -> str: - """HEX-encoded purge time suffix for test objects.""" - return f'ra{int(get_test_purge_time()):x}' diff --git a/src/databricks/labs/pytester/fixtures/catalog.py b/src/databricks/labs/pytester/fixtures/catalog.py index 1dc7866..5b97b70 100644 --- a/src/databricks/labs/pytester/fixtures/catalog.py +++ b/src/databricks/labs/pytester/fixtures/catalog.py @@ -2,7 +2,7 @@ from collections.abc import Generator, Callable from pytest import fixture from databricks.labs.blueprint.commands import CommandExecutor -from databricks.sdk.errors import NotFound +from databricks.sdk.errors import DatabricksError from databricks.sdk.service.catalog import ( FunctionInfo, SchemaInfo, @@ -10,13 +10,12 @@ TableType, DataSourceFormat, CatalogInfo, - ColumnInfo, StorageCredentialInfo, AwsIamRoleRequest, AzureServicePrincipal, ) from databricks.sdk.service.compute import Language -from databricks.labs.pytester.fixtures.baseline import factory, get_test_purge_time +from databricks.labs.pytester.fixtures.baseline import factory logger = logging.getLogger(__name__) @@ -46,6 +45,7 @@ def make_table( make_schema, make_random, log_workspace_link, + watchdog_remove_after, ) -> Generator[Callable[..., TableInfo], None, None]: """ Create a table and return its info. Remove it after the test. Returns instance of `databricks.sdk.service.catalog.TableInfo`. @@ -75,33 +75,23 @@ def test_catalog_fixture(make_catalog, make_schema, make_table): ``` """ - def generate_sql_schema(columns: list[ColumnInfo]) -> str: + def generate_sql_schema(columns: list[tuple[str, str]]) -> str: """Generate a SQL schema from columns.""" schema = "(" - for index, column in enumerate(columns): - schema += escape_sql_identifier(column.name or str(index), maxsplit=0) - if column.type_name is None: - type_name = "STRING" - else: - type_name = column.type_name.value + for index, (col_name, type_name) in enumerate(columns): + schema += escape_sql_identifier(col_name or str(index), maxsplit=0) schema += f" {type_name}, " schema = schema[:-2] + ")" # Remove the last ', ' return schema - def generate_sql_column_casting(existing_columns: list[ColumnInfo], new_columns: list[ColumnInfo]) -> str: + def generate_sql_column_casting(existing_columns: list[tuple[str, str]], new_columns: list[tuple[str, str]]) -> str: """Generate the SQL to cast columns""" - if any(column.name is None for column in existing_columns): - raise ValueError(f"Columns should have a name: {existing_columns}") if len(new_columns) > len(existing_columns): raise ValueError(f"Too many columns: {new_columns}") select_expressions = [] - for index, (existing_column, new_column) in enumerate(zip(existing_columns, new_columns)): - column_name_new = escape_sql_identifier(new_column.name or str(index), maxsplit=0) - if new_column.type_name is None: - type_name = "STRING" - else: - type_name = new_column.type_name.value - select_expression = f"CAST({existing_column.name} AS {type_name}) AS {column_name_new}" + for index, ((existing_name, _), (new_name, new_type)) in enumerate(zip(existing_columns, new_columns)): + column_name_new = escape_sql_identifier(new_name or str(index), maxsplit=0) + select_expression = f"CAST({existing_name} AS {new_type}) AS {column_name_new}" select_expressions.append(select_expression) select = ", ".join(select_expressions) return select @@ -120,7 +110,7 @@ def create( # pylint: 
disable=too-many-locals,too-many-arguments,too-many-state tbl_properties: dict[str, str] | None = None, hiveserde_ddl: str | None = None, storage_override: str | None = None, - columns: list[ColumnInfo] | None = None, + columns: list[tuple[str, str]] | None = None, ) -> TableInfo: if schema_name is None: schema = make_schema(catalog_name=catalog_name) @@ -154,14 +144,14 @@ def create( # pylint: disable=too-many-locals,too-many-arguments,too-many-state else: # These are the columns from the JSON dataset below dataset_columns = [ - ColumnInfo(name="calories_burnt"), - ColumnInfo(name="device_id"), - ColumnInfo(name="id"), - ColumnInfo(name="miles_walked"), - ColumnInfo(name="num_steps"), - ColumnInfo(name="timestamp"), - ColumnInfo(name="user_id"), - ColumnInfo(name="value"), + ('calories_burnt', 'STRING'), + ('device_id', 'STRING'), + ('id', 'STRING'), + ('miles_walked', 'STRING'), + ('num_steps', 'STRING'), + ('timestamp', 'STRING'), + ('user_id', 'STRING'), + ('value', 'STRING'), ] select = generate_sql_column_casting(dataset_columns, columns) # Modified, otherwise it will identify the table as a DB Dataset @@ -193,9 +183,9 @@ def create( # pylint: disable=too-many-locals,too-many-arguments,too-many-state storage_location = f"dbfs:/user/hive/warehouse/{schema_name}/{name}" ddl = f"{ddl} {schema}" if tbl_properties: - tbl_properties.update({"RemoveAfter": get_test_purge_time()}) + tbl_properties.update({"RemoveAfter": watchdog_remove_after}) else: - tbl_properties = {"RemoveAfter": get_test_purge_time()} + tbl_properties = {"RemoveAfter": watchdog_remove_after} str_properties = ",".join([f" '{k}' = '{v}' " for k, v in tbl_properties.items()]) @@ -238,11 +228,9 @@ def create( # pylint: disable=too-many-locals,too-many-arguments,too-many-state def remove(table_info: TableInfo): try: sql_backend.execute(f"DROP TABLE IF EXISTS {table_info.full_name}") - except RuntimeError as e: + except DatabricksError as e: if "Cannot drop a view" in str(e): sql_backend.execute(f"DROP VIEW IF EXISTS {table_info.full_name}") - elif "SCHEMA_NOT_FOUND" in str(e): - logger.warning("Schema was already dropped while executing the test", exc_info=e) else: raise e @@ -250,7 +238,12 @@ def remove(table_info: TableInfo): @fixture -def make_schema(sql_backend, make_random, log_workspace_link) -> Generator[Callable[..., SchemaInfo], None, None]: +def make_schema( + sql_backend, + make_random, + log_workspace_link, + watchdog_remove_after, +) -> Generator[Callable[..., SchemaInfo], None, None]: """ Create a schema and return its info. Remove it after the test. Returns instance of `databricks.sdk.service.catalog.SchemaInfo`. 
@@ -272,20 +265,14 @@ def create(*, catalog_name: str = "hive_metastore", name: str | None = None) -> if name is None: name = f"dummy_S{make_random(4)}".lower() full_name = f"{catalog_name}.{name}".lower() - sql_backend.execute(f"CREATE SCHEMA {full_name} WITH DBPROPERTIES (RemoveAfter={get_test_purge_time()})") + sql_backend.execute(f"CREATE SCHEMA {full_name} WITH DBPROPERTIES (RemoveAfter={watchdog_remove_after})") schema_info = SchemaInfo(catalog_name=catalog_name, name=name, full_name=full_name) path = f'explore/data/{schema_info.catalog_name}/{schema_info.name}' log_workspace_link(f'{schema_info.full_name} schema', path) return schema_info def remove(schema_info: SchemaInfo): - try: - sql_backend.execute(f"DROP SCHEMA IF EXISTS {schema_info.full_name} CASCADE") - except RuntimeError as e: - if "SCHEMA_NOT_FOUND" in str(e): - logger.warning("Schema was already dropped while executing the test", exc_info=e) - else: - raise e + sql_backend.execute(f"DROP SCHEMA IF EXISTS {schema_info.full_name} CASCADE") yield from factory("schema", create, remove) @@ -390,13 +377,7 @@ def create( return udf_info def remove(udf_info: FunctionInfo): - try: - sql_backend.execute(f"DROP FUNCTION IF EXISTS {udf_info.full_name}") - except NotFound as e: - if "SCHEMA_NOT_FOUND" in str(e): - logger.warning("Schema was already dropped while executing the test", exc_info=e) - else: - raise e + sql_backend.execute(f"DROP FUNCTION IF EXISTS {udf_info.full_name}") yield from factory("table", create, remove) diff --git a/src/databricks/labs/pytester/fixtures/compute.py b/src/databricks/labs/pytester/fixtures/compute.py index 3e1b098..ca4dbce 100644 --- a/src/databricks/labs/pytester/fixtures/compute.py +++ b/src/databricks/labs/pytester/fixtures/compute.py @@ -15,11 +15,16 @@ from databricks.sdk.service._internal import Wait from databricks.sdk.service.compute import CreatePolicyResponse, ClusterDetails, ClusterSpec, CreateInstancePoolResponse -from databricks.labs.pytester.fixtures.baseline import factory, get_purge_suffix, get_test_purge_time +from databricks.labs.pytester.fixtures.baseline import factory @fixture -def make_cluster_policy(ws, make_random, log_workspace_link) -> Generator[CreatePolicyResponse, None, None]: +def make_cluster_policy( + ws, + make_random, + log_workspace_link, + watchdog_purge_suffix, +) -> Generator[CreatePolicyResponse, None, None]: """ Create a Databricks cluster policy and clean it up after the test. Returns a function to create cluster policies, which returns `databricks.sdk.service.compute.CreatePolicyResponse` instance. @@ -36,7 +41,7 @@ def test_cluster_policy(make_cluster_policy): def create(*, name: str | None = None, **kwargs) -> CreatePolicyResponse: if name is None: - name = f"dummy-{make_random(4)}-{get_purge_suffix()}" + name = f"dummy-{make_random(4)}-{watchdog_purge_suffix}" if "definition" not in kwargs: kwargs["definition"] = json.dumps( { @@ -51,7 +56,7 @@ def create(*, name: str | None = None, **kwargs) -> CreatePolicyResponse: @fixture -def make_cluster(ws, make_random, log_workspace_link) -> Generator[ClusterDetails, None, None]: +def make_cluster(ws, make_random, log_workspace_link, watchdog_remove_after) -> Generator[ClusterDetails, None, None]: """ Create a Databricks cluster, waits for it to start, and clean it up after the test. Returns a function to create clusters. You can get `cluster_id` attribute from the returned object. 
@@ -94,9 +99,9 @@ def create( if "instance_pool_id" not in kwargs: kwargs["node_type_id"] = ws.clusters.select_node_type(local_disk=True, min_memory_gb=16) if "custom_tags" not in kwargs: - kwargs["custom_tags"] = {"RemoveAfter": get_test_purge_time()} + kwargs["custom_tags"] = {"RemoveAfter": watchdog_remove_after} else: - kwargs["custom_tags"]["RemoveAfter"] = get_test_purge_time() + kwargs["custom_tags"]["RemoveAfter"] = watchdog_remove_after wait = ws.clusters.create( cluster_name=cluster_name, spark_version=spark_version, @@ -110,7 +115,12 @@ def create( @fixture -def make_instance_pool(ws, make_random, log_workspace_link) -> Generator[CreateInstancePoolResponse, None, None]: +def make_instance_pool( + ws, + make_random, + log_workspace_link, + watchdog_remove_after, +) -> Generator[CreateInstancePoolResponse, None, None]: """ Create a Databricks instance pool and clean it up after the test. Returns a function to create instance pools. Use `instance_pool_id` attribute from the returned object to get an ID of the pool. @@ -135,7 +145,7 @@ def create(*, instance_pool_name=None, node_type_id=None, **kwargs) -> CreateIns pool = ws.instance_pools.create( instance_pool_name, node_type_id, - custom_tags={"RemoveAfter": get_test_purge_time()}, + custom_tags={"RemoveAfter": watchdog_remove_after}, **kwargs, ) log_workspace_link(instance_pool_name, f'compute/instance-pools/{pool.instance_pool_id}', anchor=False) @@ -145,7 +155,7 @@ def create(*, instance_pool_name=None, node_type_id=None, **kwargs) -> CreateIns @fixture -def make_job(ws, make_random, make_notebook, log_workspace_link) -> Generator[Job, None, None]: +def make_job(ws, make_random, make_notebook, log_workspace_link, watchdog_remove_after) -> Generator[Job, None, None]: """ Create a Databricks job and clean it up after the test. Returns a function to create jobs. @@ -193,7 +203,7 @@ def create(notebook_path: str | Path | None = None, **kwargs) -> Job: ) ] # add RemoveAfter tag for test job cleanup - date_to_remove = get_test_purge_time() + date_to_remove = watchdog_remove_after remove_after_tag = {"key": "RemoveAfter", "value": date_to_remove} if 'tags' not in kwargs: kwargs["tags"] = [remove_after_tag] @@ -207,7 +217,13 @@ def create(notebook_path: str | Path | None = None, **kwargs) -> Job: @fixture -def make_pipeline(ws, make_random, make_notebook) -> Generator[CreatePipelineResponse, None, None]: +def make_pipeline( + ws, + make_random, + make_notebook, + watchdog_remove_after, + watchdog_purge_suffix, +) -> Generator[CreatePipelineResponse, None, None]: """ Create Delta Live Table Pipeline and clean it up after the test. Returns a function to create pipelines. Results in a `databricks.sdk.service.pipelines.CreatePipelineResponse` instance. 
@@ -232,7 +248,7 @@ def test_pipeline(make_pipeline, make_pipeline_permissions, make_group): def create(**kwargs) -> CreatePipelineResponse: if "name" not in kwargs: - kwargs["name"] = f"sdk-{make_random(4)}-{get_purge_suffix()}" + kwargs["name"] = f"sdk-{make_random(4)}-{watchdog_purge_suffix}" if "libraries" not in kwargs: notebook_library = NotebookLibrary(path=make_notebook().as_posix()) kwargs["libraries"] = [PipelineLibrary(notebook=notebook_library)] @@ -242,7 +258,7 @@ def create(**kwargs) -> CreatePipelineResponse: node_type_id=ws.clusters.select_node_type(local_disk=True, min_memory_gb=16), label="default", num_workers=1, - custom_tags={"cluster_type": "default", "RemoveAfter": get_test_purge_time()}, + custom_tags={"cluster_type": "default", "RemoveAfter": watchdog_remove_after}, ) ] return ws.pipelines.create(continuous=False, **kwargs) @@ -251,7 +267,7 @@ def create(**kwargs) -> CreatePipelineResponse: @fixture -def make_warehouse(ws, make_random) -> Generator[Wait[GetWarehouseResponse], None, None]: +def make_warehouse(ws, make_random, watchdog_remove_after) -> Generator[Wait[GetWarehouseResponse], None, None]: """ Create a Databricks warehouse and clean it up after the test. Returns a function to create warehouses. @@ -286,7 +302,7 @@ def create( if cluster_size is None: cluster_size = "2X-Small" - remove_after_tags = EndpointTags(custom_tags=[EndpointTagPair(key="RemoveAfter", value=get_test_purge_time())]) + remove_after_tags = EndpointTags(custom_tags=[EndpointTagPair(key="RemoveAfter", value=watchdog_remove_after)]) return ws.warehouses.create( name=warehouse_name, cluster_size=cluster_size, diff --git a/src/databricks/labs/pytester/fixtures/environment.py b/src/databricks/labs/pytester/fixtures/environment.py index 5c9d375..2e78554 100644 --- a/src/databricks/labs/pytester/fixtures/environment.py +++ b/src/databricks/labs/pytester/fixtures/environment.py @@ -9,7 +9,8 @@ from databricks.labs.blueprint.entrypoint import find_dir_with_leaf -def _is_in_debug() -> bool: +@fixture +def is_in_debug() -> bool: return os.path.basename(sys.argv[0]) in {"_jb_pytest_runner.py", "testlauncher.py"} @@ -70,13 +71,13 @@ def debug_env_name(): @fixture -def debug_env(monkeypatch, debug_env_name) -> MutableMapping[str, str]: +def debug_env(monkeypatch, debug_env_name, is_in_debug) -> MutableMapping[str, str]: """ Loads environment variables specified in [`debug_env_name` fixture](#debug_env_name-fixture) from a file for local debugging in IDEs, otherwise allowing the tests to run with the default environment variables specified in the CI/CD pipeline. """ - if not _is_in_debug(): + if not is_in_debug: return os.environ if debug_env_name == ".env": dot_env = _parse_dotenv() @@ -100,7 +101,7 @@ def debug_env(monkeypatch, debug_env_name) -> MutableMapping[str, str]: @fixture -def env_or_skip(debug_env) -> Callable[[str], str]: +def env_or_skip(debug_env, is_in_debug) -> Callable[[str], str]: """ Fixture to get environment variables or skip tests. 
@@ -116,7 +117,7 @@ def test_something(env_or_skip): ``` """ skip = pytest.skip - if _is_in_debug(): + if is_in_debug: skip = pytest.fail # type: ignore[assignment] def inner(var: str) -> str: @@ -133,7 +134,7 @@ def _parse_dotenv(): if dot_env is None: return {} env_vars = {} - with dot_env.open(encoding='utf8') as file: + with (dot_env / '.env').open(encoding='utf8') as file: for line in file: line = line.strip() if not line or line.startswith('#'): diff --git a/src/databricks/labs/pytester/fixtures/iam.py b/src/databricks/labs/pytester/fixtures/iam.py index d6c6b22..c36f055 100644 --- a/src/databricks/labs/pytester/fixtures/iam.py +++ b/src/databricks/labs/pytester/fixtures/iam.py @@ -10,13 +10,13 @@ from databricks.sdk import WorkspaceClient from databricks.sdk.service import iam -from databricks.labs.pytester.fixtures.baseline import factory, get_purge_suffix +from databricks.labs.pytester.fixtures.baseline import factory logger = logging.getLogger(__name__) @fixture -def make_user(ws, make_random, log_workspace_link): +def make_user(ws, make_random, log_workspace_link, watchdog_purge_suffix): """ This fixture returns a function that creates a Databricks workspace user and removes it after the test is complete. In case of random naming conflicts, @@ -33,7 +33,7 @@ def test_new_user(make_user, ws): @retried(on=[ResourceConflict], timeout=timedelta(seconds=30)) def create(**kwargs) -> User: - user_name = f"dummy-{make_random(4)}-{get_purge_suffix()}@example.com".lower() + user_name = f"dummy-{make_random(4)}-{watchdog_purge_suffix}@example.com".lower() user = ws.users.create(user_name=user_name, **kwargs) log_workspace_link(user.user_name, f'settings/workspace/identity-and-access/users/{user.id}') return user @@ -42,7 +42,7 @@ def create(**kwargs) -> User: @fixture -def make_group(ws: WorkspaceClient, make_random): +def make_group(ws: WorkspaceClient, make_random, watchdog_purge_suffix): """ This fixture provides a function to manage Databricks workspace groups. Groups can be created with specified members and roles, and they will be deleted after the test is complete. 
Deals with eventual @@ -67,14 +67,14 @@ def test_new_group(make_group, make_user, ws): assert group.members == loaded.members ``` """ - yield from _make_group("workspace group", ws.config, ws.groups, make_random) + yield from _make_group("workspace group", ws.config, ws.groups, make_random, watchdog_purge_suffix) def _scim_values(ids: list[str]) -> list[iam.ComplexValue]: return [iam.ComplexValue(value=x) for x in ids] -def _make_group(name: str, cfg: Config, interface, make_random) -> Generator[Group, None, None]: +def _make_group(name: str, cfg: Config, interface, make_random, watchdog_purge_suffix) -> Generator[Group, None, None]: @retried(on=[ResourceConflict], timeout=timedelta(seconds=30)) def create( *, @@ -85,7 +85,9 @@ def create( wait_for_provisioning: bool = False, **kwargs, ): - kwargs["display_name"] = f"sdk-{make_random(4)}-{get_purge_suffix()}" if display_name is None else display_name + kwargs["display_name"] = ( + f"sdk-{make_random(4)}-{watchdog_purge_suffix}" if display_name is None else display_name + ) if members is not None: kwargs["members"] = _scim_values(members) if roles is not None: diff --git a/src/databricks/labs/pytester/fixtures/ml.py b/src/databricks/labs/pytester/fixtures/ml.py index cb0921c..4f984a9 100644 --- a/src/databricks/labs/pytester/fixtures/ml.py +++ b/src/databricks/labs/pytester/fixtures/ml.py @@ -11,7 +11,7 @@ ) from databricks.sdk.service.ml import CreateExperimentResponse, ModelTag, GetModelResponse -from databricks.labs.pytester.fixtures.baseline import factory, get_purge_suffix, get_test_purge_time +from databricks.labs.pytester.fixtures.baseline import factory @fixture @@ -20,6 +20,7 @@ def make_experiment( make_random, make_directory, log_workspace_link, + watchdog_purge_suffix, ) -> Generator[CreateExperimentResponse, None, None]: """ Returns a function to create Databricks Experiments and clean them up after the test. @@ -53,7 +54,7 @@ def create( folder = make_directory(path=path) if experiment_name is None: # The purge suffix is needed here as well, just in case the path was supplied. - experiment_name = f"dummy-{make_random(4)}-{get_purge_suffix()}" + experiment_name = f"dummy-{make_random(4)}-{watchdog_purge_suffix}" experiment = ws.experiments.create_experiment(name=f"{folder}/{experiment_name}", **kwargs) log_workspace_link(f'{experiment_name} experiment', f'ml/experiments/{experiment.experiment_id}', anchor=False) return experiment @@ -62,7 +63,7 @@ def create( @fixture -def make_model(ws, make_random) -> Generator[Callable[..., GetModelResponse], None, None]: +def make_model(ws, make_random, watchdog_remove_after) -> Generator[Callable[..., GetModelResponse], None, None]: """ Returns a function to create Databricks Models and clean them up after the test. The function returns a `databricks.sdk.service.ml.GetModelResponse` object. 
@@ -88,7 +89,7 @@ def test_models(make_group, make_model, make_registered_model_permissions): def create(*, model_name: str | None = None, **kwargs) -> GetModelResponse: if model_name is None: model_name = f"dummy-{make_random(4)}" - remove_after_tag = ModelTag(key="RemoveAfter", value=get_test_purge_time()) + remove_after_tag = ModelTag(key="RemoveAfter", value=watchdog_remove_after) if 'tags' not in kwargs: kwargs["tags"] = [remove_after_tag] else: @@ -101,7 +102,7 @@ def create(*, model_name: str | None = None, **kwargs) -> GetModelResponse: @fixture -def make_serving_endpoint(ws, make_random, make_model): +def make_serving_endpoint(ws, make_random, make_model, watchdog_remove_after): """ Returns a function to create Databricks Serving Endpoints and clean them up after the test. The function returns a `databricks.sdk.service.serving.ServingEndpointDetailed` object. @@ -136,7 +137,7 @@ def create() -> Wait[ServingEndpointDetailed]: ) ] ), - tags=[EndpointTag(key="RemoveAfter", value=get_test_purge_time())], + tags=[EndpointTag(key="RemoveAfter", value=watchdog_remove_after)], ) return endpoint diff --git a/src/databricks/labs/pytester/fixtures/notebooks.py b/src/databricks/labs/pytester/fixtures/notebooks.py index 4c4bc51..e7e7aa6 100644 --- a/src/databricks/labs/pytester/fixtures/notebooks.py +++ b/src/databricks/labs/pytester/fixtures/notebooks.py @@ -9,14 +9,13 @@ from databricks.sdk.service.workspace import Language, ImportFormat, RepoInfo from databricks.sdk import WorkspaceClient -from databricks.labs.pytester.fixtures.baseline import factory, get_purge_suffix - +from databricks.labs.pytester.fixtures.baseline import factory logger = logging.getLogger(__name__) @fixture -def make_notebook(ws, make_random) -> Generator[WorkspacePath, None, None]: +def make_notebook(ws, make_random, watchdog_purge_suffix) -> Generator[WorkspacePath, None, None]: """ Returns a function to create Databricks Notebooks and clean them up after the test. The function returns [`os.PathLike` object](https://github.com/databrickslabs/blueprint?tab=readme-ov-file#python-native-pathlibpath-like-interfaces). @@ -45,7 +44,7 @@ def create( overwrite: bool = False, ) -> WorkspacePath: if path is None: - path = f"/Users/{ws.current_user.me().user_name}/dummy-{make_random(4)}-{get_purge_suffix()}" + path = f"/Users/{ws.current_user.me().user_name}/dummy-{make_random(4)}-{watchdog_purge_suffix}" elif isinstance(path, Path): path = str(path) if content is None: @@ -60,7 +59,7 @@ def create( @fixture -def make_directory(ws: WorkspaceClient, make_random) -> Generator[WorkspacePath, None, None]: +def make_directory(ws: WorkspaceClient, make_random, watchdog_purge_suffix) -> Generator[WorkspacePath, None, None]: """ Returns a function to create Databricks Workspace Folders and clean them up after the test. The function returns [`os.PathLike` object](https://github.com/databrickslabs/blueprint?tab=readme-ov-file#python-native-pathlibpath-like-interfaces). 
@@ -81,7 +80,7 @@ def test_creates_some_folder_with_a_notebook(make_directory, make_notebook): def create(*, path: str | Path | None = None) -> WorkspacePath: if path is None: - path = f"~/dummy-{make_random(4)}-{get_purge_suffix()}" + path = f"~/dummy-{make_random(4)}-{watchdog_purge_suffix}" workspace_path = WorkspacePath(ws, path).expanduser() workspace_path.mkdir(exist_ok=True) logger.info(f"Created folder: {workspace_path.as_uri()}") @@ -91,7 +90,7 @@ def create(*, path: str | Path | None = None) -> WorkspacePath: @fixture -def make_repo(ws, make_random) -> Generator[RepoInfo, None, None]: +def make_repo(ws, make_random, watchdog_purge_suffix) -> Generator[RepoInfo, None, None]: """ Returns a function to create Databricks Repos and clean them up after the test. The function returns a `databricks.sdk.service.workspace.RepoInfo` object. @@ -110,7 +109,7 @@ def test_repo(make_repo): def create(*, url=None, provider=None, path=None, **kwargs) -> RepoInfo: if path is None: - path = f"/Repos/{ws.current_user.me().user_name}/sdk-{make_random(4)}-{get_purge_suffix()}" + path = f"/Repos/{ws.current_user.me().user_name}/sdk-{make_random(4)}-{watchdog_purge_suffix}" if url is None: url = "https://github.com/shreyas-goenka/empty-repo.git" if provider is None: diff --git a/src/databricks/labs/pytester/fixtures/permissions.py b/src/databricks/labs/pytester/fixtures/permissions.py index ca57b3e..7240a5e 100644 --- a/src/databricks/labs/pytester/fixtures/permissions.py +++ b/src/databricks/labs/pytester/fixtures/permissions.py @@ -371,7 +371,7 @@ def _path(ws, path): make_dashboard_permissions = pytest.fixture( _make_redash_permissions_factory( "dashboard", - "sql/dashboards", + "dashboards", [ PermissionLevel.CAN_EDIT, PermissionLevel.CAN_RUN, @@ -384,7 +384,7 @@ def _path(ws, path): make_alert_permissions = pytest.fixture( _make_redash_permissions_factory( "alert", - "sql/alerts", + "alerts", [ PermissionLevel.CAN_EDIT, PermissionLevel.CAN_RUN, @@ -397,7 +397,7 @@ def _path(ws, path): make_query_permissions = pytest.fixture( _make_redash_permissions_factory( "query", - "sql/queries", + "queries", [ PermissionLevel.CAN_EDIT, PermissionLevel.CAN_RUN, diff --git a/src/databricks/labs/pytester/fixtures/plugin.py b/src/databricks/labs/pytester/fixtures/plugin.py index 392f3fa..5d7e2a6 100644 --- a/src/databricks/labs/pytester/fixtures/plugin.py +++ b/src/databricks/labs/pytester/fixtures/plugin.py @@ -3,12 +3,10 @@ from databricks.labs.pytester.fixtures.baseline import ( ws, make_random, - sql_backend, - sql_exec, - sql_fetch_all, product_info, log_workspace_link, ) +from databricks.labs.pytester.fixtures.sql import sql_backend, sql_exec, sql_fetch_all from databricks.labs.pytester.fixtures.compute import ( make_instance_pool, make_job, @@ -49,16 +47,19 @@ make_feature_table_permissions, ) from databricks.labs.pytester.fixtures.secrets import make_secret_scope, make_secret_scope_acl -from databricks.labs.pytester.fixtures.wheel import workspace_library -from databricks.labs.pytester.fixtures.environment import debug_env, debug_env_name, env_or_skip +from databricks.labs.pytester.fixtures.environment import debug_env, debug_env_name, env_or_skip, is_in_debug from databricks.labs.pytester.fixtures.ml import make_experiment, make_model, make_serving_endpoint from databricks.labs.pytester.fixtures.redash import make_query +from databricks.labs.pytester.fixtures.watchdog import watchdog_remove_after, watchdog_purge_suffix __all__ = [ 'debug_env_name', 'debug_env', 'env_or_skip', 'ws', + 'sql_backend', + 
'sql_exec', + 'sql_fetch_all', 'make_random', 'make_instance_pool', 'make_instance_pool_permissions', @@ -90,15 +91,11 @@ 'make_table', 'make_storage_credential', 'product_info', - 'sql_backend', - 'sql_exec', - 'sql_fetch_all', 'make_model', 'make_experiment', 'make_experiment_permissions', 'make_warehouse_permissions', 'make_lakeview_dashboard_permissions', - 'workspace_library', 'log_workspace_link', 'make_dashboard_permissions', 'make_alert_permissions', @@ -108,6 +105,9 @@ 'make_serving_endpoint', 'make_serving_endpoint_permissions', 'make_feature_table_permissions', + 'watchdog_remove_after', + 'watchdog_purge_suffix', + 'is_in_debug', ] diff --git a/src/databricks/labs/pytester/fixtures/redash.py b/src/databricks/labs/pytester/fixtures/redash.py index bad91be..9f55819 100644 --- a/src/databricks/labs/pytester/fixtures/redash.py +++ b/src/databricks/labs/pytester/fixtures/redash.py @@ -4,11 +4,13 @@ from databricks.sdk.errors import DatabricksError from databricks.sdk.service.sql import LegacyQuery -from databricks.labs.pytester.fixtures.baseline import get_purge_suffix, factory +from databricks.labs.pytester.fixtures.baseline import factory @fixture -def make_query(ws, make_table, make_random, log_workspace_link) -> Generator[LegacyQuery, None, None]: +def make_query( + ws, make_table, make_random, log_workspace_link, watchdog_purge_suffix +) -> Generator[LegacyQuery, None, None]: """ Create a query and remove it after the test is done. Returns the `databricks.sdk.service.sql.LegacyQuery` object. @@ -33,7 +35,7 @@ def test_permissions_for_redash( def create() -> LegacyQuery: table = make_table() - query_name = f"dummy_query_Q{make_random(4)}_{get_purge_suffix()}" + query_name = f"dummy_query_Q{make_random(4)}_{watchdog_purge_suffix}" query = ws.queries_legacy.create( name=query_name, description="TEST QUERY FOR UCX", diff --git a/src/databricks/labs/pytester/fixtures/sql.py b/src/databricks/labs/pytester/fixtures/sql.py new file mode 100644 index 0000000..80bce21 --- /dev/null +++ b/src/databricks/labs/pytester/fixtures/sql.py @@ -0,0 +1,31 @@ +from functools import partial + +from pytest import fixture +from databricks.labs.lsql.backends import StatementExecutionBackend + + +@fixture +def sql_backend(ws, env_or_skip) -> StatementExecutionBackend: + """ + Create and provide a SQL backend for executing statements. + + Requires the environment variable `DATABRICKS_WAREHOUSE_ID` to be set. + """ + warehouse_id = env_or_skip("DATABRICKS_WAREHOUSE_ID") + return StatementExecutionBackend(ws, warehouse_id) + + +@fixture +def sql_exec(sql_backend): + """ + Execute SQL statement and don't return any results. + """ + return partial(sql_backend.execute) + + +@fixture +def sql_fetch_all(sql_backend): + """ + Fetch all rows from a SQL statement. 
+ """ + return partial(sql_backend.fetch) diff --git a/src/databricks/labs/pytester/fixtures/unwrap.py b/src/databricks/labs/pytester/fixtures/unwrap.py index 1b67a9c..e64591f 100644 --- a/src/databricks/labs/pytester/fixtures/unwrap.py +++ b/src/databricks/labs/pytester/fixtures/unwrap.py @@ -27,6 +27,8 @@ def __init__(self): 'sql_backend': MockBackend(), 'make_random': self.make_random, 'env_or_skip': self.env_or_skip, + 'watchdog_remove_after': '2024091313', + 'watchdog_purge_suffix': 'XXXXX', } def __getitem__(self, name: str): @@ -96,7 +98,7 @@ def _bfs_call_context(fn: Callable) -> Generator: _bfs_call_context(some) result = ctx[some.__name__](**kwargs) - for generator in drains: + for generator in reversed(drains): try: # drain the generator and call cleanup next(generator) except StopIteration: diff --git a/src/databricks/labs/pytester/fixtures/watchdog.py b/src/databricks/labs/pytester/fixtures/watchdog.py new file mode 100644 index 0000000..4f5e402 --- /dev/null +++ b/src/databricks/labs/pytester/fixtures/watchdog.py @@ -0,0 +1,26 @@ +from datetime import timedelta, datetime, timezone +from pytest import fixture + +TEST_RESOURCE_PURGE_TIMEOUT = timedelta(hours=1) + + +@fixture +def watchdog_remove_after() -> str: + """ + Purge time for test objects, representing the (UTC-based) hour from which objects may be purged. + """ + # Note: this code is duplicated in the workflow installer (WorkflowsDeployment) so that it can avoid the + # transitive pytest deployment from this module. + now = datetime.now(timezone.utc) + purge_deadline = now + TEST_RESOURCE_PURGE_TIMEOUT + # Round UP to the next hour boundary: that is when resources will be deleted. + purge_hour = purge_deadline + (datetime.min.replace(tzinfo=timezone.utc) - purge_deadline) % timedelta(hours=1) + return purge_hour.strftime("%Y%m%d%H") + + +@fixture +def watchdog_purge_suffix(watchdog_remove_after) -> str: + """ + HEX-encoded purge time suffix for test objects. 
+ """ + return f'ra{int(watchdog_remove_after):x}' diff --git a/src/databricks/labs/pytester/fixtures/wheel.py b/src/databricks/labs/pytester/fixtures/wheel.py deleted file mode 100644 index 41e4c3f..0000000 --- a/src/databricks/labs/pytester/fixtures/wheel.py +++ /dev/null @@ -1,78 +0,0 @@ -import os -import shutil -import subprocess -import sys -from pathlib import Path -from typing import Optional - -import pytest -from databricks.sdk.service.workspace import ImportFormat - - -# pylint: disable=consider-alternative-union-syntax - - -def find_dir_with_leaf(folder: Path, leaf: str) -> Optional[Path]: - root = folder.root - while str(folder.absolute()) != root: - if (folder / leaf).exists(): - return folder - folder = folder.parent - return None - - -def find_project_root(folder: Path) -> Optional[Path]: - for leaf in ('pyproject.toml', 'setup.py'): - root = find_dir_with_leaf(folder, leaf) - if root is not None: - return root - return None - - -def build_wheel_in(project_path: Path, out_path: Path) -> Path: - try: - subprocess.run( - [sys.executable, "-m", "build", "--wheel", "--outdir", out_path.absolute(), project_path.absolute()], - capture_output=True, - check=True, - ) - except subprocess.CalledProcessError as e: - if e.stderr is not None: - sys.stderr.write(e.stderr.decode()) - raise RuntimeError(e.output.decode().strip()) from None - - found_wheels = list(out_path.glob("*.whl")) - if not found_wheels: - msg = f"cannot find *.whl in {out_path}" - raise RuntimeError(msg) - if len(found_wheels) > 1: - conflicts = ", ".join(str(whl) for whl in found_wheels) - msg = f"more than one wheel match: {conflicts}" - raise RuntimeError(msg) - wheel_file = found_wheels[0] - - return wheel_file - - -@pytest.fixture -def fresh_local_wheel_file(tmp_path) -> Path: - project_root = find_project_root(Path(os.getcwd())) - build_root = tmp_path / fresh_local_wheel_file.__name__ - shutil.copytree(project_root, build_root) # type: ignore - - return build_wheel_in(build_root, tmp_path / 'dist') - - -@pytest.fixture -def workspace_library(ws, fresh_local_wheel_file, make_random): - my_user = ws.current_user.me().user_name - workspace_folder = f"/Users/{my_user}/wheels/{make_random(10)}" - ws.workspace.mkdirs(workspace_folder) - - wheel_path = f"{workspace_folder}/{fresh_local_wheel_file.name}" - with fresh_local_wheel_file.open("rb") as f: - ws.workspace.upload(wheel_path, f, format=ImportFormat.AUTO) - - yield f'/Workspace/{wheel_path}' - - ws.workspace.delete(workspace_folder, recursive=True) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 0000000..b34e556 --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1 @@ +pytest_plugins = ['pytester'] \ No newline at end of file diff --git a/tests/unit/fixtures/.env b/tests/unit/fixtures/.env new file mode 100644 index 0000000..5548861 --- /dev/null +++ b/tests/unit/fixtures/.env @@ -0,0 +1 @@ +TEST_FOO=BAR \ No newline at end of file diff --git a/tests/unit/fixtures/test_baseline.py b/tests/unit/fixtures/test_baseline.py index b567a1e..f081148 100644 --- a/tests/unit/fixtures/test_baseline.py +++ b/tests/unit/fixtures/test_baseline.py @@ -5,7 +5,8 @@ from databricks.sdk import WorkspaceClient from databricks.sdk.service.sql import StatementResponse, StatementState, StatementStatus -from databricks.labs.pytester.fixtures.baseline import ws, log_workspace_link, sql_backend +from databricks.labs.pytester.fixtures.baseline import ws, log_workspace_link +from databricks.labs.pytester.fixtures.sql import sql_backend def test_ws() 
-> None: diff --git a/tests/unit/fixtures/test_catalog.py b/tests/unit/fixtures/test_catalog.py index b7cec68..39ad8a5 100644 --- a/tests/unit/fixtures/test_catalog.py +++ b/tests/unit/fixtures/test_catalog.py @@ -1,7 +1,7 @@ -from databricks.sdk.service.catalog import TableInfo, TableType, DataSourceFormat +from databricks.sdk.service.catalog import TableInfo, TableType, DataSourceFormat, FunctionInfo from databricks.labs.pytester.fixtures.unwrap import call_stateful -from databricks.labs.pytester.fixtures.catalog import make_table +from databricks.labs.pytester.fixtures.catalog import make_table, make_udf, make_catalog, make_storage_credential def test_make_table_no_args(): @@ -21,6 +21,114 @@ def test_make_table_no_args(): assert ctx['sql_backend'].queries == [ 'CREATE SCHEMA hive_metastore.dummy_srandom WITH DBPROPERTIES (RemoveAfter=2024091313)', "CREATE TABLE hive_metastore.dummy_srandom.ucx_trandom (id INT, value STRING) TBLPROPERTIES ( 'RemoveAfter' = '2024091313' )", + 'DROP TABLE IF EXISTS hive_metastore.dummy_srandom.ucx_trandom', + 'DROP SCHEMA IF EXISTS hive_metastore.dummy_srandom CASCADE', + ] + + +def test_make_view(): + ctx, table_info = call_stateful(make_table, view=True, ctas='SELECT 1') + + assert table_info == TableInfo( + catalog_name='hive_metastore', + schema_name='dummy_srandom', + name='ucx_trandom', + table_type=TableType.VIEW, + full_name='hive_metastore.dummy_srandom.ucx_trandom', + properties={'RemoveAfter': '2024091313'}, + view_definition='SELECT 1', + ) + + assert ctx['sql_backend'].queries == [ + 'CREATE SCHEMA hive_metastore.dummy_srandom WITH DBPROPERTIES (RemoveAfter=2024091313)', + "CREATE VIEW hive_metastore.dummy_srandom.ucx_trandom AS SELECT 1", + "ALTER VIEW hive_metastore.dummy_srandom.ucx_trandom SET TBLPROPERTIES ( 'RemoveAfter' = '2024091313' )", + 'DROP TABLE IF EXISTS hive_metastore.dummy_srandom.ucx_trandom', + 'DROP SCHEMA IF EXISTS hive_metastore.dummy_srandom CASCADE', + ] + + +def test_make_external_table(): + ctx, table_info = call_stateful(make_table, non_delta=True, columns=[('id', 'INT'), ('value', 'STRING')]) + + assert table_info == TableInfo( + catalog_name='hive_metastore', + schema_name='dummy_srandom', + name='ucx_trandom', + table_type=TableType.EXTERNAL, + data_source_format=DataSourceFormat.JSON, + full_name='hive_metastore.dummy_srandom.ucx_trandom', + storage_location='dbfs:/tmp/ucx_test_RANDOM', + properties={'RemoveAfter': '2024091313'}, + ) + + ctx['log_workspace_link'].assert_called_with( + 'hive_metastore.dummy_srandom.ucx_trandom schema', + 'explore/data/hive_metastore/dummy_srandom/ucx_trandom', + ) + + assert ctx['sql_backend'].queries == [ + 'CREATE SCHEMA hive_metastore.dummy_srandom WITH DBPROPERTIES (RemoveAfter=2024091313)', + 'CREATE TABLE hive_metastore.dummy_srandom.ucx_trandom USING json location ' + "'dbfs:/tmp/ucx_test_RANDOM' as SELECT CAST(calories_burnt AS INT) AS `id`, " + 'CAST(device_id AS STRING) AS `value` FROM ' + 'JSON.`dbfs:/databricks-datasets/iot-stream/data-device`', + "ALTER TABLE hive_metastore.dummy_srandom.ucx_trandom SET TBLPROPERTIES ( 'RemoveAfter' = '2024091313' )", + 'DROP TABLE IF EXISTS hive_metastore.dummy_srandom.ucx_trandom', 'DROP SCHEMA IF EXISTS hive_metastore.dummy_srandom CASCADE', + ] + + +def test_make_table_custom_schema(): + ctx, table_info = call_stateful(make_table, columns=[('a', 'INT'), ('b', 'STRING')]) + + assert table_info == TableInfo( + catalog_name='hive_metastore', + schema_name='dummy_srandom', + name='ucx_trandom', + table_type=TableType.MANAGED, + 
data_source_format=DataSourceFormat.DELTA, + full_name='hive_metastore.dummy_srandom.ucx_trandom', + storage_location='dbfs:/user/hive/warehouse/dummy_srandom/ucx_trandom', + properties={'RemoveAfter': '2024091313'}, + ) + + assert ctx['sql_backend'].queries == [ + 'CREATE SCHEMA hive_metastore.dummy_srandom WITH DBPROPERTIES (RemoveAfter=2024091313)', + "CREATE TABLE hive_metastore.dummy_srandom.ucx_trandom (`a` INT, `b` STRING) TBLPROPERTIES ( 'RemoveAfter' = '2024091313' )", 'DROP TABLE IF EXISTS hive_metastore.dummy_srandom.ucx_trandom', + 'DROP SCHEMA IF EXISTS hive_metastore.dummy_srandom CASCADE', + ] + + +def test_make_catalog(): + ctx, _ = call_stateful(make_catalog) + + ctx['ws'].catalogs.get.assert_called_with('dummy_crandom') + + assert ctx['sql_backend'].queries == ['CREATE CATALOG dummy_crandom'] + + +def test_make_udf(): + ctx, fn_info = call_stateful(make_udf) + + assert fn_info == FunctionInfo( + catalog_name='hive_metastore', + schema_name='dummy_srandom', + name='ucx_trandom', + full_name='hive_metastore.dummy_srandom.ucx_trandom', + ) + + assert ctx['sql_backend'].queries == [ + 'CREATE SCHEMA hive_metastore.dummy_srandom WITH DBPROPERTIES (RemoveAfter=2024091313)', + 'CREATE FUNCTION hive_metastore.dummy_srandom.ucx_trandom(x INT) RETURNS ' + 'FLOAT CONTAINS SQL DETERMINISTIC RETURN 0;', + 'DROP FUNCTION IF EXISTS hive_metastore.dummy_srandom.ucx_trandom', + 'DROP SCHEMA IF EXISTS hive_metastore.dummy_srandom CASCADE', ] + + +def test_storage_credential(): + ctx, fn_info = call_stateful(make_storage_credential, credential_name='abc') + assert ctx is not None + assert fn_info is not None diff --git a/tests/unit/fixtures/test_compute.py b/tests/unit/fixtures/test_compute.py new file mode 100644 index 0000000..c443739 --- /dev/null +++ b/tests/unit/fixtures/test_compute.py @@ -0,0 +1,45 @@ +from databricks.labs.pytester.fixtures.compute import ( + make_cluster_policy, + make_cluster, + make_instance_pool, + make_job, + make_pipeline, + make_warehouse, +) +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_cluster_policy_no_args(): + ctx, cluster_policy = call_stateful(make_cluster_policy) + assert ctx is not None + assert cluster_policy is not None + + +def test_make_cluster_no_args(): + ctx, cluster = call_stateful(make_cluster) + assert ctx is not None + assert cluster is not None + + +def test_make_instance_pool_no_args(): + ctx, instance_pool = call_stateful(make_instance_pool) + assert ctx is not None + assert instance_pool is not None + + +def test_make_job_no_args(): + ctx, job = call_stateful(make_job) + assert ctx is not None + assert job is not None + + +def test_make_pipeline_no_args(): + ctx, pipeline = call_stateful(make_pipeline) + assert ctx is not None + assert pipeline is not None + + +def test_make_warehouse_no_args(): + ctx, warehouse = call_stateful(make_warehouse) + assert ctx is not None + assert warehouse is not None diff --git a/tests/unit/fixtures/test_iam.py b/tests/unit/fixtures/test_iam.py new file mode 100644 index 0000000..ad06cd4 --- /dev/null +++ b/tests/unit/fixtures/test_iam.py @@ -0,0 +1,14 @@ +from databricks.labs.pytester.fixtures.iam import make_user, make_group +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_user_no_args(): + ctx, user = call_stateful(make_user) + assert ctx is not None + assert user is not None + + +def test_make_group_no_args(): + ctx, group = call_stateful(make_group) + assert ctx is not None + assert group is not None diff --git 
a/tests/unit/fixtures/test_ml.py b/tests/unit/fixtures/test_ml.py new file mode 100644 index 0000000..804fc47 --- /dev/null +++ b/tests/unit/fixtures/test_ml.py @@ -0,0 +1,20 @@ +from databricks.labs.pytester.fixtures.ml import make_experiment, make_model, make_serving_endpoint +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_experiment_no_args(): + ctx, experiment = call_stateful(make_experiment) + assert ctx is not None + assert experiment is not None + + +def test_make_model_no_args(): + ctx, model = call_stateful(make_model) + assert ctx is not None + assert model is not None + + +def test_make_serving_endpoint_no_args(): + ctx, serving_endpoint = call_stateful(make_serving_endpoint) + assert ctx is not None + assert serving_endpoint is not None diff --git a/tests/unit/fixtures/test_notebooks.py b/tests/unit/fixtures/test_notebooks.py new file mode 100644 index 0000000..fdf6660 --- /dev/null +++ b/tests/unit/fixtures/test_notebooks.py @@ -0,0 +1,20 @@ +from databricks.labs.pytester.fixtures.notebooks import make_notebook, make_directory, make_repo +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_notebook_no_args(): + ctx, notebook = call_stateful(make_notebook) + assert ctx is not None + assert notebook is not None + + +def test_make_directory_no_args(): + ctx, directory = call_stateful(make_directory) + assert ctx is not None + assert directory is not None + + +def test_make_repo_no_args(): + ctx, repo = call_stateful(make_repo) + assert ctx is not None + assert repo is not None diff --git a/tests/unit/fixtures/test_permissions.py b/tests/unit/fixtures/test_permissions.py new file mode 100644 index 0000000..7a73c93 --- /dev/null +++ b/tests/unit/fixtures/test_permissions.py @@ -0,0 +1,24 @@ +from databricks.sdk.service.iam import PermissionLevel + +from databricks.labs.pytester.fixtures.permissions import make_cluster_permissions, make_query_permissions +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_cluster_permissions_no_args(): + ctx, cluster_permissions = call_stateful( + make_cluster_permissions, + object_id="dummy", + permission_level=PermissionLevel.CAN_MANAGE, + ) + assert ctx is not None + assert cluster_permissions is not None + + +def test_make_query_permissions_no_args(): + ctx, query_permissions = call_stateful( + make_query_permissions, + object_id="dummy", + permission_level=PermissionLevel.CAN_MANAGE, + ) + assert ctx is not None + assert query_permissions is not None diff --git a/tests/unit/fixtures/test_plugin.py b/tests/unit/fixtures/test_plugin.py new file mode 100644 index 0000000..fd8ba38 --- /dev/null +++ b/tests/unit/fixtures/test_plugin.py @@ -0,0 +1,49 @@ +INLINE = """ +import pytest + +from unittest.mock import create_autospec + +from databricks.sdk import WorkspaceClient +from databricks.sdk.service.sql import StatementStatus, StatementState, StatementResponse + + +@pytest.fixture +def ws(): # noqa: F811 + some = create_autospec(WorkspaceClient) # pylint: disable=mock-no-assign + some.statement_execution.execute_statement.return_value = StatementResponse( + status=StatementStatus(state=StatementState.SUCCEEDED) + ) + return some + +@pytest.fixture +def _is_in_debug() -> bool: + return True + +@pytest.fixture +def env_or_skip(): + def inner(n): + return n + return inner + +def test_some( + debug_env, + make_job, + make_directory, + make_repo, + make_model, + make_experiment, + make_serving_endpoint, + make_secret_scope, + sql_exec, +): + 
make_job() + sql_exec("SELECT 1") +""" + + +def test_a_thing(pytester): + pytester.makepyfile(INLINE) + result = pytester.runpytest() + result.assert_outcomes(passed=1) + + diff --git a/tests/unit/fixtures/test_redash.py b/tests/unit/fixtures/test_redash.py new file mode 100644 index 0000000..15fa27e --- /dev/null +++ b/tests/unit/fixtures/test_redash.py @@ -0,0 +1,8 @@ +from databricks.labs.pytester.fixtures.redash import make_query +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_query_no_args(): + ctx, query = call_stateful(make_query) + assert ctx is not None + assert query is not None diff --git a/tests/unit/fixtures/test_secrets.py b/tests/unit/fixtures/test_secrets.py new file mode 100644 index 0000000..44164e6 --- /dev/null +++ b/tests/unit/fixtures/test_secrets.py @@ -0,0 +1,14 @@ +from databricks.labs.pytester.fixtures.secrets import make_secret_scope, make_secret_scope_acl +from databricks.labs.pytester.fixtures.unwrap import call_stateful + + +def test_make_secret_scope_no_args(): + ctx, secret_scope = call_stateful(make_secret_scope) + assert ctx is not None + assert secret_scope is not None + + +def test_make_secret_scope_acl_no_args(): + ctx, secret_scope_acl = call_stateful(make_secret_scope_acl, scope='foo', principal='bar', permission='read') + assert ctx is not None + assert secret_scope_acl is not None