From 93e0f3f22d926688d2004cb23cefc61bb916d104 Mon Sep 17 00:00:00 2001
From: Alex Remedios
Date: Mon, 2 Oct 2023 23:43:14 +0100
Subject: [PATCH] Anon metrics (#113)

---
 README.md                   |  6 +++++
 src/nbmake/metrics.py       | 39 ++++++++++++++++++++++++++++++
 src/nbmake/pytest_plugin.py | 18 +++++++++++++-
 tests/test_pytest_plugin.py | 47 ++++++++++++++++++++++++++++++++++++-
 4 files changed, 108 insertions(+), 2 deletions(-)
 create mode 100644 src/nbmake/metrics.py

diff --git a/README.md b/README.md
index 2961a6d..45eb3a8 100644
--- a/README.md
+++ b/README.md
@@ -219,6 +219,12 @@ repos:
 
 See https://pre-commit.com/ for more...
 
+## Disable anonymous metrics
+
+We collect anonymous usage metrics to improve nbmake.
+
+You can disable them by setting the environment variable `NBMAKE_METRICS=0`.
+
 ## Disable Nbmake
 
 Implicitly:
diff --git a/src/nbmake/metrics.py b/src/nbmake/metrics.py
new file mode 100644
index 0000000..b658bd7
--- /dev/null
+++ b/src/nbmake/metrics.py
@@ -0,0 +1,39 @@
+import json
+import os
+import platform
+import sys
+import time
+import urllib.request
+
+
+def submit_event():
+    """We collect anonymous usage metrics to improve nbmake.
+
+    You can disable them by setting the environment variable NBMAKE_METRICS=0.
+    """
+    mixpanel_token = "8440a5d8fa0ec1d43b6bcaf76037fae7"
+    url = "https://api-eu.mixpanel.com/track"
+
+    payload = [
+        {
+            "event": "Invocation",
+            "properties": {
+                "token": mixpanel_token,
+                "$os": platform.system(),
+                "time": int(time.time()),
+                "platform": platform.platform(),
+                "python": sys.version,
+                "ci": os.getenv("CI", False) and True,
+            },
+        }
+    ]
+    headers = {
+        "accept": "text/plain",
+        "content-type": "application/json",
+    }
+    req = urllib.request.Request(
+        url, data=json.dumps(payload).encode("utf8"), headers=headers
+    )
+    response = urllib.request.urlopen(req, timeout=1.0)
+
+    return response.read().decode("utf8")
diff --git a/src/nbmake/pytest_plugin.py b/src/nbmake/pytest_plugin.py
index 7e147b0..a7455bd 100644
--- a/src/nbmake/pytest_plugin.py
+++ b/src/nbmake/pytest_plugin.py
@@ -1,12 +1,15 @@
 import asyncio
+import os
 import sys
 from pathlib import Path
 from typing import Any, Optional
 
+import nbmake.metrics
+
 try:
     from importlib.metadata import version
 except ImportError:
-    from importlib_metadata import version
+    from importlib_metadata import version  # type: ignore
 
 from .pytest_items import NotebookFile
 
@@ -65,3 +68,16 @@ def pytest_collect_file(path: str, parent: Any) -> Optional[Any]:
         return NotebookFile.from_parent(parent, path=p)
 
     return None
+
+
+def pytest_terminal_summary(terminalreporter: Any, exitstatus: int, config: Any):
+    if not config.option.nbmake:
+        return
+
+    if os.getenv("NBMAKE_METRICS", "1") != "1":
+        return
+
+    try:
+        nbmake.metrics.submit_event()
+    except Exception:
+        pass
diff --git a/tests/test_pytest_plugin.py b/tests/test_pytest_plugin.py
index 2749b47..f8a1ddf 100644
--- a/tests/test_pytest_plugin.py
+++ b/tests/test_pytest_plugin.py
@@ -1,8 +1,9 @@
 from __future__ import print_function
 
 import os
-from importlib import import_module, reload
+from importlib import import_module, invalidate_caches, reload
 from pathlib import Path
+from unittest.mock import patch
 
 from nbformat import read
 from pytest import ExitCode, Pytester
@@ -207,3 +208,47 @@ def test_when_not_json_then_correct_err_msg(pytester: Pytester, testdir2: Never)
     assert longrepr is not None
     assert "NBMAKE INTERNAL ERROR" not in longrepr.term
     assert "json" in longrepr.term
+
+
+def test_when_testing_nbs_then_submit_metrics(pytester: Pytester):
+    write_nb(failing_nb, Path(pytester.path) / "a.ipynb")
+
+    with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
+        hook_recorder = pytester.inline_run("--nbmake", "-v")
+        assert hook_recorder.ret == ExitCode.TESTS_FAILED
+        mock_method.assert_called_once()
+
+
+def test_when_metrics_fail_then_ignore(pytester: Pytester):
+    write_nb(failing_nb, Path(pytester.path) / "a.ipynb")
+
+    with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
+        mock_method.side_effect = Exception("test")
+        write_nb(passing_nb, Path(pytester.path) / "a.ipynb")
+
+        items, hook_recorder = pytester.inline_genitems("--nbmake")
+
+        assert hook_recorder.ret == ExitCode.OK
+        assert len(items) == 1
+        mock_method.assert_called_once()
+
+
+def test_when_metrics_disabled_dont_log_metrics(pytester: Pytester):
+    write_nb(failing_nb, Path(pytester.path) / "a.ipynb")
+    # mutating os.environ is not thread-safe
+    os.environ["NBMAKE_METRICS"] = "0"
+
+    with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
+        hook_recorder = pytester.inline_run("--nbmake", "-v")
+        assert hook_recorder.ret == ExitCode.TESTS_FAILED
+        mock_method.assert_not_called()
+
+    del os.environ["NBMAKE_METRICS"]
+
+
+def test_when_no_nbmake_flag_then_no_metrics(pytester: Pytester):
+    write_nb(failing_nb, Path(pytester.path) / "a.ipynb")
+    with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
+        hook_recorder = pytester.inline_run("-v")
+        assert hook_recorder.ret == ExitCode.NO_TESTS_COLLECTED
+        mock_method.assert_not_called()
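
A minimal opt-out sketch (not part of the diff above): how a user would disable the anonymous metrics once this change is installed. Only the NBMAKE_METRICS=0 setting and the --nbmake flag come from the patch; invoking pytest through subprocess is just one convenient, illustrative way to set the variable for a single run without touching the parent environment.

    # Sketch only: disable nbmake's anonymous metrics for one pytest run.
    # NBMAKE_METRICS=0 and --nbmake are taken from the patch; the rest is
    # illustrative.
    import os
    import subprocess

    env = dict(os.environ, NBMAKE_METRICS="0")  # opt out of metric submission
    # With the variable set, pytest_terminal_summary() returns early and
    # nbmake.metrics.submit_event() is never called.
    subprocess.run(["pytest", "--nbmake", "-v"], env=env, check=False)

Exporting the same variable in a CI job's environment achieves the opt-out without any code.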