Anon metrics #113

Merged
merged 5 commits, Oct 2, 2023

Changes from all commits
6 changes: 6 additions & 0 deletions README.md
@@ -219,6 +219,12 @@ repos:

See https://pre-commit.com/ for more...

## Disable anonymous metrics

We collect anonymous usage metrics to improve nbmake.

You can disable them by setting the environment variable `NBMAKE_METRICS=0`.

## Disable Nbmake

Implicitly:
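A note on the new README section: the plugin only reads `NBMAKE_METRICS` when the terminal summary runs, so exporting the variable in the shell or setting it early in `conftest.py` both work. A minimal sketch of the `conftest.py` route (not part of this PR):

```python
# conftest.py — hypothetical suite-wide opt-out; the plugin reads
# NBMAKE_METRICS via os.getenv at the end of the session, so setting it
# here, before any test runs, is early enough.
import os

os.environ.setdefault("NBMAKE_METRICS", "0")
```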
39 changes: 39 additions & 0 deletions src/nbmake/metrics.py
@@ -0,0 +1,39 @@
import json
import os
import platform
import sys
import time
import urllib.request


def submit_event():
"""We collect anonymous usage metrics to improve nbmake.

You can disable them by setting the environment variable NBMAKE_METRICS=0.
"""
    # Mixpanel project token: identifies the nbmake project, not a secret
    mixpanel_token = "8440a5d8fa0ec1d43b6bcaf76037fae7"
    url = "https://api-eu.mixpanel.com/track"

payload = [
{
"event": "Invocation",
"properties": {
"token": mixpanel_token,
"$os": platform.system(),
"time": int(time.time()),
"platform": platform.platform(),
"python": sys.version,
"ci": os.getenv("CI", False) and True,
},
}
]
headers = {
"accept": "text/plain",
"content-type": "application/json",
}
req = urllib.request.Request(
url, data=json.dumps(payload).encode("utf8"), headers=headers
)
    # short timeout keeps a slow or blocked network from stalling the session
    response = urllib.request.urlopen(req, timeout=1.0)

return response.read().decode("utf8")
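For context, Mixpanel's `/track` endpoint answers with a plain-text `"1"` when the event is accepted, which is why the tests below stub `submit_event` with `return_value="1"`. A hedged sketch of a direct call (in nbmake itself the only caller is the pytest hook, which discards failures):

```python
# Hypothetical manual smoke check; real callers must tolerate failure,
# since offline or firewalled environments will raise here.
from nbmake.metrics import submit_event

try:
    assert submit_event() == "1"  # "1" means Mixpanel accepted the event
except Exception:
    pass  # no network: metrics are best-effort by design
```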
18 changes: 17 additions & 1 deletion src/nbmake/pytest_plugin.py
@@ -1,12 +1,15 @@
import asyncio
import os
import sys
from pathlib import Path
from typing import Any, Optional

import nbmake.metrics

try:
from importlib.metadata import version
except ImportError:
    from importlib_metadata import version  # type: ignore

from .pytest_items import NotebookFile

@@ -65,3 +68,16 @@ def pytest_collect_file(path: str, parent: Any) -> Optional[Any]:
return NotebookFile.from_parent(parent, path=p)

return None


def pytest_terminal_summary(terminalreporter: Any, exitstatus: int, config: Any) -> None:
if not config.option.nbmake:
return

if os.getenv("NBMAKE_METRICS", "1") != "1":
return

    try:
        nbmake.metrics.submit_event()
    except Exception:
        # best-effort: a metrics failure must never break a test run
        pass
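The hook has three layers of protection: it is a no-op without `--nbmake`, a no-op when `NBMAKE_METRICS` is set to anything other than `"1"`, and it swallows any exception from the network call. A hedged sketch of exercising the first guard directly, using a stand-in config object (pytest supplies the real one at session end):

```python
from types import SimpleNamespace

from nbmake.pytest_plugin import pytest_terminal_summary

# Without --nbmake, config.option.nbmake is falsy and the hook returns
# immediately: no environment check, no network request.
config = SimpleNamespace(option=SimpleNamespace(nbmake=False))
pytest_terminal_summary(terminalreporter=None, exitstatus=0, config=config)
```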
47 changes: 46 additions & 1 deletion tests/test_pytest_plugin.py
@@ -1,8 +1,9 @@
from __future__ import print_function

import os
from importlib import import_module, invalidate_caches, reload
from pathlib import Path
from unittest.mock import patch

from nbformat import read
from pytest import ExitCode, Pytester
@@ -207,3 +208,47 @@ def test_when_not_json_then_correct_err_msg(pytester: Pytester, testdir2: Never)
assert longrepr is not None
assert "NBMAKE INTERNAL ERROR" not in longrepr.term
assert "json" in longrepr.term


def test_when_testing_nbs_then_submit_metrics(pytester: Pytester):
write_nb(failing_nb, Path(pytester.path) / "a.ipynb")

with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
hook_recorder = pytester.inline_run("--nbmake", "-v")
assert hook_recorder.ret == ExitCode.TESTS_FAILED
mock_method.assert_called_once()


def test_when_metrics_fail_then_ignore(pytester: Pytester):
    write_nb(passing_nb, Path(pytester.path) / "a.ipynb")

    # side_effect makes the stubbed submit_event raise; nbmake must swallow it
    with patch("nbmake.metrics.submit_event", side_effect=Exception("test")) as mock_method:

items, hook_recorder = pytester.inline_genitems("--nbmake")

assert hook_recorder.ret == ExitCode.OK
assert len(items) == 1
mock_method.assert_called_once()


def test_when_metrics_disabled_dont_log_metrics(pytester: Pytester):
write_nb(failing_nb, Path(pytester.path) / "a.ipynb")
# not thread-safe
os.environ["NBMAKE_METRICS"] = "0"

with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
hook_recorder = pytester.inline_run("--nbmake", "-v")
assert hook_recorder.ret == ExitCode.TESTS_FAILED
mock_method.assert_not_called()

del os.environ["NBMAKE_METRICS"]


def test_when_no_nbmake_flag_then_no_metrics(pytester: Pytester):
write_nb(failing_nb, Path(pytester.path) / "a.ipynb")
with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
hook_recorder = pytester.inline_run("-v")
assert hook_recorder.ret == ExitCode.NO_TESTS_COLLECTED
mock_method.assert_not_called()
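The `NBMAKE_METRICS` test above mutates `os.environ` by hand (hence its "not thread-safe" comment) and leaks the variable if an assertion fails before the `del`. pytest's built-in `monkeypatch` fixture scopes the change to the test automatically — a hedged alternative, not part of this PR, reusing the module's existing `write_nb` and `failing_nb` helpers:

```python
from pathlib import Path
from unittest.mock import patch

from pytest import ExitCode, MonkeyPatch, Pytester


def test_metrics_disabled_via_monkeypatch(
    pytester: Pytester, monkeypatch: MonkeyPatch
) -> None:
    # setenv is undone automatically when the test finishes, even on failure,
    # so no manual del os.environ[...] cleanup is needed
    monkeypatch.setenv("NBMAKE_METRICS", "0")
    write_nb(failing_nb, Path(pytester.path) / "a.ipynb")  # module helpers

    with patch("nbmake.metrics.submit_event", return_value="1") as mock_method:
        hook_recorder = pytester.inline_run("--nbmake", "-v")
        assert hook_recorder.ret == ExitCode.TESTS_FAILED
        mock_method.assert_not_called()
```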