diff --git a/cronjobs/src/commands/backport_records.py b/cronjobs/src/commands/backport_records.py
index 3e66481c..b0c64ae9 100644
--- a/cronjobs/src/commands/backport_records.py
+++ b/cronjobs/src/commands/backport_records.py
@@ -17,24 +17,15 @@ def parse_querystring(qs):
     }
 
 
-def backport_records(event, context, **kwargs):
+def backport_records():
     """Backport records creations, updates and deletions from one collection to another."""
-    server_url = event["server"]
-    source_auth = (
-        event.get("backport_records_source_auth")
-        or os.environ["BACKPORT_RECORDS_SOURCE_AUTH"]
-    )
-    dest_auth = event.get(
-        "backport_records_dest_auth",
-        os.getenv("BACKPORT_RECORDS_DEST_AUTH", source_auth),
-    )
+    SERVER_URL = os.environ["SERVER"]
+    source_auth = os.environ["BACKPORT_RECORDS_SOURCE_AUTH"]
+    dest_auth = os.getenv("BACKPORT_RECORDS_DEST_AUTH", source_auth)
 
     mappings = []
-    if mappings_env := (
-        event.get("backport_records_mappings")
-        or os.getenv("BACKPORT_RECORDS_MAPPINGS", "")
-    ):
+    if mappings_env := os.getenv("BACKPORT_RECORDS_MAPPINGS", ""):
         regexp = re.compile(
             r"^(?P<sbid>[^/]+)/(?P<scid>[^/\?]+)(?P<qs>\?.*)? -> (?P<dbid>[^/]+)/(?P<dcid>[^/]+)$"
         )
@@ -54,39 +45,23 @@ def backport_records(event, context, **kwargs):
         else:
             raise ValueError(f"Invalid syntax in line {entry}")
     else:
-        sbid = (
-            event.get("backport_records_source_bucket")
-            or os.environ["BACKPORT_RECORDS_SOURCE_BUCKET"]
-        )
-        scid = (
-            event.get("backport_records_source_collection")
-            or os.environ["BACKPORT_RECORDS_SOURCE_COLLECTION"]
-        )
-        filters_json = event.get("backport_records_source_filters") or os.getenv(
-            "BACKPORT_RECORDS_SOURCE_FILTERS", ""
-        )
+        sbid = os.environ["BACKPORT_RECORDS_SOURCE_BUCKET"]
+        scid = os.environ["BACKPORT_RECORDS_SOURCE_COLLECTION"]
+        filters_json = os.getenv("BACKPORT_RECORDS_SOURCE_FILTERS", "")
         filters_dict = json.loads(filters_json or "{}")
-        dbid = event.get(
-            "backport_records_dest_bucket",
-            os.getenv("BACKPORT_RECORDS_DEST_BUCKET", sbid),
-        )
-        dcid = event.get(
-            "backport_records_dest_collection",
-            os.getenv("BACKPORT_RECORDS_DEST_COLLECTION", scid),
-        )
+        dbid = os.getenv("BACKPORT_RECORDS_DEST_BUCKET", sbid)
+        dcid = os.getenv("BACKPORT_RECORDS_DEST_COLLECTION", scid)
 
         if sbid == dbid and scid == dcid:
             raise ValueError("Cannot copy records: destination is identical to source")
 
         mappings.append((sbid, scid, filters_dict, dbid, dcid))
 
-    safe_headers = event.get(
-        "safe_headers", config("SAFE_HEADERS", default=False, cast=bool)
-    )
+    safe_headers = config("SAFE_HEADERS", default=False, cast=bool)
 
     for mapping in mappings:
-        execute_backport(server_url, source_auth, dest_auth, safe_headers, *mapping)
+        execute_backport(SERVER_URL, source_auth, dest_auth, safe_headers, *mapping)
 
 
 def execute_backport(
diff --git a/cronjobs/src/commands/build_bundles.py b/cronjobs/src/commands/build_bundles.py
index 2973bf9d..253e50d2 100644
--- a/cronjobs/src/commands/build_bundles.py
+++ b/cronjobs/src/commands/build_bundles.py
@@ -19,7 +19,6 @@ from . import KintoClient, call_parallel, fetch_all_changesets, retry_timeout
 
-SERVER = os.getenv("SERVER")
 BUNDLE_MAX_SIZE_BYTES = int(os.getenv("BUNDLE_MAX_SIZE_BYTES", "20_000_000"))
 ENVIRONMENT = os.getenv("ENVIRONMENT", "local")
 REALM = os.getenv("REALM", "test")
@@ -114,7 +113,7 @@ def sync_cloud_storage(
             print(f"Deleted gs://{storage_bucket}/{blob.name}")
 
 
-def build_bundles(event, context):
+def build_bundles():
     """
     Build and upload bundles of changesets and attachments.
@@ -126,9 +125,8 @@ def build_bundles(event, context):
     - builds `{bid}--{cid}.zip` for each of them
     - send the bundles to the Cloud storage bucket
     """
-    rs_server = event.get("server") or SERVER
-
-    client = KintoClient(server_url=rs_server)
+    server = os.getenv("SERVER")
+    client = KintoClient(server_url=server)
 
     base_url = client.server_info()["capabilities"]["attachments"]["base_url"]
diff --git a/cronjobs/src/commands/expire_orphan_attachments.py b/cronjobs/src/commands/expire_orphan_attachments.py
index 5bec75e0..79a4b5d3 100644
--- a/cronjobs/src/commands/expire_orphan_attachments.py
+++ b/cronjobs/src/commands/expire_orphan_attachments.py
@@ -19,7 +19,7 @@
 BATCH_SIZE = int(os.getenv("BATCH_SIZE", "100"))
 
 
-def expire_orphan_attachments(event, context):
+def expire_orphan_attachments():
     """
     This cronjob will set the custom time field on orphaned attachments to
     the current time. We then have a retention policy on GCS bucket that will
diff --git a/cronjobs/src/commands/git_export.py b/cronjobs/src/commands/git_export.py
index 9d90ae9b..8c378183 100644
--- a/cronjobs/src/commands/git_export.py
+++ b/cronjobs/src/commands/git_export.py
@@ -97,7 +97,7 @@
 GIT_EMAIL = _email.rstrip(">")
 
 
-def git_export(event, context):
+def git_export():
     """
     Export Remote Settings data to a Git repository.
     """
@@ -499,7 +499,9 @@ def process_attachments(
         if existing := existing_attachments.get(location):
             existing_hash, existing_size = existing
         if existing_hash != hash or existing_size != size:
-            print(f"Bundle {path} {'is new' if existing_hash is None else 'has changed'}")
+            print(
+                f"Bundle {path} {'is new' if existing_hash is None else 'has changed'}"
+            )
             changed_attachments.append((hash, size, url))
 
     return changed_attachments, common_content
diff --git a/cronjobs/src/commands/purge_history.py b/cronjobs/src/commands/purge_history.py
index c5246aff..fc8a890b 100644
--- a/cronjobs/src/commands/purge_history.py
+++ b/cronjobs/src/commands/purge_history.py
@@ -15,7 +15,7 @@ def utcnow():
     return datetime.now(timezone.utc)
 
 
-def purge_history(*args, **kwargs):
+def purge_history():
     """Purge old history entries on a regular basis."""
     server_url = config("SERVER", default="http://localhost:8888/v1")
     auth = config("AUTH", default="admin:s3cr3t")
diff --git a/cronjobs/src/commands/refresh_signature.py b/cronjobs/src/commands/refresh_signature.py
index 8d94bf4b..fff9d2d4 100644
--- a/cronjobs/src/commands/refresh_signature.py
+++ b/cronjobs/src/commands/refresh_signature.py
@@ -39,18 +39,15 @@ def get_signed_source(server_info, change):
     }
 
 
-def refresh_signature(event, context, **kwargs):
+def refresh_signature():
     """Refresh the signatures of each collection."""
-    server_url = event["server"]
-    auth = event.get("refresh_signature_auth") or os.getenv("REFRESH_SIGNATURE_AUTH")
-    max_signature_age = int(
-        event.get("max_signature_age", os.getenv("MAX_SIGNATURE_AGE", 7))
-    )
+
+    server = os.environ["SERVER"]
+    auth = os.getenv("REFRESH_SIGNATURE_AUTH")
+    max_signature_age = int(os.getenv("MAX_SIGNATURE_AGE", 7))
 
     # Look at the collections in the changes endpoint.
-    bucket = event.get("bucket", "monitor")
-    collection = event.get("collection", "changes")
-    client = Client(server_url=server_url, bucket=bucket, collection=collection)
+    client = Client(server_url=server, bucket="monitor", collection="changes")
     print("Looking at %s: " % client.get_endpoint("collection"))
     changes = client.get_records()
@@ -67,7 +64,7 @@ def refresh_signature(event, context, **kwargs):
             continue
 
         client = Client(
-            server_url=server_url,
+            server_url=server,
             bucket=source["bucket"],
             collection=source["collection"],
             auth=auth,
diff --git a/cronjobs/src/main.py b/cronjobs/src/main.py
index eb47a557..f9e7730c 100755
--- a/cronjobs/src/main.py
+++ b/cronjobs/src/main.py
@@ -65,12 +65,7 @@ def white_bold(s):
 )
 
 
-def run(command, event=None, context=None):
-    if event is None:
-        event = {"server": SERVER_URL}
-    if context is None:
-        context = {"sentry_sdk": sentry_sdk}
-
+def run(command):
     if isinstance(command, (str,)):
         # Import the command module and returns its main function.
         mod = importlib.import_module(f"commands.{command}")
@@ -82,10 +77,10 @@ def run(command, event=None, context=None):
 
     # See https://docs.sentry.io/platforms/python/guides/gcp-functions/
     # Option to test failure to test Sentry integration.
-    if event.get("force_fail") or os.getenv("FORCE_FAIL"):
+    if os.getenv("FORCE_FAIL"):
         raise Exception("Found forced failure flag")
 
-    return command(event, context)
+    return command()
 
 
 def main(*args):
diff --git a/cronjobs/tests/commands/test_backport_records.py b/cronjobs/tests/commands/test_backport_records.py
index 1a09effe..ce821a34 100644
--- a/cronjobs/tests/commands/test_backport_records.py
+++ b/cronjobs/tests/commands/test_backport_records.py
@@ -1,5 +1,6 @@
 import json
 import unittest
+from unittest import mock
 
 import pytest
 import responses
@@ -8,7 +9,6 @@
 
 class TestRecordsBackport(unittest.TestCase):
     server = "https://fake-server.net/v1"
-    auth = ("foo", "bar")
     source_bid = "main"
     source_cid = "one"
     dest_bid = "main-workspace"
@@ -25,6 +25,21 @@ def setUp(self):
         )
         self.dest_records_uri = f"{self.dest_collection_uri}/records"
 
+        # Set environment variables.
+        self.patcher = mock.patch.dict(
+            "os.environ",
+            {
+                "SERVER": self.server,
+                "BACKPORT_RECORDS_SOURCE_AUTH": "foo:bar",
+                "BACKPORT_RECORDS_SOURCE_BUCKET": self.source_bid,
+                "BACKPORT_RECORDS_SOURCE_COLLECTION": self.source_cid,
+                "BACKPORT_RECORDS_DEST_BUCKET": self.dest_bid,
+                "BACKPORT_RECORDS_DEST_COLLECTION": self.dest_cid,
+            },
+        )
+        self.patcher.start()
+        self.addCleanup(self.patcher.stop)
+
     @responses.activate
     def test_missing_records_are_backported(self):
         responses.add(
@@ -52,18 +67,13 @@ def test_missing_records_are_backported(self):
         )
         responses.add(responses.POST, self.server + "/batch", json={"responses": []})
 
-        backport_records(
-            event={
-                "server": self.server,
-                "backport_records_source_auth": self.auth,
-                "backport_records_source_bucket": self.source_bid,
-                "backport_records_source_collection": self.source_cid,
-                "backport_records_source_filters": '{"min_age": 20}',
-                "backport_records_dest_bucket": self.dest_bid,
-                "backport_records_dest_collection": self.dest_cid,
+        with mock.patch.dict(
+            "os.environ",
+            {
+                "BACKPORT_RECORDS_SOURCE_FILTERS": '{"min_age": 20}',
             },
-            context=None,
-        )
+        ):
+            backport_records()
 
         assert responses.calls[0].request.method == "GET"
         assert responses.calls[0].request.url.endswith("?min_age=20")
@@ -106,18 +116,8 @@ def test_outdated_records_are_overwritten(self):
         )
         responses.add(responses.POST, self.server + "/batch", json={"responses": []})
 
-        backport_records(
-            event={
-                "server": self.server,
-                "safe_headers": True,
-                "backport_records_source_auth": self.auth,
-                "backport_records_source_bucket": self.source_bid,
-                "backport_records_source_collection": self.source_cid,
-                "backport_records_dest_bucket": self.dest_bid,
-                "backport_records_dest_collection": self.dest_cid,
-            },
-            context=None,
-        )
+        with mock.patch.dict("os.environ", {"SAFE_HEADERS": "true"}):
+            backport_records()
 
         assert responses.calls[3].request.method == "POST"
         posted_records = json.loads(responses.calls[3].request.body)
@@ -162,17 +162,7 @@ def test_nothing_to_do(self):
             },
         )
 
-        backport_records(
-            event={
-                "server": self.server,
-                "backport_records_source_auth": self.auth,
-                "backport_records_source_bucket": self.source_bid,
-                "backport_records_source_collection": self.source_cid,
-                "backport_records_dest_bucket": self.dest_bid,
-                "backport_records_dest_collection": self.dest_cid,
-            },
-            context=None,
-        )
+        backport_records()
 
         assert len(responses.calls) == 3
         assert responses.calls[0].request.method == "GET"
@@ -241,17 +231,7 @@ def test_pending_changes(self):
             },
         )
 
-        backport_records(
-            event={
-                "server": self.server,
-                "backport_records_source_auth": self.auth,
-                "backport_records_source_bucket": self.source_bid,
-                "backport_records_source_collection": self.source_cid,
-                "backport_records_dest_bucket": self.dest_bid,
-                "backport_records_dest_collection": self.dest_cid,
-            },
-            context=None,
-        )
+        backport_records()
 
         assert len(responses.calls) == 6
         assert responses.calls[0].request.method == "GET"
@@ -301,14 +281,15 @@
 )
 def test_correct_multiline_mappings(mapping_env, expected_calls):
     with unittest.mock.patch("commands.backport_records.execute_backport") as mocked:
-        backport_records(
-            event={
-                "server": "http://server",
-                "backport_records_source_auth": "admin:admin",
-                "backport_records_mappings": mapping_env,
+        with mock.patch.dict(
+            "os.environ",
+            {
+                "SERVER": "http://server",
+                "BACKPORT_RECORDS_SOURCE_AUTH": "admin:admin",
+                "BACKPORT_RECORDS_MAPPINGS": mapping_env,
             },
-            context=None,
-        )
+        ):
+            backport_records()
 
     for expected_params in expected_calls:
         mocked.assert_any_call(
             unittest.mock.ANY,
@@ -327,13 +308,14 @@ def test_correct_multiline_mappings(mapping_env, expected_calls):
     ],
 )
 def test_incorrect_multiline_mappings(mapping_env):
-    with unittest.mock.patch("commands.backport_records.execute_backport"):
-        with pytest.raises(expected_exception=ValueError, match="Invalid syntax"):
-            backport_records(
-                event={
-                    "server": "http://server",
-                    "backport_records_source_auth": "admin:admin",
-                    "backport_records_mappings": mapping_env,
-                },
-                context=None,
-            )
+    with mock.patch.dict(
+        "os.environ",
+        {
+            "SERVER": "http://server",
+            "BACKPORT_RECORDS_SOURCE_AUTH": "admin:admin",
+            "BACKPORT_RECORDS_MAPPINGS": mapping_env,
+        },
+    ):
+        with unittest.mock.patch("commands.backport_records.execute_backport"):
+            with pytest.raises(expected_exception=ValueError, match="Invalid syntax"):
+                backport_records()
diff --git a/cronjobs/tests/commands/test_build_bundles.py b/cronjobs/tests/commands/test_build_bundles.py
index c5693d1b..1696a967 100644
--- a/cronjobs/tests/commands/test_build_bundles.py
+++ b/cronjobs/tests/commands/test_build_bundles.py
@@ -164,9 +164,10 @@ def test_build_bundles(
     mock_write_zip,
     mock_write_json_mozlz4,
     mock_sync_cloud_storage,
+    monkeypatch,
 ):
     server_url = "http://testserver"
-    event = {"server": server_url}
+    monkeypatch.setenv("SERVER", server_url)
 
     responses.add(
         responses.GET,
@@ -282,7 +283,7 @@ def test_build_bundles(
         status=404,
     )
 
-    build_bundles(event, context={})
+    build_bundles()
 
     assert mock_write_zip.call_count == 1  # only one for the attachments
     calls = mock_write_zip.call_args_list
diff --git a/cronjobs/tests/commands/test_expire_orphan_attachments.py b/cronjobs/tests/commands/test_expire_orphan_attachments.py
index 6027c55e..da301251 100644
--- a/cronjobs/tests/commands/test_expire_orphan_attachments.py
+++ b/cronjobs/tests/commands/test_expire_orphan_attachments.py
@@ -81,7 +81,7 @@ def patch(self):
         ),  # already marked
     ]
 
-    expire_orphan_attachments(None, None)
+    expire_orphan_attachments()
 
     assert patched_blobs == {"folder1/orphan1.bin", "folder2/orphan2.png"}
 
@@ -108,4 +108,4 @@ def patch(self):
         MockBlob("folder1/orphan.bin"),
     ]
 
-    expire_orphan_attachments.expire_orphan_attachments(None, None)
+    expire_orphan_attachments.expire_orphan_attachments()
diff --git a/cronjobs/tests/commands/test_git_export.py b/cronjobs/tests/commands/test_git_export.py
index 50bd0eb5..8b7ff387 100644
--- a/cronjobs/tests/commands/test_git_export.py
+++ b/cronjobs/tests/commands/test_git_export.py
@@ -84,7 +84,7 @@ def mock_rs_server_content():
             },
             "config": {
                 "modified": "2024-01-01T00:00:00Z",
-            }
+            },
         },
     )
 
@@ -274,7 +274,7 @@ def test_clone_must_match_remote_url_if_dir_exists(mock_github_lfs):
    repo.remotes.create("origin", "https://example.com/repo.git")
 
     with pytest.raises(ValueError, match="does not match"):
-        git_export.git_export(None, None)
+        git_export.git_export()
 
 
 def test_remote_is_clone_if_dir_missing(
@@ -291,7 +291,7 @@ def _fake_clone(url, path, *args, **kwargs):
     ) as mock_clone:
         assert not os.path.exists(git_export.WORK_DIR)
 
-        git_export.git_export(None, None)
+        git_export.git_export()
 
     ((called_url, called_path, *_), _kwargs) = mock_clone.call_args
     assert called_url == git_export.GIT_REMOTE_URL
@@ -308,7 +308,7 @@ def test_repo_sync_content_starts_from_scratch_if_no_previous_run(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     mock_git_fetch.assert_called_once()
     stdout = capsys.readouterr().out
@@ -357,11 +357,11 @@ def test_repo_sync_does_nothing_if_up_to_date(
     create_branch_with_empty_commit(repo, "v1/buckets/bid1")
     create_branch_with_empty_commit(repo, "v1/buckets/bid2")
 
-    git_export.git_export(None, None)
+    git_export.git_export()
     simulate_pushed(repo, mock_ls_remotes)
 
     capsys.readouterr()  # Clear previous output
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     stdout = capsys.readouterr().out
     assert "Found latest tag: 1700000000000" in stdout
@@ -385,12 +385,12 @@ def test_repo_sync_can_be_forced_even_if_up_to_date(
     create_branch_with_empty_commit(repo, "v1/buckets/bid1")
     create_branch_with_empty_commit(repo, "v1/buckets/bid2")
 
-    git_export.git_export(None, None)
+    git_export.git_export()
     simulate_pushed(repo, mock_ls_remotes)
 
     capsys.readouterr()  # Clear previous output
     git_export.FORCE = True
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     stdout = capsys.readouterr().out
     assert "No changes for common branch" in stdout
@@ -423,7 +423,7 @@ def test_repo_sync_content_uses_previous_run_to_fetch_changes(
         {"name": "refs/tags/v1/timestamps/common/1600000000000", "local": False}
     ]
 
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     stdout = capsys.readouterr().out
     assert "Found latest tag: 1600000000000" in stdout
@@ -472,7 +472,7 @@ def test_repo_sync_content_ignores_previous_run_if_forced(
     ]
 
     git_export.FORCE = True
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     stdout = capsys.readouterr().out
     assert "Found latest tag: 1600000000000. Ignoring (forced)" in stdout
@@ -489,7 +489,7 @@ def test_repo_sync_stores_server_info(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     blob = read_file(repo, "v1/common", "server-info.json")
     assert "capabilities" in blob.decode()
@@ -504,7 +504,7 @@ def test_repo_sync_stores_monitor_changes(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     blob = read_file(repo, "v1/common", "monitor-changes.json")
     assert '{"changes":[{"bucket":"bid1","collection":"cid1"' in blob.decode()
@@ -519,7 +519,7 @@ def test_repo_sync_stores_broadcasts(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     blob = read_file(repo, "v1/common", "broadcasts.json")
     assert "broadcasts/rs" in blob.decode()
@@ -534,7 +534,7 @@ def test_repo_sync_stores_cert_chains(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     blob = read_file(repo, "v1/common", "cert-chains/keys/123")
     assert "---CERTIFICATE---" in blob.decode()
@@ -549,7 +549,7 @@ def test_repo_sync_tags_common_branch(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     tags = [
         tag
@@ -568,7 +568,7 @@ def test_repo_sync_stores_collections_records_in_buckets_branches_with_tags(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     branches = [
         b for b in repo.listall_references() if b.startswith("refs/heads/v1/buckets/")
@@ -596,7 +596,7 @@ def test_repo_sync_stores_attachments_as_lfs_pointers(
     mock_github_lfs,
     mock_git_push,
 ):
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     rid2 = read_file(repo, "v1/common", "attachments/bid2/random-name.bin")
     assert "lfs" in rid2.decode()
@@ -656,7 +656,7 @@ def test_repo_syncs_attachment_bundles(
         body=b"fake bundle content",
     )
 
-    git_export.git_export(None, None)
+    git_export.git_export()
 
     bundle = read_file(repo, "v1/common", "attachments/bundles/bid1--cid1.zip")
     assert "lfs" in bundle.decode()
@@ -690,7 +690,7 @@ def test_attachment_bundles_is_skipped_if_no_attachment_in_changeset(
     )
 
     # Does not fail with 404 on "http://cdn.example.com/v1/attachments/bundles/bid1--cid1.zip"
-    git_export.git_export(None, None)
+    git_export.git_export()
 
 
 @responses.activate
@@ -712,7 +712,7 @@ def test_repo_syncs_deletes_attachments_if_flag_set(
         "http://cdn.example.com/v1/attachments/bid2/random-name.bin",
         body=b"a" * 42,
     )
-    git_export.git_export(None, None)
+    git_export.git_export()
     # First check that attachment exists in repo.
     blob = read_file(repo, "v1/common", "attachments/bid2/random-name.bin")
     assert "lfs" in blob.decode()
@@ -750,7 +750,7 @@ def test_repo_is_resetted_to_local_content_on_error(
     create_branch_with_empty_commit(repo, "v1/buckets/bid1")
     create_branch_with_empty_commit(repo, "v1/buckets/bid2")
 
-    git_export.git_export(None, None)
+    git_export.git_export()
     simulate_pushed(repo, mock_ls_remotes)
 
     responses.replace(
@@ -797,7 +797,7 @@ def test_repo_is_resetted_to_local_content_on_error(
     mock_github_lfs.side_effect = Exception("GitHub LFS error")
 
     with pytest.raises(Exception, match="GitHub LFS error"):
-        git_export.git_export(None, None)
+        git_export.git_export()
 
     stdout = capsys.readouterr().out
     assert "Error occurred: GitHub LFS error" in stdout
diff --git a/cronjobs/tests/commands/test_git_export_git_lfs.py b/cronjobs/tests/commands/test_git_export_git_lfs.py
index 0e99250c..4554a510 100644
--- a/cronjobs/tests/commands/test_git_export_git_lfs.py
+++ b/cronjobs/tests/commands/test_git_export_git_lfs.py
@@ -17,7 +17,7 @@
 )
 
 
-@pytest.fixture
+@pytest.fixture(autouse=True)
 def no_sleep(monkeypatch):
     monkeypatch.setattr(
         commands._git_export_lfs.time, "sleep", lambda s: None, raising=False
diff --git a/cronjobs/tests/commands/test_refresh_signature.py b/cronjobs/tests/commands/test_refresh_signature.py
index df81d8dd..2e0b3135 100644
--- a/cronjobs/tests/commands/test_refresh_signature.py
+++ b/cronjobs/tests/commands/test_refresh_signature.py
@@ -36,6 +36,18 @@ class TestSignatureRefresh(unittest.TestCase):
     server = "https://fake-server.net/v1"
     auth = ("foo", "bar")
 
+    def setUp(self):
+        self.patcher = mock.patch.dict(
+            "os.environ",
+            {
+                "SERVER": self.server,
+                "REFRESH_SIGNATURE_AUTH": "foo:bar",
+                "MAX_SIGNATURE_AGE": "7",
+            },
+        )
+        self.patcher.start()
+        self.addCleanup(self.patcher.stop)
+
     @responses.activate
     def test_skip_recently_signed(self):
         responses.add(
@@ -77,12 +89,7 @@ def test_skip_recently_signed(self):
 
         mocked.return_value = datetime(2019, 1, 20).replace(tzinfo=timezone.utc)
 
-        refresh_signature(
-            event={
-                "server": self.server,
-            },
-            context=None,
-        )
+        refresh_signature()
 
         patch_requests = [r for r in responses.calls if r.request.method == "PATCH"]
 
@@ -130,13 +137,8 @@ def test_force_refresh_with_max_age_zero(self):
             },
         )
 
-        refresh_signature(
-            event={
-                "server": self.server,
-                "max_signature_age": 0,
-            },
-            context=None,
-        )
+        with mock.patch.dict("os.environ", {"MAX_SIGNATURE_AGE": "0"}):
+            refresh_signature()
 
         patch_requests = [r for r in responses.calls if r.request.method == "PATCH"]
 
diff --git a/cronjobs/tests/test_main.py b/cronjobs/tests/test_main.py
index 4dffebcb..278466b7 100644
--- a/cronjobs/tests/test_main.py
+++ b/cronjobs/tests/test_main.py
@@ -34,6 +34,4 @@ def test_run_git_export(capsys):
     main.main("git_export")
 
     importlib_mock.assert_called_with("commands.git_export")
-    entrypoint.assert_called_with(
-        {"server": main.SERVER_URL}, {"sentry_sdk": main.sentry_sdk}
-    )
+    entrypoint.assert_called()
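
Note on the new calling convention: every cronjob entrypoint in this patch is now a zero-argument function that reads its configuration from environment variables, rather than from the GCP/Lambda-style `(event, context)` pair. A minimal sketch of a local run under the new convention (the values below are illustrative, and it assumes `cronjobs/src` is on `PYTHONPATH`, as in the test suite):

    import os

    # Configuration that used to travel in the `event` dict now comes from env vars.
    os.environ["SERVER"] = "http://localhost:8888/v1"
    os.environ["REFRESH_SIGNATURE_AUTH"] = "admin:s3cr3t"
    os.environ["MAX_SIGNATURE_AGE"] = "7"

    from commands.refresh_signature import refresh_signature

    refresh_signature()  # no (event, context) arguments anymore

The same pattern applies to the other commands: export SERVER plus the command-specific variables (BACKPORT_RECORDS_*, BATCH_SIZE, FORCE_FAIL, ...), then call the function with no arguments, or go through `main.run("refresh_signature")`, which now simply imports the command module and invokes it.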