From 096589bd42d149ac5568a42762652fd8c598c5be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Guimar=C3=A3es?= Date: Wed, 22 Nov 2023 17:53:11 +0000 Subject: [PATCH] Troca do ElasticSearch pelo Opensearch MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Alterando a biblioteca do motor de busca no requirements.txt Mudanças feitas para acomodar a substituição do Elasticsearch pelo cliente Opensearch Arquivos do elasticsearch deletados Makefile adaptado para o Opensearch Pequenas correções ao Makefile e sample.env Pequenas correções --- Makefile | 37 ++-- README.md | 2 +- contrib/sample.env | 7 +- index/__init__.py | 2 +- index/{elasticsearch.py => opensearch.py} | 45 +++-- requirements.txt | 2 +- tests/__init__.py | 6 +- tests/{elasticsearch.py => opensearch.py} | 212 +++++++++++----------- 8 files changed, 160 insertions(+), 153 deletions(-) rename index/{elasticsearch.py => opensearch.py} (63%) rename tests/{elasticsearch.py => opensearch.py} (51%) diff --git a/Makefile b/Makefile index d5fc959..34e510d 100644 --- a/Makefile +++ b/Makefile @@ -21,10 +21,10 @@ POSTGRES_HOST ?= localhost POSTGRES_PORT ?= 5432 POSTGRES_IMAGE ?= docker.io/postgres:10 DATABASE_RESTORE_FILE ?= contrib/data/queridodiariodb.tar -# Elasticsearch info to run the tests -ELASTICSEARCH_PORT1 ?= 9200 -ELASTICSEARCH_PORT2 ?= 9300 -ELASTICSEARCH_CONTAINER_NAME ?= queridodiario-elasticsearch +# OpenSearch port info +OPENSEARCH_PORT1 ?= 9200 +OPENSEARCH_PORT2 ?= 9300 +OPENSEARCH_CONTAINER_NAME ?= queridodiario-opensearch APACHE_TIKA_CONTAINER_NAME ?= queridodiario-apache-tika-server run-command=(podman run --rm -ti --volume $(PWD):/mnt/code:rw \ @@ -86,11 +86,11 @@ destroy-pod: create-pod: destroy-pod podman pod create -p $(POSTGRES_PORT):$(POSTGRES_PORT) \ - -p $(ELASTICSEARCH_PORT1):$(ELASTICSEARCH_PORT1) \ - -p $(STORAGE_PORT):$(STORAGE_PORT) \ - --name $(POD_NAME) + -p $(OPENSEARCH_PORT1):$(OPENSEARCH_PORT1) \ + -p $(STORAGE_PORT):$(STORAGE_PORT) \ + 
--name $(POD_NAME) -prepare-test-env: create-pod storage apache-tika-server elasticsearch database +prepare-test-env: create-pod storage apache-tika-server opensearch database .PHONY: test test: prepare-test-env retest @@ -117,7 +117,7 @@ retest-main: .PHONY: retest-index retest-index: - $(call run-command, python -m unittest -f tests/elasticsearch.py) + $(call run-command, python -m unittest -f tests/opensearch.py) .PHONY: retest-tika retest-tika: @@ -200,7 +200,7 @@ set-run-variable-values: cp --no-clobber contrib/sample.env envvars || true $(eval POD_NAME=run-$(POD_NAME)) $(eval DATABASE_CONTAINER_NAME=run-$(DATABASE_CONTAINER_NAME)) - $(eval ELASTICSEARCH_CONTAINER_NAME=run-$(ELASTICSEARCH_CONTAINER_NAME)) + $(eval OPENSEARCH_CONTAINER_NAME=run-$(OPENSEARCH_CONTAINER_NAME)) .PHONY: sql sql: set-run-variable-values @@ -209,7 +209,7 @@ sql: set-run-variable-values $(POSTGRES_IMAGE) psql -h localhost -U $(POSTGRES_USER) $(POSTGRES_DB) .PHONY: setup -setup: set-run-variable-values create-pod storage apache-tika-server elasticsearch database +setup: set-run-variable-values create-pod storage apache-tika-server opensearch database .PHONY: re-run re-run: set-run-variable-values @@ -235,19 +235,20 @@ shell-database: set-run-variable-values podman exec -it $(DATABASE_CONTAINER_NAME) \ psql -h localhost -d $(POSTGRES_DB) -U $(POSTGRES_USER) -elasticsearch: stop-elasticsearch start-elasticsearch wait-elasticsearch +opensearch: stop-opensearch start-opensearch wait-opensearch -start-elasticsearch: +start-opensearch: podman run -d --rm -ti \ - --name $(ELASTICSEARCH_CONTAINER_NAME) \ + --name $(OPENSEARCH_CONTAINER_NAME) \ --pod $(POD_NAME) \ --env discovery.type=single-node \ - docker.io/elasticsearch:7.9.1 + --env plugins.security.ssl.http.enabled=false \ + docker.io/opensearchproject/opensearch:2.9.0 -stop-elasticsearch: - podman rm --force --ignore $(ELASTICSEARCH_CONTAINER_NAME) +stop-opensearch: + podman rm --force --ignore $(OPENSEARCH_CONTAINER_NAME) 
-wait-elasticsearch: +wait-opensearch: $(call wait-for, localhost:9200) .PHONY: publish-tag diff --git a/README.md b/README.md index 31e4c9a..85fefdb 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ select processed, count(1) from gazettes g group by processed; make re-run ``` - and see gazettes processed running the query above -- you can search using ElasticSearch +- you can search using OpenSearch on port 9200 ```console curl 'http://localhost:9200/querido-diario/_search' \ -H 'Content-Type: application/json' \ diff --git a/contrib/sample.env b/contrib/sample.env index 6cfe254..3b8ef93 100644 --- a/contrib/sample.env +++ b/contrib/sample.env @@ -11,9 +11,10 @@ POSTGRES_HOST=127.0.0.1 POSTGRES_PORT=5432 DATABASE_RESTORE_FILE=contrib/data/queridodiariodb.tar -ELASTICSEARCH_HOST=http://localhost:9200 -ELASTICSEARCH_INDEX=querido-diario - +OPENSEARCH_HOST=http://localhost:9200 +OPENSEARCH_INDEX=querido-diario +OPENSEARCH_USER=admin +OPENSEARCH_PASSWORD=admin DEBUG=1 APACHE_TIKA_SERVER=http://localhost:9998 diff --git a/index/__init__.py b/index/__init__.py index a7aec07..8a62430 100644 --- a/index/__init__.py +++ b/index/__init__.py @@ -1 +1 @@ -from .elasticsearch import create_index_interface +from .opensearch import create_index_interface diff --git a/index/elasticsearch.py b/index/opensearch.py similarity index 63% rename from index/elasticsearch.py rename to index/opensearch.py index cb967da..941e248 100644 --- a/index/elasticsearch.py +++ b/index/opensearch.py @@ -1,19 +1,19 @@ from typing import Dict, Iterable, List, Union import os -import elasticsearch +import opensearchpy from tasks import IndexInterface -class ElasticSearchInterface(IndexInterface): - def __init__(self, hosts: List, timeout: str = "30s", default_index: str = ""): - self._es = elasticsearch.Elasticsearch(hosts=hosts) +class OpenSearchInterface(IndexInterface): + def __init__(self, hosts: List, user: str = "admin", password: str = "admin", timeout: str = "30s", default_index: str = ""): + 
self._search_engine = opensearchpy.OpenSearch(hosts=hosts, http_auth=(user, password)) self._timeout = timeout self._default_index = default_index def index_exists(self, index_name: str) -> bool: - return self._es.indices.exists(index=index_name) + return self._search_engine.indices.exists(index=index_name) def is_valid_index_name(self, index_name: str) -> bool: return isinstance(index_name, str) and len(index_name) > 0 @@ -29,7 +29,7 @@ def create_index(self, index_name: str = "", body: Dict = {}) -> None: index_name = self.get_index_name(index_name) if self.index_exists(index_name): return - self._es.indices.create( + self._search_engine.indices.create( index=index_name, body=body, timeout=self._timeout, @@ -39,7 +39,7 @@ def refresh_index(self, index_name: str = "") -> None: index_name = self.get_index_name(index_name) if self.index_exists(index_name): return - self._es.indices.refresh( + self._search_engine.indices.refresh( index=index_name, ) @@ -51,23 +51,23 @@ def index_document( refresh: bool = False, ) -> None: index = self.get_index_name(index) - self._es.index(index=index, body=document, id=document_id, refresh=refresh) + self._search_engine.index(index=index, body=document, id=document_id, refresh=refresh) def search(self, query: Dict, index: str = "") -> Dict: index = self.get_index_name(index) - result = self._es.search(index=index, body=query, request_timeout=60) + result = self._search_engine.search(index=index, body=query, request_timeout=60) return result def analyze(self, text: str, field: str, index: str = "") -> Dict: index = self.get_index_name(index) - result = self._es.indices.analyze(body={"text": text, "field":field}, index=index) + result = self._search_engine.indices.analyze(body={"text": text, "field":field}, index=index) return result def paginated_search( self, query: Dict, index: str = "", keep_alive: str = "5m" ) -> Iterable[Dict]: index = self.get_index_name(index) - result = self._es.search( + result = self._search_engine.search( 
index=index, body=query, scroll=keep_alive, request_timeout=120 ) @@ -77,26 +77,31 @@ def paginated_search( while len(result["hits"]["hits"]) > 0: yield result scroll_id = result["_scroll_id"] - result = self._es.scroll( + result = self._search_engine.scroll( scroll_id=scroll_id, scroll=keep_alive, request_timeout=120 ) - self._es.clear_scroll(scroll_id=scroll_id) + self._search_engine.clear_scroll(scroll_id=scroll_id) -def get_elasticsearch_host(): - return os.environ["ELASTICSEARCH_HOST"] +def get_opensearch_host(): + return os.environ["OPENSEARCH_HOST"] -def get_elasticsearch_index(): - return os.environ["ELASTICSEARCH_INDEX"] +def get_opensearch_index(): + return os.environ["OPENSEARCH_INDEX"] +def get_opensearch_user(): + return os.environ["OPENSEARCH_USER"] + +def get_opensearch_password(): + return os.environ["OPENSEARCH_PASSWORD"] def create_index_interface() -> IndexInterface: - hosts = get_elasticsearch_host() + hosts = get_opensearch_host() if not isinstance(hosts, str) or len(hosts) == 0: raise Exception("Missing index hosts") - default_index_name = get_elasticsearch_index() + default_index_name = get_opensearch_index() if not isinstance(default_index_name, str) or len(default_index_name) == 0: raise Exception("Invalid index name") - return ElasticSearchInterface([hosts], default_index=default_index_name) + return OpenSearchInterface([hosts], get_opensearch_user(), get_opensearch_password(), default_index=default_index_name) diff --git a/requirements.txt b/requirements.txt index 92894c1..e7cd98a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ python-magic==0.4.18 boto3==1.22.6 psycopg2==2.8.6 botocore==1.25.6 -elasticsearch==7.17.3 +opensearch-py==2.3.2 requests==2.25.0 scikit-learn==1.0.2 sentence-transformers==2.2.0 diff --git a/tests/__init__.py b/tests/__init__.py index 86b5e16..1d2c663 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -17,8 +17,8 @@ from .main_tests import MainModuleTests -from .elasticsearch import ( - 
ElasticsearchBasicTests, +from .opensearch import ( + OpensearchBasicTests, IndexInterfaceFactoryFunctionTests, - ElasticsearchIntegrationTests, + OpensearchIntegrationTests, ) diff --git a/tests/elasticsearch.py b/tests/opensearch.py similarity index 51% rename from tests/elasticsearch.py rename to tests/opensearch.py index dc891da..f1bb112 100644 --- a/tests/elasticsearch.py +++ b/tests/opensearch.py @@ -3,9 +3,9 @@ from unittest.mock import patch, MagicMock import uuid -import elasticsearch +import opensearchpy -from index.elasticsearch import ElasticSearchInterface, create_index_interface +from index.opensearch import OpenSearchInterface, create_index_interface from tasks import IndexInterface @@ -13,8 +13,8 @@ class IndexInterfaceFactoryFunctionTests(TestCase): @patch.dict( "os.environ", { - "ELASTICSEARCH_HOST": "127.0.0.1", - "ELASTICSEARCH_INDEX": "index_name", + "OPENSEARCH_HOST": "127.0.0.1", + "OPENSEARCH_INDEX": "index_name", }, ) def test_create_index_interface_factory_method_with_valid_arguments(self): @@ -29,7 +29,7 @@ def test_index_interface_factory_method_failed_without_required_info(self): @patch.dict( "os.environ", { - "ELASTICSEARCH_INDEX": "index_name", + "OPENSEARCH_INDEX": "index_name", }, ) @expectedFailure @@ -39,7 +39,7 @@ def test_index_interface_factory_method_failed_with_no_hosts(self): @patch.dict( "os.environ", { - "ELASTICSEARCH_HOST": "127.0.0.1", + "OPENSEARCH_HOST": "127.0.0.1", }, ) @expectedFailure @@ -49,8 +49,8 @@ def test_create_index_interface_factory_method_with_no_index(self): @patch.dict( "os.environ", { - "ELASTICSEARCH_HOST": "127.0.0.1", - "ELASTICSEARCH_INDEX": "", + "OPENSEARCH_HOST": "127.0.0.1", + "OPENSEARCH_INDEX": "", }, ) @expectedFailure @@ -60,8 +60,8 @@ def test_create_index_interface_factory_method_with_empty_index(self): @patch.dict( "os.environ", { - "ELASTICSEARCH_HOST": "", - "ELASTICSEARCH_INDEX": "index_name", + "OPENSEARCH_HOST": "", + "OPENSEARCH_INDEX": "index_name", }, ) @expectedFailure @@ -69,7 
+69,7 @@ def test_create_index_interface_factory_method_with_empty_hosts(self): interface = create_index_interface() -class ElasticsearchBasicTests(TestCase): +class OpensearchBasicTests(TestCase): def setUp(self): document_checksum = str(uuid.uuid1()) self.fake_document = { @@ -89,148 +89,148 @@ setUp(self): "territory_name": "Gaspar", } - def test_elasticsearch_should_implement_index_interface(self): - self.assertIsInstance(ElasticSearchInterface([]), IndexInterface) + def test_opensearch_should_implement_index_interface(self): + self.assertIsInstance(OpenSearchInterface([], "admin", "admin"), IndexInterface) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_connection(self, elasticsearch_mock): - interface = ElasticSearchInterface(["127.0.0.1"]) - elasticsearch_mock.assert_called_once_with(hosts=["127.0.0.1"]) + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_connection(self, opensearch_mock): + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") + opensearch_mock.assert_called_once_with(hosts=["127.0.0.1"], http_auth=("admin", "admin")) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_index_creation_should_check_if_index_exists( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_index_creation_should_check_if_index_exists( + self, opensearch_mock ): - interface = ElasticSearchInterface(["127.0.0.1"]) - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock() + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock() interface.create_index("querido-diario") - interface._es.indices.exists.assert_called_once_with(index="querido-diario") + interface._search_engine.indices.exists.assert_called_once_with(index="querido-diario") - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_index_creation_should_failed_when_no_index_is_provided( - self, 
elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_index_creation_should_failed_when_no_index_is_provided( + self, opensearch_mock ): - interface = ElasticSearchInterface(["127.0.0.1"]) - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock() + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock() with self.assertRaisesRegex(Exception, "Index name not defined"): interface.create_index() - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_index_creation_with_default_index_value( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_index_creation_with_default_index_value( + self, opensearch_mock ): - interface = ElasticSearchInterface( + interface = OpenSearchInterface( ["127.0.0.1"], "admin", "admin", default_index="querido-diario2" ) - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock() + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock() interface.create_index() - interface._es.indices.exists.assert_called_once_with(index="querido-diario2") + interface._search_engine.indices.exists.assert_called_once_with(index="querido-diario2") - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_index_default_timeout_should_be_30s( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_index_default_timeout_should_be_30s( + self, opensearch_mock ): - interface = ElasticSearchInterface(["127.0.0.1"]) - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock(return_value=False) - interface._es.indices.create = MagicMock() + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock(return_value=False) + 
interface._search_engine.indices.create = MagicMock() interface.create_index("querido-diario") - interface._es.indices.create.assert_called_once_with( + interface._search_engine.indices.create.assert_called_once_with( index="querido-diario", body={"mappings": {"properties": {"date": {"type": "date"}}}}, timeout="30s", ) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_index_should_allow_change_default_timeout( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_index_should_allow_change_default_timeout( + self, opensearch_mock ): - interface = ElasticSearchInterface(["127.0.0.1"], timeout="2m") - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock(return_value=False) - interface._es.indices.create = MagicMock() + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin", timeout="2m") + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock(return_value=False) + interface._search_engine.indices.create = MagicMock() interface.create_index("querido-diario") - interface._es.indices.create.assert_called_once_with( + interface._search_engine.indices.create.assert_called_once_with( index="querido-diario", body={"mappings": {"properties": {"date": {"type": "date"}}}}, timeout="2m", ) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_index_creation_should_not_recreate_index_if_it_exists( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_index_creation_should_not_recreate_index_if_it_exists( - self, opensearch_mock + self, opensearch_mock ): - interface = ElasticSearchInterface(["127.0.0.1"]) - interface._es.indices = MagicMock() - interface._es.indices.exists = 
MagicMock(return_value=True) + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock(return_value=True) + interface._search_engine.indices.create = MagicMock() interface.create_index("querido-diario") - interface._es.indices.exists.assert_called_once_with(index="querido-diario") - interface._es.indices.create.assert_not_called() + interface._search_engine.indices.exists.assert_called_once_with(index="querido-diario") + interface._search_engine.indices.create.assert_not_called() - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_should_create_index_if_it_does_not_exists( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_should_create_index_if_it_does_not_exists( + self, opensearch_mock ): - interface = ElasticSearchInterface(["127.0.0.1"]) - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock(return_value=False) - interface._es.indices.create = MagicMock() + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock(return_value=False) + interface._search_engine.indices.create = MagicMock() interface.create_index("querido-diario") - interface._es.indices.exists.assert_called_once_with(index="querido-diario") - interface._es.indices.create.assert_called_once_with( + interface._search_engine.indices.exists.assert_called_once_with(index="querido-diario") + interface._search_engine.indices.create.assert_called_once_with( index="querido-diario", body={"mappings": {"properties": {"date": {"type": "date"}}}}, timeout="30s", ) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_elasticsearch_should_create_index_with_default_value_with_function_has_no_arguments( - self, elasticsearch_mock + @patch("opensearchpy.OpenSearch", autospec=True) + def test_opensearch_should_create_index_with_default_value_with_function_has_no_arguments( + self, opensearch_mock ): - interface = ElasticSearchInterface( + interface = OpenSearchInterface( ["127.0.0.1"], "admin", "admin", 
default_index="querido-diario2" ) - interface._es.indices = MagicMock() - interface._es.indices.exists = MagicMock(return_value=False) - interface._es.indices.create = MagicMock() + interface._search_engine.indices = MagicMock() + interface._search_engine.indices.exists = MagicMock(return_value=False) + interface._search_engine.indices.create = MagicMock() interface.create_index() - interface._es.indices.exists.assert_called_once_with(index="querido-diario2") - interface._es.indices.create.assert_called_once_with( + interface._search_engine.indices.exists.assert_called_once_with(index="querido-diario2") + interface._search_engine.indices.create.assert_called_once_with( index="querido-diario2", body={"mappings": {"properties": {"date": {"type": "date"}}}}, timeout="30s", ) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_upload_document_to_index(self, elasticsearch_mock): - interface = ElasticSearchInterface(["127.0.0.1"]) + @patch("opensearchpy.OpenSearch", autospec=True) + def test_upload_document_to_index(self, opensearch_mock): + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") document_checksum = str(uuid.uuid1()) interface.index_document(self.fake_document, "querido-diario") - interface._es.index.assert_called_once_with( + interface._search_engine.index.assert_called_once_with( index="querido-diario", body=self.fake_document, id=self.fake_document["file_checksum"], ) - @patch("elasticsearch.Elasticsearch", autospec=True) - def test_upload_document_to_index_using_default_index(self, elasticsearch_mock): - interface = ElasticSearchInterface( + @patch("opensearchpy.OpenSearch", autospec=True) + def test_upload_document_to_index_using_default_index(self, opensearch_mock): + interface = OpenSearchInterface( ["127.0.0.1"], "admin", "admin", default_index="querido-diario2" ) document_checksum = str(uuid.uuid1()) interface.index_document(self.fake_document) - interface._es.index.assert_called_once_with( + interface._search_engine.index.assert_called_once_with( 
index="querido-diario2", body=self.fake_document, id=self.fake_document["file_checksum"], ) -class ElasticsearchIntegrationTests(TestCase): +class OpensearchIntegrationTests(TestCase): def setUp(self): document_checksum = str(uuid.uuid1()) self.fake_document = { @@ -249,40 +249,40 @@ setUp(self): "state_code": "SC", "territory_name": "Gaspar", } - self._es = elasticsearch.Elasticsearch(hosts=["127.0.0.1"]) + self.search_engine = opensearchpy.OpenSearch(hosts=["127.0.0.1"]) def clean_index(self, index): - self._es.delete_by_query( + self.search_engine.delete_by_query( index=index, body={"query": {"match_all": {}}}, timeout="5m" ) - self._es.indices.refresh(index="querido-diario") + self.search_engine.indices.refresh(index="querido-diario") def delete_index(self, index): - self._es.indices.delete( + self.search_engine.indices.delete( index="querido-diario", timeout="2m", ignore_unavailable=True ) - self.assertFalse(self._es.indices.exists("querido-diario")) + self.assertFalse(self.search_engine.indices.exists("querido-diario")) def test_index_creation(self): self.delete_index("querido-diario") - interface = ElasticSearchInterface(["127.0.0.1"], timeout="5m") + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin", timeout="5m") interface.create_index("querido-diario") - self.assertTrue(self._es.indices.exists("querido-diario")) + self.assertTrue(self.search_engine.indices.exists("querido-diario")) def test_index_document(self): self.clean_index("querido-diario") - interface = ElasticSearchInterface(["127.0.0.1"]) + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") interface.index_document(self.fake_document, "querido-diario") - self._es.indices.refresh(index="querido-diario") + self.search_engine.indices.refresh(index="querido-diario") - self.assertEqual(self._es.count(index="querido-diario")["count"], 1) + self.assertEqual(self.search_engine.count(index="querido-diario")["count"], 1) self.assertTrue( - self._es.exists( + self.search_engine.exists( 
id=self.fake_document["file_checksum"], index="querido-diario" ) ) - indexed_document = self._es.get( + indexed_document = self.search_engine.get( index="querido-diario", id=self.fake_document["file_checksum"] ) self.fake_document["date"] = self.fake_document["date"].strftime("%Y-%m-%d") @@ -292,18 +292,18 @@ def test_index_document(self): def test_index_document_twice(self): self.clean_index("querido-diario") - interface = ElasticSearchInterface(["127.0.0.1"]) + interface = OpenSearchInterface(["127.0.0.1"], "admin", "admin") interface.index_document(self.fake_document, "querido-diario") interface.index_document(self.fake_document, "querido-diario") - self._es.indices.refresh(index="querido-diario") + self.search_engine.indices.refresh(index="querido-diario") - self.assertEqual(self._es.count(index="querido-diario")["count"], 1) + self.assertEqual(self.search_engine.count(index="querido-diario")["count"], 1) self.assertTrue( - self._es.exists( + self.search_engine.exists( id=self.fake_document["file_checksum"], index="querido-diario" ) ) - indexed_document = self._es.get( + indexed_document = self.search_engine.get( index="querido-diario", id=self.fake_document["file_checksum"] ) self.fake_document["date"] = self.fake_document["date"].strftime("%Y-%m-%d")