
Commit

Improved query_builder.py tests.
Pablo Rodríguez Flores committed Jan 17, 2024
1 parent 3d489d8 commit 80f2661
Showing 2 changed files with 56 additions and 18 deletions.
59 changes: 46 additions & 13 deletions resources/src/druid/query_builder.py
@@ -22,9 +22,11 @@
 Module to modify the druid queries so they can be
 processed by the model.
 """

 import os
 import json

+from resources.src.logger import logger
+
 class QueryBuilder:
     """
     Class used to modify the manager's usual query to extract data about
@@ -41,14 +43,39 @@ def __init__(self, aggregations, post_aggregations):
         value of the druid query.
         """
         try:
-            with open(aggregations, encoding="utf-8") as agr:
-                self.aggregations = json.load(agr)
-            with open(post_aggregations, encoding="utf-8") as pagr:
-                self.post_aggregations = json.load(pagr)
-        except FileNotFoundError as exc:
-            raise FileNotFoundError("One or both files not found.") from exc
-        except json.JSONDecodeError as exc:
-            raise ValueError("JSON decoding failed. Check the JSON format.") from exc
+            self.aggregations = self.load_json(aggregations)
+        except Exception:
+            error_msg = "Aggregations decoding failed."
+            logger.logger.error(error_msg)
+            raise
+        try:
+            self.post_aggregations = self.load_json(post_aggregations)
+        except Exception:
+            error_msg = "PostAggregations decoding failed."
+            logger.logger.error(error_msg)
+            raise

+    def load_json(self, path):
+        """
+        Load a JSON file as a dictionary.
+        Args:
+            path (string): path to the JSON file.
+        Returns:
+            (dict): deserialized JSON.
+        """
+        try:
+            with open(path, encoding="utf-8") as json_file:
+                return json.load(json_file)
+        except FileNotFoundError as exc:
+            error_msg = f"File {os.path.basename(path)} not found."
+            logger.logger.error(error_msg)
+            raise FileNotFoundError(error_msg) from exc
+        except json.JSONDecodeError as exc:
+            error_msg = f"Could not decode {os.path.basename(path)} as JSON. Check the JSON format."
+            logger.logger.error(error_msg)
+            raise ValueError(error_msg) from exc

     def granularity_to_seconds(self, granularity):
         """
@@ -62,9 +89,13 @@ def granularity_to_seconds(self, granularity):
         - (int): number of seconds in the granularity.
         """
         if not isinstance(granularity, str):
-            raise ValueError("Granularity must be a string")
+            error_msg = "Granularity must be a string"
+            logger.logger.error(error_msg)
+            raise ValueError(error_msg)
         if len(granularity)==0:
-            raise ValueError("Granularity must be a non-empty string")
+            error_msg = "Granularity must be a non-empty string"
+            logger.logger.error(error_msg)
+            raise ValueError(error_msg)
         base_granularities = {
             "minute": 60, "hour": 3600, "day": 86400,
             "fifteen_minute": 900, "thirty_minute": 1800,
@@ -75,8 +106,10 @@ def granularity_to_seconds(self, granularity):
             return base_granularities[granularity]
         try:
             multiplier = base_granularities[granularity[-1]]
-        except Exception as exc:
-            raise Exception('Invalid granularity') from exc
+        except Exception as exc:
+            error_msg = 'Invalid granularity'
+            logger.logger.error(error_msg)
+            raise ValueError(error_msg) from exc
         numbers = int(''.join(filter(str.isdigit, granularity)))
         return numbers * multiplier
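For reference, a minimal usage sketch of the refactored QueryBuilder (a sketch only, not part of the commit; it assumes the module is importable as resources.src.druid.query_builder, that the script runs from the repository root, and the granularity results mirror the test cases below):

from resources.src.druid.query_builder import QueryBuilder

builder = QueryBuilder(
    "resources/src/druid/data/aggregations.json",
    "resources/src/druid/data/postAggregations.json",
)

# Named granularities map directly to seconds.
print(builder.granularity_to_seconds("minute"))  # 60

# Period-style strings are parsed from their digits and trailing unit letter,
# e.g. "pt2h" -> 2 * 3600 = 7200, as asserted in the tests below.
print(builder.granularity_to_seconds("pt2h"))    # 7200

# A missing file is now logged and surfaced as FileNotFoundError.
try:
    QueryBuilder("missing.json", "resources/src/druid/data/postAggregations.json")
except FileNotFoundError as exc:
    print(exc)  # File missing.json not found.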

15 changes: 10 additions & 5 deletions resources/tests/test_query_builder.py
@@ -25,16 +25,21 @@

 class TestQueryBuilder(unittest.TestCase):
     def setUp(self) -> None:
-        aggregations_file = os.path.join(os.getcwd(),"resources", "src", "druid", "data", "aggregations.json")
-        post_aggregations_file = os.path.join(os.getcwd(),"resources", "src", "druid", "data", "postAggregations.json")
-        self.builder = query_builder.QueryBuilder(aggregations_file, post_aggregations_file)
+        self.aggregations_file = os.path.join(os.getcwd(), "resources", "src", "druid", "data", "aggregations.json")
+        self.post_aggregations_file = os.path.join(os.getcwd(), "resources", "src", "druid", "data", "postAggregations.json")
+        self.builder = query_builder.QueryBuilder(self.aggregations_file, self.post_aggregations_file)

+    def test_invalid_files(self):
+        with self.assertRaises(FileNotFoundError):
+            query_builder.QueryBuilder("invalid.json", self.post_aggregations_file)
+        with self.assertRaises(FileNotFoundError):
+            query_builder.QueryBuilder(self.aggregations_file, "invalid.json")
+
     def test_known_granularities_granularities_to_seconds(self):
         test_cases = [
             ("minute", 60),
             ("pt2h", 7200),
             ("P1D", 86400),
-            # Add more known granularities and expected results here
         ]
         for granularity, expected_seconds in test_cases:
             with self.subTest(granularity=granularity):
@@ -51,7 +56,7 @@ def test_numeric_granularities_to_seconds(self):

     def test_invalid_input_granularities_to_seconds(self):
         with self.assertRaises(ValueError):
-            self.builder.granularity_to_seconds(None) # Test with None input
+            self.builder.granularity_to_seconds(None)
         with self.assertRaises(ValueError):
             self.builder.granularity_to_seconds("")
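As a possible follow-up (hypothetical, not part of this commit), the new load_json helper could get its own error-path test, reusing the fixture paths from setUp and assuming query_builder is importable the same way as in the existing tests:

import os
import unittest

from resources.src.druid import query_builder


class TestLoadJsonHelper(unittest.TestCase):
    """Hypothetical extra check for the load_json helper (not in this commit)."""

    def setUp(self) -> None:
        data_dir = os.path.join(os.getcwd(), "resources", "src", "druid", "data")
        self.builder = query_builder.QueryBuilder(
            os.path.join(data_dir, "aggregations.json"),
            os.path.join(data_dir, "postAggregations.json"),
        )

    def test_missing_file_raises_file_not_found(self):
        with self.assertRaises(FileNotFoundError):
            self.builder.load_json("does_not_exist.json")


if __name__ == "__main__":
    unittest.main()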
