diff --git a/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery.py b/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery.py
index 00d11d4df7f7..b18d3a9c34ec 100644
--- a/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery.py
+++ b/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery.py
@@ -99,13 +99,14 @@ def build_query_job_config(allow_large_results, default_dataset_string, destinat
     return query_job_config
 
 
-def convert_to_string_if_datetime(object_that_may_be_datetime):
-    if isinstance(object_that_may_be_datetime, datetime):
-        return object_that_may_be_datetime.strftime("%m/%d/%Y %H:%M:%S")
-    if isinstance(object_that_may_be_datetime, date):
-        return object_that_may_be_datetime.strftime("%m/%d/%Y")
-    else:
-        return object_that_may_be_datetime
+def convert_to_string(field_value):
+    if isinstance(field_value, datetime):
+        return field_value.strftime("%m/%d/%Y %H:%M:%S")
+    if isinstance(field_value, date):
+        return field_value.strftime("%m/%d/%Y")
+    if isinstance(field_value, bytes):
+        return field_value.decode('utf-8')
+    return field_value
 
 
 ''' COMMANDS + REQUESTS FUNCTIONS '''
@@ -166,7 +167,7 @@ def query_command(query_to_run=None):
     else:
 
         for row in query_results:
-            row_context = {underscoreToCamelCase(k): convert_to_string_if_datetime(v) for k, v in row.items()}
+            row_context = {underscoreToCamelCase(k): convert_to_string(v) for k, v in row.items()}
             rows_contexts.append(row_context)
 
     if rows_contexts:
@@ -241,7 +242,7 @@ def row_to_incident(row):
     Transform a Google BigQuery row to an incident's format.
     """
     incident = {}
-    raw = {underscoreToCamelCase(k): convert_to_string_if_datetime(v) for k, v in row.items()}
+    raw = {underscoreToCamelCase(k): convert_to_string(v) for k, v in row.items()}
     incident["rawJSON"] = json.dumps(raw)
     incident_name_field = demisto.params().get("incident_name_field")
     if incident_name_field and incident_name_field in raw:
diff --git a/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery_test.py b/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery_test.py
index 14f402413a14..0bb30790dce8
--- a/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery_test.py
+++ b/Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery_test.py
@@ -4,21 +4,24 @@
 import demistomock as demisto
 
 
-def test_convert_to_string_if_datetime():
-    from GoogleBigQuery import convert_to_string_if_datetime
-    test_conversion_for_none = convert_to_string_if_datetime(None)
+def test_convert_to_string():
+    from GoogleBigQuery import convert_to_string
+    test_conversion_for_none = convert_to_string(None)
     assert test_conversion_for_none is None
 
     now = datetime.datetime.now()
-    convert_to_string_if_datetime(now)
-    test_conversion_for_empty_string = convert_to_string_if_datetime("")
+    convert_to_string(now)
+    test_conversion_for_empty_string = convert_to_string("")
     assert test_conversion_for_empty_string == ""
 
     today = datetime.date.today()
-    convert_to_string_if_datetime(today)
-    test_conversion_for_empty_string = convert_to_string_if_datetime("")
+    convert_to_string(today)
+    test_conversion_for_empty_string = convert_to_string("")
     assert test_conversion_for_empty_string == ""
 
+    assert convert_to_string(b'test') == 'test'
+    assert convert_to_string('test') == 'test'
+
 
 def test_remove_outdated_incident_ids_keep_equal():
     """
diff --git a/Packs/GoogleBigQuery/ReleaseNotes/1_1_9.md b/Packs/GoogleBigQuery/ReleaseNotes/1_1_9.md
new file mode 100644
index 000000000000..af69d31d98d0
--- /dev/null
+++ b/Packs/GoogleBigQuery/ReleaseNotes/1_1_9.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### Google BigQuery
+
+Fixed an issue where the ***bigquery-query*** command failed when the query result includes bytes.
diff --git a/Packs/GoogleBigQuery/pack_metadata.json b/Packs/GoogleBigQuery/pack_metadata.json
index 1a236dc57100..7f9abe61f061
--- a/Packs/GoogleBigQuery/pack_metadata.json
+++ b/Packs/GoogleBigQuery/pack_metadata.json
@@ -2,7 +2,7 @@
     "name": "Google BigQuery",
     "description": "Integration for Google BigQuery, a data warehouse for querying and analyzing large databases. In all commands, for any argument not specified, the BigQuery default value for that argument will be applied.",
     "support": "xsoar",
-    "currentVersion": "1.1.8",
+    "currentVersion": "1.1.9",
     "author": "Cortex XSOAR",
     "url": "https://www.paloaltonetworks.com/cortex",
     "email": "",
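
For reviewers, a minimal standalone sketch of what the renamed helper now does with each value type. The function body is copied verbatim from the patch above; the sample datetime/date/bytes values and the expected strings are illustrative only.

```python
# Standalone copy of the convert_to_string helper added in this patch, shown
# here for illustration; the real function lives in
# Packs/GoogleBigQuery/Integrations/GoogleBigQuery/GoogleBigQuery.py.
from datetime import date, datetime


def convert_to_string(field_value):
    if isinstance(field_value, datetime):  # checked before date, since datetime subclasses date
        return field_value.strftime("%m/%d/%Y %H:%M:%S")
    if isinstance(field_value, date):
        return field_value.strftime("%m/%d/%Y")
    if isinstance(field_value, bytes):  # new branch: bytes values (e.g. BYTES columns) are decoded
        return field_value.decode('utf-8')
    return field_value


# Illustrative inputs (not from the patch); the old helper returned bytes
# unchanged, which is what made ***bigquery-query*** fail per the release note.
assert convert_to_string(b'test') == 'test'
assert convert_to_string(datetime(2024, 1, 2, 3, 4, 5)) == '01/02/2024 03:04:05'
assert convert_to_string(date(2024, 1, 2)) == '01/02/2024'
assert convert_to_string(None) is None          # non-special values pass through unchanged
assert convert_to_string('already a string') == 'already a string'
```

Keeping the `datetime` check ahead of the `date` check matters because `datetime` is a subclass of `date`; the patch preserves that ordering while appending the new `bytes` branch before the passthrough return.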