Skip to content

Commit

Permalink
Solve selected pylint issues
Browse files Browse the repository at this point in the history
Co-Authored-By: Anna Kvashchuk kvashchuk.anna@gmail.com

Mostly fixed by adding disable statements close to the offending line.
Some cleanup of unused fixtures.
  • Loading branch information
berland authored and kvashchuka committed Jul 8, 2022
1 parent 5bd4510 commit 57ebb06
Show file tree
Hide file tree
Showing 40 changed files with 218 additions and 122 deletions.
8 changes: 0 additions & 8 deletions .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -54,19 +54,11 @@ confidence=
# --disable=W"
disable=
duplicate-code,
import-outside-toplevel,
invalid-name,
logging-format-interpolation,
missing-class-docstring,
missing-function-docstring,
missing-module-docstring,
no-member,
no-self-use,
not-callable,
redefined-outer-name,
too-many-arguments,
too-many-locals,
unused-argument,
unspecified-encoding
#
# Enable the message, report, category or checker with the given id(s). You can
Expand Down
1 change: 1 addition & 0 deletions semeio/workflows/ahm_analysis/ahmanalysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,6 +281,7 @@ def run(
def _run_ministep(
ert, obs_group, data_parameters, prior_name, target_name, output_path
):
# pylint: disable=too-many-arguments
update_step = {
"name": "MINISTEP",
"observations": obs_group,
Expand Down
1 change: 1 addition & 0 deletions semeio/workflows/csv_export2/csv_export2.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ def csv_exporter(runpathfile, time_index, outputfile, column_keys=None):

class CsvExport2Job(ErtScript): # pylint: disable=too-few-public-methods
def run(self, *args):
# pylint: disable=no-self-use
main(args)


Expand Down
15 changes: 10 additions & 5 deletions semeio/workflows/localisation/local_script_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,7 @@ def apply_decay(
tapering_range=None,
calculate_qc_parameter=False,
):
# pylint: disable=too-many-arguments,too-many-locals
"""Calculates the scaling factor, assign it to ERT instance by row_scaling
and returns a full sized grid parameter with scaling factors for active
grid cells and 0 elsewhere to be used for QC purpose.
Expand Down Expand Up @@ -300,6 +301,7 @@ def apply_decay(


def apply_from_file(row_scaling, data_size, grid, filename, param_name, log_level):
# pylint: disable=too-many-arguments
debug_print(
f"Read scaling factors as parameter {param_name}", LogLevel.LEVEL3, log_level
)
Expand Down Expand Up @@ -348,7 +350,7 @@ def define_look_up_index(user_defined_active_region_list, max_region_number):
def calculate_scaling_factors_in_regions(
grid, region_parameter, active_segment_list, scaling_value_list, smooth_range_list
):
# pylint: disable=unused-argument
# pylint: disable=unused-argument,too-many-locals
# ('grid' and 'smooth-range-list' are not currently used)

min_region_number = region_parameter.min()
Expand Down Expand Up @@ -399,6 +401,7 @@ def smooth_parameter(
active region and e.g 0 for all inactive regions and for inactive grid cells,
then the smoothing will only appear on the border between active regions.
"""
# pylint: disable=too-many-locals
nx = grid.get_nx()
ny = grid.get_ny()
nz = grid.get_nz()
Expand Down Expand Up @@ -439,6 +442,7 @@ def apply_segment(
corr_name,
log_level=LogLevel.OFF,
):
# pylint: disable=too-many-arguments,too-many-locals
"""
Purpose: Use region numbers and list of scaling factors per region to
create scaling factors per active .
Expand Down Expand Up @@ -571,9 +575,9 @@ def add_ministeps(
ert_ensemble_config,
grid_for_field,
):
# pylint: disable-msg=too-many-branches
# pylint: disable-msg=too-many-statements
# pylint: disable-msg=too-many-nested-blocks
# pylint: disable=too-many-branches,too-many-statements
# pylint: disable=too-many-nested-blocks,too-many-locals

debug_print("Add all ministeps:", LogLevel.LEVEL1, user_config.log_level)
ScalingValues.initialize()
# Read all region files used in correlation groups,
Expand Down Expand Up @@ -905,7 +909,7 @@ def __call__(self, data_index):


class ScalingValues:
# pylint: disable=R0903
# pylint: disable=too-few-public-methods
scaling_param_number = 1
corr_name = None

Expand All @@ -924,6 +928,7 @@ def write_qc_parameter(
param_for_field,
log_level=LogLevel.OFF,
):
# pylint: disable=too-many-arguments
if param_for_field is None or field_scale is None:
return

Expand Down
14 changes: 12 additions & 2 deletions semeio/workflows/localisation/localisation_config.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# pylint: disable=E0213
# pylint: disable=no-self-argument
import itertools
import pathlib

Expand Down Expand Up @@ -54,7 +54,7 @@ def check_for_duplicated_correlation_specifications(correlations):


class BaseModel(PydanticBaseModel):
# pylint: disable=R0903
# pylint: disable=too-few-public-methods
class Config:
extra = Extra.forbid

Expand All @@ -79,18 +79,21 @@ class ObsConfig(BaseModel):

@validator("add")
def validate_add(cls, add):
# pylint: disable=no-self-use
if isinstance(add, str):
add = [add]
return add

@validator("remove")
def validate_remove(cls, remove):
# pylint: disable=no-self-use
if isinstance(remove, str):
remove = [remove]
return remove

@validator("result_items", always=True)
def expanded_items(cls, _, values):
# pylint: disable=no-self-use
add, remove = values["add"], values.get("remove", None)
result = _check_specification(add, remove, values["context"])
if len(result) == 0:
Expand Down Expand Up @@ -220,6 +223,7 @@ class ScalingForSegmentsConfig(BaseModel):

@validator("scalingfactors")
def check_length_consistency(cls, v, values):
# pylint: disable=no-self-use
# Ensure that active segment list and scaling factor lists are of equal length.
scalingfactors = v
active_segment_list = values.get("active_segments", None)
Expand Down Expand Up @@ -285,12 +289,14 @@ class CorrelationConfig(BaseModel):

@root_validator(pre=True)
def inject_context(cls, values: Dict) -> Dict:
# pylint: disable=no-self-use
values["obs_group"]["context"] = values["obs_context"]
values["param_group"]["context"] = values["params_context"]
return values

@validator("field_scale", pre=True)
def validate_field_scale(cls, value):
# pylint: disable=no-self-use
"""
To improve the user feedback we explicitly check
which method is configured and bypass the Union
Expand All @@ -316,6 +322,7 @@ def validate_field_scale(cls, value):

@validator("surface_scale", pre=True)
def validate_surface_scale(cls, value):
# pylint: disable=no-self-use
"""
To improve the user feedback we explicitly check
which method is configured and bypass the Union
Expand Down Expand Up @@ -372,19 +379,22 @@ class LocalisationConfig(BaseModel):

@validator("log_level")
def validate_log_level(cls, level):
# pylint: disable=no-self-use
if not isinstance(level, int):
level = LogLevel.OFF
return level

@root_validator(pre=True)
def inject_context(cls, values: Dict) -> Dict:
# pylint: disable=no-self-use
for correlation in values["correlations"]:
correlation["obs_context"] = values["observations"]
correlation["params_context"] = values["parameters"]
return values

@validator("correlations")
def validate_correlations(cls, correlations):
# pylint: disable=no-self-use
duplicates = check_for_duplicated_correlation_specifications(correlations)
if len(duplicates) > 0:
error_msgs = "\n".join(duplicates)
Expand Down
3 changes: 3 additions & 0 deletions semeio/workflows/misfit_preprocessor/hierarchical_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ class BaseFclusterConfig(BaseMisfitPreprocessorConfig):

@validator("depth", pre=True)
def constrained_int(cls, depth):
# pylint: disable=no-self-use
if isinstance(depth, float):
raise ValueError("Depth must be int")
return depth
Expand All @@ -55,6 +56,7 @@ class FclusterConfig(BaseFclusterConfig):

@root_validator(pre=True)
def validate_threshold(cls, values):
# pylint: disable=no-self-use
criterion = values.get("criterion")
threshold = values.get("threshold")
if criterion in ("maxclust", "maxclust_monocrit") and "threshold" not in values:
Expand All @@ -68,6 +70,7 @@ def validate_threshold(cls, values):

@validator("threshold")
def t_larger_than_zero(cls, threshold):
# pylint: disable=no-self-use
if threshold <= 0:
raise ValueError(f"threshold must be larger than zero, is {threshold}")
return threshold
Expand Down
1 change: 1 addition & 0 deletions semeio/workflows/misfit_preprocessor/workflow_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ class MisfitConfig(BaseMisfitPreprocessorConfig):

@validator("workflow", pre=True)
def validate_workflow(cls, value):
# pylint: disable=no-self-use
"""
To improve the user feedback we explicitly check if the type of workflow
is configured, and if it is, we bypass the Union. If it has not been given
Expand Down
2 changes: 2 additions & 0 deletions semeio/workflows/spearman_correlation_job/cluster_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ def fcluster_analysis(
method="single",
metric="euclidean",
):
# pylint: disable=too-many-arguments
a = linkage(correlation_matrix, method, metric)
return fcluster(a, threshold, criterion=criterion, depth=depth)

Expand All @@ -22,6 +23,7 @@ def kmeans_analysis(
max_iter=300,
random_state=None,
):
# pylint: disable=too-many-arguments
kmeans = KMeans(
init=init,
n_clusters=n_clusters,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def run(self, *args):


def spearman_job_parser():
description = """
description = """
A module that calculates the Spearman correlation in simulated data
and clusters
"""
Expand Down
1 change: 1 addition & 0 deletions tests/communication/test-data/logging_test_workflow_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
class TestWorkflowJob(SemeioScript):
# pylint: disable=method-hidden
def run(self, *args):
# pylint: disable=unused-argument
self.reporter.publish("test_data", list(range(10)))

# The mission of this code is to simulate that something outside the
Expand Down
9 changes: 8 additions & 1 deletion tests/communication/unit/test_semeio_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

from semeio.communication import SEMEIOSCRIPT_LOG_FILE, SemeioScript

# pylint: disable=method-hidden
# pylint: disable=method-hidden,no-self-use


def _ert_mock(ensemble_path="storage", user_case_name="case_name"):
Expand All @@ -33,6 +33,7 @@ def test_semeio_script_publish(tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
self.reporter.publish(namespace, data)

my_super_script = MySuperScript(ert)
Expand Down Expand Up @@ -61,6 +62,7 @@ def test_semeio_script_logging(tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
logging.error(msg)

my_super_script = MySuperScript(ert)
Expand Down Expand Up @@ -105,6 +107,7 @@ def test_semeio_script_multiple_logging(messages, tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
posted_messages = []
for msg in messages:
logging.error(msg)
Expand Down Expand Up @@ -133,6 +136,7 @@ def test_semeio_script_post_logging(tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
logging.error("A message from MySuperScript")

my_super_script = MySuperScript(ert)
Expand Down Expand Up @@ -161,6 +165,7 @@ def test_semeio_script_pre_logging(tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
logging.error("A message from MySuperScript")

my_super_script = MySuperScript(ert)
Expand Down Expand Up @@ -190,6 +195,7 @@ def test_semeio_script_concurrent_logging(tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
logging.error("A message from MySuperScript")
thread = Thread(target=lambda: logging.error("External event."))
thread.start()
Expand Down Expand Up @@ -218,6 +224,7 @@ def test_semeio_script_post_logging_exception(tmpdir):

class MySuperScript(SemeioScript):
def run(self, *args):
# pylint: disable=unused-argument
logging.error("A message from MySuperScript")
raise AssertionError("Bazinga")

Expand Down
3 changes: 3 additions & 0 deletions tests/jobs/ahm_analysis/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ def test_ahmanalysis_run(test_data_root):
res_config.convertToCReference(None)
ert = EnKFMain(res_config)

# pylint: disable=not-callable
ahmanalysis.AhmAnalysisJob(ert).run(prior_name="default_0")

# assert that this returns/generates a KS csv file
Expand Down Expand Up @@ -82,6 +83,7 @@ def test_ahmanalysis_run_field(test_data_root, grid_prop):
res_config.convertToCReference(None)
ert = EnKFMain(res_config)

# pylint: disable=not-callable
ahmanalysis.AhmAnalysisJob(ert).run(prior_name="default")

# assert that this returns/generates the delta field parameter
Expand Down Expand Up @@ -121,4 +123,5 @@ def test_no_prior(test_data_root):
expected_msg = "Empty prior ensemble"
# check that it fails
with pytest.raises(ValidationError, match=expected_msg):
# pylint: disable=not-callable
ahmanalysis.AhmAnalysisJob(ert).run(prior_name="default")
11 changes: 5 additions & 6 deletions tests/jobs/correlated_observations_scaling/test_integration.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# pylint: disable=not-callable
import json
import os
import shutil
Expand All @@ -24,7 +25,7 @@ def get_std_from_obs_vector(vector):
return result


def test_installed_python_version_of_enkf_scaling_job(setup_ert, monkeypatch):
def test_installed_python_version_of_enkf_scaling_job(setup_ert):

pm = ErtPluginManager(
plugins=[
Expand Down Expand Up @@ -143,7 +144,7 @@ def test_main_entry_point_gen_data(setup_ert):
assert reported_scalefactor == pytest.approx(1.224744871391589, 0.1)


def test_main_entry_point_summary_data_calc(setup_ert, monkeypatch):
def test_main_entry_point_summary_data_calc(setup_ert):
cos_config = {
"CALCULATE_KEYS": {"keys": [{"key": "WOPR_OP1_108"}, {"key": "WOPR_OP1_144"}]}
}
Expand Down Expand Up @@ -180,9 +181,7 @@ def test_main_entry_point_summary_data_calc(setup_ert, monkeypatch):
),
],
)
def test_main_entry_point_history_data_calc(
setup_ert, monkeypatch, config, expected_result
):
def test_main_entry_point_history_data_calc(setup_ert, config, expected_result):

res_config = setup_ert

Expand All @@ -196,7 +195,7 @@ def test_main_entry_point_history_data_calc(
assert result == [expected_result] * 200


def test_main_entry_point_history_data_calc_subset(setup_ert, monkeypatch):
def test_main_entry_point_history_data_calc_subset(setup_ert):
config = {"CALCULATE_KEYS": {"keys": [{"key": "FOPR", "index": [10, 20]}]}}
res_config = setup_ert

Expand Down
Loading

0 comments on commit 57ebb06

Please sign in to comment.