Merge pull request #44 from boutproject/flake8
flake8 linting
johnomotani authored May 27, 2021
2 parents 04c26f9 + 476aa69 commit a4c6065
Showing 16 changed files with 85 additions and 85 deletions.
7 changes: 7 additions & 0 deletions .flake8
@@ -0,0 +1,7 @@
[flake8]
max-line-length = 88
ignore = (
E741, # 'ambiguous variable names' forbids using 'I', 'O' or 'l'
W503, # 'line break before binary operator', but this is allowed and useful inside brackets
E203, # 'whitespace before ':'', but black formats some slice expressions with space before ':'
)
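For context, a minimal sketch (not part of this commit; names invented) of code that each ignored check would otherwise flag:

first_term, second_term = 2, 3
items, offset, n = list(range(10)), 4, 3

l = 1  # E741: 'l' is easily confused with '1' or 'I'
total = (first_term
         + second_term)  # W503: line break before a binary operator inside brackets
chunk = items[offset + 1 : offset + n]  # E203: space before ':', black's style for complex slices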
4 changes: 4 additions & 0 deletions .github/workflows/test.yml
@@ -74,3 +74,7 @@ jobs:
run: |
pip install black
black --check --diff --color .
- name: Lint with flake8
run: |
pip install flake8
flake8 && echo "flake8 successful"
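The same two checks can be reproduced locally before pushing; a sketch (assumes black and flake8 are installed in the active environment):

import subprocess

# Mirror the workflow steps: formatting check first, then lint.
subprocess.run(["black", "--check", "--diff", "--color", "."], check=True)
subprocess.run(["flake8"], check=True)  # raises CalledProcessError on lint failures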
7 changes: 1 addition & 6 deletions boutdata/__init__.py
@@ -1,10 +1,5 @@
""" Routines for exchanging data to/from BOUT++ """

try:
from builtins import str
except ImportError:
raise ImportError("Please install the future module to use Python 2")

# Import this, as this almost always used when calling this package
from boutdata.collect import collect, attributes

@@ -27,7 +22,7 @@

path = Path(__file__).resolve()
__version__ = get_version(root="..", relative_to=path)
except (ModuleNotFoundError, LookupError) as e:
except (ModuleNotFoundError, LookupError):
# ModuleNotFoundError if setuptools_scm is not installed.
# LookupError if git is not installed, or the code is not in a git repo even
# though it has not been installed.
1 change: 0 additions & 1 deletion boutdata/cbdtoeqdsk.py
@@ -1,4 +1,3 @@
from boututils.file_import import file_import
import numpy as np


37 changes: 23 additions & 14 deletions boutdata/collect.py
@@ -220,7 +220,14 @@ def getDataFile(i):
# Read data from the first file
f = getDataFile(0)
grid_info, tind, xind, yind, zind = _get_grid_info(
f, xguards=xguards, yguards=yguards, tind=tind, xind=xind, yind=yind, zind=zind
f,
xguards=xguards,
yguards=yguards,
tind=tind,
xind=xind,
yind=yind,
zind=zind,
nfiles=len(file_list),
)

if varname not in grid_info["varNames"]:
@@ -637,8 +644,8 @@ def _collect_from_one_proc(

global_dim_slices = {"x": slice(xgstart, xgstop), "y": slice(ygstart, ygstop)}
if parallel_read:
# When reading in parallel, we are always reading into a 4-dimensional shared array.
# Should not reach this function unless we only have dimensions in
# When reading in parallel, we are always reading into a 4-dimensional shared
# array. Should not reach this function unless we only have dimensions in
# ("t", "x", "y", "z")
global_slices = tuple(
global_dim_slices.get(dim, slice(None)) if dim in dimensions else 0
@@ -949,7 +956,9 @@ def _check_fieldperp_attributes(
return yindex_global, fieldperp_yproc, var_attributes


def _get_grid_info(f, *, xguards, yguards, tind, xind, yind, zind, all_vars_info=False):
def _get_grid_info(
f, *, xguards, yguards, tind, xind, yind, zind, nfiles, all_vars_info=False
):
"""Get the grid info from an open DataFile
Parameters
@@ -968,6 +977,8 @@ def _get_grid_info(f, *, xguards, yguards, tind, xind, yind, zind, all_vars_info
Slice for y-dimension
zind : int, sequence of int or slice
Slice for z-dimension
nfiles : int
Number of files being read from
all_vars_info : bool, default False
Load extra info on names, dimensions and attributes of all variables.
"""
@@ -1162,9 +1173,8 @@ def findFiles(path, prefix):
if files:
if file_list_parallel: # Already had a list of files
raise IOError(
"Parallel dump files with both {0} and {1} extensions are present. Do not know which to read.".format(
suffix, test_suffix
)
"Parallel dump files with both {0} and {1} extensions are present. "
"Do not know which to read.".format(suffix_parallel, test_suffix)
)
suffix_parallel = test_suffix
file_list_parallel = files
@@ -1176,18 +1186,16 @@
if files:
if file_list: # Already had a list of files
raise IOError(
"Dump files with both {0} and {1} extensions are present. Do not know which to read.".format(
suffix, test_suffix
)
"Dump files with both {0} and {1} extensions are present. Do not "
"know which to read.".format(suffix, test_suffix)
)
suffix = test_suffix
file_list = files

if file_list_parallel and file_list:
raise IOError(
"Both regular (with suffix {0}) and parallel (with suffix {1}) dump files are present. Do not know which to read.".format(
suffix_parallel, suffix
)
"Both regular (with suffix {0}) and parallel (with suffix {1}) dump files "
"are present. Do not know which to read.".format(suffix, suffix_parallel)
)
elif file_list_parallel:
return file_list_parallel, True, suffix_parallel
@@ -1215,7 +1223,8 @@ def create_cache(path, prefix):
Returns
-------
namedtuple : (list of str, bool, str, list of :py:obj:`~boututils.datafile.DataFile`)
namedtuple : (list of str, bool, str,
list of :py:obj:`~boututils.datafile.DataFile`)
The cache of DataFiles in a namedtuple along with the file_list,
and parallel and suffix attributes
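A side note on the rewrapped messages above: Python joins adjacent string literals at parse time, so .format() applies to the whole concatenated message. A standalone sketch (suffix values invented):

msg = (
    "Parallel dump files with both {0} and {1} extensions are present. "
    "Do not know which to read.".format(".nc", ".h5")
)
# The two literals were joined before .format() ran:
assert ".nc" in msg and "{0}" not in msg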
43 changes: 25 additions & 18 deletions boutdata/data.py
@@ -30,7 +30,7 @@
# These are imported to be used by 'eval' in
# BoutOptions.evaluate_scalar() and BoutOptionsFile.evaluate().
# Change the names to match those used by C++/BOUT++
from numpy import (
from numpy import ( # noqa: F401
pi,
sin,
cos,
@@ -289,7 +289,8 @@ def ensure_sections(parent, path):
parent.getSection(path)

def rename_key(thing, new_name, old_name):
"""Rename a key in a dict while trying to preserve order, useful for minimising diffs"""
"""Rename a key in a dict while trying to preserve order, useful for
minimising diffs"""
return {new_name if k == old_name else k: v for k, v in thing.items()}

def get_immediate_parent_and_child(path):
@@ -313,7 +314,8 @@ def get_immediate_parent_and_child(path):
# Was it just a change in case?
case_change = new_child.lower() == old_child.lower()

# Renaming a child section just within the same parent section, we can preserve the order
# Renaming a child section just within the same parent section, we can
# preserve the order
if (new_parent is old_parent) and (new_section or case_change):
# We just put a new section in, but it will have been
# added at the end -- remove it so we can actually put
@@ -358,7 +360,8 @@ def get_immediate_parent_and_child(path):
new_parent, new_child = get_immediate_parent_and_child(new_name)
old_parent, old_child = get_immediate_parent_and_child(old_name)

# Renaming a child key just within the same parent section, we can preserve the order
# Renaming a child key just within the same parent section, we can preserve
# the order
if new_parent is old_parent:
new_parent._keys = rename_key(new_parent._keys, new_child, old_child)
new_parent.comments = rename_key(
@@ -564,7 +567,8 @@ def _evaluate_section(self, expression, nested_sectionname):
if re.search(
r"(?<!:)\b" + re.escape(nested_name.lower()) + r"\b", expression.lower()
):
# match nested_name only if not preceded by colon (which indicates more nesting)
# match nested_name only if not preceded by colon (which indicates more
# nesting)
expression = re.sub(
r"(?<!:)\b" + re.escape(nested_name.lower()) + r"\b",
"(" + self._substitute_expressions(var) + ")",
@@ -1078,6 +1082,7 @@ def __init__(
xind=xind,
yind=yind,
zind=zind,
nfiles=len(self._file_list),
all_vars_info=True,
)

@@ -1114,11 +1119,15 @@
)
if self.grid_info["npes"] < len(self._file_list):
print(
"WARNING: More files than expected ({})".format(grid_info["npes"])
"WARNING: More files than expected ({})".format(
self.grid_info["npes"]
)
)
elif self.grid_info["npes"] > len(self._file_list):
print(
"WARNING: Some files missing. Expected {}".format(grid_info["npes"])
"WARNING: Some files missing. Expected {}".format(
self.grid_info["npes"]
)
)

# Initialise private variables
@@ -1243,14 +1252,8 @@ def redistribute(self, npes, nxpe=None, mxg=2, myg=2, include_restarts=True):
old_processor_layout = get_processor_layout(
DataFile(self._file_list[0]), has_t_dimension=True, mxg=mxg, myg=myg
)
old_nxpe = old_processor_layout.nxpe
old_nype = old_processor_layout.nype
old_npes = old_processor_layout.npes
old_mxsub = old_processor_layout.mxsub
old_mysub = old_processor_layout.mysub
nx = old_processor_layout.nx
ny = old_processor_layout.ny
mz = old_processor_layout.mz
mxg = old_processor_layout.mxg
myg = old_processor_layout.myg

@@ -1264,7 +1267,8 @@ def redistribute(self, npes, nxpe=None, mxg=2, myg=2, include_restarts=True):
mysub = new_processor_layout.mysub

# move existing files to backup directory
# don't overwrite backup: os.mkdir will raise exception if directory already exists
# don't overwrite backup: os.mkdir will raise exception if directory already
# exists
backupdir = path.join(self._path, "redistribution_backups")
mkdir(backupdir)
for f in self._file_list:
@@ -1281,7 +1285,8 @@ def redistribute(self, npes, nxpe=None, mxg=2, myg=2, include_restarts=True):
self._path, this_prefix + str(i) + "." + self._suffix
)
if self._suffix.split(".")[-1] in ["nc", "ncdf", "cdl"]:
# set format option to DataFile explicitly to avoid creating netCDF3 files, which can only contain up to 2GB of data
# set format option to DataFile explicitly to avoid creating netCDF3
# files, which can only contain up to 2GB of data
outfile_list.append(
DataFile(outpath, write=True, create=True, format="NETCDF4")
)
@@ -1461,9 +1466,11 @@ def _collect_parallel(self, varname):
)
elif any(dim not in ("t", "x", "y", "z") for dim in dimensions):
raise ValueError(
"Dimensions {} of {} contain spatial dimensions but also have dimensions "
"that are not 't', 'x', 'y' or 'z'. This is not supported by parallel "
"reading. Try reading with parallel=False".format(dimensions, varname)
"Dimensions {} of {} contain spatial dimensions but also have "
"dimensions that are not 't', 'x', 'y' or 'z'. This is not supported "
"by parallel reading. Try reading with parallel=False".format(
dimensions, varname
)
)

is_fieldperp = dimensions in (("t", "x", "z"), ("x", "z"))
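A standalone illustration (option names invented) of the negative-lookbehind pattern used in _evaluate_section above: a bare option name is substituted, while the same name qualified by a parent section (preceded by ':') is left untouched:

import re

pattern = r"(?<!:)\b" + re.escape("n0") + r"\b"
print(re.sub(pattern, "(1e19)", "n0 + mesh:n0"))  # -> (1e19) + mesh:n0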
2 changes: 0 additions & 2 deletions boutdata/gen_surface.py
@@ -31,8 +31,6 @@ def gen_surface(grid):
ixseps1 = grid.read("ixseps1")
ixseps2 = grid.read("ixseps2")
jyseps1_1 = grid.read("jyseps1_1")
jyseps1_2 = grid.read("jyseps1_2")
jyseps2_1 = grid.read("jyseps2_1")
jyseps2_2 = grid.read("jyseps2_2")

if ixseps1 == ixseps2:
5 changes: 2 additions & 3 deletions boutdata/griddata.py
@@ -276,7 +276,7 @@ def gridcontourf(
ix2 = grid["ixseps2"]
try:
nin = grid["ny_inner"]
except:
except KeyError:
nin = j12

nx = grid["nx"]
@@ -300,7 +300,7 @@
nin = nin[0]
nx = nx[0]
ny = ny[0]
except:
except (TypeError, IndexError):
pass

R = grid["Rxy"]
@@ -593,7 +593,6 @@ def bout2sonnet(grdname, outf):
Zxy = g["Zxy"]
Bpxy = g["Bpxy"]
Btxy = g["Btxy"]
Bxy = g["Bxy"]

# Now iterate over cells in the order Eirene expects

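The except narrowing above follows the standard pattern: catch only the failure the fallback is meant to handle, so unrelated errors still propagate. A minimal sketch (grid contents invented):

grid = {"jyseps1_2": 32}  # no "ny_inner" entry

try:
    nin = grid["ny_inner"]
except KeyError:  # missing key -> use the fallback; other errors are not masked
    nin = grid["jyseps1_2"]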
11 changes: 5 additions & 6 deletions boutdata/mayavi2.py
@@ -12,11 +12,10 @@ def aligned_points(grid, nz=1, period=1.0, maxshift=0.4):
zshift = grid["zShift"]
Rxy = grid["Rxy"]
Zxy = grid["Zxy"]
except:
except KeyError:
print("Missing required data")
return None

dz = 2.0 * pi / (period * (nz - 1))
phi0 = np.linspace(0, 2.0 * pi / period, nz)

# Need to insert additional points in Y so mesh looks smooth
@@ -79,13 +78,13 @@ def view3d(sgrid):
from enthought.mayavi.sources.vtk_data_source import VTKDataSource
from enthought.mayavi.modules.api import Outline, GridPlane

mayavi.new_scene()
mayavi2.new_scene()
src = VTKDataSource(data=sgrid)
mayavi.add_source(src)
mayavi.add_module(Outline())
mayavi2.add_source(src)
mayavi2.add_module(Outline())
g = GridPlane()
g.grid_plane.axis = "x"
mayavi.add_module(g)
mayavi2.add_module(g)


if __name__ == "__main__":
22 changes: 8 additions & 14 deletions boutdata/mms.py
@@ -3,7 +3,7 @@
"""

from sympy import symbols, cos, sin, diff, sqrt, pi, simplify, trigsimp, Wild
from sympy import symbols, cos, sin, diff, sqrt, pi, simplify, trigsimp, integrate, Wild

from numpy import arange, zeros

@@ -39,6 +39,7 @@ def __init__(self):

identity = Metric()


# Basic differencing
def ddt(f):
"""Time derivative"""
@@ -173,15 +174,14 @@ def Grad_par(f, metric=identity):


def Vpar_Grad_par(v, f, metric=identity):
"""Parallel advection operator $$v_\parallel \cdot \nabla_\parallel (f)$$"""
r"""Parallel advection operator :math:`v_\parallel \cdot \nabla_\parallel (f)`"""
return v * Grad_par(f, metric=metric)


def Div_par(f, metric=identity):
"""
Divergence of magnetic field aligned vector $$v = \hat{b} f
\nabla \cdot (\hat{b} f) = 1/J \partial_y (f/B)
= B Grad_par(f/B)$$
r"""
Divergence of magnetic field aligned vector :math:`v = \hat{b} f \nabla \cdot
(\hat{b} f) = 1/J \partial_y (f/B) = B Grad_par(f/B)`
"""
return metric.B * Grad_par(f / metric.B, metric)

@@ -550,7 +550,7 @@ def __init__(
# NOTE: Approximate calculation

# Distance between flux surface relative to outboard midplane.
expansion = (1 - (old_div(ss, rmin)) * cos(y)) / (1 - (ss / rmin))
expansion = (1 - (ss / rmin) * cos(y)) / (1 - (ss / rmin))

Bpxy = Bp0 * ((Rmaj + rmin) / Rxy) / expansion

@@ -596,6 +596,7 @@ def write(self, nx, ny, filename, MXG=2):
output - boututils.datafile object, e.g., an open netCDF file
MXG, Number of guard cells in the x-direction
"""
raise NotImplementedError("Implementation of ShapedTokamak.write is unfinished")

ngx = nx + 2 * MXG
ngy = ny
@@ -612,8 +613,6 @@

hthe = zeros([ngx, ngy])

I = zeros([ngx, ngy])

# Note: This is slow, and could be improved using something like lambdify
for i, x in enumerate(xarr):
for j, y in enumerate(yarr):
@@ -625,11 +624,6 @@

hthe[i, j] = self.hthe.evalf(subs={self.x: x, self.y: y})

plt.plot(Rxy[i, :], Zxy[i, :])
plt.show()

Bxy = sqrt(Btxy ** 2 + Bpxy ** 2)

def metric(self):
"""
Returns an analytic metric tensor
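As a quick sanity check of the identity quoted in the Div_par docstring (pure sympy, independent of boutdata.mms; Grad_par is taken as d/dy for the identity metric): when B does not depend on y, B * d/dy(f/B) reduces to df/dy, so Div_par reduces to Grad_par:

from sympy import Function, Symbol, diff, simplify

y = Symbol("y")
B = Symbol("B", positive=True)  # constant field strength
f = Function("f")(y)

assert simplify(B * diff(f / B, y) - diff(f, y)) == 0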