Commit
Merge pull request #118 from boschresearch/fix-warnings-and-refactor
Fix warnings and refactor
johannes-mueller authored Oct 14, 2024
2 parents 4966fd0 + 64c9969 commit da130be
Showing 10 changed files with 244 additions and 176 deletions.
2 changes: 2 additions & 0 deletions setup.cfg
@@ -120,6 +120,8 @@ addopts =
--cov src/pylife --cov-append -m "not slow_acceptance and not demos"
--doctest-modules
--ignore=src/pylife/materialdata/woehler/bayesian.py
--ignore=src/pylife/strength/helpers.py
--ignore=src/pylife/strength/sn_curve.py

norecursedirs =
dist
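For reference, with these options in place a bare `pytest` run is roughly equivalent to the following invocation (assuming no further options are injected elsewhere in the configuration); the two new `--ignore` entries exclude `helpers.py` and `sn_curve.py` from doctest collection:

```
pytest --cov src/pylife --cov-append -m "not slow_acceptance and not demos" \
    --doctest-modules \
    --ignore=src/pylife/materialdata/woehler/bayesian.py \
    --ignore=src/pylife/strength/helpers.py \
    --ignore=src/pylife/strength/sn_curve.py
```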
2 changes: 1 addition & 1 deletion src/pylife/materialdata/woehler/elementary.py
@@ -70,7 +70,7 @@ def analyze(self, **kwargs):
Parameters
----------
\*\*kwargs : kwargs arguments
**kwargs : kwargs arguments
Arguments to be passed to the derived class
"""
if len(self._fd.load.unique()) < 2:
43 changes: 20 additions & 23 deletions src/pylife/materialdata/woehler/maxlike.py
@@ -125,42 +125,39 @@ def warn_and_fix_if_less_than_two_mixed_levels():
warn_and_fix_if_no_runouts()
warn_and_fix_if_less_than_two_mixed_levels()

p_opt = initial_wcurve.to_dict()
prms_to_optimize = initial_wcurve.to_dict()
for k in fixed_prms:
p_opt.pop(k)
prms_to_optimize.pop(k)

if not p_opt:
if not prms_to_optimize:
raise AttributeError('You need to leave at least one parameter empty!')
var_opt = optimize.fmin(
self.__likelihood_wrapper, [*p_opt.values()],
args=([*p_opt], fixed_prms),
optimized_prms = optimize.fmin(
self.__likelihood_wrapper, [*prms_to_optimize.values()],
args=([*prms_to_optimize], fixed_prms),
full_output=True,
disp=False,
maxiter=1e4,
maxfun=1e4,
)
res = {}
res.update(fixed_prms)
res.update(zip([*p_opt], var_opt[0]))
)[0]

return self.__make_parameters(res)
# TODO: Change to the following line when Python 3.8 is dropped
# result = fixed_prms | dict(zip(prms_to_optimize, optimized_prms))

result = {}
result.update(fixed_prms)
result.update(zip(prms_to_optimize, optimized_prms))

return self.__make_parameters(result)

def __make_parameters(self, params):
params['SD'] = np.abs(params['SD'])
params['TS'] = np.abs(params['TS'])
params['k_1'] = np.abs(params['k_1'])
params['ND'] = np.abs(params['ND'])
params['TN'] = np.abs(params['TN'])
return params
return {k: np.abs(v) for k, v in params.items()}

def __likelihood_wrapper(self, var_args, var_keys, fix_args):
''' 1) Finds the start values to be optimized. The rest of the parameters are fixed by the user.
2) Calls function mali_sum_lolli to calculate the maximum likelihood of the current
variable states.
'''
# TODO: Change to the following line when Python 3.8 is dropped
# args = self.__make_parameters(fix_args | dict(zip(var_keys, var_args)))

args = {}
args.update(fix_args)
args.update(zip(var_keys, var_args))
args = self.__make_parameters(args)

return -self._lh.likelihood_total(args['SD'], args['TS'], args['k_1'], args['ND'], args['TN'])
return -self._lh.likelihood_total(**args)
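Two patterns in this hunk are worth spelling out, sketched below with toy values (the objective function and numbers are illustrative, not from the codebase). With `full_output=True`, `scipy.optimize.fmin` returns the tuple `(xopt, fopt, iter, funcalls, warnflag)`, so the trailing `[0]` picks out just the optimized parameter vector; the commented-out merge uses the PEP 584 dict-union operator `|`, available from Python 3.9, which the `update()`-based lines emulate:

```python
import numpy as np
from scipy import optimize

fixed_prms = {"k_1": 5.0}                    # parameters pinned by the user
prms_to_optimize = {"SD": 300.0, "ND": 1e6}  # start values for the free parameters

xopt = optimize.fmin(
    # toy objective with the same call shape as __likelihood_wrapper
    lambda x, keys, fixed: float(np.sum(np.asarray(x) ** 2)),
    [*prms_to_optimize.values()],
    args=([*prms_to_optimize], fixed_prms),
    full_output=True,
    disp=False,
)[0]  # full_output=True -> (xopt, fopt, iter, funcalls, warnflag)

# Python >= 3.9 equivalent of the update() sequence above
result = fixed_prms | dict(zip(prms_to_optimize, xopt))
```

The final `likelihood_total(**args)` call follows the same spirit: the merged dict is unpacked as keyword arguments instead of spelling out each parameter positionally.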
171 changes: 84 additions & 87 deletions src/pylife/mesh/surface.py
@@ -19,12 +19,10 @@

import numpy as np
import pandas as pd
import warnings
import numpy as np
import numpy.linalg

from .meshsignal import Mesh


@pd.api.extensions.register_dataframe_accessor('surface_3D')
class Surface3D(Mesh):
'''Determines nodes at the surface in a 3D mesh.
@@ -39,52 +37,47 @@ class Surface3D(Mesh):
with the names `node_id` and `element_id`
'''

def _solid_angle(self, x0, y0, z0, x1, y1, z1, x2, y2, z2, x3, y3, z3):
def _solid_angle(self, df):
n = len(df)

p0 = np.array([x0, y0, z0]).T
p1 = np.array([x1, y1, z1]).T
p2 = np.array([x2, y2, z2]).T
p3 = np.array([x3, y3, z3]).T
p0 = np.array([df.x_n0, df.y_n0, df.z_n0]).T
p1 = np.array([df.x_n1, df.y_n1, df.z_n1]).T
p2 = np.array([df.x_n2, df.y_n2, df.z_n2]).T
p3 = np.array([df.x_n3, df.y_n3, df.z_n3]).T

# all vectors have shape (n,3)
r0 = p1 - p0
r1 = p2 - p0
r2 = p3 - p0

r0 /= np.linalg.norm(r0, axis=1)[:,None] * np.ones((1,3))
r1 /= np.linalg.norm(r1, axis=1)[:,None] * np.ones((1,3))
r2 /= np.linalg.norm(r2, axis=1)[:,None] * np.ones((1,3))


a = np.arccos(np.sum(r0*r1, axis=1))
b = np.arccos(np.sum(r0*r2, axis=1))
c = np.arccos(np.sum(r1*r2, axis=1))

s = (a+b+c) / 2

# silence invalid values error, the resulting nan values will be masked out at the end
old_settings = np.geterr()
np.seterr(invalid="ignore")
with np.errstate(invalid="ignore"):
r0 /= np.broadcast_to(np.linalg.norm(r0, axis=1), (3, n)).T
r1 /= np.broadcast_to(np.linalg.norm(r1, axis=1), (3, n)).T
r2 /= np.broadcast_to(np.linalg.norm(r2, axis=1), (3, n)).T

a = np.arccos(np.sum(r0*r1, axis=1))
b = np.arccos(np.sum(r0*r2, axis=1))
c = np.arccos(np.sum(r1*r2, axis=1))

sinA = np.sqrt( (np.sin(s-b) * np.sin(s-c)) / (np.sin(b)*np.sin(c)) )
sinB = np.sqrt( (np.sin(s-a) * np.sin(s-c)) / (np.sin(a)*np.sin(c)) )
sinC = np.sqrt( (np.sin(s-b) * np.sin(s-a)) / (np.sin(b)*np.sin(a)) )
s = (a+b+c) / 2

cosA = np.sqrt( (np.sin(s) * np.sin(s-a)) / (np.sin(b)*np.sin(c)) )
cosB = np.sqrt( (np.sin(s) * np.sin(s-b)) / (np.sin(b)*np.sin(c)) )
cosC = np.sqrt( (np.sin(s) * np.sin(s-c)) / (np.sin(b)*np.sin(c)) )
sinA = np.sqrt((np.sin(s-b) * np.sin(s-c)) / (np.sin(b)*np.sin(c)))
sinB = np.sqrt((np.sin(s-a) * np.sin(s-c)) / (np.sin(a)*np.sin(c)))
sinC = np.sqrt((np.sin(s-b) * np.sin(s-a)) / (np.sin(b)*np.sin(a)))

# restore silenced warnings
np.seterr(**old_settings)
cosA = np.sqrt((np.sin(s) * np.sin(s-a)) / (np.sin(b)*np.sin(c)))
cosB = np.sqrt((np.sin(s) * np.sin(s-b)) / (np.sin(b)*np.sin(c)))
cosC = np.sqrt((np.sin(s) * np.sin(s-c)) / (np.sin(b)*np.sin(c)))

# clip values marginally above 1 down to 1
sinA = np.minimum(sinA, np.ones_like(sinA))
sinB = np.minimum(sinB, np.ones_like(sinB))
sinC = np.minimum(sinC, np.ones_like(sinC))
sinA = np.minimum(sinA, 1.0)
sinB = np.minimum(sinB, 1.0)
sinC = np.minimum(sinC, 1.0)

cosA = np.minimum(cosA, np.ones_like(cosA))
cosB = np.minimum(cosB, np.ones_like(cosB))
cosC = np.minimum(cosC, np.ones_like(cosC))
cosA = np.minimum(cosA, 1.0)
cosB = np.minimum(cosB, 1.0)
cosC = np.minimum(cosC, 1.0)

A = np.where(np.isnan(sinA), 2 * np.arccos(cosA), 2 * np.arcsin(sinA))
B = np.where(np.isnan(sinB), 2 * np.arccos(cosB), 2 * np.arcsin(sinB))
@@ -95,58 +88,66 @@ def _solid_angle(self, x0, y0, z0, x1, y1, z1, x2, y2, z2, x3, y3, z3):

return E
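For context, `_solid_angle` evaluates the solid angle subtended at a tetrahedron corner via spherical trigonometry: the unit edge vectors r0, r1, r2 span a spherical triangle whose side lengths are the pairwise angles a, b, c computed above, s = (a+b+c)/2 is its semi-perimeter, and the half-angle identities recover the triangle's interior angles (hence the `2 * arcsin`/`2 * arccos` in the `np.where` fallback). The solid angle is then the spherical excess; the final combination sits in the collapsed part of the hunk:

$$\sin\frac{A}{2} = \sqrt{\frac{\sin(s-b)\,\sin(s-c)}{\sin b\,\sin c}}, \qquad \cos\frac{A}{2} = \sqrt{\frac{\sin s\,\sin(s-a)}{\sin b\,\sin c}}, \qquad E = A + B + C - \pi$$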

def _compute_normals(self, x0, y0, z0, x1, y1, z1, x2, y2, z2):
p0 = np.array([x0, y0, z0]).T
p1 = np.array([x1, y1, z1]).T
p2 = np.array([x2, y2, z2]).T
def _compute_normals(self, df):
n = len(df)

p0 = np.array([df.x_n0_n0, df.y_n0_n0, df.z_n0_n0]).T
p1 = np.array([df.x_p1, df.y_p1, df.z_p1]).T
p2 = np.array([df.x_p2, df.y_p2, df.z_p2]).T

# all vectors have shape (n,3)
r0 = p1 - p0
r1 = p2 - p0

r0 /= np.linalg.norm(r0, axis=1)[:,None] * np.ones((1,3))
r1 /= np.linalg.norm(r1, axis=1)[:,None] * np.ones((1,3))
normal = np.cross(r0,r1)
normal /= np.linalg.norm(normal, axis=1)[:,None] * np.ones((1,3))
r0 /= np.broadcast_to(np.linalg.norm(r0, axis=1), (3, n)).T
r1 /= np.broadcast_to(np.linalg.norm(r1, axis=1), (3, n)).T

return normal[:,0], normal[:,1], normal[:,2]
normal = np.cross(r0, r1)
normal /= np.broadcast_to(np.linalg.norm(normal, axis=1), (3, n)).T

def _determine_is_at_surface(self):
df = self._obj[["x", "y", "z"]].reorder_levels(["element_id", "node_id"]).sort_index(level="element_id", sort_remaining=False)
df["node_id"] = df.index.get_level_values("node_id")
return normal

def _determine_is_at_surface(self):
df = (
self.coordinates
#.reorder_levels(["element_id", "node_id"])
#.sort_index(level="element_id", sort_remaining=False)
.assign(node_id=self._obj.index.get_level_values("node_id"))
)
# add two other nodes for every node
df_nodes = df.copy()
df_nodes = df_nodes.merge(df_nodes, on="element_id", how="outer", suffixes=["_n0", "_n1"])
df_nodes = df_nodes[df_nodes["node_id_n0"] != df_nodes["node_id_n1"]]

df_nodes = df_nodes.merge(df, on="element_id", how="outer").copy()
df_nodes = df_nodes[df_nodes["node_id_n1"] < df_nodes["node_id"]]
df_nodes = df_nodes.rename(columns={"node_id": "node_id_n2", "x": "x_n2", "y": "y_n2", "z": "z_n2"})

df_nodes = df_nodes.merge(df, on="element_id", how="outer", suffixes=["_n2", "_n3"]).copy()
df_nodes = df_nodes[df_nodes["node_id_n2"] < df_nodes["node_id"]]
df0 = df_nodes.rename(columns={"node_id": "node_id_n3", "x": "x_n3", "y": "y_n3", "z": "z_n3"})

df0.loc[:,"E"] \
= self._solid_angle(df0.x_n0, df0.y_n0, df0.z_n0, df0.x_n1, df0.y_n1, df0.z_n1, df0.x_n2, df0.y_n2, df0.z_n2, df0.x_n3, df0.y_n3, df0.z_n3)
df0 = (
df.merge(df, on="element_id", how="outer", suffixes=["_n0", "_n1"])
.query("node_id_n0 != node_id_n1")
.merge(df, on="element_id", how="outer")
.query("node_id_n1 < node_id")
.rename(
columns={"node_id": "node_id_n2", "x": "x_n2", "y": "y_n2", "z": "z_n2"}
)
.merge(df, on="element_id", how="outer", suffixes=["_n2", "_n3"])
.query("node_id_n2 < node_id")
.rename(
columns={"node_id": "node_id_n3", "x": "x_n3", "y": "y_n3", "z": "z_n3"}
)
.pipe(lambda df: df.assign(E=self._solid_angle(df)))
)

max_E = df0.groupby(["element_id", "node_id_n0"]).max()["E"]

df1 = df0.join(max_E, on=["element_id", "node_id_n0"], how="left", rsuffix="_max")
df2 = df1[df1["E"] == df1["E_max"]]
df2 = df2.groupby(["element_id", "node_id_n0"]).first()

index_columns = df2.reset_index()
index_columns = index_columns[["element_id", "node_id_n0"]]
df2.index = pd.MultiIndex.from_frame(index_columns, names=["element_id", "node_id"])

df3 = df2[["x_n0", "y_n0", "z_n0", "E"]]
df1 = (
df0.join(max_E, on=["element_id", "node_id_n0"], how="left", rsuffix="_max")
.query("E == E_max")
.groupby(["element_id", "node_id_n0"])
.first()
.reset_index()
.rename(columns={"node_id_n0": "node_id"})
.set_index(["element_id", "node_id"])
)

df3 = df3.join(df3.groupby(["node_id"]).sum().rename(columns={"E":"Esum"})["Esum"])
df3.loc[:,"is_at_surface"] = df3["Esum"] < 4*np.pi-1e-5
df3 = df1[["x_n0", "y_n0", "z_n0", "E"]].join(
df1.groupby(["node_id"]).sum().rename(columns={"E": "Esum"})["Esum"]
)

df3["is_at_surface"].fillna(False, inplace = True)
df3.loc[:, "is_at_surface"] = df3["Esum"] < 4*np.pi-1e-5

return df3
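The closing criterion encodes the geometric test: around a node strictly inside the mesh the element-wise solid angles tile the full sphere, so $E_\text{sum} = 4\pi$; any node with $E_\text{sum} < 4\pi - 10^{-5}$ (the tolerance absorbing floating-point error) is flagged as lying at the surface.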

@@ -214,18 +215,15 @@ def is_at_surface_with_normals(self):

df = self._determine_is_at_surface()

df_at_surface = df[df["is_at_surface"]].reset_index().set_index("element_id")
df_at_surface = df[df["is_at_surface"]].reset_index("node_id")

d = df_at_surface.merge(df_at_surface, on="element_id", how="left", suffixes=["_n0", "_n1"])
d = d[d["node_id_n0"] != d["node_id_n1"]]

p0 = d.groupby(["element_id", "node_id_n0"], group_keys=True).nth(0)
p1 = d.groupby(["element_id", "node_id_n0"], group_keys=True).nth(1)
d2 = d.groupby(["element_id", "node_id_n0"], group_keys=True).first()

p0 = p0.reset_index().set_index(["element_id", "node_id_n0"])
p1 = p1.reset_index().set_index(["element_id", "node_id_n0"])
d2 = d2.reset_index().set_index(["element_id", "node_id_n0"])
groups = d.groupby(["element_id", "node_id_n0"], group_keys=True)
p0 = groups.first()
p1 = groups.nth(1).set_index("node_id_n0", append=True)
d2 = groups.first()

d2["node_id_p1"] = p0["node_id_n1"]
d2["x_p1"] = p0["x_n0_n1"]
Expand All @@ -237,14 +235,13 @@ def is_at_surface_with_normals(self):
d2["y_p2"] = p1["y_n0_n1"]
d2["z_p2"] = p1["z_n0_n1"]

d2.loc[:,"normal_x"], d2.loc[:,"normal_y"], d2.loc[:,"normal_z"] \
= self._compute_normals(d2.x_n0_n0, d2.y_n0_n0, d2.z_n0_n0, d2.x_p1, d2.y_p1, d2.z_p1, d2.x_p2, d2.y_p2, d2.z_p2)

df_with_normals = d2[["normal_x", "normal_y", "normal_z"]]
df_with_normals = pd.DataFrame(
self._compute_normals(d2),
columns=["normal_x", "normal_y", "normal_z"],
index=d2.index,
)
df_with_normals.index.names = ["element_id", "node_id"]

df_result = df.join(df_with_normals)[["is_at_surface", "normal_x", "normal_y", "normal_z"]]

return df_result
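The regrouped lookups above lean on a pandas detail: in pandas 2.x `GroupBy.nth` acts as a filtration, returning the selected rows with their original index, whereas `first()` aggregates with the group keys as index — which is why only the `nth(1)` result needs the extra `set_index("node_id_n0", append=True)`. A toy sketch (frame and names are illustrative):

```python
import pandas as pd

df = pd.DataFrame({"g": [1, 1, 2, 2], "v": [10, 11, 20, 21]})
groups = df.groupby("g", group_keys=True)

print(groups.first())  # aggregation: one row per group, indexed by "g"
print(groups.nth(1))   # pandas 2.x: filtration, original RangeIndex kept
```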


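Finally, among the warning fixes above, the manual `np.geterr()`/`np.seterr()` save-and-restore in `_solid_angle` gives way to the `np.errstate` context manager, which scopes the override to the `with` block and restores the previous error state automatically, even if an exception escapes. A minimal sketch of the pattern:

```python
import numpy as np

values = np.array([-0.5, 0.5, 1.5])

# old pattern: save, override, restore by hand
old_settings = np.geterr()
np.seterr(invalid="ignore")
result_old = np.arccos(values)      # 1.5 is outside the domain -> nan, silently
np.seterr(**old_settings)

# new pattern: override confined to the with block
with np.errstate(invalid="ignore"):
    result_new = np.arccos(values)  # same nan, previous state restored on exit
```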