Commit d002888a authored by Michael Kuron

FiniteDifferenceStaggeredStencilDerivation.apply for vector field

uses new Field.neighbor_vector
parent 2d7485a8
1 merge request: !99 finite difference stencil derivation for staggered positions
Pipeline #20230 passed with warnings in 4 minutes and 46 seconds
@@ -340,4 +340,10 @@ class FiniteDifferenceStaggeredStencilDerivation:
         plot(pts, data=ws)
 
     def apply(self, field):
-        return sum([field.__getitem__(point) * weight for point, weight in zip(self.points, self.weights)])
+        if field.index_dimensions == 0:
+            return sum([field.__getitem__(point) * weight for point, weight in zip(self.points, self.weights)])
+        else:
+            total = field.neighbor_vector(self.points[0]) * self.weights[0]
+            for point, weight in zip(self.points[1:], self.weights[1:]):
+                total += field.neighbor_vector(point) * weight
+            return total
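For illustration, here is a minimal sketch of how the updated `apply` behaves for a scalar versus a vector field (the field names are examples; the 3D vector-field variant of this check appears in the notebook below). The explicit accumulation into `total` is presumably used because `sum()` would start from the integer 0, which cannot be added to a sympy Matrix.

``` python
import sympy as sp
import pystencils as ps
from pystencils.fd.derivation import FiniteDifferenceStaggeredStencilDerivation

c = ps.fields("c: [2D]")     # scalar field, index_dimensions == 0
v = ps.fields("v(2): [2D]")  # vector field, index_dimensions == 1

d = FiniteDifferenceStaggeredStencilDerivation("E", 2, (0,))

# scalar field: apply returns a plain sympy expression, as before
assert d.apply(c) == c[1, 0] - c[0, 0]

# vector field: the weighted sum of neighbor_vector() calls yields a sympy Matrix
assert d.apply(v) == sp.Matrix([v[1, 0](i) - v[0, 0](i) for i in range(2)])
```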
@@ -441,6 +441,22 @@ class Field(AbstractField):
         center = tuple([0] * self.spatial_dimensions)
         return Field.Access(self, center)
 
+    def neighbor_vector(self, offset):
+        """Like neighbor, but returns the entire vector/tensor stored at offset."""
+        if self.spatial_dimensions == 2 and len(offset) == 3:
+            assert offset[2] == 0
+            offset = offset[:2]
+
+        if self.index_dimensions == 0:
+            return sp.Matrix([self.__getitem__(offset)])
+        elif self.index_dimensions == 1:
+            return sp.Matrix([self.__getitem__(offset)(i) for i in range(self.index_shape[0])])
+        elif self.index_dimensions == 2:
+            return sp.Matrix([[self.__getitem__(offset)(i, k) for k in range(self.index_shape[1])]
+                              for i in range(self.index_shape[0])])
+        else:
+            raise NotImplementedError("neighbor_vector is not implemented for more than 2 index dimensions")
+
     def __getitem__(self, offset):
         if type(offset) is np.ndarray:
             offset = tuple(offset)
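A quick sketch of what `neighbor_vector` returns for the different index dimensionalities (the field names and shapes below are illustrative only):

``` python
import sympy as sp
import pystencils as ps

s = ps.fields("s: [2D]")        # index_dimensions == 0 -> 1x1 matrix
v = ps.fields("v(2): [2D]")     # index_dimensions == 1 -> column vector
T = ps.fields("T(2, 2): [2D]")  # index_dimensions == 2 -> 2x2 matrix

assert s.neighbor_vector((1, 0)) == sp.Matrix([s[1, 0]])
assert v.neighbor_vector((1, 0)) == sp.Matrix([v[1, 0](0), v[1, 0](1)])
assert T.neighbor_vector((1, 0)) == sp.Matrix([[T[1, 0](0, 0), T[1, 0](0, 1)],
                                               [T[1, 0](1, 0), T[1, 0](1, 1)]])
```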
%% Cell type:code id: tags:
``` python
from pystencils.session import *
from pystencils.fd.derivation import *
```
%% Cell type:markdown id: tags:
# 2D standard stencils
%% Cell type:code id: tags:
``` python
stencil = [(-1, 0), (1, 0), (0, -1), (0, 1), (0, 0)]
standard_2d_00 = FiniteDifferenceStencilDerivation((0,0), stencil)
f = ps.fields("f: [2D]")
standard_2d_00_res = standard_2d_00.get_stencil()
res = standard_2d_00_res.apply(f.center)
expected = f[-1, 0] - 2 * f[0, 0] + f[1, 0]
assert res == expected
```
%% Cell type:code id: tags:
``` python
assert standard_2d_00_res.accuracy == 2
assert not standard_2d_00_res.is_isotropic
standard_2d_00_res
```
%% Output
Finite difference stencil of accuracy 2, isotropic error: False
%% Cell type:code id: tags:
``` python
standard_2d_00.get_stencil().as_matrix()
```
%% Output
$$\left[\begin{matrix}0 & 0 & 0\\1 & -2 & 1\\0 & 0 & 0\end{matrix}\right]$$
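%% Cell type:markdown id: tags:
For reference, assuming unit grid spacing this is just the standard second-order central difference for the second x-derivative:
$$\partial_{xx} f\big|_{i,j} \approx f_{i-1,j} - 2\,f_{i,j} + f_{i+1,j}$$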
%% Cell type:markdown id: tags:
# 2D isotropic stencils
## second x-derivative
%% Cell type:code id: tags:
``` python
stencil = [(i, j) for i in (-1, 0, 1) for j in (-1, 0, 1)]
isotropic_2d_00 = FiniteDifferenceStencilDerivation((0,0), stencil)
isotropic_2d_00_res = isotropic_2d_00.get_stencil(isotropic=True)
assert isotropic_2d_00_res.is_isotropic
assert isotropic_2d_00_res.accuracy == 2
isotropic_2d_00_res
```
%% Output
Finite difference stencil of accuracy 2, isotropic error: True
%% Cell type:code id: tags:
``` python
isotropic_2d_00_res.as_matrix()
```
%% Output
$$\left[\begin{matrix}\frac{1}{12} & - \frac{1}{6} & \frac{1}{12}\\\frac{5}{6} & - \frac{5}{3} & \frac{5}{6}\\\frac{1}{12} & - \frac{1}{6} & \frac{1}{12}\end{matrix}\right]$$
%% Cell type:code id: tags:
``` python
plt.figure(figsize=(2,2))
isotropic_2d_00_res.visualize()
```
%% Output
%% Cell type:code id: tags:
``` python
expected_result = sp.Matrix([[1, -2, 1], [10, -20, 10], [1, -2, 1]]) / 12
assert expected_result == isotropic_2d_00_res.as_matrix()
```
%% Cell type:code id: tags:
``` python
type(isotropic_2d_00_res.as_matrix())
```
%% Output
sympy.matrices.dense.MutableDenseMatrix
%% Cell type:code id: tags:
``` python
type(expected_result)
```
%% Output
sympy.matrices.dense.MutableDenseMatrix
%% Cell type:markdown id: tags:
## Isotropic Laplacian
%% Cell type:code id: tags:
``` python
isotropic_2d_11 = FiniteDifferenceStencilDerivation((1,1), stencil)
isotropic_2d_11_res = isotropic_2d_11.get_stencil(isotropic=True)
iso_laplacian = isotropic_2d_00_res.as_matrix() + isotropic_2d_11_res.as_matrix()
iso_laplacian
```
%% Output
$$\left[\begin{matrix}\frac{1}{6} & \frac{2}{3} & \frac{1}{6}\\\frac{2}{3} & - \frac{10}{3} & \frac{2}{3}\\\frac{1}{6} & \frac{2}{3} & \frac{1}{6}\end{matrix}\right]$$
%% Cell type:code id: tags:
``` python
expected_result = sp.Matrix([[1, 4, 1], [4, -20, 4], [1, 4, 1]]) / 6
assert iso_laplacian == expected_result
```
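%% Cell type:markdown id: tags:
Written out (assuming unit grid spacing), this is the familiar isotropic nine-point Laplacian:
$$\nabla^2 f\big|_{i,j} \approx \frac{1}{6}\left(4\left(f_{i-1,j} + f_{i+1,j} + f_{i,j-1} + f_{i,j+1}\right) + f_{i-1,j-1} + f_{i-1,j+1} + f_{i+1,j-1} + f_{i+1,j+1} - 20\,f_{i,j}\right)$$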
%% Cell type:markdown id: tags:
# stencils for staggered fields
%% Cell type:code id: tags:
``` python
half = sp.Rational(1, 2)
fd_points_ex = (
(half, 0),
(-half, 0),
(half, 1),
(half, -1),
(-half, 1),
(-half, -1)
)
assert set(fd_points_ex) == set(FiniteDifferenceStaggeredStencilDerivation("E", 2).stencil)
fd_points_ey = (
(0, half),
(0, -half),
(-1, -half),
(-1, half),
(1, -half),
(1, half)
)
assert set(fd_points_ey) == set(FiniteDifferenceStaggeredStencilDerivation("N",2).stencil)
fd_points_c = (
(half, half),
(-half, -half),
(half, -half),
(-half, half)
)
assert set(fd_points_c) == set(FiniteDifferenceStaggeredStencilDerivation("NE",2).stencil)
assert len(FiniteDifferenceStaggeredStencilDerivation("E",3).points) == 10
assert len(FiniteDifferenceStaggeredStencilDerivation("NE",3).points) == 12
assert len(FiniteDifferenceStaggeredStencilDerivation("TNE",3).points) == 8
```
%% Cell type:code id: tags:
``` python
c = ps.fields("c: [2D]")
c3 = ps.fields("c3: [3D]")
assert FiniteDifferenceStaggeredStencilDerivation("E", 2, (0,)).apply(c) == c[1, 0] - c[0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("W", 2, (0,)).apply(c) == c[0, 0] - c[-1, 0]
assert FiniteDifferenceStaggeredStencilDerivation("N", 2, (1,)).apply(c) == c[0, 1] - c[0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("S", 2, (1,)).apply(c) == c[0, 0] - c[0, -1]
assert FiniteDifferenceStaggeredStencilDerivation("E", 3, (0,)).apply(c3) == c3[1, 0, 0] - c3[0, 0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("W", 3, (0,)).apply(c3) == c3[0, 0, 0] - c3[-1, 0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("N", 3, (1,)).apply(c3) == c3[0, 1, 0] - c3[0, 0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("S", 3, (1,)).apply(c3) == c3[0, 0, 0] - c3[0, -1, 0]
assert FiniteDifferenceStaggeredStencilDerivation("T", 3, (2,)).apply(c3) == c3[0, 0, 1] - c3[0, 0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("B", 3, (2,)).apply(c3) == c3[0, 0, 0] - c3[0, 0, -1]
assert FiniteDifferenceStaggeredStencilDerivation("NE", 2, (0,)).apply(c) + \
FiniteDifferenceStaggeredStencilDerivation("NE", 2, (1,)).apply(c) == c[1, 1] - c[0, 0]
assert FiniteDifferenceStaggeredStencilDerivation("NE", 3, (0,)).apply(c3) + \
FiniteDifferenceStaggeredStencilDerivation("NE", 3, (1,)).apply(c3) == c3[1, 1, 0] - c3[0, 0, 0]
```
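%% Cell type:markdown id: tags:
Each of these is a two-point difference evaluated at the staggered position half-way between the two accessed cells, e.g. for the eastern face of cell (0, 0) (assuming unit grid spacing):
$$\partial_x c\big|_{(1/2,\,0)} \approx c_{1,0} - c_{0,0}$$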
%% Cell type:code id: tags:
``` python
d = FiniteDifferenceStaggeredStencilDerivation("NE", 2, (0, 1))
assert d.apply(c) == c[0,0] + c[1,1] - c[1,0] - c[0,1]
d.visualize()
```
%% Output
%% Cell type:code id: tags:
``` python
v3 = ps.fields("v(3): [3D]")
assert FiniteDifferenceStaggeredStencilDerivation("E", 3, (0,)).apply(v3) == \
sp.Matrix([v3[1,0,0](i) - v3[0,0,0](i) for i in range(*v3.index_shape)])
```
%% Cell type:code id: tags:
``` python
```