phiflow-3.3.0
phiml-1.13.1
I tried to reproduce the example of solving the Navier-Stokes equations at https://physicsbaseddeeplearning.org/overview-ns-forw.html, but the code fails with a `Diverged` exception.
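For reference, here is a minimal sketch of what I ran, adapted from the linked notebook. The grid resolution, bounds, inflow geometry and constants below are my adaptation from memory and may differ slightly from the book's exact values; only the `step` body and the failing call match the traceback verbatim.

```python
from phi.flow import *  # phiflow 3.3.0, phiml 1.13.1

DT = 1.5
NU = 0.01

# Grid and inflow setup adapted from the notebook (values reproduced from memory)
smoke = CenteredGrid(0, extrapolation.BOUNDARY, x=32, y=40, bounds=Box(x=(0, 80), y=(0, 100)))
velocity = StaggeredGrid(0, extrapolation.ZERO, x=32, y=40, bounds=Box(x=(0, 80), y=(0, 100)))
INFLOW = 0.2 * CenteredGrid(Sphere(center=tensor([30, 15], channel(vector='x,y')), radius=10),
                            extrapolation.BOUNDARY, x=32, y=40, bounds=Box(x=(0, 80), y=(0, 100)))

def step(velocity, smoke, pressure, dt=1.0, buoyancy_factor=1.0):
    smoke = advect.semi_lagrangian(smoke, velocity, dt) + INFLOW
    buoyancy_force = (smoke * (0, buoyancy_factor)) @ velocity   # resample smoke to velocity sample points
    velocity = advect.semi_lagrangian(velocity, velocity, dt) + dt * buoyancy_force
    velocity = diffuse.explicit(velocity, NU, dt)
    velocity, pressure = fluid.make_incompressible(velocity)    # <- this call raises Diverged
    return velocity, smoke, pressure

velocity, smoke, pressure = step(velocity, smoke, None, dt=DT)
```

The first call to `step(...)` then fails with the traceback below.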
```
---------------------------------------------------------------------------
Diverged Traceback (most recent call last)
Cell In[6], line 9
6 velocity, pressure = fluid.make_incompressible(velocity)
7 return velocity, smoke, pressure
----> 9 velocity, smoke, pressure = step(velocity, smoke, None, dt=DT)
11 print("Max. velocity and mean marker density: " + format( [ math.max(velocity.values) , math.mean(smoke.values) ] ))
13 pylab.imshow(np.asarray(smoke.values.numpy('y,x')), origin='lower', cmap='magma')
Cell In[6], line 6, in step(velocity, smoke, pressure, dt, buoyancy_factor)
4 velocity = advect.semi_lagrangian(velocity, velocity, dt) + dt * buoyancy_force
5 velocity = diffuse.explicit(velocity, NU, dt)
----> 6 velocity, pressure = fluid.make_incompressible(velocity)
7 return velocity, smoke, pressure
File D:\ProgramData\Anaconda3\envs\pytorch\Lib\site-packages\phi\physics\fluid.py:156, in make_incompressible(velocity, obstacles, solve, active, order, correct_skew, wide_stencil)
154 if wide_stencil is None:
155 wide_stencil = not velocity.is_staggered
--> 156 pressure = math.solve_linear(masked_laplace, div, solve, velocity.boundary, hard_bcs, active, wide_stencil=wide_stencil, order=order, implicit=None, upwind=None, correct_skew=correct_skew)
157 # --- Subtract grad p ---
158 grad_pressure = field.spatial_gradient(pressure, input_velocity.extrapolation, at=velocity.sampled_at, order=order, scheme='green-gauss')
File D:\ProgramData\Anaconda3\envs\pytorch\Lib\site-packages\phiml\math\_optimize.py:671, in solve_linear(f, y, solve, grad_for_f, f_kwargs, *f_args, **f_kwargs_)
668 return result # must return exactly `x` so gradient isn't computed w.r.t. other quantities
670 _matrix_solve = attach_gradient_solve(_matrix_solve_forward, auxiliary_args=f'is_backprop,solve{",matrix" if matrix.backend == NUMPY else ""}', matrix_adjoint=grad_for_f)
--> 671 return _matrix_solve(y - bias, solve, matrix)
672 else: # Matrix-free solve
673 f_args = cached(f_args)
File D:\ProgramData\Anaconda3\envs\pytorch\Lib\site-packages\phiml\math\_functional.py:952, in CustomGradientFunction.__call__(self, *args, **kwargs)
950 key, _, natives, _, _ = key_from_args(args, kwargs, self.f_params, cache=False, aux=self.auxiliary_args, attr_type=variable_attributes)
951 if not key.backend.supports(Backend.jacobian) and not key.backend.supports(Backend.jacobian):
--> 952 return self.f(*args, **kwargs) # no need to use custom gradient if gradients aren't supported anyway
953 elif not key.backend.supports(Backend.custom_gradient):
954 warnings.warn(f"custom_gradient() not supported by {key.backend}. Running function '{f_name(self.f)}' as-is.", RuntimeWarning)
File D:\ProgramData\Anaconda3\envs\pytorch\Lib\site-packages\phiml\math\_optimize.py:667, in solve_linear.<locals>._matrix_solve_forward(y, solve, matrix, is_backprop)
665 idx = b.concat([idx, new_col, new_row], 0)
666 nat_matrix = b.sparse_coo_tensor(idx, data, (N+1, N+1))
--> 667 result = _linear_solve_forward(y, solve, nat_matrix, pattern_dims_in, pattern_dims_out, preconditioner, backend, is_backprop)
668 return result
File D:\ProgramData\Anaconda3\envs\pytorch\Lib\site-packages\phiml\math\_optimize.py:780, in _linear_solve_forward(y, solve, native_lin_op, pattern_dims_in, pattern_dims_out, preconditioner, backend, is_backprop)
778 for tape in _SOLVE_TAPES:
779 tape._add(solve, trj, result)
--> 780 result.convergence_check(is_backprop and 'TensorFlow' in backend.name) # raises ConvergenceException
781 return final_x
File D:\ProgramData\Anaconda3\envs\pytorch\Lib\site-packages\phiml\math\_optimize.py:202, in SolveInfo.convergence_check(self, only_warn)
200 warnings.warn(self.msg, ConvergenceWarning)
201 else:
--> 202 raise Diverged(self)
203 if not self.converged.trajectory[-1].all:
204 if NotConverged not in self.solve.suppress:
Diverged: Direct solution does not satisfy tolerance: norm(residual)=2.795938416966237e-05
```
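Possibly useful context: the traceback shows that `fluid.make_incompressible` accepts a `solve` argument, so as a temporary workaround one could try an explicit iterative solve with a looser tolerance instead of the default direct solve. This is untested on my side and the method name and tolerance values below are only a guess:

```python
# Hypothetical workaround (untested): loosen the pressure solve tolerance
velocity, pressure = fluid.make_incompressible(
    velocity, solve=Solve('CG', rel_tol=1e-3, abs_tol=1e-3))
```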