Skip to content
This repository was archived by the owner on Sep 28, 2024. It is now read-only.

Commit 928fd7c

Browse files
committed
add reference page
1 parent 919554e commit 928fd7c

File tree

5 files changed

+63
-7
lines changed

5 files changed

+63
-7
lines changed

docs/Project.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
[deps]
22
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
3+
DocumenterCitations = "daee34ce-89f3-4625-b898-19384cb65244"
34
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
45
NeuralOperators = "ea5c82af-86e5-48da-8ee1-382d6ad7af4b"

docs/bibliography.bib

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
% Fourier Neural Operator (FNO), ICLR 2021. Cited from apis.md as [FNO2021](@cite).
@inproceedings{FNO2021,
  abstract      = {The classical development of neural networks has primarily focused on learning mappings between finite-dimensional Euclidean spaces. Recently, this has been generalized to neural operators that learn mappings between function spaces. For partial differential equations (PDEs), neural operators directly learn the mapping from any functional parametric dependence to the solution. Thus, they learn an entire family of PDEs, in contrast to classical methods which solve one instance of the equation. In this work, we formulate a new neural operator by parameterizing the integral kernel directly in Fourier space, allowing for an expressive and efficient architecture. We perform experiments on Burgers' equation, Darcy flow, and Navier-Stokes equation. The Fourier neural operator is the first ML-based method to successfully model turbulent flows with zero-shot super-resolution. It is up to three orders of magnitude faster compared to traditional PDE solvers. Additionally, it achieves superior accuracy compared to previous learning-based solvers under fixed resolution.},
  author        = {Li, Zongyi and Kovachki, Nikola and Azizzadenesheli, Kamyar and Liu, Burigede and Bhattacharya, Kaushik and Stuart, Andrew and Anandkumar, Anima},
  booktitle     = {International Conference on Learning Representations},
  month         = oct,
  title         = {{Fourier} Neural Operator for Parametric Partial Differential Equations},
  eprint        = {2010.08895},
  archiveprefix = {arXiv},
  url           = {https://iclr.cc/virtual/2021/poster/3281},
  year          = {2021},
}
10+
11+
% Graph kernel neural operator, arXiv preprint 2020. Cited from apis.md as [NO2020](@cite).
@article{NO2020,
  abstract      = {The classical development of neural networks has been primarily for mappings between a finite-dimensional Euclidean space and a set of classes, or between two finite-dimensional Euclidean spaces. The purpose of this work is to generalize neural networks so that they can learn mappings between infinite-dimensional spaces (operators). The key innovation in our work is that a single set of network parameters, within a carefully designed network architecture, may be used to describe mappings between infinite-dimensional spaces and between different finite-dimensional approximations of those spaces. We formulate approximation of the infinite-dimensional mapping by composing nonlinear activation functions and a class of integral operators. The kernel integration is computed by message passing on graph networks. This approach has substantial practical consequences which we will illustrate in the context of mappings between input data to partial differential equations (PDEs) and their solutions. In this context, such learned networks can generalize among different approximation methods for the PDE (such as finite difference or finite element methods) and among approximations corresponding to different underlying levels of resolution and discretization. Experiments confirm that the proposed graph kernel network does have the desired properties and show competitive performance compared to the state of the art solvers.},
  author        = {Li, Zongyi and Kovachki, Nikola and Azizzadenesheli, Kamyar and Liu, Burigede and Bhattacharya, Kaushik and Stuart, Andrew and Anandkumar, Anima},
  journal       = {ArXiv},
  eprint        = {2003.03485},
  archiveprefix = {arXiv},
  doi           = {10.48550/arXiv.2003.03485},
  month         = mar,
  title         = {Neural Operator: Graph Kernel Network for Partial Differential Equations},
  url           = {https://arxiv.org/abs/2003.03485},
  year          = {2020},
}
21+
22+
% Markov neural operator (MNO), arXiv preprint 2021. Cited from apis.md as [MNO2021](@cite).
@article{MNO2021,
  abstract      = {Chaotic systems are notoriously challenging to predict because of their instability. Small errors accumulate in the simulation of each time step, resulting in completely different trajectories. However, the trajectories of many prominent chaotic systems live in a low-dimensional subspace (attractor). If the system is Markovian, the attractor is uniquely determined by the Markov operator that maps the evolution of infinitesimal time steps. This makes it possible to predict the behavior of the chaotic system by learning the Markov operator even if we cannot predict the exact trajectory. Recently, a new framework for learning resolution-invariant solution operators for PDEs was proposed, known as neural operators. In this work, we train a Markov neural operator (MNO) with only the local one-step evolution information. We then compose the learned operator to obtain the global attractor and invariant measure. Such a Markov neural operator forms a discrete semigroup and we empirically observe that does not collapse or blow up. Experiments show neural operators are more accurate and stable compared to previous methods on chaotic systems such as the Kuramoto-Sivashinsky and Navier-Stokes equations.},
  author        = {Li, Zongyi and Kovachki, Nikola and Azizzadenesheli, Kamyar and Liu, Burigede and Bhattacharya, Kaushik and Stuart, Andrew and Anandkumar, Anima},
  journal       = {ArXiv},
  eprint        = {2106.06898},
  archiveprefix = {arXiv},
  doi           = {10.48550/arXiv.2106.06898},
  month         = jun,
  title         = {{Markov} Neural Operators for Learning Chaotic Systems},
  url           = {https://arxiv.org/abs/2106.06898},
  year          = {2021},
}

docs/make.jl

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
11
using NeuralOperators
22
using Documenter
3+
using DocumenterCitations
4+
5+
bib = CitationBibliography(joinpath(@__DIR__, "bibliography.bib"), sorting=:nyt)
36

47
DocMeta.setdocmeta!(NeuralOperators, :DocTestSetup, :(using NeuralOperators); recursive=true)
58

6-
makedocs(;
9+
makedocs(
10+
bib,
711
modules=[NeuralOperators],
812
authors="JingYu Ning <[email protected]> and contributors",
913
repo="https://github.com/SciML/NeuralOperators.jl/blob/{commit}{path}#{line}",
@@ -16,7 +20,8 @@ makedocs(;
1620
pages=[
1721
"Home" => "index.md",
1822
"Introduction" => "introduction.md",
19-
"APIs" => "apis.md"
23+
"APIs" => "apis.md",
24+
"References" => "references.md",
2025
],
2126
)
2227

docs/src/apis.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ Function ``g`` is a linear transform for lowering Fourier modes.
1818
OperatorConv
1919
```
2020

21-
Reference: [Fourier Neural Operator for Parametric Partial Differential Equations](https://arxiv.org/abs/2010.08895)
21+
Reference: [FNO2021](@cite)
2222

2323
---
2424

@@ -35,7 +35,7 @@ Activation function ``\sigma`` can be arbitrary non-linear function.
3535
OperatorKernel
3636
```
3737

38-
Reference: [Fourier Neural Operator for Parametric Partial Differential Equations](https://arxiv.org/abs/2010.08895)
38+
Reference: [FNO2021](@cite)
3939

4040
---
4141

@@ -52,7 +52,7 @@ Activation function ``\sigma`` can be arbitrary non-linear function.
5252
GraphKernel
5353
```
5454

55-
Reference: [Neural Operator: Graph Kernel Network for Partial Differential Equations](https://arxiv.org/abs/2003.03485)
55+
Reference: [NO2020](@cite)
5656

5757
---
5858

@@ -64,7 +64,7 @@ Reference: [Neural Operator: Graph Kernel Network for Partial Differential Equat
6464
FourierNeuralOperator
6565
```
6666

67-
Reference: [Fourier Neural Operator for Parametric Partial Differential Equations](https://arxiv.org/abs/2010.08895)
67+
Reference: [FNO2021](@cite)
6868

6969
---
7070

@@ -74,4 +74,4 @@ Reference: [Fourier Neural Operator for Parametric Partial Differential Equation
7474
MarkovNeuralOperator
7575
```
7676

77-
Reference: [Markov Neural Operators for Learning Chaotic Systems](https://arxiv.org/abs/2106.06898)
77+
Reference: [MNO2021](@cite)

docs/src/references.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
# References
2+
3+
```@bibliography
4+
```

0 commit comments

Comments
 (0)