Skip to content

Commit 64c883a

Browse files
committed
feat: fix full screen bug
1 parent 51d137a commit 64c883a

File tree

19 files changed

+832
-638
lines changed

19 files changed

+832
-638
lines changed

blog/2025-04/_partials/calculus.mdx

Lines changed: 66 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -109,52 +109,52 @@ $$
109109
A_{11} & \cdots & A_{1n} \\ \vdots & \ddots & \vdots \\ A_{m1} & \cdots & A_{mn} \
110110
\end{bmatrix}$"}
111111

112-
<Collapse label="$$
113-
\nabla_x (A x) = A^T
114-
$$">
115-
116-
$$
117-
\begin{split}
118-
&\ \nabla_x (A x) = \nabla_x (A_1 x_1 + A_2 x_2 + \cdots + A_n x_n) \\
119-
=&\ \nabla_x ([A_{11} \cdots A_{1m}] x_1 + [A_{21} \cdots A_{2m}] x_2 + \cdots + [A_{n1} \cdots A_{nm}] x_n) \\
120-
=&\ \nabla_x (A_1 x_1 + A_2 x_2 + \cdots + A_n x_n) \\
121-
=&\ \nabla_x (A_1 x_1) + \nabla_x (A_2 x_2) + \cdots + \nabla_x (A_n x_n) \\
122-
=&\ \begin{bmatrix}
123-
\frac{\partial}{\partial x_1} (A_1 x_1) , \frac{\partial}{\partial x_2} (A_2 x_2) , \cdots , \frac{\partial}{\partial x_n} (A_n x_n)
124-
\end{bmatrix} \\
125-
=&\ \begin{bmatrix}
126-
A_1 , A_2 , \cdots , A_n
127-
\end{bmatrix} = A^T
128-
\end{split}
129-
$$
130-
131-
</Collapse>
112+
<Collapse label="$$
113+
\nabla_x (A x) = A^T
114+
$$">
115+
116+
$$
117+
\begin{split}
118+
&\ \nabla_x (A x) = \nabla_x (A_1 x_1 + A_2 x_2 + \cdots + A_n x_n) \\
119+
=&\ \nabla_x ([A_{11} \cdots A_{1m}] x_1 + [A_{21} \cdots A_{2m}] x_2 + \cdots + [A_{n1} \cdots A_{nm}] x_n) \\
120+
=&\ \nabla_x (A_1 x_1 + A_2 x_2 + \cdots + A_n x_n) \\
121+
=&\ \nabla_x (A_1 x_1) + \nabla_x (A_2 x_2) + \cdots + \nabla_x (A_n x_n) \\
122+
=&\ \begin{bmatrix}
123+
\frac{\partial}{\partial x_1} (A_1 x_1) , \frac{\partial}{\partial x_2} (A_2 x_2) , \cdots , \frac{\partial}{\partial x_n} (A_n x_n)
124+
\end{bmatrix} \\
125+
=&\ \begin{bmatrix}
126+
A_1 , A_2 , \cdots , A_n
127+
\end{bmatrix} = A^T
128+
\end{split}
129+
$$
130+
131+
</Collapse>
132132

133133
- 对于所有的 :ctip[$\mathbf{x} \in R^n$]{id="$\mathbf{x} = [x_1, x_2, \cdots, x_n]^T$"} \
134134
和 :ctip[$A \in R^{n \times m}$]{id="$\mathbf{A} = \begin{bmatrix} \
135135
A_{11} & \cdots & A_{1m} \\ \vdots & \ddots & \vdots \\ A_{n1} & \cdots & A_{nm} \
136136
\end{bmatrix}$"}
137137

138-
<Collapse label="$$
139-
\nabla_x (x^T A) = A
140-
$$">
141-
142-
$$
143-
\begin{split}
144-
&\ \nabla_x (x^T A) = \nabla_x (x_1 A_1 + x_2 A_2 + \cdots + x_n A_n) \\
145-
=&\ \nabla_x (x_1 [A_{11} \cdots A_{1m}] + x_2 [A_{21} \cdots A_{2m}] + \cdots + x_n [A_{n1} \cdots A_{nm}]) \\
146-
=&\ \nabla_x (x_1 A_1 + x_2 A_2 + \cdots + x_n A_n) \\
147-
=&\ \nabla_x (x_1 A_1) + \nabla_x (x_2 A_2) + \cdots + \nabla_x (x_n A_n) \\
148-
=&\ \begin{bmatrix} \
149-
\frac{\partial}{\partial x_1} (x_1 A_1) , \frac{\partial}{\partial x_2} (x_2 A_2) , \cdots , \frac{\partial}{\partial x_n} (x_n A_n)
150-
\end{bmatrix} \\
151-
=&\ \begin{bmatrix}
152-
A_1 , A_2 , \cdots , A_n
153-
\end{bmatrix} = A
154-
\end{split}
155-
$$
156-
157-
</Collapse>
138+
<Collapse label="$$
139+
\nabla_x (x^T A) = A
140+
$$">
141+
142+
$$
143+
\begin{split}
144+
&\ \nabla_x (x^T A) = \nabla_x (x_1 A_1 + x_2 A_2 + \cdots + x_n A_n) \\
145+
=&\ \nabla_x (x_1 [A_{11} \cdots A_{1m}] + x_2 [A_{21} \cdots A_{2m}] + \cdots + x_n [A_{n1} \cdots A_{nm}]) \\
146+
=&\ \nabla_x (x_1 A_1 + x_2 A_2 + \cdots + x_n A_n) \\
147+
=&\ \nabla_x (x_1 A_1) + \nabla_x (x_2 A_2) + \cdots + \nabla_x (x_n A_n) \\
148+
=&\ \begin{bmatrix}
149+
\frac{\partial}{\partial x_1} (x_1 A_1) , \frac{\partial}{\partial x_2} (x_2 A_2) , \cdots , \frac{\partial}{\partial x_n} (x_n A_n)
150+
\end{bmatrix} \\
151+
=&\ \begin{bmatrix}
152+
A_1 , A_2 , \cdots , A_n
153+
\end{bmatrix} = A
154+
\end{split}
155+
$$
156+
157+
</Collapse>
158158

159159
- **二次型**(二次型是二次函数在向量空间中的推广):
160160

@@ -163,23 +163,23 @@ A_{11} & \cdots & A_{1m} \\ \vdots & \ddots & \vdots \\ A_{n1} & \cdots & A_{nm}
163163
A_{11} & \cdots & A_{1n} \\ \vdots & \ddots & \vdots \\ A_{n1} & \cdots & A_{nn} \
164164
\end{bmatrix}$"}
165165

166-
<Collapse label="$$
167-
\nabla_x x^T A x = (A + A^T) x
168-
$$">
166+
<Collapse label="$$
167+
\nabla_x x^T A x = (A + A^T) x
168+
$$">
169169

170-
$$
171-
\begin{split}
172-
&\ \nabla_x x^T A x = \nabla_x \sum_{i=1}^n \sum_{j=1}^n x_i A_{ij} x_j \\
173-
=&\ \frac{\partial}{\partial x_k} \sum_{i=1}^n \sum_{j=1}^n x_i A_{ij} x_j + \frac{\partial}{\partial x_k} \sum_{i=1}^n \sum_{j=1}^n x_j A_{ji} x_i \\
174-
=&\ \sum_{i=1}^n \sum_{j=1}^n A_{ij} x_j + \sum_{i=1}^n \sum_{j=1}^n A_{ji} x_i \\
175-
=&\ \sum_{j=1}^n A_{kj} x_j + \sum_{i=1}^n A_{ik} x_i \text{($i, j = k$ 时,$A_{kj} x_j, A_{ik} x_i$ 分别存在一项 $A_{kk} x_k$)} \\
176-
=&\ \sum_{i=1}^n (\sum_{j=1}^n A_{ij} x_j) \cdot e_i + \sum_{j=1}^n (\sum_{i=1}^n A_{ji} x_i) \cdot e_j \\
177-
=&\ \sum_{i=1}^n (\mathbf{A} \mathbf{x})_i \cdot e_i + \sum_{j=1}^n (\mathbf{A^T} \mathbf{x})_j \cdot e_j \\ \
178-
=&\ (A + A^T) x
179-
\end{split}
180-
$$
170+
$$
171+
\begin{split}
172+
&\ \nabla_x x^T A x = \nabla_x \sum_{i=1}^n \sum_{j=1}^n x_i A_{ij} x_j \\
173+
=&\ \sum_{i=1}^n \sum_{j=1}^n \frac{\partial x_i}{\partial x_k} A_{ij} x_j + \sum_{i=1}^n \sum_{j=1}^n x_i A_{ij} \frac{\partial x_j}{\partial x_k} \\
174+
=&\ \sum_{j=1}^n A_{kj} x_j + \sum_{i=1}^n x_i A_{ik} \\
175+
=&\ \sum_{j=1}^n A_{kj} x_j + \sum_{i=1}^n A_{ik} x_i \text{($i, j = k$ 时,$A_{kj} x_j, A_{ik} x_i$ 分别存在一项 $A_{kk} x_k$)} \\
176+
=&\ \sum_{i=1}^n (\sum_{j=1}^n A_{ij} x_j) \cdot e_i + \sum_{j=1}^n (\sum_{i=1}^n A_{ji} x_i) \cdot e_j \\
177+
=&\ \sum_{i=1}^n (\mathbf{A} \mathbf{x})_i \cdot e_i + \sum_{j=1}^n (\mathbf{A^T} \mathbf{x})_j \cdot e_j \\
178+
=&\ (A + A^T) x
179+
\end{split}
180+
$$
181181

182-
</Collapse>
182+
</Collapse>
183183

184184
- **范数**
185185

@@ -195,19 +195,19 @@ A_{11} & \cdots & A_{1n} \\ \vdots & \ddots & \vdots \\ A_{n1} & \cdots & A_{nn}
195195
A_{11} & \cdots & A_{1n} \\ \vdots & \ddots & \vdots \\ A_{n1} & \cdots & A_{nn} \
196196
\end{bmatrix}$"}
197197

198-
<Collapse label="$$
199-
\nabla_x \|x\|_2^2 = \nabla_x (x^T x) = 2x
200-
$$">
198+
<Collapse label="$$
199+
\nabla_x \|x\|_2^2 = \nabla_x (x^T x) = 2x
200+
$$">
201201

202-
$$
203-
\begin{split}
204-
&\ \nabla_x \|x\|_2 = \nabla_x (\sqrt{x^T x}) ^ 2 \\
205-
=&\ \nabla_x (x^T x) = \nabla_x (x_1^2 + x_2^2 + \cdots + x_n^2) \\
206-
=&\ 2x
207-
\end{split}
208-
$$
202+
$$
203+
\begin{split}
204+
&\ \nabla_x \|x\|_2^2 = \nabla_x (\sqrt{x^T x})^2 \\
205+
=&\ \nabla_x (x^T x) = \nabla_x (x_1^2 + x_2^2 + \cdots + x_n^2) \\
206+
=&\ 2x
207+
\end{split}
208+
$$
209209

210-
</Collapse>
210+
</Collapse>
211211

212212
:::nerd
213213
在深度学习中每层神经网络之间由 **_权重矩阵_(通常还会添加同维度的偏置向量)桥接不同维度矩阵的计算**。随后再通过:term[激活函数]{./terms/dl#activation-function}将计算结果映射到非线性空间,是:term[神经元]{./terms/dl#neuron}的计算核心。

blog/2025-08/prompt-engineering.mdx

Lines changed: 30 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -12,27 +12,6 @@ draft: true
1212

1313
<!--truncate-->
1414

15-
```markmap
16-
## **Tree of Thoughts (ToT)框架**
17-
- 基本信息
18-
- 标题:Tree of Thoughts: Deliberate Problem Solving with Large Language Models
19-
- 作者:Shunyu Yao, Dian Yu, Jeffrey Zhao等
20-
- 发布时间:2023-05-17
21-
- 会议版本:NeurIPS 2023 camera ready version
22-
- 提出背景
23-
- 现有LMs局限:token级、左到右决策
24-
- 短板场景:需探索、战略前瞻、初始决策关键的任务
25-
- 核心特点
26-
- 扩展Chain of Thought方法
27-
- 探索连贯“思维”单元作为中间步骤
28-
- 支持深思熟虑决策:多推理路径、自我评估、前瞻/回溯
29-
- 实验结果
30-
- 测试任务:24点游戏、创意写作、迷你填字游戏
31-
- 典型数据:24点游戏中,CoT成功率4%,ToT达74%
32-
- 其他信息
33-
- 提供代码库(含所有提示)
34-
```
35-
3615
## 提示词要素
3716

3817
常规的提示词通常包含以下要素:
@@ -124,13 +103,36 @@ draft: true
124103
> 思维树(Tree of Thought)维护着多条连贯的思维链,本质是缓解 LM 受限于 Token-Level 和从左到右决策在生成内容时带来的消极影响。
125104
126105
- LM 生成内容时,是从左到右的一个一个 token 去生成的,并且下一个 token 是基于上一个 token 预测的。将 LM 本身的前瞻性和计算流程全部压缩到了这一个 token。当 token 出现误判可能会导致雪崩。
127-
- ToT 通常有以下步骤:
128-
1. 问题拆解:确定解决问题的步骤,通常是“确定范围→筛选条件→验证答案”
129-
2. 定义节点:构造树形结构,需要提前定义节点的含义,存放什么样的数据,干哪些事情
130-
3. 思维评估:每个节点都会衍生*多个*更接近答案的思维,需要对这些思维进行评估打分
131-
4. 思维剪枝:针对评估算法验证对应的思维是否满足继续推理的条件,如果满足则继续推理,否则将枝叶剪掉
132-
5. 思维回溯:如果当前节点出现差错或者差错较多需要回溯到父层或者祖层,重新评估思维
133-
6. 思维合并:最终会得到多个思维,需要对这些思维进行合并,得到最终答案
106+
134107
- 思维树的构建过程可以通过 Prompt 实现,也能通过 RL(Reinforcement Learning)实现
135108
- 通过 RL 实现的思维树,在新环境上更具有鲁棒性
136109
- 通过 Prompt 实现的思维树,在推理过程中更具有可解释性
110+
111+
```markmap
112+
## ToT 构建过程
113+
### 问题拆解
114+
- 确定问题范围
115+
- 定义筛选条件
116+
- 验证候选答案
117+
118+
### 定义节点
119+
- 节点存放通常是二阶矩阵,存储单条思维树枝干
120+
- 回溯时直接减去当前节点,并回溯到父节点
121+
122+
### 思维评估
123+
124+
- 每个节点都会衍生*多个*更接近答案的思维,需要对这些思维进行评估打分
125+
126+
### 思维剪枝
127+
128+
- 针对评估算法验证对应的思维是否满足继续推理的条件,如果满足则继续推理,否则将枝叶剪掉
129+
130+
### 思维回溯
131+
132+
- 如果当前节点出现差错或差错较多,需要回溯到父层或祖层,重新评估思维
133+
134+
### 思维合并
135+
136+
- 最终会得到多个思维,需要对这些思维进行合并,得到最终答案
137+
138+
```

package.json

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -44,8 +44,7 @@
4444
"canvas": "^3.1.0",
4545
"canvg": "^4.0.2",
4646
"clsx": "^2.0.0",
47-
"dom-to-image-more": "^3.7.1",
48-
"dom-to-image": "^2.6.0",
47+
"d3": "^7.9.0",
4948
"echarts": "^5.5.1",
5049
"file-saver": "^2.0.5",
5150
"heliannuuthus-docusaurus-authors": "file:./plugins/docusaurus-authors",
@@ -60,6 +59,7 @@
6059
"heliannuuthus-remark-mermaid": "file:./packages/remark-mermaid",
6160
"heliannuuthus-remark-terminology": "file:./packages/remark-terminology",
6261
"heliannuuthus-terminology-store": "file:./plugins/terminology-store",
62+
"html-to-image": "^1.11.13",
6363
"js-yaml": "^4.1.0",
6464
"jszip": "^3.10.1",
6565
"markmap-common": "^0.18.9",
@@ -83,14 +83,13 @@
8383
"remark-math": "^6.0.0",
8484
"remark-parse": "^11.0.0",
8585
"remark-rehype": "^11.1.1",
86-
"unified": "^11.0.5"
86+
"unified": "^11.0.5",
8787
},
8888
"devDependencies": {
8989
"@docusaurus/module-type-aliases": "3.8.1",
9090
"@docusaurus/tsconfig": "3.8.1",
9191
"@docusaurus/types": "3.8.1",
9292
"@trivago/prettier-plugin-sort-imports": "^5.2.0",
93-
"@types/dom-to-image": "^2.6.7",
9493
"@types/file-saver": "^2.0.7",
9594
"@types/react": "^19.0.0",
9695
"@types/react-dom": "^19.0.0",

src/components/ThemeProvider.tsx

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,9 @@ export default function ThemeProvider({
6969
fontFamily: "var(--ifm-font-family-base)"
7070
},
7171
components: {
72+
Message: {
73+
contentBg: dark ? "rgb(30, 30, 30)" : "rgb(240, 240, 240)"
74+
},
7275
Steps: {},
7376
Table: {},
7477
Layout: {

src/components/markdown/MDXRender.tsx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import Tooltip from "@site/src/components/Tooltip";
2323
import { Comment } from "@site/src/components/Typography";
2424
import { CollapseHeading } from "@site/src/components/collapse";
2525
import CommentTooltip from "@site/src/components/comment/Tooltip";
26-
import { SvgViewer } from "@site/src/components/markdown";
26+
import Markmap from "@site/src/components/markdown/markmap";
2727
import TermPreview from "@site/src/components/terminology/TermPreview";
2828

2929
import TermAdmonition from "@theme/Admonition";
@@ -88,7 +88,7 @@ const MDXRender = ({
8888
Term: TermPreview,
8989
Mermaid: Mermaid,
9090
CollapseHeading,
91-
SvgViewer
91+
Markmap
9292
};
9393
}
9494
}).then((exports) => {

src/components/markdown/index.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
export { default as MDXRender } from "./MDXRender";
2-
export { SvgViewer } from "./svgviewer";
2+
export { default as View } from "./markmap";
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
import { Modal } from "antd";
2+
import { Transformer } from "markmap-lib";
3+
import * as markmap from "markmap-view";
4+
import "path-data-polyfill";
5+
import React, { useEffect, useMemo, useState } from "react";
6+
7+
import { MarkmapContext } from "./context";
8+
import { BlockViewer, FullscreenViewer } from "./viewer";
9+
10+
const { loadCSS, loadJS } = markmap;
11+
const transformer = new Transformer();
12+
13+
let assetsLoaded = false;
14+
15+
const ensureAssetsLoaded = () => {
16+
if (assetsLoaded) return;
17+
if (typeof window === "undefined") return;
18+
const { scripts, styles } = transformer.getAssets();
19+
loadCSS(styles);
20+
loadJS(scripts, { getMarkmap: () => markmap });
21+
assetsLoaded = true;
22+
};
23+
24+
export type MarkmapProps = {
25+
markdown: string;
26+
};
27+
28+
const View: React.FC<MarkmapProps> = ({ markdown }) => {
29+
const [isFullscreen, setFullscreen] = useState<boolean>(false);
30+
31+
const transformed = useMemo(
32+
() => transformer.transform(markdown),
33+
[markdown]
34+
);
35+
36+
useEffect(() => {
37+
ensureAssetsLoaded();
38+
}, []);
39+
40+
return (
41+
<MarkmapContext.Provider value={{ transformed }}>
42+
<Modal
43+
style={{
44+
top: 0,
45+
right: 0,
46+
left: 0,
47+
bottom: 0,
48+
margin: 0,
49+
padding: 0,
50+
position: "fixed",
51+
zIndex: 1000
52+
}}
53+
styles={{
54+
mask: {
55+
margin: 0
56+
},
57+
wrapper: {
58+
padding: 0,
59+
margin: 0
60+
},
61+
content: {
62+
height: "100vh",
63+
width: "100vw",
64+
padding: 0,
65+
margin: 0,
66+
borderRadius: 0,
67+
border: "none"
68+
},
69+
body: {
70+
height: "100vh",
71+
width: "100vw",
72+
margin: 0,
73+
padding: 0
74+
}
75+
}}
76+
width={"100vw"}
77+
height={"100vh"}
78+
destroyOnHidden={true}
79+
closable={false}
80+
footer={null}
81+
open={isFullscreen}
82+
onCancel={() => setFullscreen(false)}
83+
>
84+
<FullscreenViewer onFullscreenChange={setFullscreen} />
85+
</Modal>
86+
<BlockViewer onFullscreenChange={setFullscreen} />
87+
</MarkmapContext.Provider>
88+
);
89+
};
90+
91+
export default View;

0 commit comments

Comments
 (0)