forked from TeachingReps/Stochastic-Processes
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlecture-06.tex
More file actions
422 lines (402 loc) · 21.8 KB
/
lecture-06.tex
File metadata and controls
422 lines (402 loc) · 21.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
% !TEX spellcheck = en_US
% !TEX spellcheck = LaTeX
\documentclass[a4paper,10pt, english]{article}
\input{header}
\title{Lecture 06: Renewal Theory}
\author{}
\begin{document}
\maketitle
\section{Introduction}
One of the characterizations of the Poisson process is that of a counting process with \textit{iid} exponential inter-arrival times.
Now we shall relax the ``exponential'' part.
As a result, we no longer have the nice properties such as independent and stationary increments that Poisson processes had.
However, we can still get some great results which also apply to Poisson Processes.
\subsection{Renewal instants}
%\begin{defn}[Inter-arrival Times]
We will consider \textbf{inter-arrival times} $\{X_i: i \in \N\}$ to be a sequence of non-negative \textit{iid} random variables with a common distribution $F$,
with finite mean $\mu$ and $F(0) < 1$.
We interpret $X_n$ as the time between the $(n - 1)^{\text{st}}$ and the $n^{\text{th}}$ renewal event.
% \begin{enumerate}
% %\item Positive inter-arrival time,% i.e. $X_n \geq 0$,
% \item finite mean $\mu$, %i.e. $(0 \leq \mu = \E[X_1] < \infty)$, and
% \item $F(0) < 1$.%= \Pr\{X_n \leq 0\} = \Pr\{X_n = 0\} < 1$.
% \end{enumerate}
%\end{defn}
%\begin{defn}[Renewal Instants]
Let $S_n$ denote the time of $n^{\text{th}}$ \textbf{renewal instant} and assume $S_0 = 0$.
Then, we have
\begin{align*}
S_n = \sum_{i=1}^n X_i, \quad n\in \N.
\end{align*}
%\end{defn}
The second condition on the inter-arrival times implies a non-degenerate renewal process.
If $F(0)$ were equal to $1$, the process would be trivial.
%\begin{defn}
A counting process $\{N(t),t \geq 0\}$ with \textit{iid} general inter-arrival times is called a \textbf{renewal process}, written as
%\end{defn}
%\begin{defn}[Renewal process] Let $\{N(t), t \geq 0\}$ be the counting process that counts number of events by time $t$. Then,
\begin{align*}
N(t) = \sup\{n \in \N_0 : S_n \leq t\} = \sum_{n \in \N}1_{\{S_n \leq t\}}.
\end{align*}
%This counting process $\{N(t), t \geq 0\}$ is called a renewal process.
%\end{defn}
\begin{lem}[Inverse Relationship]
There is an inverse relationship between time of $n^{\text{th}}$ event $S_n$, and the counting process $N(t)$. That is
\begin{align}
\label{eq:InverseRelationship}
\{S_n \leq t\} \iff \{N(t) \geq n\}.
\end{align}
%since $N(t) = \sum_{n \in \N}1_{\{S_n \leq t\}}$.
\end{lem}
\begin{lem}[Finiteness of $N(t)$]
For all $t > 0$, the number of renewals $N(t)$ in time $[0,t)$ is finite.
\end{lem}
\begin{proof}
We are interested in knowing how many renewals occur per unit time.
From the strong law of large numbers, we know that the event
\begin{align*}
\left\{\lim_{n \to \infty}\frac{S_n}{n} = \mu\right\},
\end{align*}
has probability measure unity.
Further, since $\mu > 0$, we must have $S_n$ growing arbitrarily large as $n$ increases.
Thus, $S_n \leq t$ can hold for at most finitely many $n$.
Indeed, the following set
\begin{align*}
\{N(t) \geq n, n \in \N\} &= \{S_n \leq t, n \in \N\} = \left\{\frac{S_n}{n} \leq \frac{t}{n}, n \in \N \right\}% \subseteq\{\mu \leq 0\} = \emptyset.
\end{align*}
has measure zero for any finite $t$.
Therefore, $N(t)$ must be finite, and
%\begin{align*}
$N(t) = \max\{n \in \N_0 : S_n \leq t\}$.
%\end{align*}
\end{proof}
\subsection{Distribution functions}
%\begin{defn}
The distribution of renewal instant $S_n$ is denoted by $F_n(t) \triangleq \Pr\{S_n \leq t\}$ for all $t \in \R$.
%\end{defn}
\begin{lem} The distribution function $F_n$ for renewal instant $S_n$ can be computed inductively
\begin{xalignat*}{3}
&F_1 = F,&&F_n(t) = (F_{n-1}\ast F)(t) \triangleq \int_{0}^{t} F_{n-1}(t-y)\,dF(y),
\end{xalignat*}
where $\ast$ denotes convolution.
\end{lem}
\begin{proof} It follows from induction over sum of \textit{iid} random variables.
\end{proof}
%We need to know the distribution of $N(t)$.
\begin{lem} Counting process $N(t)$ assumes non-negative integer values with distribution
\begin{align*}
\Pr\{N(t) = n\} = \Pr\{S_n \leq t\} - \Pr\{S_{n+1} \leq t\} = F_n(t) - F_{n+1}(t).
\end{align*}
\end{lem}
\begin{proof}
It follows from the inverse relationship between renewal instants and the renewal process~\eqref{eq:InverseRelationship}.
\end{proof}
%Denote $F_n = F^{*(n)}$ where $*$ denotes convolution. Essentially, $F^{*(n)}$ is the distribution of $S_n$.
%\begin{defn}
Mean of the counting process $N(t)$ is called the \textbf{renewal function} denoted by $m(t) = \E[N(t)]$.
%\end{defn}
%We are interested in the following quantity.
%\begin{xalignat*}{3}
% &m(t) = \E[N(t)].
% %, &&M_{N(t)}(\theta) = \E[e^{\theta N(t)}].
%\end{xalignat*}
\begin{prop} Renewal function can be expressed in terms of distribution of renewal instants as
\begin{align*}
m(t) = \sum_{n \in \N} F_n(t).
\end{align*}
\end{prop}
\begin{proof}
Using the inverse relationship between counting process and the arrival instants, we can write
\begin{align*}
m(t) &= \E[N(t)] = \sum_{n \in \N} \Pr\{N(t) \geq n\} = \sum_{n \in \N} \Pr\{S_n \leq t\} = \sum_{n \in \N} F_n(t).
\end{align*}
%where the second equality follows from the fact that the expectation of a random variable being represented in terms of the \textit{ccdf} of the corresponding random variable.
%The third equality follows from the inverse relationship as seen in \eqref{eq:InverseRelationship}.
% Alternatively,
% \begin{align*}
% m(t) &= \E[N(t)]. \\
% &= \E \left[\sum_{n \in \N} \mathbb{I}_{\{S_n \leq t\}} \right] \\
% &= \sum_{n \in \N} \E \left[ \mathbb{I}_{\{S_n \leq t\}} \right] \\
% &= \sum_{n \in \N} \Pr\{S_n \leq t\} = \sum_{n \in \N} F_n(t).
% \end{align*}
% where the third equality follows from the Monotone Convergence Theorem.
We can exchange expectation and summation here since the summands are non-negative, by the monotone convergence theorem.
\end{proof}
\begin{prop} Renewal function is bounded for all finite times.
%\begin{align*}
%m(t) < \infty \quad \forall 0 \leq t < \infty
%\end{align*}
\end{prop}
%\begin{proof}
%Since $F(0) < 1$ and $F$ is right continuous, there exists a $b > 0$ such that $F(b) < 1$.
%Choose a $k \in \N$ such that $t \leq kb$.
%Then,
%\begin{align*}
%\{S_k \leq t \} \subseteq \{S_k \leq kb\} \subseteq \Omega \setminus \bigcap_{j =1}^{k}\{X_j > b\}.
%\end{align*}
%From independence of inter-arrival times $X_j$ it follows that
%\begin{align*}
%\Pr\{S_k \leq t \} \leq 1 - \prod_{j = 1}^{k}\bar{F}(b) = 1 - \beta.
%\end{align*}
%\end{proof}
\begin{proof}
Since we assumed that $\Pr\{X_n = 0\} < 1$, it follows from continuity of probabilities that there exists $\alpha > 0$, such that $\Pr\{X_n \geq \alpha\} = \beta >0$. Define
\begin{align*}
\bar{X}_n = \alpha 1_{\{X_n \geq \alpha\}}.
\end{align*}
Note that since $X_i$'s are \textit{iid}, so are $\bar{X}_i$'s, which are binary random variables taking values in $\{0, \alpha\}$ with probabilities $1-\beta$ and $\beta$ respectively. % (which will be evident from the proof of the distribution function of the number of arrivals till time t).
Let $\bar{N}(t)$ denote the renewal process with inter-arrival times $\bar{X}_n$, with arrivals at integer multiples of $\alpha$.
Since $\bar{X}_n \leq X_n$, we have $\bar{N}(t) \geq N(t)$ for all sample paths.
Hence, it follows that $\E N(t) \leq \E \bar{N}(t)$, and we will show that $\E\bar{N}(t)$ is finite.
%The number of arrivals at each arrival instant $k\alpha$ is \textit{iid} and we can write
%%Moreover, $X_n \geq \bar{X}_n$, and we can write
%\begin{align*}
%\Pr\{\bar{N}(0) = n\} %&= \Pr\{\bar{X}_1=\bar{X}_2=\dots=\bar{X}_n=0,\bar{X}_{n+1}=\alpha\}
%&= \Pr\{X_1 < \alpha,X_2 < \alpha,\ldots,X_n < \alpha,X_{n+1} \geq \alpha\} = (1-\beta)^n\beta.
%%&= \prod_{i=1}^{n} \Pr\{X_i < \alpha\} . \Pr\{X_{n+1} \geq \alpha \} \\
%%= \left(1- \Pr\{X_1 \geq \alpha \}\right) ^{n} \Pr\{X_1 \geq \alpha\}.
%\end{align*}
We can write the joint distribution of the number of arrivals at the arrival instants $0$ and $\alpha$ as
\begin{align*}
\Pr\{\bar{N}(0)=n_1, \bar{N}(\alpha) - \bar{N}(0) = n_2\} &= \Pr\{X_i < \alpha, i \leq n_1, X_{n_1+1} \geq \alpha, X_i < \alpha, n_1 +2 \leq i \leq n_1 + n_2, X_{n_1+n_2+1} \geq \alpha \}\\
&= (1-\beta)^{n_1}\beta(1-\beta)^{n_2-1}\beta.
\end{align*}
%where the third equality follows from the fact that $ X_i, i \in \N$ are mutually independent, fourth equality follows from the fact that $ X_i, i \in \N$ are identical.
%\\
It follows that the number of arrivals is independent at each arrival instant $k\alpha$ and geometrically distributed with mean $1/\beta$ and $(1-\beta)/\beta$ for $k \geq 1$ and $k = 0$ respectively.
%The number of arrivals till time $t$ therefore is Geometric with mean $\frac{1}{\Pr\{X_n \geq \alpha\}}$.
Thus, for all $t \geq 0$,
\begin{align*}
\E N(t) \leq \E[\bar{N}(t)] \leq \frac{\lceil\frac{t}{\alpha} \rceil}{\beta} \leq \frac{\frac{t}{\alpha} + 1}{\beta} < \infty.
\end{align*}
%Since $\E[N(t)] \leq \E[\bar{N}(t)]$ which follows from $N(t) \leq \bar{N}(t)$, we are done.
\end{proof}
\subsection{Basic renewal theorem}
\begin{lem}
Let $N(\infty) \triangleq \lim_{t \to \infty} N(t)$. Then, $\Pr\{N(\infty) = \infty\} = 1$.
\end{lem}
\begin{proof}
It suffices to show $\Pr\{N(\infty) < \infty\} = 0$.
Since $\E[X_n] < \infty$, we have $\Pr\{X_n = \infty\} = 0$ and
\begin{flalign*}
\Pr\{N(\infty) < \infty\} &= \Pr\left(\bigcup_{n \in \N} \{N(\infty) < n\}\right)= \Pr\left(\bigcup_{n \in \N} \{S_n = \infty\}\right) = \Pr\left(\bigcup_{n \in \N} \{X_n = \infty\}\right) \leq \sum_{n \in \N}\Pr\{X_n = \infty\} =0.
\end{flalign*}
%The last step follows from the fact that $\E[X_n] < \infty$.
\end{proof}
Notice that $N(t)$ increases to infinity with time.
We are interested in rate of increase of $N(t)$ with $t$.
\begin{thm}[Basic Renewal Theorem]
\begin{align*}
\lim_{t \to \infty} \frac{N(t)}{t} = \frac{1}{\mu} \quad \mbox{almost surely}.
\end{align*}
\end{thm}
\begin{proof}
Note that $S_{N(t)}$ represents the time of last renewal before $t$, and $S_{N(t)+1}$ represents the time of first renewal after time $t$.
\begin{figure}[h!]
\includegraphics[width=.9\linewidth]{Figures/lecture_5_fig_1.png}
\caption{Time-line visualization}
\end{figure}
Consider $S_{N(t)}$. By definition, we have
\[S_{N(t)} \leq t < S_{N(t)+1}.\]
Dividing by $N(t)$, we get
\[\frac{S_{N(t)}}{N(t)} \leq \frac{t}{N(t)} < \frac{S_{N(t)+1}}{N(t)}.\]
By the strong law of large numbers (SLLN) and the previous result, we have
\[\lim_{t \to \infty}\frac{S_{N(t)}}{N(t)} = \mu \quad \mbox{almost surely.}\]
Also,
\[\lim_{t \to \infty} \frac{S_{N(t)+1}}{N(t)} = \lim_{t \to \infty} \frac{S_{N(t)+1}}{N(t)+1}\cdot\frac{N(t)+1}{N(t)} = \mu \quad \mbox{almost surely.}\]
Hence, by the squeeze theorem, the result follows.
\end{proof}
\begin{shaded*}
Suppose, you are in a casino with infinitely many games.
Every game has a probability of win $X$, \textit{iid} uniformly distributed between $(0,1)$.
One can continue to play a game or switch to another one. We are interested in a strategy that maximizes the long-run proportion of wins.
Let $N(n)$ denote the number of losses in $n$ plays.
Then the fraction of wins $P_W(n)$ is given by
\begin{align*}
P_W(n) = \frac{n-N(n)}{n}.
\end{align*}
We pick a strategy where any game is selected to play, and continue to be played till the first loss. Note that, time till first loss is geometrically distributed with mean $\frac{1}{1-X}$. We shall show that this fraction approaches unity as $n \to \infty$. By the previous proposition, we have:
\begin{align*}
\lim_{n \to \infty} \frac{N(n)}{n} &= \frac{1}{\E[\mbox{Time till first loss}]} \\
&= \frac{1}{\E\left[\frac{1}{1-X}\right]} = \frac{1}{\infty} = 0
\end{align*}
Hence Renewal theorems can be used to compute these long term averages. We'll have many such theorems in the following sections.
\end{shaded*}
\subsection{Elementary renewal theorem}
Basic renewal theorem implies $N(t)/t$ converges to $1/\mu$ almost surely. Now, we are interested in convergence of $\E[N(t)]/t$. Note that this is not obvious, since almost sure convergence doesn't imply convergence in mean.
\begin{shaded*}
Consider the following example.
Let $X_n$ be a Bernoulli random variable with $\Pr\{X_n = 1\} = 1/n^2$,
and let $Y_n = n^2 X_n$.
Then, $\Pr\{ Y_n = 0 \} = 1 - 1/n^2$.
Since $\sum_{n \in \N} \Pr\{Y_n > 0\} = \sum_{n \in \N} 1/n^2 < \infty$, the Borel--Cantelli lemma implies $Y_n \to 0$ almost surely.
However, $\E[Y_n] = 1$ for all $n \in \N$.
So $\E[Y_n] \to 1$.
\end{shaded*}
Even though the basic renewal theorem does \textbf{NOT} imply it, we still have $\E[N(t)]/t$ converging to $1/\mu$.
\begin{thm}[Elementary renewal theorem] Let $m(t)$ denote mean $\E[N(t)]$ of renewal process $N(t)$, then under the hypotheses of basic renewal theorem, we have
\begin{align*}
\lim_{t \to \infty}\frac{m(t)}{t} = \frac{1}{\mu}.
\end{align*}
\end{thm}
\begin{proof}
Take $\mu < \infty$. We know that $S_{N(t)+1} > t$. Therefore, taking expectations on both sides and using Proposition~\ref{prop:WaldRenewal}, we have
\begin{align*}
\mu (m(t) + 1) > t.
\end{align*}
Dividing both sides by $\mu t$ and taking $\liminf$ on both sides, we get
\begin{align*}
%\label{eq:LiminfMean}
\liminf_{t \to \infty} \frac{m(t)}{t} \geq \frac{1}{\mu}.
\end{align*}
%Thus we now have to show
%\[\limsup_{t \to \infty} \frac{m(t)}{t} \leq \frac{1}{\mu}\]
We employ a truncated random variable argument to show the reverse inequality. We define truncated inter-arrival times $\{\bar{X}_n\}$ as
\begin{align*}
\bar{X}_n = X_n 1_{\{X_n \leq M\}} + M1_{\{X_n > M\}}.
\end{align*}
We will call $\E[\bar{X}_n] = \mu_M$. Further, we can define arrival instants $\{\bar{S}_n\}$ and renewal process $\bar{N}(t)$ for this set of truncated inter-arrival times $\{\bar{X}_n\}$ as
\begin{align*}
\bar{S}_n &= \sum_{k=1}^n \bar{X}_k, & \bar{N}(t) &= \sup\{n \in \N_0: \bar{S}_n \leq t\}.
\end{align*}
Note that since $S_n \geq \bar{S}_n$, the number of arrivals would be higher for renewal process $\bar{N}(t)$ with truncated random variables, i.e.
\begin{align}
\label{eq:TruncRenewalInequality}
N(t) \leq \bar{N}(t).
\end{align}
Further, due to truncation of inter-arrival time, next renewal happens with-in $M$ units of time, i.e.
\begin{align*}
\bar{S}_{\bar{N}(t)+1} \leq t+M.
\end{align*}
Taking expectations on both sides in the above equation, using Wald's lemma for renewal processes, %using Proposition~\ref{prop:WaldRenewal},
dividing both sides by $t \mu_M$, and taking $\limsup$ on both sides, we obtain
\begin{align*}
\limsup_{t \to \infty}\frac{\bar{m}(t)}{t} \leq \frac{1}{\mu_M}.
\end{align*}
Taking expectations on both sides of~\eqref{eq:TruncRenewalInequality} and letting $M$ go arbitrary large on RHS, we get
\begin{align*}
%\label{eq:LimsupMean}
\limsup_{t \to \infty}\frac{m(t)}{t} \leq \frac{1}{\mu}.
\end{align*}
%Now observe that LHS is independent of $M$. Take limits $M \to \infty$, noting that $\mu_M \to \mu$ (Why?) to get
%\[\limsup_{t \to \infty}\frac{m(t)}{t} \leq \frac{1}{\mu}\]
%Putting it all together,
%\[\lim_{t \to \infty}\frac{m(t)}{t} = \frac{1}{\mu}\]
The result follows for finite $\mu$ by combining the $\liminf$ and $\limsup$ bounds on $m(t)/t$.
%~\eqref{eq:LiminfMean} and~\eqref{eq:LiminfMean}.
When $\mu$ grows arbitrarily large, the result follows from the $\liminf$ bound on $m(t)/t$, %~\eqref{eq:LiminfMean},
where the RHS is zero.
\end{proof}
\subsection{Central limit theorem for renewal processes}
\begin{thm}
Let $X_n$ be \textit{iid} random variables with $\mu = \E[X_n] < \infty$ and $\sigma^2 = Var(X_n) < \infty$. Then
\[\frac{N(t)-\frac{t}{\mu}}{\sigma \sqrt{\frac{t}{\mu^3}}} \xrightarrow{d} N(0,1). \]
\end{thm}
\begin{proof}
Take $u = \frac{t}{\mu} + y \sigma \sqrt{\frac{t}{\mu^3}}$. We shall treat $u$ as an integer and proceed, the proof for general $u$ is an exercise. Recall that $\{N(t) < u\} \iff \{S_u > t\}$. By equating probability measures on both sides, we get
\begin{align*}
\Pr\{N(t) < u\} = \Pr\left\{\frac{S_u - u\mu}{\sigma \sqrt{u}} > \frac{t - u\mu}{\sigma \sqrt{u}}\right\} = \Pr\left\{\frac{S_u - u\mu}{\sigma \sqrt{u}} > -y\left(1 + \frac{y\sigma}{\sqrt{t\mu}}\right)^{-\frac{1}{2}}\right\}.
\end{align*}
By the central limit theorem, $\frac{S_u - u\mu}{\sigma \sqrt{u}}$ converges to a normal random variable with zero mean and unit variance as $t$ grows. Also, note that
\begin{align*}
\lim_{t \to \infty} -y\left(1 + \frac{y\sigma}{\sqrt{t\mu}}\right)^{-\frac{1}{2}} = -y.
\end{align*}
These results combine with the symmetry of normal random variable to give us the result.
\end{proof}
\appendix
\section{Wald's Lemma}
%Before we get into Wald's Lemma, let us first define what a stopping time is.
%\begin{defn}[Stopping Time]
%Let $\{X_n: n\in \N\}$ be independent random variables.
An integer random variable $N$ is called a \textbf{stopping time} with respect to the \textit{independent} random sequence $\{X_n: n \in \N\}$ if the event $\{N=n\}$ depends only on $\{X_1,\cdots,X_n\}$ and is independent of $\{X_{n+1}, X_{n+2},\cdots\}$.
%\end{defn}
Intuitively, if we observe the $X_n$'s in sequential order and $N$ denotes the number observed before stopping, then we have stopped after observing $\{X_1, \ldots, X_N\}$ and before observing $\{X_{N+1}, X_{N+2}, \ldots\}$.
The intuition behind a stopping time is that its value is determined by past and present events but \textbf{NOT} by future events.
\begin{shaded*}
\begin{enumerate}
\item For instance, while traveling on the bus, the random variable measuring ``Time until bus crosses Majestic and after that one stop'' is a stopping time as its value is determined by events before it happens. On the other hand ``Time until bus stops before Majestic is reached'' would not be a stopping time in the same context. This is because we have to cross this time, reach Majestic and then realize we have crossed that point.
\item Consider $X_n \in \{0,1\}$ \textit{iid} Bernoulli$(1/2)$. Then $N = \min \{n \in \N:\quad \sum_{i=1}^n X_i = 1\}$ is a stopping time. For instance, $\Pr\{N=2\} = \Pr\{X_1=0,X_2=1\}$ and hence $N$ is a stopping time by definition.
\item \textbf{Random Walk Stopping Time} Consider $X_n$ \textit{iid} binary random variables with
\begin{align*}
\Pr\{X_n = 1\} = \Pr\{X_n = -1\} = \frac{1}{2}.
\end{align*}
Then $N = \min \{n \in \N:\quad \sum_{i=1}^n X_i = 1\}$ is a stopping time.
\end{enumerate}
\end{shaded*}
\subsection{Properties of stopping time}
Let $N_1,N_2$ be two stopping times with respect to independent random sequence $\{X_i : i \in \N \}$ then,
\begin{enumerate}[i\_]
\item $N_1+N_2$ is a stopping time.
\item $\min \{N_1,N_2\} $ is a stopping time.
\end{enumerate}
\begin{proof}
Let $\{X_i : i \in \N \} $ be an independent random sequence, and $N_1,N_2$ associated stopping times.
\begin{enumerate}[i\_]
\item It suffices to show that the event $\{N_1+N_2=n\}$ depends only on random variables $\{X_1, \dots, X_n\}$ and independent of $\{X_{n+1}, \dots\}$.
To this end, we observe that
\begin{align*}
\{N_1+N_2 = n \} &= \bigcup_{k=0}^{n} \{N_1 = k,N_2 = n-k\}.
\end{align*}
Result follows since the events $\{N_1 = k\}$ and $\{N_2=n-k\}$ depend solely on $\{X_1, \dots, X_n\}$ for all $k \in \{0, \dots, n\}$.
%Hence, $N_1+N_2$ is a stopping time.
\item It suffices to show that the event $\{\min \{N_1,N_2\} > n\}$ depends solely on $\{X_1, \dots, X_n\}$.
\begin{align*}
\{\min \{N_1,N_2\} > n\} = \{N_1 > n\} \cap \{N_2 > n\}.
% From ~De ~Morgan's ~Law ~we ~get ~\{\min \{N_1,N_2\} \leq n\} &= \{N_1 \leq n\} \cap \{N_2 \leq n\}
% \\
% & \indep \{X_{n+1},X_{n+2},\ldots\}.
\end{align*}
The result follows since the events $\{N_1 > n\}$ and $\{N_2 > n\}$ depend solely on $\{X_1, \dots, X_n\}$.
\end{enumerate}
\end{proof}
\begin{lem}[Wald's Lemma]
Let $\{X_i:\quad i\in \N\}$ be \textit{iid} random variables with finite mean $\E[X_1]$ and let $N$ be a stopping time with respect to this set of variables, such that $\E[N] < \infty$.
Then,
\begin{align*}
\E\left[\sum_{n=1}^N X_n\right] &= \E[X_1]\E[N].
\end{align*}
\end{lem}
\begin{proof}
We first show that the event $\{N \geq n\}$ is independent of $X_k$, for any $k \geq n$.
To this end, observe that
\begin{align*}
\{N \geq k\} = \{N < k\}^c = \{N \leq k-1\}^c = \left(\bigcup_{i=1}^{k-1} \{N = i\}\right)^c.
\end{align*}
Recall that $N$ is a stopping time and the event $\{N=i\}$ depends only on $\{X_1,\ldots, X_i\}$, by definition.
Therefore, $\{N \geq k\}$ depends only on $\{X_1,\ldots, X_{k-1}\}$, and is independent of the future and present samples.
Hence, for a stopping time $N$, we can write the expected stopped sum as
\begin{align*}
\E\left[\sum_{n=1}^N X_n\right] &= \E\left[\sum_{n \in \N} X_n 1_{\{N \geq n\}}\right] = \sum_{n \in \N} \E X_n \E\left[1_{\{N \geq n\}}\right] = \E X_1\E\left[ \sum_{n \in \N} 1_{\{N \geq n\}}\right] = \E[X_1]\E[N].
\end{align*}
We exchanged summation and expectation in the above step, which is not always allowed.
We were able to do it here since the summands are non-negative, by the monotone convergence theorem.
%I'd like to point out here that in step (\ref{tricky}), you cannot always exchange infinite sums and expectations.
%But here you can do so, because of the application of
%Refer Ross/Wolff for more information.
%Therefore, we can write
% \begin{align*}
% \sum_{n \in \N} \E\left[X_n 1_{\{N \geq n\}}\right] &= \sum_{n \in \N} \E\left[X_n\right]\E\left[ 1_{\{N \geq n\}}\right] \\
% &= \E\left[X_1\right] \sum_{n \in \N} \Pr\{N \geq n\} \\
% &= \E[X_1]\E[N].
% \end{align*}
%where the third equality follows from the fact that the expectation of a random variable being represented in terms of the \textit{ccdf} of the corresponding random variable.
\end{proof}
\begin{prop}[Wald's Lemma for Renewal Process] \label{prop:WaldRenewal}
Let $\{X_n, n \in \N\}$ be \textit{iid} inter-arrival times of a renewal process $N(t)$ with $\E[X_1] < \infty$, and let $m(t) = \E[N(t)]$ be its renewal function. Then, $N(t)+1$ is a stopping time and
\begin{align*}
\E\left[\sum_{i=1}^{N(t)+1}X_i\right] = \E[X_1][1+m(t)].
\end{align*}
\end{prop}
\begin{proof} It is easy to see that $\{N(t)+1=n\}$ depends solely on $\{X_1,\ldots,X_n\}$ from the discussion below.
\begin{align*}
\left\{N(t) + 1 = n \right\} \iff \{S_{n-1} \leq t < S_n\} \iff \left\{\sum_{i=1}^{n-1} X_i \leq t < \sum_{i=1}^{n-1} X_i + X_n\right\}.
\end{align*}
Thus $N(t)+1$ is a stopping time, and the result follows from Wald's Lemma.
\end{proof}
\end{document}