%~Mouliné par MaN_auto v.0.32.0 2024-06-07 08:45:22
\documentclass[CRMATH, Unicode, XML, biblatex]{cedram}

\TopicEN{Combinatorics, Probability theory}
\TopicFR{Combinatoire, Probabilités}

\addbibresource{crmath20221157.bib}


\usepackage{amssymb}
%\usepackage{mathrsfs}
%\usepackage[normalem]{ulem}
\usepackage{mathtools}
\usepackage{graphicx}
\usepackage{latexsym}

\usepackage{tikz}
%\usepackage{todonotes}




\renewcommand{\leq}{\leqslant}
\renewcommand{\geq}{\geqslant}


\usetikzlibrary{arrows,automata}


\DeclareFixedFont{\beaupetit}{T1}{ftp}{b}{n}{2cm}

\newcommand{\Gpoi}{\mathrm{G}_{\mathrm{Poi}}}
\newcommand{\Gpoii}{\mathrm{G}_{Poi}}
\newcommand{\Fpoi}{\mathrm{F}_{ \mathrm{Poi}}}
\newcommand{\Fpoii}{\mathrm{F}_{Poi}}
%\newcommand{\F'poi}{\mathrm{F}'_{ \mathrm{Poi}}}
\newcommand{\rme}{\mathrm{e}}
\newcommand{\rmP}{\mathrm{P}}
\newcommand{\bbS}{\mathbb{S}}
\newcommand{\bbE}{\mathbb{E}}
\newcommand{\bbR}{\mathbb{R}}
\newcommand{\bbP}{\mathbb{P}}

\newcommand{\Explo}{\mathrm{Explo}}
\renewcommand{\exp}{\mathrm{exp}}
\newcommand{\rBin}{\mathrm{Bin}}

\newcommand{\clP}{\mathcal{P}}
\newcommand{\clN}{\mathcal{N}}
\newcommand{\Minf}{\mathbf{M}^{(\infty)}_\q}
\newcommand{\q}{\mathsf{q}}
\newcommand{\qt}{\widetilde{\mathsf{q}}}
\newcommand{\tr}{\mathbf{t}}
\newcommand{\nub}{\nu_{\bullet}}
\newcommand{\nuw}{\nu_{\circ}}
\newcommand{\rhob}{\rho_{\bullet}}
\newcommand{\rhow}{\rho_{\circ}}
\newcommand{\mub}{\mu_{\bullet}}
\newcommand{\muw}{\mu_{\circ}}




%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\graphicspath{{./figures/}}

\newcommand*{\mk}{\mkern -1mu}
\newcommand*{\Mk}{\mkern -2mu}
\newcommand*{\mK}{\mkern 1mu}
\newcommand*{\MK}{\mkern 2mu}

%\hypersetup{urlcolor=purple, linkcolor=blue, citecolor=red}

\newcommand*{\relabel}{\renewcommand{\labelenumi}{(\theenumi)}}
\newcommand*{\romanenumi}{\renewcommand*{\theenumi}{\roman{enumi}}\relabel}
\newcommand*{\Romanenumi}{\renewcommand*{\theenumi}{\Roman{enumi}}\relabel}
\newcommand*{\alphenumi}{\renewcommand*{\theenumi}{\alph{enumi}}\relabel}
\newcommand*{\Alphenumi}{\renewcommand*{\theenumi}{\Alph{enumi}}\relabel}
\let\oldtilde\tilde
\renewcommand*{\tilde}[1]{\mathchoice{\widetilde{#1}}{\widetilde{#1}}{\oldtilde{#1}}{\oldtilde{#1}}}
\let\oldforall\forall
\renewcommand*{\forall}{\mathrel{\oldforall}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\title{Erd{\H{o}}s--R\'enyi Poissonized}
\alttitle{Erd\H{o}s--Rényi poissonisé}

\author{\firstname{Nicolas} \lastname{Curien}}
\address{Université Paris-Saclay}
\email{nicolas.curien@gmail.com}
\thanks{This work has been supported by ERC 740943 GeoBrown and by the French ANR RanTanPlan.}

\begin{abstract}
We introduce a variant of the Erd{\H{o}}s--R\'enyi random graph where the number of vertices is random and follows a Poisson law. A very simple Markov property of the model entails that the Lukasiewicz exploration is made of \emph{independent} Poisson increments. Using a vanilla Poisson counting process, this enables us to give very short proofs of classical results such as the phase transition for the giant component or the connectedness for the standard Erd{\H{o}}s--R\'enyi model.
\end{abstract}


\begin{altabstract}
On introduit une variante du graphe d'Erd\H{o}s--Rényi où le nombre de sommets est aléatoire et suit une loi de Poisson. Une propriété de Markov du graphe montre que le chemin de Lukasiewicz a des incréments indépendants. Cela permet de retrouver des résultats classiques comme la transition de phase pour l'existence de la composante géante en utilisant simplement des propriétés standards des processus de comptage de Poisson.
\end{altabstract}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\maketitle


\section{The \texorpdfstring{$ \Gpoii(\alpha,p)$}{GPoi(alpha,p)} model and its exploration}

Fix $\alpha >0$ and let $N \sim \clP(\alpha)$ be a random variable following a Poisson law of expectation $\alpha$. We consider the random graph $ \Gpoii(\alpha,p)$, which conditionally on $N$ is made of a classical Erd{\H{o}}s--R\'enyi $G(N,p)$ random graph (i.e. $N$ vertices where all $\binom{N}{2}$ edges are independent and present with probability $p$) that we call the \emph{core}, together with an infinite \emph{stack} of vertices; each pair of core and stack vertices are connected by an edge with probability $p$ independently. \emph{There are no edges between vertices of the stack}. See Figure~\ref{fig:ERP}.

\subsection{Markov property}
A step of \emph{exploration} in $ \Gpoi(\alpha,p)$ is the following: Fix a vertex $\rho$ of the stack (independently of the core) and reveal its neighbors $ y_1,\,\dots,\,y_K$ with $K\geq 0$ inside the core. Then, see those vertices $y_1,\,\dots,\,y_K$ as new vertices of the stack, in particular erase all possible edges between $y_1,\,\dots,\,y_K$ and between $y_1,\,\dots,\,y_K$ and other vertices of the stack. Denote by $ \Explo(\Gpoi(\alpha,p))$ the resulting random graph. The key observation is the following:

\begin{lemm}[Markov property of $ \Gpoii(\alpha,p)$] \label{lem:Markov}
Let $K \geq 0$ be the number of neighbors in the core of $ \Gpoi(\alpha,p)$ of the stack vertex $\rho$. Then $ K \sim \clP(\alpha p)$ and conditionally on $K$, the graph $ \Explo(\Gpoi(\alpha,p))$ has law $ \Gpoii(\alpha (1-p),p)$.
\end{lemm}


\begin{proof}
It is an instance of Poisson thinning. Call $N' = N -K$ the remaining number of vertices after the revelation of the $K$ neighbors of $\rho$ in the $G(N,p)$ part. Then remark the following factorization:
\begin{align*}
\bbP\left(K=k \text{ and } N'=n\right) &= \rme^{-\alpha} \frac{\alpha^{n+k}}{(n+k)!} \cdot \binom{n+ k}{k} p^{k} (1-p)^{n}\\ &= \left(\rme^{-\alpha(1-p)} \frac{\left(\alpha(1-p)\right)^{n}}{n!}\right) \cdot \left(\rme^{-\alpha p} \frac{(\alpha p)^{k}}{k!}\right).
\end{align*}
After removing all possible edges between any of the $K$ neighbors of $\rho$ and the former vertices of the stack, the statement follows, since conditionally on the status of the vertices (being in the stack, or in the remaining part), all remaining possible edges are i.i.d.~present with probability $p$.

\begin{figure}[!h]
\centering
\includegraphics[width=12cm]{ERP}
\caption{Exploration in the Poissonized version of the Erd{\H{o}}s--R\'enyi random graph. The stack is made of the white vertices on the left part while the core is represented by the gray part. After one step of exploration, the explored vertex (in red) is deleted as well as the edges linking the discovered vertices between each other or to the former stack. The resulting graph has law $ \Gpoii(\alpha (1-p),p)$.\qedhere\label{fig:ERP}
}
\end{figure}
\end{proof}

\subsection{Lukasiewicz exploration}
In particular, successive explorations in $ \Gpoii(\alpha,p)$ yield a sequence of \emph{independent} Poisson random variables with expectation $ \alpha p, \alpha p (1-p),\,\dots,\,\alpha p (1-p)^{k},\,\dots$ whose total sum is just a Poisson variable of parameter $ \alpha p \sum_{i\,\geq\,0} (1-p)^{i} = \alpha$, recovering the total number of vertices $N$ in the core as expected.


In the rest of the note, we shall focus on a specific exploration of the graph: we shall assume that iteratively, the discovered vertices are placed on top of the stack and that we successively explore the first vertex of the stack. We get the so-called \emph{Lukasiewicz exploration} of the graph $ \Gpoii(\alpha,p)$, see Figure~\ref{fig:lukaER}. We encode it in a process $ (\bbS_{k} : k \geq 0)$, the Lukasiewicz walk, defined by $ \bbS_{0}=0$ and where $ \Delta \bbS_{k} = \bbS_{k}- \bbS_{k-1}$ is the number of neighbors discovered at step $k$ minus one. Using Lemma~\ref{lem:Markov} we can write \emph{simultaneously} for all $k \geq 0$
\begin{align}\label{eq:lukapoisson}
\begin{split}
\bbS_{k} &= \left(\clP(\alpha p)-1\right) + \left(\clP(\alpha p (1-p))-1\right) + \dots + \left(\clP\left(\alpha p \left(1-p\right)^{k-1}\right)-1\right) \\
&= \clN\left(\alpha p \cdot \sum_{i=0}^{k-1} (1-p)^{i} \right)-k = \clN\left(\alpha \left(1-(1-p)^{k}\right)\right) -k,
\end{split} 
\end{align}
where all the Poisson random variables written above are independent and where $ (\clN(t): t \geq 0)$ is a standard unit-rate Poisson counting process on $ \bbR_{+}$. We shall only use the following standard limit theorems on the Poisson counting process
\begin{equation}\label{eq:lawll}
\frac{\clN(t)}{t} \xrightarrow[t\,\to\,\infty]{a.s.} 1, \quad \text{ and } \quad \frac{ (\clN(tn)-tn)}{ \sqrt{n}} \xrightarrow[n\,\to\,\infty]{(d)} (B_{t} : t \geq 0),
\end{equation}
where $(B_{t} : t \geq 0)$ is a standard linear Brownian motion. The left-hand side follows from the law of large numbers and the right-hand side from Donsker's invariance principle.

\begin{figure}[!h]
\centering
\includegraphics[width=12cm]{lukapoisson}
\caption{Lukasiewicz exploration of the graph $ \Gpoii(\alpha,p)$: the numbering reflects the order in which the vertices have been explored. The thick edges are kept whereas the thin red edges are discarded in the exploration. The thick (and very thick) edges form $ \Fpoii(\alpha, p)$ and the very thick ones form $ \mathrm{F'_{Poi}}(\alpha, p)$. The process $ \bbS$ on the right is obtained by concatenating the successive number of neighbors $-1$. \label{fig:lukaER}}
\end{figure}

\subsection{Relation to components}
Since $ \Gpoi(\alpha, p)$ has an infinite stack of vertices linked to each vertex of the core independently with probability $p$, as soon as $p >0$, the graph is connected and in fact all vertices of the core have infinite degree almost surely. However, if we only consider the edges that are not discarded in the Lukasiewicz exploration we obtain a spanning forest
\[
\Fpoi(\alpha,p) \subset \Gpoi(\alpha,p),
\]
whose Lukasiewicz walk is precisely $ \bbS$,
see Figure~\ref{fig:lukaER}. In particular, new minimal records of $ \bbS$ correspond to the discovery of a new tree component in $ \Fpoi(\alpha,p)$. If we further remove all vertices of the initial stack (together with the adjacent edges) we split $ \Fpoi(\alpha, p)$ into a finer forest $\mathrm{F}'_{\mathrm{Poi}}(\alpha,p)$ which spans the core and we can check the following graph inclusions
\begin{equation}\label{eq:inclusion}
\mathrm{F}'_{\mathrm{Poi}}(\alpha,p) \subset \underbrace{G(N,p)}_{\mathrm{Core}} \subset \Fpoi(\alpha,p) \subset \Gpoi(\alpha,p).
\end{equation}


\section{Phase transition for the giant}

Let us use the Lukasiewicz exploration of the Poissonized version of the Erd{\H{o}}s--R\'enyi random graph to give a straightforward proof of the well-known phase transition for the size of the largest connected component in $G(n,p)$.


\subsection{Existence of the giant component}

Fix $c>0$. Let $\alpha =n$ and $p \equiv p_n = \frac{c}{n}$ and denote by $ \bbS^{{(n)}}$ the resulting Lukasiewicz walk to emphasize the dependence on $n$. Since we have $(1- \frac{c}{n})^{[nt]} \to \rme^{-ct}$ as $n \to \infty$ uniformly over $ t \in \bbR_{+}$, using~\eqref{eq:lukapoisson} and the law of large numbers~\eqref{eq:lawll} we immediately deduce:


\begin{prop}[Fluid limit]\label{prop:fluid}
We have the following convergence in probability
\[
\sup_{t\,\geq\,0} \left\| \left(n^{-1} \cdot {\bbS^{(n)}_{[nt]}}\right) - \left(1- \rme^{{-ct}}-t\right)\right\| \xrightarrow[n\,\to\,\infty]{(\bbP)}0.
\]
\end{prop}


\begin{figure}[!h]
\centering
\includegraphics[width=10cm]{courbesERP}
\caption{Graphs of the functions $(1- \rme^{{-ct}}-t)_{t\,\geq\,0}$ for different values of $c$: in blue $c=1/2$, in orange $c=1$, in green $c=2$ and in red $c=3$. Notice the root $\beta(c)$ satisfying $1-\beta(c) = \rme^{-c \beta(c)}$.\label{fig:graphsfunctions}}
\end{figure}

When $c>1$ we write $\beta(c)$ for the first positive root of the function $ t \mapsto 1- \rme^{{-ct}}-t$, and when $ c \leq 1$ there is no such root and we set $\beta(c) = 0$, see Figure~\ref{fig:graphsfunctions}. In the rest of the note, for random variables $X_{n}$ and a sequence $f(n)$ we write $X_{n} = o_{ \bbP}(f(n))$ if we have $X_{n}/f(n) \to 0$ in probability and $X_{n} = O_{ \bbP}(f(n))$ if $(X_{n}/f(n) : n \geq 1)$ is tight.


\begin{coro}[Phase transition for $G(\clP(n), \frac{c}{n})$] \label{cor:giant}
Let $N \sim \clP(n)$. If $c <1$ then the largest connected component \emph{in the core} $G(N,c/n)$ has size $o_{ \bbP}(n)$, whereas if $ c >1$ it contains a unique giant component of size $ \beta(c)n + o_ \bbP(n)$, and the second largest component has size $o_{ \bbP}(n)$.
\end{coro}


\begin{proof}
Using the sandwiching of~\eqref{eq:inclusion} it suffices to prove the similar statements for $ \Fpoii$ and $ \mathrm{F'_{Poi}}$. The sizes of the connected components in $\Fpoii(n, \frac{c}{n})$ are given by the lengths of the excursions of $ \bbS^{(n)}$ above its running infimum process $ \underline{ \bbS}^{(n)}_k := \inf \{ \bbS^{(n)}_j : 0 \leq j \leq k\}$. We denote by $(L^{(n)}_{i} : i \geq 1)$ those excursion lengths ranked in decreasing order. Notice that the excursion lengths above the running infimum of the function $ t \mapsto 1- \rme^{-ct}-t$ are given by $(\beta(c), 0, 0,\dots)$. A moment's thought using Proposition~\ref{prop:fluid} shows that
\[
\left(\frac{L^{(n)}_{i}}{n} : i \geq 1\right) \xrightarrow[n\,\to\,\infty]{(\bbP)} (\beta(c), 0, 0,\,\dots)
\]
for the $\ell^{\infty}$ norm. This proves the statement of the corollary for the random graph $\Fpoii(n, c/n)$. In the case $c \leq 1$, since $ \mathrm{F'_{Poi}} \subset \Fpoii$ and $\beta(c)=0$ there is nothing more to prove. However, when $c>1$ the removal of the initial stack vertices may split the giant component of $ \Fpoii(n,c/n)$ of size $\beta(c)n +o_{ \bbP}(n)$ into several components but a moment's thought using the Lukasiewicz walk and Proposition~\ref{prop:fluid} again shows that one component of size $\beta(c)n + o_{ \bbP}(n)$ must remain.
\end{proof}


\subsection{Back to the $G(n,p)$ model} \label{paragraph:back}
The analogous statement in the case of $ G(n,p)$, \emph{where $n$ is fixed}, can easily be deduced. As usual, the idea for the depoissonization being that $ \clP(n)$ is concentrated around the value $n$ with $ \sqrt{n}$ fluctuations. Indeed, if we let $N_{-} \sim \clP(n - n^{7/12})$ and $ N_{+} \sim \clP(n + n^{7/12})$ then we have the natural inclusions
\begin{equation}\label{eq:sandwhich}
G\left(N_{-},p\right) \subset G(n,p) \subset G\left(N_{+},p\right),
\end{equation}
which hold with high probability when $n \to \infty$ because $ \bbP(\clP(n - n^{7/12}) \leq n) \to 1$ and $ \bbP(\clP(n +n^{7/12}) \geq n) \to 1$ as $n \to \infty$ (notice that we only coupled the cores of $ \Gpoii$ and not their stacks). By Corollary~\ref{cor:giant}, with high probability, both random graphs on the left-hand side and right-hand side of the last display have a unique component of size $ \approx \beta(c)n$, all others being of size negligible in front of $n$. We can then conclude that the same is true for the graph $G(n,p)$ sandwiched between those two, and this is a classical result of Gilbert--Erd{\H{o}}s--R\'enyi~\cite{erdds1959random,erdHos1960evolution}.

\subsection{Refined estimates}

Let us turn to refined estimates on the cluster sizes still in the case $ \alpha=n$ and $p\equiv p_n =\frac{c}{n}$ for $ c >0$, using the Brownian limit in~\eqref{eq:lawll}. Getting from those results the analogs for the fixed-size Erd{\H{o}}s--R\'enyi via depoissonization is however more involved and we leave it open for further research. We start with the critical case $c=1$.

\subsubsection{Critical case and Aldous's limit}
In the critical case $p = \frac{1}{n}$ we can give an analog of a result of Aldous in the case of the standard fixed-size Erd{\H{o}}s--R\'enyi~\cite{aldous1997brownian} indicating that the size of the clusters in the near critical regime is of order $ n^{2/3}$:

\begin{prop}[Near critical case]\label{prop:aldous}
Fix $\lambda \in \bbR$. For $ p\equiv p_n = \frac{1}{n} + \frac{\lambda}{n^{{4/3}}}$, the Lukasiewicz walk of $ \Gpoii(n, p_n)$ satisfies
\[
\left(n^{-1/3} \cdot {\bbS^{(n)}_{\left[n^{2/3}t\right]}}\right)_{t\,\geq\,0} \xrightarrow[n\,\to\,\infty]{(d)} \left(B_{t} + \lambda t - \frac{t^{2}}{2}\right)_{t\,\geq\,0},
\]
where the convergence holds in distribution for the uniform norm over every compact of $ \bbR_{+}$.
\end{prop}

\begin{proof}
Fix $A>0$. Putting $k = [n^{2/3}t]$ for $t \in [0,A]$ in the equation~\eqref{eq:lukapoisson}, we have
\begin{equation}\label{eq:dse}
n \left(1-\left(1- \frac{1}{n}- \frac{\lambda}{n^{4/3}}\right)^{\left[n^{2/3}t\right]}\right) = tn^{2/3} + \lambda t n^{1/3} - \frac{t^{2}}{2} n^{1/3} + o\left(n^{1/3}\right),
\end{equation}
as $n \to \infty$ and where the little $o$ is uniform in $t \in [0,A]$. The second item of~\eqref{eq:lawll} together with the Skorokhod representation theorem show that on a common probability space we can build for each $m \geq 1$ a Poisson counting process $ \clN^{(m)}$ and a Brownian motion $B$ so that we have the \emph{almost sure} convergence:
\begin{equation}\label{eq:skorokhod}
\frac{\left(\clN^{(m)}(tm)-tm\right)}{\sqrt{m}} \xrightarrow[m\,\to\,\infty]{a.s.} \left(B_{t} : t \geq 0\right)
\end{equation}
for the uniform norm over every compact of $ \bbR_{+}$. Recalling~\eqref{eq:lukapoisson} those observations yield for $m = [n^{2/3}]$
\begin{align*}
\left(\frac{\bbS^{(n)}_{\left[n^{2/3}t\right]}}{n^{1/3}} \right)_{0\,\leq\,t\,\leq\,A} & \overset{(d)}{\underset{ \mathrm{ for\ each\ }n}{=}} \left(\frac{\clN^{(m)}\left(n \left(1-\left(1- \frac{1}{n}- \frac{\lambda}{n^{4/3}}\right)^{[n^{2/3}t]}\right)\right) -[n^{2/3}t]}{n^{1/3}}\right)_{0\,\leq\,t\,\leq\,A}\\
& \underset{~\eqref{eq:dse}}{=} \left(\frac{\clN^{(m)}\left(tm + \lambda t \sqrt{m} - \frac{t^{2}}{2} \sqrt{m} + o\left(\sqrt{m}\right) \right)- t m +o\left(\sqrt{m}\right)}{ \sqrt{m} + o(1)}\right)_{0\,\leq\,t\,\leq\,A} \\
&\underset{~\eqref{eq:skorokhod}}{\xrightarrow[n\,\to\,\infty]{a.s.}} \left(B_{t} + \lambda t - \frac{t^{2}}{2} \right)_{0\,\leq\,t\,\leq\,A},
\end{align*}
and this proves the proposition.
\end{proof}


\subsubsection{CLT for the giant}
We now suppose that we are in the supercritical regime $c>1$ and establish a central limit theorem for the size of the largest component in $ \Fpoii(n,c/n)$:

\begin{prop}\label{prop:cltgiant}
The size $L^{(n)}_{1}$ of the largest component in $ \Fpoii(n,c/n)$ satisfies
\[
\frac{L^{(n)}_{1} - \beta(c)n}{ \sqrt{n}} \xrightarrow[n\,\to\,\infty]{(d)} \frac{B_{\beta({c})}}{1-c^*}, \quad \text{ where }c^{*} = c(1- \beta(c)),
\]
where $B$ is the Brownian motion appearing in~\eqref{eq:lawll}.
\end{prop}

\begin{rema*}
In the case of the Erd{\H{o}}s--R\'enyi $G(n, \frac{c}{n})$ for $c>1$ the central limit theorem for the size of the giant makes a variance $ \beta(c)(1-\beta(c))/(1-c^{*})^{2}$ appear, see~\cite{pittel1990tree,bollobas2012asymptotic}. The additional $(1-\beta(c))$ factor should be explained by the conditioning of our model to have a core of size $ n + o(\sqrt{n})$, but we leave the depoissonization open.
\end{rema*}

\begin{proof}
We already know from the proof of Corollary~\ref{cor:giant} that the giant component in $ \Fpoii(n,c/n)$ comes from the only macroscopic excursion of $ \bbS^{(n)}$ over its running infimum between times $0 \leq I^{(n)} < J^{(n)}$ with $I^{(n)}/n \to 0$ and $J^{(n)}/n \to \beta(c)$ in probability. Specifically, since $c>1$ we can fix $ \varepsilon \in (0, \beta(c)/2)$ and those times are defined by
\[
I^{(n)} = \inf\left\{ i \leq \varepsilon n : \bbS^{(n)}_i = \inf_{1\,\leq\,j\,\leq\,\varepsilon n} \bbS^{(n)}_j\right\} \quad \text{ and } J^{(n)} = \inf\left\{ i \geq I^{(n)} : \bbS^{(n)}_i = \bbS^{(n)}_{I^{(n)}}-1\right\},
\]
so that $ \bbS^{(n)}$ performs an excursion above its running infimum over the time interval $[I^{(n)}, J^{(n)}]$. We will then study the finer asymptotics of those times $I^{(n)}$ and $J^{(n)}$.
For fixed $k\geq 0$ we have $n(1-(1- \frac{c}{n})^k) \to c k$ as $n \to \infty$; using~\eqref{eq:lukapoisson} we deduce the following convergence in distribution
\[
\left(\bbS^{(n)}_{k} : k \geq 0\right) \xrightarrow[n\,\to\,\infty]{(d)} \left(\clN(c \cdot k) -k : k \geq 0\right).
\]
When $c >1$, using~\eqref{eq:lawll} it is not hard to see that for every $ \varepsilon>0$ we can find $A_{ \varepsilon}>0$ so that $(\bbS^{(n)}_{k} : k \geq 0)$ is positive over $[A_ \varepsilon, \beta(c) n/2]$ with probability at least $1- \varepsilon$. We deduce from the last two remarks the convergence in law of the time $I^{(n)}$ towards $ \mathcal{I} := \mathrm{argmin}\{ \clN(c \cdot k)-k : k \geq 0\}$ which is an almost surely finite random variable since $c >1$ by~\eqref{eq:lawll}. In particular $(I^{(n)} : n \geq 1)$ is tight.
On the other hand, the time $J^{(n)}$ can be further estimated. Put $k = [\beta(c) n + x \sqrt{n}]$ for $x \in \bbR$, then as long as $x \equiv x_{n}$ is negligible in front of $ \sqrt{n}$ we have
\[
n \left(1-\left(1- \frac{c}{n}\right)^{\left[\beta(c)n + x \sqrt{n}\right]}\right) = n \beta(c) + c \left(1- \beta(c)\right) x \sqrt{n} + o\left((|x|+1) \sqrt{n}\right).
\]
Using the same coupling as in~\eqref{eq:skorokhod} for $m=n$ we can write simultaneously for all $k = [\beta(c) n + x \sqrt{n}]$ with $x$ such that $x = o(\sqrt{n}),$
\begin{align*}
\bbS^{(n)}_{k} & \overset{(d)}{\underset{ \text{ for\ each\ }n, \forall k }{=}} \clN^{(n)}\left(n \left(1-\left(1- \frac{c}{n}\right)^{k}\right)\right) -k \\
& = {\clN^{(n)}\left(n \beta(c) + c (1- \beta(c)) x \sqrt{n} + o\left((|x|+1) \sqrt{n}\right)\right)} - n \beta(c) - x \sqrt{n} \\
& \underset{~\eqref{eq:skorokhod}}{=} \sqrt{n}\left(B_{ \beta(c)} + x\left(c(1- \beta(c))-1\right) + o_{\bbP}(1+|x|)\right).
\end{align*}
Here $(B_t : t \geq 0)$ is the Brownian motion appearing in~\eqref{eq:skorokhod}. Since $(c(1- \beta(c))-1) <0$ when $c>1$, the last display is positive when $x$ is very negative, whereas it is negative when $x$ is very positive. We easily deduce that the time $J^{(n)} \approx \beta(c)n$ at which the last display crosses zero at the scale $ \sqrt{n}$ satisfies
\begin{equation}\label{eq:cltgiant}
\frac{J^{(n)} - \beta(c)n}{ \sqrt{n}} \xrightarrow[n\,\to\,\infty]{(d)} \frac{B_{\beta({c})}}{1-c^*}, \quad \text{ where }c^{*} = c(1- \beta(c)).
\end{equation}
Since $I^{{(n)}}$ converges in law, the variable $ L^{(n)}_{1} = J^{(n)}-I^{(n)}$ satisfies the same central limit behavior.
\end{proof}



\section{Connectedness}
As another application of our Poissonization technique, let us give a short proof of the sharp phase transition for connectedness in the fixed-size Erd{\H{o}}s--R\'enyi~\cite{erdds1959random,erdHos1960evolution}:

\begin{prop}
For $c \in \bbR$ we have
\[
\bbP\left(G\left(n, \frac{\log n +c}{n}\right) \text{ is connected}\right) \xrightarrow[n\,\to\,\infty]{} \rme^{- \rme^{-c}}.
\]
\end{prop}

\begin{proof}
Let $p\equiv p_n = \frac{\log n +c}{n}$. We shall first prove the convergence of the proposition when the number $N$ of vertices of the Erd{\H{o}}s--R\'enyi graph is random and distributed according to one plus a Poisson law of expectation $n$. Connectedness of this graph is equivalent to the fact that $ \Fpoii(n,p)$ has only one non-trivial component (the others being isolated vertices of the stack), or equivalently that the Lukasiewicz walk $(\bbS^{(n)})$ starts with a (large) excursion and once it has reached level $-1$, it makes only jumps of $-1$ forever. Using~\eqref{eq:lukapoisson} and~\eqref{eq:lawll}, it is easy to see that the first hitting time $\tau_{-1}^{(n)}$ of $-1$ by the process $ \bbS^{(n)}$ is concentrated around $n$ and more precisely using similar calculations as in Proposition~\ref{prop:cltgiant} we have
\begin{equation}\label{eq:tau-1n}
\frac{\tau_{-1}^{(n)}-n}{ \sqrt{n}} \xrightarrow[n\,\to\,\infty]{(d)} B_{1}.
\end{equation}
Besides, since the increments of $ \bbS^{(n)}$ are Poisson and independent, by the Markov property of the exploration we have that
\begin{align*}
\bbP\left(\rho \cup G(N,p) \text{ is connected}\right) &=
\bbP\left(\Delta \bbS^{(n)}_{i} =-1, \forall i \geq \tau_{-1}^{(n)} \right)= \bbE\left[\bbP\left(\Delta \bbS^{(n)}_{i} =-1, \forall i \geq \tau_{-1}^{(n)} \,\middle|\, \tau_{-1}^{(n)}\right)\right] \\
&= \bbE\left[\bbP\left(\clP\left(n \left(1-p_n\right)^{\tau_{-1}^{(n)}}\right) =0\right)\right] = \bbE\left[\exp\left(- n\left (1- \frac{\log n +c }{n}\right)^{\tau_{-1}^{(n)}}\right)\right],
\end{align*}
and by~\eqref{eq:tau-1n} the random variable inside the expectation converges in probability to $ \rme^{-{ \rme^{-c}}}$ since $\tau_{-1}^{{(n)}} = n + O_{ \bbP}(\sqrt{n})$. The desired statement follows. To come back to the fixed-size $G(n,p)$ model, notice that the function $ \phi(n,p) = \bbP(G(n,p) \text{ is connected})$ is increasing in $p$ for $n$ fixed, but the monotonicity in $n$ is not clear. However, the natural inclusion $ G(n,p) \subset G(n+1,p)$ enables us to write
\begin{multline*}
\phi(n+1,p)\\
\begin{aligned}
&\geq \bbP(G(n,p) \text{ is connected and the }(n+1)^{\rm th} \text{ vertex is connected to one of the first $n$ vertices}) \\
&\geq \phi(n,p) - \rmP\left(\rBin(n,p) =0\right) \geq \phi(n,p) - \rme^{-np}.
\end{aligned}
\end{multline*}
Recalling that $p_{n} = \frac{\log n +c}{n}$ we deduce that if $0\leq k_{n} = o(n)$ then we have $\phi(n+k_{n}, p_{n}) \geq \phi(n,p_{n}) + o(1)$. With the notation of~\eqref{eq:sandwhich} this shows that
\[
\bbP\left(G\left(N_{-},p_{n}\right) \text{ is connected}\right)-o(1) \leq \bbP\left(G\left(n,p_{n}\right) \text{ is connected}\right) \leq \bbP\left(G\left(N_{+},p_{n}\right) \text{ is connected}\right)+o(1),
\]
and by sandwiching, the middle term does converge to $\rme^{- \rme^{-c}}$.
\end{proof}


\section*{Comments}
There is no doubt that the Poissonized model $ \Gpoii$ can be used to recover other estimates on the standard $G(n,p)$ model such as the number of components, the logarithmic size of the components in the subcritical regime\dots\ However, it is not clear to us how to adapt the Poissonized model for other random graphs such as the configuration model or rank-1 random graphs.


\section*{Acknowledgments}
We thank Svante Janson for feedback on a first version of this note, and Yuval Peres, Bal\'azs R\'ath, Felix Joos and the two anonymous referees for pointing out to us a few shortcomings, which greatly helped improve the note.

\section*{Declaration of interests}
The authors do not work for, advise, own shares in, or receive funds from any organization that could benefit from this article, and have declared no affiliations other than their research organizations.

\printbibliography
\end{document}
