\documentclass[12pt]{article}
\usepackage{e-jc}
\specs{P4}{19}{2012}

\usepackage{amsmath,amsthm,amssymb}
\newcommand{\E}{{\mathbb{\,E}}}
\newcommand{\I}{{\mathbb{I}}}
\newcommand{\Var} {\mathrm {Var}}
\newcommand{\Prob} {\mathbb P}
\newcommand{\prob}[1]{\Prob\left\{#1\right\}}
\newcommand{\pc}[2]{\prob{#1\,|\,#2}}
\newcommand{\Ec}[2]{\E[#1\,|\,#2]}
\newcommand{\pr}{\operatorname{Pr}}
\newcommand{\Ind} {\mathbb I}
\newcommand{\eps} {\varepsilon}
\newcommand{\fee} {\varphi}
\newcommand{\R} {\mathbb R}
\newcommand{\Z} {\mathbb Z}
\newcommand{\N} {\mathbb N}
\newcommand{\cond}[2]{\left[\,#1\,|\,#2\,\right]}
\newcommand{\aut}{\operatorname{aut}}
\newcommand{\Sc}{\mathcal{S}}
\newcommand{\al}{\alpha^*}
\newcommand{\e}{\mathrm{e}}
\newcommand{\Bi}{\operatorname{Bi}}
\newcommand{\psic}{\Psi_{C_4}}

      
\newtheorem{theorem}{Theorem}
\newtheorem{lemma}[theorem]{Lemma}
\newtheorem{corollary}[theorem]{Corollary}
\newtheorem{proposition}[theorem]{Proposition}
\newtheorem{conjecture}{Conjecture}
\newtheorem*{conjecture*}{Conjecture}
\newtheorem*{proposition*}{Proposition}
\theoremstyle{remark}
\newtheorem{remark}{Remark}
\newtheorem*{remark*}{Remark}

  \title{On the Upper Tail of Counts of\\
  Strictly Balanced Subgraphs}

  \author{Matas \v{S}ileikis \\ 
  \small Department of Mathematics and Computer Science, \\[-0.8ex]
  \small Adam Mickiewicz University, Pozna\'n, Poland \\
  \small \texttt{matas.sileikis@gmail.com}\\
  }

\date{\dateline{Aug 20, 2011}{Dec 19, 2011}{Jan 6, 2012}\\
     \small Mathematics Subject Classification: 05C80}

\begin{document}
  \maketitle
  \begin{abstract}
    Let $X_G$ be the number of copies of $G$ in the Erd\H{o}s--R\'{e}nyi binomial random graph $\mathbb G(n,p)$. 
  Janson, Oleszkiewicz and Ruci\'nski proved that for every $t > 1$ 
  \begin{equation*}
    \exp \{-O_t( M^*_G \ln (1/p))\} \leq \prob{X_G \geq t\E X_G} \leq \exp\{-\Omega_t(M^*_G)\},
  \end{equation*}
  where $M^*_G$ is a certain function of $n$ and $p$. For $G = K_3$ the logarithmic gap between the bounds was closed by Chatterjee and, independently, DeMarco and Kahn. 
  We provide matching bounds for strictly balanced $G$, when $\E X_G \leq \ln n$. Also, we give matching bounds for $C_4$,~$K_4$, and stars $K_{1,k}$ in a broader range of $\E X_G$. In particular, this improves some results of Janson and Ruci\'nski for which the so called deletion method was used. 
  \end{abstract}
  \section{Introduction}
  For a fixed graph $G$, let $X_G$ be the number of copies of $G$ in the random graph $\mathbb{G}(n,p)$ (see, e.g., \cite{JLR} for definition) and let $\mu_G = \E X_G$. We consider the asymptotics of 
  \begin{equation*}
    -\ln \prob{X_G \geq t\mu_G}
  %  \label{tail}
  \end{equation*}
  for fixed $t > 1$, as $n \to \infty$. In the sequel, we always assume that $G$ has at least one edge.
  
  We use the asymptotic notation $O, \Omega, \Theta, o, \asymp, \gg$ as it is defined in \cite{JLR} with implicit constants possibly depending on $G$.  Subscripts added to these symbols indicate additional parameters on which the implicit constants depend. We treat~$p$ as a function of $n$, and $n$ is assumed to be sufficiently large. The numbers of vertices and edges of a graph $H$ are denoted, respectively, by $v_H$ and $e_H$.
  
  Let $\Psi_H = n^{v_H} p^{e_H} \asymp \mu_H$, and $\Phi_G = \min_{H \subseteq G} \Psi_H$, where the minimum is taken over subgraphs $H$ with $e_H >0$. Let $m_G = \max_{H \subseteq G} e_H/v_H$ be the \emph{maximum density} of $G$. In the present paper we assume, for simplicity, that $p \geq n^{-1/m_G}$. This is equivalent to $\Phi_G \geq 1$. Recall that $n^{-1/m_G}$ is the threshold for the event $\{X_G > 0\}$. See \cite[Remark 8.1]{JOR} for an explanation of why we are not interested in $p$ below the threshold.
  
  Also, to avoid some trivialities, we assume that $t\mu_G$ is at most the number of copies of $G$ in $K_n$, since otherwise $\prob{X_G \geq t\mu_G} = 0$.
  

  Janson, Oleszkiewicz and Ruci\'nski \cite{JOR} proved that for every $t > 1$ 
  \begin{equation}
    p^{O_t( M^*_G)} \leq \prob{X_G \geq t\mu_G} \leq \exp\{-\Omega_t(M^*_G)\}.
    \label{JOR}
  \end{equation}
  In \eqref{JOR}, $M^*_G$ is a function of $n$ and $p$ satisfying
  \begin{equation}
    M^*_G \asymp \begin{cases}
      \min_{H \subseteq G} \Psi_H^{1/\al_H}, \quad &\text{if } p \leq n^{-1/\Delta_G}, \\
      n^2p^{\Delta_G}, \quad &\text{if } p \geq n^{-1/\Delta_G},
    \end{cases}
    \label{Mg}
  \end{equation}
  where $\al_H$ stands for the \textit{fractional independence number}, defined as the maximum of $\sum_v\alpha_v$ over all assignments of nonnegative weights $\alpha_v$ to the vertices satisfying $\alpha_u + \alpha_v \leq 1$ for every edge $uv$ of $H$ (see \cite{JOR} for some properties of $\al_H$), while $\Delta_G$ is the maximum degree. For example, $\al_{K_3} = 3/2$, and $M^*_{K_3} \asymp n^2p^2$ for all $p$. 
  
  One should bear in mind that the right-hand side of \eqref{Mg} is continuous as a function of~$p$, and takes a simple form $n^ap^b$ with some constants $a, b > 0$ in each of a finite number of intervals (see \cite[Section 7]{JOR} for a thorough analysis).
  
  The logarithms of the upper and lower bounds in \eqref{JOR} differ by a multiplicative factor $\ln (1/p)$. Recently Chatterjee \cite{chatterjee} and, independently, DeMarco and Kahn~\cite{DK} closed this logarithmic gap for $G = K_3$. Chatterjee proved that if $p > Cn^{-1}\ln n$, where $C$ is a constant depending on $t$, then
  \begin{equation}
    \prob{X_{K_3} \geq t\mu_{K_3}} = \exp \left\{ -\Theta_t \left( n^2p^2 \ln (1/p) \right )\right\},
    \label{DKlog}
  \end{equation}
which means that in this range of $p$ the lower bound in \eqref{JOR} is sharp.
Shortly after the preprint of Chatterjee \cite{chatterjee} was posted, a preprint by DeMarco and Kahn \cite{DK} appeared on arXiv with an alternative proof of \eqref{DKlog} for $p > n^{-1} \ln n$. They also proved that if $n^{-1} \leq p \leq  n^{-1}\ln n$, then
  \begin{equation}
    \prob{X_{K_3} \geq t\mu_{K_3}} = \exp \left\{ -\Theta_t \left( n^3p^3 \right) \right\}.
    \label{DK}
  \end{equation}
 Thus, in a small range above the threshold neither of the bounds in \eqref{JOR} is sharp.
  Asymptotics \eqref{DKlog} and \eqref{DK} can be combined into a single result:
  \begin{equation}
    \prob {X_{K_3} \geq t\mu_{K_3}} = 
    \exp \left\{ -\Theta_t\left( \min \left\{ \Psi_{K_3} , M^*_{K_3} \ln(1/p) \right\} \right) \right\}.
    \label{both}
  \end{equation}

  A graph $G$ is called \emph{balanced} if $\max_{H \subseteq G} e_H/v_H$ is attained by $H = G$, and, in particular, \emph{strictly balanced}, if it is attained by $H = G$ only.

  The first result of this paper is the following theorem, which gives asymptotics analogous to \eqref{DK} for strictly balanced $G$, when $\Psi_G$ is logarithmically small.
  \begin{theorem}
    \label{thm}
  Let $G$ be a strictly balanced graph. Then, for every $t > 1$ and $\Psi_G \leq \ln n$,
  \begin{equation}
    \prob{X_G \geq t \mu_G} \leq \exp \{-\Omega_t(\mu_G)\},
    \label{upper}
  \end{equation}
and for $\Psi_G \leq \ln^a n$, where $a = \alpha^*_G/(\alpha^{*}_G - 1)$, 
\begin{equation}\label{lower}
  \prob{X_G \geq t \mu_G} \geq \exp \{-O_t(\mu_G)\}.
\end{equation}
  \end{theorem}
\noindent  Let us clarify the above restrictions on $\Psi_G$. 
The upper (respectively, lower) bound holds for $\Psi_G \leq B \ln n$ (respectively, $\Psi_G \leq B \ln^a n$) for any fixed $B > 0$, if we let the implicit constants depend on $B$. Therefore, for simplicity, we take $B = 1$.
  For the upper bound~\eqref{upper} the restriction $\Psi_G \leq \ln n$ seems to be a technical limitation. It is the cost we pay for the simplicity of the proof. We hope that with more effort it can be relaxed to $\Psi_G \leq \ln^a n$ to match the range of the validity of the lower bound. A suggestion how one could try to obtain such a relaxation is outlined in Remark \ref{rem2}.

  \begin{remark}\label{rem}
    The lower bound \eqref{lower} is obtained by extending the approach of DeMarco and Kahn \cite{DK}. The bound holds much more generally, but in Theorem \ref{thm} we state it only for the range where it is better than the lower bound in \eqref{JOR}. In other words, inequality $\Psi_G \leq \ln^a n$ is roughly equivalent to $\mu_G \leq M_G^* \ln (1/p)$ (see Remark \ref{rem1}). 
  \end{remark}
  
  Actually, in the proof of \eqref{lower} we bound the tail $\prob{X_G \geq t\mu_G}$ from below by the probability that the number of copies is exactly $\lceil t\mu_G \rceil$, and, moreover, these copies are vertex-disjoint.

  During his plenary lecture at the conference ``Random Structures and Algorithms, Atlanta, May 2011'' J. Kahn presented a result (to appear in \cite{DK2}) that for any clique $G = K_r$ asymptotics analogous to~\eqref{both} hold, namely,  
  \begin{equation}
    \label{eq:cliques}
    \prob{X_{K_r} \geq t\mu_{K_r}} = \exp \left\{ -\Theta_t\left( \min \{\mu_{K_r}, M^*_{K_r} \ln(1/p) \} \right) \right\} .
  \end{equation}
In the same lecture the following conjecture of DeMarco and Kahn was stated:
  \begin{conjecture}\label{conj}
    If $G$ is a graph and $t > 1$, then for every $p$
    \begin{equation*}      
      \prob{X_G \geq t\mu_G} = \exp \{ -\Theta_t( \min \{ \Phi_G, M_G^* \ln ( 1/p ) \} ) \}.
    \end{equation*}
  \end{conjecture}
\noindent Moreover, in the aforementioned talk it was mentioned that for any graph $G$ and any subgraph $H \subseteq G$ it is not hard to prove that
  \begin{equation}\label{eq:DKlower}
    \prob{X_G \ge 2 \mu_G} \ge \exp \left\{ -O(\mu_H) \right\},
  \end{equation}
  which is more general than \eqref{lower}. 
  However, we do not know whether a proof of \eqref{eq:DKlower} in full generality is going to be published soon. Therefore, acknowledging the origin of the idea, we provide a proof of \eqref{lower}.
  
  In this paper we also prove the asymptotics for $G = K_4, C_4$, and $G = K_{1,k}$, $k \geq2$, thus supporting Conjecture \ref{conj}. We still assume some conditions for~$p$, but these are less restrictive than the condition $\Psi_G \leq \ln n$ assumed in Theorem~\ref{thm}: the order of $\Psi_G$ may now be a power of $n$. Note that all these graphs are strictly balanced.
\begin{theorem}
  \label{thm2}
  Let $\gamma > 0$ and $t > 1$.
  \renewcommand{\theenumi}{(\roman{enumi})}
  \renewcommand{\labelenumi}{\theenumi}
  \begin{enumerate}
    \item\label{thm2b} If $p \leq n^{-4/5 - \gamma}$, then
      \begin{equation*}
	\prob {X_{C_4} \geq t\mu_{C_4}} = 
	\exp \left\{ -\Theta_{t, \gamma}\left( \min \left\{ \Psi_{C_4} , M^*_{C_4} \ln n \right\} \right) \right\}.
      \end{equation*}
    \item\label{thm2a} If $p \leq n^{-1/2 - \gamma}$, then
      \begin{equation*}
	\prob {X_{K_4} \geq t\mu_{K_4}} = 
	\exp \left\{ -\Theta_{t, \gamma}\left( \min \left\{ \Psi_{K_4} , M^*_{K_4} \ln n \right\} \right) \right\}.
      \end{equation*}
    \item\label{thm2c} For every $k \ge 2$, if $K = K_{1,k}$ is the $k$-armed star and $p \leq n^{-\theta - \gamma}$, where $\theta = 1 + k^{-1} - k^{-2}$, then
      \begin{equation*}
	\prob {X_K \geq t\mu_K} = 
	\exp \left\{ -\Theta_{t, \gamma}\left( \min \left\{ \Psi_{K} , M^*_{K} \ln n \right\} \right) \right\}.
      \end{equation*}
  \end{enumerate}
  \end{theorem}
\noindent Notice that the threshold for the existence of the star $K_{1,k}$ is $p = n^{-1 - k^{-1}}$.

  Well before the asymptotics for triangles were found, Janson and Ruci\'nski~\cite{JR} improved the upper bound in \eqref{JOR} in some range of $p$ for graphs $G = K_4$ and $G = C_4$ by inserting a factor ${\ln^{1/2} n}$ in the exponent. 
  Assume that $\mu_{C_4} \geq C \ln n$ for some large $C>0$ and $p \leq n^{-2/3 - \gamma}$. Then
  \begin{equation}
    \prob{X_{C_4} \geq 2\mu_{C_4}} \leq \exp\{-\Omega_{\gamma}(M^*_{C_4}\ln^{1/2} n)\}.
    \label{C4}
  \end{equation}
  If $\mu_{K_4} \geq C\ln n$ and $p \leq n^{-1/2 - \gamma}$ for some constant $\gamma > 0$, then
  \begin{equation}
    \prob{X_{K_4} \geq 2\mu_{K_4}} \leq \exp\{-\Omega_{\gamma}(M^*_{K_4}\ln^{1/2} n)\}.
    \label{K4}
  \end{equation}
  Although these inequalities are stated for $t=2$, the proof in \cite{JR} can be easily extended to all $t > 1$.

  Theorem \ref{thm2}.\ref{thm2b} improves \eqref{C4} for $p \leq n^{-4/5-\gamma}$ and Theorem \ref{thm2}.\ref{thm2a} improves~\eqref{K4} for $p \le n^{-1/2-\gamma}$. Also, note that Theorem \ref{thm2} assumes no restriction on $p$ from below, except for the universally assumed $p \geq n^{-1/m_G}$.
  Finally, observe that part (ii) of Theorem \ref{thm2} is a special case of \eqref{eq:cliques}.

  The inequalities \eqref{C4} and \eqref{K4} were obtained by the so called deletion method. We prove Theorem \ref{thm2} using an older method of approximation by disjoint copies, originating from Spencer \cite{spencer} (see \cite[Section 2.3.4]{infamous}). However, in the proof of Theorem \ref{thm2} for $C_4$ and $K_4$, we retain the ad hoc part of the original proof of \eqref{C4} and \eqref{K4} by Janson and Ruci\'nski \cite{JR}, which involves a two-fold application of Chernoff's bound and heavily relies on the simplicity of $C_4$ and $K_4$.

\section{The Proof of Theorem \ref{thm}}\label{sec:pfthm}
  The example of the triangle suggests that, roughly speaking, for small $p$ the upper tail of~$X_G$ behaves as if the copies of $G$ were disjoint. Recalling that $n^3p^3 \asymp~\mu_{K_3}$, compare~\eqref{DK} with the following fact. Let $D_G^e$ be the maximum number of edge-disjoint copies of $G$, and let $\fee(\eps) = (1 + \eps) \ln(1 + \eps) - \eps$.  Lemma~2.46 in \cite{JLR} states that for every $\eps > 0$ 
  \begin{align}
    \prob{D_G^e \geq (1 + \eps)\mu_G} &\leq \exp\{-\mu_G\fee(\eps)\} \label{janson1} \\
    &\leq \exp \left\{-\frac{\eps^2}{2(1 + \eps/3)}\mu_G\right\}.
    \label{janson}
  \end{align}
We proceed with some auxiliary facts. The advantage of using $\Psi_G$ instead of $\mu_G$ is that it satisfies the log-modularity property: if $G_1, G_2$ are graphs, then 
  \begin{equation}\label{modularity}
    \Psi_{G_1 \cup G_2} \Psi_{G_1 \cap G_2} = \Psi_{G_1}\Psi_{G_2}. 
  \end{equation}
%Hence, to answer the question when the right-hand side of \eqref{disjoint} is asymptotically better than the lower bound in \eqref{JOR} we need to consider the inequality
Recall that for strictly balanced $G$ we have $m_G = e_G/v_G$, so the threshold for existence of $G$ is $p = n^{-v_G/e_G}$.
\begin{proposition}
  \label{psi}
  If a graph $G$ is strictly balanced, then the following facts hold.
  \begin{itemize}
    \item[\rm{(i)}] $G$ is connected.
    \item[\rm{(ii)}] If $H$ is a proper subgraph of $G$ and $p \geq n^{-v_G/e_G}$, then 
      \begin{equation}\label{prop1}
	\Psi_H \geq n^c, 
      \end{equation}
      where $c = c(H) = v_H - e_H v_G /e_G > 0$.
    \item[\rm{(iii)}] There is a constant $b \in (0, v_G/e_G)$ such that for $p \in [n^{-v_G/e_G}, n^{-b}]$ and $H \subseteq G$ with $v_H > 0$ we have 
      \begin{equation}\label{prop2}
	\Psi_G \leq \Psi_H.
      \end{equation}
    \item[\rm{(iv)}] Let $a > 0$ be a constant. If $p \geq n^{-v_G/e_G}$ and $\Psi_G \leq \ln^a n$, then 
      \begin{equation*}
	M^*_G \asymp \Psi_G^{1/\al_G}.
      \end{equation*}
  \end{itemize}
\end{proposition}
\begin{proof}
  {(i)} If $G$ were not connected, then 
 it would have a connected component as dense as $G$ itself, contradicting the fact that $G$ is strictly balanced.

 {(ii)} To prove \eqref{prop1}, note that the condition $p \geq n^{-v_G/e_G}$ implies 
  $$\Psi_H = n^{v_H} p^{e_H} \ge n^{v_H - e_Hv_G/e_G}.$$
  Since $G$ is strictly balanced, we have $e_G/v_G > e_H/v_H$, which is equivalent to the inequality $v_H - e_Hv_G/e_G > 0$. 

  {(iii)} Inequality \eqref{prop2} follows from (ii) with $b = (v_G - \min_{H \subsetneq G}c(H))/e_G$:
  \begin{equation*}
    \Psi_G = n^{v_G} p^{e_G} \leq n^{v_G - be_G} \leq n^{c(H)} \leq \Psi_H, \qquad H \subsetneq G.
  \end{equation*}
  
  {(iv)} Clearly $\Psi_G \le \ln^a n$ implies $p \ll n^{-1/\Delta_G}$. Therefore by \eqref{Mg}
      \begin{equation*}
	M^*_G \asymp \min_{H \subseteq G} \Psi_H^{1/\al_H}.
      \end{equation*}
      Let $H$ be a proper subgraph of $G$. We have $\Psi_H \ge n^c$ from (ii). It follows that
      $$\Psi_H^{1/\al_H} \ge n^{c/\al_H} \gg (\ln n)^{a/\al_G} \ge \Psi_G^{1/\al_G}.$$
\end{proof}

\begin{proof}[Proof of \eqref{upper}]
  Let $\mathcal F_G = \{F = G_1 \cup G_2 : 0 < e_{G_1 \cap G_2} < e_G\}$ be the set of all unlabeled graphs obtained by taking a union of two distinct copies of $G$ with at least one common edge. For example $\mathcal F_{K_3} = \{K_4^-\}$, where $K_4^-$ is $K_4$ with one edge removed. Recall that $D_G^e$ is the maximum number of edge-disjoint copies of $G$ in $\mathbb G(n,p)$. Obviously, either $X_F \geq 1$ for some $F \in \mathcal F_G$ or $D_G^e = X_G$, therefore
  \begin{equation}
    \prob{X_G \geq t\mu_G} \leq \prob{D_G^e \geq t\mu_G} + \sum_{F \in \mathcal F_G} \prob{X_F \geq 1}.
    \label{star}
  \end{equation}
  Clearly, it suffices to show that all probabilities on the right-hand side of \eqref{star} are bounded by $\exp \left\{ -\Omega_t(\mu_G) \right\}.$
  Applying \eqref{janson} with $\eps = t-1$, we get	
  $$\prob{D_G^e \geq t\mu_G} \leq \exp \left\{ -\Omega_t(\mu_G) \right\}.$$
 We bound the remaining terms in \eqref{star} using Markov's inequality:
    $$\prob{X_F \geq 1} \leq \mu_F \leq \Psi_F.$$
    If $F = G_1 \cup G_2 \in \mathcal F_G$ and $H = G_1 \cap G_2$, then, by the log-modularity \eqref{modularity}, $\Psi_F = \Psi_G^2 /\Psi_H.$ 
    %We have $\Psi_G \leq \ln n$. 
    By Proposition \ref{psi}.(ii) we have that $\Psi_H \geq n^c$ for some constant $c > 0$. Moreover, by the assumption, $\Psi_G \leq \ln n$. Therefore
    $$\ln \Psi_F \leq  2 \ln \ln n - c \ln n  = -\Omega(\ln n).$$
    Finally, since $\mu_G \leq \Psi_G \leq \ln n$, we get
    $$\prob{X_F \geq 1} \le \Psi_F \le \exp\{-\Omega(\ln n)\}\le \exp\left\{ -\Omega(\mu_G) \right\}.$$
\end{proof}

\begin{proof}[Proof of \eqref{lower}]
  Let $N(F,H)$ be the number of copies of a graph $H$ in another graph $F$. Let~$xG$ stand for a union of $x$ vertex-disjoint copies of $G$. Let 
  $$D_G^v = \max \left\{ x : \mathbb G(n,p) \text{ contains } xG \right\}$$
  be the size of a largest collection of vertex disjoint copies.
  Writing $x = \lceil t\mu_G \rceil$, we have
  \begin{equation}\label{bulve}
    \prob{X_G \geq t\mu_G} \geq \prob{X_G = D_G^v = x}.
  \end{equation}
  Let $\mathcal{G}$ be the set of all copies of $G$ in $K_n$. Consider the  family 
  $$\mathbf{F} = \{\Sc \subset \mathcal{G} : |\Sc| = x, \text{ copies in $\Sc$ are vertex disjoint}\}.$$
  For every $\mathcal S \in \mathbf{F}$, we define events
  $$R_{\mathcal{S}} = \{\mathcal{S} \text{ is the set of all copies of } G \text{ in } \mathbb G(n,p)\} ,$$
  $$Q_{\mathcal{S}} = \{\text{every copy } S \in \mathcal{S} \text{ appears in } \mathbb G(n,p)\}.$$
  By Proposition \ref{psi}.(i), $G$ is connected, so the events $\{ R_\mathcal{S} \}_{\Sc \in \mathbf{F}}$ are mutually exclusive. Therefore
  \begin{equation}
    \label{A}
    \prob{ X_G = D_G^v = x} = \sum_{\mathcal{S} \in \mathbf{F}} \prob{R_{\mathcal{S}}}.
  \end{equation}
  %There are $(n)_{v_G}/\aut(G)$ ways to choose a copy $G$, where 
  Let $\aut(G)$ stand for the number of automorphisms of $G$. Then
%  The number of sequences of $x$ vertex-disjoint copies of $G$ is $(n)_{v_Gx}/\aut(G)^x$, because after $k$ copies have been chosen, there are there are $(n-kv_G)_{v_G}/\aut(G)$ ways to choose one more copy. Since we ignore the order of copies inside each set $\Sc \in \mathbf F$, it follows that
  %Given a set of vertices $U$ with $|U| = u$ there are ${(u)_{v_G}}/{\aut(G)}$ copies of $G$ with vertices in $U$, therefore we have 
    \begin{align*}
      |\mathbf{F}| = 
     % &\frac 1 {x!} \cdot \frac{(n)_{v_G}}{\aut(G)} \frac{(n-v_G )_{v_G}}{\aut(G)}\dots \frac{(n-v_G(x-1))}{\aut(G)} = 
     \frac{(n)_{v_G x}}{\aut(G)^x x!} \geq \frac {(n)_{v_Gx}}{\aut(G)^x x^x}.
    \end{align*}
    Using a standard inequality $(n)_m \geq \left( n/e \right)^m$, we get
    \begin{equation}
      \label{B}
      |\mathbf{F}| \geq \frac {n^{v_G x} }{e^{v_G x}\aut(G)^x x^x} = \frac{n^{v_G x}}{x^x} \exp \{-\Theta(x)\}.
    \end{equation}
    Note that 
    \begin{equation}\label{eq:RSc}
      \prob{R_\Sc} = \prob{R_\Sc | Q_\Sc}\prob{Q_\Sc} = \prob{R_\Sc | Q_\Sc} p^{e_G x}.
    \end{equation}
    Also, by symmetry the probability $q := \prob{R_\Sc | Q_\Sc}$ is independent of $\Sc$. Therefore from \eqref{A}, \eqref{B}, and \eqref{eq:RSc} we infer that
    \begin{equation}
      \prob{X_G  = D_G^v = x} \geq \frac{(n^{v_G}p^{e_G})^x q }{x^x} \exp\{-\Theta(x)\}.
      \label{C1}
    \end{equation}
    By the assumption that $p \geq n^{-1/m_G}$, we have $\Psi_G = n^{v_G}p^{e_G} \asymp x/t$, so \eqref{C1} implies  
    \begin{equation}
      \prob{X_G  = D_G^v = x} \geq q \exp \{- \Theta_t(\Psi_G)\}.
      \label{C}
    \end{equation}
    Fix $\Sc\in \mathbf F$ and let $F = \cup_{S \in \Sc} S$. Let $\mathbb G_\Sc(n,p)$ be the random graph $\mathbb G(n,p)$ conditioned on $Q_\Sc$, i.e., the random graph obtained by adding to $F$ each of the remaining $\binom n 2 - e_G x$ edges with probability $p$, independently of others. 
   % Define events 
   % $$A(G') = \{G' \nsubseteq \mathbb G_\Sc(n,p)\}, \qquad G' \in \mathcal G.$$
   % Then
   We have 
    \begin{equation}
      q =
      %\prob{R_\Sc|Q_\Sc} = 
      \Prob\Big\{\bigcap_{G' \in \mathcal{G} \setminus \Sc} \{G' \nsubseteq \mathbb G_\Sc(n,p)\} \Big\}.
      \label{vienas}
    \end{equation}
    Notice that each of the events $\{G' \nsubseteq \mathbb G_\Sc(n,p)\}$ is decreasing.
    %, i.e., if $\omega_1 \supseteq \omega_2$ are two instances of $\mathbb G_\Sc(n,p)$, then $\omega_1 \in A(G')$ implies $\omega_2 \in A(G')$. 
    Therefore the right-hand side of \eqref{vienas} can be bounded using the FKG inequality (see, e.g., \cite[Theorem 2.12]{JLR}; the set $\Gamma$ to which this theorem refers is $E(K_n) \setminus E(F)$ in the present context). Thus we obtain
    \begin{equation}
      q %\prob{R_\Sc | Q_\Sc} 
      \geq \prod_{G' \in \mathcal G \setminus \Sc } \prob{G' \nsubseteq \mathbb G_\Sc(n,p)} .
      \label{du}
    \end{equation}
    Given $H \subsetneq G$, the number of copies $G' \in \mathcal{G} \setminus \Sc$ whose intersection with $F$ is a copy of $H$ is at most $N(F,H)(n-v_H)_{v_G - v_H} = N(xG,H) \frac{(n)_{v_G}}{(n)_{v_H}}$. 
The probability that such a copy $G'$ does not exist in $\mathbb G_\Sc(n,p)$  is 
    $$1 - p^{e_G - e_H} \geq \exp \left\{-\frac{p^{e_G - e_H}}{1 - p^{e_G - e_H}}  \right\} \geq \exp \left\{- c_p \frac{p^{e_G}}{p^{e_H}}  \right\},$$
 where $c_p = 1/(1-p)$. Hence \eqref{du} implies 
    \begin{align}
      q &\geq \prod_{H \subsetneq G} \prod_{G' \cap F \cong H} \exp \left\{-c_p  \frac{p^{e_G}}{p^{e_H}} \right\} \notag \\
      &\geq \prod_{H \subsetneq G} \exp \left\{ -c_p N(xG,H) \cdot \frac{(n)_{v_G}p^{e_G}}{(n)_{v_H}p^{e_H}}  \right\}. \label{tarpas} 
    \end{align}
The assumption $\Psi_G \leq \ln^a n$ is equivalent to 
\begin{equation}
  p \leq n^{-1/m_G} (\ln n)^{a/e_G}. 
  \label{pupper2}
\end{equation}
By \eqref{pupper2}, we have $p = o(1)$, therefore $c_p \to 1$. Also, $(n)_{v_G}p^{e_G} \asymp \Psi_G$ and $(n)_{v_H}p^{e_H} \asymp \Psi_H$. Hence \eqref{tarpas} gives that

    \begin{equation}
      q \geq \exp \left\{ - \Theta\left( \Psi_G \max_{H \subsetneq G} \frac{N(xG,H)}{\Psi_H} \right) \right\}. \label{trys}
    \end{equation}
    If $H$ is empty (i.e., has no vertices), then $N(xG, H) = 1 = \Psi_H$. Hence the maximum in~\eqref{trys} is at least $1$. Therefore from \eqref{bulve}, \eqref{C}, and \eqref{trys} we get
\begin{equation}
      \prob{X_G \geq t\mu_G} \geq \exp \left\{ -O_t \left( \Psi_G\max_{H \subsetneq G}\frac{N(xG,H)}{\Psi_H} \right) \right\}.
      \label{disjoint}
    \end{equation}
    To finish the proof, it is enough to show that for $H \subsetneq G$ we have $N(xG,H) = O_t(\Psi_H)$. Indeed, then $\Psi_G \asymp \mu_G$ and \eqref{disjoint} imply \eqref{lower}.
As we have seen, if $H$ is the empty graph, then $N(xG,H) = \Psi_H$. Let us further assume that $v_H > 0$. 
By \eqref{pupper2} and Proposition \ref{psi}.(iii), the inequality $\Psi_G \leq \Psi_H$ holds for $n$ large enough, i.e., $\Psi_G = O(\Psi_H)$.
    If $H$ is connected, then any copy of $H$ counted in $N(xG,H)$ must lie entirely in one of the $x$ copies of $G$. Hence $N(xG, H) = x N(G, H) \asymp_t \Psi_G = O_t(\Psi_H)$.
    If $H$ is not connected, then let $H_1, \dots, H_c$ be the connected components of $H$. Since each component $H_i$ has $x N(G, H_i)$ copies in $xG$,
    $$N(xG, H) \leq x^c \prod_{i=1}^cN(G, H_i) \asymp_t \Psi_G^c.$$
    On the other hand, by log-modularity, $\Psi_H = \prod_i \Psi_{H_i} = \Omega(\Psi_G^c)$ and therefore $N(xG, H) = O_t(\Psi_H)$.
\end{proof}


\section{The Proof of Theorem \ref{thm2}}\label{sec:pfthm2star}
Note that the conditions on $p$ imply $\ln (1/p) \asymp \ln n$. Therefore, in view of Remark \ref{rem}, the lower bounds are given by \eqref{JOR} and \eqref{lower}. So it remains  to prove the upper bounds.
  
The proof is presented as follows. We start with an argument, which works for any graph $G$. Then we finish the proof for $G = C_4$. Since the proof for $G = K_4$ is very similar, we just mention the differences. Finally, we point out the changes that need to be made to the proof for $C_4$ in order to prove the result for stars $K_{1,k}$.

  Let $L$ be the random intersection graph, the vertices of which are the copies of $G$ in $\mathbb G(n,p)$ and two vertices are connected by an edge if the corresponding copies have an edge in common.  
  An easy graph-theoretic result, appearing implicitly in Spencer \cite{spencer} (see, e.g., Janson \cite{janson} for the proof) states that for any graph $L$
  \begin{equation}\label{spen}
    v_L \leq \alpha_L + 2\beta_L\Delta_L,
  \end{equation}
  where $\alpha_L$ is the independence number of $L$, $\beta_L$ is the size of a largest induced matching in $L$, and, recall, $\Delta_L$ is the maximum degree of $L$.

 In our setting, $v_L=X_G$ and $\alpha_L = D_G^e$, while $\beta_L \leq \sum_{F \in \mathcal F_G} D_F^e$, where $\mathcal F_G$ is the set of all graphs formed by a union of two distinct edge-intersecting copies of $G$, as defined in the proof of Theorem \ref{thm}.   

  For fixed vertices $u$ and $v$, let $X_G^{uv}$ be the number of copies of $G$ containing the edge~$uv$. Then $\Delta_L \leq e_G \max_{uv} X_G^{uv}$, where the maximum is taken over all edges of $K_n$. Clearly, the distribution of $X_G^{uv}$ does not depend on $uv$. Therefore, when the choice of $uv$ does not matter, we write $X_G^{(2)}$ instead.
 
In view of the observations above, \eqref{spen} implies 
\begin{equation}\label{spen2}
  X_G \leq D_G^e + \sum_{F \in \mathcal F_G} D_F^e \cdot 2 e_G \max_{uv} X_G^{uv}. 
\end{equation}
  Let $\delta$ be such that $t = 1 + \delta(1 + 2e_G|\mathcal F_G|)$.
  Then, by \eqref{spen2}, for every $d > 0$,
  \begin{multline}
    \label{triple}
    \Prob\{X_G \geq t\mu_G\} \leq \prob{D_G^e \geq (1 + \delta)\mu_G} \\
    + \sum_{F \in \mathcal F_G} \prob{ D_F^e \geq \frac{\delta\mu_G}{d}} + \binom n 2 \prob{X_G^{(2)} > d}.    
  \end{multline}

  \begin{proof}[Proof of Theorem \ref{thm2}.\ref{thm2b}]
 % In the rest of the proof we assume $G = C_4$ (
  %Clearly $\Psi_{C_4} = n^4p^4$ and 
  From \cite{JOR} we know that 
  \begin{equation}\label{emstar}
    M^*_{C_4} \asymp n^2p^2 = \Psi_{C_4}^{1/2}.
  \end{equation}
  %Since both $G=K_4$ and $G = C_4$ are regular, they satisfy $M^*_G \asymp n^2p^{\Delta_G}$ (see \cite{JOR}), so for both graphs $M^*_{G} \asymp \Psi_G^{1/2}$. 
  Hence our aim is to prove that the right-hand side of \eqref{triple} is bounded by 
  %$\exp \left\{ -\Omega_{\delta,\gamma}\left( \min \left\{ n^4p^4, n^2p^2 \ln n \right\} \right) \right\}$.
  $$\exp \left\{ -\Omega_{\delta,\gamma}\left( \min \left\{ \Psi_{C_4}, \Psi_{C_4}^{1/2} \ln n \right\} \right) \right\}.$$
  %$\exp\{-\Omega_{\delta,\gamma} (\Psi_G) \}$ when $\Psi_G \leq \ln^2 n$, and by $\exp \{ -\Omega_{\delta,\gamma} (\Psi_G^{1/2}\ln n) \}$, when $\Psi_G > \ln^2 n$. 
  By inequality \eqref{janson}, the first term in \eqref{triple} is at most 
  $$\exp \{-\Omega_\delta(\mu_{C_4})\} \le \exp\{-\Omega_{\delta}(\min\{\psic, \psic^{1/2} \ln n\})\}.$$
  %To bound the second term in \eqref{triple}, for each $F \in \mathcal F_G$ we apply \eqref{chernoff}:
  To bound the second term in \eqref{triple}, we apply the following slightly weaker but more convenient form of \eqref{janson1}. Noting that $\fee(\eps) \geq (1 + \eps) \ln \frac{1+\eps}{\e}$ and using the substitution $x = (1 + \eps)\mu_G$, we get the inequality
  \begin{equation}
    \prob{D_G^e \geq x} \leq \exp \left\{ - x \ln \frac{x}{\e\mu_G} \right\}, \qquad \e = 2.71\dots.
    \label{chernoff}
  \end{equation}
  Hence, for $F \in \mathcal F_{C_4}$, by \eqref{chernoff},
%  $$\prob{D_F^e \geq \frac{\delta\mu_G}{d}} \leq \exp \left\{ -\frac{\delta\mu_G}{d}\ln \frac{\delta \mu_G}{\e d \mu_F}  \right\}.$$
  $$\prob{D_F^e \geq \frac{\delta\mu_{C_4}}{d}} \leq \exp \left\{ -\frac{\delta\mu_{C_4}}{d}\ln \frac{\delta \mu_{C_4}}{\e d \mu_F}  \right\}.$$
  %Set $d = \max \left\{ \ln n, \Psi_G^{1/2} \right\}$. 
  Put
  $$d = \max \{ \ln n, \psic^{1/2} \} = \max \left\{ \ln n, n^2p^2 \right\}.$$
  Then
  \begin{equation}\label{obuolys}
    \prob{D_F^e \geq \frac{\delta\mu_{C_4}}{d}} \leq \exp \left\{ -\Omega_\delta \left( \min \left\{ \frac{\psic}{\ln n}, \psic^{1/2} \right\} \ln \frac{\psic}{d\Psi_F} \right) \right\}.
  \end{equation}
  %It suffices to show that for each $F$ we have  $\mu_G/(d\mu_F) = \Omega(n^c)$ for some $c>0$, probably depending on $\gamma$.
  It remains to check that the logarithmic factor in \eqref{obuolys} is of order $\ln n$. Note that by log-modularity \eqref{modularity}, $\psic/\Psi_F = \Psi_H / \psic$, where $H$ is the intersection of the two copies of~${C_4}$ that make up $F$. Hence, it suffices to show that $\Psi_H / (d\psic) = \Omega(n^c)$ for some $c>0$, probably depending on $\gamma$. Consider two cases.

Case (i): $\Psi_{C_4} \leq \ln^2 n$. Then $d = \ln n$. By Proposition \ref{psi}.(ii), we have that $\Psi_H$ is at least some positive power of $n$. Therefore 
$$\frac{\Psi_H}{d \Psi_{C_4}} \geq \frac{\Psi_H}{\ln^3 n} = \Omega(n^c), \quad c > 0.$$

  Case (ii): $\Psi_{C_4} > \ln^2 n$. Then $d = \psic^{1/2}$, and so 
  \begin{equation}
    \label{bamba}
    \frac {\Psi_H}{d\psic} = \frac{\Psi_H}{\Psi_{C_4}^{3/2}} = n^{v_H - 6} p^{e_H - 6}.
  \end{equation}
 Restriction $p \leq n^{-4/5 - \gamma}$ implies that the right-hand side of \eqref{bamba} is at least $n^{b(H)}$ with
%$c(K_4,H) = v_H - 6 + (1/2 + \gamma)(9 - e_H)$, and 
\begin{equation}\label{cH}
  b(H) = v_H - 6 + (4/5 + \gamma)(6 - e_H). 
\end{equation}
Note that $H$ is a proper subgraph with at least one edge. Observe that $b(H) \geq b(H')$, where $H'$ is a proper subgraph of $G$ on $v_H$ vertices and with maximum number of edges. Checking that $H' = K_2, P_3, P_4$ give corresponding values $b(H') = 5\gamma, 1/5 + 4\gamma, 2/5 + 3\gamma$, we obtain that $\Psi_H/(d\psic) \ge n^c$, where $c = \min \left\{ 5\gamma, 2/5 + 3\gamma \right\} > 0$, thus concluding Case~(ii).

Concerning the third term in \eqref{triple}, we use the proof from \cite[Example 6.3]{JR} which, by a double application of the Chernoff bound, shows that for every $m > 0$,
%\begin{equation}	
%  \prob{X_{K_4}^2 \geq d} \leq \prob{\Bi(n-2, p^2) \geq m} + \prob{\Bi(\binom{\lfloor m \rfloor }{2}, p) \geq d },
%  \label{degreeK}
%\end{equation}
%\begin{equation}
%  \prob{X_{C_4}^2 \geq d} \leq \prob{\Bi(n-2, 2p) \geq m} + \prob{\Bi(\binom{\lfloor m \rfloor }{2}, p) \geq \frac d 2 }.
%  \label{degreeC}
%\end{equation}
%Choose $m = d \ln n$. 
%Applying to \eqref{degreeC} Chernoff's bound in the form \eqref{chernoff}, we get
%\begin{equation}
%  \prob{X_{C_4}^{(2)} \geq d} \leq \exp \left\{ -d\ln n \cdot \ln \left( \frac{d \ln n}{2\e np} \right) \right\} + \exp \left\{ - \frac d 2 \ln \left( \frac{2}{\e dp \ln^2 n} \right) \right\}.
%  \label{degreeCs}
%\end{equation}
%\begin{equation}
%  \prob{X_{K_4}^{(2)} \geq d} \leq \exp \left\{ -m \ln \left( \frac{m}{\e np^2} \right) \right\} + \exp \left\{ - d \ln \left( \frac{2d}{\e m^2p} \right) \right\},
%  \label{degreeKi}
%\end{equation}
\begin{equation}
  \prob{X_{C_4}^{(2)} > d} \leq \exp \left\{ -m \ln \left( \frac{m}{2\e np} \right) \right\} + \exp \left\{ - \frac d 2 \ln \left( \frac{d}{\e m^2p} \right) \right\}.
  \label{degreeCi}
\end{equation}
Recall that $d = \max\{ \ln n, \psic^{1/2} \}$. Set $m = d \ln n$. Hence
$$d = \max\{\ln n, n^2p^2\}, \qquad \text{and} \qquad m = \max \left\{ \ln^2 n, n^2p^2 \ln n \right\}.$$
Note that in 
%both \eqref{degreeKi} and 
\eqref{degreeCi} the logarithmic factor in the first term is $\Omega(1)$, while the one in the second term is $\Omega_\gamma(\ln n)$. This is indeed so, since 
%for $K_4$ we have
%$$\frac m {np^2} = \max \left\{ \frac{\ln^2 n}{np^2}, np\ln n \right\} \geq  \left( \frac{\ln^3 n} {p} \right)^{1/2}\geq n^{1/4 + \gamma/2} \ln^{3/2} n ,$$
%%where the first inequality holds because maximum is at least the geometric mean, and
%$$\frac{d}{m^2p} = \frac{1}{dp \ln^2 n} \geq  \min \left\{ \frac{1 }{p\ln^3 n}, \frac{1}{n^2p^4 \ln^2 n} \right\} \geq \min \left\{ \frac {n^{1/2+\gamma}}{\ln^3 n}, \frac{n^{4\gamma}}{\ln^2 n} \right\}.$$
%While for $C_4$ we have
$$\frac m {np} = \max \left\{ \frac{\ln^2 n}{np}, np \ln n \right\} \geq  \ln^{3/2} n, \qquad \text{and}$$
$$\frac{d}{m^2p} = \frac{1}{dp \ln^2 n} \geq  \min \left\{ \frac{1 }{p\ln^3 n}, \frac{1}{n^2p^3 \ln^2 n} \right\} \geq \min \left\{ \frac{n^{4/5+\gamma}}{\ln^3 n}, \frac{n^{2/5+3\gamma}}{\ln^2 n} \right\}.$$
Thus, \eqref{degreeCi} implies that
%for both $K_4$ and $C_4$
$$\prob{X_{C_4}^{(2)} \geq d} \leq \exp \left\{ -\Omega_\gamma\left( \max\left\{ \ln^2 n, \psic^{1/2} \ln n \right\} \right) \right\}.$$
We can ignore the factor $\binom n 2$ in \eqref{triple}, since it contributes to the exponent only an additive term $O(\ln n)$. This completes the proof, since 
  \begin{equation*}
     \max \{ \ln^2 n, \psic^{1/2} \ln n \} \ge \min \{ \psic, \psic^{1/2} \ln n \}.
  \end{equation*}
\end{proof}
\begin{proof}[Proof of Theorem \ref{thm2}.\ref{thm2a}]
  The proof is very similar to that of Theorem \ref{thm2}.\ref{thm2b}. One needs to replace $n^2p^2$ by $n^2p^3$ in \eqref{emstar}, and $p^{e_H-6}$ by $p^{e_H - 9}$ in \eqref{bamba}. Also we use the restriction $p \leq n^{-1/2-\gamma}$, so \eqref{cH} becomes $b(H) = v_H - 6 + (1/2 + \gamma)(9-e_H)$. In the present proof we need to check $b(H') > 0$ for $H' = K_2, K_3, K_4^-$.
  The proof from \cite[Example 6.2]{JR} gives that
  \begin{equation}\label{degreeKi}
  \prob{X_{K_4}^{(2)} > d} \leq \exp \left\{ -m \ln \left( \frac{m}{\e np^2} \right) \right\} + \exp \left\{ - d \ln \left( \frac{2d}{\e m^2p} \right) \right\}.
\end{equation}
Just like in the proof for $C_4$, we set $d = \max \left\{ \ln n, \Psi_{K_4}^{1/2} \right\}$ and $m = d \ln n$, so
$$d = \max \left\{ \ln n, n^2p^3 \right\} \qquad \text{and} \qquad m = \max \left\{ \ln^2 n, n^2p^3 \ln n \right\}. $$
Further, we note that the logarithmic factors in the exponents of \eqref{degreeKi} are $\Omega(1)$ and $\Omega_\gamma(\ln n)$, respectively, because 
$$\frac m {np^2} = \max \left\{ \frac{\ln^2 n}{np^2}, np\ln n \right\} \geq  \left( \frac{\ln^3 n} {p} \right)^{1/2}\geq \ln^{3/2} n,$$
and
%where the first inequality holds because maximum is at least the geometric mean, and
$$\frac{d}{m^2p} = \frac{1}{dp \ln^2 n} \geq  \min \left\{ \frac{1 }{p\ln^3 n}, \frac{1}{n^2p^4 \ln^2 n} \right\} \geq \min \left\{ \frac {n^{1/2+\gamma}}{\ln^3 n}, \frac{n^{4\gamma}}{\ln^2 n} \right\}.$$
Finally, we finish the proof precisely as in the case $G = C_4$.
\end{proof}
%In the present proof we get a better bound, because for large $p$ we have $d = M^*_G$, while the corresponding value in \cite{JR} was of order $M^*_G (\ln n)^{-1/2}$.
%where $\Bi(n,p)$ stands for a binomial random variable. We apply to each probability on the right hand side of \eqref{degreeK} and \eqref{degreeC} the Chernoff bound 
\begin{proof}[Proof of Theorem \ref{thm2}.\ref{thm2c}]
  %The approach of Theorem \ref{thm2} works also for any $k$-armed star $G=K_{1,k}$. 
  From \cite[Corollary 1.8]{JOR} we know that in the given range of $p$ we have $M^*_K \asymp \Psi_K^{1/k}$.

  The argument is, again, similar to that of Theorem \ref{thm2}.\ref{thm2b}. 
  %The beginning that is true for general $G$ remains unchanged. 
  At the appropriate step we set $d = \max \left\{ \ln n, \Psi_K^{(k-1)/k} \right\}$.  
  Then we apply \eqref{chernoff} to obtain a bound analogous to \eqref{obuolys}. To show that $\Psi_H/(d\Psi_K) = \Omega(n^c)$, we notice that the intersection $H$ of two distinct copies of $K$ is a star with at most $k-1$ arms and use the condition $p \leq n^{-\theta - \gamma}$. 
  
  Further we observe that the number of $k$-stars containing a particular edge $uv$ is at most $\binom {\deg(u)} {k-1} + \binom {\deg(v)} {k-1}$. Therefore, writing $\Bi(n,p)$ for a binomial random variable, we get that
  $$\prob{X_{K}^{(2)} \geq d} \leq 2\prob{ \binom {\Bi(n-2,p)}{k-1} \geq d/2 }.$$
  Then the Chernoff bound yields 
  \begin{equation}\label{citrina}
    \prob{X_{K}^{(2)} > d} \leq \exp \left\{ - \Omega\left(d^{1/ (k-1)} \ln \frac{d^{ 1/(k-1)}}{\e np} \right) \right\}. 
  \end{equation}
  It is easy to check that the logarithmic factor in the exponent in \eqref{citrina} is $\Omega(\ln n)$, whence the order of the exponent is 
  %$\max\left\{ (\ln n)^{1 + 1/(k-1)}, \Psi_K^{1/k} \ln n \right\}$. 
  %So the factor $\binom n 2$ in \eqref{triple} can be ignored. 
  %The proof is finished, because
  $$\max\left\{ (\ln n)^{1 + \frac{1}{k-1}}, \Psi_K^{1/k} \ln n \right\} \geq \min \left\{ \Psi_K, \Psi_K^{1/k} \ln n \right\} \asymp \min \left\{ \Psi_K, M^*_K \ln n \right\} .$$
\end{proof}

\section{Further Remarks}
\begin{remark}\label{rem1}
  Let $G$ be strictly balanced. From the proof of \eqref{lower} one can see that a lower bound $\exp \left\{ -\Theta_t(\mu_G) \right\}$ holds, as long as $\Psi_G = \min_{H \subseteq G} \Psi_H$ and $v_G t \mu_G \leq n$. However, this bound is better than the lower bound in \eqref{JOR} only when $\Psi_G \leq \ln^a n$. More precisely, 
      if $\Psi_G \leq \ln^a n$, then
      \begin{equation}
	\Psi_G =  O_t ( M^*_G \ln (1/p)),
	\label{asymp}
      \end{equation}
  while if $\Psi_G \gg \ln^a n$, then 
      \begin{equation}
	\Psi_G  \gg M^*_G \ln (1/p).
	\label{gg}
      \end{equation}
      Let us prove \eqref{asymp} and \eqref{gg}. The restriction $v_G t\mu_G \leq n$ implies 
  \begin{equation}
    p = O_t(n^{-(v_G - 1)/e_G}).
    \label{pupper}
  \end{equation}
  The assumption $p \geq n^{-1/m_G}$ and \eqref{pupper} clearly imply $\ln (1/p) \asymp_t \ln n$. So it is enough to prove \eqref{asymp} and \eqref{gg} with $\ln n$ instead of $\ln (1/p)$.
  
  The assumption $\Psi_G \leq \ln^a n$ is equivalent to $\Psi_G \leq \Psi_G^{1/\al_G}\ln n.$
  On the other hand, by Proposition \ref{psi}.(iv), $\Psi_G^{1/\al_G} \asymp M_G^*$, so \eqref{asymp} holds. 

Since $1/\Delta_G \leq v_G/(2e_G) \leq (v_G -1)/e_G$, inequality \eqref{pupper} gives that $p = O_t(n^{-1/\Delta_G})$. Hence from \eqref{Mg} we have that 
$$M^*_G \asymp_t \min_{H \subseteq G} \Psi_H^{1/\alpha^*_H} .$$
Thus, the right-hand side of \eqref{gg} is $O_t\left(\Psi_G^{1/\al_G} \ln n\right)$. 
Since $\Psi_G \gg \ln^a n$ is equivalent to $\Psi_G \gg \Psi_G^{1/\al_G} \ln n$, we conclude that \eqref{gg} holds.

%  To see this, note first that the restriction $t\mu_G \le n $ implies 
%  \begin{equation}\label{eq:pOt}
%    p = O_t(n^{-(v_G-1)/e_G}).
%  \end{equation}
%  Recall that we assume $p \geq n^{-1/m_G}$. This, together with \eqref{eq:pOt} implies that 
%  \begin{equation}\label{eq:lnn}
%    \ln (1/p) \asymp_t \ln n. 
%  \end{equation}
%  From \eqref{Mg} , it is easy to see, that 
%  \begin{equation}\label{eq:pienas}
%    M^*_G \asymp \min_{H \subseteq G} \Psi_H^{1/\al_H} \le \Psi_G^{1/\al_G}.
%  \end{equation}
%  Suppose that 
%  \begin{equation}\label{eq:more}
%    \Psi_G > \ln^a n.
%  \end{equation}
%  Inequality \eqref{eq:more} is equivalent to $\Psi_G > \Psi_G^{1/\al_G} \ln n$. So, from \eqref{eq:pienas} and \eqref{eq:lnn} we get that
%  $$\mu_G \asymp \Psi_G > \Psi_G^{1/\al_G} \ln n = \Omega_t(M_G^* \ln (1/p)),$$
%  which means that the lower bound $\exp \left\{ -\Theta_t(\mu_G) \right\}$ does not improve the old lower bound if \eqref{eq:more} holds. 
%  On the other hand, if \eqref{eq:more} does not hold, then $p \leq n^{-v_G/e_G} (\ln n)^{a/e_G}$. Moreover, $G$ is strictly balanced, so by Proposition \ref{psi} we have $M_G^* \asymp \Psi_G^{1/\al_G}$. Hence
%  $$\mu \asymp \Psi_G \leq \Psi_G^{1/\al_G} \ln n \asymp_t M_G^* \ln (1/p).$$
\end{remark}
%Relation \eqref{gg} implies that for non-balanced graphs \eqref{disjoint} is never better than the lower bound in \eqref{JOR}. Indeed, when $p \geq n^{-1/m_G}$, we have $\Psi_G = \Omega(n^{\eps}) \gg \ln^a n$ and therefore \eqref{gg} holds.

\begin{remark}\label{rem2}
  Let $L$ be the intersection graph of copies of $G$, as defined in the proof of Theorem \ref{thm2}. In the proof of the upper bound in Theorem \ref{thm}, we implicitly use the trivial fact that if $L$ has no edges, then the largest independent set in $L$ is the whole vertex set. This implies the inequality~\eqref{star}. However, one can get a stronger conclusion by using the fact that if $L$ has few edges, then it has a large independent set. Indeed, by Tur\'an's Theorem applied to the complement of $L$ we get that
  $$D_G^e \geq \frac{X_G^2}{X_G + 2\sum_{F \in \mathcal F_G} X_F}.$$
  This gives an inequality
  \begin{equation*}
    \prob{X_G \geq t\mu_G} \leq \prob{D_G^e \geq (1 + \eps)\mu_G} + \sum_{F \in \mathcal F_G} \prob{X_F \geq \eps \mu_G},
  \end{equation*}
  with properly chosen $\eps= \eps(t) > 0$.
  Therefore it is of interest to find out in which range of $p$ the inequality
  \begin{equation}
    \label{expect}
    \prob {X_F \geq \eps \mu_G} \leq \exp \left\{ -\Omega_\eps(\mu_G) \right\}
  \end{equation}
  holds, since this might extend the upper bound in Theorem \ref{thm} to larger $p$ (hopefully, up to $n^{-1/m_G}(\ln n)^{a/e_G}$). As we have seen in the proof of Theorem~\ref{thm}, if $\Psi_G \leq \ln n$, then \eqref{expect} is given by Markov's inequality. It is plausible that a stronger inequality might hold. Note that since $\Psi_G$ is less than a power of $\ln n$, we have $\mu_F \ll 1$. In particular, $p$ is below the threshold of the existence of $F$.
\end{remark}
%\begin{remark}
%If $G$ is not strictly balanced, then consider the following observation based on the talk of J. Kahn in RSA 2011. Let $H \subset G$ be the strictly balanced subgraph, which attains minimum in the definition of $m_G$. Lemma \ref{lemma} gives that with probability at least $\exp \left\{ -O_t\left( \Psi_H \right) \right\}$ there are at least $2t \mu_H$ copies of $H$. Then by an approach in \cite{JOR} (see proof of Theorem 3.1), we can show that $t\mu_G$ of these copies of $H$ extend to copies of $G$ with probability $p^{\Theta(1)}$. Hence in certain range of $p$ we obtain a lower bound
%$$\prob{X_G \geq t\mu_G} \geq \exp \left\{ -O_t\left( \Psi_H \right) \right\}.$$
%This partially gives the lower bound of Conjecture \ref{conj}, since, by the choice of $H$, in a certain range of $p$ above threshold we have $\Phi_G \asymp \Psi_H$.
%\end{remark}
  \emph{Added in proof.} After the first version of the present paper was submitted, L. Warnke kindly pointed out to the author that \eqref{upper} is implied by Corollary~5.1 in \cite{vu}. The latter corollary is based on the polynomial concentration technique, a powerful tool which we do not use in our proof. 
\subsection*{Acknowledgements}
The author is grateful to his supervisor A. Ruci\'nski for useful discussions, helpful suggestions, and numerous corrections. He also thanks R. DeMarco for sending the preprint of his paper with J. Kahn and an anonymous referee for a careful reading and many valuable remarks.


\bibliographystyle{plain}

\begin{thebibliography}{10}

\bibitem{chatterjee}
Sourav Chatterjee.
\newblock The missing log in large deviations for triangle counts.
\newblock {\em Random Struct. Algorithms}, 2011.
\newblock doi: 10.1002/rsa.20381.

\bibitem{DK2}
Bobby DeMarco and Jeff Kahn.
\newblock Upper tails for cliques.
\newblock Preprint available from \texttt{http://arxiv.org/pdf/1111.6687v1},
  2011.

\bibitem{DK}
Bobby DeMarco and Jeff Kahn.
\newblock Upper tails for triangles.
\newblock {\em Random Struct. Algorithms}, 2011.
\newblock doi: 10.1002/rsa.20382.

\bibitem{janson}
Svante Janson.
\newblock Poisson approximation for large deviations.
\newblock {\em Random Struct. Algorithms}, 1(2):221--229, 1990.

\bibitem{JLR}
Svante Janson, Tomasz {\L}uczak, and Andrzej Ruci{\'n}ski.
\newblock {\em Random graphs}.
\newblock Wiley-Interscience Series in Discrete Mathematics and Optimization.
  Wiley-Interscience, New York, 2000.

\bibitem{JOR}
Svante Janson, Krzysztof Oleszkiewicz, and Andrzej Ruci{\'n}ski.
\newblock Upper tails for subgraph counts in random graphs.
\newblock {\em Israel J. Math.}, 142:61--92, 2004.

\bibitem{infamous}
Svante Janson and Andrzej Ruci{\'n}ski.
\newblock The infamous upper tail.
\newblock {\em Random Struct. Algorithms}, 20(3):317--342, 2002.

\bibitem{JR}
Svante Janson and Andrzej Ruci{\'n}ski.
\newblock The deletion method for upper tail estimates.
\newblock {\em Combinatorica}, 24(4):615--640, 2004.

\bibitem{spencer}
Joel Spencer.
\newblock Counting extensions.
\newblock {\em J. Combin. Theory Ser. A}, 55(2):247--255, 1990.

\bibitem{vu}
Van~H. Vu.
\newblock On the concentration of multi-variate polynomials with small
  expectation.
\newblock {\em Random Struct. Algorithms}, 16(4):344--363, 2000.

\end{thebibliography}


\end{document}
