\documentclass[12pt]{article}
\usepackage{e-jc}
\specs{P2.5}{24(2)}{2017}


\usepackage{amsmath,amsthm,amssymb}
\usepackage[colorlinks=true,citecolor=black,linkcolor=black,urlcolor=blue]{hyperref}
\newcommand{\arxiv}[1]{\href{http://arxiv.org/abs/#1}{\texttt{arXiv:#1}}}
\usepackage[utf8]{inputenc}
\usepackage{enumerate}


\theoremstyle{plain}
%\newtheorem{theorem}{Theorem}
\newtheorem{theorem}{Theorem}[section]
\newtheorem{lemma}[theorem]{Lemma}
\newtheorem{corollary}[theorem]{Corollary}
\newtheorem{proposition}[theorem]{Proposition}
\newtheorem{fact}[theorem]{Fact}
\newtheorem{observation}[theorem]{Observation}
\newtheorem{claim}[theorem]{Claim}

\theoremstyle{definition}
\newtheorem{definition}[theorem]{Definition}
\newtheorem{example}[theorem]{Example}
\newtheorem{conjecture}[theorem]{Conjecture}
\newtheorem{open}[theorem]{Open Problem}
\newtheorem{problem}[theorem]{Problem}
\newtheorem{question}[theorem]{Question}

\theoremstyle{remark}
\newtheorem{remark}[theorem]{Remark}
\newtheorem{note}[theorem]{Note}



\title{\bf Large cliques in sparse random intersection graphs}

\author{Mindaugas Bloznelis\thanks{Supported by the Research Council of Lithuania (MIP-052/2010, MIP-067/2013).}\\
\small Faculty of Mathematics and Informatics\\[-0.8ex]
\small Vilnius University\\[-0.8ex] 
\small Lithuania\\
\small\tt mindaugas.bloznelis@mif.vu.lt 
\and
 Valentas Kurauskas\footnotemark[1] \\ 
\small Institute of Mathematics and Informatics\footnote{The main contribution of the second author was made during PhD studies at the
 Faculty of Mathematics and Informatics, Vilnius University.}\\[-0.8ex]
\small Vilnius University\\[-0.8ex] 
\small Lithuania\\
\small\tt valentas@gmail.com
}

\date{\dateline{Jun 18, 2016}{Mar 28, 2017}{Apr 13, 2017}\\
\small Mathematics Subject Classifications: 05C80; 05C82; 05C85; 05C69}


\def\Prob#1{\mbox{{\rm Prob}}\,[#1]}
\def\E{{\mathbb E}\,}
\def\Pr{\mbox{{\rm Pr}}\,}
\let\eps=\epsilon
\def\enddiscard{}
\long\def\discard#1\enddiscard{}
\def\Po{\mbox{\rm Po}}
\newcommand{\pr}{\mathbb P}
\newcommand{\whp}{whp }

%\newcommand{\m}[1]{\marginpar{\tiny{#1}}}
%\newcommand{\m}[1]{}

\newcommand{\cp}{{\mathcal P}}

\newcommand{\ca}{{\mathcal A}}
\newcommand{\cb}{{\mathcal B}}
\newcommand{\cc}{{\mathcal C}}
\newcommand{\cs}{{\mathcal S}}
\newcommand{\cd}{{\mathcal D}}
\newcommand{\cf}{{\mathcal F}}
\newcommand{\cg}{{\mathcal G}}
\newcommand{\ch}{{\mathcal H}}
\newcommand{\ck}{{\mathcal K}}
\newcommand{\cm}{{\mathcal M}}
\newcommand{\ct}{{\mathcal T}}
\newcommand{\cu}{{\mathcal U}}
\newcommand{\cv}{{\mathcal V}}
\newcommand{\ii}{{\mathbb I}}
%\newcommand{\I}{{\mathbf I}}
\newcommand{\I}{{\mathbb I}}


\hyphenation{mono-chro-matic}

\begin{document}

\maketitle


\begin{abstract}

An intersection graph defines an adjacency relation between subsets  $S_1,\dots, S_n$  of a finite set  
$W=\{w_1,\dots, w_m\}$: the subsets $S_i$ and $S_j$ are 
adjacent if they intersect. Assuming that the subsets are drawn independently at random according to the probability distribution
$\mathbb{P}(S_i=A)=P(|A|){\tbinom{m}{|A|}}^{-1}$, $A\subseteq W$, where $P$ is a probability on $\{0, 1, \dots, m\}$,
we obtain the random intersection graph $G=G(n,m,P)$.


We establish  the asymptotic order of the clique number $\omega(G)$ 
 of  a sparse
 random intersection graph as $n,m\to+\infty$.
For $m = \Theta(n)$
we show that 
the maximum clique is of 
size
\[
(1-\alpha/2)^{-\alpha/2}n^{1-\alpha/2}(\ln n)^{-\alpha/2}(1+o_P(1))
\]
in the case where the asymptotic degree distribution of $G$ is a
power-law with exponent $\alpha \in (1,2)$. It is 
of 
size
$\frac {\ln n} {\ln \ln n}(1+o_P(1))$
if the degree distribution has bounded variance, i.e., $\alpha>2$. 
We construct a simple
polynomial-time algorithm which finds a clique of the optimal order 
$\omega(G) (1-o_P(1))$. 

\bigskip\noindent \textbf{Keywords:} clique; random intersection graph; greedy algorithm;
complex network; power-law; clustering
\end{abstract}



\section{Introduction}
\label{sec.intro}


Bianconi and Marsili \cite{bianc06}  observed 
that ``scale-free'' real networks can have 
very large cliques; they gave an argument suggesting that the clique number can grow polynomially with graph order $n$ if the degree variance is unbounded.
Using a more precise analysis, Janson, \L uczak and Norros \cite{jln10} 
established exact asymptotics 
for the clique number in a power-law random graph model where 
edge probabilities are proportional
to the product of weights of their endpoints.

  
Another 
feature of a real network that  
affects
formation of cliques
is the clustering property: the probability of a link between two randomly chosen vertices 
increases dramatically after we learn 
about the presence of their common neighbour \cite{WassermanFaust1994},
\cite{WattsStrogatz1998}.
An interesting question is about 
tracing the relation between the 
clustering property
and the clique number in a power-law network.   
This question is addressed in the present paper: we
show precise asymptotics  %\m{exact asymptotic} 
for the clique number of a related random intersection graph $G(n,m,P)$, 
which  admits a tunable clustering 
coefficient and  power-law degree distribution 
\cite{bloznelis12, bloznelis11, bloznelis08,  deijfen}. 
We find that 
 the effect of  
clustering  on the clique number only
shows up for the degree sequences having
finite variance. In particular, 
for a finite variance power-law (asymptotic) degree,  
the clique number 
 of $G(n,m,P)$ 
is $\frac{\ln n}{\ln \ln n}(1+o_P(1))$ while the clique number 
of the random graph of  \cite{jln10} is at most $4$. We note that the random graph
of  \cite{jln10}
 has conditionally independent edges and does not possess the clustering
property.
For a power-law  (asymptotic) degree with infinite 
variance the  clique number 
 asymptotics for both random graphs are the same.



In the language of hypergraphs, the question considered in the paper is about the size of  the largest intersecting 
family in a random hypergraph on the
vertex set $[m]$, 
where $n$ identically distributed 
hyperedges 
are of  
random sizes
distributed according to $P$. 

The paper is organized as follows. In this section we 
collect several facts about 
the
random intersection graph $G(n,m,P)$ and 
present our main results on the clique number asymptotics. 
Proofs of these results are given in Sections 2 and 3. In Section 4 we 
present and rigorously analyse algorithms for finding large cliques. 

{\it Random intersection graphs},
 first studied by Karo\'nski, Scheinerman and 
Singer-Cohen~\cite{karonski99}, 
are convenient models of affiliation networks, 
a class of social networks where
two actors are declared adjacent if they share some common attributes 
\cite{WassermanFaust1994}. 
We denote by $V=\{v_1,\dots, v_n\}$ the set of vertices (actors of the network). 
Vertices 
%of the random graph $G(n,m,P)$
 are represented by the collections
of attributes $S_1,\dots, S_n$ selected by actors
independently at random from the attribute set $W=\{w_1,\dots, w_m\}$
according to the probability distribution
$\pr(S_i=A)=P(|A|){\tbinom{m}{|A|}}^{-1}$, $A\subseteq W$. Here $P$ is the 
probability distribution of the sizes $X_i:=|S_i|$ of the attribute sets. 
%selected by actors.
We interpret $X_1,\dots, X_n$ as  weights modelling the actors' activity.
% We note that  every attribute $w_i\in W$ has equal chances 
%to be selected by an actor. 
This  
model has been introduced 
by Godehardt and Jaworski \cite{gj03}. 
It is  related to the random graph of \cite{jln10}: in both models, 
given the weights,
edge probabilities are (approximately) proportional to the product of 
weights of their endpoints. Consequently, their degree sequences 
% are defined 
%by the weights 
behave 
in a similar way. A distinctive feature of $G(n,m,P)$ is that it 
admits a nonvanishing tunable clustering coefficient, defined in (\ref{eq.cc}) below. Analysis and detection of large cliques in social networks is another source of motivation of our study.
 


Formally, we will consider a sequence $\{G(n)\}=\{G(n), n = 1, 2, \dots\}$ of 
random intersection graphs 
$G(n) = G(n, m, P)$, where $P = P(n)$ and $m = m(n)\to+\infty$ as $n\to+\infty$.
Let 
$X=X(n)$
denote a random variable distributed 
according to $P(n)$ and define 
$Y=Y(n) := \sqrt {\frac n m} X(n)$.
If 
not explicitly stated otherwise, the limits below will be taken as $n \to +\infty$. We use the standard notation $o()$, $O()$, $\Omega()$, $\Theta()$, $o_P()$, $O_P()$,
see, for example,~\cite{jlr}.
For positive sequences $(a_n)$, $(b_n)$ we write $a_n \sim b_n$ if $a_n/b_n \to 1$, 
%{\color{red}and write
$a_n\ll b_n$ if $a_n/b_n\to 0$.
%}
For a sequence of events $\{\ca_n\}$, 
we say that $\ca_n$ occurs with high probability  (\emph{whp}), 
if $\pr(\ca_n) \to 1$.
%\m{whp}
 
Assuming that $Y(n)$ converges in $L_1$ to some
 random variable $Y_0$ with $\E Y_0<\infty$ one can show that
the  degree of the typical vertex of $G(n)$,
equivalently, the degree $D_1 = D_1(n)$ of vertex $v_1 \in V$,
converges in distribution to a Poisson mixture: 
$\pr(D_1=k)\to \E e^{-\lambda}\lambda^k/k!$, $k=0,1,\dots$, see  \cite{bloznelis11}.
 Here
$\lambda=Y_0\E Y_0$ is a random variable. Since the Poisson distribution is
tightly concentrated around its mean, one can show that the asymptotic 
degree distribution
is power-law with an exponent $\alpha>1$ whenever the  distribution of  $Y_0$ is. 
For our purposes the weak convergence to a power law  is not 
sufficient as we need a tight control on the tail $\pr(Y(n)\ge k)$ for $k\approx n^{1/2}$. To this aim we introduce the following 
condition: 
 given $\alpha>0$ and slowly varying function $L$ there is $\eps_0>0$ such that for each sequence $\{x_n\}$ with
 $n^{0.5-\eps_0}\le x_n\le n^{0.5+\eps_0}$ we have
 \begin{equation}\label{eq.pldef2}
  \pr\left (Y(n) \ge x_n \right) \sim L(x_n) x_n^{-\alpha}.
\end{equation}
 Our first result concerns the power-law random intersection graph $G(n,m,P)$ with infinite degree variance. Its conditions are formulated in terms of parameters defining the random graph. 
 
 \begin{theorem} \label{thm.main} Let $m,n\to+\infty$. Let $1<\alpha<2$.
   Assume that  $\{G(n)\}$ is a sequence of random 
 intersection graphs. Suppose that $\{Y(n)\}$ satisfies condition (\ref{eq.pldef2}) and
 \begin{equation}\label{eq.Y}
    \E Y(n) = O(1).
\end{equation}
   Suppose that for some $\beta > \max \{2-\alpha, \alpha - 1\}$ 
we have $m = m(n) = \Omega (n^\beta)$.
   Then the clique number of $G(n)$ is
%{\color{red}
\begin{equation}\label{2013-09-17}
   \omega(G(n)) = \left(1 -  \alpha/2 \right) ^ {- \alpha/2} L\left( (n \ln n)^{1/2}\right) n ^ {1-\alpha / 2} (\ln n)^{-\alpha/2} (1+o_P(1)).
\end{equation}
\end{theorem}
 We note that  (\ref{eq.pldef2}) and (\ref{2013-09-17}) refer to the same function $L$. For $L\equiv 1$ we obtain Pareto tails. 
 We remark that condition (\ref{eq.pldef2}) can be replaced by 
 a related condition (\ref{eq.degheavytail}) on the tail of the degree distribution, see Lemma \ref{lem.degrees} below.
 



The asymptotics in (\ref{2013-09-17}) turn out to be the same as in the model of Janson, \L uczak and Norros \cite{jln10}
with corresponding parameters. %In the proof we establish matching upper an lower bounds. 
Let us mention that the
lower bound for
$\omega(G(n))$ follows by an argument of \cite{jln10}: a clique of order (\ref{2013-09-17}) is formed by vertices $v \in V$ with
largest set sizes $|S_v|$.
%, 
% which is not sensitive to the statistical dependence of edges of $G(n)$.
 To show a matching upper 
 %\m{buvo: lower}
 bound we developed
a method  
based on  a 
result of 
Alon, Jiang, Miller and Pritikin \cite{alon03}
in extremal combinatorics. Among several auxiliary combinatorial lemmas,  
Lemma~\ref{lem.DR} about distinct representatives 
%when a random set intersects a family of
%other sets 
may be of independent interest. 


In the case where the degree distribution has bounded variance, 
in addition to determining the asymptotic order of $\omega(G(n))$, we also describe the structure of  
a maximal clique. To this aim, 
%For this purpose 
it is convenient to interpret attributes 
$w\in W$ as colours.
The set of vertices $T_w=\{v\in V:\, w\in S_v\}$ induces a clique in $G(n)$ which
we denote also
$T_w$. We say that every edge
of $T_w$ receives colour $w$ and call this clique {\it monochromatic}. Note that
the edges of
$G(n)$ are covered by the union of monochromatic cliques  $T_w$, $w\in W$.
We denote the size of the largest monochromatic clique by $\omega'(G(n))$. Another way, out of many, in which a clique on $S \subseteq V$ can arise
is when $G(n)$ contains a \textit{rainbow} clique on $S$: for each pair of vertices $\{x,y\} \subseteq S$ we can assign
a different attribute $w = w_{\{x,y\}}$ such that $x,y \in T_w$.
The size of the largest rainbow clique in $G(n)$ will be denoted by $\omega_R(G(n))$.
%\m{buvo: $\le$}
 Clearly,
 $\omega(G(n))\ge \max \{\omega'(G(n)), \omega_R(G(n)) \}$.


%The next theorem shows that the largest clique
%is a monochromatic clique (plus possibly a few extra vertices). 
%
%
%
%
%******************************************************************************************
 \begin{theorem}\label{thm.finite} Let $m,n\to+\infty$.
  Assume that $\{G(n)\}$ is a sequence of 
  random intersection graphs satisfying (\ref{eq.Y}). Suppose  $Var Y(n) = O(1)$. 
  Then 
   \[
     \omega(G(n)) = \omega'(G(n)) + O_P(1).
    \]
    If, additionally, 
    %$m\to +\infty$ and 
    for some positive sequence $\{\eps_n\}$ converging to zero we have
    \begin{align} \label{eq.uni1}
        &n \pr(Y(n) > \eps_n n^{1/2}) \to 0,
  \end{align}
  then $\omega(G(n)) = \max \{\omega'(G(n)), \omega_R(G(n))\}$ and $\omega_R(G(n)) \le 3$ whp.
\end{theorem}
%***************************************************************************************
%
%
%
By Markov's inequality,
condition (\ref{eq.uni1}) 
is satisfied by a uniformly 
square integrable sequence $\{Y(n)\}$. 

Next, we state a result about the size of the largest monochromatic clique. For this purpose
we can relate the random intersection graph to the \textit{balls in bins} model. Let
every vertex $v\in V$ throw $|S_v|$ balls into the bins $w_1,\dots, w_m$ 
uniformly at random,
subject to the condition that every bin receives at most one ball from each vertex.
Then $\omega'(G(n))$ counts the maximum number of balls contained in a bin.
 Let $M(N, m)$ denote the maximum number of balls  contained in any of $m$ bins after
$N$ balls were thrown into $m$ bins uniformly and independently at random.
%The next result says that the probability distribution of $\omega'(G(n))$ can be approximated
%by that of $M(N, m)$, with $N\approx n\E X(n)=\E(X_1+\dots+X_n)$.
The asymptotics of $M(N, m)$ are well known, see, e.g., 
Section~6 of Kolchin et al~\cite{kolchin}.

Denote by $d_{TV}(\xi,\eta)= 2^{-1}\sum_{i\ge 0}|\pr(\xi =i) - \pr(\eta=i)|$ 
the \textit{total variation distance} between probability distributions
of non-negative integer valued random variables $\xi$ and $\eta$. 
%
%
%
%
%
%
%
%****************************************************************************************
\begin{lemma}\label{thm.omega.mono} 
Assume that  $\{G(n)\}$ is a sequence of 
random intersection graphs satisfying $m = \Omega(n)$, 
%\footnote{A small technical remark: Theorem~\ref{thm.omega.mono}
% does not make the assumption (\ref{eq.uni1}), but asks that $\E Y$ is 
%bounded away from zero.}
$\E Y(n) = \Theta(1)$ and $Var Y(n) = O(1)$.  Then
\[
d_{TV}(\omega'(G(n)), M(\lfloor n\E X (n) \rfloor, m)) \to 0.
\]
\end{lemma}
%*******************************************************************************************
The proof of Lemma~\ref{thm.omega.mono} is straightforward, but technical; it can be found in \cite{kurauskas}.
The case $m = o(n)$ is omitted, since:
%***************************************************************************

\begin{remark} \label{rmk.omega.mono} 
%{\color{red}
No sequence of random intersection graphs $\{G(n)\}$ satisfies
$m = o(n)$, $\E Y(n)$ $= \Theta(1)$ and $Var Y(n) = O(1)$ simultaneously.
%}
\end{remark}

\begin{proof} %{Remark~\ref{rmk.omega.mono}}
  Suppose the above relations are satisfied. % $m = o(n)$, $\E Y = \Theta(1)$ and $\E Y^2 = O(1)$.
  Since $X=X(n)$ is a non-negative integer,
  we have $\E X^2 \ge \E X$. But $\E X^2 = O(m/n)$ and $\E X = \Theta( (m/n)^{1/2})$, so
  $\E X^2 = o(\E X)$, a contradiction.
\end{proof}

\medskip

%
%****************************************************************************
%
% 
Let us discuss the relation between the clique number and the clustering coefficient. We recall
that the \textit{(global) clustering coefficient} of
a graph $G$ is the conditional probability
\begin{equation}\label{eq.cc}
 \pr(v_1^*\sim v_2^*|v_1^*\sim v_3^*, v_2^*\sim v_3^*),
\end{equation}
where $(v_1^*, v_2^*, v_3^*)$ is a triple of vertices of $G$ drawn uniformly at random and
$v_i^* \sim v_j^*$ is the event that $v_i^*$ and $v_j^*$ are adjacent in $G$.
%, i.e., $uv \in E(G(n))$.
It was shown in~\cite{bloznelis11}  that the clustering coefficient of $G(n,m,P)$ is approximately
equal to
\[
\frac {\E X(n)} {\E X(n)^2} = \frac {n^{1/2}\E Y(n)} {m^{1/2}\E Y^2(n)}.
\]
%%%
%%%Bloznelis \cite{bloznelis11} proved that assuming $Y(n)$ is uniformly square integrable,
%%%the conditional probability
%%%(called the \textit{clustering coefficient} of $G(n)$)
%%%\begin{equation}\label{2013-09-19}
%%%    \pr(v_1\sim v_2|v_1\sim v_3, v_2\sim v_3)\approx \frac {\E X(n)} {\E X(n)^2} = \frac {n^{1/2}\E Y(n)} {m^{1/2}\E Y^2(n)}
%%%\end{equation}
%%%%vanishes for $m/n\to+\infty$ and $\E Y^2(n)\to+\infty$, see \cite{bloznelis11}. 
%%%%%%It only
In particular, it
attains 
a non-trivial 
asymptotic %%%
value when $m=\Theta(n)$ and $\E Y^2(n)$ $=\Theta(1)$. 
%%%pakeitimo pabaiga
In the latter case 
 Theorem~\ref{thm.finite} and 
Lemma~\ref{thm.omega.mono}
 together with the asymptotics for $M(N,m)$ (Theorem~II.6.1 of \cite{kolchin}), 
imply 
that
\[
  \omega(G(n)) = \frac {\ln n} {\ln \ln n} \left( 1 + o_P(1) \right).
\]
It is here where the positive clustering coefficient comes into play: sparse random intersection graphs have
cliques of unbounded size even when $Var(D_1(n)) = \Theta(1)$, see Lemma~\ref{lem.uintequiv}(i). 
In contrast, the clique number of a sparse  Erd\H os-R\'enyi random graph $G(n,c/n)$ 
is at most $3$, and in the model of \cite{jln10}, with bounded degree variance,
 the largest clique whp has at most 4 vertices.






For both of our main results, Theorem~\ref{thm.main} and Theorem~\ref{thm.finite},
we have corresponding 
simple polynomial-time algorithms that construct a clique of the optimal order whp.
For  a power-law graph with $\alpha \in (1,2)$, it is the greedy algorithm 
of \cite{jln10}: sort  vertices 
%of a graph
in descending order according to their degree; traverse vertices in 
that order and ``grow'' a clique, 
by adding a vertex
if it is
connected to each vertex in the current clique.
%
For  
%sparse 
a graph with bounded degree variance 
%(the case considered in Theorem~\ref{thm.finite})
we propose the following algorithm:
 for each pair of adjacent vertices, 
check if their common neighbours form a clique.
Output the vertices of a largest such clique together with the corresponding pair.
%take %any maximal clique
%formed by that pair and their common neighbours. 
%Output the biggest maximal clique found in this way. 
More details and analysis of each of the algorithms are given 
in Section~\ref{sec.algo} below.




In practical situations a graph may be assumed to be distributed as a random 
intersection graph,
but information about the subset size distribution may not be available. 
In such a case, 
%Suppose $(G(n), n = 1, 2, \dots )$ is a sequence of random intersection graphs. 
instead of condition (\ref{eq.pldef2}) 
for the 
tail of the normalised subset size $Y(n)$, we may consider a similar condition 
for the tail of $D_1(n)$:
% = d_{G(n)}(1)$ 
%of the vertex $1\in V$ in $G(n)$: 
%\m{Suvienodinti žymėjimus $D_v$ ir $d(v)$?}
there are constants $\alpha' > 1, \eps' > 0$ and
a slowly varying function $L'(x)$ such that     for any 
sequence $t_n$ with $n^{1/2 - \eps'} \le t_n \le n^{1/2+\eps'}$
    \begin{equation} \label{eq.degheavytail}
        \pr(D_1(n) \ge t_n) \sim L'(t_n) t_n^{-\alpha'}.
    \end{equation}
The following two lemmas are proved in \cite{kurauskas}.  Let $\mathbb{I}_A$ be the indicator
of $A$.
%
%
%*************************************************************************************
\begin{lemma} \label{lem.degrees} 
    Assume that  $\{G(n)\}$ is 
a sequence of random intersection graphs such that 
%Let $D_1 = D_1(n)$ be 
%the degree of vertex 1 in $G(n)$ and let $Y=Y(n)$ be defined as in (\ref{eq.Y}).
    for some $\eps > 0$ we have
    \begin{equation} \label{eq.uniform}
        \E Y(n) \I_{Y(n) \ge n^{1/2 - \eps}} \to 0.
    \end{equation}
    Suppose that either $(\E Y(n))^2$ or $\E D_1(n)$ converges to a positive number, say, $d$. 
    
    Then both limits exist and are equal,  $\lim\E D_1(n)=\lim(\E Y(n))^2=d$.
Furthermore, the condition (\ref{eq.degheavytail}) holds if and only if 
(\ref{eq.pldef2}) holds. In that case,
    $\alpha' = \alpha$ and $L'(t) = d^{\alpha/2} L(t)$.
\end{lemma}
%*************************************************************************************
%
%
%
Thus, under a mild additional assumption (\ref{eq.uniform}), condition (\ref{eq.pldef2}) 
of Theorem~\ref{thm.main} can be replaced by  (\ref{eq.degheavytail}).
Similarly, the condition  $Var Y(n)=O(1)$ of  Theorem~\ref{thm.finite}
can be replaced by 
the condition $Var D_1(n)=O(1)$. 
%
%
%
%
\begin{lemma} \label{lem.degvar}
    Assume that  $\{G(n)\}$ is a sequence of random intersection graphs and for some
positive sequence $\{\eps_n\}$ converging to zero we have
    \begin{equation} \label{eq.uni2}
      \E Y^2(n) \I_{Y(n) > \eps_n n^{1/2}} \to 0. 
  \end{equation}
    Suppose that either $\E Y(n) = \Theta(1)$ or $\E D_1(n) = \Theta(1)$. Then
    \begin{align}
        \E D_1(n) &= (\E Y(n))^2  + o(1) \label{eq.ED12}
        \\ Var D_1(n) &= (\E Y(n))^2 (Var Y(n) + 1)   + o(1). 
\label{eq.VarD}
    \end{align}
\end{lemma}

Our last lemma exposes a close relationship
between $Y(n)$ and $D_1(n)$ for $m=\Theta(n)$ under a natural uniform integrability requirement.
A recent further work \cite{kurauskasweak} on general sparse random graphs shows that uniform integrability of an appropriate degree moment
is a necessary and sufficient condition to be able to approximate global subgraph count statistics by a small number of local samples.

\begin{lemma}\label{lem.uintequiv}
   Assume that  $\{G(n)\}$ is a sequence of random intersection graphs, $m \to +\infty$. Let $k$ be a positive integer.
   \begin{enumerate}[(i)]
   \item Assume $m = \Theta(n)$. Then $\E D_1(n)^k = \Theta(1)$ and $D_1(n)^k$ is uniformly integrable if and only if $\E Y(n)^k = \Theta(1)$ and $Y(n)^k$ is uniformly integrable.
   \item Assume $m = o(n)$. If $\E D_1(n) = \Theta(1)$ then $D_1(n)$ is not uniformly integrable.   
   \end{enumerate}
\end{lemma}






%********************************************
Let us end the introduction by a discussion of some related literature. 
The largest intersecting subset problem for \textit{uniform} hypergraphs was
%, for example, 
considered by 
%Alon and Kim \cite{alonkim}, see also 
Balogh, Bohman and Mubayi~\cite{bbm2009}.
Although the motivation  and the approach of \cite{bbm2009} are 
different from ours, 
their result yields Theorem~\ref{thm.finite} for 
sparse and dense \textit{uniform} random intersection graphs (where all sets
have the same deterministic size).

%{\it Related work.}
Small cliques in random intersection graphs 
%In the literature constant-sized clique subgraphs of 
%random intersection graphs 
were studied in  
\cite{karonski99}, where edge density thresholds 
for emergence of constant size 
cliques were determined, and in \cite{rybstark2010}, where the Poisson approximation
to the distribution 
of the number of small cliques was established.
The clique number was studied by Nikoletseas, Raptopoulos and Spirakis \cite{nrs2012}, see also 
Behrisch, Taraz and Ueckerdt \cite{btu09}, 
in the case where $m\approx n^{\beta}$ for some $0<\beta<1$. We note that
the papers \cite{btu09, karonski99, nrs2012, rybstark2010} considered a particular random 
intersection graph with the binomial distribution $P\sim Bin(m, p)$.
%In the sparse regime
%our Theorem~\ref{thm.finite} generalizes the main result of \cite{nrs2012}.
Sparse graphs $G(n,m,P)$ with $P\sim Bin(m,p)$ and $m = o(n)$ are covered by our Theorem~\ref{thm.finite}. However, they are not very interesting: they consist of $n-o_P(n)$ isolated vertices, see the proof of Lemma~\ref{lem.uintequiv}(ii).
Meanwhile, our results include the case $m=\Omega(n)$. They were obtained independently of \cite{bbm2009, btu09, nrs2012}\footnote{
They were first presented 
at the Lithuanian young scientists' conference in February, 2012, see \cite{infobalt}.}.
In the bounded variance regime we, as well as \cite{bbm2009, btu09, nrs2012}, discovered the same universal phenomenon:
the largest clique is whp formed by a single attribute. 

Similarly as in \cite{bianc06, jln10}, we have a kind of ``phase transition'' %for the clique number
%determined by
as the tail index $\alpha$ for
 the random subset size (or vertex degree) varies, see (\ref{eq.pldef2}).
Assume, for example, that $m = \Theta(n)$ and $P$ is
asymptotically a Pareto distribution.
When $\alpha > 2$, the random graph $G(n,m,P)$ whp contains
cliques of only logarithmic size. When $\alpha < 2$, it whp contains a ``giant'' clique of polynomial size.
It would be interesting to determine the clique number in the case $\alpha=2$. Our proofs in Section~\ref{sec.power_law}
and the results of \cite{bbm2009} show that as $G(n)$ becomes denser, the property that the largest clique is
generated by a single attribute ceases to hold. For such dense $G(n)$ the exact asymptotics of $\omega(G(n))$ remains an interesting open problem (but see the recent work \cite{gho2014} and the references therein).

%cases the asymptotics of $\omega(G(n))$ remain open for dense graphs $G(n)$, 
%see also Remarks 1--3 of \cite{bbm2009}.

%\medskip
%**************************************************************************
%%
%%The rest of the paper is organized as follows. In Section~\ref{sec.power_law} we 
%%study sparse random power-law intersection graphs with index 
%%$\alpha \in (1,2)$, 
%%introduce the result on ``rainbow'' cliques by Alon, Jiang, Miller and Pritkin
%%(Lemma~\ref{lem.rainbow}) and prove Theorem~\ref{thm.main}.
%%In Section~\ref{sec.finite.variance} we %relate our model 
%%%to the balls and bins model and 
%%prove Theorem~\ref{thm.finite}. 
%%In Section~\ref{sec.algo} we present and analyse algorithms 
%%for finding large cliques in $G(n,m,P)$. Finally we prove Lemma~\ref{lem.uintequiv}.
%%%In Section~\ref{sec.tailequivalence} we prove 
%%%Lemmas~\ref{lem.degrees}~and~\ref{lem.degvar}.
%%%Finally we give some concluding remarks.
%%%Finally, in the last section we give some concluding remarks. 




\section{Power-law tails}
\label{sec.power_law}








\subsection{Proof of Theorem~\ref{thm.main}}
\label{subsec.prelim}

We start by introducing some notation. Given  a family of 
subsets $\{S_v, v\in V'\}$ of an attribute set $W'$, we denote by $G(V',W')$
 the 
 \emph{intersection graph} on the vertex set $V'$ defined by this family: $u,v\in V'$ are adjacent
(denoted $u\sim v$)  
whenever $S_u\cap S_v\not=\emptyset$.
Let $H=(V_H,E_H)$ be a hypergraph where the set of (hyper-)edges $E_H$ can be a multiset.  Recall
that for $w \in W'$, $T_w = \{v \in V': w \in S_v\}$.
We say that $G=G(V',W')$ \textit{contains a copy} of $H$ on $S \subseteq V'$ if there is a bijection $\sigma: V_H \to S$
and an injection $f: E_H \to W'$ such that $\sigma(e) \subseteq T_{f(e)}$ for each $e \in E_H$. Here $\sigma(e) = \{\sigma(x): x \in e\}$.
A hypergraph $H$ corresponds to a graph $C(H)$ on the vertex set $V_H$, 
obtained by replacing each edge $e \in E_H$ with a clique on $e$ (we merge repeated edges).
$H$ is called a \textit{clique cover} of $C(H)$. $G$ contains
a subgraph isomorphic to $G'$ if and only if it contains a copy of some clique cover of $G'$, see \cite{karonski99}.
The \textit{number of copies} $Q(H,G)$ of $H$ in $G$ is the number of different tuples $(S, \sigma, f)$ that realise 
a copy of $H$ in $G$. For $v \in V_H$, its \textit{degree} in $H$ is $d_H(v) = |\{e\in E_H: v \in e\}|$.
If $G$ contains a copy of $H$, where $H$ is a graph (each edge is a pair), 
we say that $G$ \textit{contains a rainbow} $H$; this extends the above definition of a rainbow clique.
%
%We need the following simple bound (see also \cite{karonski99}).
%We will make use of 
The next simple bound generalises an estimate obtained by Karo\'nski et al~\cite{karonski99}.

\begin{lemma}\label{lem.countbound}
    Let $H$ be any hypergraph with $h$ vertices of degrees $d_1, \dots, d_h$. Suppose $H$
    has $k$ edges. Let $G=G(n,m,P)$ be a random intersection graph, and let $X$ be distributed according to $P$.
    %with $t = \sup_i \{i: P(i) > 0\}$. Then
    \[
        \E Q(H,G) \le n^h m^k \prod_{i=1}^h \frac {\E X^{d_i}} {m^{d_i}}.
    \]
\end{lemma}
\begin{proof}
    Let $S_1, \dots, S_n$ be the random sets of $G$.
    Given $|S_i|$, the probability that the set $S_i$ contains $d_i$ fixed attributes is $(|S_i|)_{d_i} / (m)_{d_i}$. Thus,
    the probability that a copy of $H$ is realised by fixed $\sigma$ and $f$ is
    \[
        \E \prod_{i=1}^h \frac { (|S_i|)_{d_i} }  {(m)_{d_i}} \le  \prod_{i=1}^h \frac {\E X^{d_i}} {m^{d_i}}.
    \]
    The claim follows by summing over all $(n)_h=n (n-1) \dots (n-h+1)$ choices of $\sigma$ and $(m)_k=m (m-1) \dots (m-k+1)$ choices of $f$.
    %Finally, the number of different bijections $\sigma$ is $(n)_h$ and the number of injections $f$ is $(m)_k$.
\end{proof}


\medskip

%We say that an attribute $w\in W'$ \emph{covers} the edge $u\sim v$ of $G(V',W')$ 
%whenever $w\in S_u\cap S_v$. In this case we also say that the edge $u\sim v$ receives 
%colour $w$.
%In particular, an attribute $w$ covers all edges
%of the (monochromatic) clique  subgraph $T_w$ of $G(V',W')$ 
%induced by the vertex set $T_w=\{v\in V':\, w\in S_v\}$. Given a graph $H$, we say that 
%$G(V',W')$ \emph{contains a rainbow} $H$ if there is a subgraph $H'\subseteq G(V',W')$ 
%isomorphic to $H$ such that
%every edge  of $H'$ can be prescribed 
%an attribute that covers this edge so that all prescribed attributes are different.
% from the collection of sets of colours received by  edges
%of $H'$ we can select a system of distinct representatives (so that every selected colour 
%represents  a unique edge of $H'$).



We denote by $e(G)$ the size of the set $E(G)$ of edges of a graph $G$.
Given two graphs $G=(V(G),E(G))$ and $R=(V(R),E(R))$  
we denote by $G\vee R$ the graph on %the
vertices %vertex set
$V(G)\cup V(R)$ and
with edges % the edge set 
$E(G)\cup E(R)$. In what follows we 
%will always 
assume that $V(G)=V(R)$ unless stated otherwise. %if not mentioned otherwise. 
Let $t$ be a positive integer and let $R$ be an arbitrary
%a non-random
graph on the vertex set $V'$. 
Assuming that subsets $S_v$, $v\in V'$ are drawn at random, introduce the event 
$Rainbow(G(V',W'),R,t)$ that the graph $G(V',W')\vee R$  has a clique $H$ of 
%\m{siulau palikti kaip buvo, zr. kas vadinama order of a graph}
size
$|V(H)|=t$ with 
the property that every edge  $e=xy$ of the set $E(H) \setminus E(R)$ can be prescribed 
an attribute $w_e$ such that $x,y \in T_{w_e}$ so that all prescribed attributes are different.


In the case where every vertex $v$ of 
the random intersection graph $G(n,m,P)$ includes attributes independently at random
with probability $p=p(n)$, the size $|S_v|$ of the attribute set has binomial distribution 
$P\sim Bin(m,p)$. We denote such graph
$G(n,m,p)$ and call it a \emph{binomial} random intersection graph.
We note that for 
$mp\to+\infty$ the sizes $|S_v|$ of random sets are concentrated % \m{concentrates} 
around their mean value 
$\E |S_v|=mp$. An application of   Chernoff's bound (see, e.g., \cite{cmcd98})
\begin{equation} \label{eq.chernoff}
\pr( \left| B - mp \right| > \eps mp) \le 2 e^{-\frac13 {\eps^2 mp}},
\end{equation}
where $B$ is a binomial random variable $B\sim Bin(m,p)$ and $0<\eps < 3/2$,
implies 
\begin{equation} \label{eq.barB}
\pr(\exists v \in [n] : \left| |S_v| - mp \right| > y) \le n \pr( | |S_v| - mp| > y) \to 0
\end{equation}
for any $y = y(n)$ such that $y / \sqrt {mp \ln n} \to +\infty$ and $y/(mp)<3/2$.


%We write $a\wedge b =\min\{a,b\}$ and $a\vee b=\max\{a,b\}$.


Let us prove Theorem~\ref{thm.main}.  
%For every member $G(n)=G(V,W)$ of a sequence 
%$\{G(n)\}$ 
%satisfying the conditions of 
%Theorem~\ref{thm.main} 
For a number
 $\eps_1\in (0,\eps_0)$, where $\eps_0$ is defined in (\ref{eq.pldef2}), and each $n$
define subgraphs $G_i\subseteq G(n)$, $i=0,1,2$, induced by the vertex sets
\begin{align*}
 & V_0 = V_0(n) = \{v\in V(G(n)):\, |S_v| < \theta_1\}; \\
 & V_1 = V_1(n) =  \{v\in V(G(n)):\, \theta_1 \leq |S_v| \leq \theta_2\};\\
 & V_2 = V_2(n) = \{v\in V(G(n)):\, \theta_2 < |S_v|\},
\end{align*}
respectively. Here %$X_v=|S_v|$ denotes the size of the attribute set prescribed to a vertex $v$ and
%the numbers
\begin{align*}
&\theta_1 = \theta_1(n) = m^{1/2} n^{-\eps_1}; &\theta_2 = \theta_2(n) = \left( (1-\alpha/2) m \ln n + m e_1 \right)^{1/2},
\end{align*}
with
$e_1 = e_1(n) = \max(0, \ln L( (n \ln n)^{1/2}))$. Note that $e_1 = 0$ if $L(x) = 1$ for all $x$.
%We have
%$V=V_0\cup V_1\cup V_2$ and $V_i\cap V_j=\emptyset$ for $i\not= j$.
%
Write
\[
    K =K(n) =  L\left( (n \ln n)^{1/2}\right) n ^ {1-\alpha / 2} (\ln n)^{-\alpha/2}.
\]
%Theorem~\ref{thm.main} follows from the three lemmas below.
%be as in Theorem~\ref{thm.main}.
%The first lemma gives a lower bound for the clique number of $G(n)$. 
%
We will prove three lemmas. Let $\beta$ be as in Theorem~\ref{thm.main}.
\begin{lemma}\label{lem.G2} $\omega(G_2) \ge (1-o_P(1))\left(1 -  \alpha/2 \right) ^ {- \alpha/2}K$.
\end{lemma}
%The next two lemmas provide an upper bound.
%
\begin{lemma} \label{lem.G0}
    If $\eps_1 < \frac \beta 6$ then there is $\delta > 0$ such that
    $\pr \left( \omega(G_0) \ge n^{1-\alpha/2 - \delta} \right) \to 0.$
\end{lemma}

\begin{lemma} \label{lem.G1} % If %$\liminf \frac{\ln m}{\ln n} \ge 1$ then 
    If $\eps_1 < \frac {\beta - 2 + \alpha} {24}$ then
 $\omega(G_1) = o_P(K).$
\end{lemma}
Our proof of 
Lemma~\ref{lem.G0} (Lemma~\ref{lem.G1}) works for any $m = \Omega(n^\beta)$ with $\beta > \alpha-1$ ($\beta > 2 - \alpha$).
The proof of Lemma~\ref{lem.G2} works for arbitrary $m=m(n)\ge 1$.

\medskip

\begin{proof}[Proof of Theorem~\ref{thm.main}]
  We choose $0<\eps_1 < \min\{(\alpha - 1)/6, (\beta - 2 + \alpha)/24, \eps_0\}$.
%{\color{red}  
The theorem follows from the inequalities
$\omega(G_2)\le \omega(G) \le \omega(G_0) + \omega(G_1) + \omega(G_2)$ 
and  Lemmas~\ref{lem.G2}-\ref{lem.G1}.
%}
\end{proof}
\vskip 0.1 cm


%*********************************************



\subsection{Proof of Lemma \ref{lem.G2}}\label{subsec.large}




In this section we use ideas from \cite{jln10} to give a lower bound on the clique number.
We first note the following 
%some preliminary
auxiliary
facts.
%
\begin{lemma}\label{lem.lambertW}
  Suppose $a=a(n), b = b(n)$ are sequences of positive real numbers such that $0 < \ln b + a \to +\infty$. 
  Let $z=z(n)$ be the positive root of 
  %\begin{equation} \label{eq.lambertW}
    $a - \ln z - b z^2 = 0$. 
  %\end{equation}
    Then $z \sim \left( (a + 0.5 \ln (2 b)) / b\right)^{1/2}$.
\end{lemma}
\begin{proof}
  Changing the variables $t = 2 b z^2$ we get $t + \ln(t) = 2 a + \ln (2b)$.
  From the assumption it follows that $t + \ln t \sim t$, which proves the claim. % and therefore $z_n \sim \sqrt{\frac{2a + \ln(2b)}{2b}}.$
\end{proof}

\begin{lemma}[\cite{galambosseneta}] \label{lem.sv}
Let $x\to+\infty$. 
For any slowly varying function $L$  and any $0<t_1<t_2<+\infty$ the convergence
$L(tx)/L(x)\to 1$ is uniform in $t\in [t_1,t_2]$. Furthermore,  
we have
 $\ln  L(x) = o(\ln x)$.
\end{lemma}

\begin{proof}[Proof of Lemma~\ref{lem.G2}]
Write $N = |V_2|$ and let
\[
v^{(1)}, v^{(2)}, \dots, v^{(N)}
\]
be the vertices of $V_2$ listed in an arbitrary order. %\m{!} %a descending order of their set sizes, i.e.  $|S_{v^{(i)}}| \le |S_{v^{(i-1)}}|$ for $i = 2,\dots,N$.
We consider a greedy algorithm for finding a clique in $G$ proposed by Janson, \L uczak  and Norros \cite{jln10}. % (they use descending ordering by the set sizes, see also Section~\ref{sec.algo}). 
Let $A^{0} = \emptyset$. In the step $i = 1,2,\dots, N$ 
let $A^{i} = A^{i-1} \cup \{v^{(i)}\}$ if $v^{(i)}$ is adjacent to each of the vertices $v^{(j)}$, $j = 1,\dots,i-1$. Otherwise,
let $A^{i} = A^{i-1}$. 
This algorithm produces a clique $H$ on the set of vertices $A^N$, so that % and $H$ demonstrates that
$\omega(G_2) \ge |A^N|$.  

Write $\theta = \theta_2$ and
let $L_{\theta} = V_2 \setminus A^{N}$ be the set of 
vertices that failed to be added to $A^N$. 
We will show that 
%\m{nesuprantu kodel netinka $|L_\theta|/N$, kai 
%nagrinejamas atvejis kai N arteja i begalybe. cia formalus korektiskumas (dalyba is nulio)
%neturi prasmes. bet kuris skaitytojas suks galva kuriam galui cia +1}
\[
    \frac {|L_{\theta}|} N = o_P(1)
\]
and
\[
N = \left(1 - \alpha/2 \right)^{-\alpha/2} L\left ( (n \ln n)^{1/2} \right)  (\ln n)^{-\alpha/2} n^{1-\alpha/2} ( 1 - o_P(1)).
\] 
%{\color{red} 
From (\ref{eq.pldef2}) we obtain for $N \sim Bin(n, q)$ with 
$q = \pr (X(n) >  \theta)$
  \begin{align*}
      \E N = nq &= n\pr \left( (m/n)^{1/2} Y_n > \theta \right) 
  \\ &\sim L \left( (n/m)^{1/2} \theta \right) n^{1-\alpha/2} m^{\alpha/2} \theta^{-\alpha}
  \\ &\sim \left( 1 - \alpha/2 \right)^{-\alpha/2} L(\sqrt{n \ln n}) (\ln n)^{-\alpha/2} n^{1-\alpha/2} .
\end{align*}
Here we used the estimates  $L((n/m)^{1/2} \theta) \sim L(\sqrt {n \ln n})$
and $\ln L (\sqrt {n \ln n}) = o(\ln n)$, see Lemma~\ref{lem.sv}. 
Furthermore, by the concentration property of the binomial distribution, see, e.g.,
 (\ref{eq.chernoff}), 
we have $N=(1+o_P(1))\E N$.
%}



%{\color{red} 
The remaining bound % $|L_{\theta}|/N \le (|L_{\theta}|+1)/(N+1) = o_P(1)$ 
follows
 from %the relation  $\pr(N\ge 1)=1-o_P(1)$  and 
 the bound
$\E (|L_{\theta}|/N)=o(1)$, shown below (define this ratio to be 0 on the rare event $N=0$).%}  

  Let $p_1$ be the probability that two random 
%{\color{red}
independent subsets of $W=[m]$ of 
size $\lceil \theta \rceil$
%}
do not intersect. 
%\m{ištrinta: Conditionally on $N$,}
  The number of vertices in $L_{\theta}$ is at most the number of pairs $x,y \in V_2$ such that $S_x$ and $S_y$ do not intersect. %, which is at least $p_1$.
  %
  Therefore by the first moment method
%  {\color{red}
\[
  \E \frac{|L_{\theta}|} N = \E \E\left(\frac{|L_{\theta}|} N \Bigl| N\right)
\le 
\E \E \left ( \frac {\binom N 2 p_1} {N}  \Big| N \right) \le \frac {p_1\E N } 2,
  \]
where
  \[
  p_1 = \frac {\binom {m-\theta} \theta} {\binom m \theta} 
\le 
\left( 1-\frac \theta m \right)^\theta 
\le 
e^{-\theta^2/m}.
  \]
Now it is straightforward to check that for some constant $c$ we have 
$p_1 \E N  \le c (\ln n)^{-\alpha/2}  \to 0$. 
%}
This completes the proof. 
\end{proof}

\medskip

Let us briefly explain the intuition for the choice of $\theta$ in the above proof. 
For simplicity assume $L(x) = 1$ for all $x$ so that $e_1 = 0$. 
Could the same method yield a bigger clique if $\theta_2$ is smaller? 
%{\color{red}
We remark that the product $p_1\E N$ as well as its upper bound 
$n^{1-\alpha/2}m^{\alpha/2}\theta^{-\alpha}e^{-\theta^2/m}$ (which we used above)
%\m{siulau palikti, nes intarpas (that we have used) skirtas pagelbeti skaitytojui 
%susieti ankstesni iverti (kuriame tokia nelygybe buvo naudota bet neuzrasyta)
% su tolimesne analize....   ištrinta: that we have used} 
are decreasing functions of $\theta$.
Hence, if we wanted this upper bound to be $o(1)$ then $\theta$ should be at least as large as the 
solution to the equation
\begin{align*}
 n^{1-\alpha/2} m^{\alpha/2} \theta^{-\alpha} e^{-\theta^2/m} = 1
 \intertext{or, equivalently, to the equation}
%\begin{equation*} %\label{eq.theta}
 \alpha^{-1} \ln n + \frac 1 2 \ln (m/n) - \ln \theta - \frac {\theta^2} {\alpha m} = 0.
\end{align*}
After we write the last relation as in Lemma~\ref{lem.lambertW} %in the form
%(\ref{eq.lambertW})
where
$a = \alpha^{-1} \ln n + (1/2) \ln (m/n)$ and $b =  (\alpha m)^{-1}$ 
satisfy
$b e^{2a} = \alpha^{-1} n^{\frac 2 \alpha - 1} \to +\infty$, we obtain from 
Lemma~\ref{lem.lambertW} that
the solution $\theta$
%}
%of (\ref{eq.theta}) 
satisfies
\[
    \theta \sim \left(\frac {(2/\alpha) \ln n - \ln (n/m) + \ln (2/(\alpha m))} {2/(\alpha m)} \right)^{1/2} \sim \left((1-\alpha/2) m \ln n\right)^{1/2}.
\]








%******************************************************************************************



\subsection {Proof of Lemma \ref{lem.G0}}\label{subsec.small}

Before proving Lemma  \ref{lem.G0} we collect some preliminary %\m{auxliary} 
results.
%
\begin{lemma}\label{lem.karonski}
  Let $h\ge 2$ be an integer.
Let $\{G(n)\}$ be a sequence of binomial random intersection graphs
$G(n) = G(n,m,p)$, where $m = m(n)$ and $p = p(n)$ satisfy
  $p n^{1/(h-1)}m^{1/2} \to a \in \{0, +\infty\}$. Then
  \begin{align*}
  \pr(G \text{ contains a rainbow $K_h$}) \to 
         \begin{cases}
             0, &\mbox{if } a=0, \\
             1, &\mbox{if } a = +\infty.
          \end{cases}
  \end{align*}
\end{lemma}
\begin{proof} 
%\m{įrodymas nereikalingas? atvejis $a = 1$ nereikalingas; 
%be to \cite{karonski99} yra klaida\dots}
  The case $a= +\infty$ follows from Claim 2 of \cite{karonski99}. For the case $a=0$ we have,
 by the first moment method,
  \begin{align*}
  \pr(G \text{ contains a rainbow $K_h$}) &\le \binom n h (m)_{\binom h 2} p^{2\binom h 2} 
  %\\ &
  \le %\left(h! \binom h 2 !\right)^{-1}
        \left( n^{1/(h-1)} m^{1/2} p \right)^{h(h-1)} \to 0. \qedhere
  \end{align*}
\end{proof}
\medskip

Next is an upper bound
for the size $\omega'(G)$ of the largest monochromatic clique.
%
\begin{lemma}\label{lem.ballsbins} Let $1<\alpha<2$.
Assume that  $\{G(n)\}$ is a sequence of random 
 intersection graphs satisfying (\ref{eq.Y}), (\ref{eq.pldef2}).
   Suppose that for some $\beta > \alpha - 1$ 
we have $m=\Omega (n^\beta)$.
  Then there is a constant $\delta > 0$ such that whp
$\omega'(G(n)) \le n^{1-\alpha/2 - \delta}$.
\end{lemma}

\begin{proof}
    %Write $V = V(G(n)) = [n]$, $W = W(G(n)) = \{w_1, \dots, w_m\}$ and 
Let $X = X(n)$  and $Y = Y(n)$ be as defined above (\ref{eq.Y}).
  %{\color{red} 
  Since for any  $w \in W$ and  $v \in V$ 
  \[
  \pr (w \in S_v) 
= 
\sum_{k=0}^\infty \frac k m \pr (|S_v| = k) = \frac {\E X} m = \frac{ \E Y} {\sqrt{mn}},
  \]
and the  number of elements of  the set $T_w=\{v:\, w\in S_v\}$ is binomially distributed 
  \begin{equation*} %\label{eq.mono}
    |T_w| \sim Bin \left(n, \frac {\E Y} {\sqrt {m n}}\right),
  \end{equation*}
%}
  we have, for any positive integer $k$
 \[
 \pr (|T_w| \ge k) 
\le 
\binom n k \left ( \frac {\E Y} {\sqrt{mn}}\right)^k 
\le 
\left(\frac{en}{k}\right)^k\left( \frac {\E Y} {\sqrt{mn}} \right)^k 
\le  
\left( \frac {c_1} k \sqrt {\frac n m}\right)^k
 \]
for $c_1 = e \sup_n \E Y$. Therefore, by the union bound,
\[
\pr \left( \omega'(G(n)) \ge k \right) \le m \left(\frac {c_1} k \sqrt{\frac n m}\right)^k.
\]
Fix $\delta$ with $0 < \delta < \min ( (\beta-\alpha + 1)/4, 1- \alpha/2, \beta/2)$.
We have 
\begin{align*}
    &\pr \left( \omega'(G(n)) \ge n^{1-\alpha/2 - \delta} \right) \le m \left( c_1 n^{\alpha/2 - 1/2 + \delta} m^{-1/2} \right)^{\left \lceil n^{1-\alpha/2 - \delta} \right \rceil}
    \\  &= m^{1 - (\delta/\beta) \left \lceil n^{1-\alpha/2 - \delta} \right \rceil} \left( c_1 n^{\alpha/2 - 1/2 + \delta} m^{-1/2 + \delta/\beta} \right)^{\left \lceil n^{1-\alpha/2 - \delta} \right \rceil}\to 0
\end{align*}
since $m \to \infty$, $n^{1-\alpha/2 - \delta} \to \infty$ 
and $m=\Omega(n^{\beta})$ implies
\[
n^{\alpha/2 - 1/2 + \delta} m^{-1/2 + \delta/\beta} 
\to 0.\qedhere
\]
\end{proof}
%Buvo gražus ir tikslus įrodymas dideliems m..
%Putting $k = \ln n$ and $c = e^2 c_1^2$ the natural logarithm of the expression on the right is 
%\begin{align*}
%&     \ln m - k \ln k + (k/2)(\ln n - \ln m) + k \ln c_1
%\\    &= \ln n ( (\ln n)/2 - \ln \ln n + \ln c_1) - \ln m ( (\ln n)/2 -1)
%\\    &\le \ln n ( (\ln n)/2 - \ln \ln n + \ln c_1) - (\ln n - 2 \ln \ln n + \ln c) \left( (\ln n)/2-1 \right)
%\\     & = \ln c - 2 \ln \ln n \to - \infty
%\end{align*}
%which completes the proof.
%\end{proof}
%\m{versija su $|T_w| \le \frac {\ln n} {\ln \ln n}$ tinka �iek tiek didesniems $m$}
%\begin{lemma}\label{lem.ballsbins}
%   Let $X$ be a non-negative integer-valued random variable with $\E X < \infty$.
%   Let $G = G(n,m, X)$ be a random uniform intersection graph with iid set sizes $X_v v \in V = [n]$ where $X_v \sim X$ for $v \in V$. 
%   Suppose that $\frac m n = \Theta(1)$. Then for any positive $\delta$ whp $\max_{w} |T_w| \le (1+\delta) \frac {\ln n} {\ln \ln n}$. 
% \end{lemma}

%\begin{proof}
%  For a fixed key $w \in W$ and a vertex $v \in V$
%  \[
%  \pr (w \in S_v) = \sum_{k=0}^{\infty} \frac k m \pr (|S_v| = k) = \frac {\E |S_v|} m = \frac{ \E X} m.
%  \]
%  Since $|T_w| \sim Binom(n, \frac {\E X} m)$ we have
% \[
% \pr (|T_w| \ge k) \le \binom n k \left ( \frac {\E X} m\right)^k \le \left( \frac {en \E X} {k m} \right)^k \le  \left( \frac c k\right)^k
% \]
%for some $c > 0$. Therefore 
%\[
%\pr \left( \max_w|T_w| \ge k \right) \le m \left(\frac c k\right)^k = e^{\ln m - k \ln k + O(k)} \rightarrow 0.
%\]
%for $k = (1+\eps) \frac {\ln n} {\ln \ln n}$ and any  $\eps > 0$.
%\end{proof}
\vskip 0.1 cm

The last and the most important fact we need relates the maximum clique 
size with the maximum rainbow clique 
size in an  intersection graph. 
An edge-colouring of a graph is called
$t$-good if each colour appears at most $t$ times at each vertex.
%{\color{red} 
We say that an edge-coloured graph contains a rainbow copy of $H$ if it
 contains a subgraph isomorphic to $H$ with all edges receiving different colours 
 (as in our more general definition given above).%}
%
\begin{lemma}[\cite{alon03}] \label{lem.rainbow}
  There is a constant $c$ such that
  every $t$-good coloured complete graph on more than $\frac{c t h^3} {\ln h}$ vertices contains a rainbow copy of $K_h$.
\end{lemma}

\bigskip

\begin{proof}[Proof of Lemma~\ref{lem.G0}]
%{\color{red} 
Fix an integer $h > 1 + \frac 1 {\eps_1}$ and denote 
$t=n^{1- \alpha/2 - \delta}$
and $k=\lceil \frac{c t h^3} {\ln h}\rceil$, where the positive constants $\delta$ and $c$ are from Lemmas
\ref{lem.ballsbins}
and \ref{lem.rainbow}, respectively.
We first show that  
\begin{equation}\label{eq.npkh}
     \pr(G_0 \text{ contains a rainbow } K_h) =o(1).
   \end{equation}
We note that for the  binomial intersection graph ${\tilde G}=G(n,m,p)$  with 
$p = p(n)= m^{-1/2} n^{-\eps_1} + m^{-2/3}$ Lemma~\ref{lem.karonski} implies
\begin{equation}\label{eq.npkh+}
     \pr({\tilde G} \text{ contains a rainbow } K_h) = o(1).
   \end{equation}
 Let ${\tilde S}_v$ (respectively $S_v$), $v\in V$,
denote the random 
subsets prescribed to vertices of ${\tilde G}$ (respectively $G(n)$).
Given the set sizes $|S_v|, |{\tilde S}_v|$, $v\in V$, satisfying $|{\tilde S}_v|>\theta_1$, 
for each $v$, 
we 
couple  the random sets of $G_0$ and ${\tilde G}$ so that $S_v\subseteq {\tilde S}_v$, 
for all $v\in V_0$. Now $G_0$ becomes a subgraph of ${\tilde G}$ and, since $\eps_1 < \beta / 6$, 
(\ref{eq.npkh}) follows from (\ref{eq.npkh+}) and the fact that 
whp $\min_{v}|{\tilde S}_v|>\theta_1$ %\m{buvo $\max_v$} 
by (\ref{eq.barB}) applied to $\tilde{G}$ with $y=m^{1/3}$.

 
Next, we colour every edge $xy$  of $G_0$ by an arbitrary element of
  $S_x \cap S_y$ and observe that the inequality $\omega'(G(n))\le t$ (which holds
with probability $1-o(1)$, by Lemma~\ref{lem.ballsbins}) implies that
the obtained colouring is $t$-good. Furthermore, by Lemma~\ref{lem.rainbow}, every 
$k$-clique of %the coloured
$G_0$ contains a rainbow clique; however the probability of the 
latter event is negligibly small  by (\ref{eq.npkh}). We conclude that 
$\pr(\omega(G_0)\ge k)=o(1)$ thus proving the lemma.
%}
\end{proof}

\medskip

After the submission of our paper, we became aware that Nikoletseas, Raptopoulos 
and Spirakis \cite{nrs2012} independently also used the idea that a large clique must 
contain a large rainbow clique to analyse the clique number in the binomial random intersection graph. 
The proof of \cite{nrs2012} heavily relies on the properties of the binomial distribution,
and does not seem to lead to an alternative proof of Lemma~\ref{lem.G0}.


%*********************************************************************************************



%\newpage

\subsection {Proof of Lemma~\ref{lem.G1}}
\label{subsec.intermediate}

We start with a combinatorial lemma which is of independent interest.

\begin{lemma}\label{lem.DR}
%{\color{red} 
Given positive integers $a_1,\dots, a_k$, let 
$\{A_1, \dots, A_k\}$ be a family of subsets of $[m]$ of sizes $|A_i|=a_i$.
%Let $s\ge k$ and 
Let $S$ be a random subset of $[m]$ of size $s \ge k$.
Suppose that $a_1+\dots+a_k\le m$. Then the probability
  \begin{equation}\label{eq.DR}
    \pr \left( \{S \cap A_1, \dots, S\cap A_k\} \text{ has a system of distinct representatives} \right)
  \end{equation}
  is maximised when $\{A_i\}$ are pairwise disjoint.
%}
\end{lemma}

\begin{proof}
  Call any of $\binom m s$ possible outcomes $c$ for $S$ a configuration.
% and let $\cc$ be the family of all configurations.
% Given a configuration $c \in \cc$, colour an element $x \in [n]$ black if $x \in c$ and 
% colour it white otherwise.
  Given $\cf = \{A_1, \dots, A_k\}$ let $\cc_{DR}(\cf)$ be the set of  configurations 
$c$ 
such that $c \cap \cf = \{c \cap A_1, \dots, c\cap A_k\}$ has 
a system of distinct representatives. Write
\[
d(\cf) = \sum_{1 \le i < j \le k} |A_i \cap A_j|.
\]
Suppose the claim is false. Out of all families
%  Suppose $\cf$ is a family that maximises (\ref{eq.DR}), and
that maximize (\ref{eq.DR}) pick a family $\cf$ with smallest $d(\cf)$.
Then $d(\cf) > 0$ and we can assume that
%  suppose
%{\color{red} that 
there is an element $x \in [m]$ 
such that  $x \in A_1 \cap A_2$. %}
  Since $x \in A_1 \cap A_2$ is counted at least twice in the sum $\sum_{i=1}^k |A_i| \le m$,
the union $\bigcup_{A\in \cf} A$ has at most $m-1$ elements, so there is an element $y$ in 
its complement.

  Define $A_1' = (A_1 \setminus \{x\}) \cup \{y\}$ and 
consider the family  $\cf' = \{A_1', A_2, \dots, A_k\}$.
Observe that the family of configurations $\cc = \cc_{DR}(\cf) \setminus \cc_{DR}(\cf')$ 
has the following property: for each  $c \in \cc$ we have
$x\in c$  
%and $y\notin c$,
and
it is not possible to find a set of distinct representatives for $c \cap \cf$ 
where $A_1$ is matched
with an element other than $x$ (indeed such a set of distinct representatives, 
if existed,
would imply $c\in \cc_{DR}(\cf')$). Consequently, there is a set of distinct representatives
for sets $c \cap A_2,\dots, c\cap A_k$ which does not use $x$. Since the latter set of 
distinct representatives together with $y$ is a set of distinct representatives
for $c\cap \cf'$, we conclude that 
%$c\not\in \cc_{DR}(\cf')$
$c\in \cc$
implies $y\notin c$.


Now, for $c \in \cc$, let $c_{xy}=(c\cup\{y\})\setminus\{x\}$ 
be the configuration with  $x$ and $y$ swapped. 
%Suppose $c \in \cc$. 
Then $c_{xy} \not \in \cc_{DR}(\cf)$ and  $c_{xy} \in \cc_{DR}(\cf')$,
 because $y\in c_{xy}$ and can be matched with $A_1$. Thus
 each  configuration $c \in \cc$ is %\m{gal ``assigned``? nes mapped, mano galva,
% yra matematinis terminas,
%is karto pradedi ieškoti formaliai apibrėžto atvaizdžio, o tekste siekiama paprastumo}
 %mapped to
 assigned
a unique  
configuration $c_{xy} \in \cc_{DR}(\cf') \setminus \cc_{DR}(\cf)$.
This shows that $|\cc_{DR}(\cf')| \ge |\cc_{DR}(\cf)|$. But
$d(\cf') \le d(\cf) - 1$, which contradicts our assumption
about the minimality of~$d(\cf)$.
%and 
%{\color{red} contradicts to our assumption 
%that $\cf$ maximises (\ref{eq.DR}).}
%  Let $f: \cc \to \cc$ be the bijection which swaps the colours of $x$ and $y$.
%  Observe that $c \in \cc_{DR}(\cf)$ implies $f(c) \in \cc_{DR}(\cf')$ (this can be seen by considering the four possible assignments of colours to $x$ and $y$).
%  Since $f$ is a bijection $|\cc_{DR}(\cf)| \le |\cc_{DR}(\cf')|$ and the proof is complete.
\end{proof}

\medskip

The next lemma is a version of a result of Erd\H{o}s and R\'{e}nyi about the maximum 
clique of the random graph $G(n,p)$ (see, e.g., \cite{jlr}). 
%
\begin{lemma} \label{lem.planted}
%Consider a sequence of Erd\H{o}s and R\'{e}nyi random graphs 
%$\left(G(n,p) \right.$ $\left. n = 1,2,\dots\right)$ with 
    Let $n\to+\infty$. Assume that a sequence $p_n$ with $p_n \in [0,1]$ converges to 1.
Let  $\{r_n\}$ be a positive sequence,
satisfying 
$r_n = o ({\tilde K}^2)$, where ${\tilde K}=\frac { 2 \ln n} {1-p_n}$.

There are positive sequences $\{\delta_n\}$ and $\{\eps_n\}$ 
converging to zero, such that  $\delta_n{\tilde K}\to+\infty$ and 
for 
%any$n$ and 
%any
a sequence of 
%non-random
arbitrary
 graphs $\{R_n\}$ %on vertices $\{1,\dots,n\}$ 
with $V(R_n) = [n]$ and $e(R_n) \le r_n$ 
the number $Q_n$ of
cliques of size $\lfloor {\tilde K} (1 + \delta_n) \rfloor$ in $G(n,p_n) \vee R_n$ satisfies
\[
 \E Q_n \le \eps_n.
\]
\end{lemma}

\begin{proof}
  Write $p=p_n, r=r_n$. Pick a positive sequence 
$\delta=\delta_n$ so that $\delta_n \rightarrow 0$ and 
$\ln^{-1} n + (1-p) +  \frac r {{\tilde K}^2} = o(\delta)$.
  Let $a = \left \lfloor {\tilde K}(1+\delta) \right \rfloor$.
  We have
 \begin{equation}\label{2013-07-27}
  \E Q_n 
\le 
\binom n a p^{\binom a 2 - r} 
\le 
\left( \frac {e n} a \right)^a p^{\frac {a(a-1)} 2 - r}=e^{aB},
 \end{equation}
where, by the inequality $\ln p\le -(1-p)$, for $n$ large enough,
\begin{align*}
 B
%\ln (en/a)+ \left( \frac {a-1} 2 - \frac r a \right)\ln p 
&\le \ln (en/a)  -\left(\frac {a-1} 2 - \frac r a  \right)(1-p)
\\& \le \ln n - \frac {a (1-p)}2 + \frac {r(1-p)}{a}
%\\& 
\le 
 (-1+o(1))\delta\ln n  \rightarrow -\infty. \qedhere
\end{align*}
%which completes the proof.
\end{proof}

\vskip 0.2 cm


%\vskip 0.2cm

\begin{lemma}\label{lem.plantedrig}
  Let $\{G(n)\}$ be a sequence of binomial random intersection graphs $G(n) = G(n,m,p)$, 
where $m=m_n\to+\infty$
and $p=p_n\to 0$ as $n\to+\infty$.
Let $\{r_n\}$ be a sequence of positive integers.
Set  ${\bar K} = 2 e^{mp^2}\ln n$. %%%For positive sequences $\{a_n\}, \{b_n\}$, write $a_n\ll b_n$ if $a_n/b_n\to 0$. 
Assume that  $r_n\ll{\bar K}^2$ and
\begin{equation}\label{2013_08_17.mpK}
mp^2\to+\infty,
\qquad
\ln n\ll mp,
\qquad 
{\bar K}p\to 0,
\qquad
{\bar K}\le n/2.
\end{equation}

  There are positive sequences $\{\eps_n\}, \{\delta_n\}$ 
converging to zero such that $\delta_n{\bar K}\to+\infty$ and
  for %any non-random
  an arbitrary
  sequence of graphs $\{R_n\}$ with $V(R_n)=V(G(n))$
and $e(R_n) \le r_n$
\begin{equation}\label{2013_08_16.lem.plantedrig}
\pr \left( Rainbow(G(n), R_n, {\bar K}(1+\delta_n)) \right) \le \eps_n, 
\qquad 
n = 1, 2, \dots
\end{equation}
\end{lemma}
Here we choose $\{\delta_n\}$ such that ${\bar K}(1+\delta_n)$ is an integer.
%Call a $K$-set $U \subseteq V$ good if the set of all pairs of $U$ that are not in $R$ form a rainbow subgraph in $G$.

\medskip

\begin{proof}
Let $\{x_n\}$ be a positive sequence such that
 \begin{equation*} \label{x-2013-08-14}%\label{eq.epsilon}
   p x_n \to 0, \quad  x_n\ll mp 
\quad
\mbox{ and } 
\quad
\sqrt{mp \ln n} \ll x_n
 \end{equation*}
(one can take, e.g., $x_n=\varphi_n\sqrt{mp\ln n}$, with $\varphi_n \to +\infty$ 
satisfying
$\varphi^2_n {\bar K}p\to 0$).

Given $n$, we truncate the random sets $S_v$, 
prescribed to vertices $v\in V$ of the graph $G(n)$ %$G=G(n,m,p)$, 
%\m{išimta: up} 
to the size
 $M = \lfloor mp + x_n\rfloor$. Denote 
\begin{equation*}
  \bar{S}_v=
  \begin{cases}
    S_v, \text{ if } |S_v| \le M,\\
    \text{$M$ element random subset of $S_v$, otherwise}.
  \end{cases}
\end{equation*}
We remark that for 
the event $B=\{S_v={\bar S}_v, \forall v\in V\}$ Chernoff's bound  implies
% Chernoff's inequality imply as $n\to+\infty$
\begin{equation}\label{2013_08_16.B}
\pr(B)=1-o(1). 
\end{equation}

Now, let  $t \in [{\bar K}, 2 {\bar K}]$ and  let $T = \{u_1, \dots, u_t\}$ be a subset of $V$
of size $t$. 
By $R_T$ we denote the subgraph of $R_n$ induced by the vertex set $T$.
Given $i \in \{1, \dots, t\}$, let $T_i\subseteq \{u_1,\dots, u_{i-1}\}$ denote the subset of 
vertices which are not adjacent to $u_i$ in $R_T$. Let $A_T(i)$ denote the event that
   sets
$\{\bar{S}_{u} \cap S_{u_i}, \, u\in T_i\}$
have distinct representatives (in particular, none of the sets is empty).
Furthermore, let $A_T$ denote the event that all $A_T(i)$, $1\le i \le t$ hold simultaneously.
%\[
%A_T = \bigcap_{i=1}^t A_T(i).
%\]
We shall prove  below that 
whenever $n$ is large enough
\begin{equation}\label{eq.AT}
  \pr(A_T) \le \left( 1- (1-p)^M \right)^{\binom t 2 - e(R_T)}.
\end{equation}
Next, proceeding as in  Lemma~\ref{lem.planted} we find positive sequences 
$\{\delta'_n\}$, $\{\epsilon'_n\}$ converging to zero such that the number $Q'_n$ of 
subsets $T\subseteq V$ of size 
\begin{displaymath}
 a'= \Big\lfloor \frac{2\ln n}{(1-p)^M}(1+\delta'_n)\Big\rfloor
\end{displaymath}
that satisfy the event $A_T$ has expected value $\E Q'_n\le \epsilon'_n$. 
For this purpose, we 
apply (\ref{2013-07-27}) to $a'$ and $p'=1-(1-p)^M$, and use (\ref{eq.AT}). We remark that 
$a'={\bar K}(1+\delta_n'')$, where $\{\delta''_n\}$ converges 
to zero  and $\delta_n''{\bar K}\to+\infty$. Indeed, we have 
$\delta_n'\ln n/(1-p)^M\to+\infty$, by Lemma~\ref{lem.planted}, and we have 
%To show this relation 
%we expand 
$(1-p)^M=e^{-mp^2 - O(px_n + mp^3)}$ 
with
%and invoke the bound 
$px_n+mp^3=o(1)$. In particular, for large $n$, we have 
$a'\in [{\bar K}, 2{\bar K}]$.   

The key observation of the proof is that the events $B$ and $Rainbow(G(n), R_n, a' )$ 
imply  
$Q'_n > 0$. Hence, by Markov's inequality,
\begin{displaymath}
 \pr(Rainbow(G(n), R_n, a' )\cap B)\le \pr(Q'_n>0)
\le 
\E Q'_n
\le 
\epsilon'_n.
\end{displaymath}
Finally, invoking (\ref{2013_08_16.B}) we obtain 
(\ref{2013_08_16.lem.plantedrig}).

\vskip 0.2cm

It remains to show (\ref{eq.AT}). We write
\[
\pr(A_T) = \prod_{i=1}^t \pr \left(A_T(i) | A_T(1), \dots, A_T(i-1)\right)
\]
and evaluate, for $1\le i\le t$,
\begin{equation}\label{2013_08_17.AT}
\pr(A_T(i) | A_T(1), \dots, A_T(i-1)) \le \left( 1 - (1-p)^M \right)^{|T_i|}.  
\end{equation}
Now (\ref{eq.AT}) follows from the simple  identity 
$\sum_{1\le i\le t}|T_i|={\binom t 2}-e(R_T)$.
Let us prove (\ref{2013_08_17.AT}). For this purpose we apply Lemma~\ref{lem.DR}.
We first
condition on $\{{\bar S}_{u}$, $u\in T_i\}$ and the size
$|S_{u_i}|$ of $S_{u_i}$. By 
Lemma~\ref{lem.DR} the conditional probability 
\[
\pr(A_T(i)\,  \bigl| \, \bar{S}_{u},\ u\in T_i, \, |S_{u_i}|)
\]
is maximized when the sets ${\bar S}_{u}$, $u\in T_i$ are pairwise disjoint
%and have the largest possible sizes $|{\bar S}_{u}|=M$ 
(at this point we verify that ${\bar K} p \to 0$ and $t\le 2 {\bar K}$ implies the condition of  
Lemma~\ref{lem.DR} that
$\sum_{u\in T_i}|{\bar S}_{u}|\le tM<m$, for large $n$). Secondly, we drop
the conditioning on $|S_{u_i}|$ and allow $S_{u_i}$ to choose its element independently at 
random with probability $p$. In this way we obtain (\ref{2013_08_17.AT}).
\end{proof}



\begin{lemma}\label{lem.G1general}
%{\color{red}
Let $\{G(n)\}$ be a sequence of random binomial intersection graphs $G(n) = G(n,m,p)$, 
where $m=m(n)\to+\infty$
and $p=p(n)\to 0$ as $n\to+\infty$.
 Assume that 
\begin{displaymath}
np = O(1),
\qquad
m (n p)^3 \ll  {\bar K}^2,
\end{displaymath}
where ${\bar K} = 2 e^{mp^2}\ln n$. 
Assume, in addition, that (\ref{2013_08_17.mpK}) holds.  

  Then there is a sequence $\{\delta_n\}$ converging to zero such that 
$\delta_n{\bar K}\to+\infty$ and
  \[
   \pr \left(\omega(G(n)) > {\bar K}(1 + \delta_n) \right) \to 0.
   \]
%}
\end{lemma}


\begin{proof} Given $n$, let $U$ be a random subset of $V=V(G(n))$ 
with binomial number of elements $|U|\sim Bin(n,p)$ and such that, 
for any $k=0,1,\dots$,  conditionally,
given the event $|U| = k$, 
the subset $U$ is uniformly 
distributed 
over the class of subsets of $V$ of size $k$.
  Recall that $T_w\subseteq V$ denotes the set of vertices that have chosen an attribute 
  $w \in W$.  
We remark that  $T_w$, $w \in W$ are iid random subsets 
having the same probability distribution as $U$.


We call an attribute $w$ \emph{big} if $|T_w| \ge 3$, otherwise $w$ is \emph{small}. Let $W_B$ and $W_S$
denote the  sets of big and small attributes. Denote by $G_{B}$ 
(respectively, $G_S$)
the subgraph of $G=G(n)$ consisting
of edges  
covered by big (respectively, small) attributes ($w$ covers an edge $xy$ if $x,y \in T_w$).
We observe that, given $G_B$, the random sets $T_z$, $z\in W_S$, defining the edges of $G_S$  are (conditionally) independent.
We are going to replace them by bigger sets, denoted $T'_z$, by adding some more elements as follows. 
Given $T_z$, we first generate independent random variables 
${\mathbb I}_z$ and 
$|\Delta_z|$, where ${\mathbb I}_z$ has Bernoulli distribution with success probability $p'=\pr(|U|\le 2)$ and 
where $\pr(|\Delta_z|=k)=\pr(|U|=k)/(1-p')$, $k=3,4,\dots$. 
Secondly, for ${\mathbb I}_z=1$ we put $T'_z=T_z$. Otherwise we put $T'_z=T_z\cup \Delta_z$, where
$\Delta_z$ is a  subset of $V\setminus T_z$ of size $|\Delta_z|-|T_z|\ge 1$ drawn uniformly at random. 
We note that given $G_B$, the random sets $T'_z$, $z\in W_S$ are (conditionally) independent and have the same probability distribution as $U$.
Next we generate independent random subsets $T'_w$ of $V$, for $w\in W_B$, so that they have the same distribution as $U$ and are independent 
of $G_S$, $G_B$ and $T'_z$, $z\in W_S$. %Given $G_B$, 
The collection of random
sets $\{T'_w, w\in W_B\cup W_S\}$ defines a binomial random intersection graph $G'$ having the same distribution as $G(n,m,p)$.








We remark that $G_S\subseteq G'$ and every edge of $G_S$ can be assigned a small 
attribute that covers this edge 
and the assigned attributes are all different. 
On the other hand, the graph $G_B$ is relatively small.
Indeed,
since each  $w$ covers $\tbinom{|T_w|}{2}$ edges, 
   the expected number of edges  of $G_{B}$ is at most
\begin{eqnarray}\nonumber
 \E \sum_{w\in W}\binom{|T_w|}{2}{\mathbb I}_{\{|T_w|\ge 3\}}
=
m\E\binom{|T_w|}{2}{\mathbb I}_{\{|T_w|\ge 3\}}
\le 
m\sum_{k\ge 3} \binom k 2 \binom n k p^k.
\end{eqnarray}
Invoking the simple bound
   \[
  \sum_{k\ge 3} \binom k 2 \binom n k p^k \le  (np)^2 (e^{np} - 1)/2  = O((np)^3)
  \]
we obtain $\E e(G_B)=O(m(np)^3)$.



Now we choose an integer sequence $\{r_n\}$ such that  
 $m (n p)^3 \ll r_n \ll {\bar K}^2$ and write, for an integer $K'>0$,
 \begin{equation}\label{2013_08_19}
 \pr\left(\omega(G)\ge K'\right)
 \le 
 \E \pr\left(\omega(G)\ge K'|G_B\right){\mathbb I}_{\{e(G_B)\le r_n\}}+
\pr\left(e(G_B)\ge r_n\right).
 \end{equation}
 Here, by Markov's inequality,  $\pr(e(G_B)\ge r_n)\le r_n^{-1}\E e(G_B)=o(1)$. 
Furthermore, we 
%choose $K'={\bar K}(1+\delta_n)$ and
 observe that $\omega(G)\ge K'$ 
implies the event $Rainbow(G',G_B, K')$. Hence,
\begin{displaymath}
 \pr\left(\omega(G)\ge K'|G_B\right)
\le
\pr\left(Rainbow(G',G_B, K')|G_B\right).
\end{displaymath}
We choose $K'={\bar K}(1+\delta_n)$
 and apply Lemma~\ref{lem.plantedrig} 
 to the conditional  probability on the right. %in the right-hand side. 
At this point we specify $\{\delta_n\}$ and find positive $\epsilon_n\to 0$
 such that \[\pr\left(Rainbow(G',G_B, K')|G_B\right)\le \epsilon_n\] 
uniformly in $G_B$ satisfying 
$e(G_B)\le r_n$.
 Hence, (\ref{2013_08_19}) implies 
$\pr\left(\omega(G)\ge {\bar K}(1+\delta_n)\right)\le \epsilon_n+o(1)=o(1)$.
\end{proof}

\medskip

Now we are ready to prove Lemma~\ref{lem.G1}.

\medskip

\begin{proof}[Proof of Lemma~\ref{lem.G1}]
Let
\begin{equation}\label{2013_08_21.eps}
0<\epsilon<2^{-1}\min\{1,\, 1-2^{-1}\alpha,\, \beta-2+\alpha-6\alpha\epsilon_1\},
\end{equation}
define $\theta > 0$ by $\theta^2=(1-\epsilon-2^{-1}\alpha)m\ln n$ and let ${\bar G}_1$ be the subgraph of $G_1$ induced by vertices $v\in V_1$ with
$|S_v|\le \theta$. Let 
$D=|V(G_1)\setminus V({\bar G}_1)|$ denote the number of vertices of $G_1$ that
do not belong to ${\bar G}_1$. 
%


First, using (\ref{eq.pldef2}) and Lemma~\ref{lem.sv} we estimate the expected value of $D$ 
for $n\to+\infty$ 
\begin{equation}\label{2013_08_21+}
\E D=n\left(\pr(|S_v|\ge \theta)-\pr(|S_v|\ge \theta_2)\right)
\le (h(\epsilon)+o(1))K.
\end{equation}
Note that 
$h(\epsilon):=(1-\epsilon-2^{-1}\alpha)^{-\alpha/2} - (1-2^{-1}\alpha)^{-\alpha/2}\to 0$
can be made arbitrarily small by taking $\eps$ small enough.
%as $\varepsilon\to 0$.
%
%Letting $\epsilon\to 0$ we obtain from (\ref{2013_08_21+}) that $D=o_P(K)$. 
%
%We complete the proof  by showing that 
Next, we claim that for any $\epsilon$ satisfying 
(\ref{2013_08_21.eps}),
\begin{equation}\label{2013_08_22.++}
 \pr\left(\omega({\bar G}_1) \ge 4 n ^ {1 - 2^{-1}\eps-2^{-1}\alpha} \ln n \right)=o(1).
\end{equation}
 Note that $n ^ {1 - 2^{-1}\eps-2^{-1}\alpha} \ln n \ll K$. Clearly, the lemma follows from  (\ref{2013_08_21+}), Markov's inequality and (\ref{2013_08_22.++}), since
$\omega(G_1)\le D+\omega({\bar G}_1)$ and
we can replace $\eps$ by a sequence $\eps_n' \to 0$ so that both terms are $o_P(K)$.
%show that each summand on the right is of order
% $o_P(K)$ for appropriately.

It remains to prove (\ref{2013_08_22.++}). 
Let ${\bar N}$ be a binomial random variable with distribution $Bin(n,\pr(|S_v|>\theta_1))$,
and
let 
\begin{displaymath}
{\bar n}=(1+\epsilon)n^{1- 2^{-1}\alpha + \alpha \eps_1}L(n^{0.5-\epsilon_1})
\quad
\
{\text{ and}}
\quad
\
{\bar p}^2=(1-2^{-1}\epsilon-2^{-1}\alpha)m^{-1}\ln n. 
\end{displaymath}
We couple ${\bar G}_1$ with 
the binomial random intersection graph $G'=G({\bar n}, m,{\bar p})$ so that
the event that ${\bar G}_1$ is isomorphic to a subgraph of $G'$, denoted 
${\bar G}_1\subseteq G'$, has probability 
\begin{equation}\label{2013_08_22.couple}
\pr({\bar G}_1\subseteq G')=1-o(1). 
\end{equation}
%We argue that  
Such a coupling is possible because the events
$A=\{$every vertex of $G'$ is prescribed at least $\theta$ attributes$\}$
and $B=\{|V({\bar G}_1)|\le {\bar n}\}$ occur whp. Indeed,
the bound $\pr(A)=1-o(1)$ follows from 
Chernoff's inequality (\ref{eq.barB}). To get the bound $\pr(B)=1-o(1)$ we first couple
binomial random variables $|V({\bar G}_1)|\sim Bin(n,\pr(\theta_1<|S_v|<\theta))$ 
and ${\bar N}$
so that $\pr(|V({\bar G}_1)|\le {\bar N})=1$ and then invoke the bound
$\pr({\bar N}\le {\bar n})=1-o(1)$, which follows from  (\ref{eq.pldef2}) and 
Chernoff's inequality.

Next we apply Lemma~\ref{lem.G1general} to $G'$ (the conditions of the lemma on ${\bar n}$, $m$ and ${\bar p}$ can be easily checked) and obtain the bound
\begin{equation}
   \pr \left( \omega(G') > 4 n ^ {1 - 2^{-1}\eps-2^{-1}\alpha} \ln {\bar n }\right)=o(1),
\end{equation}
which together with (\ref{2013_08_22.couple}) implies (\ref{2013_08_22.++}).
%$\pr \left( \omega({\bar G}_1) > 4 n ^ {1- 2^{-1}\eps-2^{-1}\alpha} \ln n \right)=o(1)$.
\end{proof}

\medskip

One might ask if we could employ the results of \cite{nrs2012} in the above proof, since the random intersection graph is binomial.
The answer seems to be ``no'': the condition $m = {\bar n}^\beta$ with $\beta < 1$ required by \cite{nrs2012} is not satisfied. In fact, $m / {\bar n}$ grows polynomially with
$n$. Theorem~3 of \cite{bbm2009} suggests that $G'$ is in the range where the largest clique is not formed by a single attribute, and
the precise asymptotics of $\omega(G')$ for such dense graphs remains an open problem.

  

\section{Bounded variance}
\label{sec.finite.variance}

%We give a short proof of Theorem~\ref{thm.finite}. 

\medskip

\begin{proof}[Proof of Theorem~\ref{thm.finite}]
  %Assume first $m \to +\infty$.
   Fix $c > 0$. Define the truncated probability distribution $P_n' = P_{n,c}'$ by setting $P_n'(k) = P_n(k)$
   if $k < \lfloor c \sqrt m\rfloor$ and $P_n'( \lfloor c \sqrt m \rfloor) = \sum_{i \ge \lfloor c \sqrt m \rfloor} P_n(i)$.
   Let $G_c(n)$ be the random intersection graph $G(n, m, P_n')$ with sets $S_1', \dots, S_n'$ defined as follows.
   Given a realisation of the random sets $S_1, \dots, S_n$ of $G(n)$, for each $v \in V(G(n)) = V(G_c(n))$ with
   $|S_v| \le c \sqrt m$ set $S_v' = S_v$. For those $v$ where $|S_v| >  c \sqrt m$, let $S_v'$ be a uniformly random subset
   of size $\lfloor c \sqrt m \rfloor$ from $S_v$, chosen for each $v$ independently (conditioned
   on $S_1, \dots, S_n$). Write $X = X(n)$, $Y = Y(n)$, and let $X'=X'(n)$ have distribution $P_n'$.
   %Denote $X_v' = |S_v'|$.
   
   The graph $G_c(n)$ is a subgraph of $G(n)$ and the edges that differ have at least one endpoint in the set
   $R_c = \{v \in V(G(n)): |S_v| > c \sqrt m\}$. Also by Markov's inequality $|R_c| = O_P(1)$ since
   \[
       \E |R_c| = n \pr(X > c \sqrt m) \le \frac {n \E X^2} {c^2 m} = \frac {\E Y^2} {c^2}.
   \]
   Let $H_1$ be the  hypergraph on $\{1,2,3,4\}$ with edges $\{\{1,2,3\},$ $\{1,4\},$ $\{2,3,4\}\}$.
   Let $H_2$ be the hypergraph on $\{1,2,3,4\}$ with edges
   $\{ \{1,2\},$ $\{1,3\},$ $\{1,4\},$ $\{2,3,4\} \}$. % Let $K_4$ be the complete graph on 4 vertices.
   By Lemma~\ref{lem.countbound} and trivial bounds $\E (X')^2 \le \E X^2$, $\E (X')^3 \le c \sqrt m \E X^2$,
   we get
   \begin{align}
       &\E Q(H_1, G_c(n)) \le n^4 m^3 \left( \frac {\E (X')^2} {m^2} \right)^4 \le \frac { (\E Y^2)^4} {m}; \label{eq.H1}
       \\ &\E Q(H_2, G_c(n)) \le n^4 m^4 \left( \frac {\E (X')^2} {m^2} \right)^3 \frac {\E (X')^3} {m^3} \le \frac {c (\E Y^2)^4 } {\sqrt m};     \label{eq.H2}
       \\ &\E Q(K_4, G_c(n)) \le n^4 m^6 \left( \frac {\E (X')^3} {m^3} \right)^4 \le (c \E Y^2)^4. \label{eq.H3}
   \end{align}
   Thus $G_c(n)$ whp contains no copy of $H_1$ or $H_2$. 
   We claim that whp
   \begin{equation}\label{eq.omegaGc}
       \omega(G_c(n)) = \max(\omega'(G_c(n)), \omega_R(G_c(n))).
   \end{equation}
   Indeed, let $S$ be a
   set of vertices in $G_c(n)$, such that there is a clique on $S$, of the maximum size.
    Let $Q$ be a subset of $S$ of the largest cardinality such that $\bigcap_{v \in Q} S_v' \ne \emptyset$. Equation (\ref{eq.omegaGc}) is trivially true when $|Q| \le 1$,
    so we can assume $|S| \ge |Q| \ge 2$. If $|Q| = 2$, $G_c(n)$ contains a rainbow clique on $S$, so that 
   $\omega(G_c(n)) = \omega_R(G_c(n))$. Suppose $\omega(G_c(n)) > \omega'(G_c(n))$. Then $|Q| < |S|$, and let $x$ be an arbitrary vertex in $S \setminus Q$. The maximality of $Q$ implies that $G_c(n)$ contains a copy of either $H_1$ or $H_2$ on a subset of $Q$ (with $1$ mapped to $x$). 
   But we have shown that this does not occur whp. 
   %But by (\ref{eq.H1} - \ref{eq.H2}) $G_c(n)$ contains
   %no copy of $H_1$ or $H_2$ whp.
   So whp $|Q| \in \{2,|S|\}$. This finishes the proof of (\ref{eq.omegaGc}).
   
   A rainbow $K_h$ in $G_c(n)$ yields $(h)_4$ copies of $K_4$. Therefore by (\ref{eq.H3}) and 
   Markov's inequality for $h \ge 4$ 
   \[
       \pr (\omega_R(G_c(n)) \ge h) \le \pr \left(Q(K_4, G_c(n)) \ge (h)_4 \right) \le  \frac {c^4 (\E Y^2)^4(1 + o(1))} {(h)_4},
   \]
   so $\omega_R(G_c(n)) = O_P(1)$. Using (\ref{eq.omegaGc}) and our bound for $|R_c|$, we get
   \[
       \omega'(G(n)) \le \omega(G(n)) \le \omega'(G_c(n)) + \omega_R(G_c(n)) + |R_c| = \omega'(G(n)) + O_P(1).
   \]
   Finally, suppose (\ref{eq.uni1}) holds. Our proof above holds for any fixed $c > 0$. By (\ref{eq.H3}) for arbitrarily small $\eps>0$ we can pick $c > 0$ such that $\pr(G_c(n) \mbox { contains a copy of }K_4) < \eps+o(1)$, $\pr(|R_c| > 0) \le \E |R_c| \to 0$ and (\ref{eq.omegaGc}) holds. This implies that whp
   \[
       \omega(G(n)) = \max(\omega'(G(n)), \omega_R(G(n))) \mbox{ and } \omega_R(G(n)) \le 3. \qedhere
   \]
   \end{proof}
  %   Now suppose $m=\Theta(1)$. Then the expected number of 
%non-isolated vertices is at 
%most $n \pr(X \ge 1) \le n \E X^2 = O(1)$, 
%so    $\omega(G(n)) = O_P(1)$ by Markov's inequality. The theorem %holds for all $m=\Omega(1)$ by the subsequence 
%principle \cite{jlr}.
%\end{proof}

\medskip

If the condition $m \to + \infty$ is dropped in the second part of Theorem~\ref{thm.finite}, 
then we have whp $\omega(G(n)) \le \omega'(G(n))+c$ for $c > 0$ \cite{kurauskas}. It is easy to see that we can take $c=0$ when $\omega'(G(n))>4$ whp. Otherwise, it seems that $c=2$ is best possible.

\section{Algorithms}
\label{sec.algo}



%Random intersection graphs provide theoretical models for real affiliation networks, like
%the actor network, scientific collaboration network and others. 
Random intersection graphs provide theoretical models for real %\m{V: pakeista}
networks, such as %like
the affiliation (actor, scientific collaboration) networks.
%and others. 
Although the model assumptions 
about the 
distribution of the family of random sets  defining the intersection graph are 
rather stringent (independence and a particular form of the distribution), these models 
%explain clustering properties of the corresponding real networks with remarkable accuracy,
yield random graphs with clustering properties similar to those found in real networks, 
\cite{bloznelis11}. While observing a real network we may or may not have
information about the sets of attributes prescribed to vertices. Therefore it is important
to have algorithms suited to random intersection graphs that do not use any data related
to  attribute sets prescribed to vertices. In this section we consider 
two such algorithms that find cliques of order $(1+o(1))\omega(G)$ in a
sparse 
random intersection graph~$G$.

%In the case of a power law random intersection graph having the (asymptotic) 
%degree distribution
%with an infinite second moment 
The \textsc{Greedy-Clique} algorithm of \cite{jln10}
finds a clique of the optimal order $(1-o_P(1))\omega(G)$ in
a random intersection graph, %having the (asymptotic) 
in the case where (asymptotic) degree distribution is a power-law
with exponent $\alpha \in (1, 2)$.

\medskip

\begin{center}
\fbox{\begin{minipage}{4.5in}

\textsc{Greedy-Clique}(G):

   \smallskip

   \qquad \textit{Let $v^{(1)}, \dots, v^{(n)}$ be the vertices of $G$ in order of decreasing degree}

\qquad $M \leftarrow \emptyset$

\qquad \textbf{for} $i = 1$ to $n$ %\textbf{do}

\qquad \qquad \textbf{if} $v^{(i)}$ \textit{is adjacent to each vertex in} $M$ \textbf{then}

\qquad \qquad \qquad $M \leftarrow M \cup \{v^{(i)}\}$

\qquad \textbf{return} $M$

\end{minipage}}
\end{center}
Here 
%and in the ``mono-clique'' algorithm below 
we assume that 
graphs are represented by 
the adjacency list data structure. 
 The implicit computational model behind our running time estimates in this section is random-access machine (RAM). 
%The following remark says that if our graph
% has a power law degree distribution without a finite second moment, 
%then \textsc{Greedy-Clique} finds a clique of the optimal order.
%
\begin{proposition}\label{prop.gcalg}
%    Consider a sequence of random intersection graphs $(G(n))$ as in Theorem~\ref{thm.main}.    
    Assume that the conditions of Theorem \ref{thm.main} hold. 
    Suppose  that $\E Y = \Theta(1)$ and that (\ref{eq.uniform}) holds for some $\eps > 0$.
    Then on 
input $G=G(n)$ \textsc{Greedy-Clique} outputs a clique of size 
$\omega(G(n)) (1 - o_P(1))$ in time $O(n^2)$.
\end{proposition}
%
By Lemma~\ref{lem.degrees}, the above result remains true if the conditions 
(\ref{eq.pldef2}) and $\E Y(n)=\Theta(1)$ are replaced by the conditions 
(\ref{eq.degheavytail})  and $\E D_1=\Theta(1)$. 
Proposition~\ref{prop.gcalg} is proved in a similar way as Lemma~\ref{lem.G2}, but it does 
not follow from Lemma~\ref{lem.G2}, since
\textsc{Greedy-Clique} is not allowed to know the attribute subset sizes.
The proof of Proposition~\ref{prop.gcalg} is given 
in \cite{kurauskas}, p. 87.






For random intersection graphs with bounded degree variance
we suggest the following simple algorithm, which, as we shall see, can be implemented to run
in expected time $O(n)$.
%For sequences of random intersection graphs with bounded degree variance (as in Theorem~\ref{thm.finite} and Theorem~\ref{thm.omega.mono})
%, where $\E Y^2 = O(1)$, or the degree
%variance is bounded, 
%we need a different but also almost trivial algorithm.

\smallskip

%\begin{samepage}

\begin{center}
\fbox{\begin{minipage}{4.5in}

    \textsc{Mono-Clique}(G):

\smallskip

\qquad \textbf{for} $uv \in E(G)$ 

\qquad \qquad $D(uv) \leftarrow |\Gamma(u) \cap \Gamma(v)|$

%\qquad $F \leftarrow \ln n$ edges $uv$ with largest $d(uv)$, sorted in decreasing order of $d(uv)$

\qquad \textbf{for} $uv \in E(G)$ in order of decreasing $D(uv)$ %\qquad \textit{(second phase)}

\qquad \qquad $S \leftarrow \Gamma(u) \cap \Gamma(v)$

\qquad \qquad \textbf{if} $S$ \textit{is a clique} \textbf{then}  

\qquad \qquad \qquad \textbf{return} $S \cup \{u,v\}$

\qquad \textbf{return} $\{1\} \cap V(G)$ (if all else fails, return a trivial clique)
%\end{samepage}
\end{minipage}}
\end{center}
%Here $\Gamma(v)$ denotes the set of vertices that includes $v$ and all its neighbours.
Here $\Gamma(v)$ denotes the set of neighbours of $v$. Note that $D(uv) = |\Gamma(u) \cap \Gamma(v)|$ is the number of triangles that contain the edge $uv$.
Below we also discuss the clique percolation method \cite{pdfv2005} which achieves a similar performance on
sparse random intersection graphs.
For alternative algorithms, proved to work under different/stronger conditions (for example, $P \sim Bin(m,p)$), or aiming to reconstruct
all of the monochromatic cliques $T_w$, see \cite{btu09, nrs2012}.

\smallskip

%The last algorithm is very simple to analyse.
%A slightly more robust version is given below.
%, which still gives the same result would be to construct a maximal clique starting from each edge (without backtracking).
%It is difficult to imagine a simpler algorithm for finding large cliques.

\begin{theorem}\label{thm.algfinite} 
Assume that  $\{G(n)\}$ is a sequence of random intersection graphs such that
 $n=O(m)$ and
$\E Y^2(n)=O(1)$.
 Let $C = C(n)$ be the clique constructed by 
 \textsc{Mono-Clique} on input $G(n)$. Then $\E \left(\omega(G(n)) - |C|\right)^2 = O(1)$. Furthermore, if there is a sequence $\{\omega_n\}$, such that $\omega_n \to \infty$ and whp $\omega(G(n)) \ge \omega_n$, then $|C| = \omega(G(n))$ whp.
\end{theorem} 

\begin{proof} 
    Given distinct vertices $v_1, v_2, v_3, v_4 \in [n]$, let 
${\cal C}(v_1, v_2, v_3, v_4)$
    be the event that $G(n)$ contains a cycle with edges 
$\{v_1v_2, v_2v_3, v_3v_4, v_1v_4\}$ and
    $S_{v_2} \cap S_{v_4} = \emptyset$. 
%We call such a cycle ``marked``. 
Let $Z$ denote 
the number of tuples $(v_1,v_2,v_3,v_4)$
    of distinct vertices in $[n]$ such that
    ${\cal C}(v_1,v_2,v_3,v_4)$ holds. %We will show below that
    Assume for now that
    \begin{equation} \label{eq.Xcyc}
       \E Z = O(1),
    \end{equation}
    which will be proved later.

    Let $K \subseteq [n]$ be the (lexicographically first) largest clique of $G(n)$. Denote
$s = |K|$. If $s \le 2$ or there is a pair $\{x,y\} \subseteq K$, $x \ne y$ such that
    $G(n)[\Gamma(x) \cap \Gamma(y)]$ is a clique, 
then the algorithm returns a clique of size $s$ (since we consider edges in the decreasing order of $D(uv)$). Otherwise, for each such pair $\{x,y\}$ 
    there are $x',y' \in   \Gamma(x) \cap \Gamma(y)$, 
$x' \ne y'$ with $x' y' \not \in E(G(n))$. That is,
    ${\cal C}(x,x',y,y')$ holds and
      $\binom s 2 \le Z$.
    Thus, if $\binom s 2 > Z$, 
the algorithm returns a clique $C$ of size  $s$. Otherwise, the algorithm may fail and return a clique $C$ of size $1$.
    In any case we have that
    \[
      s  - |C| \le \sqrt {2 Z} + 1
    \]
    and using (\ref{eq.Xcyc})
    \[
     \E (\omega(G(n)) - |C|)^2 \le \E (\sqrt{2 Z} + 1)^2 = O(1).
    \]
   Also if $\omega(G(n)) \ge \omega_n$ whp, then by  (\ref{eq.Xcyc}) and
 Markov's inequality
   \[
   \pr(|C| \ne \omega(G(n))) 
\le 
\pr(\omega(G(n)) < \omega_n) 
+ 
\pr \left(Z \ge \binom {\omega_n} 2 \right) \to 0.
   \]
   It remains to show  (\ref{eq.Xcyc}).
   Let $H_1, H_2, H_3$ be the hypergraphs on vertex set $\{1,2,3,4\}$ with edge sets
   $\{\{1,2\}, \{2,3\}, \{3,4\}, \{4,1\}\}$, $\{\{1,2,3\}, \{3,4\}, \{4,1\}\}$ and $\{\{1,2,3\},$ $\{3,4,1\}\}$ respectively. It is easy
   to see that
   \[
       Z \le Q(H_1, G(n)) + Q(H_2, G(n)) + Q(H_3, G(n)).
   \]
   Write $X = X(n)$. By Lemma~\ref{lem.countbound} since $m = \Omega(n)$ and $\E Y(n)^2 = O(1)$
   \[
       \E Z \le n^4 m^4 \left(\frac {\E X^2} {m^2} \right)^4 + n^4 m^3 \frac {\E X} {m} \left(\frac {\E X^2} {m^2} \right)^3  + 
        n^4 m^2  \left(\frac {\E X} {m} \right)^2 \left(\frac {\E X^2} {m^2} \right)^2 = O(1). \qedhere
    \]
%%
%%
%%
%%    What is the probability of the event ${\cal C}(1,2,3,4)$? Clearly, 
%%${\cal C}(1,2,3,4)$ implies at least one of the following events:
%%    \begin{itemize}
%%        \item ${\cal A}_1: $ there are distinct attributes $w_1, w_2, w_3, w_4 \in W$ 
%%such that $w_1 \in S_1 \cap S_2$,
%%$w_2 \in S_2 \cap S_3$, $w_3 \in S_3 \cap S_4$ and $w_4 \in S_1 \cap S_4$;
%%        \item ${\cal A}_2: $ there are distinct $w_1, w_2, w_3 \in W$, 
%%such that $w_1 \in S_1 \cap S_2 \cap S_3$, $w_2 \in S_3 \cap S_4$ and $w_3 \in S_1 \cap S_4$;
%%        \item ${\cal A}_3: $ there are distinct $w_1, w_2, w_3 \in W$, 
%%such that $w_1 \in S_1 \cap S_2$, $w_2 \in S_2 \cap S_3$ and $w_3 \in S_1 \cap S_3 \cap S_4$;
%%        \item ${\cal A}_4: $ there are distinct $w_1, w_2 \in W$, 
%%such that $w_1 \in S_1 \cap S_2 \cap S_3$ and $w_2 \in S_1 \cap S_3 \cap S_4$.
%%    \end{itemize}
%%    Conditioning on $|S_1|, |S_2|, |S_3|, |S_4|$ and using the union bound and independence we obtain, similarly as in Lemma~\ref{lem.PRK4}
%%    \begin{align*}
%%        &\pr ({\cal A}_1) \le (m)_4 \E \frac {(|S_1|)_2 (|S_2|)_2 (|S_3|)_2 (|S_4|)_2} {(m)_2^4} \le \frac { (\E Y^2)^4} {n^4};
%%        \\  &\pr({\cal A}_2) = \pr({\cal A}_3) \le (m)_3 \E \frac {(|S_1|)_2 |S_2| (|S_3|)_2 (|S_4|)_2} { (m)_2^3 m} \le \frac {(\E Y^2)^3 (\E Y)} {m^{0.5} n^{3.5}};
%%    \\  &\pr({\cal A}_4) \le (m)_2 \E \frac{(|S_1|)_2 |S_2| (|S_3|)_2 |S_4|} {(m)_2^2 m^2} \le \frac {(\E Y^2)^2 (\E Y)^2} {m n^3}.
%%    \end{align*}
%%Furthermore, by symmetry and $m = \Omega(n)$, 
%%    \begin{displaymath}
%%    \E X 
%%\le 
%%(n)_4 \left( \pr({\cal A}_1) + \pr({\cal A}_2) + \pr({\cal A}_3) + \pr({\cal A}_4) \right) 
%% =O(1).
%%    \end{displaymath}
\end{proof}


%\bigskip


\begin{proposition}\label{prop.monocliquetime}
    Consider a sequence of
    sparse 
    random intersection graphs $\{G(n)\}$ as in Lemma~\ref{thm.omega.mono}. 
	\textsc{Mono-Clique} can be implemented so that its expected running time on $G(n)$ is $O(n)$.
\end{proposition}

%\medskip 
%Note: here we assume the RAM computational model, see, i.e., \cite{ramram}.
%
%\medskip

\begin{proof}
    Consider the running time of the first loop (i.e., precomputing $D(uv)$ for all edges $uv$).    
    We can assume that each list in the adjacency list structure 
is sorted in increasing order (recall that vertices are elements of  $V=[n]$).
    Otherwise, given $G(n)$, they can be sorted using any standard sorting 
algorithm in time $O(n + \sum_{v \in [n]} D_v^2)$,
    where $D_v = d_{G(n)}(v)$ is the degree of $v$ in $G(n)$.
    The intersection of two lists of lengths $k_1$ and $k_2$ can be found 
in $O(k_1 + k_2)$ time,
    so that expected total time for finding common neighbours  is 
    \[ 
      O\left(n + \E \sum_{uv \in E(G(n))} (D_u + D_v)\right) 
=  
O \left(n + \E \sum_{v \in [n]} D_v^2 \right) = O(n).
    \]
    To see the last bound, notice that the sum of degree squares in a graph counts the number of 2-paths plus the number of edges in the graph.
    The number of 2-paths in $G(n)$ is bounded by the number of copies of hypergraphs $\tilde{H}_1$ and $\tilde{H}_2$ on $\{1,2,3\}$,
    where $\tilde{H}_1$ has edges $\{\{1,2\}, \{2,3\}\}$ and $\tilde{H}_2$ has a single edge $\{1,2,3\}$. Using our notation,
    $\sum_{v \in V} D_v^2 \le Q(\tilde{H}_1, G(n)) + Q(\tilde{H}_2, G(n)) + Q(K_2, G(n))$.
    Applying Lemma~\ref{lem.countbound} and the bound $m = \Omega(n)$, see also \cite{karonski99, kurauskasweak}, %$\E D_1^2 \le (m)_2 (X)_2 (\E X)^2 + (\E X)^3/m^2$.% (\ref{eq.ED2}) in the proof of Lemma~\ref{lem.degvar}.
    \[
        \E \sum_{v \in V} D_v^2 \le \frac {n^3 \E X^2 (\E X)^2} {m^2}  + \frac {n^3 (\E X)^3 } {m^2} + \frac {n^2 (\E X)^2} {m} = O(n).
    \]
    The second loop can be implemented so that the edge $uv$ with the
    next largest value of $D(uv)$ is found at each iteration 
(we avoid sorting the list of edges in advance to keep the running time linear). 
    In this way picking
    the next edge requires at most $c e(G(n))$ steps 
    %where $e(G(n))$ denotes the number of edges of $G(n)$ and 
%and
for a constant $c$.
We recall 
that the number of edges $uv \in E(G)$ with 
$\Gamma(u,v):=\Gamma(u)\cap \Gamma (v)\not=\emptyset$
that fail to induce a clique 
 is at most the number $Z$
 of 
%marked 
cycles considered in the proof of Theorem~\ref{thm.algfinite} above. Therefore,  
 the total number of steps used in picking $D(uv)$ 
in decreasing order
 is at most $c Z e(G(n))$ and
\begin{displaymath}
 Z\,e(G(n))
=
\sum_{(i,j,k,l)}{\mathbb I}_{{\cal C}(i,j,k,l)} e(G(n)).
\end{displaymath}
Now 
%\m{pirma keitimo dalis, praleisti N_1, N_2 yra tinkama. Antra keitimo dalis
%man atrodo nesekminga: jei ivardini, kad naudoji nepriklausomuma, turetum formuleje 
%islaikyti lygybes zenkla, kad skaitytojas suprastu, kaip ir kur tas nepriklausomumas 
%panaudojamas. Nelygybe tapo neakivaizdi, nes paslepti du zingsniai: 
%du kart vertini antra demeni< 2n.   Manau, tai yra bereikalingas skaitytojo apsunkinimas}
\[
 e(G(n))  = \sum_{s<t:\, \{s,t\}\cap \{i,j,k,l\}=\emptyset} {\mathbb I}_{\{s\sim t\}} + \sum_{s<t:\, \{s,t\}\cap \{i,j,k,l\}\ne\emptyset} {\mathbb I}_{\{s\sim t\}}.
\]
Note, that the second sum on the right is at most $4 n$.
Also, if $\{s,t\} \cap \{i,j,k,l\} = \emptyset$, the events $s \sim t$ and ${\cal C}(i,j,k,l)$ are independent, therefore
\begin{align*}
 \E \left( {\mathbb I}_{\cc(i,j,k,l)}  \sum_{s<t:\, \{s,t\}\cap \{i,j,k,l\}=\emptyset} {\mathbb I}_{\{s\sim t\}} \right) 
        &=  \pr(\cc(i,j,k,l))  \sum_{s<t:\, \{s,t\}\cap \{i,j,k,l\}=\emptyset} \pr(s\sim t) 
	\\
 &\le \pr(\cc(i,j,k,l)) \E e(G(n)).
\end{align*}
%${\cal C}(i,j,k,l)$ we obtain
%r\begin{displaymath}
% \E Z\,e(G(n))=\E Z \E N_1+cn\E Z=O(n).
%\end{displaymath}
Finally, invoking
the simple bound 
%\begin{displaymath}
$\E e(G(n))=\tbinom{n}{2}\pr(u\sim v)=O(n)$,
%\end{displaymath}
and  (\ref{eq.Xcyc})  
we get
\begin{displaymath}
	\E Z\,e(G(n)) \le (\E e(G(n)) + 4 n) \sum_{(i,j,k,l)} \pr(\cc(i,j,k,l)) = (\E e(G(n)) + 4 n) \E Z = O(n).
\end{displaymath}



    Now let us estimate the time of the rest of the iteration of the second loop. 
The total expected time to find common neighbours is again $O(n)$, 
so we only consider the time spent for checking if 
$\Gamma(u,v)$ is a clique. We can test if a set $S$ is a clique in time
proportional to 
$e(G(n))$ by scanning the edges incident to vertices in $S$ once and verifying
that the number of neighbours in $S$ for each vertex $v \in S$ is $|S|-1$.
When $S =\Gamma(u, v)$ is not a clique, i.e. $x,y \in S$
and $xy \not \in E(G(n))$, the event ${\cal C}(u, x, v, y)$ holds.
Thus by the previous bound, the total expected time spent in the second loop
is again $O(\E Z\,e(G(n))) = O(n)$.
\end{proof}

\bigskip

%Using a similar argument as in the last proof,
%it can be shown that the expected running time for a random instance of $G(n)$ 
%as in Theorem~\ref{thm.algfinite} is $O(n)$. 
Combining the next lemma with Lemma~\ref{thm.omega.mono}
we can show that \textsc{Mono-Clique} whp finds a clique of size at least $\omega'(G(n))$.

\begin{lemma} \label{lem.twogood}
    Let $\{G(n)\}$ be as in Lemma~\ref{thm.omega.mono} and let $M = M(G(n))$
    be the monochromatic clique of size $\omega'(G(n))$ generated by
    the attribute with the smallest index. %Suppose further $m = o (n^3)$.
    Then
    whp $G(n)$ has an edge $uv$ such that $\{u, v\} \cup (\Gamma(u) \cap \Gamma(v)) = M$.
\end{lemma}

The proof can be found in \cite{kurauskas} p. 93.
%long and uses pairing with a random instance of balls and bins introduced 
%in the proof of Theorem~\ref{thm.omega.mono}, see \cite{kurauskas_phd}.

\medskip

%In fact, sparse random intersection graphs have a simple structure such that
%the
Finally, as an alternative, we show that a popular, robust and simple \textit{clique percolation method} \cite{pdfv2005}
can be used to find all largest cliques in the bounded degree variance case.
Following \cite{pdfv2005},  define a \textit{$k$-clique-community} in a graph $G$ as a union of all $k$-cliques (complete subgraphs of
size $k$) that can be reached from each other through a series of adjacent $k$-cliques, where adjacency means
sharing $k - 1$ vertices. %The 3-clique communities of $G$ form
%a family of pairwise edge-disjoint 2-connected subgraphs of $G$, they partition
%all triangles of $G$, and two triangles sharing an edge belong to the same community.

Call a 3-clique community $H$ of an intersection graph $G=G(V,W)$ \textit{monochromatic} if it
is a subgraph of a monochromatic clique in $G$.

\begin{lemma}\label{lem.perc3struct}
    Let $\{G(n)\}$ be as in Lemma~\ref{thm.omega.mono}.
    The largest 3-clique community which is not monochromatic has size $O_P(1)$.
\end{lemma}

Thus if $\omega'(G(n)) \ge \omega_n$ whp for some $\omega_n \to \infty$, the largest
3-clique communities are the largest cliques of $G(n)$ (which are monochromatic).  
The \textsc{Mono-Clique} algorithm can be used to find them efficiently.

%A similar analysis as in the proof of 
%Theorem~\ref{thm.algfinite} could
%be used to implement an algorithm that finds all 3-clique communities in $G(n)$ in expected linear time.
%It is easy to implement an algorithm that finds all 3-clique communities
%in time proportional to the number of triangles in $G(n)$.
%This number for sparse $G(n)$ with finite degree variance
%is linear in expectation and (for $m=\Theta(n)$) in probability \cite{kurauskasweak}.

\medskip

\begin{proof}[Proof of Lemma~\ref{lem.perc3struct}]
    Let $C$ be a 3-clique community which is not monochromatic. We can assume $|V(C)| \ge 4$.
    Note that each monochromatic clique $T_w$ either shares all of its edges 
    or none of its edges with $C$. By the definition of a 3-clique community and the
    fact that $C$ is not monochromatic, whenever $T_w \subset V(C)$,
    $C$ must contain a triangle on vertices $\{x,y,z\}$
    with $x,y \in T_w$ and $z \not \in T_w$.

    Let $H_1,H_2,H_3$ be as in the proof of Theorem~\ref{thm.algfinite}.
    Consider a vertex $v$ in $C$. First suppose there is $w$ such that $T_w \subset V(C)$, $v \in T_w$ and
    $|T_w| \ge 3$. Let $x,y,z$ be a triangle in $C$ such that $x,y \in T_w$ and $z \in V(C) \setminus T_w$.
    If $v \not \in \{x,y\}$ then $G(n)$ contains a copy of $H_2$ or a copy of $H_3$ on vertices $\{v,x,y,z\}$. 
    If $v \in \{x,y\}$, we can take another vertex $v' \in T_w \setminus \{x,y\}$
    and get a copy of $H_2$ or $H_3$ on $\{v',x,y,z\}$. 

    Now suppose all $T_w$ such that $T_w \subset C$ and $v \in T_w$ have size $2$. Since $|C| \ge 4$, it is
    easy to see that for some $x,y,z \in V(G(n))$ there is a copy of
     $H_1$ or $H_2$ on $\{v, x, y, z\}$.

    For each $v \in V(C)$ we can assign a copy of $H_1$, $H_2$ or $H_3$ containing $v$. Since each copy can be obtained at most 4 times,
    by Markov's inequality and the bounds $\E Q(H_i, G(n)) = O(1)$, $i\in\{1,2,3\}$ shown in the proof of Theorem~\ref{thm.algfinite}
    \[
    |V(C)| \le 4 \left( Q(H_1, G(n)) + Q(H_2, G(n)) + Q(H_3, G(n)) \right) = O_P(1). \qedhere
    \]
\end{proof}

\section{Proof of Lemma~\ref{lem.uintequiv}}


%The distributions $P_n$ with $P_n(\lfloor \sqrt \frac m n) = 1-\frac 1 m$ and $P_n(m) = \frac 1 m$ show that condition $m=\Theta(n)$ in (iii) cannot be replaced by condition $m=\Omega(n)$.
\begin{proof}[Proof of Lemma~\ref{lem.uintequiv}]
Write $X = X(n)$, $Y=Y(n)$, $D_1=D_1(n)$ and assume $X=|S_1|$.
    For arbitrary $n,m \to +\infty$ suppose $\E D_1 = \Omega(1)$ and $D_1$ is uniformly integrable.  
    We claim
    \begin{equation}\label{eq.uintequiv.1}
        \pr(X > 0)  = \Omega(1).
    \end{equation}
    If this did not hold, then there would be a subsequence of $n$, such that for any $C > 0$ we would have $\E D_1 \mathbb{I}_{D_1 \le C} \le C \pr(D_1 > 0) \le C \pr(X > 0) \to 0$.
    %, which implies  $\lim \inf \E D_1 \mathbb{I}_{D_1 \le C} = 0$.
    This contradicts the assumption about $D_1$.
    %Therefore, in contradiction to our assumption, $\lim \inf D_1 \mathbb{I}_{D_1 \le C} = 0$. %This proves (\ref{eq.uintequiv.1}).

    \textit{Proof of (i)}.  For the ``if'' part, 
    suppose $\E Y^k = O(1)$ and $Y^k$ is uniformly integrable.
    We claim that $D_1^k$ must be uniformly integrable.
    If not, from a subsequence of $n$ such that $\E D_1^k \mathbb{I}_{D_1 > \omega_n}$ has a limit in $(0, +\infty]$ with $\omega_n \to +\infty$, 
    pick a subsubsequence $A$, such that $\E Y_n^k \to y > 0$
    and $Y_n^k$ converges weakly to a random variable with mean $y$ as $n\to+\infty$, $n \in A$. For $k=1,2$ and $m=\Omega(n)$,
    using 
    Theorem~2.1 of \cite{bloznelis11}, 
    Lemma~\ref{lem.degrees} and Lemma~\ref{lem.degvar} we get that $D_1^k$ is uniformly integrable on $A$, a contradiction.
    For all $k \ge 1$ and $m=\Theta(n)$, Lemma~4.7 and Theorem~3.1 of \cite{kurauskasweak} similarly show that
    $D_1^k$ is uniformly integrable.  The proof that $\E D_1^k = \Theta(1)$ follows similarly.

    %and the ``subsubsequence principle'' (see, i.e., \cite{jlr}). 
    %This argument actually holds for $m = \Omega(n)$. 

    We will prove the ``only if'' part by contradiction. Suppose $\E D_1^k = \Theta(1)$ and $D_1^k$ is uniformly integrable, but $Y^k$ is not. Then, since $m=\Theta(n)$, for some $\omega_n \to +\infty$ and a subsequence $A$ we have
    \begin{equation}\label{eq.uintequiv.y}
        \lim_{n\to+\infty, n \in A} \E X^k \mathbb{I}_{X \ge \omega_n} > 0.
    \end{equation}
    Below we will take limits over $n\to +\infty$, $n \in A$.
    We can assume $\omega_n = o(\sqrt{m})$. Write $p_0 = \lim \inf \pr(X \in [1,\omega_n))$, and let $B \subseteq A$ be a subsequence that realises the infimum.
        First assume $p_0 = 0$.
        For $n \in B$ by (\ref{eq.uintequiv.1}) we have $\pr(X \ge \omega_n) = \Omega(1)$. 
        %This implies $\lim \inf{n\to\infty,n \in B} (\E X) / \omega_n > 0$. 
        The probability $p(m,s,t)$ that
        two independent random subsets of sizes $s$ and $t$ of $[m]$ intersect satisfies
        \begin{equation}\label{eq.uintequiv.2}
            \frac{st} m \left(1 - \frac {st} m\right)  \le p(m,s,t) \le \frac {st} m.
        \end{equation}
 It follows by monotonicity of $p(m,s,t)$ and the linearity of expectation that
 \[
     \E D_1 \ge (n-1) \pr(X \ge \omega_n)^2 p(m, \omega_n, \omega_n) = \Omega \left(\frac{n \omega_n^2} m\right), 
 \]
 so $\E D_1 \to +\infty$ as $n\to +\infty$, $n \in B$. This is a contradiction, so it must be $p_0 > 0$.   

 Now consider arbitrary $n \in A$.
 Let $T=T(n)$ be the number of sets in $S_2, \dots, S_n$ of $G(n)$ that are non-empty.
 Define $Z = 0.2 n m^{-1} p_0 X$. Let $x$ be any integer with $x \ge \omega_n$
 and $\pr(X=x) > 0$.
 Given $X=x$, let $\tilde D$ have distribution $Bin(\lceil 0.5 p_0 n \rceil, \frac x m)$.  We have $Z < 0.5 \E({\tilde D}|X=x)$ and $\E ({\tilde D}|X=x) \ge 0.5 p_0 n m^{-1} \omega_n \ge c \omega_n$ for a constant $c > 0$ which does not depend on $n$ or $x$. Applying
 a Chernoff bound we get
 \[
     \pr({\tilde D} < Z |X=x) \le e^{-\frac 1 8 \E ({\tilde D}|X=x)} \le e^{-\frac 1 8 c \omega_n}. 
 \]
 Since the probability that a uniformly random subset of $[m]$ of size $X$ intersects a fixed non-empty set is at least $\frac X m$, we get
 \begin{align*}
   & \pr(D_1 \ge Z | X=x) \ge  \pr(D_1 \ge Z | X=x, T \ge 0.5 p_0 n) \pr(T \ge 0.5 p_0 n | X=x)
   \\ & \ge \pr({\tilde D} \ge Z | X=x) \pr(T \ge 0.5 p_0 n)
%\\ &
\ge (1 - e^{- \frac 1 8 c \omega_n}) \pr(T \ge 0.5 p_0 n).
%\end{align*}
\intertext{Therefore}
  %\begin{align*}
    &\E(D_1^k \mathbb{I}_{D_1 \ge 0.2 n m^{-1} p_0 \omega_n})
  %\\ &
  \ge \E \E(D_1^k \mathbb{I}_{D_1 \ge Z} \mathbb{I}_{X \ge \omega_n} | X)
  \\ &\ge \E Z^k \mathbb{I}_{X \ge \omega_n} (1 - e^{- \frac 1 8 c \omega_n}) \pr(T \ge 0.5 p_0 n) = \Theta(\E X^k \mathbb{I}_{X > \omega_n}) =\Omega(1).
 \end{align*}
 This is a contradiction to the fact that $D_1^k$ is uniformly integrable. 
 Thus (\ref{eq.uintequiv.y}) does not hold and $Y^k$ is uniformly integrable. 
 Now the fact that $\E Y^k = \Theta(1)$ follows by contradiction
 as we apply 
 Lemma~4.7 and Theorem~3.1 of Kurauskas \cite{kurauskasweak} (or Lemma~\ref{lem.degrees} and Lemma~\ref{lem.degvar} for $k=1,2$) with $\tilde{Y}=Y\mathbb{I}_{Y < C}$ and a large constant $C$.
 This finishes the proof of (i).

 It is easy to check that the ``only if'' part fails for $n = o(m)$ and $k \ge 1$. To get a counterexample, consider $X$ with
 $\pr(X = m) = (m n)^{-1/2}$ and $\pr(X = \lfloor m^{1/2} n^{-1/2} \rfloor) = 1 - (mn)^{-1/2}$.

    \textit{Proof of (ii)}.  
    %Suppose $\frac n m \to +\infty$ as $n\to +\infty$, $n \in A$ for some subsequence $A$. We further take limits as $n\to+\infty$, $n \in A$.
    %Without loss of generality we can assume $\E D_1 \to d \in (0,\infty)$ as $n\to \infty$, $n \in A$.
    %Denote by $W=W(n)$ the set of attributes of $G(n)$.
    Suppose the contrary, i.e., $D_1$ is uniformly integrable. 
    By (\ref{eq.uintequiv.1}), $\pr(X > 0) \ge a - o(1)$ for some $a > 0$, and by the law of large numbers, there are at least $0.5 a n$ non-empty sets of $G(n)$ whp.  On the latter event, we see that $G(n)$ contains $m$ disjoint cliques that together cover at least $0.5 a n$ vertices (group the non-empty sets of $G(n)$ according to the smallest attribute they contain). Using a standard convexity argument, the number of edges in $G(n)$ is at least
    \[
        %\sum_{w \in W} \binom {|T_w|} 2 \ge
        m \binom { \lfloor \frac {0.5 a n} m \rfloor}  2 = n \Omega \left(\frac n m \right),
    \]
    with probability $1 - o(1)$.
    Thus $\E D_1 = 2 \E e(G(n)) / n \to +\infty$, which contradicts that $\E D_1 = \Theta(1)$.
\end{proof}



\begin{thebibliography}{99}
    \bibitem{albertbarabasi2002} R.~Albert and A.L.~Barab\'asi. \newblock Statistical mechanics of complex networks. \newblock {\em Rev. Mod. Phys.}, 74:47, 2002.

  \bibitem{alon03} N.~Alon, T.~Jiang, Z.~Miller, and D.~Pritikin. 
\newblock Properly coloured subgraphs and rainbow subgraphs in edge-colourings with local constraints. 
\newblock {\em Random Struct. Algor.}, 23:409--433, 2003.
%  \bibitem{alonkim} N. Alon and J. H. Kim, 
%On the degree, size, and chromatic index of a uniform hypergraph, 
%\emph{J. Combin. Theory Ser. A} \textbf {77} (1997), 165--170. %77(1)
%\m{išimta Alon \& Kim}

\bibitem{bbm2009}  J.~Balogh, T.~Bohman, and D.~Mubayi.
\newblock Erd\H{o}s--Ko--Rado in random hypergraphs. 
{\em Comb. Probab.  Comp.}, 
18:629--646, 2009.

  \bibitem{btu09} M.~Behrisch, A.~Taraz, and M.~Ueckerdt. \newblock Colouring random intersection graphs and
      complex networks. \newblock {\em SIAM J. Discrete Math.}, 23:288--299, 2009.

  \bibitem{bianc06} 
G.~Bianconi and M.~Marsili.
\newblock Emergence of large cliques in random scale-free networks. 
\newblock {\em Europhys. Lett.}, 
74:740--746, 2006.
  \bibitem{bloznelis12} 
M.~Bloznelis, J.~Jaworski, and V.~Kurauskas. \newblock
Assortativity and clustering coefficient of sparse random intersection graphs. 
\newblock {\em Electron. J. Probab.},
18(38):1--24, 2013.
  \bibitem{bloznelis11} 
M. Bloznelis. \newblock 
Degree and clustering coefficient in sparse random intersection graphs. \newblock
{\em Ann. Appl. Probab.}, 23(3):1254--1289, 2013.
  \bibitem{bloznelis08} M.~Bloznelis. \newblock Degree distribution of a typical vertex in a general random intersection graph. \newblock {\em Lithuanian Math. J.}, 48:38--45, 2008.
%\bibitem{extended} M. Bloznelis and V. Kurauskas,
%Large cliques in sparse random intersection graphs (extended version), 2013,
%\url{http://web.vu.lt/mif/v.kurauskas/files/2013/09/maxcliqueRIGext.pdf}.

  \bibitem{deijfen} M.~Deijfen and W.~Kets. \newblock Random intersection graphs with tunable degree distribution and clustering. \newblock {\em Probab. Eng. Inf. Sci.}, 23:661--674, 2009.

  \bibitem{galambosseneta} J.~Galambos and E.~Seneta. \newblock Regularly varying sequences. \newblock {\em Proc. Amer. Math. Soc.}, 41:110--116, 1973.

  \bibitem{gho2014} M.M.~Gauy, H.~H\`an, and I.C.~Oliveira. \newblock Erd\H{o}s--Ko--Rado for random hypergraphs: asymptotics and stability. \newblock \arxiv{1409.3634}, 2014.
  
  \bibitem{gj03} E.~Godehardt and J.~Jaworski. \newblock Two models of random intersection graphs for classification. \newblock In {\em Studies in Classification, Data Analysis and Knowledge Organization}, O. Opitz and M. Schwaiger, Eds, Springer, Berlin, 22:67--82, 2003.
 
  \bibitem{infobalt} \url{http://www.infobalt.lt/lt/members/stories/i/7} (in Lithuanian), \newline accessed 2016-06-03.

 \bibitem{jln10}  S.~Janson, T.~{\L}uczak, and I.~Norros. \newblock Large cliques in a power-law random graph. \newblock {\em J. Appl. Probab.}, 47:1124--1135, 2010. 
 
 \bibitem{jlr} S.~Janson, T.~{\L}uczak, and A.~Ruci\'nski. \newblock Random Graphs. \newblock {\em Wiley-Interscience Series
      in Discrete Mathematics and Optimization}, Wiley-Interscience, New York, 2000.
  \bibitem{karonski99}  M.~Karo\'nski, E.R.~Scheinerman, and K.B.~Singer-Cohen. \newblock On random intersection graphs: the subgraph problem. \newblock {\em Comb. Probab. Comp.}, 8:131--159, 1999.
  \bibitem{kolchin} V.~Kolchin, B.~Sevastyanov, and V.~Chistyakov. \newblock Random Allocations. \newblock {\em V.H. Winston and Sons}, Washington D.C., 1978.

  \bibitem{kurauskas} V.~Kurauskas. \newblock On two models of random graphs. \newblock PhD thesis, Vilnius University,
  2013. Available at \url{http://vddb.laba.lt/obj/LT-eLABa-0001:E.02~2013~D_20131216_081822-36288}.
  \bibitem{kurauskasweak} V.~Kurauskas. \newblock On local weak limit and subgraph counts for sparse random graphs. \newblock \arxiv{1504.08103}, 2015.

\bibitem{cmcd98} C.~McDiarmid. \newblock Concentration. \newblock In {\em Probabilistic Methods for Algorithmic Discrete Mathematics}, M. Habib, C. McDiarmid, J. Ramirez-Alfonsin and B. Reed Eds., Springer, New York, 195--248, 1998.
\bibitem{nrs2012} S.~Nikoletseas, C.~Raptopoulos, and P.G.~Spirakis.
\newblock Maximum cliques in graphs with small intersection number and random intersection graphs.
\newblock {\em Mathematical Foundations of Computer Science 2012}, 
Springer Berlin Heidelberg, 728--739, 2012.

\bibitem{pdfv2005} G.~Palla, I.~Der\'{e}nyi, I.~Farkas, and T.~Vicsek. \newblock Uncovering the overlapping community structure of complex networks in nature and society. \newblock{\em Nature}, 435:814--818, 2005. 

%\bibitem{mitzenmacher05} M. Mitzenmacher and E. Upfal, Probability and Computing. Randomized Algorithms and Probabilistic Analysis, \textit{Cambridge University Press}, 2005.
  \bibitem{rybstark2010} K.~Rybarczyk and D.~Stark. \newblock Poisson approximation of the number of cliques in random intersection graphs. \newblock {\em J. Appl. Probab.}, 47:826--840, 2010.





\bibitem{WassermanFaust1994} S.~Wasserman and K.~Faust. 
\newblock Social Network Analysis: Methods and Applications.
\newblock{\em Cambridge University Press}, Cambridge, 1994.



\bibitem{WattsStrogatz1998}
D. J.~Watts and S.~Strogatz. \newblock Collective dynamics of `small-world' networks. \newblock {\em Nature}, 393:440--442, 1998.
 




\end{thebibliography}

\end{document}

